diff --git a/__tests__/badge.test.ts b/__tests__/badge.test.ts
new file mode 100644
index 0000000..69f5740
--- /dev/null
+++ b/__tests__/badge.test.ts
@@ -0,0 +1,65 @@
+import { buildBadgeUrl, buildSvg } from '../src/badge'
+
+describe('buildSvg', () => {
+ it('should return a valid svg', async () => {
+ const name = 'coverage'
+ const upper = 90
+ const lower = 70
+ const percentage = 80.5
+
+ const result = await buildSvg(name, upper, lower, percentage)
+ const expected =
+ ''
+ expect(result).toEqual(expected)
+ })
+})
+
+describe('buildBadgeUrl', () => {
+ it('should return a success badge URL when percentage is greater than or equal to upper', () => {
+ const name = 'coverage'
+ const upper = 90
+ const lower = 70
+ const percentage = 95.5
+
+ const expectedUrl = `http://img.shields.io/badge/coverage-95.50%25-success`
+ const actualUrl = buildBadgeUrl(name, upper, lower, percentage)
+
+ expect(actualUrl).toEqual(expectedUrl)
+ })
+
+ it('should return an important badge URL when percentage is greater than or equal to lower but less than upper', () => {
+ const name = 'coverage'
+ const upper = 90
+ const lower = 70
+ const percentage = 75.5
+
+ const expectedUrl = `http://img.shields.io/badge/coverage-75.50%25-important`
+ const actualUrl = buildBadgeUrl(name, upper, lower, percentage)
+
+ expect(actualUrl).toEqual(expectedUrl)
+ })
+
+ it('should return a critical badge URL when percentage is less than lower', () => {
+ const name = 'coverage'
+ const upper = 90
+ const lower = 70
+ const percentage = 50.5
+
+ const expectedUrl = `http://img.shields.io/badge/coverage-50.50%25-critical`
+ const actualUrl = buildBadgeUrl(name, upper, lower, percentage)
+
+ expect(actualUrl).toEqual(expectedUrl)
+ })
+
+ it('should return a badge URL without name when name is undefined', () => {
+ const name = undefined
+ const upper = 90
+ const lower = 70
+ const percentage = 80.5
+
+ const expectedUrl = `http://img.shields.io/badge/80.50%25-important`
+ const actualUrl = buildBadgeUrl(name, upper, lower, percentage)
+
+ expect(actualUrl).toEqual(expectedUrl)
+ })
+})
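
The tests above pin down the observable contract of `src/badge.ts`: `buildBadgeUrl(name, upper, lower, percentage)` formats the percentage to two decimals, URL-encodes the percent sign, omits the name segment when `name` is undefined, and picks the shields.io color `success`, `important`, or `critical` by comparing the percentage against the two thresholds; `buildSvg` is async and takes the same parameters. The following is only a sketch inferred from those expectations, not the module under test: the `toFixed(2)` formatting, the color ternary, and especially the fetch-based body of `buildSvg` are assumptions.

```ts
// Hypothetical sketch of src/badge.ts, reconstructed from the test expectations above.
export function buildBadgeUrl(
  name: string | undefined,
  upper: number,
  lower: number,
  percentage: number
): string {
  // Color thresholds exercised by the tests: >= upper, >= lower, else critical.
  const color =
    percentage >= upper ? 'success' : percentage >= lower ? 'important' : 'critical'
  // Two decimal places; %25 is the URL-encoded percent sign. Name segment is optional.
  const label = name !== undefined ? `${name}-` : ''
  return `http://img.shields.io/badge/${label}${percentage.toFixed(2)}%25-${color}`
}

export async function buildSvg(
  name: string | undefined,
  upper: number,
  lower: number,
  percentage: number
): Promise<string> {
  // Assumption: the badge SVG is simply fetched from the shields.io URL and returned as text.
  const response = await fetch(buildBadgeUrl(name, upper, lower, percentage))
  return response.text()
}
```

Under that reading, regenerating `badges/coverage.svg` (the next hunk) is just writing the string returned by `buildSvg` for the current coverage percentage to disk.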
diff --git a/badges/coverage.svg b/badges/coverage.svg
index ba87fda..dc1d7e0 100644
--- a/badges/coverage.svg
+++ b/badges/coverage.svg
@@ -1 +1 @@
-
\ No newline at end of file
+
\ No newline at end of file
diff --git a/dist/index.js b/dist/index.js
index e68cc9d..fa17085 100644
--- a/dist/index.js
+++ b/dist/index.js
@@ -10234,29 +10234,6 @@ function regExpEscape (s) {
}
-/***/ }),
-
-/***/ 7760:
-/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
-
-/*! node-domexception. MIT License. Jimmy Wärting */
-
-if (!globalThis.DOMException) {
- try {
- const { MessageChannel } = __nccwpck_require__(1267),
- port = new MessageChannel().port1,
- ab = new ArrayBuffer()
- port.postMessage(ab, [ab, ab])
- } catch (err) {
- err.constructor.name === 'DOMException' && (
- globalThis.DOMException = err.constructor
- )
- }
-}
-
-module.exports = globalThis.DOMException
-
-
/***/ }),
/***/ 1223:
@@ -11461,10075 +11438,3087 @@ exports["default"] = _default;
/***/ }),
-/***/ 1452:
-/***/ (function(__unused_webpack_module, exports) {
+/***/ 4886:
+/***/ ((module) => {
-/**
- * web-streams-polyfill v3.2.1
- */
-(function (global, factory) {
- true ? factory(exports) :
- 0;
-}(this, (function (exports) { 'use strict';
-
- ///
- const SymbolPolyfill = typeof Symbol === 'function' && typeof Symbol.iterator === 'symbol' ?
- Symbol :
- description => `Symbol(${description})`;
-
- ///
- function noop() {
- return undefined;
- }
- function getGlobals() {
- if (typeof self !== 'undefined') {
- return self;
- }
- else if (typeof window !== 'undefined') {
- return window;
- }
- else if (typeof global !== 'undefined') {
- return global;
- }
- return undefined;
- }
- const globals = getGlobals();
+"use strict";
- function typeIsObject(x) {
- return (typeof x === 'object' && x !== null) || typeof x === 'function';
- }
- const rethrowAssertionErrorRejection = noop;
- const originalPromise = Promise;
- const originalPromiseThen = Promise.prototype.then;
- const originalPromiseResolve = Promise.resolve.bind(originalPromise);
- const originalPromiseReject = Promise.reject.bind(originalPromise);
- function newPromise(executor) {
- return new originalPromise(executor);
- }
- function promiseResolvedWith(value) {
- return originalPromiseResolve(value);
- }
- function promiseRejectedWith(reason) {
- return originalPromiseReject(reason);
- }
- function PerformPromiseThen(promise, onFulfilled, onRejected) {
- // There doesn't appear to be any way to correctly emulate the behaviour from JavaScript, so this is just an
- // approximation.
- return originalPromiseThen.call(promise, onFulfilled, onRejected);
- }
- function uponPromise(promise, onFulfilled, onRejected) {
- PerformPromiseThen(PerformPromiseThen(promise, onFulfilled, onRejected), undefined, rethrowAssertionErrorRejection);
- }
- function uponFulfillment(promise, onFulfilled) {
- uponPromise(promise, onFulfilled);
- }
- function uponRejection(promise, onRejected) {
- uponPromise(promise, undefined, onRejected);
- }
- function transformPromiseWith(promise, fulfillmentHandler, rejectionHandler) {
- return PerformPromiseThen(promise, fulfillmentHandler, rejectionHandler);
- }
- function setPromiseIsHandledToTrue(promise) {
- PerformPromiseThen(promise, undefined, rethrowAssertionErrorRejection);
- }
- const queueMicrotask = (() => {
- const globalQueueMicrotask = globals && globals.queueMicrotask;
- if (typeof globalQueueMicrotask === 'function') {
- return globalQueueMicrotask;
- }
- const resolvedPromise = promiseResolvedWith(undefined);
- return (fn) => PerformPromiseThen(resolvedPromise, fn);
- })();
- function reflectCall(F, V, args) {
- if (typeof F !== 'function') {
- throw new TypeError('Argument is not a function');
- }
- return Function.prototype.apply.call(F, V, args);
+var conversions = {};
+module.exports = conversions;
+
+function sign(x) {
+ return x < 0 ? -1 : 1;
+}
+
+function evenRound(x) {
+ // Round x to the nearest integer, choosing the even integer if it lies halfway between two.
+ if ((x % 1) === 0.5 && (x & 1) === 0) { // [even number].5; round down (i.e. floor)
+ return Math.floor(x);
+ } else {
+ return Math.round(x);
}
- function promiseCall(F, V, args) {
- try {
- return promiseResolvedWith(reflectCall(F, V, args));
- }
- catch (value) {
- return promiseRejectedWith(value);
- }
+}
+
+function createNumberConversion(bitLength, typeOpts) {
+ if (!typeOpts.unsigned) {
+ --bitLength;
}
+ const lowerBound = typeOpts.unsigned ? 0 : -Math.pow(2, bitLength);
+ const upperBound = Math.pow(2, bitLength) - 1;
- // Original from Chromium
- // https://chromium.googlesource.com/chromium/src/+/0aee4434a4dba42a42abaea9bfbc0cd196a63bc1/third_party/blink/renderer/core/streams/SimpleQueue.js
- const QUEUE_MAX_ARRAY_SIZE = 16384;
- /**
- * Simple queue structure.
- *
- * Avoids scalability issues with using a packed array directly by using
- * multiple arrays in a linked list and keeping the array size bounded.
- */
- class SimpleQueue {
- constructor() {
- this._cursor = 0;
- this._size = 0;
- // _front and _back are always defined.
- this._front = {
- _elements: [],
- _next: undefined
- };
- this._back = this._front;
- // The cursor is used to avoid calling Array.shift().
- // It contains the index of the front element of the array inside the
- // front-most node. It is always in the range [0, QUEUE_MAX_ARRAY_SIZE).
- this._cursor = 0;
- // When there is only one node, size === elements.length - cursor.
- this._size = 0;
- }
- get length() {
- return this._size;
- }
- // For exception safety, this method is structured in order:
- // 1. Read state
- // 2. Calculate required state mutations
- // 3. Perform state mutations
- push(element) {
- const oldBack = this._back;
- let newBack = oldBack;
- if (oldBack._elements.length === QUEUE_MAX_ARRAY_SIZE - 1) {
- newBack = {
- _elements: [],
- _next: undefined
- };
- }
- // push() is the mutation most likely to throw an exception, so it
- // goes first.
- oldBack._elements.push(element);
- if (newBack !== oldBack) {
- this._back = newBack;
- oldBack._next = newBack;
- }
- ++this._size;
- }
- // Like push(), shift() follows the read -> calculate -> mutate pattern for
- // exception safety.
- shift() { // must not be called on an empty queue
- const oldFront = this._front;
- let newFront = oldFront;
- const oldCursor = this._cursor;
- let newCursor = oldCursor + 1;
- const elements = oldFront._elements;
- const element = elements[oldCursor];
- if (newCursor === QUEUE_MAX_ARRAY_SIZE) {
- newFront = oldFront._next;
- newCursor = 0;
- }
- // No mutations before this point.
- --this._size;
- this._cursor = newCursor;
- if (oldFront !== newFront) {
- this._front = newFront;
+ const moduloVal = typeOpts.moduloBitLength ? Math.pow(2, typeOpts.moduloBitLength) : Math.pow(2, bitLength);
+ const moduloBound = typeOpts.moduloBitLength ? Math.pow(2, typeOpts.moduloBitLength - 1) : Math.pow(2, bitLength - 1);
+
+ return function(V, opts) {
+ if (!opts) opts = {};
+
+ let x = +V;
+
+ if (opts.enforceRange) {
+ if (!Number.isFinite(x)) {
+ throw new TypeError("Argument is not a finite number");
}
- // Permit shifted element to be garbage collected.
- elements[oldCursor] = undefined;
- return element;
- }
- // The tricky thing about forEach() is that it can be called
- // re-entrantly. The queue may be mutated inside the callback. It is easy to
- // see that push() within the callback has no negative effects since the end
- // of the queue is checked for on every iteration. If shift() is called
- // repeatedly within the callback then the next iteration may return an
- // element that has been removed. In this case the callback will be called
- // with undefined values until we either "catch up" with elements that still
- // exist or reach the back of the queue.
- forEach(callback) {
- let i = this._cursor;
- let node = this._front;
- let elements = node._elements;
- while (i !== elements.length || node._next !== undefined) {
- if (i === elements.length) {
- node = node._next;
- elements = node._elements;
- i = 0;
- if (elements.length === 0) {
- break;
- }
- }
- callback(elements[i]);
- ++i;
+
+ x = sign(x) * Math.floor(Math.abs(x));
+ if (x < lowerBound || x > upperBound) {
+ throw new TypeError("Argument is not in byte range");
}
- }
- // Return the element that would be returned if shift() was called now,
- // without modifying the queue.
- peek() { // must not be called on an empty queue
- const front = this._front;
- const cursor = this._cursor;
- return front._elements[cursor];
- }
- }
- function ReadableStreamReaderGenericInitialize(reader, stream) {
- reader._ownerReadableStream = stream;
- stream._reader = reader;
- if (stream._state === 'readable') {
- defaultReaderClosedPromiseInitialize(reader);
- }
- else if (stream._state === 'closed') {
- defaultReaderClosedPromiseInitializeAsResolved(reader);
- }
- else {
- defaultReaderClosedPromiseInitializeAsRejected(reader, stream._storedError);
- }
- }
- // A client of ReadableStreamDefaultReader and ReadableStreamBYOBReader may use these functions directly to bypass state
- // check.
- function ReadableStreamReaderGenericCancel(reader, reason) {
- const stream = reader._ownerReadableStream;
- return ReadableStreamCancel(stream, reason);
- }
- function ReadableStreamReaderGenericRelease(reader) {
- if (reader._ownerReadableStream._state === 'readable') {
- defaultReaderClosedPromiseReject(reader, new TypeError(`Reader was released and can no longer be used to monitor the stream's closedness`));
+ return x;
}
- else {
- defaultReaderClosedPromiseResetToRejected(reader, new TypeError(`Reader was released and can no longer be used to monitor the stream's closedness`));
+
+ if (!isNaN(x) && opts.clamp) {
+ x = evenRound(x);
+
+ if (x < lowerBound) x = lowerBound;
+ if (x > upperBound) x = upperBound;
+ return x;
}
- reader._ownerReadableStream._reader = undefined;
- reader._ownerReadableStream = undefined;
- }
- // Helper functions for the readers.
- function readerLockException(name) {
- return new TypeError('Cannot ' + name + ' a stream using a released reader');
- }
- // Helper functions for the ReadableStreamDefaultReader.
- function defaultReaderClosedPromiseInitialize(reader) {
- reader._closedPromise = newPromise((resolve, reject) => {
- reader._closedPromise_resolve = resolve;
- reader._closedPromise_reject = reject;
- });
- }
- function defaultReaderClosedPromiseInitializeAsRejected(reader, reason) {
- defaultReaderClosedPromiseInitialize(reader);
- defaultReaderClosedPromiseReject(reader, reason);
- }
- function defaultReaderClosedPromiseInitializeAsResolved(reader) {
- defaultReaderClosedPromiseInitialize(reader);
- defaultReaderClosedPromiseResolve(reader);
- }
- function defaultReaderClosedPromiseReject(reader, reason) {
- if (reader._closedPromise_reject === undefined) {
- return;
+
+ if (!Number.isFinite(x) || x === 0) {
+ return 0;
}
- setPromiseIsHandledToTrue(reader._closedPromise);
- reader._closedPromise_reject(reason);
- reader._closedPromise_resolve = undefined;
- reader._closedPromise_reject = undefined;
- }
- function defaultReaderClosedPromiseResetToRejected(reader, reason) {
- defaultReaderClosedPromiseInitializeAsRejected(reader, reason);
- }
- function defaultReaderClosedPromiseResolve(reader) {
- if (reader._closedPromise_resolve === undefined) {
- return;
+
+ x = sign(x) * Math.floor(Math.abs(x));
+ x = x % moduloVal;
+
+ if (!typeOpts.unsigned && x >= moduloBound) {
+ return x - moduloVal;
+ } else if (typeOpts.unsigned) {
+ if (x < 0) {
+ x += moduloVal;
+ } else if (x === -0) { // don't return negative zero
+ return 0;
+ }
}
- reader._closedPromise_resolve(undefined);
- reader._closedPromise_resolve = undefined;
- reader._closedPromise_reject = undefined;
+
+ return x;
}
+}
- const AbortSteps = SymbolPolyfill('[[AbortSteps]]');
- const ErrorSteps = SymbolPolyfill('[[ErrorSteps]]');
- const CancelSteps = SymbolPolyfill('[[CancelSteps]]');
- const PullSteps = SymbolPolyfill('[[PullSteps]]');
+conversions["void"] = function () {
+ return undefined;
+};
- ///
- // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/isFinite#Polyfill
- const NumberIsFinite = Number.isFinite || function (x) {
- return typeof x === 'number' && isFinite(x);
- };
+conversions["boolean"] = function (val) {
+ return !!val;
+};
- ///
- // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/trunc#Polyfill
- const MathTrunc = Math.trunc || function (v) {
- return v < 0 ? Math.ceil(v) : Math.floor(v);
- };
+conversions["byte"] = createNumberConversion(8, { unsigned: false });
+conversions["octet"] = createNumberConversion(8, { unsigned: true });
- // https://heycam.github.io/webidl/#idl-dictionaries
- function isDictionary(x) {
- return typeof x === 'object' || typeof x === 'function';
- }
- function assertDictionary(obj, context) {
- if (obj !== undefined && !isDictionary(obj)) {
- throw new TypeError(`${context} is not an object.`);
- }
- }
- // https://heycam.github.io/webidl/#idl-callback-functions
- function assertFunction(x, context) {
- if (typeof x !== 'function') {
- throw new TypeError(`${context} is not a function.`);
- }
- }
- // https://heycam.github.io/webidl/#idl-object
- function isObject(x) {
- return (typeof x === 'object' && x !== null) || typeof x === 'function';
- }
- function assertObject(x, context) {
- if (!isObject(x)) {
- throw new TypeError(`${context} is not an object.`);
- }
- }
- function assertRequiredArgument(x, position, context) {
- if (x === undefined) {
- throw new TypeError(`Parameter ${position} is required in '${context}'.`);
- }
- }
- function assertRequiredField(x, field, context) {
- if (x === undefined) {
- throw new TypeError(`${field} is required in '${context}'.`);
- }
- }
- // https://heycam.github.io/webidl/#idl-unrestricted-double
- function convertUnrestrictedDouble(value) {
- return Number(value);
+conversions["short"] = createNumberConversion(16, { unsigned: false });
+conversions["unsigned short"] = createNumberConversion(16, { unsigned: true });
+
+conversions["long"] = createNumberConversion(32, { unsigned: false });
+conversions["unsigned long"] = createNumberConversion(32, { unsigned: true });
+
+conversions["long long"] = createNumberConversion(32, { unsigned: false, moduloBitLength: 64 });
+conversions["unsigned long long"] = createNumberConversion(32, { unsigned: true, moduloBitLength: 64 });
+
+conversions["double"] = function (V) {
+ const x = +V;
+
+ if (!Number.isFinite(x)) {
+ throw new TypeError("Argument is not a finite floating-point value");
}
- function censorNegativeZero(x) {
- return x === 0 ? 0 : x;
+
+ return x;
+};
+
+conversions["unrestricted double"] = function (V) {
+ const x = +V;
+
+ if (isNaN(x)) {
+ throw new TypeError("Argument is NaN");
}
- function integerPart(x) {
- return censorNegativeZero(MathTrunc(x));
+
+ return x;
+};
+
+// not quite valid, but good enough for JS
+conversions["float"] = conversions["double"];
+conversions["unrestricted float"] = conversions["unrestricted double"];
+
+conversions["DOMString"] = function (V, opts) {
+ if (!opts) opts = {};
+
+ if (opts.treatNullAsEmptyString && V === null) {
+ return "";
}
- // https://heycam.github.io/webidl/#idl-unsigned-long-long
- function convertUnsignedLongLongWithEnforceRange(value, context) {
- const lowerBound = 0;
- const upperBound = Number.MAX_SAFE_INTEGER;
- let x = Number(value);
- x = censorNegativeZero(x);
- if (!NumberIsFinite(x)) {
- throw new TypeError(`${context} is not a finite number`);
- }
- x = integerPart(x);
- if (x < lowerBound || x > upperBound) {
- throw new TypeError(`${context} is outside the accepted range of ${lowerBound} to ${upperBound}, inclusive`);
- }
- if (!NumberIsFinite(x) || x === 0) {
- return 0;
+
+ return String(V);
+};
+
+conversions["ByteString"] = function (V, opts) {
+ const x = String(V);
+ let c = undefined;
+ for (let i = 0; (c = x.codePointAt(i)) !== undefined; ++i) {
+ if (c > 255) {
+ throw new TypeError("Argument is not a valid bytestring");
}
- // TODO Use BigInt if supported?
- // let xBigInt = BigInt(integerPart(x));
- // xBigInt = BigInt.asUintN(64, xBigInt);
- // return Number(xBigInt);
- return x;
}
- function assertReadableStream(x, context) {
- if (!IsReadableStream(x)) {
- throw new TypeError(`${context} is not a ReadableStream.`);
+ return x;
+};
+
+conversions["USVString"] = function (V) {
+ const S = String(V);
+ const n = S.length;
+ const U = [];
+ for (let i = 0; i < n; ++i) {
+ const c = S.charCodeAt(i);
+ if (c < 0xD800 || c > 0xDFFF) {
+ U.push(String.fromCodePoint(c));
+ } else if (0xDC00 <= c && c <= 0xDFFF) {
+ U.push(String.fromCodePoint(0xFFFD));
+ } else {
+ if (i === n - 1) {
+ U.push(String.fromCodePoint(0xFFFD));
+ } else {
+ const d = S.charCodeAt(i + 1);
+ if (0xDC00 <= d && d <= 0xDFFF) {
+ const a = c & 0x3FF;
+ const b = d & 0x3FF;
+ U.push(String.fromCodePoint((2 << 15) + (2 << 9) * a + b));
+ ++i;
+ } else {
+ U.push(String.fromCodePoint(0xFFFD));
+ }
+ }
}
}
- // Abstract operations for the ReadableStream.
- function AcquireReadableStreamDefaultReader(stream) {
- return new ReadableStreamDefaultReader(stream);
- }
- // ReadableStream API exposed for controllers.
- function ReadableStreamAddReadRequest(stream, readRequest) {
- stream._reader._readRequests.push(readRequest);
+ return U.join('');
+};
+
+conversions["Date"] = function (V, opts) {
+ if (!(V instanceof Date)) {
+ throw new TypeError("Argument is not a Date object");
}
- function ReadableStreamFulfillReadRequest(stream, chunk, done) {
- const reader = stream._reader;
- const readRequest = reader._readRequests.shift();
- if (done) {
- readRequest._closeSteps();
- }
- else {
- readRequest._chunkSteps(chunk);
- }
+ if (isNaN(V)) {
+ return undefined;
}
- function ReadableStreamGetNumReadRequests(stream) {
- return stream._reader._readRequests.length;
+
+ return V;
+};
+
+conversions["RegExp"] = function (V, opts) {
+ if (!(V instanceof RegExp)) {
+ V = new RegExp(V);
}
- function ReadableStreamHasDefaultReader(stream) {
- const reader = stream._reader;
- if (reader === undefined) {
- return false;
- }
- if (!IsReadableStreamDefaultReader(reader)) {
- return false;
- }
- return true;
- }
- /**
- * A default reader vended by a {@link ReadableStream}.
- *
- * @public
- */
- class ReadableStreamDefaultReader {
- constructor(stream) {
- assertRequiredArgument(stream, 1, 'ReadableStreamDefaultReader');
- assertReadableStream(stream, 'First parameter');
- if (IsReadableStreamLocked(stream)) {
- throw new TypeError('This stream has already been locked for exclusive reading by another reader');
- }
- ReadableStreamReaderGenericInitialize(this, stream);
- this._readRequests = new SimpleQueue();
- }
- /**
- * Returns a promise that will be fulfilled when the stream becomes closed,
- * or rejected if the stream ever errors or the reader's lock is released before the stream finishes closing.
- */
- get closed() {
- if (!IsReadableStreamDefaultReader(this)) {
- return promiseRejectedWith(defaultReaderBrandCheckException('closed'));
- }
- return this._closedPromise;
- }
- /**
- * If the reader is active, behaves the same as {@link ReadableStream.cancel | stream.cancel(reason)}.
- */
- cancel(reason = undefined) {
- if (!IsReadableStreamDefaultReader(this)) {
- return promiseRejectedWith(defaultReaderBrandCheckException('cancel'));
- }
- if (this._ownerReadableStream === undefined) {
- return promiseRejectedWith(readerLockException('cancel'));
- }
- return ReadableStreamReaderGenericCancel(this, reason);
- }
- /**
- * Returns a promise that allows access to the next chunk from the stream's internal queue, if available.
- *
- * If reading a chunk causes the queue to become empty, more data will be pulled from the underlying source.
- */
- read() {
- if (!IsReadableStreamDefaultReader(this)) {
- return promiseRejectedWith(defaultReaderBrandCheckException('read'));
- }
- if (this._ownerReadableStream === undefined) {
- return promiseRejectedWith(readerLockException('read from'));
- }
- let resolvePromise;
- let rejectPromise;
- const promise = newPromise((resolve, reject) => {
- resolvePromise = resolve;
- rejectPromise = reject;
- });
- const readRequest = {
- _chunkSteps: chunk => resolvePromise({ value: chunk, done: false }),
- _closeSteps: () => resolvePromise({ value: undefined, done: true }),
- _errorSteps: e => rejectPromise(e)
- };
- ReadableStreamDefaultReaderRead(this, readRequest);
- return promise;
- }
- /**
- * Releases the reader's lock on the corresponding stream. After the lock is released, the reader is no longer active.
- * If the associated stream is errored when the lock is released, the reader will appear errored in the same way
- * from now on; otherwise, the reader will appear closed.
- *
- * A reader's lock cannot be released while it still has a pending read request, i.e., if a promise returned by
- * the reader's {@link ReadableStreamDefaultReader.read | read()} method has not yet been settled. Attempting to
- * do so will throw a `TypeError` and leave the reader locked to the stream.
- */
- releaseLock() {
- if (!IsReadableStreamDefaultReader(this)) {
- throw defaultReaderBrandCheckException('releaseLock');
- }
- if (this._ownerReadableStream === undefined) {
- return;
- }
- if (this._readRequests.length > 0) {
- throw new TypeError('Tried to release a reader lock when that reader has pending read() calls un-settled');
- }
- ReadableStreamReaderGenericRelease(this);
- }
+
+ return V;
+};
+
+
+/***/ }),
+
+/***/ 7537:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+const usm = __nccwpck_require__(2158);
+
+exports.implementation = class URLImpl {
+ constructor(constructorArgs) {
+ const url = constructorArgs[0];
+ const base = constructorArgs[1];
+
+ let parsedBase = null;
+ if (base !== undefined) {
+ parsedBase = usm.basicURLParse(base);
+ if (parsedBase === "failure") {
+ throw new TypeError("Invalid base URL");
+ }
}
- Object.defineProperties(ReadableStreamDefaultReader.prototype, {
- cancel: { enumerable: true },
- read: { enumerable: true },
- releaseLock: { enumerable: true },
- closed: { enumerable: true }
- });
- if (typeof SymbolPolyfill.toStringTag === 'symbol') {
- Object.defineProperty(ReadableStreamDefaultReader.prototype, SymbolPolyfill.toStringTag, {
- value: 'ReadableStreamDefaultReader',
- configurable: true
- });
+
+ const parsedURL = usm.basicURLParse(url, { baseURL: parsedBase });
+ if (parsedURL === "failure") {
+ throw new TypeError("Invalid URL");
}
- // Abstract operations for the readers.
- function IsReadableStreamDefaultReader(x) {
- if (!typeIsObject(x)) {
- return false;
- }
- if (!Object.prototype.hasOwnProperty.call(x, '_readRequests')) {
- return false;
- }
- return x instanceof ReadableStreamDefaultReader;
+
+ this._url = parsedURL;
+
+ // TODO: query stuff
+ }
+
+ get href() {
+ return usm.serializeURL(this._url);
+ }
+
+ set href(v) {
+ const parsedURL = usm.basicURLParse(v);
+ if (parsedURL === "failure") {
+ throw new TypeError("Invalid URL");
}
- function ReadableStreamDefaultReaderRead(reader, readRequest) {
- const stream = reader._ownerReadableStream;
- stream._disturbed = true;
- if (stream._state === 'closed') {
- readRequest._closeSteps();
- }
- else if (stream._state === 'errored') {
- readRequest._errorSteps(stream._storedError);
- }
- else {
- stream._readableStreamController[PullSteps](readRequest);
- }
+
+ this._url = parsedURL;
+ }
+
+ get origin() {
+ return usm.serializeURLOrigin(this._url);
+ }
+
+ get protocol() {
+ return this._url.scheme + ":";
+ }
+
+ set protocol(v) {
+ usm.basicURLParse(v + ":", { url: this._url, stateOverride: "scheme start" });
+ }
+
+ get username() {
+ return this._url.username;
+ }
+
+ set username(v) {
+ if (usm.cannotHaveAUsernamePasswordPort(this._url)) {
+ return;
}
- // Helper functions for the ReadableStreamDefaultReader.
- function defaultReaderBrandCheckException(name) {
- return new TypeError(`ReadableStreamDefaultReader.prototype.${name} can only be used on a ReadableStreamDefaultReader`);
+
+ usm.setTheUsername(this._url, v);
+ }
+
+ get password() {
+ return this._url.password;
+ }
+
+ set password(v) {
+ if (usm.cannotHaveAUsernamePasswordPort(this._url)) {
+ return;
}
- ///
- /* eslint-disable @typescript-eslint/no-empty-function */
- const AsyncIteratorPrototype = Object.getPrototypeOf(Object.getPrototypeOf(async function* () { }).prototype);
+ usm.setThePassword(this._url, v);
+ }
- ///
- class ReadableStreamAsyncIteratorImpl {
- constructor(reader, preventCancel) {
- this._ongoingPromise = undefined;
- this._isFinished = false;
- this._reader = reader;
- this._preventCancel = preventCancel;
- }
- next() {
- const nextSteps = () => this._nextSteps();
- this._ongoingPromise = this._ongoingPromise ?
- transformPromiseWith(this._ongoingPromise, nextSteps, nextSteps) :
- nextSteps();
- return this._ongoingPromise;
- }
- return(value) {
- const returnSteps = () => this._returnSteps(value);
- return this._ongoingPromise ?
- transformPromiseWith(this._ongoingPromise, returnSteps, returnSteps) :
- returnSteps();
- }
- _nextSteps() {
- if (this._isFinished) {
- return Promise.resolve({ value: undefined, done: true });
- }
- const reader = this._reader;
- if (reader._ownerReadableStream === undefined) {
- return promiseRejectedWith(readerLockException('iterate'));
- }
- let resolvePromise;
- let rejectPromise;
- const promise = newPromise((resolve, reject) => {
- resolvePromise = resolve;
- rejectPromise = reject;
- });
- const readRequest = {
- _chunkSteps: chunk => {
- this._ongoingPromise = undefined;
- // This needs to be delayed by one microtask, otherwise we stop pulling too early which breaks a test.
- // FIXME Is this a bug in the specification, or in the test?
- queueMicrotask(() => resolvePromise({ value: chunk, done: false }));
- },
- _closeSteps: () => {
- this._ongoingPromise = undefined;
- this._isFinished = true;
- ReadableStreamReaderGenericRelease(reader);
- resolvePromise({ value: undefined, done: true });
- },
- _errorSteps: reason => {
- this._ongoingPromise = undefined;
- this._isFinished = true;
- ReadableStreamReaderGenericRelease(reader);
- rejectPromise(reason);
- }
- };
- ReadableStreamDefaultReaderRead(reader, readRequest);
- return promise;
- }
- _returnSteps(value) {
- if (this._isFinished) {
- return Promise.resolve({ value, done: true });
- }
- this._isFinished = true;
- const reader = this._reader;
- if (reader._ownerReadableStream === undefined) {
- return promiseRejectedWith(readerLockException('finish iterating'));
- }
- if (!this._preventCancel) {
- const result = ReadableStreamReaderGenericCancel(reader, value);
- ReadableStreamReaderGenericRelease(reader);
- return transformPromiseWith(result, () => ({ value, done: true }));
- }
- ReadableStreamReaderGenericRelease(reader);
- return promiseResolvedWith({ value, done: true });
- }
+ get host() {
+ const url = this._url;
+
+ if (url.host === null) {
+ return "";
}
- const ReadableStreamAsyncIteratorPrototype = {
- next() {
- if (!IsReadableStreamAsyncIterator(this)) {
- return promiseRejectedWith(streamAsyncIteratorBrandCheckException('next'));
- }
- return this._asyncIteratorImpl.next();
- },
- return(value) {
- if (!IsReadableStreamAsyncIterator(this)) {
- return promiseRejectedWith(streamAsyncIteratorBrandCheckException('return'));
- }
- return this._asyncIteratorImpl.return(value);
- }
- };
- if (AsyncIteratorPrototype !== undefined) {
- Object.setPrototypeOf(ReadableStreamAsyncIteratorPrototype, AsyncIteratorPrototype);
- }
- // Abstract operations for the ReadableStream.
- function AcquireReadableStreamAsyncIterator(stream, preventCancel) {
- const reader = AcquireReadableStreamDefaultReader(stream);
- const impl = new ReadableStreamAsyncIteratorImpl(reader, preventCancel);
- const iterator = Object.create(ReadableStreamAsyncIteratorPrototype);
- iterator._asyncIteratorImpl = impl;
- return iterator;
- }
- function IsReadableStreamAsyncIterator(x) {
- if (!typeIsObject(x)) {
- return false;
- }
- if (!Object.prototype.hasOwnProperty.call(x, '_asyncIteratorImpl')) {
- return false;
- }
- try {
- // noinspection SuspiciousTypeOfGuard
- return x._asyncIteratorImpl instanceof
- ReadableStreamAsyncIteratorImpl;
- }
- catch (_a) {
- return false;
- }
+
+ if (url.port === null) {
+ return usm.serializeHost(url.host);
}
- // Helper functions for the ReadableStream.
- function streamAsyncIteratorBrandCheckException(name) {
- return new TypeError(`ReadableStreamAsyncIterator.${name} can only be used on a ReadableSteamAsyncIterator`);
+
+ return usm.serializeHost(url.host) + ":" + usm.serializeInteger(url.port);
+ }
+
+ set host(v) {
+ if (this._url.cannotBeABaseURL) {
+ return;
}
- ///
- // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/isNaN#Polyfill
- const NumberIsNaN = Number.isNaN || function (x) {
- // eslint-disable-next-line no-self-compare
- return x !== x;
- };
+ usm.basicURLParse(v, { url: this._url, stateOverride: "host" });
+ }
- function CreateArrayFromList(elements) {
- // We use arrays to represent lists, so this is basically a no-op.
- // Do a slice though just in case we happen to depend on the unique-ness.
- return elements.slice();
+ get hostname() {
+ if (this._url.host === null) {
+ return "";
}
- function CopyDataBlockBytes(dest, destOffset, src, srcOffset, n) {
- new Uint8Array(dest).set(new Uint8Array(src, srcOffset, n), destOffset);
+
+ return usm.serializeHost(this._url.host);
+ }
+
+ set hostname(v) {
+ if (this._url.cannotBeABaseURL) {
+ return;
}
- // Not implemented correctly
- function TransferArrayBuffer(O) {
- return O;
+
+ usm.basicURLParse(v, { url: this._url, stateOverride: "hostname" });
+ }
+
+ get port() {
+ if (this._url.port === null) {
+ return "";
}
- // Not implemented correctly
- // eslint-disable-next-line @typescript-eslint/no-unused-vars
- function IsDetachedBuffer(O) {
- return false;
+
+ return usm.serializeInteger(this._url.port);
+ }
+
+ set port(v) {
+ if (usm.cannotHaveAUsernamePasswordPort(this._url)) {
+ return;
}
- function ArrayBufferSlice(buffer, begin, end) {
- // ArrayBuffer.prototype.slice is not available on IE10
- // https://www.caniuse.com/mdn-javascript_builtins_arraybuffer_slice
- if (buffer.slice) {
- return buffer.slice(begin, end);
- }
- const length = end - begin;
- const slice = new ArrayBuffer(length);
- CopyDataBlockBytes(slice, 0, buffer, begin, length);
- return slice;
+
+ if (v === "") {
+ this._url.port = null;
+ } else {
+ usm.basicURLParse(v, { url: this._url, stateOverride: "port" });
}
+ }
- function IsNonNegativeNumber(v) {
- if (typeof v !== 'number') {
- return false;
- }
- if (NumberIsNaN(v)) {
- return false;
- }
- if (v < 0) {
- return false;
- }
- return true;
+ get pathname() {
+ if (this._url.cannotBeABaseURL) {
+ return this._url.path[0];
}
- function CloneAsUint8Array(O) {
- const buffer = ArrayBufferSlice(O.buffer, O.byteOffset, O.byteOffset + O.byteLength);
- return new Uint8Array(buffer);
+
+ if (this._url.path.length === 0) {
+ return "";
}
- function DequeueValue(container) {
- const pair = container._queue.shift();
- container._queueTotalSize -= pair.size;
- if (container._queueTotalSize < 0) {
- container._queueTotalSize = 0;
- }
- return pair.value;
+ return "/" + this._url.path.join("/");
+ }
+
+ set pathname(v) {
+ if (this._url.cannotBeABaseURL) {
+ return;
}
- function EnqueueValueWithSize(container, value, size) {
- if (!IsNonNegativeNumber(size) || size === Infinity) {
- throw new RangeError('Size must be a finite, non-NaN, non-negative number.');
- }
- container._queue.push({ value, size });
- container._queueTotalSize += size;
+
+ this._url.path = [];
+ usm.basicURLParse(v, { url: this._url, stateOverride: "path start" });
+ }
+
+ get search() {
+ if (this._url.query === null || this._url.query === "") {
+ return "";
}
- function PeekQueueValue(container) {
- const pair = container._queue.peek();
- return pair.value;
+
+ return "?" + this._url.query;
+ }
+
+ set search(v) {
+ // TODO: query stuff
+
+ const url = this._url;
+
+ if (v === "") {
+ url.query = null;
+ return;
}
- function ResetQueue(container) {
- container._queue = new SimpleQueue();
- container._queueTotalSize = 0;
+
+ const input = v[0] === "?" ? v.substring(1) : v;
+ url.query = "";
+ usm.basicURLParse(input, { url, stateOverride: "query" });
+ }
+
+ get hash() {
+ if (this._url.fragment === null || this._url.fragment === "") {
+ return "";
}
- /**
- * A pull-into request in a {@link ReadableByteStreamController}.
- *
- * @public
- */
- class ReadableStreamBYOBRequest {
- constructor() {
- throw new TypeError('Illegal constructor');
- }
- /**
- * Returns the view for writing in to, or `null` if the BYOB request has already been responded to.
- */
- get view() {
- if (!IsReadableStreamBYOBRequest(this)) {
- throw byobRequestBrandCheckException('view');
- }
- return this._view;
- }
- respond(bytesWritten) {
- if (!IsReadableStreamBYOBRequest(this)) {
- throw byobRequestBrandCheckException('respond');
- }
- assertRequiredArgument(bytesWritten, 1, 'respond');
- bytesWritten = convertUnsignedLongLongWithEnforceRange(bytesWritten, 'First parameter');
- if (this._associatedReadableByteStreamController === undefined) {
- throw new TypeError('This BYOB request has been invalidated');
- }
- if (IsDetachedBuffer(this._view.buffer)) ;
- ReadableByteStreamControllerRespond(this._associatedReadableByteStreamController, bytesWritten);
- }
- respondWithNewView(view) {
- if (!IsReadableStreamBYOBRequest(this)) {
- throw byobRequestBrandCheckException('respondWithNewView');
- }
- assertRequiredArgument(view, 1, 'respondWithNewView');
- if (!ArrayBuffer.isView(view)) {
- throw new TypeError('You can only respond with array buffer views');
- }
- if (this._associatedReadableByteStreamController === undefined) {
- throw new TypeError('This BYOB request has been invalidated');
- }
- if (IsDetachedBuffer(view.buffer)) ;
- ReadableByteStreamControllerRespondWithNewView(this._associatedReadableByteStreamController, view);
- }
- }
- Object.defineProperties(ReadableStreamBYOBRequest.prototype, {
- respond: { enumerable: true },
- respondWithNewView: { enumerable: true },
- view: { enumerable: true }
- });
- if (typeof SymbolPolyfill.toStringTag === 'symbol') {
- Object.defineProperty(ReadableStreamBYOBRequest.prototype, SymbolPolyfill.toStringTag, {
- value: 'ReadableStreamBYOBRequest',
- configurable: true
- });
- }
- /**
- * Allows control of a {@link ReadableStream | readable byte stream}'s state and internal queue.
- *
- * @public
- */
- class ReadableByteStreamController {
- constructor() {
- throw new TypeError('Illegal constructor');
- }
- /**
- * Returns the current BYOB pull request, or `null` if there isn't one.
- */
- get byobRequest() {
- if (!IsReadableByteStreamController(this)) {
- throw byteStreamControllerBrandCheckException('byobRequest');
- }
- return ReadableByteStreamControllerGetBYOBRequest(this);
- }
- /**
- * Returns the desired size to fill the controlled stream's internal queue. It can be negative, if the queue is
- * over-full. An underlying byte source ought to use this information to determine when and how to apply backpressure.
- */
- get desiredSize() {
- if (!IsReadableByteStreamController(this)) {
- throw byteStreamControllerBrandCheckException('desiredSize');
- }
- return ReadableByteStreamControllerGetDesiredSize(this);
- }
- /**
- * Closes the controlled readable stream. Consumers will still be able to read any previously-enqueued chunks from
- * the stream, but once those are read, the stream will become closed.
- */
- close() {
- if (!IsReadableByteStreamController(this)) {
- throw byteStreamControllerBrandCheckException('close');
- }
- if (this._closeRequested) {
- throw new TypeError('The stream has already been closed; do not close it again!');
- }
- const state = this._controlledReadableByteStream._state;
- if (state !== 'readable') {
- throw new TypeError(`The stream (in ${state} state) is not in the readable state and cannot be closed`);
- }
- ReadableByteStreamControllerClose(this);
- }
- enqueue(chunk) {
- if (!IsReadableByteStreamController(this)) {
- throw byteStreamControllerBrandCheckException('enqueue');
- }
- assertRequiredArgument(chunk, 1, 'enqueue');
- if (!ArrayBuffer.isView(chunk)) {
- throw new TypeError('chunk must be an array buffer view');
- }
- if (chunk.byteLength === 0) {
- throw new TypeError('chunk must have non-zero byteLength');
- }
- if (chunk.buffer.byteLength === 0) {
- throw new TypeError(`chunk's buffer must have non-zero byteLength`);
- }
- if (this._closeRequested) {
- throw new TypeError('stream is closed or draining');
- }
- const state = this._controlledReadableByteStream._state;
- if (state !== 'readable') {
- throw new TypeError(`The stream (in ${state} state) is not in the readable state and cannot be enqueued to`);
- }
- ReadableByteStreamControllerEnqueue(this, chunk);
- }
- /**
- * Errors the controlled readable stream, making all future interactions with it fail with the given error `e`.
- */
- error(e = undefined) {
- if (!IsReadableByteStreamController(this)) {
- throw byteStreamControllerBrandCheckException('error');
- }
- ReadableByteStreamControllerError(this, e);
- }
- /** @internal */
- [CancelSteps](reason) {
- ReadableByteStreamControllerClearPendingPullIntos(this);
- ResetQueue(this);
- const result = this._cancelAlgorithm(reason);
- ReadableByteStreamControllerClearAlgorithms(this);
- return result;
- }
- /** @internal */
- [PullSteps](readRequest) {
- const stream = this._controlledReadableByteStream;
- if (this._queueTotalSize > 0) {
- const entry = this._queue.shift();
- this._queueTotalSize -= entry.byteLength;
- ReadableByteStreamControllerHandleQueueDrain(this);
- const view = new Uint8Array(entry.buffer, entry.byteOffset, entry.byteLength);
- readRequest._chunkSteps(view);
- return;
- }
- const autoAllocateChunkSize = this._autoAllocateChunkSize;
- if (autoAllocateChunkSize !== undefined) {
- let buffer;
- try {
- buffer = new ArrayBuffer(autoAllocateChunkSize);
- }
- catch (bufferE) {
- readRequest._errorSteps(bufferE);
- return;
- }
- const pullIntoDescriptor = {
- buffer,
- bufferByteLength: autoAllocateChunkSize,
- byteOffset: 0,
- byteLength: autoAllocateChunkSize,
- bytesFilled: 0,
- elementSize: 1,
- viewConstructor: Uint8Array,
- readerType: 'default'
- };
- this._pendingPullIntos.push(pullIntoDescriptor);
- }
- ReadableStreamAddReadRequest(stream, readRequest);
- ReadableByteStreamControllerCallPullIfNeeded(this);
- }
- }
- Object.defineProperties(ReadableByteStreamController.prototype, {
- close: { enumerable: true },
- enqueue: { enumerable: true },
- error: { enumerable: true },
- byobRequest: { enumerable: true },
- desiredSize: { enumerable: true }
- });
- if (typeof SymbolPolyfill.toStringTag === 'symbol') {
- Object.defineProperty(ReadableByteStreamController.prototype, SymbolPolyfill.toStringTag, {
- value: 'ReadableByteStreamController',
- configurable: true
- });
- }
- // Abstract operations for the ReadableByteStreamController.
- function IsReadableByteStreamController(x) {
- if (!typeIsObject(x)) {
- return false;
- }
- if (!Object.prototype.hasOwnProperty.call(x, '_controlledReadableByteStream')) {
- return false;
- }
- return x instanceof ReadableByteStreamController;
- }
- function IsReadableStreamBYOBRequest(x) {
- if (!typeIsObject(x)) {
- return false;
- }
- if (!Object.prototype.hasOwnProperty.call(x, '_associatedReadableByteStreamController')) {
- return false;
- }
- return x instanceof ReadableStreamBYOBRequest;
- }
- function ReadableByteStreamControllerCallPullIfNeeded(controller) {
- const shouldPull = ReadableByteStreamControllerShouldCallPull(controller);
- if (!shouldPull) {
- return;
- }
- if (controller._pulling) {
- controller._pullAgain = true;
- return;
- }
- controller._pulling = true;
- // TODO: Test controller argument
- const pullPromise = controller._pullAlgorithm();
- uponPromise(pullPromise, () => {
- controller._pulling = false;
- if (controller._pullAgain) {
- controller._pullAgain = false;
- ReadableByteStreamControllerCallPullIfNeeded(controller);
- }
- }, e => {
- ReadableByteStreamControllerError(controller, e);
- });
- }
- function ReadableByteStreamControllerClearPendingPullIntos(controller) {
- ReadableByteStreamControllerInvalidateBYOBRequest(controller);
- controller._pendingPullIntos = new SimpleQueue();
- }
- function ReadableByteStreamControllerCommitPullIntoDescriptor(stream, pullIntoDescriptor) {
- let done = false;
- if (stream._state === 'closed') {
- done = true;
- }
- const filledView = ReadableByteStreamControllerConvertPullIntoDescriptor(pullIntoDescriptor);
- if (pullIntoDescriptor.readerType === 'default') {
- ReadableStreamFulfillReadRequest(stream, filledView, done);
- }
- else {
- ReadableStreamFulfillReadIntoRequest(stream, filledView, done);
- }
- }
- function ReadableByteStreamControllerConvertPullIntoDescriptor(pullIntoDescriptor) {
- const bytesFilled = pullIntoDescriptor.bytesFilled;
- const elementSize = pullIntoDescriptor.elementSize;
- return new pullIntoDescriptor.viewConstructor(pullIntoDescriptor.buffer, pullIntoDescriptor.byteOffset, bytesFilled / elementSize);
- }
- function ReadableByteStreamControllerEnqueueChunkToQueue(controller, buffer, byteOffset, byteLength) {
- controller._queue.push({ buffer, byteOffset, byteLength });
- controller._queueTotalSize += byteLength;
- }
- function ReadableByteStreamControllerFillPullIntoDescriptorFromQueue(controller, pullIntoDescriptor) {
- const elementSize = pullIntoDescriptor.elementSize;
- const currentAlignedBytes = pullIntoDescriptor.bytesFilled - pullIntoDescriptor.bytesFilled % elementSize;
- const maxBytesToCopy = Math.min(controller._queueTotalSize, pullIntoDescriptor.byteLength - pullIntoDescriptor.bytesFilled);
- const maxBytesFilled = pullIntoDescriptor.bytesFilled + maxBytesToCopy;
- const maxAlignedBytes = maxBytesFilled - maxBytesFilled % elementSize;
- let totalBytesToCopyRemaining = maxBytesToCopy;
- let ready = false;
- if (maxAlignedBytes > currentAlignedBytes) {
- totalBytesToCopyRemaining = maxAlignedBytes - pullIntoDescriptor.bytesFilled;
- ready = true;
- }
- const queue = controller._queue;
- while (totalBytesToCopyRemaining > 0) {
- const headOfQueue = queue.peek();
- const bytesToCopy = Math.min(totalBytesToCopyRemaining, headOfQueue.byteLength);
- const destStart = pullIntoDescriptor.byteOffset + pullIntoDescriptor.bytesFilled;
- CopyDataBlockBytes(pullIntoDescriptor.buffer, destStart, headOfQueue.buffer, headOfQueue.byteOffset, bytesToCopy);
- if (headOfQueue.byteLength === bytesToCopy) {
- queue.shift();
- }
- else {
- headOfQueue.byteOffset += bytesToCopy;
- headOfQueue.byteLength -= bytesToCopy;
- }
- controller._queueTotalSize -= bytesToCopy;
- ReadableByteStreamControllerFillHeadPullIntoDescriptor(controller, bytesToCopy, pullIntoDescriptor);
- totalBytesToCopyRemaining -= bytesToCopy;
- }
- return ready;
- }
- function ReadableByteStreamControllerFillHeadPullIntoDescriptor(controller, size, pullIntoDescriptor) {
- pullIntoDescriptor.bytesFilled += size;
- }
- function ReadableByteStreamControllerHandleQueueDrain(controller) {
- if (controller._queueTotalSize === 0 && controller._closeRequested) {
- ReadableByteStreamControllerClearAlgorithms(controller);
- ReadableStreamClose(controller._controlledReadableByteStream);
- }
- else {
- ReadableByteStreamControllerCallPullIfNeeded(controller);
- }
- }
- function ReadableByteStreamControllerInvalidateBYOBRequest(controller) {
- if (controller._byobRequest === null) {
- return;
- }
- controller._byobRequest._associatedReadableByteStreamController = undefined;
- controller._byobRequest._view = null;
- controller._byobRequest = null;
- }
- function ReadableByteStreamControllerProcessPullIntoDescriptorsUsingQueue(controller) {
- while (controller._pendingPullIntos.length > 0) {
- if (controller._queueTotalSize === 0) {
- return;
- }
- const pullIntoDescriptor = controller._pendingPullIntos.peek();
- if (ReadableByteStreamControllerFillPullIntoDescriptorFromQueue(controller, pullIntoDescriptor)) {
- ReadableByteStreamControllerShiftPendingPullInto(controller);
- ReadableByteStreamControllerCommitPullIntoDescriptor(controller._controlledReadableByteStream, pullIntoDescriptor);
- }
- }
- }
- function ReadableByteStreamControllerPullInto(controller, view, readIntoRequest) {
- const stream = controller._controlledReadableByteStream;
- let elementSize = 1;
- if (view.constructor !== DataView) {
- elementSize = view.constructor.BYTES_PER_ELEMENT;
- }
- const ctor = view.constructor;
- // try {
- const buffer = TransferArrayBuffer(view.buffer);
- // } catch (e) {
- // readIntoRequest._errorSteps(e);
- // return;
- // }
- const pullIntoDescriptor = {
- buffer,
- bufferByteLength: buffer.byteLength,
- byteOffset: view.byteOffset,
- byteLength: view.byteLength,
- bytesFilled: 0,
- elementSize,
- viewConstructor: ctor,
- readerType: 'byob'
- };
- if (controller._pendingPullIntos.length > 0) {
- controller._pendingPullIntos.push(pullIntoDescriptor);
- // No ReadableByteStreamControllerCallPullIfNeeded() call since:
- // - No change happens on desiredSize
- // - The source has already been notified of that there's at least 1 pending read(view)
- ReadableStreamAddReadIntoRequest(stream, readIntoRequest);
- return;
- }
- if (stream._state === 'closed') {
- const emptyView = new ctor(pullIntoDescriptor.buffer, pullIntoDescriptor.byteOffset, 0);
- readIntoRequest._closeSteps(emptyView);
- return;
- }
- if (controller._queueTotalSize > 0) {
- if (ReadableByteStreamControllerFillPullIntoDescriptorFromQueue(controller, pullIntoDescriptor)) {
- const filledView = ReadableByteStreamControllerConvertPullIntoDescriptor(pullIntoDescriptor);
- ReadableByteStreamControllerHandleQueueDrain(controller);
- readIntoRequest._chunkSteps(filledView);
- return;
- }
- if (controller._closeRequested) {
- const e = new TypeError('Insufficient bytes to fill elements in the given buffer');
- ReadableByteStreamControllerError(controller, e);
- readIntoRequest._errorSteps(e);
- return;
- }
- }
- controller._pendingPullIntos.push(pullIntoDescriptor);
- ReadableStreamAddReadIntoRequest(stream, readIntoRequest);
- ReadableByteStreamControllerCallPullIfNeeded(controller);
- }
- function ReadableByteStreamControllerRespondInClosedState(controller, firstDescriptor) {
- const stream = controller._controlledReadableByteStream;
- if (ReadableStreamHasBYOBReader(stream)) {
- while (ReadableStreamGetNumReadIntoRequests(stream) > 0) {
- const pullIntoDescriptor = ReadableByteStreamControllerShiftPendingPullInto(controller);
- ReadableByteStreamControllerCommitPullIntoDescriptor(stream, pullIntoDescriptor);
- }
- }
- }
- function ReadableByteStreamControllerRespondInReadableState(controller, bytesWritten, pullIntoDescriptor) {
- ReadableByteStreamControllerFillHeadPullIntoDescriptor(controller, bytesWritten, pullIntoDescriptor);
- if (pullIntoDescriptor.bytesFilled < pullIntoDescriptor.elementSize) {
- return;
- }
- ReadableByteStreamControllerShiftPendingPullInto(controller);
- const remainderSize = pullIntoDescriptor.bytesFilled % pullIntoDescriptor.elementSize;
- if (remainderSize > 0) {
- const end = pullIntoDescriptor.byteOffset + pullIntoDescriptor.bytesFilled;
- const remainder = ArrayBufferSlice(pullIntoDescriptor.buffer, end - remainderSize, end);
- ReadableByteStreamControllerEnqueueChunkToQueue(controller, remainder, 0, remainder.byteLength);
- }
- pullIntoDescriptor.bytesFilled -= remainderSize;
- ReadableByteStreamControllerCommitPullIntoDescriptor(controller._controlledReadableByteStream, pullIntoDescriptor);
- ReadableByteStreamControllerProcessPullIntoDescriptorsUsingQueue(controller);
- }
- function ReadableByteStreamControllerRespondInternal(controller, bytesWritten) {
- const firstDescriptor = controller._pendingPullIntos.peek();
- ReadableByteStreamControllerInvalidateBYOBRequest(controller);
- const state = controller._controlledReadableByteStream._state;
- if (state === 'closed') {
- ReadableByteStreamControllerRespondInClosedState(controller);
- }
- else {
- ReadableByteStreamControllerRespondInReadableState(controller, bytesWritten, firstDescriptor);
- }
- ReadableByteStreamControllerCallPullIfNeeded(controller);
- }
- function ReadableByteStreamControllerShiftPendingPullInto(controller) {
- const descriptor = controller._pendingPullIntos.shift();
- return descriptor;
- }
- function ReadableByteStreamControllerShouldCallPull(controller) {
- const stream = controller._controlledReadableByteStream;
- if (stream._state !== 'readable') {
- return false;
- }
- if (controller._closeRequested) {
- return false;
- }
- if (!controller._started) {
- return false;
- }
- if (ReadableStreamHasDefaultReader(stream) && ReadableStreamGetNumReadRequests(stream) > 0) {
- return true;
- }
- if (ReadableStreamHasBYOBReader(stream) && ReadableStreamGetNumReadIntoRequests(stream) > 0) {
- return true;
- }
- const desiredSize = ReadableByteStreamControllerGetDesiredSize(controller);
- if (desiredSize > 0) {
- return true;
- }
- return false;
- }
- function ReadableByteStreamControllerClearAlgorithms(controller) {
- controller._pullAlgorithm = undefined;
- controller._cancelAlgorithm = undefined;
- }
- // A client of ReadableByteStreamController may use these functions directly to bypass state check.
- function ReadableByteStreamControllerClose(controller) {
- const stream = controller._controlledReadableByteStream;
- if (controller._closeRequested || stream._state !== 'readable') {
- return;
- }
- if (controller._queueTotalSize > 0) {
- controller._closeRequested = true;
- return;
- }
- if (controller._pendingPullIntos.length > 0) {
- const firstPendingPullInto = controller._pendingPullIntos.peek();
- if (firstPendingPullInto.bytesFilled > 0) {
- const e = new TypeError('Insufficient bytes to fill elements in the given buffer');
- ReadableByteStreamControllerError(controller, e);
- throw e;
- }
- }
- ReadableByteStreamControllerClearAlgorithms(controller);
- ReadableStreamClose(stream);
- }
- function ReadableByteStreamControllerEnqueue(controller, chunk) {
- const stream = controller._controlledReadableByteStream;
- if (controller._closeRequested || stream._state !== 'readable') {
- return;
- }
- const buffer = chunk.buffer;
- const byteOffset = chunk.byteOffset;
- const byteLength = chunk.byteLength;
- const transferredBuffer = TransferArrayBuffer(buffer);
- if (controller._pendingPullIntos.length > 0) {
- const firstPendingPullInto = controller._pendingPullIntos.peek();
- if (IsDetachedBuffer(firstPendingPullInto.buffer)) ;
- firstPendingPullInto.buffer = TransferArrayBuffer(firstPendingPullInto.buffer);
- }
- ReadableByteStreamControllerInvalidateBYOBRequest(controller);
- if (ReadableStreamHasDefaultReader(stream)) {
- if (ReadableStreamGetNumReadRequests(stream) === 0) {
- ReadableByteStreamControllerEnqueueChunkToQueue(controller, transferredBuffer, byteOffset, byteLength);
- }
- else {
- if (controller._pendingPullIntos.length > 0) {
- ReadableByteStreamControllerShiftPendingPullInto(controller);
- }
- const transferredView = new Uint8Array(transferredBuffer, byteOffset, byteLength);
- ReadableStreamFulfillReadRequest(stream, transferredView, false);
- }
- }
- else if (ReadableStreamHasBYOBReader(stream)) {
- // TODO: Ideally in this branch detaching should happen only if the buffer is not consumed fully.
- ReadableByteStreamControllerEnqueueChunkToQueue(controller, transferredBuffer, byteOffset, byteLength);
- ReadableByteStreamControllerProcessPullIntoDescriptorsUsingQueue(controller);
- }
- else {
- ReadableByteStreamControllerEnqueueChunkToQueue(controller, transferredBuffer, byteOffset, byteLength);
- }
- ReadableByteStreamControllerCallPullIfNeeded(controller);
- }
- function ReadableByteStreamControllerError(controller, e) {
- const stream = controller._controlledReadableByteStream;
- if (stream._state !== 'readable') {
- return;
- }
- ReadableByteStreamControllerClearPendingPullIntos(controller);
- ResetQueue(controller);
- ReadableByteStreamControllerClearAlgorithms(controller);
- ReadableStreamError(stream, e);
- }
- function ReadableByteStreamControllerGetBYOBRequest(controller) {
- if (controller._byobRequest === null && controller._pendingPullIntos.length > 0) {
- const firstDescriptor = controller._pendingPullIntos.peek();
- const view = new Uint8Array(firstDescriptor.buffer, firstDescriptor.byteOffset + firstDescriptor.bytesFilled, firstDescriptor.byteLength - firstDescriptor.bytesFilled);
- const byobRequest = Object.create(ReadableStreamBYOBRequest.prototype);
- SetUpReadableStreamBYOBRequest(byobRequest, controller, view);
- controller._byobRequest = byobRequest;
- }
- return controller._byobRequest;
- }
- function ReadableByteStreamControllerGetDesiredSize(controller) {
- const state = controller._controlledReadableByteStream._state;
- if (state === 'errored') {
- return null;
- }
- if (state === 'closed') {
- return 0;
- }
- return controller._strategyHWM - controller._queueTotalSize;
- }
- function ReadableByteStreamControllerRespond(controller, bytesWritten) {
- const firstDescriptor = controller._pendingPullIntos.peek();
- const state = controller._controlledReadableByteStream._state;
- if (state === 'closed') {
- if (bytesWritten !== 0) {
- throw new TypeError('bytesWritten must be 0 when calling respond() on a closed stream');
- }
- }
- else {
- if (bytesWritten === 0) {
- throw new TypeError('bytesWritten must be greater than 0 when calling respond() on a readable stream');
- }
- if (firstDescriptor.bytesFilled + bytesWritten > firstDescriptor.byteLength) {
- throw new RangeError('bytesWritten out of range');
- }
- }
- firstDescriptor.buffer = TransferArrayBuffer(firstDescriptor.buffer);
- ReadableByteStreamControllerRespondInternal(controller, bytesWritten);
- }
- function ReadableByteStreamControllerRespondWithNewView(controller, view) {
- const firstDescriptor = controller._pendingPullIntos.peek();
- const state = controller._controlledReadableByteStream._state;
- if (state === 'closed') {
- if (view.byteLength !== 0) {
- throw new TypeError('The view\'s length must be 0 when calling respondWithNewView() on a closed stream');
- }
- }
- else {
- if (view.byteLength === 0) {
- throw new TypeError('The view\'s length must be greater than 0 when calling respondWithNewView() on a readable stream');
- }
- }
- if (firstDescriptor.byteOffset + firstDescriptor.bytesFilled !== view.byteOffset) {
- throw new RangeError('The region specified by view does not match byobRequest');
- }
- if (firstDescriptor.bufferByteLength !== view.buffer.byteLength) {
- throw new RangeError('The buffer of view has different capacity than byobRequest');
- }
- if (firstDescriptor.bytesFilled + view.byteLength > firstDescriptor.byteLength) {
- throw new RangeError('The region specified by view is larger than byobRequest');
- }
- const viewByteLength = view.byteLength;
- firstDescriptor.buffer = TransferArrayBuffer(view.buffer);
- ReadableByteStreamControllerRespondInternal(controller, viewByteLength);
- }
- function SetUpReadableByteStreamController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark, autoAllocateChunkSize) {
- controller._controlledReadableByteStream = stream;
- controller._pullAgain = false;
- controller._pulling = false;
- controller._byobRequest = null;
- // Need to set the slots so that the assert doesn't fire. In the spec the slots already exist implicitly.
- controller._queue = controller._queueTotalSize = undefined;
- ResetQueue(controller);
- controller._closeRequested = false;
- controller._started = false;
- controller._strategyHWM = highWaterMark;
- controller._pullAlgorithm = pullAlgorithm;
- controller._cancelAlgorithm = cancelAlgorithm;
- controller._autoAllocateChunkSize = autoAllocateChunkSize;
- controller._pendingPullIntos = new SimpleQueue();
- stream._readableStreamController = controller;
- const startResult = startAlgorithm();
- uponPromise(promiseResolvedWith(startResult), () => {
- controller._started = true;
- ReadableByteStreamControllerCallPullIfNeeded(controller);
- }, r => {
- ReadableByteStreamControllerError(controller, r);
- });
- }
- function SetUpReadableByteStreamControllerFromUnderlyingSource(stream, underlyingByteSource, highWaterMark) {
- const controller = Object.create(ReadableByteStreamController.prototype);
- let startAlgorithm = () => undefined;
- let pullAlgorithm = () => promiseResolvedWith(undefined);
- let cancelAlgorithm = () => promiseResolvedWith(undefined);
- if (underlyingByteSource.start !== undefined) {
- startAlgorithm = () => underlyingByteSource.start(controller);
- }
- if (underlyingByteSource.pull !== undefined) {
- pullAlgorithm = () => underlyingByteSource.pull(controller);
- }
- if (underlyingByteSource.cancel !== undefined) {
- cancelAlgorithm = reason => underlyingByteSource.cancel(reason);
- }
- const autoAllocateChunkSize = underlyingByteSource.autoAllocateChunkSize;
- if (autoAllocateChunkSize === 0) {
- throw new TypeError('autoAllocateChunkSize must be greater than 0');
- }
- SetUpReadableByteStreamController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark, autoAllocateChunkSize);
- }
- function SetUpReadableStreamBYOBRequest(request, controller, view) {
- request._associatedReadableByteStreamController = controller;
- request._view = view;
- }
- // Helper functions for the ReadableStreamBYOBRequest.
- function byobRequestBrandCheckException(name) {
- return new TypeError(`ReadableStreamBYOBRequest.prototype.${name} can only be used on a ReadableStreamBYOBRequest`);
- }
- // Helper functions for the ReadableByteStreamController.
- function byteStreamControllerBrandCheckException(name) {
- return new TypeError(`ReadableByteStreamController.prototype.${name} can only be used on a ReadableByteStreamController`);
- }
+ return "#" + this._url.fragment;
+ }
- // Abstract operations for the ReadableStream.
- function AcquireReadableStreamBYOBReader(stream) {
- return new ReadableStreamBYOBReader(stream);
- }
- // ReadableStream API exposed for controllers.
- function ReadableStreamAddReadIntoRequest(stream, readIntoRequest) {
- stream._reader._readIntoRequests.push(readIntoRequest);
- }
- function ReadableStreamFulfillReadIntoRequest(stream, chunk, done) {
- const reader = stream._reader;
- const readIntoRequest = reader._readIntoRequests.shift();
- if (done) {
- readIntoRequest._closeSteps(chunk);
- }
- else {
- readIntoRequest._chunkSteps(chunk);
- }
- }
- function ReadableStreamGetNumReadIntoRequests(stream) {
- return stream._reader._readIntoRequests.length;
- }
- function ReadableStreamHasBYOBReader(stream) {
- const reader = stream._reader;
- if (reader === undefined) {
- return false;
- }
- if (!IsReadableStreamBYOBReader(reader)) {
- return false;
- }
- return true;
- }
- /**
- * A BYOB reader vended by a {@link ReadableStream}.
- *
- * @public
- */
- class ReadableStreamBYOBReader {
- constructor(stream) {
- assertRequiredArgument(stream, 1, 'ReadableStreamBYOBReader');
- assertReadableStream(stream, 'First parameter');
- if (IsReadableStreamLocked(stream)) {
- throw new TypeError('This stream has already been locked for exclusive reading by another reader');
- }
- if (!IsReadableByteStreamController(stream._readableStreamController)) {
- throw new TypeError('Cannot construct a ReadableStreamBYOBReader for a stream not constructed with a byte ' +
- 'source');
- }
- ReadableStreamReaderGenericInitialize(this, stream);
- this._readIntoRequests = new SimpleQueue();
- }
- /**
- * Returns a promise that will be fulfilled when the stream becomes closed, or rejected if the stream ever errors or
- * the reader's lock is released before the stream finishes closing.
- */
- get closed() {
- if (!IsReadableStreamBYOBReader(this)) {
- return promiseRejectedWith(byobReaderBrandCheckException('closed'));
- }
- return this._closedPromise;
- }
- /**
- * If the reader is active, behaves the same as {@link ReadableStream.cancel | stream.cancel(reason)}.
- */
- cancel(reason = undefined) {
- if (!IsReadableStreamBYOBReader(this)) {
- return promiseRejectedWith(byobReaderBrandCheckException('cancel'));
- }
- if (this._ownerReadableStream === undefined) {
- return promiseRejectedWith(readerLockException('cancel'));
- }
- return ReadableStreamReaderGenericCancel(this, reason);
- }
- /**
- * Attempts to reads bytes into view, and returns a promise resolved with the result.
- *
- * If reading a chunk causes the queue to become empty, more data will be pulled from the underlying source.
- */
- read(view) {
- if (!IsReadableStreamBYOBReader(this)) {
- return promiseRejectedWith(byobReaderBrandCheckException('read'));
- }
- if (!ArrayBuffer.isView(view)) {
- return promiseRejectedWith(new TypeError('view must be an array buffer view'));
- }
- if (view.byteLength === 0) {
- return promiseRejectedWith(new TypeError('view must have non-zero byteLength'));
- }
- if (view.buffer.byteLength === 0) {
- return promiseRejectedWith(new TypeError(`view's buffer must have non-zero byteLength`));
- }
- if (IsDetachedBuffer(view.buffer)) ;
- if (this._ownerReadableStream === undefined) {
- return promiseRejectedWith(readerLockException('read from'));
- }
- let resolvePromise;
- let rejectPromise;
- const promise = newPromise((resolve, reject) => {
- resolvePromise = resolve;
- rejectPromise = reject;
- });
- const readIntoRequest = {
- _chunkSteps: chunk => resolvePromise({ value: chunk, done: false }),
- _closeSteps: chunk => resolvePromise({ value: chunk, done: true }),
- _errorSteps: e => rejectPromise(e)
- };
- ReadableStreamBYOBReaderRead(this, view, readIntoRequest);
- return promise;
- }
- /**
- * Releases the reader's lock on the corresponding stream. After the lock is released, the reader is no longer active.
- * If the associated stream is errored when the lock is released, the reader will appear errored in the same way
- * from now on; otherwise, the reader will appear closed.
- *
- * A reader's lock cannot be released while it still has a pending read request, i.e., if a promise returned by
- * the reader's {@link ReadableStreamBYOBReader.read | read()} method has not yet been settled. Attempting to
- * do so will throw a `TypeError` and leave the reader locked to the stream.
- */
- releaseLock() {
- if (!IsReadableStreamBYOBReader(this)) {
- throw byobReaderBrandCheckException('releaseLock');
- }
- if (this._ownerReadableStream === undefined) {
- return;
- }
- if (this._readIntoRequests.length > 0) {
- throw new TypeError('Tried to release a reader lock when that reader has pending read() calls un-settled');
- }
- ReadableStreamReaderGenericRelease(this);
- }
- }
- Object.defineProperties(ReadableStreamBYOBReader.prototype, {
- cancel: { enumerable: true },
- read: { enumerable: true },
- releaseLock: { enumerable: true },
- closed: { enumerable: true }
- });
- if (typeof SymbolPolyfill.toStringTag === 'symbol') {
- Object.defineProperty(ReadableStreamBYOBReader.prototype, SymbolPolyfill.toStringTag, {
- value: 'ReadableStreamBYOBReader',
- configurable: true
- });
- }
- // Abstract operations for the readers.
- function IsReadableStreamBYOBReader(x) {
- if (!typeIsObject(x)) {
- return false;
- }
- if (!Object.prototype.hasOwnProperty.call(x, '_readIntoRequests')) {
- return false;
- }
- return x instanceof ReadableStreamBYOBReader;
- }
- function ReadableStreamBYOBReaderRead(reader, view, readIntoRequest) {
- const stream = reader._ownerReadableStream;
- stream._disturbed = true;
- if (stream._state === 'errored') {
- readIntoRequest._errorSteps(stream._storedError);
- }
- else {
- ReadableByteStreamControllerPullInto(stream._readableStreamController, view, readIntoRequest);
- }
- }
- // Helper functions for the ReadableStreamBYOBReader.
- function byobReaderBrandCheckException(name) {
- return new TypeError(`ReadableStreamBYOBReader.prototype.${name} can only be used on a ReadableStreamBYOBReader`);
+ set hash(v) {
+ if (v === "") {
+ this._url.fragment = null;
+ return;
}
- function ExtractHighWaterMark(strategy, defaultHWM) {
- const { highWaterMark } = strategy;
- if (highWaterMark === undefined) {
- return defaultHWM;
- }
- if (NumberIsNaN(highWaterMark) || highWaterMark < 0) {
- throw new RangeError('Invalid highWaterMark');
- }
- return highWaterMark;
- }
- function ExtractSizeAlgorithm(strategy) {
- const { size } = strategy;
- if (!size) {
- return () => 1;
- }
- return size;
- }
+ const input = v[0] === "#" ? v.substring(1) : v;
+ this._url.fragment = "";
+ usm.basicURLParse(input, { url: this._url, stateOverride: "fragment" });
+ }
- function convertQueuingStrategy(init, context) {
- assertDictionary(init, context);
- const highWaterMark = init === null || init === void 0 ? void 0 : init.highWaterMark;
- const size = init === null || init === void 0 ? void 0 : init.size;
- return {
- highWaterMark: highWaterMark === undefined ? undefined : convertUnrestrictedDouble(highWaterMark),
- size: size === undefined ? undefined : convertQueuingStrategySize(size, `${context} has member 'size' that`)
- };
- }
- function convertQueuingStrategySize(fn, context) {
- assertFunction(fn, context);
- return chunk => convertUnrestrictedDouble(fn(chunk));
- }
+ toJSON() {
+ return this.href;
+ }
+};
- function convertUnderlyingSink(original, context) {
- assertDictionary(original, context);
- const abort = original === null || original === void 0 ? void 0 : original.abort;
- const close = original === null || original === void 0 ? void 0 : original.close;
- const start = original === null || original === void 0 ? void 0 : original.start;
- const type = original === null || original === void 0 ? void 0 : original.type;
- const write = original === null || original === void 0 ? void 0 : original.write;
- return {
- abort: abort === undefined ?
- undefined :
- convertUnderlyingSinkAbortCallback(abort, original, `${context} has member 'abort' that`),
- close: close === undefined ?
- undefined :
- convertUnderlyingSinkCloseCallback(close, original, `${context} has member 'close' that`),
- start: start === undefined ?
- undefined :
- convertUnderlyingSinkStartCallback(start, original, `${context} has member 'start' that`),
- write: write === undefined ?
- undefined :
- convertUnderlyingSinkWriteCallback(write, original, `${context} has member 'write' that`),
- type
- };
- }
- function convertUnderlyingSinkAbortCallback(fn, original, context) {
- assertFunction(fn, context);
- return (reason) => promiseCall(fn, original, [reason]);
- }
- function convertUnderlyingSinkCloseCallback(fn, original, context) {
- assertFunction(fn, context);
- return () => promiseCall(fn, original, []);
- }
- function convertUnderlyingSinkStartCallback(fn, original, context) {
- assertFunction(fn, context);
- return (controller) => reflectCall(fn, original, [controller]);
- }
- function convertUnderlyingSinkWriteCallback(fn, original, context) {
- assertFunction(fn, context);
- return (chunk, controller) => promiseCall(fn, original, [chunk, controller]);
- }
- function assertWritableStream(x, context) {
- if (!IsWritableStream(x)) {
- throw new TypeError(`${context} is not a WritableStream.`);
- }
- }
+/***/ }),
- function isAbortSignal(value) {
- if (typeof value !== 'object' || value === null) {
- return false;
- }
- try {
- return typeof value.aborted === 'boolean';
- }
- catch (_a) {
- // AbortSignal.prototype.aborted throws if its brand check fails
- return false;
- }
- }
- const supportsAbortController = typeof AbortController === 'function';
- /**
- * Construct a new AbortController, if supported by the platform.
- *
- * @internal
- */
- function createAbortController() {
- if (supportsAbortController) {
- return new AbortController();
- }
- return undefined;
- }
+/***/ 3394:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
- /**
- * A writable stream represents a destination for data, into which you can write.
- *
- * @public
- */
- class WritableStream {
- constructor(rawUnderlyingSink = {}, rawStrategy = {}) {
- if (rawUnderlyingSink === undefined) {
- rawUnderlyingSink = null;
- }
- else {
- assertObject(rawUnderlyingSink, 'First parameter');
- }
- const strategy = convertQueuingStrategy(rawStrategy, 'Second parameter');
- const underlyingSink = convertUnderlyingSink(rawUnderlyingSink, 'First parameter');
- InitializeWritableStream(this);
- const type = underlyingSink.type;
- if (type !== undefined) {
- throw new RangeError('Invalid type is specified');
- }
- const sizeAlgorithm = ExtractSizeAlgorithm(strategy);
- const highWaterMark = ExtractHighWaterMark(strategy, 1);
- SetUpWritableStreamDefaultControllerFromUnderlyingSink(this, underlyingSink, highWaterMark, sizeAlgorithm);
- }
- /**
- * Returns whether or not the writable stream is locked to a writer.
- */
- get locked() {
- if (!IsWritableStream(this)) {
- throw streamBrandCheckException$2('locked');
- }
- return IsWritableStreamLocked(this);
- }
- /**
- * Aborts the stream, signaling that the producer can no longer successfully write to the stream and it is to be
- * immediately moved to an errored state, with any queued-up writes discarded. This will also execute any abort
- * mechanism of the underlying sink.
- *
- * The returned promise will fulfill if the stream shuts down successfully, or reject if the underlying sink signaled
- * that there was an error doing so. Additionally, it will reject with a `TypeError` (without attempting to cancel
- * the stream) if the stream is currently locked.
- */
- abort(reason = undefined) {
- if (!IsWritableStream(this)) {
- return promiseRejectedWith(streamBrandCheckException$2('abort'));
- }
- if (IsWritableStreamLocked(this)) {
- return promiseRejectedWith(new TypeError('Cannot abort a stream that already has a writer'));
- }
- return WritableStreamAbort(this, reason);
- }
- /**
- * Closes the stream. The underlying sink will finish processing any previously-written chunks, before invoking its
- * close behavior. During this time any further attempts to write will fail (without erroring the stream).
- *
- * The method returns a promise that will fulfill if all remaining chunks are successfully written and the stream
- * successfully closes, or rejects if an error is encountered during this process. Additionally, it will reject with
- * a `TypeError` (without attempting to cancel the stream) if the stream is currently locked.
- */
- close() {
- if (!IsWritableStream(this)) {
- return promiseRejectedWith(streamBrandCheckException$2('close'));
- }
- if (IsWritableStreamLocked(this)) {
- return promiseRejectedWith(new TypeError('Cannot close a stream that already has a writer'));
- }
- if (WritableStreamCloseQueuedOrInFlight(this)) {
- return promiseRejectedWith(new TypeError('Cannot close an already-closing stream'));
- }
- return WritableStreamClose(this);
- }
- /**
- * Creates a {@link WritableStreamDefaultWriter | writer} and locks the stream to the new writer. While the stream
- * is locked, no other writer can be acquired until this one is released.
- *
- * This functionality is especially useful for creating abstractions that desire the ability to write to a stream
- * without interruption or interleaving. By getting a writer for the stream, you can ensure nobody else can write at
- * the same time, which would cause the resulting written data to be unpredictable and probably useless.
- */
- getWriter() {
- if (!IsWritableStream(this)) {
- throw streamBrandCheckException$2('getWriter');
- }
- return AcquireWritableStreamDefaultWriter(this);
- }
- }
- Object.defineProperties(WritableStream.prototype, {
- abort: { enumerable: true },
- close: { enumerable: true },
- getWriter: { enumerable: true },
- locked: { enumerable: true }
- });
- if (typeof SymbolPolyfill.toStringTag === 'symbol') {
- Object.defineProperty(WritableStream.prototype, SymbolPolyfill.toStringTag, {
- value: 'WritableStream',
- configurable: true
- });
- }
- // Abstract operations for the WritableStream.
- function AcquireWritableStreamDefaultWriter(stream) {
- return new WritableStreamDefaultWriter(stream);
- }
- // Throws if and only if startAlgorithm throws.
- function CreateWritableStream(startAlgorithm, writeAlgorithm, closeAlgorithm, abortAlgorithm, highWaterMark = 1, sizeAlgorithm = () => 1) {
- const stream = Object.create(WritableStream.prototype);
- InitializeWritableStream(stream);
- const controller = Object.create(WritableStreamDefaultController.prototype);
- SetUpWritableStreamDefaultController(stream, controller, startAlgorithm, writeAlgorithm, closeAlgorithm, abortAlgorithm, highWaterMark, sizeAlgorithm);
- return stream;
- }
- function InitializeWritableStream(stream) {
- stream._state = 'writable';
- // The error that will be reported by new method calls once the state becomes errored. Only set when [[state]] is
- // 'erroring' or 'errored'. May be set to an undefined value.
- stream._storedError = undefined;
- stream._writer = undefined;
- // Initialize to undefined first because the constructor of the controller checks this
- // variable to validate the caller.
- stream._writableStreamController = undefined;
- // This queue is placed here instead of the writer class in order to allow for passing a writer to the next data
- // producer without waiting for the queued writes to finish.
- stream._writeRequests = new SimpleQueue();
- // Write requests are removed from _writeRequests when write() is called on the underlying sink. This prevents
- // them from being erroneously rejected on error. If a write() call is in-flight, the request is stored here.
- stream._inFlightWriteRequest = undefined;
- // The promise that was returned from writer.close(). Stored here because it may be fulfilled after the writer
- // has been detached.
- stream._closeRequest = undefined;
- // Close request is removed from _closeRequest when close() is called on the underlying sink. This prevents it
- // from being erroneously rejected on error. If a close() call is in-flight, the request is stored here.
- stream._inFlightCloseRequest = undefined;
- // The promise that was returned from writer.abort(). This may also be fulfilled after the writer has detached.
- stream._pendingAbortRequest = undefined;
- // The backpressure signal set by the controller.
- stream._backpressure = false;
- }
- function IsWritableStream(x) {
- if (!typeIsObject(x)) {
- return false;
- }
- if (!Object.prototype.hasOwnProperty.call(x, '_writableStreamController')) {
- return false;
- }
- return x instanceof WritableStream;
- }
- function IsWritableStreamLocked(stream) {
- if (stream._writer === undefined) {
- return false;
- }
- return true;
- }
- function WritableStreamAbort(stream, reason) {
- var _a;
- if (stream._state === 'closed' || stream._state === 'errored') {
- return promiseResolvedWith(undefined);
- }
- stream._writableStreamController._abortReason = reason;
- (_a = stream._writableStreamController._abortController) === null || _a === void 0 ? void 0 : _a.abort();
- // TypeScript narrows the type of `stream._state` down to 'writable' | 'erroring',
- // but it doesn't know that signaling abort runs author code that might have changed the state.
- // Widen the type again by casting to WritableStreamState.
- const state = stream._state;
- if (state === 'closed' || state === 'errored') {
- return promiseResolvedWith(undefined);
- }
- if (stream._pendingAbortRequest !== undefined) {
- return stream._pendingAbortRequest._promise;
- }
- let wasAlreadyErroring = false;
- if (state === 'erroring') {
- wasAlreadyErroring = true;
- // reason will not be used, so don't keep a reference to it.
- reason = undefined;
- }
- const promise = newPromise((resolve, reject) => {
- stream._pendingAbortRequest = {
- _promise: undefined,
- _resolve: resolve,
- _reject: reject,
- _reason: reason,
- _wasAlreadyErroring: wasAlreadyErroring
- };
- });
- stream._pendingAbortRequest._promise = promise;
- if (!wasAlreadyErroring) {
- WritableStreamStartErroring(stream, reason);
- }
- return promise;
- }
- function WritableStreamClose(stream) {
- const state = stream._state;
- if (state === 'closed' || state === 'errored') {
- return promiseRejectedWith(new TypeError(`The stream (in ${state} state) is not in the writable state and cannot be closed`));
- }
- const promise = newPromise((resolve, reject) => {
- const closeRequest = {
- _resolve: resolve,
- _reject: reject
- };
- stream._closeRequest = closeRequest;
- });
- const writer = stream._writer;
- if (writer !== undefined && stream._backpressure && state === 'writable') {
- defaultWriterReadyPromiseResolve(writer);
- }
- WritableStreamDefaultControllerClose(stream._writableStreamController);
- return promise;
- }
- // WritableStream API exposed for controllers.
- function WritableStreamAddWriteRequest(stream) {
- const promise = newPromise((resolve, reject) => {
- const writeRequest = {
- _resolve: resolve,
- _reject: reject
- };
- stream._writeRequests.push(writeRequest);
- });
- return promise;
- }
- function WritableStreamDealWithRejection(stream, error) {
- const state = stream._state;
- if (state === 'writable') {
- WritableStreamStartErroring(stream, error);
- return;
- }
- WritableStreamFinishErroring(stream);
- }
- function WritableStreamStartErroring(stream, reason) {
- const controller = stream._writableStreamController;
- stream._state = 'erroring';
- stream._storedError = reason;
- const writer = stream._writer;
- if (writer !== undefined) {
- WritableStreamDefaultWriterEnsureReadyPromiseRejected(writer, reason);
- }
- if (!WritableStreamHasOperationMarkedInFlight(stream) && controller._started) {
- WritableStreamFinishErroring(stream);
- }
- }
- function WritableStreamFinishErroring(stream) {
- stream._state = 'errored';
- stream._writableStreamController[ErrorSteps]();
- const storedError = stream._storedError;
- stream._writeRequests.forEach(writeRequest => {
- writeRequest._reject(storedError);
- });
- stream._writeRequests = new SimpleQueue();
- if (stream._pendingAbortRequest === undefined) {
- WritableStreamRejectCloseAndClosedPromiseIfNeeded(stream);
- return;
- }
- const abortRequest = stream._pendingAbortRequest;
- stream._pendingAbortRequest = undefined;
- if (abortRequest._wasAlreadyErroring) {
- abortRequest._reject(storedError);
- WritableStreamRejectCloseAndClosedPromiseIfNeeded(stream);
- return;
- }
- const promise = stream._writableStreamController[AbortSteps](abortRequest._reason);
- uponPromise(promise, () => {
- abortRequest._resolve();
- WritableStreamRejectCloseAndClosedPromiseIfNeeded(stream);
- }, (reason) => {
- abortRequest._reject(reason);
- WritableStreamRejectCloseAndClosedPromiseIfNeeded(stream);
- });
- }
- function WritableStreamFinishInFlightWrite(stream) {
- stream._inFlightWriteRequest._resolve(undefined);
- stream._inFlightWriteRequest = undefined;
- }
- function WritableStreamFinishInFlightWriteWithError(stream, error) {
- stream._inFlightWriteRequest._reject(error);
- stream._inFlightWriteRequest = undefined;
- WritableStreamDealWithRejection(stream, error);
- }
- function WritableStreamFinishInFlightClose(stream) {
- stream._inFlightCloseRequest._resolve(undefined);
- stream._inFlightCloseRequest = undefined;
- const state = stream._state;
- if (state === 'erroring') {
- // The error was too late to do anything, so it is ignored.
- stream._storedError = undefined;
- if (stream._pendingAbortRequest !== undefined) {
- stream._pendingAbortRequest._resolve();
- stream._pendingAbortRequest = undefined;
- }
- }
- stream._state = 'closed';
- const writer = stream._writer;
- if (writer !== undefined) {
- defaultWriterClosedPromiseResolve(writer);
- }
- }
- function WritableStreamFinishInFlightCloseWithError(stream, error) {
- stream._inFlightCloseRequest._reject(error);
- stream._inFlightCloseRequest = undefined;
- // Never execute sink abort() after sink close().
- if (stream._pendingAbortRequest !== undefined) {
- stream._pendingAbortRequest._reject(error);
- stream._pendingAbortRequest = undefined;
- }
- WritableStreamDealWithRejection(stream, error);
- }
- // TODO(ricea): Fix alphabetical order.
- function WritableStreamCloseQueuedOrInFlight(stream) {
- if (stream._closeRequest === undefined && stream._inFlightCloseRequest === undefined) {
- return false;
- }
- return true;
- }
- function WritableStreamHasOperationMarkedInFlight(stream) {
- if (stream._inFlightWriteRequest === undefined && stream._inFlightCloseRequest === undefined) {
- return false;
- }
- return true;
- }
- function WritableStreamMarkCloseRequestInFlight(stream) {
- stream._inFlightCloseRequest = stream._closeRequest;
- stream._closeRequest = undefined;
- }
- function WritableStreamMarkFirstWriteRequestInFlight(stream) {
- stream._inFlightWriteRequest = stream._writeRequests.shift();
- }
- function WritableStreamRejectCloseAndClosedPromiseIfNeeded(stream) {
- if (stream._closeRequest !== undefined) {
- stream._closeRequest._reject(stream._storedError);
- stream._closeRequest = undefined;
- }
- const writer = stream._writer;
- if (writer !== undefined) {
- defaultWriterClosedPromiseReject(writer, stream._storedError);
- }
- }
- function WritableStreamUpdateBackpressure(stream, backpressure) {
- const writer = stream._writer;
- if (writer !== undefined && backpressure !== stream._backpressure) {
- if (backpressure) {
- defaultWriterReadyPromiseReset(writer);
- }
- else {
- defaultWriterReadyPromiseResolve(writer);
- }
- }
- stream._backpressure = backpressure;
- }
- /**
- * A default writer vended by a {@link WritableStream}.
- *
- * @public
- */
- class WritableStreamDefaultWriter {
- constructor(stream) {
- assertRequiredArgument(stream, 1, 'WritableStreamDefaultWriter');
- assertWritableStream(stream, 'First parameter');
- if (IsWritableStreamLocked(stream)) {
- throw new TypeError('This stream has already been locked for exclusive writing by another writer');
- }
- this._ownerWritableStream = stream;
- stream._writer = this;
- const state = stream._state;
- if (state === 'writable') {
- if (!WritableStreamCloseQueuedOrInFlight(stream) && stream._backpressure) {
- defaultWriterReadyPromiseInitialize(this);
- }
- else {
- defaultWriterReadyPromiseInitializeAsResolved(this);
- }
- defaultWriterClosedPromiseInitialize(this);
- }
- else if (state === 'erroring') {
- defaultWriterReadyPromiseInitializeAsRejected(this, stream._storedError);
- defaultWriterClosedPromiseInitialize(this);
- }
- else if (state === 'closed') {
- defaultWriterReadyPromiseInitializeAsResolved(this);
- defaultWriterClosedPromiseInitializeAsResolved(this);
- }
- else {
- const storedError = stream._storedError;
- defaultWriterReadyPromiseInitializeAsRejected(this, storedError);
- defaultWriterClosedPromiseInitializeAsRejected(this, storedError);
- }
- }
- /**
- * Returns a promise that will be fulfilled when the stream becomes closed, or rejected if the stream ever errors or
- * the writer’s lock is released before the stream finishes closing.
- */
- get closed() {
- if (!IsWritableStreamDefaultWriter(this)) {
- return promiseRejectedWith(defaultWriterBrandCheckException('closed'));
- }
- return this._closedPromise;
- }
- /**
- * Returns the desired size to fill the stream’s internal queue. It can be negative, if the queue is over-full.
- * A producer can use this information to determine the right amount of data to write.
- *
- * It will be `null` if the stream cannot be successfully written to (due to either being errored, or having an abort
- * queued up). It will return zero if the stream is closed. And the getter will throw an exception if invoked when
- * the writer’s lock is released.
- */
- get desiredSize() {
- if (!IsWritableStreamDefaultWriter(this)) {
- throw defaultWriterBrandCheckException('desiredSize');
- }
- if (this._ownerWritableStream === undefined) {
- throw defaultWriterLockException('desiredSize');
- }
- return WritableStreamDefaultWriterGetDesiredSize(this);
- }
- /**
- * Returns a promise that will be fulfilled when the desired size to fill the stream’s internal queue transitions
- * from non-positive to positive, signaling that it is no longer applying backpressure. Once the desired size dips
- * back to zero or below, the getter will return a new promise that stays pending until the next transition.
- *
- * If the stream becomes errored or aborted, or the writer’s lock is released, the returned promise will become
- * rejected.
- */
- get ready() {
- if (!IsWritableStreamDefaultWriter(this)) {
- return promiseRejectedWith(defaultWriterBrandCheckException('ready'));
- }
- return this._readyPromise;
- }
- /**
- * If the reader is active, behaves the same as {@link WritableStream.abort | stream.abort(reason)}.
- */
- abort(reason = undefined) {
- if (!IsWritableStreamDefaultWriter(this)) {
- return promiseRejectedWith(defaultWriterBrandCheckException('abort'));
- }
- if (this._ownerWritableStream === undefined) {
- return promiseRejectedWith(defaultWriterLockException('abort'));
- }
- return WritableStreamDefaultWriterAbort(this, reason);
- }
- /**
- * If the reader is active, behaves the same as {@link WritableStream.close | stream.close()}.
- */
- close() {
- if (!IsWritableStreamDefaultWriter(this)) {
- return promiseRejectedWith(defaultWriterBrandCheckException('close'));
- }
- const stream = this._ownerWritableStream;
- if (stream === undefined) {
- return promiseRejectedWith(defaultWriterLockException('close'));
- }
- if (WritableStreamCloseQueuedOrInFlight(stream)) {
- return promiseRejectedWith(new TypeError('Cannot close an already-closing stream'));
- }
- return WritableStreamDefaultWriterClose(this);
- }
- /**
- * Releases the writer’s lock on the corresponding stream. After the lock is released, the writer is no longer active.
- * If the associated stream is errored when the lock is released, the writer will appear errored in the same way from
- * now on; otherwise, the writer will appear closed.
- *
- * Note that the lock can still be released even if some ongoing writes have not yet finished (i.e. even if the
- * promises returned from previous calls to {@link WritableStreamDefaultWriter.write | write()} have not yet settled).
- * It’s not necessary to hold the lock on the writer for the duration of the write; the lock instead simply prevents
- * other producers from writing in an interleaved manner.
- */
- releaseLock() {
- if (!IsWritableStreamDefaultWriter(this)) {
- throw defaultWriterBrandCheckException('releaseLock');
- }
- const stream = this._ownerWritableStream;
- if (stream === undefined) {
- return;
- }
- WritableStreamDefaultWriterRelease(this);
- }
- write(chunk = undefined) {
- if (!IsWritableStreamDefaultWriter(this)) {
- return promiseRejectedWith(defaultWriterBrandCheckException('write'));
- }
- if (this._ownerWritableStream === undefined) {
- return promiseRejectedWith(defaultWriterLockException('write to'));
- }
- return WritableStreamDefaultWriterWrite(this, chunk);
- }
- }
- Object.defineProperties(WritableStreamDefaultWriter.prototype, {
- abort: { enumerable: true },
- close: { enumerable: true },
- releaseLock: { enumerable: true },
- write: { enumerable: true },
- closed: { enumerable: true },
- desiredSize: { enumerable: true },
- ready: { enumerable: true }
- });
- if (typeof SymbolPolyfill.toStringTag === 'symbol') {
- Object.defineProperty(WritableStreamDefaultWriter.prototype, SymbolPolyfill.toStringTag, {
- value: 'WritableStreamDefaultWriter',
- configurable: true
- });
- }
- // Abstract operations for the WritableStreamDefaultWriter.
- function IsWritableStreamDefaultWriter(x) {
- if (!typeIsObject(x)) {
- return false;
- }
- if (!Object.prototype.hasOwnProperty.call(x, '_ownerWritableStream')) {
- return false;
- }
- return x instanceof WritableStreamDefaultWriter;
- }
- // A client of WritableStreamDefaultWriter may use these functions directly to bypass state check.
- function WritableStreamDefaultWriterAbort(writer, reason) {
- const stream = writer._ownerWritableStream;
- return WritableStreamAbort(stream, reason);
- }
- function WritableStreamDefaultWriterClose(writer) {
- const stream = writer._ownerWritableStream;
- return WritableStreamClose(stream);
- }
- function WritableStreamDefaultWriterCloseWithErrorPropagation(writer) {
- const stream = writer._ownerWritableStream;
- const state = stream._state;
- if (WritableStreamCloseQueuedOrInFlight(stream) || state === 'closed') {
- return promiseResolvedWith(undefined);
- }
- if (state === 'errored') {
- return promiseRejectedWith(stream._storedError);
- }
- return WritableStreamDefaultWriterClose(writer);
- }
- function WritableStreamDefaultWriterEnsureClosedPromiseRejected(writer, error) {
- if (writer._closedPromiseState === 'pending') {
- defaultWriterClosedPromiseReject(writer, error);
- }
- else {
- defaultWriterClosedPromiseResetToRejected(writer, error);
- }
- }
- function WritableStreamDefaultWriterEnsureReadyPromiseRejected(writer, error) {
- if (writer._readyPromiseState === 'pending') {
- defaultWriterReadyPromiseReject(writer, error);
- }
- else {
- defaultWriterReadyPromiseResetToRejected(writer, error);
- }
- }
- function WritableStreamDefaultWriterGetDesiredSize(writer) {
- const stream = writer._ownerWritableStream;
- const state = stream._state;
- if (state === 'errored' || state === 'erroring') {
- return null;
- }
- if (state === 'closed') {
- return 0;
- }
- return WritableStreamDefaultControllerGetDesiredSize(stream._writableStreamController);
- }
- function WritableStreamDefaultWriterRelease(writer) {
- const stream = writer._ownerWritableStream;
- const releasedError = new TypeError(`Writer was released and can no longer be used to monitor the stream's closedness`);
- WritableStreamDefaultWriterEnsureReadyPromiseRejected(writer, releasedError);
- // The state transitions to "errored" before the sink abort() method runs, but the writer.closed promise is not
- // rejected until afterwards. This means that simply testing state will not work.
- WritableStreamDefaultWriterEnsureClosedPromiseRejected(writer, releasedError);
- stream._writer = undefined;
- writer._ownerWritableStream = undefined;
- }
- function WritableStreamDefaultWriterWrite(writer, chunk) {
- const stream = writer._ownerWritableStream;
- const controller = stream._writableStreamController;
- const chunkSize = WritableStreamDefaultControllerGetChunkSize(controller, chunk);
- if (stream !== writer._ownerWritableStream) {
- return promiseRejectedWith(defaultWriterLockException('write to'));
- }
- const state = stream._state;
- if (state === 'errored') {
- return promiseRejectedWith(stream._storedError);
- }
- if (WritableStreamCloseQueuedOrInFlight(stream) || state === 'closed') {
- return promiseRejectedWith(new TypeError('The stream is closing or closed and cannot be written to'));
- }
- if (state === 'erroring') {
- return promiseRejectedWith(stream._storedError);
- }
- const promise = WritableStreamAddWriteRequest(stream);
- WritableStreamDefaultControllerWrite(controller, chunk, chunkSize);
- return promise;
- }
- const closeSentinel = {};
- /**
- * Allows control of a {@link WritableStream | writable stream}'s state and internal queue.
- *
- * @public
- */
- class WritableStreamDefaultController {
- constructor() {
- throw new TypeError('Illegal constructor');
- }
- /**
- * The reason which was passed to `WritableStream.abort(reason)` when the stream was aborted.
- *
- * @deprecated
- * This property has been removed from the specification, see https://github.com/whatwg/streams/pull/1177.
- * Use {@link WritableStreamDefaultController.signal}'s `reason` instead.
- */
- get abortReason() {
- if (!IsWritableStreamDefaultController(this)) {
- throw defaultControllerBrandCheckException$2('abortReason');
- }
- return this._abortReason;
- }
- /**
- * An `AbortSignal` that can be used to abort the pending write or close operation when the stream is aborted.
- */
- get signal() {
- if (!IsWritableStreamDefaultController(this)) {
- throw defaultControllerBrandCheckException$2('signal');
- }
- if (this._abortController === undefined) {
- // Older browsers or older Node versions may not support `AbortController` or `AbortSignal`.
- // We don't want to bundle and ship an `AbortController` polyfill together with our polyfill,
- // so instead we only implement support for `signal` if we find a global `AbortController` constructor.
- throw new TypeError('WritableStreamDefaultController.prototype.signal is not supported');
- }
- return this._abortController.signal;
- }
- /**
- * Closes the controlled writable stream, making all future interactions with it fail with the given error `e`.
- *
- * This method is rarely used, since usually it suffices to return a rejected promise from one of the underlying
- * sink's methods. However, it can be useful for suddenly shutting down a stream in response to an event outside the
- * normal lifecycle of interactions with the underlying sink.
- */
- error(e = undefined) {
- if (!IsWritableStreamDefaultController(this)) {
- throw defaultControllerBrandCheckException$2('error');
- }
- const state = this._controlledWritableStream._state;
- if (state !== 'writable') {
- // The stream is closed, errored or will be soon. The sink can't do anything useful if it gets an error here, so
- // just treat it as a no-op.
- return;
- }
- WritableStreamDefaultControllerError(this, e);
- }
- /** @internal */
- [AbortSteps](reason) {
- const result = this._abortAlgorithm(reason);
- WritableStreamDefaultControllerClearAlgorithms(this);
- return result;
- }
- /** @internal */
- [ErrorSteps]() {
- ResetQueue(this);
- }
- }
- Object.defineProperties(WritableStreamDefaultController.prototype, {
- abortReason: { enumerable: true },
- signal: { enumerable: true },
- error: { enumerable: true }
- });
- if (typeof SymbolPolyfill.toStringTag === 'symbol') {
- Object.defineProperty(WritableStreamDefaultController.prototype, SymbolPolyfill.toStringTag, {
- value: 'WritableStreamDefaultController',
- configurable: true
- });
- }
- // Abstract operations implementing interface required by the WritableStream.
- function IsWritableStreamDefaultController(x) {
- if (!typeIsObject(x)) {
- return false;
- }
- if (!Object.prototype.hasOwnProperty.call(x, '_controlledWritableStream')) {
- return false;
- }
- return x instanceof WritableStreamDefaultController;
- }
- function SetUpWritableStreamDefaultController(stream, controller, startAlgorithm, writeAlgorithm, closeAlgorithm, abortAlgorithm, highWaterMark, sizeAlgorithm) {
- controller._controlledWritableStream = stream;
- stream._writableStreamController = controller;
- // Need to set the slots so that the assert doesn't fire. In the spec the slots already exist implicitly.
- controller._queue = undefined;
- controller._queueTotalSize = undefined;
- ResetQueue(controller);
- controller._abortReason = undefined;
- controller._abortController = createAbortController();
- controller._started = false;
- controller._strategySizeAlgorithm = sizeAlgorithm;
- controller._strategyHWM = highWaterMark;
- controller._writeAlgorithm = writeAlgorithm;
- controller._closeAlgorithm = closeAlgorithm;
- controller._abortAlgorithm = abortAlgorithm;
- const backpressure = WritableStreamDefaultControllerGetBackpressure(controller);
- WritableStreamUpdateBackpressure(stream, backpressure);
- const startResult = startAlgorithm();
- const startPromise = promiseResolvedWith(startResult);
- uponPromise(startPromise, () => {
- controller._started = true;
- WritableStreamDefaultControllerAdvanceQueueIfNeeded(controller);
- }, r => {
- controller._started = true;
- WritableStreamDealWithRejection(stream, r);
- });
- }
- function SetUpWritableStreamDefaultControllerFromUnderlyingSink(stream, underlyingSink, highWaterMark, sizeAlgorithm) {
- const controller = Object.create(WritableStreamDefaultController.prototype);
- let startAlgorithm = () => undefined;
- let writeAlgorithm = () => promiseResolvedWith(undefined);
- let closeAlgorithm = () => promiseResolvedWith(undefined);
- let abortAlgorithm = () => promiseResolvedWith(undefined);
- if (underlyingSink.start !== undefined) {
- startAlgorithm = () => underlyingSink.start(controller);
- }
- if (underlyingSink.write !== undefined) {
- writeAlgorithm = chunk => underlyingSink.write(chunk, controller);
- }
- if (underlyingSink.close !== undefined) {
- closeAlgorithm = () => underlyingSink.close();
- }
- if (underlyingSink.abort !== undefined) {
- abortAlgorithm = reason => underlyingSink.abort(reason);
- }
- SetUpWritableStreamDefaultController(stream, controller, startAlgorithm, writeAlgorithm, closeAlgorithm, abortAlgorithm, highWaterMark, sizeAlgorithm);
- }
- // ClearAlgorithms may be called twice. Erroring the same stream in multiple ways will often result in redundant calls.
- function WritableStreamDefaultControllerClearAlgorithms(controller) {
- controller._writeAlgorithm = undefined;
- controller._closeAlgorithm = undefined;
- controller._abortAlgorithm = undefined;
- controller._strategySizeAlgorithm = undefined;
- }
- function WritableStreamDefaultControllerClose(controller) {
- EnqueueValueWithSize(controller, closeSentinel, 0);
- WritableStreamDefaultControllerAdvanceQueueIfNeeded(controller);
- }
- function WritableStreamDefaultControllerGetChunkSize(controller, chunk) {
- try {
- return controller._strategySizeAlgorithm(chunk);
- }
- catch (chunkSizeE) {
- WritableStreamDefaultControllerErrorIfNeeded(controller, chunkSizeE);
- return 1;
- }
- }
- function WritableStreamDefaultControllerGetDesiredSize(controller) {
- return controller._strategyHWM - controller._queueTotalSize;
- }
- function WritableStreamDefaultControllerWrite(controller, chunk, chunkSize) {
- try {
- EnqueueValueWithSize(controller, chunk, chunkSize);
- }
- catch (enqueueE) {
- WritableStreamDefaultControllerErrorIfNeeded(controller, enqueueE);
- return;
- }
- const stream = controller._controlledWritableStream;
- if (!WritableStreamCloseQueuedOrInFlight(stream) && stream._state === 'writable') {
- const backpressure = WritableStreamDefaultControllerGetBackpressure(controller);
- WritableStreamUpdateBackpressure(stream, backpressure);
- }
- WritableStreamDefaultControllerAdvanceQueueIfNeeded(controller);
- }
- // Abstract operations for the WritableStreamDefaultController.
- function WritableStreamDefaultControllerAdvanceQueueIfNeeded(controller) {
- const stream = controller._controlledWritableStream;
- if (!controller._started) {
- return;
- }
- if (stream._inFlightWriteRequest !== undefined) {
- return;
- }
- const state = stream._state;
- if (state === 'erroring') {
- WritableStreamFinishErroring(stream);
- return;
- }
- if (controller._queue.length === 0) {
- return;
- }
- const value = PeekQueueValue(controller);
- if (value === closeSentinel) {
- WritableStreamDefaultControllerProcessClose(controller);
- }
- else {
- WritableStreamDefaultControllerProcessWrite(controller, value);
- }
- }
- function WritableStreamDefaultControllerErrorIfNeeded(controller, error) {
- if (controller._controlledWritableStream._state === 'writable') {
- WritableStreamDefaultControllerError(controller, error);
- }
- }
- function WritableStreamDefaultControllerProcessClose(controller) {
- const stream = controller._controlledWritableStream;
- WritableStreamMarkCloseRequestInFlight(stream);
- DequeueValue(controller);
- const sinkClosePromise = controller._closeAlgorithm();
- WritableStreamDefaultControllerClearAlgorithms(controller);
- uponPromise(sinkClosePromise, () => {
- WritableStreamFinishInFlightClose(stream);
- }, reason => {
- WritableStreamFinishInFlightCloseWithError(stream, reason);
- });
- }
- function WritableStreamDefaultControllerProcessWrite(controller, chunk) {
- const stream = controller._controlledWritableStream;
- WritableStreamMarkFirstWriteRequestInFlight(stream);
- const sinkWritePromise = controller._writeAlgorithm(chunk);
- uponPromise(sinkWritePromise, () => {
- WritableStreamFinishInFlightWrite(stream);
- const state = stream._state;
- DequeueValue(controller);
- if (!WritableStreamCloseQueuedOrInFlight(stream) && state === 'writable') {
- const backpressure = WritableStreamDefaultControllerGetBackpressure(controller);
- WritableStreamUpdateBackpressure(stream, backpressure);
- }
- WritableStreamDefaultControllerAdvanceQueueIfNeeded(controller);
- }, reason => {
- if (stream._state === 'writable') {
- WritableStreamDefaultControllerClearAlgorithms(controller);
- }
- WritableStreamFinishInFlightWriteWithError(stream, reason);
- });
- }
- function WritableStreamDefaultControllerGetBackpressure(controller) {
- const desiredSize = WritableStreamDefaultControllerGetDesiredSize(controller);
- return desiredSize <= 0;
- }
- // A client of WritableStreamDefaultController may use these functions directly to bypass state check.
- function WritableStreamDefaultControllerError(controller, error) {
- const stream = controller._controlledWritableStream;
- WritableStreamDefaultControllerClearAlgorithms(controller);
- WritableStreamStartErroring(stream, error);
- }
- // Helper functions for the WritableStream.
- function streamBrandCheckException$2(name) {
- return new TypeError(`WritableStream.prototype.${name} can only be used on a WritableStream`);
- }
- // Helper functions for the WritableStreamDefaultController.
- function defaultControllerBrandCheckException$2(name) {
- return new TypeError(`WritableStreamDefaultController.prototype.${name} can only be used on a WritableStreamDefaultController`);
- }
- // Helper functions for the WritableStreamDefaultWriter.
- function defaultWriterBrandCheckException(name) {
- return new TypeError(`WritableStreamDefaultWriter.prototype.${name} can only be used on a WritableStreamDefaultWriter`);
- }
- function defaultWriterLockException(name) {
- return new TypeError('Cannot ' + name + ' a stream using a released writer');
- }
- function defaultWriterClosedPromiseInitialize(writer) {
- writer._closedPromise = newPromise((resolve, reject) => {
- writer._closedPromise_resolve = resolve;
- writer._closedPromise_reject = reject;
- writer._closedPromiseState = 'pending';
- });
- }
- function defaultWriterClosedPromiseInitializeAsRejected(writer, reason) {
- defaultWriterClosedPromiseInitialize(writer);
- defaultWriterClosedPromiseReject(writer, reason);
- }
- function defaultWriterClosedPromiseInitializeAsResolved(writer) {
- defaultWriterClosedPromiseInitialize(writer);
- defaultWriterClosedPromiseResolve(writer);
- }
- function defaultWriterClosedPromiseReject(writer, reason) {
- if (writer._closedPromise_reject === undefined) {
- return;
- }
- setPromiseIsHandledToTrue(writer._closedPromise);
- writer._closedPromise_reject(reason);
- writer._closedPromise_resolve = undefined;
- writer._closedPromise_reject = undefined;
- writer._closedPromiseState = 'rejected';
- }
- function defaultWriterClosedPromiseResetToRejected(writer, reason) {
- defaultWriterClosedPromiseInitializeAsRejected(writer, reason);
- }
- function defaultWriterClosedPromiseResolve(writer) {
- if (writer._closedPromise_resolve === undefined) {
- return;
- }
- writer._closedPromise_resolve(undefined);
- writer._closedPromise_resolve = undefined;
- writer._closedPromise_reject = undefined;
- writer._closedPromiseState = 'resolved';
- }
- function defaultWriterReadyPromiseInitialize(writer) {
- writer._readyPromise = newPromise((resolve, reject) => {
- writer._readyPromise_resolve = resolve;
- writer._readyPromise_reject = reject;
- });
- writer._readyPromiseState = 'pending';
- }
- function defaultWriterReadyPromiseInitializeAsRejected(writer, reason) {
- defaultWriterReadyPromiseInitialize(writer);
- defaultWriterReadyPromiseReject(writer, reason);
- }
- function defaultWriterReadyPromiseInitializeAsResolved(writer) {
- defaultWriterReadyPromiseInitialize(writer);
- defaultWriterReadyPromiseResolve(writer);
- }
- function defaultWriterReadyPromiseReject(writer, reason) {
- if (writer._readyPromise_reject === undefined) {
- return;
- }
- setPromiseIsHandledToTrue(writer._readyPromise);
- writer._readyPromise_reject(reason);
- writer._readyPromise_resolve = undefined;
- writer._readyPromise_reject = undefined;
- writer._readyPromiseState = 'rejected';
- }
- function defaultWriterReadyPromiseReset(writer) {
- defaultWriterReadyPromiseInitialize(writer);
- }
- function defaultWriterReadyPromiseResetToRejected(writer, reason) {
- defaultWriterReadyPromiseInitializeAsRejected(writer, reason);
- }
- function defaultWriterReadyPromiseResolve(writer) {
- if (writer._readyPromise_resolve === undefined) {
- return;
- }
- writer._readyPromise_resolve(undefined);
- writer._readyPromise_resolve = undefined;
- writer._readyPromise_reject = undefined;
- writer._readyPromiseState = 'fulfilled';
- }
+"use strict";
- ///
- const NativeDOMException = typeof DOMException !== 'undefined' ? DOMException : undefined;
- ///
- function isDOMExceptionConstructor(ctor) {
- if (!(typeof ctor === 'function' || typeof ctor === 'object')) {
- return false;
- }
- try {
- new ctor();
- return true;
- }
- catch (_a) {
- return false;
- }
- }
- function createDOMExceptionPolyfill() {
- // eslint-disable-next-line no-shadow
- const ctor = function DOMException(message, name) {
- this.message = message || '';
- this.name = name || 'Error';
- if (Error.captureStackTrace) {
- Error.captureStackTrace(this, this.constructor);
- }
- };
- ctor.prototype = Object.create(Error.prototype);
- Object.defineProperty(ctor.prototype, 'constructor', { value: ctor, writable: true, configurable: true });
- return ctor;
- }
- // eslint-disable-next-line no-redeclare
- const DOMException$1 = isDOMExceptionConstructor(NativeDOMException) ? NativeDOMException : createDOMExceptionPolyfill();
-
- function ReadableStreamPipeTo(source, dest, preventClose, preventAbort, preventCancel, signal) {
- const reader = AcquireReadableStreamDefaultReader(source);
- const writer = AcquireWritableStreamDefaultWriter(dest);
- source._disturbed = true;
- let shuttingDown = false;
- // This is used to keep track of the spec's requirement that we wait for ongoing writes during shutdown.
- let currentWrite = promiseResolvedWith(undefined);
- return newPromise((resolve, reject) => {
- let abortAlgorithm;
- if (signal !== undefined) {
- abortAlgorithm = () => {
- const error = new DOMException$1('Aborted', 'AbortError');
- const actions = [];
- if (!preventAbort) {
- actions.push(() => {
- if (dest._state === 'writable') {
- return WritableStreamAbort(dest, error);
- }
- return promiseResolvedWith(undefined);
- });
- }
- if (!preventCancel) {
- actions.push(() => {
- if (source._state === 'readable') {
- return ReadableStreamCancel(source, error);
- }
- return promiseResolvedWith(undefined);
- });
- }
- shutdownWithAction(() => Promise.all(actions.map(action => action())), true, error);
- };
- if (signal.aborted) {
- abortAlgorithm();
- return;
- }
- signal.addEventListener('abort', abortAlgorithm);
- }
- // Using reader and writer, read all chunks from this and write them to dest
- // - Backpressure must be enforced
- // - Shutdown must stop all activity
- function pipeLoop() {
- return newPromise((resolveLoop, rejectLoop) => {
- function next(done) {
- if (done) {
- resolveLoop();
- }
- else {
- // Use `PerformPromiseThen` instead of `uponPromise` to avoid
- // adding unnecessary `.catch(rethrowAssertionErrorRejection)` handlers
- PerformPromiseThen(pipeStep(), next, rejectLoop);
- }
- }
- next(false);
- });
- }
- function pipeStep() {
- if (shuttingDown) {
- return promiseResolvedWith(true);
- }
- return PerformPromiseThen(writer._readyPromise, () => {
- return newPromise((resolveRead, rejectRead) => {
- ReadableStreamDefaultReaderRead(reader, {
- _chunkSteps: chunk => {
- currentWrite = PerformPromiseThen(WritableStreamDefaultWriterWrite(writer, chunk), undefined, noop);
- resolveRead(false);
- },
- _closeSteps: () => resolveRead(true),
- _errorSteps: rejectRead
- });
- });
- });
- }
- // Errors must be propagated forward
- isOrBecomesErrored(source, reader._closedPromise, storedError => {
- if (!preventAbort) {
- shutdownWithAction(() => WritableStreamAbort(dest, storedError), true, storedError);
- }
- else {
- shutdown(true, storedError);
- }
- });
- // Errors must be propagated backward
- isOrBecomesErrored(dest, writer._closedPromise, storedError => {
- if (!preventCancel) {
- shutdownWithAction(() => ReadableStreamCancel(source, storedError), true, storedError);
- }
- else {
- shutdown(true, storedError);
- }
- });
- // Closing must be propagated forward
- isOrBecomesClosed(source, reader._closedPromise, () => {
- if (!preventClose) {
- shutdownWithAction(() => WritableStreamDefaultWriterCloseWithErrorPropagation(writer));
- }
- else {
- shutdown();
- }
- });
- // Closing must be propagated backward
- if (WritableStreamCloseQueuedOrInFlight(dest) || dest._state === 'closed') {
- const destClosed = new TypeError('the destination writable stream closed before all data could be piped to it');
- if (!preventCancel) {
- shutdownWithAction(() => ReadableStreamCancel(source, destClosed), true, destClosed);
- }
- else {
- shutdown(true, destClosed);
- }
- }
- setPromiseIsHandledToTrue(pipeLoop());
- function waitForWritesToFinish() {
- // Another write may have started while we were waiting on this currentWrite, so we have to be sure to wait
- // for that too.
- const oldCurrentWrite = currentWrite;
- return PerformPromiseThen(currentWrite, () => oldCurrentWrite !== currentWrite ? waitForWritesToFinish() : undefined);
- }
- function isOrBecomesErrored(stream, promise, action) {
- if (stream._state === 'errored') {
- action(stream._storedError);
- }
- else {
- uponRejection(promise, action);
- }
- }
- function isOrBecomesClosed(stream, promise, action) {
- if (stream._state === 'closed') {
- action();
- }
- else {
- uponFulfillment(promise, action);
- }
- }
- function shutdownWithAction(action, originalIsError, originalError) {
- if (shuttingDown) {
- return;
- }
- shuttingDown = true;
- if (dest._state === 'writable' && !WritableStreamCloseQueuedOrInFlight(dest)) {
- uponFulfillment(waitForWritesToFinish(), doTheRest);
- }
- else {
- doTheRest();
- }
- function doTheRest() {
- uponPromise(action(), () => finalize(originalIsError, originalError), newError => finalize(true, newError));
- }
- }
- function shutdown(isError, error) {
- if (shuttingDown) {
- return;
- }
- shuttingDown = true;
- if (dest._state === 'writable' && !WritableStreamCloseQueuedOrInFlight(dest)) {
- uponFulfillment(waitForWritesToFinish(), () => finalize(isError, error));
- }
- else {
- finalize(isError, error);
- }
- }
- function finalize(isError, error) {
- WritableStreamDefaultWriterRelease(writer);
- ReadableStreamReaderGenericRelease(reader);
- if (signal !== undefined) {
- signal.removeEventListener('abort', abortAlgorithm);
- }
- if (isError) {
- reject(error);
- }
- else {
- resolve(undefined);
- }
- }
- });
- }
+const conversions = __nccwpck_require__(4886);
+const utils = __nccwpck_require__(3185);
+const Impl = __nccwpck_require__(7537);
- /**
- * Allows control of a {@link ReadableStream | readable stream}'s state and internal queue.
- *
- * @public
- */
- class ReadableStreamDefaultController {
- constructor() {
- throw new TypeError('Illegal constructor');
- }
- /**
- * Returns the desired size to fill the controlled stream's internal queue. It can be negative, if the queue is
- * over-full. An underlying source ought to use this information to determine when and how to apply backpressure.
- */
- get desiredSize() {
- if (!IsReadableStreamDefaultController(this)) {
- throw defaultControllerBrandCheckException$1('desiredSize');
- }
- return ReadableStreamDefaultControllerGetDesiredSize(this);
- }
- /**
- * Closes the controlled readable stream. Consumers will still be able to read any previously-enqueued chunks from
- * the stream, but once those are read, the stream will become closed.
- */
- close() {
- if (!IsReadableStreamDefaultController(this)) {
- throw defaultControllerBrandCheckException$1('close');
- }
- if (!ReadableStreamDefaultControllerCanCloseOrEnqueue(this)) {
- throw new TypeError('The stream is not in a state that permits close');
- }
- ReadableStreamDefaultControllerClose(this);
- }
- enqueue(chunk = undefined) {
- if (!IsReadableStreamDefaultController(this)) {
- throw defaultControllerBrandCheckException$1('enqueue');
- }
- if (!ReadableStreamDefaultControllerCanCloseOrEnqueue(this)) {
- throw new TypeError('The stream is not in a state that permits enqueue');
- }
- return ReadableStreamDefaultControllerEnqueue(this, chunk);
- }
- /**
- * Errors the controlled readable stream, making all future interactions with it fail with the given error `e`.
- */
- error(e = undefined) {
- if (!IsReadableStreamDefaultController(this)) {
- throw defaultControllerBrandCheckException$1('error');
- }
- ReadableStreamDefaultControllerError(this, e);
- }
- /** @internal */
- [CancelSteps](reason) {
- ResetQueue(this);
- const result = this._cancelAlgorithm(reason);
- ReadableStreamDefaultControllerClearAlgorithms(this);
- return result;
- }
- /** @internal */
- [PullSteps](readRequest) {
- const stream = this._controlledReadableStream;
- if (this._queue.length > 0) {
- const chunk = DequeueValue(this);
- if (this._closeRequested && this._queue.length === 0) {
- ReadableStreamDefaultControllerClearAlgorithms(this);
- ReadableStreamClose(stream);
- }
- else {
- ReadableStreamDefaultControllerCallPullIfNeeded(this);
- }
- readRequest._chunkSteps(chunk);
- }
- else {
- ReadableStreamAddReadRequest(stream, readRequest);
- ReadableStreamDefaultControllerCallPullIfNeeded(this);
- }
- }
- }
- Object.defineProperties(ReadableStreamDefaultController.prototype, {
- close: { enumerable: true },
- enqueue: { enumerable: true },
- error: { enumerable: true },
- desiredSize: { enumerable: true }
- });
- if (typeof SymbolPolyfill.toStringTag === 'symbol') {
- Object.defineProperty(ReadableStreamDefaultController.prototype, SymbolPolyfill.toStringTag, {
- value: 'ReadableStreamDefaultController',
- configurable: true
- });
- }
- // Abstract operations for the ReadableStreamDefaultController.
- function IsReadableStreamDefaultController(x) {
- if (!typeIsObject(x)) {
- return false;
- }
- if (!Object.prototype.hasOwnProperty.call(x, '_controlledReadableStream')) {
- return false;
- }
- return x instanceof ReadableStreamDefaultController;
- }
- function ReadableStreamDefaultControllerCallPullIfNeeded(controller) {
- const shouldPull = ReadableStreamDefaultControllerShouldCallPull(controller);
- if (!shouldPull) {
- return;
- }
- if (controller._pulling) {
- controller._pullAgain = true;
- return;
- }
- controller._pulling = true;
- const pullPromise = controller._pullAlgorithm();
- uponPromise(pullPromise, () => {
- controller._pulling = false;
- if (controller._pullAgain) {
- controller._pullAgain = false;
- ReadableStreamDefaultControllerCallPullIfNeeded(controller);
- }
- }, e => {
- ReadableStreamDefaultControllerError(controller, e);
- });
- }
- function ReadableStreamDefaultControllerShouldCallPull(controller) {
- const stream = controller._controlledReadableStream;
- if (!ReadableStreamDefaultControllerCanCloseOrEnqueue(controller)) {
- return false;
- }
- if (!controller._started) {
- return false;
- }
- if (IsReadableStreamLocked(stream) && ReadableStreamGetNumReadRequests(stream) > 0) {
- return true;
- }
- const desiredSize = ReadableStreamDefaultControllerGetDesiredSize(controller);
- if (desiredSize > 0) {
- return true;
- }
- return false;
- }
- function ReadableStreamDefaultControllerClearAlgorithms(controller) {
- controller._pullAlgorithm = undefined;
- controller._cancelAlgorithm = undefined;
- controller._strategySizeAlgorithm = undefined;
- }
- // A client of ReadableStreamDefaultController may use these functions directly to bypass state check.
- function ReadableStreamDefaultControllerClose(controller) {
- if (!ReadableStreamDefaultControllerCanCloseOrEnqueue(controller)) {
- return;
- }
- const stream = controller._controlledReadableStream;
- controller._closeRequested = true;
- if (controller._queue.length === 0) {
- ReadableStreamDefaultControllerClearAlgorithms(controller);
- ReadableStreamClose(stream);
- }
- }
- function ReadableStreamDefaultControllerEnqueue(controller, chunk) {
- if (!ReadableStreamDefaultControllerCanCloseOrEnqueue(controller)) {
- return;
- }
- const stream = controller._controlledReadableStream;
- if (IsReadableStreamLocked(stream) && ReadableStreamGetNumReadRequests(stream) > 0) {
- ReadableStreamFulfillReadRequest(stream, chunk, false);
- }
- else {
- let chunkSize;
- try {
- chunkSize = controller._strategySizeAlgorithm(chunk);
- }
- catch (chunkSizeE) {
- ReadableStreamDefaultControllerError(controller, chunkSizeE);
- throw chunkSizeE;
- }
- try {
- EnqueueValueWithSize(controller, chunk, chunkSize);
- }
- catch (enqueueE) {
- ReadableStreamDefaultControllerError(controller, enqueueE);
- throw enqueueE;
- }
- }
- ReadableStreamDefaultControllerCallPullIfNeeded(controller);
- }
- function ReadableStreamDefaultControllerError(controller, e) {
- const stream = controller._controlledReadableStream;
- if (stream._state !== 'readable') {
- return;
- }
- ResetQueue(controller);
- ReadableStreamDefaultControllerClearAlgorithms(controller);
- ReadableStreamError(stream, e);
- }
- function ReadableStreamDefaultControllerGetDesiredSize(controller) {
- const state = controller._controlledReadableStream._state;
- if (state === 'errored') {
- return null;
- }
- if (state === 'closed') {
- return 0;
- }
- return controller._strategyHWM - controller._queueTotalSize;
- }
- // This is used in the implementation of TransformStream.
- function ReadableStreamDefaultControllerHasBackpressure(controller) {
- if (ReadableStreamDefaultControllerShouldCallPull(controller)) {
- return false;
- }
- return true;
- }
- function ReadableStreamDefaultControllerCanCloseOrEnqueue(controller) {
- const state = controller._controlledReadableStream._state;
- if (!controller._closeRequested && state === 'readable') {
- return true;
- }
- return false;
- }
- function SetUpReadableStreamDefaultController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark, sizeAlgorithm) {
- controller._controlledReadableStream = stream;
- controller._queue = undefined;
- controller._queueTotalSize = undefined;
- ResetQueue(controller);
- controller._started = false;
- controller._closeRequested = false;
- controller._pullAgain = false;
- controller._pulling = false;
- controller._strategySizeAlgorithm = sizeAlgorithm;
- controller._strategyHWM = highWaterMark;
- controller._pullAlgorithm = pullAlgorithm;
- controller._cancelAlgorithm = cancelAlgorithm;
- stream._readableStreamController = controller;
- const startResult = startAlgorithm();
- uponPromise(promiseResolvedWith(startResult), () => {
- controller._started = true;
- ReadableStreamDefaultControllerCallPullIfNeeded(controller);
- }, r => {
- ReadableStreamDefaultControllerError(controller, r);
- });
- }
- function SetUpReadableStreamDefaultControllerFromUnderlyingSource(stream, underlyingSource, highWaterMark, sizeAlgorithm) {
- const controller = Object.create(ReadableStreamDefaultController.prototype);
- let startAlgorithm = () => undefined;
- let pullAlgorithm = () => promiseResolvedWith(undefined);
- let cancelAlgorithm = () => promiseResolvedWith(undefined);
- if (underlyingSource.start !== undefined) {
- startAlgorithm = () => underlyingSource.start(controller);
- }
- if (underlyingSource.pull !== undefined) {
- pullAlgorithm = () => underlyingSource.pull(controller);
- }
- if (underlyingSource.cancel !== undefined) {
- cancelAlgorithm = reason => underlyingSource.cancel(reason);
- }
- SetUpReadableStreamDefaultController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark, sizeAlgorithm);
- }
- // Helper functions for the ReadableStreamDefaultController.
- function defaultControllerBrandCheckException$1(name) {
- return new TypeError(`ReadableStreamDefaultController.prototype.${name} can only be used on a ReadableStreamDefaultController`);
- }
+const impl = utils.implSymbol;
- function ReadableStreamTee(stream, cloneForBranch2) {
- if (IsReadableByteStreamController(stream._readableStreamController)) {
- return ReadableByteStreamTee(stream);
- }
- return ReadableStreamDefaultTee(stream);
- }
- function ReadableStreamDefaultTee(stream, cloneForBranch2) {
- const reader = AcquireReadableStreamDefaultReader(stream);
- let reading = false;
- let readAgain = false;
- let canceled1 = false;
- let canceled2 = false;
- let reason1;
- let reason2;
- let branch1;
- let branch2;
- let resolveCancelPromise;
- const cancelPromise = newPromise(resolve => {
- resolveCancelPromise = resolve;
- });
- function pullAlgorithm() {
- if (reading) {
- readAgain = true;
- return promiseResolvedWith(undefined);
- }
- reading = true;
- const readRequest = {
- _chunkSteps: chunk => {
- // This needs to be delayed a microtask because it takes at least a microtask to detect errors (using
- // reader._closedPromise below), and we want errors in stream to error both branches immediately. We cannot let
- // successful synchronously-available reads get ahead of asynchronously-available errors.
- queueMicrotask(() => {
- readAgain = false;
- const chunk1 = chunk;
- const chunk2 = chunk;
- // There is no way to access the cloning code right now in the reference implementation.
- // If we add one then we'll need an implementation for serializable objects.
- // if (!canceled2 && cloneForBranch2) {
- // chunk2 = StructuredDeserialize(StructuredSerialize(chunk2));
- // }
- if (!canceled1) {
- ReadableStreamDefaultControllerEnqueue(branch1._readableStreamController, chunk1);
- }
- if (!canceled2) {
- ReadableStreamDefaultControllerEnqueue(branch2._readableStreamController, chunk2);
- }
- reading = false;
- if (readAgain) {
- pullAlgorithm();
- }
- });
- },
- _closeSteps: () => {
- reading = false;
- if (!canceled1) {
- ReadableStreamDefaultControllerClose(branch1._readableStreamController);
- }
- if (!canceled2) {
- ReadableStreamDefaultControllerClose(branch2._readableStreamController);
- }
- if (!canceled1 || !canceled2) {
- resolveCancelPromise(undefined);
- }
- },
- _errorSteps: () => {
- reading = false;
- }
- };
- ReadableStreamDefaultReaderRead(reader, readRequest);
- return promiseResolvedWith(undefined);
- }
- function cancel1Algorithm(reason) {
- canceled1 = true;
- reason1 = reason;
- if (canceled2) {
- const compositeReason = CreateArrayFromList([reason1, reason2]);
- const cancelResult = ReadableStreamCancel(stream, compositeReason);
- resolveCancelPromise(cancelResult);
- }
- return cancelPromise;
- }
- function cancel2Algorithm(reason) {
- canceled2 = true;
- reason2 = reason;
- if (canceled1) {
- const compositeReason = CreateArrayFromList([reason1, reason2]);
- const cancelResult = ReadableStreamCancel(stream, compositeReason);
- resolveCancelPromise(cancelResult);
- }
- return cancelPromise;
- }
- function startAlgorithm() {
- // do nothing
- }
- branch1 = CreateReadableStream(startAlgorithm, pullAlgorithm, cancel1Algorithm);
- branch2 = CreateReadableStream(startAlgorithm, pullAlgorithm, cancel2Algorithm);
- uponRejection(reader._closedPromise, (r) => {
- ReadableStreamDefaultControllerError(branch1._readableStreamController, r);
- ReadableStreamDefaultControllerError(branch2._readableStreamController, r);
- if (!canceled1 || !canceled2) {
- resolveCancelPromise(undefined);
- }
- });
- return [branch1, branch2];
- }
- function ReadableByteStreamTee(stream) {
- let reader = AcquireReadableStreamDefaultReader(stream);
- let reading = false;
- let readAgainForBranch1 = false;
- let readAgainForBranch2 = false;
- let canceled1 = false;
- let canceled2 = false;
- let reason1;
- let reason2;
- let branch1;
- let branch2;
- let resolveCancelPromise;
- const cancelPromise = newPromise(resolve => {
- resolveCancelPromise = resolve;
- });
- function forwardReaderError(thisReader) {
- uponRejection(thisReader._closedPromise, r => {
- if (thisReader !== reader) {
- return;
- }
- ReadableByteStreamControllerError(branch1._readableStreamController, r);
- ReadableByteStreamControllerError(branch2._readableStreamController, r);
- if (!canceled1 || !canceled2) {
- resolveCancelPromise(undefined);
- }
- });
- }
- function pullWithDefaultReader() {
- if (IsReadableStreamBYOBReader(reader)) {
- ReadableStreamReaderGenericRelease(reader);
- reader = AcquireReadableStreamDefaultReader(stream);
- forwardReaderError(reader);
- }
- const readRequest = {
- _chunkSteps: chunk => {
- // This needs to be delayed a microtask because it takes at least a microtask to detect errors (using
- // reader._closedPromise below), and we want errors in stream to error both branches immediately. We cannot let
- // successful synchronously-available reads get ahead of asynchronously-available errors.
- queueMicrotask(() => {
- readAgainForBranch1 = false;
- readAgainForBranch2 = false;
- const chunk1 = chunk;
- let chunk2 = chunk;
- if (!canceled1 && !canceled2) {
- try {
- chunk2 = CloneAsUint8Array(chunk);
- }
- catch (cloneE) {
- ReadableByteStreamControllerError(branch1._readableStreamController, cloneE);
- ReadableByteStreamControllerError(branch2._readableStreamController, cloneE);
- resolveCancelPromise(ReadableStreamCancel(stream, cloneE));
- return;
- }
- }
- if (!canceled1) {
- ReadableByteStreamControllerEnqueue(branch1._readableStreamController, chunk1);
- }
- if (!canceled2) {
- ReadableByteStreamControllerEnqueue(branch2._readableStreamController, chunk2);
- }
- reading = false;
- if (readAgainForBranch1) {
- pull1Algorithm();
- }
- else if (readAgainForBranch2) {
- pull2Algorithm();
- }
- });
- },
- _closeSteps: () => {
- reading = false;
- if (!canceled1) {
- ReadableByteStreamControllerClose(branch1._readableStreamController);
- }
- if (!canceled2) {
- ReadableByteStreamControllerClose(branch2._readableStreamController);
- }
- if (branch1._readableStreamController._pendingPullIntos.length > 0) {
- ReadableByteStreamControllerRespond(branch1._readableStreamController, 0);
- }
- if (branch2._readableStreamController._pendingPullIntos.length > 0) {
- ReadableByteStreamControllerRespond(branch2._readableStreamController, 0);
- }
- if (!canceled1 || !canceled2) {
- resolveCancelPromise(undefined);
- }
- },
- _errorSteps: () => {
- reading = false;
- }
- };
- ReadableStreamDefaultReaderRead(reader, readRequest);
- }
- function pullWithBYOBReader(view, forBranch2) {
- if (IsReadableStreamDefaultReader(reader)) {
- ReadableStreamReaderGenericRelease(reader);
- reader = AcquireReadableStreamBYOBReader(stream);
- forwardReaderError(reader);
- }
- const byobBranch = forBranch2 ? branch2 : branch1;
- const otherBranch = forBranch2 ? branch1 : branch2;
- const readIntoRequest = {
- _chunkSteps: chunk => {
- // This needs to be delayed a microtask because it takes at least a microtask to detect errors (using
- // reader._closedPromise below), and we want errors in stream to error both branches immediately. We cannot let
- // successful synchronously-available reads get ahead of asynchronously-available errors.
- queueMicrotask(() => {
- readAgainForBranch1 = false;
- readAgainForBranch2 = false;
- const byobCanceled = forBranch2 ? canceled2 : canceled1;
- const otherCanceled = forBranch2 ? canceled1 : canceled2;
- if (!otherCanceled) {
- let clonedChunk;
- try {
- clonedChunk = CloneAsUint8Array(chunk);
- }
- catch (cloneE) {
- ReadableByteStreamControllerError(byobBranch._readableStreamController, cloneE);
- ReadableByteStreamControllerError(otherBranch._readableStreamController, cloneE);
- resolveCancelPromise(ReadableStreamCancel(stream, cloneE));
- return;
- }
- if (!byobCanceled) {
- ReadableByteStreamControllerRespondWithNewView(byobBranch._readableStreamController, chunk);
- }
- ReadableByteStreamControllerEnqueue(otherBranch._readableStreamController, clonedChunk);
- }
- else if (!byobCanceled) {
- ReadableByteStreamControllerRespondWithNewView(byobBranch._readableStreamController, chunk);
- }
- reading = false;
- if (readAgainForBranch1) {
- pull1Algorithm();
- }
- else if (readAgainForBranch2) {
- pull2Algorithm();
- }
- });
- },
- _closeSteps: chunk => {
- reading = false;
- const byobCanceled = forBranch2 ? canceled2 : canceled1;
- const otherCanceled = forBranch2 ? canceled1 : canceled2;
- if (!byobCanceled) {
- ReadableByteStreamControllerClose(byobBranch._readableStreamController);
- }
- if (!otherCanceled) {
- ReadableByteStreamControllerClose(otherBranch._readableStreamController);
- }
- if (chunk !== undefined) {
- if (!byobCanceled) {
- ReadableByteStreamControllerRespondWithNewView(byobBranch._readableStreamController, chunk);
- }
- if (!otherCanceled && otherBranch._readableStreamController._pendingPullIntos.length > 0) {
- ReadableByteStreamControllerRespond(otherBranch._readableStreamController, 0);
- }
- }
- if (!byobCanceled || !otherCanceled) {
- resolveCancelPromise(undefined);
- }
- },
- _errorSteps: () => {
- reading = false;
- }
- };
- ReadableStreamBYOBReaderRead(reader, view, readIntoRequest);
- }
- function pull1Algorithm() {
- if (reading) {
- readAgainForBranch1 = true;
- return promiseResolvedWith(undefined);
- }
- reading = true;
- const byobRequest = ReadableByteStreamControllerGetBYOBRequest(branch1._readableStreamController);
- if (byobRequest === null) {
- pullWithDefaultReader();
- }
- else {
- pullWithBYOBReader(byobRequest._view, false);
- }
- return promiseResolvedWith(undefined);
- }
- function pull2Algorithm() {
- if (reading) {
- readAgainForBranch2 = true;
- return promiseResolvedWith(undefined);
- }
- reading = true;
- const byobRequest = ReadableByteStreamControllerGetBYOBRequest(branch2._readableStreamController);
- if (byobRequest === null) {
- pullWithDefaultReader();
- }
- else {
- pullWithBYOBReader(byobRequest._view, true);
- }
- return promiseResolvedWith(undefined);
- }
- function cancel1Algorithm(reason) {
- canceled1 = true;
- reason1 = reason;
- if (canceled2) {
- const compositeReason = CreateArrayFromList([reason1, reason2]);
- const cancelResult = ReadableStreamCancel(stream, compositeReason);
- resolveCancelPromise(cancelResult);
- }
- return cancelPromise;
- }
- function cancel2Algorithm(reason) {
- canceled2 = true;
- reason2 = reason;
- if (canceled1) {
- const compositeReason = CreateArrayFromList([reason1, reason2]);
- const cancelResult = ReadableStreamCancel(stream, compositeReason);
- resolveCancelPromise(cancelResult);
- }
- return cancelPromise;
- }
- function startAlgorithm() {
- return;
- }
- branch1 = CreateReadableByteStream(startAlgorithm, pull1Algorithm, cancel1Algorithm);
- branch2 = CreateReadableByteStream(startAlgorithm, pull2Algorithm, cancel2Algorithm);
- forwardReaderError(reader);
- return [branch1, branch2];
- }
-
- function convertUnderlyingDefaultOrByteSource(source, context) {
- assertDictionary(source, context);
- const original = source;
- const autoAllocateChunkSize = original === null || original === void 0 ? void 0 : original.autoAllocateChunkSize;
- const cancel = original === null || original === void 0 ? void 0 : original.cancel;
- const pull = original === null || original === void 0 ? void 0 : original.pull;
- const start = original === null || original === void 0 ? void 0 : original.start;
- const type = original === null || original === void 0 ? void 0 : original.type;
- return {
- autoAllocateChunkSize: autoAllocateChunkSize === undefined ?
- undefined :
- convertUnsignedLongLongWithEnforceRange(autoAllocateChunkSize, `${context} has member 'autoAllocateChunkSize' that`),
- cancel: cancel === undefined ?
- undefined :
- convertUnderlyingSourceCancelCallback(cancel, original, `${context} has member 'cancel' that`),
- pull: pull === undefined ?
- undefined :
- convertUnderlyingSourcePullCallback(pull, original, `${context} has member 'pull' that`),
- start: start === undefined ?
- undefined :
- convertUnderlyingSourceStartCallback(start, original, `${context} has member 'start' that`),
- type: type === undefined ? undefined : convertReadableStreamType(type, `${context} has member 'type' that`)
- };
- }
- function convertUnderlyingSourceCancelCallback(fn, original, context) {
- assertFunction(fn, context);
- return (reason) => promiseCall(fn, original, [reason]);
- }
- function convertUnderlyingSourcePullCallback(fn, original, context) {
- assertFunction(fn, context);
- return (controller) => promiseCall(fn, original, [controller]);
- }
- function convertUnderlyingSourceStartCallback(fn, original, context) {
- assertFunction(fn, context);
- return (controller) => reflectCall(fn, original, [controller]);
- }
- function convertReadableStreamType(type, context) {
- type = `${type}`;
- if (type !== 'bytes') {
- throw new TypeError(`${context} '${type}' is not a valid enumeration value for ReadableStreamType`);
- }
- return type;
- }
+function URL(url) {
+ if (!this || this[impl] || !(this instanceof URL)) {
+ throw new TypeError("Failed to construct 'URL': Please use the 'new' operator, this DOM object constructor cannot be called as a function.");
+ }
+ if (arguments.length < 1) {
+ throw new TypeError("Failed to construct 'URL': 1 argument required, but only " + arguments.length + " present.");
+ }
+ const args = [];
+ for (let i = 0; i < arguments.length && i < 2; ++i) {
+ args[i] = arguments[i];
+ }
+ args[0] = conversions["USVString"](args[0]);
+ if (args[1] !== undefined) {
+ args[1] = conversions["USVString"](args[1]);
+ }
- function convertReaderOptions(options, context) {
- assertDictionary(options, context);
- const mode = options === null || options === void 0 ? void 0 : options.mode;
- return {
- mode: mode === undefined ? undefined : convertReadableStreamReaderMode(mode, `${context} has member 'mode' that`)
- };
- }
- function convertReadableStreamReaderMode(mode, context) {
- mode = `${mode}`;
- if (mode !== 'byob') {
- throw new TypeError(`${context} '${mode}' is not a valid enumeration value for ReadableStreamReaderMode`);
- }
- return mode;
- }
+ module.exports.setup(this, args);
+}
- function convertIteratorOptions(options, context) {
- assertDictionary(options, context);
- const preventCancel = options === null || options === void 0 ? void 0 : options.preventCancel;
- return { preventCancel: Boolean(preventCancel) };
- }
+URL.prototype.toJSON = function toJSON() {
+ if (!this || !module.exports.is(this)) {
+ throw new TypeError("Illegal invocation");
+ }
+ const args = [];
+ for (let i = 0; i < arguments.length && i < 0; ++i) {
+ args[i] = arguments[i];
+ }
+ return this[impl].toJSON.apply(this[impl], args);
+};
+Object.defineProperty(URL.prototype, "href", {
+ get() {
+ return this[impl].href;
+ },
+ set(V) {
+ V = conversions["USVString"](V);
+ this[impl].href = V;
+ },
+ enumerable: true,
+ configurable: true
+});
- function convertPipeOptions(options, context) {
- assertDictionary(options, context);
- const preventAbort = options === null || options === void 0 ? void 0 : options.preventAbort;
- const preventCancel = options === null || options === void 0 ? void 0 : options.preventCancel;
- const preventClose = options === null || options === void 0 ? void 0 : options.preventClose;
- const signal = options === null || options === void 0 ? void 0 : options.signal;
- if (signal !== undefined) {
- assertAbortSignal(signal, `${context} has member 'signal' that`);
- }
- return {
- preventAbort: Boolean(preventAbort),
- preventCancel: Boolean(preventCancel),
- preventClose: Boolean(preventClose),
- signal
- };
- }
- function assertAbortSignal(signal, context) {
- if (!isAbortSignal(signal)) {
- throw new TypeError(`${context} is not an AbortSignal.`);
- }
- }
+URL.prototype.toString = function () {
+ if (!this || !module.exports.is(this)) {
+ throw new TypeError("Illegal invocation");
+ }
+ return this.href;
+};
- function convertReadableWritablePair(pair, context) {
- assertDictionary(pair, context);
- const readable = pair === null || pair === void 0 ? void 0 : pair.readable;
- assertRequiredField(readable, 'readable', 'ReadableWritablePair');
- assertReadableStream(readable, `${context} has member 'readable' that`);
- const writable = pair === null || pair === void 0 ? void 0 : pair.writable;
- assertRequiredField(writable, 'writable', 'ReadableWritablePair');
- assertWritableStream(writable, `${context} has member 'writable' that`);
- return { readable, writable };
- }
+Object.defineProperty(URL.prototype, "origin", {
+ get() {
+ return this[impl].origin;
+ },
+ enumerable: true,
+ configurable: true
+});
- /**
- * A readable stream represents a source of data, from which you can read.
- *
- * @public
- */
- class ReadableStream {
- constructor(rawUnderlyingSource = {}, rawStrategy = {}) {
- if (rawUnderlyingSource === undefined) {
- rawUnderlyingSource = null;
- }
- else {
- assertObject(rawUnderlyingSource, 'First parameter');
- }
- const strategy = convertQueuingStrategy(rawStrategy, 'Second parameter');
- const underlyingSource = convertUnderlyingDefaultOrByteSource(rawUnderlyingSource, 'First parameter');
- InitializeReadableStream(this);
- if (underlyingSource.type === 'bytes') {
- if (strategy.size !== undefined) {
- throw new RangeError('The strategy for a byte stream cannot have a size function');
- }
- const highWaterMark = ExtractHighWaterMark(strategy, 0);
- SetUpReadableByteStreamControllerFromUnderlyingSource(this, underlyingSource, highWaterMark);
- }
- else {
- const sizeAlgorithm = ExtractSizeAlgorithm(strategy);
- const highWaterMark = ExtractHighWaterMark(strategy, 1);
- SetUpReadableStreamDefaultControllerFromUnderlyingSource(this, underlyingSource, highWaterMark, sizeAlgorithm);
- }
- }
- /**
- * Whether or not the readable stream is locked to a {@link ReadableStreamDefaultReader | reader}.
- */
- get locked() {
- if (!IsReadableStream(this)) {
- throw streamBrandCheckException$1('locked');
- }
- return IsReadableStreamLocked(this);
- }
- /**
- * Cancels the stream, signaling a loss of interest in the stream by a consumer.
- *
- * The supplied `reason` argument will be given to the underlying source's {@link UnderlyingSource.cancel | cancel()}
- * method, which might or might not use it.
- */
- cancel(reason = undefined) {
- if (!IsReadableStream(this)) {
- return promiseRejectedWith(streamBrandCheckException$1('cancel'));
- }
- if (IsReadableStreamLocked(this)) {
- return promiseRejectedWith(new TypeError('Cannot cancel a stream that already has a reader'));
- }
- return ReadableStreamCancel(this, reason);
- }
- getReader(rawOptions = undefined) {
- if (!IsReadableStream(this)) {
- throw streamBrandCheckException$1('getReader');
- }
- const options = convertReaderOptions(rawOptions, 'First parameter');
- if (options.mode === undefined) {
- return AcquireReadableStreamDefaultReader(this);
- }
- return AcquireReadableStreamBYOBReader(this);
- }
- pipeThrough(rawTransform, rawOptions = {}) {
- if (!IsReadableStream(this)) {
- throw streamBrandCheckException$1('pipeThrough');
- }
- assertRequiredArgument(rawTransform, 1, 'pipeThrough');
- const transform = convertReadableWritablePair(rawTransform, 'First parameter');
- const options = convertPipeOptions(rawOptions, 'Second parameter');
- if (IsReadableStreamLocked(this)) {
- throw new TypeError('ReadableStream.prototype.pipeThrough cannot be used on a locked ReadableStream');
- }
- if (IsWritableStreamLocked(transform.writable)) {
- throw new TypeError('ReadableStream.prototype.pipeThrough cannot be used on a locked WritableStream');
- }
- const promise = ReadableStreamPipeTo(this, transform.writable, options.preventClose, options.preventAbort, options.preventCancel, options.signal);
- setPromiseIsHandledToTrue(promise);
- return transform.readable;
- }
- pipeTo(destination, rawOptions = {}) {
- if (!IsReadableStream(this)) {
- return promiseRejectedWith(streamBrandCheckException$1('pipeTo'));
- }
- if (destination === undefined) {
- return promiseRejectedWith(`Parameter 1 is required in 'pipeTo'.`);
- }
- if (!IsWritableStream(destination)) {
- return promiseRejectedWith(new TypeError(`ReadableStream.prototype.pipeTo's first argument must be a WritableStream`));
- }
- let options;
- try {
- options = convertPipeOptions(rawOptions, 'Second parameter');
- }
- catch (e) {
- return promiseRejectedWith(e);
- }
- if (IsReadableStreamLocked(this)) {
- return promiseRejectedWith(new TypeError('ReadableStream.prototype.pipeTo cannot be used on a locked ReadableStream'));
- }
- if (IsWritableStreamLocked(destination)) {
- return promiseRejectedWith(new TypeError('ReadableStream.prototype.pipeTo cannot be used on a locked WritableStream'));
- }
- return ReadableStreamPipeTo(this, destination, options.preventClose, options.preventAbort, options.preventCancel, options.signal);
- }
- /**
- * Tees this readable stream, returning a two-element array containing the two resulting branches as
- * new {@link ReadableStream} instances.
- *
- * Teeing a stream will lock it, preventing any other consumer from acquiring a reader.
- * To cancel the stream, cancel both of the resulting branches; a composite cancellation reason will then be
- * propagated to the stream's underlying source.
- *
- * Note that the chunks seen in each branch will be the same object. If the chunks are not immutable,
- * this could allow interference between the two branches.
- */
- tee() {
- if (!IsReadableStream(this)) {
- throw streamBrandCheckException$1('tee');
- }
- const branches = ReadableStreamTee(this);
- return CreateArrayFromList(branches);
- }
- values(rawOptions = undefined) {
- if (!IsReadableStream(this)) {
- throw streamBrandCheckException$1('values');
- }
- const options = convertIteratorOptions(rawOptions, 'First parameter');
- return AcquireReadableStreamAsyncIterator(this, options.preventCancel);
- }
- }
- Object.defineProperties(ReadableStream.prototype, {
- cancel: { enumerable: true },
- getReader: { enumerable: true },
- pipeThrough: { enumerable: true },
- pipeTo: { enumerable: true },
- tee: { enumerable: true },
- values: { enumerable: true },
- locked: { enumerable: true }
- });
- if (typeof SymbolPolyfill.toStringTag === 'symbol') {
- Object.defineProperty(ReadableStream.prototype, SymbolPolyfill.toStringTag, {
- value: 'ReadableStream',
- configurable: true
- });
- }
- if (typeof SymbolPolyfill.asyncIterator === 'symbol') {
- Object.defineProperty(ReadableStream.prototype, SymbolPolyfill.asyncIterator, {
- value: ReadableStream.prototype.values,
- writable: true,
- configurable: true
- });
- }
- // Abstract operations for the ReadableStream.
- // Throws if and only if startAlgorithm throws.
- function CreateReadableStream(startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark = 1, sizeAlgorithm = () => 1) {
- const stream = Object.create(ReadableStream.prototype);
- InitializeReadableStream(stream);
- const controller = Object.create(ReadableStreamDefaultController.prototype);
- SetUpReadableStreamDefaultController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark, sizeAlgorithm);
- return stream;
- }
- // Throws if and only if startAlgorithm throws.
- function CreateReadableByteStream(startAlgorithm, pullAlgorithm, cancelAlgorithm) {
- const stream = Object.create(ReadableStream.prototype);
- InitializeReadableStream(stream);
- const controller = Object.create(ReadableByteStreamController.prototype);
- SetUpReadableByteStreamController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, 0, undefined);
- return stream;
- }
- function InitializeReadableStream(stream) {
- stream._state = 'readable';
- stream._reader = undefined;
- stream._storedError = undefined;
- stream._disturbed = false;
- }
- function IsReadableStream(x) {
- if (!typeIsObject(x)) {
- return false;
- }
- if (!Object.prototype.hasOwnProperty.call(x, '_readableStreamController')) {
- return false;
- }
- return x instanceof ReadableStream;
- }
- function IsReadableStreamLocked(stream) {
- if (stream._reader === undefined) {
- return false;
- }
- return true;
- }
- // ReadableStream API exposed for controllers.
- function ReadableStreamCancel(stream, reason) {
- stream._disturbed = true;
- if (stream._state === 'closed') {
- return promiseResolvedWith(undefined);
- }
- if (stream._state === 'errored') {
- return promiseRejectedWith(stream._storedError);
- }
- ReadableStreamClose(stream);
- const reader = stream._reader;
- if (reader !== undefined && IsReadableStreamBYOBReader(reader)) {
- reader._readIntoRequests.forEach(readIntoRequest => {
- readIntoRequest._closeSteps(undefined);
- });
- reader._readIntoRequests = new SimpleQueue();
- }
- const sourceCancelPromise = stream._readableStreamController[CancelSteps](reason);
- return transformPromiseWith(sourceCancelPromise, noop);
- }
- function ReadableStreamClose(stream) {
- stream._state = 'closed';
- const reader = stream._reader;
- if (reader === undefined) {
- return;
- }
- defaultReaderClosedPromiseResolve(reader);
- if (IsReadableStreamDefaultReader(reader)) {
- reader._readRequests.forEach(readRequest => {
- readRequest._closeSteps();
- });
- reader._readRequests = new SimpleQueue();
- }
- }
- function ReadableStreamError(stream, e) {
- stream._state = 'errored';
- stream._storedError = e;
- const reader = stream._reader;
- if (reader === undefined) {
- return;
- }
- defaultReaderClosedPromiseReject(reader, e);
- if (IsReadableStreamDefaultReader(reader)) {
- reader._readRequests.forEach(readRequest => {
- readRequest._errorSteps(e);
- });
- reader._readRequests = new SimpleQueue();
- }
- else {
- reader._readIntoRequests.forEach(readIntoRequest => {
- readIntoRequest._errorSteps(e);
- });
- reader._readIntoRequests = new SimpleQueue();
- }
- }
- // Helper functions for the ReadableStream.
- function streamBrandCheckException$1(name) {
- return new TypeError(`ReadableStream.prototype.${name} can only be used on a ReadableStream`);
- }
+Object.defineProperty(URL.prototype, "protocol", {
+ get() {
+ return this[impl].protocol;
+ },
+ set(V) {
+ V = conversions["USVString"](V);
+ this[impl].protocol = V;
+ },
+ enumerable: true,
+ configurable: true
+});
- function convertQueuingStrategyInit(init, context) {
- assertDictionary(init, context);
- const highWaterMark = init === null || init === void 0 ? void 0 : init.highWaterMark;
- assertRequiredField(highWaterMark, 'highWaterMark', 'QueuingStrategyInit');
- return {
- highWaterMark: convertUnrestrictedDouble(highWaterMark)
- };
- }
+Object.defineProperty(URL.prototype, "username", {
+ get() {
+ return this[impl].username;
+ },
+ set(V) {
+ V = conversions["USVString"](V);
+ this[impl].username = V;
+ },
+ enumerable: true,
+ configurable: true
+});
- // The size function must not have a prototype property nor be a constructor
- const byteLengthSizeFunction = (chunk) => {
- return chunk.byteLength;
- };
- try {
- Object.defineProperty(byteLengthSizeFunction, 'name', {
- value: 'size',
- configurable: true
- });
- }
- catch (_a) {
- // This property is non-configurable in older browsers, so ignore if this throws.
- // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function/name#browser_compatibility
- }
- /**
- * A queuing strategy that counts the number of bytes in each chunk.
- *
- * @public
- */
- class ByteLengthQueuingStrategy {
- constructor(options) {
- assertRequiredArgument(options, 1, 'ByteLengthQueuingStrategy');
- options = convertQueuingStrategyInit(options, 'First parameter');
- this._byteLengthQueuingStrategyHighWaterMark = options.highWaterMark;
- }
- /**
- * Returns the high water mark provided to the constructor.
- */
- get highWaterMark() {
- if (!IsByteLengthQueuingStrategy(this)) {
- throw byteLengthBrandCheckException('highWaterMark');
- }
- return this._byteLengthQueuingStrategyHighWaterMark;
- }
- /**
- * Measures the size of `chunk` by returning the value of its `byteLength` property.
- */
- get size() {
- if (!IsByteLengthQueuingStrategy(this)) {
- throw byteLengthBrandCheckException('size');
- }
- return byteLengthSizeFunction;
- }
- }
- Object.defineProperties(ByteLengthQueuingStrategy.prototype, {
- highWaterMark: { enumerable: true },
- size: { enumerable: true }
- });
- if (typeof SymbolPolyfill.toStringTag === 'symbol') {
- Object.defineProperty(ByteLengthQueuingStrategy.prototype, SymbolPolyfill.toStringTag, {
- value: 'ByteLengthQueuingStrategy',
- configurable: true
- });
- }
- // Helper functions for the ByteLengthQueuingStrategy.
- function byteLengthBrandCheckException(name) {
- return new TypeError(`ByteLengthQueuingStrategy.prototype.${name} can only be used on a ByteLengthQueuingStrategy`);
- }
- function IsByteLengthQueuingStrategy(x) {
- if (!typeIsObject(x)) {
- return false;
- }
- if (!Object.prototype.hasOwnProperty.call(x, '_byteLengthQueuingStrategyHighWaterMark')) {
- return false;
- }
- return x instanceof ByteLengthQueuingStrategy;
- }
+Object.defineProperty(URL.prototype, "password", {
+ get() {
+ return this[impl].password;
+ },
+ set(V) {
+ V = conversions["USVString"](V);
+ this[impl].password = V;
+ },
+ enumerable: true,
+ configurable: true
+});
- // The size function must not have a prototype property nor be a constructor
- const countSizeFunction = () => {
- return 1;
- };
- try {
- Object.defineProperty(countSizeFunction, 'name', {
- value: 'size',
- configurable: true
- });
- }
- catch (_a) {
- // This property is non-configurable in older browsers, so ignore if this throws.
- // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function/name#browser_compatibility
- }
- /**
- * A queuing strategy that counts the number of chunks.
- *
- * @public
- */
- class CountQueuingStrategy {
- constructor(options) {
- assertRequiredArgument(options, 1, 'CountQueuingStrategy');
- options = convertQueuingStrategyInit(options, 'First parameter');
- this._countQueuingStrategyHighWaterMark = options.highWaterMark;
- }
- /**
- * Returns the high water mark provided to the constructor.
- */
- get highWaterMark() {
- if (!IsCountQueuingStrategy(this)) {
- throw countBrandCheckException('highWaterMark');
- }
- return this._countQueuingStrategyHighWaterMark;
- }
- /**
- * Measures the size of `chunk` by always returning 1.
- * This ensures that the total queue size is a count of the number of chunks in the queue.
- */
- get size() {
- if (!IsCountQueuingStrategy(this)) {
- throw countBrandCheckException('size');
- }
- return countSizeFunction;
- }
- }
- Object.defineProperties(CountQueuingStrategy.prototype, {
- highWaterMark: { enumerable: true },
- size: { enumerable: true }
- });
- if (typeof SymbolPolyfill.toStringTag === 'symbol') {
- Object.defineProperty(CountQueuingStrategy.prototype, SymbolPolyfill.toStringTag, {
- value: 'CountQueuingStrategy',
- configurable: true
- });
- }
- // Helper functions for the CountQueuingStrategy.
- function countBrandCheckException(name) {
- return new TypeError(`CountQueuingStrategy.prototype.${name} can only be used on a CountQueuingStrategy`);
- }
- function IsCountQueuingStrategy(x) {
- if (!typeIsObject(x)) {
- return false;
- }
- if (!Object.prototype.hasOwnProperty.call(x, '_countQueuingStrategyHighWaterMark')) {
- return false;
- }
- return x instanceof CountQueuingStrategy;
- }
-
- function convertTransformer(original, context) {
- assertDictionary(original, context);
- const flush = original === null || original === void 0 ? void 0 : original.flush;
- const readableType = original === null || original === void 0 ? void 0 : original.readableType;
- const start = original === null || original === void 0 ? void 0 : original.start;
- const transform = original === null || original === void 0 ? void 0 : original.transform;
- const writableType = original === null || original === void 0 ? void 0 : original.writableType;
- return {
- flush: flush === undefined ?
- undefined :
- convertTransformerFlushCallback(flush, original, `${context} has member 'flush' that`),
- readableType,
- start: start === undefined ?
- undefined :
- convertTransformerStartCallback(start, original, `${context} has member 'start' that`),
- transform: transform === undefined ?
- undefined :
- convertTransformerTransformCallback(transform, original, `${context} has member 'transform' that`),
- writableType
- };
- }
- function convertTransformerFlushCallback(fn, original, context) {
- assertFunction(fn, context);
- return (controller) => promiseCall(fn, original, [controller]);
- }
- function convertTransformerStartCallback(fn, original, context) {
- assertFunction(fn, context);
- return (controller) => reflectCall(fn, original, [controller]);
- }
- function convertTransformerTransformCallback(fn, original, context) {
- assertFunction(fn, context);
- return (chunk, controller) => promiseCall(fn, original, [chunk, controller]);
- }
-
- // Class TransformStream
- /**
- * A transform stream consists of a pair of streams: a {@link WritableStream | writable stream},
- * known as its writable side, and a {@link ReadableStream | readable stream}, known as its readable side.
- * In a manner specific to the transform stream in question, writes to the writable side result in new data being
- * made available for reading from the readable side.
- *
- * @public
- */
- class TransformStream {
- constructor(rawTransformer = {}, rawWritableStrategy = {}, rawReadableStrategy = {}) {
- if (rawTransformer === undefined) {
- rawTransformer = null;
- }
- const writableStrategy = convertQueuingStrategy(rawWritableStrategy, 'Second parameter');
- const readableStrategy = convertQueuingStrategy(rawReadableStrategy, 'Third parameter');
- const transformer = convertTransformer(rawTransformer, 'First parameter');
- if (transformer.readableType !== undefined) {
- throw new RangeError('Invalid readableType specified');
- }
- if (transformer.writableType !== undefined) {
- throw new RangeError('Invalid writableType specified');
- }
- const readableHighWaterMark = ExtractHighWaterMark(readableStrategy, 0);
- const readableSizeAlgorithm = ExtractSizeAlgorithm(readableStrategy);
- const writableHighWaterMark = ExtractHighWaterMark(writableStrategy, 1);
- const writableSizeAlgorithm = ExtractSizeAlgorithm(writableStrategy);
- let startPromise_resolve;
- const startPromise = newPromise(resolve => {
- startPromise_resolve = resolve;
- });
- InitializeTransformStream(this, startPromise, writableHighWaterMark, writableSizeAlgorithm, readableHighWaterMark, readableSizeAlgorithm);
- SetUpTransformStreamDefaultControllerFromTransformer(this, transformer);
- if (transformer.start !== undefined) {
- startPromise_resolve(transformer.start(this._transformStreamController));
- }
- else {
- startPromise_resolve(undefined);
- }
- }
- /**
- * The readable side of the transform stream.
- */
- get readable() {
- if (!IsTransformStream(this)) {
- throw streamBrandCheckException('readable');
- }
- return this._readable;
- }
- /**
- * The writable side of the transform stream.
- */
- get writable() {
- if (!IsTransformStream(this)) {
- throw streamBrandCheckException('writable');
- }
- return this._writable;
- }
- }
- Object.defineProperties(TransformStream.prototype, {
- readable: { enumerable: true },
- writable: { enumerable: true }
- });
- if (typeof SymbolPolyfill.toStringTag === 'symbol') {
- Object.defineProperty(TransformStream.prototype, SymbolPolyfill.toStringTag, {
- value: 'TransformStream',
- configurable: true
- });
- }
- function InitializeTransformStream(stream, startPromise, writableHighWaterMark, writableSizeAlgorithm, readableHighWaterMark, readableSizeAlgorithm) {
- function startAlgorithm() {
- return startPromise;
- }
- function writeAlgorithm(chunk) {
- return TransformStreamDefaultSinkWriteAlgorithm(stream, chunk);
- }
- function abortAlgorithm(reason) {
- return TransformStreamDefaultSinkAbortAlgorithm(stream, reason);
- }
- function closeAlgorithm() {
- return TransformStreamDefaultSinkCloseAlgorithm(stream);
- }
- stream._writable = CreateWritableStream(startAlgorithm, writeAlgorithm, closeAlgorithm, abortAlgorithm, writableHighWaterMark, writableSizeAlgorithm);
- function pullAlgorithm() {
- return TransformStreamDefaultSourcePullAlgorithm(stream);
- }
- function cancelAlgorithm(reason) {
- TransformStreamErrorWritableAndUnblockWrite(stream, reason);
- return promiseResolvedWith(undefined);
- }
- stream._readable = CreateReadableStream(startAlgorithm, pullAlgorithm, cancelAlgorithm, readableHighWaterMark, readableSizeAlgorithm);
- // The [[backpressure]] slot is set to undefined so that it can be initialised by TransformStreamSetBackpressure.
- stream._backpressure = undefined;
- stream._backpressureChangePromise = undefined;
- stream._backpressureChangePromise_resolve = undefined;
- TransformStreamSetBackpressure(stream, true);
- stream._transformStreamController = undefined;
- }
- function IsTransformStream(x) {
- if (!typeIsObject(x)) {
- return false;
- }
- if (!Object.prototype.hasOwnProperty.call(x, '_transformStreamController')) {
- return false;
- }
- return x instanceof TransformStream;
- }
- // This is a no-op if both sides are already errored.
- function TransformStreamError(stream, e) {
- ReadableStreamDefaultControllerError(stream._readable._readableStreamController, e);
- TransformStreamErrorWritableAndUnblockWrite(stream, e);
- }
- function TransformStreamErrorWritableAndUnblockWrite(stream, e) {
- TransformStreamDefaultControllerClearAlgorithms(stream._transformStreamController);
- WritableStreamDefaultControllerErrorIfNeeded(stream._writable._writableStreamController, e);
- if (stream._backpressure) {
- // Pretend that pull() was called to permit any pending write() calls to complete. TransformStreamSetBackpressure()
- // cannot be called from enqueue() or pull() once the ReadableStream is errored, so this will be the final time
- // _backpressure is set.
- TransformStreamSetBackpressure(stream, false);
- }
- }
- function TransformStreamSetBackpressure(stream, backpressure) {
- // Passes also when called during construction.
- if (stream._backpressureChangePromise !== undefined) {
- stream._backpressureChangePromise_resolve();
- }
- stream._backpressureChangePromise = newPromise(resolve => {
- stream._backpressureChangePromise_resolve = resolve;
- });
- stream._backpressure = backpressure;
- }
- // Class TransformStreamDefaultController
- /**
- * Allows control of the {@link ReadableStream} and {@link WritableStream} of the associated {@link TransformStream}.
- *
- * @public
- */
- class TransformStreamDefaultController {
- constructor() {
- throw new TypeError('Illegal constructor');
- }
- /**
- * Returns the desired size to fill the readable side’s internal queue. It can be negative, if the queue is over-full.
- */
- get desiredSize() {
- if (!IsTransformStreamDefaultController(this)) {
- throw defaultControllerBrandCheckException('desiredSize');
- }
- const readableController = this._controlledTransformStream._readable._readableStreamController;
- return ReadableStreamDefaultControllerGetDesiredSize(readableController);
- }
- enqueue(chunk = undefined) {
- if (!IsTransformStreamDefaultController(this)) {
- throw defaultControllerBrandCheckException('enqueue');
- }
- TransformStreamDefaultControllerEnqueue(this, chunk);
- }
- /**
- * Errors both the readable side and the writable side of the controlled transform stream, making all future
- * interactions with it fail with the given error `e`. Any chunks queued for transformation will be discarded.
- */
- error(reason = undefined) {
- if (!IsTransformStreamDefaultController(this)) {
- throw defaultControllerBrandCheckException('error');
- }
- TransformStreamDefaultControllerError(this, reason);
- }
- /**
- * Closes the readable side and errors the writable side of the controlled transform stream. This is useful when the
- * transformer only needs to consume a portion of the chunks written to the writable side.
- */
- terminate() {
- if (!IsTransformStreamDefaultController(this)) {
- throw defaultControllerBrandCheckException('terminate');
- }
- TransformStreamDefaultControllerTerminate(this);
- }
- }
- Object.defineProperties(TransformStreamDefaultController.prototype, {
- enqueue: { enumerable: true },
- error: { enumerable: true },
- terminate: { enumerable: true },
- desiredSize: { enumerable: true }
- });
- if (typeof SymbolPolyfill.toStringTag === 'symbol') {
- Object.defineProperty(TransformStreamDefaultController.prototype, SymbolPolyfill.toStringTag, {
- value: 'TransformStreamDefaultController',
- configurable: true
- });
- }
- // Transform Stream Default Controller Abstract Operations
- function IsTransformStreamDefaultController(x) {
- if (!typeIsObject(x)) {
- return false;
- }
- if (!Object.prototype.hasOwnProperty.call(x, '_controlledTransformStream')) {
- return false;
- }
- return x instanceof TransformStreamDefaultController;
- }
- function SetUpTransformStreamDefaultController(stream, controller, transformAlgorithm, flushAlgorithm) {
- controller._controlledTransformStream = stream;
- stream._transformStreamController = controller;
- controller._transformAlgorithm = transformAlgorithm;
- controller._flushAlgorithm = flushAlgorithm;
- }
- function SetUpTransformStreamDefaultControllerFromTransformer(stream, transformer) {
- const controller = Object.create(TransformStreamDefaultController.prototype);
- let transformAlgorithm = (chunk) => {
- try {
- TransformStreamDefaultControllerEnqueue(controller, chunk);
- return promiseResolvedWith(undefined);
- }
- catch (transformResultE) {
- return promiseRejectedWith(transformResultE);
- }
- };
- let flushAlgorithm = () => promiseResolvedWith(undefined);
- if (transformer.transform !== undefined) {
- transformAlgorithm = chunk => transformer.transform(chunk, controller);
- }
- if (transformer.flush !== undefined) {
- flushAlgorithm = () => transformer.flush(controller);
- }
- SetUpTransformStreamDefaultController(stream, controller, transformAlgorithm, flushAlgorithm);
- }
- function TransformStreamDefaultControllerClearAlgorithms(controller) {
- controller._transformAlgorithm = undefined;
- controller._flushAlgorithm = undefined;
- }
- function TransformStreamDefaultControllerEnqueue(controller, chunk) {
- const stream = controller._controlledTransformStream;
- const readableController = stream._readable._readableStreamController;
- if (!ReadableStreamDefaultControllerCanCloseOrEnqueue(readableController)) {
- throw new TypeError('Readable side is not in a state that permits enqueue');
- }
- // We throttle transform invocations based on the backpressure of the ReadableStream, but we still
- // accept TransformStreamDefaultControllerEnqueue() calls.
- try {
- ReadableStreamDefaultControllerEnqueue(readableController, chunk);
- }
- catch (e) {
- // This happens when readableStrategy.size() throws.
- TransformStreamErrorWritableAndUnblockWrite(stream, e);
- throw stream._readable._storedError;
- }
- const backpressure = ReadableStreamDefaultControllerHasBackpressure(readableController);
- if (backpressure !== stream._backpressure) {
- TransformStreamSetBackpressure(stream, true);
- }
- }
- function TransformStreamDefaultControllerError(controller, e) {
- TransformStreamError(controller._controlledTransformStream, e);
- }
- function TransformStreamDefaultControllerPerformTransform(controller, chunk) {
- const transformPromise = controller._transformAlgorithm(chunk);
- return transformPromiseWith(transformPromise, undefined, r => {
- TransformStreamError(controller._controlledTransformStream, r);
- throw r;
- });
- }
- function TransformStreamDefaultControllerTerminate(controller) {
- const stream = controller._controlledTransformStream;
- const readableController = stream._readable._readableStreamController;
- ReadableStreamDefaultControllerClose(readableController);
- const error = new TypeError('TransformStream terminated');
- TransformStreamErrorWritableAndUnblockWrite(stream, error);
- }
- // TransformStreamDefaultSink Algorithms
- function TransformStreamDefaultSinkWriteAlgorithm(stream, chunk) {
- const controller = stream._transformStreamController;
- if (stream._backpressure) {
- const backpressureChangePromise = stream._backpressureChangePromise;
- return transformPromiseWith(backpressureChangePromise, () => {
- const writable = stream._writable;
- const state = writable._state;
- if (state === 'erroring') {
- throw writable._storedError;
- }
- return TransformStreamDefaultControllerPerformTransform(controller, chunk);
- });
- }
- return TransformStreamDefaultControllerPerformTransform(controller, chunk);
- }
- function TransformStreamDefaultSinkAbortAlgorithm(stream, reason) {
- // abort() is not called synchronously, so it is possible for abort() to be called when the stream is already
- // errored.
- TransformStreamError(stream, reason);
- return promiseResolvedWith(undefined);
- }
- function TransformStreamDefaultSinkCloseAlgorithm(stream) {
- // stream._readable cannot change after construction, so caching it across a call to user code is safe.
- const readable = stream._readable;
- const controller = stream._transformStreamController;
- const flushPromise = controller._flushAlgorithm();
- TransformStreamDefaultControllerClearAlgorithms(controller);
- // Return a promise that is fulfilled with undefined on success.
- return transformPromiseWith(flushPromise, () => {
- if (readable._state === 'errored') {
- throw readable._storedError;
- }
- ReadableStreamDefaultControllerClose(readable._readableStreamController);
- }, r => {
- TransformStreamError(stream, r);
- throw readable._storedError;
- });
- }
- // TransformStreamDefaultSource Algorithms
- function TransformStreamDefaultSourcePullAlgorithm(stream) {
- // Invariant. Enforced by the promises returned by start() and pull().
- TransformStreamSetBackpressure(stream, false);
- // Prevent the next pull() call until there is backpressure.
- return stream._backpressureChangePromise;
- }
- // Helper functions for the TransformStreamDefaultController.
- function defaultControllerBrandCheckException(name) {
- return new TypeError(`TransformStreamDefaultController.prototype.${name} can only be used on a TransformStreamDefaultController`);
- }
- // Helper functions for the TransformStream.
- function streamBrandCheckException(name) {
- return new TypeError(`TransformStream.prototype.${name} can only be used on a TransformStream`);
- }
-
- exports.ByteLengthQueuingStrategy = ByteLengthQueuingStrategy;
- exports.CountQueuingStrategy = CountQueuingStrategy;
- exports.ReadableByteStreamController = ReadableByteStreamController;
- exports.ReadableStream = ReadableStream;
- exports.ReadableStreamBYOBReader = ReadableStreamBYOBReader;
- exports.ReadableStreamBYOBRequest = ReadableStreamBYOBRequest;
- exports.ReadableStreamDefaultController = ReadableStreamDefaultController;
- exports.ReadableStreamDefaultReader = ReadableStreamDefaultReader;
- exports.TransformStream = TransformStream;
- exports.TransformStreamDefaultController = TransformStreamDefaultController;
- exports.WritableStream = WritableStream;
- exports.WritableStreamDefaultController = WritableStreamDefaultController;
- exports.WritableStreamDefaultWriter = WritableStreamDefaultWriter;
-
- Object.defineProperty(exports, '__esModule', { value: true });
+Object.defineProperty(URL.prototype, "host", {
+ get() {
+ return this[impl].host;
+ },
+ set(V) {
+ V = conversions["USVString"](V);
+ this[impl].host = V;
+ },
+ enumerable: true,
+ configurable: true
+});
-})));
-//# sourceMappingURL=ponyfill.es2018.js.map
+Object.defineProperty(URL.prototype, "hostname", {
+ get() {
+ return this[impl].hostname;
+ },
+ set(V) {
+ V = conversions["USVString"](V);
+ this[impl].hostname = V;
+ },
+ enumerable: true,
+ configurable: true
+});
+Object.defineProperty(URL.prototype, "port", {
+ get() {
+ return this[impl].port;
+ },
+ set(V) {
+ V = conversions["USVString"](V);
+ this[impl].port = V;
+ },
+ enumerable: true,
+ configurable: true
+});
-/***/ }),
+Object.defineProperty(URL.prototype, "pathname", {
+ get() {
+ return this[impl].pathname;
+ },
+ set(V) {
+ V = conversions["USVString"](V);
+ this[impl].pathname = V;
+ },
+ enumerable: true,
+ configurable: true
+});
-/***/ 4886:
-/***/ ((module) => {
+Object.defineProperty(URL.prototype, "search", {
+ get() {
+ return this[impl].search;
+ },
+ set(V) {
+ V = conversions["USVString"](V);
+ this[impl].search = V;
+ },
+ enumerable: true,
+ configurable: true
+});
-"use strict";
+Object.defineProperty(URL.prototype, "hash", {
+ get() {
+ return this[impl].hash;
+ },
+ set(V) {
+ V = conversions["USVString"](V);
+ this[impl].hash = V;
+ },
+ enumerable: true,
+ configurable: true
+});
-var conversions = {};
-module.exports = conversions;
+module.exports = {
+ is(obj) {
+ return !!obj && obj[impl] instanceof Impl.implementation;
+ },
+ create(constructorArgs, privateData) {
+ let obj = Object.create(URL.prototype);
+ this.setup(obj, constructorArgs, privateData);
+ return obj;
+ },
+ setup(obj, constructorArgs, privateData) {
+ if (!privateData) privateData = {};
+ privateData.wrapper = obj;
-function sign(x) {
- return x < 0 ? -1 : 1;
-}
+ obj[impl] = new Impl.implementation(constructorArgs, privateData);
+ obj[impl][utils.wrapperSymbol] = obj;
+ },
+ interface: URL,
+ expose: {
+ Window: { URL: URL },
+ Worker: { URL: URL }
+ }
+};
-function evenRound(x) {
- // Round x to the nearest integer, choosing the even integer if it lies halfway between two.
- if ((x % 1) === 0.5 && (x & 1) === 0) { // [even number].5; round down (i.e. floor)
- return Math.floor(x);
- } else {
- return Math.round(x);
- }
-}
-function createNumberConversion(bitLength, typeOpts) {
- if (!typeOpts.unsigned) {
- --bitLength;
- }
- const lowerBound = typeOpts.unsigned ? 0 : -Math.pow(2, bitLength);
- const upperBound = Math.pow(2, bitLength) - 1;
- const moduloVal = typeOpts.moduloBitLength ? Math.pow(2, typeOpts.moduloBitLength) : Math.pow(2, bitLength);
- const moduloBound = typeOpts.moduloBitLength ? Math.pow(2, typeOpts.moduloBitLength - 1) : Math.pow(2, bitLength - 1);
+/***/ }),
- return function(V, opts) {
- if (!opts) opts = {};
+/***/ 8665:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
- let x = +V;
+"use strict";
- if (opts.enforceRange) {
- if (!Number.isFinite(x)) {
- throw new TypeError("Argument is not a finite number");
- }
- x = sign(x) * Math.floor(Math.abs(x));
- if (x < lowerBound || x > upperBound) {
- throw new TypeError("Argument is not in byte range");
- }
+exports.URL = __nccwpck_require__(3394)["interface"];
+exports.serializeURL = __nccwpck_require__(2158).serializeURL;
+exports.serializeURLOrigin = __nccwpck_require__(2158).serializeURLOrigin;
+exports.basicURLParse = __nccwpck_require__(2158).basicURLParse;
+exports.setTheUsername = __nccwpck_require__(2158).setTheUsername;
+exports.setThePassword = __nccwpck_require__(2158).setThePassword;
+exports.serializeHost = __nccwpck_require__(2158).serializeHost;
+exports.serializeInteger = __nccwpck_require__(2158).serializeInteger;
+exports.parseURL = __nccwpck_require__(2158).parseURL;
- return x;
- }
- if (!isNaN(x) && opts.clamp) {
- x = evenRound(x);
+/***/ }),
- if (x < lowerBound) x = lowerBound;
- if (x > upperBound) x = upperBound;
- return x;
- }
-
- if (!Number.isFinite(x) || x === 0) {
- return 0;
- }
-
- x = sign(x) * Math.floor(Math.abs(x));
- x = x % moduloVal;
-
- if (!typeOpts.unsigned && x >= moduloBound) {
- return x - moduloVal;
- } else if (typeOpts.unsigned) {
- if (x < 0) {
- x += moduloVal;
- } else if (x === -0) { // don't return negative zero
- return 0;
- }
- }
-
- return x;
- }
-}
-
-conversions["void"] = function () {
- return undefined;
-};
-
-conversions["boolean"] = function (val) {
- return !!val;
-};
-
-conversions["byte"] = createNumberConversion(8, { unsigned: false });
-conversions["octet"] = createNumberConversion(8, { unsigned: true });
-
-conversions["short"] = createNumberConversion(16, { unsigned: false });
-conversions["unsigned short"] = createNumberConversion(16, { unsigned: true });
-
-conversions["long"] = createNumberConversion(32, { unsigned: false });
-conversions["unsigned long"] = createNumberConversion(32, { unsigned: true });
-
-conversions["long long"] = createNumberConversion(32, { unsigned: false, moduloBitLength: 64 });
-conversions["unsigned long long"] = createNumberConversion(32, { unsigned: true, moduloBitLength: 64 });
-
-conversions["double"] = function (V) {
- const x = +V;
-
- if (!Number.isFinite(x)) {
- throw new TypeError("Argument is not a finite floating-point value");
- }
-
- return x;
-};
-
-conversions["unrestricted double"] = function (V) {
- const x = +V;
-
- if (isNaN(x)) {
- throw new TypeError("Argument is NaN");
- }
-
- return x;
-};
-
-// not quite valid, but good enough for JS
-conversions["float"] = conversions["double"];
-conversions["unrestricted float"] = conversions["unrestricted double"];
-
-conversions["DOMString"] = function (V, opts) {
- if (!opts) opts = {};
-
- if (opts.treatNullAsEmptyString && V === null) {
- return "";
- }
-
- return String(V);
-};
-
-conversions["ByteString"] = function (V, opts) {
- const x = String(V);
- let c = undefined;
- for (let i = 0; (c = x.codePointAt(i)) !== undefined; ++i) {
- if (c > 255) {
- throw new TypeError("Argument is not a valid bytestring");
- }
- }
-
- return x;
-};
-
-conversions["USVString"] = function (V) {
- const S = String(V);
- const n = S.length;
- const U = [];
- for (let i = 0; i < n; ++i) {
- const c = S.charCodeAt(i);
- if (c < 0xD800 || c > 0xDFFF) {
- U.push(String.fromCodePoint(c));
- } else if (0xDC00 <= c && c <= 0xDFFF) {
- U.push(String.fromCodePoint(0xFFFD));
- } else {
- if (i === n - 1) {
- U.push(String.fromCodePoint(0xFFFD));
- } else {
- const d = S.charCodeAt(i + 1);
- if (0xDC00 <= d && d <= 0xDFFF) {
- const a = c & 0x3FF;
- const b = d & 0x3FF;
- U.push(String.fromCodePoint((2 << 15) + (2 << 9) * a + b));
- ++i;
- } else {
- U.push(String.fromCodePoint(0xFFFD));
- }
- }
- }
- }
-
- return U.join('');
-};
-
-conversions["Date"] = function (V, opts) {
- if (!(V instanceof Date)) {
- throw new TypeError("Argument is not a Date object");
- }
- if (isNaN(V)) {
- return undefined;
- }
-
- return V;
-};
-
-conversions["RegExp"] = function (V, opts) {
- if (!(V instanceof RegExp)) {
- V = new RegExp(V);
- }
-
- return V;
-};
-
-
-/***/ }),
-
-/***/ 7537:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+/***/ 2158:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
+
+const punycode = __nccwpck_require__(5477);
+const tr46 = __nccwpck_require__(4256);
+
+const specialSchemes = {
+ ftp: 21,
+ file: null,
+ gopher: 70,
+ http: 80,
+ https: 443,
+ ws: 80,
+ wss: 443
+};
+
+const failure = Symbol("failure");
+
+function countSymbols(str) {
+ return punycode.ucs2.decode(str).length;
+}
+
+function at(input, idx) {
+ const c = input[idx];
+ return isNaN(c) ? undefined : String.fromCodePoint(c);
+}
+
+function isASCIIDigit(c) {
+ return c >= 0x30 && c <= 0x39;
+}
+
+function isASCIIAlpha(c) {
+ return (c >= 0x41 && c <= 0x5A) || (c >= 0x61 && c <= 0x7A);
+}
+
+function isASCIIAlphanumeric(c) {
+ return isASCIIAlpha(c) || isASCIIDigit(c);
+}
+
+function isASCIIHex(c) {
+ return isASCIIDigit(c) || (c >= 0x41 && c <= 0x46) || (c >= 0x61 && c <= 0x66);
+}
+
+function isSingleDot(buffer) {
+ return buffer === "." || buffer.toLowerCase() === "%2e";
+}
+
+function isDoubleDot(buffer) {
+ buffer = buffer.toLowerCase();
+ return buffer === ".." || buffer === "%2e." || buffer === ".%2e" || buffer === "%2e%2e";
+}
+
+function isWindowsDriveLetterCodePoints(cp1, cp2) {
+ return isASCIIAlpha(cp1) && (cp2 === 58 || cp2 === 124);
+}
+
+function isWindowsDriveLetterString(string) {
+ return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && (string[1] === ":" || string[1] === "|");
+}
+
+function isNormalizedWindowsDriveLetterString(string) {
+ return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && string[1] === ":";
+}
+
+function containsForbiddenHostCodePoint(string) {
+ return string.search(/\u0000|\u0009|\u000A|\u000D|\u0020|#|%|\/|:|\?|@|\[|\\|\]/) !== -1;
+}
+
+function containsForbiddenHostCodePointExcludingPercent(string) {
+ return string.search(/\u0000|\u0009|\u000A|\u000D|\u0020|#|\/|:|\?|@|\[|\\|\]/) !== -1;
+}
+
+function isSpecialScheme(scheme) {
+ return specialSchemes[scheme] !== undefined;
+}
+
+function isSpecial(url) {
+ return isSpecialScheme(url.scheme);
+}
+
+function defaultPort(scheme) {
+ return specialSchemes[scheme];
+}
+
+function percentEncode(c) {
+ let hex = c.toString(16).toUpperCase();
+ if (hex.length === 1) {
+ hex = "0" + hex;
+ }
+
+ return "%" + hex;
+}
+
+function utf8PercentEncode(c) {
+ const buf = new Buffer(c);
+
+ let str = "";
+
+ for (let i = 0; i < buf.length; ++i) {
+ str += percentEncode(buf[i]);
+ }
+
+ return str;
+}
+
+function utf8PercentDecode(str) {
+ const input = new Buffer(str);
+ const output = [];
+ for (let i = 0; i < input.length; ++i) {
+ if (input[i] !== 37) {
+ output.push(input[i]);
+ } else if (input[i] === 37 && isASCIIHex(input[i + 1]) && isASCIIHex(input[i + 2])) {
+ output.push(parseInt(input.slice(i + 1, i + 3).toString(), 16));
+ i += 2;
+ } else {
+ output.push(input[i]);
+ }
+ }
+ return new Buffer(output).toString();
+}
+
+function isC0ControlPercentEncode(c) {
+ return c <= 0x1F || c > 0x7E;
+}
+
+const extraPathPercentEncodeSet = new Set([32, 34, 35, 60, 62, 63, 96, 123, 125]);
+function isPathPercentEncode(c) {
+ return isC0ControlPercentEncode(c) || extraPathPercentEncodeSet.has(c);
+}
+
+const extraUserinfoPercentEncodeSet =
+ new Set([47, 58, 59, 61, 64, 91, 92, 93, 94, 124]);
+function isUserinfoPercentEncode(c) {
+ return isPathPercentEncode(c) || extraUserinfoPercentEncodeSet.has(c);
+}
+
+function percentEncodeChar(c, encodeSetPredicate) {
+ const cStr = String.fromCodePoint(c);
+
+ if (encodeSetPredicate(c)) {
+ return utf8PercentEncode(cStr);
+ }
+
+ return cStr;
+}
+
+function parseIPv4Number(input) {
+ let R = 10;
+
+ if (input.length >= 2 && input.charAt(0) === "0" && input.charAt(1).toLowerCase() === "x") {
+ input = input.substring(2);
+ R = 16;
+ } else if (input.length >= 2 && input.charAt(0) === "0") {
+ input = input.substring(1);
+ R = 8;
+ }
+
+ if (input === "") {
+ return 0;
+ }
+
+ const regex = R === 10 ? /[^0-9]/ : (R === 16 ? /[^0-9A-Fa-f]/ : /[^0-7]/);
+ if (regex.test(input)) {
+ return failure;
+ }
+
+ return parseInt(input, R);
+}
+
+function parseIPv4(input) {
+ const parts = input.split(".");
+ if (parts[parts.length - 1] === "") {
+ if (parts.length > 1) {
+ parts.pop();
+ }
+ }
+
+ if (parts.length > 4) {
+ return input;
+ }
+
+ const numbers = [];
+ for (const part of parts) {
+ if (part === "") {
+ return input;
+ }
+ const n = parseIPv4Number(part);
+ if (n === failure) {
+ return input;
+ }
+
+ numbers.push(n);
+ }
+
+ for (let i = 0; i < numbers.length - 1; ++i) {
+ if (numbers[i] > 255) {
+ return failure;
+ }
+ }
+ if (numbers[numbers.length - 1] >= Math.pow(256, 5 - numbers.length)) {
+ return failure;
+ }
+
+ let ipv4 = numbers.pop();
+ let counter = 0;
+
+ for (const n of numbers) {
+ ipv4 += n * Math.pow(256, 3 - counter);
+ ++counter;
+ }
+
+ return ipv4;
+}
+
+function serializeIPv4(address) {
+ let output = "";
+ let n = address;
+
+ for (let i = 1; i <= 4; ++i) {
+ output = String(n % 256) + output;
+ if (i !== 4) {
+ output = "." + output;
+ }
+ n = Math.floor(n / 256);
+ }
+
+ return output;
+}
+
+function parseIPv6(input) {
+ const address = [0, 0, 0, 0, 0, 0, 0, 0];
+ let pieceIndex = 0;
+ let compress = null;
+ let pointer = 0;
+
+ input = punycode.ucs2.decode(input);
+
+ if (input[pointer] === 58) {
+ if (input[pointer + 1] !== 58) {
+ return failure;
+ }
+
+ pointer += 2;
+ ++pieceIndex;
+ compress = pieceIndex;
+ }
+
+ while (pointer < input.length) {
+ if (pieceIndex === 8) {
+ return failure;
+ }
+
+ if (input[pointer] === 58) {
+ if (compress !== null) {
+ return failure;
+ }
+ ++pointer;
+ ++pieceIndex;
+ compress = pieceIndex;
+ continue;
+ }
+
+ let value = 0;
+ let length = 0;
+
+ while (length < 4 && isASCIIHex(input[pointer])) {
+ value = value * 0x10 + parseInt(at(input, pointer), 16);
+ ++pointer;
+ ++length;
+ }
+
+ if (input[pointer] === 46) {
+ if (length === 0) {
+ return failure;
+ }
+
+ pointer -= length;
+
+ if (pieceIndex > 6) {
+ return failure;
+ }
+
+ let numbersSeen = 0;
+
+ while (input[pointer] !== undefined) {
+ let ipv4Piece = null;
+
+ if (numbersSeen > 0) {
+ if (input[pointer] === 46 && numbersSeen < 4) {
+ ++pointer;
+ } else {
+ return failure;
+ }
+ }
+
+ if (!isASCIIDigit(input[pointer])) {
+ return failure;
+ }
+
+ while (isASCIIDigit(input[pointer])) {
+ const number = parseInt(at(input, pointer));
+ if (ipv4Piece === null) {
+ ipv4Piece = number;
+ } else if (ipv4Piece === 0) {
+ return failure;
+ } else {
+ ipv4Piece = ipv4Piece * 10 + number;
+ }
+ if (ipv4Piece > 255) {
+ return failure;
+ }
+ ++pointer;
+ }
+
+ address[pieceIndex] = address[pieceIndex] * 0x100 + ipv4Piece;
+
+ ++numbersSeen;
+
+ if (numbersSeen === 2 || numbersSeen === 4) {
+ ++pieceIndex;
+ }
+ }
+
+ if (numbersSeen !== 4) {
+ return failure;
+ }
+
+ break;
+ } else if (input[pointer] === 58) {
+ ++pointer;
+ if (input[pointer] === undefined) {
+ return failure;
+ }
+ } else if (input[pointer] !== undefined) {
+ return failure;
+ }
+
+ address[pieceIndex] = value;
+ ++pieceIndex;
+ }
+
+ if (compress !== null) {
+ let swaps = pieceIndex - compress;
+ pieceIndex = 7;
+ while (pieceIndex !== 0 && swaps > 0) {
+ const temp = address[compress + swaps - 1];
+ address[compress + swaps - 1] = address[pieceIndex];
+ address[pieceIndex] = temp;
+ --pieceIndex;
+ --swaps;
+ }
+ } else if (compress === null && pieceIndex !== 8) {
+ return failure;
+ }
+
+ return address;
+}
+
+function serializeIPv6(address) {
+ let output = "";
+ const seqResult = findLongestZeroSequence(address);
+ const compress = seqResult.idx;
+ let ignore0 = false;
+
+ for (let pieceIndex = 0; pieceIndex <= 7; ++pieceIndex) {
+ if (ignore0 && address[pieceIndex] === 0) {
+ continue;
+ } else if (ignore0) {
+ ignore0 = false;
+ }
+
+ if (compress === pieceIndex) {
+ const separator = pieceIndex === 0 ? "::" : ":";
+ output += separator;
+ ignore0 = true;
+ continue;
+ }
+
+ output += address[pieceIndex].toString(16);
+
+ if (pieceIndex !== 7) {
+ output += ":";
+ }
+ }
+
+ return output;
+}
+
+function parseHost(input, isSpecialArg) {
+ if (input[0] === "[") {
+ if (input[input.length - 1] !== "]") {
+ return failure;
+ }
+
+ return parseIPv6(input.substring(1, input.length - 1));
+ }
+
+ if (!isSpecialArg) {
+ return parseOpaqueHost(input);
+ }
+
+ const domain = utf8PercentDecode(input);
+ const asciiDomain = tr46.toASCII(domain, false, tr46.PROCESSING_OPTIONS.NONTRANSITIONAL, false);
+ if (asciiDomain === null) {
+ return failure;
+ }
+
+ if (containsForbiddenHostCodePoint(asciiDomain)) {
+ return failure;
+ }
+
+ const ipv4Host = parseIPv4(asciiDomain);
+ if (typeof ipv4Host === "number" || ipv4Host === failure) {
+ return ipv4Host;
+ }
+
+ return asciiDomain;
+}
+
+function parseOpaqueHost(input) {
+ if (containsForbiddenHostCodePointExcludingPercent(input)) {
+ return failure;
+ }
+
+ let output = "";
+ const decoded = punycode.ucs2.decode(input);
+ for (let i = 0; i < decoded.length; ++i) {
+ output += percentEncodeChar(decoded[i], isC0ControlPercentEncode);
+ }
+ return output;
+}
+
+function findLongestZeroSequence(arr) {
+ let maxIdx = null;
+ let maxLen = 1; // only find elements > 1
+ let currStart = null;
+ let currLen = 0;
+
+ for (let i = 0; i < arr.length; ++i) {
+ if (arr[i] !== 0) {
+ if (currLen > maxLen) {
+ maxIdx = currStart;
+ maxLen = currLen;
+ }
+
+ currStart = null;
+ currLen = 0;
+ } else {
+ if (currStart === null) {
+ currStart = i;
+ }
+ ++currLen;
+ }
+ }
+
+ // if trailing zeros
+ if (currLen > maxLen) {
+ maxIdx = currStart;
+ maxLen = currLen;
+ }
+
+ return {
+ idx: maxIdx,
+ len: maxLen
+ };
+}
+
+function serializeHost(host) {
+ if (typeof host === "number") {
+ return serializeIPv4(host);
+ }
+
+ // IPv6 serializer
+ if (host instanceof Array) {
+ return "[" + serializeIPv6(host) + "]";
+ }
+
+ return host;
+}
+
+function trimControlChars(url) {
+ return url.replace(/^[\u0000-\u001F\u0020]+|[\u0000-\u001F\u0020]+$/g, "");
+}
+
+function trimTabAndNewline(url) {
+ return url.replace(/\u0009|\u000A|\u000D/g, "");
+}
+
+function shortenPath(url) {
+ const path = url.path;
+ if (path.length === 0) {
+ return;
+ }
+ if (url.scheme === "file" && path.length === 1 && isNormalizedWindowsDriveLetter(path[0])) {
+ return;
+ }
+
+ path.pop();
+}
+
+function includesCredentials(url) {
+ return url.username !== "" || url.password !== "";
+}
+
+function cannotHaveAUsernamePasswordPort(url) {
+ return url.host === null || url.host === "" || url.cannotBeABaseURL || url.scheme === "file";
+}
+
+function isNormalizedWindowsDriveLetter(string) {
+ return /^[A-Za-z]:$/.test(string);
+}
+
+function URLStateMachine(input, base, encodingOverride, url, stateOverride) {
+ this.pointer = 0;
+ this.input = input;
+ this.base = base || null;
+ this.encodingOverride = encodingOverride || "utf-8";
+ this.stateOverride = stateOverride;
+ this.url = url;
+ this.failure = false;
+ this.parseError = false;
+
+ if (!this.url) {
+ this.url = {
+ scheme: "",
+ username: "",
+ password: "",
+ host: null,
+ port: null,
+ path: [],
+ query: null,
+ fragment: null,
+
+ cannotBeABaseURL: false
+ };
+
+ const res = trimControlChars(this.input);
+ if (res !== this.input) {
+ this.parseError = true;
+ }
+ this.input = res;
+ }
+
+ const res = trimTabAndNewline(this.input);
+ if (res !== this.input) {
+ this.parseError = true;
+ }
+ this.input = res;
+
+ this.state = stateOverride || "scheme start";
+
+ this.buffer = "";
+ this.atFlag = false;
+ this.arrFlag = false;
+ this.passwordTokenSeenFlag = false;
+
+ this.input = punycode.ucs2.decode(this.input);
+
+ for (; this.pointer <= this.input.length; ++this.pointer) {
+ const c = this.input[this.pointer];
+ const cStr = isNaN(c) ? undefined : String.fromCodePoint(c);
+
+ // exec state machine
+ const ret = this["parse " + this.state](c, cStr);
+ if (!ret) {
+ break; // terminate algorithm
+ } else if (ret === failure) {
+ this.failure = true;
+ break;
+ }
+ }
+}
+
+URLStateMachine.prototype["parse scheme start"] = function parseSchemeStart(c, cStr) {
+ if (isASCIIAlpha(c)) {
+ this.buffer += cStr.toLowerCase();
+ this.state = "scheme";
+ } else if (!this.stateOverride) {
+ this.state = "no scheme";
+ --this.pointer;
+ } else {
+ this.parseError = true;
+ return failure;
+ }
+
+ return true;
+};
+
+URLStateMachine.prototype["parse scheme"] = function parseScheme(c, cStr) {
+ if (isASCIIAlphanumeric(c) || c === 43 || c === 45 || c === 46) {
+ this.buffer += cStr.toLowerCase();
+ } else if (c === 58) {
+ if (this.stateOverride) {
+ if (isSpecial(this.url) && !isSpecialScheme(this.buffer)) {
+ return false;
+ }
+
+ if (!isSpecial(this.url) && isSpecialScheme(this.buffer)) {
+ return false;
+ }
+
+ if ((includesCredentials(this.url) || this.url.port !== null) && this.buffer === "file") {
+ return false;
+ }
+
+ if (this.url.scheme === "file" && (this.url.host === "" || this.url.host === null)) {
+ return false;
+ }
+ }
+ this.url.scheme = this.buffer;
+ this.buffer = "";
+ if (this.stateOverride) {
+ return false;
+ }
+ if (this.url.scheme === "file") {
+ if (this.input[this.pointer + 1] !== 47 || this.input[this.pointer + 2] !== 47) {
+ this.parseError = true;
+ }
+ this.state = "file";
+ } else if (isSpecial(this.url) && this.base !== null && this.base.scheme === this.url.scheme) {
+ this.state = "special relative or authority";
+ } else if (isSpecial(this.url)) {
+ this.state = "special authority slashes";
+ } else if (this.input[this.pointer + 1] === 47) {
+ this.state = "path or authority";
+ ++this.pointer;
+ } else {
+ this.url.cannotBeABaseURL = true;
+ this.url.path.push("");
+ this.state = "cannot-be-a-base-URL path";
+ }
+ } else if (!this.stateOverride) {
+ this.buffer = "";
+ this.state = "no scheme";
+ this.pointer = -1;
+ } else {
+ this.parseError = true;
+ return failure;
+ }
+
+ return true;
+};
+
+URLStateMachine.prototype["parse no scheme"] = function parseNoScheme(c) {
+ if (this.base === null || (this.base.cannotBeABaseURL && c !== 35)) {
+ return failure;
+ } else if (this.base.cannotBeABaseURL && c === 35) {
+ this.url.scheme = this.base.scheme;
+ this.url.path = this.base.path.slice();
+ this.url.query = this.base.query;
+ this.url.fragment = "";
+ this.url.cannotBeABaseURL = true;
+ this.state = "fragment";
+ } else if (this.base.scheme === "file") {
+ this.state = "file";
+ --this.pointer;
+ } else {
+ this.state = "relative";
+ --this.pointer;
+ }
+
+ return true;
+};
+
+URLStateMachine.prototype["parse special relative or authority"] = function parseSpecialRelativeOrAuthority(c) {
+ if (c === 47 && this.input[this.pointer + 1] === 47) {
+ this.state = "special authority ignore slashes";
+ ++this.pointer;
+ } else {
+ this.parseError = true;
+ this.state = "relative";
+ --this.pointer;
+ }
+
+ return true;
+};
+
+URLStateMachine.prototype["parse path or authority"] = function parsePathOrAuthority(c) {
+ if (c === 47) {
+ this.state = "authority";
+ } else {
+ this.state = "path";
+ --this.pointer;
+ }
+
+ return true;
+};
+
+URLStateMachine.prototype["parse relative"] = function parseRelative(c) {
+ this.url.scheme = this.base.scheme;
+ if (isNaN(c)) {
+ this.url.username = this.base.username;
+ this.url.password = this.base.password;
+ this.url.host = this.base.host;
+ this.url.port = this.base.port;
+ this.url.path = this.base.path.slice();
+ this.url.query = this.base.query;
+ } else if (c === 47) {
+ this.state = "relative slash";
+ } else if (c === 63) {
+ this.url.username = this.base.username;
+ this.url.password = this.base.password;
+ this.url.host = this.base.host;
+ this.url.port = this.base.port;
+ this.url.path = this.base.path.slice();
+ this.url.query = "";
+ this.state = "query";
+ } else if (c === 35) {
+ this.url.username = this.base.username;
+ this.url.password = this.base.password;
+ this.url.host = this.base.host;
+ this.url.port = this.base.port;
+ this.url.path = this.base.path.slice();
+ this.url.query = this.base.query;
+ this.url.fragment = "";
+ this.state = "fragment";
+ } else if (isSpecial(this.url) && c === 92) {
+ this.parseError = true;
+ this.state = "relative slash";
+ } else {
+ this.url.username = this.base.username;
+ this.url.password = this.base.password;
+ this.url.host = this.base.host;
+ this.url.port = this.base.port;
+ this.url.path = this.base.path.slice(0, this.base.path.length - 1);
+
+ this.state = "path";
+ --this.pointer;
+ }
+
+ return true;
+};
+
+URLStateMachine.prototype["parse relative slash"] = function parseRelativeSlash(c) {
+ if (isSpecial(this.url) && (c === 47 || c === 92)) {
+ if (c === 92) {
+ this.parseError = true;
+ }
+ this.state = "special authority ignore slashes";
+ } else if (c === 47) {
+ this.state = "authority";
+ } else {
+ this.url.username = this.base.username;
+ this.url.password = this.base.password;
+ this.url.host = this.base.host;
+ this.url.port = this.base.port;
+ this.state = "path";
+ --this.pointer;
+ }
+
+ return true;
+};
+
+URLStateMachine.prototype["parse special authority slashes"] = function parseSpecialAuthoritySlashes(c) {
+ if (c === 47 && this.input[this.pointer + 1] === 47) {
+ this.state = "special authority ignore slashes";
+ ++this.pointer;
+ } else {
+ this.parseError = true;
+ this.state = "special authority ignore slashes";
+ --this.pointer;
+ }
+
+ return true;
+};
+
+URLStateMachine.prototype["parse special authority ignore slashes"] = function parseSpecialAuthorityIgnoreSlashes(c) {
+ if (c !== 47 && c !== 92) {
+ this.state = "authority";
+ --this.pointer;
+ } else {
+ this.parseError = true;
+ }
+
+ return true;
+};
+
+URLStateMachine.prototype["parse authority"] = function parseAuthority(c, cStr) {
+ if (c === 64) {
+ this.parseError = true;
+ if (this.atFlag) {
+ this.buffer = "%40" + this.buffer;
+ }
+ this.atFlag = true;
+
+ // careful, this is based on buffer and has its own pointer (this.pointer != pointer) and inner chars
+ const len = countSymbols(this.buffer);
+ for (let pointer = 0; pointer < len; ++pointer) {
+ const codePoint = this.buffer.codePointAt(pointer);
+
+ if (codePoint === 58 && !this.passwordTokenSeenFlag) {
+ this.passwordTokenSeenFlag = true;
+ continue;
+ }
+ const encodedCodePoints = percentEncodeChar(codePoint, isUserinfoPercentEncode);
+ if (this.passwordTokenSeenFlag) {
+ this.url.password += encodedCodePoints;
+ } else {
+ this.url.username += encodedCodePoints;
+ }
+ }
+ this.buffer = "";
+ } else if (isNaN(c) || c === 47 || c === 63 || c === 35 ||
+ (isSpecial(this.url) && c === 92)) {
+ if (this.atFlag && this.buffer === "") {
+ this.parseError = true;
+ return failure;
+ }
+ this.pointer -= countSymbols(this.buffer) + 1;
+ this.buffer = "";
+ this.state = "host";
+ } else {
+ this.buffer += cStr;
+ }
+
+ return true;
+};
+
+URLStateMachine.prototype["parse hostname"] =
+URLStateMachine.prototype["parse host"] = function parseHostName(c, cStr) {
+ if (this.stateOverride && this.url.scheme === "file") {
+ --this.pointer;
+ this.state = "file host";
+ } else if (c === 58 && !this.arrFlag) {
+ if (this.buffer === "") {
+ this.parseError = true;
+ return failure;
+ }
+
+ const host = parseHost(this.buffer, isSpecial(this.url));
+ if (host === failure) {
+ return failure;
+ }
+
+ this.url.host = host;
+ this.buffer = "";
+ this.state = "port";
+ if (this.stateOverride === "hostname") {
+ return false;
+ }
+ } else if (isNaN(c) || c === 47 || c === 63 || c === 35 ||
+ (isSpecial(this.url) && c === 92)) {
+ --this.pointer;
+ if (isSpecial(this.url) && this.buffer === "") {
+ this.parseError = true;
+ return failure;
+ } else if (this.stateOverride && this.buffer === "" &&
+ (includesCredentials(this.url) || this.url.port !== null)) {
+ this.parseError = true;
+ return false;
+ }
+
+ const host = parseHost(this.buffer, isSpecial(this.url));
+ if (host === failure) {
+ return failure;
+ }
+
+ this.url.host = host;
+ this.buffer = "";
+ this.state = "path start";
+ if (this.stateOverride) {
+ return false;
+ }
+ } else {
+ if (c === 91) {
+ this.arrFlag = true;
+ } else if (c === 93) {
+ this.arrFlag = false;
+ }
+ this.buffer += cStr;
+ }
+
+ return true;
+};
+
+URLStateMachine.prototype["parse port"] = function parsePort(c, cStr) {
+ if (isASCIIDigit(c)) {
+ this.buffer += cStr;
+ } else if (isNaN(c) || c === 47 || c === 63 || c === 35 ||
+ (isSpecial(this.url) && c === 92) ||
+ this.stateOverride) {
+ if (this.buffer !== "") {
+ const port = parseInt(this.buffer);
+ if (port > Math.pow(2, 16) - 1) {
+ this.parseError = true;
+ return failure;
+ }
+ this.url.port = port === defaultPort(this.url.scheme) ? null : port;
+ this.buffer = "";
+ }
+ if (this.stateOverride) {
+ return false;
+ }
+ this.state = "path start";
+ --this.pointer;
+ } else {
+ this.parseError = true;
+ return failure;
+ }
+
+ return true;
+};
+
+const fileOtherwiseCodePoints = new Set([47, 92, 63, 35]);
+
+URLStateMachine.prototype["parse file"] = function parseFile(c) {
+ this.url.scheme = "file";
+
+ if (c === 47 || c === 92) {
+ if (c === 92) {
+ this.parseError = true;
+ }
+ this.state = "file slash";
+ } else if (this.base !== null && this.base.scheme === "file") {
+ if (isNaN(c)) {
+ this.url.host = this.base.host;
+ this.url.path = this.base.path.slice();
+ this.url.query = this.base.query;
+ } else if (c === 63) {
+ this.url.host = this.base.host;
+ this.url.path = this.base.path.slice();
+ this.url.query = "";
+ this.state = "query";
+ } else if (c === 35) {
+ this.url.host = this.base.host;
+ this.url.path = this.base.path.slice();
+ this.url.query = this.base.query;
+ this.url.fragment = "";
+ this.state = "fragment";
+ } else {
+ if (this.input.length - this.pointer - 1 === 0 || // remaining consists of 0 code points
+ !isWindowsDriveLetterCodePoints(c, this.input[this.pointer + 1]) ||
+ (this.input.length - this.pointer - 1 >= 2 && // remaining has at least 2 code points
+ !fileOtherwiseCodePoints.has(this.input[this.pointer + 2]))) {
+ this.url.host = this.base.host;
+ this.url.path = this.base.path.slice();
+ shortenPath(this.url);
+ } else {
+ this.parseError = true;
+ }
+
+ this.state = "path";
+ --this.pointer;
+ }
+ } else {
+ this.state = "path";
+ --this.pointer;
+ }
+
+ return true;
+};
+
+URLStateMachine.prototype["parse file slash"] = function parseFileSlash(c) {
+ if (c === 47 || c === 92) {
+ if (c === 92) {
+ this.parseError = true;
+ }
+ this.state = "file host";
+ } else {
+ if (this.base !== null && this.base.scheme === "file") {
+ if (isNormalizedWindowsDriveLetterString(this.base.path[0])) {
+ this.url.path.push(this.base.path[0]);
+ } else {
+ this.url.host = this.base.host;
+ }
+ }
+ this.state = "path";
+ --this.pointer;
+ }
+
+ return true;
+};
+
+URLStateMachine.prototype["parse file host"] = function parseFileHost(c, cStr) {
+ if (isNaN(c) || c === 47 || c === 92 || c === 63 || c === 35) {
+ --this.pointer;
+ if (!this.stateOverride && isWindowsDriveLetterString(this.buffer)) {
+ this.parseError = true;
+ this.state = "path";
+ } else if (this.buffer === "") {
+ this.url.host = "";
+ if (this.stateOverride) {
+ return false;
+ }
+ this.state = "path start";
+ } else {
+ let host = parseHost(this.buffer, isSpecial(this.url));
+ if (host === failure) {
+ return failure;
+ }
+ if (host === "localhost") {
+ host = "";
+ }
+ this.url.host = host;
+
+ if (this.stateOverride) {
+ return false;
+ }
+
+ this.buffer = "";
+ this.state = "path start";
+ }
+ } else {
+ this.buffer += cStr;
+ }
+
+ return true;
+};
+
+URLStateMachine.prototype["parse path start"] = function parsePathStart(c) {
+ if (isSpecial(this.url)) {
+ if (c === 92) {
+ this.parseError = true;
+ }
+ this.state = "path";
+
+ if (c !== 47 && c !== 92) {
+ --this.pointer;
+ }
+ } else if (!this.stateOverride && c === 63) {
+ this.url.query = "";
+ this.state = "query";
+ } else if (!this.stateOverride && c === 35) {
+ this.url.fragment = "";
+ this.state = "fragment";
+ } else if (c !== undefined) {
+ this.state = "path";
+ if (c !== 47) {
+ --this.pointer;
+ }
+ }
+
+ return true;
+};
+
+URLStateMachine.prototype["parse path"] = function parsePath(c) {
+ if (isNaN(c) || c === 47 || (isSpecial(this.url) && c === 92) ||
+ (!this.stateOverride && (c === 63 || c === 35))) {
+ if (isSpecial(this.url) && c === 92) {
+ this.parseError = true;
+ }
+
+ if (isDoubleDot(this.buffer)) {
+ shortenPath(this.url);
+ if (c !== 47 && !(isSpecial(this.url) && c === 92)) {
+ this.url.path.push("");
+ }
+ } else if (isSingleDot(this.buffer) && c !== 47 &&
+ !(isSpecial(this.url) && c === 92)) {
+ this.url.path.push("");
+ } else if (!isSingleDot(this.buffer)) {
+ if (this.url.scheme === "file" && this.url.path.length === 0 && isWindowsDriveLetterString(this.buffer)) {
+ if (this.url.host !== "" && this.url.host !== null) {
+ this.parseError = true;
+ this.url.host = "";
+ }
+ this.buffer = this.buffer[0] + ":";
+ }
+ this.url.path.push(this.buffer);
+ }
+ this.buffer = "";
+ if (this.url.scheme === "file" && (c === undefined || c === 63 || c === 35)) {
+ while (this.url.path.length > 1 && this.url.path[0] === "") {
+ this.parseError = true;
+ this.url.path.shift();
+ }
+ }
+ if (c === 63) {
+ this.url.query = "";
+ this.state = "query";
+ }
+ if (c === 35) {
+ this.url.fragment = "";
+ this.state = "fragment";
+ }
+ } else {
+ // TODO: If c is not a URL code point and not "%", parse error.
+
+ if (c === 37 &&
+ (!isASCIIHex(this.input[this.pointer + 1]) ||
+ !isASCIIHex(this.input[this.pointer + 2]))) {
+ this.parseError = true;
+ }
+
+ this.buffer += percentEncodeChar(c, isPathPercentEncode);
+ }
+
+ return true;
+};
+
+URLStateMachine.prototype["parse cannot-be-a-base-URL path"] = function parseCannotBeABaseURLPath(c) {
+ if (c === 63) {
+ this.url.query = "";
+ this.state = "query";
+ } else if (c === 35) {
+ this.url.fragment = "";
+ this.state = "fragment";
+ } else {
+ // TODO: Add: not a URL code point
+ if (!isNaN(c) && c !== 37) {
+ this.parseError = true;
+ }
+
+ if (c === 37 &&
+ (!isASCIIHex(this.input[this.pointer + 1]) ||
+ !isASCIIHex(this.input[this.pointer + 2]))) {
+ this.parseError = true;
+ }
+
+ if (!isNaN(c)) {
+ this.url.path[0] = this.url.path[0] + percentEncodeChar(c, isC0ControlPercentEncode);
+ }
+ }
+
+ return true;
+};
+
+URLStateMachine.prototype["parse query"] = function parseQuery(c, cStr) {
+ if (isNaN(c) || (!this.stateOverride && c === 35)) {
+ if (!isSpecial(this.url) || this.url.scheme === "ws" || this.url.scheme === "wss") {
+ this.encodingOverride = "utf-8";
+ }
+
+ const buffer = new Buffer(this.buffer); // TODO: Use encoding override instead
+ for (let i = 0; i < buffer.length; ++i) {
+ if (buffer[i] < 0x21 || buffer[i] > 0x7E || buffer[i] === 0x22 || buffer[i] === 0x23 ||
+ buffer[i] === 0x3C || buffer[i] === 0x3E) {
+ this.url.query += percentEncode(buffer[i]);
+ } else {
+ this.url.query += String.fromCodePoint(buffer[i]);
+ }
+ }
+
+ this.buffer = "";
+ if (c === 35) {
+ this.url.fragment = "";
+ this.state = "fragment";
+ }
+ } else {
+ // TODO: If c is not a URL code point and not "%", parse error.
+ if (c === 37 &&
+ (!isASCIIHex(this.input[this.pointer + 1]) ||
+ !isASCIIHex(this.input[this.pointer + 2]))) {
+ this.parseError = true;
+ }
+
+ this.buffer += cStr;
+ }
+
+ return true;
+};
+
+URLStateMachine.prototype["parse fragment"] = function parseFragment(c) {
+ if (isNaN(c)) { // do nothing
+ } else if (c === 0x0) {
+ this.parseError = true;
+ } else {
+ // TODO: If c is not a URL code point and not "%", parse error.
+ if (c === 37 &&
+ (!isASCIIHex(this.input[this.pointer + 1]) ||
+ !isASCIIHex(this.input[this.pointer + 2]))) {
+ this.parseError = true;
+ }
+
+ this.url.fragment += percentEncodeChar(c, isC0ControlPercentEncode);
+ }
+
+ return true;
+};
+
+function serializeURL(url, excludeFragment) {
+ let output = url.scheme + ":";
+ if (url.host !== null) {
+ output += "//";
+
+ if (url.username !== "" || url.password !== "") {
+ output += url.username;
+ if (url.password !== "") {
+ output += ":" + url.password;
+ }
+ output += "@";
+ }
+
+ output += serializeHost(url.host);
+
+ if (url.port !== null) {
+ output += ":" + url.port;
+ }
+ } else if (url.host === null && url.scheme === "file") {
+ output += "//";
+ }
+
+ if (url.cannotBeABaseURL) {
+ output += url.path[0];
+ } else {
+ for (const string of url.path) {
+ output += "/" + string;
+ }
+ }
+
+ if (url.query !== null) {
+ output += "?" + url.query;
+ }
+
+ if (!excludeFragment && url.fragment !== null) {
+ output += "#" + url.fragment;
+ }
+
+ return output;
+}
+
+function serializeOrigin(tuple) {
+ let result = tuple.scheme + "://";
+ result += serializeHost(tuple.host);
+
+ if (tuple.port !== null) {
+ result += ":" + tuple.port;
+ }
+
+ return result;
+}
+
+module.exports.serializeURL = serializeURL;
+
+module.exports.serializeURLOrigin = function (url) {
+ // https://url.spec.whatwg.org/#concept-url-origin
+ switch (url.scheme) {
+ case "blob":
+ try {
+ return module.exports.serializeURLOrigin(module.exports.parseURL(url.path[0]));
+ } catch (e) {
+ // serializing an opaque origin returns "null"
+ return "null";
+ }
+ case "ftp":
+ case "gopher":
+ case "http":
+ case "https":
+ case "ws":
+ case "wss":
+ return serializeOrigin({
+ scheme: url.scheme,
+ host: url.host,
+ port: url.port
+ });
+ case "file":
+ // spec says "exercise to the reader", chrome says "file://"
+ return "file://";
+ default:
+ // serializing an opaque origin returns "null"
+ return "null";
+ }
+};
+
+module.exports.basicURLParse = function (input, options) {
+ if (options === undefined) {
+ options = {};
+ }
+
+ const usm = new URLStateMachine(input, options.baseURL, options.encodingOverride, options.url, options.stateOverride);
+ if (usm.failure) {
+ return "failure";
+ }
+
+ return usm.url;
+};
+
+module.exports.setTheUsername = function (url, username) {
+ url.username = "";
+ const decoded = punycode.ucs2.decode(username);
+ for (let i = 0; i < decoded.length; ++i) {
+ url.username += percentEncodeChar(decoded[i], isUserinfoPercentEncode);
+ }
+};
+
+module.exports.setThePassword = function (url, password) {
+ url.password = "";
+ const decoded = punycode.ucs2.decode(password);
+ for (let i = 0; i < decoded.length; ++i) {
+ url.password += percentEncodeChar(decoded[i], isUserinfoPercentEncode);
+ }
+};
+
+module.exports.serializeHost = serializeHost;
+
+module.exports.cannotHaveAUsernamePasswordPort = cannotHaveAUsernamePasswordPort;
+
+module.exports.serializeInteger = function (integer) {
+ return String(integer);
+};
+
+module.exports.parseURL = function (input, options) {
+ if (options === undefined) {
+ options = {};
+ }
+
+ // We don't handle blobs, so this just delegates:
+ return module.exports.basicURLParse(input, { baseURL: options.baseURL, encodingOverride: options.encodingOverride });
+};
-const usm = __nccwpck_require__(2158);
-
-exports.implementation = class URLImpl {
- constructor(constructorArgs) {
- const url = constructorArgs[0];
- const base = constructorArgs[1];
-
- let parsedBase = null;
- if (base !== undefined) {
- parsedBase = usm.basicURLParse(base);
- if (parsedBase === "failure") {
- throw new TypeError("Invalid base URL");
- }
- }
-
- const parsedURL = usm.basicURLParse(url, { baseURL: parsedBase });
- if (parsedURL === "failure") {
- throw new TypeError("Invalid URL");
- }
-
- this._url = parsedURL;
-
- // TODO: query stuff
- }
-
- get href() {
- return usm.serializeURL(this._url);
- }
-
- set href(v) {
- const parsedURL = usm.basicURLParse(v);
- if (parsedURL === "failure") {
- throw new TypeError("Invalid URL");
- }
-
- this._url = parsedURL;
- }
-
- get origin() {
- return usm.serializeURLOrigin(this._url);
- }
-
- get protocol() {
- return this._url.scheme + ":";
- }
-
- set protocol(v) {
- usm.basicURLParse(v + ":", { url: this._url, stateOverride: "scheme start" });
- }
-
- get username() {
- return this._url.username;
- }
-
- set username(v) {
- if (usm.cannotHaveAUsernamePasswordPort(this._url)) {
- return;
- }
-
- usm.setTheUsername(this._url, v);
- }
-
- get password() {
- return this._url.password;
- }
-
- set password(v) {
- if (usm.cannotHaveAUsernamePasswordPort(this._url)) {
- return;
- }
-
- usm.setThePassword(this._url, v);
- }
-
- get host() {
- const url = this._url;
-
- if (url.host === null) {
- return "";
- }
-
- if (url.port === null) {
- return usm.serializeHost(url.host);
- }
-
- return usm.serializeHost(url.host) + ":" + usm.serializeInteger(url.port);
- }
-
- set host(v) {
- if (this._url.cannotBeABaseURL) {
- return;
- }
-
- usm.basicURLParse(v, { url: this._url, stateOverride: "host" });
- }
-
- get hostname() {
- if (this._url.host === null) {
- return "";
- }
-
- return usm.serializeHost(this._url.host);
- }
-
- set hostname(v) {
- if (this._url.cannotBeABaseURL) {
- return;
- }
-
- usm.basicURLParse(v, { url: this._url, stateOverride: "hostname" });
- }
-
- get port() {
- if (this._url.port === null) {
- return "";
- }
-
- return usm.serializeInteger(this._url.port);
- }
-
- set port(v) {
- if (usm.cannotHaveAUsernamePasswordPort(this._url)) {
- return;
- }
-
- if (v === "") {
- this._url.port = null;
- } else {
- usm.basicURLParse(v, { url: this._url, stateOverride: "port" });
- }
- }
-
- get pathname() {
- if (this._url.cannotBeABaseURL) {
- return this._url.path[0];
- }
-
- if (this._url.path.length === 0) {
- return "";
- }
-
- return "/" + this._url.path.join("/");
- }
-
- set pathname(v) {
- if (this._url.cannotBeABaseURL) {
- return;
- }
-
- this._url.path = [];
- usm.basicURLParse(v, { url: this._url, stateOverride: "path start" });
- }
-
- get search() {
- if (this._url.query === null || this._url.query === "") {
- return "";
- }
-
- return "?" + this._url.query;
- }
-
- set search(v) {
- // TODO: query stuff
-
- const url = this._url;
-
- if (v === "") {
- url.query = null;
- return;
- }
-
- const input = v[0] === "?" ? v.substring(1) : v;
- url.query = "";
- usm.basicURLParse(input, { url, stateOverride: "query" });
- }
-
- get hash() {
- if (this._url.fragment === null || this._url.fragment === "") {
- return "";
- }
-
- return "#" + this._url.fragment;
- }
-
- set hash(v) {
- if (v === "") {
- this._url.fragment = null;
- return;
- }
-
- const input = v[0] === "#" ? v.substring(1) : v;
- this._url.fragment = "";
- usm.basicURLParse(input, { url: this._url, stateOverride: "fragment" });
- }
-
- toJSON() {
- return this.href;
- }
-};
-
-
-/***/ }),
-
-/***/ 3394:
-/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
-
-"use strict";
-
-
-const conversions = __nccwpck_require__(4886);
-const utils = __nccwpck_require__(3185);
-const Impl = __nccwpck_require__(7537);
-
-const impl = utils.implSymbol;
-
-function URL(url) {
- if (!this || this[impl] || !(this instanceof URL)) {
- throw new TypeError("Failed to construct 'URL': Please use the 'new' operator, this DOM object constructor cannot be called as a function.");
- }
- if (arguments.length < 1) {
- throw new TypeError("Failed to construct 'URL': 1 argument required, but only " + arguments.length + " present.");
- }
- const args = [];
- for (let i = 0; i < arguments.length && i < 2; ++i) {
- args[i] = arguments[i];
- }
- args[0] = conversions["USVString"](args[0]);
- if (args[1] !== undefined) {
- args[1] = conversions["USVString"](args[1]);
- }
-
- module.exports.setup(this, args);
-}
-
-URL.prototype.toJSON = function toJSON() {
- if (!this || !module.exports.is(this)) {
- throw new TypeError("Illegal invocation");
- }
- const args = [];
- for (let i = 0; i < arguments.length && i < 0; ++i) {
- args[i] = arguments[i];
- }
- return this[impl].toJSON.apply(this[impl], args);
-};
-Object.defineProperty(URL.prototype, "href", {
- get() {
- return this[impl].href;
- },
- set(V) {
- V = conversions["USVString"](V);
- this[impl].href = V;
- },
- enumerable: true,
- configurable: true
-});
-
-URL.prototype.toString = function () {
- if (!this || !module.exports.is(this)) {
- throw new TypeError("Illegal invocation");
- }
- return this.href;
-};
-
-Object.defineProperty(URL.prototype, "origin", {
- get() {
- return this[impl].origin;
- },
- enumerable: true,
- configurable: true
-});
-
-Object.defineProperty(URL.prototype, "protocol", {
- get() {
- return this[impl].protocol;
- },
- set(V) {
- V = conversions["USVString"](V);
- this[impl].protocol = V;
- },
- enumerable: true,
- configurable: true
-});
-
-Object.defineProperty(URL.prototype, "username", {
- get() {
- return this[impl].username;
- },
- set(V) {
- V = conversions["USVString"](V);
- this[impl].username = V;
- },
- enumerable: true,
- configurable: true
-});
-
-Object.defineProperty(URL.prototype, "password", {
- get() {
- return this[impl].password;
- },
- set(V) {
- V = conversions["USVString"](V);
- this[impl].password = V;
- },
- enumerable: true,
- configurable: true
-});
-
-Object.defineProperty(URL.prototype, "host", {
- get() {
- return this[impl].host;
- },
- set(V) {
- V = conversions["USVString"](V);
- this[impl].host = V;
- },
- enumerable: true,
- configurable: true
-});
-
-Object.defineProperty(URL.prototype, "hostname", {
- get() {
- return this[impl].hostname;
- },
- set(V) {
- V = conversions["USVString"](V);
- this[impl].hostname = V;
- },
- enumerable: true,
- configurable: true
-});
-
-Object.defineProperty(URL.prototype, "port", {
- get() {
- return this[impl].port;
- },
- set(V) {
- V = conversions["USVString"](V);
- this[impl].port = V;
- },
- enumerable: true,
- configurable: true
-});
-
-Object.defineProperty(URL.prototype, "pathname", {
- get() {
- return this[impl].pathname;
- },
- set(V) {
- V = conversions["USVString"](V);
- this[impl].pathname = V;
- },
- enumerable: true,
- configurable: true
-});
-
-Object.defineProperty(URL.prototype, "search", {
- get() {
- return this[impl].search;
- },
- set(V) {
- V = conversions["USVString"](V);
- this[impl].search = V;
- },
- enumerable: true,
- configurable: true
-});
-
-Object.defineProperty(URL.prototype, "hash", {
- get() {
- return this[impl].hash;
- },
- set(V) {
- V = conversions["USVString"](V);
- this[impl].hash = V;
- },
- enumerable: true,
- configurable: true
-});
-
-
-module.exports = {
- is(obj) {
- return !!obj && obj[impl] instanceof Impl.implementation;
- },
- create(constructorArgs, privateData) {
- let obj = Object.create(URL.prototype);
- this.setup(obj, constructorArgs, privateData);
- return obj;
- },
- setup(obj, constructorArgs, privateData) {
- if (!privateData) privateData = {};
- privateData.wrapper = obj;
-
- obj[impl] = new Impl.implementation(constructorArgs, privateData);
- obj[impl][utils.wrapperSymbol] = obj;
- },
- interface: URL,
- expose: {
- Window: { URL: URL },
- Worker: { URL: URL }
- }
-};
-
-
-
-/***/ }),
-
-/***/ 8665:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
-
-"use strict";
-
-
-exports.URL = __nccwpck_require__(3394)["interface"];
-exports.serializeURL = __nccwpck_require__(2158).serializeURL;
-exports.serializeURLOrigin = __nccwpck_require__(2158).serializeURLOrigin;
-exports.basicURLParse = __nccwpck_require__(2158).basicURLParse;
-exports.setTheUsername = __nccwpck_require__(2158).setTheUsername;
-exports.setThePassword = __nccwpck_require__(2158).setThePassword;
-exports.serializeHost = __nccwpck_require__(2158).serializeHost;
-exports.serializeInteger = __nccwpck_require__(2158).serializeInteger;
-exports.parseURL = __nccwpck_require__(2158).parseURL;
-
-
-/***/ }),
-
-/***/ 2158:
-/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
-
-"use strict";
-
-const punycode = __nccwpck_require__(5477);
-const tr46 = __nccwpck_require__(4256);
-
-const specialSchemes = {
- ftp: 21,
- file: null,
- gopher: 70,
- http: 80,
- https: 443,
- ws: 80,
- wss: 443
-};
-
-const failure = Symbol("failure");
-
-function countSymbols(str) {
- return punycode.ucs2.decode(str).length;
-}
-
-function at(input, idx) {
- const c = input[idx];
- return isNaN(c) ? undefined : String.fromCodePoint(c);
-}
-
-function isASCIIDigit(c) {
- return c >= 0x30 && c <= 0x39;
-}
-
-function isASCIIAlpha(c) {
- return (c >= 0x41 && c <= 0x5A) || (c >= 0x61 && c <= 0x7A);
-}
-
-function isASCIIAlphanumeric(c) {
- return isASCIIAlpha(c) || isASCIIDigit(c);
-}
-
-function isASCIIHex(c) {
- return isASCIIDigit(c) || (c >= 0x41 && c <= 0x46) || (c >= 0x61 && c <= 0x66);
-}
-
-function isSingleDot(buffer) {
- return buffer === "." || buffer.toLowerCase() === "%2e";
-}
-
-function isDoubleDot(buffer) {
- buffer = buffer.toLowerCase();
- return buffer === ".." || buffer === "%2e." || buffer === ".%2e" || buffer === "%2e%2e";
-}
-
-function isWindowsDriveLetterCodePoints(cp1, cp2) {
- return isASCIIAlpha(cp1) && (cp2 === 58 || cp2 === 124);
-}
-
-function isWindowsDriveLetterString(string) {
- return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && (string[1] === ":" || string[1] === "|");
-}
-
-function isNormalizedWindowsDriveLetterString(string) {
- return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && string[1] === ":";
-}
-
-function containsForbiddenHostCodePoint(string) {
- return string.search(/\u0000|\u0009|\u000A|\u000D|\u0020|#|%|\/|:|\?|@|\[|\\|\]/) !== -1;
-}
-
-function containsForbiddenHostCodePointExcludingPercent(string) {
- return string.search(/\u0000|\u0009|\u000A|\u000D|\u0020|#|\/|:|\?|@|\[|\\|\]/) !== -1;
-}
-
-function isSpecialScheme(scheme) {
- return specialSchemes[scheme] !== undefined;
-}
-
-function isSpecial(url) {
- return isSpecialScheme(url.scheme);
-}
-
-function defaultPort(scheme) {
- return specialSchemes[scheme];
-}
-
-function percentEncode(c) {
- let hex = c.toString(16).toUpperCase();
- if (hex.length === 1) {
- hex = "0" + hex;
- }
-
- return "%" + hex;
-}
-
-function utf8PercentEncode(c) {
- const buf = new Buffer(c);
-
- let str = "";
-
- for (let i = 0; i < buf.length; ++i) {
- str += percentEncode(buf[i]);
- }
-
- return str;
-}
-
-function utf8PercentDecode(str) {
- const input = new Buffer(str);
- const output = [];
- for (let i = 0; i < input.length; ++i) {
- if (input[i] !== 37) {
- output.push(input[i]);
- } else if (input[i] === 37 && isASCIIHex(input[i + 1]) && isASCIIHex(input[i + 2])) {
- output.push(parseInt(input.slice(i + 1, i + 3).toString(), 16));
- i += 2;
- } else {
- output.push(input[i]);
- }
- }
- return new Buffer(output).toString();
-}
-
-function isC0ControlPercentEncode(c) {
- return c <= 0x1F || c > 0x7E;
-}
-
-const extraPathPercentEncodeSet = new Set([32, 34, 35, 60, 62, 63, 96, 123, 125]);
-function isPathPercentEncode(c) {
- return isC0ControlPercentEncode(c) || extraPathPercentEncodeSet.has(c);
-}
-
-const extraUserinfoPercentEncodeSet =
- new Set([47, 58, 59, 61, 64, 91, 92, 93, 94, 124]);
-function isUserinfoPercentEncode(c) {
- return isPathPercentEncode(c) || extraUserinfoPercentEncodeSet.has(c);
-}
-
-function percentEncodeChar(c, encodeSetPredicate) {
- const cStr = String.fromCodePoint(c);
-
- if (encodeSetPredicate(c)) {
- return utf8PercentEncode(cStr);
- }
-
- return cStr;
-}
-
-function parseIPv4Number(input) {
- let R = 10;
-
- if (input.length >= 2 && input.charAt(0) === "0" && input.charAt(1).toLowerCase() === "x") {
- input = input.substring(2);
- R = 16;
- } else if (input.length >= 2 && input.charAt(0) === "0") {
- input = input.substring(1);
- R = 8;
- }
-
- if (input === "") {
- return 0;
- }
-
- const regex = R === 10 ? /[^0-9]/ : (R === 16 ? /[^0-9A-Fa-f]/ : /[^0-7]/);
- if (regex.test(input)) {
- return failure;
- }
-
- return parseInt(input, R);
-}
-
-function parseIPv4(input) {
- const parts = input.split(".");
- if (parts[parts.length - 1] === "") {
- if (parts.length > 1) {
- parts.pop();
- }
- }
-
- if (parts.length > 4) {
- return input;
- }
-
- const numbers = [];
- for (const part of parts) {
- if (part === "") {
- return input;
- }
- const n = parseIPv4Number(part);
- if (n === failure) {
- return input;
- }
-
- numbers.push(n);
- }
-
- for (let i = 0; i < numbers.length - 1; ++i) {
- if (numbers[i] > 255) {
- return failure;
- }
- }
- if (numbers[numbers.length - 1] >= Math.pow(256, 5 - numbers.length)) {
- return failure;
- }
-
- let ipv4 = numbers.pop();
- let counter = 0;
-
- for (const n of numbers) {
- ipv4 += n * Math.pow(256, 3 - counter);
- ++counter;
- }
-
- return ipv4;
-}
-
-function serializeIPv4(address) {
- let output = "";
- let n = address;
-
- for (let i = 1; i <= 4; ++i) {
- output = String(n % 256) + output;
- if (i !== 4) {
- output = "." + output;
- }
- n = Math.floor(n / 256);
- }
-
- return output;
-}
-
-function parseIPv6(input) {
- const address = [0, 0, 0, 0, 0, 0, 0, 0];
- let pieceIndex = 0;
- let compress = null;
- let pointer = 0;
-
- input = punycode.ucs2.decode(input);
-
- if (input[pointer] === 58) {
- if (input[pointer + 1] !== 58) {
- return failure;
- }
-
- pointer += 2;
- ++pieceIndex;
- compress = pieceIndex;
- }
-
- while (pointer < input.length) {
- if (pieceIndex === 8) {
- return failure;
- }
-
- if (input[pointer] === 58) {
- if (compress !== null) {
- return failure;
- }
- ++pointer;
- ++pieceIndex;
- compress = pieceIndex;
- continue;
- }
-
- let value = 0;
- let length = 0;
-
- while (length < 4 && isASCIIHex(input[pointer])) {
- value = value * 0x10 + parseInt(at(input, pointer), 16);
- ++pointer;
- ++length;
- }
-
- if (input[pointer] === 46) {
- if (length === 0) {
- return failure;
- }
-
- pointer -= length;
-
- if (pieceIndex > 6) {
- return failure;
- }
-
- let numbersSeen = 0;
-
- while (input[pointer] !== undefined) {
- let ipv4Piece = null;
-
- if (numbersSeen > 0) {
- if (input[pointer] === 46 && numbersSeen < 4) {
- ++pointer;
- } else {
- return failure;
- }
- }
-
- if (!isASCIIDigit(input[pointer])) {
- return failure;
- }
-
- while (isASCIIDigit(input[pointer])) {
- const number = parseInt(at(input, pointer));
- if (ipv4Piece === null) {
- ipv4Piece = number;
- } else if (ipv4Piece === 0) {
- return failure;
- } else {
- ipv4Piece = ipv4Piece * 10 + number;
- }
- if (ipv4Piece > 255) {
- return failure;
- }
- ++pointer;
- }
-
- address[pieceIndex] = address[pieceIndex] * 0x100 + ipv4Piece;
-
- ++numbersSeen;
-
- if (numbersSeen === 2 || numbersSeen === 4) {
- ++pieceIndex;
- }
- }
-
- if (numbersSeen !== 4) {
- return failure;
- }
-
- break;
- } else if (input[pointer] === 58) {
- ++pointer;
- if (input[pointer] === undefined) {
- return failure;
- }
- } else if (input[pointer] !== undefined) {
- return failure;
- }
-
- address[pieceIndex] = value;
- ++pieceIndex;
- }
-
- if (compress !== null) {
- let swaps = pieceIndex - compress;
- pieceIndex = 7;
- while (pieceIndex !== 0 && swaps > 0) {
- const temp = address[compress + swaps - 1];
- address[compress + swaps - 1] = address[pieceIndex];
- address[pieceIndex] = temp;
- --pieceIndex;
- --swaps;
- }
- } else if (compress === null && pieceIndex !== 8) {
- return failure;
- }
-
- return address;
-}
-
-function serializeIPv6(address) {
- let output = "";
- const seqResult = findLongestZeroSequence(address);
- const compress = seqResult.idx;
- let ignore0 = false;
-
- for (let pieceIndex = 0; pieceIndex <= 7; ++pieceIndex) {
- if (ignore0 && address[pieceIndex] === 0) {
- continue;
- } else if (ignore0) {
- ignore0 = false;
- }
-
- if (compress === pieceIndex) {
- const separator = pieceIndex === 0 ? "::" : ":";
- output += separator;
- ignore0 = true;
- continue;
- }
-
- output += address[pieceIndex].toString(16);
-
- if (pieceIndex !== 7) {
- output += ":";
- }
- }
-
- return output;
-}
-
-function parseHost(input, isSpecialArg) {
- if (input[0] === "[") {
- if (input[input.length - 1] !== "]") {
- return failure;
- }
-
- return parseIPv6(input.substring(1, input.length - 1));
- }
-
- if (!isSpecialArg) {
- return parseOpaqueHost(input);
- }
-
- const domain = utf8PercentDecode(input);
- const asciiDomain = tr46.toASCII(domain, false, tr46.PROCESSING_OPTIONS.NONTRANSITIONAL, false);
- if (asciiDomain === null) {
- return failure;
- }
-
- if (containsForbiddenHostCodePoint(asciiDomain)) {
- return failure;
- }
-
- const ipv4Host = parseIPv4(asciiDomain);
- if (typeof ipv4Host === "number" || ipv4Host === failure) {
- return ipv4Host;
- }
-
- return asciiDomain;
-}
-
-function parseOpaqueHost(input) {
- if (containsForbiddenHostCodePointExcludingPercent(input)) {
- return failure;
- }
-
- let output = "";
- const decoded = punycode.ucs2.decode(input);
- for (let i = 0; i < decoded.length; ++i) {
- output += percentEncodeChar(decoded[i], isC0ControlPercentEncode);
- }
- return output;
-}
-
-function findLongestZeroSequence(arr) {
- let maxIdx = null;
- let maxLen = 1; // only find elements > 1
- let currStart = null;
- let currLen = 0;
-
- for (let i = 0; i < arr.length; ++i) {
- if (arr[i] !== 0) {
- if (currLen > maxLen) {
- maxIdx = currStart;
- maxLen = currLen;
- }
-
- currStart = null;
- currLen = 0;
- } else {
- if (currStart === null) {
- currStart = i;
- }
- ++currLen;
- }
- }
-
- // if trailing zeros
- if (currLen > maxLen) {
- maxIdx = currStart;
- maxLen = currLen;
- }
-
- return {
- idx: maxIdx,
- len: maxLen
- };
-}
-
-function serializeHost(host) {
- if (typeof host === "number") {
- return serializeIPv4(host);
- }
-
- // IPv6 serializer
- if (host instanceof Array) {
- return "[" + serializeIPv6(host) + "]";
- }
-
- return host;
-}
-
-function trimControlChars(url) {
- return url.replace(/^[\u0000-\u001F\u0020]+|[\u0000-\u001F\u0020]+$/g, "");
-}
-
-function trimTabAndNewline(url) {
- return url.replace(/\u0009|\u000A|\u000D/g, "");
-}
-
-function shortenPath(url) {
- const path = url.path;
- if (path.length === 0) {
- return;
- }
- if (url.scheme === "file" && path.length === 1 && isNormalizedWindowsDriveLetter(path[0])) {
- return;
- }
-
- path.pop();
-}
-
-function includesCredentials(url) {
- return url.username !== "" || url.password !== "";
-}
-
-function cannotHaveAUsernamePasswordPort(url) {
- return url.host === null || url.host === "" || url.cannotBeABaseURL || url.scheme === "file";
-}
-
-function isNormalizedWindowsDriveLetter(string) {
- return /^[A-Za-z]:$/.test(string);
-}
-
-function URLStateMachine(input, base, encodingOverride, url, stateOverride) {
- this.pointer = 0;
- this.input = input;
- this.base = base || null;
- this.encodingOverride = encodingOverride || "utf-8";
- this.stateOverride = stateOverride;
- this.url = url;
- this.failure = false;
- this.parseError = false;
-
- if (!this.url) {
- this.url = {
- scheme: "",
- username: "",
- password: "",
- host: null,
- port: null,
- path: [],
- query: null,
- fragment: null,
-
- cannotBeABaseURL: false
- };
-
- const res = trimControlChars(this.input);
- if (res !== this.input) {
- this.parseError = true;
- }
- this.input = res;
- }
-
- const res = trimTabAndNewline(this.input);
- if (res !== this.input) {
- this.parseError = true;
- }
- this.input = res;
-
- this.state = stateOverride || "scheme start";
-
- this.buffer = "";
- this.atFlag = false;
- this.arrFlag = false;
- this.passwordTokenSeenFlag = false;
-
- this.input = punycode.ucs2.decode(this.input);
-
- for (; this.pointer <= this.input.length; ++this.pointer) {
- const c = this.input[this.pointer];
- const cStr = isNaN(c) ? undefined : String.fromCodePoint(c);
-
- // exec state machine
- const ret = this["parse " + this.state](c, cStr);
- if (!ret) {
- break; // terminate algorithm
- } else if (ret === failure) {
- this.failure = true;
- break;
- }
- }
-}
-
-URLStateMachine.prototype["parse scheme start"] = function parseSchemeStart(c, cStr) {
- if (isASCIIAlpha(c)) {
- this.buffer += cStr.toLowerCase();
- this.state = "scheme";
- } else if (!this.stateOverride) {
- this.state = "no scheme";
- --this.pointer;
- } else {
- this.parseError = true;
- return failure;
- }
-
- return true;
-};
-
-URLStateMachine.prototype["parse scheme"] = function parseScheme(c, cStr) {
- if (isASCIIAlphanumeric(c) || c === 43 || c === 45 || c === 46) {
- this.buffer += cStr.toLowerCase();
- } else if (c === 58) {
- if (this.stateOverride) {
- if (isSpecial(this.url) && !isSpecialScheme(this.buffer)) {
- return false;
- }
-
- if (!isSpecial(this.url) && isSpecialScheme(this.buffer)) {
- return false;
- }
-
- if ((includesCredentials(this.url) || this.url.port !== null) && this.buffer === "file") {
- return false;
- }
-
- if (this.url.scheme === "file" && (this.url.host === "" || this.url.host === null)) {
- return false;
- }
- }
- this.url.scheme = this.buffer;
- this.buffer = "";
- if (this.stateOverride) {
- return false;
- }
- if (this.url.scheme === "file") {
- if (this.input[this.pointer + 1] !== 47 || this.input[this.pointer + 2] !== 47) {
- this.parseError = true;
- }
- this.state = "file";
- } else if (isSpecial(this.url) && this.base !== null && this.base.scheme === this.url.scheme) {
- this.state = "special relative or authority";
- } else if (isSpecial(this.url)) {
- this.state = "special authority slashes";
- } else if (this.input[this.pointer + 1] === 47) {
- this.state = "path or authority";
- ++this.pointer;
- } else {
- this.url.cannotBeABaseURL = true;
- this.url.path.push("");
- this.state = "cannot-be-a-base-URL path";
- }
- } else if (!this.stateOverride) {
- this.buffer = "";
- this.state = "no scheme";
- this.pointer = -1;
- } else {
- this.parseError = true;
- return failure;
- }
-
- return true;
-};
-
-URLStateMachine.prototype["parse no scheme"] = function parseNoScheme(c) {
- if (this.base === null || (this.base.cannotBeABaseURL && c !== 35)) {
- return failure;
- } else if (this.base.cannotBeABaseURL && c === 35) {
- this.url.scheme = this.base.scheme;
- this.url.path = this.base.path.slice();
- this.url.query = this.base.query;
- this.url.fragment = "";
- this.url.cannotBeABaseURL = true;
- this.state = "fragment";
- } else if (this.base.scheme === "file") {
- this.state = "file";
- --this.pointer;
- } else {
- this.state = "relative";
- --this.pointer;
- }
-
- return true;
-};
-
-URLStateMachine.prototype["parse special relative or authority"] = function parseSpecialRelativeOrAuthority(c) {
- if (c === 47 && this.input[this.pointer + 1] === 47) {
- this.state = "special authority ignore slashes";
- ++this.pointer;
- } else {
- this.parseError = true;
- this.state = "relative";
- --this.pointer;
- }
-
- return true;
-};
-
-URLStateMachine.prototype["parse path or authority"] = function parsePathOrAuthority(c) {
- if (c === 47) {
- this.state = "authority";
- } else {
- this.state = "path";
- --this.pointer;
- }
-
- return true;
-};
-
-URLStateMachine.prototype["parse relative"] = function parseRelative(c) {
- this.url.scheme = this.base.scheme;
- if (isNaN(c)) {
- this.url.username = this.base.username;
- this.url.password = this.base.password;
- this.url.host = this.base.host;
- this.url.port = this.base.port;
- this.url.path = this.base.path.slice();
- this.url.query = this.base.query;
- } else if (c === 47) {
- this.state = "relative slash";
- } else if (c === 63) {
- this.url.username = this.base.username;
- this.url.password = this.base.password;
- this.url.host = this.base.host;
- this.url.port = this.base.port;
- this.url.path = this.base.path.slice();
- this.url.query = "";
- this.state = "query";
- } else if (c === 35) {
- this.url.username = this.base.username;
- this.url.password = this.base.password;
- this.url.host = this.base.host;
- this.url.port = this.base.port;
- this.url.path = this.base.path.slice();
- this.url.query = this.base.query;
- this.url.fragment = "";
- this.state = "fragment";
- } else if (isSpecial(this.url) && c === 92) {
- this.parseError = true;
- this.state = "relative slash";
- } else {
- this.url.username = this.base.username;
- this.url.password = this.base.password;
- this.url.host = this.base.host;
- this.url.port = this.base.port;
- this.url.path = this.base.path.slice(0, this.base.path.length - 1);
-
- this.state = "path";
- --this.pointer;
- }
-
- return true;
-};
-
-URLStateMachine.prototype["parse relative slash"] = function parseRelativeSlash(c) {
- if (isSpecial(this.url) && (c === 47 || c === 92)) {
- if (c === 92) {
- this.parseError = true;
- }
- this.state = "special authority ignore slashes";
- } else if (c === 47) {
- this.state = "authority";
- } else {
- this.url.username = this.base.username;
- this.url.password = this.base.password;
- this.url.host = this.base.host;
- this.url.port = this.base.port;
- this.state = "path";
- --this.pointer;
- }
-
- return true;
-};
-
-URLStateMachine.prototype["parse special authority slashes"] = function parseSpecialAuthoritySlashes(c) {
- if (c === 47 && this.input[this.pointer + 1] === 47) {
- this.state = "special authority ignore slashes";
- ++this.pointer;
- } else {
- this.parseError = true;
- this.state = "special authority ignore slashes";
- --this.pointer;
- }
-
- return true;
-};
-
-URLStateMachine.prototype["parse special authority ignore slashes"] = function parseSpecialAuthorityIgnoreSlashes(c) {
- if (c !== 47 && c !== 92) {
- this.state = "authority";
- --this.pointer;
- } else {
- this.parseError = true;
- }
-
- return true;
-};
-
-URLStateMachine.prototype["parse authority"] = function parseAuthority(c, cStr) {
- if (c === 64) {
- this.parseError = true;
- if (this.atFlag) {
- this.buffer = "%40" + this.buffer;
- }
- this.atFlag = true;
-
- // careful, this is based on buffer and has its own pointer (this.pointer != pointer) and inner chars
- const len = countSymbols(this.buffer);
- for (let pointer = 0; pointer < len; ++pointer) {
- const codePoint = this.buffer.codePointAt(pointer);
-
- if (codePoint === 58 && !this.passwordTokenSeenFlag) {
- this.passwordTokenSeenFlag = true;
- continue;
- }
- const encodedCodePoints = percentEncodeChar(codePoint, isUserinfoPercentEncode);
- if (this.passwordTokenSeenFlag) {
- this.url.password += encodedCodePoints;
- } else {
- this.url.username += encodedCodePoints;
- }
- }
- this.buffer = "";
- } else if (isNaN(c) || c === 47 || c === 63 || c === 35 ||
- (isSpecial(this.url) && c === 92)) {
- if (this.atFlag && this.buffer === "") {
- this.parseError = true;
- return failure;
- }
- this.pointer -= countSymbols(this.buffer) + 1;
- this.buffer = "";
- this.state = "host";
- } else {
- this.buffer += cStr;
- }
-
- return true;
-};
-
-URLStateMachine.prototype["parse hostname"] =
-URLStateMachine.prototype["parse host"] = function parseHostName(c, cStr) {
- if (this.stateOverride && this.url.scheme === "file") {
- --this.pointer;
- this.state = "file host";
- } else if (c === 58 && !this.arrFlag) {
- if (this.buffer === "") {
- this.parseError = true;
- return failure;
- }
-
- const host = parseHost(this.buffer, isSpecial(this.url));
- if (host === failure) {
- return failure;
- }
-
- this.url.host = host;
- this.buffer = "";
- this.state = "port";
- if (this.stateOverride === "hostname") {
- return false;
- }
- } else if (isNaN(c) || c === 47 || c === 63 || c === 35 ||
- (isSpecial(this.url) && c === 92)) {
- --this.pointer;
- if (isSpecial(this.url) && this.buffer === "") {
- this.parseError = true;
- return failure;
- } else if (this.stateOverride && this.buffer === "" &&
- (includesCredentials(this.url) || this.url.port !== null)) {
- this.parseError = true;
- return false;
- }
-
- const host = parseHost(this.buffer, isSpecial(this.url));
- if (host === failure) {
- return failure;
- }
-
- this.url.host = host;
- this.buffer = "";
- this.state = "path start";
- if (this.stateOverride) {
- return false;
- }
- } else {
- if (c === 91) {
- this.arrFlag = true;
- } else if (c === 93) {
- this.arrFlag = false;
- }
- this.buffer += cStr;
- }
-
- return true;
-};
-
-URLStateMachine.prototype["parse port"] = function parsePort(c, cStr) {
- if (isASCIIDigit(c)) {
- this.buffer += cStr;
- } else if (isNaN(c) || c === 47 || c === 63 || c === 35 ||
- (isSpecial(this.url) && c === 92) ||
- this.stateOverride) {
- if (this.buffer !== "") {
- const port = parseInt(this.buffer);
- if (port > Math.pow(2, 16) - 1) {
- this.parseError = true;
- return failure;
- }
- this.url.port = port === defaultPort(this.url.scheme) ? null : port;
- this.buffer = "";
- }
- if (this.stateOverride) {
- return false;
- }
- this.state = "path start";
- --this.pointer;
- } else {
- this.parseError = true;
- return failure;
- }
-
- return true;
-};
-
-const fileOtherwiseCodePoints = new Set([47, 92, 63, 35]);
-
-URLStateMachine.prototype["parse file"] = function parseFile(c) {
- this.url.scheme = "file";
-
- if (c === 47 || c === 92) {
- if (c === 92) {
- this.parseError = true;
- }
- this.state = "file slash";
- } else if (this.base !== null && this.base.scheme === "file") {
- if (isNaN(c)) {
- this.url.host = this.base.host;
- this.url.path = this.base.path.slice();
- this.url.query = this.base.query;
- } else if (c === 63) {
- this.url.host = this.base.host;
- this.url.path = this.base.path.slice();
- this.url.query = "";
- this.state = "query";
- } else if (c === 35) {
- this.url.host = this.base.host;
- this.url.path = this.base.path.slice();
- this.url.query = this.base.query;
- this.url.fragment = "";
- this.state = "fragment";
- } else {
- if (this.input.length - this.pointer - 1 === 0 || // remaining consists of 0 code points
- !isWindowsDriveLetterCodePoints(c, this.input[this.pointer + 1]) ||
- (this.input.length - this.pointer - 1 >= 2 && // remaining has at least 2 code points
- !fileOtherwiseCodePoints.has(this.input[this.pointer + 2]))) {
- this.url.host = this.base.host;
- this.url.path = this.base.path.slice();
- shortenPath(this.url);
- } else {
- this.parseError = true;
- }
-
- this.state = "path";
- --this.pointer;
- }
- } else {
- this.state = "path";
- --this.pointer;
- }
-
- return true;
-};
-
-URLStateMachine.prototype["parse file slash"] = function parseFileSlash(c) {
- if (c === 47 || c === 92) {
- if (c === 92) {
- this.parseError = true;
- }
- this.state = "file host";
- } else {
- if (this.base !== null && this.base.scheme === "file") {
- if (isNormalizedWindowsDriveLetterString(this.base.path[0])) {
- this.url.path.push(this.base.path[0]);
- } else {
- this.url.host = this.base.host;
- }
- }
- this.state = "path";
- --this.pointer;
- }
-
- return true;
-};
-
-URLStateMachine.prototype["parse file host"] = function parseFileHost(c, cStr) {
- if (isNaN(c) || c === 47 || c === 92 || c === 63 || c === 35) {
- --this.pointer;
- if (!this.stateOverride && isWindowsDriveLetterString(this.buffer)) {
- this.parseError = true;
- this.state = "path";
- } else if (this.buffer === "") {
- this.url.host = "";
- if (this.stateOverride) {
- return false;
- }
- this.state = "path start";
- } else {
- let host = parseHost(this.buffer, isSpecial(this.url));
- if (host === failure) {
- return failure;
- }
- if (host === "localhost") {
- host = "";
- }
- this.url.host = host;
-
- if (this.stateOverride) {
- return false;
- }
-
- this.buffer = "";
- this.state = "path start";
- }
- } else {
- this.buffer += cStr;
- }
-
- return true;
-};
-
-URLStateMachine.prototype["parse path start"] = function parsePathStart(c) {
- if (isSpecial(this.url)) {
- if (c === 92) {
- this.parseError = true;
- }
- this.state = "path";
-
- if (c !== 47 && c !== 92) {
- --this.pointer;
- }
- } else if (!this.stateOverride && c === 63) {
- this.url.query = "";
- this.state = "query";
- } else if (!this.stateOverride && c === 35) {
- this.url.fragment = "";
- this.state = "fragment";
- } else if (c !== undefined) {
- this.state = "path";
- if (c !== 47) {
- --this.pointer;
- }
- }
-
- return true;
-};
-
-URLStateMachine.prototype["parse path"] = function parsePath(c) {
- if (isNaN(c) || c === 47 || (isSpecial(this.url) && c === 92) ||
- (!this.stateOverride && (c === 63 || c === 35))) {
- if (isSpecial(this.url) && c === 92) {
- this.parseError = true;
- }
-
- if (isDoubleDot(this.buffer)) {
- shortenPath(this.url);
- if (c !== 47 && !(isSpecial(this.url) && c === 92)) {
- this.url.path.push("");
- }
- } else if (isSingleDot(this.buffer) && c !== 47 &&
- !(isSpecial(this.url) && c === 92)) {
- this.url.path.push("");
- } else if (!isSingleDot(this.buffer)) {
- if (this.url.scheme === "file" && this.url.path.length === 0 && isWindowsDriveLetterString(this.buffer)) {
- if (this.url.host !== "" && this.url.host !== null) {
- this.parseError = true;
- this.url.host = "";
- }
- this.buffer = this.buffer[0] + ":";
- }
- this.url.path.push(this.buffer);
- }
- this.buffer = "";
- if (this.url.scheme === "file" && (c === undefined || c === 63 || c === 35)) {
- while (this.url.path.length > 1 && this.url.path[0] === "") {
- this.parseError = true;
- this.url.path.shift();
- }
- }
- if (c === 63) {
- this.url.query = "";
- this.state = "query";
- }
- if (c === 35) {
- this.url.fragment = "";
- this.state = "fragment";
- }
- } else {
- // TODO: If c is not a URL code point and not "%", parse error.
-
- if (c === 37 &&
- (!isASCIIHex(this.input[this.pointer + 1]) ||
- !isASCIIHex(this.input[this.pointer + 2]))) {
- this.parseError = true;
- }
-
- this.buffer += percentEncodeChar(c, isPathPercentEncode);
- }
-
- return true;
-};
-
-URLStateMachine.prototype["parse cannot-be-a-base-URL path"] = function parseCannotBeABaseURLPath(c) {
- if (c === 63) {
- this.url.query = "";
- this.state = "query";
- } else if (c === 35) {
- this.url.fragment = "";
- this.state = "fragment";
- } else {
- // TODO: Add: not a URL code point
- if (!isNaN(c) && c !== 37) {
- this.parseError = true;
- }
-
- if (c === 37 &&
- (!isASCIIHex(this.input[this.pointer + 1]) ||
- !isASCIIHex(this.input[this.pointer + 2]))) {
- this.parseError = true;
- }
-
- if (!isNaN(c)) {
- this.url.path[0] = this.url.path[0] + percentEncodeChar(c, isC0ControlPercentEncode);
- }
- }
-
- return true;
-};
-
-URLStateMachine.prototype["parse query"] = function parseQuery(c, cStr) {
- if (isNaN(c) || (!this.stateOverride && c === 35)) {
- if (!isSpecial(this.url) || this.url.scheme === "ws" || this.url.scheme === "wss") {
- this.encodingOverride = "utf-8";
- }
-
- const buffer = new Buffer(this.buffer); // TODO: Use encoding override instead
- for (let i = 0; i < buffer.length; ++i) {
- if (buffer[i] < 0x21 || buffer[i] > 0x7E || buffer[i] === 0x22 || buffer[i] === 0x23 ||
- buffer[i] === 0x3C || buffer[i] === 0x3E) {
- this.url.query += percentEncode(buffer[i]);
- } else {
- this.url.query += String.fromCodePoint(buffer[i]);
- }
- }
-
- this.buffer = "";
- if (c === 35) {
- this.url.fragment = "";
- this.state = "fragment";
- }
- } else {
- // TODO: If c is not a URL code point and not "%", parse error.
- if (c === 37 &&
- (!isASCIIHex(this.input[this.pointer + 1]) ||
- !isASCIIHex(this.input[this.pointer + 2]))) {
- this.parseError = true;
- }
-
- this.buffer += cStr;
- }
-
- return true;
-};
-
-URLStateMachine.prototype["parse fragment"] = function parseFragment(c) {
- if (isNaN(c)) { // do nothing
- } else if (c === 0x0) {
- this.parseError = true;
- } else {
- // TODO: If c is not a URL code point and not "%", parse error.
- if (c === 37 &&
- (!isASCIIHex(this.input[this.pointer + 1]) ||
- !isASCIIHex(this.input[this.pointer + 2]))) {
- this.parseError = true;
- }
-
- this.url.fragment += percentEncodeChar(c, isC0ControlPercentEncode);
- }
-
- return true;
-};
-
-function serializeURL(url, excludeFragment) {
- let output = url.scheme + ":";
- if (url.host !== null) {
- output += "//";
-
- if (url.username !== "" || url.password !== "") {
- output += url.username;
- if (url.password !== "") {
- output += ":" + url.password;
- }
- output += "@";
- }
-
- output += serializeHost(url.host);
-
- if (url.port !== null) {
- output += ":" + url.port;
- }
- } else if (url.host === null && url.scheme === "file") {
- output += "//";
- }
-
- if (url.cannotBeABaseURL) {
- output += url.path[0];
- } else {
- for (const string of url.path) {
- output += "/" + string;
- }
- }
-
- if (url.query !== null) {
- output += "?" + url.query;
- }
-
- if (!excludeFragment && url.fragment !== null) {
- output += "#" + url.fragment;
- }
-
- return output;
-}
-
-function serializeOrigin(tuple) {
- let result = tuple.scheme + "://";
- result += serializeHost(tuple.host);
-
- if (tuple.port !== null) {
- result += ":" + tuple.port;
- }
-
- return result;
-}
-
-module.exports.serializeURL = serializeURL;
-
-module.exports.serializeURLOrigin = function (url) {
- // https://url.spec.whatwg.org/#concept-url-origin
- switch (url.scheme) {
- case "blob":
- try {
- return module.exports.serializeURLOrigin(module.exports.parseURL(url.path[0]));
- } catch (e) {
- // serializing an opaque origin returns "null"
- return "null";
- }
- case "ftp":
- case "gopher":
- case "http":
- case "https":
- case "ws":
- case "wss":
- return serializeOrigin({
- scheme: url.scheme,
- host: url.host,
- port: url.port
- });
- case "file":
- // spec says "exercise to the reader", chrome says "file://"
- return "file://";
- default:
- // serializing an opaque origin returns "null"
- return "null";
- }
-};
-
-module.exports.basicURLParse = function (input, options) {
- if (options === undefined) {
- options = {};
- }
-
- const usm = new URLStateMachine(input, options.baseURL, options.encodingOverride, options.url, options.stateOverride);
- if (usm.failure) {
- return "failure";
- }
-
- return usm.url;
-};
-
-module.exports.setTheUsername = function (url, username) {
- url.username = "";
- const decoded = punycode.ucs2.decode(username);
- for (let i = 0; i < decoded.length; ++i) {
- url.username += percentEncodeChar(decoded[i], isUserinfoPercentEncode);
- }
-};
-
-module.exports.setThePassword = function (url, password) {
- url.password = "";
- const decoded = punycode.ucs2.decode(password);
- for (let i = 0; i < decoded.length; ++i) {
- url.password += percentEncodeChar(decoded[i], isUserinfoPercentEncode);
- }
-};
-
-module.exports.serializeHost = serializeHost;
-
-module.exports.cannotHaveAUsernamePasswordPort = cannotHaveAUsernamePasswordPort;
-
-module.exports.serializeInteger = function (integer) {
- return String(integer);
-};
-
-module.exports.parseURL = function (input, options) {
- if (options === undefined) {
- options = {};
- }
-
- // We don't handle blobs, so this just delegates:
- return module.exports.basicURLParse(input, { baseURL: options.baseURL, encodingOverride: options.encodingOverride });
-};
-
-
-/***/ }),
-
-/***/ 3185:
-/***/ ((module) => {
-
-"use strict";
-
-
-module.exports.mixin = function mixin(target, source) {
- const keys = Object.getOwnPropertyNames(source);
- for (let i = 0; i < keys.length; ++i) {
- Object.defineProperty(target, keys[i], Object.getOwnPropertyDescriptor(source, keys[i]));
- }
-};
-
-module.exports.wrapperSymbol = Symbol("wrapper");
-module.exports.implSymbol = Symbol("impl");
-
-module.exports.wrapperForImpl = function (impl) {
- return impl[module.exports.wrapperSymbol];
-};
-
-module.exports.implForWrapper = function (wrapper) {
- return wrapper[module.exports.implSymbol];
-};
-
-
-
-/***/ }),
-
-/***/ 2940:
-/***/ ((module) => {
-
-// Returns a wrapper function that returns a wrapped callback
-// The wrapper function should do some stuff, and return a
-// presumably different callback function.
-// This makes sure that own properties are retained, so that
-// decorations and such are not lost along the way.
-module.exports = wrappy
-function wrappy (fn, cb) {
- if (fn && cb) return wrappy(fn)(cb)
-
- if (typeof fn !== 'function')
- throw new TypeError('need wrapper function')
-
- Object.keys(fn).forEach(function (k) {
- wrapper[k] = fn[k]
- })
-
- return wrapper
-
- function wrapper() {
- var args = new Array(arguments.length)
- for (var i = 0; i < args.length; i++) {
- args[i] = arguments[i]
- }
- var ret = fn.apply(this, args)
- var cb = args[args.length-1]
- if (typeof ret === 'function' && ret !== cb) {
- Object.keys(cb).forEach(function (k) {
- ret[k] = cb[k]
- })
- }
- return ret
- }
-}
-
-
-/***/ }),
-
-/***/ 3128:
-/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
-
-"use strict";
-
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- var desc = Object.getOwnPropertyDescriptor(m, k);
- if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
- desc = { enumerable: true, get: function() { return m[k]; } };
- }
- Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
- Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
- o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
- __setModuleDefault(result, mod);
- return result;
-};
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.buildBadgeUrl = exports.generateBadges = void 0;
-const fs = __importStar(__nccwpck_require__(7147));
-const config_1 = __nccwpck_require__(6373);
-const lcov_1 = __nccwpck_require__(4888);
-async function generateBadges(projects) {
- for (const project of projects.filter(p => p.coverage)) {
- const percentage = (0, lcov_1.getProjectPercentage)(project);
- if (percentage === undefined) {
- continue;
- }
- const svg = await buildSvg(project.name, config_1.Config.upperCoverageThreshold, config_1.Config.lowerCoverageThreshold, percentage);
- const path = project.pubspecFile.split('/').slice(0, -1).join('/');
- // write svg to file
- fs.writeFileSync(`${path}/coverage.svg`, svg);
- }
- const totalPercentage = (0, lcov_1.getTotalPercentage)(projects);
- if (totalPercentage === undefined) {
- return;
- }
- const svg = await buildSvg('Test Coverage', config_1.Config.upperCoverageThreshold, config_1.Config.lowerCoverageThreshold, totalPercentage);
- fs.writeFileSync(`./coverage-total.svg`, svg);
-}
-exports.generateBadges = generateBadges;
-async function buildSvg(name, upper, lower, percentage) {
- // get url
- const url = buildBadgeUrl(name, upper, lower, percentage);
- const fetch = await Promise.resolve().then(() => __importStar(__nccwpck_require__(4429)));
- // fetch url
- const response = await fetch.default(url);
- // get svg
- const svg = await response.text();
- return svg;
-}
-function buildBadgeUrl(name, upper, lower, percentage) {
- let color = '';
- if (percentage >= upper) {
- color = 'success';
- }
- else if (percentage >= lower) {
- color = 'important';
- }
- else {
- color = 'critical';
- }
- const firstHalf = name ? `${name}-` : '';
- const secondHalf = `${percentage.toFixed(2)}%`;
- return `https://img.shields.io/badge/${firstHalf}${secondHalf}-${color}`;
-}
-exports.buildBadgeUrl = buildBadgeUrl;
-
-
-/***/ }),
-
-/***/ 7810:
-/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
-
-"use strict";
-
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- var desc = Object.getOwnPropertyDescriptor(m, k);
- if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
- desc = { enumerable: true, get: function() { return m[k]; } };
- }
- Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
- Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
- o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
- __setModuleDefault(result, mod);
- return result;
-};
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.commentsEqual = exports.getBodyOf = exports.minimizeComment = exports.deleteComment = exports.createComment = exports.updateComment = exports.findPreviousComment = void 0;
-const core = __importStar(__nccwpck_require__(2186));
-function headerComment(header) {
- return ``;
-}
-function bodyWithHeader(body, header) {
- return `${body}\n${headerComment(header)}`;
-}
-async function findPreviousComment(octokit, repo, number, header) {
- let after = null;
- let hasNextPage = true;
- const h = headerComment(header);
- while (hasNextPage) {
- const data = await octokit.graphql(`
- query($repo: String! $owner: String! $number: Int! $after: String) {
- viewer { login }
- repository(name: $repo owner: $owner) {
- pullRequest(number: $number) {
- comments(first: 100 after: $after) {
- nodes {
- id
- author {
- login
- }
- isMinimized
- body
- }
- pageInfo {
- endCursor
- hasNextPage
- }
- }
- }
- }
- }
- `, { ...repo, after, number });
- // eslint-disable-next-line @typescript-eslint/no-unnecessary-type-assertion
- const viewer = data.viewer;
- // eslint-disable-next-line @typescript-eslint/no-unnecessary-type-assertion
- const repository = data.repository;
- const target = repository.pullRequest?.comments?.nodes?.find((node) => node?.author?.login === viewer.login.replace('[bot]', '') &&
- !node?.isMinimized &&
- node?.body?.includes(h));
- if (target) {
- return target;
- }
- after = repository.pullRequest?.comments?.pageInfo?.endCursor;
- hasNextPage =
- repository.pullRequest?.comments?.pageInfo?.hasNextPage ?? false;
- }
- return undefined;
-}
-exports.findPreviousComment = findPreviousComment;
-async function updateComment(octokit, id, body, header, previousBody) {
- if (!body && !previousBody)
- return core.warning('Comment body cannot be blank');
- await octokit.graphql(`
- mutation($input: UpdateIssueCommentInput!) {
- updateIssueComment(input: $input) {
- issueComment {
- id
- body
- }
- }
- }
- `, {
- input: {
- id,
- body: previousBody
- ? `${previousBody}\n${body}`
- : bodyWithHeader(body, header)
- }
- });
-}
-exports.updateComment = updateComment;
-async function createComment(octokit, repo, issue_number, body, header, previousBody) {
- if (!body && !previousBody) {
- core.warning('Comment body cannot be blank');
- return;
- }
- return await octokit.rest.issues.createComment({
- ...repo,
- issue_number,
- body: previousBody
- ? `${previousBody}\n${body}`
- : bodyWithHeader(body, header)
- });
-}
-exports.createComment = createComment;
-async function deleteComment(octokit, id) {
- await octokit.graphql(`
- mutation($id: ID!) {
- deleteIssueComment(input: { id: $id }) {
- clientMutationId
- }
- }
- `, { id });
-}
-exports.deleteComment = deleteComment;
-async function minimizeComment(octokit, subjectId, classifier) {
- await octokit.graphql(`
- mutation($input: MinimizeCommentInput!) {
- minimizeComment(input: $input) {
- clientMutationId
- }
- }
- `, { input: { subjectId, classifier } });
-}
-exports.minimizeComment = minimizeComment;
-function getBodyOf(previous, append, hideDetails) {
- if (!append) {
- return undefined;
- }
- if (!hideDetails) {
- return previous.body;
- }
- return previous.body?.replace(/()/g, '$1$2');
-}
-exports.getBodyOf = getBodyOf;
-function commentsEqual(body, previous, header) {
- const newBody = bodyWithHeader(body, header);
- return newBody === previous;
-}
-exports.commentsEqual = commentsEqual;
-
-
-/***/ }),
-
-/***/ 6373:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
-
-"use strict";
-
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.Config = void 0;
-const core_1 = __nccwpck_require__(2186);
-class Config {
- static get githubToken() {
- return (0, core_1.getInput)('GITHUB_TOKEN', { required: true });
- }
- static get githubHeadRef() {
- return (0, core_1.getInput)('GITHUB_HEAD_REF', { required: true });
- }
- static get upperCoverageThreshold() {
- return parseFloat((0, core_1.getInput)('upper_threshold'));
- }
- static get lowerCoverageThreshold() {
- return parseFloat((0, core_1.getInput)('lower_threshold'));
- }
- static get compareAgainstBase() {
- return (0, core_1.getInput)('compare_against_base') === 'true';
- }
- static get enforceThreshold() {
- return this.parseCoverageRule((0, core_1.getInput)('enforce_threshold'));
- }
- static get enforceForbiddenDecrease() {
- return this.parseCoverageRule((0, core_1.getInput)('enforce_forbidden_decrease'));
- }
- static get generateBadges() {
- return (0, core_1.getInput)('generate_badges') === 'true';
- }
- static parseCoverageRule(rule) {
- switch (rule) {
- case 'none':
- case 'false':
- return 'none';
- case 'single':
- return 'single';
- case 'total':
- return 'total';
- default:
- throw new Error(`Unknown coverage rule '${rule}'`);
- }
- }
-}
-exports.Config = Config;
-
-
-/***/ }),
-
-/***/ 6932:
-/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
-
-"use strict";
-
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- var desc = Object.getOwnPropertyDescriptor(m, k);
- if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
- desc = { enumerable: true, get: function() { return m[k]; } };
- }
- Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
- Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
- o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
- __setModuleDefault(result, mod);
- return result;
-};
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.findProjects = void 0;
-const fs = __importStar(__nccwpck_require__(7147));
-const glob = __importStar(__nccwpck_require__(8090));
-/**
- * Finds all projects in the current directory
- * @param excludePattern Glob patterns of pubspec files that will be disregarded, separated by newlines
- */
-async function findProjects(excludePattern) {
- const excludes = excludePattern
- ?.split('\n')
- .map(p => `!${p}`)
- ?.join('\n') ?? '';
- // Find all pubspec.yaml files in directory
- const globber = await glob.create(`**/pubspec.yaml\n${excludes}`);
- const results = await globber.glob();
- const files = results.filter(f => fs.lstatSync(f).isFile());
- // Sort files by depth and then alphabetically
- files.sort((a, b) => {
- const aDepth = a.split('/').length;
- const bDepth = b.split('/').length;
- if (aDepth === bDepth) {
- return a.localeCompare(b);
- }
- return aDepth - bDepth;
- });
- const projects = [];
- for (const file of files) {
- const project = getProject(file);
- if (project) {
- projects.push(project);
- }
- }
- return projects;
-}
-exports.findProjects = findProjects;
-/**
- * Returns the project that is described in the pubspec.yaml file
- * @param file The path to the project's pubspec.yaml file
- */
-function getProject(file) {
- // Read file in lines
- const lines = fs.readFileSync(file, 'utf8').split('\n');
- // Find name
- const name = lines
- .find(l => l.startsWith('name:'))
- ?.split(':')[1]
- ?.trim();
- // Find description
- const description = lines
- .find(l => l.startsWith('description:'))
- ?.split(':')[1]
- ?.trim();
- // Check if project contains coverage
- const parentDirectory = file.split('/').slice(0, -1).join('/');
- const coverageFile = `${parentDirectory}/coverage/lcov.info`;
- const containsCoverage = fs.existsSync(coverageFile);
- if (name && description) {
- return {
- name,
- description,
- pubspecFile: file,
- coverageFile: containsCoverage ? coverageFile : null
- };
- }
- return null;
-}
-
-
-/***/ }),
-
-/***/ 6350:
-/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
-
-"use strict";
-
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- var desc = Object.getOwnPropertyDescriptor(m, k);
- if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
- desc = { enumerable: true, get: function() { return m[k]; } };
- }
- Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
- Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
- o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
- __setModuleDefault(result, mod);
- return result;
-};
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.commitAndPushChanges = exports.popStash = exports.checkoutRef = exports.stashChanges = exports.configureGit = void 0;
-const exec = __importStar(__nccwpck_require__(1514));
-const github_1 = __nccwpck_require__(5438);
-const config_1 = __nccwpck_require__(6373);
-async function configureGit() {
- await exec.exec('git', ['config', 'user.name', 'github-actions[bot]']);
- await exec.exec('git', [
- 'config',
- 'user.email',
- 'github-actions[bot]@users.noreply.github.com'
- ]);
- const url = `https://x-access-token:${config_1.Config.githubToken}@github.com/${github_1.context.payload.repository?.full_name}`;
- await exec.exec('git', ['remote', 'set-url', 'origin', url]);
-}
-exports.configureGit = configureGit;
-async function stashChanges() {
- await exec.exec('git', ['stash']);
-}
-exports.stashChanges = stashChanges;
-async function checkoutRef(ref) {
- await exec.exec('git', ['fetch', 'origin', ref]);
- await exec.exec('git', ['checkout', ref]);
-}
-exports.checkoutRef = checkoutRef;
-async function popStash() {
- try {
- await exec.exec('git', ['stash', 'pop']);
- return true;
- }
- catch (error) {
- return false;
- }
-}
-exports.popStash = popStash;
-async function commitAndPushChanges(commitMessage) {
- await exec.exec('git', ['add', '.']);
- await exec.exec('git', ['commit', '-am', commitMessage]);
- await exec.exec('git', ['push', 'origin']);
-}
-exports.commitAndPushChanges = commitAndPushChanges;
-
-
-/***/ }),
-
-/***/ 4888:
-/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
-
-"use strict";
-
-var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.getTotalPercentageBefore = exports.getTotalPercentage = exports.getProjectPercentageBefore = exports.getProjectPercentage = exports.getLcovPercentage = exports.parseLcovBefore = exports.coverProject = void 0;
-const lcov_parse_1 = __importDefault(__nccwpck_require__(7454));
-/**
- * Converts a project into a CoveredProject object by attempting to parse the coverage file
- * @param project the project for which the coverage should be calculated
- * @returns a CoveredProject object that contains the name, description and coverage of the project
- */
-async function coverProject(project) {
- return {
- name: project.name,
- description: project.description,
- coverageFile: project.coverageFile,
- pubspecFile: project.pubspecFile,
- coverage: await parseLcov(project),
- coverageBefore: undefined
- };
-}
-exports.coverProject = coverProject;
-/**
- * Should be called once we are in "before" state. Parses coverage file and sets it as coverageBefore
- * @param project Project to parse
- * @returns Project including coverage before
- */
-async function parseLcovBefore(project) {
- return {
- ...project,
- coverageBefore: await parseLcov(project)
- };
-}
-exports.parseLcovBefore = parseLcovBefore;
-async function parseLcov(project) {
- const file = project.coverageFile;
- if (!file) {
- return undefined;
- }
- return new Promise((resolve, reject) => {
- (0, lcov_parse_1.default)(file, (err, data) => {
- if (err) {
- reject(err);
- }
- resolve(data);
- });
- });
-}
-function getLcovPercentage(lcov) {
- let hit = 0;
- let found = 0;
- for (const entry of lcov) {
- hit += entry.lines.hit;
- found += entry.lines.found;
- }
- return (hit / found) * 100;
-}
-exports.getLcovPercentage = getLcovPercentage;
-function getProjectPercentage(project) {
- if (project.coverage === undefined) {
- return undefined;
- }
- return getLcovPercentage(project.coverage);
-}
-exports.getProjectPercentage = getProjectPercentage;
-function getProjectPercentageBefore(project) {
- if (project.coverageBefore === undefined) {
- return undefined;
- }
- if (project.coverageBefore === null) {
- return 0;
- }
- return getLcovPercentage(project.coverageBefore);
-}
-exports.getProjectPercentageBefore = getProjectPercentageBefore;
-function getTotalPercentage(projects) {
- const coverages = projects
- .map(getProjectPercentage)
- .filter(c => c !== undefined);
- if (coverages.length === 0) {
- return undefined;
- }
- return coverages.reduce((a, b) => a + b) / coverages.length;
-}
-exports.getTotalPercentage = getTotalPercentage;
-function getTotalPercentageBefore(projects) {
- const coverages = projects
- .map(p => p.coverageBefore)
- .filter(c => c !== undefined && c !== null)
- .map(a => getLcovPercentage(a));
- if (coverages.length === 0) {
- return undefined;
- }
- return coverages.reduce((a, b) => a + b) / coverages.length;
-}
-exports.getTotalPercentageBefore = getTotalPercentageBefore;
-
-
-/***/ }),
-
-/***/ 399:
-/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
-
-"use strict";
-
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- var desc = Object.getOwnPropertyDescriptor(m, k);
- if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
- desc = { enumerable: true, get: function() { return m[k]; } };
- }
- Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
- Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
- o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
- __setModuleDefault(result, mod);
- return result;
-};
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.run = void 0;
-const core = __importStar(__nccwpck_require__(2186));
-const github_1 = __nccwpck_require__(5438);
-const badge_1 = __nccwpck_require__(3128);
-const comment_1 = __nccwpck_require__(7810);
-const config_1 = __nccwpck_require__(6373);
-const finder_1 = __nccwpck_require__(6932);
-const git_1 = __nccwpck_require__(6350);
-const lcov_1 = __nccwpck_require__(4888);
-const message_1 = __nccwpck_require__(7899);
-const semaphor_1 = __nccwpck_require__(3624);
-/**
- * The main function for the action.
- */
-async function run() {
- try {
- core.info(`Finding projects...`);
- const projects = await (0, finder_1.findProjects)(null);
- core.info(`Found ${projects.length} projects`);
- core.info(`Parsing coverage...`);
- let projectsWithCoverage = await Promise.all(projects.map(lcov_1.coverProject));
- if (config_1.Config.compareAgainstBase && github_1.context.payload.pull_request) {
- try {
- await (0, git_1.stashChanges)();
- await (0, git_1.checkoutRef)(github_1.context.payload.pull_request.base.ref);
- projectsWithCoverage = await Promise.all(projectsWithCoverage.map(lcov_1.parseLcovBefore));
- await (0, git_1.checkoutRef)(github_1.context.payload.pull_request.head.ref);
- await (0, git_1.popStash)();
- }
- catch (error) {
- core.warning(`Failed to checkout base ref due to ${error}.`);
- }
- }
- /// Projects that actually have coverage
- const coveredProjects = projectsWithCoverage.filter(p => p.coverage?.length);
- core.info(`${coveredProjects.filter(p => p.coverage).length} projects covered.`);
- // If we are in a Push event and generateBadges is true, generate badges
- if (config_1.Config.generateBadges && github_1.context.eventName === 'push') {
- try {
- core.info(`Configuring git...`);
- await (0, git_1.configureGit)();
- core.info('Updating and pushing coverage badge...');
- await (0, badge_1.generateBadges)(coveredProjects);
- await (0, git_1.commitAndPushChanges)('chore: coverage badges [skip ci]');
- }
- catch (error) {
- core.warning(`Failed to commit and push coverage badge due to ${error}.`);
- }
- }
- const pr = github_1.context.payload.pull_request;
- if (pr) {
- core.info(`Building message...`);
- const message = (0, message_1.buildMessage)(coveredProjects, pr.html_url ?? '', pr.head.sha);
- core.setOutput('message', message);
- try {
- await comment(message);
- }
- catch (error) {
- core.warning(`Failed to comment due to ${error}.`);
- }
- }
- const coverageThresholdMet = (0, semaphor_1.verifyCoverageThreshold)(coveredProjects);
- const noDecreaseMet = (0, semaphor_1.verifyNoCoverageDecrease)(coveredProjects);
- if (!coverageThresholdMet || !noDecreaseMet) {
- core.setFailed('Configured conditions were not met.');
- }
- }
- catch (error) {
- // Fail the workflow run if an error occurs
- if (error instanceof Error)
- core.setFailed(error.message);
- }
-}
-exports.run = run;
-async function comment(body) {
- const octokit = (0, github_1.getOctokit)(config_1.Config.githubToken);
- const header = 'dart-coverage-assistant';
- if (github_1.context.payload.pull_request) {
- const previous = await (0, comment_1.findPreviousComment)(octokit, github_1.context.repo, github_1.context.payload.pull_request?.number, header);
- if (previous) {
- core.info(`Updating previous comment #${previous.id}`);
- await (0, comment_1.updateComment)(octokit, previous.id, body, header);
- }
- else {
- core.info(`Writing a new comment`);
- await (0, comment_1.createComment)(octokit, github_1.context.repo, github_1.context.payload.pull_request?.number, body, header);
- }
- }
- else {
- core.info(`Not a pull request, skipping comment`);
- }
-}
-
-
-/***/ }),
-
-/***/ 7899:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
-
-"use strict";
-
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.buildMessage = void 0;
-const lcov_1 = __nccwpck_require__(4888);
-const config_1 = __nccwpck_require__(6373);
-const badge_1 = __nccwpck_require__(3128);
-function buildMessage(projects, url, sha) {
- if (projects.length === 0) {
- return '';
- }
- const shaShort = sha.slice(0, 8);
- const commit = `[${shaShort}](${url}/commits/${sha})`;
- let md = '';
- md += '# Coverage Report\n';
- md += `Automatic coverage report for ${commit}.\n`;
- md += '\n';
- md += `> Generated by [dart-coverage-assistant](https://github.com/whynotmake-it/dart-coverage-assistant)\n`;
- md += '\n';
- if (projects.length > 1) {
- md += buildTotalTable(projects) + '\n';
- }
- for (const project of projects) {
- md += buildTable(project) + '\n';
- }
- md += '\n';
- return md;
-}
-exports.buildMessage = buildMessage;
-function buildTotalTable(projects) {
- let md = '';
- const combined = (0, lcov_1.getTotalPercentage)(projects);
- if (combined !== undefined) {
- const badge = buildBadge(config_1.Config.upperCoverageThreshold, config_1.Config.lowerCoverageThreshold, combined);
- md += `## Total coverage: \n`;
- md += '\n';
- md += `| Coverage | Diff |\n`;
- md += `| --- | --- |\n`;
- md += `| ${badge} | ${buildDiffString(getTotalDiff(projects))} |\n`;
- }
- return md;
-}
-function buildTable(project) {
- let md = '';
- md += buildHeader(project) + '\n';
- md += '\n';
- md += buildBody(project) + '\n';
- return md;
-}
-function buildHeader(project) {
- const percentage = project.coverage
- ? (0, lcov_1.getProjectPercentage)(project)
- : undefined;
- const diff = buildDiffString(getDiff(project));
- const badgeCell = percentage
- ? `${buildBadge(config_1.Config.upperCoverageThreshold, config_1.Config.lowerCoverageThreshold, percentage)}`
- : '';
- let md = `## \`${project.name}\`\n`;
- md += '\n';
- md += `> ${project.description}\n`;
- md += '\n';
- md += '| Coverage | Diff |\n';
- md += '| --- | --- |\n';
- md += `| ${badgeCell} | ${diff} |\n`;
- return md;
-}
-function buildBody(project) {
- if (project.coverage === undefined) {
- return '';
- }
- let tableMd = '';
- tableMd += '| File | Line Percentage | Line Count |\n';
- tableMd += '| --- | --- | --- |\n';
- const folders = {};
- // Group files by folder
- for (const file of project.coverage) {
- const parts = file.file.split('/');
- const folder = parts.slice(0, -1).join('/');
- folders[folder] = folders[folder] || [];
- folders[folder].push(file);
- }
- // Add all folders to the table
- for (const folder of Object.keys(folders).sort()) {
- tableMd += `| **${folder}** | | |\n`;
- for (const file of folders[folder]) {
- const name = file.file.split('/').slice(-1)[0];
- tableMd += `| ${name} | ${(0, lcov_1.getLcovPercentage)([file])} | ${file.lines.details.length} |\n`;
- }
- }
- let md = '\n';
- md += `Coverage Details for ${project.name}
-\n`;
- md += '\n';
- md += tableMd;
- md += '\n';
- md += ' ';
- return md;
-}
-function buildDiffString(diff) {
- if (diff === undefined) {
- return '-';
- }
- if (diff === 0) {
- return `➡️ ${diff.toFixed(2)}%`;
- }
- else if (diff > 0) {
- return `⬆️ +${diff.toFixed(2)}%`;
- }
- else {
- return `⬇️ ${diff.toFixed(2)}%`;
- }
-}
-function getDiff(project) {
- if (project.coverageBefore === undefined || !project.coverage) {
- return undefined;
- }
- const current = (0, lcov_1.getLcovPercentage)(project.coverage);
- const before = project.coverageBefore === null
- ? 0
- : (0, lcov_1.getLcovPercentage)(project.coverageBefore);
- return current - before;
-}
-function getTotalDiff(projects) {
- const current = (0, lcov_1.getTotalPercentage)(projects);
- const before = (0, lcov_1.getTotalPercentageBefore)(projects);
- if (current === undefined || before === undefined) {
- return undefined;
- }
- return current - before;
-}
-function buildBadge(upper, lower, percentage) {
- const alt = percentage >= upper ? 'pass' : percentage >= lower ? 'warning' : 'fail';
- const url = (0, badge_1.buildBadgeUrl)(undefined, upper, lower, percentage);
- return `} "${alt}")`;
-}
-
-
-/***/ }),
-
-/***/ 3624:
-/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
-
-"use strict";
-
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- var desc = Object.getOwnPropertyDescriptor(m, k);
- if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
- desc = { enumerable: true, get: function() { return m[k]; } };
- }
- Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
- Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
- o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
- __setModuleDefault(result, mod);
- return result;
-};
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.verifyNoCoverageDecrease = exports.verifyCoverageThreshold = void 0;
-const config_1 = __nccwpck_require__(6373);
-const lcov_1 = __nccwpck_require__(4888);
-const core = __importStar(__nccwpck_require__(2186));
-function verifyCoverageThreshold(projects) {
- if (config_1.Config.enforceThreshold === 'none') {
- return true;
- }
- else if (config_1.Config.enforceThreshold === 'single') {
- const failedProjects = projects.filter(p => {
- const percentage = (0, lcov_1.getProjectPercentage)(p);
- return (percentage === undefined || percentage < config_1.Config.lowerCoverageThreshold);
- });
- if (failedProjects.length) {
- core.error(`Coverage threshold not met for ${failedProjects.length} projects`);
- return false;
- }
- }
- else if (config_1.Config.enforceThreshold === 'total') {
- const percentage = (0, lcov_1.getTotalPercentage)(projects);
- if (percentage === undefined ||
- percentage < config_1.Config.lowerCoverageThreshold) {
- core.error(`Coverage threshold not met for total coverage`);
- return false;
- }
- }
- return true;
-}
-exports.verifyCoverageThreshold = verifyCoverageThreshold;
-function verifyNoCoverageDecrease(projects) {
- projects;
- if (config_1.Config.enforceForbiddenDecrease === 'none') {
- return true;
- }
- else if (config_1.Config.enforceForbiddenDecrease === 'single') {
- const failed = projects.filter(p => {
- const percentage = (0, lcov_1.getProjectPercentage)(p);
- const before = (0, lcov_1.getProjectPercentageBefore)(p);
- return (percentage !== undefined && before !== undefined && percentage < before);
- });
- if (failed.length) {
- core.error(`Coverage decrease detected for ${failed.length} projects`);
- return false;
- }
- }
- else if (config_1.Config.enforceForbiddenDecrease === 'total') {
- const total = (0, lcov_1.getTotalPercentage)(projects);
- const totalBefore = (0, lcov_1.getTotalPercentageBefore)(projects);
- if (total !== undefined &&
- totalBefore !== undefined &&
- total < totalBefore) {
- core.error(`Total coverage decreased by ${(totalBefore - total).toFixed(2)}%`);
- return false;
- }
- }
- return true;
-}
-exports.verifyNoCoverageDecrease = verifyNoCoverageDecrease;
-
-
-/***/ }),
-
-/***/ 2877:
-/***/ ((module) => {
-
-module.exports = eval("require")("encoding");
-
-
-/***/ }),
-
-/***/ 9491:
-/***/ ((module) => {
-
-"use strict";
-module.exports = require("assert");
-
-/***/ }),
-
-/***/ 4300:
-/***/ ((module) => {
-
-"use strict";
-module.exports = require("buffer");
-
-/***/ }),
-
-/***/ 2081:
-/***/ ((module) => {
-
-"use strict";
-module.exports = require("child_process");
-
-/***/ }),
-
-/***/ 6113:
-/***/ ((module) => {
-
-"use strict";
-module.exports = require("crypto");
-
-/***/ }),
-
-/***/ 2361:
-/***/ ((module) => {
-
-"use strict";
-module.exports = require("events");
-
-/***/ }),
-
-/***/ 7147:
-/***/ ((module) => {
-
-"use strict";
-module.exports = require("fs");
-
-/***/ }),
-
-/***/ 3685:
-/***/ ((module) => {
-
-"use strict";
-module.exports = require("http");
-
-/***/ }),
-
-/***/ 5687:
-/***/ ((module) => {
-
-"use strict";
-module.exports = require("https");
-
-/***/ }),
-
-/***/ 1808:
-/***/ ((module) => {
-
-"use strict";
-module.exports = require("net");
-
-/***/ }),
-
-/***/ 7742:
-/***/ ((module) => {
-
-"use strict";
-module.exports = require("node:process");
-
-/***/ }),
-
-/***/ 2477:
-/***/ ((module) => {
-
-"use strict";
-module.exports = require("node:stream/web");
-
-/***/ }),
-
-/***/ 2037:
-/***/ ((module) => {
-
-"use strict";
-module.exports = require("os");
-
-/***/ }),
-
-/***/ 1017:
-/***/ ((module) => {
-
-"use strict";
-module.exports = require("path");
-
-/***/ }),
-
-/***/ 5477:
-/***/ ((module) => {
-
-"use strict";
-module.exports = require("punycode");
-
-/***/ }),
-
-/***/ 2781:
-/***/ ((module) => {
-
-"use strict";
-module.exports = require("stream");
-
-/***/ }),
-
-/***/ 1576:
-/***/ ((module) => {
-
-"use strict";
-module.exports = require("string_decoder");
-
-/***/ }),
-
-/***/ 9512:
-/***/ ((module) => {
-
-"use strict";
-module.exports = require("timers");
-
-/***/ }),
-
-/***/ 4404:
-/***/ ((module) => {
-
-"use strict";
-module.exports = require("tls");
-
-/***/ }),
-
-/***/ 7310:
-/***/ ((module) => {
-
-"use strict";
-module.exports = require("url");
-
-/***/ }),
-
-/***/ 3837:
-/***/ ((module) => {
-
-"use strict";
-module.exports = require("util");
-
-/***/ }),
-
-/***/ 1267:
-/***/ ((module) => {
-
-"use strict";
-module.exports = require("worker_threads");
-
-/***/ }),
-
-/***/ 9796:
-/***/ ((module) => {
-
-"use strict";
-module.exports = require("zlib");
-
-/***/ }),
-
-/***/ 8572:
-/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => {
-
-/* c8 ignore start */
-// 64 KiB (the same size Chrome slices its blobs into Uint8Arrays)
-const POOL_SIZE = 65536
-
-if (!globalThis.ReadableStream) {
- // `node:stream/web` got introduced in v16.5.0 as experimental
- // and it's preferred over the polyfilled version. So we also
- // suppress the warning that gets emitted by NodeJS for using it.
- try {
- const process = __nccwpck_require__(7742)
- const { emitWarning } = process
- try {
- process.emitWarning = () => {}
- Object.assign(globalThis, __nccwpck_require__(2477))
- process.emitWarning = emitWarning
- } catch (error) {
- process.emitWarning = emitWarning
- throw error
- }
- } catch (error) {
- // fallback to polyfill implementation
- Object.assign(globalThis, __nccwpck_require__(1452))
- }
-}
-
-try {
- // Don't use node: prefix for this, require+node: is not supported until node v14.14
- // Only `import()` can use prefix in 12.20 and later
- const { Blob } = __nccwpck_require__(4300)
- if (Blob && !Blob.prototype.stream) {
- Blob.prototype.stream = function name (params) {
- let position = 0
- const blob = this
-
- return new ReadableStream({
- type: 'bytes',
- async pull (ctrl) {
- const chunk = blob.slice(position, Math.min(blob.size, position + POOL_SIZE))
- const buffer = await chunk.arrayBuffer()
- position += buffer.byteLength
- ctrl.enqueue(new Uint8Array(buffer))
-
- if (position === blob.size) {
- ctrl.close()
- }
- }
- })
- }
- }
-} catch (error) {}
-/* c8 ignore end */
-
-
-/***/ }),
-
-/***/ 3213:
-/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __nccwpck_require__) => {
-
-"use strict";
-/* harmony export */ __nccwpck_require__.d(__webpack_exports__, {
-/* harmony export */ "Z": () => (__WEBPACK_DEFAULT_EXPORT__)
-/* harmony export */ });
-/* unused harmony export File */
-/* harmony import */ var _index_js__WEBPACK_IMPORTED_MODULE_0__ = __nccwpck_require__(1410);
-
-
-const _File = class File extends _index_js__WEBPACK_IMPORTED_MODULE_0__/* ["default"] */ .Z {
- #lastModified = 0
- #name = ''
-
- /**
- * @param {*[]} fileBits
- * @param {string} fileName
- * @param {{lastModified?: number, type?: string}} options
- */// @ts-ignore
- constructor (fileBits, fileName, options = {}) {
- if (arguments.length < 2) {
- throw new TypeError(`Failed to construct 'File': 2 arguments required, but only ${arguments.length} present.`)
- }
- super(fileBits, options)
-
- if (options === null) options = {}
-
- // Simulate WebIDL type casting for NaN value in lastModified option.
- const lastModified = options.lastModified === undefined ? Date.now() : Number(options.lastModified)
- if (!Number.isNaN(lastModified)) {
- this.#lastModified = lastModified
- }
-
- this.#name = String(fileName)
- }
-
- get name () {
- return this.#name
- }
-
- get lastModified () {
- return this.#lastModified
- }
-
- get [Symbol.toStringTag] () {
- return 'File'
- }
-
- static [Symbol.hasInstance] (object) {
- return !!object && object instanceof _index_js__WEBPACK_IMPORTED_MODULE_0__/* ["default"] */ .Z &&
- /^(File)$/.test(object[Symbol.toStringTag])
- }
-}
-
-/** @type {typeof globalThis.File} */// @ts-ignore
-const File = _File
-/* harmony default export */ const __WEBPACK_DEFAULT_EXPORT__ = (File);
-
-
-/***/ }),
-
-/***/ 2777:
-/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __nccwpck_require__) => {
-
-"use strict";
-
-// EXPORTS
-__nccwpck_require__.d(__webpack_exports__, {
- "t6": () => (/* reexport */ fetch_blob/* default */.Z),
- "$B": () => (/* reexport */ file/* default */.Z),
- "xB": () => (/* binding */ blobFrom),
- "SX": () => (/* binding */ blobFromSync),
- "e2": () => (/* binding */ fileFrom),
- "RA": () => (/* binding */ fileFromSync)
-});
-
-// UNUSED EXPORTS: default
-
-;// CONCATENATED MODULE: external "node:fs"
-const external_node_fs_namespaceObject = require("node:fs");
-;// CONCATENATED MODULE: external "node:path"
-const external_node_path_namespaceObject = require("node:path");
-// EXTERNAL MODULE: ./node_modules/node-domexception/index.js
-var node_domexception = __nccwpck_require__(7760);
-// EXTERNAL MODULE: ./node_modules/fetch-blob/file.js
-var file = __nccwpck_require__(3213);
-// EXTERNAL MODULE: ./node_modules/fetch-blob/index.js
-var fetch_blob = __nccwpck_require__(1410);
-;// CONCATENATED MODULE: ./node_modules/fetch-blob/from.js
-
-
-
-
-
-
-
-const { stat } = external_node_fs_namespaceObject.promises
-
-/**
- * @param {string} path filepath on the disk
- * @param {string} [type] mimetype to use
- */
-const blobFromSync = (path, type) => fromBlob((0,external_node_fs_namespaceObject.statSync)(path), path, type)
-
-/**
- * @param {string} path filepath on the disk
- * @param {string} [type] mimetype to use
- * @returns {Promise<Blob>}
- */
-const blobFrom = (path, type) => stat(path).then(stat => fromBlob(stat, path, type))
-
-/**
- * @param {string} path filepath on the disk
- * @param {string} [type] mimetype to use
- * @returns {Promise<File>}
- */
-const fileFrom = (path, type) => stat(path).then(stat => fromFile(stat, path, type))
-
-/**
- * @param {string} path filepath on the disk
- * @param {string} [type] mimetype to use
- */
-const fileFromSync = (path, type) => fromFile((0,external_node_fs_namespaceObject.statSync)(path), path, type)
-
-// @ts-ignore
-const fromBlob = (stat, path, type = '') => new fetch_blob/* default */.Z([new BlobDataItem({
- path,
- size: stat.size,
- lastModified: stat.mtimeMs,
- start: 0
-})], { type })
-
-// @ts-ignore
-const fromFile = (stat, path, type = '') => new file/* default */.Z([new BlobDataItem({
- path,
- size: stat.size,
- lastModified: stat.mtimeMs,
- start: 0
-})], (0,external_node_path_namespaceObject.basename)(path), { type, lastModified: stat.mtimeMs })
-
-/**
- * This is a blob backed up by a file on the disk
- * with minium requirement. Its wrapped around a Blob as a blobPart
- * so you have no direct access to this.
- *
- * @private
- */
-class BlobDataItem {
- #path
- #start
-
- constructor (options) {
- this.#path = options.path
- this.#start = options.start
- this.size = options.size
- this.lastModified = options.lastModified
- }
-
- /**
- * Slicing arguments is first validated and formatted
- * to not be out of range by Blob.prototype.slice
- */
- slice (start, end) {
- return new BlobDataItem({
- path: this.#path,
- lastModified: this.lastModified,
- size: end - start,
- start: this.#start + start
- })
- }
-
- async * stream () {
- const { mtimeMs } = await stat(this.#path)
- if (mtimeMs > this.lastModified) {
- throw new node_domexception('The requested file could not be read, typically due to permission problems that have occurred after a reference to a file was acquired.', 'NotReadableError')
- }
- yield * (0,external_node_fs_namespaceObject.createReadStream)(this.#path, {
- start: this.#start,
- end: this.#start + this.size - 1
- })
- }
-
- get [Symbol.toStringTag] () {
- return 'Blob'
- }
-}
-
-/* harmony default export */ const from = ((/* unused pure expression or super */ null && (blobFromSync)));
-
-
-
-/***/ }),
-
-/***/ 1410:
-/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __nccwpck_require__) => {
-
-"use strict";
-/* harmony export */ __nccwpck_require__.d(__webpack_exports__, {
-/* harmony export */ "Z": () => (__WEBPACK_DEFAULT_EXPORT__)
-/* harmony export */ });
-/* unused harmony export Blob */
-/* harmony import */ var _streams_cjs__WEBPACK_IMPORTED_MODULE_0__ = __nccwpck_require__(8572);
-/*! fetch-blob. MIT License. Jimmy Wärting */
-
-// TODO (jimmywarting): in the feature use conditional loading with top level await (requires 14.x)
-// Node has recently added whatwg stream into core
-
-
-
-// 64 KiB (same size chrome slice theirs blob into Uint8array's)
-const POOL_SIZE = 65536
-
-/** @param {(Blob | Uint8Array)[]} parts */
-async function * toIterator (parts, clone = true) {
- for (const part of parts) {
- if ('stream' in part) {
- yield * (/** @type {AsyncIterableIterator<Uint8Array>} */ (part.stream()))
- } else if (ArrayBuffer.isView(part)) {
- if (clone) {
- let position = part.byteOffset
- const end = part.byteOffset + part.byteLength
- while (position !== end) {
- const size = Math.min(end - position, POOL_SIZE)
- const chunk = part.buffer.slice(position, position + size)
- position += chunk.byteLength
- yield new Uint8Array(chunk)
- }
- } else {
- yield part
- }
- /* c8 ignore next 10 */
- } else {
- // For blobs that have arrayBuffer but no stream method (nodes buffer.Blob)
- let position = 0, b = (/** @type {Blob} */ (part))
- while (position !== b.size) {
- const chunk = b.slice(position, Math.min(b.size, position + POOL_SIZE))
- const buffer = await chunk.arrayBuffer()
- position += buffer.byteLength
- yield new Uint8Array(buffer)
- }
- }
- }
-}
-
-const _Blob = class Blob {
- /** @type {Array.<(Blob|Uint8Array)>} */
- #parts = []
- #type = ''
- #size = 0
- #endings = 'transparent'
-
- /**
- * The Blob() constructor returns a new Blob object. The content
- * of the blob consists of the concatenation of the values given
- * in the parameter array.
- *
- * @param {*} blobParts
- * @param {{ type?: string, endings?: string }} [options]
- */
- constructor (blobParts = [], options = {}) {
- if (typeof blobParts !== 'object' || blobParts === null) {
- throw new TypeError('Failed to construct \'Blob\': The provided value cannot be converted to a sequence.')
- }
-
- if (typeof blobParts[Symbol.iterator] !== 'function') {
- throw new TypeError('Failed to construct \'Blob\': The object must have a callable @@iterator property.')
- }
-
- if (typeof options !== 'object' && typeof options !== 'function') {
- throw new TypeError('Failed to construct \'Blob\': parameter 2 cannot convert to dictionary.')
- }
-
- if (options === null) options = {}
-
- const encoder = new TextEncoder()
- for (const element of blobParts) {
- let part
- if (ArrayBuffer.isView(element)) {
- part = new Uint8Array(element.buffer.slice(element.byteOffset, element.byteOffset + element.byteLength))
- } else if (element instanceof ArrayBuffer) {
- part = new Uint8Array(element.slice(0))
- } else if (element instanceof Blob) {
- part = element
- } else {
- part = encoder.encode(`${element}`)
- }
-
- this.#size += ArrayBuffer.isView(part) ? part.byteLength : part.size
- this.#parts.push(part)
- }
-
- this.#endings = `${options.endings === undefined ? 'transparent' : options.endings}`
- const type = options.type === undefined ? '' : String(options.type)
- this.#type = /^[\x20-\x7E]*$/.test(type) ? type : ''
- }
-
- /**
- * The Blob interface's size property returns the
- * size of the Blob in bytes.
- */
- get size () {
- return this.#size
- }
-
- /**
- * The type property of a Blob object returns the MIME type of the file.
- */
- get type () {
- return this.#type
- }
-
- /**
- * The text() method in the Blob interface returns a Promise
- * that resolves with a string containing the contents of
- * the blob, interpreted as UTF-8.
- *
- * @return {Promise<string>}
- */
- async text () {
- // More optimized than using this.arrayBuffer()
- // that requires twice as much ram
- const decoder = new TextDecoder()
- let str = ''
- for await (const part of toIterator(this.#parts, false)) {
- str += decoder.decode(part, { stream: true })
- }
- // Remaining
- str += decoder.decode()
- return str
- }
-
- /**
- * The arrayBuffer() method in the Blob interface returns a
- * Promise that resolves with the contents of the blob as
- * binary data contained in an ArrayBuffer.
- *
- * @return {Promise<ArrayBuffer>}
- */
- async arrayBuffer () {
- // Easier way... Just a unnecessary overhead
- // const view = new Uint8Array(this.size);
- // await this.stream().getReader({mode: 'byob'}).read(view);
- // return view.buffer;
-
- const data = new Uint8Array(this.size)
- let offset = 0
- for await (const chunk of toIterator(this.#parts, false)) {
- data.set(chunk, offset)
- offset += chunk.length
- }
-
- return data.buffer
- }
-
- stream () {
- const it = toIterator(this.#parts, true)
-
- return new globalThis.ReadableStream({
- // @ts-ignore
- type: 'bytes',
- async pull (ctrl) {
- const chunk = await it.next()
- chunk.done ? ctrl.close() : ctrl.enqueue(chunk.value)
- },
-
- async cancel () {
- await it.return()
- }
- })
- }
-
- /**
- * The Blob interface's slice() method creates and returns a
- * new Blob object which contains data from a subset of the
- * blob on which it's called.
- *
- * @param {number} [start]
- * @param {number} [end]
- * @param {string} [type]
- */
- slice (start = 0, end = this.size, type = '') {
- const { size } = this
-
- let relativeStart = start < 0 ? Math.max(size + start, 0) : Math.min(start, size)
- let relativeEnd = end < 0 ? Math.max(size + end, 0) : Math.min(end, size)
-
- const span = Math.max(relativeEnd - relativeStart, 0)
- const parts = this.#parts
- const blobParts = []
- let added = 0
-
- for (const part of parts) {
- // don't add the overflow to new blobParts
- if (added >= span) {
- break
- }
-
- const size = ArrayBuffer.isView(part) ? part.byteLength : part.size
- if (relativeStart && size <= relativeStart) {
- // Skip the beginning and change the relative
- // start & end position as we skip the unwanted parts
- relativeStart -= size
- relativeEnd -= size
- } else {
- let chunk
- if (ArrayBuffer.isView(part)) {
- chunk = part.subarray(relativeStart, Math.min(size, relativeEnd))
- added += chunk.byteLength
- } else {
- chunk = part.slice(relativeStart, Math.min(size, relativeEnd))
- added += chunk.size
- }
- relativeEnd -= size
- blobParts.push(chunk)
- relativeStart = 0 // All next sequential parts should start at 0
- }
- }
-
- const blob = new Blob([], { type: String(type).toLowerCase() })
- blob.#size = span
- blob.#parts = blobParts
-
- return blob
- }
-
- get [Symbol.toStringTag] () {
- return 'Blob'
- }
-
- static [Symbol.hasInstance] (object) {
- return (
- object &&
- typeof object === 'object' &&
- typeof object.constructor === 'function' &&
- (
- typeof object.stream === 'function' ||
- typeof object.arrayBuffer === 'function'
- ) &&
- /^(Blob|File)$/.test(object[Symbol.toStringTag])
- )
- }
-}
-
-Object.defineProperties(_Blob.prototype, {
- size: { enumerable: true },
- type: { enumerable: true },
- slice: { enumerable: true }
-})
-
-/** @type {typeof globalThis.Blob} */
-const Blob = _Blob
-/* harmony default export */ const __WEBPACK_DEFAULT_EXPORT__ = (Blob);
-
-
-/***/ }),
-
-/***/ 8010:
-/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __nccwpck_require__) => {
-
-"use strict";
-/* harmony export */ __nccwpck_require__.d(__webpack_exports__, {
-/* harmony export */ "Ct": () => (/* binding */ FormData),
-/* harmony export */ "au": () => (/* binding */ formDataToBlob)
-/* harmony export */ });
-/* unused harmony export File */
-/* harmony import */ var fetch_blob__WEBPACK_IMPORTED_MODULE_0__ = __nccwpck_require__(1410);
-/* harmony import */ var fetch_blob_file_js__WEBPACK_IMPORTED_MODULE_1__ = __nccwpck_require__(3213);
-/*! formdata-polyfill. MIT License. Jimmy Wärting */
-
-
-
-
-var {toStringTag:t,iterator:i,hasInstance:h}=Symbol,
-r=Math.random,
-m='append,set,get,getAll,delete,keys,values,entries,forEach,constructor'.split(','),
-f=(a,b,c)=>(a+='',/^(Blob|File)$/.test(b && b[t])?[(c=c!==void 0?c+'':b[t]=='File'?b.name:'blob',a),b.name!==c||b[t]=='blob'?new fetch_blob_file_js__WEBPACK_IMPORTED_MODULE_1__/* ["default"] */ .Z([b],c,b):b]:[a,b+'']),
-e=(c,f)=>(f?c:c.replace(/\r?\n|\r/g,'\r\n')).replace(/\n/g,'%0A').replace(/\r/g,'%0D').replace(/"/g,'%22'),
-x=(n, a, e)=>{if(a.lengthtypeof o[m]!='function')}
-append(...a){x('append',arguments,2);this.#d.push(f(...a))}
-delete(a){x('delete',arguments,1);a+='';this.#d=this.#d.filter(([b])=>b!==a)}
-get(a){x('get',arguments,1);a+='';for(var b=this.#d,l=b.length,c=0;cc[0]===a&&b.push(c[1]));return b}
-has(a){x('has',arguments,1);a+='';return this.#d.some(b=>b[0]===a)}
-forEach(a,b){x('forEach',arguments,1);for(var [c,d]of this)a.call(b,d,c,this)}
-set(...a){x('set',arguments,2);var b=[],c=!0;a=f(...a);this.#d.forEach(d=>{d[0]===a[0]?c&&(c=!b.push(a)):b.push(d)});c&&b.push(a);this.#d=b}
-*entries(){yield*this.#d}
-*keys(){for(var[a]of this)yield a}
-*values(){for(var[,a]of this)yield a}}
-
-/** @param {FormData} F */
-function formDataToBlob (F,B=fetch_blob__WEBPACK_IMPORTED_MODULE_0__/* ["default"] */ .Z){
-var b=`${r()}${r()}`.replace(/\./g, '').slice(-28).padStart(32, '-'),c=[],p=`--${b}\r\nContent-Disposition: form-data; name="`
-F.forEach((v,n)=>typeof v=='string'
-?c.push(p+e(n)+`"\r\n\r\n${v.replace(/\r(?!\n)|(? {
-
-"use strict";
-// ESM COMPAT FLAG
-__nccwpck_require__.r(__webpack_exports__);
-
-// EXPORTS
-__nccwpck_require__.d(__webpack_exports__, {
- "AbortError": () => (/* reexport */ AbortError),
- "Blob": () => (/* reexport */ from/* Blob */.t6),
- "FetchError": () => (/* reexport */ FetchError),
- "File": () => (/* reexport */ from/* File */.$B),
- "FormData": () => (/* reexport */ esm_min/* FormData */.Ct),
- "Headers": () => (/* reexport */ Headers),
- "Request": () => (/* reexport */ Request),
- "Response": () => (/* reexport */ Response),
- "blobFrom": () => (/* reexport */ from/* blobFrom */.xB),
- "blobFromSync": () => (/* reexport */ from/* blobFromSync */.SX),
- "default": () => (/* binding */ fetch),
- "fileFrom": () => (/* reexport */ from/* fileFrom */.e2),
- "fileFromSync": () => (/* reexport */ from/* fileFromSync */.RA),
- "isRedirect": () => (/* reexport */ isRedirect)
-});
-
-;// CONCATENATED MODULE: external "node:http"
-const external_node_http_namespaceObject = require("node:http");
-;// CONCATENATED MODULE: external "node:https"
-const external_node_https_namespaceObject = require("node:https");
-;// CONCATENATED MODULE: external "node:zlib"
-const external_node_zlib_namespaceObject = require("node:zlib");
-;// CONCATENATED MODULE: external "node:stream"
-const external_node_stream_namespaceObject = require("node:stream");
-;// CONCATENATED MODULE: external "node:buffer"
-const external_node_buffer_namespaceObject = require("node:buffer");
-;// CONCATENATED MODULE: ./node_modules/data-uri-to-buffer/dist/index.js
-/**
- * Returns a `Buffer` instance from the given data URI `uri`.
- *
- * @param {String} uri Data URI to turn into a Buffer instance
- * @returns {Buffer} Buffer instance from Data URI
- * @api public
- */
-function dataUriToBuffer(uri) {
- if (!/^data:/i.test(uri)) {
- throw new TypeError('`uri` does not appear to be a Data URI (must begin with "data:")');
- }
- // strip newlines
- uri = uri.replace(/\r?\n/g, '');
- // split the URI up into the "metadata" and the "data" portions
- const firstComma = uri.indexOf(',');
- if (firstComma === -1 || firstComma <= 4) {
- throw new TypeError('malformed data: URI');
- }
- // remove the "data:" scheme and parse the metadata
- const meta = uri.substring(5, firstComma).split(';');
- let charset = '';
- let base64 = false;
- const type = meta[0] || 'text/plain';
- let typeFull = type;
- for (let i = 1; i < meta.length; i++) {
- if (meta[i] === 'base64') {
- base64 = true;
- }
- else if (meta[i]) {
- typeFull += `;${meta[i]}`;
- if (meta[i].indexOf('charset=') === 0) {
- charset = meta[i].substring(8);
- }
- }
- }
- // defaults to US-ASCII only if type is not provided
- if (!meta[0] && !charset.length) {
- typeFull += ';charset=US-ASCII';
- charset = 'US-ASCII';
- }
- // get the encoded data portion and decode URI-encoded chars
- const encoding = base64 ? 'base64' : 'ascii';
- const data = unescape(uri.substring(firstComma + 1));
- const buffer = Buffer.from(data, encoding);
- // set `.type` and `.typeFull` properties to MIME type
- buffer.type = type;
- buffer.typeFull = typeFull;
- // set the `.charset` property
- buffer.charset = charset;
- return buffer;
-}
-/* harmony default export */ const dist = (dataUriToBuffer);
-//# sourceMappingURL=index.js.map
-;// CONCATENATED MODULE: external "node:util"
-const external_node_util_namespaceObject = require("node:util");
-// EXTERNAL MODULE: ./node_modules/fetch-blob/index.js
-var fetch_blob = __nccwpck_require__(1410);
-// EXTERNAL MODULE: ./node_modules/formdata-polyfill/esm.min.js
-var esm_min = __nccwpck_require__(8010);
-;// CONCATENATED MODULE: ./node_modules/node-fetch/src/errors/base.js
-class FetchBaseError extends Error {
- constructor(message, type) {
- super(message);
- // Hide custom error implementation details from end-users
- Error.captureStackTrace(this, this.constructor);
-
- this.type = type;
- }
-
- get name() {
- return this.constructor.name;
- }
-
- get [Symbol.toStringTag]() {
- return this.constructor.name;
- }
-}
-
-;// CONCATENATED MODULE: ./node_modules/node-fetch/src/errors/fetch-error.js
-
-
-
-/**
- * @typedef {{ address?: string, code: string, dest?: string, errno: number, info?: object, message: string, path?: string, port?: number, syscall: string}} SystemError
-*/
-
-/**
- * FetchError interface for operational errors
- */
-class FetchError extends FetchBaseError {
- /**
- * @param {string} message - Error message for human
- * @param {string} [type] - Error type for machine
- * @param {SystemError} [systemError] - For Node.js system error
- */
- constructor(message, type, systemError) {
- super(message, type);
- // When err.type is `system`, err.erroredSysCall contains system error and err.code contains system error code
- if (systemError) {
- // eslint-disable-next-line no-multi-assign
- this.code = this.errno = systemError.code;
- this.erroredSysCall = systemError.syscall;
- }
- }
-}
-
-;// CONCATENATED MODULE: ./node_modules/node-fetch/src/utils/is.js
-/**
- * Is.js
- *
- * Object type checks.
- */
-
-const NAME = Symbol.toStringTag;
-
-/**
- * Check if `obj` is a URLSearchParams object
- * ref: https://github.com/node-fetch/node-fetch/issues/296#issuecomment-307598143
- * @param {*} object - Object to check for
- * @return {boolean}
- */
-const isURLSearchParameters = object => {
- return (
- typeof object === 'object' &&
- typeof object.append === 'function' &&
- typeof object.delete === 'function' &&
- typeof object.get === 'function' &&
- typeof object.getAll === 'function' &&
- typeof object.has === 'function' &&
- typeof object.set === 'function' &&
- typeof object.sort === 'function' &&
- object[NAME] === 'URLSearchParams'
- );
-};
-
-/**
- * Check if `object` is a W3C `Blob` object (which `File` inherits from)
- * @param {*} object - Object to check for
- * @return {boolean}
- */
-const isBlob = object => {
- return (
- object &&
- typeof object === 'object' &&
- typeof object.arrayBuffer === 'function' &&
- typeof object.type === 'string' &&
- typeof object.stream === 'function' &&
- typeof object.constructor === 'function' &&
- /^(Blob|File)$/.test(object[NAME])
- );
-};
-
-/**
- * Check if `obj` is an instance of AbortSignal.
- * @param {*} object - Object to check for
- * @return {boolean}
- */
-const isAbortSignal = object => {
- return (
- typeof object === 'object' && (
- object[NAME] === 'AbortSignal' ||
- object[NAME] === 'EventTarget'
- )
- );
-};
-
-/**
- * isDomainOrSubdomain reports whether sub is a subdomain (or exact match) of
- * the parent domain.
- *
- * Both domains must already be in canonical form.
- * @param {string|URL} original
- * @param {string|URL} destination
- */
-const isDomainOrSubdomain = (destination, original) => {
- const orig = new URL(original).hostname;
- const dest = new URL(destination).hostname;
-
- return orig === dest || orig.endsWith(`.${dest}`);
-};
-
-/**
- * isSameProtocol reports whether the two provided URLs use the same protocol.
- *
- * Both domains must already be in canonical form.
- * @param {string|URL} original
- * @param {string|URL} destination
- */
-const isSameProtocol = (destination, original) => {
- const orig = new URL(original).protocol;
- const dest = new URL(destination).protocol;
-
- return orig === dest;
-};
-
-;// CONCATENATED MODULE: ./node_modules/node-fetch/src/body.js
-
-/**
- * Body.js
- *
- * Body interface provides common methods for Request and Response
- */
-
-
-
-
-
-
-
-
-
-
-
-
-const pipeline = (0,external_node_util_namespaceObject.promisify)(external_node_stream_namespaceObject.pipeline);
-const INTERNALS = Symbol('Body internals');
-
-/**
- * Body mixin
- *
- * Ref: https://fetch.spec.whatwg.org/#body
- *
- * @param Stream body Readable stream
- * @param Object opts Response options
- * @return Void
- */
-class Body {
- constructor(body, {
- size = 0
- } = {}) {
- let boundary = null;
-
- if (body === null) {
- // Body is undefined or null
- body = null;
- } else if (isURLSearchParameters(body)) {
- // Body is a URLSearchParams
- body = external_node_buffer_namespaceObject.Buffer.from(body.toString());
- } else if (isBlob(body)) {
- // Body is blob
- } else if (external_node_buffer_namespaceObject.Buffer.isBuffer(body)) {
- // Body is Buffer
- } else if (external_node_util_namespaceObject.types.isAnyArrayBuffer(body)) {
- // Body is ArrayBuffer
- body = external_node_buffer_namespaceObject.Buffer.from(body);
- } else if (ArrayBuffer.isView(body)) {
- // Body is ArrayBufferView
- body = external_node_buffer_namespaceObject.Buffer.from(body.buffer, body.byteOffset, body.byteLength);
- } else if (body instanceof external_node_stream_namespaceObject) {
- // Body is stream
- } else if (body instanceof esm_min/* FormData */.Ct) {
- // Body is FormData
- body = (0,esm_min/* formDataToBlob */.au)(body);
- boundary = body.type.split('=')[1];
- } else {
- // None of the above
- // coerce to string then buffer
- body = external_node_buffer_namespaceObject.Buffer.from(String(body));
- }
-
- let stream = body;
-
- if (external_node_buffer_namespaceObject.Buffer.isBuffer(body)) {
- stream = external_node_stream_namespaceObject.Readable.from(body);
- } else if (isBlob(body)) {
- stream = external_node_stream_namespaceObject.Readable.from(body.stream());
- }
-
- this[INTERNALS] = {
- body,
- stream,
- boundary,
- disturbed: false,
- error: null
- };
- this.size = size;
-
- if (body instanceof external_node_stream_namespaceObject) {
- body.on('error', error_ => {
- const error = error_ instanceof FetchBaseError ?
- error_ :
- new FetchError(`Invalid response body while trying to fetch ${this.url}: ${error_.message}`, 'system', error_);
- this[INTERNALS].error = error;
- });
- }
- }
-
- get body() {
- return this[INTERNALS].stream;
- }
-
- get bodyUsed() {
- return this[INTERNALS].disturbed;
- }
-
- /**
- * Decode response as ArrayBuffer
- *
- * @return Promise
- */
- async arrayBuffer() {
- const {buffer, byteOffset, byteLength} = await consumeBody(this);
- return buffer.slice(byteOffset, byteOffset + byteLength);
- }
-
- async formData() {
- const ct = this.headers.get('content-type');
-
- if (ct.startsWith('application/x-www-form-urlencoded')) {
- const formData = new esm_min/* FormData */.Ct();
- const parameters = new URLSearchParams(await this.text());
-
- for (const [name, value] of parameters) {
- formData.append(name, value);
- }
-
- return formData;
- }
-
- const {toFormData} = await __nccwpck_require__.e(/* import() */ 37).then(__nccwpck_require__.bind(__nccwpck_require__, 4037));
- return toFormData(this.body, ct);
- }
-
- /**
- * Return raw response as Blob
- *
- * @return Promise
- */
- async blob() {
- const ct = (this.headers && this.headers.get('content-type')) || (this[INTERNALS].body && this[INTERNALS].body.type) || '';
- const buf = await this.arrayBuffer();
-
- return new fetch_blob/* default */.Z([buf], {
- type: ct
- });
- }
-
- /**
- * Decode response as json
- *
- * @return Promise
- */
- async json() {
- const text = await this.text();
- return JSON.parse(text);
- }
-
- /**
- * Decode response as text
- *
- * @return Promise
- */
- async text() {
- const buffer = await consumeBody(this);
- return new TextDecoder().decode(buffer);
- }
-
- /**
- * Decode response as buffer (non-spec api)
- *
- * @return Promise
- */
- buffer() {
- return consumeBody(this);
- }
-}
-
-Body.prototype.buffer = (0,external_node_util_namespaceObject.deprecate)(Body.prototype.buffer, 'Please use \'response.arrayBuffer()\' instead of \'response.buffer()\'', 'node-fetch#buffer');
-
-// In browsers, all properties are enumerable.
-Object.defineProperties(Body.prototype, {
- body: {enumerable: true},
- bodyUsed: {enumerable: true},
- arrayBuffer: {enumerable: true},
- blob: {enumerable: true},
- json: {enumerable: true},
- text: {enumerable: true},
- data: {get: (0,external_node_util_namespaceObject.deprecate)(() => {},
- 'data doesn\'t exist, use json(), text(), arrayBuffer(), or body instead',
- 'https://github.com/node-fetch/node-fetch/issues/1000 (response)')}
-});
-
-/**
- * Consume and convert an entire Body to a Buffer.
- *
- * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body
- *
- * @return Promise
- */
-async function consumeBody(data) {
- if (data[INTERNALS].disturbed) {
- throw new TypeError(`body used already for: ${data.url}`);
- }
-
- data[INTERNALS].disturbed = true;
-
- if (data[INTERNALS].error) {
- throw data[INTERNALS].error;
- }
-
- const {body} = data;
-
- // Body is null
- if (body === null) {
- return external_node_buffer_namespaceObject.Buffer.alloc(0);
- }
-
- /* c8 ignore next 3 */
- if (!(body instanceof external_node_stream_namespaceObject)) {
- return external_node_buffer_namespaceObject.Buffer.alloc(0);
- }
-
- // Body is stream
- // get ready to actually consume the body
- const accum = [];
- let accumBytes = 0;
-
- try {
- for await (const chunk of body) {
- if (data.size > 0 && accumBytes + chunk.length > data.size) {
- const error = new FetchError(`content size at ${data.url} over limit: ${data.size}`, 'max-size');
- body.destroy(error);
- throw error;
- }
-
- accumBytes += chunk.length;
- accum.push(chunk);
- }
- } catch (error) {
- const error_ = error instanceof FetchBaseError ? error : new FetchError(`Invalid response body while trying to fetch ${data.url}: ${error.message}`, 'system', error);
- throw error_;
- }
-
- if (body.readableEnded === true || body._readableState.ended === true) {
- try {
- if (accum.every(c => typeof c === 'string')) {
- return external_node_buffer_namespaceObject.Buffer.from(accum.join(''));
- }
-
- return external_node_buffer_namespaceObject.Buffer.concat(accum, accumBytes);
- } catch (error) {
- throw new FetchError(`Could not create Buffer from response body for ${data.url}: ${error.message}`, 'system', error);
- }
- } else {
- throw new FetchError(`Premature close of server response while trying to fetch ${data.url}`);
- }
-}
-
-/**
- * Clone body given Res/Req instance
- *
- * @param Mixed instance Response or Request instance
- * @param String highWaterMark highWaterMark for both PassThrough body streams
- * @return Mixed
- */
-const clone = (instance, highWaterMark) => {
- let p1;
- let p2;
- let {body} = instance[INTERNALS];
-
- // Don't allow cloning a used body
- if (instance.bodyUsed) {
- throw new Error('cannot clone body after it is used');
- }
-
- // Check that body is a stream and not form-data object
- // note: we can't clone the form-data object without having it as a dependency
- if ((body instanceof external_node_stream_namespaceObject) && (typeof body.getBoundary !== 'function')) {
- // Tee instance body
- p1 = new external_node_stream_namespaceObject.PassThrough({highWaterMark});
- p2 = new external_node_stream_namespaceObject.PassThrough({highWaterMark});
- body.pipe(p1);
- body.pipe(p2);
- // Set instance body to teed body and return the other teed body
- instance[INTERNALS].stream = p1;
- body = p2;
- }
-
- return body;
-};
-
-const getNonSpecFormDataBoundary = (0,external_node_util_namespaceObject.deprecate)(
- body => body.getBoundary(),
- 'form-data doesn\'t follow the spec and requires special treatment. Use alternative package',
- 'https://github.com/node-fetch/node-fetch/issues/1167'
-);
-
-/**
- * Performs the operation "extract a `Content-Type` value from |object|" as
- * specified in the specification:
- * https://fetch.spec.whatwg.org/#concept-bodyinit-extract
- *
- * This function assumes that instance.body is present.
- *
- * @param {any} body Any options.body input
- * @returns {string | null}
- */
-const extractContentType = (body, request) => {
- // Body is null or undefined
- if (body === null) {
- return null;
- }
-
- // Body is string
- if (typeof body === 'string') {
- return 'text/plain;charset=UTF-8';
- }
-
- // Body is a URLSearchParams
- if (isURLSearchParameters(body)) {
- return 'application/x-www-form-urlencoded;charset=UTF-8';
- }
-
- // Body is blob
- if (isBlob(body)) {
- return body.type || null;
- }
-
- // Body is a Buffer (Buffer, ArrayBuffer or ArrayBufferView)
- if (external_node_buffer_namespaceObject.Buffer.isBuffer(body) || external_node_util_namespaceObject.types.isAnyArrayBuffer(body) || ArrayBuffer.isView(body)) {
- return null;
- }
-
- if (body instanceof esm_min/* FormData */.Ct) {
- return `multipart/form-data; boundary=${request[INTERNALS].boundary}`;
- }
-
- // Detect form data input from form-data module
- if (body && typeof body.getBoundary === 'function') {
- return `multipart/form-data;boundary=${getNonSpecFormDataBoundary(body)}`;
- }
-
- // Body is stream - can't really do much about this
- if (body instanceof external_node_stream_namespaceObject) {
- return null;
- }
-
- // Body constructor defaults other things to string
- return 'text/plain;charset=UTF-8';
-};
-
-/**
- * The Fetch Standard treats this as if "total bytes" is a property on the body.
- * For us, we have to explicitly get it with a function.
- *
- * ref: https://fetch.spec.whatwg.org/#concept-body-total-bytes
- *
- * @param {any} obj.body Body object from the Body instance.
- * @returns {number | null}
- */
-const getTotalBytes = request => {
- const {body} = request[INTERNALS];
-
- // Body is null or undefined
- if (body === null) {
- return 0;
- }
-
- // Body is Blob
- if (isBlob(body)) {
- return body.size;
- }
-
- // Body is Buffer
- if (external_node_buffer_namespaceObject.Buffer.isBuffer(body)) {
- return body.length;
- }
-
- // Detect form data input from form-data module
- if (body && typeof body.getLengthSync === 'function') {
- return body.hasKnownLength && body.hasKnownLength() ? body.getLengthSync() : null;
- }
-
- // Body is stream
- return null;
-};
-
-/**
- * Write a Body to a Node.js WritableStream (e.g. http.Request) object.
- *
- * @param {Stream.Writable} dest The stream to write to.
- * @param obj.body Body object from the Body instance.
- * @returns {Promise}
- */
-const writeToStream = async (dest, {body}) => {
- if (body === null) {
- // Body is null
- dest.end();
- } else {
- // Body is stream
- await pipeline(body, dest);
- }
-};
-
-;// CONCATENATED MODULE: ./node_modules/node-fetch/src/headers.js
-/**
- * Headers.js
- *
- * Headers class offers convenient helpers
- */
-
-
-
-
-/* c8 ignore next 9 */
-const validateHeaderName = typeof external_node_http_namespaceObject.validateHeaderName === 'function' ?
- external_node_http_namespaceObject.validateHeaderName :
- name => {
- if (!/^[\^`\-\w!#$%&'*+.|~]+$/.test(name)) {
- const error = new TypeError(`Header name must be a valid HTTP token [${name}]`);
- Object.defineProperty(error, 'code', {value: 'ERR_INVALID_HTTP_TOKEN'});
- throw error;
- }
- };
-
-/* c8 ignore next 9 */
-const validateHeaderValue = typeof external_node_http_namespaceObject.validateHeaderValue === 'function' ?
- external_node_http_namespaceObject.validateHeaderValue :
- (name, value) => {
- if (/[^\t\u0020-\u007E\u0080-\u00FF]/.test(value)) {
- const error = new TypeError(`Invalid character in header content ["${name}"]`);
- Object.defineProperty(error, 'code', {value: 'ERR_INVALID_CHAR'});
- throw error;
- }
- };
-
-/**
- * @typedef {Headers | Record<string, string> | Iterable<readonly [string, string]> | Iterable<Iterable<string>>} HeadersInit
- */
-
-/**
- * This Fetch API interface allows you to perform various actions on HTTP request and response headers.
- * These actions include retrieving, setting, adding to, and removing.
- * A Headers object has an associated header list, which is initially empty and consists of zero or more name and value pairs.
- * You can add to this using methods like append() (see Examples.)
- * In all methods of this interface, header names are matched by case-insensitive byte sequence.
- *
- */
-class Headers extends URLSearchParams {
- /**
- * Headers class
- *
- * @constructor
- * @param {HeadersInit} [init] - Response headers
- */
- constructor(init) {
- // Validate and normalize init object in [name, value(s)][]
- /** @type {string[][]} */
- let result = [];
- if (init instanceof Headers) {
- const raw = init.raw();
- for (const [name, values] of Object.entries(raw)) {
- result.push(...values.map(value => [name, value]));
- }
- } else if (init == null) { // eslint-disable-line no-eq-null, eqeqeq
- // No op
- } else if (typeof init === 'object' && !external_node_util_namespaceObject.types.isBoxedPrimitive(init)) {
- const method = init[Symbol.iterator];
- // eslint-disable-next-line no-eq-null, eqeqeq
- if (method == null) {
- // Record
- result.push(...Object.entries(init));
- } else {
- if (typeof method !== 'function') {
- throw new TypeError('Header pairs must be iterable');
- }
-
- // Sequence>
- // Note: per spec we have to first exhaust the lists then process them
- result = [...init]
- .map(pair => {
- if (
- typeof pair !== 'object' || external_node_util_namespaceObject.types.isBoxedPrimitive(pair)
- ) {
- throw new TypeError('Each header pair must be an iterable object');
- }
-
- return [...pair];
- }).map(pair => {
- if (pair.length !== 2) {
- throw new TypeError('Each header pair must be a name/value tuple');
- }
-
- return [...pair];
- });
- }
- } else {
- throw new TypeError('Failed to construct \'Headers\': The provided value is not of type \'(sequence<sequence<ByteString>> or record<ByteString, ByteString>)');
- }
-
- // Validate and lowercase
- result =
- result.length > 0 ?
- result.map(([name, value]) => {
- validateHeaderName(name);
- validateHeaderValue(name, String(value));
- return [String(name).toLowerCase(), String(value)];
- }) :
- undefined;
-
- super(result);
-
- // Returning a Proxy that will lowercase key names, validate parameters and sort keys
- // eslint-disable-next-line no-constructor-return
- return new Proxy(this, {
- get(target, p, receiver) {
- switch (p) {
- case 'append':
- case 'set':
- return (name, value) => {
- validateHeaderName(name);
- validateHeaderValue(name, String(value));
- return URLSearchParams.prototype[p].call(
- target,
- String(name).toLowerCase(),
- String(value)
- );
- };
-
- case 'delete':
- case 'has':
- case 'getAll':
- return name => {
- validateHeaderName(name);
- return URLSearchParams.prototype[p].call(
- target,
- String(name).toLowerCase()
- );
- };
-
- case 'keys':
- return () => {
- target.sort();
- return new Set(URLSearchParams.prototype.keys.call(target)).keys();
- };
-
- default:
- return Reflect.get(target, p, receiver);
- }
- }
- });
- /* c8 ignore next */
- }
-
- get [Symbol.toStringTag]() {
- return this.constructor.name;
- }
-
- toString() {
- return Object.prototype.toString.call(this);
- }
-
- get(name) {
- const values = this.getAll(name);
- if (values.length === 0) {
- return null;
- }
-
- let value = values.join(', ');
- if (/^content-encoding$/i.test(name)) {
- value = value.toLowerCase();
- }
-
- return value;
- }
-
- forEach(callback, thisArg = undefined) {
- for (const name of this.keys()) {
- Reflect.apply(callback, thisArg, [this.get(name), name, this]);
- }
- }
-
- * values() {
- for (const name of this.keys()) {
- yield this.get(name);
- }
- }
-
- /**
- * @type {() => IterableIterator<[string, string]>}
- */
- * entries() {
- for (const name of this.keys()) {
- yield [name, this.get(name)];
- }
- }
-
- [Symbol.iterator]() {
- return this.entries();
- }
-
- /**
- * Node-fetch non-spec method
- * returning all headers and their values as array
- * @returns {Record<string, string[]>}
- */
- raw() {
- return [...this.keys()].reduce((result, key) => {
- result[key] = this.getAll(key);
- return result;
- }, {});
- }
-
- /**
- * For better console.log(headers) and also to convert Headers into Node.js Request compatible format
- */
- [Symbol.for('nodejs.util.inspect.custom')]() {
- return [...this.keys()].reduce((result, key) => {
- const values = this.getAll(key);
- // Http.request() only supports string as Host header.
- // This hack makes specifying custom Host header possible.
- if (key === 'host') {
- result[key] = values[0];
- } else {
- result[key] = values.length > 1 ? values : values[0];
- }
-
- return result;
- }, {});
- }
-}
-
-/**
- * Re-shaping object for Web IDL tests
- * Only need to do it for overridden methods
- */
-Object.defineProperties(
- Headers.prototype,
- ['get', 'entries', 'forEach', 'values'].reduce((result, property) => {
- result[property] = {enumerable: true};
- return result;
- }, {})
-);
-
-/**
- * Create a Headers object from an http.IncomingMessage.rawHeaders, ignoring those that do
- * not conform to HTTP grammar productions.
- * @param {import('http').IncomingMessage['rawHeaders']} headers
- */
-function fromRawHeaders(headers = []) {
- return new Headers(
- headers
- // Split into pairs
- .reduce((result, value, index, array) => {
- if (index % 2 === 0) {
- result.push(array.slice(index, index + 2));
- }
-
- return result;
- }, [])
- .filter(([name, value]) => {
- try {
- validateHeaderName(name);
- validateHeaderValue(name, String(value));
- return true;
- } catch {
- return false;
- }
- })
-
- );
-}
-
-;// CONCATENATED MODULE: ./node_modules/node-fetch/src/utils/is-redirect.js
-const redirectStatus = new Set([301, 302, 303, 307, 308]);
-
-/**
- * Redirect code matching
- *
- * @param {number} code - Status code
- * @return {boolean}
- */
-const isRedirect = code => {
- return redirectStatus.has(code);
-};
-
-;// CONCATENATED MODULE: ./node_modules/node-fetch/src/response.js
-/**
- * Response.js
- *
- * Response class provides content decoding
- */
-
-
-
-
-
-const response_INTERNALS = Symbol('Response internals');
-
-/**
- * Response class
- *
- * Ref: https://fetch.spec.whatwg.org/#response-class
- *
- * @param Stream body Readable stream
- * @param Object opts Response options
- * @return Void
- */
-class Response extends Body {
- constructor(body = null, options = {}) {
- super(body, options);
-
- // eslint-disable-next-line no-eq-null, eqeqeq, no-negated-condition
- const status = options.status != null ? options.status : 200;
-
- const headers = new Headers(options.headers);
-
- if (body !== null && !headers.has('Content-Type')) {
- const contentType = extractContentType(body, this);
- if (contentType) {
- headers.append('Content-Type', contentType);
- }
- }
-
- this[response_INTERNALS] = {
- type: 'default',
- url: options.url,
- status,
- statusText: options.statusText || '',
- headers,
- counter: options.counter,
- highWaterMark: options.highWaterMark
- };
- }
-
- get type() {
- return this[response_INTERNALS].type;
- }
-
- get url() {
- return this[response_INTERNALS].url || '';
- }
-
- get status() {
- return this[response_INTERNALS].status;
- }
-
- /**
- * Convenience property representing if the request ended normally
- */
- get ok() {
- return this[response_INTERNALS].status >= 200 && this[response_INTERNALS].status < 300;
- }
-
- get redirected() {
- return this[response_INTERNALS].counter > 0;
- }
-
- get statusText() {
- return this[response_INTERNALS].statusText;
- }
-
- get headers() {
- return this[response_INTERNALS].headers;
- }
-
- get highWaterMark() {
- return this[response_INTERNALS].highWaterMark;
- }
-
- /**
- * Clone this response
- *
- * @return Response
- */
- clone() {
- return new Response(clone(this, this.highWaterMark), {
- type: this.type,
- url: this.url,
- status: this.status,
- statusText: this.statusText,
- headers: this.headers,
- ok: this.ok,
- redirected: this.redirected,
- size: this.size,
- highWaterMark: this.highWaterMark
- });
- }
-
- /**
- * @param {string} url The URL that the new response is to originate from.
- * @param {number} status An optional status code for the response (e.g., 302.)
- * @returns {Response} A Response object.
- */
- static redirect(url, status = 302) {
- if (!isRedirect(status)) {
- throw new RangeError('Failed to execute "redirect" on "response": Invalid status code');
- }
-
- return new Response(null, {
- headers: {
- location: new URL(url).toString()
- },
- status
- });
- }
-
- static error() {
- const response = new Response(null, {status: 0, statusText: ''});
- response[response_INTERNALS].type = 'error';
- return response;
- }
-
- static json(data = undefined, init = {}) {
- const body = JSON.stringify(data);
-
- if (body === undefined) {
- throw new TypeError('data is not JSON serializable');
- }
-
- const headers = new Headers(init && init.headers);
-
- if (!headers.has('content-type')) {
- headers.set('content-type', 'application/json');
- }
- return new Response(body, {
- ...init,
- headers
- });
- }
+/***/ }),
- get [Symbol.toStringTag]() {
- return 'Response';
- }
-}
+/***/ 3185:
+/***/ ((module) => {
-Object.defineProperties(Response.prototype, {
- type: {enumerable: true},
- url: {enumerable: true},
- status: {enumerable: true},
- ok: {enumerable: true},
- redirected: {enumerable: true},
- statusText: {enumerable: true},
- headers: {enumerable: true},
- clone: {enumerable: true}
-});
+"use strict";
-;// CONCATENATED MODULE: external "node:url"
-const external_node_url_namespaceObject = require("node:url");
-;// CONCATENATED MODULE: ./node_modules/node-fetch/src/utils/get-search.js
-const getSearch = parsedURL => {
- if (parsedURL.search) {
- return parsedURL.search;
- }
- const lastOffset = parsedURL.href.length - 1;
- const hash = parsedURL.hash || (parsedURL.href[lastOffset] === '#' ? '#' : '');
- return parsedURL.href[lastOffset - hash.length] === '?' ? '?' : '';
+module.exports.mixin = function mixin(target, source) {
+ const keys = Object.getOwnPropertyNames(source);
+ for (let i = 0; i < keys.length; ++i) {
+ Object.defineProperty(target, keys[i], Object.getOwnPropertyDescriptor(source, keys[i]));
+ }
};
-;// CONCATENATED MODULE: external "node:net"
-const external_node_net_namespaceObject = require("node:net");
-;// CONCATENATED MODULE: ./node_modules/node-fetch/src/utils/referrer.js
-
-
-/**
- * @external URL
- * @see {@link https://developer.mozilla.org/en-US/docs/Web/API/URL|URL}
- */
-
-/**
- * @module utils/referrer
- * @private
- */
-
-/**
- * @see {@link https://w3c.github.io/webappsec-referrer-policy/#strip-url|Referrer Policy §8.4. Strip url for use as a referrer}
- * @param {string} URL
- * @param {boolean} [originOnly=false]
- */
-function stripURLForUseAsAReferrer(url, originOnly = false) {
- // 1. If url is null, return no referrer.
- if (url == null) { // eslint-disable-line no-eq-null, eqeqeq
- return 'no-referrer';
- }
-
- url = new URL(url);
-
- // 2. If url's scheme is a local scheme, then return no referrer.
- if (/^(about|blob|data):$/.test(url.protocol)) {
- return 'no-referrer';
- }
-
- // 3. Set url's username to the empty string.
- url.username = '';
-
- // 4. Set url's password to null.
- // Note: `null` appears to be a mistake as this actually results in the password being `"null"`.
- url.password = '';
-
- // 5. Set url's fragment to null.
- // Note: `null` appears to be a mistake as this actually results in the fragment being `"#null"`.
- url.hash = '';
-
- // 6. If the origin-only flag is true, then:
- if (originOnly) {
- // 6.1. Set url's path to null.
- // Note: `null` appears to be a mistake as this actually results in the path being `"/null"`.
- url.pathname = '';
-
- // 6.2. Set url's query to null.
- // Note: `null` appears to be a mistake as this actually results in the query being `"?null"`.
- url.search = '';
- }
-
- // 7. Return url.
- return url;
-}
-
-/**
- * @see {@link https://w3c.github.io/webappsec-referrer-policy/#enumdef-referrerpolicy|enum ReferrerPolicy}
- */
-const ReferrerPolicy = new Set([
- '',
- 'no-referrer',
- 'no-referrer-when-downgrade',
- 'same-origin',
- 'origin',
- 'strict-origin',
- 'origin-when-cross-origin',
- 'strict-origin-when-cross-origin',
- 'unsafe-url'
-]);
-
-/**
- * @see {@link https://w3c.github.io/webappsec-referrer-policy/#default-referrer-policy|default referrer policy}
- */
-const DEFAULT_REFERRER_POLICY = 'strict-origin-when-cross-origin';
-
-/**
- * @see {@link https://w3c.github.io/webappsec-referrer-policy/#referrer-policies|Referrer Policy §3. Referrer Policies}
- * @param {string} referrerPolicy
- * @returns {string} referrerPolicy
- */
-function validateReferrerPolicy(referrerPolicy) {
- if (!ReferrerPolicy.has(referrerPolicy)) {
- throw new TypeError(`Invalid referrerPolicy: ${referrerPolicy}`);
- }
-
- return referrerPolicy;
-}
-
-/**
- * @see {@link https://w3c.github.io/webappsec-secure-contexts/#is-origin-trustworthy|Referrer Policy §3.2. Is origin potentially trustworthy?}
- * @param {external:URL} url
- * @returns `true`: "Potentially Trustworthy", `false`: "Not Trustworthy"
- */
-function isOriginPotentiallyTrustworthy(url) {
- // 1. If origin is an opaque origin, return "Not Trustworthy".
- // Not applicable
-
- // 2. Assert: origin is a tuple origin.
- // Not for implementations
-
- // 3. If origin's scheme is either "https" or "wss", return "Potentially Trustworthy".
- if (/^(http|ws)s:$/.test(url.protocol)) {
- return true;
- }
-
- // 4. If origin's host component matches one of the CIDR notations 127.0.0.0/8 or ::1/128 [RFC4632], return "Potentially Trustworthy".
- const hostIp = url.host.replace(/(^\[)|(]$)/g, '');
- const hostIPVersion = (0,external_node_net_namespaceObject.isIP)(hostIp);
-
- if (hostIPVersion === 4 && /^127\./.test(hostIp)) {
- return true;
- }
-
- if (hostIPVersion === 6 && /^(((0+:){7})|(::(0+:){0,6}))0*1$/.test(hostIp)) {
- return true;
- }
-
- // 5. If origin's host component is "localhost" or falls within ".localhost", and the user agent conforms to the name resolution rules in [let-localhost-be-localhost], return "Potentially Trustworthy".
- // We are returning FALSE here because we cannot ensure conformance to
- // let-localhost-be-loalhost (https://tools.ietf.org/html/draft-west-let-localhost-be-localhost)
- if (url.host === 'localhost' || url.host.endsWith('.localhost')) {
- return false;
- }
-
- // 6. If origin's scheme component is file, return "Potentially Trustworthy".
- if (url.protocol === 'file:') {
- return true;
- }
-
- // 7. If origin's scheme component is one which the user agent considers to be authenticated, return "Potentially Trustworthy".
- // Not supported
-
- // 8. If origin has been configured as a trustworthy origin, return "Potentially Trustworthy".
- // Not supported
-
- // 9. Return "Not Trustworthy".
- return false;
-}
-
-/**
- * @see {@link https://w3c.github.io/webappsec-secure-contexts/#is-url-trustworthy|Referrer Policy §3.3. Is url potentially trustworthy?}
- * @param {external:URL} url
- * @returns `true`: "Potentially Trustworthy", `false`: "Not Trustworthy"
- */
-function isUrlPotentiallyTrustworthy(url) {
- // 1. If url is "about:blank" or "about:srcdoc", return "Potentially Trustworthy".
- if (/^about:(blank|srcdoc)$/.test(url)) {
- return true;
- }
-
- // 2. If url's scheme is "data", return "Potentially Trustworthy".
- if (url.protocol === 'data:') {
- return true;
- }
-
- // Note: The origin of blob: and filesystem: URLs is the origin of the context in which they were
- // created. Therefore, blobs created in a trustworthy origin will themselves be potentially
- // trustworthy.
- if (/^(blob|filesystem):$/.test(url.protocol)) {
- return true;
- }
-
- // 3. Return the result of executing §3.2 Is origin potentially trustworthy? on url's origin.
- return isOriginPotentiallyTrustworthy(url);
-}
-
-/**
- * Modifies the referrerURL to enforce any extra security policy considerations.
- * @see {@link https://w3c.github.io/webappsec-referrer-policy/#determine-requests-referrer|Referrer Policy §8.3. Determine request's Referrer}, step 7
- * @callback module:utils/referrer~referrerURLCallback
- * @param {external:URL} referrerURL
- * @returns {external:URL} modified referrerURL
- */
-
-/**
- * Modifies the referrerOrigin to enforce any extra security policy considerations.
- * @see {@link https://w3c.github.io/webappsec-referrer-policy/#determine-requests-referrer|Referrer Policy §8.3. Determine request's Referrer}, step 7
- * @callback module:utils/referrer~referrerOriginCallback
- * @param {external:URL} referrerOrigin
- * @returns {external:URL} modified referrerOrigin
- */
-
-/**
- * @see {@link https://w3c.github.io/webappsec-referrer-policy/#determine-requests-referrer|Referrer Policy §8.3. Determine request's Referrer}
- * @param {Request} request
- * @param {object} o
- * @param {module:utils/referrer~referrerURLCallback} o.referrerURLCallback
- * @param {module:utils/referrer~referrerOriginCallback} o.referrerOriginCallback
- * @returns {external:URL} Request's referrer
- */
-function determineRequestsReferrer(request, {referrerURLCallback, referrerOriginCallback} = {}) {
- // There are 2 notes in the specification about invalid pre-conditions. We return null, here, for
- // these cases:
- // > Note: If request's referrer is "no-referrer", Fetch will not call into this algorithm.
- // > Note: If request's referrer policy is the empty string, Fetch will not call into this
- // > algorithm.
- if (request.referrer === 'no-referrer' || request.referrerPolicy === '') {
- return null;
- }
-
- // 1. Let policy be request's associated referrer policy.
- const policy = request.referrerPolicy;
-
- // 2. Let environment be request's client.
- // not applicable to node.js
-
- // 3. Switch on request's referrer:
- if (request.referrer === 'about:client') {
- return 'no-referrer';
- }
-
- // "a URL": Let referrerSource be request's referrer.
- const referrerSource = request.referrer;
-
- // 4. Let request's referrerURL be the result of stripping referrerSource for use as a referrer.
- let referrerURL = stripURLForUseAsAReferrer(referrerSource);
-
- // 5. Let referrerOrigin be the result of stripping referrerSource for use as a referrer, with the
- // origin-only flag set to true.
- let referrerOrigin = stripURLForUseAsAReferrer(referrerSource, true);
-
- // 6. If the result of serializing referrerURL is a string whose length is greater than 4096, set
- // referrerURL to referrerOrigin.
- if (referrerURL.toString().length > 4096) {
- referrerURL = referrerOrigin;
- }
-
- // 7. The user agent MAY alter referrerURL or referrerOrigin at this point to enforce arbitrary
- // policy considerations in the interests of minimizing data leakage. For example, the user
- // agent could strip the URL down to an origin, modify its host, replace it with an empty
- // string, etc.
- if (referrerURLCallback) {
- referrerURL = referrerURLCallback(referrerURL);
- }
-
- if (referrerOriginCallback) {
- referrerOrigin = referrerOriginCallback(referrerOrigin);
- }
-
- // 8.Execute the statements corresponding to the value of policy:
- const currentURL = new URL(request.url);
-
- switch (policy) {
- case 'no-referrer':
- return 'no-referrer';
-
- case 'origin':
- return referrerOrigin;
-
- case 'unsafe-url':
- return referrerURL;
-
- case 'strict-origin':
- // 1. If referrerURL is a potentially trustworthy URL and request's current URL is not a
- // potentially trustworthy URL, then return no referrer.
- if (isUrlPotentiallyTrustworthy(referrerURL) && !isUrlPotentiallyTrustworthy(currentURL)) {
- return 'no-referrer';
- }
-
- // 2. Return referrerOrigin.
- return referrerOrigin.toString();
-
- case 'strict-origin-when-cross-origin':
- // 1. If the origin of referrerURL and the origin of request's current URL are the same, then
- // return referrerURL.
- if (referrerURL.origin === currentURL.origin) {
- return referrerURL;
- }
-
- // 2. If referrerURL is a potentially trustworthy URL and request's current URL is not a
- // potentially trustworthy URL, then return no referrer.
- if (isUrlPotentiallyTrustworthy(referrerURL) && !isUrlPotentiallyTrustworthy(currentURL)) {
- return 'no-referrer';
- }
-
- // 3. Return referrerOrigin.
- return referrerOrigin;
-
- case 'same-origin':
- // 1. If the origin of referrerURL and the origin of request's current URL are the same, then
- // return referrerURL.
- if (referrerURL.origin === currentURL.origin) {
- return referrerURL;
- }
-
- // 2. Return no referrer.
- return 'no-referrer';
-
- case 'origin-when-cross-origin':
- // 1. If the origin of referrerURL and the origin of request's current URL are the same, then
- // return referrerURL.
- if (referrerURL.origin === currentURL.origin) {
- return referrerURL;
- }
-
- // Return referrerOrigin.
- return referrerOrigin;
-
- case 'no-referrer-when-downgrade':
- // 1. If referrerURL is a potentially trustworthy URL and request's current URL is not a
- // potentially trustworthy URL, then return no referrer.
- if (isUrlPotentiallyTrustworthy(referrerURL) && !isUrlPotentiallyTrustworthy(currentURL)) {
- return 'no-referrer';
- }
-
- // 2. Return referrerURL.
- return referrerURL;
-
- default:
- throw new TypeError(`Invalid referrerPolicy: ${policy}`);
- }
-}
-
-/**
- * @see {@link https://w3c.github.io/webappsec-referrer-policy/#parse-referrer-policy-from-header|Referrer Policy §8.1. Parse a referrer policy from a Referrer-Policy header}
- * @param {Headers} headers Response headers
- * @returns {string} policy
- */
-function parseReferrerPolicyFromHeader(headers) {
- // 1. Let policy-tokens be the result of extracting header list values given `Referrer-Policy`
- // and response’s header list.
- const policyTokens = (headers.get('referrer-policy') || '').split(/[,\s]+/);
-
- // 2. Let policy be the empty string.
- let policy = '';
-
- // 3. For each token in policy-tokens, if token is a referrer policy and token is not the empty
- // string, then set policy to token.
- // Note: This algorithm loops over multiple policy values to allow deployment of new policy
- // values with fallbacks for older user agents, as described in § 11.1 Unknown Policy Values.
- for (const token of policyTokens) {
- if (token && ReferrerPolicy.has(token)) {
- policy = token;
- }
- }
-
- // 4. Return policy.
- return policy;
-}
-
-;// CONCATENATED MODULE: ./node_modules/node-fetch/src/request.js
-/**
- * Request.js
- *
- * Request class contains server only options
- *
- * All spec algorithm step numbers are based on https://fetch.spec.whatwg.org/commit-snapshots/ae716822cb3a61843226cd090eefc6589446c1d2/.
- */
-
-
-
-
-
-
-
-
-
-const request_INTERNALS = Symbol('Request internals');
+module.exports.wrapperSymbol = Symbol("wrapper");
+module.exports.implSymbol = Symbol("impl");
-/**
- * Check if `obj` is an instance of Request.
- *
- * @param {*} object
- * @return {boolean}
- */
-const isRequest = object => {
- return (
- typeof object === 'object' &&
- typeof object[request_INTERNALS] === 'object'
- );
+module.exports.wrapperForImpl = function (impl) {
+ return impl[module.exports.wrapperSymbol];
};
-const doBadDataWarn = (0,external_node_util_namespaceObject.deprecate)(() => {},
- '.data is not a valid RequestInit property, use .body instead',
- 'https://github.com/node-fetch/node-fetch/issues/1000 (request)');
-
-/**
- * Request class
- *
- * Ref: https://fetch.spec.whatwg.org/#request-class
- *
- * @param Mixed input Url or Request instance
- * @param Object init Custom options
- * @return Void
- */
-class Request extends Body {
- constructor(input, init = {}) {
- let parsedURL;
-
- // Normalize input and force URL to be encoded as UTF-8 (https://github.com/node-fetch/node-fetch/issues/245)
- if (isRequest(input)) {
- parsedURL = new URL(input.url);
- } else {
- parsedURL = new URL(input);
- input = {};
- }
+module.exports.implForWrapper = function (wrapper) {
+ return wrapper[module.exports.implSymbol];
+};
- if (parsedURL.username !== '' || parsedURL.password !== '') {
- throw new TypeError(`${parsedURL} is an url with embedded credentials.`);
- }
- let method = init.method || input.method || 'GET';
- if (/^(delete|get|head|options|post|put)$/i.test(method)) {
- method = method.toUpperCase();
- }
- if (!isRequest(init) && 'data' in init) {
- doBadDataWarn();
- }
+/***/ }),
- // eslint-disable-next-line no-eq-null, eqeqeq
- if ((init.body != null || (isRequest(input) && input.body !== null)) &&
- (method === 'GET' || method === 'HEAD')) {
- throw new TypeError('Request with GET/HEAD method cannot have body');
- }
+/***/ 2940:
+/***/ ((module) => {
- const inputBody = init.body ?
- init.body :
- (isRequest(input) && input.body !== null ?
- clone(input) :
- null);
+// Returns a wrapper function that returns a wrapped callback
+// The wrapper function should do some stuff, and return a
+// presumably different callback function.
+// This makes sure that own properties are retained, so that
+// decorations and such are not lost along the way.
+module.exports = wrappy
+function wrappy (fn, cb) {
+ if (fn && cb) return wrappy(fn)(cb)
- super(inputBody, {
- size: init.size || input.size || 0
- });
+ if (typeof fn !== 'function')
+ throw new TypeError('need wrapper function')
- const headers = new Headers(init.headers || input.headers || {});
+ Object.keys(fn).forEach(function (k) {
+ wrapper[k] = fn[k]
+ })
- if (inputBody !== null && !headers.has('Content-Type')) {
- const contentType = extractContentType(inputBody, this);
- if (contentType) {
- headers.set('Content-Type', contentType);
- }
- }
+ return wrapper
- let signal = isRequest(input) ?
- input.signal :
- null;
- if ('signal' in init) {
- signal = init.signal;
- }
+ function wrapper() {
+ var args = new Array(arguments.length)
+ for (var i = 0; i < args.length; i++) {
+ args[i] = arguments[i]
+ }
+ var ret = fn.apply(this, args)
+ var cb = args[args.length-1]
+ if (typeof ret === 'function' && ret !== cb) {
+ Object.keys(cb).forEach(function (k) {
+ ret[k] = cb[k]
+ })
+ }
+ return ret
+ }
+}
- // eslint-disable-next-line no-eq-null, eqeqeq
- if (signal != null && !isAbortSignal(signal)) {
- throw new TypeError('Expected signal to be an instanceof AbortSignal or EventTarget');
- }
- // §5.4, Request constructor steps, step 15.1
- // eslint-disable-next-line no-eq-null, eqeqeq
- let referrer = init.referrer == null ? input.referrer : init.referrer;
- if (referrer === '') {
- // §5.4, Request constructor steps, step 15.2
- referrer = 'no-referrer';
- } else if (referrer) {
- // §5.4, Request constructor steps, step 15.3.1, 15.3.2
- const parsedReferrer = new URL(referrer);
- // §5.4, Request constructor steps, step 15.3.3, 15.3.4
- referrer = /^about:(\/\/)?client$/.test(parsedReferrer) ? 'client' : parsedReferrer;
- } else {
- referrer = undefined;
- }
+/***/ }),
- this[request_INTERNALS] = {
- method,
- redirect: init.redirect || input.redirect || 'follow',
- headers,
- parsedURL,
- signal,
- referrer
- };
+/***/ 3128:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
- // Node-fetch-only options
- this.follow = init.follow === undefined ? (input.follow === undefined ? 20 : input.follow) : init.follow;
- this.compress = init.compress === undefined ? (input.compress === undefined ? true : input.compress) : init.compress;
- this.counter = init.counter || input.counter || 0;
- this.agent = init.agent || input.agent;
- this.highWaterMark = init.highWaterMark || input.highWaterMark || 16384;
- this.insecureHTTPParser = init.insecureHTTPParser || input.insecureHTTPParser || false;
+"use strict";
- // §5.4, Request constructor steps, step 16.
- // Default is empty string per https://fetch.spec.whatwg.org/#concept-request-referrer-policy
- this.referrerPolicy = init.referrerPolicy || input.referrerPolicy || '';
- }
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.buildBadgeUrl = exports.buildSvg = exports.generateBadges = void 0;
+const fs = __importStar(__nccwpck_require__(7147));
+const config_1 = __nccwpck_require__(6373);
+const lcov_1 = __nccwpck_require__(4888);
+async function generateBadges(projects) {
+ for (const project of projects.filter(p => p.coverage)) {
+ const percentage = (0, lcov_1.getProjectPercentage)(project);
+ if (percentage === undefined) {
+ continue;
+ }
+ const svg = await buildSvg(project.name, config_1.Config.upperCoverageThreshold, config_1.Config.lowerCoverageThreshold, percentage);
+ const path = project.pubspecFile.split('/').slice(0, -1).join('/');
+ // write svg to file
+ fs.writeFileSync(`${path}/coverage.svg`, svg);
+ }
+ const totalPercentage = (0, lcov_1.getTotalPercentage)(projects);
+ if (totalPercentage === undefined) {
+ return;
+ }
+ const svg = await buildSvg('Test Coverage', config_1.Config.upperCoverageThreshold, config_1.Config.lowerCoverageThreshold, totalPercentage);
+ fs.writeFileSync(`./coverage-total.svg`, svg);
+}
+exports.generateBadges = generateBadges;
+async function buildSvg(name, upper, lower, percentage) {
+ // get url
+ const url = buildBadgeUrl(name, upper, lower, percentage);
+ const res = await fetch(url);
+ return res.text();
+}
+exports.buildSvg = buildSvg;
+function buildBadgeUrl(name, upper, lower, percentage) {
+ let color = '';
+ if (percentage >= upper) {
+ color = 'success';
+ }
+ else if (percentage >= lower) {
+ color = 'important';
+ }
+ else {
+ color = 'critical';
+ }
+ const firstHalf = name ? `${name}-` : '';
+ const secondHalf = `${percentage.toFixed(2)}%`;
+ return encodeURI(`http://img.shields.io/badge/${firstHalf}${secondHalf}-${color}`);
+}
+exports.buildBadgeUrl = buildBadgeUrl;
- /** @returns {string} */
- get method() {
- return this[request_INTERNALS].method;
- }
- /** @returns {string} */
- get url() {
- return (0,external_node_url_namespaceObject.format)(this[request_INTERNALS].parsedURL);
- }
+/***/ }),
- /** @returns {Headers} */
- get headers() {
- return this[request_INTERNALS].headers;
- }
+/***/ 7810:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
- get redirect() {
- return this[request_INTERNALS].redirect;
- }
+"use strict";
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.commentsEqual = exports.getBodyOf = exports.minimizeComment = exports.deleteComment = exports.createComment = exports.updateComment = exports.findPreviousComment = void 0;
+const core = __importStar(__nccwpck_require__(2186));
+function headerComment(header) {
+ return `<!-- Sticky Pull Request Comment${header} -->`;
+}
+function bodyWithHeader(body, header) {
+ return `${body}\n${headerComment(header)}`;
+}
+async function findPreviousComment(octokit, repo, number, header) {
+ let after = null;
+ let hasNextPage = true;
+ const h = headerComment(header);
+ while (hasNextPage) {
+ const data = await octokit.graphql(`
+ query($repo: String! $owner: String! $number: Int! $after: String) {
+ viewer { login }
+ repository(name: $repo owner: $owner) {
+ pullRequest(number: $number) {
+ comments(first: 100 after: $after) {
+ nodes {
+ id
+ author {
+ login
+ }
+ isMinimized
+ body
+ }
+ pageInfo {
+ endCursor
+ hasNextPage
+ }
+ }
+ }
+ }
+ }
+ `, { ...repo, after, number });
+ // eslint-disable-next-line @typescript-eslint/no-unnecessary-type-assertion
+ const viewer = data.viewer;
+ // eslint-disable-next-line @typescript-eslint/no-unnecessary-type-assertion
+ const repository = data.repository;
+ const target = repository.pullRequest?.comments?.nodes?.find((node) => node?.author?.login === viewer.login.replace('[bot]', '') &&
+ !node?.isMinimized &&
+ node?.body?.includes(h));
+ if (target) {
+ return target;
+ }
+ after = repository.pullRequest?.comments?.pageInfo?.endCursor;
+ hasNextPage =
+ repository.pullRequest?.comments?.pageInfo?.hasNextPage ?? false;
+ }
+ return undefined;
+}
+exports.findPreviousComment = findPreviousComment;
+async function updateComment(octokit, id, body, header, previousBody) {
+ if (!body && !previousBody)
+ return core.warning('Comment body cannot be blank');
+ await octokit.graphql(`
+ mutation($input: UpdateIssueCommentInput!) {
+ updateIssueComment(input: $input) {
+ issueComment {
+ id
+ body
+ }
+ }
+ }
+ `, {
+ input: {
+ id,
+ body: previousBody
+ ? `${previousBody}\n${body}`
+ : bodyWithHeader(body, header)
+ }
+ });
+}
+exports.updateComment = updateComment;
+async function createComment(octokit, repo, issue_number, body, header, previousBody) {
+ if (!body && !previousBody) {
+ core.warning('Comment body cannot be blank');
+ return;
+ }
+ return await octokit.rest.issues.createComment({
+ ...repo,
+ issue_number,
+ body: previousBody
+ ? `${previousBody}\n${body}`
+ : bodyWithHeader(body, header)
+ });
+}
+exports.createComment = createComment;
+async function deleteComment(octokit, id) {
+ await octokit.graphql(`
+ mutation($id: ID!) {
+ deleteIssueComment(input: { id: $id }) {
+ clientMutationId
+ }
+ }
+ `, { id });
+}
+exports.deleteComment = deleteComment;
+async function minimizeComment(octokit, subjectId, classifier) {
+ await octokit.graphql(`
+ mutation($input: MinimizeCommentInput!) {
+ minimizeComment(input: $input) {
+ clientMutationId
+ }
+ }
+ `, { input: { subjectId, classifier } });
+}
+exports.minimizeComment = minimizeComment;
+function getBodyOf(previous, append, hideDetails) {
+ if (!append) {
+ return undefined;
+ }
+ if (!hideDetails) {
+ return previous.body;
+ }
+ return previous.body?.replace(/(<details.*?)\s*\bopen\b(.*)/g, '$1$2');
+}
+exports.getBodyOf = getBodyOf;
+function commentsEqual(body, previous, header) {
+ const newBody = bodyWithHeader(body, header);
+ return newBody === previous;
+}
+exports.commentsEqual = commentsEqual;
- /** @returns {AbortSignal} */
- get signal() {
- return this[request_INTERNALS].signal;
- }
- // https://fetch.spec.whatwg.org/#dom-request-referrer
- get referrer() {
- if (this[request_INTERNALS].referrer === 'no-referrer') {
- return '';
- }
+/***/ }),
- if (this[request_INTERNALS].referrer === 'client') {
- return 'about:client';
- }
+/***/ 6373:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
- if (this[request_INTERNALS].referrer) {
- return this[request_INTERNALS].referrer.toString();
- }
+"use strict";
- return undefined;
- }
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.Config = void 0;
+const core_1 = __nccwpck_require__(2186);
+class Config {
+ static get githubToken() {
+ return (0, core_1.getInput)('GITHUB_TOKEN', { required: true });
+ }
+ static get githubHeadRef() {
+ return (0, core_1.getInput)('GITHUB_HEAD_REF', { required: true });
+ }
+ static get upperCoverageThreshold() {
+ return parseFloat((0, core_1.getInput)('upper_threshold'));
+ }
+ static get lowerCoverageThreshold() {
+ return parseFloat((0, core_1.getInput)('lower_threshold'));
+ }
+ static get compareAgainstBase() {
+ return (0, core_1.getInput)('compare_against_base') === 'true';
+ }
+ static get enforceThreshold() {
+ return this.parseCoverageRule((0, core_1.getInput)('enforce_threshold'));
+ }
+ static get enforceForbiddenDecrease() {
+ return this.parseCoverageRule((0, core_1.getInput)('enforce_forbidden_decrease'));
+ }
+ static get generateBadges() {
+ return (0, core_1.getInput)('generate_badges') === 'true';
+ }
+ static parseCoverageRule(rule) {
+ switch (rule) {
+ case 'none':
+ case 'false':
+ return 'none';
+ case 'single':
+ return 'single';
+ case 'total':
+ return 'total';
+ default:
+ throw new Error(`Unknown coverage rule '${rule}'`);
+ }
+ }
+}
+exports.Config = Config;
- get referrerPolicy() {
- return this[request_INTERNALS].referrerPolicy;
- }
- set referrerPolicy(referrerPolicy) {
- this[request_INTERNALS].referrerPolicy = validateReferrerPolicy(referrerPolicy);
- }
+/***/ }),
- /**
- * Clone this request
- *
- * @return Request
- */
- clone() {
- return new Request(this);
- }
+/***/ 6932:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
- get [Symbol.toStringTag]() {
- return 'Request';
- }
-}
+"use strict";
-Object.defineProperties(Request.prototype, {
- method: {enumerable: true},
- url: {enumerable: true},
- headers: {enumerable: true},
- redirect: {enumerable: true},
- clone: {enumerable: true},
- signal: {enumerable: true},
- referrer: {enumerable: true},
- referrerPolicy: {enumerable: true}
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
});
-
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.findProjects = void 0;
+const fs = __importStar(__nccwpck_require__(7147));
+const glob = __importStar(__nccwpck_require__(8090));
/**
- * Convert a Request to Node.js http request options.
- *
- * @param {Request} request - A Request instance
- * @return The options object to be passed to http.request
+ * Finds all projects in the current directory
+ * @param excludePattern Glob patterns of pubspec files that will be disregarded, separated by newlines
+ */
+async function findProjects(excludePattern) {
+ const excludes = excludePattern
+ ?.split('\n')
+ .map(p => `!${p}`)
+ ?.join('\n') ?? '';
+ // Find all pubspec.yaml files in directory
+ const globber = await glob.create(`**/pubspec.yaml\n${excludes}`);
+ const results = await globber.glob();
+ const files = results.filter(f => fs.lstatSync(f).isFile());
+ // Sort files by depth and then alphabetically
+ files.sort((a, b) => {
+ const aDepth = a.split('/').length;
+ const bDepth = b.split('/').length;
+ if (aDepth === bDepth) {
+ return a.localeCompare(b);
+ }
+ return aDepth - bDepth;
+ });
+ const projects = [];
+ for (const file of files) {
+ const project = getProject(file);
+ if (project) {
+ projects.push(project);
+ }
+ }
+ return projects;
+}
+exports.findProjects = findProjects;
+/**
+ * Returns the project that is described in the pubspec.yaml file
+ * @param file The path to the project's pubspec.yaml file
*/
-const getNodeRequestOptions = request => {
- const {parsedURL} = request[request_INTERNALS];
- const headers = new Headers(request[request_INTERNALS].headers);
+function getProject(file) {
+ // Read file in lines
+ const lines = fs.readFileSync(file, 'utf8').split('\n');
+ // Find name
+ const name = lines
+ .find(l => l.startsWith('name:'))
+ ?.split(':')[1]
+ ?.trim();
+ // Find description
+ const description = lines
+ .find(l => l.startsWith('description:'))
+ ?.split(':')[1]
+ ?.trim();
+ // Check if project contains coverage
+ const parentDirectory = file.split('/').slice(0, -1).join('/');
+ const coverageFile = `${parentDirectory}/coverage/lcov.info`;
+ const containsCoverage = fs.existsSync(coverageFile);
+ if (name && description) {
+ return {
+ name,
+ description,
+ pubspecFile: file,
+ coverageFile: containsCoverage ? coverageFile : null
+ };
+ }
+ return null;
+}
- // Fetch step 1.3
- if (!headers.has('Accept')) {
- headers.set('Accept', '*/*');
- }
- // HTTP-network-or-cache fetch steps 2.4-2.7
- let contentLengthValue = null;
- if (request.body === null && /^(post|put)$/i.test(request.method)) {
- contentLengthValue = '0';
- }
+/***/ }),
- if (request.body !== null) {
- const totalBytes = getTotalBytes(request);
- // Set Content-Length if totalBytes is a number (that is not NaN)
- if (typeof totalBytes === 'number' && !Number.isNaN(totalBytes)) {
- contentLengthValue = String(totalBytes);
- }
- }
+/***/ 6350:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
- if (contentLengthValue) {
- headers.set('Content-Length', contentLengthValue);
- }
+"use strict";
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.commitAndPushChanges = exports.popStash = exports.checkoutRef = exports.stashChanges = exports.configureGit = void 0;
+const exec = __importStar(__nccwpck_require__(1514));
+const github_1 = __nccwpck_require__(5438);
+const config_1 = __nccwpck_require__(6373);
+async function configureGit() {
+ await exec.exec('git', ['config', 'user.name', 'github-actions[bot]']);
+ await exec.exec('git', [
+ 'config',
+ 'user.email',
+ 'github-actions[bot]@users.noreply.github.com'
+ ]);
+ const url = `https://x-access-token:${config_1.Config.githubToken}@github.com/${github_1.context.payload.repository?.full_name}`;
+ await exec.exec('git', ['remote', 'set-url', 'origin', url]);
+}
+exports.configureGit = configureGit;
+async function stashChanges() {
+ await exec.exec('git', ['stash']);
+}
+exports.stashChanges = stashChanges;
+async function checkoutRef(ref) {
+ await exec.exec('git', ['fetch', 'origin', ref]);
+ await exec.exec('git', ['checkout', ref]);
+}
+exports.checkoutRef = checkoutRef;
+async function popStash() {
+ try {
+ await exec.exec('git', ['stash', 'pop']);
+ return true;
+ }
+ catch (error) {
+ return false;
+ }
+}
+exports.popStash = popStash;
+async function commitAndPushChanges(commitMessage) {
+ await exec.exec('git', ['add', '.']);
+ await exec.exec('git', ['commit', '-am', commitMessage]);
+ await exec.exec('git', ['push', 'origin']);
+}
+exports.commitAndPushChanges = commitAndPushChanges;
- // 4.1. Main fetch, step 2.6
- // > If request's referrer policy is the empty string, then set request's referrer policy to the
- // > default referrer policy.
- if (request.referrerPolicy === '') {
- request.referrerPolicy = DEFAULT_REFERRER_POLICY;
- }
- // 4.1. Main fetch, step 2.7
- // > If request's referrer is not "no-referrer", set request's referrer to the result of invoking
- // > determine request's referrer.
- if (request.referrer && request.referrer !== 'no-referrer') {
- request[request_INTERNALS].referrer = determineRequestsReferrer(request);
- } else {
- request[request_INTERNALS].referrer = 'no-referrer';
- }
+/***/ }),
- // 4.5. HTTP-network-or-cache fetch, step 6.9
- // > If httpRequest's referrer is a URL, then append `Referer`/httpRequest's referrer, serialized
- // > and isomorphic encoded, to httpRequest's header list.
- if (request[request_INTERNALS].referrer instanceof URL) {
- headers.set('Referer', request.referrer);
- }
+/***/ 4888:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
- // HTTP-network-or-cache fetch step 2.11
- if (!headers.has('User-Agent')) {
- headers.set('User-Agent', 'node-fetch');
- }
+"use strict";
- // HTTP-network-or-cache fetch step 2.15
- if (request.compress && !headers.has('Accept-Encoding')) {
- headers.set('Accept-Encoding', 'gzip, deflate, br');
- }
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.getTotalPercentageBefore = exports.getTotalPercentage = exports.getProjectPercentageBefore = exports.getProjectPercentage = exports.getLcovPercentage = exports.parseLcovBefore = exports.coverProject = void 0;
+const lcov_parse_1 = __importDefault(__nccwpck_require__(7454));
+/**
+ * Converts a project into a CoveredProject object by attempting to parse the coverage file
+ * @param project the project for which the coverage should be calculated
+ * @returns a CoveredProject object that contains the name, description and coverage of the project
+ */
+async function coverProject(project) {
+ return {
+ name: project.name,
+ description: project.description,
+ coverageFile: project.coverageFile,
+ pubspecFile: project.pubspecFile,
+ coverage: await parseLcov(project),
+ coverageBefore: undefined
+ };
+}
+exports.coverProject = coverProject;
+/**
+ * Should be called once we are in "before" state. Parses coverage file and sets it as coverageBefore
+ * @param project Project to parse
+ * @returns Project including coverage before
+ */
+async function parseLcovBefore(project) {
+ return {
+ ...project,
+ coverageBefore: await parseLcov(project)
+ };
+}
+exports.parseLcovBefore = parseLcovBefore;
+async function parseLcov(project) {
+ const file = project.coverageFile;
+ if (!file) {
+ return undefined;
+ }
+ return new Promise((resolve, reject) => {
+ (0, lcov_parse_1.default)(file, (err, data) => {
+ if (err) {
+ reject(err);
+ }
+ resolve(data);
+ });
+ });
+}
+function getLcovPercentage(lcov) {
+ let hit = 0;
+ let found = 0;
+ for (const entry of lcov) {
+ hit += entry.lines.hit;
+ found += entry.lines.found;
+ }
+ return (hit / found) * 100;
+}
+exports.getLcovPercentage = getLcovPercentage;
+function getProjectPercentage(project) {
+ if (project.coverage === undefined) {
+ return undefined;
+ }
+ return getLcovPercentage(project.coverage);
+}
+exports.getProjectPercentage = getProjectPercentage;
+function getProjectPercentageBefore(project) {
+ if (project.coverageBefore === undefined) {
+ return undefined;
+ }
+ if (project.coverageBefore === null) {
+ return 0;
+ }
+ return getLcovPercentage(project.coverageBefore);
+}
+exports.getProjectPercentageBefore = getProjectPercentageBefore;
+function getTotalPercentage(projects) {
+ const coverages = projects
+ .map(getProjectPercentage)
+ .filter(c => c !== undefined);
+ if (coverages.length === 0) {
+ return undefined;
+ }
+ return coverages.reduce((a, b) => a + b) / coverages.length;
+}
+exports.getTotalPercentage = getTotalPercentage;
+function getTotalPercentageBefore(projects) {
+ const coverages = projects
+ .map(p => p.coverageBefore)
+ .filter(c => c !== undefined && c !== null)
+ .map(a => getLcovPercentage(a));
+ if (coverages.length === 0) {
+ return undefined;
+ }
+ return coverages.reduce((a, b) => a + b) / coverages.length;
+}
+exports.getTotalPercentageBefore = getTotalPercentageBefore;
- let {agent} = request;
- if (typeof agent === 'function') {
- agent = agent(parsedURL);
- }
- // HTTP-network fetch step 4.2
- // chunked encoding is handled by Node.js
+/***/ }),
- const search = getSearch(parsedURL);
+/***/ 399:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
- // Pass the full URL directly to request(), but overwrite the following
- // options:
- const options = {
- // Overwrite search to retain trailing ? (issue #776)
- path: parsedURL.pathname + search,
- // The following options are not expressed in the URL
- method: request.method,
- headers: headers[Symbol.for('nodejs.util.inspect.custom')](),
- insecureHTTPParser: request.insecureHTTPParser,
- agent
- };
+"use strict";
- return {
- /** @type {URL} */
- parsedURL,
- options
- };
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
};
-
-;// CONCATENATED MODULE: ./node_modules/node-fetch/src/errors/abort-error.js
-
-
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.run = void 0;
+const core = __importStar(__nccwpck_require__(2186));
+const github_1 = __nccwpck_require__(5438);
+const badge_1 = __nccwpck_require__(3128);
+const comment_1 = __nccwpck_require__(7810);
+const config_1 = __nccwpck_require__(6373);
+const finder_1 = __nccwpck_require__(6932);
+const git_1 = __nccwpck_require__(6350);
+const lcov_1 = __nccwpck_require__(4888);
+const message_1 = __nccwpck_require__(7899);
+const semaphor_1 = __nccwpck_require__(3624);
/**
- * AbortError interface for cancelled requests
+ * The main function for the action.
*/
-class AbortError extends FetchBaseError {
- constructor(message, type = 'aborted') {
- super(message, type);
- }
+async function run() {
+ try {
+ core.info(`Finding projects...`);
+ const projects = await (0, finder_1.findProjects)(null);
+ core.info(`Found ${projects.length} projects`);
+ core.info(`Parsing coverage...`);
+ let projectsWithCoverage = await Promise.all(projects.map(lcov_1.coverProject));
+ if (config_1.Config.compareAgainstBase && github_1.context.payload.pull_request) {
+ try {
+ await (0, git_1.stashChanges)();
+ await (0, git_1.checkoutRef)(github_1.context.payload.pull_request.base.ref);
+ projectsWithCoverage = await Promise.all(projectsWithCoverage.map(lcov_1.parseLcovBefore));
+ await (0, git_1.checkoutRef)(github_1.context.payload.pull_request.head.ref);
+ await (0, git_1.popStash)();
+ }
+ catch (error) {
+ core.warning(`Failed to checkout base ref due to ${error}.`);
+ }
+ }
+ /// Projects that actually have coverage
+ const coveredProjects = projectsWithCoverage.filter(p => p.coverage?.length);
+ core.info(`${coveredProjects.filter(p => p.coverage).length} projects covered.`);
+ // If we are in a Push event and generateBadges is true, generate badges
+ if (config_1.Config.generateBadges && github_1.context.eventName === 'push') {
+ try {
+ core.info(`Configuring git...`);
+ await (0, git_1.configureGit)();
+ core.info('Updating and pushing coverage badge...');
+ await (0, badge_1.generateBadges)(coveredProjects);
+ await (0, git_1.commitAndPushChanges)('chore: coverage badges [skip ci]');
+ }
+ catch (error) {
+ core.warning(`Failed to commit and push coverage badge due to ${error}.`);
+ }
+ }
+ const pr = github_1.context.payload.pull_request;
+ if (pr) {
+ core.info(`Building message...`);
+ const message = (0, message_1.buildMessage)(coveredProjects, pr.html_url ?? '', pr.head.sha);
+ core.setOutput('message', message);
+ try {
+ await comment(message);
+ }
+ catch (error) {
+ core.warning(`Failed to comment due to ${error}.`);
+ }
+ }
+ const coverageThresholdMet = (0, semaphor_1.verifyCoverageThreshold)(coveredProjects);
+ const noDecreaseMet = (0, semaphor_1.verifyNoCoverageDecrease)(coveredProjects);
+ if (!coverageThresholdMet || !noDecreaseMet) {
+ core.setFailed('Configured conditions were not met.');
+ }
+ }
+ catch (error) {
+ // Fail the workflow run if an error occurs
+ if (error instanceof Error)
+ core.setFailed(error.message);
+ }
+}
+exports.run = run;
+async function comment(body) {
+ const octokit = (0, github_1.getOctokit)(config_1.Config.githubToken);
+ const header = 'dart-coverage-assistant';
+ if (github_1.context.payload.pull_request) {
+ const previous = await (0, comment_1.findPreviousComment)(octokit, github_1.context.repo, github_1.context.payload.pull_request?.number, header);
+ if (previous) {
+ core.info(`Updating previous comment #${previous.id}`);
+ await (0, comment_1.updateComment)(octokit, previous.id, body, header);
+ }
+ else {
+ core.info(`Writing a new comment`);
+ await (0, comment_1.createComment)(octokit, github_1.context.repo, github_1.context.payload.pull_request?.number, body, header);
+ }
+ }
+ else {
+ core.info(`Not a pull request, skipping comment`);
+ }
}
-// EXTERNAL MODULE: ./node_modules/fetch-blob/from.js + 2 modules
-var from = __nccwpck_require__(2777);
-;// CONCATENATED MODULE: ./node_modules/node-fetch/src/index.js
-/**
- * Index.js
- *
- * a request API compatible with window.fetch
- *
- * All spec algorithm step numbers are based on https://fetch.spec.whatwg.org/commit-snapshots/ae716822cb3a61843226cd090eefc6589446c1d2/.
- */
-
-
-
-
-
-
+/***/ }),
+/***/ 7899:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+"use strict";
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.buildMessage = void 0;
+const lcov_1 = __nccwpck_require__(4888);
+const config_1 = __nccwpck_require__(6373);
+const badge_1 = __nccwpck_require__(3128);
+function buildMessage(projects, url, sha) {
+ if (projects.length === 0) {
+ return '';
+ }
+ const shaShort = sha.slice(0, 8);
+ const commit = `[${shaShort}](${url}/commits/${sha})`;
+ let md = '';
+ md += '# Coverage Report\n';
+ md += `Automatic coverage report for ${commit}.\n`;
+ md += '\n';
+ md += `> Generated by [dart-coverage-assistant](https://github.com/whynotmake-it/dart-coverage-assistant)\n`;
+ md += '\n';
+ if (projects.length > 1) {
+ md += buildTotalTable(projects) + '\n';
+ }
+ for (const project of projects) {
+ md += buildTable(project) + '\n';
+ }
+ md += '\n';
+ return md;
+}
+exports.buildMessage = buildMessage;
+function buildTotalTable(projects) {
+ let md = '';
+ const combined = (0, lcov_1.getTotalPercentage)(projects);
+ if (combined !== undefined) {
+ const badge = buildBadge(config_1.Config.upperCoverageThreshold, config_1.Config.lowerCoverageThreshold, combined);
+ md += `## Total coverage: \n`;
+ md += '\n';
+ md += `| Coverage | Diff |\n`;
+ md += `| --- | --- |\n`;
+ md += `| ${badge} | ${buildDiffString(getTotalDiff(projects))} |\n`;
+ }
+ return md;
+}
+function buildTable(project) {
+ let md = '';
+ md += buildHeader(project) + '\n';
+ md += '\n';
+ md += buildBody(project) + '\n';
+ return md;
+}
+function buildHeader(project) {
+ const percentage = project.coverage
+ ? (0, lcov_1.getProjectPercentage)(project)
+ : undefined;
+ const diff = buildDiffString(getDiff(project));
+ const badgeCell = percentage
+ ? `${buildBadge(config_1.Config.upperCoverageThreshold, config_1.Config.lowerCoverageThreshold, percentage)}`
+ : '';
+ let md = `## \`${project.name}\`\n`;
+ md += '\n';
+ md += `> ${project.description}\n`;
+ md += '\n';
+ md += '| Coverage | Diff |\n';
+ md += '| --- | --- |\n';
+ md += `| ${badgeCell} | ${diff} |\n`;
+ return md;
+}
+function buildBody(project) {
+ if (project.coverage === undefined) {
+ return '';
+ }
+ let tableMd = '';
+ tableMd += '| File | Line Percentage | Line Count |\n';
+ tableMd += '| --- | --- | --- |\n';
+ const folders = {};
+ // Group files by folder
+ for (const file of project.coverage) {
+ const parts = file.file.split('/');
+ const folder = parts.slice(0, -1).join('/');
+ folders[folder] = folders[folder] || [];
+ folders[folder].push(file);
+ }
+ // Add all folders to the table
+ for (const folder of Object.keys(folders).sort()) {
+ tableMd += `| **${folder}** | | |\n`;
+ for (const file of folders[folder]) {
+ const name = file.file.split('/').slice(-1)[0];
+ tableMd += `| ${name} | ${(0, lcov_1.getLcovPercentage)([file])} | ${file.lines.details.length} |\n`;
+ }
+ }
+ let md = '<details>\n';
+ md += `<summary>Coverage Details for ${project.name}</summary>
\n`;
+ md += '\n';
+ md += tableMd;
+ md += '\n';
+ md += '</details>';
+ return md;
+}
+function buildDiffString(diff) {
+ if (diff === undefined) {
+ return '-';
+ }
+ if (diff === 0) {
+ return `➡️ ${diff.toFixed(2)}%`;
+ }
+ else if (diff > 0) {
+ return `⬆️ +${diff.toFixed(2)}%`;
+ }
+ else {
+ return `⬇️ ${diff.toFixed(2)}%`;
+ }
+}
+function getDiff(project) {
+ if (project.coverageBefore === undefined || !project.coverage) {
+ return undefined;
+ }
+ const current = (0, lcov_1.getLcovPercentage)(project.coverage);
+ const before = project.coverageBefore === null
+ ? 0
+ : (0, lcov_1.getLcovPercentage)(project.coverageBefore);
+ return current - before;
+}
+function getTotalDiff(projects) {
+ const current = (0, lcov_1.getTotalPercentage)(projects);
+ const before = (0, lcov_1.getTotalPercentageBefore)(projects);
+ if (current === undefined || before === undefined) {
+ return undefined;
+ }
+ return current - before;
+}
+function buildBadge(upper, lower, percentage) {
+ const alt = percentage >= upper ? 'pass' : percentage >= lower ? 'warning' : 'fail';
+ const url = (0, badge_1.buildBadgeUrl)(undefined, upper, lower, percentage);
+ return `<img alt="${alt}" src="${url}" />`;
+}
+/***/ }),
+/***/ 3624:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.verifyNoCoverageDecrease = exports.verifyCoverageThreshold = void 0;
+const config_1 = __nccwpck_require__(6373);
+const lcov_1 = __nccwpck_require__(4888);
+const core = __importStar(__nccwpck_require__(2186));
+function verifyCoverageThreshold(projects) {
+ if (config_1.Config.enforceThreshold === 'none') {
+ return true;
+ }
+ else if (config_1.Config.enforceThreshold === 'single') {
+ const failedProjects = projects.filter(p => {
+ const percentage = (0, lcov_1.getProjectPercentage)(p);
+ return (percentage === undefined || percentage < config_1.Config.lowerCoverageThreshold);
+ });
+ if (failedProjects.length) {
+ core.error(`Coverage threshold (${config_1.Config.lowerCoverageThreshold}%) not met for ${failedProjects.length} projects:`);
+ for (const failed of failedProjects) {
+ core.error(`${failed.name} - ${(0, lcov_1.getProjectPercentage)(failed)}%`);
+ }
+ return false;
+ }
+ }
+ else if (config_1.Config.enforceThreshold === 'total') {
+ const percentage = (0, lcov_1.getTotalPercentage)(projects);
+ if (percentage === undefined) {
+ core.error(`Total coverage threshold of ${config_1.Config.lowerCoverageThreshold}% not met.`);
+ return false;
+ }
+ else if (percentage < config_1.Config.lowerCoverageThreshold) {
+ core.error(`Total coverage of ${percentage.toFixed(2)}% does not meet threshold of ${config_1.Config.lowerCoverageThreshold}%`);
+ return false;
+ }
+ }
+ return true;
+}
+exports.verifyCoverageThreshold = verifyCoverageThreshold;
+function verifyNoCoverageDecrease(projects) {
+ projects;
+ if (config_1.Config.enforceForbiddenDecrease === 'none') {
+ return true;
+ }
+ else if (config_1.Config.enforceForbiddenDecrease === 'single') {
+ const failedProjects = projects.filter(p => {
+ const percentage = (0, lcov_1.getProjectPercentage)(p);
+ const before = (0, lcov_1.getProjectPercentageBefore)(p);
+ return (percentage !== undefined && before !== undefined && percentage < before);
+ });
+ if (failedProjects.length) {
+ core.error(`Coverage decrease detected for ${failedProjects.length} projects:`);
+ for (const failed of failedProjects) {
+ core.error(`${failed.name} - ${(0, lcov_1.getProjectPercentageBefore)(failed)?.toFixed()}% -> ${(0, lcov_1.getProjectPercentage)(failed)?.toFixed(2)}%`);
+ }
+ return false;
+ }
+ }
+ else if (config_1.Config.enforceForbiddenDecrease === 'total') {
+ const total = (0, lcov_1.getTotalPercentage)(projects);
+ const totalBefore = (0, lcov_1.getTotalPercentageBefore)(projects);
+ if (total !== undefined &&
+ totalBefore !== undefined &&
+ total < totalBefore) {
+ core.error(`Total coverage decreased by ${(totalBefore - total).toFixed(2)}%`);
+ return false;
+ }
+ }
+ return true;
+}
+exports.verifyNoCoverageDecrease = verifyNoCoverageDecrease;
+/***/ }),
+/***/ 2877:
+/***/ ((module) => {
+module.exports = eval("require")("encoding");
+/***/ }),
+/***/ 9491:
+/***/ ((module) => {
+"use strict";
+module.exports = require("assert");
-const supportedSchemas = new Set(['data:', 'http:', 'https:']);
+/***/ }),
-/**
- * Fetch function
- *
- * @param {string | URL | import('./request').default} url - Absolute url or Request instance
- * @param {*} [options_] - Fetch options
- * @return {Promise<Response>}
- */
-async function fetch(url, options_) {
- return new Promise((resolve, reject) => {
- // Build request object
- const request = new Request(url, options_);
- const {parsedURL, options} = getNodeRequestOptions(request);
- if (!supportedSchemas.has(parsedURL.protocol)) {
- throw new TypeError(`node-fetch cannot load ${url}. URL scheme "${parsedURL.protocol.replace(/:$/, '')}" is not supported.`);
- }
+/***/ 2081:
+/***/ ((module) => {
- if (parsedURL.protocol === 'data:') {
- const data = dist(request.url);
- const response = new Response(data, {headers: {'Content-Type': data.typeFull}});
- resolve(response);
- return;
- }
+"use strict";
+module.exports = require("child_process");
- // Wrap http.request into fetch
- const send = (parsedURL.protocol === 'https:' ? external_node_https_namespaceObject : external_node_http_namespaceObject).request;
- const {signal} = request;
- let response = null;
+/***/ }),
- const abort = () => {
- const error = new AbortError('The operation was aborted.');
- reject(error);
- if (request.body && request.body instanceof external_node_stream_namespaceObject.Readable) {
- request.body.destroy(error);
- }
+/***/ 6113:
+/***/ ((module) => {
- if (!response || !response.body) {
- return;
- }
+"use strict";
+module.exports = require("crypto");
- response.body.emit('error', error);
- };
+/***/ }),
- if (signal && signal.aborted) {
- abort();
- return;
- }
+/***/ 2361:
+/***/ ((module) => {
- const abortAndFinalize = () => {
- abort();
- finalize();
- };
+"use strict";
+module.exports = require("events");
- // Send request
- const request_ = send(parsedURL.toString(), options);
+/***/ }),
- if (signal) {
- signal.addEventListener('abort', abortAndFinalize);
- }
+/***/ 7147:
+/***/ ((module) => {
- const finalize = () => {
- request_.abort();
- if (signal) {
- signal.removeEventListener('abort', abortAndFinalize);
- }
- };
+"use strict";
+module.exports = require("fs");
- request_.on('error', error => {
- reject(new FetchError(`request to ${request.url} failed, reason: ${error.message}`, 'system', error));
- finalize();
- });
+/***/ }),
- fixResponseChunkedTransferBadEnding(request_, error => {
- if (response && response.body) {
- response.body.destroy(error);
- }
- });
+/***/ 3685:
+/***/ ((module) => {
- /* c8 ignore next 18 */
- if (process.version < 'v14') {
- // Before Node.js 14, pipeline() does not fully support async iterators and does not always
- // properly handle when the socket close/end events are out of order.
- request_.on('socket', s => {
- let endedWithEventsCount;
- s.prependListener('end', () => {
- endedWithEventsCount = s._eventsCount;
- });
- s.prependListener('close', hadError => {
- // if end happened before close but the socket didn't emit an error, do it now
- if (response && endedWithEventsCount < s._eventsCount && !hadError) {
- const error = new Error('Premature close');
- error.code = 'ERR_STREAM_PREMATURE_CLOSE';
- response.body.emit('error', error);
- }
- });
- });
- }
+"use strict";
+module.exports = require("http");
- request_.on('response', response_ => {
- request_.setTimeout(0);
- const headers = fromRawHeaders(response_.rawHeaders);
+/***/ }),
- // HTTP fetch step 5
- if (isRedirect(response_.statusCode)) {
- // HTTP fetch step 5.2
- const location = headers.get('Location');
+/***/ 5687:
+/***/ ((module) => {
- // HTTP fetch step 5.3
- let locationURL = null;
- try {
- locationURL = location === null ? null : new URL(location, request.url);
- } catch {
- // error here can only be invalid URL in Location: header
- // do not throw when options.redirect == manual
- // let the user extract the errorneous redirect URL
- if (request.redirect !== 'manual') {
- reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect'));
- finalize();
- return;
- }
- }
+"use strict";
+module.exports = require("https");
- // HTTP fetch step 5.5
- switch (request.redirect) {
- case 'error':
- reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect'));
- finalize();
- return;
- case 'manual':
- // Nothing to do
- break;
- case 'follow': {
- // HTTP-redirect fetch step 2
- if (locationURL === null) {
- break;
- }
+/***/ }),
- // HTTP-redirect fetch step 5
- if (request.counter >= request.follow) {
- reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));
- finalize();
- return;
- }
+/***/ 1808:
+/***/ ((module) => {
- // HTTP-redirect fetch step 6 (counter increment)
- // Create a new Request object.
- const requestOptions = {
- headers: new Headers(request.headers),
- follow: request.follow,
- counter: request.counter + 1,
- agent: request.agent,
- compress: request.compress,
- method: request.method,
- body: clone(request),
- signal: request.signal,
- size: request.size,
- referrer: request.referrer,
- referrerPolicy: request.referrerPolicy
- };
+"use strict";
+module.exports = require("net");
- // when forwarding sensitive headers like "Authorization",
- // "WWW-Authenticate", and "Cookie" to untrusted targets,
- // headers will be ignored when following a redirect to a domain
- // that is not a subdomain match or exact match of the initial domain.
- // For example, a redirect from "foo.com" to either "foo.com" or "sub.foo.com"
- // will forward the sensitive headers, but a redirect to "bar.com" will not.
- // headers will also be ignored when following a redirect to a domain using
- // a different protocol. For example, a redirect from "https://foo.com" to "http://foo.com"
- // will not forward the sensitive headers
- if (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) {
- for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) {
- requestOptions.headers.delete(name);
- }
- }
+/***/ }),
- // HTTP-redirect fetch step 9
- if (response_.statusCode !== 303 && request.body && options_.body instanceof external_node_stream_namespaceObject.Readable) {
- reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));
- finalize();
- return;
- }
+/***/ 2037:
+/***/ ((module) => {
- // HTTP-redirect fetch step 11
- if (response_.statusCode === 303 || ((response_.statusCode === 301 || response_.statusCode === 302) && request.method === 'POST')) {
- requestOptions.method = 'GET';
- requestOptions.body = undefined;
- requestOptions.headers.delete('content-length');
- }
+"use strict";
+module.exports = require("os");
- // HTTP-redirect fetch step 14
- const responseReferrerPolicy = parseReferrerPolicyFromHeader(headers);
- if (responseReferrerPolicy) {
- requestOptions.referrerPolicy = responseReferrerPolicy;
- }
+/***/ }),
- // HTTP-redirect fetch step 15
- resolve(fetch(new Request(locationURL, requestOptions)));
- finalize();
- return;
- }
+/***/ 1017:
+/***/ ((module) => {
- default:
- return reject(new TypeError(`Redirect option '${request.redirect}' is not a valid value of RequestRedirect`));
- }
- }
+"use strict";
+module.exports = require("path");
- // Prepare response
- if (signal) {
- response_.once('end', () => {
- signal.removeEventListener('abort', abortAndFinalize);
- });
- }
+/***/ }),
- let body = (0,external_node_stream_namespaceObject.pipeline)(response_, new external_node_stream_namespaceObject.PassThrough(), error => {
- if (error) {
- reject(error);
- }
- });
- // see https://github.com/nodejs/node/pull/29376
- /* c8 ignore next 3 */
- if (process.version < 'v12.10') {
- response_.on('aborted', abortAndFinalize);
- }
+/***/ 5477:
+/***/ ((module) => {
- const responseOptions = {
- url: request.url,
- status: response_.statusCode,
- statusText: response_.statusMessage,
- headers,
- size: request.size,
- counter: request.counter,
- highWaterMark: request.highWaterMark
- };
+"use strict";
+module.exports = require("punycode");
- // HTTP-network fetch step 12.1.1.3
- const codings = headers.get('Content-Encoding');
+/***/ }),
- // HTTP-network fetch step 12.1.1.4: handle content codings
+/***/ 2781:
+/***/ ((module) => {
- // in following scenarios we ignore compression support
- // 1. compression support is disabled
- // 2. HEAD request
- // 3. no Content-Encoding header
- // 4. no content response (204)
- // 5. content not modified response (304)
- if (!request.compress || request.method === 'HEAD' || codings === null || response_.statusCode === 204 || response_.statusCode === 304) {
- response = new Response(body, responseOptions);
- resolve(response);
- return;
- }
+"use strict";
+module.exports = require("stream");
- // For Node v6+
- // Be less strict when decoding compressed responses, since sometimes
- // servers send slightly invalid responses that are still accepted
- // by common browsers.
- // Always using Z_SYNC_FLUSH is what cURL does.
- const zlibOptions = {
- flush: external_node_zlib_namespaceObject.Z_SYNC_FLUSH,
- finishFlush: external_node_zlib_namespaceObject.Z_SYNC_FLUSH
- };
+/***/ }),
- // For gzip
- if (codings === 'gzip' || codings === 'x-gzip') {
- body = (0,external_node_stream_namespaceObject.pipeline)(body, external_node_zlib_namespaceObject.createGunzip(zlibOptions), error => {
- if (error) {
- reject(error);
- }
- });
- response = new Response(body, responseOptions);
- resolve(response);
- return;
- }
+/***/ 1576:
+/***/ ((module) => {
- // For deflate
- if (codings === 'deflate' || codings === 'x-deflate') {
- // Handle the infamous raw deflate response from old servers
- // a hack for old IIS and Apache servers
- const raw = (0,external_node_stream_namespaceObject.pipeline)(response_, new external_node_stream_namespaceObject.PassThrough(), error => {
- if (error) {
- reject(error);
- }
- });
- raw.once('data', chunk => {
- // See http://stackoverflow.com/questions/37519828
- if ((chunk[0] & 0x0F) === 0x08) {
- body = (0,external_node_stream_namespaceObject.pipeline)(body, external_node_zlib_namespaceObject.createInflate(), error => {
- if (error) {
- reject(error);
- }
- });
- } else {
- body = (0,external_node_stream_namespaceObject.pipeline)(body, external_node_zlib_namespaceObject.createInflateRaw(), error => {
- if (error) {
- reject(error);
- }
- });
- }
+"use strict";
+module.exports = require("string_decoder");
- response = new Response(body, responseOptions);
- resolve(response);
- });
- raw.once('end', () => {
- // Some old IIS servers return zero-length OK deflate responses, so
- // 'data' is never emitted. See https://github.com/node-fetch/node-fetch/pull/903
- if (!response) {
- response = new Response(body, responseOptions);
- resolve(response);
- }
- });
- return;
- }
+/***/ }),
- // For br
- if (codings === 'br') {
- body = (0,external_node_stream_namespaceObject.pipeline)(body, external_node_zlib_namespaceObject.createBrotliDecompress(), error => {
- if (error) {
- reject(error);
- }
- });
- response = new Response(body, responseOptions);
- resolve(response);
- return;
- }
+/***/ 9512:
+/***/ ((module) => {
- // Otherwise, use response as-is
- response = new Response(body, responseOptions);
- resolve(response);
- });
+"use strict";
+module.exports = require("timers");
- // eslint-disable-next-line promise/prefer-await-to-then
- writeToStream(request_, request).catch(reject);
- });
-}
+/***/ }),
-function fixResponseChunkedTransferBadEnding(request, errorCallback) {
- const LAST_CHUNK = external_node_buffer_namespaceObject.Buffer.from('0\r\n\r\n');
+/***/ 4404:
+/***/ ((module) => {
- let isChunkedTransfer = false;
- let properLastChunkReceived = false;
- let previousChunk;
+"use strict";
+module.exports = require("tls");
- request.on('response', response => {
- const {headers} = response;
- isChunkedTransfer = headers['transfer-encoding'] === 'chunked' && !headers['content-length'];
- });
+/***/ }),
- request.on('socket', socket => {
- const onSocketClose = () => {
- if (isChunkedTransfer && !properLastChunkReceived) {
- const error = new Error('Premature close');
- error.code = 'ERR_STREAM_PREMATURE_CLOSE';
- errorCallback(error);
- }
- };
+/***/ 7310:
+/***/ ((module) => {
- const onData = buf => {
- properLastChunkReceived = external_node_buffer_namespaceObject.Buffer.compare(buf.slice(-5), LAST_CHUNK) === 0;
+"use strict";
+module.exports = require("url");
- // Sometimes final 0-length chunk and end of message code are in separate packets
- if (!properLastChunkReceived && previousChunk) {
- properLastChunkReceived = (
- external_node_buffer_namespaceObject.Buffer.compare(previousChunk.slice(-3), LAST_CHUNK.slice(0, 3)) === 0 &&
- external_node_buffer_namespaceObject.Buffer.compare(buf.slice(-2), LAST_CHUNK.slice(3)) === 0
- );
- }
+/***/ }),
- previousChunk = buf;
- };
+/***/ 3837:
+/***/ ((module) => {
+
+"use strict";
+module.exports = require("util");
- socket.prependListener('close', onSocketClose);
- socket.on('data', onData);
+/***/ }),
- request.on('close', () => {
- socket.removeListener('close', onSocketClose);
- socket.removeListener('data', onData);
- });
- });
-}
+/***/ 9796:
+/***/ ((module) => {
+"use strict";
+module.exports = require("zlib");
/***/ }),
@@ -21573,106 +14562,11 @@ module.exports = JSON.parse('[[[0,44],"disallowed_STD3_valid"],[[45,46],"valid"]
/******/ return module.exports;
/******/ }
/******/
-/******/ // expose the modules object (__webpack_modules__)
-/******/ __nccwpck_require__.m = __webpack_modules__;
-/******/
/************************************************************************/
-/******/ /* webpack/runtime/define property getters */
-/******/ (() => {
-/******/ // define getter functions for harmony exports
-/******/ __nccwpck_require__.d = (exports, definition) => {
-/******/ for(var key in definition) {
-/******/ if(__nccwpck_require__.o(definition, key) && !__nccwpck_require__.o(exports, key)) {
-/******/ Object.defineProperty(exports, key, { enumerable: true, get: definition[key] });
-/******/ }
-/******/ }
-/******/ };
-/******/ })();
-/******/
-/******/ /* webpack/runtime/ensure chunk */
-/******/ (() => {
-/******/ __nccwpck_require__.f = {};
-/******/ // This file contains only the entry chunk.
-/******/ // The chunk loading function for additional chunks
-/******/ __nccwpck_require__.e = (chunkId) => {
-/******/ return Promise.all(Object.keys(__nccwpck_require__.f).reduce((promises, key) => {
-/******/ __nccwpck_require__.f[key](chunkId, promises);
-/******/ return promises;
-/******/ }, []));
-/******/ };
-/******/ })();
-/******/
-/******/ /* webpack/runtime/get javascript chunk filename */
-/******/ (() => {
-/******/ // This function allow to reference async chunks
-/******/ __nccwpck_require__.u = (chunkId) => {
-/******/ // return url for filenames based on template
-/******/ return "" + chunkId + ".index.js";
-/******/ };
-/******/ })();
-/******/
-/******/ /* webpack/runtime/hasOwnProperty shorthand */
-/******/ (() => {
-/******/ __nccwpck_require__.o = (obj, prop) => (Object.prototype.hasOwnProperty.call(obj, prop))
-/******/ })();
-/******/
-/******/ /* webpack/runtime/make namespace object */
-/******/ (() => {
-/******/ // define __esModule on exports
-/******/ __nccwpck_require__.r = (exports) => {
-/******/ if(typeof Symbol !== 'undefined' && Symbol.toStringTag) {
-/******/ Object.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });
-/******/ }
-/******/ Object.defineProperty(exports, '__esModule', { value: true });
-/******/ };
-/******/ })();
-/******/
/******/ /* webpack/runtime/compat */
/******/
/******/ if (typeof __nccwpck_require__ !== 'undefined') __nccwpck_require__.ab = __dirname + "/";
/******/
-/******/ /* webpack/runtime/require chunk loading */
-/******/ (() => {
-/******/ // no baseURI
-/******/
-/******/ // object to store loaded chunks
-/******/ // "1" means "loaded", otherwise not loaded yet
-/******/ var installedChunks = {
-/******/ 179: 1
-/******/ };
-/******/
-/******/ // no on chunks loaded
-/******/
-/******/ var installChunk = (chunk) => {
-/******/ var moreModules = chunk.modules, chunkIds = chunk.ids, runtime = chunk.runtime;
-/******/ for(var moduleId in moreModules) {
-/******/ if(__nccwpck_require__.o(moreModules, moduleId)) {
-/******/ __nccwpck_require__.m[moduleId] = moreModules[moduleId];
-/******/ }
-/******/ }
-/******/ if(runtime) runtime(__nccwpck_require__);
-/******/ for(var i = 0; i < chunkIds.length; i++)
-/******/ installedChunks[chunkIds[i]] = 1;
-/******/
-/******/ };
-/******/
-/******/ // require() chunk loading for javascript
-/******/ __nccwpck_require__.f.require = (chunkId, promises) => {
-/******/ // "1" is the signal for "already loaded"
-/******/ if(!installedChunks[chunkId]) {
-/******/ if(true) { // all chunks have JS
-/******/ installChunk(require("./" + __nccwpck_require__.u(chunkId)));
-/******/ } else installedChunks[chunkId] = 1;
-/******/ }
-/******/ };
-/******/
-/******/ // no external install chunk
-/******/
-/******/ // no HMR
-/******/
-/******/ // no HMR manifest
-/******/ })();
-/******/
/************************************************************************/
var __webpack_exports__ = {};
// This entry need to be wrapped in an IIFE because it need to be in strict mode.
diff --git a/dist/licenses.txt b/dist/licenses.txt
index f3a7fbd..9db6c19 100644
--- a/dist/licenses.txt
+++ b/dist/licenses.txt
@@ -1,3 +1,639 @@
+@actions/core
+MIT
+The MIT License (MIT)
+
+Copyright 2019 GitHub
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+@actions/exec
+MIT
+The MIT License (MIT)
+
+Copyright 2019 GitHub
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+@actions/github
+MIT
+The MIT License (MIT)
+
+Copyright 2019 GitHub
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+@actions/glob
+MIT
+The MIT License (MIT)
+
+Copyright 2019 GitHub
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+@actions/http-client
+MIT
+Actions Http Client for Node.js
+
+Copyright (c) GitHub, Inc.
+
+All rights reserved.
+
+MIT License
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
+associated documentation files (the "Software"), to deal in the Software without restriction,
+including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
+and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
+LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+
+@actions/io
+MIT
+The MIT License (MIT)
+
+Copyright 2019 GitHub
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+@octokit/auth-token
+MIT
+The MIT License
+
+Copyright (c) 2019 Octokit contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+
+
+@octokit/core
+MIT
+The MIT License
+
+Copyright (c) 2019 Octokit contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+
+
+@octokit/endpoint
+MIT
+The MIT License
+
+Copyright (c) 2018 Octokit contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+
+
+@octokit/graphql
+MIT
+The MIT License
+
+Copyright (c) 2018 Octokit contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+
+
+@octokit/plugin-paginate-rest
+MIT
+MIT License Copyright (c) 2019 Octokit contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice (including the next paragraph) shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+
+@octokit/plugin-rest-endpoint-methods
+MIT
+MIT License Copyright (c) 2019 Octokit contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice (including the next paragraph) shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+
+@octokit/request
+MIT
+The MIT License
+
+Copyright (c) 2018 Octokit contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+
+
+@octokit/request-error
+MIT
+The MIT License
+
+Copyright (c) 2019 Octokit contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+
+
+@vercel/ncc
+MIT
+Copyright 2018 ZEIT, Inc.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+balanced-match
+MIT
+(MIT)
+
+Copyright (c) 2013 Julian Gruber <julian@juliangruber.com>
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is furnished to do
+so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
+
+before-after-hook
+Apache-2.0
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "{}"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright 2018 Gregor Martynus and other contributors.
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+
+brace-expansion
+MIT
+MIT License
+
+Copyright (c) 2013 Julian Gruber <julian@juliangruber.com>
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
+
+concat-map
+MIT
+This software is released under the MIT license:
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+
+deprecation
+ISC
+The ISC License
+
+Copyright (c) Gregor Martynus and contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+
+is-plain-object
+MIT
+The MIT License (MIT)
+
+Copyright (c) 2014-2017, Jon Schlinkert.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+
+
+lcov-parse
+BSD-3-Clause
+Copyright 2012 Yahoo! Inc.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+ * Neither the name of the Yahoo! Inc. nor the
+ names of its contributors may be used to endorse or promote products
+ derived from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL YAHOO! INC. BE LIABLE FOR ANY
+DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+
+minimatch
+ISC
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+
node-fetch
MIT
The MIT License (MIT)
@@ -21,3 +657,135 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
+
+
+
+once
+ISC
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+
+tr46
+MIT
+
+tunnel
+MIT
+The MIT License (MIT)
+
+Copyright (c) 2012 Koichi Kobayashi
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+
+
+universal-user-agent
+ISC
+# [ISC License](https://spdx.org/licenses/ISC)
+
+Copyright (c) 2018, Gregor Martynus (https://github.com/gr2m)
+
+Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+
+uuid
+MIT
+The MIT License (MIT)
+
+Copyright (c) 2010-2020 Robert Kieffer and other contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+
+webidl-conversions
+BSD-2-Clause
+# The BSD 2-Clause License
+
+Copyright (c) 2014, Domenic Denicola
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+whatwg-url
+MIT
+The MIT License (MIT)
+
+Copyright (c) 2015–2016 Sebastian Mayr
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+
+
+wrappy
+ISC
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/package.json b/package.json
index ffba195..4737bf7 100644
--- a/package.json
+++ b/package.json
@@ -31,7 +31,8 @@
"lint": "npx eslint . -c .eslintrc.yaml",
"package": "ncc build src/index.ts --license licenses.txt",
"package:watch": "npm run package -- --watch",
- "test": "(jest && make-coverage-badge --output-path ./badges/coverage.svg) || make-coverage-badge --output-path ./badges/coverage.svg",
+ "test": "jest",
+ "coverage": "(jest && make-coverage-badge --output-path ./badges/coverage.svg) || make-coverage-badge --output-path ./badges/coverage.svg",
"all": "npm run format:write && npm run lint && npm run test && npm run package"
},
"license": "MIT",
diff --git a/src/badge.ts b/src/badge.ts
index 5d15b7b..16ba72e 100644
--- a/src/badge.ts
+++ b/src/badge.ts
@@ -38,7 +38,7 @@ export async function generateBadges(
fs.writeFileSync(`./coverage-total.svg`, svg)
}
-async function buildSvg(
+export async function buildSvg(
name: string,
upper: number,
lower: number,
@@ -47,14 +47,8 @@ async function buildSvg(
// get url
const url = buildBadgeUrl(name, upper, lower, percentage)
- const fetch = await import('node-fetch')
- // fetch url
- const response = await fetch.default(url)
-
- // get svg
- const svg = await response.text()
-
- return svg
+ const res = await fetch(url)
+ return res.text()
}
export function buildBadgeUrl(
@@ -74,5 +68,7 @@ export function buildBadgeUrl(
const firstHalf = name ? `${name}-` : ''
const secondHalf = `${percentage.toFixed(2)}%`
- return `https://img.shields.io/badge/${firstHalf}${secondHalf}-${color}`
+ return encodeURI(
+ `http://img.shields.io/badge/${firstHalf}${secondHalf}-${color}`
+ )
}
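
A minimal, self-contained sketch of what the refactored badge helpers above now do: build a shields.io URL, percent-encode it with encodeURI, and fetch the SVG via the global fetch (available in Node 18+, so the dynamic node-fetch import is no longer needed). The demo-prefixed names are illustrative only, and the color is passed in rather than derived, since the threshold-to-color mapping sits outside the hunk shown here.

```ts
// Illustrative sketch only; mirrors the refactored helpers in src/badge.ts.
// Assumes Node 18+ so `fetch` is available globally (no node-fetch import).

function demoBadgeUrl(
  name: string | undefined,
  percentage: number,
  color: string
): string {
  const firstHalf = name ? `${name}-` : ''
  const secondHalf = `${percentage.toFixed(2)}%`
  // encodeURI turns the literal "%" into "%25", which shields.io expects
  return encodeURI(`http://img.shields.io/badge/${firstHalf}${secondHalf}-${color}`)
}

async function demoFetchSvg(url: string): Promise<string> {
  const res = await fetch(url)
  return res.text()
}

// demoBadgeUrl('coverage', 80.5, 'important')
//   -> "http://img.shields.io/badge/coverage-80.50%25-important"
```
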
diff --git a/src/comment.ts b/src/comment.ts
index 5a075ba..3693821 100644
--- a/src/comment.ts
+++ b/src/comment.ts
@@ -28,7 +28,7 @@ export async function findPreviousComment(
number: number,
header: string
): Promise {
- let after = null
+ let after: string | null | undefined = null
let hasNextPage = true
const h = headerComment(header)
while (hasNextPage) {
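
The comment.ts change types the `after` cursor as `string | null | undefined` because a GraphQL connection's `endCursor` can be null (or absent) on the last page. Below is a library-free sketch of that cursor-pagination shape; `fetchPage` is hypothetical and merely stands in for the GraphQL query issued by findPreviousComment.

```ts
// Generic cursor-pagination sketch; fetchPage is a hypothetical stand-in
// for the real GraphQL request.
interface Page<T> {
  nodes: T[]
  pageInfo: { endCursor: string | null; hasNextPage: boolean }
}

async function findFirstMatch<T>(
  fetchPage: (after: string | null | undefined) => Promise<Page<T>>,
  matches: (item: T) => boolean
): Promise<T | undefined> {
  let after: string | null | undefined = null
  let hasNextPage = true
  while (hasNextPage) {
    const page = await fetchPage(after)
    const found = page.nodes.find(matches)
    if (found) return found
    after = page.pageInfo.endCursor
    hasNextPage = page.pageInfo.hasNextPage
  }
  return undefined
}
```
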
diff --git a/src/message.ts b/src/message.ts
index 4141db0..a785b59 100644
--- a/src/message.ts
+++ b/src/message.ts
@@ -161,5 +161,5 @@ function buildBadge(upper: number, lower: number, percentage: number): string {
const alt =
percentage >= upper ? 'pass' : percentage >= lower ? 'warning' : 'fail'
const url = buildBadgeUrl(undefined, upper, lower, percentage)
- return `} "${alt}")`
+ return ``
}
diff --git a/src/semaphor.ts b/src/semaphor.ts
index af8fab7..a125a7b 100644
--- a/src/semaphor.ts
+++ b/src/semaphor.ts
@@ -20,17 +20,26 @@ export function verifyCoverageThreshold(projects: CoveredProject[]): boolean {
})
if (failedProjects.length) {
core.error(
- `Coverage threshold not met for ${failedProjects.length} projects`
+ `Coverage threshold (${Config.lowerCoverageThreshold}%) not met for ${failedProjects.length} projects:`
)
+ for (const failed of failedProjects) {
+ core.error(`${failed.name} - ${getProjectPercentage(failed)?.toFixed(2)}%`)
+ }
return false
}
} else if (Config.enforceThreshold === 'total') {
const percentage = getTotalPercentage(projects)
- if (
- percentage === undefined ||
- percentage < Config.lowerCoverageThreshold
- ) {
- core.error(`Coverage threshold not met for total coverage`)
+ if (percentage === undefined) {
+ core.error(
+ `Total coverage could not be determined; threshold of ${Config.lowerCoverageThreshold}% not met.`
+ )
+ return false
+ } else if (percentage < Config.lowerCoverageThreshold) {
+ core.error(
+ `Total coverage of ${percentage.toFixed(
+ 2
+ )}% does not meet threshold of ${Config.lowerCoverageThreshold}%`
+ )
return false
}
}
@@ -42,15 +51,24 @@ export function verifyNoCoverageDecrease(projects: CoveredProject[]): boolean {
if (Config.enforceForbiddenDecrease === 'none') {
return true
} else if (Config.enforceForbiddenDecrease === 'single') {
- const failed = projects.filter(p => {
+ const failedProjects = projects.filter(p => {
const percentage = getProjectPercentage(p)
const before = getProjectPercentageBefore(p)
return (
percentage !== undefined && before !== undefined && percentage < before
)
})
- if (failed.length) {
- core.error(`Coverage decrease detected for ${failed.length} projects`)
+ if (failedProjects.length) {
+ core.error(
+ `Coverage decrease detected for ${failedProjects.length} projects:`
+ )
+ for (const failed of failedProjects) {
+ core.error(
+ `${failed.name} - ${getProjectPercentageBefore(
+ failed
+ )?.toFixed(2)}% -> ${getProjectPercentage(failed)?.toFixed(2)}%`
+ )
+ }
return false
}
} else if (Config.enforceForbiddenDecrease === 'total') {