From b6b527db4b7ab0473976301c498b811c1152b4f0 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Rapha=C3=ABl=20Droz?=
Date: Fri, 13 Aug 2021 13:41:03 -0300
Subject: [PATCH 01/10] fix the condition triggering the warning about an
 unexpectedly large stream size

---
 src/FlowChunk.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/FlowChunk.js b/src/FlowChunk.js
index 82e2ffc..4c66ba5 100644
--- a/src/FlowChunk.js
+++ b/src/FlowChunk.js
@@ -327,7 +327,7 @@ export default class FlowChunk {
     }
 
     if (data && data.size > 0) {
-      if (this.flowObj.chunkSize) {
+      if (this.fileObj.chunkSize && data.size > this.fileObj.chunkSize) {
         // This may imply a miscalculation of the total chunk numbers.
         console.warn(`Chunk ${this.offset}: returned too much data. Got ${data.size}. Expected not more than ${this.flowObj.chunkSize}.`);
       }

From 83b6e7b2926f355d4ec2c38eb8be7debf9ffa0a8 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Rapha=C3=ABl=20Droz?=
Date: Fri, 13 Aug 2021 13:46:31 -0300
Subject: [PATCH 02/10] Ability for chunks corresponding to a stream to be
 correctly retried if their upload failed, #346

---
 src/FlowChunk.js | 21 ++++++++++++++++-----
 1 file changed, 16 insertions(+), 5 deletions(-)

diff --git a/src/FlowChunk.js b/src/FlowChunk.js
index 4c66ba5..576c4fe 100644
--- a/src/FlowChunk.js
+++ b/src/FlowChunk.js
@@ -307,12 +307,20 @@ export default class FlowChunk {
 
   async readStreamChunk() {
     if (this.readStreamState.resolved) {
-      // This is normally impossible to reach. Has it been uploaded?
-      console.warn(`Chunk ${this.offset} already read. xhr initialized = ${this.xhr ? 1 : 0}`);
-      // We may want to retry (or not) to upload (but never try to read from the stream again or risk misordered chunks
-      return;
+      // Needing to read the same chunk twice is unlikely but may happen (in case of retrying a failed upload)
+      // If the bytes are still here (this.payload), then retry the upload...
+      if (this.payload && this.pendingRetry) {
+        console.info(`Retrying chunk ${this.offset} upload`);
+        return this.uploadStreamChunk(this.payload);
+      }
+
+      console.warn(`Chunk ${this.offset} already read. xhr initialized = ${this.xhr ? 1 : 0}. payload size = ${this.payload ? this.payload.size : null}. readState = ${this.readState}. retry = ${this.pendingRetry}`);
+      // ... but never try to read that same chunk from the (non-rewindable) stream again or we'd risk
+      // not only misordered chunks but a corrupted file.
+      return null;
     }
 
+    this.readState = 1;
     await this.readStreamGuard();
     var data, asyncRead = this.flowObj.opts.asyncReadFileFn;
@@ -326,6 +334,10 @@ export default class FlowChunk {
       this.readBytes = data.size || data.size === 0 ? data.size : -1;
     }
 
+    return this.uploadStreamChunk(data);
+  }
+
+  async uploadStreamChunk(data) {
     if (data && data.size > 0) {
       if (this.fileObj.chunkSize && data.size > this.fileObj.chunkSize) {
         // This may imply a miscalculation of the total chunk numbers.
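The rule these hunks establish deserves a standalone statement: once a streamed
chunk has been read, its bytes may be uploaded again from the cached
this.payload, but the stream itself must never be consulted a second time. A
minimal sketch of that contract (illustrative names only, not the actual
FlowChunk API):

    class StreamedChunkSketch {
      constructor(readNext) {
        this.readNext = readNext;   // async () => Blob, pulls the next bytes off a forward-only stream
        this.resolved = false;      // true once the stream was consumed for this chunk
        this.payload = null;        // bytes cached by the first (and only) read
        this.pendingRetry = false;  // set by the caller when the previous upload failed
      }

      async send(upload) {
        if (this.resolved) {
          if (this.payload && this.pendingRetry) {
            // Safe: upload the cached bytes again; the stream stays untouched.
            return upload(this.payload);
          }
          // Unsafe: a second read() would return *later* bytes and corrupt the file.
          return null;
        }
        this.resolved = true;
        this.payload = await this.readNext();
        return upload(this.payload);
      }
    }

A failed XHR only flips pendingRetry and calls send() again: retries repeat the
upload, never the read, which is what keeps chunk ordering intact on a
non-rewindable stream.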
@@ -377,7 +389,6 @@ export default class FlowChunk {
     }
 
     if (asyncRead) {
-      this.readState = 1;
       await this.readStreamChunk();
       return;
     }

From 21f68ba00885817e6cc5611dcb3b5c08482ca32a Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Rapha=C3=ABl=20Droz?=
Date: Fri, 13 Aug 2021 13:48:32 -0300
Subject: [PATCH 03/10] tests: Improved the helpers related to file
 consistency and xhr final state

---
 test/asyncSpec.js  |  1 +
 test/helpers.js    | 37 ++++++++++++++++++++++++++-----------
 test/uploadSpec.js |  3 ++-
 3 files changed, 29 insertions(+), 12 deletions(-)

diff --git a/test/asyncSpec.js b/test/asyncSpec.js
index 7f87679..115770e 100644
--- a/test/asyncSpec.js
+++ b/test/asyncSpec.js
@@ -129,6 +129,7 @@ describe('upload stream', function() {
     flow.on('file-error', jasmine.createSpy('error'));
     flow.on('file-success', jasmine.createSpy('success'));
     flow.on('complete', () => {
+      validateStatus({flow, content_length: content.length, requests: xhr_server.requests}, flow.files[0]);
       validatePayload(done, content, {orig_hash, flow, requests: xhr_server.requests});
     });

diff --git a/test/helpers.js b/test/helpers.js
index 4b8d28c..ec526af 100644
--- a/test/helpers.js
+++ b/test/helpers.js
@@ -34,6 +34,29 @@ function uploadProgress(file) {
   return {readStates, chunkCount, completion};
 }
 
+/**
+ * Validate the number of requests issued by Flow and the final state of the uploaded file.
+ *
+ * @param args Contains `flow` and either `content_length` or `request_number`
+ */
+function validateStatus(args, file) {
+  let {
+    requests: _requests = (typeof xhr_server !== 'undefined' ? xhr_server.requests : null),
+    flow: _flow = (typeof flow !== 'undefined' ? flow : null)
+  } = args;
+
+  if (!_flow && !args.request_number) {
+    console.warn("Called validateStatus with no flow instance");
+  }
+  var predicted_request_number = args.request_number || Math.ceil(args.content_length / _flow.opts.chunkSize);
+  expect(_requests.length).toBe(predicted_request_number);
+  if (file) {
+    expect(file.progress()).toBe(1);
+    expect(file.isUploading()).toBe(false);
+    expect(file.isComplete()).toBe(true);
+  }
+}
+
 /**
  * Validate whether a generated file is successfully reconstructed from Flow XHR.
  *
  * @param content File original content.
  * @param orig_hash (Optional) File original hash. If not provided, it will be computed from content.
  */
@@ -45,23 +68,15 @@ async function validatePayload(done, content, args) {
   let {
     orig_hash = null,
     requests: _requests = (typeof xhr_server !== 'undefined' ? xhr_server.requests : null),
-    flow: _flow = (typeof flow !== 'undefined' ? flow : null)
   } = args;
 
-  if (!_flow || !_requests) {
-    console.warn("Called validatePayload with no array requests");
+  if (!_requests) {
+    console.warn("Called validatePayload with no requests array");
     done();
   }
 
-  var predicted_request_number = Math.ceil(content.length / _flow.opts.chunkSize);
-  expect(_requests.length).toBe(predicted_request_number);
-  var file = _flow.files[0];
-  expect(file.progress()).toBe(1);
-  expect(file.isUploading()).toBe(false);
-  expect(file.isComplete()).toBe(true);
-
   // An array of promises of obtaining the corresponding request's body (= payload)
-  var payload_contents = _requests.map(x => x.requestBody.get('file').text());
+  var payload_contents = _requests.map(x => [0, 200, 201].includes(x.status) ? x.requestBody.get('file').text() : '');
   orig_hash = orig_hash || hex(await hash(content));
   Promise.all(payload_contents)
     .then(values => hash(values.join('')))
     .then(hash => hex(hash))
     .then(hexhash => {
       // console.log(orig_hash, hexhash);
       expect(hexhash).toBe(orig_hash);
       done();
     });

diff --git a/test/uploadSpec.js b/test/uploadSpec.js
index b6a6927..8086419 100644
--- a/test/uploadSpec.js
+++ b/test/uploadSpec.js
@@ -595,7 +595,8 @@ describe('upload file', function() {
     expect(customFunction).toHaveBeenCalledTimes(1);
 
     flow.on('complete', async () => {
-      await validatePayload(done, content, {requests: xhr.requests, flow});
+      validateStatus({flow, content_length: content.length, requests: xhr.requests}, flow.files[0]);
+      await validatePayload(done, content, {requests: xhr.requests});
     });
 
     xhr.respondWith('ok');

From 69add564f03426b92ecba9003d9579f9e44aa7f8 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Rapha=C3=ABl=20Droz?=
Date: Fri, 13 Aug 2021 13:49:37 -0300
Subject: [PATCH 04/10] tests: Add tests for streamed-chunks retry, #346

---
 test/asyncSpec.js | 38 ++++++++++++++++++++++++++++++++++++++
 1 file changed, 38 insertions(+)

diff --git a/test/asyncSpec.js b/test/asyncSpec.js
index 115770e..9fb3727 100644
--- a/test/asyncSpec.js
+++ b/test/asyncSpec.js
@@ -202,4 +202,42 @@ describe('upload stream', function() {
     flow.addFile(sample_file);
     expect(console.warn).toHaveBeenCalled();
   });
+
+  it('async stream support request temporary failure', async function (done) {
+    // ToDo: This test uses low-level files[0].chunks[x].send(); to do atomic
+    // uploads and avoid the unstoppable (recursive) loop.
+    xhr_server.configure({autoRespond: false, respondImmediately: false});
+
+    var streamer = new Streamer(1);
+    flow.opts.initFileFn = streamer.init.bind(streamer);
+    flow.opts.asyncReadFileFn = streamer.read.bind(streamer);
+
+    flow.opts.chunkSize = 1;
+    flow.opts.maxChunkRetries = 3;
+    flow.opts.simultaneousUploads = 2;
+    await flow.asyncAddFile(new File(['12'], `stream-failure-${jasmine.currentTest.id}.bin`));
+    var files = flow.files;
+    expect(files[0].chunks.length).toBe(2);
+
+    await files[0].chunks[0].send();
+    // xhr.error() is unusable. See https://github.com/sinonjs/nise/issues/148
+    // xhr_server.respond(xhr => xhr.error());
+    xhr_server.respond([400, {}, 'Error']);
+
+    xhr_server.respondWith([200, { "Content-Type": "text/plain" }, 'ok']);
+    await files[0].chunks[0].send();
+    await files[0].chunks[1].send();
+
+    validateStatus({flow, request_number: 3, requests: xhr_server.requests});
+    // See the above comment about why the inconsistent state can't be tested
+    // expect(flow.files[0].isUploading()).toBe(false);
+    // expect(flow.files[0].isComplete()).toBe(true);
+    validatePayload(done,
+      '12',
+      {
+        orig_hash: "6b51d431df5d7f141cbececcf79edf3dd861c3b4069f0b11661a3eefacbba918",
+        requests: xhr_server.requests,
+      });
+
+  });
 });

From 1062894e41543b70cde53d22d270faa112e56de0 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Rapha=C3=ABl=20Droz?=
Date: Mon, 23 Aug 2021 14:20:59 -0300
Subject: [PATCH 05/10] tests: Jasmine does NOT support async + (done) tests
 (!). This is specifically forbidden in Jasmine 3.7.
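For instance, a spec shaped like this (a hypothetical example, not taken from
this suite) is rejected because the function both returns a promise and
declares a `done` callback:

    it('uploads a stream', async function (done) {
      await flow.asyncAddFile(sample_file);  // the async function returns a promise...
      flow.on('complete', () => done());     // ...while also taking `done`: Jasmine errors out
      flow.upload();
    });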
Workarounds using two distinct promises per test must be used
(see https://github.com/jasmine/jasmine/issues/1893)

---
 test/asyncSpec.js  | 24 +++++++++++-------------
 test/helpers.js    | 17 +++++++----------
 test/uploadSpec.js |  4 ++--
 3 files changed, 20 insertions(+), 25 deletions(-)

diff --git a/test/asyncSpec.js b/test/asyncSpec.js
index 9fb3727..fe5eee3 100644
--- a/test/asyncSpec.js
+++ b/test/asyncSpec.js
@@ -97,7 +97,7 @@ describe('upload stream', function() {
     xhr_server.restore();
   });
 
-  it('synchronous initFileFn and asyncReadFileFn', async function (done) {
+  it('synchronous initFileFn and asyncReadFileFn', function (done) {
     // No File.stream() support: no test
     // No support for skipping() test from Jasmine (https://github.com/jasmine/jasmine/issues/1709)
    if (typeof Blob === 'undefined' || typeof Blob.prototype.stream !== 'function') {
@@ -120,17 +120,17 @@
     }
 
     var content = gen_file(chunk_num, chunk_size),
-        orig_hash = hex(await hash(content)),
         sample_file = new File([content], 'foobar.bin');
 
-    console.info(`Test File is ${chunk_num} bytes long (sha256: ${orig_hash}).`);
+    console.info(`Test File is ${content.length} bytes long.`);
     console.info(`Now uploads ${simultaneousUploads} simultaneous chunks of at most ${upload_chunk_size} bytes`);
 
     flow.on('file-error', jasmine.createSpy('error'));
     flow.on('file-success', jasmine.createSpy('success'));
-    flow.on('complete', () => {
+    flow.on('complete', async () => {
       validateStatus({flow, content_length: content.length, requests: xhr_server.requests}, flow.files[0]);
-      validatePayload(done, content, {orig_hash, flow, requests: xhr_server.requests});
+      await validatePayload(content, {flow, requests: xhr_server.requests});
+      done();
     });
 
     var streamer = new Streamer(upload_chunk_size); // chunk_size);
@@ -203,7 +203,7 @@
     expect(console.warn).toHaveBeenCalled();
   });
 
-  it('async stream support request temporary failure', async function (done) {
+  it('async stream support request temporary failure', async function () {
     // ToDo: This test uses low-level files[0].chunks[x].send(); to do atomic
     // uploads and avoid the unstoppable (recursive) loop.
     xhr_server.configure({autoRespond: false, respondImmediately: false});
@@ -232,12 +232,10 @@
     // See the above comment about why the inconsistent state can't be tested
     // expect(flow.files[0].isUploading()).toBe(false);
     // expect(flow.files[0].isComplete()).toBe(true);
-    validatePayload(done,
-      '12',
-      {
-        orig_hash: "6b51d431df5d7f141cbececcf79edf3dd861c3b4069f0b11661a3eefacbba918",
-        requests: xhr_server.requests,
-      });
-
+    await validatePayload('12',
+      {
+        orig_hash: "6b51d431df5d7f141cbececcf79edf3dd861c3b4069f0b11661a3eefacbba918",
+        requests: xhr_server.requests,
+      });
   });
 });

diff --git a/test/helpers.js b/test/helpers.js
index ec526af..cbec6a9 100644
--- a/test/helpers.js
+++ b/test/helpers.js
@@ -64,7 +64,7 @@ function validateStatus(args, file) {
  * @param content File original content.
  * @param orig_hash (Optional) File original hash. If not provided, it will be computed from content.
  */
-async function validatePayload(done, content, args) {
+async function validatePayload(content, args) {
   let {
     orig_hash = null,
     requests: _requests = (typeof xhr_server !== 'undefined' ? xhr_server.requests : null),
@@ -72,18 +72,15 @@
   } = args;
 
   if (!_requests) {
     console.warn("Called validatePayload with no requests array");
-    done();
+    return;
   }
 
   // An array of promises of obtaining the corresponding request's body (= payload)
   var payload_contents = _requests.map(x => [0, 200, 201].includes(x.status) ? x.requestBody.get('file').text() : '');
   orig_hash = orig_hash || hex(await hash(content));
-  Promise.all(payload_contents)
-    .then(values => hash(values.join('')))
-    .then(hash => hex(hash))
-    .then(hexhash => {
-      // console.log(orig_hash, hexhash);
-      expect(hexhash).toBe(orig_hash);
-      done();
-    });
+  console.info(`Test File sha256: ${orig_hash}.`);
+  let values = await Promise.all(payload_contents);
+  let hexhash = hex(await hash(values.join('')));
+  // console.log(orig_hash, hexhash);
+  expect(hexhash).toBe(orig_hash);
 }

diff --git a/test/uploadSpec.js b/test/uploadSpec.js
index 8086419..54a4af2 100644
--- a/test/uploadSpec.js
+++ b/test/uploadSpec.js
@@ -580,7 +580,7 @@ describe('upload file', function() {
     expect(xhr.requests.length).toBe(6);
   });
 
-  it('should allow to hook initFileFn function', function(done) {
+  it('should allow to hook initFileFn function', function() {
     var content = gen_file(6, 128),
         sample_file = new File([content], `foobar-initFileFn.bin`),
         customFunction = jasmine.createSpy('fn'),
@@ -596,7 +596,7 @@ describe('upload file', function() {
 
     flow.on('complete', async () => {
       validateStatus({flow, content_length: content.length, requests: xhr.requests}, flow.files[0]);
-      await validatePayload(done, content, {requests: xhr.requests});
+      await validatePayload(content, {requests: xhr.requests});
     });
 
     xhr.respondWith('ok');

From e1f9a80b65939a49a42cc40daa528b09c15ee272 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Rapha=C3=ABl=20Droz?=
Date: Wed, 25 Aug 2021 10:05:40 -0300
Subject: [PATCH 06/10] tests: Handle the case where an asyncReadFile function
 is used to process multiple Files

---
 test/asyncSpec.js | 81 ++++++++++++++++++++++++++++++++++++++---------
 test/helpers.js   |  3 +-
 2 files changed, 68 insertions(+), 16 deletions(-)

diff --git a/test/asyncSpec.js b/test/asyncSpec.js
index fe5eee3..5a8a959 100644
--- a/test/asyncSpec.js
+++ b/test/asyncSpec.js
@@ -12,52 +12,59 @@ describe('upload stream', function() {
 
   class Streamer {
     constructor(chunk_size) {
-      this._reader = null;
+      this._reader = {};
       this.chunk_size = chunk_size;
       // See the comment in read() for why we implement a custom reader here.
-      this.buffer = null;
-      this.index = 0;
+      this.buffer = {};
+      this.index = {};
     };
 
     init(flowObj) {
-      this._reader = flowObj.file.stream().getReader();
+      // ToDo: Use flowObj.uniqueIdentifier ?
+      this._reader[flowObj.name] = flowObj.file.stream().getReader();
+      this.buffer[flowObj.name] = null;
    };
 
     async read(flowObj, startByte, endByte, fileType, chunk) {
-      // chunk._log(`Start reading from ${this.buffer !== null ? 'existing' : 'the'} buffer`);
+      // console.log(`[asyncRead ${flowObj.name}#${chunk.offset}] start reading from ${this.buffer[flowObj.name] !== null ? 'existing' : 'the'} buffer`);
       if (this.buffer[flowObj.name] === null) {
         // console.log(`[asyncRead ${chunk.offset}] no preexisting buffer => reader.read()`);
         /* Here we would expect a partial read of 64kb (by implementation) but it
           seems that *all* the buffer is returned, making it difficult to write a
           test based on ReadableStreamDefaultReader() behavior. As such we
          simulate it.
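           For example (illustrative of typical browser behavior, not a spec
           guarantee), a single
             const {value, done} = await file.stream().getReader().read();
           can hand back the whole small test file in `value` rather than a 64kb
           slice, so the chunk_size slicing below re-creates the partial reads
           this suite wants to exercise.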
         */
        const {value: buffer, done} = await this._reader[flowObj.name].read();
+        this.buffer[flowObj.name] = buffer ? buffer.slice(0) : null;
 
        if (buffer) {
-          // console.log(`[asyncRead ${chunk.offset}] got a buffer of ${buffer.length} bytes...`);
+          // console.log(`[asyncRead ${flowObj.name}#${chunk.offset}] Read ${buffer.length} bytes`, buffer);
        } else {
          // console.log(`[asyncRead ${chunk.offset}] no buffer[bail]`);
          return null;
        }
      }
 
-      if (this.buffer.length === 0) {
-        // console.log(`[asyncRead ${chunk.offset}] this.buffer is null[bail]`);
+      if (this.buffer[flowObj.name].length === 0) {
+        // console.log(`[asyncRead ${chunk.offset}] this.buffer[${flowObj.name}] is null[bail]`);
        return null;
      }
 
+      if (!this.index[flowObj.name]) {
+        this.index[flowObj.name] = 0;
+      }
+
+      // console.log(`[asyncRead ${chunk.offset}] Read slice[${this.index[flowObj.name]}:${this.index[flowObj.name] + this.chunk_size}] a buffer of ${this.buffer[flowObj.name].length} bytes`);
      var buffer_chunk = this.buffer[flowObj.name].slice(this.index[flowObj.name], this.index[flowObj.name] + this.chunk_size);
+      // console.log(`[asyncRead] Read slice of ${buffer_chunk.length} bytes`);
 
      if (!buffer_chunk) {
        // console.log(`[asyncRead ${chunk.offset}] null slice`);
        // console.log(buffer_chunk);
      } else {
-        // chunk._log(`Read slice of ${buffer_chunk.length} bytes`);
-        this.index += this.chunk_size;
+        this.index[flowObj.name] += this.chunk_size;
+        // console.log(`[asyncRead] ${buffer_chunk}. index is now ${this.index[flowObj.name]}`);
        return new Blob([buffer_chunk], {type: 'application/octet-stream'});
      }
@@ -238,4 +245,48 @@
        requests: xhr_server.requests,
      });
  });
+
+  it('Do not corrupt multiple streams', async function () {
+    xhr_server.configure({autoRespond: true, respondImmediately: true});
+    xhr_server.respondWith([200, { "Content-Type": "text/plain" }, 'ok']);
+    var streamer = new Streamer(1);
+    flow.opts.initFileFn = streamer.init.bind(streamer);
+    flow.opts.asyncReadFileFn = streamer.read.bind(streamer);
+
+    flow.opts.chunkSize = 1;
+    flow.opts.maxChunkRetries = 3;
+    flow.opts.simultaneousUploads = 2;
+    await flow.asyncAddFiles([
+      new File(['1234'], `multi1-${jasmine.currentTest.id}.bin`),
+      new File(['56789'], `multi2-${jasmine.currentTest.id}.bin`)
+    ]);
+
+    await flow.files[0].chunks[0].send();
+    await flow.files[1].chunks[0].send();
+    for (let i = 0; i < (9 - 2); i++) {
+      flow.uploadNextChunk(true);
+      await sleep(1);
+    }
+
+    for (let file of flow.files) {
+      expect(file.isUploading()).toBeFalsy();
+      expect(file.isComplete()).toBeTruthy();
+      expect(file.progress()).toBe(1);
+    }
+
+    expect(flow.progress()).toBe(1);
+    validateStatus({flow, request_number: 9, requests: xhr_server.requests});
+    await validatePayload(null,
+      {
+        orig_hash: '03ac674216f3e15c761ee1a5e255f067953623c8b388b4459e13f978d7c846f4',
+        requests: xhr_server.requests,
+        filter: x => x.requestBody.get('file').name === `multi1-${jasmine.currentTest.id}.bin`
+      });
+    await validatePayload(null,
+      {
+        orig_hash: 'f76043a74ec33b6aefbb289050faf7aa8d482095477397e3e63345125d49f527',
+        requests: xhr_server.requests,
+        filter: x => x.requestBody.get('file').name === `multi2-${jasmine.currentTest.id}.bin`
+      });
+  });
 });

diff --git a/test/helpers.js b/test/helpers.js
index cbec6a9..0d29a01 100644
--- a/test/helpers.js
+++ b/test/helpers.js
@@ -68,6 +68,7 @@ async function validatePayload(content, args) {
   let {
     orig_hash = null,
     requests: _requests = (typeof xhr_server !== 'undefined' ? xhr_server.requests : null),
+    filter = x => true
   } = args;
 
   if (!_requests) {
@@ -76,7 +77,7 @@ async function validatePayload(content, args) {
   }
 
   // An array of promises of obtaining the corresponding request's body (= payload)
-  var payload_contents = _requests.map(x => [0, 200, 201].includes(x.status) ? x.requestBody.get('file').text() : '');
+  var payload_contents = _requests.map(x => [0, 200, 201].includes(x.status) && filter(x) ? x.requestBody.get('file').text() : '');
   orig_hash = orig_hash || hex(await hash(content));
   console.info(`Test File sha256: ${orig_hash}.`);
   let values = await Promise.all(payload_contents);

From 2f444205fc8b77f99d796e5802ccbe204536ad3e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Rapha=C3=ABl=20Droz?=
Date: Wed, 25 Aug 2021 10:51:20 -0300
Subject: [PATCH 07/10] tests: added an isReading() function + a (failing)
 test exercising pause/resume() for streams

---
 src/AsyncFlowFile.js |  14 +++++
 test/asyncSpec.js    | 137 +++++++++++++++++++++++++++++++++++++++++++
 2 files changed, 151 insertions(+)

diff --git a/src/AsyncFlowFile.js b/src/AsyncFlowFile.js
index 7a3ee80..c6dc6b4 100644
--- a/src/AsyncFlowFile.js
+++ b/src/AsyncFlowFile.js
@@ -33,4 +33,18 @@ export default class AsyncFlowFile extends FlowFile {
     // console.log("Flowfile returns [async]", this._bootstrapped);
     return this;
   }
+
+  /**
+   * Indicates if the stream is being read at the moment
+   * @function
+   * @returns {boolean}
+   */
+  isReading() {
+    for (let chunk of this.chunks) {
+      if (chunk.status() === 'reading') {
+        return true;
+      }
+    }
+    return false;
+  }
 }

diff --git a/test/asyncSpec.js b/test/asyncSpec.js
index 5a8a959..9806664 100644
--- a/test/asyncSpec.js
+++ b/test/asyncSpec.js
@@ -289,4 +289,141 @@ describe('upload stream', function() {
      filter: x => x.requestBody.get('file').name === `multi2-${jasmine.currentTest.id}.bin`
    });
  });
+
+  it('should pause and resume stream', async function () {
+    xhr_server.configure({autoRespond: false, respondImmediately: false});
+    var streamer = new Streamer(1);
+    flow.opts.initFileFn = streamer.init.bind(streamer);
+    flow.opts.asyncReadFileFn = streamer.read.bind(streamer);
+
+    flow.opts.chunkSize = 1;
+    flow.opts.maxChunkRetries = 3;
+    flow.opts.simultaneousUploads = 2;
+    await flow.asyncAddFiles([
+      new File(['123456'], `foobar1-${jasmine.currentTest.id}.bin`),
+      new File(['789'], `foobar2-${jasmine.currentTest.id}.bin`)
+    ]);
+
+    let files = flow.files;
+    let counter = {};
+
+    /*
+      [      ]
+      [   ]
+    */
+    expect(files[0].chunks.length).toBe(6);
+    expect(files[1].chunks.length).toBe(3);
+    flow.upload();
+    expect(files[0].isReading()).toBeTruthy();
+    await sleep(1);
+
+    /*
+      [^^    ]
+      [   ]
+    */
+    expect(xhr_server.requests.length).toBe(2);
+    expect(xhr_server.requests[0].aborted).toBeUndefined();
+    expect(xhr_server.requests[1].aborted).toBeUndefined();
+
+    // Reply to XHR n°1 and 2
+    xhr_server.respond();
+    /*
+      [oo    ]
+      [   ]
+    */
+    expect(xhr_server.requests[0].status).toBe(200);
+    expect(xhr_server.requests[1].status).toBe(200);
+    await sleep(1);
+    /*
+      [oo^^__]
+      [   ]
+    */
+    expect(xhr_server.requests.length).toBe(4);
+    expect(files[0].isUploading()).toBeTruthy();
+    expect(files[0].isReading()).toBeFalsy();
+
+    // Next two chunks from file[0] were read but we abort() their
+    // corresponding `xhr`. They will get back to pending.
+    // Flow should start uploading second file now
+    files[0].pause();
+    await sleep(1);
+
+    /*
+      [oo____]
+      [^^ ]
+    */
+    expect(xhr_server.requests[2].aborted).toBeTruthy();
+    expect(xhr_server.requests[3].aborted).toBeTruthy();
+    expect(xhr_server.requests[4].aborted).toBeUndefined();
+    expect(files[0].isUploading()).toBeFalsy();
+
+    flow.upload();
+    await sleep(1);
+    expect(files[0].isUploading()).toBeFalsy();
+    expect(files[1].isUploading()).toBeTruthy();
+
+    // Reply to XHR n°4 and 5
+    xhr_server.respond();
+
+    expect(xhr_server.requests.length).toBe(6);
+    expect(xhr_server.requests[4].aborted).toBeFalsy();
+    expect(xhr_server.requests[5].aborted).toBeFalsy();
+
+    /*
+      [oo____]
+      [ooR]
+    */
+    // Should resume the file after the second file's chunks are uploaded
+    files[0].resume();
+    await sleep(1);
+
+    /*
+      [oo^^__]
+      [oo^]
+    */
+    // Finish file 1
+    expect(files[0].isUploading()).toBeTruthy();
+    expect(files[1].isUploading()).toBeTruthy();
+    expect(xhr_server.requests.length).toBe(9); // The 7 above + 2 aborted on pause()
+    xhr_server.respond();
+
+    /*
+      [oooo__]
+      [ooo]
+    */
+    // Upload finished
+    expect(files[1].isUploading()).toBeFalsy();
+    expect(files[1].isComplete()).toBeTruthy();
+    expect(files[1].progress()).toBe(1);
+
+    /*
+      [oooo__]
+      [ooo]
+    */
+    // Finish file 0
+    await sleep(1);
+    expect(xhr_server.requests.length).toBe(11);
+    xhr_server.respond();
+
+    /*
+      [oooooo]
+      [ooo]
+    */
+    expect(files[0].isUploading()).toBeFalsy();
+    expect(files[0].isComplete()).toBeTruthy();
+    expect(files[0].progress()).toBe(1);
+    expect(flow.progress()).toBe(1);
+
+    validateStatus({flow, request_number: 11, requests: xhr_server.requests});
+    await validatePayload(null, {
+      orig_hash: '8d969eef6ecad3c29a3a629280e686cf0c3f5d5a86aff3ca12020c923adc6c92', // "123456"
+      requests: xhr_server.requests,
+      filter: x => x.requestBody.get('file').name === `foobar1-${jasmine.currentTest.id}.bin`
+    });
+    await validatePayload(null, {
+      orig_hash: '35a9e381b1a27567549b5f8a6f783c167ebf809f1c4d6a9e367240484d8ce281', // "789"
+      requests: xhr_server.requests,
+      filter: x => x.requestBody.get('file').name === `foobar2-${jasmine.currentTest.id}.bin`
+    });
+  });
 });

From 7ff83ffbf64b4ae02076e8cd5019aff79baecb18 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Rapha=C3=ABl=20Droz?=
Date: Wed, 8 Sep 2021 17:07:57 -0300
Subject: [PATCH 08/10] tests: Compute XHR upload checksum on successful XHR
 only.

---
 test/asyncSpec.js | 1 +
 test/helpers.js   | 2 +-
 2 files changed, 2 insertions(+), 1 deletion(-)

diff --git a/test/asyncSpec.js b/test/asyncSpec.js
index 9806664..06d1113 100644
--- a/test/asyncSpec.js
+++ b/test/asyncSpec.js
@@ -234,6 +234,7 @@ describe('upload stream', function() {
     xhr_server.respondWith([200, { "Content-Type": "text/plain" }, 'ok']);
     await files[0].chunks[0].send();
     await files[0].chunks[1].send();
+    xhr_server.respond();
 
     validateStatus({flow, request_number: 3, requests: xhr_server.requests});
     // See the above comment about why the inconsistent state can't be tested

diff --git a/test/helpers.js b/test/helpers.js
index 0d29a01..df183b0 100644
--- a/test/helpers.js
+++ b/test/helpers.js
@@ -77,7 +77,7 @@ async function validatePayload(content, args) {
   }
 
   // An array of promises of obtaining the corresponding request's body (= payload)
-  var payload_contents = _requests.map(x => [0, 200, 201].includes(x.status) && filter(x) ? x.requestBody.get('file').text() : '');
+  var payload_contents = _requests.map(x => [200, 201].includes(x.status) && filter(x) ?
x.requestBody.get('file').text() : ''); orig_hash = orig_hash || hex(await hash(content)); console.info(`Test File sha256: ${orig_hash}.`); let values = await Promise.all(payload_contents); From 0c574a34e65ada2067cd9a7a5f9be1c9b1ade720 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rapha=C3=ABl=20Droz?= Date: Thu, 9 Sep 2021 12:18:39 -0300 Subject: [PATCH 09/10] misc: indentation --- test/asyncSpec.js | 31 ++++++++++++++----------------- 1 file changed, 14 insertions(+), 17 deletions(-) diff --git a/test/asyncSpec.js b/test/asyncSpec.js index 06d1113..e35b3da 100644 --- a/test/asyncSpec.js +++ b/test/asyncSpec.js @@ -240,11 +240,10 @@ describe('upload stream', function() { // See the above comment about why the (inconsistent state can't be tested) // expect(flow.files[0].isUploading()).toBe(false); // expect(flow.files[0].isComplete()).toBe(true); - await validatePayload('12', - { - orig_hash: "6b51d431df5d7f141cbececcf79edf3dd861c3b4069f0b11661a3eefacbba918", - requests: xhr_server.requests, - }); + await validatePayload(null, { + orig_hash: "6b51d431df5d7f141cbececcf79edf3dd861c3b4069f0b11661a3eefacbba918", // "12" + requests: xhr_server.requests, + }); }); it('Do not corrupt multiple streams', async function () { @@ -277,18 +276,16 @@ describe('upload stream', function() { expect(flow.progress()).toBe(1); validateStatus({flow, request_number: 9, requests: xhr_server.requests}); - await validatePayload(null, - { - orig_hash: '03ac674216f3e15c761ee1a5e255f067953623c8b388b4459e13f978d7c846f4', - requests: xhr_server.requests, - filter: x => x.requestBody.get('file').name === `multi1-${jasmine.currentTest.id}.bin` - }); - await validatePayload(null, - { - orig_hash: 'f76043a74ec33b6aefbb289050faf7aa8d482095477397e3e63345125d49f527', - requests: xhr_server.requests, - filter: x => x.requestBody.get('file').name === `multi2-${jasmine.currentTest.id}.bin` - }); + await validatePayload(null, { + orig_hash: '03ac674216f3e15c761ee1a5e255f067953623c8b388b4459e13f978d7c846f4', // "1234" + requests: xhr_server.requests, + filter: x => x.requestBody.get('file').name === `multi1-${jasmine.currentTest.id}.bin` + }); + await validatePayload(null, { + orig_hash: 'f76043a74ec33b6aefbb289050faf7aa8d482095477397e3e63345125d49f527', // "56789" + requests: xhr_server.requests, + filter: x => x.requestBody.get('file').name === `multi2-${jasmine.currentTest.id}.bin` + }); }); it('should pause and resume stream', async function () { From ccc38b8b7d4910e41ffed4e1b3646597c5ffecc0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rapha=C3=ABl=20Droz?= Date: Wed, 15 Sep 2021 14:23:27 -0300 Subject: [PATCH 10/10] update builds --- dist/flow.js | 382 ++++++++++++++++++++++++++++++++--------------- dist/flow.min.js | 2 +- 2 files changed, 264 insertions(+), 120 deletions(-) diff --git a/dist/flow.js b/dist/flow.js index e101a12..e0ce962 100644 --- a/dist/flow.js +++ b/dist/flow.js @@ -5892,31 +5892,41 @@ key: "readStreamChunk", value: function () { var _readStreamChunk = _asyncToGenerator( /*#__PURE__*/regeneratorRuntime.mark(function _callee3() { - var data, asyncRead, lastReadBytes; + var data, asyncRead; return regeneratorRuntime.wrap(function _callee3$(_context3) { while (1) { switch (_context3.prev = _context3.next) { case 0: if (!this.readStreamState.resolved) { - _context3.next = 3; + _context3.next = 6; break; } - // This is normally impossible to reach. Has it been uploaded? - console.warn("Chunk ".concat(this.offset, " already read. xhr initialized = ").concat(this.xhr ? 
1 : 0)); // We may want to retry (or not) to upload (but never try to read from the stream again or risk misordered chunks + if (!(this.payload && this.pendingRetry)) { + _context3.next = 4; + break; + } - return _context3.abrupt("return"); + console.info("Retrying chunk ".concat(this.offset, " upload")); + return _context3.abrupt("return", this.uploadStreamChunk(this.payload)); - case 3: - _context3.next = 5; + case 4: + console.warn("Chunk ".concat(this.offset, " already read. xhr initialized = ").concat(this.xhr ? 1 : 0, ". payload size = ").concat(this.payload ? this.payload.size : null, ". readState = ").concat(this.readState, ". retry = ").concat(this.pendingRetry)); // ... but never try to read that same chunk from the (non-rewindable) stream again or we'd risk + // not only misordered chunks but a corrupted file. + + return _context3.abrupt("return", null); + + case 6: + this.readState = 1; + _context3.next = 9; return this.readStreamGuard(); - case 5: + case 9: asyncRead = this.flowObj.opts.asyncReadFileFn; - _context3.next = 8; + _context3.next = 12; return asyncRead(this.fileObj, this.startByte, this.endByte, this.fileObj.file.type, this); - case 8: + case 12: data = _context3.sent; this.readStreamState.resolve(); // Equivalent to readFinished() @@ -5926,23 +5936,48 @@ this.readBytes = data.size || data.size === 0 ? data.size : -1; } + return _context3.abrupt("return", this.uploadStreamChunk(data)); + + case 17: + case "end": + return _context3.stop(); + } + } + }, _callee3, this); + })); + + function readStreamChunk() { + return _readStreamChunk.apply(this, arguments); + } + + return readStreamChunk; + }() + }, { + key: "uploadStreamChunk", + value: function () { + var _uploadStreamChunk = _asyncToGenerator( /*#__PURE__*/regeneratorRuntime.mark(function _callee4(data) { + var lastReadBytes; + return regeneratorRuntime.wrap(function _callee4$(_context4) { + while (1) { + switch (_context4.prev = _context4.next) { + case 0: if (!(data && data.size > 0)) { - _context3.next = 17; + _context4.next = 5; break; } - if (this.flowObj.chunkSize) { + if (this.fileObj.chunkSize && data.size > this.fileObj.chunkSize) { // This may imply a miscalculation of the total chunk numbers. console.warn("Chunk ".concat(this.offset, ": returned too much data. Got ").concat(data.size, ". 
Expected not more than ").concat(this.flowObj.chunkSize, ".")); } this.payload = data; this.xhrSend(data); - return _context3.abrupt("return"); + return _context4.abrupt("return"); - case 17: + case 5: if (!(this.offset > 0)) { - _context3.next = 25; + _context4.next = 13; break; } @@ -5950,7 +5985,7 @@ lastReadBytes = this.fileObj.chunks[this.offset - 1].readBytes; if (!(lastReadBytes < parseInt(this.chunkSize))) { - _context3.next = 25; + _context4.next = 13; break; } @@ -5961,29 +5996,32 @@ this.pendingRetry = false; this.xhr = { - readyState: 5, - status: 1 + readyState: 4, + status: 200, + abort: function abort(e) { + return null; + } }; this.doneHandler(null); - return _context3.abrupt("return"); + return _context4.abrupt("return"); - case 25: + case 13: console.warn("Chunk ".concat(this.offset, ": no byte to read()")); this.pendingRetry = false; - case 27: + case 15: case "end": - return _context3.stop(); + return _context4.stop(); } } - }, _callee3, this); + }, _callee4, this); })); - function readStreamChunk() { - return _readStreamChunk.apply(this, arguments); + function uploadStreamChunk(_x) { + return _uploadStreamChunk.apply(this, arguments); } - return readStreamChunk; + return uploadStreamChunk; }() /** * Prepare data (preprocess/read) data then call xhrSend() @@ -5993,68 +6031,67 @@ }, { key: "send", value: function () { - var _send = _asyncToGenerator( /*#__PURE__*/regeneratorRuntime.mark(function _callee4() { + var _send = _asyncToGenerator( /*#__PURE__*/regeneratorRuntime.mark(function _callee5() { var preprocess, read, asyncRead; - return regeneratorRuntime.wrap(function _callee4$(_context4) { + return regeneratorRuntime.wrap(function _callee5$(_context5) { while (1) { - switch (_context4.prev = _context4.next) { + switch (_context5.prev = _context5.next) { case 0: preprocess = this.flowObj.opts.preprocess; read = this.flowObj.opts.readFileFn; asyncRead = this.flowObj.opts.asyncReadFileFn; if (!(typeof preprocess === 'function')) { - _context4.next = 11; + _context5.next = 11; break; } - _context4.t0 = this.preprocessState; - _context4.next = _context4.t0 === 0 ? 7 : _context4.t0 === 1 ? 10 : 11; + _context5.t0 = this.preprocessState; + _context5.next = _context5.t0 === 0 ? 7 : _context5.t0 === 1 ? 10 : 11; break; case 7: this.preprocessState = 1; preprocess(this); - return _context4.abrupt("return"); + return _context5.abrupt("return"); case 10: - return _context4.abrupt("return"); + return _context5.abrupt("return"); case 11: if (!asyncRead) { - _context4.next = 16; + _context5.next = 15; break; } - this.readState = 1; - _context4.next = 15; + _context5.next = 14; return this.readStreamChunk(); - case 15: - return _context4.abrupt("return"); + case 14: + return _context5.abrupt("return"); - case 16: - _context4.t1 = this.readState; - _context4.next = _context4.t1 === 0 ? 19 : _context4.t1 === 1 ? 22 : 23; + case 15: + _context5.t1 = this.readState; + _context5.next = _context5.t1 === 0 ? 18 : _context5.t1 === 1 ? 
21 : 22; break; - case 19: + case 18: this.readState = 1; read(this.fileObj, this.startByte, this.endByte, this.fileObj.file.type, this); - return _context4.abrupt("return"); + return _context5.abrupt("return"); - case 22: - return _context4.abrupt("return"); + case 21: + return _context5.abrupt("return"); - case 23: + case 22: this.xhrSend(this.payload); - case 24: + case 23: case "end": - return _context4.stop(); + return _context5.stop(); } } - }, _callee4, this); + }, _callee5, this); })); function send() { @@ -6102,13 +6139,11 @@ }, { key: "abort", value: function abort() { - // Abort and reset - var xhr = this.xhr; - this.xhr = null; - - if (xhr) { - xhr.abort(); + if (this.xhr) { + this.xhr.abort(); } + + this.xhr = null; } /** * Retrieve current chunk upload status @@ -6772,6 +6807,34 @@ return bootstrap; }() + /** + * Indicates if string is being read at the moment + * @function + * @returns {boolean} + */ + + }, { + key: "isReading", + value: function isReading() { + var _iterator = _createForOfIteratorHelper(this.chunks), + _step; + + try { + for (_iterator.s(); !(_step = _iterator.n()).done;) { + var chunk = _step.value; + + if (chunk.status() === 'reading') { + return true; + } + } + } catch (err) { + _iterator.e(err); + } finally { + _iterator.f(); + } + + return false; + } }]); return AsyncFlowFile; @@ -6919,6 +6982,55 @@ this.addFiles(dataTransfer.files, event); } } + /** + * On drop event when file/stream initialization is asynchronous + * @function + * @param {MouseEvent} event + */ + + }, { + key: "asyncOnDrop", + value: function () { + var _asyncOnDrop = _asyncToGenerator( /*#__PURE__*/regeneratorRuntime.mark(function _callee(event) { + var dataTransfer; + return regeneratorRuntime.wrap(function _callee$(_context) { + while (1) { + switch (_context.prev = _context.next) { + case 0: + if (this.opts.onDropStopPropagation) { + event.stopPropagation(); + } + + event.preventDefault(); + dataTransfer = event.dataTransfer; + + if (!(dataTransfer.items && dataTransfer.items[0] && dataTransfer.items[0].webkitGetAsEntry)) { + _context.next = 7; + break; + } + + this.webkitReadDataTransfer(event); + _context.next = 9; + break; + + case 7: + _context.next = 9; + return this.asyncAddFiles(dataTransfer.files, event); + + case 9: + case "end": + return _context.stop(); + } + } + }, _callee, this); + })); + + function asyncOnDrop(_x) { + return _asyncOnDrop.apply(this, arguments); + } + + return asyncOnDrop; + }() /** * Prevent default * @function @@ -7217,14 +7329,46 @@ each(attributes, function (value, key) { input.setAttribute(key, value); }); // When new files are added, simply append them to the overall list + // but adapt to the case where initFileFn is async. + + var callback = this.opts.initFileFn && typeof this.opts.initFileFn === 'function' && this.opts.initFileFn.constructor.name === 'AsyncFunction' ? 
/*#__PURE__*/function () { + var _ref = _asyncToGenerator( /*#__PURE__*/regeneratorRuntime.mark(function _callee2(e) { + return regeneratorRuntime.wrap(function _callee2$(_context2) { + while (1) { + switch (_context2.prev = _context2.next) { + case 0: + if (!e.target.value) { + _context2.next = 6; + break; + } + + input.setAttribute('readonly', 'readonly'); + _context2.next = 4; + return _this3.asyncAddFiles(e.target.files, e); + + case 4: + e.target.value = ''; + input.removeAttribute('readonly'); - input.addEventListener('change', function (e) { + case 6: + case "end": + return _context2.stop(); + } + } + }, _callee2); + })); + + return function (_x2) { + return _ref.apply(this, arguments); + }; + }() : function (e) { if (e.target.value) { _this3.addFiles(e.target.files, e); e.target.value = ''; } - }, false); + }; + input.addEventListener('change', callback, false); }, this); } /** @@ -7240,7 +7384,7 @@ domNodes = [domNodes]; } - this._onDropBound = this.onDrop.bind(this); + this._onDropBound = this.opts.initFileFn && typeof this.opts.initFileFn === 'function' && this.opts.initFileFn.constructor.name === 'AsyncFunction' ? this.asyncOnDrop.bind(this) : this.onDrop.bind(this); var _iterator7 = _createForOfIteratorHelper(domNodes), _step7; @@ -7427,78 +7571,78 @@ value: /*#__PURE__*/regeneratorRuntime.mark(function filterFileList(fileList, event) { var ie10plus, _iterator9, _step9, file, uniqueIdentifier; - return regeneratorRuntime.wrap(function filterFileList$(_context) { + return regeneratorRuntime.wrap(function filterFileList$(_context3) { while (1) { - switch (_context.prev = _context.next) { + switch (_context3.prev = _context3.next) { case 0: // ie10+ ie10plus = window.navigator.msPointerEnabled; _iterator9 = _createForOfIteratorHelper(fileList); - _context.prev = 2; + _context3.prev = 2; _iterator9.s(); case 4: if ((_step9 = _iterator9.n()).done) { - _context.next = 17; + _context3.next = 17; break; } file = _step9.value; if (!(ie10plus && file.size === 0 || file.size % 4096 === 0 && (file.name === '.' 
|| file.fileName === '.'))) { - _context.next = 8; + _context3.next = 8; break; } - return _context.abrupt("continue", 15); + return _context3.abrupt("continue", 15); case 8: uniqueIdentifier = this.generateUniqueIdentifier(file); if (!(!this.opts.allowDuplicateUploads && this.getFromUniqueIdentifier(uniqueIdentifier))) { - _context.next = 11; + _context3.next = 11; break; } - return _context.abrupt("continue", 15); + return _context3.abrupt("continue", 15); case 11: if (this.hook('filter-file', file, event)) { - _context.next = 13; + _context3.next = 13; break; } - return _context.abrupt("continue", 15); + return _context3.abrupt("continue", 15); case 13: - _context.next = 15; + _context3.next = 15; return [file, uniqueIdentifier]; case 15: - _context.next = 4; + _context3.next = 4; break; case 17: - _context.next = 22; + _context3.next = 22; break; case 19: - _context.prev = 19; - _context.t0 = _context["catch"](2); + _context3.prev = 19; + _context3.t0 = _context3["catch"](2); - _iterator9.e(_context.t0); + _iterator9.e(_context3.t0); case 22: - _context.prev = 22; + _context3.prev = 22; _iterator9.f(); - return _context.finish(22); + return _context3.finish(22); case 25: case "end": - return _context.stop(); + return _context3.stop(); } } }, filterFileList, this, [[2, 19, 22, 25]]); @@ -7599,35 +7743,35 @@ }, { key: "asyncAddFile", value: function () { - var _asyncAddFile = _asyncToGenerator( /*#__PURE__*/regeneratorRuntime.mark(function _callee(file) { + var _asyncAddFile = _asyncToGenerator( /*#__PURE__*/regeneratorRuntime.mark(function _callee3(file) { var _len2, args, _key2, - _args2 = arguments; + _args4 = arguments; - return regeneratorRuntime.wrap(function _callee$(_context2) { + return regeneratorRuntime.wrap(function _callee3$(_context4) { while (1) { - switch (_context2.prev = _context2.next) { + switch (_context4.prev = _context4.next) { case 0: - for (_len2 = _args2.length, args = new Array(_len2 > 1 ? _len2 - 1 : 0), _key2 = 1; _key2 < _len2; _key2++) { - args[_key2 - 1] = _args2[_key2]; + for (_len2 = _args4.length, args = new Array(_len2 > 1 ? _len2 - 1 : 0), _key2 = 1; _key2 < _len2; _key2++) { + args[_key2 - 1] = _args4[_key2]; } - _context2.next = 3; + _context4.next = 3; return this.asyncAddFiles.apply(this, [[file]].concat(args)); case 3: - return _context2.abrupt("return", _context2.sent[0]); + return _context4.abrupt("return", _context4.sent[0]); case 4: case "end": - return _context2.stop(); + return _context4.stop(); } } - }, _callee, this); + }, _callee3, this); })); - function asyncAddFile(_x) { + function asyncAddFile(_x3) { return _asyncAddFile.apply(this, arguments); } @@ -7645,7 +7789,7 @@ }, { key: "asyncAddFiles", value: function () { - var _asyncAddFiles = _asyncToGenerator( /*#__PURE__*/regeneratorRuntime.mark(function _callee2(fileList) { + var _asyncAddFiles = _asyncToGenerator( /*#__PURE__*/regeneratorRuntime.mark(function _callee4(fileList) { var event, initFileFn, item, @@ -7663,90 +7807,90 @@ _iterator12, _step12, _file3, - _args3 = arguments; + _args5 = arguments; - return regeneratorRuntime.wrap(function _callee2$(_context3) { + return regeneratorRuntime.wrap(function _callee4$(_context5) { while (1) { - switch (_context3.prev = _context3.next) { + switch (_context5.prev = _context5.next) { case 0: - event = _args3.length > 1 && _args3[1] !== undefined ? _args3[1] : null; - initFileFn = _args3.length > 2 && _args3[2] !== undefined ? _args3[2] : this.opts.initFileFn; + event = _args5.length > 1 && _args5[1] !== undefined ? 
_args5[1] : null; + initFileFn = _args5.length > 2 && _args5[2] !== undefined ? _args5[2] : this.opts.initFileFn; states = []; iterator = this.filterFileList(fileList, event); case 4: if (!((item = iterator.next()) && !item.done)) { - _context3.next = 16; + _context5.next = 16; break; } _item$value2 = _slicedToArray(item.value, 2); file = _item$value2[0]; uniqueIdentifier = _item$value2[1]; - _context3.next = 10; + _context5.next = 10; return this.aHook('filter-file', file, event); case 10: - if (_context3.sent) { - _context3.next = 12; + if (_context5.sent) { + _context5.next = 12; break; } - return _context3.abrupt("continue", 4); + return _context5.abrupt("continue", 4); case 12: // ToDo: parallelizable ? flowFile = new AsyncFlowFile(this, file, uniqueIdentifier), state = flowFile.bootstrap(event, initFileFn); states.push(state); - _context3.next = 4; + _context5.next = 4; break; case 16: - _context3.next = 18; + _context5.next = 18; return Promise.all(states); case 18: - flowfiles = _context3.sent; + flowfiles = _context5.sent; _iterator11 = _createForOfIteratorHelper(flowfiles); - _context3.prev = 20; + _context5.prev = 20; _iterator11.s(); case 22: if ((_step11 = _iterator11.n()).done) { - _context3.next = 29; + _context5.next = 29; break; } ff = _step11.value; this.hook('file-added', ff, event); - _context3.next = 27; + _context5.next = 27; return this.aHook('file-added', ff, event); case 27: - _context3.next = 22; + _context5.next = 22; break; case 29: - _context3.next = 34; + _context5.next = 34; break; case 31: - _context3.prev = 31; - _context3.t0 = _context3["catch"](20); + _context5.prev = 31; + _context5.t0 = _context5["catch"](20); - _iterator11.e(_context3.t0); + _iterator11.e(_context5.t0); case 34: - _context3.prev = 34; + _context5.prev = 34; _iterator11.f(); - return _context3.finish(34); + return _context5.finish(34); case 37: this.hook('files-added', flowfiles, event); - _context3.next = 40; + _context5.next = 40; return this.aHook('files-added', flowfiles, event); case 40: @@ -7772,21 +7916,21 @@ } this.hook('files-submitted', this.files, event); - _context3.next = 46; + _context5.next = 46; return this.aHook('files-submitted', this.files, event); case 46: - return _context3.abrupt("return", flowfiles); + return _context5.abrupt("return", flowfiles); case 47: case "end": - return _context3.stop(); + return _context5.stop(); } } - }, _callee2, this, [[20, 31, 34, 37]]); + }, _callee4, this, [[20, 31, 34, 37]]); })); - function asyncAddFiles(_x2) { + function asyncAddFiles(_x4) { return _asyncAddFiles.apply(this, arguments); } diff --git a/dist/flow.min.js b/dist/flow.min.js index a46266b..ceea935 100644 --- a/dist/flow.min.js +++ b/dist/flow.min.js @@ -1,3 +1,3 @@ /*! 
@flowjs/flow.js 3.0.0-alpha.0 */ -!function(t,e){"object"==typeof exports&&"undefined"!=typeof module?module.exports=e():"function"==typeof define&&define.amd?define(e):(t="undefined"!=typeof globalThis?globalThis:t||self).Flow=e()}(this,(function(){"use strict";function t(e){return(t="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(t){return typeof t}:function(t){return t&&"function"==typeof Symbol&&t.constructor===Symbol&&t!==Symbol.prototype?"symbol":typeof t})(e)}function e(t,e,r,n,i,o,a){try{var s=t[o](a),u=s.value}catch(t){return void r(t)}s.done?e(u):Promise.resolve(u).then(n,i)}function r(t){return function(){var r=this,n=arguments;return new Promise((function(i,o){var a=t.apply(r,n);function s(t){e(a,i,o,s,u,"next",t)}function u(t){e(a,i,o,s,u,"throw",t)}s(void 0)}))}}function n(t,e){if(!(t instanceof e))throw new TypeError("Cannot call a class as a function")}function i(t,e){for(var r=0;rt.length)&&(e=t.length);for(var r=0,n=new Array(e);r=t.length?{done:!0}:{done:!1,value:t[n++]}},e:function(t){throw t},f:i}}throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}var o,a=!0,s=!1;return{s:function(){r=t[Symbol.iterator]()},n:function(){var t=r.next();return a=t.done,t},e:function(t){s=!0,o=t},f:function(){try{a||null==r.return||r.return()}finally{if(s)throw o}}}}var k="undefined"!=typeof globalThis?globalThis:"undefined"!=typeof window?window:"undefined"!=typeof global?global:"undefined"!=typeof self?self:{};function S(t){var e={exports:{}};return t(e,e.exports),e.exports}var O=function(t){return t&&t.Math==Math&&t},x=O("object"==("undefined"==typeof globalThis?"undefined":t(globalThis))&&globalThis)||O("object"==("undefined"==typeof window?"undefined":t(window))&&window)||O("object"==("undefined"==typeof self?"undefined":t(self))&&self)||O("object"==t(k)&&k)||function(){return this}()||Function("return this")(),E=function(t){try{return!!t()}catch(t){return!0}},j=!E((function(){return 7!=Object.defineProperty({},1,{get:function(){return 7}})[1]})),_={}.propertyIsEnumerable,P=Object.getOwnPropertyDescriptor,R={f:P&&!_.call({1:2},1)?function(t){var e=P(this,t);return!!e&&e.enumerable}:_},A=function(t,e){return{enumerable:!(1&t),configurable:!(2&t),writable:!(4&t),value:e}},T={}.toString,I=function(t){return T.call(t).slice(8,-1)},C="".split,L=E((function(){return!Object("z").propertyIsEnumerable(0)}))?function(t){return"String"==I(t)?C.call(t,""):Object(t)}:Object,F=function(t){if(null==t)throw TypeError("Can't call method on "+t);return t},N=function(t){return L(F(t))},z=function(e){return"object"===t(e)?null!==e:"function"==typeof e},M=function(t,e){if(!z(t))return t;var r,n;if(e&&"function"==typeof(r=t.toString)&&!z(n=r.call(t)))return n;if("function"==typeof(r=t.valueOf)&&!z(n=r.call(t)))return n;if(!e&&"function"==typeof(r=t.toString)&&!z(n=r.call(t)))return n;throw TypeError("Can't convert object to primitive value")},D={}.hasOwnProperty,U=function(t,e){return D.call(t,e)},H=x.document,B=z(H)&&z(H.createElement),G=function(t){return B?H.createElement(t):{}},q=!j&&!E((function(){return 7!=Object.defineProperty(G("div"),"a",{get:function(){return 7}}).a})),$=Object.getOwnPropertyDescriptor,V={f:j?$:function(t,e){if(t=N(t),e=M(e,!0),q)try{return $(t,e)}catch(t){}if(U(t,e))return A(!R.f.call(t,e),t[e])}},Y=function(t){if(!z(t))throw TypeError(String(t)+" is not an object");return 
t},X=Object.defineProperty,K={f:j?X:function(t,e,r){if(Y(t),e=M(e,!0),Y(r),q)try{return X(t,e,r)}catch(t){}if("get"in r||"set"in r)throw TypeError("Accessors not supported");return"value"in r&&(t[e]=r.value),t}},W=j?function(t,e,r){return K.f(t,e,A(1,r))}:function(t,e,r){return t[e]=r,t},J=function(t,e){try{W(x,t,e)}catch(r){x[t]=e}return e},Z="__core-js_shared__",Q=x[Z]||J(Z,{}),tt=Function.toString;"function"!=typeof Q.inspectSource&&(Q.inspectSource=function(t){return tt.call(t)});var et,rt,nt,it=Q.inspectSource,ot=x.WeakMap,at="function"==typeof ot&&/native code/.test(it(ot)),st=S((function(t){(t.exports=function(t,e){return Q[t]||(Q[t]=void 0!==e?e:{})})("versions",[]).push({version:"3.8.3",mode:"global",copyright:"© 2021 Denis Pushkarev (zloirock.ru)"})})),ut=0,ct=Math.random(),lt=function(t){return"Symbol("+String(void 0===t?"":t)+")_"+(++ut+ct).toString(36)},ft=st("keys"),ht=function(t){return ft[t]||(ft[t]=lt(t))},pt={},dt=x.WeakMap;if(at){var vt=Q.state||(Q.state=new dt),yt=vt.get,gt=vt.has,mt=vt.set;et=function(t,e){return e.facade=t,mt.call(vt,t,e),e},rt=function(t){return yt.call(vt,t)||{}},nt=function(t){return gt.call(vt,t)}}else{var bt=ht("state");pt[bt]=!0,et=function(t,e){return e.facade=t,W(t,bt,e),e},rt=function(t){return U(t,bt)?t[bt]:{}},nt=function(t){return U(t,bt)}}var wt,kt,St={set:et,get:rt,has:nt,enforce:function(t){return nt(t)?rt(t):et(t,{})},getterFor:function(t){return function(e){var r;if(!z(e)||(r=rt(e)).type!==t)throw TypeError("Incompatible receiver, "+t+" required");return r}}},Ot=S((function(t){var e=St.get,r=St.enforce,n=String(String).split("String");(t.exports=function(t,e,i,o){var a,s=!!o&&!!o.unsafe,u=!!o&&!!o.enumerable,c=!!o&&!!o.noTargetGet;"function"==typeof i&&("string"!=typeof e||U(i,"name")||W(i,"name",e),(a=r(i)).source||(a.source=n.join("string"==typeof e?e:""))),t!==x?(s?!c&&t[e]&&(u=!0):delete t[e],u?t[e]=i:W(t,e,i)):u?t[e]=i:J(e,i)})(Function.prototype,"toString",(function(){return"function"==typeof this&&e(this).source||it(this)}))})),xt=x,Et=function(t){return"function"==typeof t?t:void 0},jt=function(t,e){return arguments.length<2?Et(xt[t])||Et(x[t]):xt[t]&&xt[t][e]||x[t]&&x[t][e]},_t=Math.ceil,Pt=Math.floor,Rt=function(t){return isNaN(t=+t)?0:(t>0?Pt:_t)(t)},At=Math.min,Tt=function(t){return t>0?At(Rt(t),9007199254740991):0},It=Math.max,Ct=Math.min,Lt=function(t,e){var r=Rt(t);return r<0?It(r+e,0):Ct(r,e)},Ft=function(t){return function(e,r,n){var i,o=N(e),a=Tt(o.length),s=Lt(n,a);if(t&&r!=r){for(;a>s;)if((i=o[s++])!=i)return!0}else for(;a>s;s++)if((t||s in o)&&o[s]===r)return t||s||0;return!t&&-1}},Nt={includes:Ft(!0),indexOf:Ft(!1)},zt=Nt.indexOf,Mt=function(t,e){var r,n=N(t),i=0,o=[];for(r in n)!U(pt,r)&&U(n,r)&&o.push(r);for(;e.length>i;)U(n,r=e[i++])&&(~zt(o,r)||o.push(r));return o},Dt=["constructor","hasOwnProperty","isPrototypeOf","propertyIsEnumerable","toLocaleString","toString","valueOf"],Ut=Dt.concat("length","prototype"),Ht={f:Object.getOwnPropertyNames||function(t){return Mt(t,Ut)}},Bt={f:Object.getOwnPropertySymbols},Gt=jt("Reflect","ownKeys")||function(t){var e=Ht.f(Y(t)),r=Bt.f;return r?e.concat(r(t)):e},qt=function(t,e){for(var r=Gt(e),n=K.f,i=V.f,o=0;o=74)&&(wt=fe.match(/Chrome\/(\d+)/))&&(kt=wt[1]);var ve=kt&&+kt,ye=ue("species"),ge=function(t){return ve>=51||!E((function(){var e=[];return(e.constructor={})[ye]=function(){return{foo:1}},1!==e[t](Boolean).foo}))},me=ue("isConcatSpreadable"),be=9007199254740991,we="Maximum allowed index exceeded",ke=ve>=51||!E((function(){var t=[];return 
t[me]=!1,t.concat()[0]!==t})),Se=ge("concat"),Oe=function(t){if(!z(t))return!1;var e=t[me];return void 0!==e?!!e:te(t)};Qt({target:"Array",proto:!0,forced:!ke||!Se},{concat:function(t){var e,r,n,i,o,a=ee(this),s=le(a,0),u=0;for(e=-1,n=arguments.length;ebe)throw TypeError(we);for(r=0;r=be)throw TypeError(we);re(s,u++,o)}return s.length=u,s}});var xe=function(t){if("function"!=typeof t)throw TypeError(String(t)+" is not a function");return t},Ee=function(t,e,r){if(xe(t),void 0===e)return t;switch(r){case 0:return function(){return t.call(e)};case 1:return function(r){return t.call(e,r)};case 2:return function(r,n){return t.call(e,r,n)};case 3:return function(r,n,i){return t.call(e,r,n,i)}}return function(){return t.apply(e,arguments)}},je=[].push,_e=function(t){var e=1==t,r=2==t,n=3==t,i=4==t,o=6==t,a=7==t,s=5==t||o;return function(u,c,l,f){for(var h,p,d=ee(u),v=L(d),y=Ee(c,l,3),g=Tt(v.length),m=0,b=f||le,w=e?b(u,g):r||a?b(u,0):void 0;g>m;m++)if((s||m in v)&&(p=y(h=v[m],m,d),t))if(e)w[m]=p;else if(p)switch(t){case 3:return!0;case 5:return h;case 6:return m;case 2:je.call(w,h)}else switch(t){case 4:return!1;case 7:je.call(w,h)}return o?-1:n||i?i:w}},Pe={forEach:_e(0),map:_e(1),filter:_e(2),some:_e(3),every:_e(4),find:_e(5),findIndex:_e(6),filterOut:_e(7)},Re=Object.defineProperty,Ae={},Te=function(t){throw t},Ie=function(t,e){if(U(Ae,t))return Ae[t];e||(e={});var r=[][t],n=!!U(e,"ACCESSORS")&&e.ACCESSORS,i=U(e,0)?e[0]:Te,o=U(e,1)?e[1]:void 0;return Ae[t]=!!r&&!E((function(){if(n&&!j)return!0;var t={length:-1};n?Re(t,1,{enumerable:!0,get:Te}):t[1]=1,r.call(t,i,o)}))},Ce=Pe.filter,Le=ge("filter"),Fe=Ie("filter");Qt({target:"Array",proto:!0,forced:!Le||!Fe},{filter:function(t){return Ce(this,t,arguments.length>1?arguments[1]:void 0)}});var Ne,ze=Object.keys||function(t){return Mt(t,Dt)},Me=j?Object.defineProperties:function(t,e){Y(t);for(var r,n=ze(e),i=n.length,o=0;i>o;)K.f(t,r=n[o++],e[r]);return t},De=jt("document","documentElement"),Ue=ht("IE_PROTO"),He=function(){},Be=function(t){return"