diff --git a/modules/arrow/src/arrow-loader.js b/modules/arrow/src/arrow-loader.js
index 608212aa76..5368687305 100644
--- a/modules/arrow/src/arrow-loader.js
+++ b/modules/arrow/src/arrow-loader.js
@@ -4,29 +4,24 @@ const VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';
 import parseSync from './lib/parse-arrow-sync';
 import {parseArrowInBatches, parseArrowInBatchesSync} from './lib/parse-arrow-in-batches';
 
-const ARROW = {
+export const ArrowWorkerLoader = {
   id: 'arrow',
   name: 'Apache Arrow',
   version: VERSION,
   extensions: ['arrow'],
   mimeType: 'application/octet-stream',
-  category: 'table'
+  category: 'table',
+  options: {
+    arrow: {
+      workerUrl: `https://unpkg.com/@loaders.gl/arrow@${VERSION}/dist/arrow-loader.worker.js`
+    }
+  }
 };
 
 export const ArrowLoader = {
-  ...ARROW,
+  ...ArrowWorkerLoader,
   parse: async (arraybuffer, options) => parseSync(arraybuffer, options),
   parseSync,
   parseInBatches: parseArrowInBatches,
-  parseInBatchesSync: parseArrowInBatchesSync,
-  options: {}
-};
-
-export const ArrowWorkerLoader = {
-  ...ARROW,
-  options: {
-    arrow: {
-      workerUrl: `https://unpkg.com/@loaders.gl/arrow@${VERSION}/dist/arrow-loader.worker.js`
-    }
-  }
+  parseInBatchesSync: parseArrowInBatchesSync
 };
diff --git a/modules/arrow/test/arrow-loader.spec.js b/modules/arrow/test/arrow-loader.spec.js
index 48bd2e27c7..e2e9025680 100644
--- a/modules/arrow/test/arrow-loader.spec.js
+++ b/modules/arrow/test/arrow-loader.spec.js
@@ -1,16 +1,9 @@
 import test from 'tape-promise/tape';
 import {validateLoader} from 'test/common/conformance';
 
-import {ArrowLoader} from '@loaders.gl/arrow';
-import {ArrowWorkerLoader} from '@loaders.gl/arrow';
-import {
-  isBrowser,
-  resolvePath,
-  fetchFile,
-  getStreamIterator,
-  parse,
-  parseInBatches
-} from '@loaders.gl/core';
+import {ArrowLoader, ArrowWorkerLoader} from '@loaders.gl/arrow';
+import {isBrowser, getStreamIterator, resolvePath} from '@loaders.gl/core';
+import {setLoaderOptions, fetchFile, parse, parseInBatches} from '@loaders.gl/core';
 
 // Small Arrow Sample Files
 const ARROW_SIMPLE = '@loaders.gl/arrow/test/data/simple.arrow';
@@ -20,13 +13,19 @@ const ARROW_STRUCT = '@loaders.gl/arrow/test/data/struct.arrow';
 // Bigger, batched sample file
 const ARROW_BIOGRID_NODES = '@loaders.gl/arrow/test/data/biogrid-nodes.arrow';
 
+setLoaderOptions({
+  arrow: {
+    workerUrl: 'modules/arrow/dist/arrow-loader.worker.js'
+  }
+});
+
 test('ArrowLoader#loader conformance', t => {
   validateLoader(t, ArrowLoader, 'ArrowLoader');
   t.end();
 });
 
 test('ArrowLoader#parseSync(simple.arrow)', async t => {
-  const columns = await parse(fetchFile(ARROW_SIMPLE), ArrowLoader);
+  const columns = await parse(fetchFile(ARROW_SIMPLE), ArrowLoader, {worker: false});
   // Check loader specific results
   t.ok(columns.bar, 'bar column loaded');
   t.ok(columns.baz, 'baz column loaded');
@@ -56,11 +55,7 @@ test('ArrowLoader#parse (WORKER)', async t => {
     return;
   }
 
-  const data = await parse(fetchFile(ARROW_SIMPLE), ArrowWorkerLoader, {
-    arrow: {
-      workerUrl: 'modules/arrow/dist/arrow-loader.worker.js'
-    }
-  });
+  const data = await parse(fetchFile(ARROW_SIMPLE), ArrowWorkerLoader);
   t.ok(data, 'Data returned');
   t.end();
 });
diff --git a/modules/core/src/lib/loader-utils/parse-with-worker.js b/modules/core/src/lib/loader-utils/parse-with-worker.js
index ff4554366c..9ab080a7f7 100644
--- a/modules/core/src/lib/loader-utils/parse-with-worker.js
+++ b/modules/core/src/lib/loader-utils/parse-with-worker.js
@@ -1,9 +1,13 @@
+/* global __VERSION__ */ // __VERSION__ is injected by babel-plugin-version-inline
 import {toArrayBuffer} from '../../javascript-utils/binary-utils';
 import WorkerFarm from '../../worker-utils/worker-farm';
 import {getTransferList} from '../../worker-utils/get-transfer-list';
 import {parse} from '../parse';
 export function canParseWithWorker(loader, data, options, context) {
+  if (!WorkerFarm.isSupported()) {
+    return false;
+  }
   const loaderOptions = options && options[loader.id];
   if (options.worker && loaderOptions && loaderOptions.workerUrl) {
     return loader.useWorker ? loader.useWorker(options) : true;
   }
@@ -31,8 +35,8 @@ export default function parseWithWorker(loader, data, options, context) {
 
   return workerFarm.process(workerSource, `loaders.gl-${workerName}`, {
     arraybuffer: toArrayBuffer(data),
-    options, // __VERSION__ is injected by babel-plugin-version-inline
-    /* global __VERSION__ */ source: `loaders.gl@${__VERSION__}`, // Lets worker ignore unrelated messages
+    options,
+    source: `loaders.gl@${__VERSION__}`, // Lets worker ignore unrelated messages
     type: 'parse' // For future extension
   });
 }
diff --git a/modules/core/src/worker-utils/worker-farm.js b/modules/core/src/worker-utils/worker-farm.js
index fec3f5e9be..5a2f5d75af 100644
--- a/modules/core/src/worker-utils/worker-farm.js
+++ b/modules/core/src/worker-utils/worker-farm.js
@@ -6,6 +6,10 @@ const DEFAULT_MAX_CONCURRENCY = 5;
  * Process multiple data messages with a "farm" of different workers (in worker pools)
  */
 export default class WorkerFarm {
+  static isSupported() {
+    return typeof Worker !== 'undefined';
+  }
+
   /**
    * @param processor {function | string} - worker function
    * @param maxConcurrency {number} - max count of workers
diff --git a/modules/draco/src/draco-loader.js b/modules/draco/src/draco-loader.js
index 1f716cdac7..bba7f30dfe 100644
--- a/modules/draco/src/draco-loader.js
+++ b/modules/draco/src/draco-loader.js
@@ -3,25 +3,14 @@ const VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';
 
 import DracoParser from './lib/draco-parser';
 
-const DRACO = {
+export const DracoWorkerLoader = {
   id: 'draco',
   name: 'DRACO',
   version: VERSION,
   extensions: ['drc'],
   mimeType: 'application/octet-stream',
   binary: true,
-  test: 'DRACO'
-};
-
-export const DracoLoader = {
-  ...DRACO,
-  parse: async (arrayBuffer, options) => parseSync(arrayBuffer, options),
-  parseSync,
-  options: {}
-};
-
-export const DracoWorkerLoader = {
-  ...DRACO,
+  test: 'DRACO',
   options: {
     draco: {
       workerUrl: `https://unpkg.com/@loaders.gl/draco@${VERSION}/dist/draco-loader.worker.js`
@@ -29,6 +18,12 @@ export const DracoWorkerLoader = {
   }
 };
 
+export const DracoLoader = {
+  ...DracoWorkerLoader,
+  parse: async (arrayBuffer, options) => parseSync(arrayBuffer, options),
+  parseSync
+};
+
 function parseSync(arrayBuffer, options) {
   const dracoParser = new DracoParser();
   try {
diff --git a/modules/draco/test/draco-loader.spec.js b/modules/draco/test/draco-loader.spec.js
index 0c4519cbfa..43d175795d 100644
--- a/modules/draco/test/draco-loader.spec.js
+++ b/modules/draco/test/draco-loader.spec.js
@@ -3,10 +3,16 @@ import test from 'tape-promise/tape';
 
 import {validateLoader, validatePointCloudCategoryData} from 'test/common/conformance';
 import {DracoLoader, DracoWorkerLoader} from '@loaders.gl/draco';
-import {load} from '@loaders.gl/core';
+import {setLoaderOptions, load} from '@loaders.gl/core';
 
 const BUNNY_DRC_URL = '@loaders.gl/draco/test/data/bunny.drc';
 
+setLoaderOptions({
+  draco: {
+    workerUrl: 'modules/draco/dist/draco-loader.worker.js'
+  }
+});
+
 test('DracoLoader#loader conformance', t => {
   validateLoader(t, DracoLoader, 'DracoLoader');
   validateLoader(t, DracoWorkerLoader, 'DracoWorkerLoader');
@@ -14,10 +20,9 @@
 });
 
 test('DracoLoader#parse and encode', async t => {
-  const data = await load(BUNNY_DRC_URL, DracoLoader);
+  const data = await load(BUNNY_DRC_URL, DracoLoader, {worker: false});
   validatePointCloudCategoryData(t, data);
 
   t.equal(data.attributes.POSITION.value.length, 104502, 'POSITION attribute was found');
-
   t.end();
 });
@@ -28,11 +33,7 @@ test('DracoWorkerLoader#parse', async t => {
     return;
   }
 
-  const data = await load(BUNNY_DRC_URL, DracoWorkerLoader, {
-    draco: {
-      workerUrl: 'modules/draco/dist/draco-loader.worker.js'
-    }
-  });
+  const data = await load(BUNNY_DRC_URL, DracoWorkerLoader);
 
   validatePointCloudCategoryData(t, data);
   t.equal(data.attributes.POSITION.value.length, 104502, 'POSITION attribute was found');
diff --git a/modules/las/docs/api-reference/las-loader.md b/modules/las/docs/api-reference/las-loader.md
index d22a1cb50a..279b94bb0a 100644
--- a/modules/las/docs/api-reference/las-loader.md
+++ b/modules/las/docs/api-reference/las-loader.md
@@ -26,7 +26,7 @@ const data = await load(url, LASWorkerLoader, options);
 
 ## Options
 
-| Option       | Type     | Default | Description                                                               |
-| ------------ | -------- | ------- | ------------------------------------------------------------------------- |
-| `skip`       | Number   | `1`     | Read one from every _n_ points.                                           |
-| `onProgress` | Function | -       | Callback when a new chunk of data is read. Only works on the main thread. |
+| Option               | Type     | Default | Description                                                               |
+| -------------------- | -------- | ------- | ------------------------------------------------------------------------- |
+| `options.las.skip`   | Number   | `1`     | Read one from every _n_ points.                                           |
+| `options.onProgress` | Function | -       | Callback when a new chunk of data is read. Only works on the main thread. |
diff --git a/modules/las/src/las-loader.js b/modules/las/src/las-loader.js
index 11ce526097..00b83cd496 100644
--- a/modules/las/src/las-loader.js
+++ b/modules/las/src/las-loader.js
@@ -5,7 +5,7 @@ const VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';
 
 import parseLAS from './lib/parse-las';
 
-const LAS = {
+export const LASWorkerLoader = {
   id: 'las',
   name: 'LAS',
   version: VERSION,
@@ -13,21 +13,16 @@ const LAS = {
   mimeType: 'application/octet-stream', // TODO - text version?
   text: true,
   binary: true,
-  test: 'LAS'
-};
-
-export const LASLoader = {
-  ...LAS,
-  parse: async (arrayBuffer, options) => parseLAS(arrayBuffer, options),
-  parseSync: parseLAS,
-  options: {}
-};
-
-export const LASWorkerLoader = {
-  ...LAS,
+  test: 'LAS',
   options: {
     las: {
       workerUrl: `https://unpkg.com/@loaders.gl/las@${VERSION}/dist/las-loader.worker.js`
     }
   }
 };
+
+export const LASLoader = {
+  ...LASWorkerLoader,
+  parse: async (arrayBuffer, options) => parseLAS(arrayBuffer, options),
+  parseSync: parseLAS
+};
diff --git a/modules/las/src/lib/parse-las.js b/modules/las/src/lib/parse-las.js
index 0db0898189..3b4c7d0a74 100644
--- a/modules/las/src/lib/parse-las.js
+++ b/modules/las/src/lib/parse-las.js
@@ -12,7 +12,8 @@ export default function parseLAS(arraybuffer, options = {}) {
   let originalHeader;
   const result = {};
 
-  const {skip = 1, onProgress} = options;
+  const {onProgress} = options;
+  const {skip = 1} = options.las || {};
 
   parseLASChunked(arraybuffer, skip, (decoder, header) => {
     if (!originalHeader) {
diff --git a/modules/las/test/las-loader.spec.js b/modules/las/test/las-loader.spec.js
index 44d7054312..27fcd42c26 100644
--- a/modules/las/test/las-loader.spec.js
+++ b/modules/las/test/las-loader.spec.js
@@ -2,11 +2,17 @@ import test from 'tape-promise/tape';
 
 import {validateLoader, validatePointCloudCategoryData} from 'test/common/conformance';
 
-import {fetchFile, parse, load} from '@loaders.gl/core';
 import {LASLoader, LASWorkerLoader} from '@loaders.gl/las';
+import {setLoaderOptions, fetchFile, parse, load} from '@loaders.gl/core';
 
 const LAS_BINARY_URL = '@loaders.gl/las/test/data/indoor.laz';
 
+setLoaderOptions({
+  las: {
+    workerUrl: 'modules/las/dist/las-loader.worker.js'
+  }
+});
+
 test('LASLoader#loader conformance', t => {
   validateLoader(t, LASLoader, 'LASLoader');
   validateLoader(t, LASWorkerLoader, 'LASWorkerLoader');
@@ -14,7 +20,7 @@ test('LASLoader#loader conformance', t => {
 });
 
 test('LASLoader#parse(binary)', async t => {
-  const data = await parse(fetchFile(LAS_BINARY_URL), LASLoader, {skip: 10});
+  const data = await parse(fetchFile(LAS_BINARY_URL), LASLoader, {las: {skip: 10}, worker: false});
   validatePointCloudCategoryData(t, data);
 
   t.is(data.header.vertexCount, data.loaderData.header.totalRead, 'Original header was found');
@@ -26,7 +32,7 @@ test('LASLoader#parse(binary)', async t => {
   t.end();
 });
 
-test('LASWorkerLoader#load(binary)', async t => {
+test('LASWorkerLoader#load(worker)', async t => {
   if (typeof Worker === 'undefined') {
     t.comment('Worker is not usable in non-browser environments');
     t.end();
@@ -37,8 +43,7 @@ test('LASWorkerLoader#load(binary)', async t => {
     las: {
       workerUrl: 'modules/las/dist/las-loader.worker.js',
       skip: 10
-    },
-    skip: 10
+    }
   });
 
   validatePointCloudCategoryData(t, data);
diff --git a/modules/loader-utils/src/lib/create-worker.js b/modules/loader-utils/src/lib/create-worker.js
index 27358488e8..e3599a79b2 100644
--- a/modules/loader-utils/src/lib/create-worker.js
+++ b/modules/loader-utils/src/lib/create-worker.js
@@ -86,7 +86,7 @@ async function parseData({loader, arraybuffer, byteOffset, byteLength, options,
     throw new Error(`Could not load data with ${loader.name} loader`);
   }
 
-  return await parser(data, options, context, loader);
+  return await parser(data, {...options, worker: false}, context, loader);
 }
 
 // Filter out noise messages sent to workers
diff --git a/modules/obj/src/obj-loader.js b/modules/obj/src/obj-loader.js
index 59f2cbd8e5..e137c7cc0e 100644
--- a/modules/obj/src/obj-loader.js
+++ b/modules/obj/src/obj-loader.js
@@ -4,23 +4,13 @@ import loadOBJ from './lib/load-obj';
 
 const VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';
 
-const OBJ = {
+export const OBJWorkerLoader = {
   id: 'obj',
   name: 'OBJ',
   version: VERSION,
   extensions: ['obj'],
   mimeType: 'text/plain',
-  testText: testOBJFile
-};
-
-export const OBJLoader = {
-  ...OBJ,
-  parse: async (arrayBuffer, options) => loadOBJ(new TextDecoder().decode(arrayBuffer), options),
-  parseTextSync: loadOBJ
-};
-
-export const OBJWorkerLoader = {
-  ...OBJ,
+  testText: testOBJFile,
   options: {
     obj: {
       workerUrl: `https://unpkg.com/@loaders.gl/obj@${VERSION}/dist/obj-loader.worker.js`
@@ -28,6 +18,12 @@ export const OBJWorkerLoader = {
   }
 };
 
+export const OBJLoader = {
+  ...OBJWorkerLoader,
+  parse: async (arrayBuffer, options) => loadOBJ(new TextDecoder().decode(arrayBuffer), options),
+  parseTextSync: loadOBJ
+};
+
 function testOBJFile(text) {
   // TODO - There could be comment line first
   return text[0] === 'v';
diff --git a/modules/obj/test/obj-loader.spec.js b/modules/obj/test/obj-loader.spec.js
index 562ca6e291..f056a16a39 100644
--- a/modules/obj/test/obj-loader.spec.js
+++ b/modules/obj/test/obj-loader.spec.js
@@ -3,12 +3,18 @@ import test from 'tape-promise/tape';
 
 import {validateLoader, validatePointCloudCategoryData} from 'test/common/conformance';
 import {OBJLoader, OBJWorkerLoader} from '@loaders.gl/obj';
-import {load} from '@loaders.gl/core';
+import {setLoaderOptions, load} from '@loaders.gl/core';
 
 const OBJ_ASCII_URL = '@loaders.gl/obj/test/data/bunny.obj';
 const OBJ_NORMALS_URL = '@loaders.gl/obj/test/data/cube.obj';
 const OBJ_MULTI_PART_URL = '@loaders.gl/obj/test/data/magnolia.obj';
 
+setLoaderOptions({
+  obj: {
+    workerUrl: 'modules/obj/dist/obj-loader.worker.js'
+  }
+});
+
 test('OBJLoader#loader objects', async t => {
   validateLoader(t, OBJLoader, 'OBJLoader');
   validateLoader(t, OBJWorkerLoader, 'OBJWorkerLoader');
@@ -55,11 +61,7 @@ test('OBJWorkerLoader#parse(text)', async t => {
     return;
   }
 
-  const data = await load(OBJ_ASCII_URL, OBJWorkerLoader, {
-    obj: {
-      workerUrl: 'modules/obj/dist/obj-loader.worker.js'
-    }
-  });
+  const data = await load(OBJ_ASCII_URL, OBJWorkerLoader);
 
   validatePointCloudCategoryData(t, data);
 
diff --git a/modules/pcd/src/pcd-loader.js b/modules/pcd/src/pcd-loader.js
index 1398f95775..2b938ffa0f 100644
--- a/modules/pcd/src/pcd-loader.js
+++ b/modules/pcd/src/pcd-loader.js
@@ -3,28 +3,21 @@ const VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';
 
 import parsePCDSync from './lib/parse-pcd';
 
-const PCD = {
+export const PCDWorkerLoader = {
   id: 'pcd',
   name: 'PCD',
   version: VERSION,
   extensions: ['pcd'],
-  mimeType: 'text/plain'
-};
-
-export const PCDLoader = {
-  ...PCD,
-  parse: async (arrayBuffer, options) => parsePCDSync(arrayBuffer, options),
-  parseSync: parsePCDSync,
-  options: {
-    pcd: {}
-  }
-};
-
-export const PCDWorkerLoader = {
-  ...PCD,
+  mimeType: 'text/plain',
   options: {
     pcd: {
       workerUrl: `https://unpkg.com/@loaders.gl/pcd@${VERSION}/dist/pcd-loader.worker.js`
     }
   }
 };
+
+export const PCDLoader = {
+  ...PCDWorkerLoader,
+  parse: async (arrayBuffer, options) => parsePCDSync(arrayBuffer, options),
+  parseSync: parsePCDSync
+};
diff --git a/modules/pcd/test/pcd-loader.spec.js b/modules/pcd/test/pcd-loader.spec.js
index 775c6574ae..4cdb0daf2e 100644
--- a/modules/pcd/test/pcd-loader.spec.js
+++ b/modules/pcd/test/pcd-loader.spec.js
@@ -3,11 +3,17 @@ import test from 'tape-promise/tape';
 
 import {validateLoader, validatePointCloudCategoryData} from 'test/common/conformance';
 import {PCDLoader, PCDWorkerLoader} from '@loaders.gl/pcd';
-import {fetchFile, parse, load} from '@loaders.gl/core';
+import {setLoaderOptions, fetchFile, parse, load} from '@loaders.gl/core';
 
 const PCD_ASCII_URL = '@loaders.gl/pcd/test/data/simple-ascii.pcd';
 const PCD_BINARY_URL = '@loaders.gl/pcd/test/data/Zaghetto.pcd';
 
+setLoaderOptions({
+  pcd: {
+    workerUrl: 'modules/pcd/dist/pcd-loader.worker.js'
+  }
+});
+
 test('PCDLoader#loader conformance', t => {
   validateLoader(t, PCDLoader, 'PCDLoader');
   validateLoader(t, PCDWorkerLoader, 'PCDWorkerLoader');
@@ -28,7 +34,7 @@
 });
 
 test('PCDLoader#parse(binary)', async t => {
-  const data = await parse(fetchFile(PCD_BINARY_URL), PCDLoader);
+  const data = await parse(fetchFile(PCD_BINARY_URL), PCDLoader, {worker: false});
   validatePointCloudCategoryData(t, data);
 
   t.equal(data.mode, 0, 'mode is POINTS (0)');
@@ -45,11 +51,7 @@ test('PCDWorkerLoader#parse(binary)', async t => {
     return;
   }
 
-  const data = await load(PCD_BINARY_URL, PCDWorkerLoader, {
-    pcd: {
-      workerUrl: 'modules/pcd/dist/pcd-loader.worker.js'
-    }
-  });
+  const data = await load(PCD_BINARY_URL, PCDWorkerLoader);
 
   validatePointCloudCategoryData(t, data);
   t.equal(data.mode, 0, 'mode is POINTS (0)');
diff --git a/modules/ply/src/ply-loader.js b/modules/ply/src/ply-loader.js
index f3760a0946..ce2820cc14 100644
--- a/modules/ply/src/ply-loader.js
+++ b/modules/ply/src/ply-loader.js
@@ -7,7 +7,7 @@ import parsePLY from './lib/parse-ply';
 
 const VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';
 
-export const PLY = {
+export const PLYWorkerLoader = {
   id: 'ply',
   name: 'PLY',
   version: VERSION,
@@ -16,23 +16,18 @@ export const PLY = {
   // mimeType: 'application/octet-stream', TODO - binary version?
   text: true,
   binary: true,
-  test: 'ply'
+  test: 'ply',
+  options: {
+    ply: {
+      workerUrl: `https://unpkg.com/@loaders.gl/ply@${VERSION}/dist/ply-loader.worker.js`
+    }
+  }
 };
 
 export const PLYLoader = {
-  ...PLY,
+  ...PLYWorkerLoader,
   // Note: parsePLY supports both text and binary
   parse: async (arrayBuffer, options) => parsePLY(arrayBuffer, options), // TODO - this may not detect text correctly?
   parseTextSync: parsePLY,
-  parseSync: parsePLY,
-  options: {}
-};
-
-export const PLYWorkerLoader = {
-  ...PLY,
-  options: {
-    ply: {
-      workerUrl: `https://unpkg.com/@loaders.gl/ply@${VERSION}/dist/ply-loader.worker.js`
-    }
-  }
+  parseSync: parsePLY
 };
diff --git a/modules/ply/test/ply-loader.spec.js b/modules/ply/test/ply-loader.spec.js
index eabf89ded1..6b7c2c50e7 100644
--- a/modules/ply/test/ply-loader.spec.js
+++ b/modules/ply/test/ply-loader.spec.js
@@ -3,13 +3,19 @@ import test from 'tape-promise/tape';
 
 import {validateLoader, validatePointCloudCategoryData} from 'test/common/conformance';
 import {PLYLoader, PLYWorkerLoader, _PLYStreamLoader} from '@loaders.gl/ply';
-import {fetchFile, parse, parseSync, load} from '@loaders.gl/core';
+import {setLoaderOptions, fetchFile, parse, parseSync, load} from '@loaders.gl/core';
 import {getStreamIterator} from '@loaders.gl/core';
 
 const PLY_CUBE_ATT_URL = '@loaders.gl/ply/test/data/cube_att.ply';
 const PLY_BUN_ZIPPER_URL = '@loaders.gl/ply/test/data/bun_zipper.ply';
 const PLY_BUN_BINARY_URL = '@loaders.gl/ply/test/data/bunny.ply';
 
+setLoaderOptions({
+  ply: {
+    workerUrl: 'modules/ply/dist/ply-loader.worker.js'
+  }
+});
+
 function validateTextPLY(t, data) {
   t.equal(data.indices.value.length, 36, 'Indices found');
   t.equal(data.attributes.POSITION.value.length, 72, 'POSITION attribute was found');
@@ -62,11 +68,7 @@ test('PLYLoader#parse(WORKER)', async t => {
     return;
   }
 
-  const data = await load(PLY_BUN_ZIPPER_URL, PLYWorkerLoader, {
-    ply: {
-      workerUrl: 'modules/ply/dist/ply-loader.worker.js'
-    }
-  });
+  const data = await load(PLY_BUN_ZIPPER_URL, PLYWorkerLoader);
 
   validatePointCloudCategoryData(t, data);
   t.equal(data.attributes.POSITION.value.length, 107841, 'POSITION attribute was found');