From 334878f75ea6cb2bdfe972bb0ef9b01708dff97e Mon Sep 17 00:00:00 2001 From: Ib Green Date: Sun, 7 Apr 2019 09:39:48 -0700 Subject: [PATCH] API Cleanup --- .../3d-tile-loaders/tile-3d-loader.md | 10 ++--- .../core/{encode-file.md => encode.md} | 6 +-- docs/api-reference/core/fetch-file.md | 8 ++-- docs/api-reference/core/load-file.md | 32 ---------------- docs/api-reference/core/load.md | 26 +++++++++++++ .../core/{parse-file.md => parse.md} | 24 ++++++------ docs/api-reference/core/register-loaders.md | 8 ++-- docs/api-reference/core/save-file.md | 33 ---------------- docs/api-reference/core/save.md | 33 ++++++++++++++++ .../geojson-loaders/kml-loader.md | 2 +- docs/api-reference/gltf-loaders/glb-parser.md | 4 +- .../api-reference/gltf-loaders/gltf-loader.md | 8 ++-- .../api-reference/gltf-loaders/gltf-parser.md | 4 +- .../api-reference/gltf-loaders/gltf-writer.md | 4 +- docs/api-reference/images/load-image.md | 17 +++++++++ .../mesh-loaders/draco-loader.md | 4 +- .../mesh-loaders/draco-writer.md | 4 +- docs/api-reference/mesh-loaders/las-loader.md | 4 +- docs/api-reference/mesh-loaders/obj-loader.md | 4 +- docs/api-reference/mesh-loaders/pcd-loader.md | 4 +- docs/api-reference/mesh-loaders/ply-loader.md | 4 +- docs/api-reference/misc-loaders/zip-loader.md | 4 +- docs/api-reference/misc-loaders/zip-writer.md | 4 +- .../specifications/writer-object-format.md | 10 ++--- .../api-reference/table-loaders/csv-loader.md | 8 ++-- docs/developer-guide/about-loaders.md | 6 +-- docs/table-of-contents.json | 8 ++-- modules/arrow/test/arrow-loader.spec.js | 36 +++++++++--------- modules/core/src/index.js | 35 +++++++++++------ .../node/buffer-to-array-buffer.js | 10 +++++ modules/core/src/lib/encode-file.js | 24 ------------ modules/core/src/lib/encode.js | 25 ++++++++++++ .../core/src/lib/{load-file.js => load.js} | 21 ++++------ .../core/src/lib/{parse-file.js => parse.js} | 16 ++++---- modules/core/src/lib/save-file.js | 12 ------ modules/core/src/lib/save.js | 12 ++++++ modules/core/test/index.js | 4 +- modules/core/test/lib/load-file.spec.js | 22 ----------- modules/core/test/lib/load.spec.js | 22 +++++++++++ .../test/lib/loader-utils/auto-parse.spec.js | 6 +-- modules/csv/test/csv-loader-arrow.spec.js | 14 +++---- modules/csv/test/csv-loader.bench.js | 4 +- modules/csv/test/csv-loader.spec.js | 26 ++++++------- modules/csv/test/csv-writer.spec.js | 4 +- .../test/draco-compression-ratio.spec.js | 6 +-- modules/draco/test/draco-loader.spec.js | 6 +-- modules/draco/test/draco-writer.spec.js | 12 +++--- modules/draco/test/draco.bench.js | 6 +-- .../src/json-model-loader/json-loader.js | 4 +- modules/gltf/test/gltf/gltf-loader.spec.js | 14 +++---- modules/images/test/data/test.png | Bin 95 -> 0 bytes modules/images/test/index.js | 2 +- .../images/test/load-image/load-image.spec.js | 6 +-- .../kml/test/kml-as-geojson-loader.spec.js | 4 +- modules/kml/test/kml-loader.spec.js | 4 +- modules/las/src/laz-perf.js | 4 +- modules/las/test/las-loader.spec.js | 6 +-- modules/obj/test/obj-loader.spec.js | 6 +-- modules/pcd/test/pcd-loader.spec.js | 8 ++-- modules/ply/test/ply-loader.bench.js | 10 ++--- modules/ply/test/ply-loader.spec.js | 14 +++---- modules/zip/test/zip-writer-loader.spec.js | 6 +-- test/render/test-cases/mesh.js | 4 +- test/render/test-cases/point-cloud.js | 8 ++-- test/size/import-nothing.js | 4 +- website-ocular/src/mdRoutes.js | 2 +- 66 files changed, 370 insertions(+), 342 deletions(-) rename docs/api-reference/core/{encode-file.md => encode.md} (79%) delete mode 100644 
docs/api-reference/core/load-file.md create mode 100644 docs/api-reference/core/load.md rename docs/api-reference/core/{parse-file.md => parse.md} (84%) delete mode 100644 docs/api-reference/core/save-file.md create mode 100644 docs/api-reference/core/save.md create mode 100644 docs/api-reference/images/load-image.md create mode 100644 modules/core/src/javascript-utils/node/buffer-to-array-buffer.js delete mode 100644 modules/core/src/lib/encode-file.js create mode 100644 modules/core/src/lib/encode.js rename modules/core/src/lib/{load-file.js => load.js} (55%) rename modules/core/src/lib/{parse-file.js => parse.js} (86%) delete mode 100644 modules/core/src/lib/save-file.js create mode 100644 modules/core/src/lib/save.js delete mode 100644 modules/core/test/lib/load-file.spec.js create mode 100644 modules/core/test/lib/load.spec.js diff --git a/docs/api-reference/3d-tile-loaders/tile-3d-loader.md b/docs/api-reference/3d-tile-loaders/tile-3d-loader.md index 9c048c18e2..8ad4aadb12 100644 --- a/docs/api-reference/3d-tile-loaders/tile-3d-loader.md +++ b/docs/api-reference/3d-tile-loaders/tile-3d-loader.md @@ -15,18 +15,18 @@ Parses a [3D tile](https://github.com/AnalyticalGraphicsInc/3d-tiles). glTF file ## Usage ``` -import {loadFile} from '@loaders.gl/core'; +import {load} from '@loaders.gl/core'; import {Tile3DLoader} from '@loaders.gl/3d-tiles'; -const gltf = await loadFile(url, Tile3DLoader); +const gltf = await load(url, Tile3DLoader); ``` To decompress tiles containing Draco compressed glTF models or Draco compressed point clouds: ``` -import {loadFile} from '@loaders.gl/core'; +import {load} from '@loaders.gl/core'; import {Tile3DLoader} from '@loaders.gl/3d-tiles'; import {DracoDecoder} from '@loaders.gl/draco'; -const gltf = loadFile(url, Tile3DLoader, {DracoDecoder, decompress: true}); +const gltf = await load(url, Tile3DLoader, {DracoDecoder, decompress: true}); ``` ## Options @@ -48,7 +48,7 @@ const gltf = loadFile(url, Tile3DLoader, {DracoDecoder, decompress: true}); Returns a JSON object with "embedded" binary data in the form of typed JavaScript arrays. -When parsed asynchronously (not using `loadSync` or `parseSync`): +When parsed asynchronously (i.e. not using `parseSync`): - linked binary resources will be loaded and resolved (if url is available). - base64 encoded binary data inside the JSON payload will be decoded diff --git a/docs/api-reference/core/encode-file.md b/docs/api-reference/core/encode.md similarity index 79% rename from docs/api-reference/core/encode-file.md rename to docs/api-reference/core/encode.md index 67854ae1af..e2a3624209 100644 --- a/docs/api-reference/core/encode-file.md +++ b/docs/api-reference/core/encode.md @@ -1,10 +1,10 @@ -# encodeFile +# encode > Needs update ## Functions -### encodeFile(fileData : ArrayBuffer | String, writer : Object | Array [, options : Object [, url : String]]) : Promise +### encode(fileData : ArrayBuffer | String, writer : Object | Array [, options : Object [, url : String]]) : Promise Encodes data asynchronously using the provided writer. - `options.log`=`console` Any object with methods `log`, `info`, `warn` and `error`. By default set to `console`. Setting log to `null` will turn off logging.
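A hedged usage sketch for `encode` (assumes the `DracoWriter` from this repo and an `attributes` object holding typed-array mesh data, as in the Draco writer docs and tests later in this patch; `encode` returns a `Promise`, so call it from an async context):

```js
import {encode} from '@loaders.gl/core';
import {DracoWriter} from '@loaders.gl/draco';

// `attributes` is assumed to be a map of typed arrays, e.g. {POSITION: Float32Array}
const arrayBuffer = await encode({attributes}, DracoWriter, {pointcloud: true});
```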
-### encodeFileSync(fileData : ArrayBuffer | String, writer : Object | Array, [, options : Object [, url : String]]) : any +### encodeSync(fileData : ArrayBuffer | String, writer : Object | Array [, options : Object [, url : String]]) : any Encodes data synchronously using the provided writer, if possible. If not, returns `null`, in which case asynchronous loading is required. diff --git a/docs/api-reference/core/fetch-file.md b/docs/api-reference/core/fetch-file.md index f9e7d79f50..f3f74d0692 100644 --- a/docs/api-reference/core/fetch-file.md +++ b/docs/api-reference/core/fetch-file.md @@ -7,10 +7,10 @@ Small optional file reading utilities that work consistently across browser (bot Use the `fetchFile` function as follows: ```js -import {fetchFile, parseFile} from '@loaders.gl/core'; +import {fetchFile, parse} from '@loaders.gl/core'; import {OBJLoader} from '@loaders.gl/obj'; -data = await parseFile(fetchFile(url), OBJLoader); +data = await parse(fetchFile(url), OBJLoader); // Application code here ... ``` Note that if you don't care about Node.js compatibility, you can just use the browser's built-in `fetch` directly. ```js -import {parseFile} from '@loaders.gl/core'; +import {parse} from '@loaders.gl/core'; import {OBJLoader} from '@loaders.gl/obj'; -data = await parseFile(fetch(url), OBJLoader); +data = await parse(fetch(url), OBJLoader); // Application code here ... ``` diff --git a/docs/api-reference/core/load-file.md b/docs/api-reference/core/load-file.md deleted file mode 100644 index 802fc07078..0000000000 --- a/docs/api-reference/core/load-file.md +++ /dev/null @@ -1,32 +0,0 @@ -# loadFile - -`loadFile` and `loadFileSync` function can be used with any _loader object_. They takes a `url` and one or more _loader objects_, checks what type of data that loader prefers to work on (e.g. text, JSON, binary, stream, ...), loads the data in the appropriate way, and passes it to the loader. - -### loadFile(url : String | File, loaders : Object | Object[][, options : object]) : Promise - -### loadFile(url : String | File [, options : Object]) : Promise - -The `loadFile` function is used to load and parse data with a specific _loader object_. An array of loader objects can be provided, in which case `loadFile` will attempt to autodetect which loader is appropriate for the file. - -The `loaders` parameter can also be ommitted, in which case any _loader objects_ previously registered with [`registerLoaders`](docs/api-reference/core/register-loaders) will be used. - -- `url` - Can be a string, either a data url or a request url, or in Node.js, a file name, or in the browser, a File object. -- `data` - loaded data, either in binary or text format. -- `loaders` - can be a single loader or an array of loaders. If ommitted, will use the list of registered loaders (see `registerLoaders`) -- `options` - optional, contains both options for the read process and options for the loader (see documentation of the specific loader). -- `options.dataType`=`arraybuffer` - By default reads as binary. Set to 'text' to read as text. - -Returns: - -- Return value depends on the _loader object_ category - -Notes: - -- Any path prefix set by `setPathPrefix` will be appended to relative urls. -- `loadFile` takes a `url` and a loader object, checks what type of data that loader prefers to work on (e.g. text, binary, stream, ...), loads the data in the appropriate way, and passes it to the loader.
- -### loadFileSync(url : String [, options : Object]) : ArrayBuffer | String - -Similar to `loadFile` except loads and parses data synchronously. - -Note that for `loadFileSync` to work, the `url` needs to be loadable synchronously _and_ the loader used must support synchronous parsing. Synchronous loading only works on data URLs or files in Node.js. In many cases, the asynchronous `loadFile` is more appropriate. diff --git a/docs/api-reference/core/load.md b/docs/api-reference/core/load.md new file mode 100644 index 0000000000..419237f9ae --- /dev/null +++ b/docs/api-reference/core/load.md @@ -0,0 +1,26 @@ +# load + +The `load` function can be used with any _loader object_. It takes a `url` and one or more _loader objects_, checks what type of data that loader prefers to work on (e.g. text, JSON, binary, stream, ...), loads the data in the appropriate way, and passes it to the loader. + +### load(url : String | File, loaders : Object | Object[] [, options : Object]) : Promise + +### load(url : String | File [, options : Object]) : Promise + +The `load` function is used to load and parse data with a specific _loader object_. An array of loader objects can be provided, in which case `load` will attempt to autodetect which loader is appropriate for the file. + +The `loaders` parameter can also be omitted, in which case any _loader objects_ previously registered with [`registerLoaders`](docs/api-reference/core/register-loaders) will be used. + +- `url` - Can be a string, either a data url or a request url, or in Node.js, a file name, or in the browser, a File object. + +- `data` - loaded data, either in binary or text format. +- `loaders` - can be a single loader or an array of loaders. If omitted, will use the list of registered loaders (see `registerLoaders`) +- `options` - optional, contains both options for the read process and options for the loader (see documentation of the specific loader). +- `options.dataType`=`arraybuffer` - By default reads as binary. Set to 'text' to read as text. + +Returns: + +- Return value depends on the _loader object_ category + +Notes: + +- Any path prefix set by `setPathPrefix` will be prepended to relative URLs. +- `load` takes a `url` and a loader object, checks what type of data that loader prefers to work on (e.g. text, binary, stream, ...), loads the data in the appropriate way, and passes it to the loader. diff --git a/docs/api-reference/core/parse-file.md b/docs/api-reference/core/parse.md similarity index 84% rename from docs/api-reference/core/parse-file.md rename to docs/api-reference/core/parse.md index 8de7ccf37c..87420a980f 100644 --- a/docs/api-reference/core/parse-file.md +++ b/docs/api-reference/core/parse.md @@ -1,16 +1,16 @@ -# parseFile +# parse -This function parses already loaded data. As a special case, it can also load (and then parse) data from `fetch` or `fetchFile` response object). +This function parses already loaded data. As a special case, it can also load (and then parse) data from a `fetch` or `fetchFile` response object. ## Usage The return value from `fetch` or `fetchFile` is a `Promise` that resolves to the fetch response object and can be passed directly to the non-sync parser functions: ```js -import {fetchFile, parseFile} from '@loaders.gl/core'; +import {fetchFile, parse} from '@loaders.gl/core'; import {OBJLoader} from '@loaders.gl/obj'; -data = await parseFile(fetchFile(url), OBJLoader); +data = await parse(fetchFile(url), OBJLoader); // Application code here ...
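// Note: per fetch-file.md above, the response from the browser's built-in fetch()
// can be passed to parse() in exactly the same way:
// data = await parse(fetch(url), OBJLoader);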
``` @@ -18,10 +18,10 @@ data = await parseFile(fetchFile(url), OBJLoader); Batched (streaming) parsing is supported by some loaders: ```js -import {fetchFile, parseFileInBatches} from '@loaders.gl/core'; +import {fetchFile, parseInBatches} from '@loaders.gl/core'; import {CSVLoader} from '@loaders.gl/csv'; -const batchIterator = await parseFileInBatches(fetchFile(url), CSVLoader); +const batchIterator = await parseInBatches(fetchFile(url), CSVLoader); for await (const batch of batchIterator) { console.log(batch.length); } @@ -29,9 +29,9 @@ for await (const batch of batchIterator) { ## Functions -### parseFileInBatches(data : any, loaders : Object | Object\[] [, options : Object [, url : String]]) : AsyncIterator +### parseInBatches(data : any, loaders : Object | Object\[] [, options : Object [, url : String]]) : AsyncIterator -### parseFileInBatches(data : any [, options : Object [, url : String]]) : AsyncIterator +### parseInBatches(data : any [, options : Object [, url : String]]) : AsyncIterator > Batched loading is not supported by all _loader objects_ @@ -57,9 +57,9 @@ Returns: - Returns an async iterator that yields batches of data. The exact format for the batches depends on the _loader object_ category. -### parseFile(data : ArrayBuffer | String, loaders : Object | Object\[] [, options : Object [, url : String]]) : Promise +### parse(data : ArrayBuffer | String, loaders : Object | Object\[] [, options : Object [, url : String]]) : Promise -### parseFile(data : ArrayBuffer | String, [, options : Object [, url : String]]) : Promise +### parse(data : ArrayBuffer | String [, options : Object [, url : String]]) : Promise Parses data asynchronously using the provided loader. Used to parse data with a selected _loader object_. An array of `loaders` can be provided, in which case an attempt will be made to autodetect which loader is appropriate for the file (using url extension and header matching). @@ -84,9 +84,9 @@ Returns: - Return value depends on the _loader object_ category -### parseFileSync(fileData : ArrayBuffer | String, loaders : Object | Object\[], [, options : Object [, url : String]]) : any +### parseSync(fileData : ArrayBuffer | String, loaders : Object | Object\[] [, options : Object [, url : String]]) : any -### parseFileSync(fileData : ArrayBuffer | String, [, options : Object [, url : String]]) : any +### parseSync(fileData : ArrayBuffer | String [, options : Object [, url : String]]) : any > Synchronous parsing is not supported by all _loader objects_ diff --git a/docs/api-reference/core/register-loaders.md b/docs/api-reference/core/register-loaders.md index 192e8ab528..cddcc26d7e 100644 --- a/docs/api-reference/core/register-loaders.md +++ b/docs/api-reference/core/register-loaders.md @@ -2,7 +2,7 @@ The loader registry allows applications to cherry-pick which loaders to include in their application bundle by importing just the loaders they need and registering them during initialization. -Applications can then make all those imported loaders available (via format autodetection) to all subsequent `parseFile` and `loadFile` calls, without those calls having to specify which loaders to use. +Applications can then make all those imported loaders available (via format autodetection) to all subsequent `parse` and `load` calls, without those calls having to specify which loaders to use.
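For example, an application might register several loaders at once (a sketch; per the `registerLoaders` docs below, the function accepts a single loader or an array):

```js
import {registerLoaders} from '@loaders.gl/core';
import {CSVLoader} from '@loaders.gl/csv';
import {OBJLoader} from '@loaders.gl/obj';

// Register loaders during initialization; later parse()/load() calls
// can then autodetect the appropriate loader from the url and data.
registerLoaders([CSVLoader, OBJLoader]);
```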
## Usage ```js import {registerLoaders} from '@loaders.gl/core'; import {CSVLoader} from '@loaders.gl/csv'; registerLoaders(CSVLoader); ``` Some other file that needs to load CSV: ```js -import {loadFile} from '@loaders.gl/core'; +import {load} from '@loaders.gl/core'; // The pre-registered CSVLoader gets auto selected based on file extension... -const data = await loadFile('data.csv'); +const data = await load('data.csv'); ``` ## Functions ### registerLoaders(loaders : Object | Object[]) -Registers one or more _loader objects_ to a global _loader object registry_, these loaders will be used if no loader object is supplied to `parseFile` and `loadFile`. +Registers one or more _loader objects_ to a global _loader object registry_; these loaders will be used if no loader object is supplied to `parse` and `load`. - `loaders` - can be a single loader or an array of loaders. The specified loaders will be added to any previously registered loaders. diff --git a/docs/api-reference/core/save-file.md b/docs/api-reference/core/save-file.md deleted file mode 100644 index 4192c59978..0000000000 --- a/docs/api-reference/core/save-file.md +++ /dev/null @@ -1,33 +0,0 @@ -# saveFile - -> Needs update - -`saveFile` and `saveFileSync` function can be used with any writer. `saveFile` takes a `url` and a writer object, checks what type of data that writer prefers to work on (e.g. text, JSON, binary, stream, ...), saves the data in the appropriate way, and passes it to the writer. - -## Functions - -### saveFile(url : String | File, writer : Object [, options : Object]) : Promise - -The `saveFile` function can be used with any writer. - -`saveFile` takes a `url` and a writer object, checks what type of data that writer prefers to work on (e.g. text, JSON, binary, stream, ...), saves the data in the appropriate way, and passes it to the writer. - -- `url` - Can be a string, either a data url or a request url, or in Node.js, a file name, or in the browser, a File object. -- `data` - saveed data, either in binary or text format. -- `writer` - can be a single writer or an array of writers. -- `options` - optional, contains both options for the read process and options for the writer (see documentation of the specific writer). -- `options.dataType`=`arraybuffer` - By default reads as binary. Set to 'text' to read as text. - -Returns: - -- Return value depends on the category - -Notes: - -- Any path prefix set by `setPathPrefix` will be appended to relative urls. - -### saveFileSync(url : String [, options : Object]) : ArrayBuffer | String - -Similar to `saveFile` except saves and parses data synchronously. - -Note that for `saveFileSync` to work, the `url` needs to be saveable synchronously _and_ the writer used must support synchronous parsing. Synchronous saveing only works on data URLs or files in Node.js. In many cases, the asynchronous `saveFile` is more appropriate. diff --git a/docs/api-reference/core/save.md b/docs/api-reference/core/save.md new file mode 100644 index 0000000000..c643727797 --- /dev/null +++ b/docs/api-reference/core/save.md @@ -0,0 +1,33 @@ +# save + +> Needs update + +`save` and `saveSync` functions can be used with any writer. `save` takes a `url` and a writer object, checks what type of data that writer prefers to work on (e.g. text, JSON, binary, stream, ...), saves the data in the appropriate way, and passes it to the writer. + +## Functions + +### save(url : String | File, writer : Object [, options : Object]) : Promise + +The `save` function can be used with any writer.
+ +`save` takes a `url` and a writer object, checks what type of data that writer prefers to work on (e.g. text, JSON, binary, stream, ...), saves the data in the appropriate way, and passes it to the writer. + +- `url` - Can be a string, either a data url or a request url, or in Node.js, a file name, or in the browser, a File object. +- `data` - data to be saved, either in binary or text format. +- `writer` - can be a single writer or an array of writers. +- `options` - optional, contains both options for the read process and options for the writer (see documentation of the specific writer). +- `options.dataType`=`arraybuffer` - By default reads as binary. Set to 'text' to read as text. + +Returns: + +- Return value depends on the category + +Notes: + +- Any path prefix set by `setPathPrefix` will be prepended to relative URLs. + +### saveSync(url : String [, options : Object]) : ArrayBuffer | String + +Similar to `save` except encodes and saves data synchronously. + +Note that for `saveSync` to work, the `url` needs to be writable synchronously _and_ the writer used must support synchronous encoding. Synchronous saving only works on data URLs or files in Node.js. In many cases, the asynchronous `save` is more appropriate. diff --git a/docs/api-reference/geojson-loaders/kml-loader.md b/docs/api-reference/geojson-loaders/kml-loader.md index 960c6f9066..29b64b6466 100644 --- a/docs/api-reference/geojson-loaders/kml-loader.md +++ b/docs/api-reference/geojson-loaders/kml-loader.md @@ -11,7 +11,7 @@ References: ```js import {KMLLoader} from '@loaders.gl/kml'; -import {loadFileSync} from '@loaders.gl/core'; +import {load} from '@loaders.gl/core'; ``` ## Structure of Loaded Data diff --git a/docs/api-reference/gltf-loaders/glb-parser.md b/docs/api-reference/gltf-loaders/glb-parser.md index c11c579984..b40d58ccb8 100644 --- a/docs/api-reference/gltf-loaders/glb-parser.md +++ b/docs/api-reference/gltf-loaders/glb-parser.md @@ -12,13 +12,13 @@ References: ```js import {GLBParser} from '@loaders.gl/gltf'; -import {loadFileSync} from '@loaders.gl/core'; +import {load} from '@loaders.gl/core'; // Create a parser const glbParser = new GLBParser(); // Load and parse a file -const GLB_BINARY = loadFileSync(...); +const GLB_BINARY = await load(...); glbParser.parse(GLB_BINARY); // Get the complete GLB JSON structure diff --git a/docs/api-reference/gltf-loaders/gltf-loader.md b/docs/api-reference/gltf-loaders/gltf-loader.md index 535414364e..c2db1dd6b2 100644 --- a/docs/api-reference/gltf-loaders/gltf-loader.md +++ b/docs/api-reference/gltf-loaders/gltf-loader.md @@ -15,18 +15,18 @@ Parses a glTF file into a hierarchical scenegraph description that can be used t ## Usage ``` -import {loadFile} from '@loaders.gl/core'; +import {load} from '@loaders.gl/core'; import {GLTFLoader} from '@loaders.gl/gltf'; -const gltf = await loadFile(url, GLTFLoader); +const gltf = await load(url, GLTFLoader); ``` To decompress Draco compressed meshes: ``` -import {loadFile} from '@loaders.gl/core'; +import {load} from '@loaders.gl/core'; import {GLTFLoader} from '@loaders.gl/gltf'; import {DracoLoader} from '@loaders.gl/draco'; -const gltf = loadFile(url, GLTFLoader, {DracoLoader, decompress: true}); +const gltf = await load(url, GLTFLoader, {DracoLoader, decompress: true}); ``` ## Options diff --git a/docs/api-reference/gltf-loaders/gltf-parser.md b/docs/api-reference/gltf-loaders/gltf-parser.md index d3ff981f79..9a678ee0f0 100644 --- a/docs/api-reference/gltf-loaders/gltf-parser.md +++ b/docs/api-reference/gltf-loaders/gltf-parser.md @@
-17,13 +17,13 @@ References: ```js import {GLTFParser} from '@loaders.gl/gltf'; -import {loadFileSync} from '@loaders.gl/core'; +import {load} from '@loaders.gl/core'; // Create a parser const gltfParser = new GLTFParser(); // Load and parse a file -const GLTF_BINARY = loadFileSync(...); +const GLTF_BINARY = await load(...); gltfParser.parseSync(GLTF_BINARY); // Get the complete glTF JSON structure diff --git a/docs/api-reference/gltf-loaders/gltf-writer.md b/docs/api-reference/gltf-loaders/gltf-writer.md index a3ff515eec..01313cb7a1 100644 --- a/docs/api-reference/gltf-loaders/gltf-writer.md +++ b/docs/api-reference/gltf-loaders/gltf-writer.md @@ -16,9 +16,9 @@ The `GLTFWriter` is a writer for glTF scenegraphs. ```js import {GLTFWriter} from '@loaders.gl/gltf'; -import {encodeFileSync} from '@loaders.gl/core'; +import {encodeSync} from '@loaders.gl/core'; -const arrayBuffer = encodeFileSync(gltf, GLTFWriter); +const arrayBuffer = encodeSync(gltf, GLTFWriter); ``` ## Options diff --git a/docs/api-reference/images/load-image.md b/docs/api-reference/images/load-image.md new file mode 100644 index 0000000000..365636f78a --- /dev/null +++ b/docs/api-reference/images/load-image.md @@ -0,0 +1,17 @@ +# loadImage + +## Functions + +### loadImage(url : String [, options : Object]) : Image / HTMLImageElement + +

+> browser only

+ +This is a minimal image loading function that only works on the browser main thread. For image loading and writing that works across both the browser and Node.js, refer to the `@loaders.gl/images` module. + +`options.crossOrigin` - Provides control of the request's cross-origin field. + +Notes: + +- Any path prefix set by `setPathPrefix` will be prepended to relative URLs. diff --git a/docs/api-reference/mesh-loaders/draco-loader.md b/docs/api-reference/mesh-loaders/draco-loader.md index 00b0d885b3..6febe04dcd 100644 --- a/docs/api-reference/mesh-loaders/draco-loader.md +++ b/docs/api-reference/mesh-loaders/draco-loader.md @@ -16,9 +16,9 @@ Decodes a mesh or point cloud (maps of attributes) using [DRACO compression](htt ```js import {DracoLoader} from '@loaders.gl/draco'; -import {loadFile} from '@loaders.gl/core'; +import {load} from '@loaders.gl/core'; -const data = await loadFile(url, DracoLoader, options); +const data = await load(url, DracoLoader, options); ``` ## Options diff --git a/docs/api-reference/mesh-loaders/draco-writer.md b/docs/api-reference/mesh-loaders/draco-writer.md index cee02ee23a..e554b83a6f 100644 --- a/docs/api-reference/mesh-loaders/draco-writer.md +++ b/docs/api-reference/mesh-loaders/draco-writer.md @@ -16,7 +16,7 @@ Encodes a mesh or point cloud (maps of attributes) using [Draco3D](https://googl ```js import {DracoWriter} from '@loaders.gl/draco'; -import {encodeFile} from '@loaders.gl/core'; +import {encode} from '@loaders.gl/core'; const mesh = { attributes: { @@ -24,7 +24,7 @@ const mesh = { } }; -const data = await encodeFile(mesh, DracoWriter, options); +const data = await encode(mesh, DracoWriter, options); ``` ## Options diff --git a/docs/api-reference/mesh-loaders/las-loader.md b/docs/api-reference/mesh-loaders/las-loader.md index dc1f9baff7..3389545034 100644 --- a/docs/api-reference/mesh-loaders/las-loader.md +++ b/docs/api-reference/mesh-loaders/las-loader.md @@ -18,9 +18,9 @@ Note: LAZ is the compressed version of LAS ```js import {LASLoader} from '@loaders.gl/las'; -import {loadFile} from '@loaders.gl/core'; +import {load} from '@loaders.gl/core'; -const data = await loadFile(url, LASLoader, options); +const data = await load(url, LASLoader, options); ``` ## Options diff --git a/docs/api-reference/mesh-loaders/obj-loader.md b/docs/api-reference/mesh-loaders/obj-loader.md index cfe5100a6f..6888becda7 100644 --- a/docs/api-reference/mesh-loaders/obj-loader.md +++ b/docs/api-reference/mesh-loaders/obj-loader.md @@ -16,9 +16,9 @@ This loader handles the OBJ half of the classic Wavefront OBJ/MTL format. The OB ```js import {OBJLoader} from '@loaders.gl/obj'; -import {loadFile} from '@loaders.gl/core'; +import {load} from '@loaders.gl/core'; -const data = await loadFile(url, OBJLoader); +const data = await load(url, OBJLoader); ``` ## Loader Options diff --git a/docs/api-reference/mesh-loaders/pcd-loader.md b/docs/api-reference/mesh-loaders/pcd-loader.md index 76343fde8f..f4c294f145 100644 --- a/docs/api-reference/mesh-loaders/pcd-loader.md +++ b/docs/api-reference/mesh-loaders/pcd-loader.md @@ -18,9 +18,9 @@ Note: Currently only `ascii` and `binary` subformats are supported.
Compressed b ```js import {PCDLoader} from '@loaders.gl/pcd'; -import {loadFile} from '@loaders.gl/core'; +import {load} from '@loaders.gl/core'; -const {header, attributes} = await loadFile(url, PCDLoader); +const {header, attributes} = await load(url, PCDLoader); // Application code here, e.g: // return new Geometry(attributes) ``` diff --git a/docs/api-reference/mesh-loaders/ply-loader.md b/docs/api-reference/mesh-loaders/ply-loader.md index 3e5fb5ebf6..1959ea2888 100644 --- a/docs/api-reference/mesh-loaders/ply-loader.md +++ b/docs/api-reference/mesh-loaders/ply-loader.md @@ -16,9 +16,9 @@ PLY is a computer file format known as the Polygon File Format or the Stanford T ```js import {PLYLoader} from '@loaders.gl/ply'; -import {loadFile} from '@loaders.gl/core'; +import {load} from '@loaders.gl/core'; -const data = await loadFile(url, PLYLoader); +const data = await load(url, PLYLoader); ``` ## Attribution/Credits diff --git a/docs/api-reference/misc-loaders/zip-loader.md b/docs/api-reference/misc-loaders/zip-loader.md index a33715df6d..dd753e7fe9 100644 --- a/docs/api-reference/misc-loaders/zip-loader.md +++ b/docs/api-reference/misc-loaders/zip-loader.md @@ -15,10 +15,10 @@ Decodes a Zip Archive into a file map. ## Usage ```js -import {parseFile} from '@loaders.gl/core'; +import {parse} from '@loaders.gl/core'; import {ZipLoader} from '@loaders.gl/zip'; -const fileMap = await parseFile(arrayBuffer, ZipLoader); +const fileMap = await parse(arrayBuffer, ZipLoader); for (const fileName in fileMap) { const fileData = fileMap[fileName]; // Do something with the subfile diff --git a/docs/api-reference/misc-loaders/zip-writer.md b/docs/api-reference/misc-loaders/zip-writer.md index 83fda31e84..87c2cf6abb 100644 --- a/docs/api-reference/misc-loaders/zip-writer.md +++ b/docs/api-reference/misc-loaders/zip-writer.md @@ -16,7 +16,7 @@ Encodes a filemap into a Zip Archive. Returns an `ArrayBuffer` that is a valid Z ## Usage ```js -import {encodeFile, writeFile} from '@loaders.gl/core'; +import {encode, writeFile} from '@loaders.gl/core'; import {ZipWriter} from '@loaders.gl/zip'; const FILEMAP = { @@ -24,7 +24,7 @@ const FILEMAP = { 'directory/filename2': ...
}; -const arrayBuffer = await encodeFile(FILE_MAP, ZipWriter) +const arrayBuffer = await encode(FILEMAP, ZipWriter); writeFile(zipFileName, arrayBuffer); ``` diff --git a/docs/api-reference/specifications/writer-object-format.md b/docs/api-reference/specifications/writer-object-format.md index 50e23cfa25..8d166ec791 100644 --- a/docs/api-reference/specifications/writer-object-format.md +++ b/docs/api-reference/specifications/writer-object-format.md @@ -12,10 +12,10 @@ To be compatible with `@loaders.gl/core` functions, writer objects need to confo ### Encoder Function -| Field | Type | Default | Description | -| ------------------------------- | ---------- | ------- | ---------------------- | -| `encodeSync` | `Function` | `null` | Encodes synchronously | -| `encode` | `Function` | `null` | Encodes asynchronously | -| `encodeToStream` (Experimental) | `Function` | `null` | Encodes to a stream | +| Field | Type | Default | Description | +| -------------------------------- | ---------- | ------- | ------------------------------------------------------ | +| `encodeSync` | `Function` | `null` | Encodes synchronously | +| `encode` | `Function` | `null` | Encodes asynchronously | +| `encodeInBatches` (Experimental) | `Function` | `null` | Encodes and releases batches through an async iterator | Note: The format of the input data to the encoders depends on the loader. Several loader categories are defined to provide standardized data formats for similar loaders. diff --git a/docs/api-reference/table-loaders/csv-loader.md b/docs/api-reference/table-loaders/csv-loader.md index faa47de16e..ea328aafcb 100644 --- a/docs/api-reference/table-loaders/csv-loader.md +++ b/docs/api-reference/table-loaders/csv-loader.md @@ -40,12 +40,12 @@ Note that the following `papaparse` options are NOT supported by `CSVLoader` (th | `header`=`false` | If true, the first row of parsed data will be interpreted as field names. \* | Header is detected and parsed by `CSVLoader` | | `transformHeader`= | Function to apply on each header. | (Only available in version 5.0) | | `worker` | Whether to use a worker thread. | Use `CSVWorkerLoader` instead. | -| `step` | Callback function for streaming. | Use `loadFileInBatches` instead. | -| `complete` | Callback function for streaming. | Use `loadFileInBatches` instead. | +| `step` | Callback function for streaming. | Use `loadInBatches` instead. | +| `complete` | Callback function for streaming. | Use `loadInBatches` instead. | | `error` | Callback function for error. | Errors will be handled by `CSVLoader`. | | `download` | First argument is URL from which to download a file. | Use external functions to load data (such as `fetch` or `fetchFile`). | -| `chunk` | Callback executed after every chunk is loaded. | Use `loadFileInBatches` instead. | -| `beforeFirstChunk` | Callback executed before parsing of first chunk. | Use `loadFileInBatches` instead. | +| `chunk` | Callback executed after every chunk is loaded. | Use `loadInBatches` instead. | +| `beforeFirstChunk` | Callback executed before parsing of first chunk. | Use `loadInBatches` instead. | | `withCredentials` | `XMLHttpRequest.withCredentials` property. | Control credentials using your loading functions (e.g. `fetch` or `fetchFile`).
| ## Attributions diff --git a/docs/developer-guide/about-loaders.md b/docs/developer-guide/about-loaders.md index 466ec1dd56..76b08a6e67 100644 --- a/docs/developer-guide/about-loaders.md +++ b/docs/developer-guide/about-loaders.md @@ -25,7 +25,7 @@ You would give a name to the loader object, define what file extension(s) it use A loader must define a parser function for the format, a function that takes the loaded data and converts it into a parsed object. Depending on how the underlying loader works (whether it is synchronous or asynchronous and whether it expects text or binary data), the loader object can expose the parser in a couple of different ways, specified by providing one of the parser function fields. -The loaders.gl `loadFile` and `parseFile` functions accept one or more loader objects. These functions examines what format the loader needs (text or binary), reads data into the required format, and then calls one of the loader object's parser functions with that data. +The loaders.gl `load` and `parse` functions accept one or more loader objects. These functions examine what format the loader needs (text or binary), reads data into the required format, and then calls one of the loader object's parser functions with that data. When creating a new loader object, at least one of the parser functions needs to be defined: @@ -36,8 +36,8 @@ When creating a new loader object, at least one of the parser functions needs to | `parse` | `Function` | `null` | Parses a binary file asynchronously (`ArrayBuffer`) | | `load` | `Function` | `null` | Reads and parses a binary file asynchronously | -- The preferred option is to provide a synchronous parser that works on loaded data (using the `parseSync` or `parseTextSync` fields). This allows the use of the `loadFileSync` and `parseFileSync` functions with your loader. -- The second preference is to provide an asynchronous parser that works on loaded data (`parse`). This allows the user to load the data using any preferred mechanism, and only use loaders.gl for parsing by calling `parseFile` on the loaded data. +- The preferred option is to provide a synchronous parser that works on loaded data (using the `parseSync` or `parseTextSync` fields). This allows the use of the `parseSync` function with your loader. +- The second preference is to provide an asynchronous parser that works on loaded data (`parse`). This allows the user to load the data using any preferred mechanism, and only use loaders.gl for parsing by calling `parse` on the loaded data. - Finally, some existing parsers combine both loading and parsing, and loaders.gl provides an accommodation for packaging such loaders into loader objects (`load`). The `load` parser field is for instance used to define a loader object using the classic browser image loading technique of creating a new `Image` and setting its `src` and `onload` fields.
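To make this concrete, here is a minimal sketch of a hypothetical loader object using the preferred synchronous text parser (hedged: the `extensions` field name is an assumption based on the prose above; the `name`/parser-field shape matches the loader registered in `load.spec.js` later in this patch):

```js
// Hypothetical minimal loader object for JSON text files
const MyJSONLoader = {
  name: 'JSON',
  extensions: ['json'], // assumed field name for extension-based autodetection
  // Preferred option: a synchronous parser that works on already loaded text
  parseTextSync: text => JSON.parse(text)
};

// It can then be passed directly to parse()/load(), or registered globally:
// registerLoaders(MyJSONLoader);
```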
## Remarks diff --git a/docs/table-of-contents.json b/docs/table-of-contents.json index d24ff5932d..3cc607dba9 100644 --- a/docs/table-of-contents.json +++ b/docs/table-of-contents.json @@ -31,10 +31,12 @@ { "title": "Core Functions", "entries": [ - {"entry": "docs/api-reference/core/load-file"}, {"entry": "docs/api-reference/core/load-image"}, - {"entry": "docs/api-reference/core/parse-file"}, - {"entry": "docs/api-reference/core/read-file"}, + {"entry": "docs/api-reference/core/load"}, + {"entry": "docs/api-reference/core/parse"}, + {"entry": "docs/api-reference/core/encode"}, + {"entry": "docs/api-reference/core/fetch-file"}, + {"entry": "docs/api-reference/core/save"}, {"entry": "docs/api-reference/core/set-path-prefix"}, {"entry": "docs/api-reference/core/image-utilities"}, {"entry": "docs/api-reference/core/binary-utilities"}, diff --git a/modules/arrow/test/arrow-loader.spec.js b/modules/arrow/test/arrow-loader.spec.js index 056fedf6d6..281b04f324 100644 --- a/modules/arrow/test/arrow-loader.spec.js +++ b/modules/arrow/test/arrow-loader.spec.js @@ -1,6 +1,6 @@ import test from 'tape-promise/tape'; -import {isBrowser, resolvePath, fetchFile, parseFile, parseFileInBatches} from '@loaders.gl/core'; -// import {parseFileInBatchesSync} from '@loaders.gl/core'; +import {isBrowser, resolvePath, fetchFile, parse, parseInBatches} from '@loaders.gl/core'; +// import {parseInBatchesSync} from '@loaders.gl/core'; import {ArrowLoader} from '@loaders.gl/arrow'; import {ArrowWorkerLoader} from '@loaders.gl/arrow'; @@ -12,8 +12,8 @@ const ARROW_STRUCT = '@loaders.gl/arrow/test/data/struct.arrow'; // Bigger, batched sample file const ARROW_BIOGRID_NODES = '@loaders.gl/arrow/test/data/biogrid-nodes.arrow'; -test('ArrowLoader#parseFileSync(simple.arrow)', async t => { - const columns = await parseFile(fetchFile(ARROW_SIMPLE), ArrowLoader); +test('ArrowLoader#parse(simple.arrow)', async t => { + const columns = await parse(fetchFile(ARROW_SIMPLE), ArrowLoader); // Check loader specific results t.ok(columns.bar, 'bar column loaded'); t.ok(columns.baz, 'baz column loaded'); @@ -21,38 +21,38 @@ test('ArrowLoader#parseFileSync(simple.arrow)', async t => { t.end(); }); -test('ArrowLoader#parseFileSync(dictionary.arrow)', async t => { - const columns = await parseFile(fetchFile(ARROW_DICTIONARY), ArrowLoader); +test('ArrowLoader#parse(dictionary.arrow)', async t => { + const columns = await parse(fetchFile(ARROW_DICTIONARY), ArrowLoader); // Check loader specific results t.ok(columns['example-csv'], 'example-csv loaded'); t.end(); }); -test('ArrowLoader#parseFile(fetchFile(struct).arrow)', async t => { - const columns = await parseFile(fetchFile(ARROW_STRUCT), ArrowLoader); +test('ArrowLoader#parse(fetchFile(struct).arrow)', async t => { + const columns = await parse(fetchFile(ARROW_STRUCT), ArrowLoader); // Check loader specific results t.ok(columns.struct_nullable, 'struct_nullable loaded'); t.end(); }); // TODO - Arrow worker seems to not bundle apache arrow lib?
-test('ArrowLoader#parseFile (WORKER)', async t => { +test('ArrowLoader#parse (WORKER)', async t => { if (!isBrowser) { t.comment('Worker is not usable in non-browser environments'); t.end(); return; } - const data = await parseFile(fetchFile(ARROW_SIMPLE), ArrowWorkerLoader); + const data = await parse(fetchFile(ARROW_SIMPLE), ArrowWorkerLoader); t.ok(data, 'Data returned'); t.end(); }); -test('ArrowLoader#parseFileInBatches(async input)', async t => { - // TODO - parseFileInBatches should accept fetch response directly +test('ArrowLoader#parseInBatches(async input)', async t => { + // TODO - parseInBatches should accept fetch response directly const response = await fetchFile(ARROW_BIOGRID_NODES); const data = await response.arrayBuffer(); - const asyncIterator = await parseFileInBatches(data, ArrowLoader); + const asyncIterator = await parseInBatches(data, ArrowLoader); for await (const batch of asyncIterator) { t.ok(batch, 'received batch'); t.end(); @@ -60,11 +60,11 @@ test('ArrowLoader#parseFileInBatches(async input)', async t => { }); /* -test('ArrowLoader#parseFileInBatchesSync(sync input)', async t => { +test('ArrowLoader#parseInBatchesSync(sync input)', async t => { const response = await fetchFile(ARROW_BIOGRID_NODES); const data = await response.arrayBuffer(); - const iterator = parseFileInBatchesSync(data, ArrowLoader); + const iterator = parseInBatchesSync(data, ArrowLoader); for (const batch of iterator) { t.ok(batch, 'received batch'); t.end(); @@ -72,8 +72,8 @@ test('ArrowLoader#parseFileInBatchesSync(sync input)', async t => { }); */ -// TODO - Move node stream test to generic parseFileInBatches test? -test('ArrowLoader#parseFileInBatches(Stream)', async t => { +// TODO - Move node stream test to generic parseInBatches test? +test('ArrowLoader#parseInBatches(Stream)', async t => { if (isBrowser) { t.comment('Node stream test case only supported in Node'); t.end(); @@ -82,7 +82,7 @@ test('ArrowLoader#parseFileInBatches(Stream)', async t => { const fs = require('fs'); const stream = fs.createReadStream(resolvePath(ARROW_BIOGRID_NODES)); - const asyncIterator = await parseFileInBatches(stream, ArrowLoader); + const asyncIterator = await parseInBatches(stream, ArrowLoader); for await (const batch of asyncIterator) { t.ok(batch, 'received batch'); t.end(); diff --git a/modules/core/src/index.js b/modules/core/src/index.js index 1bcd41d2cc..d320e565f4 100644 --- a/modules/core/src/index.js +++ b/modules/core/src/index.js @@ -5,21 +5,14 @@ export {writeFile, writeFileSync} from './lib/fetch/write-file'; // FILE PARSING AND ENCODING export {registerLoaders} from './lib/register-loaders'; -export { - parseFile, - parseFileSync, - parseFileInBatches, - parseFileInBatchesSync -} from './lib/parse-file'; +export {parse, parseSync, parseInBatches, parseInBatchesSync} from './lib/parse'; // LOADING (READING + PARSING) -export {loadFileInBatches, loadFile, loadFileSync} from './lib/load-file'; -export {loadImage} from './lib/load-image'; -export {ImageBitmapLoader, HTMLImageLoader, PlatformImageLoader} from './lib/image-loaders'; +export {load, loadInBatches} from './lib/load'; // ENCODING AND SAVING -export {encodeFile, encodeFileSync, encodeToStream} from './lib/encode-file'; -export {saveFile, saveFileSync} from './lib/save-file'; +export {encode, encodeSync, encodeInBatches} from './lib/encode'; +export {save, saveSync} from './lib/save'; // "JAVASCRIPT" UTILS export { @@ -66,4 +59,24 @@ export {default as assert} from './utils/assert'; export {getMeshSize as _getMeshSize} from 
'./categories/mesh/mesh-utils'; // DEPRECATED +export {loadImage} from './lib/load-image'; + export {createReadStream} from './lib/fetch/fetch-file'; + +import {parse, parseSync} from './lib/parse'; +import {load} from './lib/load'; + +export function parseFile(...args) { + console.warn('parseFile() deprecated, use parse()'); // eslint-disable-line + return parse(...args); +} + +export function parseFileSync(...args) { + console.warn('parseFileSync() deprecated, use parseSync()'); // eslint-disable-line + return parseSync(...args); +} + +export function loadFile(...args) { + console.warn('loadFile() deprecated, use load()'); // eslint-disable-line + return load(...args); +} diff --git a/modules/core/src/javascript-utils/node/buffer-to-array-buffer.js b/modules/core/src/javascript-utils/node/buffer-to-array-buffer.js new file mode 100644 index 0000000000..48e0f68861 --- /dev/null +++ b/modules/core/src/javascript-utils/node/buffer-to-array-buffer.js @@ -0,0 +1,10 @@ +/* global Buffer */ + +export function bufferToArrayBuffer(buffer) { + // TODO - per docs we should just be able to call buffer.buffer, but there are issues + if (Buffer.isBuffer(buffer)) { + const typedArray = new Uint8Array(buffer); + return typedArray.buffer; + } + return buffer; +} diff --git a/modules/core/src/lib/encode-file.js b/modules/core/src/lib/encode-file.js deleted file mode 100644 index c06077f38f..0000000000 --- a/modules/core/src/lib/encode-file.js +++ /dev/null @@ -1,24 +0,0 @@ -export function encodeFile(data, writer, options, url) { - if (writer.encode) { - return writer.encode(data, options); - } - if (writer.encodeSync) { - return Promise.resolve(writer.encodeSync(data, options)); - } - // TODO - handle encodeStream? - throw new Error('Writer could not encode data'); -} - -export function encodeFileSync(data, writer, options, url) { - if (writer.encodeSync) { - return writer.encodeSync(data, options); - } - throw new Error('Writer could not synchronously encode data'); -} - -export function encodeToStream(data, writer, options, url) { - if (writer.encodeToStream) { - return writer.encodeToStream(data, options); - } - throw new Error('Writer could not encode data to stream'); -} diff --git a/modules/core/src/lib/encode.js b/modules/core/src/lib/encode.js new file mode 100644 index 0000000000..8a77d38308 --- /dev/null +++ b/modules/core/src/lib/encode.js @@ -0,0 +1,25 @@ +export function encode(data, writer, options, url) { + if (writer.encode) { + return writer.encode(data, options); + } + if (writer.encodeSync) { + return Promise.resolve(writer.encodeSync(data, options)); + } + // TODO - use encodeInBatches? + throw new Error('Writer could not encode data'); +} + +export function encodeSync(data, writer, options, url) { + if (writer.encodeSync) { + return writer.encodeSync(data, options); + } + throw new Error('Writer could not synchronously encode data'); +} + +export function encodeInBatches(data, writer, options, url) { + if (writer.encodeInBatches) { + return writer.encodeInBatches(data, options); + } + // TODO - fall back to atomic encode?
+ throw new Error('Writer could not encode data in batches'); +} diff --git a/modules/core/src/lib/load-file.js b/modules/core/src/lib/load.js similarity index 55% rename from modules/core/src/lib/load-file.js rename to modules/core/src/lib/load.js index f4735d949c..4595fdee83 100644 --- a/modules/core/src/lib/load-file.js +++ b/modules/core/src/lib/load.js @@ -1,17 +1,17 @@ -import {fetchFile, readFileSync} from './fetch/fetch-file'; +import {fetchFile} from './fetch/fetch-file'; import {isLoaderObject} from './loader-utils/normalize-loader'; import {autoDetectLoader} from './loader-utils/auto-detect-loader'; -import {parseFile, parseFileSync, parseFileInBatches} from './parse-file'; +import {parse, parseInBatches} from './parse'; import {getRegisteredLoaders} from './register-loaders'; -export async function loadFileInBatches(url, loaders, options) { +export async function loadInBatches(url, loaders, options) { const response = await fetchFile(url, options); - return parseFileInBatches(response, loaders, options, url); + return parseInBatches(response, loaders, options, url); } -export async function loadFile(url, loaders, options) { - // Signature: loadFile(url, options) +export async function load(url, loaders, options) { + // Signature: load(url, options) // Uses registered loaders if (!Array.isArray(loaders) && !isLoaderObject(loaders)) { options = loaders; @@ -28,12 +28,5 @@ export async function loadFile(url, loaders, options) { // at this point, data can be binary or text const response = await fetchFile(url, options); - return parseFile(response, loaders, options, url); -} - -export function loadFileSync(url, loaders, options) { - const data = readFileSync(url, options); - const result = parseFileSync(data, loaders, options, url); - // Separate return to facilitate breakpoint setting - return result; + return parse(response, loaders, options, url); } diff --git a/modules/core/src/lib/parse-file.js b/modules/core/src/lib/parse.js similarity index 86% rename from modules/core/src/lib/parse-file.js rename to modules/core/src/lib/parse.js index 6be86389d5..e7e205e727 100644 --- a/modules/core/src/lib/parse-file.js +++ b/modules/core/src/lib/parse.js @@ -4,8 +4,8 @@ import NullLog from './loader-utils/null-log'; import {getRegisteredLoaders} from './register-loaders'; import {parseWithLoader, parseWithLoaderInBatches, parseWithLoaderSync} from './parse-with-loader'; -export async function parseFile(data, loaders, options, url) { - // Signature: parseFile(data, options, url) +export async function parse(data, loaders, options, url) { + // Signature: parse(data, options, url) // Uses registered loaders if (!Array.isArray(loaders) && !isLoaderObject(loaders)) { url = options; @@ -29,8 +29,8 @@ export async function parseFile(data, loaders, options, url) { return await parseWithLoader(data, loader, options, url); } -export function parseFileSync(data, loaders, options, url) { - // Signature: parseFileSync(data, options, url) +export function parseSync(data, loaders, options, url) { + // Signature: parseSync(data, options, url) // Uses registered loaders if (!Array.isArray(loaders) && !isLoaderObject(loaders)) { url = options; @@ -49,8 +49,8 @@ export function parseFileSync(data, loaders, options, url) { return parseWithLoaderSync(data, loader, options, url); } -export async function parseFileInBatches(data, loaders, options, url) { - // Signature: parseFileInBatches(data, options, url) +export async function parseInBatches(data, loaders, options, url) { + // Signature: 
parseInBatches(data, options, url) // Uses registered loaders if (!Array.isArray(loaders) && !isLoaderObject(loaders)) { url = options; @@ -69,8 +69,8 @@ export async function parseFileInBatches(data, loaders, options, url) { return parseWithLoaderInBatches(data, loader, options, url); } -export async function parseFileInBatchesSync(data, loaders, options, url) { - // Signature: parseFileInBatchesSync(data, options, url) +export async function parseInBatchesSync(data, loaders, options, url) { + // Signature: parseInBatchesSync(data, options, url) // Uses registered loaders if (!Array.isArray(loaders) && !isLoaderObject(loaders)) { url = options; diff --git a/modules/core/src/lib/save-file.js b/modules/core/src/lib/save-file.js deleted file mode 100644 index 134514ce55..0000000000 --- a/modules/core/src/lib/save-file.js +++ /dev/null @@ -1,12 +0,0 @@ -import {encodeFile, encodeFileSync} from './encode-file'; -import {writeFile, writeFileSync} from './fetch/write-file'; - -export function saveFile(data, url, writer) { - const encodedData = encodeFile(data, writer, url); - return writeFile(url, encodedData); -} - -export function saveFileSync(data, url, writer) { - const encodedData = encodeFileSync(data, writer, url); - return writeFileSync(url, encodedData); -} diff --git a/modules/core/src/lib/save.js b/modules/core/src/lib/save.js new file mode 100644 index 0000000000..1dda4243c1 --- /dev/null +++ b/modules/core/src/lib/save.js @@ -0,0 +1,12 @@ +import {encode, encodeSync} from './encode'; +import {writeFile, writeFileSync} from './fetch/write-file'; + +export function save(data, url, writer) { + const encodedData = encode(data, writer, url); + return writeFile(url, encodedData); +} + +export function saveSync(data, url, writer) { + const encodedData = encodeSync(data, writer, url); + return writeFileSync(url, encodedData); +} diff --git a/modules/core/test/index.js b/modules/core/test/index.js index b0723bb2b2..d2ce700613 100644 --- a/modules/core/test/index.js +++ b/modules/core/test/index.js @@ -12,6 +12,4 @@ import './lib/fetch/fetch-file.spec'; import './lib/loader-utils/auto-parse.spec'; -import './lib/image-loaders.spec'; -import './lib/load-image.spec'; -import './lib/load-file.spec'; +import './lib/load.spec'; diff --git a/modules/core/test/lib/load-file.spec.js b/modules/core/test/lib/load-file.spec.js deleted file mode 100644 index 61e34109a2..0000000000 --- a/modules/core/test/lib/load-file.spec.js +++ /dev/null @@ -1,22 +0,0 @@ -import {loadFile, registerLoaders} from '@loaders.gl/core'; - -import test from 'tape-promise/tape'; - -test('loadFile#loadFile', t => { - t.ok(loadFile, 'loadFile defined'); - loadFile('.').then(loadedData => { - t.ok(true, 'loadFile accepts undefined loaders'); - t.end(); - }); -}); - -test('loadFile#auto detect loader', t => { - registerLoaders({ - name: 'JSON', - parse: data => { - t.ok(data instanceof ArrayBuffer, 'Got ArrayBuffer'); - t.end(); - } - }); - loadFile('package.json'); -}); diff --git a/modules/core/test/lib/load.spec.js b/modules/core/test/lib/load.spec.js new file mode 100644 index 0000000000..a01648094e --- /dev/null +++ b/modules/core/test/lib/load.spec.js @@ -0,0 +1,22 @@ +import {load, registerLoaders} from '@loaders.gl/core'; + +import test from 'tape-promise/tape'; + +test('load#load', t => { + t.ok(load, 'load defined'); + load('.').then(loadedData => { + t.ok(true, 'load accepts undefined loaders'); + t.end(); + }); +}); + +test('load#auto detect loader', t => { + registerLoaders({ + name: 'JSON', + parse: data => { + 
t.ok(data instanceof ArrayBuffer, 'Got ArrayBuffer'); + t.end(); + } + }); + load('package.json'); +}); diff --git a/modules/core/test/lib/loader-utils/auto-parse.spec.js b/modules/core/test/lib/loader-utils/auto-parse.spec.js index 0f19cae80a..b5cd5ec0a7 100644 --- a/modules/core/test/lib/loader-utils/auto-parse.spec.js +++ b/modules/core/test/lib/loader-utils/auto-parse.spec.js @@ -1,6 +1,6 @@ /* eslint-disable max-len */ import test from 'tape-promise/tape'; -import {parseFileSync} from '@loaders.gl/core'; +import {parseSync} from '@loaders.gl/core'; import {OBJLoader} from '@loaders.gl/obj'; import {KMLLoader} from '@loaders.gl/kml'; @@ -8,11 +8,11 @@ import KML from '@loaders.gl/kml/test/data/KML_Samples.kml'; const LOADERS = [OBJLoader, KMLLoader]; -test('parseFileSync#autoParse', t => { +test('parseSync#autoParse', t => { if (!KMLLoader.supported) { t.comment('XML parsing not available'); } else { - const data = parseFileSync(KML, LOADERS, {log: null}); + const data = parseSync(KML, LOADERS, {log: null}); t.equal(data.documents.length, 2, 'Documents were found'); t.equal(data.markers.length, 4, 'Markers were found'); t.equal(data.lines.length, 6, 'Lines were found'); diff --git a/modules/csv/test/csv-loader-arrow.spec.js b/modules/csv/test/csv-loader-arrow.spec.js index 195e8d1601..f70235e93d 100644 --- a/modules/csv/test/csv-loader-arrow.spec.js +++ b/modules/csv/test/csv-loader-arrow.spec.js @@ -1,5 +1,5 @@ import test from 'tape-promise/tape'; -import {loadFileInBatches, isIterator, isAsyncIterable} from '@loaders.gl/core'; +import {loadInBatches, isIterator, isAsyncIterable} from '@loaders.gl/core'; import {CSVLoader} from '@loaders.gl/csv'; import {ArrowTableBatch} from '@loaders.gl/arrow'; import {RecordBatch} from 'apache-arrow'; @@ -9,13 +9,13 @@ import {RecordBatch} from 'apache-arrow'; const CSV_NUMBERS_100_URL = '@loaders.gl/csv/test/data/numbers-100.csv'; const CSV_NUMBERS_10000_URL = '@loaders.gl/csv/test/data/numbers-10000.csv'; -test('CSVLoader#loadFileInBatches(numbers-100.csv, arrow)', async t => { - const iterator = await loadFileInBatches(CSV_NUMBERS_100_URL, CSVLoader, { +test('CSVLoader#loadInBatches(numbers-100.csv, arrow)', async t => { + const iterator = await loadInBatches(CSV_NUMBERS_100_URL, CSVLoader, { TableBatch: ArrowTableBatch, batchSize: 40 }); - t.ok(isIterator(iterator) || isAsyncIterable(iterator), 'loadFileInBatches returned iterator'); + t.ok(isIterator(iterator) || isAsyncIterable(iterator), 'loadInBatches returned iterator'); let batchCount = 0; for await (const batch of iterator) { @@ -28,12 +28,12 @@ test('CSVLoader#loadFileInBatches(numbers-100.csv, arrow)', async t => { t.end(); }); -test('CSVLoader#loadFileInBatches(numbers-10000.csv, arrow)', async t => { - const iterator = await loadFileInBatches(CSV_NUMBERS_10000_URL, CSVLoader, { +test('CSVLoader#loadInBatches(numbers-10000.csv, arrow)', async t => { + const iterator = await loadInBatches(CSV_NUMBERS_10000_URL, CSVLoader, { TableBatch: ArrowTableBatch, batchSize: 2000 }); - t.ok(isIterator(iterator) || isAsyncIterable(iterator), 'loadFileInBatches returned iterator'); + t.ok(isIterator(iterator) || isAsyncIterable(iterator), 'loadInBatches returned iterator'); let batchCount = 0; for await (const batch of iterator) { diff --git a/modules/csv/test/csv-loader.bench.js b/modules/csv/test/csv-loader.bench.js index fda089afd5..5c9ddaf7b5 100644 --- a/modules/csv/test/csv-loader.bench.js +++ b/modules/csv/test/csv-loader.bench.js @@ -1,4 +1,4 @@ -import {fetchFile, parseFile} from 
diff --git a/modules/csv/test/csv-loader.bench.js b/modules/csv/test/csv-loader.bench.js
index fda089afd5..5c9ddaf7b5 100644
--- a/modules/csv/test/csv-loader.bench.js
+++ b/modules/csv/test/csv-loader.bench.js
@@ -1,4 +1,4 @@
-import {fetchFile, parseFile} from '@loaders.gl/core';
+import {fetchFile, parse} from '@loaders.gl/core';
 import {CSVLoader} from '@loaders.gl/csv';
 
 const SAMPLE_CSV_URL = '@loaders.gl/csv/test/data/sample-very-long.csv';
@@ -21,7 +21,7 @@ export default async function csvBench(bench) {
   bench = bench.group('CSV Decode');
 
   bench = bench.addAsync('CSVLoader#decode', async () => {
-    parseFile(sample, CSVLoader);
+    parse(sample, CSVLoader);
   });
 
   return bench;
diff --git a/modules/csv/test/csv-loader.spec.js b/modules/csv/test/csv-loader.spec.js
index b08f77c140..bf1d344101 100644
--- a/modules/csv/test/csv-loader.spec.js
+++ b/modules/csv/test/csv-loader.spec.js
@@ -1,5 +1,5 @@
 import test from 'tape-promise/tape';
-import {loadFileInBatches, isIterator, isAsyncIterable} from '@loaders.gl/core';
+import {loadInBatches, isIterator, isAsyncIterable} from '@loaders.gl/core';
 import {ColumnarTableBatch} from '@loaders.gl/experimental';
 import {CSVLoader} from '@loaders.gl/csv';
 
@@ -8,11 +8,11 @@ const CSV_SAMPLE_URL = '@loaders.gl/csv/test/data/sample.csv';
 // const CSV_SAMLE_LONG_URL = '@loaders.gl/csv/test/data/sample-long.csv';
 const CSV_SAMPLE_VERY_LONG_URL = '@loaders.gl/csv/test/data/sample-very-long.csv';
 
-test('CSVLoader#loadFileInBatches(sample.csv, columns)', async t => {
-  const iterator = await loadFileInBatches(CSV_SAMPLE_URL, CSVLoader, {
+test('CSVLoader#loadInBatches(sample.csv, columns)', async t => {
+  const iterator = await loadInBatches(CSV_SAMPLE_URL, CSVLoader, {
     TableBatch: ColumnarTableBatch
   });
-  t.ok(isIterator(iterator) || isAsyncIterable(iterator), 'loadFileInBatches returned iterator');
+  t.ok(isIterator(iterator) || isAsyncIterable(iterator), 'loadInBatches returned iterator');
 
   let batchCount = 0;
   for await (const batch of iterator) {
@@ -24,13 +24,13 @@ test('CSVLoader#loadFileInBatches(sample.csv, columns)', async t => {
   t.end();
 });
 
-test('CSVLoader#loadFileInBatches(sample-very-long.csv, columns)', async t => {
+test('CSVLoader#loadInBatches(sample-very-long.csv, columns)', async t => {
   const batchSize = 25;
-  const iterator = await loadFileInBatches(CSV_SAMPLE_VERY_LONG_URL, CSVLoader, {
+  const iterator = await loadInBatches(CSV_SAMPLE_VERY_LONG_URL, CSVLoader, {
     TableBatch: ColumnarTableBatch,
     batchSize
   });
-  t.ok(isIterator(iterator) || isAsyncIterable(iterator), 'loadFileInBatches returned iterator');
+  t.ok(isIterator(iterator) || isAsyncIterable(iterator), 'loadInBatches returned iterator');
 
   let batchCount = 0;
   for await (const batch of iterator) {
@@ -46,9 +46,9 @@ test('CSVLoader#loadFileInBatches(sample-very-long.csv, columns)', async t => {
   t.end();
 });
 
-test('CSVLoader#loadFileInBatches(sample.csv, rows)', async t => {
-  const iterator = await loadFileInBatches(CSV_SAMPLE_URL, CSVLoader);
-  t.ok(isIterator(iterator) || isAsyncIterable(iterator), 'loadFileInBatches returned iterator');
+test('CSVLoader#loadInBatches(sample.csv, rows)', async t => {
+  const iterator = await loadInBatches(CSV_SAMPLE_URL, CSVLoader);
+  t.ok(isIterator(iterator) || isAsyncIterable(iterator), 'loadInBatches returned iterator');
 
   let batchCount = 0;
   for await (const batch of iterator) {
@@ -61,10 +61,10 @@ test('CSVLoader#loadFileInBatches(sample.csv, rows)', async t => {
   t.end();
 });
 
-test('CSVLoader#loadFileInBatches(sample-very-long.csv, rows)', async t => {
+test('CSVLoader#loadInBatches(sample-very-long.csv, rows)', async t => {
   const batchSize = 25;
-  const iterator = await loadFileInBatches(CSV_SAMPLE_VERY_LONG_URL, CSVLoader, {batchSize});
-  t.ok(isIterator(iterator) || isAsyncIterable(iterator), 'loadFileInBatches returned iterator');
+  const iterator = await loadInBatches(CSV_SAMPLE_VERY_LONG_URL, CSVLoader, {batchSize});
+  t.ok(isIterator(iterator) || isAsyncIterable(iterator), 'loadInBatches returned iterator');
 
   let batchCount = 0;
   for await (const batch of iterator) {
diff --git a/modules/csv/test/csv-writer.spec.js b/modules/csv/test/csv-writer.spec.js
index cbebb4ab98..bbc35247a8 100644
--- a/modules/csv/test/csv-writer.spec.js
+++ b/modules/csv/test/csv-writer.spec.js
@@ -11,8 +11,8 @@ License: MIT
 /* eslint-disable quotes */
 import test from 'tape-promise/tape';
 
-import {isBrowser, loadFile} from '@loaders.gl/core';
-import {parseFileAsIterator, parseFileAsAsyncIterator} from '@loaders.gl/core';
+import {isBrowser, load} from '@loaders.gl/core';
+import {parseAsIterator, parseAsAsyncIterator} from '@loaders.gl/core';
 import {CSVWriter} from '@loaders.gl/experimental';
 
 // Tests for Papa.unparse() function (JSON to CSV)
diff --git a/modules/draco/test/draco-compression-ratio.spec.js b/modules/draco/test/draco-compression-ratio.spec.js
index bb73218d01..b1909fa4ae 100644
--- a/modules/draco/test/draco-compression-ratio.spec.js
+++ b/modules/draco/test/draco-compression-ratio.spec.js
@@ -1,6 +1,6 @@
 /* eslint-disable max-len */
 import test from 'tape-promise/tape';
-import {fetchFile, parseFileSync, encodeFileSync, _getMeshSize} from '@loaders.gl/core';
+import {fetchFile, parseSync, encodeSync, _getMeshSize} from '@loaders.gl/core';
 import {DracoWriter, DracoLoader} from '@loaders.gl/draco';
 
 import {validateLoadedData} from 'test/common/conformance';
@@ -22,13 +22,13 @@ test('DracoWriter#compressRawBuffers', async t => {
 
   // Encode mesh
   // TODO - Replace with draco writer
-  const compressedMesh = encodeFileSync({attributes}, DracoWriter, {pointcloud: true});
+  const compressedMesh = encodeSync({attributes}, DracoWriter, {pointcloud: true});
   const meshSize = _getMeshSize(attributes);
   const ratio = meshSize / compressedMesh.byteLength;
   t.comment(`Draco compression ${compressedMesh.byteLength} bytes, ratio ${ratio.toFixed(1)}`);
 
   // Ensure we can parse it
-  const data2 = parseFileSync(compressedMesh, DracoLoader);
+  const data2 = parseSync(compressedMesh, DracoLoader);
   validateLoadedData(t, data2);
   t.equal(data2.attributes.POSITION.value.length, attributes.POSITION.length, 'POSITION matched');
diff --git a/modules/draco/test/draco-loader.spec.js b/modules/draco/test/draco-loader.spec.js
index fed4edbd25..ad1e4065b6 100644
--- a/modules/draco/test/draco-loader.spec.js
+++ b/modules/draco/test/draco-loader.spec.js
@@ -1,13 +1,13 @@
 /* eslint-disable max-len */
 import test from 'tape-promise/tape';
-import {loadFile} from '@loaders.gl/core';
+import {load} from '@loaders.gl/core';
 import {DracoLoader, DracoWorkerLoader} from '@loaders.gl/draco';
 
 import {validateLoadedData} from 'test/common/conformance';
 
 const BUNNY_DRC_URL = '@loaders.gl/draco/test/data/bunny.drc';
 
 test('DracoLoader#parse and encode', async t => {
-  const data = await loadFile(BUNNY_DRC_URL, DracoLoader);
+  const data = await load(BUNNY_DRC_URL, DracoLoader);
   validateLoadedData(t, data);
 
   t.equal(data.attributes.POSITION.value.length, 104502, 'POSITION attribute was found');
@@ -21,7 +21,7 @@ test('DracoWorkerLoader#parse', async t => {
     return;
   }
 
-  const data = await loadFile(BUNNY_DRC_URL, DracoWorkerLoader);
+  const data = await load(BUNNY_DRC_URL, DracoWorkerLoader);
   validateLoadedData(t, data);
 
   t.equal(data.attributes.POSITION.value.length, 104502, 'POSITION attribute was found');
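For reference, the one-step fetch-and-parse pattern that replaces `loadFile` — a minimal sketch; the `.drc` URL is a placeholder:

```
import {load} from '@loaders.gl/core';
import {DracoLoader} from '@loaders.gl/draco';

// load() fetches the URL and parses the response with the given loader;
// pass an array of loaders (or none, with registerLoaders) for auto-detection
const data = await load('data/bunny.drc', DracoLoader);
console.log(data.attributes.POSITION.value.length);
```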
diff --git a/modules/draco/test/draco-writer.spec.js b/modules/draco/test/draco-writer.spec.js
index 719d55749b..5b26f2092c 100644
--- a/modules/draco/test/draco-writer.spec.js
+++ b/modules/draco/test/draco-writer.spec.js
@@ -1,6 +1,6 @@
 /* eslint-disable max-len */
 import test from 'tape-promise/tape';
-import {encodeFileSync, fetchFile, parseFileSync, _getMeshSize} from '@loaders.gl/core';
+import {encodeSync, fetchFile, parseSync, _getMeshSize} from '@loaders.gl/core';
 import {DracoLoader, DracoWriter, DracoBuilder} from '@loaders.gl/draco';
 
 import {validateLoadedData} from 'test/common/conformance';
@@ -31,10 +31,10 @@ async function loadBunny() {
   const response = await fetchFile(BUNNY_DRC_URL);
   const arrayBuffer = await response.arrayBuffer();
   // Decode Loaded Mesh to use as input data for encoders
-  return parseFileSync(arrayBuffer, DracoLoader);
+  return parseSync(arrayBuffer, DracoLoader);
 }
 
-test('DracoWriter#encodeFileSync(bunny.drc)', async t => {
+test('DracoWriter#encodeSync(bunny.drc)', async t => {
   const data = await loadBunny();
   t.equal(data.attributes.POSITION.value.length, 104502, 'POSITION attribute was found');
 
@@ -56,14 +56,14 @@ test('DracoWriter#encodeFileSync(bunny.drc)', async t => {
 
     let compressedMesh;
     t.doesNotThrow(() => {
-      compressedMesh = encodeFileSync(mesh, DracoWriter, tc.options);
+      compressedMesh = encodeSync(mesh, DracoWriter, tc.options);
       const ratio = meshSize / compressedMesh.byteLength;
       t.comment(`${tc.title} ${compressedMesh.byteLength} bytes, ratio ${ratio.toFixed(1)}`);
     }, `${tc.title} did not throw`);
 
     if (!tc.options.pointcloud) {
       // Decode the mesh
-      const data2 = parseFileSync(compressedMesh, DracoLoader);
+      const data2 = parseSync(compressedMesh, DracoLoader);
       validateLoadedData(t, data2);
 
       // t.comment(JSON.stringify(data));
@@ -108,7 +108,7 @@ test('DracoParser#encode(bunny.drc)', async t => {
 
     if (!tc.options.pointcloud) {
       // Decode the mesh
-      const data2 = parseFileSync(compressedMesh, DracoLoader);
+      const data2 = parseSync(compressedMesh, DracoLoader);
       validateLoadedData(t, data2);
 
       // t.comment(JSON.stringify(data));
diff --git a/modules/draco/test/draco.bench.js b/modules/draco/test/draco.bench.js
index 4931f27344..325f669177 100644
--- a/modules/draco/test/draco.bench.js
+++ b/modules/draco/test/draco.bench.js
@@ -1,4 +1,4 @@
-import {fetchFile, parseFileSync, encodeFile, _getMeshSize} from '@loaders.gl/core';
+import {fetchFile, parseSync, encode, _getMeshSize} from '@loaders.gl/core';
 import {DracoLoader, DracoWriter} from '@loaders.gl/draco';
 
 const OPTIONS = [
@@ -31,7 +31,7 @@ export default async function dracoBench(bench) {
   const rawSize = _getMeshSize(attributes);
 
   OPTIONS.forEach(options => {
-    const dracoEncoder = encodeFile({attributes}, DracoWriter, options);
+    const dracoEncoder = encode({attributes}, DracoWriter, options);
     const compressedPointCloud = dracoEncoder.encodePointCloud(attributes);
     // eslint-disable-next-line
     console.log(`${options.name} compression rate:
@@ -42,7 +42,7 @@
         dracoEncoder.encodePointCloud(attributes);
       })
       .add(`DracoDecoder#decode point cloud#${options.name}`, () => {
-        parseFileSync(compressedPointCloud, DracoLoader);
+        parseSync(compressedPointCloud, DracoLoader);
       });
   });
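The synchronous roundtrip these Draco tests rely on, as a minimal sketch (the sample positions are placeholders):

```
import {encodeSync, parseSync} from '@loaders.gl/core';
import {DracoWriter, DracoLoader} from '@loaders.gl/draco';

// Compress a point cloud with the renamed sync writer API...
const attributes = {POSITION: new Float32Array([0, 0, 0, 1, 1, 1])};
const compressed = encodeSync({attributes}, DracoWriter, {pointcloud: true});

// ...and decode it again to verify the roundtrip
const decoded = parseSync(compressed, DracoLoader);
// decoded.attributes.POSITION.value has the same length as the input
```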
diff --git a/modules/experimental/src/json-model-loader/json-loader.js b/modules/experimental/src/json-model-loader/json-loader.js
index 4e8f4dfd4b..d272ec7fed 100644
--- a/modules/experimental/src/json-model-loader/json-loader.js
+++ b/modules/experimental/src/json-model-loader/json-loader.js
@@ -1,11 +1,11 @@
-import {loadFile} from './browser-load-file';
+import {load} from './browser-load-file';
 import {Program} from '../webgl';
 import {Model} from '../core';
 import {Geometry} from '../geometry';
 
 // Loads a simple JSON format
 export function loadModel(gl, opts = {}) {
-  return loadFile(opts).then(([file]) => parseModel(gl, Object.assign({file}, opts)));
+  return load(opts).then(([file]) => parseModel(gl, Object.assign({file}, opts)));
 }
 
 export function parseModel(gl, opts = {}) {
diff --git a/modules/gltf/test/gltf/gltf-loader.spec.js b/modules/gltf/test/gltf/gltf-loader.spec.js
index 85faaccfb9..c3d49ea70c 100644
--- a/modules/gltf/test/gltf/gltf-loader.spec.js
+++ b/modules/gltf/test/gltf/gltf-loader.spec.js
@@ -1,7 +1,7 @@
 /* eslint-disable max-len */
 import test from 'tape-promise/tape';
 
-import {loadFile, parseFileSync, fetchFile} from '@loaders.gl/core';
+import {load, parseSync, fetchFile} from '@loaders.gl/core';
 import {GLTFLoader, GLBParser, GLTFParser} from '@loaders.gl/gltf';
 
 const GLTF_BINARY_URL = '@loaders.gl/gltf/test/data/gltf-2.0/2CylinderEngine.glb';
@@ -21,7 +21,7 @@ test('GLTFParser#parseSync(text/JSON)', async t => {
   const response = await fetchFile(GLTF_JSON_URL);
   const data = await response.text();
 
-  let gltf = parseFileSync(data, GLTFLoader);
+  let gltf = parseSync(data, GLTFLoader);
   t.ok(gltf, 'GLTFLoader returned parsed data');
 
   gltf = new GLTFParser().parseSync(data);
@@ -34,7 +34,7 @@ test('GLTFParser#parseSync(binary)', async t => {
   const response = await fetchFile(GLTF_BINARY_URL);
   const data = await response.arrayBuffer();
 
-  let gltf = parseFileSync(data, GLTFLoader);
+  let gltf = parseSync(data, GLTFLoader);
   t.ok(gltf, 'GLTFLoader returned parsed data');
 
   gltf = new GLTFParser().parseSync(data);
@@ -47,14 +47,14 @@ test('GLTFParser#parseSync(binary)', async t => {
   t.end();
 });
 
-test('GLTFLoader#loadFile(binary)', async t => {
-  const data = await loadFile(GLTF_BINARY_URL, GLTFLoader);
+test('GLTFLoader#load(binary)', async t => {
+  const data = await load(GLTF_BINARY_URL, GLTFLoader);
   t.ok(data.asset, 'GLTFLoader returned parsed data');
   t.end();
 });
 
-test('GLTFLoader#loadFile(text)', async t => {
-  const data = await loadFile(GLTF_JSON_URL, GLTFLoader);
+test('GLTFLoader#load(text)', async t => {
+  const data = await load(GLTF_JSON_URL, GLTFLoader);
   t.ok(data.asset, 'GLTFLoader returned parsed data');
   t.end();
 });
diff --git a/modules/images/test/data/test.png b/modules/images/test/data/test.png
index 9e7079c9dbae6d11372fd189a4694a167f8782b2..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 100644
GIT binary patch
literal 0
HcmV?d00001

literal 95
zcmeAS@N?(olHy`uVBq!ia0vp^OhC-c!3HFEzPi^467Y0!45?szBUN_^$ddr^SPwEX
pGB9u)R>=E*PS|DD>=eJ`%85@n8Cw2nivra!c)I$ztaD0e0sz=t7-|3j

diff --git a/modules/images/test/index.js b/modules/images/test/index.js
index 3804d86723..54fbf16be8 100644
--- a/modules/images/test/index.js
+++ b/modules/images/test/index.js
@@ -1,3 +1,3 @@
 import './image-utils/get-image-size.spec';
 import './load-image/load-image.spec.js';
-import './write-image/write-and-read-image.spec.js';
+// import './write-image/write-and-read-image.spec.js';
diff --git a/modules/images/test/load-image/load-image.spec.js b/modules/images/test/load-image/load-image.spec.js
index fab1783a6c..cb22e8da46 100644
--- a/modules/images/test/load-image/load-image.spec.js
+++ b/modules/images/test/load-image/load-image.spec.js
@@ -1,5 +1,5 @@
 import test from 'tape-promise/tape';
-import {isBrowser, fetchFile, loadFile} from '@loaders.gl/core';
+import {isBrowser, fetchFile, load} from '@loaders.gl/core';
 import {loadImage, ImageLoader} from '@loaders.gl/images';
 
 import path from 'path';
@@ -52,14 +52,14 @@ test('images#loadImage (NODE)', async t => {
   t.end();
 });
 
-test('images#loadFile(ImageLoader) (NODE)', async t => {
+test('images#load(ImageLoader) (NODE)', async t => {
   if (isBrowser) {
     t.comment('Skip loadImage file in browser');
     t.end();
     return;
   }
 
-  const result = await loadFile(TEST_URL, ImageLoader);
+  const result = await load(TEST_URL, ImageLoader);
   t.ok(result, 'image loaded successfully');
   t.end();
 });
diff --git a/modules/kml/test/kml-as-geojson-loader.spec.js b/modules/kml/test/kml-as-geojson-loader.spec.js
index 19da34d607..b2a31bf00b 100644
--- a/modules/kml/test/kml-as-geojson-loader.spec.js
+++ b/modules/kml/test/kml-as-geojson-loader.spec.js
@@ -1,6 +1,6 @@
 /* eslint-disable max-len */
 import test from 'tape-promise/tape';
-import {parseFileSync} from '@loaders.gl/core';
+import {parseSync} from '@loaders.gl/core';
 import {KMLasGeoJsonLoader} from '@loaders.gl/kml';
 
 import KML from './data/KML_Samples.kml';
@@ -27,7 +27,7 @@ test('KMLasGeoJsonLoader#parse(text)', t => {
   if (!KMLasGeoJsonLoader.supported) {
     t.comment('XML parsing not available');
   } else {
-    const data = parseFileSync(KML, KMLasGeoJsonLoader, {log: null});
+    const data = parseSync(KML, KMLasGeoJsonLoader, {log: null});
     t.equal(data.type, 'FeatureCollection', 'FeatureCollection found');
     t.equal(data.features.length, 19, 'Features were found');
   }
diff --git a/modules/kml/test/kml-loader.spec.js b/modules/kml/test/kml-loader.spec.js
index c28b2bd9d0..350ad386a1 100644
--- a/modules/kml/test/kml-loader.spec.js
+++ b/modules/kml/test/kml-loader.spec.js
@@ -1,6 +1,6 @@
 /* eslint-disable max-len */
 import test from 'tape-promise/tape';
-import {parseFileSync} from '@loaders.gl/core';
+import {parseSync} from '@loaders.gl/core';
 import {KMLLoader} from '@loaders.gl/kml';
 
 import KML from './data/KML_Samples.kml';
@@ -27,7 +27,7 @@ test('KMLLoader#parseText', t => {
   if (!KMLLoader.supported) {
     t.comment('XML parsing not available');
   } else {
-    const data = parseFileSync(KML, KMLLoader, {log: null});
+    const data = parseSync(KML, KMLLoader, {log: null});
     t.equal(data.documents.length, 2, 'Documents were found');
     t.equal(data.markers.length, 4, 'Markers were found');
     t.equal(data.lines.length, 6, 'Lines were found');
diff --git a/modules/las/src/laz-perf.js b/modules/las/src/laz-perf.js
index 57b2c9ff4e..2995f8ea85 100644
--- a/modules/las/src/laz-perf.js
+++ b/modules/las/src/laz-perf.js
@@ -33575,7 +33575,7 @@
       },
       DB_VERSION: 20,
       DB_STORE_NAME: 'FILE_DATA',
-      saveFilesToDB: function(paths, onload, onerror) {
+      savesToDB: function(paths, onload, onerror) {
        onload = onload || function() {};
        onerror = onerror || function() {};
        var indexedDB = FS.indexedDB();
@@ -33615,7 +33615,7 @@
        };
        openRequest.onerror = onerror;
       },
-      loadFilesFromDB: function(paths, onload, onerror) {
+      loadsFromDB: function(paths, onload, onerror) {
        onload = onload || function() {};
        onerror = onerror || function() {};
        var indexedDB = FS.indexedDB();
diff --git a/modules/las/test/las-loader.spec.js b/modules/las/test/las-loader.spec.js
index 67b1f4c0eb..14cc9f4eaa 100644
--- a/modules/las/test/las-loader.spec.js
+++ b/modules/las/test/las-loader.spec.js
@@ -1,13 +1,13 @@
 /* eslint-disable max-len */
 import test from 'tape-promise/tape';
-import {fetchFile, parseFile, loadFile} from '@loaders.gl/core';
+import {fetchFile, parse, load} from '@loaders.gl/core';
 import {LASLoader, LASWorkerLoader} from '@loaders.gl/las';
 
 import {validateLoadedData} from 'test/common/conformance';
 
 const LAS_BINARY_URL = '@loaders.gl/las/test/data/indoor.laz';
 
 test('LASLoader#parseBinary', async t => {
-  const data = await parseFile(fetchFile(LAS_BINARY_URL), LASLoader, {skip: 10});
+  const data = await parse(fetchFile(LAS_BINARY_URL), LASLoader, {skip: 10});
   validateLoadedData(t, data);
 
   t.is(data.header.vertexCount, data.loaderData.header.totalRead, 'Original header was found');
@@ -26,7 +26,7 @@ test('LASWorkerLoader#parseBinary', async t => {
     return;
   }
 
-  const data = await loadFile(LAS_BINARY_URL, LASWorkerLoader, {skip: 10});
+  const data = await load(LAS_BINARY_URL, LASWorkerLoader, {skip: 10});
   validateLoadedData(t, data);
 
   t.equal(data.attributes.POSITION.value.length, 80805 * 3, 'POSITION attribute was found');
diff --git a/modules/obj/test/obj-loader.spec.js b/modules/obj/test/obj-loader.spec.js
index aaf9242179..80231da8a2 100644
--- a/modules/obj/test/obj-loader.spec.js
+++ b/modules/obj/test/obj-loader.spec.js
@@ -1,6 +1,6 @@
 /* eslint-disable max-len */
 import test from 'tape-promise/tape';
-import {loadFile} from '@loaders.gl/core';
+import {load} from '@loaders.gl/core';
 import {OBJLoader, OBJWorkerLoader} from '@loaders.gl/obj';
 
 // Note: The Sublime Text Editor hides OBJ files from the file tree...
@@ -9,7 +9,7 @@ import {validateLoadedData} from 'test/common/conformance';
 const OBJ_ASCII_URL = '@loaders.gl/obj/test/data/bunny.obj';
 
 test('OBJLoader#parseText', async t => {
-  const data = await loadFile(OBJ_ASCII_URL, OBJLoader);
+  const data = await load(OBJ_ASCII_URL, OBJLoader);
   validateLoadedData(t, data);
 
   t.equal(data.mode, 4, 'mode is TRIANGLES (4)');
@@ -36,7 +36,7 @@ test('OBJWorkerLoader#parse(text)', async t => {
     return;
   }
 
-  const data = await loadFile(OBJ_ASCII_URL, OBJWorkerLoader);
+  const data = await load(OBJ_ASCII_URL, OBJWorkerLoader);
 
   validateLoadedData(t, data);
 
diff --git a/modules/pcd/test/pcd-loader.spec.js b/modules/pcd/test/pcd-loader.spec.js
index d399ab2ca3..ea7f8c52cd 100644
--- a/modules/pcd/test/pcd-loader.spec.js
+++ b/modules/pcd/test/pcd-loader.spec.js
@@ -1,6 +1,6 @@
 /* eslint-disable max-len */
 import test from 'tape-promise/tape';
-import {fetchFile, parseFile, loadFile} from '@loaders.gl/core';
+import {fetchFile, parse, load} from '@loaders.gl/core';
 import {PCDLoader, PCDWorkerLoader} from '@loaders.gl/pcd';
 
 import {validateLoadedData} from 'test/common/conformance';
@@ -9,7 +9,7 @@ const PCD_ASCII_URL = '@loaders.gl/pcd/test/data/simple-ascii.pcd';
 const PCD_BINARY_URL = '@loaders.gl/pcd/test/data/Zaghetto.pcd';
 
 test('PCDLoader#parse(text)', async t => {
-  const data = await parseFile(fetchFile(PCD_ASCII_URL), PCDLoader);
+  const data = await parse(fetchFile(PCD_ASCII_URL), PCDLoader);
   validateLoadedData(t, data);
 
   t.equal(data.mode, 0, 'mode is POINTS (0)');
@@ -22,7 +22,7 @@ test('PCDLoader#parse(text)', async t => {
 });
 
 test('PCDLoader#parse(binary)', async t => {
-  const data = await parseFile(fetchFile(PCD_BINARY_URL), PCDLoader);
+  const data = await parse(fetchFile(PCD_BINARY_URL), PCDLoader);
   validateLoadedData(t, data);
 
   t.equal(data.mode, 0, 'mode is POINTS (0)');
@@ -39,7 +39,7 @@ test('PCDWorkerLoader#parse(binary)', async t => {
     return;
   }
 
-  const data = await loadFile(PCD_BINARY_URL, PCDWorkerLoader);
+  const data = await load(PCD_BINARY_URL, PCDWorkerLoader);
   validateLoadedData(t, data);
 
   t.equal(data.mode, 0, 'mode is POINTS (0)');
diff --git a/modules/ply/test/ply-loader.bench.js b/modules/ply/test/ply-loader.bench.js
index 6f0a906046..084e27acd2 100644
--- a/modules/ply/test/ply-loader.bench.js
+++ b/modules/ply/test/ply-loader.bench.js
@@ -1,4 +1,4 @@
-import {fetchFile, loadFile, createReadStream, getStreamIterator} from '@loaders.gl/core';
+import {fetchFile, load, createReadStream, getStreamIterator} from '@loaders.gl/core';
 import {PLYLoader, PLYWorkerLoader, _PLYStreamLoader} from '@loaders.gl/ply';
 
 export default function PLYLoaderBench(bench) {
@@ -8,14 +8,14 @@ export default function PLYLoaderBench(bench) {
 
       .group('PLYLoader (ASCII)')
       .addAsync('Atomic parsing', async () => {
-        await loadFile('@loaders.gl/ply/test/data/cube_att.ply', PLYLoader);
+        await load('@loaders.gl/ply/test/data/cube_att.ply', PLYLoader);
       })
      .addAsync('Worker parsing', async () => {
         // Once binary is transferred to worker it cannot be read from the main thread
         // Duplicate it here to avoid breaking other tests
         const response = await fetchFile('@loaders.gl/ply/test/data/bun_zipper.ply');
         const arrayBuffer = await response.arrayBuffer();
-        await loadFile(arrayBuffer, PLYWorkerLoader);
+        await load(arrayBuffer, PLYWorkerLoader);
       })
       .addAsync('Stream parsing', async () => {
         const stream = await createReadStream('@loaders.gl/ply/test/data/cube_att.ply');
@@ -24,12 +24,12 @@ export default function PLYLoaderBench(bench) {
 
       .group('PLYLoader (Binary)')
       .addAsync('Atomic parsing', async () => {
-        await loadFile('@loaders.gl/ply/test/data/cube_att.ply', PLYLoader);
+        await load('@loaders.gl/ply/test/data/cube_att.ply', PLYLoader);
       })
       .addAsync('Worker parsing', async () => {
         const response = await fetchFile('@loaders.gl/ply/test/data/bun_zipper.ply');
         const arrayBuffer = await response.arrayBuffer();
-        await loadFile(arrayBuffer, PLYWorkerLoader);
+        await load(arrayBuffer, PLYWorkerLoader);
       })
   );
 }
diff --git a/modules/ply/test/ply-loader.spec.js b/modules/ply/test/ply-loader.spec.js
index 40cda1c4d3..72d3c1a517 100644
--- a/modules/ply/test/ply-loader.spec.js
+++ b/modules/ply/test/ply-loader.spec.js
@@ -1,6 +1,6 @@
 /* eslint-disable max-len */
 import test from 'tape-promise/tape';
-import {fetchFile, parseFile, parseFileSync, loadFile} from '@loaders.gl/core';
+import {fetchFile, parse, parseSync, load} from '@loaders.gl/core';
 import {getStreamIterator} from '@loaders.gl/core';
 import {PLYLoader, PLYWorkerLoader, _PLYStreamLoader} from '@loaders.gl/ply';
 
@@ -17,7 +17,7 @@ function validateTextPLY(t, data) {
 }
 
 test('PLYLoader#parse(textFile)', async t => {
-  const data = await parseFile(fetchFile(PLY_CUBE_ATT_URL), PLYLoader, {});
+  const data = await parse(fetchFile(PLY_CUBE_ATT_URL), PLYLoader, {});
   validateLoadedData(t, data);
   validateTextPLY(t, data);
 
@@ -25,7 +25,7 @@ test('PLYLoader#parse(textFile)', async t => {
 });
 
 test('PLYLoader#parse(binary)', async t => {
-  const data = await parseFile(fetchFile(PLY_BUN_BINARY_URL), PLYLoader);
+  const data = await parse(fetchFile(PLY_BUN_BINARY_URL), PLYLoader);
   validateLoadedData(t, data);
 
   t.equal(data.attributes.POSITION.value.length, 104502, 'POSITION attribute was found');
@@ -33,16 +33,16 @@ test('PLYLoader#parse(binary)', async t => {
 });
 
 test('PLYLoader#parse(ascii)', async t => {
-  const data = await parseFile(fetchFile(PLY_BUN_ZIPPER_URL), PLYLoader);
+  const data = await parse(fetchFile(PLY_BUN_ZIPPER_URL), PLYLoader);
   validateLoadedData(t, data);
 
   t.equal(data.attributes.POSITION.value.length, 107841, 'POSITION attribute was found');
   t.end();
 });
 
-test('PLYLoader#parseFileSync(binary)', async t => {
+test('PLYLoader#parseSync(binary)', async t => {
   const arrayBuffer = await fetchFile(PLY_BUN_ZIPPER_URL).then(res => res.arrayBuffer());
-  const data = parseFileSync(arrayBuffer, PLYLoader);
+  const data = parseSync(arrayBuffer, PLYLoader);
   validateLoadedData(t, data);
 
   t.equal(data.attributes.POSITION.value.length, 107841, 'POSITION attribute was found');
@@ -56,7 +56,7 @@ test('PLYLoader#parse(WORKER)', async t => {
     return;
   }
 
-  const data = await loadFile(PLY_BUN_ZIPPER_URL, PLYWorkerLoader);
+  const data = await load(PLY_BUN_ZIPPER_URL, PLYWorkerLoader);
   validateLoadedData(t, data);
 
   t.equal(data.attributes.POSITION.value.length, 107841, 'POSITION attribute was found');
diff --git a/modules/zip/test/zip-writer-loader.spec.js b/modules/zip/test/zip-writer-loader.spec.js
index a28da96230..f7dc178bc9 100644
--- a/modules/zip/test/zip-writer-loader.spec.js
+++ b/modules/zip/test/zip-writer-loader.spec.js
@@ -1,5 +1,5 @@
 import test from 'tape-promise/tape';
-import {encodeFile, parseFile, TextDecoder} from '@loaders.gl/core';
+import {encode, parse, TextDecoder} from '@loaders.gl/core';
 import {ZipWriter, ZipLoader} from '@loaders.gl/zip';
 
 const FILE_MAP = {
@@ -10,8 +10,8 @@ const FILE_MAP = {
 };
 
 test('Zip#encode/decode', t => {
-  encodeFile(FILE_MAP, ZipWriter)
-    .then(arrayBuffer => parseFile(arrayBuffer, ZipLoader))
+  encode(FILE_MAP, ZipWriter)
+    .then(arrayBuffer => parse(arrayBuffer, ZipLoader))
     .then(fileMap => {
       for (const key in FILE_MAP) {
         const text = new TextDecoder().decode(fileMap[key]);
diff --git a/test/render/test-cases/mesh.js b/test/render/test-cases/mesh.js
index 245fdb7c3f..b00bd8fff6 100644
--- a/test/render/test-cases/mesh.js
+++ b/test/render/test-cases/mesh.js
@@ -1,4 +1,4 @@
-import {loadFile} from '@loaders.gl/core';
+import {load} from '@loaders.gl/core';
 import {PLYLoader} from '@loaders.gl/ply';
 
 import {getModel, drawModelInViewport} from '../test-utils/get-model';
@@ -9,7 +9,7 @@ export default [
     name: 'PLYLoader',
     goldenImage: './test/render/golden-images/ply-loader.png',
     onInitialize: async ({gl}) => {
-      const model = getModel(gl, await loadFile(PLY_BINARY_URL, PLYLoader));
+      const model = getModel(gl, await load(PLY_BINARY_URL, PLYLoader));
       return {model};
     },
     onRender: ({model, done}) => {
diff --git a/test/render/test-cases/point-cloud.js b/test/render/test-cases/point-cloud.js
index 258fa6d9be..4a28702d30 100644
--- a/test/render/test-cases/point-cloud.js
+++ b/test/render/test-cases/point-cloud.js
@@ -1,4 +1,4 @@
-import {loadFile, fetchFile, parseFileSync, encodeFileSync} from '@loaders.gl/core';
+import {load, fetchFile, parseSync, encodeSync} from '@loaders.gl/core';
 import {DracoWriter, DracoLoader} from '@loaders.gl/draco';
 import {LASLoader} from '@loaders.gl/las';
 
@@ -33,7 +33,7 @@ export default [
     name: 'LAZ pointcloud',
     disabled: true, // Seems breaking on master
     onInitialize: async ({gl}) => {
-      const lazPointCloud = await loadFile(LAS_BINARY_URL, LASLoader, {skip: 10});
+      const lazPointCloud = await load(LAS_BINARY_URL, LASLoader, {skip: 10});
       const model = getModel(gl, lazPointCloud);
       return {model, lazPointCloud};
     },
@@ -78,7 +78,7 @@ export default [
     onInitialize: async ({gl}) => {
       const kittiPointCloudRaw = await loadKittiPointCloud();
       // Encode/decode mesh with Draco
-      const compressedMesh = encodeFileSync({attributes: kittiPointCloudRaw}, DracoWriter, {
+      const compressedMesh = encodeSync({attributes: kittiPointCloudRaw}, DracoWriter, {
         pointcloud: true,
         quantization: {
          POSITION: 14
@@ -87,7 +87,7 @@ export default [
       // eslint-disable-next-line
       // console.log(compressedMesh.byteLength);
-      const kittiPointCloudFromDraco = parseFileSync(compressedMesh, DracoLoader);
+      const kittiPointCloudFromDraco = parseSync(compressedMesh, DracoLoader);
       const model = getModel(gl, kittiPointCloudFromDraco);
       return {model};
     },
diff --git a/test/size/import-nothing.js b/test/size/import-nothing.js
index d337043e1f..3ccc6e4bd8 100644
--- a/test/size/import-nothing.js
+++ b/test/size/import-nothing.js
@@ -1,3 +1,3 @@
-import {parseFile} from '@loaders.gl/core';
+import {parse} from '@loaders.gl/core';
 
-console.log(parseFile); // eslint-disable-line
+console.log(parse); // eslint-disable-line
diff --git a/website-ocular/src/mdRoutes.js b/website-ocular/src/mdRoutes.js
index c2090645ed..ee19a6d722 100644
--- a/website-ocular/src/mdRoutes.js
+++ b/website-ocular/src/mdRoutes.js
@@ -70,7 +70,7 @@ export default [
         name: 'Core Functions',
         children: [
           {
-            name: 'loadFile',
-            markdown: require('../../docs/api-reference/core/load-file.md')
+            name: 'load',
+            markdown: require('../../docs/api-reference/core/load.md')
           },
           {