diff --git a/modules/3d-tiles/test/lib/utils/load-utils.ts b/modules/3d-tiles/test/lib/utils/load-utils.ts index 21d810854e..6618c3c310 100644 --- a/modules/3d-tiles/test/lib/utils/load-utils.ts +++ b/modules/3d-tiles/test/lib/utils/load-utils.ts @@ -3,22 +3,17 @@ import {fetchFile, load} from '@loaders.gl/core'; import {Tiles3DLoader} from '@loaders.gl/3d-tiles'; -import {Tileset3D} from '@loaders.gl/tiles'; - -/** @typedef {import('@loaders.gl/tiles').Tile3D} Tile3D */ +import {Tileset3D, Tile3D} from '@loaders.gl/tiles'; /** - * @returns {Promise} */ -export async function loadRootTile(t, tilesetUrl) { +export async function loadRootTile(t, tilesetUrl): Promise { try { // Load tileset const tilesetJson = await load(tilesetUrl, Tiles3DLoader); const tileset = new Tileset3D(tilesetJson, tilesetUrl); // Load root tile - /** @type {Tile3D} */ - // @ts-ignore const sourceRootTile = tileset.root as Tile3D; await tileset._loadTile(sourceRootTile); return sourceRootTile; diff --git a/modules/compression/package.json b/modules/compression/package.json index db4cff53de..ce4bf4fd5d 100644 --- a/modules/compression/package.json +++ b/modules/compression/package.json @@ -41,20 +41,21 @@ "@babel/runtime": "^7.3.1", "@loaders.gl/loader-utils": "4.0.0-alpha.6", "@loaders.gl/worker-utils": "4.0.0-alpha.6", - "@types/brotli": "^1.3.0", "@types/pako": "^1.0.1", + "brotli": "^1.3.3", + "brotli-wasm": "^1.3.1", "fflate": "0.7.4", "lzo-wasm": "^0.0.4", "pako": "1.0.11", "snappyjs": "^0.6.1" }, "optionalDependencies": { - "brotli": "^1.3.2", + "brotli-compress": "^1.3.3", "lz4js": "^0.2.0", "zstd-codec": "^0.1" }, "devDependencies": { - "brotli": "^1.3.2", + "brotli-compress": "^1.3.3", "lz4js": "^0.2.0", "zstd-codec": "^0.1" }, diff --git a/modules/compression/src/index.ts b/modules/compression/src/index.ts index 35af6cb16d..29b9a12cb7 100644 --- a/modules/compression/src/index.ts +++ b/modules/compression/src/index.ts @@ -3,13 +3,29 @@ export type {CompressionOptions} from 
'./lib/compression'; export {Compression} from './lib/compression'; export {NoCompression} from './lib/no-compression'; -export {DeflateCompression} from './lib/deflate-compression'; -export {GZipCompression} from './lib/gzip-compression'; + +export {DeflateCompression} from './lib/deflate-compression-pako'; +export {DeflateCompressionZlib} from './lib/deflate-compression-zlib'; +export {GZipCompression} from './lib/gzip-compression-pako'; +export {GZipCompressionZlib} from './lib/gzip-compression-zlib'; + export {BrotliCompression} from './lib/brotli-compression'; +export {BrotliCompressionZlib} from './lib/brotli-compression-zlib'; + export {SnappyCompression} from './lib/snappy-compression'; + export {LZ4Compression} from './lib/lz4-compression'; + export {ZstdCompression} from './lib/zstd-compression'; + export {LZOCompression} from './lib/lzo-compression'; export type {CompressionWorkerOptions} from './compression-worker'; export {CompressionWorker, compressOnWorker} from './compression-worker'; + +// Versions +export {DeflateCompression as _DeflateCompressionFflate} from './lib/deflate-compression-fflate'; +export {GZipCompression as _GZipCompressionFflate} from './lib/gzip-compression-fflate'; + +export {DeflateCompression as _DeflateCompressionPako} from './lib/deflate-compression-pako'; +export {GZipCompression as _GZipCompressionPako} from './lib/gzip-compression-pako'; diff --git a/modules/compression/src/lib/brotli-compression-zlib.ts b/modules/compression/src/lib/brotli-compression-zlib.ts new file mode 100644 index 0000000000..32fde79d02 --- /dev/null +++ b/modules/compression/src/lib/brotli-compression-zlib.ts @@ -0,0 +1,64 @@ +// BROTLI +import type {CompressionOptions} from './compression'; +import {Compression} from './compression'; +import {isBrowser, toArrayBuffer} from '@loaders.gl/loader-utils'; +import zlib, {BrotliOptions} from 'zlib'; +import {promisify1, promisify2} from '@loaders.gl/loader-utils'; + +export type 
BrotliCompressionZlibOptions = CompressionOptions & { + brotliZlib?: BrotliOptions; +}; + +/** + * brotli compression / decompression + * zlib implementation + * @note Node uses compression level 11 by default which is 100x slower!! + */ +export class BrotliCompressionZlib extends Compression { + readonly name: string = 'brotli'; + readonly extensions = ['br']; + readonly contentEncodings = ['br']; + readonly isSupported = true; + readonly options: BrotliCompressionZlibOptions; + + constructor(options: BrotliCompressionZlibOptions = {}) { + super(options); + this.options = options; + if (isBrowser) { + throw new Error('zlib only available under Node.js'); + } + } + + async compress(input: ArrayBuffer): Promise { + const options = this._getBrotliZlibOptions(); + // @ts-expect-error promisify type failure on overload + const buffer = await promisify2(zlib.brotliCompress)(input, options); + return toArrayBuffer(buffer); + } + + compressSync(input: ArrayBuffer): ArrayBuffer { + const options = this._getBrotliZlibOptions(); + const buffer = zlib.brotliCompressSync(input, options); + return toArrayBuffer(buffer); + } + + async decompress(input: ArrayBuffer): Promise { + const buffer = await promisify1(zlib.brotliDecompress)(input); + return toArrayBuffer(buffer); + } + + decompressSync(input: ArrayBuffer): ArrayBuffer { + const buffer = zlib.brotliDecompressSync(input); + return toArrayBuffer(buffer); + } + + private _getBrotliZlibOptions(): BrotliOptions { + // {params: {[zlib.constants.BROTLI_PARAM_QUALITY]: 4}} + return { + params: { + [zlib.constants.BROTLI_PARAM_QUALITY]: Compression.DEFAULT_COMPRESSION_LEVEL, + ...this.options?.brotliZlib + } + }; + } +} diff --git a/modules/compression/src/lib/brotli-compression.ts b/modules/compression/src/lib/brotli-compression.ts index 1105a6ceb5..9e0688139d 100644 --- a/modules/compression/src/lib/brotli-compression.ts +++ b/modules/compression/src/lib/brotli-compression.ts @@ -1,45 +1,48 @@ // BROTLI -import type 
{CompressionOptions} from './compression'; +import {isBrowser} from '@loaders.gl/loader-utils'; import {Compression} from './compression'; -import {isBrowser, toArrayBuffer} from '@loaders.gl/loader-utils'; +import {BrotliCompressionZlib, BrotliCompressionZlibOptions} from './brotli-compression-zlib'; import type brotliNamespace from 'brotli'; -// import brotli from 'brotli'; // https://bundlephobia.com/package/brotli -import {BrotliDecode} from '../brotli/decode'; -import zlib from 'zlib'; -import {promisify1} from '@loaders.gl/loader-utils'; +import type {BrotliOptions} from 'brotli'; +// import brotli from 'brotli'; +// import {BrotliDecode} from '../brotli/decode'; -export type BrotliCompressionOptions = CompressionOptions & { - brotli?: { - mode?: number; - quality?: number; - lgwin?: number; - useZlib?: boolean; - }; -}; - -const DEFAULT_BROTLI_OPTIONS = { - brotli: { - mode: 0, - quality: 8, - lgwin: 22 - } +export type BrotliCompressionOptions = BrotliCompressionZlibOptions & { + brotli?: {}; }; let brotli: typeof brotliNamespace; /** * brotli compression / decompression + * Implemented with brotli package + * @see https://bundlephobia.com/package/brotli */ export class BrotliCompression extends Compression { readonly name: string = 'brotli'; readonly extensions = ['br']; readonly contentEncodings = ['br']; - readonly isSupported = true; + + get isSupported() { + return brotli; + } + get isCompressionSupported() { + return false; + } + readonly options: BrotliCompressionOptions; - constructor(options: BrotliCompressionOptions) { + constructor(options: BrotliCompressionOptions = {}) { super(options); this.options = options; + + // dependency injection + brotli = brotli || this.options?.modules?.brotli || Compression.modules.brotli; + + if (!isBrowser && this.options.useZlib) { + // @ts-ignore public API is equivalent + return new BrotliCompressionZlib(options); + } } /** @@ -47,65 +50,42 @@ export class BrotliCompression extends Compression { * @param 
options */ async preload(): Promise { - brotli = brotli || this.options?.modules?.brotli; + brotli = brotli || (await this.options?.modules?.brotli); if (!brotli) { // eslint-disable-next-line no-console console.warn(`${this.name} library not installed`); } } - async compress(input: ArrayBuffer): Promise { - // On Node.js we can use built-in zlib - if (!isBrowser && this.options.brotli?.useZlib) { - const buffer = await promisify1(zlib.brotliCompress)(input); - return toArrayBuffer(buffer); - } - return this.compressSync(input); - } - compressSync(input: ArrayBuffer): ArrayBuffer { - // On Node.js we can use built-in zlib - if (!isBrowser && this.options.brotli?.useZlib) { - const buffer = zlib.brotliCompressSync(input); - return toArrayBuffer(buffer); - } - const brotliOptions = {...DEFAULT_BROTLI_OPTIONS.brotli, ...this.options?.brotli}; - const inputArray = new Uint8Array(input); - if (!brotli) { throw new Error('brotli compression: brotli module not installed'); } - - // @ts-ignore brotli types state that only Buffers are accepted... 
- const outputArray = brotli.compress(inputArray, brotliOptions); + const options = this._getBrotliOptions(); + const inputArray = new Uint8Array(input); + const outputArray = brotli.compress(inputArray, options); return outputArray.buffer; } - async decompress(input: ArrayBuffer): Promise { - // On Node.js we can use built-in zlib - if (!isBrowser && this.options.brotli?.useZlib) { - const buffer = await promisify1(zlib.brotliDecompress)(input); - return toArrayBuffer(buffer); - } - return this.decompressSync(input); - } - decompressSync(input: ArrayBuffer): ArrayBuffer { - // On Node.js we can use built-in zlib - if (!isBrowser && this.options.brotli?.useZlib) { - const buffer = zlib.brotliDecompressSync(input); - return toArrayBuffer(buffer); + if (!brotli) { + throw new Error('brotli compression: brotli module not installed'); } - const brotliOptions = {...DEFAULT_BROTLI_OPTIONS.brotli, ...this.options?.brotli}; + const options = this._getBrotliOptions(); const inputArray = new Uint8Array(input); - if (brotli) { - // @ts-ignore brotli types state that only Buffers are accepted... - const outputArray = brotli.decompress(inputArray, brotliOptions); - return outputArray.buffer; - } - const outputArray = BrotliDecode(inputArray, undefined); + // @ts-ignore brotli types state that only Buffers are accepted... 
+ const outputArray = brotli.decompress(inputArray, options); return outputArray.buffer; + // const outputArray = BrotliDecode(inputArray, undefined); + // return outputArray.buffer; + } + + private _getBrotliOptions(): BrotliOptions { + return { + level: this.options.quality || Compression.DEFAULT_COMPRESSION_LEVEL, + ...this.options?.brotli + }; } } diff --git a/modules/compression/src/lib/compression.ts b/modules/compression/src/lib/compression.ts index 60c9127d74..d602c830b7 100644 --- a/modules/compression/src/lib/compression.ts +++ b/modules/compression/src/lib/compression.ts @@ -3,18 +3,57 @@ import {concatenateArrayBuffersAsync} from '@loaders.gl/loader-utils'; /** Compression options */ export type CompressionOptions = { - // operation: 'compress' | 'decompress'; - modules?: {[moduleName: string]: any}; + /** + * Compression quality (higher values better compression but exponentially slower) + * brotli goes from 1-11 + * zlib goes from 1-9 + * 5 or 6 is usually a good compromise + */ + quality?: number; + + /** + * Whether to use built-in Zlib on node.js for max performance (doesn't handle incremental compression) + * Currently only deflate, gzip and brotli are supported. 
+ */ + useZlib?: boolean; + + /** + * Injection of npm modules - keeps large compression libraries out of standard bundle + */ + modules?: CompressionModules; +}; + +/** + * Injection of npm modules - keeps large compression libraries out of standard bundle + */ +export type CompressionModules = { + brotli?: any; + lz4js?: any; + lzo?: any; + 'zstd-codec'?: any; }; /** Compression */ export abstract class Compression { + /** Default compression level for gzip, brotli etc */ + static DEFAULT_COMPRESSION_LEVEL = 5; + + /** Name of the compression */ abstract readonly name: string; + /** File extensions used for this */ abstract readonly extensions: string[]; + /** Strings used for Content-Encoding headers in browser */ abstract readonly contentEncodings: string[]; + /** Whether decompression is supported */ abstract readonly isSupported: boolean; + /** Whether compression is supported */ + get isCompressionSupported(): boolean { + return this.isSupported; + } + + static modules: CompressionModules = {}; - constructor(options?: CompressionOptions) { + constructor(options) { this.compressBatches = this.compressBatches.bind(this); this.decompressBatches = this.decompressBatches.bind(this); } diff --git a/modules/compression/src/lib/deflate-compression-fflate.ts b/modules/compression/src/lib/deflate-compression-fflate.ts new file mode 100644 index 0000000000..85152fc300 --- /dev/null +++ b/modules/compression/src/lib/deflate-compression-fflate.ts @@ -0,0 +1,120 @@ +// loaders.gl, MIT license +import {isBrowser} from '@loaders.gl/loader-utils'; +import {DeflateCompressionZlib, DeflateCompressionZlibOptions} from './deflate-compression-zlib'; +import {Compression} from './compression'; +import type {DeflateOptions} from 'fflate'; +import {deflateSync, inflateSync} from 'fflate'; + +export type DeflateCompressionOptions = DeflateCompressionZlibOptions & { + deflate?: DeflateOptions; +}; + +/** + * DEFLATE compression / decompression + * Implementation using fflate + * @see 
https://bundlephobia.com/package/fflate + */ +export class DeflateCompression extends Compression { + readonly name: string = 'fflate'; + readonly extensions: string[] = []; + readonly contentEncodings = ['fflate', 'gzip, zlib']; + readonly isSupported: boolean = true; + + readonly options: DeflateCompressionOptions; + + constructor(options: DeflateCompressionOptions = {}) { + super(options); + this.options = options; + if (!isBrowser && this.options.useZlib) { + // @ts-ignore public API is equivalent + return new DeflateCompressionZlib(options); + } + } + + // Async fflate uses Workers which interferes with loaders.gl + + // async compress(input: ArrayBuffer): Promise { + // // const options = this.options?.gzip || {}; + // const inputArray = new Uint8Array(input); + // const outputArray = await promisify1(deflate)(inputArray); // options - overload pick + // return outputArray.buffer; + // } + + // async decompress(input: ArrayBuffer): Promise { + // const inputArray = new Uint8Array(input); + // const outputArray = await promisify1(inflate)(inputArray); + // return outputArray.buffer; + // } + + compressSync(input: ArrayBuffer): ArrayBuffer { + const options = this.options?.deflate || {}; + const inputArray = new Uint8Array(input); + return deflateSync(inputArray, options).buffer; + } + + decompressSync(input: ArrayBuffer): ArrayBuffer { + const inputArray = new Uint8Array(input); + return inflateSync(inputArray).buffer; + } + + /* + async *compressBatches( + asyncIterator: AsyncIterable | Iterable + ): AsyncIterable { + const pakoOptions: pako.DeflateOptions = this.options?.fflate || {}; + const pakoProcessor = new pako.Deflate(pakoOptions); + yield* this.transformBatches(pakoProcessor, asyncIterator); + } + + async *decompressBatches( + asyncIterator: AsyncIterable | Iterable + ): AsyncIterable { + const pakoOptions: pako.InflateOptions = this.options?.fflate || {}; + const pakoProcessor = new pako.Inflate(pakoOptions); + yield* 
this.transformBatches(pakoProcessor, asyncIterator); + } + + async *transformBatches( + pakoProcessor: pako.Inflate | pako.Deflate, + asyncIterator: AsyncIterable | Iterable + ): AsyncIterable { + pakoProcessor.onData = this._onData.bind(this); + pakoProcessor.onEnd = this._onEnd.bind(this); + for await (const chunk of asyncIterator) { + const uint8Array = new Uint8Array(chunk); + const ok = pakoProcessor.push(uint8Array, false); // false -> not last chunk + if (!ok) { + throw new Error(`${this._getError()}write`); + } + const chunks = this._getChunks(); + yield* chunks; + } + + // End + const emptyChunk = new Uint8Array(0); + const ok = pakoProcessor.push(emptyChunk, true); // true -> last chunk + if (!ok) { + // For some reason we get error but it still works??? + // throw new Error(this._getError() + 'end'); + } + const chunks = this._getChunks(); + yield* chunks; + } + + _onData(chunk) { + this._chunks.push(chunk); + } + + _onEnd(status) { + if (status !== 0) { + throw new Error(this._getError(status) + this._chunks.length); + } + } + + _getChunks(): ArrayBuffer[] { + const chunks = this._chunks; + this._chunks = []; + return chunks; + } + */ +} diff --git a/modules/compression/src/lib/deflate-compression.ts b/modules/compression/src/lib/deflate-compression-pako.ts similarity index 53% rename from modules/compression/src/lib/deflate-compression.ts rename to modules/compression/src/lib/deflate-compression-pako.ts index 5068485b09..c079ab6393 100644 --- a/modules/compression/src/lib/deflate-compression.ts +++ b/modules/compression/src/lib/deflate-compression-pako.ts @@ -1,17 +1,18 @@ -// DEFLATE -import type {CompressionOptions} from './compression'; +// loaders.gl, MIT license +import {isBrowser} from '@loaders.gl/loader-utils'; +import {DeflateCompressionZlib, DeflateCompressionZlibOptions} from './deflate-compression-zlib'; import {Compression} from './compression'; -import {isBrowser, toArrayBuffer} from '@loaders.gl/loader-utils'; -import pako from 'pako'; 
// https://bundlephobia.com/package/pako -import zlib from 'zlib'; -import {promisify1} from '@loaders.gl/loader-utils'; +import {getPakoError} from './utils/pako-utils'; +import pako from 'pako'; -export type DeflateCompressionOptions = CompressionOptions & { - deflate?: pako.InflateOptions & pako.DeflateOptions & {useZlib?: boolean}; +export type DeflateCompressionOptions = DeflateCompressionZlibOptions & { + deflate?: pako.InflateOptions & pako.DeflateOptions; }; /** * DEFLATE compression / decompression + * Implementation using pako + * @see https://bundlephobia.com/package/pako */ export class DeflateCompression extends Compression { readonly name: string = 'deflate'; @@ -26,47 +27,27 @@ export class DeflateCompression extends Compression { constructor(options: DeflateCompressionOptions = {}) { super(options); this.options = options; + if (!isBrowser && this.options.useZlib) { + // @ts-ignore public API is equivalent + return new DeflateCompressionZlib(options); + } } async compress(input: ArrayBuffer): Promise { - // On Node.js we can use built-in zlib - if (!isBrowser && this.options.deflate?.useZlib) { - const buffer = this.options.deflate?.gzip - ? await promisify1(zlib.gzip)(input) - : await promisify1(zlib.deflate)(input); - return toArrayBuffer(buffer); - } return this.compressSync(input); } async decompress(input: ArrayBuffer): Promise { - // On Node.js we can use built-in zlib - if (!isBrowser && this.options.deflate?.useZlib) { - const buffer = this.options.deflate?.gzip - ? await promisify1(zlib.gunzip)(input) - : await promisify1(zlib.inflate)(input); - return toArrayBuffer(buffer); - } return this.decompressSync(input); } compressSync(input: ArrayBuffer): ArrayBuffer { - // On Node.js we can use built-in zlib - if (!isBrowser && this.options.deflate?.useZlib) { - const buffer = this.options.deflate?.gzip ? 
zlib.gzipSync(input) : zlib.deflateSync(input); - return toArrayBuffer(buffer); - } const pakoOptions: pako.DeflateOptions = this.options?.deflate || {}; const inputArray = new Uint8Array(input); return pako.deflate(inputArray, pakoOptions).buffer; } decompressSync(input: ArrayBuffer): ArrayBuffer { - // On Node.js we can use built-in zlib - if (!isBrowser && this.options.deflate?.useZlib) { - const buffer = this.options.deflate?.gzip ? zlib.gunzipSync(input) : zlib.inflateSync(input); - return toArrayBuffer(buffer); - } const pakoOptions: pako.InflateOptions = this.options?.deflate || {}; const inputArray = new Uint8Array(input); return pako.inflate(inputArray, pakoOptions).buffer; @@ -88,7 +69,7 @@ export class DeflateCompression extends Compression { yield* this.transformBatches(pakoProcessor, asyncIterator); } - async *transformBatches( + private async *transformBatches( pakoProcessor: pako.Inflate | pako.Deflate, asyncIterator: AsyncIterable | Iterable ): AsyncIterable { @@ -98,7 +79,7 @@ export class DeflateCompression extends Compression { const uint8Array = new Uint8Array(chunk); const ok = pakoProcessor.push(uint8Array, false); // false -> not last chunk if (!ok) { - throw new Error(`${this._getError()}write`); + throw new Error(`${getPakoError()}write`); } const chunks = this._getChunks(); yield* chunks; @@ -109,50 +90,25 @@ export class DeflateCompression extends Compression { const ok = pakoProcessor.push(emptyChunk, true); // true -> last chunk if (!ok) { // For some reason we get error but it still works??? 
- // throw new Error(this._getError() + 'end'); + // throw new Error(getPakoError() + 'end'); } const chunks = this._getChunks(); yield* chunks; } - _onData(chunk) { + private _onData(chunk) { this._chunks.push(chunk); } - _onEnd(status) { + private _onEnd(status) { if (status !== 0) { - throw new Error(this._getError(status) + this._chunks.length); + throw new Error(getPakoError(status) + this._chunks.length); } } - _getChunks(): ArrayBuffer[] { + private _getChunks(): ArrayBuffer[] { const chunks = this._chunks; this._chunks = []; return chunks; } - - // TODO - For some reason we don't get the error message from pako in _onEnd? - _getError(code: number = 0): string { - const MESSAGES = { - /* Z_NEED_DICT 2 */ - 2: 'need dictionary', - /* Z_STREAM_END 1 */ - 1: 'stream end', - /* Z_OK 0 */ - 0: '', - /* Z_ERRNO (-1) */ - '-1': 'file error', - /* Z_STREAM_ERROR (-2) */ - '-2': 'stream error', - /* Z_DATA_ERROR (-3) */ - '-3': 'data error', - /* Z_MEM_ERROR (-4) */ - '-4': 'insufficient memory', - /* Z_BUF_ERROR (-5) */ - '-5': 'buffer error', - /* Z_VERSION_ERROR (-6) */ - '-6': 'incompatible version' - }; - return `${this.name}: ${MESSAGES[code]}`; - } } diff --git a/modules/compression/src/lib/deflate-compression-zlib.ts b/modules/compression/src/lib/deflate-compression-zlib.ts new file mode 100644 index 0000000000..57cd9a9111 --- /dev/null +++ b/modules/compression/src/lib/deflate-compression-zlib.ts @@ -0,0 +1,63 @@ +// loaders.gl, MIT license +import {isBrowser, toArrayBuffer} from '@loaders.gl/loader-utils'; +import {promisify2} from '@loaders.gl/loader-utils'; +import type {CompressionOptions} from './compression'; +import {Compression} from './compression'; +import * as zlib from 'zlib'; +import type {ZlibOptions} from 'zlib'; + +export type DeflateCompressionZlibOptions = CompressionOptions & { + deflateZlib?: ZlibOptions; +}; + +/** + * DEFLATE compression / decompression + * Using Node.js zlib library (works under Node only) + */ +export class 
DeflateCompressionZlib extends Compression {
+  readonly name: string = 'deflate';
+  readonly extensions: string[] = [];
+  readonly contentEncodings = ['deflate'];
+  readonly isSupported = !isBrowser;
+
+  readonly options: DeflateCompressionZlibOptions;
+
+  constructor(options: DeflateCompressionZlibOptions = {}) {
+    super(options);
+    this.options = options;
+    if (isBrowser) {
+      throw new Error('zlib only available under Node.js');
+    }
+  }
+
+  async compress(input: ArrayBuffer): Promise<ArrayBuffer> {
+    const options = this._getZlibOptions();
+    const buffer = await promisify2(zlib.deflate)(input, options);
+    return toArrayBuffer(buffer);
+  }
+
+  async decompress(input: ArrayBuffer): Promise<ArrayBuffer> {
+    const options = this._getZlibOptions();
+    const buffer = await promisify2(zlib.inflate)(input, options);
+    return toArrayBuffer(buffer);
+  }
+
+  compressSync(input: ArrayBuffer): ArrayBuffer {
+    const options = this._getZlibOptions();
+    const buffer = zlib.deflateSync(input, options);
+    return toArrayBuffer(buffer);
+  }
+
+  decompressSync(input: ArrayBuffer): ArrayBuffer {
+    const options = this._getZlibOptions();
+    const buffer = zlib.inflateSync(input, options);
+    return toArrayBuffer(buffer);
+  }
+
+  protected _getZlibOptions(): ZlibOptions {
+    return {
+      level: this.options.quality || Compression.DEFAULT_COMPRESSION_LEVEL,
+      ...this.options?.deflateZlib
+    };
+  }
+}
diff --git a/modules/compression/src/lib/gzip-compression-fflate.ts b/modules/compression/src/lib/gzip-compression-fflate.ts
new file mode 100644
index 0000000000..f14f041a08
--- /dev/null
+++ b/modules/compression/src/lib/gzip-compression-fflate.ts
@@ -0,0 +1,114 @@
+// loaders.gl, MIT license
+import {isBrowser} from '@loaders.gl/loader-utils';
+import {GZipCompressionZlib, GZipCompressionZlibOptions} from './gzip-compression-zlib';
+import {Compression} from './compression';
+import type {GzipOptions, AsyncGzipOptions} from 'fflate';
+import {gzipSync, gunzipSync, Gzip, Gunzip} from 'fflate'; // 
https://bundlephobia.com/package/pako + +export type GZipCompressionOptions = GZipCompressionZlibOptions & { + gzip?: GzipOptions | AsyncGzipOptions; +}; + +/** + * GZIP compression / decompression + * Implementation using fflate + * @see https://bundlephobia.com/package/fflate + */ +export class GZipCompression extends Compression { + readonly name: string = 'gzip'; + readonly extensions = ['gz', 'gzip']; + readonly contentEncodings = ['gzip', 'x-gzip']; + readonly isSupported = true; + + readonly options: GZipCompressionOptions; + private _chunks: ArrayBuffer[] = []; + + constructor(options: GZipCompressionOptions = {}) { + super(options); + this.options = options; + if (!isBrowser && this.options.useZlib) { + // @ts-ignore public API is equivalent + return new GZipCompressionZlib(options); + } + } + + // Async fflate uses Workers which interferes with loaders.gl + // async compress(input: ArrayBuffer): Promise { + // // const options = this.options?.gzip || {}; + // const inputArray = new Uint8Array(input); + // const outputArray = await promisify1(gzip)(inputArray); // options - overload pick + // return outputArray.buffer; + // } + + // Async fflate uses Workers which interferes with loaders.gl + // async decompress(input: ArrayBuffer): Promise { + // // const options = this.options?.gzip || {}; + // const inputArray = new Uint8Array(input); + // const outputArray = await promisify1(gunzip)(inputArray); // options - overload pick + // return outputArray.buffer; + // } + + compressSync(input: ArrayBuffer): ArrayBuffer { + const options = this._getFflateOptions(); + const inputArray = new Uint8Array(input); + return gzipSync(inputArray, options).buffer; + } + + decompressSync(input: ArrayBuffer): ArrayBuffer { + const inputArray = new Uint8Array(input); + return gunzipSync(inputArray).buffer; + } + + async *compressBatches( + asyncIterator: AsyncIterable | Iterable + ): AsyncIterable { + const options = this._getFflateOptions(); + const streamProcessor = new 
Gzip(options); + streamProcessor.ondata = this._onData.bind(this); + yield* this.transformBatches(streamProcessor, asyncIterator); + } + + async *decompressBatches( + asyncIterator: AsyncIterable | Iterable + ): AsyncIterable { + const streamProcessor = new Gunzip(); + streamProcessor.ondata = this._onData.bind(this); + yield* this.transformBatches(streamProcessor, asyncIterator); + } + + private async *transformBatches( + streamProcessor: Gzip | Gunzip, + asyncIterator: AsyncIterable | Iterable + ): AsyncIterable { + for await (const chunk of asyncIterator) { + const uint8Array = new Uint8Array(chunk); + streamProcessor.push(uint8Array, false); // false -> not last chunk + const chunks = this._getChunks(); + yield* chunks; + } + + // End + const emptyChunk = new Uint8Array(0); + streamProcessor.push(emptyChunk, true); // true -> last chunk + const chunks = this._getChunks(); + yield* chunks; + } + + private _onData(data: Uint8Array, final: boolean): void { + this._chunks.push(data); + } + + private _getChunks(): ArrayBuffer[] { + const chunks = this._chunks; + this._chunks = []; + return chunks; + } + + private _getFflateOptions(): GzipOptions { + return { + // @ts-ignore-error + level: this.options.quality || Compression.DEFAULT_COMPRESSION_LEVEL, + ...this.options?.gzip + }; + } +} diff --git a/modules/compression/src/lib/gzip-compression-pako.ts b/modules/compression/src/lib/gzip-compression-pako.ts new file mode 100644 index 0000000000..bfc6965539 --- /dev/null +++ b/modules/compression/src/lib/gzip-compression-pako.ts @@ -0,0 +1,123 @@ +// loaders.gl, MIT license +import {isBrowser} from '@loaders.gl/loader-utils'; +import {GZipCompressionZlib, GZipCompressionZlibOptions} from './gzip-compression-zlib'; + +import {Compression} from './compression'; +import {getPakoError} from './utils/pako-utils'; +import pako from 'pako'; + +export type GZipCompressionOptions = GZipCompressionZlibOptions & { + gzip?: pako.InflateOptions & pako.DeflateOptions; +}; + +/** + * 
GZIP compression / decompression + * Implementation using pako + * @see https://bundlephobia.com/package/pako + */ +export class GZipCompression extends Compression { + readonly name: string = 'gzip'; + readonly extensions = ['gz', 'gzip']; + readonly contentEncodings = ['gzip', 'x-gzip']; + readonly isSupported = true; + + readonly options: GZipCompressionOptions; + + private _chunks: ArrayBuffer[] = []; + + constructor(options: GZipCompressionOptions = {}) { + super(options); + this.options = options; + if (!isBrowser && this.options.useZlib) { + // @ts-ignore public API is equivalent + return new GZipCompressionZlib(options); + } + } + + async compress(input: ArrayBuffer): Promise { + return this.compressSync(input); + } + + async decompress(input: ArrayBuffer): Promise { + return this.decompressSync(input); + } + + compressSync(input: ArrayBuffer): ArrayBuffer { + const pakoOptions = this._getPakoOptions(); + const inputArray = new Uint8Array(input); + return pako.gzip(inputArray, pakoOptions).buffer; + } + + decompressSync(input: ArrayBuffer): ArrayBuffer { + const pakoOptions: pako.InflateOptions = this.options?.gzip || {}; + const inputArray = new Uint8Array(input); + return pako.ungzip(inputArray, pakoOptions).buffer; + } + + async *compressBatches( + asyncIterator: AsyncIterable | Iterable + ): AsyncIterable { + const pakoOptions = this._getPakoOptions(); + const pakoProcessor = new pako.Deflate(pakoOptions); + yield* this.transformBatches(pakoProcessor, asyncIterator); + } + + async *decompressBatches( + asyncIterator: AsyncIterable | Iterable + ): AsyncIterable { + const pakoOptions: pako.InflateOptions = this.options?.gzip || {}; + const pakoProcessor = new pako.Inflate(pakoOptions); + yield* this.transformBatches(pakoProcessor, asyncIterator); + } + + private async *transformBatches( + pakoProcessor: pako.Inflate | pako.Deflate, + asyncIterator: AsyncIterable | Iterable + ): AsyncIterable { + pakoProcessor.onData = this._onData.bind(this); + 
pakoProcessor.onEnd = this._onEnd.bind(this);
+    for await (const chunk of asyncIterator) {
+      const uint8Array = new Uint8Array(chunk);
+      const ok = pakoProcessor.push(uint8Array, false); // false -> not last chunk
+      if (!ok) {
+        throw new Error(`${getPakoError()}write`);
+      }
+      const chunks = this._getChunks();
+      yield* chunks;
+    }
+
+    // End
+    const emptyChunk = new Uint8Array(0);
+    const ok = pakoProcessor.push(emptyChunk, true); // true -> last chunk
+    if (!ok) {
+      // For some reason we get error but it still works???
+      // throw new Error(getPakoError() + 'end');
+    }
+    const chunks = this._getChunks();
+    yield* chunks;
+  }
+
+  private _onData(chunk) {
+    this._chunks.push(chunk);
+  }
+
+  private _onEnd(status) {
+    if (status !== 0) {
+      throw new Error(getPakoError(status) + this._chunks.length);
+    }
+  }
+
+  private _getChunks(): ArrayBuffer[] {
+    const chunks = this._chunks;
+    this._chunks = [];
+    return chunks;
+  }
+
+  private _getPakoOptions(): pako.DeflateOptions {
+    return {
+      // @ts-ignore level is too strongly typed
+      level: this.options.quality || Compression.DEFAULT_COMPRESSION_LEVEL,
+      ...this.options?.gzip
+    };
+  }
+}
diff --git a/modules/compression/src/lib/gzip-compression-zlib.ts b/modules/compression/src/lib/gzip-compression-zlib.ts
new file mode 100644
index 0000000000..44e8b556c7
--- /dev/null
+++ b/modules/compression/src/lib/gzip-compression-zlib.ts
@@ -0,0 +1,65 @@
+// loaders.gl, MIT license
+import type {CompressionOptions} from './compression';
+import {Compression} from './compression';
+import {isBrowser, toArrayBuffer} from '@loaders.gl/loader-utils';
+import {promisify2} from '@loaders.gl/loader-utils';
+import type {ZlibOptions} from 'zlib';
+import * as zlib from 'zlib';
+
+const DEFAULT_COMPRESSION_LEVEL = 6;
+
+export type GZipCompressionZlibOptions = CompressionOptions & {
+  gzipZlib?: ZlibOptions;
+};
+
+/**
+ * GZIP compression / decompression
+ * Using Node.js zlib library (works under Node only)
+ */
+export class 
GZipCompressionZlib extends Compression { + readonly name: string = 'gzip'; + readonly extensions = ['gz', 'gzip']; + readonly contentEncodings = ['gzip', 'x-gzip']; + readonly isSupported = !isBrowser; + + readonly options: GZipCompressionZlibOptions; + + constructor(options: GZipCompressionZlibOptions = {}) { + super(options); + this.options = options; + if (isBrowser) { + throw new Error('zlib only available under Node.js'); + } + } + + async compress(input: ArrayBuffer): Promise { + const options = this._getZlibOptions(); + const buffer = await promisify2(zlib.gzip)(input, options); + return toArrayBuffer(buffer); + } + + async decompress(input: ArrayBuffer): Promise { + const options = this._getZlibOptions(); + const buffer = await promisify2(zlib.gunzip)(input, options); + return toArrayBuffer(buffer); + } + + compressSync(input: ArrayBuffer): ArrayBuffer { + const options = this._getZlibOptions(); + const buffer = zlib.gzipSync(input, options); + return toArrayBuffer(buffer); + } + + decompressSync(input: ArrayBuffer): ArrayBuffer { + const options = this._getZlibOptions(); + const buffer = zlib.gunzipSync(input, options); + return toArrayBuffer(buffer); + } + + protected _getZlibOptions(): ZlibOptions { + return { + level: this.options.quality || DEFAULT_COMPRESSION_LEVEL, + ...this.options?.gzipZlib + }; + } +} diff --git a/modules/compression/src/lib/gzip-compression.ts b/modules/compression/src/lib/gzip-compression.ts deleted file mode 100644 index 11312fe277..0000000000 --- a/modules/compression/src/lib/gzip-compression.ts +++ /dev/null @@ -1,23 +0,0 @@ -// GZIP -// import {isBrowser} from '@loaders.gl/loader-utils'; -import type {CompressionOptions} from './compression'; -import {DeflateCompression} from './deflate-compression'; -import pako from 'pako'; // https://bundlephobia.com/package/pako - -export type GZipCompressionOptions = CompressionOptions & { - gzip?: pako.InflateOptions & pako.DeflateOptions; -}; - -/** - * GZIP compression / 
decompression - */ -export class GZipCompression extends DeflateCompression { - readonly name: string = 'gzip'; - readonly extensions = ['gz', 'gzip']; - readonly contentEncodings = ['gzip', 'x-gzip']; - readonly isSupported = true; - - constructor(options?: GZipCompressionOptions) { - super({...options, deflate: {...options?.gzip, gzip: true}}); - } -} diff --git a/modules/compression/src/lib/lz4-compression.ts b/modules/compression/src/lib/lz4-compression.ts index 72471989ed..64e7087e9e 100644 --- a/modules/compression/src/lib/lz4-compression.ts +++ b/modules/compression/src/lib/lz4-compression.ts @@ -38,13 +38,14 @@ export class LZ4Compression extends Compression { readonly extensions = ['lz4']; readonly contentEncodings = ['x-lz4']; readonly isSupported = true; + readonly options: CompressionOptions; - constructor(options: CompressionOptions) { + constructor(options: CompressionOptions = {}) { super(options); this.options = options; - lz4js = lz4js || this.options?.modules?.lz4js; + lz4js = lz4js || this.options?.modules?.lz4js || Compression.modules.lz4js; if (!lz4js) { throw new Error(this.name); } diff --git a/modules/compression/src/lib/lzo-compression.ts b/modules/compression/src/lib/lzo-compression.ts index 78b745ccd9..9266259992 100644 --- a/modules/compression/src/lib/lzo-compression.ts +++ b/modules/compression/src/lib/lzo-compression.ts @@ -22,17 +22,18 @@ export class LZOCompression extends Compression { readonly extensions = []; readonly contentEncodings = []; readonly isSupported = false; // !isBrowser; + readonly options: CompressionOptions; /** * lzo is an injectable dependency due to big size * @param options */ - constructor(options: CompressionOptions) { + constructor(options: CompressionOptions = {}) { super(options); this.options = options; - lzo = lzo || this.options?.modules?.lzo; + lzo = lzo || this.options?.modules?.lzo || Compression.modules.lzo; if (!lzo) { throw new Error(this.name); } diff --git 
a/modules/compression/src/lib/snappy-compression.ts b/modules/compression/src/lib/snappy-compression.ts index a72c6b010d..ac7af2086a 100644 --- a/modules/compression/src/lib/snappy-compression.ts +++ b/modules/compression/src/lib/snappy-compression.ts @@ -11,6 +11,7 @@ export class SnappyCompression extends Compression { readonly extensions = []; readonly contentEncodings = []; readonly isSupported = true; + readonly options: CompressionOptions; constructor(options?: CompressionOptions) { diff --git a/modules/compression/src/lib/utils/pako-utils.ts b/modules/compression/src/lib/utils/pako-utils.ts new file mode 100644 index 0000000000..cd877873ee --- /dev/null +++ b/modules/compression/src/lib/utils/pako-utils.ts @@ -0,0 +1,24 @@ +// TODO - For some reason we don't get the error message from pako in _onEnd? +export function getPakoError(code: number = 0): string { + const MESSAGES = { + /* Z_NEED_DICT 2 */ + 2: 'need dictionary', + /* Z_STREAM_END 1 */ + 1: 'stream end', + /* Z_OK 0 */ + 0: '', + /* Z_ERRNO (-1) */ + '-1': 'file error', + /* Z_STREAM_ERROR (-2) */ + '-2': 'stream error', + /* Z_DATA_ERROR (-3) */ + '-3': 'data error', + /* Z_MEM_ERROR (-4) */ + '-4': 'insufficient memory', + /* Z_BUF_ERROR (-5) */ + '-5': 'buffer error', + /* Z_VERSION_ERROR (-6) */ + '-6': 'incompatible version' + }; + return MESSAGES[code] || 'unknown Pako library error'; +} diff --git a/modules/compression/src/lib/zstd-compression.ts b/modules/compression/src/lib/zstd-compression.ts index f9b4ee6709..0584465744 100644 --- a/modules/compression/src/lib/zstd-compression.ts +++ b/modules/compression/src/lib/zstd-compression.ts @@ -13,18 +13,21 @@ export class ZstdCompression extends Compression { readonly name: string = 'zstd'; readonly extensions = []; readonly contentEncodings = []; - readonly isSupported = true; + get isSupported() { + return zstd; + } + readonly options: CompressionOptions; /** * zstd-codec is an injectable dependency due to big size * @param options */ - 
constructor(options: CompressionOptions) { + constructor(options: CompressionOptions = {}) { super(options); this.options = options; - ZstdCodec = this.options?.modules?.['zstd-codec']; + ZstdCodec = this.options?.modules?.['zstd-codec'] || Compression.modules?.['zstd-codec']; if (!ZstdCodec) { // eslint-disable-next-line no-console console.warn(`${this.name} library not installed`); diff --git a/modules/compression/src/workers/worker.ts b/modules/compression/src/workers/worker.ts index ce52494f0a..1665e43880 100644 --- a/modules/compression/src/workers/worker.ts +++ b/modules/compression/src/workers/worker.ts @@ -1,10 +1,12 @@ import {createWorker} from '@loaders.gl/worker-utils'; +import type {Compression} from '../lib/compression'; + // Compressors import {NoCompression} from '../lib/no-compression'; import {BrotliCompression} from '../lib/brotli-compression'; -import {DeflateCompression} from '../lib/deflate-compression'; -import {GZipCompression} from '../lib/gzip-compression'; +import {DeflateCompression} from '../lib/deflate-compression-pako'; +import {GZipCompression} from '../lib/gzip-compression-pako'; import {LZ4Compression} from '../lib/lz4-compression'; // import {LZOCompression} from '../lib/lzo-compression'; import {SnappyCompression} from '../lib/snappy-compression'; @@ -13,35 +15,37 @@ import {ZstdCompression} from '../lib/zstd-compression'; // Import big dependencies // import brotli from 'brotli'; - brotli has problems with decompress in browsers -// import brotliDecompress from 'brotli/decompress'; +import brotliDecompress from 'brotli/decompress'; import lz4js from 'lz4js'; // import lzo from 'lzo'; // import {ZstdCodec} from 'zstd-codec'; +// globalThis.Worker = globalThis.Worker || {}; +// globalThis.Blob = globalThis.Blob || {}; + // Inject large dependencies through Compression constructor options const modules = { // brotli has problems with decompress in browsers - // brotli: { - // decompress: brotliDecompress, - // compress: () => { - 
// throw new Error('brotli compress'); - // } - // }, + brotli: { + decompress: brotliDecompress, + compress: () => { + throw new Error('brotli compress'); + } + }, lz4js + // 'zstd-codec': ZstdCodec, // lzo, - // 'zstd-codec': ZstdCodec }; -/** @type {Compression[]} */ -const COMPRESSIONS = [ +const COMPRESSIONS: Compression[] = [ new NoCompression({modules}), new BrotliCompression({modules}), new DeflateCompression({modules}), new GZipCompression({modules}), - // new LZOCompression({modules}), - new LZ4Compression({modules}), new SnappyCompression({modules}), + new LZ4Compression({modules}), new ZstdCompression({modules}) + // new LZOCompression({modules}) ]; createWorker(async (data, options = {}) => { diff --git a/modules/compression/test/compression.bench.js b/modules/compression/test/compression.bench.js deleted file mode 100644 index 386d5e9d7c..0000000000 --- a/modules/compression/test/compression.bench.js +++ /dev/null @@ -1,46 +0,0 @@ -/* -import { - NoCompression, - GZipCompression, - DeflateCompression, - LZ4Compression, - ZstdCompression, - SnappyCompression, - BrotliCompression, - LZOCompression, - CompressionWorker -} from '@loaders.gl/compression'; -import {getData} from './utils/test-utils'; - -// import brotli from 'brotli'; - brotli has problems with decompress in browsers -import brotliDecompress from 'brotli/decompress'; -import lz4js from 'lz4js'; -import lzo from 'lzo'; -import {ZstdCodec} from 'zstd-codec'; - -// Inject large dependencies through Compression constructor options -const modules = { - // brotli has problems with decompress in browsers - brotli: { - decompress: brotliDecompress, - compress: () => { - throw new Error('brotli compress'); - } - }, - lz4js, - lzo, - 'zstd-codec': ZstdCodec -}; - -export default async function compressionBench(bench) { - // const {binaryData} = getData(); - - bench = bench.group('Compression'); - - // bench = bench.addAsync('SHA256Hash#hash()', {multiplier: 100000, unit: 'bytes'}, () => - // new 
SHA256Hash({modules: {CryptoJS}}).hash(binaryData) - // ); - - return bench; -} -*/ diff --git a/modules/compression/test/compression.bench.ts b/modules/compression/test/compression.bench.ts new file mode 100644 index 0000000000..0914d6ab40 --- /dev/null +++ b/modules/compression/test/compression.bench.ts @@ -0,0 +1,180 @@ +import type {Bench} from '@probe.gl/bench'; +import {isBrowser} from '@loaders.gl/loader-utils'; +import { + Compression, + // NoCompression, + // DeflateCompression, + // DeflateCompressionZlib, + GZipCompression, + GZipCompressionZlib, + // LZ4Compression, + // ZstdCompression, + SnappyCompression, + BrotliCompression, + BrotliCompressionZlib, + // LZOCompression, + // CompressionWorker, + _GZipCompressionFflate, + _GZipCompressionPako + // _DeflateCompressionFflate, + // _DeflateCompressionPako +} from '@loaders.gl/compression'; +import {getData} from './utils/test-utils'; + +// import * as brotli from 'brotli-compress'; +// import brotliPromise, {BrotliWasmType} from 'brotli-wasm'; // Import the default export +// let brotli: BrotliWasmType | undefined; + +import brotliDecompress from 'brotli/decompress'; // brotli has problems with compress in browsers +import lz4js from 'lz4js'; +import {ZstdCodec} from 'zstd-codec'; +// import lzo from 'lzo'; + +// Inject large dependencies through Compression constructor options +Object.assign(Compression.modules, { + lz4js, + 'zstd-codec': ZstdCodec, + // brotli module has big problems with compress in browsers + brotli: { + decompress: brotliDecompress, + compress: () => { + throw new Error('brotli compress'); + } + } + // lzo +}); + +// Prepare data +const {binaryData} = getData(); +const gzippedData = new GZipCompression().compressSync(binaryData); +const snappyData = new SnappyCompression().compressSync(binaryData); +let brotliData: ArrayBuffer | undefined; +try { + brotliData = new BrotliCompressionZlib().compressSync(binaryData); +} catch { + // ignore errors +} + +// const noCompression = new 
NoCompression(); +// const gzipFflate = new GZipCompression(); +// const gzipPako = new _GZipCompressionPako(); +// const gzipZlib = new GZipCompressionZlib(); +// const brotli = new BrotliCompression(); +// const brotliZlib = new BrotliCompressionZlib(); +// const snappy = new SnappyCompression(); +// const lz4 = new LZ4Compression(); +// const zstd = new ZstdCompression(); + +export async function compressionBench(bench: Bench): Promise { + await compressionBenchDecompression(bench); + + await compressionBenchCompression(bench); + + // bench = bench.addAsync( + // 'GZip - compress - Pako - async', + // {multiplier: binaryData.byteLength, unit: 'bytes'}, + // () => { + // new _GZipCompressionPako().compress(binaryData); + // } + // ); + // bench = bench.addAsync( + // 'GZip - compress - Fflate - async', + // {multiplier: binaryData.byteLength, unit: 'bytes'}, + // () => new _GZipCompressionFflate().compress(binaryData) + // ); + // bench = bench.addAsync( + // 'GZip - compress - Zlib - async', + // {multiplier: binaryData.byteLength, unit: 'bytes'}, + // () => new GZipCompressionZlib().compress(binaryData) + // ); + + return bench; +} + +async function compressionBenchDecompression(bench: Bench): Promise { + bench = bench.group('Decompression'); + + bench = bench.add( + 'Snappy - decompress - sync', + {multiplier: binaryData.byteLength, unit: 'bytes'}, + () => new SnappyCompression().decompressSync(snappyData) + ); + + if (!isBrowser) { + bench = bench.add( + 'GZip - decompress - Zlib - sync', + {multiplier: binaryData.byteLength, unit: 'bytes'}, + () => new GZipCompressionZlib().decompressSync(gzippedData) + ); + } + + bench = bench.add( + 'GZip - decompress - Fflate - sync', + {multiplier: binaryData.byteLength, unit: 'bytes'}, + () => new _GZipCompressionFflate().decompressSync(gzippedData) + ); + + bench = bench.add( + 'GZip - decompress - Pako - sync', + {multiplier: binaryData.byteLength, unit: 'bytes'}, + () => new 
_GZipCompressionPako().decompressSync(gzippedData) + ); + + if (brotliData) { + const data = brotliData; + bench = bench.add( + 'Brotli - decompress - Zlib - sync', + {multiplier: binaryData.byteLength, unit: 'bytes'}, + () => new BrotliCompressionZlib().decompressSync(data) + ); + bench = bench.add( + 'Brotli - decompress - sync', + {multiplier: binaryData.byteLength, unit: 'bytes'}, + () => new BrotliCompression().decompressSync(data) + ); + } +} + +async function compressionBenchCompression(bench: Bench): Promise { + bench = bench.group('Compression'); + + bench = bench.addAsync( + 'Snappy - compress - sync', + {multiplier: binaryData.byteLength, unit: 'bytes'}, + () => new SnappyCompression().compressSync(binaryData) + ); + + if (!isBrowser) { + bench = bench.add( + 'GZip - compress 6 - Zlib - sync', + {multiplier: binaryData.byteLength, unit: 'bytes'}, + () => new GZipCompressionZlib().compressSync(binaryData) + ); + } + bench = bench.add( + 'GZip - compress 6 - Fflate - sync', + {multiplier: binaryData.byteLength, unit: 'bytes'}, + () => new _GZipCompressionFflate().compressSync(binaryData) + ); + bench = bench.add( + 'GZip - compress 6 - Pako - sync', + {multiplier: binaryData.byteLength, unit: 'bytes'}, + () => new _GZipCompressionPako().compressSync(binaryData) + ); + + if (!isBrowser) { + bench = bench.addAsync( + 'Brotli - compress 6 - Zlib - sync', + {multiplier: binaryData.byteLength, unit: 'bytes'}, + () => new BrotliCompressionZlib().compressSync(binaryData) + ); + } + + if (brotliData && new BrotliCompression().isCompressionSupported) { + bench = bench.addAsync( + 'Brotli - compress 6 - sync', + {multiplier: binaryData.byteLength, unit: 'bytes'}, + () => new BrotliCompression().compressSync(binaryData) + ); + } +} diff --git a/modules/compression/test/compression.spec.js b/modules/compression/test/compression.spec.ts similarity index 90% rename from modules/compression/test/compression.spec.js rename to modules/compression/test/compression.spec.ts 
index f34af8aeaa..3a50f83bc8 100644 --- a/modules/compression/test/compression.spec.js +++ b/modules/compression/test/compression.spec.ts @@ -55,7 +55,8 @@ const TEST_CASES = [ compressedLength: 10903 }, gzip: { - compressedLength: 10915 + compressedLength: 10913 // fflate + // compressedLength: 10915 // pako }, lz4: { compressedLength: 10422 @@ -137,7 +138,10 @@ test('compression#batched', async (t) => { let compressedBatches = compression.compressBatches(inputChunks); const compressedData = await concatenateArrayBuffersAsync(compressedBatches); if (name === 'gzip') { - t.equals(compressedData.byteLength, 29, `${name}(${title}) batches: length correct`); // Header overhead + // FFLATE + t.equals(compressedData.byteLength, 47, `${name}(${title}) batches: length correct`); // Header overhead + // PAKO + // t.equals(compressedData.byteLength, 29, `${name}(${title}) batches: length correct`); // Header overhead } // test chained iterators @@ -160,6 +164,15 @@ test('compression#batched', async (t) => { // WORKER TESTS test('gzip#worker', async (t) => { + if (!isBrowser) { + t.comment('not testing worker under Node.js'); + t.end(); + return; + } + + const COMPRESSED_LENGTH_EXPECTED = 12824; // fflate + // const COMPRESSED_LENGTH_EXPECTED = 12825; // pako + const {binaryData} = getData(); t.equal(binaryData.byteLength, 100000, 'Length correct'); @@ -173,7 +186,7 @@ test('gzip#worker', async (t) => { } }); - t.equal(compressedData.byteLength, 12825, 'Length correct'); + t.equal(compressedData.byteLength, COMPRESSED_LENGTH_EXPECTED, 'Length correct'); const decompressdData = await processOnWorker(CompressionWorker, compressedData, { compression: 'gzip', @@ -197,6 +210,12 @@ test('gzip#worker', async (t) => { }); test('lz4#worker', async (t) => { + if (!isBrowser) { + t.comment('not testing worker under Node.js'); + t.end(); + return; + } + const {binaryData} = getData(); t.equal(binaryData.byteLength, 100000, 'Length correct'); diff --git a/test/bench/modules.js 
b/test/bench/modules.js index bea203f5f6..6e951119f6 100644 --- a/test/bench/modules.js +++ b/test/bench/modules.js @@ -25,6 +25,7 @@ import {_addAliases} from '@loaders.gl/loader-utils'; import loaderUtilsBench from '@loaders.gl/loader-utils/test/loader-utils.bench'; import coreBench from '@loaders.gl/core/test/core.bench'; +import compressionBench from '@loaders.gl/compression/test/compression.bench'; import csvBench from '@loaders.gl/csv/test/csv.bench'; import dracoBench from '@loaders.gl/draco/test/draco.bench'; import excelBench from '@loaders.gl/excel/test/excel.bench'; @@ -41,23 +42,25 @@ _addAliases(ALIASES); // add benchmarks export async function addModuleBenchmarksToSuite(suite) { - await coreBench(suite); + await compressionBench(suite); - await parquetBench(suite); + // await coreBench(suite); - await jsonBench(suite); + // await parquetBench(suite); - // await shapefileBench(suite); + // await jsonBench(suite); - await mvtBench(suite); - await loaderUtilsBench(suite); + // // await shapefileBench(suite); - await imageBench(suite); - await cryptoBench(suite); + // await mvtBench(suite); + // await loaderUtilsBench(suite); - await dracoBench(suite); - await csvBench(suite); - await excelBench(suite); + // await imageBench(suite); + // await cryptoBench(suite); + + // await dracoBench(suite); + // await csvBench(suite); + // await excelBench(suite); // await i3sLoaderBench(suite); } diff --git a/yarn.lock b/yarn.lock index dba978b92a..cf62fb8225 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2439,13 +2439,6 @@ "@turf/invariant" "^5.1.5" "@turf/meta" "^5.1.5" -"@types/brotli@^1.3.0": - version "1.3.1" - resolved "https://registry.yarnpkg.com/@types/brotli/-/brotli-1.3.1.tgz#65dc6c69bb9f4159677032f60e81ffc09faf1fce" - integrity sha512-mGwX0BBQqmpHoX8+b8Oez0X+ZEYnl2gbDL2n0HxYT4imqhTChhj1AAgAKVWNZSuPvXGZXqVoOtBS0071tN6Tkw== - dependencies: - "@types/node" "*" - "@types/bson@4.2.0": version "4.2.0" resolved 
"https://registry.yarnpkg.com/@types/bson/-/bson-4.2.0.tgz#a2f71e933ff54b2c3bf267b67fa221e295a33337" @@ -2553,7 +2546,7 @@ resolved "https://registry.yarnpkg.com/@types/node/-/node-10.17.60.tgz#35f3d6213daed95da7f0f73e75bcc6980e90597b" integrity sha512-F0KIgDJfy2nA3zMLmWGKxcH2ZVEtCZXHHdOQs2gSaQ27+lNeEfGxzkIw90aXswATX7AZ33tahPbzy6KAfUreVw== -"@types/node@^17.0.36": +"@types/node@^17.0.36", "@types/node@^17.0.40": version "17.0.45" resolved "https://registry.yarnpkg.com/@types/node/-/node-17.0.45.tgz#2c0fafd78705e7a18b7906b5201a522719dc5190" integrity sha512-w+tIMs3rq2afQdsPJlODhoUEKzFP1ayaoyl1CcnwtIlsVe7K7bA1NGm4s3PraqTLlXnbIN84zuBlxBWo1u9BLw== @@ -3737,7 +3730,25 @@ brorand@^1.0.1, brorand@^1.1.0: resolved "https://registry.yarnpkg.com/brorand/-/brorand-1.1.0.tgz#12c25efe40a45e3c323eb8675a0a0ce57b22371f" integrity sha512-cKV8tMCEpQs4hK/ik71d6LrPOnpkpGBR0wzxqr68g2m/LB2GxVYQroAjMJZRVM1Y4BCjCKc3vAamxSzOY2RP+w== -brotli@^1.3.2: +brotli-compress@^1.3.3: + version "1.3.3" + resolved "https://registry.yarnpkg.com/brotli-compress/-/brotli-compress-1.3.3.tgz#fe3b50b6234bfbc8e26a137bffff2a07e2a4efe9" + integrity sha512-cwKOmzEuKqUmRxXDdZimiNoXRRr7AQKMSubJSbYA9FXk+LTPT3fBGpHU8VZRZZctAJ5OCeXGK9PzPpZ1vD0pDA== + dependencies: + "@types/node" "^17.0.40" + brotli-wasm "1.2.0" + +brotli-wasm@1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/brotli-wasm/-/brotli-wasm-1.2.0.tgz#0f99b97b0020c8152308c277388aecf2a06b6e32" + integrity sha512-PdDi7awF36zFujZyFJb9UNrP1l+If7iCgXhLKE1SpwqFQSK2yc7w2dysOmME7p325yQaZNvae7ruzypB3YhFxA== + +brotli-wasm@^1.3.1: + version "1.3.1" + resolved "https://registry.yarnpkg.com/brotli-wasm/-/brotli-wasm-1.3.1.tgz#51deb0a17a146b36fe10cbc99cd4f92d9ce166d7" + integrity sha512-Vp+v3QXddvy39Ycbmvd3/Y1kUvKhwtnprzeABcKWN4jmyg6W3W5MhGPCfXBMHeSQnizgpV59iWmkSRp7ykOnDQ== + +brotli@^1.3.2, brotli@^1.3.3: version "1.3.3" resolved "https://registry.yarnpkg.com/brotli/-/brotli-1.3.3.tgz#7365d8cc00f12cf765d2b2c898716bcf4b604d48" integrity 
sha512-oTKjJdShmDuGW94SyyaoQvAjf30dZaHnjJ8uAF+u2/vGJkJbJPJAT1gDiOJP5v1Zb6f9KEyW/1HpuaWIXtGHPg==