diff --git a/packages/duckdb-wasm/karma/s3rver/s3rver.js b/packages/duckdb-wasm/karma/s3rver/s3rver.js
index 72a8c51ec..4f31c8121 100644
--- a/packages/duckdb-wasm/karma/s3rver/s3rver.js
+++ b/packages/duckdb-wasm/karma/s3rver/s3rver.js
@@ -1,15 +1,16 @@
const S3rver = require('s3rver');
-const CORS_CONFIG = "<CORSConfiguration>\n" +
-    " <CORSRule>\n" +
-    "  <AllowedOrigin>*</AllowedOrigin>\n" +
-    "  <AllowedMethod>PUT</AllowedMethod>\n" +
-    "  <AllowedMethod>GET</AllowedMethod>\n" +
-    "  <AllowedHeader>*</AllowedHeader>\n" +
-    "  <ExposeHeader>Content-Range</ExposeHeader>\n" +
-    " </CORSRule>\n" +
-    "</CORSConfiguration>";
+const CORS_CONFIG =
+    '<CORSConfiguration>\n' +
+    ' <CORSRule>\n' +
+    '  <AllowedOrigin>*</AllowedOrigin>\n' +
+    '  <AllowedMethod>PUT</AllowedMethod>\n' +
+    '  <AllowedMethod>GET</AllowedMethod>\n' +
+    '  <AllowedMethod>HEAD</AllowedMethod>\n' +
+    '  <AllowedHeader>*</AllowedHeader>\n' +
+    '  <ExposeHeader>Content-Range</ExposeHeader>\n' +
+    ' </CORSRule>\n' +
+    '</CORSConfiguration>';
var createS3rver = function (args, config, logger) {
const log = logger.create('S3-test-server');
@@ -19,10 +20,10 @@ var createS3rver = function (args, config, logger) {
address: 'localhost',
silent: config.s3rver.silent,
directory: './../../.tmp/s3rver',
- configureBuckets: [{name: 'test-bucket', configs:[CORS_CONFIG]}]
+ configureBuckets: [{ name: 'test-bucket', configs: [CORS_CONFIG] }],
}).run();
};
module.exports = {
- 'framework:s3rver': ['factory', createS3rver]
-};
\ No newline at end of file
+ 'framework:s3rver': ['factory', createS3rver],
+};
diff --git a/packages/duckdb-wasm/package.json b/packages/duckdb-wasm/package.json
index 7fba82009..9931a2401 100644
--- a/packages/duckdb-wasm/package.json
+++ b/packages/duckdb-wasm/package.json
@@ -62,6 +62,7 @@
"build:debug": "node bundle.mjs debug && tsc --emitDeclarationOnly",
"build:release": "node bundle.mjs release && tsc --emitDeclarationOnly",
"docs": "typedoc",
+ "format": "prettier --write \"**/*.+(js|ts)\"",
"report": "node ./coverage.mjs",
"test:node": "node --enable-source-maps ../../node_modules/jasmine/bin/jasmine ./dist/tests-node.cjs",
"test:node:debug": "node --inspect-brk --enable-source-maps ../../node_modules/jasmine/bin/jasmine ./dist/tests-node.cjs",
diff --git a/packages/duckdb-wasm/src/bindings/bindings_base.ts b/packages/duckdb-wasm/src/bindings/bindings_base.ts
index 8e8a1aecd..d73bfa2c2 100644
--- a/packages/duckdb-wasm/src/bindings/bindings_base.ts
+++ b/packages/duckdb-wasm/src/bindings/bindings_base.ts
@@ -135,10 +135,15 @@ export abstract class DuckDBBindingsBase implements DuckDBBindings {
/** Tokenize a script */
public tokenize(text: string): ScriptTokens {
const BUF = TEXT_ENCODER.encode(text);
- const bufferPtr = this.mod._malloc(BUF.length );
- const bufferOfs = this.mod.HEAPU8.subarray(bufferPtr, bufferPtr + BUF.length );
+ const bufferPtr = this.mod._malloc(BUF.length);
+ const bufferOfs = this.mod.HEAPU8.subarray(bufferPtr, bufferPtr + BUF.length);
bufferOfs.set(BUF);
- const [s, d, n] = callSRet(this.mod, 'duckdb_web_tokenize_buffer', ['number', 'number'], [bufferPtr, BUF.length]);
+ const [s, d, n] = callSRet(
+ this.mod,
+ 'duckdb_web_tokenize_buffer',
+ ['number', 'number'],
+ [bufferPtr, BUF.length],
+ );
this.mod._free(bufferPtr);
if (s !== StatusCode.SUCCESS) {
throw new Error(readString(this.mod, d, n));
@@ -172,7 +177,12 @@ export abstract class DuckDBBindingsBase implements DuckDBBindings {
const bufferPtr = this.mod._malloc(BUF.length);
const bufferOfs = this.mod.HEAPU8.subarray(bufferPtr, bufferPtr + BUF.length);
bufferOfs.set(BUF);
- const [s, d, n] = callSRet(this.mod, 'duckdb_web_query_run_buffer', ['number', 'number', 'number'], [conn, bufferPtr, BUF.length]);
+ const [s, d, n] = callSRet(
+ this.mod,
+ 'duckdb_web_query_run_buffer',
+ ['number', 'number', 'number'],
+ [conn, bufferPtr, BUF.length],
+ );
this.mod._free(bufferPtr);
if (s !== StatusCode.SUCCESS) {
throw new Error(readString(this.mod, d, n));
@@ -189,10 +199,15 @@ export abstract class DuckDBBindingsBase implements DuckDBBindings {
*/
public startPendingQuery(conn: number, text: string, allowStreamResult: boolean = false): Uint8Array | null {
const BUF = TEXT_ENCODER.encode(text);
- const bufferPtr = this.mod._malloc(BUF.length );
- const bufferOfs = this.mod.HEAPU8.subarray(bufferPtr, bufferPtr + BUF.length );
+ const bufferPtr = this.mod._malloc(BUF.length);
+ const bufferOfs = this.mod.HEAPU8.subarray(bufferPtr, bufferPtr + BUF.length);
bufferOfs.set(BUF);
- const [s, d, n] = callSRet(this.mod, 'duckdb_web_pending_query_start_buffer', ['number', 'number', 'number', 'boolean'], [conn, bufferPtr, BUF.length, allowStreamResult]);
+ const [s, d, n] = callSRet(
+ this.mod,
+ 'duckdb_web_pending_query_start_buffer',
+ ['number', 'number', 'number', 'boolean'],
+ [conn, bufferPtr, BUF.length, allowStreamResult],
+ );
this.mod._free(bufferPtr);
if (s !== StatusCode.SUCCESS) {
throw new Error(readString(this.mod, d, n));
@@ -230,7 +245,12 @@ export abstract class DuckDBBindingsBase implements DuckDBBindings {
}
if (!IsArrowBuffer(s)) {
- throw new Error("Unexpected StatusCode from duckdb_web_query_fetch_results (" + s + ") and with self reported error as" + readString(this.mod, d, n));
+ throw new Error(
+ 'Unexpected StatusCode from duckdb_web_query_fetch_results (' +
+ s +
+ ') and with self reported error as' +
+ readString(this.mod, d, n),
+ );
}
if (s !== StatusCode.SUCCESS) {
throw new Error(readString(this.mod, d, n));
@@ -246,7 +266,12 @@ export abstract class DuckDBBindingsBase implements DuckDBBindings {
const bufferPtr = this.mod._malloc(BUF.length);
const bufferOfs = this.mod.HEAPU8.subarray(bufferPtr, bufferPtr + BUF.length);
bufferOfs.set(BUF);
- const [s, d, n] = callSRet(this.mod, 'duckdb_web_get_tablenames_buffer', ['number', 'number', 'number'], [conn, bufferPtr, BUF.length]);
+ const [s, d, n] = callSRet(
+ this.mod,
+ 'duckdb_web_get_tablenames_buffer',
+ ['number', 'number', 'number'],
+ [conn, bufferPtr, BUF.length],
+ );
this.mod._free(bufferPtr);
if (s !== StatusCode.SUCCESS) {
throw new Error(readString(this.mod, d, n));
@@ -306,7 +331,12 @@ export abstract class DuckDBBindingsBase implements DuckDBBindings {
const bufferPtr = this.mod._malloc(BUF.length);
const bufferOfs = this.mod.HEAPU8.subarray(bufferPtr, bufferPtr + BUF.length);
bufferOfs.set(BUF);
- const [s, d, n] = callSRet(this.mod, 'duckdb_web_prepared_create_buffer', ['number', 'number', 'number'], [conn, bufferPtr, BUF.length]);
+ const [s, d, n] = callSRet(
+ this.mod,
+ 'duckdb_web_prepared_create_buffer',
+ ['number', 'number', 'number'],
+ [conn, bufferPtr, BUF.length],
+ );
this.mod._free(bufferPtr);
if (s !== StatusCode.SUCCESS) {
throw new Error(readString(this.mod, d, n));
@@ -513,28 +543,28 @@ export abstract class DuckDBBindingsBase implements DuckDBBindings {
directIO: boolean,
): Promise {
if (protocol === DuckDBDataProtocol.BROWSER_FSACCESS) {
- if( handle instanceof FileSystemSyncAccessHandle ){
+ if (handle instanceof FileSystemSyncAccessHandle) {
// already a handle is sync handle.
- } else if( handle instanceof FileSystemFileHandle ){
+ } else if (handle instanceof FileSystemFileHandle) {
// handle is an async handle, should convert to sync handle
const fileHandle: FileSystemFileHandle = handle as any;
try {
handle = (await fileHandle.createSyncAccessHandle()) as any;
} catch (e: any) {
- throw new Error( e.message + ":" + name );
+ throw new Error(e.message + ':' + name);
}
- } else if( name != null ){
+ } else if (name != null) {
// should get sync handle from the file name.
try {
const opfsRoot = await navigator.storage.getDirectory();
const fileHandle = await opfsRoot.getFileHandle(name);
handle = (await fileHandle.createSyncAccessHandle()) as any;
} catch (e: any) {
- throw new Error( e.message + ":" + name );
+ throw new Error(e.message + ':' + name);
}
}
}
- return handle;
+ return handle;
}
/** Register a file object URL async */
public async registerFileHandleAsync(
@@ -625,10 +655,10 @@ export abstract class DuckDBBindingsBase implements DuckDBBindings {
}
/** Enable tracking of file statistics */
    public registerOPFSFileName(file: string): Promise<void> {
- if (file.startsWith("opfs://")) {
- return this.prepareFileHandle(file, DuckDBDataProtocol.BROWSER_FSACCESS);
- } else {
- throw new Error("Not an OPFS file name: " + file);
+ if (file.startsWith('opfs://')) {
+ return this.prepareFileHandle(file, DuckDBDataProtocol.BROWSER_FSACCESS);
+ } else {
+ throw new Error('Not an OPFS file name: ' + file);
}
}
public collectFileStatistics(file: string, enable: boolean): void {
diff --git a/packages/duckdb-wasm/src/bindings/connection.ts b/packages/duckdb-wasm/src/bindings/connection.ts
index fb54a3660..a80c12215 100644
--- a/packages/duckdb-wasm/src/bindings/connection.ts
+++ b/packages/duckdb-wasm/src/bindings/connection.ts
@@ -111,7 +111,7 @@ export class ResultStreamIterator implements Iterable<Uint8Array> {
/** Reached end of stream? */
_depleted: boolean;
- constructor(protected bindings: DuckDBBindings, protected conn: number, protected header: Uint8Array) {
+ constructor(
+ protected bindings: DuckDBBindings,
+ protected conn: number,
+ protected header: Uint8Array,
+ ) {
this._first = true;
this._depleted = false;
}
diff --git a/packages/duckdb-wasm/src/bindings/runtime.ts b/packages/duckdb-wasm/src/bindings/runtime.ts
index f894dba46..86b27d992 100644
--- a/packages/duckdb-wasm/src/bindings/runtime.ts
+++ b/packages/duckdb-wasm/src/bindings/runtime.ts
@@ -140,7 +140,7 @@ export interface DuckDBRuntime {
openFile(mod: DuckDBModule, fileId: number, flags: FileFlags): void;
syncFile(mod: DuckDBModule, fileId: number): void;
closeFile(mod: DuckDBModule, fileId: number): void;
- dropFile(mod: DuckDBModule, fileNamePtr: number, fileNameLen:number): void;
+ dropFile(mod: DuckDBModule, fileNamePtr: number, fileNameLen: number): void;
getLastFileModificationTime(mod: DuckDBModule, fileId: number): number;
truncateFile(mod: DuckDBModule, fileId: number, newSize: number): void;
readFile(mod: DuckDBModule, fileId: number, buffer: number, bytes: number, location: number): number;
@@ -162,7 +162,7 @@ export interface DuckDBRuntime {
    prepareDBFileHandle?: (path: string, protocol: DuckDBDataProtocol) => Promise<PreparedDBFileHandle[]>;
// Internal API - experimental
- progressUpdate(final: number, percentage: number, iteration:number): void;
+ progressUpdate(final: number, percentage: number, iteration: number): void;
// Call a scalar UDF function
callScalarUDF(
@@ -184,7 +184,7 @@ export const DEFAULT_RUNTIME: DuckDBRuntime = {
openFile: (_mod: DuckDBModule, _fileId: number, flags: FileFlags): void => {},
syncFile: (_mod: DuckDBModule, _fileId: number): void => {},
closeFile: (_mod: DuckDBModule, _fileId: number): void => {},
- dropFile: (_mod: DuckDBModule, _fileNamePtr: number, _fileNameLen:number): void => {},
+ dropFile: (_mod: DuckDBModule, _fileNamePtr: number, _fileNameLen: number): void => {},
getLastFileModificationTime: (_mod: DuckDBModule, _fileId: number): number => {
return 0;
},
diff --git a/packages/duckdb-wasm/src/bindings/runtime_browser.ts b/packages/duckdb-wasm/src/bindings/runtime_browser.ts
index b5069dac4..e9f88b2b4 100644
--- a/packages/duckdb-wasm/src/bindings/runtime_browser.ts
+++ b/packages/duckdb-wasm/src/bindings/runtime_browser.ts
@@ -1,8 +1,6 @@
-import {StatusCode} from '../status';
-import {
- WorkerResponseType,
-} from '../parallel/worker_request';
-import {addS3Headers, getHTTPUrl} from '../utils';
+import { StatusCode } from '../status';
+import { WorkerResponseType } from '../parallel/worker_request';
+import { addS3Headers, getHTTPUrl } from '../utils';
import {
callSRet,
@@ -22,7 +20,6 @@ import * as udf from './udf_runtime';
const OPFS_PREFIX_LEN = 'opfs://'.length;
const PATH_SEP_REGEX = /\/|\\/;
-
export const BROWSER_RUNTIME: DuckDBRuntime & {
    _files: Map<string, any>;
    _fileInfoCache: Map<number, DuckDBFileInfo>;
@@ -100,7 +97,7 @@ export const BROWSER_RUNTIME: DuckDBRuntime & {
if (info == null) {
return null;
}
- BROWSER_RUNTIME._globalFileInfo = { ...info, blob: null} as DuckDBGlobalFileInfo;
+ BROWSER_RUNTIME._globalFileInfo = { ...info, blob: null } as DuckDBGlobalFileInfo;
return BROWSER_RUNTIME._globalFileInfo;
} catch (e: any) {
@@ -111,7 +108,7 @@ export const BROWSER_RUNTIME: DuckDBRuntime & {
async assignOPFSRoot(): Promise {
if (!BROWSER_RUNTIME._opfsRoot) {
BROWSER_RUNTIME._opfsRoot = await navigator.storage.getDirectory();
- }
+ }
},
/** Prepare a file handle that could only be acquired aschronously */
    async prepareFileHandles(filePaths: string[], protocol: DuckDBDataProtocol): Promise<PreparedDBFileHandle[]> {
@@ -157,7 +154,7 @@ export const BROWSER_RUNTIME: DuckDBRuntime & {
fromCached: false,
};
} catch (e: any) {
- throw new Error(e.message + ":" + name);
+ throw new Error(e.message + ':' + name);
}
};
const result: PreparedDBFileHandle[] = [];
@@ -268,7 +265,6 @@ export const BROWSER_RUNTIME: DuckDBRuntime & {
mod.HEAPF64[(result >> 3) + 1] = 0;
return result;
}
-
} catch (e: any) {
error = e;
console.warn(`HEAD request with range header failed: ${e}`);
@@ -318,13 +314,23 @@ export const BROWSER_RUNTIME: DuckDBRuntime & {
}
}
- if (xhr.status == 206 && contentLength2 !== null && +contentLength2 == 1 && presumedLength !== null) {
+ if (
+ xhr.status == 206 &&
+ contentLength2 !== null &&
+ +contentLength2 == 1 &&
+ presumedLength !== null
+ ) {
const result = mod._malloc(2 * 8);
mod.HEAPF64[(result >> 3) + 0] = +presumedLength;
mod.HEAPF64[(result >> 3) + 1] = 0;
return result;
}
- if (xhr.status == 200 && contentLength2 !== null && contentLength !== null && +contentLength2 == +contentLength) {
+ if (
+ xhr.status == 200 &&
+ contentLength2 !== null &&
+ contentLength !== null &&
+ +contentLength2 == +contentLength
+ ) {
console.warn(`fall back to full HTTP read for: ${file.dataUrl}`);
const data = mod._malloc(xhr.response.byteLength);
const src = new Uint8Array(xhr.response, 0, xhr.response.byteLength);
@@ -494,24 +500,24 @@ export const BROWSER_RUNTIME: DuckDBRuntime & {
closeFile: (mod: DuckDBModule, fileId: number) => {
const file = BROWSER_RUNTIME.getFileInfo(mod, fileId);
BROWSER_RUNTIME._fileInfoCache.delete(fileId);
- try {
- switch (file?.dataProtocol) {
- case DuckDBDataProtocol.BUFFER:
- case DuckDBDataProtocol.HTTP:
- case DuckDBDataProtocol.S3:
- break;
- case DuckDBDataProtocol.NODE_FS:
- case DuckDBDataProtocol.BROWSER_FILEREADER:
- // XXX Remove from registry
- return;
- case DuckDBDataProtocol.BROWSER_FSACCESS: {
- const handle: FileSystemSyncAccessHandle = BROWSER_RUNTIME._files?.get(file.fileName);
- if (!handle) {
- throw new Error(`No OPFS access handle registered with name: ${file.fileName}`);
+ try {
+ switch (file?.dataProtocol) {
+ case DuckDBDataProtocol.BUFFER:
+ case DuckDBDataProtocol.HTTP:
+ case DuckDBDataProtocol.S3:
+ break;
+ case DuckDBDataProtocol.NODE_FS:
+ case DuckDBDataProtocol.BROWSER_FILEREADER:
+ // XXX Remove from registry
+ return;
+ case DuckDBDataProtocol.BROWSER_FSACCESS: {
+ const handle: FileSystemSyncAccessHandle = BROWSER_RUNTIME._files?.get(file.fileName);
+ if (!handle) {
+ throw new Error(`No OPFS access handle registered with name: ${file.fileName}`);
+ }
+ return handle.flush();
}
- return handle.flush();
}
- }
} catch (e: any) {
console.log(e);
failWith(mod, e.toString());
@@ -691,9 +697,13 @@ export const BROWSER_RUNTIME: DuckDBRuntime & {
return 0;
},
progressUpdate: (done: number, percentage: number, repeat: number): void => {
- if (postMessage) {
- postMessage({requestId: 0, type: WorkerResponseType.PROGRESS_UPDATE, data: {status: done?"completed":"in-progress", percentage: percentage, repetitions: repeat}});
- }
+ if (postMessage) {
+ postMessage({
+ requestId: 0,
+ type: WorkerResponseType.PROGRESS_UPDATE,
+ data: { status: done ? 'completed' : 'in-progress', percentage: percentage, repetitions: repeat },
+ });
+ }
},
checkDirectory: (mod: DuckDBModule, pathPtr: number, pathLen: number) => {
const path = readString(mod, pathPtr, pathLen);
diff --git a/packages/duckdb-wasm/src/bindings/runtime_node.ts b/packages/duckdb-wasm/src/bindings/runtime_node.ts
index 5037068a2..7f331d079 100644
--- a/packages/duckdb-wasm/src/bindings/runtime_node.ts
+++ b/packages/duckdb-wasm/src/bindings/runtime_node.ts
@@ -127,7 +127,7 @@ export const NODE_RUNTIME: DuckDBRuntime & {
}
return 0;
},
- dropFile: (mod: DuckDBModule, _fileNamePtr: number, _fileNameLen:number) => {},
+ dropFile: (mod: DuckDBModule, _fileNamePtr: number, _fileNameLen: number) => {},
truncateFile: (mod: DuckDBModule, fileId: number, newSize: number) => {
try {
const file = NODE_RUNTIME.resolveFileInfo(mod, fileId);
diff --git a/packages/duckdb-wasm/src/log.ts b/packages/duckdb-wasm/src/log.ts
index 4a4f836ca..c705e28b3 100644
--- a/packages/duckdb-wasm/src/log.ts
+++ b/packages/duckdb-wasm/src/log.ts
@@ -45,7 +45,7 @@ export type ProgressEntry = {
readonly status: string;
readonly percentage: string;
readonly repetitions: string;
-}
+};
/** An execution progress handler */
export type ExecutionProgressHandler = (p: ProgressEntry) => void;
diff --git a/packages/duckdb-wasm/src/parallel/async_bindings.ts b/packages/duckdb-wasm/src/parallel/async_bindings.ts
index 9d8a1c2e7..2e2b3dc72 100644
--- a/packages/duckdb-wasm/src/parallel/async_bindings.ts
+++ b/packages/duckdb-wasm/src/parallel/async_bindings.ts
@@ -129,7 +129,7 @@ export class AsyncDuckDB implements AsyncDuckDBBindings {
case WorkerResponseType.PROGRESS_UPDATE: {
for (const p of this._onExecutionProgress) {
p(response.data);
- }
+ }
return;
}
case WorkerResponseType.LOG: {
@@ -524,7 +524,7 @@ export class AsyncDuckDB implements AsyncDuckDBBindings {
/** Register an empty file buffer. */
    public async registerEmptyFileBuffer(name: string): Promise<void> {
-/*
+ /*
const task = new WorkerTask(
WorkerRequestType.REGISTER_FILE_BUFFER,
[name, new Uint8Array()],
diff --git a/packages/duckdb-wasm/src/parallel/async_connection.ts b/packages/duckdb-wasm/src/parallel/async_connection.ts
index d4bdd8ecd..89bec591f 100644
--- a/packages/duckdb-wasm/src/parallel/async_connection.ts
+++ b/packages/duckdb-wasm/src/parallel/async_connection.ts
@@ -42,8 +42,8 @@ export class AsyncDuckDBConnection {
});
const buffer = await this._bindings.runQuery(this._conn, text);
const reader = arrow.RecordBatchReader.from(buffer);
- console.assert(reader.isSync(), "Reader is not sync");
- console.assert(reader.isFile(), "Reader is not file");
+ console.assert(reader.isSync(), 'Reader is not sync');
+ console.assert(reader.isFile(), 'Reader is not file');
return new arrow.Table(reader as arrow.RecordBatchFileReader);
}
diff --git a/packages/duckdb-wasm/src/platform.ts b/packages/duckdb-wasm/src/platform.ts
index 8f17c3ad6..8d175d69c 100644
--- a/packages/duckdb-wasm/src/platform.ts
+++ b/packages/duckdb-wasm/src/platform.ts
@@ -20,10 +20,10 @@ export const isFirefox = () => userAgent().includes('Firefox');
export const isSafari = () => /^((?!chrome|android).)*safari/i.test(userAgent());
/** Bundles have different characteristics:
- * - MVP: minimum viable product (uses features from first stable version of WebAssembly standard)
- * - EH: exception handling
- * - COI: cross origin isolation
- */
+ * - MVP: minimum viable product (uses features from first stable version of WebAssembly standard)
+ * - EH: exception handling
+ * - COI: cross origin isolation
+ */
export interface DuckDBBundles {
mvp: {
mainModule: string;
diff --git a/packages/duckdb-wasm/src/status.ts b/packages/duckdb-wasm/src/status.ts
index 4ae0f8f98..7b0557da1 100644
--- a/packages/duckdb-wasm/src/status.ts
+++ b/packages/duckdb-wasm/src/status.ts
@@ -1,13 +1,13 @@
export enum StatusCode {
- SUCCESS = 0,
- MAX_ARROW_ERROR = 255,
- DUCKDB_WASM_RETRY = 256,
+ SUCCESS = 0,
+ MAX_ARROW_ERROR = 255,
+ DUCKDB_WASM_RETRY = 256,
}
export function IsArrowBuffer(status: StatusCode): boolean {
- return status <= StatusCode.MAX_ARROW_ERROR;
+ return status <= StatusCode.MAX_ARROW_ERROR;
}
export function IsDuckDBWasmRetry(status: StatusCode): boolean {
- return status === StatusCode.DUCKDB_WASM_RETRY;
+ return status === StatusCode.DUCKDB_WASM_RETRY;
}
diff --git a/packages/duckdb-wasm/src/utils/s3_helper.ts b/packages/duckdb-wasm/src/utils/s3_helper.ts
index aa0d908a2..72390a9e6 100644
--- a/packages/duckdb-wasm/src/utils/s3_helper.ts
+++ b/packages/duckdb-wasm/src/utils/s3_helper.ts
@@ -1,30 +1,30 @@
-import {S3Config} from "../bindings";
-import {sha256} from "js-sha256";
+import { S3Config } from '../bindings';
+import { sha256 } from 'js-sha256';
export interface S3Params {
- url: string,
- query: string,
- host: string,
- region: string,
- service: string,
- method: string,
- accessKeyId: string,
- secretAccessKey: string,
- sessionToken: string,
- dateNow: string,
- datetimeNow: string
+ url: string;
+ query: string;
+ host: string;
+ region: string;
+ service: string;
+ method: string;
+ accessKeyId: string;
+ secretAccessKey: string;
+ sessionToken: string;
+ dateNow: string;
+ datetimeNow: string;
}
export interface S3PayloadParams {
- contentHash: string | null,
- contentType: string | null
+ contentHash: string | null;
+ contentType: string | null;
}
-const getHTTPHost = function (config : S3Config | undefined, url : string, bucket : string) : string {
- if (config?.endpoint?.startsWith("http")) {
+const getHTTPHost = function (config: S3Config | undefined, url: string, bucket: string): string {
+ if (config?.endpoint?.startsWith('http')) {
// Endpoint is a full url, we append the bucket
const httpHost = `${config?.endpoint}`;
- const offset = httpHost.indexOf("://")+3;
+ const offset = httpHost.indexOf('://') + 3;
return httpHost.substring(offset);
} else if (config?.endpoint) {
// Endpoint is not a full url and the https://{bucket}.{domain} format will be used
@@ -33,53 +33,60 @@ const getHTTPHost = function (config : S3Config | undefined, url : string, bucke
// Default aws s3 url
return `${bucket}.s3.amazonaws.com`;
}
-}
+};
-export function getS3Params (config : S3Config | undefined, url: string, method : string) : S3Params {
+export function getS3Params(config: S3Config | undefined, url: string, method: string): S3Params {
const parsedS3Url = parseS3Url(url);
- // when using S3 path-style access, the signed URL should also include the bucket name,
+ // when using S3 path-style access, the signed URL should also include the bucket name,
// as it is present in the HTTP URL path.
// See: https://docs.aws.amazon.com/AmazonS3/latest/userguide/access-bucket-intro.html#path-style-url-ex
- let path = parsedS3Url.path;
+ let path = parsedS3Url.path;
if (isPathStyleAccess(config)) {
path = `/${parsedS3Url.bucket}${path}`;
}
return {
url: path,
- query: "",
+ query: '',
host: getHTTPHost(config, url, parsedS3Url.bucket),
- region: (config?.region) ?? "",
- service: "s3",
+ region: config?.region ?? '',
+ service: 's3',
method: method,
- accessKeyId: (config?.accessKeyId) ?? "",
- secretAccessKey: (config?.secretAccessKey) ?? "",
- sessionToken: (config?.sessionToken) ?? "",
- dateNow: new Date().toISOString().replace(/-/g,'').split('T')[0],
- datetimeNow: new Date().toISOString().replace(/-/g,'').replace(/:/g,'').split('.')[0]+ 'Z',
+ accessKeyId: config?.accessKeyId ?? '',
+ secretAccessKey: config?.secretAccessKey ?? '',
+ sessionToken: config?.sessionToken ?? '',
+ dateNow: new Date().toISOString().replace(/-/g, '').split('T')[0],
+ datetimeNow: new Date().toISOString().replace(/-/g, '').replace(/:/g, '').split('.')[0] + 'Z',
};
}
-export function uriEncode(input : string, encode_slash = false) {
+export function uriEncode(input: string, encode_slash = false) {
// https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-query-string-auth.html
- const hexDigit = "0123456789ABCDEF";
- let result = "";
+ const hexDigit = '0123456789ABCDEF';
+ let result = '';
for (let i = 0; i < input.length; i++) {
- const ch : string = input[i];
-
- if ((ch >= 'A' && ch <= 'Z') || (ch >= 'a' && ch <= 'z') || (ch >= '0' && ch <= '9') || ch == '_' ||
- ch == '-' || ch == '~' || ch == '.') {
+ const ch: string = input[i];
+
+ if (
+ (ch >= 'A' && ch <= 'Z') ||
+ (ch >= 'a' && ch <= 'z') ||
+ (ch >= '0' && ch <= '9') ||
+ ch == '_' ||
+ ch == '-' ||
+ ch == '~' ||
+ ch == '.'
+ ) {
result += ch;
} else if (ch == '/') {
if (encode_slash) {
- result += "%2F";
+ result += '%2F';
} else {
result += ch;
}
} else {
- result += "%";
+ result += '%';
result += hexDigit[ch.charCodeAt(0) >> 4];
result += hexDigit[ch.charCodeAt(0) & 15];
}
@@ -87,47 +94,57 @@ export function uriEncode(input : string, encode_slash = false) {
return result;
}
-export function createS3Headers(params: S3Params, payloadParams : S3PayloadParams | null = null) : Map<string, string> {
+export function createS3Headers(params: S3Params, payloadParams: S3PayloadParams | null = null): Map<string, string> {
// this is the sha256 of the empty string, its useful since we have no payload for GET requests
- const payloadHash = (payloadParams?.contentHash) ?? "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855";
+ const payloadHash =
+ payloadParams?.contentHash ?? 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855';
    const res = new Map<string, string>();
// res.set("host", params.host)
- res.set("x-amz-date", params.datetimeNow);
- res.set("x-amz-content-sha256", payloadHash);
+ res.set('x-amz-date', params.datetimeNow);
+ res.set('x-amz-content-sha256', payloadHash);
if (params.sessionToken) {
- res.set("x-amz-security-token", params.sessionToken);
+ res.set('x-amz-security-token', params.sessionToken);
}
// construct string to sign
- let signedHeaders = "";
+ let signedHeaders = '';
if (payloadParams?.contentType) {
- signedHeaders += "content-type;";
+ signedHeaders += 'content-type;';
}
- signedHeaders += "host;x-amz-content-sha256;x-amz-date";
+ signedHeaders += 'host;x-amz-content-sha256;x-amz-date';
if (params.sessionToken) {
- signedHeaders += ";x-amz-security-token";
+ signedHeaders += ';x-amz-security-token';
}
- let canonicalRequest = params.method + "\n" + uriEncode(params.url) + "\n" + params.query;
+ let canonicalRequest = params.method + '\n' + uriEncode(params.url) + '\n' + params.query;
if (payloadParams?.contentType) {
- canonicalRequest += "\ncontent-type:" + payloadParams?.contentType;
+ canonicalRequest += '\ncontent-type:' + payloadParams?.contentType;
}
- canonicalRequest += "\nhost:" + params.host +
- "\nx-amz-content-sha256:" + payloadHash + "\nx-amz-date:" + params.datetimeNow;
+ canonicalRequest +=
+ '\nhost:' + params.host + '\nx-amz-content-sha256:' + payloadHash + '\nx-amz-date:' + params.datetimeNow;
if (params.sessionToken && params.sessionToken.length > 0) {
- canonicalRequest += "\nx-amz-security-token:" + params.sessionToken;
+ canonicalRequest += '\nx-amz-security-token:' + params.sessionToken;
}
- canonicalRequest += "\n\n" + signedHeaders + "\n" + payloadHash;
+ canonicalRequest += '\n\n' + signedHeaders + '\n' + payloadHash;
const canonicalRequestHashStr = sha256(canonicalRequest);
- const stringToSign = "AWS4-HMAC-SHA256\n" + params.datetimeNow + "\n" + params.dateNow + "/" + params.region + "/" + params.service +
- "/aws4_request\n" + canonicalRequestHashStr;
+ const stringToSign =
+ 'AWS4-HMAC-SHA256\n' +
+ params.datetimeNow +
+ '\n' +
+ params.dateNow +
+ '/' +
+ params.region +
+ '/' +
+ params.service +
+ '/aws4_request\n' +
+ canonicalRequestHashStr;
// ts-ignore's because library can accept array buffer as key, but TS arg is incorrect
- const signKey = "AWS4" + params.secretAccessKey;
+ const signKey = 'AWS4' + params.secretAccessKey;
const kDate = sha256.hmac.arrayBuffer(signKey, params.dateNow);
// Note, js-sha256 has a bug in the TS interface that only supports strings as keys, while we need a bytearray
@@ -137,31 +154,56 @@ export function createS3Headers(params: S3Params, payloadParams : S3PayloadParam
const kRegion = sha256.hmac.arrayBuffer(kDate, params.region);
// eslint-disable-next-line
// @ts-ignore
- const kService = sha256.hmac.arrayBuffer(kRegion, params.service,);
+ const kService = sha256.hmac.arrayBuffer(kRegion, params.service);
// eslint-disable-next-line
// @ts-ignore
- const signingKey = sha256.hmac.arrayBuffer(kService, "aws4_request");
+ const signingKey = sha256.hmac.arrayBuffer(kService, 'aws4_request');
// eslint-disable-next-line
// @ts-ignore
const signature = sha256.hmac(signingKey, stringToSign);
- res.set("Authorization", "AWS4-HMAC-SHA256 Credential=" + params.accessKeyId + "/" + params.dateNow + "/" + params.region + "/" +
- params.service + "/aws4_request, SignedHeaders=" + signedHeaders +
- ", Signature=" + signature);
+ res.set(
+ 'Authorization',
+ 'AWS4-HMAC-SHA256 Credential=' +
+ params.accessKeyId +
+ '/' +
+ params.dateNow +
+ '/' +
+ params.region +
+ '/' +
+ params.service +
+ '/aws4_request, SignedHeaders=' +
+ signedHeaders +
+ ', Signature=' +
+ signature,
+ );
return res;
}
-const createS3HeadersFromS3Config = function (config : S3Config | undefined, url : string, method : string, contentType: string | null = null, payload : Uint8Array | null = null) : Map<string, string> {
+const createS3HeadersFromS3Config = function (
+ config: S3Config | undefined,
+ url: string,
+ method: string,
+ contentType: string | null = null,
+ payload: Uint8Array | null = null,
+): Map<string, string> {
const params = getS3Params(config, url, method);
const payloadParams = {
contentType: contentType,
- contentHash: payload ? sha256.hex(payload!) : null
+ contentHash: payload ? sha256.hex(payload!) : null,
} as S3PayloadParams;
return createS3Headers(params, payloadParams);
-}
+};
-export function addS3Headers(xhr: XMLHttpRequest, config : S3Config | undefined, url : string, method: string, contentType: string | null = null, payload : Uint8Array | null = null) {
+export function addS3Headers(
+ xhr: XMLHttpRequest,
+ config: S3Config | undefined,
+ url: string,
+ method: string,
+ contentType: string | null = null,
+ payload: Uint8Array | null = null,
+) {
if (config?.accessKeyId || config?.sessionToken) {
const headers = createS3HeadersFromS3Config(config, url, method, contentType, payload);
headers.forEach((value: string, header: string) => {
@@ -169,14 +211,14 @@ export function addS3Headers(xhr: XMLHttpRequest, config : S3Config | undefined,
});
if (contentType) {
- xhr.setRequestHeader("content-type", contentType);
+ xhr.setRequestHeader('content-type', contentType);
}
}
}
-export function parseS3Url (url: string) : {bucket : string, path : string} {
- if (url.indexOf("s3://") != 0) {
- throw new Error("URL needs to start with s3://");
+export function parseS3Url(url: string): { bucket: string; path: string } {
+ if (url.indexOf('s3://') != 0) {
+ throw new Error('URL needs to start with s3://');
}
const slashPos = url.indexOf('/', 5);
@@ -186,28 +228,28 @@ export function parseS3Url (url: string) : {bucket : string, path : string} {
const bucket = url.substring(5, slashPos);
if (!bucket) {
- throw new Error("URL needs to contain a bucket name");
+ throw new Error('URL needs to contain a bucket name');
}
const path = url.substring(slashPos);
if (!path) {
- throw new Error("URL needs to contain key");
+ throw new Error('URL needs to contain key');
}
- return {bucket: bucket, path: path}
+ return { bucket: bucket, path: path };
}
-function isPathStyleAccess(config : S3Config | undefined) : boolean {
- if (config?.endpoint?.startsWith("http")) {
- return true
+function isPathStyleAccess(config: S3Config | undefined): boolean {
+ if (config?.endpoint?.startsWith('http')) {
+ return true;
}
- return false
+ return false;
}
-export function getHTTPUrl(config : S3Config | undefined, url : string) : string {
+export function getHTTPUrl(config: S3Config | undefined, url: string): string {
const parsedUrl = parseS3Url(url);
if (isPathStyleAccess(config)) {
// Endpoint is a full url, we append the bucket
return `${config?.endpoint}/${parsedUrl.bucket}` + parsedUrl.path;
}
return 'https://' + getHTTPHost(config, url, parsedUrl.bucket) + parsedUrl.path;
-}
\ No newline at end of file
+}
diff --git a/packages/duckdb-wasm/test/bindings.test.ts b/packages/duckdb-wasm/test/bindings.test.ts
index e016ba13e..97a4447e0 100644
--- a/packages/duckdb-wasm/test/bindings.test.ts
+++ b/packages/duckdb-wasm/test/bindings.test.ts
@@ -45,12 +45,10 @@ export function testBindings(db: () => duckdb.DuckDBBindings, baseURL: string):
it('Platform check', async () => {
await db().reset();
conn = db().connect();
- const version = conn.query<{ name: arrow.Utf8 }>(
- "PRAGMA platform;",
- );
+ const version = conn.query<{ name: arrow.Utf8 }>('PRAGMA platform;');
const rows = version.getChildAt(0)?.toArray();
expect(rows.length).toEqual(1);
- expect(rows[0].toString().substr(0,5)).toEqual("wasm_");
+ expect(rows[0].toString().substr(0, 5)).toEqual('wasm_');
await db().reset();
});
});
diff --git a/packages/duckdb-wasm/test/excel.test.ts b/packages/duckdb-wasm/test/excel.test.ts
index 14e266f07..d293c0e05 100644
--- a/packages/duckdb-wasm/test/excel.test.ts
+++ b/packages/duckdb-wasm/test/excel.test.ts
@@ -14,15 +14,30 @@ export function testEXCEL(db: () => duckdb.DuckDBBindings): void {
describe('EXCEL', () => {
it('sample', async () => {
- expect(conn.query("SELECT text(1234567.897, 'h:mm:ss.00')",).getChildAt(0)?.toArray()).toEqual(['21:31:40.80']);
- expect(conn.query("SELECT text(1234567.897, 'm/d/yyyy h:mm AM/PM')",).getChildAt(0)?.toArray()).toEqual(['2/15/5280 9:31 PM']);
- expect(conn.query("SELECT text(1234567.897, 'dddd, dd of MMMM of YYYY')",).getChildAt(0)?.toArray()).toEqual(['Thursday, 15 of February of 5280']);
+ expect(conn.query("SELECT text(1234567.897, 'h:mm:ss.00')").getChildAt(0)?.toArray()).toEqual([
+ '21:31:40.80',
+ ]);
+ expect(conn.query("SELECT text(1234567.897, 'm/d/yyyy h:mm AM/PM')").getChildAt(0)?.toArray()).toEqual([
+ '2/15/5280 9:31 PM',
+ ]);
+ expect(conn.query("SELECT text(1234567.897, 'dddd, dd of MMMM of YYYY')").getChildAt(0)?.toArray()).toEqual(
+ ['Thursday, 15 of February of 5280'],
+ );
- expect(conn.query("SELECT text(1234567.897, '# ??/??')",).getChildAt(0)?.toArray()).toEqual(['1234567 61/68']);
+ expect(conn.query("SELECT text(1234567.897, '# ??/??')").getChildAt(0)?.toArray()).toEqual([
+ '1234567 61/68',
+ ]);
- expect(conn.query("SELECT text(12345678912, '(###) ###-####')",).getChildAt(0)?.toArray()).toEqual(['(1234) 567-8912']);
- expect(conn.query("SELECT text(1234567.897, '$#,##0')",).getChildAt(0)?.toArray()).toEqual(['$1,234,568']);
- expect(conn.query("SELECT excel_text(123456789123, '[<=9999999]##-####;[>9999999](###) ###-####')",).getChildAt(0)?.toArray()).toEqual(['(12345) 678-9123']);
+ expect(conn.query("SELECT text(12345678912, '(###) ###-####')").getChildAt(0)?.toArray()).toEqual([
+ '(1234) 567-8912',
+ ]);
+ expect(conn.query("SELECT text(1234567.897, '$#,##0')").getChildAt(0)?.toArray()).toEqual(['$1,234,568']);
+ expect(
+ conn
+ .query("SELECT excel_text(123456789123, '[<=9999999]##-####;[>9999999](###) ###-####')")
+ .getChildAt(0)
+ ?.toArray(),
+ ).toEqual(['(12345) 678-9123']);
});
});
}
diff --git a/packages/duckdb-wasm/test/httpfs_test.ts b/packages/duckdb-wasm/test/httpfs_test.ts
index 6e2f719cd..9b9617ceb 100644
--- a/packages/duckdb-wasm/test/httpfs_test.ts
+++ b/packages/duckdb-wasm/test/httpfs_test.ts
@@ -2,7 +2,7 @@ import * as duckdb from '../src/';
import { getS3Params, S3Params, S3PayloadParams, createS3Headers, uriEncode, getHTTPUrl } from '../src/utils';
import { AsyncDuckDBConnection, DuckDBBindings, DuckDBBindingsBase, DuckDBModule } from '../src/';
import BROWSER_RUNTIME from '../src/bindings/runtime_browser';
-import {generateLongQueryString} from "./string_test_helper";
+import { generateLongQueryString } from './string_test_helper';
// S3 config for tests
const BUCKET_NAME = 'test-bucket';
@@ -312,7 +312,7 @@ export function testHTTPFSAsync(
`COPY (SELECT * FROM range(1000,1010) tbl(i)) TO 's3://${BUCKET_NAME}/test_written.csv' (FORMAT 'csv');`,
);
const result = await conn!.query(`SELECT * FROM "s3://${BUCKET_NAME}/test_written.csv";`);
- expect(Number((result.getChildAt(0)?.get(6)))).toEqual(Number(1006));
+ expect(Number(result.getChildAt(0)?.get(6))).toEqual(Number(1006));
await expectAsync(
conn!.query(
`COPY (SELECT * FROM range(2000,2010) tbl(i)) TO 's3://${BUCKET_NAME}/test_written.csv' (FORMAT 'csv');`,
@@ -330,7 +330,7 @@ export function testHTTPFSAsync(
const result = await conn!.query(
`SELECT * FROM "${S3_ENDPOINT}/${BUCKET_NAME}/correct_auth_test.parquet?${queryString}";`,
);
- expect(Number((result.getChildAt(0)?.get(6)))).toEqual(Number(29120));
+ expect(Number(result.getChildAt(0)?.get(6))).toEqual(Number(29120));
});
it('can read csv file from URL with long query string', async () => {
@@ -343,7 +343,7 @@ export function testHTTPFSAsync(
const result = await conn!.query(
`SELECT * FROM "${S3_ENDPOINT}/${BUCKET_NAME}/correct_auth_test.csv?${queryString}";`,
);
- expect(Number((result.getChildAt(0)?.get(6)))).toEqual(Number(29120));
+ expect(Number(result.getChildAt(0)?.get(6))).toEqual(Number(29120));
});
});
}
diff --git a/packages/duckdb-wasm/test/json.test.ts b/packages/duckdb-wasm/test/json.test.ts
index 7e7cacfb1..46d115903 100644
--- a/packages/duckdb-wasm/test/json.test.ts
+++ b/packages/duckdb-wasm/test/json.test.ts
@@ -14,8 +14,8 @@ export function testJSON(db: () => duckdb.DuckDBBindings): void {
describe('JSON', () => {
it('sample', async () => {
- expect(conn.query("select to_json({n: 42})",).getChildAt(0)?.toArray()).toEqual(['{"n":42}']);
- expect(conn.query("select json_object('duck', 42)",).getChildAt(0)?.toArray()).toEqual(['{"duck":42}']);
+ expect(conn.query('select to_json({n: 42})').getChildAt(0)?.toArray()).toEqual(['{"n":42}']);
+ expect(conn.query("select json_object('duck', 42)").getChildAt(0)?.toArray()).toEqual(['{"duck":42}']);
});
});
}
diff --git a/packages/duckdb-wasm/test/long_queries.test.ts b/packages/duckdb-wasm/test/long_queries.test.ts
index 5ff02daf6..87729d022 100644
--- a/packages/duckdb-wasm/test/long_queries.test.ts
+++ b/packages/duckdb-wasm/test/long_queries.test.ts
@@ -26,14 +26,13 @@ export function longQueries(db: () => duckdb.AsyncDuckDB): void {
let str = `with big_expr as ( select `;
let i = 1;
- while (str.length < 1e6) {
+ while (str.length < 1e6) {
str += ` ` + i + ` as col_` + i + `,`;
i++;
}
- str += ` NULL as col_NULL) select 99;`
+ str += ` NULL as col_NULL) select 99;`;
await conn.query(str);
});
});
}
-
diff --git a/packages/duckdb-wasm/test/opfs.test.ts b/packages/duckdb-wasm/test/opfs.test.ts
index eaf1a0fcc..f4dfb4e02 100644
--- a/packages/duckdb-wasm/test/opfs.test.ts
+++ b/packages/duckdb-wasm/test/opfs.test.ts
@@ -1,5 +1,5 @@
import * as duckdb from '../src/';
-import {LogLevel} from '../src/';
+import { LogLevel } from '../src/';
import * as arrow from 'apache-arrow';
export function testOPFS(baseDir: string, bundle: () => duckdb.DuckDBBundle): void {
@@ -29,7 +29,7 @@ export function testOPFS(baseDir: string, bundle: () => duckdb.DuckDBBundle): vo
await db.instantiate(bundle().mainModule, bundle().pthreadWorker);
await db.open({
path: 'opfs://test.db',
- accessMode: duckdb.DuckDBAccessMode.READ_WRITE
+ accessMode: duckdb.DuckDBAccessMode.READ_WRITE,
});
conn = await db.connect();
});
@@ -83,7 +83,7 @@ export function testOPFS(baseDir: string, bundle: () => duckdb.DuckDBBundle): vo
await db.instantiate(bundle().mainModule, bundle().pthreadWorker);
await db.open({
path: 'opfs://test.db',
- accessMode: duckdb.DuckDBAccessMode.READ_WRITE
+ accessMode: duckdb.DuckDBAccessMode.READ_WRITE,
});
conn = await db.connect();
@@ -102,7 +102,7 @@ export function testOPFS(baseDir: string, bundle: () => duckdb.DuckDBBundle): vo
res.arrayBuffer(),
);
const opfsRoot = await navigator.storage.getDirectory();
- const fileHandle = await opfsRoot.getFileHandle('test.parquet', {create: true});
+ const fileHandle = await opfsRoot.getFileHandle('test.parquet', { create: true });
const writable = await fileHandle.createWritable();
await writable.write(parquetBuffer);
await writable.close();
@@ -126,8 +126,8 @@ export function testOPFS(baseDir: string, bundle: () => duckdb.DuckDBBundle): vo
res.arrayBuffer(),
);
const opfsRoot = await navigator.storage.getDirectory();
- const datadir = await opfsRoot.getDirectoryHandle("datadir", {create: true});
- const fileHandle = await datadir.getFileHandle('test.parquet', {create: true});
+ const datadir = await opfsRoot.getDirectoryHandle('datadir', { create: true });
+ const fileHandle = await datadir.getFileHandle('test.parquet', { create: true });
const writable = await fileHandle.createWritable();
await writable.write(parquetBuffer);
await writable.close();
@@ -150,7 +150,7 @@ export function testOPFS(baseDir: string, bundle: () => duckdb.DuckDBBundle): vo
res.arrayBuffer(),
);
const opfsRoot = await navigator.storage.getDirectory();
- const fileHandle = await opfsRoot.getFileHandle('test.parquet', {create: true});
+ const fileHandle = await opfsRoot.getFileHandle('test.parquet', { create: true });
const writable = await fileHandle.createWritable();
await writable.write(parquetBuffer);
await writable.close();
@@ -192,12 +192,11 @@ export function testOPFS(baseDir: string, bundle: () => duckdb.DuckDBBundle): vo
const table3 = await new arrow.Table<{ cnt: arrow.Int }>(batches3);
expect(table3.getChildAt(0)?.get(0)).toBeGreaterThan(60_000);
}
-
});
it('Drop File + Export as CSV to OPFS + Load CSV', async () => {
const opfsRoot = await navigator.storage.getDirectory();
- const testHandle = await opfsRoot.getFileHandle('test.csv', {create: true});
+ const testHandle = await opfsRoot.getFileHandle('test.csv', { create: true });
await db.registerFileHandle('test.csv', testHandle, duckdb.DuckDBDataProtocol.BROWSER_FSACCESS, true);
await conn.send(`CREATE TABLE zzz AS SELECT * FROM "${baseDir}/tpch/0_01/parquet/lineitem.parquet"`);
await conn.send(`COPY (SELECT * FROM zzz) TO 'test.csv'`);
@@ -221,12 +220,11 @@ export function testOPFS(baseDir: string, bundle: () => duckdb.DuckDBBundle): vo
await db.dropFile('test.csv');
});
-
it('Drop Files + Export as CSV to OPFS + Load CSV', async () => {
const opfsRoot = await navigator.storage.getDirectory();
- const testHandle1 = await opfsRoot.getFileHandle('test1.csv', {create: true});
- const testHandle2 = await opfsRoot.getFileHandle('test2.csv', {create: true});
- const testHandle3 = await opfsRoot.getFileHandle('test3.csv', {create: true});
+ const testHandle1 = await opfsRoot.getFileHandle('test1.csv', { create: true });
+ const testHandle2 = await opfsRoot.getFileHandle('test2.csv', { create: true });
+ const testHandle3 = await opfsRoot.getFileHandle('test3.csv', { create: true });
await db.registerFileHandle('test1.csv', testHandle1, duckdb.DuckDBDataProtocol.BROWSER_FSACCESS, true);
await db.registerFileHandle('test2.csv', testHandle2, duckdb.DuckDBDataProtocol.BROWSER_FSACCESS, true);
await db.registerFileHandle('test3.csv', testHandle3, duckdb.DuckDBDataProtocol.BROWSER_FSACCESS, true);
@@ -280,28 +278,19 @@ export function testOPFS(baseDir: string, bundle: () => duckdb.DuckDBBundle): vo
async function removeFiles() {
const opfsRoot = await navigator.storage.getDirectory();
- await opfsRoot.removeEntry('test.db').catch(() => {
- });
- await opfsRoot.removeEntry('test.db.wal').catch(() => {
- });
- await opfsRoot.removeEntry('test.csv').catch(() => {
- });
- await opfsRoot.removeEntry('test1.csv').catch(() => {
- });
- await opfsRoot.removeEntry('test2.csv').catch(() => {
- });
- await opfsRoot.removeEntry('test3.csv').catch(() => {
- });
- await opfsRoot.removeEntry('test.parquet').catch(() => {
- });
+ await opfsRoot.removeEntry('test.db').catch(() => {});
+ await opfsRoot.removeEntry('test.db.wal').catch(() => {});
+ await opfsRoot.removeEntry('test.csv').catch(() => {});
+ await opfsRoot.removeEntry('test1.csv').catch(() => {});
+ await opfsRoot.removeEntry('test2.csv').catch(() => {});
+ await opfsRoot.removeEntry('test3.csv').catch(() => {});
+ await opfsRoot.removeEntry('test.parquet').catch(() => {});
try {
const datadir = await opfsRoot.getDirectoryHandle('datadir');
- datadir.removeEntry('test.parquet').catch(() => {
- });
+ datadir.removeEntry('test.parquet').catch(() => {});
} catch (e) {
//
}
- await opfsRoot.removeEntry('datadir').catch(() => {
- });
+ await opfsRoot.removeEntry('datadir').catch(() => {});
}
}
diff --git a/packages/duckdb-wasm/test/regression/github_1833.test.ts b/packages/duckdb-wasm/test/regression/github_1833.test.ts
index 4e597ed2c..ca89e0497 100644
--- a/packages/duckdb-wasm/test/regression/github_1833.test.ts
+++ b/packages/duckdb-wasm/test/regression/github_1833.test.ts
@@ -14,17 +14,17 @@ export function test1833(db: () => duckdb.AsyncDuckDB): void {
});
describe('GitHub issues', () => {
it('1833', async () => {
- await conn.query(`
+ await conn.query(`
CREATE TABLE "Test" (value VARCHAR)
`);
- const stmt = await conn.prepare(`
+ const stmt = await conn.prepare(`
INSERT INTO "Test" (value)
VALUES (?)
`);
- await stmt.query('🦆🦆🦆🦆🦆');
- await stmt.query('goo␀se');
- await stmt.query('goo\u0000se');
- const result = await conn.query(`
+ await stmt.query('🦆🦆🦆🦆🦆');
+ await stmt.query('goo␀se');
+ await stmt.query('goo\u0000se');
+ const result = await conn.query(`
SELECT * FROM "Test"
`);
expect(result.schema.fields.length).toBe(1);
diff --git a/packages/duckdb-wasm/test/string_test_helper.ts b/packages/duckdb-wasm/test/string_test_helper.ts
index 96e83b823..96078f0d4 100644
--- a/packages/duckdb-wasm/test/string_test_helper.ts
+++ b/packages/duckdb-wasm/test/string_test_helper.ts
@@ -5,16 +5,18 @@ export function generateLongQueryString(): string {
const eee = repeatCharacter('E', 256);
const ggg = repeatCharacter('G', 128);
- return `test=inline` +
+ return (
+ `test=inline` +
`&Test-Security-Token=${aaa}` +
`&Test-Algorithm=${ccc}` +
`&Test-Date=${ddd}` +
`&Test-SignedHeaders=host` +
`&Test-Expires=43200` +
`&Test-Credential=${eee}` +
- `&Test-Signature=${ggg}`;
+ `&Test-Signature=${ggg}`
+ );
}
export function repeatCharacter(char: string, length: number): string {
return char.repeat(length);
-}
\ No newline at end of file
+}