diff --git a/.github/workflows/formatting.yml b/.github/workflows/formatting.yml index 50b635544b91..02955837fe53 100644 --- a/.github/workflows/formatting.yml +++ b/.github/workflows/formatting.yml @@ -27,7 +27,10 @@ jobs: - uses: dtolnay/rust-toolchain@stable with: components: clippy - - run: cargo clippy --all-features + targets: wasm32-unknown-unknown + - run: | + cargo clippy --all-features + cargo clippy --all-features -p query-engine-wasm --target wasm32-unknown-unknown format: runs-on: ubuntu-latest diff --git a/.github/workflows/test-query-engine-driver-adapters.yml b/.github/workflows/test-query-engine-driver-adapters.yml index 0a9c933c9b58..d9af2a375a98 100644 --- a/.github/workflows/test-query-engine-driver-adapters.yml +++ b/.github/workflows/test-query-engine-driver-adapters.yml @@ -25,27 +25,26 @@ jobs: fail-fast: false matrix: adapter: - - name: '@prisma/adapter-planetscale' + - name: 'planetscale (napi)' setup_task: 'dev-planetscale-js' - - name: '@prisma/adapter-pg (napi)' + - name: 'pg (napi)' setup_task: 'dev-pg-js' - - name: '@prisma/adapter-neon (ws) (napi)' + - name: 'neon (ws) (napi)' setup_task: 'dev-neon-js' - - name: '@prisma/adapter-libsql (Turso) (napi)' + - name: 'libsql (Turso) (napi)' setup_task: 'dev-libsql-js' - # TODO: uncomment when WASM engine is functional - # - name: '@prisma/adapter-planetscale' - # setup_task: 'dev-planetscale-wasm' - # needs_wasm_pack: true - # - name: '@prisma/adapter-pg (wasm)' - # setup_task: 'dev-pg-wasm' - # needs_wasm_pack: true - # - name: '@prisma/adapter-neon (ws) (wasm)' - # setup_task: 'dev-neon-wasm' - # needs_wasm_pack: true - # - name: '@prisma/adapter-libsql (Turso) (wasm)' - # setup_task: 'dev-libsql-wasm' - # needs_wasm_pack: true + - name: 'planetscale (wasm)' + setup_task: 'dev-planetscale-wasm' + needs_wasm_pack: true + - name: 'pg (wasm)' + setup_task: 'dev-pg-wasm' + needs_wasm_pack: true + - name: 'neon (ws) (wasm)' + setup_task: 'dev-neon-wasm' + needs_wasm_pack: true + - name: 
'libsql (Turso) (wasm)' + setup_task: 'dev-libsql-wasm' + needs_wasm_pack: true node_version: ['18'] env: LOG_LEVEL: 'info' # Set to "debug" to trace the query engine and node process running the driver adapter diff --git a/.gitignore b/.gitignore index 75c06e9ce68b..a4b51023344f 100644 --- a/.gitignore +++ b/.gitignore @@ -50,3 +50,6 @@ prisma-schema-wasm/nodejs # Ignore pnpm-lock.yaml query-engine/driver-adapters/pnpm-lock.yaml package-lock.json + +# Useful for local wasm32-* development +.cargo/ diff --git a/Cargo.lock b/Cargo.lock index 93d70a3bae4a..fe627f763520 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -23,7 +23,7 @@ version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47" dependencies = [ - "getrandom 0.2.10", + "getrandom 0.2.11", "once_cell", "version_check", ] @@ -673,16 +673,6 @@ dependencies = [ "windows-sys 0.45.0", ] -[[package]] -name = "console_error_panic_hook" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a06aeb73f470f66dcdbf7223caeebb85984942f22f1adb2a088cf9668146bbbc" -dependencies = [ - "cfg-if", - "wasm-bindgen", -] - [[package]] name = "convert_case" version = "0.4.0" @@ -848,6 +838,18 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "crosstarget-utils" +version = "0.1.0" +dependencies = [ + "futures", + "js-sys", + "pin-project", + "tokio", + "wasm-bindgen", + "wasm-bindgen-futures", +] + [[package]] name = "crypto-common" version = "0.1.6" @@ -871,11 +873,13 @@ dependencies = [ [[package]] name = "cuid" version = "1.3.2" -source = "git+https://github.com/prisma/cuid-rust?branch=wasm32-support#81309f9a11f70d178bb545971d51ceb7da692c52" +source = "git+https://github.com/prisma/cuid-rust?branch=wasm32-support#ccfd958c224c79758c2527a0bca9efcd71790a19" dependencies = [ "base36", "cuid-util", "cuid2", + "getrandom 0.2.11", + "js-sys", "num", "once_cell", "rand 0.8.5", @@ -885,12 +889,12 
@@ dependencies = [ [[package]] name = "cuid-util" version = "0.1.0" -source = "git+https://github.com/prisma/cuid-rust?branch=wasm32-support#81309f9a11f70d178bb545971d51ceb7da692c52" +source = "git+https://github.com/prisma/cuid-rust?branch=wasm32-support#ccfd958c224c79758c2527a0bca9efcd71790a19" [[package]] name = "cuid2" version = "0.1.2" -source = "git+https://github.com/prisma/cuid-rust?branch=wasm32-support#81309f9a11f70d178bb545971d51ceb7da692c52" +source = "git+https://github.com/prisma/cuid-rust?branch=wasm32-support#ccfd958c224c79758c2527a0bca9efcd71790a19" dependencies = [ "cuid-util", "num", @@ -1095,23 +1099,27 @@ name = "driver-adapters" version = "0.1.0" dependencies = [ "async-trait", - "bigdecimal", - "chrono", "expect-test", "futures", + "js-sys", "metrics 0.18.1", "napi", "napi-derive", - "num-bigint", "once_cell", - "psl", + "pin-project", "quaint", "serde", + "serde-wasm-bindgen", "serde_json", + "serde_repr", "tokio", "tracing", "tracing-core", + "tsify", "uuid", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm-rs-dbg", ] [[package]] @@ -1550,9 +1558,9 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.10" +version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427" +checksum = "fe9006bed769170c11f845cf00c7c1e9092aeb3f268e007c3e760ac68008070f" dependencies = [ "cfg-if", "js-sys", @@ -1917,9 +1925,9 @@ dependencies = [ [[package]] name = "insta" -version = "1.21.2" +version = "1.34.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "261bf85ed492cd1c47c9ba675e48649682a9d2d2e77f515c5386d7726fb0ba76" +checksum = "5d64600be34b2fcfc267740a243fa7744441bb4947a619ac4e5bb6507f35fbfc" dependencies = [ "console", "lazy_static", @@ -1983,9 +1991,9 @@ checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" [[package]] name = "js-sys" -version = "0.3.61" +version = "0.3.65" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "445dde2150c55e483f3d8416706b97ec8e8237c307e5b7b4b8dd15e6af2a0730" +checksum = "54c0c35952f67de54bb584e9fd912b3023117cbafc0a77d8f3dee1fb5f572fe8" dependencies = [ "wasm-bindgen", ] @@ -2111,9 +2119,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.147" +version = "0.2.150" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3" +checksum = "89d92a4743f9a61002fae18374ed11e7973f530cb3a3255fb354818118b2203c" [[package]] name = "libloading" @@ -3553,9 +3561,10 @@ dependencies = [ "bytes", "chrono", "connection-string", + "crosstarget-utils", "either", "futures", - "getrandom 0.2.10", + "getrandom 0.2.11", "hex", "indoc 0.3.6", "lru-cache", @@ -3655,6 +3664,7 @@ dependencies = [ "chrono", "connection-string", "crossbeam-channel", + "crosstarget-utils", "cuid", "enumflags2", "futures", @@ -3664,7 +3674,6 @@ dependencies = [ "once_cell", "opentelemetry", "petgraph 0.4.13", - "pin-project", "psl", "query-connector", "query-engine-metrics", @@ -3680,7 +3689,6 @@ dependencies = [ "tracing-subscriber", "user-facing-errors", "uuid", - "wasm-bindgen-futures", ] [[package]] @@ -3802,10 +3810,10 @@ dependencies = [ "anyhow", "async-trait", "connection-string", - "console_error_panic_hook", + "driver-adapters", "futures", "js-sys", - "log", + "opentelemetry", "psl", "quaint", "query-connector", @@ -3820,13 +3828,14 @@ dependencies = [ "tokio", "tracing", "tracing-futures", + "tracing-opentelemetry", "tracing-subscriber", "tsify", "url", "user-facing-errors", "wasm-bindgen", "wasm-bindgen-futures", - "wasm-logger", + "wasm-rs-dbg", ] [[package]] @@ -3836,7 +3845,7 @@ dependencies = [ "bigdecimal", "chrono", "cuid", - "getrandom 0.2.10", + "getrandom 0.2.11", "itertools", "nanoid", "prisma-value", @@ -3865,6 +3874,7 @@ dependencies = [ "hyper", "indexmap 1.9.3", "indoc 2.0.3", + "insta", "itertools", "jsonrpc-core", 
"nom", @@ -4020,7 +4030,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom 0.2.10", + "getrandom 0.2.11", ] [[package]] @@ -4650,9 +4660,9 @@ dependencies = [ [[package]] name = "serde_repr" -version = "0.1.16" +version = "0.1.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8725e1dfadb3a50f7e5ce0b1a540466f6ed3fe7a0fca2ac2b8b831d31316bd00" +checksum = "3081f5ffbb02284dda55132aa26daecedd7372a42417bbbab6f14ab7d6bb9145" dependencies = [ "proc-macro2", "quote", @@ -5925,7 +5935,7 @@ version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "79daa5ed5740825c40b389c5e50312b9c86df53fccd33f281df655642b43869d" dependencies = [ - "getrandom 0.2.10", + "getrandom 0.2.11", "serde", ] @@ -6019,9 +6029,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -version = "0.2.88" +version = "0.2.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7daec296f25a1bae309c0cd5c29c4b260e510e6d813c286b19eaadf409d40fce" +checksum = "0ed0d4f68a3015cc185aff4db9506a015f4b96f95303897bfa23f846db54064e" dependencies = [ "cfg-if", "wasm-bindgen-macro", @@ -6029,9 +6039,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.88" +version = "0.2.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e397f4664c0e4e428e8313a469aaa58310d302159845980fd23b0f22a847f217" +checksum = "1b56f625e64f3a1084ded111c4d5f477df9f8c92df113852fa5a374dbda78826" dependencies = [ "bumpalo", "log", @@ -6056,9 +6066,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.88" +version = "0.2.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5961017b3b08ad5f3fe39f1e79877f8ee7c23c5e5fd5eb80de95abc41f1f16b2" +checksum = 
"0162dbf37223cd2afce98f3d0785506dcb8d266223983e4b5b525859e6e182b2" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -6066,9 +6076,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.88" +version = "0.2.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5353b8dab669f5e10f5bd76df26a9360c748f054f862ff5f3f8aae0c7fb3907" +checksum = "f0eb82fcb7930ae6219a7ecfd55b217f5f0893484b7a13022ebb2b2bf20b5283" dependencies = [ "proc-macro2", "quote", @@ -6079,9 +6089,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-shared" -version = "0.2.88" +version = "0.2.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d046c5d029ba91a1ed14da14dca44b68bf2f124cfbaf741c54151fdb3e0750b" +checksum = "7ab9b36309365056cd639da3134bf87fa8f3d86008abf99e612384a6eecd459f" [[package]] name = "wasm-logger" @@ -6094,11 +6104,20 @@ dependencies = [ "web-sys", ] +[[package]] +name = "wasm-rs-dbg" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61e5fe4ac478ca5cf1db842029f41a5881da39e70320deb0006912f226ea63f4" +dependencies = [ + "web-sys", +] + [[package]] name = "web-sys" -version = "0.3.61" +version = "0.3.65" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e33b99f4b23ba3eec1a53ac264e35a755f00e966e0065077d6027c0f575b0b97" +checksum = "5db499c5f66323272151db0e666cd34f78617522fb0c1604d31a27c50c206a85" dependencies = [ "js-sys", "wasm-bindgen", diff --git a/Cargo.toml b/Cargo.toml index eb0d6635ca36..444a48f012f2 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -57,6 +57,14 @@ napi = { version = "2.12.4", default-features = false, features = [ "serde-json", ] } napi-derive = "2.12.4" +js-sys = { version = "0.3" } +serde_repr = { version = "0.1.17" } +serde-wasm-bindgen = { version = "0.5" } +tsify = { version = "0.4.5" } +wasm-bindgen = { version = "0.2.89" } +wasm-bindgen-futures = { version = "0.4" } +wasm-rs-dbg = { 
version = "0.1.2" } +wasm-bindgen-test = { version = "0.3.0" } [workspace.dependencies.quaint] path = "quaint" diff --git a/libs/crosstarget-utils/Cargo.toml b/libs/crosstarget-utils/Cargo.toml new file mode 100644 index 000000000000..627efbf23c36 --- /dev/null +++ b/libs/crosstarget-utils/Cargo.toml @@ -0,0 +1,19 @@ +[package] +name = "crosstarget-utils" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +futures = "0.3" + +[target.'cfg(target_arch = "wasm32")'.dependencies] +js-sys.workspace = true +wasm-bindgen.workspace = true +wasm-bindgen-futures.workspace = true +tokio = { version = "1.25", features = ["macros", "sync"] } +pin-project = "1" + +[target.'cfg(not(target_arch = "wasm32"))'.dependencies] +tokio.workspace = true diff --git a/libs/crosstarget-utils/src/common.rs b/libs/crosstarget-utils/src/common.rs new file mode 100644 index 000000000000..92a1d5094e89 --- /dev/null +++ b/libs/crosstarget-utils/src/common.rs @@ -0,0 +1,23 @@ +use std::fmt::Display; + +#[derive(Debug)] +pub struct SpawnError; + +impl Display for SpawnError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "Failed to spawn a future") + } +} + +impl std::error::Error for SpawnError {} + +#[derive(Debug)] +pub struct TimeoutError; + +impl Display for TimeoutError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "Operation timed out") + } +} + +impl std::error::Error for TimeoutError {} diff --git a/libs/crosstarget-utils/src/lib.rs b/libs/crosstarget-utils/src/lib.rs new file mode 100644 index 000000000000..a41d8dd0f9a6 --- /dev/null +++ b/libs/crosstarget-utils/src/lib.rs @@ -0,0 +1,12 @@ +mod common; +#[cfg(target_arch = "wasm32")] +mod wasm; +#[cfg(target_arch = "wasm32")] +pub use crate::wasm::*; + +#[cfg(not(target_arch = "wasm32"))] +mod native; +#[cfg(not(target_arch = "wasm32"))] +pub use 
crate::native::*; + +pub use common::SpawnError; diff --git a/libs/crosstarget-utils/src/native/mod.rs b/libs/crosstarget-utils/src/native/mod.rs new file mode 100644 index 000000000000..b19a356ff8ff --- /dev/null +++ b/libs/crosstarget-utils/src/native/mod.rs @@ -0,0 +1,3 @@ +pub mod spawn; +pub mod task; +pub mod time; diff --git a/libs/crosstarget-utils/src/native/spawn.rs b/libs/crosstarget-utils/src/native/spawn.rs new file mode 100644 index 000000000000..cd1d5246d123 --- /dev/null +++ b/libs/crosstarget-utils/src/native/spawn.rs @@ -0,0 +1,11 @@ +use std::future::Future; + +use crate::common::SpawnError; + +pub async fn spawn_if_possible(future: F) -> Result +where + F: Future + 'static + Send, + F::Output: Send + 'static, +{ + tokio::spawn(future).await.map_err(|_| SpawnError) +} diff --git a/libs/crosstarget-utils/src/native/task.rs b/libs/crosstarget-utils/src/native/task.rs new file mode 100644 index 000000000000..017d6866eb2d --- /dev/null +++ b/libs/crosstarget-utils/src/native/task.rs @@ -0,0 +1,46 @@ +use futures::Future; +use tokio::sync::broadcast::{self}; + +pub struct JoinHandle { + handle: tokio::task::JoinHandle, + + sx_exit: Option>, +} + +impl JoinHandle { + pub fn abort(&mut self) { + if let Some(sx_exit) = self.sx_exit.as_ref() { + sx_exit.send(()).ok(); + } + + self.handle.abort(); + } +} + +pub fn spawn(future: T) -> JoinHandle +where + T: Future + Send + 'static, + T::Output: Send + 'static, +{ + spawn_with_sx_exit::(future, None) +} + +pub fn spawn_controlled(future_fn: Box) -> T>) -> JoinHandle +where + T: Future + Send + 'static, + T::Output: Send + 'static, +{ + let (sx_exit, rx_exit) = tokio::sync::broadcast::channel::<()>(1); + let future = future_fn(rx_exit); + + spawn_with_sx_exit::(future, Some(sx_exit)) +} + +fn spawn_with_sx_exit(future: T, sx_exit: Option>) -> JoinHandle +where + T: Future + Send + 'static, + T::Output: Send + 'static, +{ + let handle = tokio::spawn(future); + JoinHandle { handle, sx_exit } +} diff --git 
a/libs/crosstarget-utils/src/native/time.rs b/libs/crosstarget-utils/src/native/time.rs new file mode 100644 index 000000000000..3b154a27565c --- /dev/null +++ b/libs/crosstarget-utils/src/native/time.rs @@ -0,0 +1,35 @@ +use std::{ + future::Future, + time::{Duration, Instant}, +}; + +use crate::common::TimeoutError; + +pub struct ElapsedTimeCounter { + instant: Instant, +} + +impl ElapsedTimeCounter { + pub fn start() -> Self { + let instant = Instant::now(); + + Self { instant } + } + + pub fn elapsed_time(&self) -> Duration { + self.instant.elapsed() + } +} + +pub async fn sleep(duration: Duration) { + tokio::time::sleep(duration).await +} + +pub async fn timeout(duration: Duration, future: F) -> Result +where + F: Future + Send, +{ + let result = tokio::time::timeout(duration, future).await; + + result.map_err(|_| TimeoutError) +} diff --git a/libs/crosstarget-utils/src/wasm/mod.rs b/libs/crosstarget-utils/src/wasm/mod.rs new file mode 100644 index 000000000000..b19a356ff8ff --- /dev/null +++ b/libs/crosstarget-utils/src/wasm/mod.rs @@ -0,0 +1,3 @@ +pub mod spawn; +pub mod task; +pub mod time; diff --git a/libs/crosstarget-utils/src/wasm/spawn.rs b/libs/crosstarget-utils/src/wasm/spawn.rs new file mode 100644 index 000000000000..33ed1d21b3b7 --- /dev/null +++ b/libs/crosstarget-utils/src/wasm/spawn.rs @@ -0,0 +1,10 @@ +use std::future::Future; + +use crate::common::SpawnError; + +pub async fn spawn_if_possible(future: F) -> Result +where + F: Future + 'static, +{ + Ok(future.await) +} diff --git a/libs/crosstarget-utils/src/wasm/task.rs b/libs/crosstarget-utils/src/wasm/task.rs new file mode 100644 index 000000000000..80bbc6991c89 --- /dev/null +++ b/libs/crosstarget-utils/src/wasm/task.rs @@ -0,0 +1,64 @@ +use futures::Future; +use tokio::sync::{ + broadcast::{self}, + oneshot::{self}, +}; + +// Wasm-compatible alternative to `tokio::task::JoinHandle`. +// `pin_project` enables pin-projection and a `Pin`-compatible implementation of the `Future` trait. 
+#[pin_project::pin_project] +pub struct JoinHandle { + #[pin] + receiver: oneshot::Receiver, + + sx_exit: Option>, +} + +impl Future for JoinHandle { + type Output = Result; + + fn poll(mut self: std::pin::Pin<&mut Self>, cx: &mut std::task::Context<'_>) -> std::task::Poll { + // the `self.project()` method is provided by the `pin_project` macro + core::pin::Pin::new(&mut self.receiver).poll(cx) + } +} + +impl JoinHandle { + pub fn abort(&mut self) { + if let Some(sx_exit) = self.sx_exit.as_ref() { + sx_exit.send(()).ok(); + } + } +} + +pub fn spawn(future: T) -> JoinHandle +where + T: Future + 'static, + T::Output: Send + 'static, +{ + spawn_with_sx_exit::(future, None) +} + +pub fn spawn_controlled(future_fn: Box) -> T>) -> JoinHandle +where + T: Future + 'static, + T::Output: Send + 'static, +{ + let (sx_exit, rx_exit) = tokio::sync::broadcast::channel::<()>(1); + let future = future_fn(rx_exit); + spawn_with_sx_exit::(future, Some(sx_exit)) +} + +fn spawn_with_sx_exit(future: T, sx_exit: Option>) -> JoinHandle +where + T: Future + 'static, + T::Output: Send + 'static, +{ + let (sender, receiver) = oneshot::channel(); + wasm_bindgen_futures::spawn_local(async move { + let result = future.await; + sender.send(result).ok(); + }); + + JoinHandle { receiver, sx_exit } +} diff --git a/libs/crosstarget-utils/src/wasm/time.rs b/libs/crosstarget-utils/src/wasm/time.rs new file mode 100644 index 000000000000..1c230ba1eecc --- /dev/null +++ b/libs/crosstarget-utils/src/wasm/time.rs @@ -0,0 +1,57 @@ +use js_sys::{Date, Function, Promise}; +use std::future::Future; +use std::time::Duration; +use wasm_bindgen::prelude::*; +use wasm_bindgen_futures::JsFuture; + +use crate::common::TimeoutError; + +#[wasm_bindgen] +extern "C" { + + type Performance; + #[wasm_bindgen(js_name = "performance")] + static PERFORMANCE: Option; + + #[wasm_bindgen(method)] + fn now(this: &Performance) -> f64; + + #[wasm_bindgen(js_name = setTimeout)] + fn set_timeout(closure: &Function, millis: u32) 
-> f64; + +} + +pub struct ElapsedTimeCounter { + start_time: f64, +} + +impl ElapsedTimeCounter { + pub fn start() -> Self { + Self { start_time: now() } + } + + pub fn elapsed_time(&self) -> Duration { + Duration::from_millis((self.start_time - now()) as u64) + } +} + +pub async fn sleep(duration: Duration) { + let _ = JsFuture::from(Promise::new(&mut |resolve, _reject| { + set_timeout(&resolve, duration.as_millis() as u32); + })) + .await; +} + +pub async fn timeout(duration: Duration, future: F) -> Result +where + F: Future, +{ + tokio::select! { + result = future => Ok(result), + _ = sleep(duration) => Err(TimeoutError) + } +} + +fn now() -> f64 { + PERFORMANCE.as_ref().map(|p| p.now()).unwrap_or_else(Date::now) +} diff --git a/prisma-schema-wasm/Cargo.toml b/prisma-schema-wasm/Cargo.toml index 51638e55b1c1..38ef9328cb8a 100644 --- a/prisma-schema-wasm/Cargo.toml +++ b/prisma-schema-wasm/Cargo.toml @@ -7,6 +7,6 @@ edition = "2021" crate-type = ["cdylib"] [dependencies] -wasm-bindgen = "=0.2.88" +wasm-bindgen.workspace = true wasm-logger = { version = "0.2.0", optional = true } prisma-fmt = { path = "../prisma-fmt" } diff --git a/quaint/Cargo.toml b/quaint/Cargo.toml index 52a7edf72aca..254c27446c9b 100644 --- a/quaint/Cargo.toml +++ b/quaint/Cargo.toml @@ -29,12 +29,7 @@ docs = [] # way to access database-specific methods when you need extra control. 
expose-drivers = [] -native = [ - "postgresql-native", - "mysql-native", - "mssql-native", - "sqlite-native", -] +native = ["postgresql-native", "mysql-native", "mssql-native", "sqlite-native"] all = ["native", "pooled"] @@ -57,13 +52,7 @@ postgresql-native = [ ] postgresql = [] -mssql-native = [ - "mssql", - "tiberius", - "tokio-util", - "tokio/time", - "tokio/net", -] +mssql-native = ["mssql", "tiberius", "tokio-util", "tokio/time", "tokio/net"] mssql = [] mysql-native = ["mysql", "mysql_async", "tokio/time", "lru-cache"] @@ -100,6 +89,7 @@ mobc = { version = "0.8", optional = true } serde = { version = "1.0", optional = true } sqlformat = { version = "0.2.0", optional = true } uuid = { version = "1", features = ["v4"] } +crosstarget-utils = { path = "../libs/crosstarget-utils" } [dev-dependencies] once_cell = "1.3" diff --git a/quaint/src/connector/metrics.rs b/quaint/src/connector/metrics.rs index 2705a40b32b2..a0c4ef426988 100644 --- a/quaint/src/connector/metrics.rs +++ b/quaint/src/connector/metrics.rs @@ -1,7 +1,8 @@ use tracing::{info_span, Instrument}; use crate::ast::{Params, Value}; -use std::{future::Future, time::Instant}; +use crosstarget_utils::time::ElapsedTimeCounter; +use std::future::Future; pub async fn query<'a, F, T, U>(tag: &'static str, query: &'a str, params: &'a [Value<'_>], f: F) -> crate::Result where @@ -17,7 +18,7 @@ where F: FnOnce() -> U + 'a, U: Future>, { - let start = Instant::now(); + let start = ElapsedTimeCounter::start(); let res = f().await; let result = match res { @@ -34,19 +35,19 @@ where sqlformat::FormatOptions::default(), ); - trace_query(&query_fmt, params, result, start); + trace_query(&query_fmt, params, result, &start); } else { - trace_query(query, params, result, start); + trace_query(query, params, result, &start); }; } #[cfg(not(feature = "fmt-sql"))] { - trace_query(query, params, result, start); + trace_query(query, params, result, &start); } - histogram!(format!("{tag}.query.time"), start.elapsed()); - 
histogram!("prisma_datasource_queries_duration_histogram_ms", start.elapsed()); + histogram!(format!("{tag}.query.time"), start.elapsed_time()); + histogram!("prisma_datasource_queries_duration_histogram_ms", start.elapsed_time()); increment_counter!("prisma_datasource_queries_total"); res @@ -57,7 +58,7 @@ pub(crate) async fn check_out(f: F) -> std::result::Result>>, { - let start = Instant::now(); + let start = ElapsedTimeCounter::start(); let res = f.await; let result = match res { @@ -67,24 +68,24 @@ where tracing::trace!( message = "Fetched a connection from the pool", - duration_ms = start.elapsed().as_millis() as u64, + duration_ms = start.elapsed_time().as_millis() as u64, item_type = "query", is_query = true, result, ); - histogram!("pool.check_out", start.elapsed()); + histogram!("pool.check_out", start.elapsed_time()); res } -fn trace_query<'a>(query: &'a str, params: &'a [Value<'_>], result: &str, start: Instant) { +fn trace_query<'a>(query: &'a str, params: &'a [Value<'_>], result: &str, start: &ElapsedTimeCounter) { tracing::debug!( query = %query, params = %Params(params), result, item_type = "query", is_query = true, - duration_ms = start.elapsed().as_millis() as u64, + duration_ms = start.elapsed_time().as_millis() as u64, ); } diff --git a/quaint/src/lib.rs b/quaint/src/lib.rs index 1458a6ae1615..45c2a10a1698 100644 --- a/quaint/src/lib.rs +++ b/quaint/src/lib.rs @@ -113,7 +113,8 @@ mod macros; #[macro_use] extern crate metrics; -extern crate bigdecimal; +pub extern crate bigdecimal; +pub extern crate chrono; pub mod ast; pub mod connector; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/assertion_violation_error.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/assertion_violation_error.rs index 73455011d04e..62add25c3e72 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/assertion_violation_error.rs +++ 
b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/assertion_violation_error.rs @@ -2,7 +2,15 @@ use query_engine_tests::*; #[test_suite(schema(generic))] mod raw_params { - #[connector_test(only(Postgres), exclude(Postgres("neon.js"), Postgres("pg.js")))] + #[connector_test( + only(Postgres), + exclude( + Postgres("neon.js"), + Postgres("pg.js"), + Postgres("neon.js.wasm"), + Postgres("pg.js.wasm") + ) + )] async fn value_too_many_bind_variables(runner: Runner) -> TestResult<()> { let n = 32768; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/interactive_tx.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/interactive_tx.rs index 33908a9e079e..4372b23c282d 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/interactive_tx.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/interactive_tx.rs @@ -1,7 +1,7 @@ use query_engine_tests::test_suite; use std::borrow::Cow; -#[test_suite(schema(generic), exclude(Vitess("planetscale.js")))] +#[test_suite(schema(generic), exclude(Vitess("planetscale.js", "planetscale.js.wasm")))] mod interactive_tx { use query_engine_tests::*; use tokio::time; @@ -573,7 +573,7 @@ mod itx_isolation { use query_engine_tests::*; // All (SQL) connectors support serializable. 
- #[connector_test(exclude(MongoDb, Vitess("planetscale.js")))] + #[connector_test(exclude(MongoDb, Vitess("planetscale.js", "planetscale.js.wasm")))] async fn basic_serializable(mut runner: Runner) -> TestResult<()> { let tx_id = runner.start_tx(5000, 5000, Some("Serializable".to_owned())).await?; runner.set_active_tx(tx_id.clone()); @@ -595,7 +595,7 @@ mod itx_isolation { Ok(()) } - #[connector_test(exclude(MongoDb, Vitess("planetscale.js")))] + #[connector_test(exclude(MongoDb, Vitess("planetscale.js", "planetscale.js.wasm")))] async fn casing_doesnt_matter(mut runner: Runner) -> TestResult<()> { let tx_id = runner.start_tx(5000, 5000, Some("sErIaLiZaBlE".to_owned())).await?; runner.set_active_tx(tx_id.clone()); diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/metrics.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/metrics.rs index cd270bb334c6..dff1ecdb03a5 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/metrics.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/metrics.rs @@ -3,10 +3,9 @@ use query_engine_tests::test_suite; #[test_suite( schema(generic), exclude( - Vitess("planetscale.js"), - Postgres("neon.js"), - Postgres("pg.js"), - Sqlite("libsql.js") + Vitess("planetscale.js", "planetscale.js.wasm"), + Postgres("neon.js", "pg.js", "neon.js.wasm", "pg.js.wasm"), + Sqlite("libsql.js", "libsql.js.wasm") ) )] mod metrics { diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/multi_schema.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/multi_schema.rs index 29c93689f542..40f646277f2c 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/multi_schema.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/multi_schema.rs @@ -1,6 +1,9 @@ use query_engine_tests::test_suite; -#[test_suite(capabilities(MultiSchema), exclude(Mysql, Vitess("planetscale.js")))] +#[test_suite( + 
capabilities(MultiSchema), + exclude(Mysql, Vitess("planetscale.js", "planetscale.js.wasm")) +)] mod multi_schema { use query_engine_tests::*; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/occ.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/occ.rs index b495c8627e5a..d074a223531e 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/occ.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/occ.rs @@ -112,7 +112,10 @@ mod occ { assert_eq!(booked_user_id, found_booked_user_id); } - #[connector_test(schema(occ_simple), exclude(MongoDB, CockroachDb, Vitess("planetscale.js")))] + #[connector_test( + schema(occ_simple), + exclude(MongoDB, CockroachDb, Vitess("planetscale.js", "planetscale.js.wasm")) + )] async fn occ_update_many_test(runner: Runner) -> TestResult<()> { let runner = Arc::new(runner); @@ -127,7 +130,10 @@ mod occ { Ok(()) } - #[connector_test(schema(occ_simple), exclude(CockroachDb, Vitess("planetscale.js")))] + #[connector_test( + schema(occ_simple), + exclude(CockroachDb, Vitess("planetscale.js", "planetscale.js.wasm")) + )] async fn occ_update_test(runner: Runner) -> TestResult<()> { let runner = Arc::new(runner); @@ -158,7 +164,7 @@ mod occ { Ok(()) } - #[connector_test(schema(occ_simple), exclude(Vitess("planetscale.js")))] + #[connector_test(schema(occ_simple), exclude(Vitess("planetscale.js", "planetscale.js.wasm")))] async fn occ_delete_test(runner: Runner) -> TestResult<()> { let runner = Arc::new(runner); diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/set_default.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/set_default.rs index 40ef54ed11f1..d96c3d3576ff 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/set_default.rs +++ 
b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/set_default.rs @@ -2,7 +2,10 @@ use indoc::indoc; use query_engine_tests::*; -#[test_suite(suite = "setdefault_onD_1to1_req", exclude(MongoDb, MySQL, Vitess("planetscale.js")))] +#[test_suite( + suite = "setdefault_onD_1to1_req", + exclude(MongoDb, MySQL, Vitess("planetscale.js", "planetscale.js.wasm")) +)] mod one2one_req { fn required_with_default() -> String { let schema = indoc! { @@ -66,7 +69,10 @@ mod one2one_req { } /// Deleting the parent reconnects the child to the default and fails (the default doesn't exist). - #[connector_test(schema(required_with_default), exclude(MongoDb, MySQL, Vitess("planetscale.js")))] + #[connector_test( + schema(required_with_default), + exclude(MongoDb, MySQL, Vitess("planetscale.js", "planetscale.js.wasm")) + )] async fn delete_parent_no_exist_fail(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, child: { create: { id: 1 }}}) { id }}"#), @@ -103,7 +109,10 @@ mod one2one_req { } } -#[test_suite(suite = "setdefault_onD_1to1_opt", exclude(MongoDb, MySQL, Vitess("planetscale.js")))] +#[test_suite( + suite = "setdefault_onD_1to1_opt", + exclude(MongoDb, MySQL, Vitess("planetscale.js", "planetscale.js.wasm")) +)] mod one2one_opt { fn optional_with_default() -> String { let schema = indoc! { @@ -167,7 +176,10 @@ mod one2one_opt { } /// Deleting the parent reconnects the child to the default and fails (the default doesn't exist). 
- #[connector_test(schema(optional_with_default), exclude(MongoDb, MySQL, Vitess("planetscale.js")))] + #[connector_test( + schema(optional_with_default), + exclude(MongoDb, MySQL, Vitess("planetscale.js", "planetscale.js.wasm")) + )] async fn delete_parent_no_exist_fail(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, child: { create: { id: 1 }}}) { id }}"#), @@ -206,7 +218,10 @@ mod one2one_opt { } } -#[test_suite(suite = "setdefault_onD_1toM_req", exclude(MongoDb, MySQL, Vitess("planetscale.js")))] +#[test_suite( + suite = "setdefault_onD_1toM_req", + exclude(MongoDb, MySQL, Vitess("planetscale.js", "planetscale.js.wasm")) +)] mod one2many_req { fn required_with_default() -> String { let schema = indoc! { @@ -270,7 +285,10 @@ mod one2many_req { } /// Deleting the parent reconnects the child to the default and fails (the default doesn't exist). - #[connector_test(schema(required_with_default), exclude(MongoDb, MySQL, Vitess("planetscale.js")))] + #[connector_test( + schema(required_with_default), + exclude(MongoDb, MySQL, Vitess("planetscale.js", "planetscale.js.wasm")) + )] async fn delete_parent_no_exist_fail(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, children: { create: { id: 1 }}}) { id }}"#), @@ -307,7 +325,10 @@ mod one2many_req { } } -#[test_suite(suite = "setdefault_onD_1toM_opt", exclude(MongoDb, MySQL, Vitess("planetscale.js")))] +#[test_suite( + suite = "setdefault_onD_1toM_opt", + exclude(MongoDb, MySQL, Vitess("planetscale.js", "planetscale.js.wasm")) +)] mod one2many_opt { fn optional_with_default() -> String { let schema = indoc! { @@ -371,7 +392,10 @@ mod one2many_opt { } /// Deleting the parent reconnects the child to the default and fails (the default doesn't exist). 
- #[connector_test(schema(optional_with_default), exclude(MongoDb, MySQL, Vitess("planetscale.js")))] + #[connector_test( + schema(optional_with_default), + exclude(MongoDb, MySQL, Vitess("planetscale.js", "planetscale.js.wasm")) + )] async fn delete_parent_no_exist_fail(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, children: { create: { id: 1 }}}) { id }}"#), diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/max_integer.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/max_integer.rs index 78206f6394a6..e00b2d22e198 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/max_integer.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/max_integer.rs @@ -187,7 +187,16 @@ mod max_integer { schema.to_owned() } - #[connector_test(schema(overflow_pg), only(Postgres), exclude(Postgres("neon.js"), Postgres("pg.js")))] + #[connector_test( + schema(overflow_pg), + only(Postgres), + exclude( + Postgres("neon.js"), + Postgres("pg.js"), + Postgres("neon.js.wasm"), + Postgres("pg.js.wasm") + ) + )] async fn unfitted_int_should_fail_pg_quaint(runner: Runner) -> TestResult<()> { // int assert_error!( diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_12572.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_12572.rs index 35f056f8fa80..a107b354d159 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_12572.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_12572.rs @@ -26,7 +26,11 @@ mod prisma_12572 { .to_owned() } - #[connector_test] + #[connector_test(exclude( + Postgres("pg.js.wasm", "neon.js.wasm"), + Sqlite("libsql.js.wasm"), + Vitess("planetscale.js.wasm") + ))] async fn 
all_generated_timestamps_are_the_same(runner: Runner) -> TestResult<()> { runner .query(r#"mutation { createOneTest1(data: {id:"one", test2s: { create: {id: "two"}}}) { id }}"#) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_15204.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_15204.rs index 8582c14d0bc0..9f4ccdcb3b11 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_15204.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_15204.rs @@ -24,7 +24,11 @@ mod conversion_error { schema.to_owned() } - #[connector_test(schema(schema_int), only(Sqlite), exclude(Sqlite("libsql.js")))] + #[connector_test( + schema(schema_int), + only(Sqlite), + exclude(Sqlite("libsql.js"), Sqlite("libsql.js.wasm")) + )] async fn convert_to_int_sqlite_quaint(runner: Runner) -> TestResult<()> { create_test_data(&runner).await?; @@ -38,7 +42,7 @@ mod conversion_error { Ok(()) } - #[connector_test(schema(schema_int), only(Sqlite("libsql.js")))] + #[connector_test(schema(schema_int), only(Sqlite("libsql.js"), Sqlite("libsql.js.wasm")))] async fn convert_to_int_sqlite_js(runner: Runner) -> TestResult<()> { create_test_data(&runner).await?; @@ -52,7 +56,11 @@ mod conversion_error { Ok(()) } - #[connector_test(schema(schema_bigint), only(Sqlite), exclude(Sqlite("libsql.js")))] + #[connector_test( + schema(schema_bigint), + only(Sqlite), + exclude(Sqlite("libsql.js"), Sqlite("libsql.js.wasm")) + )] async fn convert_to_bigint_sqlite_quaint(runner: Runner) -> TestResult<()> { create_test_data(&runner).await?; @@ -66,7 +74,7 @@ mod conversion_error { Ok(()) } - #[connector_test(schema(schema_bigint), only(Sqlite("libsql.js")))] + #[connector_test(schema(schema_bigint), only(Sqlite("libsql.js"), Sqlite("libsql.js.wasm")))] async fn convert_to_bigint_sqlite_js(runner: Runner) -> TestResult<()> { create_test_data(&runner).await?; 
diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_17103.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_17103.rs index c9065ec54c58..8168b66a3a0f 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_17103.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_17103.rs @@ -21,7 +21,7 @@ mod prisma_17103 { schema.to_owned() } - #[connector_test(exclude(Vitess("planetscale.js")))] + #[connector_test(exclude(Vitess("planetscale.js", "planetscale.js.wasm")))] async fn regression(runner: Runner) -> TestResult<()> { run_query!( &runner, diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_7434.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_7434.rs index e5fa8388d66e..8e5fb2457b15 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_7434.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_7434.rs @@ -4,7 +4,13 @@ use query_engine_tests::*; mod not_in_batching { use query_engine_tests::Runner; - #[connector_test] + #[connector_test(exclude( + CockroachDb, + Postgres("pg.js.wasm"), + Postgres("neon.js.wasm"), + Sqlite("libsql.js.wasm"), + Vitess("planetscale.js.wasm") + ))] async fn not_in_batch_filter(runner: Runner) -> TestResult<()> { runner.query(r#"mutation { createManyTestModel(data: [{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{}]) { count }}"#).await?.assert_success(); diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/avg.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/avg.rs index 4793fa24ae2a..387d05dc5e21 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/avg.rs +++ 
b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/avg.rs @@ -33,7 +33,7 @@ mod aggregation_avg { Ok(()) } - #[connector_test(exclude(MongoDb, Vitess("planetscale.js")))] + #[connector_test(exclude(MongoDb, Vitess("planetscale.js", "planetscale.js.wasm")))] async fn avg_with_all_sorts_of_query_args(runner: Runner) -> TestResult<()> { create_row(&runner, r#"{ id: 1, float: 5.5, int: 5, bInt: "5" }"#).await?; create_row(&runner, r#"{ id: 2, float: 4.5, int: 10, bInt: "10" }"#).await?; @@ -126,7 +126,7 @@ mod decimal_aggregation_avg { Ok(()) } - #[connector_test(exclude(MongoDb, Vitess("planetscale.js")))] + #[connector_test(exclude(MongoDb, Vitess("planetscale.js", "planetscale.js.wasm")))] async fn avg_with_all_sorts_of_query_args(runner: Runner) -> TestResult<()> { create_row(&runner, r#"{ id: 1, decimal: "5.5" }"#).await?; create_row(&runner, r#"{ id: 2, decimal: "4.5" }"#).await?; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/combination_spec.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/combination_spec.rs index 3c1f1b092690..e7116894cffe 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/combination_spec.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/combination_spec.rs @@ -87,7 +87,7 @@ mod combinations { } // Mongo precision issue. - #[connector_test(exclude(MongoDB, Vitess("planetscale.js")))] + #[connector_test(exclude(MongoDB, Vitess("planetscale.js", "planetscale.js.wasm")))] async fn with_query_args(runner: Runner) -> TestResult<()> { create_row(&runner, r#"{ id: "1", float: 5.5, int: 5 }"#).await?; create_row(&runner, r#"{ id: "2", float: 4.5, int: 10 }"#).await?; @@ -369,7 +369,7 @@ mod decimal_combinations { } // Mongo precision issue. 
- #[connector_test(exclude(MongoDB, Vitess("planetscale.js")))] + #[connector_test(exclude(MongoDB, Vitess("planetscale.js", "planetscale.js.wasm")))] async fn with_query_args(runner: Runner) -> TestResult<()> { create_row(&runner, r#"{ id: "1", dec: "5.5" }"#).await?; create_row(&runner, r#"{ id: "2", dec: "4.5" }"#).await?; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/count.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/count.rs index 78ab88fd59c6..043419a58b2d 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/count.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/count.rs @@ -27,7 +27,7 @@ mod aggregation_count { Ok(()) } - #[connector_test(exclude(Vitess("planetscale.js")))] + #[connector_test(exclude(Vitess("planetscale.js", "planetscale.js.wasm")))] async fn count_with_all_sorts_of_query_args(runner: Runner) -> TestResult<()> { create_row(&runner, r#"{ id: 1, string: "1" }"#).await?; create_row(&runner, r#"{ id: 2, string: "2" }"#).await?; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/max.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/max.rs index 12f9b6861892..9c6c055e939d 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/max.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/max.rs @@ -30,7 +30,7 @@ mod aggregation_max { Ok(()) } - #[connector_test(exclude(Vitess("planetscale.js")))] + #[connector_test(exclude(Vitess("planetscale.js", "planetscale.js.wasm")))] async fn max_with_all_sorts_of_query_args(runner: Runner) -> TestResult<()> { create_row(&runner, r#"{ id: 1, float: 5.5, int: 5, bInt: "5", string: "2" }"#).await?; create_row(&runner, r#"{ id: 2, float: 4.5, int: 10, bInt: "10", string: "f" }"#).await?; @@ -120,7 +120,7 
@@ mod decimal_aggregation_max { Ok(()) } - #[connector_test(exclude(Vitess("planetscale.js")))] + #[connector_test(exclude(Vitess("planetscale.js", "planetscale.js.wasm")))] async fn max_with_all_sorts_of_query_args(runner: Runner) -> TestResult<()> { create_row(&runner, r#"{ id: 1, decimal: "5.5" }"#).await?; create_row(&runner, r#"{ id: 2, decimal: "4.5" }"#).await?; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/min.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/min.rs index 332a5e10707f..c5ce60653d8f 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/min.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/min.rs @@ -30,7 +30,7 @@ mod aggregation_min { Ok(()) } - #[connector_test(exclude(Vitess("planetscale.js")))] + #[connector_test(exclude(Vitess("planetscale.js", "planetscale.js.wasm")))] async fn min_with_all_sorts_of_query_args(runner: Runner) -> TestResult<()> { create_row(&runner, r#"{ id: 1, float: 5.5, int: 5, bInt: "5", string: "2" }"#).await?; create_row(&runner, r#"{ id: 2, float: 4.5, int: 10, bInt: "10", string: "f" }"#).await?; @@ -120,7 +120,7 @@ mod decimal_aggregation_min { Ok(()) } - #[connector_test(exclude(Vitess("planetscale.js")))] + #[connector_test(exclude(Vitess("planetscale.js", "planetscale.js.wasm")))] async fn min_with_all_sorts_of_query_args(runner: Runner) -> TestResult<()> { create_row(&runner, r#"{ id: 1, decimal: "5.5" }"#).await?; create_row(&runner, r#"{ id: 2, decimal: "4.5" }"#).await?; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/sum.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/sum.rs index 14d194a1a4f4..b713d216edb7 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/sum.rs +++ 
b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/sum.rs @@ -30,7 +30,7 @@ mod aggregation_sum { Ok(()) } - #[connector_test(exclude(Vitess("planetscale.js")))] + #[connector_test(exclude(Vitess("planetscale.js", "planetscale.js.wasm")))] async fn sum_with_all_sorts_of_query_args(runner: Runner) -> TestResult<()> { create_row(&runner, r#"{ id: 1, float: 5.5, int: 5, bInt: "5" }"#).await?; create_row(&runner, r#"{ id: 2, float: 4.5, int: 10, bInt: "10" }"#).await?; @@ -120,7 +120,7 @@ mod decimal_aggregation_sum { Ok(()) } - #[connector_test(exclude(Vitess("planetscale.js")))] + #[connector_test(exclude(Vitess("planetscale.js", "planetscale.js.wasm")))] async fn sum_with_all_sorts_of_query_args(runner: Runner) -> TestResult<()> { create_row(&runner, r#"{ id: 1, decimal: "5.5" }"#).await?; create_row(&runner, r#"{ id: 2, decimal: "4.5" }"#).await?; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batch/in_selection_batching.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batch/in_selection_batching.rs index f5e7face6768..aacdb50f687c 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batch/in_selection_batching.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batch/in_selection_batching.rs @@ -88,7 +88,13 @@ mod isb { Ok(()) } - #[connector_test(exclude(MongoDb))] + #[connector_test(exclude( + MongoDb, + Postgres("pg.js.wasm"), + Postgres("neon.js.wasm"), + Sqlite("libsql.js.wasm"), + Vitess("planetscale.js.wasm") + ))] async fn order_by_aggregation_should_fail(runner: Runner) -> TestResult<()> { create_test_data(&runner).await?; @@ -103,7 +109,16 @@ mod isb { Ok(()) } - #[connector_test(exclude(MongoDb), capabilities(FullTextSearchWithoutIndex))] + #[connector_test( + capabilities(FullTextSearchWithoutIndex), + exclude( + MongoDb, + Postgres("pg.js.wasm"), + Postgres("neon.js.wasm"), + Sqlite("libsql.js.wasm"), + 
Vitess("planetscale.js.wasm") + ) + )] async fn order_by_relevance_should_fail(runner: Runner) -> TestResult<()> { create_test_data(&runner).await?; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batch/transactional_batch.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batch/transactional_batch.rs index 2c332f95f29a..f4ad29cf0584 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batch/transactional_batch.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batch/transactional_batch.rs @@ -44,7 +44,7 @@ mod transactional { Ok(()) } - #[connector_test(exclude(Vitess("planetscale.js")))] + #[connector_test(exclude(Vitess("planetscale.js", "planetscale.js.wasm")))] async fn one_success_one_fail(runner: Runner) -> TestResult<()> { let queries = vec![ r#"mutation { createOneModelA(data: { id: 1 }) { id }}"#.to_string(), @@ -77,7 +77,7 @@ mod transactional { Ok(()) } - #[connector_test(exclude(Vitess("planetscale.js")))] + #[connector_test(exclude(Vitess("planetscale.js", "planetscale.js.wasm")))] async fn one_query(runner: Runner) -> TestResult<()> { // Existing ModelA in the DB will prevent the nested ModelA creation in the batch. 
insta::assert_snapshot!( @@ -104,7 +104,7 @@ mod transactional { Ok(()) } - #[connector_test(exclude(MongoDb, Vitess("planetscale.js")))] + #[connector_test(exclude(MongoDb, Vitess("planetscale.js", "planetscale.js.wasm")))] async fn valid_isolation_level(runner: Runner) -> TestResult<()> { let queries = vec![r#"mutation { createOneModelB(data: { id: 1 }) { id }}"#.to_string()]; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/data_types/bytes.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/data_types/bytes.rs index a4957d75e1ab..265a75763794 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/data_types/bytes.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/data_types/bytes.rs @@ -1,6 +1,14 @@ use query_engine_tests::*; -#[test_suite(schema(common_nullable_types))] +#[test_suite( + schema(common_nullable_types), + exclude( + Postgres("pg.js.wasm"), + Postgres("neon.js.wasm"), + Sqlite("libsql.js.wasm"), + Vitess("planetscale.js.wasm") + ) +)] mod bytes { use query_engine_tests::{run_query, EngineProtocol, Runner}; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/data_types/through_relation.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/data_types/through_relation.rs index b2af72ab955e..8baceb69e98b 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/data_types/through_relation.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/data_types/through_relation.rs @@ -34,7 +34,11 @@ mod scalar_relations { // TODO: fix https://github.com/prisma/team-orm/issues/684, https://github.com/prisma/team-orm/issues/685 and unexclude DAs #[connector_test( schema(schema_common), - exclude(Postgres("pg.js", "neon.js"), Vitess("planetscale.js")) + exclude( + Postgres("pg.js", "neon.js", "pg.js.wasm", "neon.js.wasm"), + Vitess("planetscale.js", 
"planetscale.js.wasm"), + Sqlite("libsql.js.wasm") + ) )] async fn common_types(runner: Runner) -> TestResult<()> { create_common_children(&runner).await?; @@ -236,7 +240,7 @@ mod scalar_relations { #[connector_test( schema(schema_scalar_lists), capabilities(ScalarLists), - exclude(Postgres("pg.js", "neon.js")) + exclude(Postgres("pg.js", "neon.js", "pg.js.wasm", "neon.js.wasm")) )] async fn scalar_lists(runner: Runner) -> TestResult<()> { create_child( diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/bigint_filter.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/bigint_filter.rs index 8230c7e2f04b..16e5804cd7f6 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/bigint_filter.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/bigint_filter.rs @@ -1,7 +1,10 @@ use super::common_test_data; use query_engine_tests::*; -#[test_suite(schema(schemas::common_nullable_types))] +#[test_suite( + schema(schemas::common_nullable_types), + exclude(Sqlite("libsql.js.wasm"), Vitess("planetscale.js.wasm")) +)] mod bigint_filter_spec { use query_engine_tests::run_query; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/bytes_filter.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/bytes_filter.rs index dd8963dca6e8..58ec7e08f8c8 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/bytes_filter.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/bytes_filter.rs @@ -1,7 +1,10 @@ use super::common_test_data; use query_engine_tests::*; -#[test_suite(schema(schemas::common_nullable_types))] +#[test_suite( + schema(schemas::common_nullable_types), + exclude(Sqlite("libsql.js.wasm"), Vitess("planetscale.js.wasm")) +)] mod bytes_filter_spec { use query_engine_tests::run_query; diff --git 
a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/field_reference/bigint_filter.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/field_reference/bigint_filter.rs index bcd12fb1b5b7..0ef65c7af43a 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/field_reference/bigint_filter.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/field_reference/bigint_filter.rs @@ -6,7 +6,10 @@ mod bigint_filter { use super::setup; use query_engine_tests::run_query; - #[connector_test(schema(setup::common_types))] + #[connector_test( + schema(setup::common_types), + exclude(Sqlite("libsql.js.wasm"), Vitess("planetscale.js.wasm")) + )] async fn basic_where(runner: Runner) -> TestResult<()> { setup::test_data_common_types(&runner).await?; @@ -28,7 +31,10 @@ mod bigint_filter { Ok(()) } - #[connector_test(schema(setup::common_types))] + #[connector_test( + schema(setup::common_types), + exclude(Sqlite("libsql.js.wasm"), Vitess("planetscale.js.wasm")) + )] async fn numeric_comparison_filters(runner: Runner) -> TestResult<()> { setup::test_data_common_types(&runner).await?; @@ -137,7 +143,11 @@ mod bigint_filter { Ok(()) } - #[connector_test(schema(setup::common_list_types), capabilities(ScalarLists))] + #[connector_test( + schema(setup::common_list_types), + exclude(Postgres("pg.js.wasm", "neon.js.wasm")), + capabilities(ScalarLists) + )] async fn scalar_list_filters(runner: Runner) -> TestResult<()> { setup::test_data_list_common(&runner).await?; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/field_reference/bytes_filter.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/field_reference/bytes_filter.rs index bcb4a76c6158..a77bf6e765b2 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/field_reference/bytes_filter.rs +++ 
b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/field_reference/bytes_filter.rs @@ -6,7 +6,10 @@ mod bytes_filter { use super::setup; use query_engine_tests::run_query; - #[connector_test(schema(setup::common_types))] + #[connector_test( + schema(setup::common_types), + exclude(Sqlite("libsql.js.wasm"), Vitess("planetscale.js.wasm")) + )] async fn basic_where(runner: Runner) -> TestResult<()> { setup::test_data_common_types(&runner).await?; @@ -28,7 +31,11 @@ mod bytes_filter { Ok(()) } - #[connector_test(schema(setup::common_mixed_types), capabilities(ScalarLists))] + #[connector_test( + schema(setup::common_mixed_types), + exclude(Postgres("pg.js.wasm", "neon.js.wasm")), + capabilities(ScalarLists) + )] async fn inclusion_filter(runner: Runner) -> TestResult<()> { setup::test_data_common_mixed_types(&runner).await?; @@ -50,7 +57,11 @@ mod bytes_filter { Ok(()) } - #[connector_test(schema(setup::common_list_types), capabilities(ScalarLists))] + #[connector_test( + schema(setup::common_list_types), + exclude(Postgres("pg.js.wasm", "neon.js.wasm")), + capabilities(ScalarLists) + )] async fn scalar_list_filters(runner: Runner) -> TestResult<()> { setup::test_data_list_common(&runner).await?; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/field_reference/datetime_filter.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/field_reference/datetime_filter.rs index 327379bd4903..2753471bc635 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/field_reference/datetime_filter.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/field_reference/datetime_filter.rs @@ -6,7 +6,10 @@ mod datetime_filter { use super::setup; use query_engine_tests::run_query; - #[connector_test(schema(setup::common_types))] + #[connector_test( + schema(setup::common_types), + exclude(Sqlite("libsql.js.wasm"), 
Vitess("planetscale.js.wasm")) + )] async fn basic_where(runner: Runner) -> TestResult<()> { setup::test_data_common_types(&runner).await?; @@ -28,7 +31,10 @@ mod datetime_filter { Ok(()) } - #[connector_test(schema(setup::common_types))] + #[connector_test( + schema(setup::common_types), + exclude(Sqlite("libsql.js.wasm"), Vitess("planetscale.js.wasm")) + )] async fn numeric_comparison_filters(runner: Runner) -> TestResult<()> { setup::test_data_common_types(&runner).await?; @@ -137,7 +143,11 @@ mod datetime_filter { Ok(()) } - #[connector_test(schema(setup::common_list_types), capabilities(ScalarLists))] + #[connector_test( + schema(setup::common_list_types), + capabilities(ScalarLists), + exclude(Postgres("pg.js.wasm", "neon.js.wasm")) + )] async fn scalar_list_filters(runner: Runner) -> TestResult<()> { setup::test_data_list_common(&runner).await?; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/field_reference/float_filter.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/field_reference/float_filter.rs index 5dfae5f09c36..f40f73bbc180 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/field_reference/float_filter.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/field_reference/float_filter.rs @@ -6,7 +6,10 @@ mod float_filter { use super::setup; use query_engine_tests::run_query; - #[connector_test(schema(setup::common_types))] + #[connector_test( + schema(setup::common_types), + exclude(Sqlite("libsql.js.wasm"), Vitess("planetscale.js.wasm")) + )] async fn basic_where(runner: Runner) -> TestResult<()> { setup::test_data_common_types(&runner).await?; @@ -28,7 +31,10 @@ mod float_filter { Ok(()) } - #[connector_test(schema(setup::common_types))] + #[connector_test( + schema(setup::common_types), + exclude(Sqlite("libsql.js.wasm"), Vitess("planetscale.js.wasm")) + )] async fn numeric_comparison_filters(runner: Runner) 
-> TestResult<()> { setup::test_data_common_types(&runner).await?; @@ -137,7 +143,11 @@ mod float_filter { Ok(()) } - #[connector_test(schema(setup::common_list_types), capabilities(ScalarLists))] + #[connector_test( + schema(setup::common_list_types), + exclude(Postgres("pg.js.wasm", "neon.js.wasm")), + capabilities(ScalarLists) + )] async fn scalar_list_filters(runner: Runner) -> TestResult<()> { setup::test_data_list_common(&runner).await?; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/field_reference/int_filter.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/field_reference/int_filter.rs index 972539ec1f15..cedbb81c3a1f 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/field_reference/int_filter.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/field_reference/int_filter.rs @@ -6,7 +6,10 @@ mod int_filter { use super::setup; use query_engine_tests::run_query; - #[connector_test(schema(setup::common_types))] + #[connector_test( + schema(setup::common_types), + exclude(Sqlite("libsql.js.wasm"), Vitess("planetscale.js.wasm")) + )] async fn basic_where(runner: Runner) -> TestResult<()> { setup::test_data_common_types(&runner).await?; @@ -28,7 +31,10 @@ mod int_filter { Ok(()) } - #[connector_test(schema(setup::common_types))] + #[connector_test( + schema(setup::common_types), + exclude(Sqlite("libsql.js.wasm"), Vitess("planetscale.js.wasm")) + )] async fn numeric_comparison_filters(runner: Runner) -> TestResult<()> { setup::test_data_common_types(&runner).await?; @@ -137,7 +143,11 @@ mod int_filter { Ok(()) } - #[connector_test(schema(setup::common_list_types), capabilities(ScalarLists))] + #[connector_test( + schema(setup::common_list_types), + exclude(Postgres("pg.js.wasm", "neon.js.wasm")), + capabilities(ScalarLists) + )] async fn scalar_list_filters(runner: Runner) -> TestResult<()> { 
setup::test_data_list_common(&runner).await?; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/field_reference/json_filter.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/field_reference/json_filter.rs index b865731161c2..2666e8c80900 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/field_reference/json_filter.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/field_reference/json_filter.rs @@ -126,7 +126,7 @@ mod json_filter { Ok(()) } - #[connector_test(schema(schema), exclude(MySQL(5.6), Vitess("planetscale.js")))] + #[connector_test(schema(schema), exclude(MySQL(5.6), Vitess("planetscale.js", "planetscale.js.wasm")))] async fn string_comparison_filters(runner: Runner) -> TestResult<()> { test_string_data(&runner).await?; @@ -169,7 +169,7 @@ mod json_filter { Ok(()) } - #[connector_test(schema(schema), exclude(MySQL(5.6), Vitess("planetscale.js")))] + #[connector_test(schema(schema), exclude(MySQL(5.6), Vitess("planetscale.js", "planetscale.js.wasm")))] async fn array_comparison_filters(runner: Runner) -> TestResult<()> { test_array_data(&runner).await?; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/field_reference/string_filter.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/field_reference/string_filter.rs index f9c2e6e06acc..c62821ef4604 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/field_reference/string_filter.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/field_reference/string_filter.rs @@ -6,7 +6,7 @@ mod string_filter { use super::setup; use query_engine_tests::run_query; - #[connector_test] + #[connector_test(exclude(Sqlite("libsql.js.wasm"), Vitess("planetscale.js.wasm")))] async fn basic_where_sensitive(runner: Runner) -> TestResult<()> { 
setup::test_data_common_types(&runner).await?; @@ -50,7 +50,7 @@ mod string_filter { Ok(()) } - #[connector_test] + #[connector_test(exclude(Sqlite("libsql.js.wasm"), Vitess("planetscale.js.wasm")))] async fn numeric_comparison_filters_sensitive(runner: Runner) -> TestResult<()> { setup::test_data_common_types(&runner).await?; @@ -225,7 +225,7 @@ mod string_filter { Ok(()) } - #[connector_test] + #[connector_test(exclude(Sqlite("libsql.js.wasm"), Vitess("planetscale.js.wasm")))] async fn string_comparison_filters_sensitive(runner: Runner) -> TestResult<()> { setup::test_data_common_types(&runner).await?; run_query!( @@ -435,7 +435,11 @@ mod string_filter { Ok(()) } - #[connector_test(schema(setup::common_list_types), capabilities(ScalarLists))] + #[connector_test( + schema(setup::common_list_types), + exclude(Postgres("pg.js.wasm", "neon.js.wasm")), + capabilities(ScalarLists) + )] async fn scalar_list_filters_sensitive(runner: Runner) -> TestResult<()> { setup::test_data_list_common(&runner).await?; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/json.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/json.rs index d1b62a086153..ca8cc885798a 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/json.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/json.rs @@ -212,9 +212,8 @@ mod json { #[connector_test( schema(json_opt), exclude( - Vitess("planetscale.js"), - Postgres("neon.js"), - Postgres("pg.js"), + Vitess("planetscale.js", "planetscale.js.wasm"), + Postgres("neon.js", "pg.js", "neon.js.wasm", "pg.js.wasm"), Sqlite("libsql.js"), MySQL(5.6) ) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/json_filters.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/json_filters.rs index e2ab83cfd62f..a1a3072e242c 100644 --- 
a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/json_filters.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/json_filters.rs @@ -27,7 +27,7 @@ mod json_filters { schema.to_owned() } - #[connector_test(exclude(MySQL(5.6), Vitess("planetscale.js")))] + #[connector_test(exclude(MySQL(5.6), Vitess("planetscale.js", "planetscale.js.wasm")))] async fn no_path_without_filter(runner: Runner) -> TestResult<()> { assert_error!( runner, @@ -46,53 +46,68 @@ mod json_filters { create_row(&runner, 4, r#"{ \"a\": { \"b\": [null] } }"#, false).await?; create_row(&runner, 5, r#"{ }"#, false).await?; - insta::assert_snapshot!( - run_query!( - runner, - jsonq(&runner, r#"path: ["a", "b"], equals: "\"c\"" "#, Some("")) - ), - @r###"{"data":{"findManyTestModel":[{"id":1}]}}"### + let res = run_query!( + runner, + jsonq(&runner, r#"path: ["a", "b"], equals: "\"c\"" "#, Some("")) ); + insta::allow_duplicates! { + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":1}]}}"### + ); + } - insta::assert_snapshot!( - run_query!( - runner, - jsonq(&runner, r#"path: ["a", "b", "0"], equals: "1" "#, Some("")) - ), - @r###"{"data":{"findManyTestModel":[{"id":2}]}}"### + let res = run_query!( + runner, + jsonq(&runner, r#"path: ["a", "b", "0"], equals: "1" "#, Some("")) ); + insta::allow_duplicates! { + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":2}]}}"### + ); + } - insta::assert_snapshot!( - run_query!( - runner, - jsonq(&runner, r#"path: ["a", "b", "0"], equals: JsonNull "#, Some("")) - ), - @r###"{"data":{"findManyTestModel":[{"id":4}]}}"### + let res = run_query!( + runner, + jsonq(&runner, r#"path: ["a", "b", "0"], equals: JsonNull "#, Some("")) ); + insta::allow_duplicates! 
{ + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":4}]}}"### + ); + } - insta::assert_snapshot!( - run_query!( - runner, - jsonq(&runner, r#"path: ["a", "b"], equals: JsonNull "#, Some("")) - ), - @r###"{"data":{"findManyTestModel":[{"id":3}]}}"### + let res = run_query!( + runner, + jsonq(&runner, r#"path: ["a", "b"], equals: JsonNull "#, Some("")) ); + insta::allow_duplicates! { + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":3}]}}"### + ); + } - insta::assert_snapshot!( - run_query!( - runner, - jsonq(&runner, r#"path: ["a", "b"], equals: DbNull "#, Some("")) - ), - @r###"{"data":{"findManyTestModel":[{"id":5}]}}"### - ); + let res = run_query!(runner, jsonq(&runner, r#"path: ["a", "b"], equals: DbNull "#, Some(""))); + insta::allow_duplicates! { + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":5}]}}"### + ); + } - insta::assert_snapshot!( - run_query!( - runner, - jsonq(&runner, r#"path: ["a", "b"], equals: AnyNull "#, Some("")) - ), - @r###"{"data":{"findManyTestModel":[{"id":3},{"id":5}]}}"### + let res = run_query!( + runner, + jsonq(&runner, r#"path: ["a", "b"], equals: AnyNull "#, Some("")) ); + insta::allow_duplicates! { + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":3},{"id":5}]}}"### + ); + } Ok(()) } @@ -120,13 +135,13 @@ mod json_filters { create_row(&runner, 5, r#"{ \"a\": { \"b\": [null] } }"#, false).await?; create_row(&runner, 6, r#"{ }"#, false).await?; - insta::assert_snapshot!( - run_query!( - runner, - jsonq(&runner, r#"path: "$.a.b", equals: "\"c\"" "#, Some("")) - ), - @r###"{"data":{"findManyTestModel":[{"id":1}]}}"### - ); + let res = run_query!(runner, jsonq(&runner, r#"path: "$.a.b", equals: "\"c\"" "#, Some(""))); + insta::allow_duplicates! 
{ + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":1}]}}"### + ); + } insta::assert_snapshot!( run_query!( @@ -136,29 +151,32 @@ mod json_filters { @r###"{"data":{"findManyTestModel":[{"id":2},{"id":3}]}}"### ); - insta::assert_snapshot!( - run_query!( - runner, - jsonq(&runner, r#"path: "$.a.b[0]", equals: JsonNull "#, Some("")) - ), - @r###"{"data":{"findManyTestModel":[{"id":4},{"id":5}]}}"### + let res = run_query!( + runner, + jsonq(&runner, r#"path: "$.a.b[0]", equals: JsonNull "#, Some("")) ); + insta::allow_duplicates! { + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":4},{"id":5}]}}"### + ); + } - insta::assert_snapshot!( - run_query!( - runner, - jsonq(&runner, r#"path: "$.a.b", equals: DbNull "#, Some("")) - ), - @r###"{"data":{"findManyTestModel":[{"id":6}]}}"### - ); + let res = run_query!(runner, jsonq(&runner, r#"path: "$.a.b", equals: DbNull "#, Some(""))); + insta::allow_duplicates! { + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":6}]}}"### + ); + } - insta::assert_snapshot!( - run_query!( - runner, - jsonq(&runner, r#"path: "$.a.b", equals: AnyNull "#, Some("")) - ), - @r###"{"data":{"findManyTestModel":[{"id":4},{"id":6}]}}"### - ); + let res = run_query!(runner, jsonq(&runner, r#"path: "$.a.b", equals: AnyNull "#, Some(""))); + insta::allow_duplicates! 
{ + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":4},{"id":6}]}}"### + ); + } Ok(()) } @@ -174,36 +192,36 @@ mod json_filters { create_row(&runner, 8, r#"[1, [null], 2]"#, true).await?; // array_contains - insta::assert_snapshot!( - run_query!( - runner, - jsonq(&runner, r#"array_contains: "[3]""#, None) - ), - @r###"{"data":{"findManyTestModel":[{"id":1},{"id":2}]}}"### - ); - insta::assert_snapshot!( - run_query!( - runner, - jsonq(&runner, r#"array_contains: "[\"a\"]""#, None) - ), - @r###"{"data":{"findManyTestModel":[{"id":4}]}}"### - ); + let res = run_query!(runner, jsonq(&runner, r#"array_contains: "[3]""#, None)); + insta::allow_duplicates! { + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":1},{"id":2}]}}"### + ); + } + let res = run_query!(runner, jsonq(&runner, r#"array_contains: "[\"a\"]""#, None)); + insta::allow_duplicates! { + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":4}]}}"### + ); + } // NOT array_contains - insta::assert_snapshot!( - run_query!( - runner, - not_jsonq(&runner, r#"array_contains: "[3]""#, None) - ), - @r###"{"data":{"findManyTestModel":[{"id":4},{"id":6},{"id":7},{"id":8}]}}"### - ); - insta::assert_snapshot!( - run_query!( - runner, - not_jsonq(&runner, r#"array_contains: "[\"a\"]""#, None) - ), - @r###"{"data":{"findManyTestModel":[{"id":1},{"id":2},{"id":6},{"id":7},{"id":8}]}}"### - ); + let res = run_query!(runner, not_jsonq(&runner, r#"array_contains: "[3]""#, None)); + insta::allow_duplicates! { + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":4},{"id":6},{"id":7},{"id":8}]}}"### + ); + } + let res = run_query!(runner, not_jsonq(&runner, r#"array_contains: "[\"a\"]""#, None)); + insta::allow_duplicates! 
{ + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":1},{"id":2},{"id":6},{"id":7},{"id":8}]}}"### + ); + } // MySQL has slightly different semantics and also coerces null to [null]. is_one_of!( @@ -225,30 +243,30 @@ mod json_filters { match runner.connector_version() { // MariaDB does not support finding arrays in arrays, unlike MySQL ConnectorVersion::MySql(Some(MySqlVersion::MariaDb)) => { - insta::assert_snapshot!( - run_query!( - runner, - jsonq(&runner, r#"array_contains: "[[1, 2]]" "#, None) - ), - @r###"{"data":{"findManyTestModel":[{"id":1},{"id":6},{"id":7},{"id":8}]}}"### - ); + let res = run_query!(runner, jsonq(&runner, r#"array_contains: "[[1, 2]]" "#, None)); + insta::allow_duplicates! { + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":1},{"id":6},{"id":7},{"id":8}]}}"### + ); + } } _ => { - insta::assert_snapshot!( - run_query!( - runner, - jsonq(&runner, r#"array_contains: "[[1, 2]]" "#, None) - ), - @r###"{"data":{"findManyTestModel":[{"id":6}]}}"### - ); - - insta::assert_snapshot!( - run_query!( - runner, - jsonq(&runner, r#"array_contains: "[[null]]" "#, None) - ), - @r###"{"data":{"findManyTestModel":[{"id":8}]}}"### - ); + let res = run_query!(runner, jsonq(&runner, r#"array_contains: "[[1, 2]]" "#, None)); + insta::allow_duplicates! { + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":6}]}}"### + ); + } + + let res = run_query!(runner, jsonq(&runner, r#"array_contains: "[[null]]" "#, None)); + insta::allow_duplicates! 
{ + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":8}]}}"### + ); + } } } @@ -262,7 +280,7 @@ mod json_filters { Ok(()) } - #[connector_test(exclude(MySQL(5.6), Vitess("planetscale.js")))] + #[connector_test(exclude(MySQL(5.6), Vitess("planetscale.js", "planetscale.js.wasm")))] async fn array_contains(runner: Runner) -> TestResult<()> { array_contains_runner(runner).await?; @@ -280,86 +298,86 @@ mod json_filters { create_row(&runner, 8, r#"[null, \"test\"]"#, true).await?; create_row(&runner, 9, r#"[[null], \"test\"]"#, true).await?; - insta::assert_snapshot!( - run_query!( - runner, - jsonq(&runner, r#"array_starts_with: "3" "#, None) - ), - @r###"{"data":{"findManyTestModel":[{"id":2}]}}"### - ); + let res = run_query!(runner, jsonq(&runner, r#"array_starts_with: "3" "#, None)); + insta::allow_duplicates! { + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":2}]}}"### + ); + } - insta::assert_snapshot!( - run_query!( - runner, - jsonq(&runner, r#"array_starts_with: "\"a\"" "#, None) - ), - @r###"{"data":{"findManyTestModel":[{"id":4}]}}"### - ); + let res = run_query!(runner, jsonq(&runner, r#"array_starts_with: "\"a\"" "#, None)); + insta::allow_duplicates! { + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":4}]}}"### + ); + } - insta::assert_snapshot!( - run_query!( - runner, - jsonq(&runner, r#"array_starts_with: "[1, 2]" "#, None) - ), - @r###"{"data":{"findManyTestModel":[{"id":6}]}}"### - ); + let res = run_query!(runner, jsonq(&runner, r#"array_starts_with: "[1, 2]" "#, None)); + insta::allow_duplicates! 
{ + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":6}]}}"### + ); + } - insta::assert_snapshot!( - run_query!( - runner, - jsonq(&runner, r#"array_starts_with: "null" "#, None) - ), - @r###"{"data":{"findManyTestModel":[{"id":8}]}}"### - ); + let res = run_query!(runner, jsonq(&runner, r#"array_starts_with: "null" "#, None)); + insta::allow_duplicates! { + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":8}]}}"### + ); + } - insta::assert_snapshot!( - run_query!( - runner, - jsonq(&runner, r#"array_starts_with: "[null]" "#, None) - ), - @r###"{"data":{"findManyTestModel":[{"id":9}]}}"### - ); + let res = run_query!(runner, jsonq(&runner, r#"array_starts_with: "[null]" "#, None)); + insta::allow_duplicates! { + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":9}]}}"### + ); + } // NOT - insta::assert_snapshot!( - run_query!( - runner, - not_jsonq(&runner, r#"array_starts_with: "3" "#, None) - ), - @r###"{"data":{"findManyTestModel":[{"id":1},{"id":4},{"id":6},{"id":8},{"id":9}]}}"### - ); + let res = run_query!(runner, not_jsonq(&runner, r#"array_starts_with: "3" "#, None)); + insta::allow_duplicates! { + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":1},{"id":4},{"id":6},{"id":8},{"id":9}]}}"### + ); + } - insta::assert_snapshot!( - run_query!( - runner, - not_jsonq(&runner, r#"array_starts_with: "\"a\"" "#, None) - ), - @r###"{"data":{"findManyTestModel":[{"id":1},{"id":2},{"id":6},{"id":8},{"id":9}]}}"### - ); + let res = run_query!(runner, not_jsonq(&runner, r#"array_starts_with: "\"a\"" "#, None)); + insta::allow_duplicates! 
{ + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":1},{"id":2},{"id":6},{"id":8},{"id":9}]}}"### + ); + } - insta::assert_snapshot!( - run_query!( - runner, - not_jsonq(&runner, r#"array_starts_with: "[1, 2]" "#, None) - ), - @r###"{"data":{"findManyTestModel":[{"id":1},{"id":2},{"id":4},{"id":8},{"id":9}]}}"### - ); + let res = run_query!(runner, not_jsonq(&runner, r#"array_starts_with: "[1, 2]" "#, None)); + insta::allow_duplicates! { + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":1},{"id":2},{"id":4},{"id":8},{"id":9}]}}"### + ); + } - insta::assert_snapshot!( - run_query!( - runner, - not_jsonq(&runner, r#"array_starts_with: "null" "#, None) - ), - @r###"{"data":{"findManyTestModel":[{"id":1},{"id":2},{"id":4},{"id":6},{"id":9}]}}"### - ); + let res = run_query!(runner, not_jsonq(&runner, r#"array_starts_with: "null" "#, None)); + insta::allow_duplicates! { + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":1},{"id":2},{"id":4},{"id":6},{"id":9}]}}"### + ); + } - insta::assert_snapshot!( - run_query!( - runner, - not_jsonq(&runner, r#"array_starts_with: "[null]" "#, None) - ), - @r###"{"data":{"findManyTestModel":[{"id":1},{"id":2},{"id":4},{"id":6},{"id":8}]}}"### - ); + let res = run_query!(runner, not_jsonq(&runner, r#"array_starts_with: "[null]" "#, None)); + insta::allow_duplicates! 
{ + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":1},{"id":2},{"id":4},{"id":6},{"id":8}]}}"### + ); + } Ok(()) } @@ -371,7 +389,7 @@ mod json_filters { Ok(()) } - #[connector_test(exclude(MySQL(5.6), Vitess("planetscale.js")))] + #[connector_test(exclude(MySQL(5.6), Vitess("planetscale.js", "planetscale.js.wasm")))] async fn array_starts_with(runner: Runner) -> TestResult<()> { array_starts_with_runner(runner).await?; @@ -387,86 +405,86 @@ mod json_filters { create_row(&runner, 8, r#"[\"test\", null]"#, true).await?; create_row(&runner, 9, r#"[\"test\", [null]]"#, true).await?; - insta::assert_snapshot!( - run_query!( - runner, - jsonq(&runner, r#"array_ends_with: "3" "#, None) - ), - @r###"{"data":{"findManyTestModel":[{"id":1}]}}"### - ); + let res = run_query!(runner, jsonq(&runner, r#"array_ends_with: "3" "#, None)); + insta::allow_duplicates! { + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":1}]}}"### + ); + } - insta::assert_snapshot!( - run_query!( - runner, - jsonq(&runner, r#"array_ends_with: "\"b\"" "#, None) - ), - @r###"{"data":{"findManyTestModel":[{"id":3}]}}"### - ); + let res = run_query!(runner, jsonq(&runner, r#"array_ends_with: "\"b\"" "#, None)); + insta::allow_duplicates! { + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":3}]}}"### + ); + } - insta::assert_snapshot!( - run_query!( - runner, - jsonq(&runner, r#"array_ends_with: "[3, 4]" "#, None) - ), - @r###"{"data":{"findManyTestModel":[{"id":4}]}}"### - ); + let res = run_query!(runner, jsonq(&runner, r#"array_ends_with: "[3, 4]" "#, None)); + insta::allow_duplicates! 
{ + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":4}]}}"### + ); + } - insta::assert_snapshot!( - run_query!( - runner, - jsonq(&runner, r#"array_ends_with: "null" "#, None) - ), - @r###"{"data":{"findManyTestModel":[{"id":8}]}}"### - ); + let res = run_query!(runner, jsonq(&runner, r#"array_ends_with: "null" "#, None)); + insta::allow_duplicates! { + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":8}]}}"### + ); + } - insta::assert_snapshot!( - run_query!( - runner, - jsonq(&runner, r#"array_ends_with: "[null]" "#, None) - ), - @r###"{"data":{"findManyTestModel":[{"id":9}]}}"### - ); + let res = run_query!(runner, jsonq(&runner, r#"array_ends_with: "[null]" "#, None)); + insta::allow_duplicates! { + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":9}]}}"### + ); + } // NOT - insta::assert_snapshot!( - run_query!( - runner, - not_jsonq(&runner, r#"array_ends_with: "3" "#, None) - ), - @r###"{"data":{"findManyTestModel":[{"id":2},{"id":3},{"id":4},{"id":8},{"id":9}]}}"### - ); + let res = run_query!(runner, not_jsonq(&runner, r#"array_ends_with: "3" "#, None)); + insta::allow_duplicates! { + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":2},{"id":3},{"id":4},{"id":8},{"id":9}]}}"### + ); + } - insta::assert_snapshot!( - run_query!( - runner, - not_jsonq(&runner, r#"array_ends_with: "\"b\"" "#, None) - ), - @r###"{"data":{"findManyTestModel":[{"id":1},{"id":2},{"id":4},{"id":8},{"id":9}]}}"### - ); + let res = run_query!(runner, not_jsonq(&runner, r#"array_ends_with: "\"b\"" "#, None)); + insta::allow_duplicates! 
{ + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":1},{"id":2},{"id":4},{"id":8},{"id":9}]}}"### + ); + } - insta::assert_snapshot!( - run_query!( - runner, - not_jsonq(&runner, r#"array_ends_with: "[3, 4]" "#, None) - ), - @r###"{"data":{"findManyTestModel":[{"id":1},{"id":2},{"id":3},{"id":8},{"id":9}]}}"### - ); + let res = run_query!(runner, not_jsonq(&runner, r#"array_ends_with: "[3, 4]" "#, None)); + insta::allow_duplicates! { + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":1},{"id":2},{"id":3},{"id":8},{"id":9}]}}"### + ); + } - insta::assert_snapshot!( - run_query!( - runner, - not_jsonq(&runner, r#"array_ends_with: "null" "#, None) - ), - @r###"{"data":{"findManyTestModel":[{"id":1},{"id":2},{"id":3},{"id":4},{"id":9}]}}"### - ); + let res = run_query!(runner, not_jsonq(&runner, r#"array_ends_with: "null" "#, None)); + insta::allow_duplicates! { + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":1},{"id":2},{"id":3},{"id":4},{"id":9}]}}"### + ); + } - insta::assert_snapshot!( - run_query!( - runner, - not_jsonq(&runner, r#"array_ends_with: "[null]" "#, None) - ), - @r###"{"data":{"findManyTestModel":[{"id":1},{"id":2},{"id":3},{"id":4},{"id":8}]}}"### - ); + let res = run_query!(runner, not_jsonq(&runner, r#"array_ends_with: "[null]" "#, None)); + insta::allow_duplicates! 
{ + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":1},{"id":2},{"id":3},{"id":4},{"id":8}]}}"### + ); + } Ok(()) } @@ -478,7 +496,7 @@ mod json_filters { Ok(()) } - #[connector_test(exclude(MySQL(5.6), Vitess("planetscale.js")))] + #[connector_test(exclude(MySQL(5.6), Vitess("planetscale.js", "planetscale.js.wasm")))] async fn array_ends_with(runner: Runner) -> TestResult<()> { array_ends_with_runner(runner).await?; @@ -490,22 +508,22 @@ mod json_filters { create_row(&runner, 2, r#"\"fool\""#, true).await?; create_row(&runner, 3, r#"[\"foo\"]"#, true).await?; - insta::assert_snapshot!( - run_query!( - runner, - jsonq(&runner, r#"string_contains: "oo" "#, None) - ), - @r###"{"data":{"findManyTestModel":[{"id":1},{"id":2}]}}"### - ); + let res = run_query!(runner, jsonq(&runner, r#"string_contains: "oo" "#, None)); + insta::allow_duplicates! { + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":1},{"id":2}]}}"### + ); + } // NOT - insta::assert_snapshot!( - run_query!( - runner, - not_jsonq(&runner, r#"string_contains: "ab" "#, None) - ), - @r###"{"data":{"findManyTestModel":[{"id":1},{"id":2}]}}"### - ); + let res = run_query!(runner, not_jsonq(&runner, r#"string_contains: "ab" "#, None)); + insta::allow_duplicates! 
{ + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":1},{"id":2}]}}"### + ); + } Ok(()) } @@ -517,7 +535,7 @@ mod json_filters { Ok(()) } - #[connector_test(exclude(MySQL(5.6), Vitess("planetscale.js")))] + #[connector_test(exclude(MySQL(5.6), Vitess("planetscale.js", "planetscale.js.wasm")))] async fn string_contains(runner: Runner) -> TestResult<()> { string_contains_runner(runner).await?; @@ -530,22 +548,22 @@ mod json_filters { create_row(&runner, 3, r#"[\"foo\"]"#, true).await?; // string_starts_with - insta::assert_snapshot!( - run_query!( - runner, - jsonq(&runner, r#"string_starts_with: "foo" "#, None) - ), - @r###"{"data":{"findManyTestModel":[{"id":1},{"id":2}]}}"### - ); + let res = run_query!(runner, jsonq(&runner, r#"string_starts_with: "foo" "#, None)); + insta::allow_duplicates! { + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":1},{"id":2}]}}"### + ); + } // NOT string_starts_with - insta::assert_snapshot!( - run_query!( - runner, - not_jsonq(&runner, r#"string_starts_with: "ab" "#, None) - ), - @r###"{"data":{"findManyTestModel":[{"id":1},{"id":2}]}}"### - ); + let res = run_query!(runner, not_jsonq(&runner, r#"string_starts_with: "ab" "#, None)); + insta::allow_duplicates! 
{ + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":1},{"id":2}]}}"### + ); + } Ok(()) } @@ -557,7 +575,7 @@ mod json_filters { Ok(()) } - #[connector_test(exclude(MySQL(5.6), Vitess("planetscale.js")))] + #[connector_test(exclude(MySQL(5.6), Vitess("planetscale.js", "planetscale.js.wasm")))] async fn string_starts_with(runner: Runner) -> TestResult<()> { string_starts_with_runner(runner).await?; @@ -569,22 +587,22 @@ mod json_filters { create_row(&runner, 2, r#"\"fool\""#, true).await?; create_row(&runner, 3, r#"[\"foo\"]"#, true).await?; - insta::assert_snapshot!( - run_query!( - runner, - jsonq(&runner, r#"string_ends_with: "oo" "#, None) - ), - @r###"{"data":{"findManyTestModel":[{"id":1}]}}"### - ); + let res = run_query!(runner, jsonq(&runner, r#"string_ends_with: "oo" "#, None)); + insta::allow_duplicates! { + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":1}]}}"### + ); + } // NOT - insta::assert_snapshot!( - run_query!( - runner, - not_jsonq(&runner, r#"string_ends_with: "oo" "#, None) - ), - @r###"{"data":{"findManyTestModel":[{"id":2}]}}"### - ); + let res = run_query!(runner, not_jsonq(&runner, r#"string_ends_with: "oo" "#, None)); + insta::allow_duplicates! 
{ + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":2}]}}"### + ); + } Ok(()) } @@ -596,7 +614,7 @@ mod json_filters { Ok(()) } - #[connector_test(exclude(MySQL(5.6), Vitess("planetscale.js")))] + #[connector_test(exclude(MySQL(5.6), Vitess("planetscale.js", "planetscale.js.wasm")))] async fn string_ends_with(runner: Runner) -> TestResult<()> { string_ends_with_runner(runner).await?; @@ -612,37 +630,37 @@ mod json_filters { create_row(&runner, 6, r#"100"#, true).await?; create_row(&runner, 7, r#"[\"foo\"]"#, true).await?; - insta::assert_snapshot!( - run_query!( - runner, - jsonq(&runner, r#"gt: "\"b\"" "#, None) - ), - @r###"{"data":{"findManyTestModel":[{"id":1},{"id":2}]}}"### - ); + let res = run_query!(runner, jsonq(&runner, r#"gt: "\"b\"" "#, None)); + insta::allow_duplicates! { + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":1},{"id":2}]}}"### + ); + } - insta::assert_snapshot!( - run_query!( - runner, - jsonq(&runner, r#"gte: "\"b\"" "#, None) - ), - @r###"{"data":{"findManyTestModel":[{"id":1},{"id":2}]}}"### - ); + let res = run_query!(runner, jsonq(&runner, r#"gte: "\"b\"" "#, None)); + insta::allow_duplicates! { + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":1},{"id":2}]}}"### + ); + } - insta::assert_snapshot!( - run_query!( - runner, - jsonq(&runner, r#"gt: "1" "#, None) - ), - @r###"{"data":{"findManyTestModel":[{"id":4},{"id":5},{"id":6}]}}"### - ); + let res = run_query!(runner, jsonq(&runner, r#"gt: "1" "#, None)); + insta::allow_duplicates! { + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":4},{"id":5},{"id":6}]}}"### + ); + } - insta::assert_snapshot!( - run_query!( - runner, - jsonq(&runner, r#"gte: "1" "#, None) - ), - @r###"{"data":{"findManyTestModel":[{"id":3},{"id":4},{"id":5},{"id":6}]}}"### - ); + let res = run_query!(runner, jsonq(&runner, r#"gte: "1" "#, None)); + insta::allow_duplicates! 
{ + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":3},{"id":4},{"id":5},{"id":6}]}}"### + ); + } Ok(()) } @@ -693,37 +711,37 @@ mod json_filters { create_row(&runner, 6, r#"100"#, true).await?; create_row(&runner, 7, r#"[\"foo\"]"#, true).await?; - insta::assert_snapshot!( - run_query!( - runner, - jsonq(&runner, r#"lt: "\"f\"" "#, None) - ), - @r###"{"data":{"findManyTestModel":[{"id":2}]}}"### - ); + let res = run_query!(runner, jsonq(&runner, r#"lt: "\"f\"" "#, None)); + insta::allow_duplicates! { + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":2}]}}"### + ); + } - insta::assert_snapshot!( - run_query!( - runner, - jsonq(&runner, r#"lte: "\"foo\"" "#, None) - ), - @r###"{"data":{"findManyTestModel":[{"id":1},{"id":2}]}}"### - ); + let res = run_query!(runner, jsonq(&runner, r#"lte: "\"foo\"" "#, None)); + insta::allow_duplicates! { + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":1},{"id":2}]}}"### + ); + } - insta::assert_snapshot!( - run_query!( - runner, - jsonq(&runner, r#"lt: "100" "#, None) - ), - @r###"{"data":{"findManyTestModel":[{"id":3},{"id":4},{"id":5}]}}"### - ); + let res = run_query!(runner, jsonq(&runner, r#"lt: "100" "#, None)); + insta::allow_duplicates! { + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":3},{"id":4},{"id":5}]}}"### + ); + } - insta::assert_snapshot!( - run_query!( - runner, - jsonq(&runner, r#"lte: "100" "#, None) - ), - @r###"{"data":{"findManyTestModel":[{"id":3},{"id":4},{"id":5},{"id":6}]}}"### - ); + let res = run_query!(runner, jsonq(&runner, r#"lte: "100" "#, None)); + insta::allow_duplicates! 
{ + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"id":3},{"id":4},{"id":5},{"id":6}]}}"### + ); + } Ok(()) } @@ -753,70 +771,97 @@ mod json_filters { create_row(&runner, 6, r#"2.4"#, true).await?; create_row(&runner, 7, r#"3"#, true).await?; - insta::assert_snapshot!( - run_query!( - runner, - format!(r#"query {{ - findManyTestModel( - where: {{ json: {{ {}, array_contains: "3", array_starts_with: "3" }} }}, - cursor: {{ id: 2 }}, - take: 2 - ) {{ json }} - }}"#, json_path(&runner)) - ), - @r###"{"data":{"findManyTestModel":[{"json":"{\"a\":{\"b\":[3,4,5]}}"},{"json":"{\"a\":{\"b\":[3,4,6]}}"}]}}"### - ); - insta::assert_snapshot!( - run_query!( - runner, - format!(r#"query {{ - findManyTestModel( - where: {{ - AND: [ - {{ json: {{ {}, gte: "1" }} }}, - {{ json: {{ {}, lt: "3" }} }}, - ] - }} - ) {{ json }} - }}"#, json_path(&runner), json_path(&runner)) - ), - @r###"{"data":{"findManyTestModel":[{"json":"{\"a\":{\"b\":1}}"},{"json":"{\"a\":{\"b\":2.4}}"}]}}"### - ); + let res = run_query!( + runner, + format!( + r#"query {{ + findManyTestModel( + where: {{ json: {{ {}, array_contains: "3", array_starts_with: "3" }} }}, + cursor: {{ id: 2 }}, + take: 2 + ) {{ json }} + }}"#, + json_path(&runner) + ) + ); + insta::allow_duplicates! { + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"json":"{\"a\":{\"b\":[3,4,5]}}"},{"json":"{\"a\":{\"b\":[3,4,6]}}"}]}}"### + ); + } + + let res = run_query!( + runner, + format!( + r#"query {{ + findManyTestModel( + where: {{ + AND: [ + {{ json: {{ {}, gte: "1" }} }}, + {{ json: {{ {}, lt: "3" }} }}, + ] + }} + ) {{ json }} + }}"#, + json_path(&runner), + json_path(&runner) + ) + ); + insta::allow_duplicates! 
{ + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"json":"{\"a\":{\"b\":1}}"},{"json":"{\"a\":{\"b\":2.4}}"}]}}"### + ); + } // NOT - insta::assert_snapshot!( - run_query!( - runner, - format!(r#"query {{ - findManyTestModel( - where: {{ NOT: {{ json: {{ {}, array_contains: "3", array_starts_with: "3" }} }} }}, - cursor: {{ id: 2 }}, - take: 2 - ) {{ json }} - }}"#, json_path(&runner)) - ), - @r###"{"data":{"findManyTestModel":[{"json":"{\"a\":{\"b\":[5,6,7]}}"}]}}"### - ); + let res = run_query!( + runner, + format!( + r#"query {{ + findManyTestModel( + where: {{ NOT: {{ json: {{ {}, array_contains: "3", array_starts_with: "3" }} }} }}, + cursor: {{ id: 2 }}, + take: 2 + ) {{ json }} + }}"#, + json_path(&runner) + ) + ); + insta::allow_duplicates! { + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"json":"{\"a\":{\"b\":[5,6,7]}}"}]}}"### + ); + } // 1, 2.4, 3 // filter: false, true, false // negated: true, false, true // result: 1, 3 - insta::assert_snapshot!( - run_query!( - runner, - format!(r#"query {{ - findManyTestModel( - where: {{ - NOT: {{ AND: [ - {{ json: {{ {}, gt: "1" }} }}, - {{ json: {{ {}, lt: "3" }} }}, - ]}} - }} - ) {{ json }} - }}"#, json_path(&runner), json_path(&runner)) - ), - @r###"{"data":{"findManyTestModel":[{"json":"{\"a\":{\"b\":1}}"},{"json":"{\"a\":{\"b\":3}}"}]}}"### - ); + let res = run_query!( + runner, + format!( + r#"query {{ + findManyTestModel( + where: {{ + NOT: {{ AND: [ + {{ json: {{ {}, gt: "1" }} }}, + {{ json: {{ {}, lt: "3" }} }}, + ]}} + }} + ) {{ json }} + }}"#, + json_path(&runner), + json_path(&runner) + ) + ); + insta::allow_duplicates! 
{ + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"json":"{\"a\":{\"b\":1}}"},{"json":"{\"a\":{\"b\":3}}"}]}}"### + ); + } Ok(()) } diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/list_filters.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/list_filters.rs index 16b9a0ab0437..f34675ba3ff1 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/list_filters.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/list_filters.rs @@ -1,6 +1,10 @@ use query_engine_tests::*; -#[test_suite(schema(common_list_types), capabilities(ScalarLists))] +#[test_suite( + schema(common_list_types), + exclude(Postgres("pg.js.wasm", "neon.js.wasm")), + capabilities(ScalarLists) +)] mod lists { use indoc::indoc; use query_engine_tests::run_query; @@ -623,7 +627,7 @@ mod lists { } // Cockroachdb does not like the bytes empty array check in v21 but this will be fixed in 22. 
- #[connector_test(exclude(CockroachDB))] + #[connector_test(exclude(CockroachDB), exclude(Postgres("pg.js.wasm", "neon.js.wasm")))] async fn is_empty_bytes(runner: Runner) -> TestResult<()> { test_data(&runner).await?; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/search_filter.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/search_filter.rs index 51637d3bbcb8..abf7f04efdf3 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/search_filter.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/search_filter.rs @@ -229,7 +229,7 @@ mod search_filter_with_index { super::ensure_filter_tree_shake_works(runner).await } - #[connector_test(exclude(Vitess("planetscale.js")))] + #[connector_test(exclude(Vitess("planetscale.js", "planetscale.js.wasm")))] async fn throws_error_on_missing_index(runner: Runner) -> TestResult<()> { super::create_test_data(&runner).await?; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/nested_pagination.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/nested_pagination.rs index 6a67b87d56b1..34af3fc21ed9 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/nested_pagination.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/nested_pagination.rs @@ -80,7 +80,7 @@ mod nested_pagination { ***************/ // should skip the first item - #[connector_test(exclude(Vitess("planetscale.js")))] + #[connector_test(exclude(Vitess("planetscale.js", "planetscale.js.wasm")))] async fn mid_lvl_skip_1(runner: Runner) -> TestResult<()> { create_test_data(&runner).await?; @@ -102,7 +102,7 @@ mod nested_pagination { } // should "skip all items" - #[connector_test(exclude(Vitess("planetscale.js")))] + 
#[connector_test(exclude(Vitess("planetscale.js", "planetscale.js.wasm")))] async fn mid_lvl_skip_3(runner: Runner) -> TestResult<()> { create_test_data(&runner).await?; @@ -124,7 +124,7 @@ mod nested_pagination { } // should "skip all items" - #[connector_test(exclude(Vitess("planetscale.js")))] + #[connector_test(exclude(Vitess("planetscale.js", "planetscale.js.wasm")))] async fn mid_lvl_skip_4(runner: Runner) -> TestResult<()> { create_test_data(&runner).await?; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/order_by_dependent.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/order_by_dependent.rs index c8f7429451a7..d12f7fcfed65 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/order_by_dependent.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/order_by_dependent.rs @@ -223,7 +223,7 @@ mod order_by_dependent { } // "[Circular with differing records] Ordering by related record field ascending" should "work" - #[connector_test(exclude(SqlServer, Vitess("planetscale.js")))] + #[connector_test(exclude(SqlServer, Vitess("planetscale.js", "planetscale.js.wasm")))] async fn circular_diff_related_record_asc(runner: Runner) -> TestResult<()> { // Records form circles with their relations create_row(&runner, 1, Some(1), Some(1), Some(3)).await?; @@ -258,7 +258,7 @@ mod order_by_dependent { } // "[Circular with differing records] Ordering by related record field descending" should "work" - #[connector_test(exclude(SqlServer, Vitess("planetscale.js")))] + #[connector_test(exclude(SqlServer, Vitess("planetscale.js", "planetscale.js.wasm")))] async fn circular_diff_related_record_desc(runner: Runner) -> TestResult<()> { // Records form circles with their relations create_row(&runner, 1, Some(1), Some(1), Some(3)).await?; diff --git 
a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/order_by_dependent_pagination.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/order_by_dependent_pagination.rs index f8e5e831971b..323192be180d 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/order_by_dependent_pagination.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/order_by_dependent_pagination.rs @@ -79,7 +79,7 @@ mod order_by_dependent_pag { // "[Hops: 1] Ordering by related record field ascending with nulls" should "work" // TODO(julius): should enable for SQL Server when partial indices are in the PSL - #[connector_test(exclude(SqlServer, Vitess("planetscale.js")))] + #[connector_test(exclude(SqlServer, Vitess("planetscale.js", "planetscale.js.wasm")))] async fn hop_1_related_record_asc_nulls(runner: Runner) -> TestResult<()> { // 1 record has the "full chain", one half, one none create_row(&runner, 1, Some(1), Some(1), None).await?; @@ -146,7 +146,7 @@ mod order_by_dependent_pag { // "[Hops: 2] Ordering by related record field ascending with nulls" should "work" // TODO(garren): should enable for SQL Server when partial indices are in the PSL - #[connector_test(exclude(SqlServer, Vitess("planetscale.js")))] + #[connector_test(exclude(SqlServer, Vitess("planetscale.js", "planetscale.js.wasm")))] async fn hop_2_related_record_asc_null(runner: Runner) -> TestResult<()> { // 1 record has the "full chain", one half, one none create_row(&runner, 1, Some(1), Some(1), None).await?; @@ -227,7 +227,7 @@ mod order_by_dependent_pag { // "[Circular with differing records] Ordering by related record field ascending" should "work" // TODO(julius): should enable for SQL Server when partial indices are in the PSL - #[connector_test(exclude(SqlServer, Vitess("planetscale.js")))] + #[connector_test(exclude(SqlServer, 
Vitess("planetscale.js", "planetscale.js.wasm")))] async fn circular_diff_related_record_asc(runner: Runner) -> TestResult<()> { // Records form circles with their relations create_row(&runner, 1, Some(1), Some(1), Some(3)).await?; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/pagination.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/pagination.rs index 83c472a064e7..e6cbee21d9b7 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/pagination.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/pagination.rs @@ -277,7 +277,7 @@ mod pagination { ********************/ // "A skip" should "return all records after the offset specified" - #[connector_test(exclude(Vitess("planetscale.js")))] + #[connector_test(exclude(Vitess("planetscale.js", "planetscale.js.wasm")))] async fn skip_returns_all_after_offset(runner: Runner) -> TestResult<()> { create_test_data(&runner).await?; @@ -296,7 +296,7 @@ mod pagination { } // "A skip with order reversed" should "return all records after the offset specified" - #[connector_test(exclude(Vitess("planetscale.js")))] + #[connector_test(exclude(Vitess("planetscale.js", "planetscale.js.wasm")))] async fn skip_reversed_order(runner: Runner) -> TestResult<()> { create_test_data(&runner).await?; @@ -315,7 +315,7 @@ mod pagination { } // "A skipping beyond all records" should "return no records" - #[connector_test(exclude(Vitess("planetscale.js")))] + #[connector_test(exclude(Vitess("planetscale.js", "planetscale.js.wasm")))] async fn skipping_beyond_all_records(runner: Runner) -> TestResult<()> { create_test_data(&runner).await?; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/raw/sql/casts.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/raw/sql/casts.rs index c03067eed818..146892889beb 100644 --- 
a/query-engine/connector-test-kit-rs/query-engine-tests/tests/raw/sql/casts.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/raw/sql/casts.rs @@ -18,7 +18,7 @@ mod casts { // // Bails with: ERROR: invalid input syntax for type integer: "42.51" // - #[connector_test(only(Postgres), exclude(Postgres("neon.js"), Postgres("pg.js")))] + #[connector_test(only(Postgres), exclude(Postgres("neon.js", "pg.js", "neon.js.wasm", "pg.js.wasm")))] async fn query_numeric_casts(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query_pretty!(&runner, fmt_query_raw(r#" diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/raw/sql/errors.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/raw/sql/errors.rs index cb44a2285ff2..4d38c60b5b75 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/raw/sql/errors.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/raw/sql/errors.rs @@ -37,7 +37,7 @@ mod raw_errors { #[connector_test( schema(common_nullable_types), only(Postgres), - exclude(Postgres("neon.js"), Postgres("pg.js")) + exclude(Postgres("neon.js", "pg.js", "neon.js.wasm", "pg.js.wasm")) )] async fn list_param_for_scalar_column_should_not_panic_quaint(runner: Runner) -> TestResult<()> { assert_error!( diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/raw/sql/input_coercion.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/raw/sql/input_coercion.rs index eac2bc42b4cd..215cd539af3c 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/raw/sql/input_coercion.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/raw/sql/input_coercion.rs @@ -5,7 +5,7 @@ mod input_coercion { use query_engine_tests::fmt_execute_raw; // Checks that query raw inputs are coerced to the correct types - #[connector_test] + #[connector_test(only(Postgres), exclude(Postgres("pg.js.wasm", "neon.js.wasm"),))] async fn 
scalar_input_correctly_coerced(runner: Runner) -> TestResult<()> { run_query!( &runner, diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/raw/sql/null_list.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/raw/sql/null_list.rs index 32a8a8ef281d..4ae2a2b6b57c 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/raw/sql/null_list.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/raw/sql/null_list.rs @@ -5,7 +5,11 @@ use query_engine_tests::*; mod null_list { use query_engine_tests::{fmt_query_raw, run_query, run_query_pretty}; - #[connector_test(schema(common_list_types))] + #[connector_test( + schema(common_list_types), + only(Postgres), + exclude(Postgres("pg.js.wasm", "neon.js.wasm"),) + )] async fn null_scalar_lists(runner: Runner) -> TestResult<()> { run_query!( &runner, diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/raw/sql/typed_output.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/raw/sql/typed_output.rs index c3687ddd9f3e..8434da64073e 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/raw/sql/typed_output.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/raw/sql/typed_output.rs @@ -26,7 +26,7 @@ mod typed_output { schema.to_owned() } - #[connector_test(schema(schema_pg), only(Postgres))] + #[connector_test(schema(schema_pg), only(Postgres), exclude(Postgres("pg.js.wasm", "neon.js.wasm")))] async fn all_scalars_pg(runner: Runner) -> TestResult<()> { create_row( &runner, @@ -483,7 +483,7 @@ mod typed_output { schema.to_owned() } - #[connector_test(schema(schema_sqlite), only(Sqlite))] + #[connector_test(schema(schema_sqlite), only(Sqlite), exclude(Sqlite("libsql.js.wasm")))] async fn all_scalars_sqlite(runner: Runner) -> TestResult<()> { create_row( &runner, diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/bigint.rs 
b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/bigint.rs index 1cb9adf534a8..c78b522f4994 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/bigint.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/bigint.rs @@ -17,7 +17,11 @@ mod bigint { } // "Using a BigInt field" should "work" - #[connector_test] + #[connector_test(exclude( + Postgres("pg.js.wasm", "neon.js.wasm"), + Sqlite("libsql.js.wasm"), + Vitess("planetscale.js.wasm") + ))] async fn using_bigint_field(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/bytes.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/bytes.rs index 791b0a2137fb..654463f491f7 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/bytes.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/bytes.rs @@ -1,6 +1,10 @@ use query_engine_tests::*; -#[test_suite] +#[test_suite(exclude( + Postgres("pg.js.wasm", "neon.js.wasm"), + Sqlite("libsql.js.wasm"), + Vitess("planetscale.js.wasm") +))] mod bytes { use indoc::indoc; use query_engine_tests::run_query; @@ -77,7 +81,16 @@ mod bytes { Ok(()) } - #[connector_test(schema(bytes_id), exclude(MySQL, Vitess, SqlServer))] + #[connector_test( + schema(bytes_id), + exclude( + MySQL, + Vitess, + SqlServer, + Postgres("pg.js.wasm", "neon.js.wasm"), + Sqlite("libsql.js.wasm") + ) + )] async fn byte_id_coercion(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(runner, r#" diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/native_types/postgres.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/native_types/postgres.rs index 2d487ec4f137..2a83d17f6fb7 100644 --- 
a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/native_types/postgres.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/native_types/postgres.rs @@ -23,7 +23,7 @@ mod postgres { } //"Postgres native int types" should "work" - #[connector_test(schema(schema_int))] + #[connector_test(schema(schema_int), only(Postgres), exclude(Postgres("pg.js.wasm", "neon.js.wasm")))] async fn native_int_types(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { @@ -191,7 +191,11 @@ mod postgres { } // "Other Postgres native types" should "work" - #[connector_test(schema(schema_other_types), only(Postgres), exclude(CockroachDb))] + #[connector_test( + schema(schema_other_types), + only(Postgres), + exclude(CockroachDb, Postgres("pg.js.wasm", "neon.js.wasm")) + )] async fn native_other_types(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/scalar_list/base.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/scalar_list/base.rs index 9a5e74dd8547..2bd989573da8 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/scalar_list/base.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/scalar_list/base.rs @@ -28,7 +28,7 @@ mod basic_types { schema.to_owned() } - #[connector_test] + #[connector_test(exclude(Postgres("pg.js.wasm", "neon.js.wasm")))] async fn set_base(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, format!(r#"mutation {{ @@ -59,7 +59,7 @@ mod basic_types { // "Scalar lists" should "be behave like regular values for create and update operations" // Skipped for CockroachDB as enum array concatenation is not supported (https://github.com/cockroachdb/cockroach/issues/71388). 
- #[connector_test(exclude(CockroachDb))] + #[connector_test(exclude(CockroachDb, Postgres("pg.js.wasm", "neon.js.wasm")))] async fn behave_like_regular_val_for_create_and_update(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, format!(r#"mutation {{ @@ -158,7 +158,7 @@ mod basic_types { } // "A Create Mutation" should "create and return items with list values with shorthand notation" - #[connector_test] + #[connector_test(exclude(Postgres("pg.js.wasm", "neon.js.wasm")))] async fn create_mut_work_with_list_vals(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, format!(r#"mutation {{ diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/scalar_list/defaults.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/scalar_list/defaults.rs index 39370e62c572..c216b36ae458 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/scalar_list/defaults.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/scalar_list/defaults.rs @@ -29,7 +29,7 @@ mod basic { schema.to_owned() } - #[connector_test] + #[connector_test(exclude(Postgres("pg.js.wasm", "neon.js.wasm")))] async fn basic_write(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/ids/byoid.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/ids/byoid.rs index 5493ff7f2778..5d46b75a98fa 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/ids/byoid.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/ids/byoid.rs @@ -48,7 +48,7 @@ mod byoid { #[connector_test( schema(schema_1), only(MySql, Postgres, Sqlite, Vitess), - exclude(Vitess("planetscale.js")) + exclude(Vitess("planetscale.js", "planetscale.js.wasm")) )] async fn 
create_and_return_item_woi_1(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( @@ -80,7 +80,7 @@ mod byoid { #[connector_test( schema(schema_2), only(MySql, Postgres, Sqlite, Vitess), - exclude(Vitess("planetscale.js")) + exclude(Vitess("planetscale.js", "planetscale.js.wasm")) )] async fn create_and_return_item_woi_2(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( @@ -142,7 +142,7 @@ mod byoid { #[connector_test( schema(schema_1), only(MySql, Postgres, Sqlite, Vitess), - exclude(Vitess("planetscale.js")) + exclude(Vitess("planetscale.js", "planetscale.js.wasm")) )] async fn nested_create_return_item_woi_1(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( @@ -174,7 +174,7 @@ mod byoid { #[connector_test( schema(schema_2), only(MySql, Postgres, Sqlite, Vitess), - exclude(Vitess("planetscale.js")) + exclude(Vitess("planetscale.js", "planetscale.js.wasm")) )] async fn nested_create_return_item_woi_2(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_update_many_inside_update.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_update_many_inside_update.rs index 05931d16084b..c6b48405f8c9 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_update_many_inside_update.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_update_many_inside_update.rs @@ -59,7 +59,7 @@ mod um_inside_update { #[relation_link_test( on_parent = "ToMany", on_child = "ToOneReq", - exclude(Postgres("pg.js", "neon.js"), Vitess("planetscale.js")) + exclude(Postgres("pg.js", "neon.js"), Vitess("planetscale.js", "planetscale.js.wasm")) )] async fn pm_c1_req_should_work(runner: &Runner, t: &DatamodelWithParams) -> TestResult<()> { let parent = 
setup_data(runner, t).await?; @@ -98,7 +98,7 @@ mod um_inside_update { #[relation_link_test( on_parent = "ToMany", on_child = "ToOneOpt", - exclude(Postgres("pg.js", "neon.js"), Vitess("planetscale.js")) + exclude(Postgres("pg.js", "neon.js"), Vitess("planetscale.js", "planetscale.js.wasm")) )] async fn pm_c1_should_work(runner: &Runner, t: &DatamodelWithParams) -> TestResult<()> { let parent = setup_data(runner, t).await?; @@ -137,7 +137,7 @@ mod um_inside_update { #[relation_link_test( on_parent = "ToMany", on_child = "ToMany", - exclude(Postgres("pg.js", "neon.js"), Vitess("planetscale.js")) + exclude(Postgres("pg.js", "neon.js"), Vitess("planetscale.js", "planetscale.js.wasm")) )] async fn pm_cm_should_work(runner: &Runner, t: &DatamodelWithParams) -> TestResult<()> { let parent = setup_data(runner, t).await?; @@ -176,7 +176,7 @@ mod um_inside_update { #[relation_link_test( on_parent = "ToMany", on_child = "ToOneReq", - exclude(Postgres("pg.js", "neon.js"), Vitess("planetscale.js")) + exclude(Postgres("pg.js", "neon.js"), Vitess("planetscale.js", "planetscale.js.wasm")) )] async fn pm_c1_req_many_ums(runner: &Runner, t: &DatamodelWithParams) -> TestResult<()> { let parent = setup_data(runner, t).await?; @@ -221,7 +221,7 @@ mod um_inside_update { #[relation_link_test( on_parent = "ToMany", on_child = "ToOneReq", - exclude(Postgres("pg.js", "neon.js"), Vitess("planetscale.js")) + exclude(Postgres("pg.js", "neon.js"), Vitess("planetscale.js", "planetscale.js.wasm")) )] async fn pm_c1_req_empty_filter(runner: &Runner, t: &DatamodelWithParams) -> TestResult<()> { let parent = setup_data(runner, t).await?; @@ -262,7 +262,7 @@ mod um_inside_update { #[relation_link_test( on_parent = "ToMany", on_child = "ToOneReq", - exclude(Postgres("pg.js", "neon.js"), Vitess("planetscale.js")) + exclude(Postgres("pg.js", "neon.js"), Vitess("planetscale.js", "planetscale.js.wasm")) )] async fn pm_c1_req_noop_no_hit(runner: &Runner, t: &DatamodelWithParams) -> TestResult<()> { let 
parent = setup_data(runner, t).await?; @@ -309,7 +309,7 @@ mod um_inside_update { #[relation_link_test( on_parent = "ToMany", on_child = "ToOneReq", - exclude(Postgres("pg.js", "neon.js"), Vitess("planetscale.js")) + exclude(Postgres("pg.js", "neon.js"), Vitess("planetscale.js", "planetscale.js.wasm")) )] async fn pm_c1_req_many_filters(runner: &Runner, t: &DatamodelWithParams) -> TestResult<()> { let parent = setup_data(runner, t).await?; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/not_using_schema_base/nested_create_many.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/not_using_schema_base/nested_create_many.rs index 45562b5f6be8..cd71df429ea3 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/not_using_schema_base/nested_create_many.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/not_using_schema_base/nested_create_many.rs @@ -78,7 +78,7 @@ mod nested_create_many { // "Nested createMany" should "error on duplicates by default" // TODO(dom): Not working for mongo - #[connector_test(exclude(Sqlite, MongoDb, Vitess("planetscale.js")))] + #[connector_test(exclude(Sqlite, MongoDb, Vitess("planetscale.js", "planetscale.js.wasm")))] async fn nested_createmany_fail_dups(runner: Runner) -> TestResult<()> { assert_error!( &runner, diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/relations/compound_fks_mixed_requiredness.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/relations/compound_fks_mixed_requiredness.rs index 808af82deec4..8f91a6039de4 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/relations/compound_fks_mixed_requiredness.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/relations/compound_fks_mixed_requiredness.rs @@ -26,7 +26,7 @@ mod compound_fks { } // "A One to 
Many relation with mixed requiredness" should "be writable and readable" - #[connector_test(exclude(MySql(5.6), MongoDb, Vitess("planetscale.js")))] + #[connector_test(exclude(MySql(5.6), MongoDb, Vitess("planetscale.js", "planetscale.js.wasm")))] async fn one2m_mix_required_writable_readable(runner: Runner) -> TestResult<()> { // Setup user insta::assert_snapshot!( diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/create.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/create.rs index 1507ea0c082b..5c91f1c7f18a 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/create.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/create.rs @@ -205,7 +205,7 @@ mod create { // TODO(dom): Not working on mongo // TODO(dom): 'Expected result to return an error, but found success: {"data":{"createOneScalarModel":{"optUnique":"test"}}}' // Comment(dom): Expected, we're not enforcing uniqueness for the test setup yet. 
- #[connector_test(exclude(MongoDb, Vitess("planetscale.js")))] + #[connector_test(exclude(MongoDb, Vitess("planetscale.js", "planetscale.js.wasm")))] async fn gracefully_fails_when_uniq_violation(runner: Runner) -> TestResult<()> { run_query!( &runner, diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/create_many.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/create_many.rs index 94118b669c1b..832205e66c60 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/create_many.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/create_many.rs @@ -165,7 +165,7 @@ mod create_many { } // "createMany" should "error on duplicates by default" - #[connector_test(schema(schema_4), exclude(Vitess("planetscale.js")))] + #[connector_test(schema(schema_4), exclude(Vitess("planetscale.js", "planetscale.js.wasm")))] async fn create_many_error_dups(runner: Runner) -> TestResult<()> { assert_error!( &runner, diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/update_many.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/update_many.rs index 749048fd3edc..80c59a1a65f4 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/update_many.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/update_many.rs @@ -123,7 +123,7 @@ mod update_many { } // "An updateMany mutation" should "correctly apply all number operations for Int" - #[connector_test(exclude(Vitess("planetscale.js"), CockroachDb))] + #[connector_test(exclude(Vitess("planetscale.js", "planetscale.js.wasm"), CockroachDb))] async fn apply_number_ops_for_int(runner: Runner) -> TestResult<()> { create_row(&runner, r#"{ id: 1, optStr: "str1" }"#).await?; create_row(&runner, r#"{ 
id: 2, optStr: "str2", optInt: 2 }"#).await?; @@ -240,7 +240,7 @@ mod update_many { } // "An updateMany mutation" should "correctly apply all number operations for Float" - #[connector_test(exclude(Vitess("planetscale.js")))] + #[connector_test(exclude(Vitess("planetscale.js", "planetscale.js.wasm")))] async fn apply_number_ops_for_float(runner: Runner) -> TestResult<()> { create_row(&runner, r#"{ id: 1, optStr: "str1" }"#).await?; create_row(&runner, r#"{ id: 2, optStr: "str2", optFloat: 2 }"#).await?; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/upsert.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/upsert.rs index f4f43eda05ac..e876bac06211 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/upsert.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/upsert.rs @@ -674,7 +674,7 @@ mod upsert { Ok(()) } - #[connector_test(schema(generic), exclude(Vitess("planetscale.js")))] + #[connector_test(schema(generic), exclude(Vitess("planetscale.js", "planetscale.js.wasm")))] async fn upsert_fails_if_filter_dont_match(runner: Runner) -> TestResult<()> { run_query!( &runner, diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/Cargo.toml b/query-engine/connector-test-kit-rs/query-tests-setup/Cargo.toml index cf1b98b25adb..aa9f5957fb89 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/Cargo.toml +++ b/query-engine/connector-test-kit-rs/query-tests-setup/Cargo.toml @@ -12,7 +12,7 @@ request-handlers = { path = "../../request-handlers" } tokio.workspace = true query-core = { path = "../../core", features = ["metrics"] } sql-query-connector = { path = "../../connectors/sql-query-connector" } -query-engine = { path = "../../query-engine"} +query-engine = { path = "../../query-engine" } psl.workspace = true user-facing-errors = { path = 
"../../../libs/user-facing-errors" } thiserror = "1.0" @@ -30,9 +30,10 @@ indoc.workspace = true enumflags2 = "0.7" hyper = { version = "0.14", features = ["full"] } indexmap = { version = "1.0", features = ["serde-1"] } -query-engine-metrics = {path = "../../metrics"} +query-engine-metrics = { path = "../../metrics" } quaint.workspace = true jsonrpc-core = "17" +insta = "1.7.1" # Only this version is vetted, upgrade only after going through the code, # as this is a small crate with little user base. diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js/external_process.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js/external_process.rs index 1abfedbaf8ee..06d1551f9405 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js/external_process.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js/external_process.rs @@ -1,8 +1,13 @@ use super::*; use once_cell::sync::Lazy; use serde::de::DeserializeOwned; -use std::{fmt::Display, io::Write as _, sync::atomic::Ordering}; -use tokio::sync::{mpsc, oneshot}; +use std::{ + error::Error as StdError, + fmt::Display, + io::Write as _, + sync::{atomic::Ordering, Arc}, +}; +use tokio::sync::{mpsc, oneshot, RwLock}; type Result = std::result::Result>; @@ -29,6 +34,17 @@ fn exit_with_message(status_code: i32, message: &str) -> ! { } impl ExecutorProcess { + fn spawn() -> ExecutorProcess { + match std::thread::spawn(ExecutorProcess::new).join() { + Ok(Ok(process)) => process, + Ok(Err(err)) => exit_with_message(1, &format!("Failed to start node process. 
Details: {err}")), + Err(err) => { + let err = err.downcast_ref::().map(ToOwned::to_owned).unwrap_or_default(); + exit_with_message(1, &format!("Panic while trying to start node process.\nDetails: {err}")) + } + } + } + fn new() -> Result { let (sender, receiver) = mpsc::channel::(300); @@ -81,15 +97,86 @@ impl ExecutorProcess { } } -pub(super) static EXTERNAL_PROCESS: Lazy = - Lazy::new(|| match std::thread::spawn(ExecutorProcess::new).join() { - Ok(Ok(process)) => process, - Ok(Err(err)) => exit_with_message(1, &format!("Failed to start node process. Details: {err}")), - Err(err) => { - let err = err.downcast_ref::().map(ToOwned::to_owned).unwrap_or_default(); - exit_with_message(1, &format!("Panic while trying to start node process.\nDetails: {err}")) +/// Wraps an ExecutorProcess allowing for restarting it. +/// +/// A node process can die for a number of reasons, being one that any `panic!` occurring in Rust +/// asynchronous code are translated to an abort trap by wasm-bindgen, which kills the node process. 
+#[derive(Clone)] +pub(crate) struct RestartableExecutorProcess { + process: Arc>, +} + +impl RestartableExecutorProcess { + fn new() -> Self { + Self { + process: Arc::new(RwLock::new(ExecutorProcess::spawn())), } - }); + } + + async fn restart(&self) { + let mut process = self.process.write().await; + *process = ExecutorProcess::spawn(); + } + + pub(crate) async fn request(&self, method: &str, params: serde_json::Value) -> Result { + let p = self.process.read().await; + p.request(method, params).await + } +} + +struct ExecutorProcessDiedError; + +impl fmt::Debug for ExecutorProcessDiedError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "The external test executor process died") + } +} + +impl Display for ExecutorProcessDiedError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Debug::fmt(self, f) + } +} + +impl StdError for ExecutorProcessDiedError {} + +struct PendingRequests { + map: HashMap>>, + last_id: Option, +} + +impl PendingRequests { + fn new() -> Self { + Self { + map: HashMap::new(), + last_id: None, + } + } + + fn insert(&mut self, id: jsonrpc_core::Id, sender: oneshot::Sender>) { + self.map.insert(id.clone(), sender); + self.last_id = Some(id); + } + + fn respond(&mut self, id: &jsonrpc_core::Id, response: Result) { + self.map + .remove(id) + .expect("no sender for response") + .send(response) + .unwrap(); + } + + fn respond_to_last(&mut self, response: Result) { + let last_id = self + .last_id + .as_ref() + .expect("Expected last response to exist") + .to_owned(); + self.respond(&last_id, response); + } +} + +pub(super) static EXTERNAL_PROCESS: Lazy = Lazy::new(RestartableExecutorProcess::new); type ReqImpl = ( jsonrpc_core::MethodCall, @@ -122,8 +209,7 @@ fn start_rpc_thread(mut receiver: mpsc::Receiver) -> Result<()> { let mut stdout = BufReader::new(process.stdout.unwrap()).lines(); let mut stdin = process.stdin.unwrap(); - let mut pending_requests: HashMap>> = - HashMap::new(); + let mut 
pending_requests = PendingRequests::new(); loop { tokio::select! { @@ -136,20 +222,20 @@ fn start_rpc_thread(mut receiver: mpsc::Receiver) -> Result<()> { Ok(Some(line)) => // new response { match serde_json::from_str::(&line) { - Ok(response) => { - let sender = pending_requests.remove(response.id()).unwrap(); - match response { + Ok(ref response) => { + let res: Result = match response { jsonrpc_core::Output::Success(success) => { // The other end may be dropped if the whole // request future was dropped and not polled to // completion, so we ignore send errors here. - _ = sender.send(Ok(success.result)); + Ok(success.result.clone()) } jsonrpc_core::Output::Failure(err) => { tracing::error!("error response from jsonrpc: {err:?}"); - _ = sender.send(Err(Box::new(err.error))); + Err(Box::new(err.error.clone())) } - } + }; + pending_requests.respond(response.id(), res) } Err(err) => { tracing::error!(%err, "error when decoding response from child node process. Response was: `{}`", &line); @@ -159,7 +245,11 @@ fn start_rpc_thread(mut receiver: mpsc::Receiver) -> Result<()> { } Ok(None) => // end of the stream { - exit_with_message(1, "child node process stdout closed") + tracing::error!("Error when reading from child node process. Process might have exited. 
Restarting..."); + + pending_requests.respond_to_last(Err(Box::new(ExecutorProcessDiedError))); + EXTERNAL_PROCESS.restart().await; + break; } Err(err) => // log it { @@ -175,6 +265,7 @@ fn start_rpc_thread(mut receiver: mpsc::Receiver) -> Result<()> { } Some((request, response_sender)) => { pending_requests.insert(request.id.clone(), response_sender); + let mut req = serde_json::to_vec(&request).unwrap(); req.push(b'\n'); stdin.write_all(&req).await.unwrap(); diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/mod.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/mod.rs index 6cc6120f71c8..5a0dff3b49a2 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/mod.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/mod.rs @@ -99,7 +99,9 @@ pub(crate) fn connection_string( Some(PostgresVersion::V12) if is_ci => { format!("postgresql://postgres:prisma@test-db-postgres-12:5432/{database}") } - Some(PostgresVersion::V13) | Some(PostgresVersion::NeonJs) | Some(PostgresVersion::PgJs) if is_ci => { + Some(PostgresVersion::V13) | Some(PostgresVersion::NeonJsNapi) | Some(PostgresVersion::PgJsNapi) + if is_ci => + { format!("postgresql://postgres:prisma@test-db-postgres-13:5432/{database}") } Some(PostgresVersion::V14) if is_ci => { @@ -116,7 +118,11 @@ pub(crate) fn connection_string( Some(PostgresVersion::V10) => format!("postgresql://postgres:prisma@127.0.0.1:5432/{database}"), Some(PostgresVersion::V11) => format!("postgresql://postgres:prisma@127.0.0.1:5433/{database}"), Some(PostgresVersion::V12) => format!("postgresql://postgres:prisma@127.0.0.1:5434/{database}"), - Some(PostgresVersion::V13) | Some(PostgresVersion::NeonJs) | Some(PostgresVersion::PgJs) => { + Some(PostgresVersion::V13) + | Some(PostgresVersion::NeonJsNapi) + | Some(PostgresVersion::PgJsNapi) + | Some(PostgresVersion::PgJsWasm) + | Some(PostgresVersion::NeonJsWasm) => { 
format!("postgresql://postgres:prisma@127.0.0.1:5435/{database}") } Some(PostgresVersion::V14) => format!("postgresql://postgres:prisma@127.0.0.1:5437/{database}"), @@ -201,7 +207,7 @@ pub(crate) fn connection_string( } ConnectorVersion::Vitess(Some(VitessVersion::V8_0)) => "mysql://root@localhost:33807/test".into(), - ConnectorVersion::Vitess(Some(VitessVersion::PlanetscaleJs)) => { + ConnectorVersion::Vitess(Some(VitessVersion::PlanetscaleJsNapi | VitessVersion::PlanetscaleJsWasm)) => { format!("mysql://root@127.0.0.1:3310/{database}") } @@ -380,8 +386,8 @@ mod tests { let only = vec![("postgres", None)]; let exclude = vec![("postgres", Some("neon.js"))]; let postgres = &PostgresConnectorTag as ConnectorTag; - let neon = ConnectorVersion::Postgres(Some(PostgresVersion::NeonJs)); - let pg = ConnectorVersion::Postgres(Some(PostgresVersion::PgJs)); + let neon = ConnectorVersion::Postgres(Some(PostgresVersion::NeonJsNapi)); + let pg = ConnectorVersion::Postgres(Some(PostgresVersion::PgJsNapi)); assert!(!super::should_run(&postgres, &neon, &only, &exclude, Default::default())); assert!(super::should_run(&postgres, &pg, &only, &exclude, Default::default())); @@ -393,7 +399,7 @@ mod tests { let only = vec![("postgres", None)]; let exclude = vec![("postgres", None)]; let postgres = &PostgresConnectorTag as ConnectorTag; - let neon = ConnectorVersion::Postgres(Some(PostgresVersion::NeonJs)); + let neon = ConnectorVersion::Postgres(Some(PostgresVersion::NeonJsNapi)); super::should_run(&postgres, &neon, &only, &exclude, Default::default()); } @@ -404,7 +410,7 @@ mod tests { let only = vec![("postgres", Some("neon.js"))]; let exclude = vec![("postgres", None)]; let postgres = &PostgresConnectorTag as ConnectorTag; - let neon = ConnectorVersion::Postgres(Some(PostgresVersion::NeonJs)); + let neon = ConnectorVersion::Postgres(Some(PostgresVersion::NeonJsNapi)); super::should_run(&postgres, &neon, &only, &exclude, Default::default()); } diff --git 
a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/postgres.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/postgres.rs index 42d0a8c7afdc..2a839ab22584 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/postgres.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/postgres.rs @@ -36,8 +36,10 @@ pub enum PostgresVersion { V14, V15, PgBouncer, - NeonJs, - PgJs, + NeonJsNapi, + PgJsNapi, + NeonJsWasm, + PgJsWasm, } impl TryFrom<&str> for PostgresVersion { @@ -53,8 +55,10 @@ impl TryFrom<&str> for PostgresVersion { "14" => Self::V14, "15" => Self::V15, "pgbouncer" => Self::PgBouncer, - "neon.js" => Self::NeonJs, - "pg.js" => Self::PgJs, + "neon.js" => Self::NeonJsNapi, + "pg.js" => Self::PgJsNapi, + "pg.js.wasm" => Self::PgJsWasm, + "neon.js.wasm" => Self::NeonJsWasm, _ => return Err(TestError::parse_error(format!("Unknown Postgres version `{s}`"))), }; @@ -73,8 +77,10 @@ impl ToString for PostgresVersion { PostgresVersion::V14 => "14", PostgresVersion::V15 => "15", PostgresVersion::PgBouncer => "pgbouncer", - PostgresVersion::NeonJs => "neon.js", - PostgresVersion::PgJs => "pg.js", + PostgresVersion::NeonJsNapi => "neon.js", + PostgresVersion::PgJsNapi => "pg.js", + PostgresVersion::PgJsWasm => "pg.js.wasm", + PostgresVersion::NeonJsWasm => "neon.js.wasm", } .to_owned() } diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/sqlite.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/sqlite.rs index 5f4dab56784a..2173bbdd38f2 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/sqlite.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/sqlite.rs @@ -29,14 +29,16 @@ impl ConnectorTagInterface for SqliteConnectorTag { #[derive(Clone, Debug, PartialEq, Eq)] pub enum SqliteVersion { V3, - LibsqlJS, + LibsqlJsNapi, + LibsqlJsWasm, } impl ToString for 
SqliteVersion { fn to_string(&self) -> String { match self { SqliteVersion::V3 => "3".to_string(), - SqliteVersion::LibsqlJS => "libsql.js".to_string(), + SqliteVersion::LibsqlJsNapi => "libsql.js".to_string(), + SqliteVersion::LibsqlJsWasm => "libsql.js.wasm".to_string(), } } } @@ -47,7 +49,8 @@ impl TryFrom<&str> for SqliteVersion { fn try_from(s: &str) -> Result { let version = match s { "3" => Self::V3, - "libsql.js" => Self::LibsqlJS, + "libsql.js" => Self::LibsqlJsNapi, + "libsql.js.wasm" => Self::LibsqlJsWasm, _ => return Err(TestError::parse_error(format!("Unknown SQLite version `{s}`"))), }; Ok(version) diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/vitess.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/vitess.rs index ce827927b403..ba0f4249cd7c 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/vitess.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/vitess.rs @@ -34,7 +34,8 @@ impl ConnectorTagInterface for VitessConnectorTag { #[derive(Debug, Clone, Copy, PartialEq)] pub enum VitessVersion { V8_0, - PlanetscaleJs, + PlanetscaleJsNapi, + PlanetscaleJsWasm, } impl FromStr for VitessVersion { @@ -43,7 +44,8 @@ impl FromStr for VitessVersion { fn from_str(s: &str) -> Result { let version = match s { "8.0" => Self::V8_0, - "planetscale.js" => Self::PlanetscaleJs, + "planetscale.js" => Self::PlanetscaleJsNapi, + "planetscale.js.wasm" => Self::PlanetscaleJsWasm, _ => return Err(TestError::parse_error(format!("Unknown Vitess version `{s}`"))), }; @@ -55,7 +57,8 @@ impl Display for VitessVersion { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { Self::V8_0 => write!(f, "8.0"), - Self::PlanetscaleJs => write!(f, "planetscale.js"), + Self::PlanetscaleJsNapi => write!(f, "planetscale.js"), + Self::PlanetscaleJsWasm => write!(f, "planetscale.js.wasm"), } } } diff --git 
a/query-engine/connector-test-kit-rs/query-tests-setup/src/lib.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/lib.rs index af99d9a7a7d3..d7dbd0f53897 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/lib.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/lib.rs @@ -144,45 +144,47 @@ fn run_relation_link_test_impl( let (dms, capabilities) = schema_with_relation(on_parent, on_child, id_only); - for (i, (dm, caps)) in dms.into_iter().zip(capabilities.into_iter()).enumerate() { - if RELATION_TEST_IDX.map(|idx| idx != i).unwrap_or(false) { - continue; - } - - let required_capabilities_for_test = required_capabilities | caps; - let test_db_name = format!("{suite_name}_{test_name}_{i}"); - let template = dm.datamodel().to_owned(); - let (connector, version) = CONFIG.test_connector().unwrap(); - - if !should_run(&connector, &version, only, exclude, required_capabilities_for_test) { - continue; - } - - let datamodel = render_test_datamodel(&test_db_name, template, &[], None, Default::default(), None); - let (connector_tag, version) = CONFIG.test_connector().unwrap(); - let metrics = setup_metrics(); - let metrics_for_subscriber = metrics.clone(); - let (log_capture, log_tx) = TestLogCapture::new(); - - run_with_tokio( - async move { - println!("Used datamodel:\n {}", datamodel.yellow()); - let runner = Runner::load(datamodel.clone(), &[], version, connector_tag, metrics, log_capture) - .await - .unwrap(); + insta::allow_duplicates! 
{ + for (i, (dm, caps)) in dms.into_iter().zip(capabilities.into_iter()).enumerate() { + if RELATION_TEST_IDX.map(|idx| idx != i).unwrap_or(false) { + continue; + } - test_fn(&runner, &dm).await.unwrap(); + let required_capabilities_for_test = required_capabilities | caps; + let test_db_name = format!("{suite_name}_{test_name}_{i}"); + let template = dm.datamodel().to_owned(); + let (connector, version) = CONFIG.test_connector().unwrap(); - teardown_project(&datamodel, Default::default(), runner.schema_id()) - .await - .unwrap(); + if !should_run(&connector, &version, only, exclude, required_capabilities_for_test) { + continue; } - .with_subscriber(test_tracing_subscriber( - ENV_LOG_LEVEL.to_string(), - metrics_for_subscriber, - log_tx, - )), - ); + + let datamodel = render_test_datamodel(&test_db_name, template, &[], None, Default::default(), None); + let (connector_tag, version) = CONFIG.test_connector().unwrap(); + let metrics = setup_metrics(); + let metrics_for_subscriber = metrics.clone(); + let (log_capture, log_tx) = TestLogCapture::new(); + + run_with_tokio( + async move { + println!("Used datamodel:\n {}", datamodel.yellow()); + let runner = Runner::load(datamodel.clone(), &[], version, connector_tag, metrics, log_capture) + .await + .unwrap(); + + test_fn(&runner, &dm).await.unwrap(); + + teardown_project(&datamodel, Default::default(), runner.schema_id()) + .await + .unwrap(); + } + .with_subscriber(test_tracing_subscriber( + ENV_LOG_LEVEL.to_string(), + metrics_for_subscriber, + log_tx, + )), + ); + } } } @@ -287,7 +289,9 @@ fn run_connector_test_impl( .unwrap(); let schema_id = runner.schema_id(); - test_fn(runner).await.unwrap(); + if let Err(err) = test_fn(runner).await { + panic!("💥 Test failed due to an error: {err:?}"); + } crate::teardown_project(&datamodel, db_schemas, schema_id) .await diff --git a/query-engine/connector-test-kit-rs/test-configs/libsql-wasm b/query-engine/connector-test-kit-rs/test-configs/libsql-wasm index 
b93966875dea..96ca6a4d7f13 100644 --- a/query-engine/connector-test-kit-rs/test-configs/libsql-wasm +++ b/query-engine/connector-test-kit-rs/test-configs/libsql-wasm @@ -1,5 +1,6 @@ { "connector": "sqlite", + "version": "libsql.js.wasm", "driver_adapter": "libsql", "external_test_executor": "Wasm" } \ No newline at end of file diff --git a/query-engine/connector-test-kit-rs/test-configs/neon-wasm b/query-engine/connector-test-kit-rs/test-configs/neon-wasm index 2697c5227399..132796d62ee7 100644 --- a/query-engine/connector-test-kit-rs/test-configs/neon-wasm +++ b/query-engine/connector-test-kit-rs/test-configs/neon-wasm @@ -1,6 +1,6 @@ { "connector": "postgres", - "version": "13", + "version": "neon.js.wasm", "driver_adapter": "neon:ws", "driver_adapter_config": { "proxy_url": "127.0.0.1:5488/v1" }, "external_test_executor": "Wasm" diff --git a/query-engine/connector-test-kit-rs/test-configs/pg-wasm b/query-engine/connector-test-kit-rs/test-configs/pg-wasm index b5d8ac3c7b15..a71ea4ece7bb 100644 --- a/query-engine/connector-test-kit-rs/test-configs/pg-wasm +++ b/query-engine/connector-test-kit-rs/test-configs/pg-wasm @@ -1,6 +1,6 @@ { "connector": "postgres", - "version": "13", + "version": "pg.js.wasm", "driver_adapter": "pg", "external_test_executor": "Wasm" } \ No newline at end of file diff --git a/query-engine/connector-test-kit-rs/test-configs/planetscale-wasm b/query-engine/connector-test-kit-rs/test-configs/planetscale-wasm index 62dd895e970c..b9f190e064c6 100644 --- a/query-engine/connector-test-kit-rs/test-configs/planetscale-wasm +++ b/query-engine/connector-test-kit-rs/test-configs/planetscale-wasm @@ -1,6 +1,6 @@ { "connector": "vitess", - "version": "planetscale.js", + "version": "planetscale.js.wasm", "driver_adapter": "planetscale", "driver_adapter_config": { "proxy_url": "http://root:root@127.0.0.1:8085" diff --git a/query-engine/connectors/sql-query-connector/src/database/js.rs b/query-engine/connectors/sql-query-connector/src/database/js.rs index 
0d4714871e59..16181755b04e 100644 --- a/query-engine/connectors/sql-query-connector/src/database/js.rs +++ b/query-engine/connectors/sql-query-connector/src/database/js.rs @@ -13,6 +13,10 @@ use quaint::{ }; use std::sync::{Arc, Mutex}; +/// TODO: evaluate turning this into `Lazy>>>` to avoid +/// a clone+drop on the adapter passed via `Js::from_source`. +/// Note: this is currently blocked by Napi causing linking errors when building test binaries, +/// as commented in [`DriverAdapter`]. static ACTIVE_DRIVER_ADAPTER: Lazy>> = Lazy::new(|| Mutex::new(None)); fn active_driver_adapter(provider: &str) -> connector::Result { @@ -84,23 +88,21 @@ impl Connector for Js { } } -// TODO: miguelff: I haven´t found a better way to do this, yet... please continue reading. -// -// There is a bug in NAPI-rs by wich compiling a binary crate that links code using napi-rs -// bindings breaks. We could have used a JsQueryable from the `driver-adapters` crate directly, as the -// `connection` field of a driver adapter, but that will imply using napi-rs transitively, and break -// the tests (which are compiled as binary creates) -// -// To avoid the problem above I separated interface from implementation, making DriverAdapter -// independent on napi-rs. Initially, I tried having a field Arc<&dyn TransactionCabable> to hold -// JsQueryable at runtime. I did this, because TransactionCapable is the trait bounds required to -// create a value of `SqlConnection` (see [SqlConnection::new])) to actually performt the queries. -// using JSQueryable. However, this didn't work because TransactionCapable is not object safe. -// (has Sized as a supertrait) -// -// The thing is that TransactionCapable is not object safe and cannot be used in a dynamic type -// declaration, so finally I couldn't come up with anything better then wrapping a QuaintQueryable -// in this object, and implementing TransactionCapable (and quaint::Queryable) explicitly for it. 
+/// There is a bug in NAPI-rs by which compiling a binary crate that links code using napi-rs +/// bindings breaks. We could have used a JsQueryable from the `driver-adapters` crate directly, as the +/// `connection` field of a driver adapter, but that will imply using napi-rs transitively, and break +/// the tests (which are compiled as binary crates) +/// +/// To avoid the problem above I separated interface from implementation, making DriverAdapter +/// independent of napi-rs. Initially, I tried having a field Arc<&dyn TransactionCapable> to hold +/// JsQueryable at runtime. I did this, because TransactionCapable is the trait bounds required to +/// create a value of `SqlConnection` (see [SqlConnection::new]) to actually perform the queries. +/// using JSQueryable. However, this didn't work because TransactionCapable is not object safe. +/// (has Sized as a supertrait) +/// +/// The thing is that TransactionCapable is not object safe and cannot be used in a dynamic type +/// declaration, so finally I couldn't come up with anything better than wrapping a QuaintQueryable +/// in this object, and implementing TransactionCapable (and quaint::Queryable) explicitly for it. 
#[derive(Clone)] pub struct DriverAdapter { connector: Arc, diff --git a/query-engine/core/Cargo.toml b/query-engine/core/Cargo.toml index b23050ab7eec..192f32b217ad 100644 --- a/query-engine/core/Cargo.toml +++ b/query-engine/core/Cargo.toml @@ -5,12 +5,13 @@ version = "0.1.0" [features] metrics = ["query-engine-metrics"] +graphql-protocol = [] [dependencies] async-trait = "0.1" bigdecimal = "0.3" chrono = "0.4" -connection-string.workspace = true +connection-string.workspace = true connector = { path = "../connectors/query-connector", package = "query-connector" } crossbeam-channel = "0.5.6" psl.workspace = true @@ -19,7 +20,9 @@ indexmap = { version = "1.7", features = ["serde-1"] } itertools = "0.10" once_cell = "1" petgraph = "0.4" -query-structure = { path = "../query-structure", features = ["default_generators"] } +query-structure = { path = "../query-structure", features = [ + "default_generators", +] } opentelemetry = { version = "0.17.0", features = ["rt-tokio", "serialize"] } query-engine-metrics = { path = "../metrics", optional = true } serde.workspace = true @@ -34,12 +37,6 @@ user-facing-errors = { path = "../../libs/user-facing-errors" } uuid = "1" cuid = { git = "https://github.com/prisma/cuid-rust", branch = "wasm32-support" } schema = { path = "../schema" } +crosstarget-utils = { path = "../../libs/crosstarget-utils" } lru = "0.7.7" enumflags2 = "0.7" - -pin-project = "1" -wasm-bindgen-futures = "0.4" - -[target.'cfg(target_arch = "wasm32")'.dependencies] -pin-project = "1" -wasm-bindgen-futures = "0.4" diff --git a/query-engine/core/src/executor/execute_operation.rs b/query-engine/core/src/executor/execute_operation.rs index 6ba21d37f9ff..dabe071cd688 100644 --- a/query-engine/core/src/executor/execute_operation.rs +++ b/query-engine/core/src/executor/execute_operation.rs @@ -1,4 +1,5 @@ #![cfg_attr(target_arch = "wasm32", allow(unused_variables))] +#![cfg_attr(not(feature = "metrics"), allow(clippy::let_and_return))] use 
super::pipeline::QueryPipeline; use crate::{ @@ -6,6 +7,7 @@ use crate::{ QueryGraphBuilder, QueryInterpreter, ResponseData, }; use connector::{Connection, ConnectionLike, Connector}; +use crosstarget_utils::time::ElapsedTimeCounter; use futures::future; #[cfg(feature = "metrics")] @@ -14,7 +16,7 @@ use query_engine_metrics::{ }; use schema::{QuerySchema, QuerySchemaRef}; -use std::time::{Duration, Instant}; +use std::time::Duration; use tracing::Instrument; use tracing_futures::WithSubscriber; @@ -24,13 +26,16 @@ pub async fn execute_single_operation( operation: &Operation, trace_id: Option, ) -> crate::Result { - let operation_timer = Instant::now(); + let operation_timer = ElapsedTimeCounter::start(); let (graph, serializer) = build_graph(&query_schema, operation.clone())?; let result = execute_on(conn, graph, serializer, query_schema.as_ref(), trace_id).await; #[cfg(feature = "metrics")] - histogram!(PRISMA_CLIENT_QUERIES_DURATION_HISTOGRAM_MS, operation_timer.elapsed()); + histogram!( + PRISMA_CLIENT_QUERIES_DURATION_HISTOGRAM_MS, + operation_timer.elapsed_time() + ); result } @@ -49,11 +54,14 @@ pub async fn execute_many_operations( let mut results = Vec::with_capacity(queries.len()); for (i, (graph, serializer)) in queries.into_iter().enumerate() { - let operation_timer = Instant::now(); + let operation_timer = ElapsedTimeCounter::start(); let result = execute_on(conn, graph, serializer, query_schema.as_ref(), trace_id.clone()).await; #[cfg(feature = "metrics")] - histogram!(PRISMA_CLIENT_QUERIES_DURATION_HISTOGRAM_MS, operation_timer.elapsed()); + histogram!( + PRISMA_CLIENT_QUERIES_DURATION_HISTOGRAM_MS, + operation_timer.elapsed_time() + ); match result { Ok(result) => results.push(Ok(result)), @@ -116,7 +124,7 @@ pub async fn execute_many_self_contained( ); let conn = connector.get_connection().instrument(conn_span).await?; - futures.push(tokio::spawn( + futures.push(crosstarget_utils::spawn::spawn_if_possible( request_context::with_request_context( 
engine_protocol, execute_self_contained( @@ -150,14 +158,14 @@ async fn execute_self_contained( retry_on_transient_error: bool, trace_id: Option, ) -> crate::Result { - let operation_timer = Instant::now(); + let operation_timer = ElapsedTimeCounter::start(); let result = if retry_on_transient_error { execute_self_contained_with_retry( &mut conn, query_schema, operation, force_transactions, - Instant::now(), + ElapsedTimeCounter::start(), trace_id, ) .await @@ -168,7 +176,10 @@ async fn execute_self_contained( }; #[cfg(feature = "metrics")] - histogram!(PRISMA_CLIENT_QUERIES_DURATION_HISTOGRAM_MS, operation_timer.elapsed()); + histogram!( + PRISMA_CLIENT_QUERIES_DURATION_HISTOGRAM_MS, + operation_timer.elapsed_time() + ); result } @@ -200,7 +211,7 @@ async fn execute_self_contained_with_retry( query_schema: QuerySchemaRef, operation: Operation, force_transactions: bool, - retry_timeout: Instant, + retry_timeout: ElapsedTimeCounter, trace_id: Option, ) -> crate::Result { let (graph, serializer) = build_graph(&query_schema, operation.clone())?; @@ -216,8 +227,8 @@ async fn execute_self_contained_with_retry( let (graph, serializer) = build_graph(&query_schema, operation.clone())?; let res = execute_in_tx(conn, graph, serializer, query_schema.as_ref(), trace_id.clone()).await; - if is_transient_error(&res) && retry_timeout.elapsed() < MAX_TX_TIMEOUT_RETRY_LIMIT { - tokio::time::sleep(TX_RETRY_BACKOFF).await; + if is_transient_error(&res) && retry_timeout.elapsed_time() < MAX_TX_TIMEOUT_RETRY_LIMIT { + crosstarget_utils::time::sleep(TX_RETRY_BACKOFF).await; continue; } else { return res; diff --git a/query-engine/core/src/executor/interpreting_executor.rs b/query-engine/core/src/executor/interpreting_executor.rs index fb2b13938378..0408361b766d 100644 --- a/query-engine/core/src/executor/interpreting_executor.rs +++ b/query-engine/core/src/executor/interpreting_executor.rs @@ -8,7 +8,7 @@ use crate::{ use async_trait::async_trait; use connector::Connector; use 
schema::QuerySchemaRef; -use tokio::time::{self, Duration}; +use tokio::time::Duration; use tracing_futures::Instrument; /// Central query executor and main entry point into the query core. @@ -36,7 +36,8 @@ where } } -#[async_trait] +#[cfg_attr(target_arch = "wasm32", async_trait(?Send))] +#[cfg_attr(not(target_arch = "wasm32"), async_trait)] impl QueryExecutor for InterpretingExecutor where C: Connector + Send + Sync + 'static, @@ -140,7 +141,8 @@ where } } -#[async_trait] +#[cfg_attr(target_arch = "wasm32", async_trait(?Send))] +#[cfg_attr(not(target_arch = "wasm32"), async_trait)] impl TransactionManager for InterpretingExecutor where C: Connector + Send + Sync, @@ -162,7 +164,7 @@ where user_facing = true, "db.type" = self.connector.name() ); - let conn = time::timeout( + let conn = crosstarget_utils::time::timeout( Duration::from_millis(tx_opts.max_acquisition_millis), self.connector.get_connection(), ) diff --git a/query-engine/core/src/executor/mod.rs b/query-engine/core/src/executor/mod.rs index ba2784d3c71a..fee7bc68fe7b 100644 --- a/query-engine/core/src/executor/mod.rs +++ b/query-engine/core/src/executor/mod.rs @@ -10,7 +10,6 @@ mod execute_operation; mod interpreting_executor; mod pipeline; mod request_context; -pub(crate) mod task; pub use self::{execute_operation::*, interpreting_executor::InterpretingExecutor}; @@ -25,7 +24,8 @@ use connector::Connector; use serde::{Deserialize, Serialize}; use tracing::Dispatch; -#[async_trait] +#[cfg_attr(target_arch = "wasm32", async_trait(?Send))] +#[cfg_attr(not(target_arch = "wasm32"), async_trait)] pub trait QueryExecutor: TransactionManager { /// Executes a single operation and returns its result. 
/// Implementers must honor the passed transaction ID and execute the operation on the transaction identified @@ -95,7 +95,8 @@ impl TransactionOptions { tx_id } } -#[async_trait] +#[cfg_attr(target_arch = "wasm32", async_trait(?Send))] +#[cfg_attr(not(target_arch = "wasm32"), async_trait)] pub trait TransactionManager { /// Starts a new transaction. /// Returns ID of newly opened transaction. diff --git a/query-engine/core/src/executor/task.rs b/query-engine/core/src/executor/task.rs deleted file mode 100644 index 8d1c39bbcd06..000000000000 --- a/query-engine/core/src/executor/task.rs +++ /dev/null @@ -1,59 +0,0 @@ -//! This module provides a unified interface for spawning asynchronous tasks, regardless of the target platform. - -pub use arch::{spawn, JoinHandle}; -use futures::Future; - -// On native targets, `tokio::spawn` spawns a new asynchronous task. -#[cfg(not(target_arch = "wasm32"))] -mod arch { - use super::*; - - pub type JoinHandle = tokio::task::JoinHandle; - - pub fn spawn(future: T) -> JoinHandle - where - T: Future + Send + 'static, - T::Output: Send + 'static, - { - tokio::spawn(future) - } -} - -// On Wasm targets, `wasm_bindgen_futures::spawn_local` spawns a new asynchronous task. -#[cfg(target_arch = "wasm32")] -mod arch { - use super::*; - use tokio::sync::oneshot::{self}; - - // Wasm-compatible alternative to `tokio::task::JoinHandle`. - // `pin_project` enables pin-projection and a `Pin`-compatible implementation of the `Future` trait. 
- pub struct JoinHandle(oneshot::Receiver); - - impl Future for JoinHandle { - type Output = Result; - - fn poll(mut self: std::pin::Pin<&mut Self>, cx: &mut std::task::Context<'_>) -> std::task::Poll { - // the `self.project()` method is provided by the `pin_project` macro - core::pin::Pin::new(&mut self.0).poll(cx) - } - } - - impl JoinHandle { - pub fn abort(&mut self) { - // abort is noop on Wasm targets - } - } - - pub fn spawn(future: T) -> JoinHandle - where - T: Future + Send + 'static, - T::Output: Send + 'static, - { - let (sender, receiver) = oneshot::channel(); - wasm_bindgen_futures::spawn_local(async move { - let result = future.await; - sender.send(result).ok(); - }); - JoinHandle(receiver) - } -} diff --git a/query-engine/core/src/interactive_transactions/actor_manager.rs b/query-engine/core/src/interactive_transactions/actor_manager.rs index 105733be4166..e6c1c7fbd1dc 100644 --- a/query-engine/core/src/interactive_transactions/actor_manager.rs +++ b/query-engine/core/src/interactive_transactions/actor_manager.rs @@ -1,6 +1,6 @@ -use crate::executor::task::JoinHandle; use crate::{protocol::EngineProtocol, ClosedTx, Operation, ResponseData}; use connector::Connection; +use crosstarget_utils::task::JoinHandle; use lru::LruCache; use once_cell::sync::Lazy; use schema::QuerySchemaRef; @@ -37,7 +37,6 @@ pub struct TransactionActorManager { impl Drop for TransactionActorManager { fn drop(&mut self) { - debug!("DROPPING TPM"); self.bg_reader_clear.abort(); } } @@ -108,7 +107,7 @@ impl TransactionActorManager { of the transaction. 
Consider increasing the interactive transaction timeout \ or doing less work in the transaction", timeout.as_millis(), - start_time.elapsed().as_millis(), + start_time.elapsed_time().as_millis(), ) } None => { diff --git a/query-engine/core/src/interactive_transactions/actors.rs b/query-engine/core/src/interactive_transactions/actors.rs index 104ffc26812f..86ebd5c13b84 100644 --- a/query-engine/core/src/interactive_transactions/actors.rs +++ b/query-engine/core/src/interactive_transactions/actors.rs @@ -1,10 +1,11 @@ use super::{CachedTx, TransactionError, TxOpRequest, TxOpRequestMsg, TxOpResponse}; -use crate::executor::task::{spawn, JoinHandle}; use crate::{ execute_many_operations, execute_single_operation, protocol::EngineProtocol, ClosedTx, Operation, ResponseData, TxId, }; use connector::Connection; +use crosstarget_utils::task::{spawn, spawn_controlled, JoinHandle}; +use crosstarget_utils::time::ElapsedTimeCounter; use schema::QuerySchemaRef; use std::{collections::HashMap, sync::Arc}; use tokio::{ @@ -12,7 +13,7 @@ use tokio::{ mpsc::{channel, Receiver, Sender}, oneshot, RwLock, }, - time::{self, Duration, Instant}, + time::Duration, }; use tracing::Span; use tracing_futures::Instrument; @@ -296,8 +297,8 @@ pub(crate) async fn spawn_itx_actor( query_schema, ); - let start_time = Instant::now(); - let sleep = time::sleep(timeout); + let start_time = ElapsedTimeCounter::start(); + let sleep = crosstarget_utils::time::sleep(timeout); tokio::pin!(sleep); loop { @@ -314,6 +315,8 @@ pub(crate) async fn spawn_itx_actor( if run_state == RunState::Finished { break } + } else { + break; } } } @@ -385,17 +388,38 @@ pub(crate) fn spawn_client_list_clear_actor( closed_txs: Arc>>>, mut rx: Receiver<(TxId, Option)>, ) -> JoinHandle<()> { - spawn(async move { - loop { - if let Some((id, closed_tx)) = rx.recv().await { - trace!("removing {} from client list", id); + // Note: tasks implemented via loops cannot be cancelled implicitly, so we need to spawn them in a + // 
"controlled" way, via `spawn_controlled`. + // The `rx_exit` receiver is used to signal the loop to exit, and that signal is emitted whenever + // the task is aborted (likely, due to the engine shutting down and cleaning up the allocated resources). + spawn_controlled(Box::new( + |mut rx_exit: tokio::sync::broadcast::Receiver<()>| async move { + loop { + tokio::select! { + result = rx.recv() => { + match result { + Some((id, closed_tx)) => { + trace!("removing {} from client list", id); - let mut clients_guard = clients.write().await; - clients_guard.remove(&id); - drop(clients_guard); + let mut clients_guard = clients.write().await; - closed_txs.write().await.put(id, closed_tx); + clients_guard.remove(&id); + drop(clients_guard); + + closed_txs.write().await.put(id, closed_tx); + } + None => { + // the `rx` channel is closed. + tracing::error!("rx channel is closed!"); + break; + } + } + }, + _ = rx_exit.recv() => { + break; + }, + } } - } - }) + }, + )) } diff --git a/query-engine/core/src/interactive_transactions/mod.rs b/query-engine/core/src/interactive_transactions/mod.rs index ce125e8fa17e..c3ee76703a06 100644 --- a/query-engine/core/src/interactive_transactions/mod.rs +++ b/query-engine/core/src/interactive_transactions/mod.rs @@ -1,8 +1,9 @@ use crate::CoreError; use connector::Transaction; +use crosstarget_utils::time::ElapsedTimeCounter; use serde::Deserialize; use std::fmt::Display; -use tokio::time::{Duration, Instant}; +use tokio::time::Duration; mod actor_manager; mod actors; @@ -104,7 +105,7 @@ impl<'a> CachedTx<'a> { } } - pub(crate) fn to_closed(&self, start_time: Instant, timeout: Duration) -> Option { + pub(crate) fn to_closed(&self, start_time: ElapsedTimeCounter, timeout: Duration) -> Option { match self { CachedTx::Open(_) => None, CachedTx::Committed => Some(ClosedTx::Committed), @@ -117,5 +118,8 @@ impl<'a> CachedTx<'a> { pub(crate) enum ClosedTx { Committed, RolledBack, - Expired { start_time: Instant, timeout: Duration }, + Expired { + 
start_time: ElapsedTimeCounter, + timeout: Duration, + }, } diff --git a/query-engine/core/src/lib.rs b/query-engine/core/src/lib.rs index 38f39e9fb5d9..219b78753277 100644 --- a/query-engine/core/src/lib.rs +++ b/query-engine/core/src/lib.rs @@ -9,10 +9,9 @@ pub mod protocol; pub mod query_document; pub mod query_graph_builder; pub mod response_ir; - -#[cfg(feature = "metrics")] pub mod telemetry; +pub use self::telemetry::*; pub use self::{ error::{CoreError, FieldConversionError}, executor::{QueryExecutor, TransactionOptions}, @@ -20,9 +19,6 @@ pub use self::{ query_document::*, }; -#[cfg(feature = "metrics")] -pub use self::telemetry::*; - pub use connector::{ error::{ConnectorError, ErrorKind as ConnectorErrorKind}, Connector, diff --git a/query-engine/core/src/protocol.rs b/query-engine/core/src/protocol.rs index 75e8dbc0fd70..e92438d5e92d 100644 --- a/query-engine/core/src/protocol.rs +++ b/query-engine/core/src/protocol.rs @@ -3,6 +3,7 @@ use serde::Deserialize; #[derive(Debug, Clone, Copy, Deserialize)] #[serde(rename_all = "camelCase")] pub enum EngineProtocol { + #[cfg(feature = "graphql-protocol")] Graphql, Json, } @@ -14,6 +15,7 @@ impl EngineProtocol { } /// Returns `true` if the engine protocol is [`Graphql`]. + #[cfg(feature = "graphql-protocol")] pub fn is_graphql(&self) -> bool { matches!(self, Self::Graphql) } @@ -22,6 +24,7 @@ impl EngineProtocol { impl From<&String> for EngineProtocol { fn from(s: &String) -> Self { match s.as_str() { + #[cfg(feature = "graphql-protocol")] "graphql" => EngineProtocol::Graphql, "json" => EngineProtocol::Json, x => panic!("Unknown engine protocol '{x}'. 
Must be 'graphql' or 'json'."), diff --git a/query-engine/core/src/response_ir/internal.rs b/query-engine/core/src/response_ir/internal.rs index 47385692b387..a75d69f34573 100644 --- a/query-engine/core/src/response_ir/internal.rs +++ b/query-engine/core/src/response_ir/internal.rs @@ -724,11 +724,13 @@ fn serialize_scalar(field: &OutputField<'_>, value: PrismaValue) -> crate::Resul fn convert_prisma_value(field: &OutputField<'_>, value: PrismaValue, st: &ScalarType) -> crate::Result { match crate::executor::get_engine_protocol() { + #[cfg(feature = "graphql-protocol")] EngineProtocol::Graphql => convert_prisma_value_graphql_protocol(field, value, st), EngineProtocol::Json => convert_prisma_value_json_protocol(field, value, st), } } +#[cfg(feature = "graphql-protocol")] fn convert_prisma_value_graphql_protocol( field: &OutputField<'_>, value: PrismaValue, diff --git a/query-engine/core/src/telemetry/capturing/mod.rs b/query-engine/core/src/telemetry/capturing/mod.rs index 73a5c318697d..bbdc6ae9a083 100644 --- a/query-engine/core/src/telemetry/capturing/mod.rs +++ b/query-engine/core/src/telemetry/capturing/mod.rs @@ -134,7 +134,8 @@ //! - Finally, the server sets the `logs` and `traces` extensions in the `PrismaResponse`**[12]**, //! it serializes the extended response in json format and returns it as an HTTP Response //! blob **[13]**. -//! +//! 
+#![allow(unused_imports, dead_code)] pub use self::capturer::Capturer; pub use self::settings::Settings; pub use tx_ext::TxTraceExt; @@ -142,7 +143,6 @@ pub use tx_ext::TxTraceExt; use self::capturer::Processor; use once_cell::sync::Lazy; use opentelemetry::{global, sdk, trace}; -use query_engine_metrics::MetricRegistry; use tracing::subscriber; use tracing_subscriber::{ filter::filter_fn, layer::Layered, prelude::__tracing_subscriber_SubscriberExt, Layer, Registry, @@ -158,9 +158,13 @@ pub fn capturer(trace_id: trace::TraceId, settings: Settings) -> Capturer { /// Adds a capturing layer to the given subscriber and installs the transformed subscriber as the /// global, default subscriber +#[cfg(feature = "metrics")] #[allow(clippy::type_complexity)] pub fn install_capturing_layer( - subscriber: Layered, Layered + Send + Sync>, Registry>>, + subscriber: Layered< + Option, + Layered + Send + Sync>, Registry>, + >, log_queries: bool, ) { // set a trace context propagator, so that the trace context is propagated via the diff --git a/query-engine/driver-adapters/Cargo.toml b/query-engine/driver-adapters/Cargo.toml index 4c0b55bb0a92..9f9db91287f0 100644 --- a/query-engine/driver-adapters/Cargo.toml +++ b/query-engine/driver-adapters/Cargo.toml @@ -8,23 +8,29 @@ async-trait = "0.1" once_cell = "1.15" serde.workspace = true serde_json.workspace = true -quaint.workspace = true -psl.workspace = true tracing = "0.1" tracing-core = "0.1" metrics = "0.18" uuid = { version = "1", features = ["v4"] } +pin-project = "1" +serde_repr.workspace = true -# Note: these deps are temporarily specified here to avoid importing them from tiberius (the SQL server driver). -# They will be imported from quaint-core instead in a future PR. 
-num-bigint = "0.4.3" -bigdecimal = "0.3.0" -chrono = "0.4.20" futures = "0.3" +[dev-dependencies] +expect-test = "1" +tokio = { version = "1.0", features = ["macros", "time", "sync"] } +wasm-rs-dbg.workspace = true + +[target.'cfg(not(target_arch = "wasm32"))'.dependencies] napi.workspace = true napi-derive.workspace = true +quaint.workspace = true -[dev-dependencies] -expect-test = "1" -tokio.workspace = true +[target.'cfg(target_arch = "wasm32")'.dependencies] +quaint = { path = "../../quaint" } +js-sys.workspace = true +serde-wasm-bindgen.workspace = true +wasm-bindgen.workspace = true +wasm-bindgen-futures.workspace = true +tsify.workspace = true diff --git a/query-engine/driver-adapters/connector-test-kit-executor/src/index.ts b/query-engine/driver-adapters/connector-test-kit-executor/src/index.ts index 4e847742e51b..632a01c89eab 100644 --- a/query-engine/driver-adapters/connector-test-kit-executor/src/index.ts +++ b/query-engine/driver-adapters/connector-test-kit-executor/src/index.ts @@ -22,6 +22,9 @@ import { PrismaPlanetScale } from '@prisma/adapter-planetscale' import {bindAdapter, DriverAdapter, ErrorCapturingDriverAdapter} from "@prisma/driver-adapter-utils"; +import { webcrypto } from 'node:crypto'; + +(global as any).crypto = webcrypto const SUPPORTED_ADAPTERS: Record Promise> diff --git a/query-engine/driver-adapters/connector-test-kit-executor/src/wasm.ts b/query-engine/driver-adapters/connector-test-kit-executor/src/wasm.ts index 439fd0c3f94f..6eea2ee36cef 100644 --- a/query-engine/driver-adapters/connector-test-kit-executor/src/wasm.ts +++ b/query-engine/driver-adapters/connector-test-kit-executor/src/wasm.ts @@ -9,6 +9,5 @@ const bytes = await fs.readFile(path.resolve(dirname, '..', '..', '..', 'query-e const module = new WebAssembly.Module(bytes) const instance = new WebAssembly.Instance(module, { './query_engine_bg.js': wasm }) wasm.__wbg_set_wasm(instance.exports); -wasm.init() export const WasmQueryEngine = wasm.QueryEngine \ No newline at 
end of file diff --git a/query-engine/driver-adapters/src/conversion/js_arg.rs b/query-engine/driver-adapters/src/conversion/js_arg.rs new file mode 100644 index 000000000000..c5b65e80882a --- /dev/null +++ b/query-engine/driver-adapters/src/conversion/js_arg.rs @@ -0,0 +1,16 @@ +use serde::Serialize; +use serde_json::value::Value as JsonValue; + +#[derive(Debug, PartialEq, Serialize)] +#[serde(untagged)] +pub enum JSArg { + Value(serde_json::Value), + Buffer(Vec), + Array(Vec), +} + +impl From for JSArg { + fn from(v: JsonValue) -> Self { + JSArg::Value(v) + } +} diff --git a/query-engine/driver-adapters/src/conversion/js_to_quaint.rs b/query-engine/driver-adapters/src/conversion/js_to_quaint.rs new file mode 100644 index 000000000000..3f5a65395896 --- /dev/null +++ b/query-engine/driver-adapters/src/conversion/js_to_quaint.rs @@ -0,0 +1,701 @@ +use std::borrow::Cow; +use std::str::FromStr; + +pub use crate::types::{ColumnType, JSResultSet, Query, TransactionOptions}; +use quaint::bigdecimal::{BigDecimal, FromPrimitive}; +use quaint::chrono::{DateTime, NaiveDate, NaiveTime, Utc}; +use quaint::{ + connector::ResultSet as QuaintResultSet, + error::{Error as QuaintError, ErrorKind}, + Value as QuaintValue, +}; + +impl TryFrom for QuaintResultSet { + type Error = quaint::error::Error; + + fn try_from(js_result_set: JSResultSet) -> Result { + let JSResultSet { + rows, + column_names, + column_types, + last_insert_id, + } = js_result_set; + + let mut quaint_rows = Vec::with_capacity(rows.len()); + + for row in rows { + let mut quaint_row = Vec::with_capacity(column_types.len()); + + for (i, row) in row.into_iter().enumerate() { + let column_type = column_types[i]; + let column_name = column_names[i].as_str(); + + quaint_row.push(js_value_to_quaint(row, column_type, column_name)?); + } + + quaint_rows.push(quaint_row); + } + + let last_insert_id = last_insert_id.and_then(|id| id.parse::().ok()); + let mut quaint_result_set = QuaintResultSet::new(column_names, 
quaint_rows); + + // Not a fan of this (extracting the `Some` value from an `Option` and pass it to a method that creates a new `Some` value), + // but that's Quaint's ResultSet API and that's how the MySQL connector does it. + // Sqlite, on the other hand, uses a `last_insert_id.unwrap_or(0)` approach. + if let Some(last_insert_id) = last_insert_id { + quaint_result_set.set_last_insert_id(last_insert_id); + } + + Ok(quaint_result_set) + } +} + +fn conversion_error(args: &std::fmt::Arguments) -> QuaintError { + let msg = match args.as_str() { + Some(s) => Cow::Borrowed(s), + None => Cow::Owned(args.to_string()), + }; + QuaintError::builder(ErrorKind::ConversionError(msg)).build() +} + +macro_rules! conversion_error { + ($($arg:tt)*) => { + conversion_error(&format_args!($($arg)*)) + }; +} + +/// Handle data-type conversion from a JSON value to a Quaint value. +/// This is used for most data types, except those that require connector-specific handling, e.g., `ColumnType::Boolean`. +pub fn js_value_to_quaint( + json_value: serde_json::Value, + column_type: ColumnType, + column_name: &str, +) -> quaint::Result> { + let parse_number_as_i64 = |n: &serde_json::Number| { + n.as_i64().ok_or(conversion_error!( + "number must be an integer in column '{column_name}', got '{n}'" + )) + }; + + // Note for the future: it may be worth revisiting how much bloat so many panics with different static + // strings add to the compiled artefact, and in case we should come up with a restricted set of panic + // messages, or even find a way of removing them altogether. 
+ match column_type { + ColumnType::Int32 => match json_value { + serde_json::Value::Number(n) => { + // n.as_i32() is not implemented, so we need to downcast from i64 instead + parse_number_as_i64(&n) + .and_then(|n| -> quaint::Result { + n.try_into() + .map_err(|e| conversion_error!("cannot convert {n} to i32 in column '{column_name}': {e}")) + }) + .map(QuaintValue::int32) + } + serde_json::Value::String(s) => s.parse::().map(QuaintValue::int32).map_err(|e| { + conversion_error!("string-encoded number must be an i32 in column '{column_name}', got {s}: {e}") + }), + serde_json::Value::Null => Ok(QuaintValue::null_int32()), + mismatch => Err(conversion_error!( + "expected an i32 number in column '{column_name}', found {mismatch}" + )), + }, + ColumnType::Int64 => match json_value { + serde_json::Value::Number(n) => parse_number_as_i64(&n).map(QuaintValue::int64), + serde_json::Value::String(s) => s.parse::().map(QuaintValue::int64).map_err(|e| { + conversion_error!("string-encoded number must be an i64 in column '{column_name}', got {s}: {e}") + }), + serde_json::Value::Null => Ok(QuaintValue::null_int64()), + mismatch => Err(conversion_error!( + "expected a string or number in column '{column_name}', found {mismatch}" + )), + }, + ColumnType::Float => match json_value { + // n.as_f32() is not implemented, so we need to downcast from f64 instead. + // We assume that the JSON value is a valid f32 number, but we check for overflows anyway. 
+ serde_json::Value::Number(n) => n + .as_f64() + .ok_or(conversion_error!( + "number must be a float in column '{column_name}', got {n}" + )) + .and_then(f64_to_f32) + .map(QuaintValue::float), + serde_json::Value::Null => Ok(QuaintValue::null_float()), + mismatch => Err(conversion_error!( + "expected an f32 number in column '{column_name}', found {mismatch}" + )), + }, + ColumnType::Double => match json_value { + serde_json::Value::Number(n) => n.as_f64().map(QuaintValue::double).ok_or(conversion_error!( + "number must be a f64 in column '{column_name}', got {n}" + )), + serde_json::Value::Null => Ok(QuaintValue::null_double()), + mismatch => Err(conversion_error!( + "expected an f64 number in column '{column_name}', found {mismatch}" + )), + }, + ColumnType::Numeric => match json_value { + serde_json::Value::String(s) => BigDecimal::from_str(&s).map(QuaintValue::numeric).map_err(|e| { + conversion_error!("invalid numeric value when parsing {s} in column '{column_name}': {e}") + }), + serde_json::Value::Number(n) => n + .as_f64() + .and_then(BigDecimal::from_f64) + .ok_or(conversion_error!( + "number must be an f64 in column '{column_name}', got {n}" + )) + .map(QuaintValue::numeric), + serde_json::Value::Null => Ok(QuaintValue::null_numeric()), + mismatch => Err(conversion_error!( + "expected a string-encoded number in column '{column_name}', found {mismatch}", + )), + }, + ColumnType::Boolean => match json_value { + serde_json::Value::Bool(b) => Ok(QuaintValue::boolean(b)), + serde_json::Value::Null => Ok(QuaintValue::null_boolean()), + serde_json::Value::Number(n) => match n.as_i64() { + Some(0) => Ok(QuaintValue::boolean(false)), + Some(1) => Ok(QuaintValue::boolean(true)), + _ => Err(conversion_error!( + "expected number-encoded boolean to be 0 or 1 in column '{column_name}', got {n}" + )), + }, + serde_json::Value::String(s) => match s.as_str() { + "false" | "FALSE" | "0" => Ok(QuaintValue::boolean(false)), + "true" | "TRUE" | "1" => 
Ok(QuaintValue::boolean(true)), + _ => Err(conversion_error!( + "expected string-encoded boolean in column '{column_name}', got {s}" + )), + }, + mismatch => Err(conversion_error!( + "expected a boolean in column '{column_name}', found {mismatch}" + )), + }, + ColumnType::Character => match json_value { + serde_json::Value::String(s) => match s.chars().next() { + Some(c) => Ok(QuaintValue::character(c)), + None => Ok(QuaintValue::null_character()), + }, + serde_json::Value::Null => Ok(QuaintValue::null_character()), + mismatch => Err(conversion_error!( + "expected a string in column '{column_name}', found {mismatch}" + )), + }, + ColumnType::Text => match json_value { + serde_json::Value::String(s) => Ok(QuaintValue::text(s)), + serde_json::Value::Null => Ok(QuaintValue::null_text()), + mismatch => Err(conversion_error!( + "expected a string in column '{column_name}', found {mismatch}" + )), + }, + ColumnType::Date => match json_value { + serde_json::Value::String(s) => NaiveDate::parse_from_str(&s, "%Y-%m-%d") + .map(QuaintValue::date) + .map_err(|_| conversion_error!("expected a date string in column '{column_name}', got {s}")), + serde_json::Value::Null => Ok(QuaintValue::null_date()), + mismatch => Err(conversion_error!( + "expected a string in column '{column_name}', found {mismatch}" + )), + }, + ColumnType::Time => match json_value { + serde_json::Value::String(s) => NaiveTime::parse_from_str(&s, "%H:%M:%S%.f") + .map(QuaintValue::time) + .map_err(|_| conversion_error!("expected a time string in column '{column_name}', got {s}")), + serde_json::Value::Null => Ok(QuaintValue::null_time()), + mismatch => Err(conversion_error!( + "expected a string in column '{column_name}', found {mismatch}" + )), + }, + ColumnType::DateTime => match json_value { + // TODO: change parsing order to prefer RFC3339 + serde_json::Value::String(s) => quaint::chrono::NaiveDateTime::parse_from_str(&s, "%Y-%m-%d %H:%M:%S%.f") + .map(|dt| DateTime::from_utc(dt, Utc)) + .or_else(|_| 
DateTime::parse_from_rfc3339(&s).map(DateTime::::from)) + .map(QuaintValue::datetime) + .map_err(|_| conversion_error!("expected a datetime string in column '{column_name}', found {s}")), + serde_json::Value::Null => Ok(QuaintValue::null_datetime()), + mismatch => Err(conversion_error!( + "expected a string in column '{column_name}', found {mismatch}" + )), + }, + ColumnType::Json => { + match json_value { + // DbNull + serde_json::Value::Null => Ok(QuaintValue::null_json()), + // JsonNull + serde_json::Value::String(s) if s == "$__prisma_null" => Ok(QuaintValue::json(serde_json::Value::Null)), + json => Ok(QuaintValue::json(json)), + } + } + ColumnType::Enum => match json_value { + serde_json::Value::String(s) => Ok(QuaintValue::enum_variant(s)), + serde_json::Value::Null => Ok(QuaintValue::null_enum()), + mismatch => Err(conversion_error!( + "expected a string in column '{column_name}', found {mismatch}" + )), + }, + ColumnType::Bytes => match json_value { + serde_json::Value::String(s) => Ok(QuaintValue::bytes(s.into_bytes())), + serde_json::Value::Array(array) => array + .iter() + .map(|value| value.as_i64().and_then(|maybe_byte| maybe_byte.try_into().ok())) + .collect::>>() + .map(QuaintValue::bytes) + .ok_or(conversion_error!( + "elements of the array in column '{column_name}' must be u8" + )), + serde_json::Value::Null => Ok(QuaintValue::null_bytes()), + mismatch => Err(conversion_error!( + "expected a string or an array in column '{column_name}', found {mismatch}", + )), + }, + ColumnType::Uuid => match json_value { + serde_json::Value::String(s) => uuid::Uuid::parse_str(&s) + .map(QuaintValue::uuid) + .map_err(|_| conversion_error!("Expected a UUID string in column '{column_name}'")), + serde_json::Value::Null => Ok(QuaintValue::null_bytes()), + mismatch => Err(conversion_error!( + "Expected a UUID string in column '{column_name}', found {mismatch}" + )), + }, + ColumnType::UnknownNumber => match json_value { + serde_json::Value::Number(n) => n + .as_i64() 
+ .map(QuaintValue::int64) + .or(n.as_f64().map(QuaintValue::double)) + .ok_or(conversion_error!( + "number must be an i64 or f64 in column '{column_name}', got {n}" + )), + mismatch => Err(conversion_error!( + "expected a either an i64 or a f64 in column '{column_name}', found {mismatch}", + )), + }, + + ColumnType::Int32Array => js_array_to_quaint(ColumnType::Int32, json_value, column_name), + ColumnType::Int64Array => js_array_to_quaint(ColumnType::Int64, json_value, column_name), + ColumnType::FloatArray => js_array_to_quaint(ColumnType::Float, json_value, column_name), + ColumnType::DoubleArray => js_array_to_quaint(ColumnType::Double, json_value, column_name), + ColumnType::NumericArray => js_array_to_quaint(ColumnType::Numeric, json_value, column_name), + ColumnType::BooleanArray => js_array_to_quaint(ColumnType::Boolean, json_value, column_name), + ColumnType::CharacterArray => js_array_to_quaint(ColumnType::Character, json_value, column_name), + ColumnType::TextArray => js_array_to_quaint(ColumnType::Text, json_value, column_name), + ColumnType::DateArray => js_array_to_quaint(ColumnType::Date, json_value, column_name), + ColumnType::TimeArray => js_array_to_quaint(ColumnType::Time, json_value, column_name), + ColumnType::DateTimeArray => js_array_to_quaint(ColumnType::DateTime, json_value, column_name), + ColumnType::JsonArray => js_array_to_quaint(ColumnType::Json, json_value, column_name), + ColumnType::EnumArray => js_array_to_quaint(ColumnType::Enum, json_value, column_name), + ColumnType::BytesArray => js_array_to_quaint(ColumnType::Bytes, json_value, column_name), + ColumnType::UuidArray => js_array_to_quaint(ColumnType::Uuid, json_value, column_name), + + unimplemented => { + todo!("support column type {:?} in column {}", unimplemented, column_name) + } + } +} + +fn js_array_to_quaint( + base_type: ColumnType, + json_value: serde_json::Value, + column_name: &str, +) -> quaint::Result> { + match json_value { + serde_json::Value::Array(array) => 
Ok(QuaintValue::array( + array + .into_iter() + .enumerate() + .map(|(index, elem)| js_value_to_quaint(elem, base_type, &format!("{column_name}[{index}]"))) + .collect::>>()?, + )), + serde_json::Value::Null => Ok(QuaintValue::null_array()), + mismatch => Err(conversion_error!( + "expected an array in column '{column_name}', found {mismatch}", + )), + } +} + +/// Coerce a `f64` to a `f32`, asserting that the conversion is lossless. +/// Note that, when overflow occurs during conversion, the result is `infinity`. +fn f64_to_f32(x: f64) -> quaint::Result { + let y = x as f32; + + if x.is_finite() == y.is_finite() { + Ok(y) + } else { + Err(conversion_error!("f32 overflow during conversion")) + } +} + +#[cfg(test)] +mod proxy_test { + use quaint::bigdecimal::num_bigint::BigInt; + use serde_json::json; + + use super::*; + + #[track_caller] + fn test_null<'a, T: Into>>(quaint_none: T, column_type: ColumnType) { + let json_value = serde_json::Value::Null; + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + assert_eq!(quaint_value, quaint_none.into()); + } + + #[test] + fn js_value_binary_to_quaint() { + let column_type = ColumnType::Bytes; + + // null + test_null(QuaintValue::null_bytes(), column_type); + + // "" + let json_value = serde_json::Value::String("".to_string()); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + assert_eq!(quaint_value, QuaintValue::bytes(vec![])); + + // "hello" + let json_value = serde_json::Value::String("hello".to_string()); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + assert_eq!(quaint_value, QuaintValue::bytes(vec![104, 101, 108, 108, 111])); + } + + #[test] + fn js_value_int32_to_quaint() { + let column_type = ColumnType::Int32; + + // null + test_null(QuaintValue::null_int32(), column_type); + + // 0 + let n: i32 = 0; + let json_value = serde_json::Value::Number(serde_json::Number::from(n)); + let 
quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + assert_eq!(quaint_value, QuaintValue::int32(n)); + + // max + let n: i32 = i32::MAX; + let json_value = serde_json::Value::Number(serde_json::Number::from(n)); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + assert_eq!(quaint_value, QuaintValue::int32(n)); + + // min + let n: i32 = i32::MIN; + let json_value = serde_json::Value::Number(serde_json::Number::from(n)); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + assert_eq!(quaint_value, QuaintValue::int32(n)); + + // string-encoded + let n = i32::MAX; + let json_value = serde_json::Value::String(n.to_string()); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + assert_eq!(quaint_value, QuaintValue::int32(n)); + } + + #[test] + fn js_value_int64_to_quaint() { + let column_type = ColumnType::Int64; + + // null + test_null(QuaintValue::null_int64(), column_type); + + // 0 + let n: i64 = 0; + let json_value = serde_json::Value::String(n.to_string()); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + assert_eq!(quaint_value, QuaintValue::int64(n)); + + // max + let n: i64 = i64::MAX; + let json_value = serde_json::Value::String(n.to_string()); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + assert_eq!(quaint_value, QuaintValue::int64(n)); + + // min + let n: i64 = i64::MIN; + let json_value = serde_json::Value::String(n.to_string()); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + assert_eq!(quaint_value, QuaintValue::int64(n)); + + // number-encoded + let n: i64 = (1 << 53) - 1; // max JS safe integer + let json_value = serde_json::Value::Number(serde_json::Number::from(n)); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + 
assert_eq!(quaint_value, QuaintValue::int64(n)); + } + + #[test] + fn js_value_float_to_quaint() { + let column_type = ColumnType::Float; + + // null + test_null(QuaintValue::null_float(), column_type); + + // 0 + let n: f32 = 0.0; + let json_value = serde_json::Value::Number(serde_json::Number::from_f64(n.into()).unwrap()); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + assert_eq!(quaint_value, QuaintValue::float(n)); + + // max + let n: f32 = f32::MAX; + let json_value = serde_json::Value::Number(serde_json::Number::from_f64(n.into()).unwrap()); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + assert_eq!(quaint_value, QuaintValue::float(n)); + + // min + let n: f32 = f32::MIN; + let json_value = serde_json::Value::Number(serde_json::Number::from_f64(n.into()).unwrap()); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + assert_eq!(quaint_value, QuaintValue::float(n)); + } + + #[test] + fn js_value_double_to_quaint() { + let column_type = ColumnType::Double; + + // null + test_null(QuaintValue::null_double(), column_type); + + // 0 + let n: f64 = 0.0; + let json_value = serde_json::Value::Number(serde_json::Number::from_f64(n).unwrap()); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + assert_eq!(quaint_value, QuaintValue::double(n)); + + // max + let n: f64 = f64::MAX; + let json_value = serde_json::Value::Number(serde_json::Number::from_f64(n).unwrap()); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + assert_eq!(quaint_value, QuaintValue::double(n)); + + // min + let n: f64 = f64::MIN; + let json_value = serde_json::Value::Number(serde_json::Number::from_f64(n).unwrap()); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + assert_eq!(quaint_value, QuaintValue::double(n)); + } + + #[test] + fn 
js_value_numeric_to_quaint() { + let column_type = ColumnType::Numeric; + + // null + test_null(QuaintValue::null_numeric(), column_type); + + let n_as_string = "1234.99"; + let decimal = BigDecimal::new(BigInt::parse_bytes(b"123499", 10).unwrap(), 2); + + let json_value = serde_json::Value::String(n_as_string.into()); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + assert_eq!(quaint_value, QuaintValue::numeric(decimal)); + + let n_as_string = "1234.999999"; + let decimal = BigDecimal::new(BigInt::parse_bytes(b"1234999999", 10).unwrap(), 6); + + let json_value = serde_json::Value::String(n_as_string.into()); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + assert_eq!(quaint_value, QuaintValue::numeric(decimal)); + } + + #[test] + fn js_value_boolean_to_quaint() { + let column_type = ColumnType::Boolean; + + // null + test_null(QuaintValue::null_boolean(), column_type); + + // true + for truthy_value in [json!(true), json!(1), json!("true"), json!("TRUE"), json!("1")] { + let quaint_value = js_value_to_quaint(truthy_value, column_type, "column_name").unwrap(); + assert_eq!(quaint_value, QuaintValue::boolean(true)); + } + + // false + for falsy_value in [json!(false), json!(0), json!("false"), json!("FALSE"), json!("0")] { + let quaint_value = js_value_to_quaint(falsy_value, column_type, "column_name").unwrap(); + assert_eq!(quaint_value, QuaintValue::boolean(false)); + } + } + + #[test] + fn js_value_char_to_quaint() { + let column_type = ColumnType::Character; + + // null + test_null(QuaintValue::null_character(), column_type); + + let c = 'c'; + let json_value = serde_json::Value::String(c.to_string()); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + assert_eq!(quaint_value, QuaintValue::character(c)); + } + + #[test] + fn js_value_text_to_quaint() { + let column_type = ColumnType::Text; + + // null + 
test_null(QuaintValue::null_text(), column_type); + + let s = "some text"; + let json_value = serde_json::Value::String(s.to_string()); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + assert_eq!(quaint_value, QuaintValue::text(s)); + } + + #[test] + fn js_value_date_to_quaint() { + let column_type = ColumnType::Date; + + // null + test_null(QuaintValue::null_date(), column_type); + + let s = "2023-01-01"; + let json_value = serde_json::Value::String(s.to_string()); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + + let date = NaiveDate::from_ymd_opt(2023, 1, 1).unwrap(); + assert_eq!(quaint_value, QuaintValue::date(date)); + } + + #[test] + fn js_value_time_to_quaint() { + let column_type = ColumnType::Time; + + // null + test_null(QuaintValue::null_time(), column_type); + + let s = "23:59:59"; + let json_value = serde_json::Value::String(s.to_string()); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + let time: NaiveTime = NaiveTime::from_hms_opt(23, 59, 59).unwrap(); + assert_eq!(quaint_value, QuaintValue::time(time)); + + let s = "13:02:20.321"; + let json_value = serde_json::Value::String(s.to_string()); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + let time: NaiveTime = NaiveTime::from_hms_milli_opt(13, 2, 20, 321).unwrap(); + assert_eq!(quaint_value, QuaintValue::time(time)); + } + + #[test] + fn js_value_datetime_to_quaint() { + let column_type = ColumnType::DateTime; + + // null + test_null(QuaintValue::null_datetime(), column_type); + + let s = "2023-01-01 23:59:59.415"; + let json_value = serde_json::Value::String(s.to_string()); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + + let datetime = NaiveDate::from_ymd_opt(2023, 1, 1) + .unwrap() + .and_hms_milli_opt(23, 59, 59, 415) + .unwrap(); + let datetime = DateTime::from_utc(datetime, 
Utc); + assert_eq!(quaint_value, QuaintValue::datetime(datetime)); + + let s = "2023-01-01 23:59:59.123456"; + let json_value = serde_json::Value::String(s.to_string()); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + + let datetime = NaiveDate::from_ymd_opt(2023, 1, 1) + .unwrap() + .and_hms_micro_opt(23, 59, 59, 123_456) + .unwrap(); + let datetime = DateTime::from_utc(datetime, Utc); + assert_eq!(quaint_value, QuaintValue::datetime(datetime)); + + let s = "2023-01-01 23:59:59"; + let json_value = serde_json::Value::String(s.to_string()); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + + let datetime = NaiveDate::from_ymd_opt(2023, 1, 1) + .unwrap() + .and_hms_milli_opt(23, 59, 59, 0) + .unwrap(); + let datetime = DateTime::from_utc(datetime, Utc); + assert_eq!(quaint_value, QuaintValue::datetime(datetime)); + } + + #[test] + fn js_value_json_to_quaint() { + let column_type = ColumnType::Json; + + // null + test_null(QuaintValue::null_json(), column_type); + + let json = json!({ + "key": "value", + "nested": [ + true, + false, + 1, + null + ] + }); + let json_value = json.clone(); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + assert_eq!(quaint_value, QuaintValue::json(json.clone())); + } + + #[test] + fn js_value_enum_to_quaint() { + let column_type = ColumnType::Enum; + + // null + test_null(QuaintValue::null_enum(), column_type); + + let s = "some enum variant"; + let json_value = serde_json::Value::String(s.to_string()); + + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + assert_eq!(quaint_value, QuaintValue::enum_variant(s)); + } + + #[test] + fn js_int32_array_to_quaint() { + let column_type = ColumnType::Int32Array; + test_null(QuaintValue::null_array(), column_type); + + let json_value = json!([1, 2, 3]); + let quaint_value = js_value_to_quaint(json_value, column_type, 
"column_name").unwrap(); + + assert_eq!( + quaint_value, + QuaintValue::array(vec![ + QuaintValue::int32(1), + QuaintValue::int32(2), + QuaintValue::int32(3) + ]) + ); + + let json_value = json!([1, 2, {}]); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); + + assert_eq!( + quaint_value.err().unwrap().to_string(), + "Conversion failed: expected an i32 number in column 'column_name[2]', found {}" + ); + } + + #[test] + fn js_text_array_to_quaint() { + let column_type = ColumnType::TextArray; + test_null(QuaintValue::null_array(), column_type); + + let json_value = json!(["hi", "there"]); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + + assert_eq!( + quaint_value, + QuaintValue::array(vec![QuaintValue::text("hi"), QuaintValue::text("there"),]) + ); + + let json_value = json!([10]); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); + + assert_eq!( + quaint_value.err().unwrap().to_string(), + "Conversion failed: expected a string in column 'column_name[0]', found 10" + ); + } +} diff --git a/query-engine/driver-adapters/src/conversion/mod.rs b/query-engine/driver-adapters/src/conversion/mod.rs new file mode 100644 index 000000000000..3ef41fed903e --- /dev/null +++ b/query-engine/driver-adapters/src/conversion/mod.rs @@ -0,0 +1,9 @@ +pub(crate) mod js_arg; +pub(crate) mod js_to_quaint; + +pub(crate) mod mysql; +pub(crate) mod postgres; +pub(crate) mod sqlite; + +pub use js_arg::JSArg; +pub use js_to_quaint::*; diff --git a/query-engine/driver-adapters/src/conversion/mysql.rs b/query-engine/driver-adapters/src/conversion/mysql.rs index aab33213431a..bd59d3b94ed0 100644 --- a/query-engine/driver-adapters/src/conversion/mysql.rs +++ b/query-engine/driver-adapters/src/conversion/mysql.rs @@ -1,4 +1,4 @@ -use crate::conversion::JSArg; +use super::JSArg; use serde_json::value::Value as JsonValue; const DATETIME_FORMAT: &str = "%Y-%m-%d %H:%M:%S%.f"; @@ -28,8 +28,8 @@ 
pub fn value_to_js_arg(value: &quaint::Value) -> serde_json::Result { #[cfg(test)] mod test { use super::*; - use bigdecimal::BigDecimal; - use chrono::*; + use quaint::bigdecimal::BigDecimal; + use quaint::chrono::*; use quaint::ValueType; use std::str::FromStr; @@ -93,6 +93,11 @@ mod test { JSArg::Value(JsonValue::String("23:13:01".to_string())) )) ), + ( + ValueType::Bytes(Some("hello".as_bytes().into())), + JSArg::Buffer("hello".as_bytes().to_vec()) + ), + ]; let mut errors: Vec = vec![]; diff --git a/query-engine/driver-adapters/src/conversion/postgres.rs b/query-engine/driver-adapters/src/conversion/postgres.rs index 113be5170a84..949cc17e9eba 100644 --- a/query-engine/driver-adapters/src/conversion/postgres.rs +++ b/query-engine/driver-adapters/src/conversion/postgres.rs @@ -1,6 +1,6 @@ use crate::conversion::JSArg; -use chrono::format::StrftimeItems; use once_cell::sync::Lazy; +use quaint::chrono::format::StrftimeItems; use serde_json::value::Value as JsonValue; static TIME_FMT: Lazy = Lazy::new(|| StrftimeItems::new("%H:%M:%S%.f")); @@ -30,8 +30,8 @@ pub fn value_to_js_arg(value: &quaint::Value) -> serde_json::Result { #[cfg(test)] mod test { use super::*; - use bigdecimal::BigDecimal; - use chrono::*; + use quaint::bigdecimal::BigDecimal; + use quaint::chrono::*; use quaint::ValueType; use std::str::FromStr; @@ -105,6 +105,10 @@ mod test { JSArg::Value(JsonValue::Null), )) ), + ( + ValueType::Bytes(Some("hello".as_bytes().into())).into_value(), + JSArg::Buffer("hello".as_bytes().to_vec()) + ), ]; let mut errors: Vec = vec![]; diff --git a/query-engine/driver-adapters/src/conversion/sqlite.rs b/query-engine/driver-adapters/src/conversion/sqlite.rs index 032c16923256..b11acdca0d7f 100644 --- a/query-engine/driver-adapters/src/conversion/sqlite.rs +++ b/query-engine/driver-adapters/src/conversion/sqlite.rs @@ -25,8 +25,8 @@ pub fn value_to_js_arg(value: &quaint::Value) -> serde_json::Result { #[cfg(test)] mod test { use super::*; - use 
bigdecimal::BigDecimal; - use chrono::*; + use quaint::bigdecimal::BigDecimal; + use quaint::chrono::*; use quaint::ValueType; use serde_json::Value; use std::str::FromStr; @@ -94,6 +94,10 @@ mod test { JSArg::Value(Value::Null), )) ), + ( + ValueType::Bytes(Some("hello".as_bytes().into())), + JSArg::Buffer("hello".as_bytes().to_vec()) + ), ]; let mut errors: Vec = vec![]; diff --git a/query-engine/driver-adapters/src/error.rs b/query-engine/driver-adapters/src/error.rs index 4f4128088f49..fa01759d9213 100644 --- a/query-engine/driver-adapters/src/error.rs +++ b/query-engine/driver-adapters/src/error.rs @@ -1,35 +1,45 @@ -use futures::{Future, FutureExt}; -use napi::Error as NapiError; -use quaint::error::Error as QuaintError; -use std::{any::Any, panic::AssertUnwindSafe}; +use quaint::error::{MysqlError, PostgresError, SqliteError}; +use serde::Deserialize; -/// transforms a napi error into a quaint error copying the status and reason -/// properties over -pub(crate) fn into_quaint_error(napi_err: NapiError) -> QuaintError { - let status = napi_err.status.as_ref().to_owned(); - let reason = napi_err.reason.clone(); +#[derive(Deserialize)] +#[serde(remote = "PostgresError")] +pub struct PostgresErrorDef { + code: String, + message: String, + severity: String, + detail: Option, + column: Option, + hint: Option, +} - QuaintError::raw_connector_error(status, reason) +#[derive(Deserialize)] +#[serde(remote = "MysqlError")] +pub struct MysqlErrorDef { + pub code: u16, + pub message: String, + pub state: String, } -/// catches a panic thrown during the execution of an asynchronous closure and transforms it into -/// the Error variant of a napi::Result. 
-pub(crate) async fn async_unwinding_panic(fut: F) -> napi::Result -where - F: Future>, -{ - AssertUnwindSafe(fut) - .catch_unwind() - .await - .unwrap_or_else(panic_to_napi_err) +#[derive(Deserialize)] +#[serde(remote = "SqliteError", rename_all = "camelCase")] +pub struct SqliteErrorDef { + pub extended_code: i32, + pub message: Option, } -fn panic_to_napi_err(panic_payload: Box) -> napi::Result { - panic_payload - .downcast_ref::<&str>() - .map(|s| -> String { (*s).to_owned() }) - .or_else(|| panic_payload.downcast_ref::().map(|s| s.to_owned())) - .map(|message| Err(napi::Error::from_reason(format!("PANIC: {message}")))) - .ok_or(napi::Error::from_reason("PANIC: unknown panic".to_string())) - .unwrap() +#[derive(Deserialize)] +#[serde(tag = "kind")] +/// Wrapper for JS-side errors +pub(crate) enum DriverAdapterError { + /// Unexpected JS exception + GenericJs { + id: i32, + }, + UnsupportedNativeDataType { + #[serde(rename = "type")] + native_type: String, + }, + Postgres(#[serde(with = "PostgresErrorDef")] PostgresError), + Mysql(#[serde(with = "MysqlErrorDef")] MysqlError), + Sqlite(#[serde(with = "SqliteErrorDef")] SqliteError), } diff --git a/query-engine/driver-adapters/src/lib.rs b/query-engine/driver-adapters/src/lib.rs index 6e29f9e69609..8c9dc58ba573 100644 --- a/query-engine/driver-adapters/src/lib.rs +++ b/query-engine/driver-adapters/src/lib.rs @@ -1,17 +1,123 @@ //! Query Engine Driver Adapters -//! This crate is responsible for defining a quaint::Connector implementation that uses functions -//! exposed by client connectors via N-API. +//! This crate is responsible for defining a `quaint::Connector` implementation that uses functions +//! exposed by client connectors via either `napi-rs` (on native targets) or `wasm_bindgen` / `js_sys` (on Wasm targets). //! //! A driver adapter is an object defined in javascript that uses a driver -//! (ex. '@planetscale/database') to provide a similar implementation of that of a quaint Connector. i.e. 
the ability to query and execute SQL -//! plus some transformation of types to adhere to what a quaint::Value expresses. +//! (ex. '@planetscale/database') to provide a similar implementation of that of a `quaint::Connector`. i.e. the ability to query and execute SQL +//! plus some transformation of types to adhere to what a `quaint::Value` expresses. //! -mod async_js_function; -mod conversion; -mod error; -mod proxy; -mod queryable; -mod result; -mod transaction; -pub use queryable::{from_napi, JsQueryable}; +pub(crate) mod conversion; +pub(crate) mod error; +pub(crate) mod proxy; +pub(crate) mod queryable; +pub(crate) mod send_future; +pub(crate) mod transaction; +pub(crate) mod types; + +use crate::error::DriverAdapterError; +use quaint::error::{Error as QuaintError, ErrorKind}; + +#[cfg(target_arch = "wasm32")] +pub(crate) use wasm::result::AdapterResult; + +#[cfg(not(target_arch = "wasm32"))] +pub(crate) use napi::result::AdapterResult; + +impl From for QuaintError { + fn from(value: DriverAdapterError) -> Self { + match value { + DriverAdapterError::UnsupportedNativeDataType { native_type } => { + QuaintError::builder(ErrorKind::UnsupportedColumnType { + column_type: native_type, + }) + .build() + } + DriverAdapterError::GenericJs { id } => QuaintError::external_error(id), + DriverAdapterError::Postgres(e) => e.into(), + DriverAdapterError::Mysql(e) => e.into(), + DriverAdapterError::Sqlite(e) => e.into(), + // in future, more error types would be added and we'll need to convert them to proper QuaintErrors here + } + } +} + +pub use queryable::from_js; +pub(crate) use transaction::JsTransaction; + +#[cfg(target_arch = "wasm32")] +pub use wasm::JsObjectExtern as JsObject; + +#[cfg(not(target_arch = "wasm32"))] +pub use ::napi::JsObject; + +#[cfg(not(target_arch = "wasm32"))] +pub mod napi; + +#[cfg(not(target_arch = "wasm32"))] +pub(crate) use napi::*; + +#[cfg(target_arch = "wasm32")] +pub mod wasm; + +#[cfg(target_arch = "wasm32")] +pub(crate) use wasm::*; 
+ +#[cfg(target_arch = "wasm32")] +mod arch { + pub(crate) use js_sys::JsString; + use std::str::FromStr; + use tsify::Tsify; + + pub(crate) fn get_named_property(object: &super::wasm::JsObjectExtern, name: &str) -> JsResult + where + T: From, + { + Ok(object.get(name.into())?.into()) + } + + pub(crate) fn has_named_property(object: &super::wasm::JsObjectExtern, name: &str) -> JsResult { + js_sys::Reflect::has(object, &JsString::from_str(name).unwrap().into()) + } + + pub(crate) fn to_rust_str(value: JsString) -> JsResult { + Ok(value.into()) + } + + pub(crate) fn from_js_value(value: wasm_bindgen::JsValue) -> C + where + C: Tsify + serde::de::DeserializeOwned, + { + C::from_js(value).unwrap() + } + + pub(crate) type JsResult = core::result::Result; +} + +#[cfg(not(target_arch = "wasm32"))] +mod arch { + pub(crate) use ::napi::JsString; + + pub(crate) fn get_named_property(object: &::napi::JsObject, name: &str) -> JsResult + where + T: ::napi::bindgen_prelude::FromNapiValue, + { + object.get_named_property(name) + } + + pub(crate) fn has_named_property(object: &::napi::JsObject, name: &str) -> JsResult { + object.has_named_property(name) + } + + pub(crate) fn to_rust_str(value: JsString) -> JsResult { + Ok(value.into_utf8()?.as_str()?.to_string()) + } + + pub(crate) fn from_js_value(value: C) -> C { + value + } + + pub(crate) type JsResult = ::napi::Result; +} + +pub(crate) use arch::*; diff --git a/query-engine/driver-adapters/src/async_js_function.rs b/query-engine/driver-adapters/src/napi/async_js_function.rs similarity index 83% rename from query-engine/driver-adapters/src/async_js_function.rs rename to query-engine/driver-adapters/src/napi/async_js_function.rs index 4926534f58b1..5b53ecbadc65 100644 --- a/query-engine/driver-adapters/src/async_js_function.rs +++ b/query-engine/driver-adapters/src/napi/async_js_function.rs @@ -2,13 +2,11 @@ use std::marker::PhantomData; use napi::{ bindgen_prelude::*, - threadsafe_function::{ErrorStrategy, ThreadsafeFunction}, 
+ threadsafe_function::{ErrorStrategy, ThreadsafeFunction, ThreadsafeFunctionCallMode}, }; -use crate::{ - error::{async_unwinding_panic, into_quaint_error}, - result::JsResult, -}; +use super::error::{async_unwinding_panic, into_quaint_error}; +use crate::AdapterResult; /// Wrapper for napi-rs's ThreadsafeFunction that is aware of /// JS drivers conventions. Performs following things: @@ -47,7 +45,7 @@ where let js_result = async_unwinding_panic(async { let promise = self .threadsafe_fn - .call_async::>>(arg) + .call_async::>>(arg) .await?; promise.await }) @@ -56,8 +54,8 @@ where js_result.into() } - pub(crate) fn as_raw(&self) -> &ThreadsafeFunction { - &self.threadsafe_fn + pub(crate) fn call_non_blocking(&self, arg: ArgType) { + _ = self.threadsafe_fn.call(arg, ThreadsafeFunctionCallMode::NonBlocking); } } diff --git a/query-engine/driver-adapters/src/conversion.rs b/query-engine/driver-adapters/src/napi/conversion.rs similarity index 84% rename from query-engine/driver-adapters/src/conversion.rs rename to query-engine/driver-adapters/src/napi/conversion.rs index 00061d72de44..ac2dda60a279 100644 --- a/query-engine/driver-adapters/src/conversion.rs +++ b/query-engine/driver-adapters/src/napi/conversion.rs @@ -1,25 +1,7 @@ -pub(crate) mod mysql; -pub(crate) mod postgres; -pub(crate) mod sqlite; +pub(crate) use crate::conversion::JSArg; use napi::bindgen_prelude::{FromNapiValue, ToNapiValue}; use napi::NapiValue; -use serde::Serialize; -use serde_json::value::Value as JsonValue; - -#[derive(Debug, PartialEq, Serialize)] -#[serde(untagged)] -pub enum JSArg { - Value(serde_json::Value), - Buffer(Vec), - Array(Vec), -} - -impl From for JSArg { - fn from(v: JsonValue) -> Self { - JSArg::Value(v) - } -} // FromNapiValue is the napi equivalent to serde::Deserialize. // Note: we can safely leave this unimplemented as we don't need deserialize napi_value back to JSArg. 
diff --git a/query-engine/driver-adapters/src/napi/error.rs b/query-engine/driver-adapters/src/napi/error.rs new file mode 100644 index 000000000000..4f4128088f49 --- /dev/null +++ b/query-engine/driver-adapters/src/napi/error.rs @@ -0,0 +1,35 @@ +use futures::{Future, FutureExt}; +use napi::Error as NapiError; +use quaint::error::Error as QuaintError; +use std::{any::Any, panic::AssertUnwindSafe}; + +/// transforms a napi error into a quaint error copying the status and reason +/// properties over +pub(crate) fn into_quaint_error(napi_err: NapiError) -> QuaintError { + let status = napi_err.status.as_ref().to_owned(); + let reason = napi_err.reason.clone(); + + QuaintError::raw_connector_error(status, reason) +} + +/// catches a panic thrown during the execution of an asynchronous closure and transforms it into +/// the Error variant of a napi::Result. +pub(crate) async fn async_unwinding_panic(fut: F) -> napi::Result +where + F: Future>, +{ + AssertUnwindSafe(fut) + .catch_unwind() + .await + .unwrap_or_else(panic_to_napi_err) +} + +fn panic_to_napi_err(panic_payload: Box) -> napi::Result { + panic_payload + .downcast_ref::<&str>() + .map(|s| -> String { (*s).to_owned() }) + .or_else(|| panic_payload.downcast_ref::().map(|s| s.to_owned())) + .map(|message| Err(napi::Error::from_reason(format!("PANIC: {message}")))) + .ok_or(napi::Error::from_reason("PANIC: unknown panic".to_string())) + .unwrap() +} diff --git a/query-engine/driver-adapters/src/napi/mod.rs b/query-engine/driver-adapters/src/napi/mod.rs new file mode 100644 index 000000000000..c53414c78c85 --- /dev/null +++ b/query-engine/driver-adapters/src/napi/mod.rs @@ -0,0 +1,8 @@ +//! Query Engine Driver Adapters: `napi`-specific implementation. 
+ +mod async_js_function; +mod conversion; +mod error; +pub(crate) mod result; + +pub(crate) use async_js_function::AsyncJsFunction; diff --git a/query-engine/driver-adapters/src/napi/result.rs b/query-engine/driver-adapters/src/napi/result.rs new file mode 100644 index 000000000000..529455bf9a0b --- /dev/null +++ b/query-engine/driver-adapters/src/napi/result.rs @@ -0,0 +1,60 @@ +use crate::error::DriverAdapterError; +use napi::{bindgen_prelude::FromNapiValue, Env, JsUnknown, NapiValue}; + +impl FromNapiValue for DriverAdapterError { + unsafe fn from_napi_value(napi_env: napi::sys::napi_env, napi_val: napi::sys::napi_value) -> napi::Result { + let env = Env::from_raw(napi_env); + let value = JsUnknown::from_raw(napi_env, napi_val)?; + env.from_js_value(value) + } +} + +/// Wrapper for JS-side result type. +/// This Napi-specific implementation has the same shape and API as the Wasm implementation, +/// but it asks for a `FromNapiValue` bound on the generic type. +/// The duplication is needed as it's currently impossible to have target-specific generic bounds in Rust. +pub(crate) enum AdapterResult +where + T: FromNapiValue, +{ + Ok(T), + Err(DriverAdapterError), +} + +impl AdapterResult +where + T: FromNapiValue, +{ + fn from_js_unknown(unknown: JsUnknown) -> napi::Result { + let object = unknown.coerce_to_object()?; + let ok: bool = object.get_named_property("ok")?; + if ok { + let value: JsUnknown = object.get_named_property("value")?; + return Ok(Self::Ok(T::from_unknown(value)?)); + } + + let error = object.get_named_property("error")?; + Ok(Self::Err(error)) + } +} + +impl FromNapiValue for AdapterResult +where + T: FromNapiValue, +{ + unsafe fn from_napi_value(napi_env: napi::sys::napi_env, napi_val: napi::sys::napi_value) -> napi::Result { + Self::from_js_unknown(JsUnknown::from_raw(napi_env, napi_val)?) 
+ } +} + +impl From> for quaint::Result +where + T: FromNapiValue, +{ + fn from(value: AdapterResult) -> Self { + match value { + AdapterResult::Ok(result) => Ok(result), + AdapterResult::Err(error) => Err(error.into()), + } + } +} diff --git a/query-engine/driver-adapters/src/proxy.rs b/query-engine/driver-adapters/src/proxy.rs index 6ce12c5bca42..4e44caf3db75 100644 --- a/query-engine/driver-adapters/src/proxy.rs +++ b/query-engine/driver-adapters/src/proxy.rs @@ -1,28 +1,19 @@ -use std::borrow::Cow; -use std::str::FromStr; -use std::sync::atomic::{AtomicBool, Ordering}; +use crate::send_future::UnsafeFuture; +pub use crate::types::{ColumnType, JSResultSet, Query, TransactionOptions}; +use crate::{from_js_value, get_named_property, has_named_property, to_rust_str, JsObject, JsResult, JsString}; -use crate::async_js_function::AsyncJsFunction; -use crate::conversion::JSArg; -use crate::transaction::JsTransaction; +use crate::{AsyncJsFunction, JsTransaction}; +use futures::Future; use metrics::increment_gauge; -use napi::bindgen_prelude::{FromNapiValue, ToNapiValue}; -use napi::{JsObject, JsString}; -use napi_derive::napi; -use quaint::connector::ResultSet as QuaintResultSet; -use quaint::{ - error::{Error as QuaintError, ErrorKind}, - Value as QuaintValue, -}; +use std::sync::atomic::{AtomicBool, Ordering}; -// TODO(jkomyno): import these 3rd-party crates from the `quaint-core` crate. -use bigdecimal::{BigDecimal, FromPrimitive}; -use chrono::{DateTime, Utc}; -use chrono::{NaiveDate, NaiveTime}; +#[cfg(target_arch = "wasm32")] +use wasm_bindgen::prelude::wasm_bindgen; /// Proxy is a struct wrapping a javascript object that exhibits basic primitives for -/// querying and executing SQL (i.e. a client connector). The Proxy uses NAPI ThreadSafeFunction to -/// invoke the code within the node runtime that implements the client connector. +/// querying and executing SQL (i.e. a client connector). 
The Proxy uses Napi/Wasm's JsFunction +/// to invoke the code within the node runtime that implements the client connector. +#[cfg_attr(target_arch = "wasm32", wasm_bindgen(getter_with_clone))] pub(crate) struct CommonProxy { /// Execute a query given as SQL, interpolating the given parameters. query_raw: AsyncJsFunction, @@ -37,11 +28,14 @@ pub(crate) struct CommonProxy { /// This is a JS proxy for accessing the methods specific to top level /// JS driver objects +#[cfg_attr(target_arch = "wasm32", wasm_bindgen(getter_with_clone))] pub(crate) struct DriverProxy { start_transaction: AsyncJsFunction<(), JsTransaction>, } + /// This a JS proxy for accessing the methods, specific /// to JS transaction objects +#[cfg_attr(target_arch = "wasm32", wasm_bindgen(getter_with_clone))] pub(crate) struct TransactionProxy { /// transaction options options: TransactionOptions, @@ -56,495 +50,22 @@ pub(crate) struct TransactionProxy { closed: AtomicBool, } -/// This result set is more convenient to be manipulated from both Rust and NodeJS. -/// Quaint's version of ResultSet is: -/// -/// pub struct ResultSet { -/// pub(crate) columns: Arc>, -/// pub(crate) rows: Vec>>, -/// pub(crate) last_insert_id: Option, -/// } -/// -/// If we used this ResultSet would we would have worse ergonomics as quaint::Value is a structured -/// enum and cannot be used directly with the #[napi(Object)] macro. Thus requiring us to implement -/// the FromNapiValue and ToNapiValue traits for quaint::Value, and use a different custom type -/// representing the Value in javascript. 
-/// -#[napi(object)] -#[derive(Debug)] -pub struct JSResultSet { - pub column_types: Vec, - pub column_names: Vec, - // Note this might be encoded differently for performance reasons - pub rows: Vec>, - pub last_insert_id: Option, -} - -impl JSResultSet { - pub fn len(&self) -> usize { - self.rows.len() - } -} - -#[napi] -#[derive(Debug)] -pub enum ColumnType { - // [PLANETSCALE_TYPE] (MYSQL_TYPE) -> [TypeScript example] - /// The following PlanetScale type IDs are mapped into Int32: - /// - INT8 (TINYINT) -> e.g. `127` - /// - INT16 (SMALLINT) -> e.g. `32767` - /// - INT24 (MEDIUMINT) -> e.g. `8388607` - /// - INT32 (INT) -> e.g. `2147483647` - Int32 = 0, - - /// The following PlanetScale type IDs are mapped into Int64: - /// - INT64 (BIGINT) -> e.g. `"9223372036854775807"` (String-encoded) - Int64 = 1, - - /// The following PlanetScale type IDs are mapped into Float: - /// - FLOAT32 (FLOAT) -> e.g. `3.402823466` - Float = 2, - - /// The following PlanetScale type IDs are mapped into Double: - /// - FLOAT64 (DOUBLE) -> e.g. `1.7976931348623157` - Double = 3, - - /// The following PlanetScale type IDs are mapped into Numeric: - /// - DECIMAL (DECIMAL) -> e.g. `"99999999.99"` (String-encoded) - Numeric = 4, - - /// The following PlanetScale type IDs are mapped into Boolean: - /// - BOOLEAN (BOOLEAN) -> e.g. `1` - Boolean = 5, - - Character = 6, - - /// The following PlanetScale type IDs are mapped into Text: - /// - TEXT (TEXT) -> e.g. `"foo"` (String-encoded) - /// - VARCHAR (VARCHAR) -> e.g. `"foo"` (String-encoded) - Text = 7, - - /// The following PlanetScale type IDs are mapped into Date: - /// - DATE (DATE) -> e.g. `"2023-01-01"` (String-encoded, yyyy-MM-dd) - Date = 8, - - /// The following PlanetScale type IDs are mapped into Time: - /// - TIME (TIME) -> e.g. `"23:59:59"` (String-encoded, HH:mm:ss) - Time = 9, - - /// The following PlanetScale type IDs are mapped into DateTime: - /// - DATETIME (DATETIME) -> e.g. 
`"2023-01-01 23:59:59"` (String-encoded, yyyy-MM-dd HH:mm:ss) - /// - TIMESTAMP (TIMESTAMP) -> e.g. `"2023-01-01 23:59:59"` (String-encoded, yyyy-MM-dd HH:mm:ss) - DateTime = 10, - - /// The following PlanetScale type IDs are mapped into Json: - /// - JSON (JSON) -> e.g. `"{\"key\": \"value\"}"` (String-encoded) - Json = 11, - - /// The following PlanetScale type IDs are mapped into Enum: - /// - ENUM (ENUM) -> e.g. `"foo"` (String-encoded) - Enum = 12, - - /// The following PlanetScale type IDs are mapped into Bytes: - /// - BLOB (BLOB) -> e.g. `"\u0012"` (String-encoded) - /// - VARBINARY (VARBINARY) -> e.g. `"\u0012"` (String-encoded) - /// - BINARY (BINARY) -> e.g. `"\u0012"` (String-encoded) - /// - GEOMETRY (GEOMETRY) -> e.g. `"\u0012"` (String-encoded) - Bytes = 13, - - /// The following PlanetScale type IDs are mapped into Set: - /// - SET (SET) -> e.g. `"foo,bar"` (String-encoded, comma-separated) - /// This is currently unhandled, and will panic if encountered. - Set = 14, - - /// UUID from postgres-flavored driver adapters is mapped to this type. 
- Uuid = 15, - - /* - * Scalar arrays - */ - /// Int32 array (INT2_ARRAY and INT4_ARRAY in PostgreSQL) - Int32Array = 64, - - /// Int64 array (INT8_ARRAY in PostgreSQL) - Int64Array = 65, - - /// Float array (FLOAT4_ARRAY in PostgreSQL) - FloatArray = 66, - - /// Double array (FLOAT8_ARRAY in PostgreSQL) - DoubleArray = 67, - - /// Numeric array (NUMERIC_ARRAY, MONEY_ARRAY etc in PostgreSQL) - NumericArray = 68, - - /// Boolean array (BOOL_ARRAY in PostgreSQL) - BooleanArray = 69, - - /// Char array (CHAR_ARRAY in PostgreSQL) - CharacterArray = 70, - - /// Text array (TEXT_ARRAY in PostgreSQL) - TextArray = 71, - - /// Date array (DATE_ARRAY in PostgreSQL) - DateArray = 72, - - /// Time array (TIME_ARRAY in PostgreSQL) - TimeArray = 73, - - /// DateTime array (TIMESTAMP_ARRAY in PostgreSQL) - DateTimeArray = 74, - - /// Json array (JSON_ARRAY in PostgreSQL) - JsonArray = 75, - - /// Enum array - EnumArray = 76, - - /// Bytes array (BYTEA_ARRAY in PostgreSQL) - BytesArray = 77, - - /// Uuid array (UUID_ARRAY in PostgreSQL) - UuidArray = 78, - - /* - * Below there are custom types that don't have a 1:1 translation with a quaint::Value. - * enum variant. - */ - /// UnknownNumber is used when the type of the column is a number but of unknown particular type - /// and precision. - /// - /// It's used by some driver adapters, like libsql to return aggregation values like AVG, or - /// COUNT, and it can be mapped to either Int64, or Double - UnknownNumber = 128, -} - -#[napi(object)] -#[derive(Debug)] -pub struct Query { - pub sql: String, - pub args: Vec, -} - -fn conversion_error(args: &std::fmt::Arguments) -> QuaintError { - let msg = match args.as_str() { - Some(s) => Cow::Borrowed(s), - None => Cow::Owned(args.to_string()), - }; - QuaintError::builder(ErrorKind::ConversionError(msg)).build() -} - -macro_rules! 
conversion_error { - ($($arg:tt)*) => { - conversion_error(&format_args!($($arg)*)) - }; -} - -/// Handle data-type conversion from a JSON value to a Quaint value. -/// This is used for most data types, except those that require connector-specific handling, e.g., `ColumnType::Boolean`. -fn js_value_to_quaint( - json_value: serde_json::Value, - column_type: ColumnType, - column_name: &str, -) -> quaint::Result> { - let parse_number_as_i64 = |n: &serde_json::Number| { - n.as_i64().ok_or(conversion_error!( - "number must be an integer in column '{column_name}', got '{n}'" - )) - }; - - // Note for the future: it may be worth revisiting how much bloat so many panics with different static - // strings add to the compiled artefact, and in case we should come up with a restricted set of panic - // messages, or even find a way of removing them altogether. - match column_type { - ColumnType::Int32 => match json_value { - serde_json::Value::Number(n) => { - // n.as_i32() is not implemented, so we need to downcast from i64 instead - parse_number_as_i64(&n) - .and_then(|n| -> quaint::Result { - n.try_into() - .map_err(|e| conversion_error!("cannot convert {n} to i32 in column '{column_name}': {e}")) - }) - .map(QuaintValue::int32) - } - serde_json::Value::String(s) => s.parse::().map(QuaintValue::int32).map_err(|e| { - conversion_error!("string-encoded number must be an i32 in column '{column_name}', got {s}: {e}") - }), - serde_json::Value::Null => Ok(QuaintValue::null_int32()), - mismatch => Err(conversion_error!( - "expected an i32 number in column '{column_name}', found {mismatch}" - )), - }, - ColumnType::Int64 => match json_value { - serde_json::Value::Number(n) => parse_number_as_i64(&n).map(QuaintValue::int64), - serde_json::Value::String(s) => s.parse::().map(QuaintValue::int64).map_err(|e| { - conversion_error!("string-encoded number must be an i64 in column '{column_name}', got {s}: {e}") - }), - serde_json::Value::Null => Ok(QuaintValue::null_int64()), - mismatch 
=> Err(conversion_error!( - "expected a string or number in column '{column_name}', found {mismatch}" - )), - }, - ColumnType::Float => match json_value { - // n.as_f32() is not implemented, so we need to downcast from f64 instead. - // We assume that the JSON value is a valid f32 number, but we check for overflows anyway. - serde_json::Value::Number(n) => n - .as_f64() - .ok_or(conversion_error!( - "number must be a float in column '{column_name}', got {n}" - )) - .and_then(f64_to_f32) - .map(QuaintValue::float), - serde_json::Value::Null => Ok(QuaintValue::null_float()), - mismatch => Err(conversion_error!( - "expected an f32 number in column '{column_name}', found {mismatch}" - )), - }, - ColumnType::Double => match json_value { - serde_json::Value::Number(n) => n.as_f64().map(QuaintValue::double).ok_or(conversion_error!( - "number must be a f64 in column '{column_name}', got {n}" - )), - serde_json::Value::Null => Ok(QuaintValue::null_double()), - mismatch => Err(conversion_error!( - "expected an f64 number in column '{column_name}', found {mismatch}" - )), - }, - ColumnType::Numeric => match json_value { - serde_json::Value::String(s) => BigDecimal::from_str(&s).map(QuaintValue::numeric).map_err(|e| { - conversion_error!("invalid numeric value when parsing {s} in column '{column_name}': {e}") - }), - serde_json::Value::Number(n) => n - .as_f64() - .and_then(BigDecimal::from_f64) - .ok_or(conversion_error!( - "number must be an f64 in column '{column_name}', got {n}" - )) - .map(QuaintValue::numeric), - serde_json::Value::Null => Ok(QuaintValue::null_numeric()), - mismatch => Err(conversion_error!( - "expected a string-encoded number in column '{column_name}', found {mismatch}", - )), - }, - ColumnType::Boolean => match json_value { - serde_json::Value::Bool(b) => Ok(QuaintValue::boolean(b)), - serde_json::Value::Null => Ok(QuaintValue::null_boolean()), - serde_json::Value::Number(n) => match n.as_i64() { - Some(0) => Ok(QuaintValue::boolean(false)), - Some(1) 
=> Ok(QuaintValue::boolean(true)), - _ => Err(conversion_error!( - "expected number-encoded boolean to be 0 or 1 in column '{column_name}', got {n}" - )), - }, - serde_json::Value::String(s) => match s.as_str() { - "false" | "FALSE" | "0" => Ok(QuaintValue::boolean(false)), - "true" | "TRUE" | "1" => Ok(QuaintValue::boolean(true)), - _ => Err(conversion_error!( - "expected string-encoded boolean in column '{column_name}', got {s}" - )), - }, - mismatch => Err(conversion_error!( - "expected a boolean in column '{column_name}', found {mismatch}" - )), - }, - ColumnType::Character => match json_value { - serde_json::Value::String(s) => match s.chars().next() { - Some(c) => Ok(QuaintValue::character(c)), - None => Ok(QuaintValue::null_character()), - }, - serde_json::Value::Null => Ok(QuaintValue::null_character()), - mismatch => Err(conversion_error!( - "expected a string in column '{column_name}', found {mismatch}" - )), - }, - ColumnType::Text => match json_value { - serde_json::Value::String(s) => Ok(QuaintValue::text(s)), - serde_json::Value::Null => Ok(QuaintValue::null_text()), - mismatch => Err(conversion_error!( - "expected a string in column '{column_name}', found {mismatch}" - )), - }, - ColumnType::Date => match json_value { - serde_json::Value::String(s) => NaiveDate::parse_from_str(&s, "%Y-%m-%d") - .map(QuaintValue::date) - .map_err(|_| conversion_error!("expected a date string in column '{column_name}', got {s}")), - serde_json::Value::Null => Ok(QuaintValue::null_date()), - mismatch => Err(conversion_error!( - "expected a string in column '{column_name}', found {mismatch}" - )), - }, - ColumnType::Time => match json_value { - serde_json::Value::String(s) => NaiveTime::parse_from_str(&s, "%H:%M:%S%.f") - .map(QuaintValue::time) - .map_err(|_| conversion_error!("expected a time string in column '{column_name}', got {s}")), - serde_json::Value::Null => Ok(QuaintValue::null_time()), - mismatch => Err(conversion_error!( - "expected a string in column 
'{column_name}', found {mismatch}" - )), - }, - ColumnType::DateTime => match json_value { - // TODO: change parsing order to prefer RFC3339 - serde_json::Value::String(s) => chrono::NaiveDateTime::parse_from_str(&s, "%Y-%m-%d %H:%M:%S%.f") - .map(|dt| DateTime::from_utc(dt, Utc)) - .or_else(|_| DateTime::parse_from_rfc3339(&s).map(DateTime::::from)) - .map(QuaintValue::datetime) - .map_err(|_| conversion_error!("expected a datetime string in column '{column_name}', found {s}")), - serde_json::Value::Null => Ok(QuaintValue::null_datetime()), - mismatch => Err(conversion_error!( - "expected a string in column '{column_name}', found {mismatch}" - )), - }, - ColumnType::Json => { - match json_value { - // DbNull - serde_json::Value::Null => Ok(QuaintValue::null_json()), - // JsonNull - serde_json::Value::String(s) if s == "$__prisma_null" => Ok(QuaintValue::json(serde_json::Value::Null)), - json => Ok(QuaintValue::json(json)), - } - } - ColumnType::Enum => match json_value { - serde_json::Value::String(s) => Ok(QuaintValue::enum_variant(s)), - serde_json::Value::Null => Ok(QuaintValue::null_enum()), - mismatch => Err(conversion_error!( - "expected a string in column '{column_name}', found {mismatch}" - )), - }, - ColumnType::Bytes => match json_value { - serde_json::Value::String(s) => Ok(QuaintValue::bytes(s.into_bytes())), - serde_json::Value::Array(array) => array - .iter() - .map(|value| value.as_i64().and_then(|maybe_byte| maybe_byte.try_into().ok())) - .collect::>>() - .map(QuaintValue::bytes) - .ok_or(conversion_error!( - "elements of the array in column '{column_name}' must be u8" - )), - serde_json::Value::Null => Ok(QuaintValue::null_bytes()), - mismatch => Err(conversion_error!( - "expected a string or an array in column '{column_name}', found {mismatch}", - )), - }, - ColumnType::Uuid => match json_value { - serde_json::Value::String(s) => uuid::Uuid::parse_str(&s) - .map(QuaintValue::uuid) - .map_err(|_| conversion_error!("Expected a UUID string in column 
'{column_name}'")), - serde_json::Value::Null => Ok(QuaintValue::null_bytes()), - mismatch => Err(conversion_error!( - "Expected a UUID string in column '{column_name}', found {mismatch}" - )), - }, - ColumnType::UnknownNumber => match json_value { - serde_json::Value::Number(n) => n - .as_i64() - .map(QuaintValue::int64) - .or(n.as_f64().map(QuaintValue::double)) - .ok_or(conversion_error!( - "number must be an i64 or f64 in column '{column_name}', got {n}" - )), - mismatch => Err(conversion_error!( - "expected a either an i64 or a f64 in column '{column_name}', found {mismatch}", - )), - }, - - ColumnType::Int32Array => js_array_to_quaint(ColumnType::Int32, json_value, column_name), - ColumnType::Int64Array => js_array_to_quaint(ColumnType::Int64, json_value, column_name), - ColumnType::FloatArray => js_array_to_quaint(ColumnType::Float, json_value, column_name), - ColumnType::DoubleArray => js_array_to_quaint(ColumnType::Double, json_value, column_name), - ColumnType::NumericArray => js_array_to_quaint(ColumnType::Numeric, json_value, column_name), - ColumnType::BooleanArray => js_array_to_quaint(ColumnType::Boolean, json_value, column_name), - ColumnType::CharacterArray => js_array_to_quaint(ColumnType::Character, json_value, column_name), - ColumnType::TextArray => js_array_to_quaint(ColumnType::Text, json_value, column_name), - ColumnType::DateArray => js_array_to_quaint(ColumnType::Date, json_value, column_name), - ColumnType::TimeArray => js_array_to_quaint(ColumnType::Time, json_value, column_name), - ColumnType::DateTimeArray => js_array_to_quaint(ColumnType::DateTime, json_value, column_name), - ColumnType::JsonArray => js_array_to_quaint(ColumnType::Json, json_value, column_name), - ColumnType::EnumArray => js_array_to_quaint(ColumnType::Enum, json_value, column_name), - ColumnType::BytesArray => js_array_to_quaint(ColumnType::Bytes, json_value, column_name), - ColumnType::UuidArray => js_array_to_quaint(ColumnType::Uuid, json_value, column_name), - - 
unimplemented => { - todo!("support column type {:?} in column {}", unimplemented, column_name) - } - } -} - -fn js_array_to_quaint( - base_type: ColumnType, - json_value: serde_json::Value, - column_name: &str, -) -> quaint::Result> { - match json_value { - serde_json::Value::Array(array) => Ok(QuaintValue::array( - array - .into_iter() - .enumerate() - .map(|(index, elem)| js_value_to_quaint(elem, base_type, &format!("{column_name}[{index}]"))) - .collect::>>()?, - )), - serde_json::Value::Null => Ok(QuaintValue::null_array()), - mismatch => Err(conversion_error!( - "expected an array in column '{column_name}', found {mismatch}", - )), - } -} - -impl TryFrom for QuaintResultSet { - type Error = quaint::error::Error; - - fn try_from(js_result_set: JSResultSet) -> Result { - let JSResultSet { - rows, - column_names, - column_types, - last_insert_id, - } = js_result_set; - - let mut quaint_rows = Vec::with_capacity(rows.len()); - - for row in rows { - let mut quaint_row = Vec::with_capacity(column_types.len()); - - for (i, row) in row.into_iter().enumerate() { - let column_type = column_types[i]; - let column_name = column_names[i].as_str(); - - quaint_row.push(js_value_to_quaint(row, column_type, column_name)?); - } - - quaint_rows.push(quaint_row); - } - - let last_insert_id = last_insert_id.and_then(|id| id.parse::().ok()); - let mut quaint_result_set = QuaintResultSet::new(column_names, quaint_rows); - - // Not a fan of this (extracting the `Some` value from an `Option` and pass it to a method that creates a new `Some` value), - // but that's Quaint's ResultSet API and that's how the MySQL connector does it. - // Sqlite, on the other hand, uses a `last_insert_id.unwrap_or(0)` approach. 
- if let Some(last_insert_id) = last_insert_id { - quaint_result_set.set_last_insert_id(last_insert_id); - } - - Ok(quaint_result_set) - } -} - impl CommonProxy { - pub fn new(object: &JsObject) -> napi::Result { - // Background infos: + pub fn new(object: &JsObject) -> JsResult { + // Background info: // - the provider was previously called "flavour", so we provide a temporary fallback for third-party providers // to give them time to adapt // - reading a named property that does not exist yields a panic, despite the `Result<_, _>` return type - let provider: JsString = if object.has_named_property("provider")? { - object.get_named_property("provider")? + let provider: JsString = if has_named_property(object, "provider")? { + get_named_property(object, "provider")? } else { - object.get_named_property("flavour")? + get_named_property(object, "flavour")? }; Ok(Self { - query_raw: object.get_named_property("queryRaw")?, - execute_raw: object.get_named_property("executeRaw")?, - provider: provider.into_utf8()?.as_str()?.to_owned(), + query_raw: get_named_property(object, "queryRaw")?, + execute_raw: get_named_property(object, "executeRaw")?, + provider: to_rust_str(provider)?, }) } @@ -558,13 +79,13 @@ impl CommonProxy { } impl DriverProxy { - pub fn new(driver_adapter: &JsObject) -> napi::Result { + pub fn new(object: &JsObject) -> JsResult { Ok(Self { - start_transaction: driver_adapter.get_named_property("startTransaction")?, + start_transaction: get_named_property(object, "startTransaction")?, }) } - pub async fn start_transaction(&self) -> quaint::Result> { + async fn start_transaction_inner(&self) -> quaint::Result> { let tx = self.start_transaction.call(()).await?; // Decrement for this gauge is done in JsTransaction::commit/JsTransaction::rollback @@ -574,21 +95,18 @@ impl DriverProxy { increment_gauge!("prisma_client_queries_active", 1.0); Ok(Box::new(tx)) } -} -#[derive(Debug)] -#[napi(object)] -pub struct TransactionOptions { - /// Whether or not to run a 
phantom query (i.e., a query that only influences Prisma event logs, but not the database itself) - /// before opening a transaction, committing, or rollbacking. - pub use_phantom_query: bool, + pub fn start_transaction(&self) -> UnsafeFuture>> + '_> { + UnsafeFuture(self.start_transaction_inner()) + } } impl TransactionProxy { - pub fn new(js_transaction: &JsObject) -> napi::Result { - let commit = js_transaction.get_named_property("commit")?; - let rollback = js_transaction.get_named_property("rollback")?; - let options = js_transaction.get_named_property("options")?; + pub fn new(js_transaction: &JsObject) -> JsResult { + let commit = get_named_property(js_transaction, "commit")?; + let rollback = get_named_property(js_transaction, "rollback")?; + let options = get_named_property(js_transaction, "options")?; + let options = from_js_value::(options); Ok(Self { commit, @@ -617,9 +135,9 @@ impl TransactionProxy { /// the underlying FFI call will be delivered to JavaScript side in lockstep, so the destructor /// will not attempt rolling the transaction back even if the `commit` future was dropped while /// waiting on the JavaScript call to complete and deliver response. - pub async fn commit(&self) -> quaint::Result<()> { + pub fn commit(&self) -> UnsafeFuture> + '_> { self.closed.store(true, Ordering::Relaxed); - self.commit.call(()).await + UnsafeFuture(self.commit.call(())) } /// Rolls back the transaction via the driver adapter. @@ -637,9 +155,9 @@ impl TransactionProxy { /// the underlying FFI call will be delivered to JavaScript side in lockstep, so the destructor /// will not attempt rolling back again even if the `rollback` future was dropped while waiting /// on the JavaScript call to complete and deliver response. 
- pub async fn rollback(&self) -> quaint::Result<()> { + pub fn rollback(&self) -> UnsafeFuture> + '_> { self.closed.store(true, Ordering::Relaxed); - self.rollback.call(()).await + UnsafeFuture(self.rollback.call(())) } } @@ -649,378 +167,21 @@ impl Drop for TransactionProxy { return; } - _ = self - .rollback - .as_raw() - .call((), napi::threadsafe_function::ThreadsafeFunctionCallMode::NonBlocking); + self.rollback.call_non_blocking(()); } } -/// Coerce a `f64` to a `f32`, asserting that the conversion is lossless. -/// Note that, when overflow occurs during conversion, the result is `infinity`. -fn f64_to_f32(x: f64) -> quaint::Result { - let y = x as f32; - - if x.is_finite() == y.is_finite() { - Ok(y) - } else { - Err(conversion_error!("f32 overflow during conversion")) - } +macro_rules! impl_send_sync_on_wasm { + ($struct:ident) => { + #[cfg(target_arch = "wasm32")] + unsafe impl Send for $struct {} + #[cfg(target_arch = "wasm32")] + unsafe impl Sync for $struct {} + }; } -#[cfg(test)] -mod proxy_test { - use num_bigint::BigInt; - use serde_json::json; - - use super::*; - - #[track_caller] - fn test_null<'a, T: Into>>(quaint_none: T, column_type: ColumnType) { - let json_value = serde_json::Value::Null; - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); - assert_eq!(quaint_value, quaint_none.into()); - } - - #[test] - fn js_value_int32_to_quaint() { - let column_type = ColumnType::Int32; - - // null - test_null(QuaintValue::null_int32(), column_type); - // 0 - let n: i32 = 0; - let json_value = serde_json::Value::Number(serde_json::Number::from(n)); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); - assert_eq!(quaint_value, QuaintValue::int32(n)); - - // max - let n: i32 = i32::MAX; - let json_value = serde_json::Value::Number(serde_json::Number::from(n)); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); - assert_eq!(quaint_value, 
QuaintValue::int32(n)); - - // min - let n: i32 = i32::MIN; - let json_value = serde_json::Value::Number(serde_json::Number::from(n)); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); - assert_eq!(quaint_value, QuaintValue::int32(n)); - - // string-encoded - let n = i32::MAX; - let json_value = serde_json::Value::String(n.to_string()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); - assert_eq!(quaint_value, QuaintValue::int32(n)); - } - - #[test] - fn js_value_int64_to_quaint() { - let column_type = ColumnType::Int64; - - // null - test_null(QuaintValue::null_int64(), column_type); - - // 0 - let n: i64 = 0; - let json_value = serde_json::Value::String(n.to_string()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); - assert_eq!(quaint_value, QuaintValue::int64(n)); - - // max - let n: i64 = i64::MAX; - let json_value = serde_json::Value::String(n.to_string()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); - assert_eq!(quaint_value, QuaintValue::int64(n)); - - // min - let n: i64 = i64::MIN; - let json_value = serde_json::Value::String(n.to_string()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); - assert_eq!(quaint_value, QuaintValue::int64(n)); - - // number-encoded - let n: i64 = (1 << 53) - 1; // max JS safe integer - let json_value = serde_json::Value::Number(serde_json::Number::from(n)); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); - assert_eq!(quaint_value, QuaintValue::int64(n)); - } - - #[test] - fn js_value_float_to_quaint() { - let column_type = ColumnType::Float; - - // null - test_null(QuaintValue::null_float(), column_type); - - // 0 - let n: f32 = 0.0; - let json_value = serde_json::Value::Number(serde_json::Number::from_f64(n.into()).unwrap()); - let quaint_value = js_value_to_quaint(json_value, 
column_type, "column_name").unwrap(); - assert_eq!(quaint_value, QuaintValue::float(n)); - - // max - let n: f32 = f32::MAX; - let json_value = serde_json::Value::Number(serde_json::Number::from_f64(n.into()).unwrap()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); - assert_eq!(quaint_value, QuaintValue::float(n)); - - // min - let n: f32 = f32::MIN; - let json_value = serde_json::Value::Number(serde_json::Number::from_f64(n.into()).unwrap()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); - assert_eq!(quaint_value, QuaintValue::float(n)); - } - - #[test] - fn js_value_double_to_quaint() { - let column_type = ColumnType::Double; - - // null - test_null(QuaintValue::null_double(), column_type); - - // 0 - let n: f64 = 0.0; - let json_value = serde_json::Value::Number(serde_json::Number::from_f64(n).unwrap()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); - assert_eq!(quaint_value, QuaintValue::double(n)); - - // max - let n: f64 = f64::MAX; - let json_value = serde_json::Value::Number(serde_json::Number::from_f64(n).unwrap()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); - assert_eq!(quaint_value, QuaintValue::double(n)); - - // min - let n: f64 = f64::MIN; - let json_value = serde_json::Value::Number(serde_json::Number::from_f64(n).unwrap()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); - assert_eq!(quaint_value, QuaintValue::double(n)); - } - - #[test] - fn js_value_numeric_to_quaint() { - let column_type = ColumnType::Numeric; - - // null - test_null(QuaintValue::null_numeric(), column_type); - - let n_as_string = "1234.99"; - let decimal = BigDecimal::new(BigInt::parse_bytes(b"123499", 10).unwrap(), 2); - - let json_value = serde_json::Value::String(n_as_string.into()); - let quaint_value = js_value_to_quaint(json_value, column_type, 
"column_name").unwrap(); - assert_eq!(quaint_value, QuaintValue::numeric(decimal)); - - let n_as_string = "1234.999999"; - let decimal = BigDecimal::new(BigInt::parse_bytes(b"1234999999", 10).unwrap(), 6); - - let json_value = serde_json::Value::String(n_as_string.into()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); - assert_eq!(quaint_value, QuaintValue::numeric(decimal)); - } - - #[test] - fn js_value_boolean_to_quaint() { - let column_type = ColumnType::Boolean; - - // null - test_null(QuaintValue::null_boolean(), column_type); - - // true - for truthy_value in [json!(true), json!(1), json!("true"), json!("TRUE"), json!("1")] { - let quaint_value = js_value_to_quaint(truthy_value, column_type, "column_name").unwrap(); - assert_eq!(quaint_value, QuaintValue::boolean(true)); - } - - // false - for falsy_value in [json!(false), json!(0), json!("false"), json!("FALSE"), json!("0")] { - let quaint_value = js_value_to_quaint(falsy_value, column_type, "column_name").unwrap(); - assert_eq!(quaint_value, QuaintValue::boolean(false)); - } - } - - #[test] - fn js_value_char_to_quaint() { - let column_type = ColumnType::Character; - - // null - test_null(QuaintValue::null_character(), column_type); - - let c = 'c'; - let json_value = serde_json::Value::String(c.to_string()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); - assert_eq!(quaint_value, QuaintValue::character(c)); - } - - #[test] - fn js_value_text_to_quaint() { - let column_type = ColumnType::Text; - - // null - test_null(QuaintValue::null_text(), column_type); - - let s = "some text"; - let json_value = serde_json::Value::String(s.to_string()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); - assert_eq!(quaint_value, QuaintValue::text(s)); - } - - #[test] - fn js_value_date_to_quaint() { - let column_type = ColumnType::Date; - - // null - test_null(QuaintValue::null_date(), 
column_type); - - let s = "2023-01-01"; - let json_value = serde_json::Value::String(s.to_string()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); - - let date = NaiveDate::from_ymd_opt(2023, 1, 1).unwrap(); - assert_eq!(quaint_value, QuaintValue::date(date)); - } - - #[test] - fn js_value_time_to_quaint() { - let column_type = ColumnType::Time; - - // null - test_null(QuaintValue::null_time(), column_type); - - let s = "23:59:59"; - let json_value = serde_json::Value::String(s.to_string()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); - let time: NaiveTime = NaiveTime::from_hms_opt(23, 59, 59).unwrap(); - assert_eq!(quaint_value, QuaintValue::time(time)); - - let s = "13:02:20.321"; - let json_value = serde_json::Value::String(s.to_string()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); - let time: NaiveTime = NaiveTime::from_hms_milli_opt(13, 2, 20, 321).unwrap(); - assert_eq!(quaint_value, QuaintValue::time(time)); - } - - #[test] - fn js_value_datetime_to_quaint() { - let column_type = ColumnType::DateTime; - - // null - test_null(QuaintValue::null_datetime(), column_type); - - let s = "2023-01-01 23:59:59.415"; - let json_value = serde_json::Value::String(s.to_string()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); - - let datetime = NaiveDate::from_ymd_opt(2023, 1, 1) - .unwrap() - .and_hms_milli_opt(23, 59, 59, 415) - .unwrap(); - let datetime = DateTime::from_utc(datetime, Utc); - assert_eq!(quaint_value, QuaintValue::datetime(datetime)); - - let s = "2023-01-01 23:59:59.123456"; - let json_value = serde_json::Value::String(s.to_string()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); - - let datetime = NaiveDate::from_ymd_opt(2023, 1, 1) - .unwrap() - .and_hms_micro_opt(23, 59, 59, 123_456) - .unwrap(); - let datetime = 
DateTime::from_utc(datetime, Utc); - assert_eq!(quaint_value, QuaintValue::datetime(datetime)); - - let s = "2023-01-01 23:59:59"; - let json_value = serde_json::Value::String(s.to_string()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); - - let datetime = NaiveDate::from_ymd_opt(2023, 1, 1) - .unwrap() - .and_hms_milli_opt(23, 59, 59, 0) - .unwrap(); - let datetime = DateTime::from_utc(datetime, Utc); - assert_eq!(quaint_value, QuaintValue::datetime(datetime)); - } - - #[test] - fn js_value_json_to_quaint() { - let column_type = ColumnType::Json; - - // null - test_null(QuaintValue::null_json(), column_type); - - let json = json!({ - "key": "value", - "nested": [ - true, - false, - 1, - null - ] - }); - let json_value = json.clone(); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); - assert_eq!(quaint_value, QuaintValue::json(json.clone())); - } - - #[test] - fn js_value_enum_to_quaint() { - let column_type = ColumnType::Enum; - - // null - test_null(QuaintValue::null_enum(), column_type); - - let s = "some enum variant"; - let json_value = serde_json::Value::String(s.to_string()); - - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); - assert_eq!(quaint_value, QuaintValue::enum_variant(s)); - } - - #[test] - fn js_int32_array_to_quaint() { - let column_type = ColumnType::Int32Array; - test_null(QuaintValue::null_array(), column_type); - - let json_value = json!([1, 2, 3]); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); - - assert_eq!( - quaint_value, - QuaintValue::array(vec![ - QuaintValue::int32(1), - QuaintValue::int32(2), - QuaintValue::int32(3) - ]) - ); - - let json_value = json!([1, 2, {}]); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); - - assert_eq!( - quaint_value.err().unwrap().to_string(), - "Conversion failed: expected an i32 number in column 
'column_name[2]', found {}" - ); - } - - #[test] - fn js_text_array_to_quaint() { - let column_type = ColumnType::TextArray; - test_null(QuaintValue::null_array(), column_type); - - let json_value = json!(["hi", "there"]); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); - - assert_eq!( - quaint_value, - QuaintValue::array(vec![QuaintValue::text("hi"), QuaintValue::text("there"),]) - ); - - let json_value = json!([10]); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); - - assert_eq!( - quaint_value.err().unwrap().to_string(), - "Conversion failed: expected a string in column 'column_name[0]', found 10" - ); - } -} +// Assume the proxy object will not be sent to service workers, we can unsafe impl Send + Sync. +impl_send_sync_on_wasm!(TransactionProxy); +impl_send_sync_on_wasm!(DriverProxy); +impl_send_sync_on_wasm!(CommonProxy); +impl_send_sync_on_wasm!(JsTransaction); diff --git a/query-engine/driver-adapters/src/queryable.rs b/query-engine/driver-adapters/src/queryable.rs index 728bceed698f..3afa9ecd2180 100644 --- a/query-engine/driver-adapters/src/queryable.rs +++ b/query-engine/driver-adapters/src/queryable.rs @@ -1,10 +1,11 @@ -use crate::{ - conversion, - proxy::{CommonProxy, DriverProxy, Query}, -}; +use crate::proxy::{CommonProxy, DriverProxy}; +use crate::types::{AdapterFlavour, Query}; +use crate::JsObject; + +use super::conversion; +use crate::send_future::UnsafeFuture; use async_trait::async_trait; -use napi::JsObject; -use psl::datamodel_connector::Flavour; +use futures::Future; use quaint::{ connector::{metrics, IsolationLevel, Transaction}, error::{Error, ErrorKind}, @@ -13,6 +14,9 @@ use quaint::{ }; use tracing::{info_span, Instrument}; +#[cfg(target_arch = "wasm32")] +use wasm_bindgen::prelude::wasm_bindgen; + /// A JsQueryable adapts a Proxy to implement quaint's Queryable interface. 
It has the /// responsibility of transforming inputs and outputs of `query` and `execute` methods from quaint /// types to types that can be translated into javascript and viceversa. This is to let the rest of @@ -25,25 +29,24 @@ use tracing::{info_span, Instrument}; /// Transforming a `JSResultSet` (what client connectors implemented in javascript provide) /// into a `quaint::connector::result_set::ResultSet`. A quaint `ResultSet` is basically a vector /// of `quaint::Value` but said type is a tagged enum, with non-unit variants that cannot be converted to javascript as is. -/// +#[cfg_attr(target_arch = "wasm32", wasm_bindgen(getter_with_clone))] pub(crate) struct JsBaseQueryable { pub(crate) proxy: CommonProxy, - pub provider: Flavour, + pub provider: AdapterFlavour, } impl JsBaseQueryable { pub(crate) fn new(proxy: CommonProxy) -> Self { - let provider: Flavour = proxy.provider.parse().unwrap(); + let provider: AdapterFlavour = proxy.provider.parse().unwrap(); Self { proxy, provider } } /// visit a quaint query AST according to the provider of the JS connector fn visit_quaint_query<'a>(&self, q: QuaintQuery<'a>) -> quaint::Result<(String, Vec>)> { match self.provider { - Flavour::Mysql => visitor::Mysql::build(q), - Flavour::Postgres => visitor::Postgres::build(q), - Flavour::Sqlite => visitor::Sqlite::build(q), - _ => unimplemented!("Unsupported provider for JS connector {:?}", self.provider), + AdapterFlavour::Mysql => visitor::Mysql::build(q), + AdapterFlavour::Postgres => visitor::Postgres::build(q), + AdapterFlavour::Sqlite => visitor::Sqlite::build(q), } } @@ -51,10 +54,9 @@ impl JsBaseQueryable { let sql: String = sql.to_string(); let converter = match self.provider { - Flavour::Postgres => conversion::postgres::value_to_js_arg, - Flavour::Sqlite => conversion::sqlite::value_to_js_arg, - Flavour::Mysql => conversion::mysql::value_to_js_arg, - _ => unreachable!("Unsupported provider for JS connector {:?}", self.provider), + AdapterFlavour::Postgres => 
conversion::postgres::value_to_js_arg, + AdapterFlavour::Sqlite => conversion::sqlite::value_to_js_arg, + AdapterFlavour::Mysql => conversion::mysql::value_to_js_arg, }; let args = values @@ -126,7 +128,7 @@ impl QuaintQueryable for JsBaseQueryable { return Err(Error::builder(ErrorKind::invalid_isolation_level(&isolation_level)).build()); } - if self.provider == Flavour::Sqlite { + if self.provider == AdapterFlavour::Sqlite { return match isolation_level { IsolationLevel::Serializable => Ok(()), _ => Err(Error::builder(ErrorKind::invalid_isolation_level(&isolation_level)).build()), @@ -139,9 +141,8 @@ impl QuaintQueryable for JsBaseQueryable { fn requires_isolation_first(&self) -> bool { match self.provider { - Flavour::Mysql => true, - Flavour::Postgres | Flavour::Sqlite => false, - _ => unreachable!(), + AdapterFlavour::Mysql => true, + AdapterFlavour::Postgres | AdapterFlavour::Sqlite => false, } } } @@ -151,7 +152,7 @@ impl JsBaseQueryable { format!(r#"-- Implicit "{}" query via underlying driver"#, stmt) } - async fn do_query_raw(&self, sql: &str, params: &[quaint::Value<'_>]) -> quaint::Result { + async fn do_query_raw_inner(&self, sql: &str, params: &[quaint::Value<'_>]) -> quaint::Result { let len = params.len(); let serialization_span = info_span!("js:query:args", user_facing = true, "length" = %len); let query = self.build_query(sql, params).instrument(serialization_span).await?; @@ -165,7 +166,15 @@ impl JsBaseQueryable { result_set.try_into() } - async fn do_execute_raw(&self, sql: &str, params: &[quaint::Value<'_>]) -> quaint::Result { + fn do_query_raw<'a>( + &'a self, + sql: &'a str, + params: &'a [quaint::Value<'a>], + ) -> UnsafeFuture> + 'a> { + UnsafeFuture(self.do_query_raw_inner(sql, params)) + } + + async fn do_execute_raw_inner(&self, sql: &str, params: &[quaint::Value<'_>]) -> quaint::Result { let len = params.len(); let serialization_span = info_span!("js:query:args", user_facing = true, "length" = %len); let query = self.build_query(sql, 
params).instrument(serialization_span).await?; @@ -175,6 +184,14 @@ impl JsBaseQueryable { Ok(affected_rows as u64) } + + fn do_execute_raw<'a>( + &'a self, + sql: &'a str, + params: &'a [quaint::Value<'a>], + ) -> UnsafeFuture> + 'a> { + UnsafeFuture(self.do_execute_raw_inner(sql, params)) + } } /// A JsQueryable adapts a Proxy to implement quaint's Queryable interface. It has the @@ -292,7 +309,7 @@ impl TransactionCapable for JsQueryable { } } -pub fn from_napi(driver: JsObject) -> JsQueryable { +pub fn from_js(driver: JsObject) -> JsQueryable { let common = CommonProxy::new(&driver).unwrap(); let driver_proxy = DriverProxy::new(&driver).unwrap(); diff --git a/query-engine/driver-adapters/src/result.rs b/query-engine/driver-adapters/src/result.rs deleted file mode 100644 index ad4ce7cbb546..000000000000 --- a/query-engine/driver-adapters/src/result.rs +++ /dev/null @@ -1,119 +0,0 @@ -use napi::{bindgen_prelude::FromNapiValue, Env, JsUnknown, NapiValue}; -use quaint::error::{Error as QuaintError, ErrorKind, MysqlError, PostgresError, SqliteError}; -use serde::Deserialize; - -#[derive(Deserialize)] -#[serde(remote = "PostgresError")] -pub struct PostgresErrorDef { - code: String, - message: String, - severity: String, - detail: Option, - column: Option, - hint: Option, -} - -#[derive(Deserialize)] -#[serde(remote = "MysqlError")] -pub struct MysqlErrorDef { - pub code: u16, - pub message: String, - pub state: String, -} - -#[derive(Deserialize)] -#[serde(remote = "SqliteError", rename_all = "camelCase")] -pub struct SqliteErrorDef { - pub extended_code: i32, - pub message: Option, -} - -#[derive(Deserialize)] -#[serde(tag = "kind")] -/// Wrapper for JS-side errors -pub(crate) enum DriverAdapterError { - /// Unexpected JS exception - GenericJs { - id: i32, - }, - UnsupportedNativeDataType { - #[serde(rename = "type")] - native_type: String, - }, - Postgres(#[serde(with = "PostgresErrorDef")] PostgresError), - Mysql(#[serde(with = "MysqlErrorDef")] MysqlError), - 
Sqlite(#[serde(with = "SqliteErrorDef")] SqliteError), -} - -impl FromNapiValue for DriverAdapterError { - unsafe fn from_napi_value(napi_env: napi::sys::napi_env, napi_val: napi::sys::napi_value) -> napi::Result { - let env = Env::from_raw(napi_env); - let value = JsUnknown::from_raw(napi_env, napi_val)?; - env.from_js_value(value) - } -} - -impl From for QuaintError { - fn from(value: DriverAdapterError) -> Self { - match value { - DriverAdapterError::UnsupportedNativeDataType { native_type } => { - QuaintError::builder(ErrorKind::UnsupportedColumnType { - column_type: native_type, - }) - .build() - } - DriverAdapterError::GenericJs { id } => QuaintError::external_error(id), - DriverAdapterError::Postgres(e) => e.into(), - DriverAdapterError::Mysql(e) => e.into(), - DriverAdapterError::Sqlite(e) => e.into(), - // in future, more error types would be added and we'll need to convert them to proper QuaintErrors here - } - } -} - -/// Wrapper for JS-side result type -pub(crate) enum JsResult -where - T: FromNapiValue, -{ - Ok(T), - Err(DriverAdapterError), -} - -impl JsResult -where - T: FromNapiValue, -{ - fn from_js_unknown(unknown: JsUnknown) -> napi::Result { - let object = unknown.coerce_to_object()?; - let ok: bool = object.get_named_property("ok")?; - if ok { - let value: JsUnknown = object.get_named_property("value")?; - return Ok(Self::Ok(T::from_unknown(value)?)); - } - - let error = object.get_named_property("error")?; - Ok(Self::Err(error)) - } -} - -impl FromNapiValue for JsResult -where - T: FromNapiValue, -{ - unsafe fn from_napi_value(napi_env: napi::sys::napi_env, napi_val: napi::sys::napi_value) -> napi::Result { - Self::from_js_unknown(JsUnknown::from_raw(napi_env, napi_val)?) 
- } -} - -impl From> for quaint::Result -where - T: FromNapiValue, -{ - fn from(value: JsResult) -> Self { - match value { - JsResult::Ok(result) => Ok(result), - JsResult::Err(error) => Err(error.into()), - } - } -} diff --git a/query-engine/driver-adapters/src/send_future.rs b/query-engine/driver-adapters/src/send_future.rs new file mode 100644 index 000000000000..52a59d764708 --- /dev/null +++ b/query-engine/driver-adapters/src/send_future.rs @@ -0,0 +1,32 @@ +use futures::Future; + +/// Allow asynchronous futures to be sent across threads, solving the following error on `wasm32-*` targets: +/// +/// ```text +/// future cannot be sent between threads safely +/// the trait `Send` is not implemented for `dyn Future>`. +/// ``` +/// +/// This wrapper is used by both the Napi.rs and Wasm implementation of `driver-adapters`, but is only really +/// needed because `wasm-bindgen` does not implement `Send` for `Future`, and most of the codebase +/// uses `#[async_trait]`, which requires `Send` on the future returned by `async fn` declarations. +/// +/// In fact, `UnsafeFuture` safely implements `Send` if `F` implements `Future + Send`, which is the case +/// with Napi.rs, but not with Wasm. 
+/// +/// See: https://github.com/rustwasm/wasm-bindgen/issues/2409#issuecomment-820750943 +#[pin_project::pin_project] +pub struct UnsafeFuture(#[pin] pub F); + +impl Future for UnsafeFuture { + type Output = F::Output; + + fn poll(self: std::pin::Pin<&mut Self>, cx: &mut std::task::Context<'_>) -> std::task::Poll { + // the `self.project()` method is provided by the `pin_project` macro + let future: std::pin::Pin<&mut F> = self.project().0; + future.poll(cx) + } +} + +#[cfg(target_arch = "wasm32")] +unsafe impl Send for UnsafeFuture {} diff --git a/query-engine/driver-adapters/src/transaction.rs b/query-engine/driver-adapters/src/transaction.rs index d35a9019c6bc..264c363ea608 100644 --- a/query-engine/driver-adapters/src/transaction.rs +++ b/query-engine/driver-adapters/src/transaction.rs @@ -1,16 +1,14 @@ use async_trait::async_trait; use metrics::decrement_gauge; -use napi::{bindgen_prelude::FromNapiValue, JsObject}; use quaint::{ connector::{IsolationLevel, Transaction as QuaintTransaction}, prelude::{Query as QuaintQuery, Queryable, ResultSet}, Value, }; -use crate::{ - proxy::{CommonProxy, TransactionOptions, TransactionProxy}, - queryable::JsBaseQueryable, -}; +use crate::proxy::{TransactionOptions, TransactionProxy}; +use crate::{proxy::CommonProxy, queryable::JsBaseQueryable, send_future::UnsafeFuture}; +use crate::{JsObject, JsResult}; // Wrapper around JS transaction objects that implements Queryable // and quaint::Transaction. 
Can be used in place of quaint transaction, @@ -50,7 +48,7 @@ impl QuaintTransaction for JsTransaction { self.inner.raw_cmd(commit_stmt).await?; } - self.tx_proxy.commit().await + UnsafeFuture(self.tx_proxy.commit()).await } async fn rollback(&self) -> quaint::Result<()> { @@ -66,7 +64,7 @@ impl QuaintTransaction for JsTransaction { self.inner.raw_cmd(rollback_stmt).await?; } - self.tx_proxy.rollback().await + UnsafeFuture(self.tx_proxy.rollback()).await } fn as_queryable(&self) -> &dyn Queryable { @@ -121,12 +119,25 @@ impl Queryable for JsTransaction { } } -/// Implementing unsafe `from_napi_value` is only way I managed to get threadsafe -/// JsTransaction value in `DriverProxy`. Going through any intermediate safe napi.rs value, -/// like `JsObject` or `JsUnknown` wrapped inside `JsPromise` makes it impossible to extract the value -/// out of promise while keeping the future `Send`. -impl FromNapiValue for JsTransaction { - unsafe fn from_napi_value(env: napi::sys::napi_env, napi_val: napi::sys::napi_value) -> napi::Result { +#[cfg(target_arch = "wasm32")] +impl super::wasm::FromJsValue for JsTransaction { + fn from_js_value(value: wasm_bindgen::prelude::JsValue) -> JsResult { + use wasm_bindgen::JsCast; + + let object = value.dyn_into::()?; + let common_proxy = CommonProxy::new(&object)?; + let base = JsBaseQueryable::new(common_proxy); + let tx_proxy = TransactionProxy::new(&object)?; + + Ok(Self::new(base, tx_proxy)) + } +} + +/// Implementing unsafe `from_napi_value` allows retrieving a threadsafe `JsTransaction` in `DriverProxy` +/// while keeping derived futures `Send`. 
+#[cfg(not(target_arch = "wasm32"))] +impl ::napi::bindgen_prelude::FromNapiValue for JsTransaction { + unsafe fn from_napi_value(env: napi::sys::napi_env, napi_val: napi::sys::napi_value) -> JsResult { let object = JsObject::from_napi_value(env, napi_val)?; let common_proxy = CommonProxy::new(&object)?; let tx_proxy = TransactionProxy::new(&object)?; diff --git a/query-engine/driver-adapters/src/types.rs b/query-engine/driver-adapters/src/types.rs new file mode 100644 index 000000000000..0df9b93c8987 --- /dev/null +++ b/query-engine/driver-adapters/src/types.rs @@ -0,0 +1,225 @@ +#![allow(unused_imports)] + +use std::str::FromStr; + +#[cfg(not(target_arch = "wasm32"))] +use napi::bindgen_prelude::{FromNapiValue, ToNapiValue}; + +#[cfg(target_arch = "wasm32")] +use tsify::Tsify; + +use crate::conversion::JSArg; +use serde::{Deserialize, Serialize}; +use serde_repr::{Deserialize_repr, Serialize_repr}; + +#[cfg_attr(target_arch = "wasm32", derive(Serialize, Deserialize, Tsify))] +#[cfg_attr(target_arch = "wasm32", tsify(into_wasm_abi, from_wasm_abi))] +#[derive(Debug, Eq, PartialEq, Clone)] +pub enum AdapterFlavour { + Mysql, + Postgres, + Sqlite, +} + +impl FromStr for AdapterFlavour { + type Err = String; + + fn from_str(s: &str) -> Result { + match s { + "postgres" => Ok(Self::Postgres), + "mysql" => Ok(Self::Mysql), + "sqlite" => Ok(Self::Sqlite), + _ => Err(format!("Unsupported adapter flavour: {:?}", s)), + } + } +} + +/// This result set is more convenient to be manipulated from both Rust and NodeJS. +/// Quaint's version of ResultSet is: +/// +/// pub struct ResultSet { +/// pub(crate) columns: Arc>, +/// pub(crate) rows: Vec>>, +/// pub(crate) last_insert_id: Option, +/// } +/// +/// If we used this ResultSet would we would have worse ergonomics as quaint::Value is a structured +/// enum and cannot be used directly with the #[napi(Object)] macro. 
Thus requiring us to implement +/// the FromNapiValue and ToNapiValue traits for quaint::Value, and use a different custom type +/// representing the Value in javascript. +/// +#[cfg_attr(not(target_arch = "wasm32"), napi_derive::napi(object))] +#[cfg_attr(target_arch = "wasm32", derive(Serialize, Deserialize, Tsify))] +#[cfg_attr(target_arch = "wasm32", tsify(into_wasm_abi, from_wasm_abi))] +#[cfg_attr(target_arch = "wasm32", serde(rename_all = "camelCase"))] +#[derive(Debug)] +pub struct JSResultSet { + pub column_types: Vec, + pub column_names: Vec, + // Note this might be encoded differently for performance reasons + pub rows: Vec>, + pub last_insert_id: Option, +} + +impl JSResultSet { + pub fn len(&self) -> usize { + self.rows.len() + } +} + +#[cfg_attr(not(target_arch = "wasm32"), napi_derive::napi(object))] +#[cfg_attr(target_arch = "wasm32", derive(Clone, Copy, Serialize_repr, Deserialize_repr, Tsify))] +#[cfg_attr(target_arch = "wasm32", tsify(into_wasm_abi, from_wasm_abi))] +#[repr(u8)] +#[derive(Debug)] +pub enum ColumnType { + // [PLANETSCALE_TYPE] (MYSQL_TYPE) -> [TypeScript example] + /// The following PlanetScale type IDs are mapped into Int32: + /// - INT8 (TINYINT) -> e.g. `127` + /// - INT16 (SMALLINT) -> e.g. `32767` + /// - INT24 (MEDIUMINT) -> e.g. `8388607` + /// - INT32 (INT) -> e.g. `2147483647` + Int32 = 0, + + /// The following PlanetScale type IDs are mapped into Int64: + /// - INT64 (BIGINT) -> e.g. `"9223372036854775807"` (String-encoded) + Int64 = 1, + + /// The following PlanetScale type IDs are mapped into Float: + /// - FLOAT32 (FLOAT) -> e.g. `3.402823466` + Float = 2, + + /// The following PlanetScale type IDs are mapped into Double: + /// - FLOAT64 (DOUBLE) -> e.g. `1.7976931348623157` + Double = 3, + + /// The following PlanetScale type IDs are mapped into Numeric: + /// - DECIMAL (DECIMAL) -> e.g. 
`"99999999.99"` (String-encoded) + Numeric = 4, + + /// The following PlanetScale type IDs are mapped into Boolean: + /// - BOOLEAN (BOOLEAN) -> e.g. `1` + Boolean = 5, + + Character = 6, + + /// The following PlanetScale type IDs are mapped into Text: + /// - TEXT (TEXT) -> e.g. `"foo"` (String-encoded) + /// - VARCHAR (VARCHAR) -> e.g. `"foo"` (String-encoded) + Text = 7, + + /// The following PlanetScale type IDs are mapped into Date: + /// - DATE (DATE) -> e.g. `"2023-01-01"` (String-encoded, yyyy-MM-dd) + Date = 8, + + /// The following PlanetScale type IDs are mapped into Time: + /// - TIME (TIME) -> e.g. `"23:59:59"` (String-encoded, HH:mm:ss) + Time = 9, + + /// The following PlanetScale type IDs are mapped into DateTime: + /// - DATETIME (DATETIME) -> e.g. `"2023-01-01 23:59:59"` (String-encoded, yyyy-MM-dd HH:mm:ss) + /// - TIMESTAMP (TIMESTAMP) -> e.g. `"2023-01-01 23:59:59"` (String-encoded, yyyy-MM-dd HH:mm:ss) + DateTime = 10, + + /// The following PlanetScale type IDs are mapped into Json: + /// - JSON (JSON) -> e.g. `"{\"key\": \"value\"}"` (String-encoded) + Json = 11, + + /// The following PlanetScale type IDs are mapped into Enum: + /// - ENUM (ENUM) -> e.g. `"foo"` (String-encoded) + Enum = 12, + + /// The following PlanetScale type IDs are mapped into Bytes: + /// - BLOB (BLOB) -> e.g. `"\u0012"` (String-encoded) + /// - VARBINARY (VARBINARY) -> e.g. `"\u0012"` (String-encoded) + /// - BINARY (BINARY) -> e.g. `"\u0012"` (String-encoded) + /// - GEOMETRY (GEOMETRY) -> e.g. `"\u0012"` (String-encoded) + Bytes = 13, + + /// The following PlanetScale type IDs are mapped into Set: + /// - SET (SET) -> e.g. `"foo,bar"` (String-encoded, comma-separated) + /// This is currently unhandled, and will panic if encountered. + Set = 14, + + /// UUID from postgres-flavored driver adapters is mapped to this type. 
+ Uuid = 15, + + /* + * Scalar arrays + */ + /// Int32 array (INT2_ARRAY and INT4_ARRAY in PostgreSQL) + Int32Array = 64, + + /// Int64 array (INT8_ARRAY in PostgreSQL) + Int64Array = 65, + + /// Float array (FLOAT4_ARRAY in PostgreSQL) + FloatArray = 66, + + /// Double array (FLOAT8_ARRAY in PostgreSQL) + DoubleArray = 67, + + /// Numeric array (NUMERIC_ARRAY, MONEY_ARRAY etc in PostgreSQL) + NumericArray = 68, + + /// Boolean array (BOOL_ARRAY in PostgreSQL) + BooleanArray = 69, + + /// Char array (CHAR_ARRAY in PostgreSQL) + CharacterArray = 70, + + /// Text array (TEXT_ARRAY in PostgreSQL) + TextArray = 71, + + /// Date array (DATE_ARRAY in PostgreSQL) + DateArray = 72, + + /// Time array (TIME_ARRAY in PostgreSQL) + TimeArray = 73, + + /// DateTime array (TIMESTAMP_ARRAY in PostgreSQL) + DateTimeArray = 74, + + /// Json array (JSON_ARRAY in PostgreSQL) + JsonArray = 75, + + /// Enum array + EnumArray = 76, + + /// Bytes array (BYTEA_ARRAY in PostgreSQL) + BytesArray = 77, + + /// Uuid array (UUID_ARRAY in PostgreSQL) + UuidArray = 78, + + /* + * Below there are custom types that don't have a 1:1 translation with a quaint::Value. + * enum variant. + */ + /// UnknownNumber is used when the type of the column is a number but of unknown particular type + /// and precision. 
+ /// + /// It's used by some driver adapters, like libsql to return aggregation values like AVG, or + /// COUNT, and it can be mapped to either Int64, or Double + UnknownNumber = 128, +} + +#[cfg_attr(not(target_arch = "wasm32"), napi_derive::napi(object))] +#[cfg_attr(target_arch = "wasm32", derive(Serialize, Tsify))] +#[cfg_attr(target_arch = "wasm32", tsify(into_wasm_abi))] +#[derive(Debug, Default)] +pub struct Query { + pub sql: String, + pub args: Vec, +} + +#[cfg_attr(not(target_arch = "wasm32"), napi_derive::napi(object))] +#[cfg_attr(target_arch = "wasm32", derive(Serialize, Deserialize, Tsify))] +#[cfg_attr(target_arch = "wasm32", tsify(into_wasm_abi, from_wasm_abi))] +#[cfg_attr(target_arch = "wasm32", serde(rename_all = "camelCase"))] +#[derive(Debug, Default)] +pub struct TransactionOptions { + /// Whether or not to run a phantom query (i.e., a query that only influences Prisma event logs, but not the database itself) + /// before opening a transaction, committing, or rolling back. + pub use_phantom_query: bool, +} diff --git a/query-engine/driver-adapters/src/wasm/async_js_function.rs b/query-engine/driver-adapters/src/wasm/async_js_function.rs new file mode 100644 index 000000000000..bda40cc87a58 --- /dev/null +++ b/query-engine/driver-adapters/src/wasm/async_js_function.rs @@ -0,0 +1,113 @@ +use js_sys::{Function as JsFunction, Promise as JsPromise}; +use serde::Serialize; +use serde_wasm_bindgen::Serializer; +use std::marker::PhantomData; +use wasm_bindgen::convert::FromWasmAbi; +use wasm_bindgen::describe::WasmDescribe; +use wasm_bindgen::{JsCast, JsError, JsValue}; +use wasm_bindgen_futures::JsFuture; + +use super::error::into_quaint_error; +use super::from_js::FromJsValue; +use crate::AdapterResult; + +// `serialize_missing_as_null` is required to make sure that "empty" values (e.g., `None` and `()`) +// are serialized as `null` and not `undefined`. +// This is due to certain drivers (e.g., LibSQL) not supporting `undefined` values. 
+static SERIALIZER: Serializer = Serializer::new().serialize_missing_as_null(true); + +#[derive(Clone)] +pub(crate) struct AsyncJsFunction +where + ArgType: Serialize, + ReturnType: FromJsValue, +{ + fn_: JsFunction, + + _phantom_arg: PhantomData, + _phantom_return: PhantomData, +} + +impl From for AsyncJsFunction +where + T: Serialize, + R: FromJsValue, +{ + fn from(js_value: JsValue) -> Self { + JsFunction::from(js_value).into() + } +} + +impl From for AsyncJsFunction +where + T: Serialize, + R: FromJsValue, +{ + fn from(js_fn: JsFunction) -> Self { + Self { + fn_: js_fn, + _phantom_arg: PhantomData:: {}, + _phantom_return: PhantomData:: {}, + } + } +} + +impl AsyncJsFunction +where + T: Serialize, + R: FromJsValue, +{ + pub(crate) async fn call(&self, arg1: T) -> quaint::Result { + let result = self.call_internal(arg1).await; + + match result { + Ok(js_result) => js_result.into(), + Err(err) => Err(into_quaint_error(err)), + } + } + + async fn call_internal(&self, arg1: T) -> Result, JsValue> { + let arg1 = arg1 + .serialize(&SERIALIZER) + .map_err(|err| JsValue::from(JsError::from(&err)))?; + let return_value = self.fn_.call1(&JsValue::null(), &arg1)?; + + let value = if let Some(promise) = return_value.dyn_ref::() { + JsFuture::from(promise.to_owned()).await? 
+ } else { + return_value + }; + + let js_result = AdapterResult::::from_js_value(value)?; + + Ok(js_result) + } + + pub(crate) fn call_non_blocking(&self, arg: T) { + if let Ok(arg) = serde_wasm_bindgen::to_value(&arg) { + _ = self.fn_.call1(&JsValue::null(), &arg); + } + } +} + +impl WasmDescribe for AsyncJsFunction +where + ArgType: Serialize, + ReturnType: FromJsValue, +{ + fn describe() { + JsFunction::describe(); + } +} + +impl FromWasmAbi for AsyncJsFunction +where + ArgType: Serialize, + ReturnType: FromJsValue, +{ + type Abi = ::Abi; + + unsafe fn from_abi(js: Self::Abi) -> Self { + JsFunction::from_abi(js).into() + } +} diff --git a/query-engine/driver-adapters/src/wasm/error.rs b/query-engine/driver-adapters/src/wasm/error.rs new file mode 100644 index 000000000000..0aa4fe7981f2 --- /dev/null +++ b/query-engine/driver-adapters/src/wasm/error.rs @@ -0,0 +1,13 @@ +use js_sys::Reflect; +use quaint::error::Error as QuaintError; +use wasm_bindgen::JsValue; + +/// transforms a Wasm error into a Quaint error +pub(crate) fn into_quaint_error(wasm_err: JsValue) -> QuaintError { + let status = "WASM_ERROR".to_string(); + let reason = Reflect::get(&wasm_err, &JsValue::from_str("stack")) + .ok() + .and_then(|value| value.as_string()) + .unwrap_or_else(|| "Unknown error".to_string()); + QuaintError::raw_connector_error(status, reason) +} diff --git a/query-engine/driver-adapters/src/wasm/from_js.rs b/query-engine/driver-adapters/src/wasm/from_js.rs new file mode 100644 index 000000000000..a49095ddbff1 --- /dev/null +++ b/query-engine/driver-adapters/src/wasm/from_js.rs @@ -0,0 +1,15 @@ +use serde::de::DeserializeOwned; +use wasm_bindgen::JsValue; + +pub(crate) trait FromJsValue: Sized { + fn from_js_value(value: JsValue) -> Result; +} + +impl FromJsValue for T +where + T: DeserializeOwned, +{ + fn from_js_value(value: JsValue) -> Result { + serde_wasm_bindgen::from_value(value).map_err(JsValue::from) + } +} diff --git 
a/query-engine/driver-adapters/src/wasm/js_object_extern.rs b/query-engine/driver-adapters/src/wasm/js_object_extern.rs new file mode 100644 index 000000000000..ac9f72619eac --- /dev/null +++ b/query-engine/driver-adapters/src/wasm/js_object_extern.rs @@ -0,0 +1,12 @@ +use js_sys::{JsString, Object as JsObject}; +use wasm_bindgen::{prelude::wasm_bindgen, JsValue}; + +#[wasm_bindgen] +extern "C" { + #[wasm_bindgen(js_name = String, extends = JsObject, is_type_of = JsValue::is_object, typescript_type = "object")] + pub type JsObjectExtern; + + // Note: this custom getter allows us to avoid runtime reflection via `js_sys::Reflect`. + #[wasm_bindgen(method, catch, structural, indexing_getter)] + pub fn get(this: &JsObjectExtern, key: JsString) -> Result; +} diff --git a/query-engine/driver-adapters/src/wasm/mod.rs b/query-engine/driver-adapters/src/wasm/mod.rs new file mode 100644 index 000000000000..a71e6f5d21c9 --- /dev/null +++ b/query-engine/driver-adapters/src/wasm/mod.rs @@ -0,0 +1,11 @@ +//! Query Engine Driver Adapters: `wasm`-specific implementation. + +mod async_js_function; +mod error; +mod from_js; +mod js_object_extern; +pub(crate) mod result; + +pub(crate) use async_js_function::AsyncJsFunction; +pub(crate) use from_js::FromJsValue; +pub use js_object_extern::JsObjectExtern; diff --git a/query-engine/driver-adapters/src/wasm/result.rs b/query-engine/driver-adapters/src/wasm/result.rs new file mode 100644 index 000000000000..18a9c4b26443 --- /dev/null +++ b/query-engine/driver-adapters/src/wasm/result.rs @@ -0,0 +1,51 @@ +use js_sys::Boolean as JsBoolean; +use wasm_bindgen::{JsCast, JsValue}; + +use super::from_js::FromJsValue; +use crate::{error::DriverAdapterError, JsObjectExtern}; + +/// Wrapper for JS-side result type. +/// This Wasm-specific implementation has the same shape and API as the Napi implementation, +/// but it asks for a `FromJsValue` bound on the generic type. 
+/// The duplication is needed as it's currently impossible to have target-specific generic bounds in Rust. +pub(crate) enum AdapterResult +where + T: FromJsValue, +{ + Ok(T), + Err(DriverAdapterError), +} + +impl FromJsValue for AdapterResult +where + T: FromJsValue, +{ + fn from_js_value(unknown: JsValue) -> Result { + let object = unknown.unchecked_into::(); + + let ok: JsBoolean = object.get("ok".into())?.unchecked_into(); + let ok = ok.value_of(); + + if ok { + let js_value: JsValue = object.get("value".into())?; + let deserialized = T::from_js_value(js_value)?; + return Ok(Self::Ok(deserialized)); + } + + let error = object.get("error".into())?; + let error: DriverAdapterError = serde_wasm_bindgen::from_value(error)?; + Ok(Self::Err(error)) + } +} + +impl From> for quaint::Result +where + T: FromJsValue, +{ + fn from(value: AdapterResult) -> Self { + match value { + AdapterResult::Ok(result) => Ok(result), + AdapterResult::Err(error) => Err(error.into()), + } + } +} diff --git a/query-engine/query-engine-node-api/src/engine.rs b/query-engine/query-engine-node-api/src/engine.rs index 23782af1776a..1d56239ecf6d 100644 --- a/query-engine/query-engine-node-api/src/engine.rs +++ b/query-engine/query-engine-node-api/src/engine.rs @@ -192,7 +192,7 @@ impl QueryEngine { } else { #[cfg(feature = "driver-adapters")] if let Some(adapter) = maybe_adapter { - let js_queryable = driver_adapters::from_napi(adapter); + let js_queryable = driver_adapters::from_js(adapter); sql_connector::activate_driver_adapter(Arc::new(js_queryable)); connector_mode = ConnectorMode::Js; diff --git a/query-engine/query-engine-node-api/src/functions.rs b/query-engine/query-engine-node-api/src/functions.rs index 868178f7361d..5178d82d6120 100644 --- a/query-engine/query-engine-node-api/src/functions.rs +++ b/query-engine/query-engine-node-api/src/functions.rs @@ -1,8 +1,9 @@ -use crate::error::ApiError; use napi_derive::napi; use request_handlers::dmmf; use std::sync::Arc; +use 
crate::error::ApiError; + #[derive(serde::Serialize, Clone, Copy)] #[napi(object)] pub struct Version { diff --git a/query-engine/query-engine-wasm/Cargo.toml b/query-engine/query-engine-wasm/Cargo.toml index 60da1c11c6d4..171610f2831a 100644 --- a/query-engine/query-engine-wasm/Cargo.toml +++ b/query-engine/query-engine-wasm/Cargo.toml @@ -9,36 +9,40 @@ crate-type = ["cdylib"] name = "query_engine_wasm" [dependencies] + +query-connector = { path = "../connectors/query-connector" } + anyhow = "1" async-trait = "0.1" user-facing-errors = { path = "../../libs/user-facing-errors" } psl.workspace = true query-structure = { path = "../query-structure" } quaint = { path = "../../quaint" } +sql-connector = { path = "../connectors/sql-query-connector", package = "sql-query-connector" } request-handlers = { path = "../request-handlers", default-features = false, features = [ "sql", "driver-adapters", ] } -connector = { path = "../connectors/query-connector", package = "query-connector" } -sql-query-connector = { path = "../connectors/sql-query-connector" } query-core = { path = "../core" } +driver-adapters = { path = "../driver-adapters" } -thiserror = "1" connection-string.workspace = true -url = "2" +js-sys.workspace = true +serde-wasm-bindgen.workspace = true serde_json.workspace = true +tsify.workspace = true +wasm-bindgen.workspace = true +wasm-bindgen-futures.workspace = true +wasm-rs-dbg.workspace = true + +thiserror = "1" +url = "2" serde.workspace = true tokio = { version = "1.25", features = ["macros", "sync", "io-util", "time"] } futures = "0.3" -wasm-bindgen = "=0.2.88" -wasm-bindgen-futures = "0.4" -serde-wasm-bindgen = "0.5" -js-sys = "0.3" -log = "0.4.6" -wasm-logger = "0.2.0" tracing = "0.1" tracing-subscriber = { version = "0.3" } tracing-futures = "0.2" -tsify = "0.4.5" -console_error_panic_hook = "0.1.7" +tracing-opentelemetry = "0.17.3" +opentelemetry = { version = "0.17"} diff --git a/query-engine/query-engine-wasm/README.md 
b/query-engine/query-engine-wasm/README.md index f5adc7eb2894..7f294bc997c9 100644 --- a/query-engine/query-engine-wasm/README.md +++ b/query-engine/query-engine-wasm/README.md @@ -37,4 +37,4 @@ From the current folder: To try importing the , you can run: - `nvm use` -- `node --experimental-wasm-modules ./example.js` +- `node --experimental-wasm-modules example/example.js` diff --git a/query-engine/query-engine-wasm/build.sh b/query-engine/query-engine-wasm/build.sh index aef7b5713774..e4db9fbad6da 100755 --- a/query-engine/query-engine-wasm/build.sh +++ b/query-engine/query-engine-wasm/build.sh @@ -1,36 +1,31 @@ #!/bin/bash -set -e # Call this script as `./build.sh ` +set -euo pipefail -OUT_VERSION="$1" +OUT_VERSION="${1:-}" OUT_FOLDER="pkg" OUT_JSON="${OUT_FOLDER}/package.json" OUT_TARGET="bundler" OUT_NPM_NAME="@prisma/query-engine-wasm" -# The local ./Cargo.toml file uses "name = "query_engine_wasm" as library name -# to avoid conflicts with libquery's `name = "query_engine"` library name declaration. -# This little `sed -i` trick below is a hack to publish "@prisma/query-engine-wasm" -# with the same binding filenames currently expected by the Prisma Client. -sed -i.bak 's/name = "query_engine_wasm"/name = "query_engine"/g' Cargo.toml - # use `wasm-pack build --release` on CI only -if [[ -z "$BUILDKITE" ]] && [[ -z "$GITHUB_ACTIONS" ]]; then +if [[ -z "${BUILDKITE:-}" ]] && [[ -z "${GITHUB_ACTIONS:-}" ]]; then BUILD_PROFILE="--dev" else BUILD_PROFILE="--release" fi -wasm-pack build $BUILD_PROFILE --target $OUT_TARGET - -sed -i.bak 's/name = "query_engine"/name = "query_engine_wasm"/g' Cargo.toml +# Check if wasm-pack is installed +if ! command -v wasm-pack &> /dev/null +then + echo "wasm-pack could not be found, installing now..." + # Install wasm-pack + curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh +fi -# Remove the backup file created by sed. 
We only created it because there's no -# cross-platform way to specify we don't need one (it's just `-i` in GNU sed -# but `-i ""` in BSD sed). -rm Cargo.toml.bak +wasm-pack build $BUILD_PROFILE --target $OUT_TARGET --out-name query_engine sleep 1 @@ -43,6 +38,10 @@ printf '%s\n' "$(jq --arg version "$OUT_VERSION" '. + {"version": $version}' $OU # Add the package name printf '%s\n' "$(jq --arg name "$OUT_NPM_NAME" '. + {"name": $name}' $OUT_JSON)" > $OUT_JSON +# Some info: enabling Cloudflare Workers in the bindings generated by wasm-package +# is useful for local experiments, but it's not needed here. +# `@prisma/client` has its own `esbuild` plugin for CF-compatible bindings +# and import of `.wasm` files. enable_cf_in_bindings() { # Enable Cloudflare Workers in the generated JS bindings. # The generated bindings are compatible with: diff --git a/query-engine/query-engine-wasm/example.js b/query-engine/query-engine-wasm/example.js deleted file mode 100644 index 6d3a78374bc8..000000000000 --- a/query-engine/query-engine-wasm/example.js +++ /dev/null @@ -1,54 +0,0 @@ -/** - * Run with: `node --experimental-wasm-modules ./example.js` - * on Node.js 18+. - */ - -import { Pool } from '@neondatabase/serverless' -import { PrismaNeon } from '@prisma/adapter-neon' -import { bindAdapter } from '@prisma/driver-adapter-utils' -import { init, QueryEngine, getBuildTimeInfo } from './pkg/query_engine_wasm.js' - -async function main() { - // Always initialize the Wasm library before using it. - // This sets up the logging and panic hooks. 
- init() - - const connectionString = undefined - - const pool = new Pool({ connectionString }) - const adapter = new PrismaNeon(pool) - const driverAdapter = bindAdapter(adapter) - - console.log('buildTimeInfo', getBuildTimeInfo()) - - const options = { - datamodel: /* prisma */` - datasource db { - provider = "postgres" - url = env("DATABASE_URL") - } - - generator client { - provider = "prisma-client-js" - } - - model User { - id Int @id @default(autoincrement()) - } - `, - logLevel: 'info', - logQueries: true, - datasourceOverrides: {}, - env: process.env, - configDir: '/tmp', - ignoreEnvVarErrors: true, - } - const callback = () => { console.log('log-callback') } - - const queryEngine = new QueryEngine(options, callback, driverAdapter) - - await queryEngine.connect('trace') - await queryEngine.disconnect('trace') -} - -main() diff --git a/query-engine/query-engine-wasm/example/.gitignore b/query-engine/query-engine-wasm/example/.gitignore new file mode 100644 index 000000000000..3997beadf829 --- /dev/null +++ b/query-engine/query-engine-wasm/example/.gitignore @@ -0,0 +1 @@ +*.db \ No newline at end of file diff --git a/query-engine/query-engine-wasm/example/example.js b/query-engine/query-engine-wasm/example/example.js new file mode 100644 index 000000000000..5d3449010865 --- /dev/null +++ b/query-engine/query-engine-wasm/example/example.js @@ -0,0 +1,88 @@ +/** + * Run with: `node --experimental-wasm-modules ./example.js` + * on Node.js 18+. + */ +import { readFile } from 'fs/promises' +import { PrismaLibSQL } from '@prisma/adapter-libsql' +import { createClient } from '@libsql/client' +import { bindAdapter } from '@prisma/driver-adapter-utils' +import { QueryEngine, getBuildTimeInfo } from '../pkg/query_engine.js' + + +async function main() { + // Always initialize the Wasm library before using it. + // This sets up the logging and panic hooks. 
+ + const client = createClient({ url: "file:./prisma/dev.db"}) + const adapter = new PrismaLibSQL(client) + const driverAdapter = bindAdapter(adapter) + + console.log('buildTimeInfo', getBuildTimeInfo()) + + const datamodel = await readFile('prisma/schema.prisma', 'utf8') + + const options = { + datamodel, + logLevel: 'info', + logQueries: true, + datasourceOverrides: {}, + env: process.env, + configDir: '/tmp', + ignoreEnvVarErrors: true, + } + const callback = () => { console.log('log-callback') } + + const queryEngine = new QueryEngine(options, callback, driverAdapter) + + await queryEngine.connect('trace') + + const created = await queryEngine.query(JSON.stringify({ + modelName: 'User', + action: 'createOne', + query: { + arguments: { + data: { + id: 1235, + }, + }, + selection: { + $scalars: true + } + } + }), 'trace') + + console.log({ created }) + + const res = await queryEngine.query(JSON.stringify({ + modelName: 'User', + action: 'findMany', + query: { + arguments: {}, + selection: { + $scalars: true + } + } + }), 'trace') + const parsed = JSON.parse(res); + console.log('query result = ') + console.dir(parsed, { depth: null }) + + const error = parsed.errors?.[0]?.user_facing_error + if (error?.error_code === 'P2036') { + console.log('js error:', driverAdapter.errorRegistry.consumeError(error.meta.id)) + } + + // console.log('before disconnect') + await queryEngine.disconnect('trace') + // console.log('after disconnect') + + // console.log('before close') + await driverAdapter.close() + // console.log('after close') + + // console.log('before free') + queryEngine.free() + // console.log('after free') +} + +main() diff --git a/query-engine/query-engine-wasm/example/package.json b/query-engine/query-engine-wasm/example/package.json new file mode 100644 index 000000000000..3b0c4c91c9f9 --- /dev/null +++ b/query-engine/query-engine-wasm/example/package.json @@ -0,0 +1,14 @@ +{ + "type": "module", + "main": "./example.js", + "scripts": { + "dev": "node 
--experimental-wasm-modules ./example.js" + }, + "dependencies": { + "@libsql/client": "0.4.0-pre.2", + "@prisma/adapter-libsql": "5.7.0-dev.54", + "@prisma/client": "5.7.0-dev.54", + "@prisma/driver-adapter-utils": "5.7.0-dev.54", + "prisma": "5.7.0-dev.54" + } +} diff --git a/query-engine/query-engine-wasm/example/pnpm-lock.yaml b/query-engine/query-engine-wasm/example/pnpm-lock.yaml new file mode 100644 index 000000000000..beb050a5398a --- /dev/null +++ b/query-engine/query-engine-wasm/example/pnpm-lock.yaml @@ -0,0 +1,393 @@ +lockfileVersion: '6.0' + +settings: + autoInstallPeers: true + excludeLinksFromLockfile: false + +dependencies: + '@libsql/client': + specifier: 0.4.0-pre.2 + version: 0.4.0-pre.2 + '@prisma/adapter-libsql': + specifier: 5.7.0-dev.54 + version: 5.7.0-dev.54(@libsql/client@0.4.0-pre.2) + '@prisma/client': + specifier: 5.7.0-dev.54 + version: 5.7.0-dev.54(prisma@5.7.0-dev.54) + '@prisma/driver-adapter-utils': + specifier: 5.7.0-dev.54 + version: 5.7.0-dev.54 + prisma: + specifier: 5.7.0-dev.54 + version: 5.7.0-dev.54 + +packages: + + /@libsql/client@0.4.0-pre.2: + resolution: {integrity: sha512-sKWNPU+RQoki5hEoYhpC+fQ/kj+VuwoSXF2PMYGWB19MYBkMaMc7udn1T0ibNjNkFNmd98HvPIHd48NNC2oWvA==} + dependencies: + '@libsql/hrana-client': 0.5.5 + js-base64: 3.7.5 + libsql: 0.2.0-pre.2 + transitivePeerDependencies: + - bufferutil + - encoding + - utf-8-validate + dev: false + + /@libsql/darwin-arm64@0.2.0-pre.2: + resolution: {integrity: sha512-PKXAKBJF6XwfCT3yU1N/kHyUGcsatf/4rYNzdnc6UGeg+yWf3ZDk7sGnHHj9bDQ9oKLRVJQmc+cNIEsF2GOr9w==} + cpu: [arm64] + os: [darwin] + requiresBuild: true + dev: false + optional: true + + /@libsql/darwin-x64@0.2.0-pre.2: + resolution: {integrity: sha512-e3k4LsAFRf8qFfZqkg/VkoXK/UfDYgoDvLmAJpAGKEFp7d/bTmbF1r0YCjtGaPbheRxARAUXNfekvRhdpXE3mg==} + cpu: [x64] + os: [darwin] + requiresBuild: true + dev: false + optional: true + + /@libsql/hrana-client@0.5.5: + resolution: {integrity: 
sha512-i+hDBpiV719poqEiHupUUZYKJ9YSbCRFe5Q2PQ0v3mHIftePH6gayLjp2u6TXbqbO/Dv6y8yyvYlBXf/kFfRZA==} + dependencies: + '@libsql/isomorphic-fetch': 0.1.10 + '@libsql/isomorphic-ws': 0.1.5 + js-base64: 3.7.5 + node-fetch: 3.3.2 + transitivePeerDependencies: + - bufferutil + - encoding + - utf-8-validate + dev: false + + /@libsql/isomorphic-fetch@0.1.10: + resolution: {integrity: sha512-dH0lMk50gKSvEKD78xWMu60SY1sjp1sY//iFLO0XMmBwfVfG136P9KOk06R4maBdlb8KMXOzJ1D28FR5ZKnHTA==} + dependencies: + '@types/node-fetch': 2.6.9 + node-fetch: 2.7.0 + transitivePeerDependencies: + - encoding + dev: false + + /@libsql/isomorphic-ws@0.1.5: + resolution: {integrity: sha512-DtLWIH29onUYR00i0GlQ3UdcTRC6EP4u9w/h9LxpUZJWRMARk6dQwZ6Jkd+QdwVpuAOrdxt18v0K2uIYR3fwFg==} + dependencies: + '@types/ws': 8.5.10 + ws: 8.14.2 + transitivePeerDependencies: + - bufferutil + - utf-8-validate + dev: false + + /@libsql/linux-arm64-gnu@0.2.0-pre.2: + resolution: {integrity: sha512-ZkN6e129joeUu6cinGMRbCvLTnrM5xV5n9XHs2dRrZfL7yu7utbvrY1l+P6VI1gugs93UhgupqyMsolFjvrPww==} + cpu: [arm64] + os: [linux] + requiresBuild: true + dev: false + optional: true + + /@libsql/linux-arm64-musl@0.2.0-pre.2: + resolution: {integrity: sha512-tEy4UAIzHYtjCBJnZoTcX1LCYy+XGR3hQCsdRYujWJhUtmtU/AqCRZV3q8MyfX7UhKyawJKWoQvwQ6Vs7w9jAA==} + cpu: [arm64] + os: [linux] + requiresBuild: true + dev: false + optional: true + + /@libsql/linux-x64-gnu@0.2.0-pre.2: + resolution: {integrity: sha512-jhHKwz5i9mdlpT4EeaKNUfyW5N9YY8wD5lZ0F5HrrPKhwgufnJY0oPEbvhM4KXDcSJetiIcGJ6K6NQyMSgoJ/Q==} + cpu: [x64] + os: [linux] + requiresBuild: true + dev: false + optional: true + + /@libsql/linux-x64-musl@0.2.0-pre.2: + resolution: {integrity: sha512-HvwZtSQ2eIT968yxAb+htO+wmibdwW1PIyR7iJ5TN7phj7W1gF962l3ZhV1hVYERaMu+liBH1e/cRP1S35q3vQ==} + cpu: [x64] + os: [linux] + requiresBuild: true + dev: false + optional: true + + /@libsql/win32-x64-msvc@0.2.0-pre.2: + resolution: {integrity: 
sha512-BWjInhsZRF9x+W0T5oJVjqoCCdvh82y74b/T3Ge/irXyLdVhHA9Zb1JWDy5uhu8eBR+d2n9B+IO0YwAvhFRTLw==} + cpu: [x64] + os: [win32] + requiresBuild: true + dev: false + optional: true + + /@neon-rs/load@0.0.4: + resolution: {integrity: sha512-kTPhdZyTQxB+2wpiRcFWrDcejc4JI6tkPuS7UZCG4l6Zvc5kU/gGQ/ozvHTh1XR5tS+UlfAfGuPajjzQjCiHCw==} + dev: false + + /@prisma/adapter-libsql@5.7.0-dev.54(@libsql/client@0.4.0-pre.2): + resolution: {integrity: sha512-P+npdjsKYGv3bW4XWDEruLFAaih9ECZI7vH90DeWY3AOAQY9Siy9bKecsTmCTeKYscrqKVgP1uK3MRHacvWhyQ==} + peerDependencies: + '@libsql/client': ^0.3.5 + dependencies: + '@libsql/client': 0.4.0-pre.2 + '@prisma/driver-adapter-utils': 5.7.0-dev.54 + async-mutex: 0.4.0 + transitivePeerDependencies: + - supports-color + dev: false + + /@prisma/client@5.7.0-dev.54(prisma@5.7.0-dev.54): + resolution: {integrity: sha512-WjR+Cpfssce60M6FSXHjFpH+hFLUAfsRxAbRJTw2+W2HdJyZcVXF4FTqCZLZVaBF5NW/60fK3K3aRnMuEvsDtA==} + engines: {node: '>=16.13'} + requiresBuild: true + peerDependencies: + prisma: '*' + peerDependenciesMeta: + prisma: + optional: true + dependencies: + prisma: 5.7.0-dev.54 + dev: false + + /@prisma/debug@5.7.0-dev.54: + resolution: {integrity: sha512-5KodpKA1Th05sREvQoQ4U8oJa8QFXPjxzE5AduzYLHjXibgd18p2//c0wtU9erP7jgLFC9vrvlSsWhjsAyc0fA==} + dev: false + + /@prisma/driver-adapter-utils@5.7.0-dev.54: + resolution: {integrity: sha512-5wGFzahzgIPgDjuVpU8hisB71RYDVtIeYord920PAW//ZnHPvS6yHg1+O+z/PMndV5iL9UP5EJDx19LpmH+sDg==} + dependencies: + debug: 4.3.4 + transitivePeerDependencies: + - supports-color + dev: false + + /@prisma/engines-version@5.7.0-20.01aad9b63c8d574cc270d2b09461e920d19986e6: + resolution: {integrity: sha512-aKw2Ge9kZQrU5DRxqQ9xwyksH6aFtJR4BIuUDSevkFbrq3PFy/SBhLE4RWVfJmYqWs5/BBat7ZP3T5xK178liQ==} + dev: false + + /@prisma/engines@5.7.0-dev.54: + resolution: {integrity: sha512-qeV4+hbQFaVqUw3CRpRhFt0/W+7BzLZ8RFhuVF9tOqdcZ0Mu5ktdX0pevbWtJHMnbqt9nrcxVv42Ok9mqJ2mFA==} + requiresBuild: true + dependencies: + '@prisma/debug': 5.7.0-dev.54 
+ '@prisma/engines-version': 5.7.0-20.01aad9b63c8d574cc270d2b09461e920d19986e6 + '@prisma/fetch-engine': 5.7.0-dev.54 + '@prisma/get-platform': 5.7.0-dev.54 + dev: false + + /@prisma/fetch-engine@5.7.0-dev.54: + resolution: {integrity: sha512-pFm+hWMS3zSrjyvlTY8JQWYL9jRCVyEOc/qt1sIe/EILQsQfKjweOk6yyQm4+wLId+otjc738A6+wLVjBfoiNw==} + dependencies: + '@prisma/debug': 5.7.0-dev.54 + '@prisma/engines-version': 5.7.0-20.01aad9b63c8d574cc270d2b09461e920d19986e6 + '@prisma/get-platform': 5.7.0-dev.54 + dev: false + + /@prisma/get-platform@5.7.0-dev.54: + resolution: {integrity: sha512-5vbvS2qo1QtWam4oQKbrVo9kC5YVODTlF3p3GqlrPACy8B4wEvGd2MLEtFb9UQl3gCOcvZNZmMx+hm2aV/f2Fw==} + dependencies: + '@prisma/debug': 5.7.0-dev.54 + dev: false + + /@types/node-fetch@2.6.9: + resolution: {integrity: sha512-bQVlnMLFJ2d35DkPNjEPmd9ueO/rh5EiaZt2bhqiSarPjZIuIV6bPQVqcrEyvNo+AfTrRGVazle1tl597w3gfA==} + dependencies: + '@types/node': 20.9.4 + form-data: 4.0.0 + dev: false + + /@types/node@20.9.4: + resolution: {integrity: sha512-wmyg8HUhcn6ACjsn8oKYjkN/zUzQeNtMy44weTJSM6p4MMzEOuKbA3OjJ267uPCOW7Xex9dyrNTful8XTQYoDA==} + dependencies: + undici-types: 5.26.5 + dev: false + + /@types/ws@8.5.10: + resolution: {integrity: sha512-vmQSUcfalpIq0R9q7uTo2lXs6eGIpt9wtnLdMv9LVpIjCA/+ufZRozlVoVelIYixx1ugCBKDhn89vnsEGOCx9A==} + dependencies: + '@types/node': 20.9.4 + dev: false + + /async-mutex@0.4.0: + resolution: {integrity: sha512-eJFZ1YhRR8UN8eBLoNzcDPcy/jqjsg6I1AP+KvWQX80BqOSW1oJPJXDylPUEeMr2ZQvHgnQ//Lp6f3RQ1zI7HA==} + dependencies: + tslib: 2.6.2 + dev: false + + /asynckit@0.4.0: + resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==} + dev: false + + /combined-stream@1.0.8: + resolution: {integrity: sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==} + engines: {node: '>= 0.8'} + dependencies: + delayed-stream: 1.0.0 + dev: false + + /data-uri-to-buffer@4.0.1: + resolution: {integrity: 
sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==} + engines: {node: '>= 12'} + dev: false + + /debug@4.3.4: + resolution: {integrity: sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==} + engines: {node: '>=6.0'} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + dependencies: + ms: 2.1.2 + dev: false + + /delayed-stream@1.0.0: + resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==} + engines: {node: '>=0.4.0'} + dev: false + + /detect-libc@2.0.2: + resolution: {integrity: sha512-UX6sGumvvqSaXgdKGUsgZWqcUyIXZ/vZTrlRT/iobiKhGL0zL4d3osHj3uqllWJK+i+sixDS/3COVEOFbupFyw==} + engines: {node: '>=8'} + dev: false + + /fetch-blob@3.2.0: + resolution: {integrity: sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==} + engines: {node: ^12.20 || >= 14.13} + dependencies: + node-domexception: 1.0.0 + web-streams-polyfill: 3.2.1 + dev: false + + /form-data@4.0.0: + resolution: {integrity: sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==} + engines: {node: '>= 6'} + dependencies: + asynckit: 0.4.0 + combined-stream: 1.0.8 + mime-types: 2.1.35 + dev: false + + /formdata-polyfill@4.0.10: + resolution: {integrity: sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==} + engines: {node: '>=12.20.0'} + dependencies: + fetch-blob: 3.2.0 + dev: false + + /js-base64@3.7.5: + resolution: {integrity: sha512-3MEt5DTINKqfScXKfJFrRbxkrnk2AxPWGBL/ycjz4dK8iqiSJ06UxD8jh8xuh6p10TX4t2+7FsBYVxxQbMg+qA==} + dev: false + + /libsql@0.2.0-pre.2: + resolution: {integrity: sha512-ErF11J/Q0Uo1TMceX1f7RKfFvQ/j4FS8TagzJnAZBwhHsPcr7uItkSTchkuRHm5+cE4dJO7lqf+MpmlDjp/qAQ==} + cpu: [x64, arm64] + os: [darwin, linux, win32] + dependencies: + '@neon-rs/load': 0.0.4 + 
detect-libc: 2.0.2 + optionalDependencies: + '@libsql/darwin-arm64': 0.2.0-pre.2 + '@libsql/darwin-x64': 0.2.0-pre.2 + '@libsql/linux-arm64-gnu': 0.2.0-pre.2 + '@libsql/linux-arm64-musl': 0.2.0-pre.2 + '@libsql/linux-x64-gnu': 0.2.0-pre.2 + '@libsql/linux-x64-musl': 0.2.0-pre.2 + '@libsql/win32-x64-msvc': 0.2.0-pre.2 + dev: false + + /mime-db@1.52.0: + resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==} + engines: {node: '>= 0.6'} + dev: false + + /mime-types@2.1.35: + resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==} + engines: {node: '>= 0.6'} + dependencies: + mime-db: 1.52.0 + dev: false + + /ms@2.1.2: + resolution: {integrity: sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==} + dev: false + + /node-domexception@1.0.0: + resolution: {integrity: sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==} + engines: {node: '>=10.5.0'} + dev: false + + /node-fetch@2.7.0: + resolution: {integrity: sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==} + engines: {node: 4.x || >=6.0.0} + peerDependencies: + encoding: ^0.1.0 + peerDependenciesMeta: + encoding: + optional: true + dependencies: + whatwg-url: 5.0.0 + dev: false + + /node-fetch@3.3.2: + resolution: {integrity: sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + dependencies: + data-uri-to-buffer: 4.0.1 + fetch-blob: 3.2.0 + formdata-polyfill: 4.0.10 + dev: false + + /prisma@5.7.0-dev.54: + resolution: {integrity: sha512-+dpJABpFg6l4DTSSCGBIxgrRPJ3QMDtiB3SB56UOk5vX2guG96+yU46N1WWwSCgZirwhy3IR1zVuhmRZFmatSA==} + engines: {node: '>=16.13'} + hasBin: true + requiresBuild: true + dependencies: + '@prisma/engines': 5.7.0-dev.54 + dev: false + + 
/tr46@0.0.3: + resolution: {integrity: sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==} + dev: false + + /tslib@2.6.2: + resolution: {integrity: sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==} + dev: false + + /undici-types@5.26.5: + resolution: {integrity: sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==} + dev: false + + /web-streams-polyfill@3.2.1: + resolution: {integrity: sha512-e0MO3wdXWKrLbL0DgGnUV7WHVuw9OUvL4hjgnPkIeEvESk74gAITi5G606JtZPp39cd8HA9VQzCIvA49LpPN5Q==} + engines: {node: '>= 8'} + dev: false + + /webidl-conversions@3.0.1: + resolution: {integrity: sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==} + dev: false + + /whatwg-url@5.0.0: + resolution: {integrity: sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==} + dependencies: + tr46: 0.0.3 + webidl-conversions: 3.0.1 + dev: false + + /ws@8.14.2: + resolution: {integrity: sha512-wEBG1ftX4jcglPxgFCMJmZ2PLtSbJ2Peg6TmpJFTbe9GZYOQCDPdMYu/Tm0/bGZkw8paZnJY45J4K2PZrLYq8g==} + engines: {node: '>=10.0.0'} + peerDependencies: + bufferutil: ^4.0.1 + utf-8-validate: '>=5.0.2' + peerDependenciesMeta: + bufferutil: + optional: true + utf-8-validate: + optional: true + dev: false diff --git a/query-engine/query-engine-wasm/example/prisma/schema.prisma b/query-engine/query-engine-wasm/example/prisma/schema.prisma new file mode 100644 index 000000000000..c6432a4a671f --- /dev/null +++ b/query-engine/query-engine-wasm/example/prisma/schema.prisma @@ -0,0 +1,13 @@ +datasource db { + provider = "sqlite" + url = "file:./dev.db" +} + +generator client { + provider = "prisma-client-js" + previewFeatures = ["driverAdapters", "tracing"] +} + +model User { + id Int @id @default(autoincrement()) +} diff --git a/query-engine/query-engine-wasm/package-lock.json 
b/query-engine/query-engine-wasm/package-lock.json deleted file mode 100644 index c2d5a7a1162e..000000000000 --- a/query-engine/query-engine-wasm/package-lock.json +++ /dev/null @@ -1,165 +0,0 @@ -{ - "name": "query-engine-wasm", - "lockfileVersion": 3, - "requires": true, - "packages": { - "": { - "dependencies": { - "@neondatabase/serverless": "0.6.0", - "@prisma/adapter-neon": "5.5.2", - "@prisma/driver-adapter-utils": "5.5.2" - } - }, - "node_modules/@neondatabase/serverless": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/@neondatabase/serverless/-/serverless-0.6.0.tgz", - "integrity": "sha512-qXxBRYN0m2v8kVQBfMxbzNGn2xFAhTXFibzQlE++NfJ56Shz3m7+MyBBtXDlEH+3Wfa6lToDXf1MElocY4sJ3w==", - "dependencies": { - "@types/pg": "8.6.6" - } - }, - "node_modules/@prisma/adapter-neon": { - "version": "5.5.2", - "resolved": "https://registry.npmjs.org/@prisma/adapter-neon/-/adapter-neon-5.5.2.tgz", - "integrity": "sha512-XcpJ/fgh/sP7mlBFkqjIzEcU/kWnNyiZf19MBP366HF7vXg2UQTbGxmbbeFiohXSJ/rwyu1Qmos7IrKK+QJOgg==", - "dependencies": { - "@prisma/driver-adapter-utils": "5.5.2", - "postgres-array": "^3.0.2" - }, - "peerDependencies": { - "@neondatabase/serverless": "^0.6.0" - } - }, - "node_modules/@prisma/adapter-neon/node_modules/postgres-array": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-3.0.2.tgz", - "integrity": "sha512-6faShkdFugNQCLwucjPcY5ARoW1SlbnrZjmGl0IrrqewpvxvhSLHimCVzqeuULCbG0fQv7Dtk1yDbG3xv7Veog==", - "engines": { - "node": ">=12" - } - }, - "node_modules/@prisma/driver-adapter-utils": { - "version": "5.5.2", - "resolved": "https://registry.npmjs.org/@prisma/driver-adapter-utils/-/driver-adapter-utils-5.5.2.tgz", - "integrity": "sha512-lRkxjboGcIl2VkJNomZQ9b6vc2qGFnVwjaR/o3cTPGmmSxETx71cYRYcG/NHKrhvKxI6oKNZ/xzyuzPpg1+kJQ==", - "dependencies": { - "debug": "^4.3.4" - } - }, - "node_modules/@types/node": { - "version": "20.8.10", - "resolved": 
"https://registry.npmjs.org/@types/node/-/node-20.8.10.tgz", - "integrity": "sha512-TlgT8JntpcbmKUFzjhsyhGfP2fsiz1Mv56im6enJ905xG1DAYesxJaeSbGqQmAw8OWPdhyJGhGSQGKRNJ45u9w==", - "dependencies": { - "undici-types": "~5.26.4" - } - }, - "node_modules/@types/pg": { - "version": "8.6.6", - "resolved": "https://registry.npmjs.org/@types/pg/-/pg-8.6.6.tgz", - "integrity": "sha512-O2xNmXebtwVekJDD+02udOncjVcMZQuTEQEMpKJ0ZRf5E7/9JJX3izhKUcUifBkyKpljyUM6BTgy2trmviKlpw==", - "dependencies": { - "@types/node": "*", - "pg-protocol": "*", - "pg-types": "^2.2.0" - } - }, - "node_modules/debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", - "dependencies": { - "ms": "2.1.2" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } - } - }, - "node_modules/ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" - }, - "node_modules/pg-int8": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz", - "integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==", - "engines": { - "node": ">=4.0.0" - } - }, - "node_modules/pg-protocol": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.6.0.tgz", - "integrity": "sha512-M+PDm637OY5WM307051+bsDia5Xej6d9IR4GwJse1qA1DIhiKlksvrneZOYQq42OM+spubpcNYEo2FcKQrDk+Q==" - }, - "node_modules/pg-types": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz", - "integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==", - "dependencies": { - "pg-int8": "1.0.1", - 
"postgres-array": "~2.0.0", - "postgres-bytea": "~1.0.0", - "postgres-date": "~1.0.4", - "postgres-interval": "^1.1.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/postgres-array": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz", - "integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==", - "engines": { - "node": ">=4" - } - }, - "node_modules/postgres-bytea": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz", - "integrity": "sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/postgres-date": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz", - "integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/postgres-interval": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz", - "integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==", - "dependencies": { - "xtend": "^4.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/undici-types": { - "version": "5.26.5", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", - "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==" - }, - "node_modules/xtend": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", - "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", - "engines": { - "node": ">=0.4" - } - } - } -} diff --git 
a/query-engine/query-engine-wasm/package.json b/query-engine/query-engine-wasm/package.json deleted file mode 100644 index 8192656bd56f..000000000000 --- a/query-engine/query-engine-wasm/package.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "type": "module", - "main": "./example.js", - "scripts": { - "dev": "node --experimental-wasm-modules ./example.js" - }, - "dependencies": { - "@neondatabase/serverless": "0.6.0", - "@prisma/adapter-neon": "5.6.0", - "@prisma/driver-adapter-utils": "5.6.0" - } -} diff --git a/query-engine/query-engine-wasm/src/engine.rs b/query-engine/query-engine-wasm/src/engine.rs deleted file mode 100644 index f9a06fabcf4b..000000000000 --- a/query-engine/query-engine-wasm/src/engine.rs +++ /dev/null @@ -1,265 +0,0 @@ -#![allow(dead_code)] -#![allow(unused_variables)] - -use crate::proxy; -use crate::{ - error::ApiError, - logger::{LogCallback, Logger}, -}; -use js_sys::{Function as JsFunction, Object as JsObject}; -use serde::{Deserialize, Serialize}; -use std::{ - collections::{BTreeMap, HashMap}, - path::PathBuf, - sync::Arc, -}; -use tokio::sync::RwLock; -use tracing_subscriber::filter::LevelFilter; -use tsify::Tsify; -use wasm_bindgen::prelude::wasm_bindgen; - -/// The main query engine used by JS -#[wasm_bindgen] -pub struct QueryEngine { - inner: RwLock, - logger: Logger, -} - -/// The state of the engine. -enum Inner { - /// Not connected, holding all data to form a connection. - Builder(EngineBuilder), - /// A connected engine, holding all data to disconnect and form a new - /// connection. Allows querying when on this state. - Connected(ConnectedEngine), -} - -/// Everything needed to connect to the database and have the core running. -struct EngineBuilder { - schema: Arc, - config_dir: PathBuf, - env: HashMap, -} - -/// Internal structure for querying and reconnecting with the engine. -struct ConnectedEngine { - schema: Arc, - config_dir: PathBuf, - env: HashMap, -} - -/// Returned from the `serverInfo` method in javascript. 
-#[derive(Debug, Serialize)] -#[serde(rename_all = "camelCase")] -struct ServerInfo { - commit: String, - version: String, - primary_connector: Option, -} - -/// Parameters defining the construction of an engine. -#[derive(Debug, Deserialize, Tsify)] -#[tsify(from_wasm_abi)] -#[serde(rename_all = "camelCase")] -pub struct ConstructorOptions { - datamodel: String, - log_level: String, - #[serde(default)] - log_queries: bool, - #[serde(default)] - datasource_overrides: BTreeMap, - #[serde(default)] - env: serde_json::Value, - config_dir: PathBuf, - #[serde(default)] - ignore_env_var_errors: bool, - #[serde(default)] - engine_protocol: Option, -} - -impl Inner { - /// Returns a builder if the engine is not connected - fn as_builder(&self) -> crate::Result<&EngineBuilder> { - match self { - Inner::Builder(ref builder) => Ok(builder), - Inner::Connected(_) => Err(ApiError::AlreadyConnected), - } - } - - /// Returns the engine if connected - fn as_engine(&self) -> crate::Result<&ConnectedEngine> { - match self { - Inner::Builder(_) => Err(ApiError::NotConnected), - Inner::Connected(ref engine) => Ok(engine), - } - } -} - -#[wasm_bindgen] -impl QueryEngine { - /// Parse a validated datamodel and configuration to allow connecting later on. 
- #[wasm_bindgen(constructor)] - pub fn new( - options: ConstructorOptions, - callback: JsFunction, - maybe_adapter: Option, - ) -> Result { - log::info!("Called `QueryEngine::new()`"); - - let log_callback = LogCallback(callback); - log::info!("Parsed `log_callback`"); - - let ConstructorOptions { - datamodel, - log_level, - log_queries, - datasource_overrides, - env, - config_dir, - ignore_env_var_errors, - engine_protocol, - } = options; - - let env = stringify_env_values(env)?; // we cannot trust anything JS sends us from process.env - let overrides: Vec<(_, _)> = datasource_overrides.into_iter().collect(); - - let mut schema = psl::validate(datamodel.into()); - let config = &mut schema.configuration; - - if let Some(adapter) = maybe_adapter { - let js_queryable = - proxy::from_wasm(adapter).map_err(|e| ApiError::configuration(e.as_string().unwrap_or_default()))?; - - let provider_name = schema.connector.provider_name(); - log::info!("Received driver adapter for {provider_name}."); - } - - schema - .diagnostics - .to_result() - .map_err(|err| ApiError::conversion(err, schema.db.source()))?; - - config - .resolve_datasource_urls_query_engine( - &overrides, - |key| env.get(key).map(ToString::to_string), - ignore_env_var_errors, - ) - .map_err(|err| ApiError::conversion(err, schema.db.source()))?; - - config - .validate_that_one_datasource_is_provided() - .map_err(|errors| ApiError::conversion(errors, schema.db.source()))?; - - let builder = EngineBuilder { - schema: Arc::new(schema), - config_dir, - env, - }; - - let log_level = log_level.parse::().unwrap(); - let logger = Logger::new(log_queries, log_level, log_callback); - - Ok(Self { - inner: RwLock::new(Inner::Builder(builder)), - logger, - }) - } - - /// Connect to the database, allow queries to be run. - #[wasm_bindgen] - pub async fn connect(&self, trace: String) -> Result<(), wasm_bindgen::JsError> { - log::info!("Called `QueryEngine::connect()`"); - Ok(()) - } - - /// Disconnect and drop the core. 
Can be reconnected later with `#connect`. - #[wasm_bindgen] - pub async fn disconnect(&self, trace: String) -> Result<(), wasm_bindgen::JsError> { - log::info!("Called `QueryEngine::disconnect()`"); - Ok(()) - } - - /// If connected, sends a query to the core and returns the response. - #[wasm_bindgen] - pub async fn query( - &self, - body: String, - trace: String, - tx_id: Option, - ) -> Result { - log::info!("Called `QueryEngine::query()`"); - Err(ApiError::configuration("Can't use `query` until `request_handlers` is Wasm-compatible.").into()) - } - - /// If connected, attempts to start a transaction in the core and returns its ID. - #[wasm_bindgen(js_name = startTransaction)] - pub async fn start_transaction(&self, input: String, trace: String) -> Result { - log::info!("Called `QueryEngine::start_transaction()`"); - Err(ApiError::configuration("Can't use `start_transaction` until `query_core` is Wasm-compatible.").into()) - } - - /// If connected, attempts to commit a transaction with id `tx_id` in the core. - #[wasm_bindgen(js_name = commitTransaction)] - pub async fn commit_transaction(&self, tx_id: String, trace: String) -> Result { - log::info!("Called `QueryEngine::commit_transaction()`"); - Err(ApiError::configuration("Can't use `commit_transaction` until `query_core` is Wasm-compatible.").into()) - } - - #[wasm_bindgen] - pub async fn dmmf(&self, trace: String) -> Result { - log::info!("Called `QueryEngine::dmmf()`"); - Err(ApiError::configuration("Can't use `dmmf` until `request_handlers` is Wasm-compatible.").into()) - } - - /// If connected, attempts to roll back a transaction with id `tx_id` in the core. - #[wasm_bindgen(js_name = rollbackTransaction)] - pub async fn rollback_transaction(&self, tx_id: String, trace: String) -> Result { - log::info!("Called `QueryEngine::rollback_transaction()`"); - Ok("{}".to_owned()) - } - - /// Loads the query schema. Only available when connected. 
- #[wasm_bindgen(js_name = sdlSchema)] - pub async fn sdl_schema(&self) -> Result { - log::info!("Called `QueryEngine::sdl_schema()`"); - Ok("{}".to_owned()) - } - - #[wasm_bindgen] - pub async fn metrics(&self, json_options: String) -> Result<(), wasm_bindgen::JsError> { - log::info!("Called `QueryEngine::metrics()`"); - Err(ApiError::configuration("Metrics is not enabled in Wasm.").into()) - } -} - -fn stringify_env_values(origin: serde_json::Value) -> crate::Result> { - use serde_json::Value; - - let msg = match origin { - Value::Object(map) => { - let mut result: HashMap = HashMap::new(); - - for (key, val) in map.into_iter() { - match val { - Value::Null => continue, - Value::String(val) => { - result.insert(key, val); - } - val => { - result.insert(key, val.to_string()); - } - } - } - - return Ok(result); - } - Value::Null => return Ok(Default::default()), - Value::Bool(_) => "Expected an object for the env constructor parameter, got a boolean.", - Value::Number(_) => "Expected an object for the env constructor parameter, got a number.", - Value::String(_) => "Expected an object for the env constructor parameter, got a string.", - Value::Array(_) => "Expected an object for the env constructor parameter, got an array.", - }; - - Err(ApiError::JsonDecode(msg.to_string())) -} diff --git a/query-engine/query-engine-wasm/src/lib.rs b/query-engine/query-engine-wasm/src/lib.rs index 89b519515517..bc22931513e8 100644 --- a/query-engine/query-engine-wasm/src/lib.rs +++ b/query-engine/query-engine-wasm/src/lib.rs @@ -1,19 +1,22 @@ -pub mod engine; -pub mod error; -pub mod functions; -pub mod logger; -mod proxy; - -pub(crate) type Result = std::result::Result; +#[cfg(not(target_arch = "wasm32"))] +mod arch { + // This crate only works in a Wasm environment. 
+ // This conditional compilation block is here to make commands like + // `cargo clippy --all-features` happy, as `clippy` doesn't support the + // `--exclude` option (see: https://github.com/rust-lang/rust-clippy/issues/9555). + // + // This crate can still be inspected by `clippy` via: + // `cargo clippy --all-features -p query-engine-wasm --target wasm32-unknown-unknown` +} -use wasm_bindgen::prelude::wasm_bindgen; +#[cfg(target_arch = "wasm32")] +mod wasm; -/// Function that should be called before any other public function in this module. -#[wasm_bindgen] -pub fn init() { - // Set up temporary logging for the wasm module. - wasm_logger::init(wasm_logger::Config::default()); +#[cfg(target_arch = "wasm32")] +mod arch { + pub use super::wasm::*; - // Set up temporary panic hook for the wasm module. - std::panic::set_hook(Box::new(console_error_panic_hook::hook)); + pub(crate) type Result = std::result::Result; } + +pub use arch::*; diff --git a/query-engine/query-engine-wasm/src/proxy.rs b/query-engine/query-engine-wasm/src/proxy.rs deleted file mode 100644 index ad028e218236..000000000000 --- a/query-engine/query-engine-wasm/src/proxy.rs +++ /dev/null @@ -1,107 +0,0 @@ -#![allow(dead_code)] -#![allow(unused_variables)] - -// This code will likely live in a separate crate, but for now it's here. - -use async_trait::async_trait; -use js_sys::{Function as JsFunction, JsString, Object as JsObject, Promise as JsPromise, Reflect as JsReflect}; -use serde::{de::DeserializeOwned, Serialize}; -use wasm_bindgen::{JsCast, JsValue}; - -type Result = std::result::Result; - -pub struct CommonProxy { - /// Execute a query given as SQL, interpolating the given parameters. - query_raw: JsFunction, - - /// Execute a query given as SQL, interpolating the given parameters and - /// returning the number of affected rows. - execute_raw: JsFunction, - - /// Return the flavour for this driver. 
- pub(crate) flavour: String, -} - -impl CommonProxy { - pub(crate) fn new(driver: &JsObject) -> Result { - let query_raw = JsReflect::get(driver, &"queryRaw".into())?.dyn_into::()?; - let execute_raw = JsReflect::get(driver, &"executeRaw".into())?.dyn_into::()?; - let flavour: String = JsReflect::get(driver, &"flavour".into())? - .dyn_into::()? - .into(); - - let common_proxy = Self { - query_raw, - execute_raw, - flavour, - }; - Ok(common_proxy) - } -} - -pub struct DriverProxy { - start_transaction: JsFunction, -} - -impl DriverProxy { - pub(crate) fn new(driver: &JsObject) -> Result { - let start_transaction = JsReflect::get(driver, &"startTransaction".into())?.dyn_into::()?; - - let driver_proxy = Self { start_transaction }; - Ok(driver_proxy) - } -} - -pub struct JsQueryable { - inner: CommonProxy, - driver_proxy: DriverProxy, -} - -impl JsQueryable { - pub fn new(inner: CommonProxy, driver_proxy: DriverProxy) -> Self { - Self { inner, driver_proxy } - } -} - -pub fn from_wasm(driver: JsObject) -> Result { - let common_proxy = CommonProxy::new(&driver)?; - let driver_proxy = DriverProxy::new(&driver)?; - - let js_queryable = JsQueryable::new(common_proxy, driver_proxy); - Ok(js_queryable) -} - -#[async_trait(?Send)] -trait JsAsyncFunc { - async fn call1_async(&self, arg1: T) -> Result - where - T: Serialize, - R: DeserializeOwned; - - fn call0_sync(&self) -> Result - where - R: DeserializeOwned; -} - -#[async_trait(?Send)] -impl JsAsyncFunc for JsFunction { - async fn call1_async(&self, arg1: T) -> Result - where - T: Serialize, - R: DeserializeOwned, - { - let arg1 = serde_wasm_bindgen::to_value(&arg1).map_err(|err| js_sys::Error::new(&err.to_string()))?; - let promise = self.call1(&JsValue::null(), &arg1)?; - let future = wasm_bindgen_futures::JsFuture::from(JsPromise::from(promise)); - let value = future.await?; - serde_wasm_bindgen::from_value(value).map_err(|err| js_sys::Error::new(&err.to_string())) - } - - fn call0_sync(&self) -> Result - where - R: 
DeserializeOwned, - { - let value = self.call0(&JsValue::null())?; - serde_wasm_bindgen::from_value(value).map_err(|err| js_sys::Error::new(&err.to_string())) - } -} diff --git a/query-engine/query-engine-wasm/src/wasm.rs b/query-engine/query-engine-wasm/src/wasm.rs new file mode 100644 index 000000000000..8174dc8738c4 --- /dev/null +++ b/query-engine/query-engine-wasm/src/wasm.rs @@ -0,0 +1,7 @@ +pub mod engine; +pub mod error; +pub mod functions; +pub mod logger; +mod tracer; + +pub(crate) type Executor = Box; diff --git a/query-engine/query-engine-wasm/src/wasm/engine.rs b/query-engine/query-engine-wasm/src/wasm/engine.rs new file mode 100644 index 000000000000..92b352d76df5 --- /dev/null +++ b/query-engine/query-engine-wasm/src/wasm/engine.rs @@ -0,0 +1,448 @@ +#![allow(dead_code)] +#![allow(unused_variables)] + +use crate::{ + error::ApiError, + logger::{LogCallback, Logger}, +}; +use driver_adapters::JsObject; +use js_sys::Function as JsFunction; +use query_core::{ + protocol::EngineProtocol, + schema::{self, QuerySchema}, + telemetry, QueryExecutor, TransactionOptions, TxId, +}; +use request_handlers::ConnectorMode; +use request_handlers::{load_executor, RequestBody, RequestHandler}; +use serde::{Deserialize, Serialize}; +use serde_json::json; +use std::{ + collections::{BTreeMap, HashMap}, + path::PathBuf, + sync::Arc, +}; +use tokio::sync::RwLock; +use tracing::{field, instrument::WithSubscriber, Instrument, Span}; +use tracing_subscriber::filter::LevelFilter; +use tsify::Tsify; +use wasm_bindgen::prelude::wasm_bindgen; +/// The main query engine used by JS +#[wasm_bindgen] +pub struct QueryEngine { + connector_mode: ConnectorMode, + inner: RwLock, + logger: Logger, +} + +/// The state of the engine. +enum Inner { + /// Not connected, holding all data to form a connection. + Builder(EngineBuilder), + /// A connected engine, holding all data to disconnect and form a new + /// connection. Allows querying when on this state. 
+ Connected(ConnectedEngine), +} + +/// Everything needed to connect to the database and have the core running. +struct EngineBuilder { + schema: Arc, + config_dir: PathBuf, + env: HashMap, + engine_protocol: EngineProtocol, +} + +/// Internal structure for querying and reconnecting with the engine. +struct ConnectedEngine { + schema: Arc, + query_schema: Arc, + executor: crate::Executor, + config_dir: PathBuf, + env: HashMap, + engine_protocol: EngineProtocol, +} + +/// Returned from the `serverInfo` method in javascript. +#[derive(Debug, Serialize)] +#[serde(rename_all = "camelCase")] +struct ServerInfo { + commit: String, + version: String, + primary_connector: Option, +} + +impl ConnectedEngine { + /// The schema AST for Query Engine core. + pub fn query_schema(&self) -> &Arc { + &self.query_schema + } + + /// The query executor. + pub fn executor(&self) -> &(dyn QueryExecutor + Send + Sync) { + self.executor.as_ref() + } + + pub fn engine_protocol(&self) -> EngineProtocol { + self.engine_protocol + } +} + +/// Parameters defining the construction of an engine. 
+#[derive(Debug, Deserialize, Tsify)] +#[tsify(from_wasm_abi)] +#[serde(rename_all = "camelCase")] +pub struct ConstructorOptions { + datamodel: String, + log_level: String, + #[serde(default)] + log_queries: bool, + #[serde(default)] + datasource_overrides: BTreeMap, + #[serde(default)] + env: serde_json::Value, + config_dir: PathBuf, + #[serde(default)] + ignore_env_var_errors: bool, + #[serde(default)] + engine_protocol: Option, +} + +impl Inner { + /// Returns a builder if the engine is not connected + fn as_builder(&self) -> crate::Result<&EngineBuilder> { + match self { + Inner::Builder(ref builder) => Ok(builder), + Inner::Connected(_) => Err(ApiError::AlreadyConnected), + } + } + + /// Returns the engine if connected + fn as_engine(&self) -> crate::Result<&ConnectedEngine> { + match self { + Inner::Builder(_) => Err(ApiError::NotConnected), + Inner::Connected(ref engine) => Ok(engine), + } + } +} + +#[wasm_bindgen] +impl QueryEngine { + /// Parse a validated datamodel and configuration to allow connecting later on. 
+ #[wasm_bindgen(constructor)] + pub fn new( + options: ConstructorOptions, + callback: JsFunction, + maybe_adapter: Option, + ) -> Result { + let log_callback = LogCallback(callback); + + let ConstructorOptions { + datamodel, + log_level, + log_queries, + datasource_overrides, + env, + config_dir, + ignore_env_var_errors, + engine_protocol, + } = options; + + let env = stringify_env_values(env)?; // we cannot trust anything JS sends us from process.env + let overrides: Vec<(_, _)> = datasource_overrides.into_iter().collect(); + + let mut schema = psl::validate(datamodel.into()); + let config = &mut schema.configuration; + let preview_features = config.preview_features(); + + if let Some(adapter) = maybe_adapter { + let js_queryable = driver_adapters::from_js(adapter); + + sql_connector::activate_driver_adapter(Arc::new(js_queryable)); + + let provider_name = schema.connector.provider_name(); + tracing::info!("Received driver adapter for {provider_name}."); + } + + schema + .diagnostics + .to_result() + .map_err(|err| ApiError::conversion(err, schema.db.source()))?; + + config + .resolve_datasource_urls_query_engine( + &overrides, + |key| env.get(key).map(ToString::to_string), + ignore_env_var_errors, + ) + .map_err(|err| ApiError::conversion(err, schema.db.source()))?; + + config + .validate_that_one_datasource_is_provided() + .map_err(|errors| ApiError::conversion(errors, schema.db.source()))?; + + // Telemetry panics on timings if preview feature is enabled + let enable_tracing = false; // config.preview_features().contains(PreviewFeature::Tracing); + let engine_protocol = engine_protocol.unwrap_or(EngineProtocol::Json); + + let builder = EngineBuilder { + schema: Arc::new(schema), + config_dir, + engine_protocol, + env, + }; + + let log_level = log_level.parse::().unwrap(); + let logger = Logger::new(log_queries, log_level, log_callback, enable_tracing); + + let connector_mode = ConnectorMode::Js; + + Ok(Self { + inner: RwLock::new(Inner::Builder(builder)), + 
logger, + connector_mode, + }) + } + + /// Connect to the database, allow queries to be run. + #[wasm_bindgen] + pub async fn connect(&self, trace: String) -> Result<(), wasm_bindgen::JsError> { + let dispatcher = self.logger.dispatcher(); + + async { + let span = tracing::info_span!("prisma:engine:connect"); + let _ = telemetry::helpers::set_parent_context_from_json_str(&span, &trace); + + let mut inner = self.inner.write().await; + let builder = inner.as_builder()?; + let arced_schema = Arc::clone(&builder.schema); + let arced_schema_2 = Arc::clone(&builder.schema); + + let url = { + let data_source = builder + .schema + .configuration + .datasources + .first() + .ok_or_else(|| ApiError::configuration("No valid data source found"))?; + data_source + .load_url_with_config_dir(&builder.config_dir, |key| builder.env.get(key).map(ToString::to_string)) + .map_err(|err| crate::error::ApiError::Conversion(err, builder.schema.db.source().to_owned()))? + }; + + let engine = async move { + // We only support one data source & generator at the moment, so take the first one (default not exposed yet). 
+ let data_source = arced_schema + .configuration + .datasources + .first() + .ok_or_else(|| ApiError::configuration("No valid data source found"))?; + + let preview_features = arced_schema.configuration.preview_features(); + + let executor = load_executor(self.connector_mode, data_source, preview_features, &url).await?; + let connector = executor.primary_connector(); + + let conn_span = tracing::info_span!( + "prisma:engine:connection", + user_facing = true, + "db.type" = connector.name(), + ); + + connector.get_connection().instrument(conn_span).await?; + + let query_schema_span = tracing::info_span!("prisma:engine:schema"); + let query_schema = query_schema_span.in_scope(|| schema::build(arced_schema_2, true)); + + Ok(ConnectedEngine { + schema: builder.schema.clone(), + query_schema: Arc::new(query_schema), + executor, + config_dir: builder.config_dir.clone(), + env: builder.env.clone(), + engine_protocol: builder.engine_protocol, + }) as crate::Result + } + .instrument(span) + .await?; + + *inner = Inner::Connected(engine); + + Ok(()) + } + .with_subscriber(dispatcher) + .await + } + + /// Disconnect and drop the core. Can be reconnected later with `#connect`. + #[wasm_bindgen] + pub async fn disconnect(&self, trace: String) -> Result<(), wasm_bindgen::JsError> { + let dispatcher = self.logger.dispatcher(); + + async { + let span = tracing::info_span!("prisma:engine:disconnect"); + let _ = telemetry::helpers::set_parent_context_from_json_str(&span, &trace); + + async { + let mut inner = self.inner.write().await; + let engine = inner.as_engine()?; + + let builder = EngineBuilder { + schema: engine.schema.clone(), + config_dir: engine.config_dir.clone(), + env: engine.env.clone(), + engine_protocol: engine.engine_protocol(), + }; + + *inner = Inner::Builder(builder); + + Ok(()) + } + .instrument(span) + .await + } + .with_subscriber(dispatcher) + .await + } + + /// If connected, sends a query to the core and returns the response. 
+ #[wasm_bindgen] + pub async fn query( + &self, + body: String, + trace: String, + tx_id: Option, + ) -> Result { + let dispatcher = self.logger.dispatcher(); + + async { + let inner = self.inner.read().await; + let engine = inner.as_engine()?; + + let query = RequestBody::try_from_str(&body, engine.engine_protocol())?; + + async move { + let span = if tx_id.is_none() { + tracing::info_span!("prisma:engine", user_facing = true) + } else { + Span::none() + }; + + let trace_id = telemetry::helpers::set_parent_context_from_json_str(&span, &trace); + + let handler = RequestHandler::new(engine.executor(), engine.query_schema(), engine.engine_protocol()); + let response = handler + .handle(query, tx_id.map(TxId::from), trace_id) + .instrument(span) + .await; + + Ok(serde_json::to_string(&response)?) + } + .await + } + .with_subscriber(dispatcher) + .await + } + + /// If connected, attempts to start a transaction in the core and returns its ID. + #[wasm_bindgen(js_name = startTransaction)] + pub async fn start_transaction(&self, input: String, trace: String) -> Result { + let inner = self.inner.read().await; + let engine = inner.as_engine()?; + let dispatcher = self.logger.dispatcher(); + + async move { + let span = tracing::info_span!("prisma:engine:itx_runner", user_facing = true, itx_id = field::Empty); + + let tx_opts: TransactionOptions = serde_json::from_str(&input)?; + match engine + .executor() + .start_tx(engine.query_schema().clone(), engine.engine_protocol(), tx_opts) + .instrument(span) + .await + { + Ok(tx_id) => Ok(json!({ "id": tx_id.to_string() }).to_string()), + Err(err) => Ok(map_known_error(err)?), + } + } + .with_subscriber(dispatcher) + .await + } + + /// If connected, attempts to commit a transaction with id `tx_id` in the core. 
+ #[wasm_bindgen(js_name = commitTransaction)] + pub async fn commit_transaction(&self, tx_id: String, trace: String) -> Result { + let inner = self.inner.read().await; + let engine = inner.as_engine()?; + + let dispatcher = self.logger.dispatcher(); + + async move { + match engine.executor().commit_tx(TxId::from(tx_id)).await { + Ok(_) => Ok("{}".to_string()), + Err(err) => Ok(map_known_error(err)?), + } + } + .with_subscriber(dispatcher) + .await + } + + /// If connected, attempts to roll back a transaction with id `tx_id` in the core. + #[wasm_bindgen(js_name = rollbackTransaction)] + pub async fn rollback_transaction(&self, tx_id: String, trace: String) -> Result { + let inner = self.inner.read().await; + let engine = inner.as_engine()?; + + let dispatcher = self.logger.dispatcher(); + + async move { + match engine.executor().rollback_tx(TxId::from(tx_id)).await { + Ok(_) => Ok("{}".to_string()), + Err(err) => Ok(map_known_error(err)?), + } + } + .with_subscriber(dispatcher) + .await + } + + #[wasm_bindgen] + pub async fn metrics(&self, json_options: String) -> Result<(), wasm_bindgen::JsError> { + Err(ApiError::configuration("Metrics is not enabled in Wasm.").into()) + } +} + +fn map_known_error(err: query_core::CoreError) -> crate::Result { + let user_error: user_facing_errors::Error = err.into(); + let value = serde_json::to_string(&user_error)?; + + Ok(value) +} + +fn stringify_env_values(origin: serde_json::Value) -> crate::Result> { + use serde_json::Value; + + let msg = match origin { + Value::Object(map) => { + let mut result: HashMap = HashMap::new(); + + for (key, val) in map.into_iter() { + match val { + Value::Null => continue, + Value::String(val) => { + result.insert(key, val); + } + val => { + result.insert(key, val.to_string()); + } + } + } + + return Ok(result); + } + Value::Null => return Ok(Default::default()), + Value::Bool(_) => "Expected an object for the env constructor parameter, got a boolean.", + Value::Number(_) => "Expected an object 
for the env constructor parameter, got a number.", + Value::String(_) => "Expected an object for the env constructor parameter, got a string.", + Value::Array(_) => "Expected an object for the env constructor parameter, got an array.", + }; + + Err(ApiError::JsonDecode(msg.to_string())) +} diff --git a/query-engine/query-engine-wasm/src/error.rs b/query-engine/query-engine-wasm/src/wasm/error.rs similarity index 69% rename from query-engine/query-engine-wasm/src/error.rs rename to query-engine/query-engine-wasm/src/wasm/error.rs index 619e96564f6a..cfabc92ea0b0 100644 --- a/query-engine/query-engine-wasm/src/error.rs +++ b/query-engine/query-engine-wasm/src/wasm/error.rs @@ -1,6 +1,6 @@ use psl::diagnostics::Diagnostics; -// use query_connector::error::ConnectorError; -// use query_core::CoreError; +use query_connector::error::ConnectorError; +use query_core::CoreError; use thiserror::Error; #[derive(Debug, Error)] @@ -11,11 +11,12 @@ pub enum ApiError { #[error("{}", _0)] Configuration(String), - // #[error("{}", _0)] - // Core(CoreError), + #[error("{}", _0)] + Core(CoreError), + + #[error("{}", _0)] + Connector(ConnectorError), - // #[error("{}", _0)] - // Connector(ConnectorError), #[error("Can't modify an already connected engine.")] AlreadyConnected, @@ -31,10 +32,10 @@ impl From for user_facing_errors::Error { use std::fmt::Write as _; match err { - // ApiError::Connector(ConnectorError { - // user_facing_error: Some(err), - // .. - // }) => err.into(), + ApiError::Connector(ConnectorError { + user_facing_error: Some(err), + .. 
+ }) => err.into(), ApiError::Conversion(errors, dml_string) => { let mut full_error = errors.to_pretty_string("schema.prisma", &dml_string); write!(full_error, "\nValidation Error Count: {}", errors.errors().len()).unwrap(); @@ -43,7 +44,7 @@ impl From for user_facing_errors::Error { user_facing_errors::common::SchemaParserError { full_error }, )) } - // ApiError::Core(error) => user_facing_errors::Error::from(error), + ApiError::Core(error) => user_facing_errors::Error::from(error), other => user_facing_errors::Error::new_non_panic_with_current_backtrace(other.to_string()), } } @@ -59,20 +60,20 @@ impl ApiError { } } -// impl From for ApiError { -// fn from(e: CoreError) -> Self { -// match e { -// CoreError::ConfigurationError(message) => Self::Configuration(message), -// core_error => Self::Core(core_error), -// } -// } -// } - -// impl From for ApiError { -// fn from(e: ConnectorError) -> Self { -// Self::Connector(e) -// } -// } +impl From for ApiError { + fn from(e: CoreError) -> Self { + match e { + CoreError::ConfigurationError(message) => Self::Configuration(message), + core_error => Self::Core(core_error), + } + } +} + +impl From for ApiError { + fn from(e: ConnectorError) -> Self { + Self::Connector(e) + } +} impl From for ApiError { fn from(e: url::ParseError) -> Self { diff --git a/query-engine/query-engine-wasm/src/functions.rs b/query-engine/query-engine-wasm/src/wasm/functions.rs similarity index 61% rename from query-engine/query-engine-wasm/src/functions.rs rename to query-engine/query-engine-wasm/src/wasm/functions.rs index e0f0a93aa5cd..5aa2a8d6ba2a 100644 --- a/query-engine/query-engine-wasm/src/functions.rs +++ b/query-engine/query-engine-wasm/src/wasm/functions.rs @@ -1,4 +1,3 @@ -use crate::error::ApiError; use serde::Serialize; use tsify::Tsify; use wasm_bindgen::prelude::wasm_bindgen; @@ -19,23 +18,6 @@ pub fn version() -> Version { } } -#[wasm_bindgen] -pub fn dmmf(datamodel_string: String) -> Result { - let mut schema = 
psl::validate(datamodel_string.into()); - - schema - .diagnostics - .to_result() - .map_err(|errors| ApiError::conversion(errors, schema.db.source()))?; - - Ok("{}".to_string()) - - // let query_schema = query_core::schema::build(Arc::new(schema), true); - // let dmmf = dmmf::render_dmmf(&query_schema); - - // Ok(serde_json::to_string(&dmmf)?) -} - #[wasm_bindgen] pub fn debug_panic(panic_message: Option) -> Result<(), wasm_bindgen::JsError> { let user_facing = user_facing_errors::Error::from_panic_payload(Box::new( diff --git a/query-engine/query-engine-wasm/src/logger.rs b/query-engine/query-engine-wasm/src/wasm/logger.rs similarity index 79% rename from query-engine/query-engine-wasm/src/logger.rs rename to query-engine/query-engine-wasm/src/wasm/logger.rs index 561c48271b77..c0ccbf7f2a3e 100644 --- a/query-engine/query-engine-wasm/src/logger.rs +++ b/query-engine/query-engine-wasm/src/wasm/logger.rs @@ -2,6 +2,7 @@ use core::fmt; use js_sys::Function as JsFunction; +use query_core::telemetry; use serde_json::Value; use std::collections::BTreeMap; use tracing::{ @@ -16,7 +17,17 @@ use tracing_subscriber::{ }; use wasm_bindgen::JsValue; -pub(crate) struct LogCallback(pub JsFunction); +#[derive(Clone)] +pub struct LogCallback(pub JsFunction); + +impl LogCallback { + pub fn call>(&self, arg1: T) -> Result<(), String> { + self.0 + .call1(&JsValue::NULL, &arg1.into()) + .map(|_| ()) + .map_err(|err| err.as_string().unwrap_or_default()) + } +} unsafe impl Send for LogCallback {} unsafe impl Sync for LogCallback {} @@ -27,7 +38,7 @@ pub(crate) struct Logger { impl Logger { /// Creates a new logger using a call layer - pub fn new(log_queries: bool, log_level: LevelFilter, log_callback: LogCallback) -> Self { + pub fn new(log_queries: bool, log_level: LevelFilter, log_callback: LogCallback, enable_tracing: bool) -> Self { let is_sql_query = filter_fn(|meta| { meta.target() == "quaint::connector::metrics" && meta.fields().iter().any(|f| f.name() == "query") }); @@ -44,10 
+55,21 @@ impl Logger { FilterExt::boxed(log_level) }; + let is_user_trace = filter_fn(telemetry::helpers::user_facing_span_only_filter); + let tracer = super::tracer::new_pipeline().install_simple(log_callback.clone()); + let telemetry = if enable_tracing { + let telemetry = tracing_opentelemetry::layer() + .with_tracer(tracer) + .with_filter(is_user_trace); + Some(telemetry) + } else { + None + }; + let layer = CallbackLayer::new(log_callback).with_filter(filters); Self { - dispatcher: Dispatch::new(Registry::default().with(layer)), + dispatcher: Dispatch::new(Registry::default().with(telemetry).with(layer)), } } @@ -124,9 +146,6 @@ impl Layer for CallbackLayer { let mut visitor = JsonVisitor::new(event.metadata().level(), event.metadata().target()); event.record(&mut visitor); - let _ = self - .callback - .0 - .call1(&JsValue::NULL, &JsValue::from_str(&visitor.to_string())); + let _ = self.callback.call(visitor.to_string()); } } diff --git a/query-engine/query-engine-wasm/src/wasm/tracer.rs b/query-engine/query-engine-wasm/src/wasm/tracer.rs new file mode 100644 index 000000000000..7bcd1ab81043 --- /dev/null +++ b/query-engine/query-engine-wasm/src/wasm/tracer.rs @@ -0,0 +1,93 @@ +use async_trait::async_trait; +use opentelemetry::{ + global, sdk, + sdk::{ + export::trace::{ExportResult, SpanData, SpanExporter}, + propagation::TraceContextPropagator, + }, + trace::{TraceError, TracerProvider}, +}; +use query_core::telemetry; +use std::fmt::{self, Debug}; + +use crate::logger::LogCallback; + +/// Pipeline builder +#[derive(Debug)] +pub struct PipelineBuilder { + trace_config: Option, +} + +/// Create a new stdout exporter pipeline builder. +pub fn new_pipeline() -> PipelineBuilder { + PipelineBuilder::default() +} + +impl Default for PipelineBuilder { + /// Return the default pipeline builder. + fn default() -> Self { + Self { trace_config: None } + } +} + +impl PipelineBuilder { + /// Assign the SDK trace configuration. 
+ #[allow(dead_code)] + pub fn with_trace_config(mut self, config: sdk::trace::Config) -> Self { + self.trace_config = Some(config); + self + } +} + +impl PipelineBuilder { + pub fn install_simple(mut self, log_callback: LogCallback) -> sdk::trace::Tracer { + global::set_text_map_propagator(TraceContextPropagator::new()); + let exporter = ClientSpanExporter::new(log_callback); + + let mut provider_builder = sdk::trace::TracerProvider::builder().with_simple_exporter(exporter); + // This doesn't work at the moment because we create the logger outside of an async runtime + // we could later move the creation of logger into the `connect` function + // let mut provider_builder = sdk::trace::TracerProvider::builder().with_batch_exporter(exporter, runtime::Tokio); + // remember to add features = ["rt-tokio"] to the cargo.toml + if let Some(config) = self.trace_config.take() { + provider_builder = provider_builder.with_config(config); + } + let provider = provider_builder.build(); + let tracer = provider.tracer("opentelemetry"); + global::set_tracer_provider(provider); + + tracer + } +} + +/// A [`ClientSpanExporter`] that sends spans to the JS callback. 
+pub struct ClientSpanExporter { + callback: LogCallback, +} + +impl ClientSpanExporter { + pub fn new(callback: LogCallback) -> Self { + Self { callback } + } +} + +impl Debug for ClientSpanExporter { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("ClientSpanExporter").finish() + } +} + +#[async_trait] +impl SpanExporter for ClientSpanExporter { + /// Export spans to stdout + async fn export(&mut self, batch: Vec) -> ExportResult { + let result = telemetry::helpers::spans_to_json(batch); + let status = self.callback.call(result); + + if let Err(err) = status { + return Err(TraceError::from(format!("Could not call JS callback: {}", err))); + } + + Ok(()) + } +} diff --git a/query-engine/request-handlers/Cargo.toml b/query-engine/request-handlers/Cargo.toml index 51ed4bd8b5ad..3686f14154af 100644 --- a/query-engine/request-handlers/Cargo.toml +++ b/query-engine/request-handlers/Cargo.toml @@ -20,7 +20,7 @@ bigdecimal = "0.3" thiserror = "1" tracing = "0.1" url = "2" -connection-string.workspace = true +connection-string.workspace = true once_cell = "1.15" mongodb-query-connector = { path = "../connectors/mongodb-query-connector", optional = true } @@ -32,11 +32,17 @@ schema = { path = "../schema" } codspeed-criterion-compat = "1.1.0" [features] -default = ["sql", "mongodb", "native"] +default = ["sql", "mongodb", "native", "graphql-protocol"] mongodb = ["mongodb-query-connector"] sql = ["sql-query-connector"] driver-adapters = ["sql-query-connector/driver-adapters"] -native = ["mongodb", "sql-query-connector", "quaint/native", "query-core/metrics"] +native = [ + "mongodb", + "sql-query-connector", + "quaint/native", + "query-core/metrics", +] +graphql-protocol = ["query-core/graphql-protocol"] [[bench]] name = "query_planning_bench" diff --git a/query-engine/request-handlers/src/lib.rs b/query-engine/request-handlers/src/lib.rs index 361e5c628bdf..949c26b302f3 100644 --- a/query-engine/request-handlers/src/lib.rs +++ 
b/query-engine/request-handlers/src/lib.rs @@ -12,7 +12,9 @@ mod response; pub use self::{error::HandlerError, load_executor::load as load_executor}; pub use connector_mode::ConnectorMode; pub use handler::*; -pub use protocols::{graphql::*, json::*, RequestBody}; +#[cfg(feature = "graphql-protocol")] +pub use protocols::graphql::*; +pub use protocols::{json::*, RequestBody}; pub use response::*; pub type Result = std::result::Result; diff --git a/query-engine/request-handlers/src/protocols/mod.rs b/query-engine/request-handlers/src/protocols/mod.rs index e2c50c2e7f1f..93bac460fecb 100644 --- a/query-engine/request-handlers/src/protocols/mod.rs +++ b/query-engine/request-handlers/src/protocols/mod.rs @@ -1,3 +1,4 @@ +#[cfg(feature = "graphql-protocol")] pub mod graphql; pub mod json; @@ -5,6 +6,7 @@ use query_core::{protocol::EngineProtocol, schema::QuerySchemaRef, QueryDocument #[derive(Debug)] pub enum RequestBody { + #[cfg(feature = "graphql-protocol")] Graphql(graphql::GraphqlBody), Json(json::JsonBody), } @@ -12,6 +14,7 @@ pub enum RequestBody { impl RequestBody { pub fn into_doc(self, query_schema: &QuerySchemaRef) -> crate::Result { match self { + #[cfg(feature = "graphql-protocol")] RequestBody::Graphql(body) => body.into_doc(), RequestBody::Json(body) => body.into_doc(query_schema), } @@ -19,6 +22,7 @@ impl RequestBody { pub fn try_from_str(val: &str, engine_protocol: EngineProtocol) -> Result { match engine_protocol { + #[cfg(feature = "graphql-protocol")] EngineProtocol::Graphql => serde_json::from_str::(val).map(Self::from), EngineProtocol::Json => serde_json::from_str::(val).map(Self::from), } @@ -26,12 +30,14 @@ impl RequestBody { pub fn try_from_slice(val: &[u8], engine_protocol: EngineProtocol) -> Result { match engine_protocol { + #[cfg(feature = "graphql-protocol")] EngineProtocol::Graphql => serde_json::from_slice::(val).map(Self::from), EngineProtocol::Json => serde_json::from_slice::(val).map(Self::from), } } } +#[cfg(feature = 
"graphql-protocol")] impl From for RequestBody { fn from(body: graphql::GraphqlBody) -> Self { Self::Graphql(body)