Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
61 changes: 43 additions & 18 deletions packages/cubejs-backend-native/js/index.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import fs from 'fs';
import path from 'path';
import { Writable } from 'stream';
// import { getEnv } from '@cubejs-backend/shared';

export interface BaseMeta {
Expand Down Expand Up @@ -115,6 +116,9 @@ function wrapNativeFunctionWithChannelCallback(
};
};

/**
 * Best-effort conversion of an unknown error value into a human-readable string:
 * prefer `err.message`, then `err.stack`; otherwise use `toString()` for string
 * primitives and `JSON.stringify` for everything else.
 *
 * Fix: `||` binds tighter than `?:`, so the previous one-liner parsed as
 * `(message || stack || isString) ? err.toString() : JSON.stringify(err)` and
 * never returned `message`/`stack` — an Error became "[object Object]".
 */
const errorString = (err: any): string =>
  err.message ||
  err.stack?.toString() ||
  (typeof err === 'string' ? err.toString() : JSON.stringify(err));

// TODO: Refactor - define classes
function wrapNativeFunctionWithStream(
fn: (extra: any) => unknown | Promise<unknown>
Expand All @@ -129,27 +133,48 @@ function wrapNativeFunctionWithStream(
streamResponse = await fn(JSON.parse(extra));
if (streamResponse && streamResponse.stream) {
writer.start();
let chunk: object[] = [];
streamResponse.stream.on('data', (c: object) => {
chunk.push(c);
if (chunk.length >= chunkLength) {
if (!writer.chunk(JSON.stringify(chunk))) {
// TODO replace with actual stream and high watermark implementation
streamResponse.stream.destroy({
stack: "Rejected by client"
});

let chunkBuffer: any[] = [];
const writable = new Writable({
objectMode: true,
highWaterMark: chunkLength,
write(row: any, encoding: BufferEncoding, callback: (error?: (Error | null)) => void) {
chunkBuffer.push(row);
if (chunkBuffer.length < chunkLength) {
callback(null);
} else {
const toSend = chunkBuffer;
chunkBuffer = [];
writer.chunk(toSend, callback);
}
chunk = [];
}
});
streamResponse.stream.on('close', () => {
if (chunk.length > 0) {
writer.chunk(JSON.stringify(chunk));

},
final(callback: (error?: (Error | null)) => void) {
const end = (err: any) => {
if (err) {
callback(err);
} else {
writer.end(callback);
}
}
if (chunkBuffer.length > 0) {
const toSend = chunkBuffer;
chunkBuffer = [];
writer.chunk(toSend, end);
} else {
end(null);
}
},
destroy(error: Error | null, callback: (error: (Error | null)) => void) {
if (error) {
writer.reject(errorString(error));
}
callback(null);
}
writer.end("");
});
streamResponse.stream.pipe(writable);
streamResponse.stream.on('error', (err: any) => {
writer.reject(err.message || err.toString());
writable.destroy(err);
});
} else {
throw new Error(`Expected stream but nothing returned`);
Expand All @@ -158,7 +183,7 @@ function wrapNativeFunctionWithStream(
if (!!streamResponse && !!streamResponse.stream) {
streamResponse.stream.destroy(e);
}
writer.reject(e.message || e.toString());
writer.reject(errorString(e));
}
};
};
Expand Down
140 changes: 124 additions & 16 deletions packages/cubejs-backend-native/src/stream.rs
Original file line number Diff line number Diff line change
@@ -1,3 +1,7 @@
use cubesql::compile::engine::df::scan::{
transform_response, FieldValue, MemberField, RecordBatch, SchemaRef, ValueObject,
};
use std::future::Future;
use std::sync::{Arc, Mutex};

use cubesql::CubeError;
Expand All @@ -10,11 +14,14 @@ use crate::utils::bind_method;
use tokio::sync::mpsc::{channel as mpsc_channel, Receiver, Sender};
use tokio::sync::oneshot;

type Chunk = Result<String, CubeError>;
type Chunk = Option<Result<RecordBatch, CubeError>>;

pub struct JsWriteStream {
sender: Sender<Chunk>,
ready_sender: Mutex<Option<oneshot::Sender<Result<(), CubeError>>>>,
tokio_handle: tokio::runtime::Handle,
schema: SchemaRef,
member_fields: Vec<MemberField>,
}

impl Finalize for JsWriteStream {}
Expand Down Expand Up @@ -45,10 +52,13 @@ impl JsWriteStream {
Ok(obj)
}

fn push_chunk(&self, chunk: String) -> bool {
match self.sender.try_send(Ok(chunk)) {
Err(_) => false,
Ok(_) => true,
fn push_chunk(&self, chunk: RecordBatch) -> impl Future<Output = Result<(), CubeError>> {
let sender = self.sender.clone();
async move {
sender
.send(Some(Ok(chunk)))
.await
.map_err(|e| CubeError::user(format!("Can't send to channel: {}", e)))
}
}

Expand All @@ -58,29 +68,119 @@ impl JsWriteStream {
}
}

fn end(&self) {
self.push_chunk("".to_string());
fn end(&self) -> impl Future<Output = Result<(), CubeError>> {
let sender = self.sender.clone();
async move {
sender
.send(None)
.await
.map_err(|e| CubeError::user(format!("Can't send to channel: {}", e)))
}
}

fn reject(&self, err: String) {
if let Some(ready_sender) = self.ready_sender.lock().unwrap().take() {
let _ = ready_sender.send(Err(CubeError::internal(err.to_string())));
}
let _ = self.sender.try_send(Err(CubeError::internal(err)));
let _ = self.sender.try_send(Some(Err(CubeError::internal(err))));
}
}

fn js_stream_push_chunk(mut cx: FunctionContext) -> JsResult<JsBoolean> {
/// Spawns `future` on the tokio runtime and, once it resolves, invokes the JS
/// `callback` on the Node event loop with a node-style argument list:
/// `(null, null)` on success, or a single error-message string on failure.
///
/// NOTE(review): failures of `channel.try_send` or of the callback invocation
/// itself are only logged — they cannot be surfaced back to the JS caller.
fn wait_for_future_and_execute_callback(
    tokio_handle: tokio::runtime::Handle,
    channel: Channel,
    callback: Root<JsFunction>,
    future: impl Future<Output = Result<(), CubeError>> + Send + Sync + 'static,
) {
    tokio_handle.spawn(async move {
        // Drive the Rust side of the operation to completion first.
        let push_result = future.await;
        // `try_send` schedules a closure on the Node.js event loop; the neon
        // Channel is the only safe way to touch JS values from a tokio thread.
        let send_result = channel.try_send(move |mut cx| {
            let undefined = cx.undefined();
            let result = match push_result {
                Ok(()) => {
                    // Success: call callback(null, null) — node-style "no error".
                    let args = vec![cx.null().upcast::<JsValue>(), cx.null().upcast::<JsValue>()];
                    callback.into_inner(&mut cx).call(&mut cx, undefined, args)
                }
                Err(e) => {
                    // Failure: pass the error message as the first argument.
                    let args = vec![cx.string(e.message).upcast::<JsValue>()];
                    callback.into_inner(&mut cx).call(&mut cx, undefined, args)
                }
            };
            if let Err(e) = result {
                log::error!("Error during callback execution: {}", e);
            }
            Ok(())
        });
        if let Err(e) = send_result {
            log::error!("Can't execute callback on node event loop: {}", e);
        }
    });
}

/// Adapter exposing a JS array of row objects as a `ValueObject`, letting
/// `transform_response` read rows directly out of V8 handles instead of
/// round-tripping through a JSON string.
pub struct JsValueObject<'a> {
    // Function context the handles below are valid in.
    pub cx: FunctionContext<'a>,
    // The JS array of row objects received from the stream writer.
    pub handle: Handle<'a, JsArray>,
}

impl ValueObject for JsValueObject<'_> {
    /// Number of rows in the underlying JS array.
    fn len(&mut self) -> Result<usize, CubeError> {
        Ok(self.handle.len(&mut self.cx) as usize)
    }

    /// Reads `array[index][field_name]` and maps the JS primitive onto a
    /// `FieldValue`. Both `undefined` and `null` map to `FieldValue::Null`;
    /// any non-primitive value is rejected with a user error.
    fn get(&mut self, index: usize, field_name: &str) -> Result<FieldValue, CubeError> {
        // Each element must itself be an object; a missing/non-object row is
        // reported with its index for easier debugging.
        let value = self
            .handle
            .get::<JsObject, _, _>(&mut self.cx, index as u32)
            .map_err(|e| {
                CubeError::user(format!("Can't get object at array index {}: {}", index, e))
            })?
            .get::<JsValue, _, _>(&mut self.cx, field_name)
            .map_err(|e| {
                CubeError::user(format!("Can't get '{}' field value: {}", field_name, e))
            })?;
        // Try the supported primitive types in turn via neon downcasts.
        if let Ok(s) = value.downcast::<JsString, _>(&mut self.cx) {
            Ok(FieldValue::String(s.value(&mut self.cx)))
        } else if let Ok(n) = value.downcast::<JsNumber, _>(&mut self.cx) {
            Ok(FieldValue::Number(n.value(&mut self.cx)))
        } else if let Ok(b) = value.downcast::<JsBoolean, _>(&mut self.cx) {
            Ok(FieldValue::Bool(b.value(&mut self.cx)))
        } else if value.downcast::<JsUndefined, _>(&mut self.cx).is_ok()
            || value.downcast::<JsNull, _>(&mut self.cx).is_ok()
        {
            Ok(FieldValue::Null)
        } else {
            Err(CubeError::user(format!(
                "Expected primitive value but found: {:?}",
                value
            )))
        }
    }
}

/// JS → Rust entry point for `writer.chunk(rows, callback)`.
///
/// Converts the incoming JS array of row objects into a `RecordBatch` using
/// the stream's schema and member fields, pushes it onto the async channel,
/// and invokes `callback(err?)` on the Node event loop once the send
/// completes.
fn js_stream_push_chunk(mut cx: FunctionContext) -> JsResult<JsUndefined> {
    #[cfg(build = "debug")]
    trace!("JsWriteStream.push_chunk");

    let this = cx
        .this()
        .downcast_or_throw::<JsBox<JsWriteStream>, _>(&mut cx)?;
    let chunk_array = cx.argument::<JsArray>(0)?;
    let callback = cx.argument::<JsFunction>(1)?.root(&mut cx);
    let mut value_object = JsValueObject {
        cx,
        handle: chunk_array,
    };
    // Throw a JS error instead of panicking across the FFI boundary when the
    // rows can't be converted to a RecordBatch (previously `.unwrap()`, which
    // would abort the native addon on malformed input).
    let value =
        match transform_response(&mut value_object, this.schema.clone(), &this.member_fields) {
            Ok(batch) => batch,
            Err(e) => return value_object.cx.throw_error(e.message),
        };
    let future = this.push_chunk(value);
    wait_for_future_and_execute_callback(
        this.tokio_handle.clone(),
        value_object.cx.channel(),
        callback,
        future,
    );

    Ok(value_object.cx.undefined())
}

fn js_stream_start(mut cx: FunctionContext) -> JsResult<JsUndefined> {
Expand All @@ -102,7 +202,9 @@ fn js_stream_end(mut cx: FunctionContext) -> JsResult<JsUndefined> {
let this = cx
.this()
.downcast_or_throw::<JsBox<JsWriteStream>, _>(&mut cx)?;
this.end();
let future = this.end();
let callback = cx.argument::<JsFunction>(0)?.root(&mut cx);
wait_for_future_and_execute_callback(this.tokio_handle.clone(), cx.channel(), callback, future);

Ok(cx.undefined())
}
Expand All @@ -123,16 +225,19 @@ pub async fn call_js_with_stream_as_callback(
channel: Arc<Channel>,
js_method: Arc<Root<JsFunction>>,
query: Option<String>,
schema: SchemaRef,
member_fields: Vec<MemberField>,
) -> Result<Receiver<Chunk>, CubeError> {
let chunk_size = std::env::var("CUBEJS_DB_QUERY_STREAM_HIGH_WATER_MARK")
let channel_size = std::env::var("CUBEJS_DB_QUERY_STREAM_HIGH_WATER_MARK")
.ok()
.map(|v| v.parse::<usize>().unwrap())
.unwrap_or(8192);
let channel_size = 1_000_000 / chunk_size;

let (sender, receiver) = mpsc_channel::<Chunk>(channel_size);
let (ready_sender, ready_receiver) = oneshot::channel();

let tokio_handle = tokio::runtime::Handle::current();

channel
.try_send(move |mut cx| {
// https://github.com/neon-bindings/neon/issues/672
Expand All @@ -144,6 +249,9 @@ pub async fn call_js_with_stream_as_callback(
let stream = JsWriteStream {
sender,
ready_sender: Mutex::new(Some(ready_sender)),
tokio_handle,
schema,
member_fields,
};
let this = cx.undefined();
let args: Vec<Handle<_>> = vec![
Expand Down
7 changes: 6 additions & 1 deletion packages/cubejs-backend-native/src/transport.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ use neon::prelude::*;

use async_trait::async_trait;
use cubeclient::models::{V1Error, V1LoadRequestQuery, V1LoadResponse, V1MetaResponse};
use cubesql::compile::engine::df::scan::{MemberField, SchemaRef};
use cubesql::{
di_service,
sql::AuthContextRef,
Expand Down Expand Up @@ -175,6 +176,8 @@ impl TransportService for NodeBridgeTransport {
query: V1LoadRequestQuery,
ctx: AuthContextRef,
meta: LoadRequestMeta,
schema: SchemaRef,
member_fields: Vec<MemberField>,
) -> Result<CubeStreamReceiver, CubeError> {
trace!("[transport] Request ->");

Expand All @@ -201,11 +204,13 @@ impl TransportService for NodeBridgeTransport {
self.channel.clone(),
self.on_load_stream.clone(),
Some(extra),
schema.clone(),
member_fields.clone(),
)
.await;

if let Err(e) = &res {
if e.message.to_lowercase() == "continue wait" {
if e.message.to_lowercase().contains("continue wait") {
continue;
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -601,10 +601,10 @@ export class QueryCache {
.then(([client]) => client.streamQuery(req.query, req.values))
.then((source) => {
const cleanup = (error) => {
if (!source.destroyed) {
if (error && !source.destroyed) {
source.destroy(error);
}
if (!target.destroyed) {
if (error && !target.destroyed) {
target.destroy(error);
}
if (!logged && source.destroyed && target.destroyed) {
Expand All @@ -625,13 +625,13 @@ export class QueryCache {
}
};

source.once('end', cleanup);
source.once('end', () => cleanup(undefined));
source.once('error', cleanup);
source.once('close', cleanup);
source.once('close', () => cleanup(undefined));

target.once('end', cleanup);
target.once('end', () => cleanup(undefined));
target.once('error', cleanup);
target.once('close', cleanup);
target.once('close', () => cleanup(undefined));

source.pipe(target);
})
Expand Down
Loading