chore(deps): update dependencies (#848)
Signed-off-by: Runji Wang <wangrunji0408@163.com>
wangrunji0408 committed Apr 18, 2024
1 parent a94674d commit e3b34ff
Showing 16 changed files with 568 additions and 690 deletions.
967 changes: 444 additions & 523 deletions Cargo.lock

Large diffs are not rendered by default.

16 changes: 8 additions & 8 deletions Cargo.toml
@@ -45,27 +45,27 @@ indicatif = "0.17"
indoc = "2"
iter-chunks = "0.2"
itertools = "0.12"
minitrace = "0.4"
minitrace = { version = "0.6", features = ["enable"] }
moka = { version = "0.12", features = ["future"] }
num-traits = "0.2"
ordered-float = { version = "4", features = ["serde"] }
parking_lot = "0.12"
parse-display = "0.8"
parse-display = "0.9"
paste = "1"
pgwire = "0.15"
pgwire = "0.20"
pretty-xmlish = "0.1"
prost = "0.12"
pyo3 = { version = "0.20", features = ["extension-module"], optional = true }
pyo3 = { version = "0.21", features = ["extension-module"], optional = true }
ref-cast = "1.0"
regex = "1"
risinglight_proto = "0.2"
rust_decimal = "1"
rustyline = "13"
rustyline = "14"
serde = { version = "1", features = ["derive", "rc"] }
serde_json = "1"
smallvec = { version = "1", features = ["serde"] }
sqllogictest = "0.14"
sqlparser = { version = "0.41", features = ["serde"] }
sqllogictest = "0.20"
sqlparser = { version = "0.45", features = ["serde"] }
thiserror = "1"
tikv-jemallocator = { version = "0.5", optional = true, features = [
"disable_initial_exec_tls",
@@ -86,7 +86,7 @@ tempfile = "3"
test-case = "3"

[build-dependencies]
pyo3-build-config = { version = "0.20", optional = true }
pyo3-build-config = { version = "0.21", optional = true }

[[test]]
name = "sqllogictest"
4 changes: 2 additions & 2 deletions src/array/ops.rs
@@ -242,9 +242,9 @@ impl ArrayImpl {
})))
}

pub fn extract(&self, field: DateTimeField) -> Result {
pub fn extract(&self, field: &DateTimeField) -> Result {
Ok(match self {
A::Date(a) => match field.0 {
A::Date(a) => match &field.0 {
sqlparser::ast::DateTimeField::Year => {
A::new_int32(unary_op(a.as_ref(), |d| d.year()))
}
23 changes: 11 additions & 12 deletions src/binder/create_table.rs
@@ -135,7 +135,8 @@ impl Binder {

for (index, col_def) in columns.iter().enumerate() {
for option_def in &col_def.options {
let is_primary_ = if let ColumnOption::Unique { is_primary } = option_def.option {
let is_primary_ = if let ColumnOption::Unique { is_primary, .. } = option_def.option
{
is_primary
} else {
false
@@ -150,21 +151,19 @@

/// get the primary keys' name sorted by declaration order in "primary key(c1, c2..)" syntax.
fn pks_name_from_constraints(constraints: &[TableConstraint]) -> Vec<String> {
let mut pks_name_from_constraints = vec![];

for constraint in constraints {
match constraint {
TableConstraint::Unique {
is_primary,
columns,
..
} if *is_primary => columns.iter().for_each(|ident| {
pks_name_from_constraints.push(ident.value.to_lowercase());
}),
TableConstraint::PrimaryKey { columns, .. } => {
return columns
.iter()
.map(|ident| ident.value.to_lowercase())
.collect()
}
_ => continue,
}
}
pks_name_from_constraints
// no primary key
vec![]
}
}

@@ -176,7 +175,7 @@ impl From<&ColumnDef> for ColumnCatalog {
match opt.option {
ColumnOption::Null => is_nullable = true,
ColumnOption::NotNull => is_nullable = false,
ColumnOption::Unique { is_primary: p } => is_primary = p,
ColumnOption::Unique { is_primary: p, .. } => is_primary = p,
_ => todo!("column options"),
}
}
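
The hunks above track sqlparser 0.45, which adds fields to `ColumnOption::Unique` (hence the new `..`) and reports a table-level `PRIMARY KEY (...)` as its own `TableConstraint::PrimaryKey` variant rather than `TableConstraint::Unique { is_primary: true, .. }`. A minimal sketch of how the two declaration styles are told apart under the new AST; the helper below is illustrative only, and any fields beyond `columns`/`is_primary` are elided:

use sqlparser::ast::{ColumnDef, ColumnOption, TableConstraint};

// Collect primary-key column names from either declaration style
// (sketch; not the binder's actual code).
fn primary_key_columns(columns: &[ColumnDef], constraints: &[TableConstraint]) -> Vec<String> {
    // Table-level: PRIMARY KEY (c1, c2, ...)
    for constraint in constraints {
        if let TableConstraint::PrimaryKey { columns, .. } = constraint {
            return columns.iter().map(|ident| ident.value.to_lowercase()).collect();
        }
    }
    // Column-level: c1 INT PRIMARY KEY
    columns
        .iter()
        .filter(|col| {
            col.options
                .iter()
                .any(|opt| matches!(opt.option, ColumnOption::Unique { is_primary: true, .. }))
        })
        .map(|col| col.name.value.to_lowercase())
        .collect()
}
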
10 changes: 5 additions & 5 deletions src/binder/create_view.rs
@@ -7,7 +7,7 @@ impl Binder {
pub(super) fn bind_create_view(
&mut self,
name: ObjectName,
columns: Vec<Ident>,
columns: Vec<ViewColumnDef>,
query: Query,
) -> Result {
let name = lower_case_name(&name);
@@ -23,8 +23,8 @@
// check duplicated column names
let mut set = HashSet::new();
for col in &columns {
if !set.insert(col.value.to_lowercase()) {
return Err(BindError::ColumnExists(col.value.to_lowercase()));
if !set.insert(col.name.value.to_lowercase()) {
return Err(BindError::ColumnExists(col.name.value.to_lowercase()));
}
}

@@ -41,10 +41,10 @@
.into_iter()
.zip(output_types)
.enumerate()
.map(|(idx, (name, ty))| {
.map(|(idx, (column, ty))| {
ColumnCatalog::new(
idx as ColumnId,
ColumnDesc::new(name.value, ty.clone(), true),
ColumnDesc::new(column.name.value, ty.clone(), true),
)
})
.collect();
10 changes: 5 additions & 5 deletions src/binder/delete.rs
@@ -3,11 +3,11 @@
use super::*;

impl Binder {
pub(super) fn bind_delete(
&mut self,
from: Vec<TableWithJoins>,
selection: Option<Expr>,
) -> Result {
pub(super) fn bind_delete(&mut self, from: FromTable, selection: Option<Expr>) -> Result {
let from = match from {
FromTable::WithFromKeyword(t) => t,
FromTable::WithoutKeyword(t) => t,
};
if from.len() != 1 || !from[0].joins.is_empty() {
return Err(BindError::Todo(format!("delete from {from:?}")));
}
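
The `FromTable` wrapper is new in sqlparser 0.45: it records whether the DELETE statement spelled out the FROM keyword, and both variants carry the same `Vec<TableWithJoins>`, so the binder simply unwraps either one. A small illustrative helper (the function name is ours, not the crate's):

use sqlparser::ast::{FromTable, TableWithJoins};

// Unwrap either spelling of the DELETE target list (sketch).
fn delete_tables(from: FromTable) -> Vec<TableWithJoins> {
    match from {
        FromTable::WithFromKeyword(tables) => tables,
        FromTable::WithoutKeyword(tables) => tables,
    }
}
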
4 changes: 4 additions & 0 deletions src/db.rs
@@ -3,6 +3,8 @@
use std::sync::{Arc, Mutex};

use futures::TryStreamExt;
use minitrace::collector::SpanContext;
use minitrace::Span;
use risinglight_proto::rowset::block_statistics::BlockStatisticsType;

use crate::array::Chunk;
@@ -77,6 +79,8 @@ impl Database {

/// Run SQL queries and return the outputs.
pub async fn run(&self, sql: &str) -> Result<Vec<Chunk>, Error> {
let _root = Span::root("run_sql", SpanContext::random());

let sql = if let Some(cmd) = sql.trim().strip_prefix('\\') {
self.command_to_sql(cmd)?
} else {
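
With minitrace 0.6, spans are no longer gathered through a per-query collector; a global reporter is installed once, every `Span::root` created inside `Database::run` is delivered to it, and `flush` ships the records before exit. A minimal sketch of the pattern adopted here (grounded in the hunks in this commit; the traced work is a placeholder):

use minitrace::collector::{Config, ConsoleReporter, SpanContext};
use minitrace::Span;

fn main() {
    // Install a global reporter once at startup (what main.rs now does
    // behind --enable-tracing).
    minitrace::set_reporter(ConsoleReporter, Config::default());

    {
        // Each query gets a root span; in db.rs this guard wraps Database::run.
        let _root = Span::root("run_sql", SpanContext::random());
        // ... do the traced work while the guard is alive ...
    }

    // Deliver all recorded spans to the reporter before exiting.
    minitrace::flush();
}
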
2 changes: 1 addition & 1 deletion src/executor/evaluator.rs
@@ -83,7 +83,7 @@ impl<'a> Evaluator<'a> {
},
Extract([field, a]) => {
let a = self.next(*a).eval(chunk)?;
let Expr::Field(field) = self.expr[*field] else {
let Expr::Field(field) = &self.expr[*field] else {
panic!("not a field")
};
a.extract(field)
2 changes: 1 addition & 1 deletion src/lib.rs
@@ -72,7 +72,7 @@ use pyo3::{prelude::*, wrap_pyfunction};
/// The entry point of python module must be in the lib.rs
#[cfg(feature = "python")]
#[pymodule]
fn risinglight(_py: Python<'_>, m: &PyModule) -> PyResult<()> {
fn risinglight(m: &Bound<'_, PyModule>) -> PyResult<()> {
m.add_function(wrap_pyfunction!(open, m)?)?;
Ok(())
}
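
pyo3 0.21 moves module initializers to the `Bound<'_, PyModule>` smart-pointer API, so the explicit GIL token parameter is no longer needed. A minimal sketch of the new shape; the `open` function below is a stand-in with a made-up signature, not the crate's real one:

use pyo3::prelude::*;
use pyo3::wrap_pyfunction;

// Placeholder for the crate's real `open` function (hypothetical signature).
#[pyfunction]
fn open(path: String) -> PyResult<String> {
    Ok(path)
}

// pyo3 0.21: the module is received as `&Bound<'_, PyModule>`; the old
// `_py: Python<'_>` argument goes away.
#[pymodule]
fn risinglight(m: &Bound<'_, PyModule>) -> PyResult<()> {
    m.add_function(wrap_pyfunction!(open, m)?)?;
    Ok(())
}
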
87 changes: 20 additions & 67 deletions src/main.rs
@@ -14,7 +14,6 @@ use async_trait::async_trait;
use clap::Parser;
use humantime::format_duration;
use itertools::Itertools;
use minitrace::prelude::*;
use risinglight::array::{datachunk_to_sqllogictest_string, Chunk};
use risinglight::server::run_server;
use risinglight::storage::SecondaryStorageOptions;
@@ -124,31 +123,15 @@ fn print_execution_time(start_time: Instant) {
}
}

async fn run_query_in_background(
db: Arc<Database>,
sql: String,
output_format: Option<String>,
enable_tracing: bool,
) {
async fn run_query_in_background(db: Arc<Database>, sql: String, output_format: Option<String>) {
let start_time = Instant::now();
let task = async move {
if enable_tracing {
let (root, collector) = Span::root("root");
let result = db.run(&sql).in_span(root).await;
let records: Vec<SpanRecord> = collector.collect().await;
println!("{records:#?}");
result
} else {
db.run(&sql).await
}
};

select! {
_ = signal::ctrl_c() => {
// we simply drop the future `task` to cancel the query.
println!("Interrupted");
}
ret = task => {
ret = db.run(&sql) => {
match ret {
Ok(chunks) => {
for chunk in chunks {
@@ -190,11 +173,7 @@ fn read_sql(rl: &mut DefaultEditor) -> Result<String, ReadlineError> {
}

/// Run RisingLight interactive mode
async fn interactive(
db: Database,
output_format: Option<String>,
enable_tracing: bool,
) -> Result<()> {
async fn interactive(db: Database, output_format: Option<String>) -> Result<()> {
let mut rl = DefaultEditor::new()?;
let history_path = dirs::cache_dir().map(|p| {
let cache_dir = p.join("risinglight");
@@ -220,8 +199,7 @@ async fn interactive(
Ok(sql) => {
if !sql.trim().is_empty() {
rl.add_history_entry(sql.as_str())?;
run_query_in_background(db.clone(), sql, output_format.clone(), enable_tracing)
.await;
run_query_in_background(db.clone(), sql, output_format.clone()).await;
}
}
Err(ReadlineError::Interrupted) => {
@@ -248,25 +226,12 @@
}

/// Run a SQL file in RisingLight
async fn run_sql(
db: Database,
path: &str,
output_format: Option<String>,
enable_tracing: bool,
) -> Result<()> {
async fn run_sql(db: Database, path: &str, output_format: Option<String>) -> Result<()> {
let lines = std::fs::read_to_string(path)?;

info!("{}", lines);

let chunks = if enable_tracing {
let (root, collector) = Span::root("root");
let chunk = db.run(&lines).in_span(root).await?;
let records: Vec<SpanRecord> = collector.collect().await;
println!("{records:#?}");
chunk
} else {
db.run(&lines).await?
};
let chunks = db.run(&lines).await?;

for chunk in chunks {
print_chunk(&chunk, &output_format);
@@ -279,11 +244,10 @@
struct DatabaseWrapper {
db: Database,
output_format: Option<String>,
enable_tracing: bool,
}

#[async_trait]
impl sqllogictest::AsyncDB for DatabaseWrapper {
impl sqllogictest::AsyncDB for &DatabaseWrapper {
type ColumnType = DefaultColumnType;
type Error = risinglight::Error;
async fn run(
@@ -302,15 +266,7 @@
};

info!("{}", sql);
let chunks = if self.enable_tracing {
let (root, collector) = Span::root("root");
let chunk = self.db.run(sql).in_span(root).await?;
let records: Vec<SpanRecord> = collector.collect().await;
println!("{records:#?}");
chunk
} else {
self.db.run(sql).await?
};
let chunks = self.db.run(sql).await?;

for chunk in &chunks {
print_chunk(chunk, &self.output_format);
@@ -336,17 +292,9 @@
}

/// Run a sqllogictest file in RisingLight
async fn run_sqllogictest(
db: Database,
path: &str,
output_format: Option<String>,
enable_tracing: bool,
) -> Result<()> {
let mut tester = sqllogictest::Runner::new(DatabaseWrapper {
db,
output_format,
enable_tracing,
});
async fn run_sqllogictest(db: Database, path: &str, output_format: Option<String>) -> Result<()> {
let db = DatabaseWrapper { db, output_format };
let mut tester = sqllogictest::Runner::new(|| async { Ok(&db) });
let path = path.to_string();

tester
@@ -373,6 +321,10 @@ async fn main() -> Result<()> {
.with(fmt_layer)
.init();
}
if args.enable_tracing {
use minitrace::collector::{Config, ConsoleReporter};
minitrace::set_reporter(ConsoleReporter, Config::default());
}

let db = if args.memory {
info!("using memory engine");
@@ -388,18 +340,19 @@

if let Some(file) = args.file {
if file.ends_with(".sql") {
run_sql(db, &file, args.output_format, args.enable_tracing).await?;
run_sql(db, &file, args.output_format).await?;
} else if file.ends_with(".slt") {
run_sqllogictest(db, &file, args.output_format, args.enable_tracing).await?;
run_sqllogictest(db, &file, args.output_format).await?;
} else {
warn!("No suffix detected, assume sql file");
run_sql(db, &file, args.output_format, args.enable_tracing).await?;
run_sql(db, &file, args.output_format).await?;
}
} else if args.server {
run_server(args.host, args.port, db).await;
} else {
interactive(db, args.output_format, args.enable_tracing).await?;
interactive(db, args.output_format).await?;
}

minitrace::flush();
Ok(())
}
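
sqllogictest 0.20 builds a `Runner` from a connection factory instead of a single database value, which is why `AsyncDB` is now implemented for `&DatabaseWrapper` and the runner borrows a wrapper owned by the caller. A rough sketch of the wiring, reusing the `DatabaseWrapper` defined above in main.rs and assuming the crate's `run_file_async` entry point:

// Sketch; `Database`, `DatabaseWrapper` and the AsyncDB impl for
// `&DatabaseWrapper` are the ones defined in main.rs above.
async fn run_sqllogictest(db: Database, path: &str, output_format: Option<String>) -> Result<()> {
    let db = DatabaseWrapper { db, output_format };
    // The factory closure is invoked for every connection the runner opens;
    // handing out `&db` lets all of them share one database instance.
    let mut tester = sqllogictest::Runner::new(|| async { Ok(&db) });
    tester.run_file_async(path).await?;
    Ok(())
}
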
6 changes: 2 additions & 4 deletions src/server/processor.rs
@@ -26,7 +26,7 @@ impl Processor {
impl SimpleQueryHandler for Processor {
async fn do_query<'a, 'b: 'a, C>(
&'b self,
_client: &C,
_client: &mut C,
query: &'a str,
) -> PgWireResult<Vec<Response<'a>>>
where
@@ -40,9 +40,7 @@
.map_err(|e| PgWireError::ApiError(Box::new(e)))?;

if !query.to_uppercase().starts_with("SELECT") {
return Ok(vec![Response::Execution(Tag::new_for_execution(
"OK", None,
))]);
return Ok(vec![Response::Execution(Tag::new("OK"))]);
}
let mut results = Vec::new();
let mut headers = None;
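
pgwire 0.20 passes the client to `SimpleQueryHandler::do_query` by mutable reference and drops `Tag::new_for_execution`; a command-complete response is now built from the tag name alone, as the hunk above shows. A tiny sketch of the new tag construction (the `api::results` import path is an assumption; the call itself is taken from the diff):

use pgwire::api::results::{Response, Tag};

// pgwire 0.20: command-complete tags are built directly from the tag name.
fn ok_response<'a>() -> Response<'a> {
    Response::Execution(Tag::new("OK"))
}
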
