Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
18 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ jobs:
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: nightly-2022-12-02-aarch64-apple-darwin
toolchain: nightly-2023-04-07
components: rustfmt, clippy
- name: Check code format
uses: actions-rs/cargo@v1
Expand All @@ -31,7 +31,7 @@ jobs:
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: nightly-2022-12-02-aarch64-apple-darwin
toolchain: nightly-2023-04-07
- uses: actions/checkout@v2
- name: Build
uses: actions-rs/cargo@v1
Expand All @@ -45,7 +45,7 @@ jobs:
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: nightly-2022-12-02-aarch64-apple-darwin
toolchain: nightly-2023-04-07
- uses: actions/checkout@v2
- name: Test
uses: actions-rs/cargo@v1
Expand Down
28 changes: 28 additions & 0 deletions .github/workflows/cr.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
name: Code Review

permissions:
contents: read
pull-requests: write

on:
pull_request:
types: [opened, reopened, synchronize]

jobs:
test:
# if: ${{ contains(github.event.*.labels.*.name, 'gpt review') }} # Optional; to run only when a label is attached
runs-on: ubuntu-latest
steps:
- uses: anc95/ChatGPT-CodeReview@main
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
# Optional
LANGUAGE: Chinese
OPENAI_API_ENDPOINT: https://api.openai.com/v1
MODEL: gpt-3.5-turbo
PROMPT:
top_p: 1
temperature: 1
max_tokens: 10000
MAX_PATCH_LENGTH: 10000 # if the patch/diff length is larger than MAX_PATCH_LENGTH, it will be ignored and not reviewed. By default, with no MAX_PATCH_LENGTH set, there is no limit on the patch/diff length.
10 changes: 7 additions & 3 deletions Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,5 +1,3 @@


# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[package]
Expand All @@ -10,7 +8,6 @@ edition = "2021"
[lib]
doctest = false


[dependencies]
log = "^0.4"
sqlparser = "0.34.0"
Expand All @@ -33,6 +30,13 @@ tokio-process = "0.2.5"
serde = { version = "1", features = ["derive", "rc"] }
serde_json = "1"
async-trait = "0.1.68"
integer-encoding = "3.0.4"
petgraph = "0.6.3"
futures-async-stream = "0.2.6"
async-channel = "1.8.0"
async-backtrace = "0.2.6"
futures = "0.3.25"
futures-lite = "1.12.0"

[dev-dependencies]
ctor = "0.2.0"
Expand Down
86 changes: 86 additions & 0 deletions src/binder/create.rs
Original file line number Diff line number Diff line change
@@ -1 +1,87 @@
use super::Binder;
use crate::binder::{lower_case_name, split_name};
use crate::catalog::{Column, ColumnDesc};
use crate::planner::logical_create_table_plan::LogicalCreateTablePlan;
use crate::planner::LogicalPlan;
use crate::types::ColumnId;
use anyhow::Result;
use sqlparser::ast::{ColumnDef, ObjectName};
use std::collections::HashSet;

impl Binder {
    /// Bind a `CREATE TABLE` statement into a [`LogicalCreateTablePlan`].
    ///
    /// The object name is lower-cased and split into `(schema, table)`;
    /// column names are checked for duplicates before the plan is built.
    ///
    /// # Errors
    /// Returns an error when the object name is malformed or when two
    /// columns share the same name.
    pub(crate) fn bind_create_table(
        &mut self,
        name: ObjectName,
        columns: &[ColumnDef],
    ) -> Result<LogicalCreateTablePlan> {
        let name = lower_case_name(&name);

        let (_, table_name) = split_name(&name)?;

        // Reject duplicated column names up front: `HashSet::insert`
        // returns `false` when the value was already present.
        let mut seen = HashSet::new();
        for col in columns.iter() {
            if !seen.insert(col.name.value.clone()) {
                return Err(anyhow::anyhow!(
                    "bind duplicated column {}",
                    col.name.value
                ));
            }
        }

        // Convert AST column definitions into catalog columns, then
        // collect the `(name, desc)` pairs the plan stores.
        let columns: Vec<Column> = columns.iter().map(Column::from).collect();

        let plan = LogicalCreateTablePlan {
            table_name: table_name.to_string(),
            columns: columns
                .into_iter()
                .map(|col| (col.name.to_string(), col.desc.clone()))
                .collect(),
        };
        Ok(plan)
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::binder::BinderContext;
    use crate::catalog::Root;
    use crate::types::{DataTypeExt, DataTypeKind};
    use sqlparser::ast::CharacterLength;
    use std::sync::Arc;

    /// Binding a `CREATE TABLE` statement should yield the expected
    /// logical plan, with every column bound as nullable.
    #[test]
    fn test_create_bind() {
        let sql = "create table t1 (id int , name varchar(10))";
        let stmt = crate::parser::parse_sql(sql).unwrap();

        let mut binder = Binder::new(BinderContext::new(Arc::new(Root::new())));
        let bound_plan = binder.bind(&stmt[0]).unwrap();

        let varchar_len = CharacterLength {
            length: 10,
            unit: None,
        };
        let expected = LogicalPlan::CreateTable(LogicalCreateTablePlan {
            table_name: "t1".to_string(),
            columns: vec![
                (
                    "id".to_string(),
                    DataTypeKind::Int(None).nullable().to_column(),
                ),
                (
                    "name".to_string(),
                    DataTypeKind::Varchar(Some(varchar_len))
                        .nullable()
                        .to_column(),
                ),
            ],
        });

        assert_eq!(bound_plan, expected);
    }
}
37 changes: 29 additions & 8 deletions src/binder/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,18 +5,16 @@ mod select;

use std::collections::HashMap;

use crate::{
catalog::{CatalogRef, TableRefId},
expression::ScalarExpression,
planner::LogicalPlan,
};
use crate::{catalog::CatalogRef, expression::ScalarExpression, planner::LogicalPlan};

use crate::catalog::DEFAULT_SCHEMA_NAME;
use crate::types::TableId;
use anyhow::Result;
use sqlparser::ast::Statement;

use sqlparser::ast::{Ident, ObjectName, Statement};
#[derive(Clone)]
pub struct BinderContext {
catalog: CatalogRef,
bind_table: HashMap<String, TableRefId>,
bind_table: HashMap<String, TableId>,
aliases: HashMap<String, ScalarExpression>,
group_by_exprs: Vec<ScalarExpression>,
agg_calls: Vec<ScalarExpression>,
Expand Down Expand Up @@ -65,8 +63,31 @@ impl Binder {
let plan = self.bind_query(query)?;
LogicalPlan::Select(plan)
}
Statement::CreateTable { name, columns, .. } => {
let plan = self.bind_create_table(name.to_owned(), &columns)?;
LogicalPlan::CreateTable(plan)
}
_ => unimplemented!(),
};
Ok(plan)
}
}

/// Convert an object name into lower case
/// Return a copy of `name` with every identifier lower-cased.
///
/// NOTE(review): rebuilding with `Ident::new` drops any quote style the
/// original identifiers carried — same as the previous implementation.
fn lower_case_name(name: &ObjectName) -> ObjectName {
    let mut idents = Vec::with_capacity(name.0.len());
    for ident in &name.0 {
        idents.push(Ident::new(ident.value.to_lowercase()));
    }
    ObjectName(idents)
}

/// Split an object name into `(schema name, table name)`, falling back to
/// the default schema when only a bare table name is given.
fn split_name(name: &ObjectName) -> Result<(&str, &str)> {
    match name.0.as_slice() {
        [table] => Ok((DEFAULT_SCHEMA_NAME, table.value.as_str())),
        [schema, table] => Ok((schema.value.as_str(), table.value.as_str())),
        _ => Err(anyhow::anyhow!("Invalid table name: {:?}", name)),
    }
}
16 changes: 9 additions & 7 deletions src/binder/select.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
use std::{borrow::Borrow, sync::Arc};

use crate::{
catalog::{ColumnRefId, DEFAULT_DATABASE_NAME, DEFAULT_SCHEMA_NAME},
catalog::ColumnRefId,
expression::ScalarExpression,
planner::{
logical_select_plan::LogicalSelectPlan,
Expand All @@ -16,6 +16,7 @@ use crate::{

use super::Binder;

use crate::catalog::{DEFAULT_DATABASE_NAME, DEFAULT_SCHEMA_NAME};
use anyhow::Result;
use itertools::Itertools;
use sqlparser::ast::{
Expand All @@ -24,7 +25,7 @@ use sqlparser::ast::{
};

impl Binder {
pub(super) fn bind_query(&mut self, query: &Query) -> Result<LogicalSelectPlan> {
pub(crate) fn bind_query(&mut self, query: &Query) -> Result<LogicalSelectPlan> {
if let Some(_with) = &query.with {
// TODO support with clause.
}
Expand Down Expand Up @@ -127,7 +128,8 @@ impl Binder {
.map(|ident| Ident::new(ident.value.to_lowercase()))
.collect_vec();

let (database, schema, mut table): (&str, &str, &str) = match obj_name.as_slice() {
let (_database, _schema, mut table): (&str, &str, &str) = match obj_name.as_slice()
{
[table] => (DEFAULT_DATABASE_NAME, DEFAULT_SCHEMA_NAME, &table.value),
[schema, table] => (DEFAULT_DATABASE_NAME, &schema.value, &table.value),
[database, schema, table] => (&database.value, &schema.value, &table.value),
Expand All @@ -147,7 +149,7 @@ impl Binder {
let table_ref_id = self
.context
.catalog
.get_table_id_by_name(database, schema, table)
.get_table_id_by_name(table)
.ok_or_else(|| anyhow::Error::msg(format!("bind table {}", table)))?;

self.context.bind_table.insert(table.into(), table_ref_id);
Expand Down Expand Up @@ -198,7 +200,7 @@ impl Binder {
fn bind_all_column_refs(&mut self) -> Result<Vec<ScalarExpression>> {
let mut exprs = vec![];
for ref_id in self.context.bind_table.values().cloned().collect_vec() {
let table = self.context.catalog.get_table(&ref_id).unwrap();
let table = self.context.catalog.get_table(ref_id).unwrap();
for (col_id, col) in &table.get_all_columns() {
let column_ref_id = ColumnRefId::from_table(ref_id, *col_id);
// self.record_regular_table_column(
Expand All @@ -209,8 +211,8 @@ impl Binder {
// );
let expr = ScalarExpression::ColumnRef {
column_ref_id,
primary_key: col.is_primary(),
desc: col.desc().clone(),
primary_key: col.desc.is_primary(),
desc: col.desc.clone(),
};
exprs.push(expr);
}
Expand Down
Loading