Reworking the Canyon Connector (#38)
* #upgrade The Canyon database connector has been reworked, simplifying the process. The monstrous transmute that was used to get a mutable reference to a connection, required by the Tiberius query method, has been eliminated

* The db_type property has been moved from the datasource's properties to the root of the datasource configuration

* Bumped the toml dependency to 0.7.3. This forced us to change from borrowed to owned types in the structs that hold the user configuration

* #feature - Reworked the authentication properties in the configuration file, giving them a dedicated spot

* #feature - The auth property is mandatory for every datasource, looking forward to getting rid of the db_type property, since it will be inferred from the auth declaration

* #feature - Removed the db_type field from the configuration file options. The database type in use per datasource is now inferred from the auth key (see the configuration sketch at the end of this list)

* #feature - Added a test for the `SqlServerAuth` integrated auth option

* Adding the required features for working with the tiberius integrated authentication system

* Separating, via cfg features, the target-specific libraries that are conditionally needed to enable integrated auth for tiberius (MSSQL)

* Being more specific with targets for the CI process

* Upgrading the Rust version for the VMs

* Installing vendored OpenSSL

* Trying to solve the Kerberos issue on UNIX-based systems by installing the missing system headers

* Getting rid of the vendored OpenSSL installation. Propagating the installation of the gssapi headers to the other UNIX-based actions

* Adding the gssapi headers to the other steps. Bumped the syn deps

* Disabling a doc test in the canyon-macro module that was provoking linker issues under MSVC environments

* Correct typo in the CI action

* v0.2.0

* #feature - #[cfg(feature = "mssql-integrated-auth")]
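
For reference, a minimal sketch of how a datasource is declared after this rework, with the dedicated auth key replacing db_type (adapted from the mock configuration used in the connector tests further down; hosts, names and credentials are placeholders):

    [canyon_sql]
    datasources = [
        { name = 'PostgresDS', auth = { postgresql = { basic = { username = "postgres", password = "postgres" } } }, properties.host = 'localhost', properties.db_name = 'triforce', properties.migrations = 'enabled' },
        { name = 'SqlServerDS', auth = { sqlserver = { basic = { username = "sa", password = "SqlServer-10" } } }, properties.host = 'localhost', properties.port = 1433, properties.db_name = 'triforce2', properties.migrations = 'disabled' }
    ]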
TheRustifyer committed Apr 13, 2023
1 parent 5409cd1 commit ec47410
Showing 21 changed files with 300 additions and 219 deletions.
6 changes: 6 additions & 0 deletions .github/workflows/code-coverage.yml
@@ -25,6 +25,12 @@ jobs:
run: |
rustup toolchain install nightly
rustup override set nightly
- name: Make the USER own the working directory. Installing `gssapi` headers
if: ${{ matrix.os == 'ubuntu-latest' }}
run: |
sudo chown -R $USER:$USER ${{ github.workspace }}
sudo apt -y install gcc libgssapi-krb5-2 libkrb5-dev libsasl2-modules-gssapi-mit
- name: Caching cargo dependencies
id: project-cache
19 changes: 17 additions & 2 deletions .github/workflows/code-quality.yml
@@ -15,20 +15,30 @@ jobs:
steps:
- uses: actions/checkout@v3

- name: Make the USER own the working directory. Installing `gssapi` headers
run: |
sudo chown -R $USER:$USER ${{ github.workspace }}
sudo apt -y install gcc libgssapi-krb5-2 libkrb5-dev libsasl2-modules-gssapi-mit
- name: Caching project dependencies
id: project-cache
uses: Swatinem/rust-cache@v2

- uses: hecrj/setup-rust-action@v1
with:
components: clippy
- run: cargo clippy --workspace --all-targets --verbose --all-features -- -A clippy::question_mark
- run: cargo clippy --workspace --all-targets --verbose --all-features
rustfmt:
name: Verify code formatting
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3

- name: Make the USER own the working directory. Installing `gssapi` headers
run: |
sudo chown -R $USER:$USER ${{ github.workspace }}
sudo apt -y install gcc libgssapi-krb5-2 libkrb5-dev libsasl2-modules-gssapi-mit
- name: Caching project dependencies
id: project-cache
uses: Swatinem/rust-cache@v2
@@ -49,6 +59,11 @@ jobs:
steps:
- uses: actions/checkout@v3

- name: Make the USER own the working directory. Installing `gssapi` headers
run: |
sudo chown -R $USER:$USER ${{ github.workspace }}
sudo apt -y install gcc libgssapi-krb5-2 libkrb5-dev libsasl2-modules-gssapi-mit
- name: Caching project dependencies
id: project-cache
uses: Swatinem/rust-cache@v2
@@ -57,4 +72,4 @@ jobs:
with:
rust-version: nightly

- run: cargo rustdoc -p ${{ matrix.crate }} --all-features -- -D warnings
- run: cargo rustdoc --target=x86_64-unknown-linux-gnu -p ${{ matrix.crate }} --all-features -- -D warnings
16 changes: 11 additions & 5 deletions .github/workflows/continuous-integration.yml
@@ -23,9 +23,11 @@ jobs:
- { rust: stable, os: windows-latest }

steps:
- name: Make the USER own the working directory
- name: Make the USER own the working directory. Installing `gssapi` headers
if: ${{ matrix.os == 'ubuntu-latest' }}
run: sudo chown -R $USER:$USER ${{ github.workspace }}
run: |
sudo chown -R $USER:$USER ${{ github.workspace }}
sudo apt -y install gcc libgssapi-krb5-2 libkrb5-dev libsasl2-modules-gssapi-mit
- uses: actions/checkout@v3

@@ -43,12 +45,16 @@ jobs:

- name: Load data for MSSQL tests
if: ${{ matrix.os == 'ubuntu-latest' }}
run: cargo test initialize_sql_server_docker_instance -p tests --all-features --no-fail-fast -- --show-output --nocapture --include-ignored
run: cargo test initialize_sql_server_docker_instance -p tests --target=x86_64-unknown-linux-gnu --all-features --no-fail-fast -- --show-output --nocapture --include-ignored

- name: Run all tests, UNIT and INTEGRATION for Linux targets
if: ${{ matrix.os == 'ubuntu-latest' }}
run: cargo test --verbose --workspace --all-features --no-fail-fast -- --show-output --test-threads=1

- name: Run only UNIT tests for the rest of the defined targets
if: ${{ matrix.os != 'ubuntu-latest' }}
- name: Run only UNIT tests for Windows
if: ${{ matrix.os == 'windows-latest' }}
run: cargo test --verbose --workspace --target=x86_64-pc-windows-msvc --exclude tests --all-features --no-fail-fast -- --show-output

- name: Run only UNIT tests for MacOS
if: ${{ matrix.os == 'MacOS-latest' }}
run: cargo test --verbose --workspace --exclude tests --all-features --no-fail-fast -- --show-output
2 changes: 1 addition & 1 deletion bash_aliases.sh
@@ -4,7 +4,7 @@
# This alias avoid the usage of a bunch of commands for performn an integrated task that
# depends on several concatenated commands.

# In order to run the script, simply type `$ . ./alias.sh` from the root of the project.
# In order to run the script, simply type `$ . ./bash_aliases.sh` from the root of the project.
# (refreshing the current terminal session could be required)

# Executes the docker compose script to wake up the postgres container
10 changes: 7 additions & 3 deletions canyon_connection/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "canyon_connection"
version = "0.1.2"
version = "0.2.0"
edition = "2021"
documentation = "https://zerodaycode.github.io/canyon-book/"
homepage = "https://github.com/zerodaycode/Canyon-SQL"
@@ -16,10 +16,14 @@ tokio-postgres = { version = "0.7.2", features = ["with-chrono-0_4"] }
futures = "0.3.25"
indexmap = "1.9.1"

tiberius = { version = "0.11.3", features = ["tds73", "chrono"] }
tiberius = { version = "0.12.1", features = ["tds73", "chrono", "integrated-auth-gssapi"] }
async-std = { version = "1.12.0" }

lazy_static = "1.4.0"

serde = { version = "1.0.138", features = ["derive"] }
toml = "0.5.9"
toml = "0.7.3"

[features]
mssql-integrated-auth = []
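
Usage note (not part of this diff): a crate depending on canyon_connection would opt into integrated authentication by enabling the new feature from its own Cargo.toml, along these lines; whether end users depend on this crate directly or through a facade crate is not shown here, so treat the snippet as a sketch:

    canyon_connection = { version = "0.2.0", features = ["mssql-integrated-auth"] }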

114 changes: 71 additions & 43 deletions canyon_connection/src/canyon_database_connector.rs
@@ -4,7 +4,7 @@ use serde::Deserialize;
use tiberius::{AuthMethod, Config};
use tokio_postgres::{Client, NoTls};

use crate::datasources::DatasourceProperties;
use crate::datasources::DatasourceConfig;

/// Represents the current supported databases by Canyon
#[derive(Deserialize, Debug, Eq, PartialEq, Clone, Copy, Default)]
@@ -31,29 +31,38 @@ pub struct SqlServerConnection {
/// starts, Canyon gets the information about the desired datasources,
/// process them and generates a pool of 1 to 1 database connection for
/// every datasource defined.
pub struct DatabaseConnection {
pub postgres_connection: Option<PostgreSqlConnection>,
pub sqlserver_connection: Option<SqlServerConnection>,
pub database_type: DatabaseType,
pub enum DatabaseConnection {
Postgres(PostgreSqlConnection),
SqlServer(SqlServerConnection),
}

unsafe impl Send for DatabaseConnection {}
unsafe impl Sync for DatabaseConnection {}

impl DatabaseConnection {
pub async fn new(
datasource: &DatasourceProperties<'_>,
datasource: &DatasourceConfig,
) -> Result<DatabaseConnection, Box<(dyn std::error::Error + Send + Sync + 'static)>> {
match datasource.db_type {
match datasource.get_db_type() {
DatabaseType::PostgreSql => {
let (username, password) = match &datasource.auth {
crate::datasources::Auth::Postgres(postgres_auth) => match postgres_auth {
crate::datasources::PostgresAuth::Basic { username, password } => {
(username.as_str(), password.as_str())
}
},
crate::datasources::Auth::SqlServer(_) => {
panic!("Found SqlServer auth configuration for a PostgreSQL datasource")
}
};
let (new_client, new_connection) = tokio_postgres::connect(
&format!(
"postgres://{user}:{pswd}@{host}:{port}/{db}",
user = datasource.username,
pswd = datasource.password,
host = datasource.host,
port = datasource.port.unwrap_or_default(),
db = datasource.db_name
user = username,
pswd = password,
host = datasource.properties.host,
port = datasource.properties.port.unwrap_or_default(),
db = datasource.properties.db_name
)[..],
NoTls,
)
@@ -65,27 +74,31 @@ impl DatabaseConnection {
}
});

Ok(Self {
postgres_connection: Some(PostgreSqlConnection {
client: new_client,
// connection: new_connection,
}),
sqlserver_connection: None,
database_type: DatabaseType::PostgreSql,
})
Ok(DatabaseConnection::Postgres(PostgreSqlConnection {
client: new_client,
// connection: new_connection,
}))
}
DatabaseType::SqlServer => {
let mut config = Config::new();

config.host(datasource.host);
config.port(datasource.port.unwrap_or_default());
config.database(datasource.db_name);
config.host(&datasource.properties.host);
config.port(datasource.properties.port.unwrap_or_default());
config.database(&datasource.properties.db_name);

// Using SQL Server authentication.
config.authentication(AuthMethod::sql_server(
datasource.username,
datasource.password,
));
config.authentication(match &datasource.auth {
crate::datasources::Auth::Postgres(_) => {
panic!("Found PostgreSQL auth configuration for a SqlServer database")
}
crate::datasources::Auth::SqlServer(sql_server_auth) => match sql_server_auth {
crate::datasources::SqlServerAuth::Basic { username, password } => {
AuthMethod::sql_server(username, password)
}
#[cfg(feature = "mssql-integrated-auth")]
crate::datasources::SqlServerAuth::Integrated => AuthMethod::Integrated,
},
});

// on production, it is not a good idea to do this. We should upgrade
// Canyon in future versions to allow the user take care about this
@@ -106,18 +119,30 @@ impl DatabaseConnection {
// Handling TLS, login and other details related to the SQL Server.
let client = tiberius::Client::connect(config, tcp).await;

Ok(Self {
postgres_connection: None,
sqlserver_connection: Some(SqlServerConnection {
client: Box::leak(Box::new(
client.expect("A failure happened connecting to the database"),
)),
}),
database_type: DatabaseType::SqlServer,
})
Ok(DatabaseConnection::SqlServer(SqlServerConnection {
client: Box::leak(Box::new(
client.expect("A failure happened connecting to the database"),
)),
}))
}
}
}

pub fn postgres_connection(&self) -> Option<&PostgreSqlConnection> {
if let DatabaseConnection::Postgres(conn) = self {
Some(conn)
} else {
None
}
}

pub fn sqlserver_connection(&mut self) -> Option<&mut SqlServerConnection> {
if let DatabaseConnection::SqlServer(conn) = self {
Some(conn)
} else {
None
}
}
}

#[cfg(test)]
@@ -128,8 +153,8 @@ mod database_connection_handler {
const CONFIG_FILE_MOCK_ALT: &str = r#"
[canyon_sql]
datasources = [
{name = 'PostgresDS', properties.db_type = 'postgresql', properties.username = 'username', properties.password = 'random_pass', properties.host = 'localhost', properties.db_name = 'triforce', properties.migrations='enabled'},
{name = 'SqlServerDS', properties.db_type = 'sqlserver', properties.username = 'username2', properties.password = 'random_pass2', properties.host = '192.168.0.250.1', properties.port = 3340, properties.db_name = 'triforce2', properties.migrations='disabled'}
{name = 'PostgresDS', auth = { postgresql = { basic = { username = "postgres", password = "postgres" } } }, properties.host = 'localhost', properties.db_name = 'triforce', properties.migrations='enabled' },
{name = 'SqlServerDS', auth = { sqlserver = { basic = { username = "sa", password = "SqlServer-10" } } }, properties.host = '192.168.0.250.1', properties.port = 3340, properties.db_name = 'triforce2', properties.migrations='disabled' }
]
"#;

@@ -139,10 +164,13 @@ mod database_connection_handler {
let config: CanyonSqlConfig = toml::from_str(CONFIG_FILE_MOCK_ALT)
.expect("A failure happened retrieving the [canyon_sql] section");

let psql_ds = &config.canyon_sql.datasources[0].properties;
let sqls_ds = &config.canyon_sql.datasources[1].properties;

assert_eq!(psql_ds.db_type, DatabaseType::PostgreSql);
assert_eq!(sqls_ds.db_type, DatabaseType::SqlServer);
assert_eq!(
config.canyon_sql.datasources[0].get_db_type(),
DatabaseType::PostgreSql
);
assert_eq!(
config.canyon_sql.datasources[1].get_db_type(),
DatabaseType::SqlServer
);
}
}
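
To round off, a rough sketch of how calling code could consume the new enum-based connection, written as a hypothetical helper living inside this crate. The query strings, the helper itself, the error handling and the field visibility are illustrative assumptions; only DatabaseConnection, its variants and DatasourceConfig come from this commit:

    use crate::canyon_database_connector::DatabaseConnection;
    use crate::datasources::DatasourceConfig;

    // Hypothetical helper: opens a connection for a datasource and runs a trivial query
    async fn ping(
        datasource: &DatasourceConfig,
    ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
        // One enum value per datasource replaces the old struct with two Option fields
        let mut conn = DatabaseConnection::new(datasource).await?;
        match &mut conn {
            DatabaseConnection::Postgres(pg) => {
                // tokio-postgres only needs a shared reference to its client
                pg.client.query("SELECT 1", &[]).await?;
            }
            DatabaseConnection::SqlServer(mssql) => {
                // tiberius requires a mutable client; the variant hands it out without any transmute
                mssql.client.query("SELECT 1", &[]).await?;
            }
        }
        Ok(())
    }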
