Skip to content

Commit

Permalink
tests + cleanup
Browse files Browse the repository at this point in the history
  • Loading branch information
Weakky committed May 22, 2024
1 parent cfbff7c commit b027749
Show file tree
Hide file tree
Showing 18 changed files with 720 additions and 61 deletions.
28 changes: 27 additions & 1 deletion psl/schema-ast/src/source_file.rs
Original file line number Diff line number Diff line change
Expand Up @@ -77,8 +77,34 @@ impl From<String> for SourceFile {
}
}

#[derive(Debug, Clone, Eq, PartialEq, Hash)]
/// Backing storage for a schema source file. `Eq`/`PartialEq`/`Hash` are
/// implemented by hand (not derived) so the two variants compare by their
/// text: `Static("a")` and `Allocated("a")` are equal and hash identically.
#[derive(Debug, Clone)]
enum Contents {
    /// Schema text that lives for the whole program (e.g. a string literal).
    Static(&'static str),
    /// Reference-counted, heap-allocated schema text; clones are cheap.
    Allocated(Arc<str>),
}

impl std::hash::Hash for Contents {
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        // Hash the underlying `str` for both variants, keeping `Hash`
        // consistent with the content-based `PartialEq` below.
        let text: &str = match self {
            Contents::Static(s) => s,
            Contents::Allocated(s) => s,
        };
        text.hash(state);
    }
}

impl Eq for Contents {}

impl PartialEq for Contents {
    fn eq(&self, other: &Self) -> bool {
        // Compare text content only, ignoring which variant holds it.
        fn text(c: &Contents) -> &str {
            match c {
                Contents::Static(s) => s,
                Contents::Allocated(s) => s,
            }
        }
        text(self) == text(other)
    }
}
104 changes: 104 additions & 0 deletions schema-engine/cli/tests/cli_tests.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
use connection_string::JdbcString;
use expect_test::expect;
use indoc::*;
use schema_core::json_rpc::types::*;
use std::{
fs,
io::{BufRead, BufReader, Write as _},
Expand Down Expand Up @@ -711,3 +712,106 @@ fn introspect_e2e() {
assert!(response.starts_with(r#"{"jsonrpc":"2.0","result":{"datamodel":"datasource db {\n provider = \"sqlite\"\n url = env(\"TEST_DATABASE_URL\")\n}\n","warnings":[]},"#));
});
}

/// Writes each `filename => content` pair into a fresh temporary directory
/// and returns a `Vec` of `(absolute path, content)` pairs.
///
/// Panics on any I/O failure — this is test-support code.
macro_rules! write_multi_file_vec {
    // Match multiple pairs of filename and content, with optional trailing comma.
    ( $( $filename:expr => $content:expr ),* $(,)? ) => {
        {
            use std::fs::File;
            use std::io::Write;

            let mut results = Vec::new();
            // `into_path()` persists the directory: if we kept the `TempDir`
            // guard it would be dropped (and the directory deleted) at the
            // end of this block, leaving the returned paths dangling.
            // Note: `tempdir()` already creates the directory on disk, so no
            // extra `create_dir_all` is needed.
            let tmpdir = tempfile::tempdir().unwrap().into_path();

            $(
                let file_path = tmpdir.join($filename);
                let mut file = File::create(&file_path).unwrap();
                file.write_all($content.as_bytes()).unwrap();

                results.push((file_path.to_string_lossy().into_owned(), $content));
            )*

            results
        }
    };
}

/// Converts `(path, content)` pairs into the `SchemaContainer` values the
/// JSON-RPC schema params expect.
fn to_schema_containers(files: Vec<(String, &str)>) -> Vec<SchemaContainer> {
    files
        .into_iter()
        .map(|(path, content)| SchemaContainer {
            // `path` is already an owned `String`; re-stringifying it would
            // just allocate a redundant copy.
            path,
            content: content.to_string(),
        })
        .collect()
}

/// Wraps `(path, content)` pairs into a single `SchemasContainer` payload.
fn to_schemas_container(files: Vec<(String, &str)>) -> SchemasContainer {
    let files = to_schema_containers(files);
    SchemasContainer { files }
}

#[test_connector(tags(Postgres))]
fn get_database_version_multi_file(_api: TestApi) {
    // Split the schema across two files to exercise multi-file support:
    // datasource in one file, model in the other.
    let files = write_multi_file_vec! {
        "a.prisma" => r#"
datasource db {
provider = "postgres"
url = env("TEST_DATABASE_URL")
}
"#,
        "b.prisma" => r#"
model User {
id Int @id
}
"#,
    };

    let command = Command::new(schema_engine_bin_path());

    // One request identifies the datasource by schema files, the other by
    // connection string — both must report the same database family.
    let by_schema = GetDatabaseVersionInput {
        datasource: DatasourceParam::Schema(to_schemas_container(files)),
    };
    let by_url = GetDatabaseVersionInput {
        datasource: DatasourceParam::ConnectionString(UrlContainer {
            url: std::env::var("TEST_DATABASE_URL").unwrap(),
        }),
    };

    with_child_process(command, |process| {
        let stdin = process.stdin.as_mut().unwrap();
        let mut stdout = BufReader::new(process.stdout.as_mut().unwrap());

        // Send each request twice in alternation (schema, url, schema, url)
        // to check the engine answers consistently across repeated calls.
        for request in [&by_schema, &by_url].into_iter().cycle().take(4) {
            let payload = serde_json::json!({
                "jsonrpc": "2.0",
                "method": "getDatabaseVersion",
                "params": request,
                "id": 1
            });

            // `Value`'s `Display` prints compact JSON, matching `to_string()`.
            writeln!(stdin, "{payload}").unwrap();

            let mut line = String::new();
            stdout.read_line(&mut line).unwrap();

            assert!(line.contains("PostgreSQL") || line.contains("CockroachDB"));
        }
    });
}
4 changes: 2 additions & 2 deletions schema-engine/core/src/commands/diff.rs
Original file line number Diff line number Diff line change
Expand Up @@ -145,7 +145,7 @@ async fn json_rpc_diff_target_to_connector(
DiffTarget::SchemaDatasource(schemas) => {
let config_dir = std::path::Path::new(&schemas.config_dir);
let sources: Vec<_> = schemas.to_psl_input();
let mut connector = crate::schemas_to_connector(&sources, Some(config_dir))?;
let mut connector = crate::schema_to_connector(&sources, Some(config_dir))?;
connector.ensure_connection_validity().await?;
connector.set_preview_features(preview_features);
let schema = connector
Expand All @@ -155,7 +155,7 @@ async fn json_rpc_diff_target_to_connector(
}
DiffTarget::SchemaDatamodel(schemas) => {
let sources = schemas.to_psl_input();
let mut connector = crate::schemas_to_connector_unchecked(&sources)?;
let mut connector = crate::schema_to_connector_unchecked(&sources)?;
connector.set_preview_features(preview_features);

let schema = connector
Expand Down
26 changes: 8 additions & 18 deletions schema-engine/core/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -98,10 +98,10 @@ fn connector_for_connection_string(
}

/// Same as schema_to_connector, but it will only read the provider, not the connector params.
fn schemas_to_connector_unchecked(
schemas: &[(String, SourceFile)],
fn schema_to_connector_unchecked(
files: &[(String, SourceFile)],
) -> CoreResult<Box<dyn schema_connector::SchemaConnector>> {
let (_, config) = psl::parse_configuration_multi_file(schemas)
let (_, config) = psl::parse_configuration_multi_file(files)
.map_err(|(files, err)| CoreError::new_schema_parser_error(files.render_diagnostics(&err)))?;

let preview_features = config.preview_features();
Expand All @@ -124,13 +124,13 @@ fn schemas_to_connector_unchecked(
Ok(connector)
}

fn prepare_connector(
/// Go from a schema to a connector
fn schema_to_connector(
files: &[(String, SourceFile)],
config_dir: Option<&Path>,
source: Datasource,
url: String,
preview_features: BitFlags<PreviewFeature, u64>,
shadow_database_url: Option<String>,
) -> CoreResult<Box<dyn schema_connector::SchemaConnector>> {
let (source, url, preview_features, shadow_database_url) = parse_configuration_multi(files)?;

let url = config_dir
.map(|config_dir| psl::set_config_dir(source.active_connector.flavour(), config_dir, &url).into_owned())
.unwrap_or(url);
Expand All @@ -147,16 +147,6 @@ fn prepare_connector(
Ok(connector)
}

/// Go from a schema to a connector
fn schemas_to_connector(
files: &[(String, SourceFile)],
config_dir: Option<&Path>,
) -> CoreResult<Box<dyn schema_connector::SchemaConnector>> {
let (source, url, preview_features, shadow_database_url) = parse_configuration_multi(files)?;

prepare_connector(config_dir, source, url, preview_features, shadow_database_url)
}

fn connector_for_provider(provider: &str) -> CoreResult<Box<dyn schema_connector::SchemaConnector>> {
if let Some(connector) = BUILTIN_CONNECTORS.iter().find(|c| c.is_provider(provider)) {
match connector.flavour() {
Expand Down
8 changes: 1 addition & 7 deletions schema-engine/core/src/state.rs
Original file line number Diff line number Diff line change
Expand Up @@ -52,12 +52,6 @@ enum ConnectorRequestType {
Url(String),
}

impl From<Vec<(String, String)>> for ConnectorRequestType {
fn from(schemas: Vec<(String, String)>) -> Self {
Self::Schema(schemas.into_iter().map(|(a, b)| (a, SourceFile::from(b))).collect())
}
}

/// A request from the core to a connector, in the form of an async closure.
type ConnectorRequest<O> = Box<
dyn for<'c> FnOnce(&'c mut dyn SchemaConnector) -> Pin<Box<dyn Future<Output = CoreResult<O>> + Send + 'c>> + Send,
Expand Down Expand Up @@ -115,7 +109,7 @@ impl EngineState {
Err(_) => return Err(ConnectorError::from_msg("tokio mpsc send error".to_owned())),
},
None => {
let mut connector = crate::schemas_to_connector(&schemas, config_dir)?;
let mut connector = crate::schema_to_connector(&schemas, config_dir)?;

connector.set_host(self.host.clone());
let (erased_sender, mut erased_receiver) = mpsc::channel::<ErasedConnectorRequest>(12);
Expand Down
19 changes: 10 additions & 9 deletions schema-engine/sql-migration-tests/src/commands/create_migration.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ use test_setup::runtime::run_with_thread_local_runtime;

pub struct CreateMigration<'a> {
api: &'a mut dyn SchemaConnector,
schema: &'a str,
files: Vec<SchemaContainer>,
migrations_directory: &'a TempDir,
draft: bool,
name: &'a str,
Expand All @@ -18,12 +18,18 @@ impl<'a> CreateMigration<'a> {
pub fn new(
api: &'a mut dyn SchemaConnector,
name: &'a str,
schema: &'a str,
files: &[(&'a str, &'a str)],
migrations_directory: &'a TempDir,
) -> Self {
CreateMigration {
api,
schema,
files: files
.iter()
.map(|(path, content)| SchemaContainer {
path: path.to_string(),
content: content.to_string(),
})
.collect(),
migrations_directory,
draft: false,
name,
Expand All @@ -40,12 +46,7 @@ impl<'a> CreateMigration<'a> {
let output = create_migration(
CreateMigrationInput {
migrations_directory_path: self.migrations_directory.path().to_str().unwrap().to_owned(),
schema: SchemasContainer {
files: vec![SchemaContainer {
path: "schema.prisma".to_string(),
content: self.schema.to_owned(),
}],
},
schema: SchemasContainer { files: self.files },
draft: self.draft,
migration_name: self.name.to_owned(),
},
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,28 +6,33 @@ use tempfile::TempDir;
pub struct EvaluateDataLoss<'a> {
api: &'a mut dyn SchemaConnector,
migrations_directory: &'a TempDir,
prisma_schema: String,
files: Vec<SchemaContainer>,
}

impl<'a> EvaluateDataLoss<'a> {
pub fn new(api: &'a mut dyn SchemaConnector, migrations_directory: &'a TempDir, prisma_schema: String) -> Self {
/// Builds an `EvaluateDataLoss` command from `(path, content)` schema file
/// pairs targeting the given migrations directory.
pub fn new<'b>(
    api: &'a mut dyn SchemaConnector,
    migrations_directory: &'a TempDir,
    files: &[(&'b str, &'b str)],
) -> Self {
    // Copy the borrowed pairs into owned `SchemaContainer`s up front so the
    // command owns its input independently of the caller's slices.
    let files = files
        .iter()
        .map(|&(path, content)| SchemaContainer {
            path: path.to_owned(),
            content: content.to_owned(),
        })
        .collect();

    EvaluateDataLoss {
        api,
        migrations_directory,
        files,
    }
}

fn send_impl(self) -> CoreResult<EvaluateDataLossAssertion<'a>> {
let fut = evaluate_data_loss(
EvaluateDataLossInput {
migrations_directory_path: self.migrations_directory.path().to_str().unwrap().to_owned(),
schema: SchemasContainer {
files: vec![SchemaContainer {
path: "schema.prisma".to_string(),
content: self.prisma_schema,
}],
},
schema: SchemasContainer { files: self.files },
},
self.api,
);
Expand Down
19 changes: 10 additions & 9 deletions schema-engine/sql-migration-tests/src/commands/schema_push.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ use tracing_futures::Instrument;

pub struct SchemaPush<'a> {
api: &'a mut dyn SchemaConnector,
schema: String,
files: Vec<SchemaContainer>,
force: bool,
/// Purely for logging diagnostics.
migration_id: Option<&'a str>,
Expand All @@ -17,10 +17,16 @@ pub struct SchemaPush<'a> {
}

impl<'a> SchemaPush<'a> {
pub fn new(api: &'a mut dyn SchemaConnector, schema: String, max_refresh_delay: Option<Duration>) -> Self {
pub fn new(api: &'a mut dyn SchemaConnector, files: &[(&str, &str)], max_refresh_delay: Option<Duration>) -> Self {
SchemaPush {
api,
schema,
files: files
.iter()
.map(|(path, content)| SchemaContainer {
path: path.to_string(),
content: content.to_string(),
})
.collect(),
force: false,
migration_id: None,
max_ddl_refresh_delay: max_refresh_delay,
Expand All @@ -39,12 +45,7 @@ impl<'a> SchemaPush<'a> {

fn send_impl(self) -> CoreResult<SchemaPushAssertion> {
let input = SchemaPushInput {
schema: SchemasContainer {
files: vec![SchemaContainer {
path: "schema.prisma".to_string(),
content: self.schema,
}],
},
schema: SchemasContainer { files: self.files },
force: self.force,
};

Expand Down
1 change: 1 addition & 0 deletions schema-engine/sql-migration-tests/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

pub mod multi_engine_test_api;
pub mod test_api;
pub mod utils;

mod assertions;
mod commands;
Loading

0 comments on commit b027749

Please sign in to comment.