fix: don't compact findUniqueOrThrow #3458

Merged · 3 commits · Dec 1, 2022
Changes from 1 commit
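Background for the change: the engine's batch compaction merges several findUnique operations that share an operation name and selection into a single combined query, then reassembles one response per operation, where a record that was not found simply becomes null. findUniquePostOrThrow must instead surface a per-operation P2025 error when the record is missing (see the snapshots in the regression test below), so such operations are now excluded from compaction. The sketch below is a minimal, hypothetical illustration of that reconstruction step; the types and function are not engine code.

    use std::collections::HashMap;

    // Hypothetical illustration only: several findUnique-by-id operations are served
    // by one combined lookup, and each operation's answer is rebuilt from that result.
    fn compacted_lookup(ids: &[&str], store: &HashMap<&str, &str>) -> Vec<Option<String>> {
        // One combined query: fetch every requested id that exists.
        let found: HashMap<&str, &str> = ids
            .iter()
            .filter_map(|id| store.get(id).map(|title| (*id, *title)))
            .collect();

        // Reconstruction: an absent id becomes `None`, i.e. `findUniquePost: null`.
        // `findUniquePostOrThrow` would instead need a per-operation P2025 error here,
        // which is why the fix keeps *OrThrow operations out of compaction.
        ids.iter().map(|id| found.get(id).map(|t| t.to_string())).collect()
    }

    fn main() {
        let store = HashMap::from([("1", "first post"), ("2", "second post")]);
        assert_eq!(
            compacted_lookup(&["2", "3"], &store),
            vec![Some("second post".to_string()), None]
        );
    }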
@@ -3,6 +3,10 @@ use query_engine_tests::*;
#[test_suite(schema(schema))]
mod singular_batch {
use indoc::indoc;
use query_engine_tests::{
query_core::{BatchDocument, QueryDocument},
run_query, Runner, TestResult,
};

fn schema() -> String {
let schema = indoc! {
@@ -253,6 +257,110 @@ mod singular_batch {
Ok(())
}

fn schema_repro_16548() -> String {
let schema = indoc! {
r#"
model Post {
id String @id
}"#
};

schema.to_owned()
}

// Regression test for https://github.com/prisma/prisma/issues/16548
#[connector_test(schema(schema_repro_16548))]
async fn repro_16548(runner: Runner) -> TestResult<()> {
run_query!(&runner, r#"mutation { createOnePost(data: { id: "1" }) { id } }"#);
run_query!(&runner, r#"mutation { createOnePost(data: { id: "2" }) { id } }"#);

// Working case
let queries = vec![
r#"{ findUniquePostOrThrow(where: { id: "1" }) { id } }"#.to_string(),
r#"{ findUniquePostOrThrow(where: { id: "2" }) { id } }"#.to_string(),
];
let res = runner.batch(queries.clone(), false, None).await?;
insta::assert_snapshot!(
res.to_string(),
@r###"{"batchResult":[{"data":{"findUniquePostOrThrow":{"id":"1"}}},{"data":{"findUniquePostOrThrow":{"id":"2"}}}]}"###
);
let compact_doc = compact_batch(&runner, queries).await?;
assert_eq!(compact_doc.is_compact(), false);

// Failing case
let queries = vec![
r#"{ findUniquePostOrThrow(where: { id: "2" }) { id } }"#.to_string(),
r#"{ findUniquePostOrThrow(where: { id: "3" }) { id } }"#.to_string(),
];
let res = runner.batch(queries.clone(), false, None).await?;
insta::assert_snapshot!(
res.to_string(),
@r###"{"batchResult":[{"data":{"findUniquePostOrThrow":{"id":"2"}}},{"errors":[{"error":"Error occurred during query execution:\nConnectorError(ConnectorError { user_facing_error: Some(KnownError { message: \"An operation failed because it depends on one or more records that were required but not found. Expected a record, found none.\", meta: Object {\"cause\": String(\"Expected a record, found none.\")}, error_code: \"P2025\" }), kind: RecordDoesNotExist })","user_facing_error":{"is_panic":false,"message":"An operation failed because it depends on one or more records that were required but not found. Expected a record, found none.","meta":{"cause":"Expected a record, found none."},"error_code":"P2025"}}]}]}"###
);
let compact_doc = compact_batch(&runner, queries).await?;
assert_eq!(compact_doc.is_compact(), false);

// Mix of findUnique & findUniqueOrThrow
let queries = vec![
r#"{ findUniquePost(where: { id: "3" }) { id } }"#.to_string(),
r#"{ findUniquePostOrThrow(where: { id: "2" }) { id } }"#.to_string(),
];
let res = runner.batch(queries.clone(), false, None).await?;
insta::assert_snapshot!(
res.to_string(),
@r###"{"batchResult":[{"data":{"findUniquePost":null}},{"data":{"findUniquePostOrThrow":{"id":"2"}}}]}"###
);
let compact_doc = compact_batch(&runner, queries).await?;
assert_eq!(compact_doc.is_compact(), false);

// Mix of findUnique & findUniqueOrThrow
let queries = vec![
r#"{ findUniquePost(where: { id: "2" }) { id } }"#.to_string(),
r#"{ findUniquePostOrThrow(where: { id: "4" }) { id } }"#.to_string(),
];
let res = runner.batch(queries.clone(), false, None).await?;
insta::assert_snapshot!(
res.to_string(),
@r###"{"batchResult":[{"data":{"findUniquePost":{"id":"2"}}},{"errors":[{"error":"Error occurred during query execution:\nConnectorError(ConnectorError { user_facing_error: Some(KnownError { message: \"An operation failed because it depends on one or more records that were required but not found. Expected a record, found none.\", meta: Object {\"cause\": String(\"Expected a record, found none.\")}, error_code: \"P2025\" }), kind: RecordDoesNotExist })","user_facing_error":{"is_panic":false,"message":"An operation failed because it depends on one or more records that were required but not found. Expected a record, found none.","meta":{"cause":"Expected a record, found none."},"error_code":"P2025"}}]}]}"###
);
let compact_doc = compact_batch(&runner, queries).await?;
assert_eq!(compact_doc.is_compact(), false);

// Mix of findUnique & findUniqueOrThrow
let queries = vec![
r#"{ findUniquePostOrThrow(where: { id: "2" }) { id } }"#.to_string(),
r#"{ findUniquePost(where: { id: "3" }) { id } }"#.to_string(),
];
let res = runner.batch(queries.clone(), false, None).await?;
insta::assert_snapshot!(
res.to_string(),
@r###"{"batchResult":[{"data":{"findUniquePostOrThrow":{"id":"2"}}},{"data":{"findUniquePost":null}}]}"###
);
let compact_doc = compact_batch(&runner, queries).await?;
assert_eq!(compact_doc.is_compact(), false);

Ok(())
}

async fn compact_batch(runner: &Runner, queries: Vec<String>) -> TestResult<BatchDocument> {
// Ensure batched queries are valid
runner.batch(queries.clone(), false, None).await?.assert_success();

let doc = GraphQlBody::Multi(MultiQuery::new(
queries.into_iter().map(Into::into).collect(),
false,
None,
))
.into_doc()
.unwrap();
let batch = match doc {
QueryDocument::Multi(batch) => batch,
_ => unreachable!(),
};

Ok(batch.compact(runner.query_schema()))
}
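The regression test above only pins down the negative direction (is_compact() stays false whenever a findUniqueOrThrow is involved). A hypothetical companion test, not part of this diff, could assert the positive case: plain findUnique batches of the same shape are still compacted. It reuses the schema and the compact_batch helper defined here:

    // Hypothetical positive-case check (not in this PR): identical findUnique queries
    // with plain equality filters are still expected to compact.
    #[connector_test(schema(schema_repro_16548))]
    async fn compacts_plain_find_unique(runner: Runner) -> TestResult<()> {
        let queries = vec![
            r#"{ findUniquePost(where: { id: "1" }) { id } }"#.to_string(),
            r#"{ findUniquePost(where: { id: "2" }) { id } }"#.to_string(),
        ];

        // Same operation name, same selection, scalar EQUALS filters only,
        // and no *OrThrow variant: the batch document should be compacted.
        let compact_doc = compact_batch(&runner, queries).await?;
        assert_eq!(compact_doc.is_compact(), true);

        Ok(())
    }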

async fn create_test_data(runner: &Runner) -> TestResult<()> {
runner
.query(
7 changes: 4 additions & 3 deletions query-engine/core/src/query_document/mod.rs
@@ -72,7 +72,7 @@ impl BatchDocument {
/// - non-scalar filters (i.e. relation filters, boolean operators...)
/// - any scalar filter that is not `EQUALS`
fn invalid_compact_filter(op: &Operation, schema: &QuerySchemaRef) -> bool {
- if !op.is_find_unique() {
+ if !op.is_find_unique(schema) {
return true;
}

@@ -95,10 +95,11 @@
})
}

+ /// Checks whether a BatchDocument can be compacted.
fn can_compact(&self, schema: &QuerySchemaRef) -> bool {
match self {
Self::Multi(operations, _) => match operations.split_first() {
- Some((first, rest)) if first.is_find_unique() => {
+ Some((first, rest)) if first.is_find_unique(schema) => {
// If any of the operations has an "invalid" compact filter (see documentation of `invalid_compact_filter`),
// we do not compact the queries.
let has_invalid_compact_filter =
@@ -109,7 +110,7 @@
}

rest.iter().all(|op| {
- op.is_find_unique()
+ op.is_find_unique(schema)
&& first.name() == op.name()
&& first.nested_selections().len() == op.nested_selections().len()
&& first
11 changes: 6 additions & 5 deletions query-engine/core/src/query_document/operation.rs
@@ -1,4 +1,5 @@
use super::Selection;
+ use schema::QuerySchemaRef;

#[derive(Debug, Clone)]
pub enum Operation {
@@ -7,11 +8,11 @@ pub enum Operation {
}

impl Operation {
- pub fn is_find_unique(&self) -> bool {
- match self {
- Self::Read(selection) => selection.is_find_unique(),
- _ => false,
- }
+ pub fn is_find_unique(&self, schema: &QuerySchemaRef) -> bool {
+ schema
+ .find_query_field(self.name())
+ .map(|field| field.is_find_unique())
+ .unwrap_or(false)
}

pub fn into_read(self) -> Option<Selection> {
8 changes: 8 additions & 0 deletions query-engine/schema/src/output_types.rs
@@ -191,4 +191,12 @@ impl OutputField {
pub fn model(&self) -> Option<&ModelRef> {
self.query_info.as_ref().and_then(|info| info.model.as_ref())
}

+ pub fn is_find_unique(&self) -> bool {
+ matches!(self.query_tag(), Some(&QueryTag::FindUnique))
+ }
+
+ fn query_tag(&self) -> Option<&QueryTag> {
+ self.query_info.as_ref().map(|info| &info.tag)
+ }
}
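Taken together, the three files move the findUnique check onto the schema: Operation::is_find_unique now resolves the field via find_query_field and delegates to OutputField::is_find_unique, which matches only QueryTag::FindUnique, so findUniquePostOrThrow no longer qualifies for compaction. The previous Selection-based check (its body is not shown in this diff) presumably classified by operation name, which would also match the OrThrow variant since findUniquePostOrThrow shares the findUnique prefix. The sketch below uses hypothetical stand-in types, not the engine's, to contrast the two approaches:

    // Stand-in types for illustration only; the engine uses QuerySchemaRef,
    // OutputField and QueryTag from the schema crate instead.
    #[derive(PartialEq)]
    enum QueryTag {
        FindUnique,
        FindUniqueOrThrow,
    }

    struct QueryField {
        name: &'static str,
        tag: QueryTag,
    }

    // Fragile: a prefix test also accepts "findUniquePostOrThrow".
    fn is_find_unique_by_name(op_name: &str) -> bool {
        op_name.starts_with("findUnique")
    }

    // Robust: resolve the field and check its tag, mirroring the new code path.
    fn is_find_unique_by_tag(schema: &[QueryField], op_name: &str) -> bool {
        schema
            .iter()
            .find(|f| f.name == op_name)
            .map(|f| f.tag == QueryTag::FindUnique)
            .unwrap_or(false)
    }

    fn main() {
        let schema = [
            QueryField { name: "findUniquePost", tag: QueryTag::FindUnique },
            QueryField { name: "findUniquePostOrThrow", tag: QueryTag::FindUniqueOrThrow },
        ];

        assert!(is_find_unique_by_name("findUniquePostOrThrow")); // false positive
        assert!(!is_find_unique_by_tag(&schema, "findUniquePostOrThrow")); // excluded
        assert!(is_find_unique_by_tag(&schema, "findUniquePost"));
    }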