parse parameter type annotations when generating native query configuration #128

Merged · 15 commits · Nov 22, 2024
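For context, this change lets a native query placeholder carry a type annotation after a | separator. A sketch drawn from the tests added in this diff (the pipeline below is the test fixture, not new API):

    let pipeline = Pipeline::new(vec![Stage::Match(doc! {
        "imdb.rating": { "$gt": "{{ min_rating | int! }}" },
    })]);

Without the annotation the min_rating parameter would be inferred as double; with it, the generated configuration declares the argument as a non-nullable int.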
@@ -9,3 +9,4 @@ cc e7368f0503761c52e2ce47fa2e64454ecd063f2e019c511759162d0be049e665 # shrinks to
cc bd6f440b7ea7e51d8c369e802b8cbfbc0c3f140c01cd6b54d9c61e6d84d7e77d # shrinks to c = TypeUnificationContext { object_type_name: "", field_name: "" }, t = Nullable(Scalar(Null))
cc d16279848ea51c4be376436423d342afd077a737efcab03ba2d29d5a0dee9df2 # shrinks to left = {"": Scalar(Double)}, right = {"": Scalar(Decimal)}, shared = {}
cc fc85c97eeccb12e144f548fe65fd262d4e7b1ec9c799be69fd30535aa032e26d # shrinks to ta = Nullable(Scalar(Null)), tb = Nullable(Scalar(Undefined))
cc 57b3015ca6d70f8e1975e21132e7624132bfe3bf958475473e5d1027c59dc7d9 # shrinks to t = Predicate { object_type_name: ObjectTypeName(TypeName("A")) }
10 changes: 10 additions & 0 deletions crates/cli/proptest-regressions/native_query/type_annotation.txt
@@ -0,0 +1,10 @@
# Seeds for failure cases proptest has generated in the past. It is
# automatically read and these particular cases re-run before any
# novel cases are generated.
#
# It is recommended to check this file in to source control so that
# everyone who runs the test benefits from these saved cases.
cc 525ecaf39caf362837e1addccbf4e0f4301e7e0ad1f84047a952b6ac710f795f # shrinks to t = Scalar(Double)
cc 893face3f71cf906a1a089e94527e12d36882624d651797754b0d622f7af7680 # shrinks to t = Scalar(JavascriptWithScope)
cc 6500920ee0ab41ac265301e4afdc05438df74f2b92112a7c0c1ccb59f056071c # shrinks to t = ArrayOf(Scalar(Double))
cc adf516fe79b0dc9248c54a23f8b301ad1e2a3280081cde3f89586e4b5ade1065 # shrinks to t = Nullable(Nullable(Scalar(Double)))
2 changes: 2 additions & 0 deletions crates/cli/src/exit_codes.rs
@@ -2,6 +2,7 @@
pub enum ExitCode {
CouldNotReadAggregationPipeline,
CouldNotReadConfiguration,
CouldNotProcessAggregationPipeline,
ErrorWriting,
RefusedToOverwrite,
}
@@ -11,6 +12,7 @@ impl From<ExitCode> for i32 {
match value {
ExitCode::CouldNotReadAggregationPipeline => 201,
ExitCode::CouldNotReadConfiguration => 202,
ExitCode::CouldNotProcessAggregationPipeline => 205,
ExitCode::ErrorWriting => 204,
ExitCode::RefusedToOverwrite => 203,
}
19 changes: 19 additions & 0 deletions crates/cli/src/introspection/type_unification.rs
@@ -67,6 +67,25 @@ pub fn unify_type(type_a: Type, type_b: Type) -> Type {
}
}

// Predicate types unify if they have the same name.
// If they are different then the union is ExtendedJSON.
(
Type::Predicate {
object_type_name: object_a,
},
Type::Predicate {
object_type_name: object_b,
},
) => {
if object_a == object_b {
Type::Predicate {
object_type_name: object_a,
}
} else {
Type::ExtendedJSON
}
}

// Array types unify iff their element types unify.
(Type::ArrayOf(elem_type_a), Type::ArrayOf(elem_type_b)) => {
let elem_type = unify_type(*elem_type_a, *elem_type_b);
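An illustrative sketch of the new rule above (not part of the diff; it assumes this crate's Type and unify_type are in scope and that Type implements PartialEq, as the tests rely on):

    // Same-named predicate types unify to themselves.
    let same = unify_type(
        Type::Predicate { object_type_name: "A".into() },
        Type::Predicate { object_type_name: "A".into() },
    );
    assert_eq!(same, Type::Predicate { object_type_name: "A".into() });

    // Differently-named predicate types widen to ExtendedJSON.
    let different = unify_type(
        Type::Predicate { object_type_name: "A".into() },
        Type::Predicate { object_type_name: "B".into() },
    );
    assert_eq!(different, Type::ExtendedJSON);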
10 changes: 6 additions & 4 deletions crates/cli/src/native_query/aggregation_expression.rs
@@ -127,7 +127,6 @@ fn infer_type_from_aggregation_expression_document(
}
}

// TODO: propagate expected type based on operator used
fn infer_type_from_operator_expression(
context: &mut PipelineTypeContext<'_>,
desired_object_type_name: &str,
@@ -340,10 +339,13 @@ pub fn infer_type_from_reference_shorthand(
let t = match reference {
Reference::NativeQueryVariable {
name,
type_annotation: _,
type_annotation,
} => {
// TODO: read type annotation ENG-1249
context.register_parameter(name.into(), type_hint.into_iter().cloned())
let constraints = type_hint
.into_iter()
.cloned()
.chain(type_annotation.map(TypeConstraint::from));
context.register_parameter(name.into(), constraints)
}
Reference::PipelineVariable { .. } => todo!("pipeline variable"),
Reference::InputDocumentField { name, nested_path } => {
12 changes: 8 additions & 4 deletions crates/cli/src/native_query/error.rs
@@ -9,7 +9,7 @@ use super::type_constraint::{ObjectTypeConstraint, TypeConstraint, TypeVariable}

pub type Result<T> = std::result::Result<T, Error>;

#[derive(Clone, Debug, Error)]
#[derive(Clone, Debug, Error, PartialEq)]
pub enum Error {
#[error("Cannot infer a result type for an empty pipeline")]
EmptyPipeline,
@@ -55,9 +55,12 @@ pub enum Error {
field_name: FieldName,
},

#[error("Type mismatch in {context}: {a:?} is not compatible with {b:?}")]
#[error("Type mismatch{}: {a} is not compatible with {b}", match context {
Some(context) => format!(" in {}", context),
None => String::new(),
})]
TypeMismatch {
context: String,
context: Option<String>,
a: TypeConstraint,
b: TypeConstraint,
},
@@ -114,7 +117,8 @@ fn unable_to_infer_types_message(
for name in problem_parameter_types {
message += &format!("- {name}\n");
}
message += "\nTry adding type annotations of the form: {{parameter_name|[int!]!}}\n";
message += "\nTry adding type annotations of the form: {{ parameter_name | [int!]! }}\n";
message += "\nIf you added an annotation, and you are still seeing this error then the type you gave may not be compatible with the context where the parameter is used.\n";
Collaborator: Can we link to somewhere they can read on supported types?

Collaborator (Author): I need to do a docs pass on the whole native query DX feature, then I'll have something to link to.

}
if could_not_infer_return_type {
message += "\nUnable to infer return type.";
6 changes: 5 additions & 1 deletion crates/cli/src/native_query/mod.rs
@@ -5,6 +5,7 @@ mod pipeline;
mod pipeline_type_context;
mod prune_object_types;
mod reference_shorthand;
mod type_annotation;
mod type_constraint;
mod type_solver;

@@ -100,7 +101,10 @@ pub async fn run(context: &Context, command: Command) -> anyhow::Result<()> {
let native_query =
match native_query_from_pipeline(&configuration, &name, collection, pipeline) {
Ok(q) => WithName::named(name, q),
Err(_) => todo!(),
Err(err) => {
eprintln!("Error interpreting aggregation pipeline.\n\n{err}");
exit(ExitCode::CouldNotReadAggregationPipeline.into())
}
};

let native_query_dir = native_query_path
6 changes: 4 additions & 2 deletions crates/cli/src/native_query/pipeline/match_stage.rs
@@ -262,9 +262,11 @@ fn analyze_match_expression_string(
match parse_reference_shorthand(&match_expression)? {
Reference::NativeQueryVariable {
name,
type_annotation: _, // TODO: parse type annotation ENG-1249
type_annotation,
} => {
context.register_parameter(name.into(), [field_type.clone()]);
let constraints = std::iter::once(field_type.clone())
.chain(type_annotation.map(TypeConstraint::from));
context.register_parameter(name.into(), constraints);
}
Reference::String {
native_query_variables,
30 changes: 23 additions & 7 deletions crates/cli/src/native_query/reference_shorthand.rs
@@ -1,23 +1,28 @@
use configuration::schema::Type;
use ndc_models::FieldName;
use nom::{
branch::alt,
bytes::complete::{tag, take_while1},
character::complete::{alpha1, alphanumeric1},
character::complete::{alpha1, alphanumeric1, multispace0},
combinator::{all_consuming, cut, map, opt, recognize},
error::ParseError,
multi::{many0, many0_count},
sequence::{delimited, pair, preceded},
IResult,
IResult, Parser,
};

use super::error::{Error, Result};
use super::{
error::{Error, Result},
type_annotation::type_expression,
};

#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Reference {
/// Reference to a variable that is substituted by the connector from GraphQL inputs before
/// sending to MongoDB. For example, `"{{ artist_id }}`.
NativeQueryVariable {
name: String,
type_annotation: Option<String>,
type_annotation: Option<Type>,
},

/// Reference to a variable that is defined as part of the pipeline syntax. May be followed by
@@ -66,19 +71,19 @@ fn native_query_variable(input: &str) -> IResult<&str, Reference> {
content.trim()
})(input)
};
let type_annotation = preceded(tag("|"), placeholder_content);
let type_annotation = preceded(ws(tag("|")), type_expression);

let (remaining, (name, variable_type)) = delimited(
tag("{{"),
cut(pair(placeholder_content, opt(type_annotation))),
cut(ws(pair(ws(placeholder_content), ws(opt(type_annotation))))),
tag("}}"),
)(input)?;
// Since the native_query_variable parser runs inside an `alt`, the use of `cut` commits to
// this branch of the `alt` after successfully parsing the opening "{{" characters.

let variable = Reference::NativeQueryVariable {
name: name.to_string(),
type_annotation: variable_type.map(ToString::to_string),
type_annotation: variable_type,
};
Ok((remaining, variable))
}
@@ -135,3 +140,14 @@ fn plain_string(_input: &str) -> IResult<&str, Reference> {
},
))
}

/// A combinator that takes a parser `inner` and produces a parser that also consumes both leading and
/// trailing whitespace, returning the output of `inner`.
///
/// From https://github.com/rust-bakery/nom/blob/main/doc/nom_recipes.md#wrapper-combinators-that-eat-whitespace-before-and-after-a-parser
fn ws<'a, O, E: ParseError<&'a str>, F>(inner: F) -> impl Parser<&'a str, O, E>
where
F: Parser<&'a str, O, E>,
{
delimited(multispace0, inner, multispace0)
}
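A minimal usage sketch (illustrative only, assuming the nom 7 imports already in this module): wrapping a parser in ws makes it whitespace-tolerant, which is what lets both "{{min_rating|int!}}" and "{{ min_rating | int! }}" parse to the same reference.

    // Hypothetical helper for illustration; not part of this PR.
    fn pipe_separator(input: &str) -> IResult<&str, &str> {
        ws(tag("|")).parse(input)
    }

    // pipe_separator("  |  int!") == Ok(("int!", "|"))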
58 changes: 55 additions & 3 deletions crates/cli/src/native_query/tests.rs
@@ -15,7 +15,7 @@ use mongodb_support::{
aggregate::{Accumulator, Pipeline, Selection, Stage},
BsonScalarType,
};
use ndc_models::{FieldName, ObjectTypeName};
use ndc_models::{ArgumentName, FieldName, ObjectTypeName};
use pretty_assertions::assert_eq;
use test_helpers::configuration::mflix_config;

@@ -158,6 +158,52 @@ fn infers_native_query_from_pipeline_with_unannotated_parameter() -> googletest::Result<()> {
Ok(())
}

#[googletest::test]
fn reads_parameter_type_annotation() -> googletest::Result<()> {
let config = mflix_config();

// Parameter type would be inferred as double without this annotation
let pipeline = Pipeline::new(vec![Stage::Match(doc! {
"imdb.rating": { "$gt": "{{ min_rating | int! }}" },
})]);

let native_query = native_query_from_pipeline(
&config,
"movies_by_min_rating",
Some("movies".into()),
pipeline,
)?;

expect_that!(
native_query.arguments,
unordered_elements_are![(
eq(&ArgumentName::from("min_rating")),
field!(ObjectField.r#type, eq(&Type::Scalar(BsonScalarType::Int)))
)]
);
Ok(())
}

#[googletest::test]
fn emits_error_on_incorrect_parameter_type_annotation() -> googletest::Result<()> {
let config = mflix_config();

let pipeline = Pipeline::new(vec![Stage::Match(doc! {
"title": { "$eq": "{{ title | decimal }}" },
})]);

let native_query =
native_query_from_pipeline(&config, "movies_by_title", Some("movies".into()), pipeline);

expect_that!(
native_query,
err(displays_as(contains_substring(
"string! is not compatible with decimal"
)))
);
Ok(())
}

#[googletest::test]
fn infers_parameter_type_from_binary_comparison() -> googletest::Result<()> {
let config = mflix_config();
@@ -391,7 +437,10 @@ fn supports_project_stage_in_inclusion_mode() -> Result<()> {
let native_query =
native_query_from_pipeline(&config, "inclusion", Some("movies".into()), pipeline)?;

expect_eq!(native_query.result_document_type, "inclusion_project".into());
expect_eq!(
native_query.result_document_type,
"inclusion_project".into()
);

expect_eq!(
native_query.object_types,
@@ -402,7 +451,10 @@
fields: object_fields([
("_id", Type::Scalar(BsonScalarType::ObjectId)),
("title", Type::Scalar(BsonScalarType::String)),
("tomatoes", Type::Object("inclusion_project_tomatoes".into())),
(
"tomatoes",
Type::Object("inclusion_project_tomatoes".into())
),
("releaseDate", Type::Scalar(BsonScalarType::Date)),
]),
description: None