Skip to content

Commit

Permalink
Update to ndc-spec v0.1.5 and ndc-sdk-rs v0.2.1 (#86)
Browse files Browse the repository at this point in the history
* Update to ndc-spec 0.1.5 and ndc-sdk 0.2.0

* WIP: support ndc-spec v0.1.5 and ndc-sdk-rs v0.2.0

* WIP: configuration crate

* WIP: test-helpers

* mongodb-agent-common

* WIP: cli

* WIP: connector

* Error handling

* More fixes

* Update flakes

* Clippy suggestions

* Cargo fmt

* cargo audit fix

* Revert graphql-engine-source update
  • Loading branch information
dmoverton authored Jul 11, 2024
1 parent 4beb7dd commit 84c9a6d
Show file tree
Hide file tree
Showing 76 changed files with 865 additions and 752 deletions.
45 changes: 35 additions & 10 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

5 changes: 3 additions & 2 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -18,15 +18,16 @@ resolver = "2"
# The tag or rev of ndc-models must match the locked tag or rev of the
# ndc-models dependency of ndc-sdk
[workspace.dependencies]
ndc-sdk = { git = "https://github.com/hasura/ndc-sdk-rs.git", tag = "v0.1.4" }
ndc-models = { git = "http://github.com/hasura/ndc-spec.git", tag = "v0.1.4" }
ndc-sdk = { git = "https://github.com/hasura/ndc-sdk-rs.git", tag = "v0.2.1" }
ndc-models = { git = "http://github.com/hasura/ndc-spec.git", tag = "v0.1.5" }

indexmap = { version = "2", features = [
"serde",
] } # should match the version that ndc-models uses
itertools = "^0.12.1"
mongodb = { version = "2.8", features = ["tracing-unstable"] }
schemars = "^0.8.12"
ref-cast = "1.0.23"

# Connecting to MongoDB Atlas database with time series collections fails in the
# latest released version of the MongoDB Rust driver. A fix has been merged, but
Expand Down
1 change: 1 addition & 0 deletions crates/cli/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ clap = { version = "4.5.1", features = ["derive", "env"] }
futures-util = "0.3.28"
indexmap = { workspace = true }
itertools = { workspace = true }
ndc-models = { workspace = true }
serde = { version = "1.0", features = ["derive"] }
serde_json = { version = "1.0.113", features = ["raw_value"] }
thiserror = "1.0.57"
Expand Down
87 changes: 51 additions & 36 deletions crates/cli/src/introspection/sampling.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,8 @@ use mongodb::bson::{doc, Bson, Document};
use mongodb_agent_common::state::ConnectorState;
use mongodb_support::BsonScalarType::{self, *};

type ObjectField = WithName<schema::ObjectField>;
type ObjectType = WithName<schema::ObjectType>;
type ObjectField = WithName<ndc_models::FieldName, schema::ObjectField>;
type ObjectType = WithName<ndc_models::ObjectTypeName, schema::ObjectType>;

/// Sample from all collections in the database and return a Schema.
/// Return an error if there are any errors accessing the database
Expand Down Expand Up @@ -66,7 +66,7 @@ async fn sample_schema_from_collection(
let is_collection_type = true;
while let Some(document) = cursor.try_next().await? {
let object_types = make_object_type(
collection_name,
&collection_name.into(),
&document,
is_collection_type,
all_schema_nullable,
Expand All @@ -81,10 +81,10 @@ async fn sample_schema_from_collection(
Ok(None)
} else {
let collection_info = WithName::named(
collection_name.to_string(),
collection_name.into(),
schema::Collection {
description: None,
r#type: collection_name.to_string(),
r#type: collection_name.into(),
},
);
Ok(Some(Schema {
Expand All @@ -95,7 +95,7 @@ async fn sample_schema_from_collection(
}

fn make_object_type(
object_type_name: &str,
object_type_name: &ndc_models::ObjectTypeName,
document: &Document,
is_collection_type: bool,
all_schema_nullable: bool,
Expand All @@ -118,7 +118,7 @@ fn make_object_type(
};

let object_type = WithName::named(
object_type_name.to_string(),
object_type_name.to_owned(),
schema::ObjectType {
description: None,
fields: WithName::into_map(object_fields),
Expand All @@ -140,7 +140,7 @@ fn make_object_field(
let (collected_otds, field_type) =
make_field_type(&object_type_name, field_value, all_schema_nullable);
let object_field_value = WithName::named(
field_name.to_owned(),
field_name.into(),
schema::ObjectField {
description: None,
r#type: field_type,
Expand All @@ -161,7 +161,10 @@ pub fn type_from_bson(
object_type_name: &str,
value: &Bson,
all_schema_nullable: bool,
) -> (BTreeMap<std::string::String, schema::ObjectType>, Type) {
) -> (
BTreeMap<ndc_models::ObjectTypeName, schema::ObjectType>,
Type,
) {
let (object_types, t) = make_field_type(object_type_name, value, all_schema_nullable);
(WithName::into_map(object_types), t)
}
Expand Down Expand Up @@ -196,7 +199,7 @@ fn make_field_type(
Bson::Document(document) => {
let is_collection_type = false;
let collected_otds = make_object_type(
object_type_name,
&object_type_name.into(),
document,
is_collection_type,
all_schema_nullable,
Expand Down Expand Up @@ -238,24 +241,28 @@ mod tests {

#[test]
fn simple_doc() -> Result<(), anyhow::Error> {
let object_name = "foo";
let object_name = "foo".into();
let doc = doc! {"my_int": 1, "my_string": "two"};
let result =
WithName::into_map::<BTreeMap<_, _>>(make_object_type(object_name, &doc, false, false));
let result = WithName::into_map::<BTreeMap<_, _>>(make_object_type(
&object_name,
&doc,
false,
false,
));

let expected = BTreeMap::from([(
object_name.to_owned(),
ObjectType {
fields: BTreeMap::from([
(
"my_int".to_owned(),
"my_int".into(),
ObjectField {
r#type: Type::Scalar(BsonScalarType::Int),
description: None,
},
),
(
"my_string".to_owned(),
"my_string".into(),
ObjectField {
r#type: Type::Scalar(BsonScalarType::String),
description: None,
Expand All @@ -273,31 +280,31 @@ mod tests {

#[test]
fn simple_doc_nullable_fields() -> Result<(), anyhow::Error> {
let object_name = "foo";
let object_name = "foo".into();
let doc = doc! {"my_int": 1, "my_string": "two", "_id": 0};
let result =
WithName::into_map::<BTreeMap<_, _>>(make_object_type(object_name, &doc, true, true));
WithName::into_map::<BTreeMap<_, _>>(make_object_type(&object_name, &doc, true, true));

let expected = BTreeMap::from([(
object_name.to_owned(),
ObjectType {
fields: BTreeMap::from([
(
"_id".to_owned(),
"_id".into(),
ObjectField {
r#type: Type::Scalar(BsonScalarType::Int),
description: None,
},
),
(
"my_int".to_owned(),
"my_int".into(),
ObjectField {
r#type: Type::Nullable(Box::new(Type::Scalar(BsonScalarType::Int))),
description: None,
},
),
(
"my_string".to_owned(),
"my_string".into(),
ObjectField {
r#type: Type::Nullable(Box::new(Type::Scalar(BsonScalarType::String))),
description: None,
Expand All @@ -315,32 +322,36 @@ mod tests {

#[test]
fn array_of_objects() -> Result<(), anyhow::Error> {
let object_name = "foo";
let object_name = "foo".into();
let doc = doc! {"my_array": [{"foo": 42, "bar": ""}, {"bar": "wut", "baz": 3.77}]};
let result =
WithName::into_map::<BTreeMap<_, _>>(make_object_type(object_name, &doc, false, false));
let result = WithName::into_map::<BTreeMap<_, _>>(make_object_type(
&object_name,
&doc,
false,
false,
));

let expected = BTreeMap::from([
(
"foo_my_array".to_owned(),
"foo_my_array".into(),
ObjectType {
fields: BTreeMap::from([
(
"foo".to_owned(),
"foo".into(),
ObjectField {
r#type: Type::Nullable(Box::new(Type::Scalar(BsonScalarType::Int))),
description: None,
},
),
(
"bar".to_owned(),
"bar".into(),
ObjectField {
r#type: Type::Scalar(BsonScalarType::String),
description: None,
},
),
(
"baz".to_owned(),
"baz".into(),
ObjectField {
r#type: Type::Nullable(Box::new(Type::Scalar(
BsonScalarType::Double,
Expand All @@ -356,7 +367,7 @@ mod tests {
object_name.to_owned(),
ObjectType {
fields: BTreeMap::from([(
"my_array".to_owned(),
"my_array".into(),
ObjectField {
r#type: Type::ArrayOf(Box::new(Type::Object(
"foo_my_array".to_owned(),
Expand All @@ -376,32 +387,36 @@ mod tests {

#[test]
fn non_unifiable_array_of_objects() -> Result<(), anyhow::Error> {
let object_name = "foo";
let object_name = "foo".into();
let doc = doc! {"my_array": [{"foo": 42, "bar": ""}, {"bar": 17, "baz": 3.77}]};
let result =
WithName::into_map::<BTreeMap<_, _>>(make_object_type(object_name, &doc, false, false));
let result = WithName::into_map::<BTreeMap<_, _>>(make_object_type(
&object_name,
&doc,
false,
false,
));

let expected = BTreeMap::from([
(
"foo_my_array".to_owned(),
"foo_my_array".into(),
ObjectType {
fields: BTreeMap::from([
(
"foo".to_owned(),
"foo".into(),
ObjectField {
r#type: Type::Nullable(Box::new(Type::Scalar(BsonScalarType::Int))),
description: None,
},
),
(
"bar".to_owned(),
"bar".into(),
ObjectField {
r#type: Type::ExtendedJSON,
description: None,
},
),
(
"baz".to_owned(),
"baz".into(),
ObjectField {
r#type: Type::Nullable(Box::new(Type::Scalar(
BsonScalarType::Double,
Expand All @@ -417,7 +432,7 @@ mod tests {
object_name.to_owned(),
ObjectType {
fields: BTreeMap::from([(
"my_array".to_owned(),
"my_array".into(),
ObjectField {
r#type: Type::ArrayOf(Box::new(Type::Object(
"foo_my_array".to_owned(),
Expand Down
Loading

0 comments on commit 84c9a6d

Please sign in to comment.