diff --git a/core/tests/interfaces.rs b/core/tests/interfaces.rs index ed43631c0f4..0400ab4afd3 100644 --- a/core/tests/interfaces.rs +++ b/core/tests/interfaces.rs @@ -67,7 +67,8 @@ async fn one_interface_zero_entities() { .unwrap(); let data = extract_data!(res).unwrap(); - assert_eq!(format!("{:?}", data), "Object({\"leggeds\": List([])})") + let exp = object! { leggeds: Vec::::new() }; + assert_eq!(data, exp); } #[tokio::test] @@ -87,10 +88,8 @@ async fn one_interface_one_entity() { .await .unwrap(); let data = extract_data!(res).unwrap(); - assert_eq!( - format!("{:?}", data), - "Object({\"leggeds\": List([Object({\"legs\": Int(3)})])})" - ); + let exp = object! { leggeds: vec![ object!{ legs: 3 }]}; + assert_eq!(data, exp); // Query by ID. let query = "query { legged(id: \"1\") { legs } }"; @@ -98,10 +97,8 @@ async fn one_interface_one_entity() { .await .unwrap(); let data = extract_data!(res).unwrap(); - assert_eq!( - format!("{:?}", data), - "Object({\"legged\": Object({\"legs\": Int(3)})})", - ); + let exp = object! { legged: object! { legs: 3 }}; + assert_eq!(data, exp); } #[tokio::test] @@ -121,10 +118,8 @@ async fn one_interface_one_entity_typename() { .await .unwrap(); let data = extract_data!(res).unwrap(); - assert_eq!( - format!("{:?}", data), - "Object({\"leggeds\": List([Object({\"__typename\": String(\"Animal\")})])})" - ) + let exp = object! { leggeds: vec![ object!{ __typename: "Animal" } ]}; + assert_eq!(data, exp); } #[tokio::test] @@ -150,10 +145,8 @@ async fn one_interface_multiple_entities() { .await .unwrap(); let data = extract_data!(res).unwrap(); - assert_eq!( - format!("{:?}", data), - "Object({\"leggeds\": List([Object({\"legs\": Int(3)}), Object({\"legs\": Int(4)})])})" - ); + let exp = object! { leggeds: vec![ object! { legs: 3 }, object! { legs: 4 }]}; + assert_eq!(data, exp); // Test for support issue #32. 
let query = "query { legged(id: \"2\") { legs } }"; @@ -161,10 +154,8 @@ async fn one_interface_multiple_entities() { .await .unwrap(); let data = extract_data!(res).unwrap(); - assert_eq!( - format!("{:?}", data), - "Object({\"legged\": Object({\"legs\": Int(4)})})", - ); + let exp = object! { legged: object! { legs: 4 }}; + assert_eq!(data, exp); } #[tokio::test] @@ -187,10 +178,8 @@ async fn reference_interface() { .unwrap(); let data = extract_data!(res).unwrap(); - assert_eq!( - format!("{:?}", data), - "Object({\"leggeds\": List([Object({\"leg\": Object({\"id\": String(\"1\")})})])})" - ) + let exp = object! { leggeds: vec![ object!{ leg: object! { id: "1" } }] }; + assert_eq!(data, exp); } #[tokio::test] @@ -253,13 +242,15 @@ async fn reference_interface_derived() { .unwrap(); let data = extract_data!(res).unwrap(); - assert_eq!( - format!("{:?}", data), - "Object({\"events\": List([\ - Object({\"id\": String(\"buy\"), \"transaction\": Object({\"id\": String(\"txn\")})}), \ - Object({\"id\": String(\"gift\"), \"transaction\": Object({\"id\": String(\"txn\")})}), \ - Object({\"id\": String(\"sell1\"), \"transaction\": Object({\"id\": String(\"txn\")})}), \ - Object({\"id\": String(\"sell2\"), \"transaction\": Object({\"id\": String(\"txn\")})})])})"); + let exp = object! { + events: vec![ + object! { id: "buy", transaction: object! { id: "txn" } }, + object! { id: "gift", transaction: object! { id: "txn" } }, + object! { id: "sell1", transaction: object! { id: "txn" } }, + object! { id: "sell2", transaction: object! 
{ id: "txn" } } + ] + }; + assert_eq!(data, exp); } #[tokio::test] @@ -278,6 +269,8 @@ async fn follow_interface_reference_invalid() { .await .unwrap(); + // Depending on whether `ENABLE_GRAPHQL_VALIDATIONS` is set or not, we + // get different errors match &res.to_result().unwrap_err()[0] { QueryError::ExecutionError(QueryExecutionError::ValidationError(_, error_message)) => { assert_eq!( @@ -285,7 +278,11 @@ async fn follow_interface_reference_invalid() { "Cannot query field \"parent\" on type \"Legged\"." ); } - e => panic!("error {} is not the expected one", e), + QueryError::ExecutionError(QueryExecutionError::UnknownField(_, type_name, field_name)) => { + assert_eq!(type_name, "Legged"); + assert_eq!(field_name, "parent"); + } + e => panic!("error `{}` is not the expected one", e), } } @@ -323,10 +320,10 @@ async fn follow_interface_reference() { .unwrap(); let data = extract_data!(res).unwrap(); - assert_eq!( - format!("{:?}", data), - "Object({\"legged\": Object({\"parent\": Object({\"id\": String(\"parent\")})})})" - ) + let exp = object! { + legged: object! { parent: object! { id: "parent" } } + }; + assert_eq!(data, exp) } #[tokio::test] @@ -426,11 +423,11 @@ async fn two_interfaces() { .await .unwrap(); let data = extract_data!(res).unwrap(); - assert_eq!( - format!("{:?}", data), - "Object({\"ibars\": List([Object({\"bar\": Int(100)}), Object({\"bar\": Int(200)})]), \ - \"ifoos\": List([Object({\"foo\": String(\"bla\")}), Object({\"foo\": String(\"ble\")})])})" - ); + let exp = object! { + ibars: vec![ object! { bar: 100 }, object! { bar: 200 }], + ifoos: vec![ object! { foo: "bla" }, object! { foo: "ble" } ] + }; + assert_eq!(data, exp); } #[tokio::test] @@ -454,10 +451,8 @@ async fn interface_non_inline_fragment() { .await .unwrap(); let data = extract_data!(res).unwrap(); - assert_eq!( - format!("{:?}", data), - r#"Object({"leggeds": List([Object({"name": String("cow")})])})"# - ); + let exp = object! { leggeds: vec![ object! 
{ name: "cow" } ]}; + assert_eq!(data, exp); // Query the fragment and something else. let query = "query { leggeds { legs, ...frag } } fragment frag on Animal { name }"; @@ -465,10 +460,8 @@ async fn interface_non_inline_fragment() { .await .unwrap(); let data = extract_data!(res).unwrap(); - assert_eq!( - format!("{:?}", data), - r#"Object({"leggeds": List([Object({"legs": Int(3), "name": String("cow")})])})"#, - ); + let exp = object! { leggeds: vec![ object!{ legs: 3, name: "cow" } ]}; + assert_eq!(data, exp); } #[tokio::test] @@ -501,10 +494,8 @@ async fn interface_inline_fragment() { .await .unwrap(); let data = extract_data!(res).unwrap(); - assert_eq!( - format!("{:?}", data), - r#"Object({"leggeds": List([Object({"airspeed": Int(24)}), Object({"name": String("cow")})])})"# - ); + let exp = object! { leggeds: vec![ object!{ airspeed: 24 }, object! { name: "cow" }]}; + assert_eq!(data, exp); } #[tokio::test] @@ -567,20 +558,11 @@ async fn interface_inline_fragment_with_subquery() { .await .unwrap(); let data = extract_data!(res).unwrap(); - - assert_eq!( - format!("{:?}", data), - "Object({\ - \"leggeds\": List([\ - Object({\ - \"airspeed\": Int(5), \ - \"legs\": Int(2), \ - \"parent\": Object({\"id\": String(\"mama_bird\")})\ - }), \ - Object({\"legs\": Int(4)})\ - ])\ - })" - ); + let exp = object! { + leggeds: vec![ object!{ legs: 2, airspeed: 5, parent: object! 
{ id: "mama_bird" } }, + object!{ legs: 4 }] + }; + assert_eq!(data, exp); } #[tokio::test] @@ -1394,6 +1376,16 @@ async fn enum_list_filters() { #[tokio::test] async fn recursive_fragment() { + // Depending on whether `ENABLE_GRAPHQL_VALIDATIONS` is set or not, we + // get different error messages + const FOO_ERRORS: [&str; 2] = [ + "Cannot spread fragment \"FooFrag\" within itself.", + "query has fragment cycle including `FooFrag`", + ]; + const FOO_BAR_ERRORS: [&str; 2] = [ + "Cannot spread fragment \"BarFrag\" within itself via \"FooFrag\".", + "query has fragment cycle including `BarFrag`", + ]; let subgraph_id = "RecursiveFragment"; let schema = " type Foo @entity { @@ -1426,7 +1418,7 @@ async fn recursive_fragment() { .await .unwrap(); let data = res.to_result().unwrap_err()[0].to_string(); - assert_eq!(data, "Cannot spread fragment \"FooFrag\" within itself."); + assert!(FOO_ERRORS.contains(&data.as_str())); let co_recursive = " query { @@ -1453,8 +1445,5 @@ async fn recursive_fragment() { .await .unwrap(); let data = res.to_result().unwrap_err()[0].to_string(); - assert_eq!( - data, - "Cannot spread fragment \"BarFrag\" within itself via \"FooFrag\"." - ); + assert!(FOO_BAR_ERRORS.contains(&data.as_str())); } diff --git a/graph/src/components/store.rs b/graph/src/components/store.rs index bb831405103..620a9b4a163 100644 --- a/graph/src/components/store.rs +++ b/graph/src/components/store.rs @@ -1713,12 +1713,11 @@ impl AttributeNames { } } - /// Adds a attribute name. Ignores meta fields. - pub fn add(&mut self, field: &q::Field) { - if Self::is_meta_field(&field.name) { + pub fn update(&mut self, field_name: &str) { + if Self::is_meta_field(field_name) { return; } - self.insert(&field.name) + self.insert(&field_name) } /// Adds a attribute name. Ignores meta fields. 
diff --git a/graph/src/data/graphql/ext.rs b/graph/src/data/graphql/ext.rs index d23ec2a57ea..4664c0d295b 100644 --- a/graph/src/data/graphql/ext.rs +++ b/graph/src/data/graphql/ext.rs @@ -66,6 +66,13 @@ pub trait DocumentExt { fn get_named_type(&self, name: &str) -> Option<&TypeDefinition>; fn scalar_value_type(&self, field_type: &Type) -> ValueType; + + /// Return `true` if the type does not allow selection of child fields. + /// + /// # Panics + /// + /// If `field_type` names an unknown type + fn is_leaf_type(&self, field_type: &Type) -> bool; } impl DocumentExt for Document { @@ -219,6 +226,19 @@ impl DocumentExt for Document { Type::ListType(inner) => self.scalar_value_type(inner), } } + + fn is_leaf_type(&self, field_type: &Type) -> bool { + match self + .get_named_type(field_type.get_base_type()) + .expect("names of field types have been validated") + { + TypeDefinition::Enum(_) | TypeDefinition::Scalar(_) => true, + TypeDefinition::Object(_) + | TypeDefinition::Interface(_) + | TypeDefinition::Union(_) + | TypeDefinition::InputObject(_) => false, + } + } } pub trait TypeExt { @@ -348,6 +368,41 @@ impl DirectiveFinder for Vec { } } +pub trait TypeDefinitionExt { + fn name(&self) -> &str; + + // Return `true` if this is the definition of a type from the + // introspection schema + fn is_introspection(&self) -> bool { + self.name().starts_with("__") + } +} + +impl TypeDefinitionExt for TypeDefinition { + fn name(&self) -> &str { + match self { + TypeDefinition::Scalar(t) => &t.name, + TypeDefinition::Object(t) => &t.name, + TypeDefinition::Interface(t) => &t.name, + TypeDefinition::Union(t) => &t.name, + TypeDefinition::Enum(t) => &t.name, + TypeDefinition::InputObject(t) => &t.name, + } + } +} + +pub trait FieldExt { + // Return `true` if this is the name of one of the query fields from the + // introspection schema + fn is_introspection(&self) -> bool; +} + +impl FieldExt for Field { + fn is_introspection(&self) -> bool { + &self.name == "__schema" || 
&self.name == "__type" + } +} + #[cfg(test)] mod directive_finder_tests { use graphql_parser::parse_schema; diff --git a/graph/src/data/graphql/object_macro.rs b/graph/src/data/graphql/object_macro.rs index 96c8124aae6..59d2f5a785c 100644 --- a/graph/src/data/graphql/object_macro.rs +++ b/graph/src/data/graphql/object_macro.rs @@ -1,13 +1,13 @@ +use crate::data::value::Object; use crate::prelude::q; use crate::prelude::r; -use std::collections::BTreeMap; use std::iter::FromIterator; /// Creates a `graphql_parser::query::Value::Object` from key/value pairs. /// If you don't need to determine which keys are included dynamically at runtime /// consider using the `object! {}` macro instead. pub fn object_value(data: Vec<(&str, r::Value)>) -> r::Value { - r::Value::Object(BTreeMap::from_iter( + r::Value::Object(Object::from_iter( data.into_iter().map(|(k, v)| (k.to_string(), v)), )) } @@ -83,12 +83,12 @@ macro_rules! impl_into_values { impl_into_values![(String, String), (f64, Float), (bool, Boolean)]; -/// Creates a `graphql_parser::query::Value::Object` from key/value pairs. +/// Creates a `data::value::Value::Object` from key/value pairs. #[macro_export] macro_rules! 
object { ($($name:ident: $value:expr,)*) => { { - let mut result = ::std::collections::BTreeMap::new(); + let mut result = $crate::data::value::Object::new(); $( let value = $crate::data::graphql::object_macro::IntoValue::into_value($value); result.insert(stringify!($name).to_string(), value); diff --git a/graph/src/data/graphql/values.rs b/graph/src/data/graphql/values.rs index ea580252448..d19cfedd29b 100644 --- a/graph/src/data/graphql/values.rs +++ b/graph/src/data/graphql/values.rs @@ -1,8 +1,9 @@ use anyhow::{anyhow, Error}; -use std::collections::{BTreeMap, HashMap}; +use std::collections::HashMap; use std::convert::TryFrom; use std::str::FromStr; +use crate::data::value::Object; use crate::prelude::{r, BigInt, Entity}; use web3::types::{H160, H256}; @@ -166,7 +167,7 @@ impl ValueMap for r::Value { } } -impl ValueMap for &BTreeMap { +impl ValueMap for &Object { fn get_required(&self, key: &str) -> Result where T: TryFromValue, diff --git a/graph/src/data/introspection.graphql b/graph/src/data/introspection.graphql new file mode 100644 index 00000000000..c3d2c1b8842 --- /dev/null +++ b/graph/src/data/introspection.graphql @@ -0,0 +1,98 @@ +# A GraphQL introspection schema for inclusion in a subgraph's API schema. +# The schema differs from the 'standard' introspection schema in that it +# doesn't have a Query type nor scalar declarations as they come from the +# API schema. + +type __Schema { + types: [__Type!]! + queryType: __Type! + mutationType: __Type + subscriptionType: __Type + directives: [__Directive!]! +} + +type __Type { + kind: __TypeKind! + name: String + description: String + + # OBJECT and INTERFACE only + fields(includeDeprecated: Boolean = false): [__Field!] + + # OBJECT only + interfaces: [__Type!] + + # INTERFACE and UNION only + possibleTypes: [__Type!] + + # ENUM only + enumValues(includeDeprecated: Boolean = false): [__EnumValue!] + + # INPUT_OBJECT only + inputFields: [__InputValue!] 
+ + # NON_NULL and LIST only + ofType: __Type +} + +type __Field { + name: String! + description: String + args: [__InputValue!]! + type: __Type! + isDeprecated: Boolean! + deprecationReason: String +} + +type __InputValue { + name: String! + description: String + type: __Type! + defaultValue: String +} + +type __EnumValue { + name: String! + description: String + isDeprecated: Boolean! + deprecationReason: String +} + +enum __TypeKind { + SCALAR + OBJECT + INTERFACE + UNION + ENUM + INPUT_OBJECT + LIST + NON_NULL +} + +type __Directive { + name: String! + description: String + locations: [__DirectiveLocation!]! + args: [__InputValue!]! +} + +enum __DirectiveLocation { + QUERY + MUTATION + SUBSCRIPTION + FIELD + FRAGMENT_DEFINITION + FRAGMENT_SPREAD + INLINE_FRAGMENT + SCHEMA + SCALAR + OBJECT + FIELD_DEFINITION + ARGUMENT_DEFINITION + INTERFACE + UNION + ENUM + ENUM_VALUE + INPUT_OBJECT + INPUT_FIELD_DEFINITION +} \ No newline at end of file diff --git a/graph/src/data/query/result.rs b/graph/src/data/query/result.rs index 791ec52de3c..6732847ca91 100644 --- a/graph/src/data/query/result.rs +++ b/graph/src/data/query/result.rs @@ -1,4 +1,5 @@ use super::error::{QueryError, QueryExecutionError}; +use crate::data::value::Object; use crate::prelude::{r, CacheWeight, DeploymentHash}; use http::header::{ ACCESS_CONTROL_ALLOW_HEADERS, ACCESS_CONTROL_ALLOW_METHODS, ACCESS_CONTROL_ALLOW_ORIGIN, @@ -6,7 +7,6 @@ use http::header::{ }; use serde::ser::*; use serde::Serialize; -use std::collections::BTreeMap; use std::convert::TryFrom; use std::sync::Arc; @@ -39,7 +39,7 @@ where ser.end() } -pub type Data = BTreeMap; +pub type Data = Object; #[derive(Debug)] /// A collection of query results that is serialized as a single result. 
@@ -270,8 +270,8 @@ impl From> for QueryResult { } } -impl From for QueryResult { - fn from(val: Data) -> Self { +impl From for QueryResult { + fn from(val: Object) -> Self { QueryResult::new(val) } } @@ -309,7 +309,7 @@ fn multiple_data_items() { use serde_json::json; fn make_obj(key: &str, value: &str) -> Arc { - let mut map = BTreeMap::new(); + let mut map = Object::new(); map.insert(key.to_owned(), r::Value::String(value.to_owned())); Arc::new(map.into()) } diff --git a/graph/src/data/schema.rs b/graph/src/data/schema.rs index 2798d8adff6..c6d29d304d6 100644 --- a/graph/src/data/schema.rs +++ b/graph/src/data/schema.rs @@ -1,8 +1,10 @@ +use crate::cheap_clone::CheapClone; use crate::components::store::{EntityType, SubgraphStore}; use crate::data::graphql::ext::{DirectiveExt, DirectiveFinder, DocumentExt, TypeExt, ValueExt}; use crate::data::store::ValueType; use crate::data::subgraph::{DeploymentHash, SubgraphName}; use crate::prelude::{ + lazy_static, q::Value, s::{self, Definition, InterfaceType, ObjectType, TypeDefinition, *}, }; @@ -21,6 +23,8 @@ use std::iter::FromIterator; use std::str::FromStr; use std::sync::Arc; +use super::graphql::ObjectOrInterface; + pub const SCHEMA_TYPE_NAME: &str = "_Schema_"; pub const META_FIELD_TYPE: &str = "_Meta_"; @@ -347,16 +351,24 @@ impl SchemaReference { #[derive(Debug)] pub struct ApiSchema { - pub schema: Schema, + schema: Schema, // Root types for the api schema. pub query_type: Arc, pub subscription_type: Option>, + object_types: HashMap>, } impl ApiSchema { - /// `api_schema` will typically come from `fn api_schema` in the graphql crate. - pub fn from_api_schema(api_schema: Schema) -> Result { + /// `api_schema` will typically come from `fn api_schema` in the graphql + /// crate. + /// + /// In addition, the API schema has an introspection schema mixed into + /// `api_schema`. 
In particular, the `Query` type has fields called + /// `__schema` and `__type` + pub fn from_api_schema(mut api_schema: Schema) -> Result { + add_introspection_schema(&mut api_schema.document); + let query_type = api_schema .document .get_root_query_type() @@ -368,10 +380,19 @@ impl ApiSchema { .cloned() .map(Arc::new); + let object_types = HashMap::from_iter( + api_schema + .document + .get_object_type_definitions() + .into_iter() + .map(|obj_type| (obj_type.name.clone(), Arc::new(obj_type.clone()))), + ); + Ok(Self { schema: api_schema, query_type: Arc::new(query_type), subscription_type, + object_types, }) } @@ -395,6 +416,151 @@ impl ApiSchema { pub fn interfaces_for_type(&self, type_name: &EntityType) -> Option<&Vec> { self.schema.interfaces_for_type(type_name) } + + /// Return an `Arc` around the `ObjectType` from our internal cache + /// + /// # Panics + /// If `obj_type` is not part of this schema, this function panics + pub fn object_type(&self, obj_type: &ObjectType) -> Arc { + self.object_types + .get(&obj_type.name) + .expect("ApiSchema.object_type is only used with existing types") + .cheap_clone() + } + + pub fn get_named_type(&self, name: &str) -> Option<&TypeDefinition> { + self.schema.document.get_named_type(name) + } + + /// Returns true if the given type is an input type. + /// + /// Uses the algorithm outlined on + /// https://facebook.github.io/graphql/draft/#IsInputType(). 
+ pub fn is_input_type(&self, t: &s::Type) -> bool { + match t { + s::Type::NamedType(name) => { + let named_type = self.get_named_type(name); + named_type.map_or(false, |type_def| match type_def { + s::TypeDefinition::Scalar(_) + | s::TypeDefinition::Enum(_) + | s::TypeDefinition::InputObject(_) => true, + _ => false, + }) + } + s::Type::ListType(inner) => self.is_input_type(inner), + s::Type::NonNullType(inner) => self.is_input_type(inner), + } + } + + pub fn get_root_query_type_def(&self) -> Option<&s::TypeDefinition> { + self.schema + .document + .definitions + .iter() + .find_map(|d| match d { + s::Definition::TypeDefinition(def @ s::TypeDefinition::Object(_)) => match def { + s::TypeDefinition::Object(t) if t.name == "Query" => Some(def), + _ => None, + }, + _ => None, + }) + } + + pub fn object_or_interface(&self, name: &str) -> Option> { + if name.starts_with("__") { + INTROSPECTION_SCHEMA.object_or_interface(name) + } else { + self.schema.document.object_or_interface(name) + } + } + + /// Returns the type definition that a field type corresponds to. + pub fn get_type_definition_from_field<'a>( + &'a self, + field: &s::Field, + ) -> Option<&'a s::TypeDefinition> { + self.get_type_definition_from_type(&field.field_type) + } + + /// Returns the type definition for a type. + pub fn get_type_definition_from_type<'a>( + &'a self, + t: &s::Type, + ) -> Option<&'a s::TypeDefinition> { + match t { + s::Type::NamedType(name) => self.get_named_type(name), + s::Type::ListType(inner) => self.get_type_definition_from_type(inner), + s::Type::NonNullType(inner) => self.get_type_definition_from_type(inner), + } + } + + #[cfg(debug_assertions)] + pub fn definitions(&self) -> impl Iterator> { + self.schema.document.definitions.iter() + } +} + +lazy_static! 
{ + static ref INTROSPECTION_SCHEMA: Document = { + let schema = include_str!("introspection.graphql"); + parse_schema(schema).expect("the schema `introspection.graphql` is invalid") + }; +} + +fn add_introspection_schema(schema: &mut Document) { + fn introspection_fields() -> Vec { + // Generate fields for the root query fields in an introspection schema, + // the equivalent of the fields of the `Query` type: + // + // type Query { + // __schema: __Schema! + // __type(name: String!): __Type + // } + + let type_args = vec![InputValue { + position: Pos::default(), + description: None, + name: "name".to_string(), + value_type: Type::NonNullType(Box::new(Type::NamedType("String".to_string()))), + default_value: None, + directives: vec![], + }]; + + vec![ + Field { + position: Pos::default(), + description: None, + name: "__schema".to_string(), + arguments: vec![], + field_type: Type::NonNullType(Box::new(Type::NamedType("__Schema".to_string()))), + directives: vec![], + }, + Field { + position: Pos::default(), + description: None, + name: "__type".to_string(), + arguments: type_args, + field_type: Type::NamedType("__Type".to_string()), + directives: vec![], + }, + ] + } + + schema + .definitions + .extend(INTROSPECTION_SCHEMA.definitions.iter().cloned()); + + let query_type = schema + .definitions + .iter_mut() + .filter_map(|d| match d { + Definition::TypeDefinition(TypeDefinition::Object(t)) if t.name == "Query" => Some(t), + _ => None, + }) + .peekable() + .next() + .expect("no root `Query` in the schema"); + query_type.fields.append(&mut introspection_fields()); } /// A validated and preprocessed GraphQL schema for a subgraph. 
diff --git a/graph/src/data/value.rs b/graph/src/data/value.rs index e43406aa232..61011441a69 100644 --- a/graph/src/data/value.rs +++ b/graph/src/data/value.rs @@ -3,6 +3,160 @@ use serde::ser::{SerializeMap, SerializeSeq, Serializer}; use serde::Serialize; use std::collections::BTreeMap; use std::convert::TryFrom; +use std::iter::FromIterator; + +const TOMBSTONE_KEY: &str = "*dead*"; + +#[derive(Clone, Debug, PartialEq)] +struct Entry { + key: String, + value: Value, +} + +#[derive(Clone, PartialEq)] +pub struct Object(Vec); + +impl Object { + pub fn new() -> Self { + Self(Vec::new()) + } + + pub fn get(&self, key: &str) -> Option<&Value> { + self.0 + .iter() + .find(|entry| entry.key == key) + .map(|entry| &entry.value) + } + + pub fn remove(&mut self, key: &str) -> Option { + self.0 + .iter_mut() + .find(|entry| entry.key == key) + .map(|entry| { + entry.key = TOMBSTONE_KEY.to_string(); + std::mem::replace(&mut entry.value, Value::Null) + }) + } + + pub fn iter(&self) -> impl Iterator { + ObjectIter::new(self) + } + + fn len(&self) -> usize { + self.0.len() + } + + pub fn extend(&mut self, other: Object) { + self.0.extend(other.0) + } + + pub fn insert(&mut self, key: String, value: Value) -> Option { + match self.0.iter_mut().find(|entry| &entry.key == &key) { + Some(entry) => Some(std::mem::replace(&mut entry.value, value)), + None => { + self.0.push(Entry { key, value }); + None + } + } + } +} + +impl FromIterator<(String, Value)> for Object { + fn from_iter>(iter: T) -> Self { + let mut items: Vec<_> = Vec::new(); + for (key, value) in iter { + items.push(Entry { key, value }) + } + Object(items) + } +} + +pub struct ObjectOwningIter { + iter: std::vec::IntoIter, +} + +impl Iterator for ObjectOwningIter { + type Item = (String, Value); + + fn next(&mut self) -> Option { + while let Some(entry) = self.iter.next() { + if &entry.key != TOMBSTONE_KEY { + return Some((entry.key, entry.value)); + } + } + None + } +} + +impl IntoIterator for Object { + type Item = 
(String, Value); + + type IntoIter = ObjectOwningIter; + + fn into_iter(self) -> Self::IntoIter { + ObjectOwningIter { + iter: self.0.into_iter(), + } + } +} + +pub struct ObjectIter<'a> { + iter: std::slice::Iter<'a, Entry>, +} + +impl<'a> ObjectIter<'a> { + fn new(object: &'a Object) -> Self { + Self { + iter: object.0.as_slice().iter(), + } + } +} +impl<'a> Iterator for ObjectIter<'a> { + type Item = (&'a String, &'a Value); + + fn next(&mut self) -> Option { + while let Some(entry) = self.iter.next() { + if &entry.key != TOMBSTONE_KEY { + return Some((&entry.key, &entry.value)); + } + } + None + } +} + +impl<'a> IntoIterator for &'a Object { + type Item = as Iterator>::Item; + + type IntoIter = ObjectIter<'a>; + + fn into_iter(self) -> Self::IntoIter { + ObjectIter::new(self) + } +} + +impl CacheWeight for Entry { + fn indirect_weight(&self) -> usize { + self.key.indirect_weight() + self.value.indirect_weight() + } +} + +impl CacheWeight for Object { + fn indirect_weight(&self) -> usize { + self.0.indirect_weight() + } +} + +impl Default for Object { + fn default() -> Self { + Self(Vec::default()) + } +} + +impl std::fmt::Debug for Object { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.0.fmt(f) + } +} #[derive(Debug, Clone, PartialEq)] pub enum Value { @@ -13,10 +167,18 @@ pub enum Value { Null, Enum(String), List(Vec), - Object(BTreeMap), + Object(Object), } impl Value { + pub fn object(map: BTreeMap) -> Self { + let items = map + .into_iter() + .map(|(key, value)| Entry { key, value }) + .collect(); + Value::Object(Object(items)) + } + pub fn is_null(&self) -> bool { matches!(self, Value::Null) } @@ -164,7 +326,7 @@ impl TryFrom for Value { let value = Value::try_from(value)?; rmap.insert(key, value); } - Ok(Value::Object(rmap)) + Ok(Value::object(rmap)) } } } diff --git a/graph/src/lib.rs b/graph/src/lib.rs index c6019c78766..12c1452185f 100644 --- a/graph/src/lib.rs +++ b/graph/src/lib.rs @@ -184,7 +184,7 @@ pub mod prelude { 
static_graphql!(q, query, { Document, Value, OperationDefinition, InlineFragment, TypeCondition, FragmentSpread, Field, Selection, SelectionSet, FragmentDefinition, - Directive, VariableDefinition, Type, + Directive, VariableDefinition, Type, Query, }); static_graphql!(s, schema, { Field, Directive, InterfaceType, ObjectType, Value, TypeDefinition, diff --git a/graphql/src/execution/ast.rs b/graphql/src/execution/ast.rs new file mode 100644 index 00000000000..21cd323c9f8 --- /dev/null +++ b/graphql/src/execution/ast.rs @@ -0,0 +1,353 @@ +use std::{collections::HashSet, ops::Deref}; + +use graph::{ + components::store::EntityType, + data::graphql::ObjectOrInterface, + prelude::{anyhow, q, r, s, ApiSchema, QueryExecutionError, ValueMap}, +}; +use graphql_parser::Pos; + +use crate::schema::ast::ObjectType; + +/// A selection set is a table that maps object types to the fields that +/// should be selected for objects of that type. The types are always +/// concrete object types, never interface or union types. When a +/// `SelectionSet` is constructed, fragments must already have been resolved +/// as it only allows using fields. +/// +/// The set of types that a `SelectionSet` can accommodate must be set at +/// the time the `SelectionSet` is constructed. It is not possible to add +/// more types to it, but it is possible to add fields for all known types +/// or only some of them +#[derive(Debug, Clone, PartialEq)] +pub struct SelectionSet { + // Map object types to the list of fields that should be selected for + // them. In most cases, this will have a single entry. 
If the + // `SelectionSet` is attached to a field with an interface or union + // type, it will have an entry for each object type implementing that + // interface or being part of the union + items: Vec<(ObjectType, Vec)>, +} + +impl SelectionSet { + /// Create a new `SelectionSet` that can handle the given types + pub fn new(types: Vec) -> Self { + let items = types + .into_iter() + .map(|obj_type| (obj_type, Vec::new())) + .collect(); + SelectionSet { items } + } + + /// Create a new `SelectionSet` that can handle the same types as + /// `other`, but ignore all fields from `other` + pub fn empty_from(other: &SelectionSet) -> Self { + let items = other + .items + .iter() + .map(|(name, _)| (name.clone(), Vec::new())) + .collect(); + SelectionSet { items } + } + + /// Return `true` if this selection set does not select any fields for + /// its types + pub fn is_empty(&self) -> bool { + self.items.iter().all(|(_, fields)| fields.is_empty()) + } + + /// If the selection set contains a single field across all its types, + /// return it. Otherwise, return `None` + pub fn single_field(&self) -> Option<&Field> { + let mut iter = self.items.iter(); + let field = match iter.next() { + Some((_, fields)) => { + if fields.len() != 1 { + return None; + } else { + &fields[0] + } + } + None => return None, + }; + for (_, fields) in iter { + if fields.len() != 1 { + return None; + } + if &fields[0] != field { + return None; + } + } + return Some(field); + } + + /// Iterate over all types and the fields for those types + pub fn fields(&self) -> impl Iterator)> { + self.items + .iter() + .map(|(obj_type, fields)| (obj_type, fields.iter())) + } + + /// Iterate over all types and the fields that are not leaf fields, i.e. 
+ /// whose selection sets are not empty + pub fn interior_fields( + &self, + ) -> impl Iterator)> { + self.items + .iter() + .map(|(obj_type, fields)| (obj_type, fields.iter().filter(|field| !field.is_leaf()))) + } + + /// Iterate over all fields for the given object type + /// + /// # Panics + /// If this `SelectionSet` does not have an entry for `obj_type`, this + /// method will panic + pub fn fields_for(&self, obj_type: &ObjectType) -> impl Iterator { + let item = self + .items + .iter() + .find(|(our_type, _)| our_type == obj_type) + .expect("there is an entry for the type"); + item.1.iter() + } + + /// Append the field for all the sets' types + pub fn push(&mut self, new_field: &Field) { + for (_, fields) in &mut self.items { + Self::merge_field(fields, new_field.clone()); + } + } + + /// Append the fields for all the sets' types + pub fn push_fields(&mut self, fields: Vec<&Field>) { + for field in fields { + self.push(field); + } + } + + /// Merge `self` with the fields from `other`, which must have the same, + /// or a subset of, the types of `self`. 
The `directives` are added to + /// `self`'s directives so that they take precedence over existing + /// directives with the same name + pub fn merge(&mut self, other: SelectionSet, directives: Vec) { + for (other_name, other_fields) in other.items { + let item = self + .items + .iter_mut() + .find(|(name, _)| &other_name == name) + .expect("all possible types are already in items"); + for mut other_field in other_fields { + other_field.prepend_directives(directives.clone()); + Self::merge_field(&mut item.1, other_field); + } + } + } + + fn merge_field(fields: &mut Vec, new_field: Field) { + match fields + .iter_mut() + .find(|field| field.response_key() == new_field.response_key()) + { + Some(field) => { + // TODO: check that _field and new_field are mergeable, in + // particular that their name, directives and arguments are + // compatible + field.selection_set.merge(new_field.selection_set, vec![]); + } + None => fields.push(new_field), + } + } +} + +#[derive(Debug, Clone, PartialEq)] +pub struct Directive { + pub position: Pos, + pub name: String, + pub arguments: Vec<(String, r::Value)>, +} + +impl Directive { + /// Looks up the value of an argument of this directive + pub fn argument_value(&self, name: &str) -> Option<&r::Value> { + self.arguments + .iter() + .find(|(n, _)| n == name) + .map(|(_, v)| v) + } + + fn eval_if(&self) -> bool { + match self.argument_value("if") { + None => true, + Some(r::Value::Boolean(b)) => *b, + Some(_) => false, + } + } + + /// Return `true` if this directive says that we should not include the + /// field it is attached to. That is the case if the directive is + /// `include` and its `if` condition is `false`, or if it is `skip` and + /// its `if` condition is `true`. In all other cases, return `false` + pub fn skip(&self) -> bool { + match self.name.as_str() { + "include" => !self.eval_if(), + "skip" => self.eval_if(), + _ => false, + } + } +} + +/// A field to execute as part of a query. 
When the field is constructed by +/// `Query::new`, variables are interpolated, and argument values have +/// already been coerced to the appropriate types for the field argument +#[derive(Debug, Clone, PartialEq)] +pub struct Field { + pub position: Pos, + pub alias: Option, + pub name: String, + pub arguments: Vec<(String, r::Value)>, + pub directives: Vec, + pub selection_set: SelectionSet, +} + +impl Field { + /// Returns the response key of a field, which is either its name or its + /// alias (if there is one). + pub fn response_key(&self) -> &str { + self.alias + .as_ref() + .map(Deref::deref) + .unwrap_or(self.name.as_str()) + } + + /// Looks up the value of an argument for this field + pub fn argument_value(&self, name: &str) -> Option<&r::Value> { + self.arguments + .iter() + .find(|(n, _)| n == name) + .map(|(_, v)| v) + } + + fn prepend_directives(&mut self, mut directives: Vec) { + // TODO: check that the new directives don't conflict with existing + // directives + std::mem::swap(&mut self.directives, &mut directives); + self.directives.extend(directives); + } + + fn is_leaf(&self) -> bool { + self.selection_set.is_empty() + } +} + +impl ValueMap for Field { + fn get_required(&self, key: &str) -> Result { + self.argument_value(key) + .ok_or_else(|| anyhow!("Required field `{}` not set", key)) + .and_then(|value| T::try_from_value(value).map_err(|e| e.into())) + } + + fn get_optional( + &self, + key: &str, + ) -> Result, anyhow::Error> { + self.argument_value(key) + .map_or(Ok(None), |value| match value { + r::Value::Null => Ok(None), + _ => T::try_from_value(value).map(Some).map_err(Into::into), + }) + } +} + +/// A set of object types, generated from resolving interfaces into the +/// object types that implement them, and possibly narrowing further when +/// expanding fragments with type conditions +#[derive(Debug, Clone, PartialEq)] +pub(crate) enum ObjectTypeSet { + Any, + Only(HashSet), +} + +impl ObjectTypeSet { + pub fn convert( + schema: 
&ApiSchema, + type_cond: Option<&q::TypeCondition>, + ) -> Result { + match type_cond { + Some(q::TypeCondition::On(name)) => Self::from_name(schema, name), + None => Ok(ObjectTypeSet::Any), + } + } + + pub fn from_name(schema: &ApiSchema, name: &str) -> Result { + let set = resolve_object_types(schema, name)?; + Ok(ObjectTypeSet::Only(set)) + } + + fn contains(&self, obj_type: &ObjectType) -> bool { + match self { + ObjectTypeSet::Any => true, + ObjectTypeSet::Only(set) => set.contains(obj_type), + } + } + + pub fn intersect(self, other: &ObjectTypeSet) -> ObjectTypeSet { + match self { + ObjectTypeSet::Any => other.clone(), + ObjectTypeSet::Only(set) => { + ObjectTypeSet::Only(set.into_iter().filter(|ty| other.contains(ty)).collect()) + } + } + } + + /// Return a list of the object type names that are in this type set and + /// are also implementations of `current_type` + pub fn type_names( + &self, + schema: &ApiSchema, + current_type: ObjectOrInterface<'_>, + ) -> Result, QueryExecutionError> { + Ok(resolve_object_types(schema, current_type.name())? + .into_iter() + .filter(|obj_type| match self { + ObjectTypeSet::Any => true, + ObjectTypeSet::Only(set) => set.contains(obj_type), + }) + .collect()) + } +} + +/// Look up the type `name` from the schema and resolve interfaces +/// and unions until we are left with a set of concrete object types +pub(crate) fn resolve_object_types( + schema: &ApiSchema, + name: &str, +) -> Result, QueryExecutionError> { + let mut set = HashSet::new(); + match schema + .get_named_type(name) + .ok_or_else(|| QueryExecutionError::AbstractTypeError(name.to_string()))? + { + s::TypeDefinition::Interface(intf) => { + for obj_ty in &schema.types_for_interface()[&EntityType::new(intf.name.to_string())] { + let obj_ty = schema.object_type(obj_ty); + set.insert(obj_ty.into()); + } + } + s::TypeDefinition::Union(tys) => { + for ty in &tys.types { + set.extend(resolve_object_types(schema, ty)?) 
+ } + } + s::TypeDefinition::Object(ty) => { + let ty = schema.object_type(ty); + set.insert(ty.into()); + } + s::TypeDefinition::Scalar(_) + | s::TypeDefinition::Enum(_) + | s::TypeDefinition::InputObject(_) => { + return Err(QueryExecutionError::NamedTypeError(name.to_string())); + } + } + Ok(set) +} diff --git a/graphql/src/execution/execution.rs b/graphql/src/execution/execution.rs index 71adc45104b..2f28f7fad3b 100644 --- a/graphql/src/execution/execution.rs +++ b/graphql/src/execution/execution.rs @@ -1,19 +1,16 @@ use super::cache::{QueryBlockCache, QueryCache}; use crossbeam::atomic::AtomicCell; use graph::{ - data::schema::META_FIELD_NAME, + data::{schema::META_FIELD_NAME, value::Object}, prelude::{s, CheapClone}, util::timed_rw_lock::TimedMutex, }; -use indexmap::IndexMap; use lazy_static::lazy_static; use stable_hash::crypto::SetHasher; use stable_hash::prelude::*; use stable_hash::utils::stable_hash; -use std::borrow::ToOwned; -use std::collections::{BTreeMap, HashMap, HashSet}; -use std::iter; use std::time::Instant; +use std::{borrow::ToOwned, collections::HashSet}; use graph::data::graphql::*; use graph::data::query::CacheStatus; @@ -21,13 +18,10 @@ use graph::prelude::*; use graph::util::lfu_cache::LfuCache; use super::QueryHash; -use crate::introspection::{ - is_introspection_field, INTROSPECTION_DOCUMENT, INTROSPECTION_QUERY_TYPE, -}; +use crate::execution::ast as a; +use crate::introspection::{is_introspection_field, INTROSPECTION_QUERY_TYPE}; use crate::prelude::*; -use crate::query::ast as qast; use crate::schema::ast as sast; -use crate::values::coercion; lazy_static! { // Comma separated subgraph ids to cache queries for. 
@@ -128,7 +122,7 @@ impl CacheWeight for WeightedResult { impl Default for WeightedResult { fn default() -> Self { WeightedResult { - result: Arc::new(QueryResult::new(BTreeMap::default())), + result: Arc::new(QueryResult::new(Object::default())), weight: 0, } } @@ -136,9 +130,7 @@ impl Default for WeightedResult { struct HashableQuery<'a> { query_schema_id: &'a DeploymentHash, - query_variables: &'a HashMap, - query_fragments: &'a HashMap, - selection_set: &'a q::SelectionSet, + selection_set: &'a a::SelectionSet, block_ptr: &'a BlockPtr, } @@ -163,24 +155,8 @@ impl StableHash for HashableQuery<'_> { self.query_schema_id .stable_hash(sequence_number.next_child(), state); - // Not stable! Uses to_string() - self.query_variables - .iter() - .map(|(k, v)| (k, v.to_string())) - .collect::>() - .stable_hash(sequence_number.next_child(), state); - - // Not stable! Uses to_string() - self.query_fragments - .iter() - .map(|(k, v)| (k, v.to_string())) - .collect::>() - .stable_hash(sequence_number.next_child(), state); - // Not stable! Uses to_string - self.selection_set - .to_string() - .stable_hash(sequence_number.next_child(), state); + format!("{:?}", self.selection_set).stable_hash(sequence_number.next_child(), state); self.block_ptr .stable_hash(sequence_number.next_child(), state); @@ -190,15 +166,13 @@ impl StableHash for HashableQuery<'_> { // The key is: subgraph id + selection set + variables + fragment definitions fn cache_key( ctx: &ExecutionContext, - selection_set: &q::SelectionSet, + selection_set: &a::SelectionSet, block_ptr: &BlockPtr, ) -> QueryHash { // It is very important that all data used for the query is included. // Otherwise, incorrect results may be returned. 
let query = HashableQuery { query_schema_id: ctx.query.schema.id(), - query_variables: &ctx.query.variables, - query_fragments: &ctx.query.fragments, selection_set, block_ptr, }; @@ -232,38 +206,18 @@ where pub(crate) cache_status: AtomicCell, } -// Helpers to look for types and fields on both the introspection and regular schemas. -pub(crate) fn get_named_type(schema: &s::Document, name: &str) -> Option { - if name.starts_with("__") { - INTROSPECTION_DOCUMENT.get_named_type(name).cloned() - } else { - schema.get_named_type(name).cloned() - } -} - pub(crate) fn get_field<'a>( object_type: impl Into>, name: &str, ) -> Option { if name == "__schema" || name == "__type" { - let object_type = *INTROSPECTION_QUERY_TYPE; + let object_type = &*INTROSPECTION_QUERY_TYPE; sast::get_field(object_type, name).cloned() } else { sast::get_field(object_type, name).cloned() } } -pub(crate) fn object_or_interface<'a>( - schema: &'a s::Document, - name: &str, -) -> Option> { - if name.starts_with("__") { - INTROSPECTION_DOCUMENT.object_or_interface(name) - } else { - schema.object_or_interface(name) - } -} - impl ExecutionContext where R: Resolver, @@ -275,7 +229,7 @@ where ExecutionContext { logger: self.logger.cheap_clone(), resolver: introspection_resolver, - query: self.query.as_introspection_query(), + query: self.query.cheap_clone(), deadline: self.deadline, max_first: std::u32::MAX, max_skip: std::u32::MAX, @@ -286,54 +240,46 @@ where } } -pub fn execute_root_selection_set_uncached( +pub(crate) fn execute_root_selection_set_uncached( ctx: &ExecutionContext, - selection_set: &q::SelectionSet, - root_type: &s::ObjectType, -) -> Result, Vec> { + selection_set: &a::SelectionSet, + root_type: &sast::ObjectType, +) -> Result> { // Split the top-level fields into introspection fields and // regular data fields - let mut data_set = q::SelectionSet { - span: selection_set.span, - items: Vec::new(), - }; - let mut intro_set = q::SelectionSet { - span: selection_set.span, - items: 
Vec::new(), - }; + let mut data_set = a::SelectionSet::empty_from(selection_set); + let mut intro_set = a::SelectionSet::empty_from(selection_set); let mut meta_items = Vec::new(); - for (_, fields) in collect_fields(ctx, root_type, iter::once(selection_set)) { - let name = fields[0].name.clone(); - let selections = fields.into_iter().map(|f| q::Selection::Field(f.clone())); + for field in selection_set.fields_for(root_type) { // See if this is an introspection or data field. We don't worry about // non-existent fields; those will cause an error later when we execute // the data_set SelectionSet - if is_introspection_field(&name) { - intro_set.items.extend(selections) - } else if &name == META_FIELD_NAME { - meta_items.extend(selections) + if is_introspection_field(&field.name) { + intro_set.push(field) + } else if &field.name == META_FIELD_NAME { + meta_items.push(field) } else { - data_set.items.extend(selections) + data_set.push(field) } } // If we are getting regular data, prefetch it from the database - let mut values = if data_set.items.is_empty() && meta_items.is_empty() { - BTreeMap::default() + let mut values = if data_set.is_empty() && meta_items.is_empty() { + Object::default() } else { let initial_data = ctx.resolver.prefetch(&ctx, &data_set)?; - data_set.items.extend(meta_items); - execute_selection_set_to_map(&ctx, iter::once(&data_set), root_type, initial_data)? + data_set.push_fields(meta_items); + execute_selection_set_to_map(&ctx, &data_set, root_type, initial_data)? }; // Resolve introspection fields, if there are any - if !intro_set.items.is_empty() { + if !intro_set.is_empty() { let ictx = ctx.as_introspection_context(); values.extend(execute_selection_set_to_map( &ictx, - iter::once(&intro_set), + ctx.query.selection_set.as_ref(), &*INTROSPECTION_QUERY_TYPE, None, )?); @@ -343,10 +289,10 @@ pub fn execute_root_selection_set_uncached( } /// Executes the root selection set of a query. 
-pub async fn execute_root_selection_set( +pub(crate) async fn execute_root_selection_set( ctx: Arc>, - selection_set: Arc, - root_type: Arc, + selection_set: Arc, + root_type: sast::ObjectType, block_ptr: Option, ) -> Arc { // Cache the cache key to not have to calculate it twice - once for lookup @@ -489,13 +435,13 @@ pub async fn execute_root_selection_set( /// Allows passing in a parent value during recursive processing of objects and their fields. fn execute_selection_set<'a>( ctx: &'a ExecutionContext, - selection_sets: impl Iterator, - object_type: &s::ObjectType, + selection_set: &'a a::SelectionSet, + object_type: &sast::ObjectType, prefetched_value: Option, ) -> Result> { Ok(r::Value::Object(execute_selection_set_to_map( ctx, - selection_sets, + selection_set, object_type, prefetched_value, )?)) @@ -503,26 +449,23 @@ fn execute_selection_set<'a>( fn execute_selection_set_to_map<'a>( ctx: &'a ExecutionContext, - selection_sets: impl Iterator, - object_type: &s::ObjectType, + selection_set: &'a a::SelectionSet, + object_type: &sast::ObjectType, prefetched_value: Option, -) -> Result, Vec> { +) -> Result> { let mut prefetched_object = match prefetched_value { Some(r::Value::Object(object)) => Some(object), Some(_) => unreachable!(), None => None, }; let mut errors: Vec = Vec::new(); - let mut result_map: BTreeMap = BTreeMap::new(); - - // Group fields with the same response key, so we can execute them together - let grouped_field_set = collect_fields(ctx, object_type, selection_sets); + let mut result_map = Object::new(); // Gather fields that appear more than once with the same response key. 
let multiple_response_keys = { let mut multiple_response_keys = HashSet::new(); let mut fields = HashSet::new(); - for field in grouped_field_set.iter().map(|(_, f)| f.iter()).flatten() { + for field in selection_set.fields_for(object_type) { if !fields.insert(field.name.as_str()) { multiple_response_keys.insert(field.name.as_str()); } @@ -531,7 +474,7 @@ fn execute_selection_set_to_map<'a>( }; // Process all field groups in order - for (response_key, fields) in grouped_field_set { + for field in selection_set.fields_for(object_type) { match ctx.deadline { Some(deadline) if deadline < Instant::now() => { errors.push(QueryExecutionError::Timeout); @@ -540,8 +483,10 @@ fn execute_selection_set_to_map<'a>( _ => (), } + let response_key = field.response_key(); + // Unwrap: The query was validated to contain only valid fields. - let field = sast::get_field(object_type, &fields[0].name).unwrap(); + let field_type = sast::get_field(object_type, &field.name).unwrap(); // Check if we have the value already. let field_value = prefetched_object @@ -554,13 +499,13 @@ fn execute_selection_set_to_map<'a>( // Scalars and scalar lists are associated to the field name. // If the field has more than one response key, we have to clone. - match multiple_response_keys.contains(fields[0].name.as_str()) { - false => o.remove(&fields[0].name), - true => o.get(&fields[0].name).cloned(), + match multiple_response_keys.contains(field.name.as_str()) { + false => o.remove(&field.name), + true => o.get(&field.name).cloned(), } }) .flatten(); - match execute_field(&ctx, object_type, field_value, &fields[0], field, fields) { + match execute_field(&ctx, object_type, field_value, field, field_type) { Ok(v) => { result_map.insert(response_key.to_owned(), v); } @@ -577,144 +522,23 @@ fn execute_selection_set_to_map<'a>( } } -/// Collects fields from selection sets. Returns a map from response key to fields. There will -/// typically be a single field for a response key. 
If there are multiple, the overall execution -/// logic will effectively merged them into the output for the response key. -pub fn collect_fields<'a>( - ctx: &'a ExecutionContext, - object_type: &s::ObjectType, - selection_sets: impl Iterator, -) -> IndexMap<&'a str, Vec<&'a q::Field>> { - let mut grouped_fields = IndexMap::new(); - collect_fields_inner( - ctx, - object_type, - selection_sets, - &mut HashSet::new(), - &mut grouped_fields, - ); - grouped_fields -} - -pub fn collect_fields_inner<'a>( - ctx: &'a ExecutionContext, - object_type: &s::ObjectType, - selection_sets: impl Iterator, - visited_fragments: &mut HashSet<&'a str>, - output: &mut IndexMap<&'a str, Vec<&'a q::Field>>, -) { - for selection_set in selection_sets { - // Only consider selections that are not skipped and should be included - let selections = selection_set - .items - .iter() - .filter(|selection| !qast::skip_selection(selection, &ctx.query.variables)) - .filter(|selection| qast::include_selection(selection, &ctx.query.variables)); - - for selection in selections { - match selection { - q::Selection::Field(ref field) => { - let response_key = qast::get_response_key(field); - output.entry(response_key).or_default().push(field); - } - - q::Selection::FragmentSpread(spread) => { - // Only consider the fragment if it hasn't already been included, - // as would be the case if the same fragment spread ...Foo appeared - // twice in the same selection set. - // - // Note: This will skip both duplicate fragments and will break cycles, - // so we support fragments even though the GraphQL spec prohibits them. 
- if visited_fragments.insert(&spread.fragment_name) { - let fragment = ctx.query.get_fragment(&spread.fragment_name); - if does_fragment_type_apply(ctx, object_type, &fragment.type_condition) { - // We have a fragment that applies to the current object type, - // collect fields recursively - collect_fields_inner( - ctx, - object_type, - iter::once(&fragment.selection_set), - visited_fragments, - output, - ); - } - } - } - - q::Selection::InlineFragment(fragment) => { - let applies = match &fragment.type_condition { - Some(cond) => does_fragment_type_apply(ctx, object_type, &cond), - None => true, - }; - - if applies { - collect_fields_inner( - ctx, - object_type, - iter::once(&fragment.selection_set), - visited_fragments, - output, - ) - } - } - }; - } - } -} - -/// Determines whether a fragment is applicable to the given object type. -fn does_fragment_type_apply( - ctx: &ExecutionContext, - object_type: &s::ObjectType, - fragment_type: &q::TypeCondition, -) -> bool { - // This is safe to do, as TypeCondition only has a single `On` variant. - let q::TypeCondition::On(ref name) = fragment_type; - - // Resolve the type the fragment applies to based on its name - let named_type = ctx.query.schema.document().get_named_type(name); - - match named_type { - // The fragment applies to the object type if its type is the same object type - Some(s::TypeDefinition::Object(ot)) => object_type == ot, - - // The fragment also applies to the object type if its type is an interface - // that the object type implements - Some(s::TypeDefinition::Interface(it)) => { - object_type.implements_interfaces.contains(&it.name) - } - - // The fragment also applies to an object type if its type is a union that - // the object type is one of the possible types for - Some(s::TypeDefinition::Union(ut)) => ut.types.contains(&object_type.name), - - // In all other cases, the fragment does not apply - _ => false, - } -} - /// Executes a field. 
fn execute_field( ctx: &ExecutionContext, object_type: &s::ObjectType, field_value: Option, - field: &q::Field, + field: &a::Field, field_definition: &s::Field, - fields: Vec<&q::Field>, ) -> Result> { - coerce_argument_values(&ctx.query, object_type, field) - .and_then(|argument_values| { - resolve_field_value( - ctx, - object_type, - field_value, - field, - field_definition, - &field_definition.field_type, - &argument_values, - ) - }) - .and_then(|value| complete_value(ctx, field, &field_definition.field_type, &fields, value)) + resolve_field_value( + ctx, + object_type, + field_value, + field, + field_definition, + &field_definition.field_type, + ) + .and_then(|value| complete_value(ctx, field, &field_definition.field_type, value)) } /// Resolves the value of a field. @@ -722,10 +546,9 @@ fn resolve_field_value( ctx: &ExecutionContext, object_type: &s::ObjectType, field_value: Option, - field: &q::Field, + field: &a::Field, field_definition: &s::Field, field_type: &s::Type, - argument_values: &HashMap<&str, r::Value>, ) -> Result> { match field_type { s::Type::NonNullType(inner_type) => resolve_field_value( @@ -735,7 +558,6 @@ fn resolve_field_value( field, field_definition, inner_type.as_ref(), - argument_values, ), s::Type::NamedType(ref name) => resolve_field_value_for_named_type( @@ -745,7 +567,6 @@ fn resolve_field_value( field, field_definition, name, - argument_values, ), s::Type::ListType(inner_type) => resolve_field_value_for_list_type( @@ -755,7 +576,6 @@ fn resolve_field_value( field, field_definition, inner_type.as_ref(), - argument_values, ), } } @@ -765,27 +585,22 @@ fn resolve_field_value_for_named_type( ctx: &ExecutionContext, object_type: &s::ObjectType, field_value: Option, - field: &q::Field, + field: &a::Field, field_definition: &s::Field, type_name: &str, - argument_values: &HashMap<&str, r::Value>, ) -> Result> { // Try to resolve the type name into the actual type let named_type = ctx .query .schema - .document() .get_named_type(type_name) 
.ok_or_else(|| QueryExecutionError::NamedTypeError(type_name.to_string()))?; match named_type { // Let the resolver decide how the field (with the given object type) is resolved - s::TypeDefinition::Object(t) => ctx.resolver.resolve_object( - field_value, - field, - field_definition, - t.into(), - argument_values, - ), + s::TypeDefinition::Object(t) => { + ctx.resolver + .resolve_object(field_value, field, field_definition, t.into()) + } // Let the resolver decide how values in the resolved object value // map to values of GraphQL enums @@ -795,16 +610,13 @@ fn resolve_field_value_for_named_type( // map to values of GraphQL scalars s::TypeDefinition::Scalar(t) => { ctx.resolver - .resolve_scalar_value(object_type, field, t, field_value, argument_values) + .resolve_scalar_value(object_type, field, t, field_value) } - s::TypeDefinition::Interface(i) => ctx.resolver.resolve_object( - field_value, - field, - field_definition, - i.into(), - argument_values, - ), + s::TypeDefinition::Interface(i) => { + ctx.resolver + .resolve_object(field_value, field, field_definition, i.into()) + } s::TypeDefinition::Union(_) => Err(QueryExecutionError::Unimplemented("unions".to_owned())), @@ -818,10 +630,9 @@ fn resolve_field_value_for_list_type( ctx: &ExecutionContext, object_type: &s::ObjectType, field_value: Option, - field: &q::Field, + field: &a::Field, field_definition: &s::Field, inner_type: &s::Type, - argument_values: &HashMap<&str, r::Value>, ) -> Result> { match inner_type { s::Type::NonNullType(inner_type) => resolve_field_value_for_list_type( @@ -831,14 +642,12 @@ fn resolve_field_value_for_list_type( field, field_definition, inner_type, - argument_values, ), s::Type::NamedType(ref type_name) => { let named_type = ctx .query .schema - .document() .get_named_type(type_name) .ok_or_else(|| QueryExecutionError::NamedTypeError(type_name.to_string()))?; @@ -847,13 +656,7 @@ fn resolve_field_value_for_list_type( // is resolved into a entities based on the (potential) parent 
object s::TypeDefinition::Object(t) => ctx .resolver - .resolve_objects( - field_value, - field, - field_definition, - t.into(), - argument_values, - ) + .resolve_objects(field_value, field, field_definition, t.into()) .map_err(|e| vec![e]), // Let the resolver decide how values in the resolved object value @@ -870,13 +673,7 @@ fn resolve_field_value_for_list_type( s::TypeDefinition::Interface(t) => ctx .resolver - .resolve_objects( - field_value, - field, - field_definition, - t.into(), - argument_values, - ) + .resolve_objects(field_value, field, field_definition, t.into()) .map_err(|e| vec![e]), s::TypeDefinition::Union(_) => Err(vec![QueryExecutionError::Unimplemented( @@ -899,15 +696,14 @@ fn resolve_field_value_for_list_type( /// Ensures that a value matches the expected return type. fn complete_value( ctx: &ExecutionContext, - field: &q::Field, + field: &a::Field, field_type: &s::Type, - fields: &Vec<&q::Field>, resolved_value: r::Value, ) -> Result> { match field_type { // Fail if the field type is non-null but the value is null s::Type::NonNullType(inner_type) => { - return match complete_value(ctx, field, inner_type, fields, resolved_value)? { + return match complete_value(ctx, field, inner_type, resolved_value)? { r::Value::Null => Err(vec![QueryExecutionError::NonNullError( field.position, field.name.to_string(), @@ -931,7 +727,7 @@ fn complete_value( for value_place in &mut values { // Put in a placeholder, complete the value, put the completed value back. 
let value = std::mem::replace(value_place, r::Value::Null); - match complete_value(ctx, field, inner_type, fields, value) { + match complete_value(ctx, field, inner_type, value) { Ok(value) => { *value_place = value; } @@ -953,7 +749,7 @@ fn complete_value( } s::Type::NamedType(name) => { - let named_type = ctx.query.schema.document().get_named_type(name).unwrap(); + let named_type = ctx.query.schema.get_named_type(name).unwrap(); match named_type { // Complete scalar values @@ -986,12 +782,15 @@ fn complete_value( } // Complete object types recursively - s::TypeDefinition::Object(object_type) => execute_selection_set( - ctx, - fields.iter().map(|f| &f.selection_set), - object_type, - Some(resolved_value), - ), + s::TypeDefinition::Object(object_type) => { + let object_type = ctx.query.schema.object_type(object_type).into(); + execute_selection_set( + ctx, + &field.selection_set, + &object_type, + Some(resolved_value), + ) + } // Resolve interface types using the resolved value and complete the value recursively s::TypeDefinition::Interface(_) => { @@ -999,8 +798,8 @@ fn complete_value( execute_selection_set( ctx, - fields.iter().map(|f| &f.selection_set), - object_type, + &field.selection_set, + &object_type, Some(resolved_value), ) } @@ -1011,8 +810,8 @@ fn complete_value( execute_selection_set( ctx, - fields.iter().map(|f| &f.selection_set), - object_type, + &field.selection_set, + &object_type, Some(resolved_value), ) } @@ -1030,53 +829,16 @@ fn resolve_abstract_type<'a>( ctx: &'a ExecutionContext, abstract_type: &s::TypeDefinition, object_value: &r::Value, -) -> Result<&'a s::ObjectType, Vec> { +) -> Result> { // Let the resolver handle the type resolution, return an error if the resolution // yields nothing - ctx.resolver - .resolve_abstract_type(ctx.query.schema.document(), abstract_type, object_value) + let obj_type = ctx + .resolver + .resolve_abstract_type(&ctx.query.schema, abstract_type, object_value) .ok_or_else(|| { 
vec![QueryExecutionError::AbstractTypeError( sast::get_type_name(abstract_type).to_string(), )] - }) -} - -/// Coerces argument values into GraphQL values. -pub fn coerce_argument_values<'a>( - query: &crate::execution::Query, - ty: impl Into>, - field: &q::Field, -) -> Result, Vec> { - let mut coerced_values = HashMap::new(); - let mut errors = vec![]; - - let resolver = |name: &str| query.schema.document().get_named_type(name); - - for argument_def in sast::get_argument_definitions(ty, &field.name) - .into_iter() - .flatten() - { - let value = qast::get_argument_value(&field.arguments, &argument_def.name).cloned(); - match coercion::coerce_input_value(value, &argument_def, &resolver, &query.variables) { - Ok(Some(value)) => { - if argument_def.name == "text".to_string() { - coerced_values.insert( - argument_def.name.as_str(), - r::Value::Object(BTreeMap::from_iter(vec![(field.name.clone(), value)])), - ); - } else { - coerced_values.insert(&argument_def.name, value); - } - } - Ok(None) => {} - Err(e) => errors.push(e), - } - } - - if errors.is_empty() { - Ok(coerced_values) - } else { - Err(errors) - } + })?; + Ok(ctx.query.schema.object_type(obj_type).into()) } diff --git a/graphql/src/execution/mod.rs b/graphql/src/execution/mod.rs index 29e6e31aa08..e018074404c 100644 --- a/graphql/src/execution/mod.rs +++ b/graphql/src/execution/mod.rs @@ -5,6 +5,9 @@ mod query; /// Common trait for field resolvers used in the execution. 
mod resolver; +/// Our representation of a query AST +pub mod ast; + use stable_hash::{crypto::SetHasher, StableHasher}; pub use self::execution::*; diff --git a/graphql/src/execution/query.rs b/graphql/src/execution/query.rs index ac098be39ee..4f2e487f6fc 100644 --- a/graphql/src/execution/query.rs +++ b/graphql/src/execution/query.rs @@ -1,37 +1,31 @@ +use graph::data::graphql::DocumentExt as _; +use graph::data::value::Object; use graphql_parser::Pos; use graphql_tools::validation::rules::*; use graphql_tools::validation::validate::{validate, ValidationPlan}; use lazy_static::lazy_static; -use std::collections::{HashMap, HashSet}; +use std::collections::{BTreeMap, HashMap, HashSet}; use std::hash::{Hash, Hasher}; +use std::iter::FromIterator; use std::sync::Arc; use std::time::Instant; use std::{collections::hash_map::DefaultHasher, convert::TryFrom}; -use graph::data::graphql::{ - ext::{DocumentExt, TypeExt}, - ObjectOrInterface, -}; +use graph::data::graphql::{ext::TypeExt, ObjectOrInterface}; use graph::data::query::QueryExecutionError; use graph::data::query::{Query as GraphDataQuery, QueryVariables}; use graph::data::schema::ApiSchema; use graph::prelude::{info, o, q, r, s, BlockNumber, CheapClone, Logger, TryFromValue}; -use crate::introspection::introspection_schema; +use crate::execution::ast as a; use crate::query::{ast as qast, ext::BlockConstraint}; -use crate::schema::ast as sast; -use crate::{ - execution::{get_field, get_named_type, object_or_interface}, - schema::api::ErrorPolicy, -}; +use crate::schema::ast::{self as sast}; +use crate::values::coercion; +use crate::{execution::get_field, schema::api::ErrorPolicy}; lazy_static! 
{ static ref GRAPHQL_VALIDATION_PLAN: ValidationPlan = ValidationPlan::from( - if std::env::var("DISABLE_GRAPHQL_VALIDATIONS") - .unwrap_or_else(|_| "false".into()) - .parse::() - .unwrap_or_else(|_| false) - { + if std::env::var("ENABLE_GRAPHQL_VALIDATIONS").ok().is_none() { vec![] } else { vec![ @@ -80,23 +74,20 @@ enum Kind { /// uses ',' to separate key/value pairs. /// If `SelectionSet` is `None`, log `*` to indicate that the query was /// for the entire selection set of the query -struct SelectedFields<'a>(&'a q::SelectionSet); +struct SelectedFields<'a>(&'a a::SelectionSet); impl<'a> std::fmt::Display for SelectedFields<'a> { fn fmt(&self, fmt: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> { let mut first = true; - for item in &self.0.items { - match item { - q::Selection::Field(field) => { - if !first { - write!(fmt, ";")?; - } - first = false; - write!(fmt, "{}", field.alias.as_ref().unwrap_or(&field.name))? - } - q::Selection::FragmentSpread(_) | q::Selection::InlineFragment(_) => { - /* nothing */ + for (obj_type, fields) in self.0.fields() { + write!(fmt, "{}:", obj_type.name)?; + for field in fields { + if first { + write!(fmt, "{}", field.response_key())?; + } else { + write!(fmt, ";{}", field.response_key())?; } + first = false; } } if first { @@ -112,13 +103,17 @@ impl<'a> std::fmt::Display for SelectedFields<'a> { /// A GraphQL query that has been preprocessed and checked and is ready /// for execution. Checking includes validating all query fields and, if /// desired, checking the query's complexity +// +// The implementation contains various workarounds to make it compatible +// with the previous implementation when it comes to queries that are not +// fully spec compliant and should be rejected through rigorous validation +// against the GraphQL spec. Once we do validate queries, code that is +// marked with `graphql-bug-compat` can be deleted. 
pub struct Query { /// The schema against which to execute the query pub schema: Arc, - /// The variables for the query, coerced into proper values - pub variables: HashMap, - /// The root selection set of the query - pub selection_set: Arc, + /// The root selection set of the query. All variable references have already been resolved + pub selection_set: Arc, /// The ShapeHash of the original query pub shape_hash: u64, @@ -128,7 +123,6 @@ pub struct Query { start: Instant, - pub(crate) fragments: HashMap, kind: Kind, /// Used only for logging; if logging is configured off, these will @@ -136,7 +130,6 @@ pub struct Query { pub query_text: Arc, pub variables_text: Arc, pub query_id: String, - pub(crate) complexity: u64, } impl Query { @@ -211,58 +204,62 @@ impl Query { "query_id" => query_id.clone() )); - let mut query = Self { - schema, + let start = Instant::now(); + let root_type = match kind { + Kind::Query => schema.query_type.as_ref(), + Kind::Subscription => schema.subscription_type.as_ref().unwrap(), + }; + // Use an intermediate struct so we can modify the query before + // enclosing it in an Arc + let raw_query = RawQuery { + schema: schema.cheap_clone(), variables, + selection_set, fragments, + root_type, + }; + + // It's important to check complexity first, so `validate_fields` + // doesn't risk a stack overflow from invalid queries. 
We don't + // really care about the resulting complexity, only that all the + // checks that `check_complexity` performs pass successfully + let _ = raw_query.check_complexity(max_complexity, max_depth)?; + raw_query.validate_fields()?; + let selection_set = raw_query.convert()?; + + let query = Self { + schema, selection_set: Arc::new(selection_set), shape_hash: query.shape_hash, kind, network, logger, - start: Instant::now(), + start, query_text: query.query_text.cheap_clone(), variables_text: query.variables_text.cheap_clone(), query_id, - complexity: 0, }; - // It's important to check complexity first, so `validate_fields` doesn't risk a stack - // overflow from invalid queries. - query.check_complexity(max_complexity, max_depth)?; - query.validate_fields()?; - Ok(Arc::new(query)) } - /// Return the block constraint for the toplevel query field(s), merging the selection sets of - /// fields that have the same block constraint. + /// Return the block constraint for the toplevel query field(s), merging + /// consecutive fields that have the same block constraint, while making + /// sure that the fields appear in the same order as they did in the + /// query /// - /// Also returns the combined error policy for those fields, which is `Deny` if any field is - /// `Deny` and `Allow` otherwise. + /// Also returns the combined error policy for those fields, which is + /// `Deny` if any field is `Deny` and `Allow` otherwise. 
pub fn block_constraint( &self, - ) -> Result, Vec> + ) -> Result, Vec> { - use graphql_parser::query::Selection::Field; + let mut bcs: Vec<(BlockConstraint, (a::SelectionSet, ErrorPolicy))> = Vec::new(); - let mut bcs = HashMap::new(); - let mut errors = Vec::new(); - - for field in self.selection_set.items.iter().filter_map(|sel| match sel { - Field(f) => Some(f), - _ => None, - }) { - let query_ty = self.schema.query_type.as_ref(); - let args = match crate::execution::coerce_argument_values(self, query_ty, field) { - Ok(args) => args, - Err(errs) => { - errors.extend(errs); - continue; - } - }; - - let bc = match args.get("block") { + let root_type = sast::ObjectType::from(self.schema.query_type.cheap_clone()); + let mut prev_bc: Option = None; + for field in self.selection_set.fields_for(&root_type) { + let bc = match field.argument_value("block") { Some(bc) => BlockConstraint::try_from_value(bc).map_err(|_| { vec![QueryExecutionError::InvalidArgumentError( Pos::default(), @@ -273,7 +270,7 @@ impl Query { None => BlockConstraint::Latest, }; - let field_error_policy = match args.get("subgraphError") { + let field_error_policy = match field.argument_value("subgraphError") { Some(value) => ErrorPolicy::try_from(value).map_err(|_| { vec![QueryExecutionError::InvalidArgumentError( Pos::default(), @@ -284,52 +281,21 @@ impl Query { None => ErrorPolicy::Deny, }; - let (selection_set, error_policy) = bcs.entry(bc).or_insert_with(|| { - ( - q::SelectionSet { - span: self.selection_set.span, - items: vec![], - }, - field_error_policy, - ) - }); - selection_set.items.push(Field(field.clone())); - if field_error_policy == ErrorPolicy::Deny { - *error_policy = ErrorPolicy::Deny; + let next_bc = Some(bc.clone()); + if prev_bc == next_bc { + let (selection_set, error_policy) = &mut bcs.last_mut().unwrap().1; + selection_set.push(field); + if field_error_policy == ErrorPolicy::Deny { + *error_policy = ErrorPolicy::Deny; + } + } else { + let mut selection_set = 
a::SelectionSet::empty_from(&self.selection_set); + selection_set.push(field); + bcs.push((bc, (selection_set, field_error_policy))) } + prev_bc = next_bc; } - if !errors.is_empty() { - Err(errors) - } else { - Ok(bcs) - } - } - - /// Return this query, but use the introspection schema as its schema - pub fn as_introspection_query(&self) -> Arc { - let introspection_schema = introspection_schema(self.schema.id().clone()); - - Arc::new(Self { - schema: Arc::new(introspection_schema), - variables: self.variables.clone(), - fragments: self.fragments.clone(), - selection_set: self.selection_set.clone(), - shape_hash: self.shape_hash, - kind: self.kind, - network: self.network.clone(), - logger: self.logger.clone(), - start: self.start, - query_text: self.query_text.clone(), - variables_text: self.variables_text.clone(), - query_id: self.query_id.clone(), - complexity: self.complexity, - }) - } - - /// Should only be called for fragments that exist in the query, and therefore have been - /// validated to exist. Panics otherwise. - pub fn get_fragment(&self, name: &str) -> &q::FragmentDefinition { - self.fragments.get(name).unwrap() + Ok(bcs) } /// Return `true` if this is a query, and not a subscription or @@ -367,7 +333,7 @@ impl Query { /// `selection_set` was cached pub fn log_cache_status( &self, - selection_set: &q::SelectionSet, + selection_set: &a::SelectionSet, block: BlockNumber, start: Instant, cache_status: String, @@ -383,12 +349,106 @@ impl Query { ); } } +} + +/// Coerces variable values for an operation. 
+pub fn coerce_variables( + schema: &ApiSchema, + operation: &q::OperationDefinition, + mut variables: Option, +) -> Result, Vec> { + let mut coerced_values = HashMap::new(); + let mut errors = vec![]; + + for variable_def in qast::get_variable_definitions(operation) + .into_iter() + .flatten() + { + // Skip variable if it has an invalid type + if !schema.is_input_type(&variable_def.var_type) { + errors.push(QueryExecutionError::InvalidVariableTypeError( + variable_def.position, + variable_def.name.to_owned(), + )); + continue; + } + + let value = variables + .as_mut() + .and_then(|vars| vars.remove(&variable_def.name)); + + let value = match value.or_else(|| { + variable_def + .default_value + .clone() + .map(r::Value::try_from) + .transpose() + .unwrap() + }) { + // No variable value provided and no default for non-null type, fail + None => { + if sast::is_non_null_type(&variable_def.var_type) { + errors.push(QueryExecutionError::MissingVariableError( + variable_def.position, + variable_def.name.to_owned(), + )); + }; + continue; + } + Some(value) => value, + }; + + // We have a variable value, attempt to coerce it to the value type + // of the variable definition + coerced_values.insert( + variable_def.name.to_owned(), + coerce_variable(schema, variable_def, value.into())?, + ); + } + + if errors.is_empty() { + Ok(coerced_values) + } else { + Err(errors) + } +} + +fn coerce_variable( + schema: &ApiSchema, + variable_def: &q::VariableDefinition, + value: r::Value, +) -> Result> { + use crate::values::coercion::coerce_value; + + let resolver = |name: &str| schema.get_named_type(name); + + coerce_value(value, &variable_def.var_type, &resolver).map_err(|value| { + vec![QueryExecutionError::InvalidArgumentError( + variable_def.position, + variable_def.name.to_owned(), + value.into(), + )] + }) +} + +struct RawQuery<'s> { + /// The schema against which to execute the query + schema: Arc, + /// The variables for the query, coerced into proper values + variables: 
HashMap, + /// The root selection set of the query + selection_set: q::SelectionSet, + + fragments: HashMap, + root_type: &'s s::ObjectType, +} +impl<'s> RawQuery<'s> { fn check_complexity( - &mut self, + &self, max_complexity: Option, max_depth: u8, - ) -> Result<(), Vec> { + ) -> Result> { let complexity = self.complexity(max_depth).map_err(|e| vec![e])?; if let Some(max_complexity) = max_complexity { if complexity > max_complexity { @@ -398,8 +458,113 @@ impl Query { )]); } } - self.complexity = complexity; - Ok(()) + Ok(complexity) + } + + fn complexity_inner<'a>( + &'a self, + ty: &s::TypeDefinition, + selection_set: &'a q::SelectionSet, + max_depth: u8, + depth: u8, + visited_fragments: &'a HashSet<&'a str>, + ) -> Result { + use ComplexityError::*; + + if depth >= max_depth { + return Err(TooDeep); + } + + selection_set + .items + .iter() + .try_fold(0, |total_complexity, selection| { + match selection { + q::Selection::Field(field) => { + // Empty selection sets are the base case. + if field.selection_set.items.is_empty() { + return Ok(total_complexity); + } + + // Get field type to determine if this is a collection query. + let s_field = match ty { + s::TypeDefinition::Object(t) => get_field(t, &field.name), + s::TypeDefinition::Interface(t) => get_field(t, &field.name), + + // `Scalar` and `Enum` cannot have selection sets. + // `InputObject` can't appear in a selection. + // `Union` is not yet supported. + s::TypeDefinition::Scalar(_) + | s::TypeDefinition::Enum(_) + | s::TypeDefinition::InputObject(_) + | s::TypeDefinition::Union(_) => None, + } + .ok_or(Invalid)?; + + let field_complexity = self.complexity_inner( + self.schema + .get_named_type(s_field.field_type.get_base_type()) + .ok_or(Invalid)?, + &field.selection_set, + max_depth, + depth + 1, + visited_fragments, + )?; + + // Non-collection queries pass through. 
+ if !sast::is_list_or_non_null_list_field(&s_field) { + return Ok(total_complexity + field_complexity); + } + + // For collection queries, check the `first` argument. + let max_entities = qast::get_argument_value(&field.arguments, "first") + .and_then(|arg| match arg { + q::Value::Int(n) => Some(n.as_i64()? as u64), + _ => None, + }) + .unwrap_or(100); + max_entities + .checked_add( + max_entities.checked_mul(field_complexity).ok_or(Overflow)?, + ) + .ok_or(Overflow) + } + q::Selection::FragmentSpread(fragment) => { + let def = self.fragments.get(&fragment.fragment_name).unwrap(); + let q::TypeCondition::On(type_name) = &def.type_condition; + let ty = self.schema.get_named_type(&type_name).ok_or(Invalid)?; + + // Copy `visited_fragments` on write. + let mut visited_fragments = visited_fragments.clone(); + if !visited_fragments.insert(&fragment.fragment_name) { + return Err(CyclicalFragment(fragment.fragment_name.clone())); + } + self.complexity_inner( + &ty, + &def.selection_set, + max_depth, + depth + 1, + &visited_fragments, + ) + } + q::Selection::InlineFragment(fragment) => { + let ty = match &fragment.type_condition { + Some(q::TypeCondition::On(type_name)) => { + self.schema.get_named_type(type_name).ok_or(Invalid)? + } + _ => ty, + }; + self.complexity_inner( + ty, + &fragment.selection_set, + max_depth, + depth + 1, + visited_fragments, + ) + } + } + .and_then(|complexity| total_complexity.checked_add(complexity).ok_or(Overflow)) + }) } /// See https://developer.github.com/v4/guides/resource-limitations/. @@ -407,7 +572,7 @@ impl Query { /// If the query is invalid, returns `Ok(0)` so that execution proceeds and /// gives a proper error. 
fn complexity(&self, max_depth: u8) -> Result { - let root_type = sast::get_root_query_type_def(self.schema.document()).unwrap(); + let root_type = self.schema.get_root_query_type_def().unwrap(); match self.complexity_inner( root_type, @@ -429,7 +594,7 @@ impl Query { } fn validate_fields(&self) -> Result<(), Vec> { - let root_type = self.schema.document().get_root_query_type().unwrap(); + let root_type = self.schema.query_type.as_ref(); let errors = self.validate_fields_inner(&"Query".to_owned(), root_type.into(), &self.selection_set); @@ -447,8 +612,6 @@ impl Query { ty: ObjectOrInterface<'_>, selection_set: &q::SelectionSet, ) -> Vec { - let schema = self.schema.document(); - selection_set .items .iter() @@ -457,9 +620,9 @@ impl Query { q::Selection::Field(field) => match get_field(ty, &field.name) { Some(s_field) => { let base_type = s_field.field_type.get_base_type(); - if get_named_type(schema, base_type).is_none() { + if self.schema.get_named_type(base_type).is_none() { errors.push(QueryExecutionError::NamedTypeError(base_type.into())); - } else if let Some(ty) = object_or_interface(schema, base_type) { + } else if let Some(ty) = self.schema.object_or_interface(base_type) { errors.extend(self.validate_fields_inner( base_type, ty, @@ -477,7 +640,7 @@ impl Query { match self.fragments.get(&fragment.fragment_name) { Some(frag) => { let q::TypeCondition::On(type_name) = &frag.type_condition; - match object_or_interface(schema, type_name) { + match self.schema.object_or_interface(type_name) { Some(ty) => errors.extend(self.validate_fields_inner( type_name, ty, @@ -495,7 +658,7 @@ impl Query { } q::Selection::InlineFragment(fragment) => match &fragment.type_condition { Some(q::TypeCondition::On(type_name)) => { - match object_or_interface(schema, type_name) { + match self.schema.object_or_interface(type_name) { Some(ty) => errors.extend(self.validate_fields_inner( type_name, ty, @@ -516,189 +679,348 @@ impl Query { }) } - fn complexity_inner<'a>( - &'a self, - ty: 
&s::TypeDefinition, - selection_set: &'a q::SelectionSet, - max_depth: u8, - depth: u8, - visited_fragments: &'a HashSet<&'a str>, - ) -> Result { - use ComplexityError::*; + fn convert(self) -> Result> { + let RawQuery { + schema, + variables, + selection_set, + fragments, + root_type, + } = self; - if depth >= max_depth { - return Err(TooDeep); - } + let transform = Transform { + schema, + variables, + fragments, + }; + transform.expand_selection_set(selection_set, &a::ObjectTypeSet::Any, root_type.into()) + } +} - selection_set - .items - .iter() - .try_fold(0, |total_complexity, selection| { - let schema = self.schema.document(); - match selection { - q::Selection::Field(field) => { - // Empty selection sets are the base case. - if field.selection_set.items.is_empty() { - return Ok(total_complexity); - } +struct Transform { + schema: Arc, + variables: HashMap, + fragments: HashMap, +} - // Get field type to determine if this is a collection query. - let s_field = match ty { - s::TypeDefinition::Object(t) => get_field(t, &field.name), - s::TypeDefinition::Interface(t) => get_field(t, &field.name), +impl Transform { + /// Look up the value of the variable `name`. If the variable is not + /// defined, return `r::Value::Null` + // graphql-bug-compat: Once queries are fully validated, all variables + // will be defined + fn variable(&self, name: &str) -> r::Value { + self.variables + .get(name) + .map(|value| value.clone()) + .unwrap_or(r::Value::Null) + } - // `Scalar` and `Enum` cannot have selection sets. - // `InputObject` can't appear in a selection. - // `Union` is not yet supported. 
- s::TypeDefinition::Scalar(_) - | s::TypeDefinition::Enum(_) - | s::TypeDefinition::InputObject(_) - | s::TypeDefinition::Union(_) => None, - } - .ok_or(Invalid)?; + /// Interpolate variable references in the arguments `args` + fn interpolate_arguments( + &self, + args: Vec<(String, q::Value)>, + pos: &Pos, + ) -> Vec<(String, r::Value)> { + args.into_iter() + .map(|(name, val)| { + let val = self.interpolate_value(val, pos); + (name, val) + }) + .collect() + } - let field_complexity = self.complexity_inner( - &get_named_type(schema, s_field.field_type.get_base_type()) - .ok_or(Invalid)?, - &field.selection_set, - max_depth, - depth + 1, - visited_fragments, - )?; + /// Turn `value` into an `r::Value` by resolving variable references + fn interpolate_value(&self, value: q::Value, pos: &Pos) -> r::Value { + match value { + q::Value::Variable(var) => self.variable(&var), + q::Value::Int(ref num) => { + r::Value::Int(num.as_i64().expect("q::Value::Int contains an i64")) + } + q::Value::Float(f) => r::Value::Float(f), + q::Value::String(s) => r::Value::String(s), + q::Value::Boolean(b) => r::Value::Boolean(b), + q::Value::Null => r::Value::Null, + q::Value::Enum(s) => r::Value::Enum(s), + q::Value::List(vals) => { + let vals = vals + .into_iter() + .map(|val| self.interpolate_value(val, pos)) + .collect(); + r::Value::List(vals) + } + q::Value::Object(map) => { + let mut rmap = BTreeMap::new(); + for (key, value) in map.into_iter() { + let value = self.interpolate_value(value, pos); + rmap.insert(key, value); + } + r::Value::object(rmap) + } + } + } - // Non-collection queries pass through. - if !sast::is_list_or_non_null_list_field(&s_field) { - return Ok(total_complexity + field_complexity); - } + /// Interpolate variable references in directives. 
Return the directives + /// and a boolean indicating whether the element these directives are + /// attached to should be skipped + fn interpolate_directives( + &self, + dirs: Vec, + ) -> Result<(Vec, bool), QueryExecutionError> { + let dirs: Vec<_> = dirs + .into_iter() + .map(|dir| { + let q::Directive { + name, + position, + arguments, + } = dir; + let arguments = self.interpolate_arguments(arguments, &position); + a::Directive { + name, + position, + arguments, + } + }) + .collect(); + let skip = dirs.iter().any(|dir| dir.skip()); + Ok((dirs, skip)) + } - // For collection queries, check the `first` argument. - let max_entities = qast::get_argument_value(&field.arguments, "first") - .and_then(|arg| match arg { - q::Value::Int(n) => Some(n.as_i64()? as u64), - _ => None, - }) - .unwrap_or(100); - max_entities - .checked_add( - max_entities.checked_mul(field_complexity).ok_or(Overflow)?, - ) - .ok_or(Overflow) - } - q::Selection::FragmentSpread(fragment) => { - let def = self.get_fragment(&fragment.fragment_name); - let q::TypeCondition::On(type_name) = &def.type_condition; - let ty = get_named_type(schema, &type_name).ok_or(Invalid)?; + /// Coerces argument values into GraphQL values. + pub fn coerce_argument_values<'a>( + &self, + arguments: &mut Vec<(String, r::Value)>, + ty: ObjectOrInterface<'a>, + field_name: &str, + ) -> Result<(), Vec> { + let mut errors = vec![]; - // Copy `visited_fragments` on write. - let mut visited_fragments = visited_fragments.clone(); - if !visited_fragments.insert(&fragment.fragment_name) { - return Err(CyclicalFragment(fragment.fragment_name.clone())); - } - self.complexity_inner( - &ty, - &def.selection_set, - max_depth, - depth + 1, - &visited_fragments, - ) - } - q::Selection::InlineFragment(fragment) => { - let ty = match &fragment.type_condition { - Some(q::TypeCondition::On(type_name)) => { - get_named_type(schema, &type_name).ok_or(Invalid)? 
- } - _ => ty.clone(), - }; - self.complexity_inner( - &ty, - &fragment.selection_set, - max_depth, - depth + 1, - visited_fragments, - ) + let resolver = |name: &str| self.schema.get_named_type(name); + + let mut defined_args: usize = 0; + for argument_def in sast::get_argument_definitions(ty, field_name) + .into_iter() + .flatten() + { + let arg_value = arguments + .iter_mut() + .find(|arg| &arg.0 == &argument_def.name) + .map(|arg| &mut arg.1); + if arg_value.is_some() { + defined_args += 1; + } + match coercion::coerce_input_value( + arg_value.as_deref().cloned(), + &argument_def, + &resolver, + ) { + Ok(Some(value)) => { + let value = if argument_def.name == "text".to_string() { + r::Value::Object(Object::from_iter(vec![(field_name.to_string(), value)])) + } else { + value + }; + match arg_value { + Some(arg_value) => *arg_value = value, + None => arguments.push((argument_def.name.clone(), value)), } } - .and_then(|complexity| total_complexity.checked_add(complexity).ok_or(Overflow)) - }) + Ok(None) => {} + Err(e) => errors.push(e), + } + } + + // see: graphql-bug-compat + // avoids error 'unknown argument on field' + if defined_args < arguments.len() { + // `arguments` contains undefined arguments, remove them + match sast::get_argument_definitions(ty, field_name) { + None => arguments.clear(), + Some(arg_defs) => { + arguments.retain(|(name, _)| arg_defs.iter().any(|def| &def.name == name)) + } + } + } + + if errors.is_empty() { + Ok(()) + } else { + Err(errors) + } } -} -/// Coerces variable values for an operation. -pub fn coerce_variables( - schema: &ApiSchema, - operation: &q::OperationDefinition, - mut variables: Option, -) -> Result, Vec> { - let mut coerced_values = HashMap::new(); - let mut errors = vec![]; + /// Expand fragments and interpolate variables in a field. 
Return `None` + /// if the field should be skipped + fn expand_field( + &self, + field: q::Field, + parent_type: ObjectOrInterface<'_>, + ) -> Result, Vec> { + let q::Field { + position, + alias, + name, + arguments, + directives, + selection_set, + } = field; - for variable_def in qast::get_variable_definitions(operation) - .into_iter() - .flatten() - { - // Skip variable if it has an invalid type - if !sast::is_input_type(schema.document(), &variable_def.var_type) { - errors.push(QueryExecutionError::InvalidVariableTypeError( - variable_def.position, - variable_def.name.to_owned(), - )); - continue; + // Short-circuit '__typename' since it is not a real field + if name == "__typename" { + return Ok(Some(a::Field { + position, + alias, + name, + arguments: vec![], + directives: vec![], + selection_set: a::SelectionSet::new(vec![]), + })); } - let value = variables - .as_mut() - .and_then(|vars| vars.remove(&variable_def.name)); + let field_type = parent_type.field(&name).ok_or_else(|| { + vec![QueryExecutionError::UnknownField( + position, + parent_type.name().to_string(), + name.clone(), + )] + })?; - let value = match value.or_else(|| { - variable_def - .default_value - .clone() - .map(r::Value::try_from) - .transpose() - .unwrap() - }) { - // No variable value provided and no default for non-null type, fail - None => { - if sast::is_non_null_type(&variable_def.var_type) { - errors.push(QueryExecutionError::MissingVariableError( - variable_def.position, - variable_def.name.to_owned(), - )); - }; - continue; + let (directives, skip) = self.interpolate_directives(directives)?; + if skip { + return Ok(None); + } + + let mut arguments = self.interpolate_arguments(arguments, &position); + self.coerce_argument_values(&mut arguments, parent_type, &name)?; + + let is_leaf_type = self.schema.document().is_leaf_type(&field_type.field_type); + let selection_set = if selection_set.items.is_empty() { + if !is_leaf_type { + // see: graphql-bug-compat + // Field requires 
selection, ignore this field + return Ok(None); + } + a::SelectionSet::new(vec![]) + } else { + if is_leaf_type { + // see: graphql-bug-compat + // Field does not allow selections, ignore selections + a::SelectionSet::new(vec![]) + } else { + let ty = field_type.field_type.get_base_type(); + let type_set = a::ObjectTypeSet::from_name(&self.schema, ty)?; + let ty = self.schema.object_or_interface(ty).unwrap(); + self.expand_selection_set(selection_set, &type_set, ty)? } - Some(value) => value, }; - // We have a variable value, attempt to coerce it to the value type - // of the variable definition - coerced_values.insert( - variable_def.name.to_owned(), - coerce_variable(schema, variable_def, value.into())?, - ); + Ok(Some(a::Field { + position, + alias, + name, + arguments, + directives, + selection_set, + })) } - if errors.is_empty() { - Ok(coerced_values) - } else { - Err(errors) - } -} + /// Expand fragments and interpolate variables in a selection set + fn expand_selection_set( + &self, + set: q::SelectionSet, + type_set: &a::ObjectTypeSet, + ty: ObjectOrInterface<'_>, + ) -> Result> { + let q::SelectionSet { span: _, items } = set; + // check_complexity already checked for cycles in fragment + // expansion, i.e. situations where a named fragment includes itself + // recursively. We still want to guard against spreading the same + // fragment twice at the same level in the query + let mut visited_fragments = HashSet::new(); -fn coerce_variable( - schema: &ApiSchema, - variable_def: &q::VariableDefinition, - value: q::Value, -) -> Result> { - use crate::values::coercion::coerce_value; + // All the types that could possibly be returned by this selection set + let types = type_set.type_names(&self.schema, ty)?; + let mut newset = a::SelectionSet::new(types); - let resolver = |name: &str| schema.document().get_named_type(name); + for sel in items { + match sel { + q::Selection::Field(field) => { + if let Some(field) = self.expand_field(field, ty)? 
{ + newset.push(&field); + } + } + q::Selection::FragmentSpread(spread) => { + // TODO: we ignore the directives here (and so did the + // old implementation), but that seems wrong + let q::FragmentSpread { + position: _, + fragment_name, + directives: _, + } = spread; + let frag = self.fragments.get(&fragment_name).unwrap(); + if visited_fragments.insert(fragment_name) { + let q::FragmentDefinition { + position: _, + name: _, + type_condition, + directives, + selection_set, + } = frag; + self.expand_fragment( + directives.clone(), + Some(type_condition), + type_set, + selection_set.clone(), + ty, + &mut newset, + )?; + } + } + q::Selection::InlineFragment(frag) => { + let q::InlineFragment { + position: _, + type_condition, + directives, + selection_set, + } = frag; + self.expand_fragment( + directives, + type_condition.as_ref(), + type_set, + selection_set, + ty, + &mut newset, + )?; + } + } + } + Ok(newset) + } - coerce_value(value, &variable_def.var_type, &resolver, &HashMap::new()).map_err(|value| { - vec![QueryExecutionError::InvalidArgumentError( - variable_def.position, - variable_def.name.to_owned(), - value.clone(), - )] - }) + fn expand_fragment( + &self, + directives: Vec, + frag_cond: Option<&q::TypeCondition>, + type_set: &a::ObjectTypeSet, + selection_set: q::SelectionSet, + ty: ObjectOrInterface, + newset: &mut a::SelectionSet, + ) -> Result<(), Vec> { + let (directives, skip) = self.interpolate_directives(directives)?; + // Field names in fragment spreads refer to this type, which will + // usually be different from the outer type + let ty = match frag_cond { + Some(q::TypeCondition::On(name)) => self + .schema + .object_or_interface(name) + .expect("type names on fragment spreads are valid"), + None => ty, + }; + if !skip { + let type_set = a::ObjectTypeSet::convert(&self.schema, frag_cond)?.intersect(type_set); + let selection_set = self.expand_selection_set(selection_set, &type_set, ty)?; + newset.merge(selection_set, directives); + } + Ok(()) + 
} } diff --git a/graphql/src/execution/resolver.rs b/graphql/src/execution/resolver.rs index b40890ddc7c..bad3ee098a9 100644 --- a/graphql/src/execution/resolver.rs +++ b/graphql/src/execution/resolver.rs @@ -1,13 +1,12 @@ -use std::collections::HashMap; - -use crate::execution::ExecutionContext; use graph::components::store::UnitStream; -use graph::prelude::{async_trait, q, s, tokio, Error, QueryExecutionError}; +use graph::prelude::{async_trait, s, tokio, ApiSchema, Error, QueryExecutionError}; use graph::{ - data::graphql::{ext::DocumentExt, ObjectOrInterface}, + data::graphql::ObjectOrInterface, prelude::{r, QueryResult}, }; +use crate::execution::{ast as a, ExecutionContext}; + /// A GraphQL resolver that can resolve entities, enum values, scalar types and interfaces/unions. #[async_trait] pub trait Resolver: Sized + Send + Sync + 'static { @@ -19,33 +18,31 @@ pub trait Resolver: Sized + Send + Sync + 'static { fn prefetch( &self, ctx: &ExecutionContext, - selection_set: &q::SelectionSet, + selection_set: &a::SelectionSet, ) -> Result, Vec>; /// Resolves list of objects, `prefetched_objects` is `Some` if the parent already calculated the value. fn resolve_objects( &self, prefetched_objects: Option, - field: &q::Field, + field: &a::Field, field_definition: &s::Field, object_type: ObjectOrInterface<'_>, - arguments: &HashMap<&str, r::Value>, ) -> Result; /// Resolves an object, `prefetched_object` is `Some` if the parent already calculated the value. fn resolve_object( &self, prefetched_object: Option, - field: &q::Field, + field: &a::Field, field_definition: &s::Field, object_type: ObjectOrInterface<'_>, - arguments: &HashMap<&str, r::Value>, ) -> Result; /// Resolves an enum value for a given enum type. 
fn resolve_enum_value( &self, - _field: &q::Field, + _field: &a::Field, _enum_type: &s::EnumType, value: Option, ) -> Result { @@ -56,10 +53,9 @@ pub trait Resolver: Sized + Send + Sync + 'static { fn resolve_scalar_value( &self, _parent_object_type: &s::ObjectType, - _field: &q::Field, + _field: &a::Field, _scalar_type: &s::ScalarType, value: Option, - _argument_values: &HashMap<&str, r::Value>, ) -> Result { // This code is duplicated. // See also c2112309-44fd-4a84-92a0-5a651e6ed548 @@ -69,7 +65,7 @@ pub trait Resolver: Sized + Send + Sync + 'static { /// Resolves a list of enum values for a given enum type. fn resolve_enum_values( &self, - _field: &q::Field, + _field: &a::Field, _enum_type: &s::EnumType, value: Option, ) -> Result> { @@ -79,7 +75,7 @@ pub trait Resolver: Sized + Send + Sync + 'static { /// Resolves a list of scalar values for a given list type. fn resolve_scalar_values( &self, - _field: &q::Field, + _field: &a::Field, _scalar_type: &s::ScalarType, value: Option, ) -> Result> { @@ -89,13 +85,13 @@ pub trait Resolver: Sized + Send + Sync + 'static { // Resolves an abstract type into the specific type of an object. fn resolve_abstract_type<'a>( &self, - schema: &'a s::Document, + schema: &'a ApiSchema, _abstract_type: &s::TypeDefinition, object_value: &r::Value, ) -> Option<&'a s::ObjectType> { let concrete_type_name = match object_value { // All objects contain `__typename` - r::Value::Object(data) => match &data["__typename"] { + r::Value::Object(data) => match &data.get("__typename").unwrap() { r::Value::String(name) => name.clone(), _ => unreachable!("__typename must be a string"), }, @@ -112,9 +108,9 @@ pub trait Resolver: Sized + Send + Sync + 'static { // Resolves a change stream for a given field. 
fn resolve_field_stream( &self, - _schema: &s::Document, + _schema: &ApiSchema, _object_type: &s::ObjectType, - _field: &q::Field, + _field: &a::Field, ) -> Result { Err(QueryExecutionError::NotSupported(String::from( "Resolving field streams is not supported by this resolver", diff --git a/graphql/src/introspection/resolver.rs b/graphql/src/introspection/resolver.rs index 17867ba7757..19a73df3168 100644 --- a/graphql/src/introspection/resolver.rs +++ b/graphql/src/introspection/resolver.rs @@ -1,26 +1,31 @@ +use graph::data::graphql::ext::{FieldExt, TypeDefinitionExt}; use graphql_parser::Pos; -use std::collections::{BTreeMap, HashMap}; +use std::collections::BTreeMap; use graph::data::graphql::{object, DocumentExt, ObjectOrInterface}; use graph::prelude::*; +use crate::execution::ast as a; use crate::prelude::*; use crate::schema::ast as sast; type TypeObjectsMap = BTreeMap; +/// Our Schema has the introspection schema mixed in. When we build the +/// `TypeObjectsMap`, suppress types and fields that belong to the +/// introspection schema fn schema_type_objects(schema: &Schema) -> TypeObjectsMap { - sast::get_type_definitions(&schema.document).iter().fold( - BTreeMap::new(), - |mut type_objects, typedef| { + sast::get_type_definitions(&schema.document) + .iter() + .filter(|def| !def.is_introspection()) + .fold(BTreeMap::new(), |mut type_objects, typedef| { let type_name = sast::get_type_name(typedef); if !type_objects.contains_key(type_name) { let type_object = type_definition_object(schema, &mut type_objects, typedef); type_objects.insert(type_name.to_owned(), type_object); } type_objects - }, - ) + }) } fn type_object(schema: &Schema, type_objects: &mut TypeObjectsMap, t: &s::Type) -> r::Value { @@ -166,6 +171,7 @@ fn field_objects( r::Value::List( fields .iter() + .filter(|field| !field.is_introspection()) .map(|field| field_object(schema, type_objects, field)) .collect(), ) @@ -359,7 +365,7 @@ impl Resolver for IntrospectionResolver { fn prefetch( &self, _: 
&ExecutionContext, - _: &q::SelectionSet, + _: &a::SelectionSet, ) -> Result, Vec> { Ok(None) } @@ -367,10 +373,9 @@ impl Resolver for IntrospectionResolver { fn resolve_objects( &self, prefetched_objects: Option, - field: &q::Field, + field: &a::Field, _field_definition: &s::Field, _object_type: ObjectOrInterface<'_>, - _arguments: &HashMap<&str, r::Value>, ) -> Result { match field.name.as_str() { "possibleTypes" => { @@ -409,15 +414,14 @@ impl Resolver for IntrospectionResolver { fn resolve_object( &self, prefetched_object: Option, - field: &q::Field, + field: &a::Field, _field_definition: &s::Field, _object_type: ObjectOrInterface<'_>, - arguments: &HashMap<&str, r::Value>, ) -> Result { let object = match field.name.as_str() { "__schema" => self.schema_object(), "__type" => { - let name = arguments.get("name").ok_or_else(|| { + let name = field.argument_value("name").ok_or_else(|| { QueryExecutionError::MissingArgumentError( Pos::default(), "missing argument `name` in `__type(name: String!)`".to_owned(), diff --git a/graphql/src/introspection/schema.rs b/graphql/src/introspection/schema.rs index 035eae34c0b..e780948b73e 100644 --- a/graphql/src/introspection/schema.rs +++ b/graphql/src/introspection/schema.rs @@ -1,13 +1,17 @@ +use std::sync::Arc; + use graphql_parser; use graph::data::graphql::ext::DocumentExt; use graph::data::graphql::ext::ObjectTypeExt; use graph::data::schema::{ApiSchema, Schema}; use graph::data::subgraph::DeploymentHash; -use graph::prelude::s::{Document, ObjectType}; +use graph::prelude::s::Document; use lazy_static::lazy_static; +use crate::schema::ast as sast; + const INTROSPECTION_SCHEMA: &str = " scalar Boolean scalar Float @@ -117,8 +121,12 @@ enum __DirectiveLocation { lazy_static! 
{ pub static ref INTROSPECTION_DOCUMENT: Document = graphql_parser::parse_schema(INTROSPECTION_SCHEMA).unwrap(); - pub static ref INTROSPECTION_QUERY_TYPE: &'static ObjectType = - INTROSPECTION_DOCUMENT.get_root_query_type().unwrap(); + pub static ref INTROSPECTION_QUERY_TYPE: sast::ObjectType = sast::ObjectType::from(Arc::new( + INTROSPECTION_DOCUMENT + .get_root_query_type() + .unwrap() + .clone() + )); } pub fn introspection_schema(id: DeploymentHash) -> ApiSchema { diff --git a/graphql/src/lib.rs b/graphql/src/lib.rs index e74ecbfd527..c82deb7e4f0 100644 --- a/graphql/src/lib.rs +++ b/graphql/src/lib.rs @@ -26,11 +26,11 @@ mod runner; /// Prelude that exports the most important traits and types. pub mod prelude { - pub use super::execution::{ExecutionContext, Query, Resolver}; + pub use super::execution::{ast as a, ExecutionContext, Query, Resolver}; pub use super::introspection::{introspection_schema, IntrospectionResolver}; pub use super::query::{execute_query, ext::BlockConstraint, QueryExecutionOptions}; pub use super::schema::{api_schema, APISchemaError}; - pub use super::store::{build_query, StoreResolver}; + pub use super::store::StoreResolver; pub use super::subscription::SubscriptionExecutionOptions; pub use super::values::MaybeCoercible; diff --git a/graphql/src/query/ast.rs b/graphql/src/query/ast.rs index f530de9dc41..a12eaa08748 100644 --- a/graphql/src/query/ast.rs +++ b/graphql/src/query/ast.rs @@ -1,5 +1,4 @@ -use graph::prelude::{q::*, r}; -use std::collections::HashMap; +use graph::prelude::q::*; use std::ops::Deref; use graph::prelude::QueryExecutionError; @@ -63,71 +62,6 @@ pub fn get_argument_value<'a>(arguments: &'a [(String, Value)], name: &str) -> O arguments.iter().find(|(n, _)| n == name).map(|(_, v)| v) } -/// Returns true if a selection should be skipped (as per the `@skip` directive). 
-pub fn skip_selection(selection: &Selection, variables: &HashMap) -> bool { - match get_directive(selection, "skip".to_string()) { - Some(directive) => match get_argument_value(&directive.arguments, "if") { - Some(val) => match val { - // Skip if @skip(if: true) - Value::Boolean(skip_if) => *skip_if, - - // Also skip if @skip(if: $variable) where $variable is true - Value::Variable(name) => variables.get(name).map_or(false, |var| match var { - r::Value::Boolean(v) => v.to_owned(), - _ => false, - }), - - _ => false, - }, - None => true, - }, - None => false, - } -} - -/// Returns true if a selection should be included (as per the `@include` directive). -pub fn include_selection(selection: &Selection, variables: &HashMap) -> bool { - match get_directive(selection, "include".to_string()) { - Some(directive) => match get_argument_value(&directive.arguments, "if") { - Some(val) => match val { - // Include if @include(if: true) - Value::Boolean(include) => *include, - - // Also include if @include(if: $variable) where $variable is true - Value::Variable(name) => variables.get(name).map_or(false, |var| match var { - r::Value::Boolean(v) => v.to_owned(), - _ => false, - }), - - _ => false, - }, - None => true, - }, - None => true, - } -} - -/// Returns the response key of a field, which is either its name or its alias (if there is one). -pub fn get_response_key(field: &Field) -> &str { - field - .alias - .as_ref() - .map(Deref::deref) - .unwrap_or(field.name.as_str()) -} - -/// Returns up the fragment with the given name, if it exists. -pub fn get_fragment<'a>(document: &'a Document, name: &String) -> Option<&'a FragmentDefinition> { - document - .definitions - .iter() - .filter_map(|d| match d { - Definition::Fragment(fd) => Some(fd), - _ => None, - }) - .find(|fd| &fd.name == name) -} - /// Returns the variable definitions for an operation. 
pub fn get_variable_definitions( operation: &OperationDefinition, diff --git a/graphql/src/query/ext.rs b/graphql/src/query/ext.rs index 1826923f654..2071a0eeec6 100644 --- a/graphql/src/query/ext.rs +++ b/graphql/src/query/ext.rs @@ -54,7 +54,7 @@ impl ValueExt for q::Value { } } -#[derive(PartialEq, Eq, Hash, Debug)] +#[derive(Clone, PartialEq, Eq, Hash, Debug)] pub enum BlockConstraint { Hash(H256), Number(BlockNumber), diff --git a/graphql/src/query/mod.rs b/graphql/src/query/mod.rs index 2eaf0262e7b..b8f9b3fa8c5 100644 --- a/graphql/src/query/mod.rs +++ b/graphql/src/query/mod.rs @@ -1,10 +1,10 @@ -use graph::prelude::{q, BlockPtr, CheapClone, QueryExecutionError, QueryResult}; +use graph::prelude::{BlockPtr, CheapClone, QueryExecutionError, QueryResult}; use std::sync::Arc; use std::time::Instant; use graph::data::graphql::effort::LoadManager; -use crate::execution::*; +use crate::execution::{ast as a, *}; /// Utilities for working with GraphQL query ASTs. pub mod ast; @@ -33,7 +33,7 @@ pub struct QueryExecutionOptions { /// If the query is not cacheable, the `Arc` may be unwrapped. pub async fn execute_query( query: Arc, - selection_set: Option, + selection_set: Option, block_ptr: Option, options: QueryExecutionOptions, ) -> Arc @@ -61,7 +61,7 @@ where .unwrap_or_else(|| query.selection_set.cheap_clone()); // Execute top-level `query { ... }` and `{ ... }` expressions. - let query_type = ctx.query.schema.query_type.cheap_clone(); + let query_type = ctx.query.schema.query_type.cheap_clone().into(); let start = Instant::now(); let result = execute_root_selection_set( ctx.cheap_clone(), diff --git a/graphql/src/schema/api.rs b/graphql/src/schema/api.rs index 0aa9456b175..ed759a60570 100644 --- a/graphql/src/schema/api.rs +++ b/graphql/src/schema/api.rs @@ -73,9 +73,8 @@ impl TryFrom<&r::Value> for ErrorPolicy { /// Derives a full-fledged GraphQL API schema from an input schema. 
/// /// The input schema should only have type/enum/interface/union definitions -/// and must not include a root Query type. This Query type is derived, -/// with all its fields and their input arguments, based on the existing -/// types. +/// and must not include a root Query type. This Query type is derived, with +/// all its fields and their input arguments, based on the existing types. pub fn api_schema(input_schema: &Document) -> Result { // Refactor: Take `input_schema` by value. let object_types = input_schema.get_object_type_definitions(); @@ -531,9 +530,9 @@ fn add_query_type( let mut fields = object_types .iter() - .map(|t| &t.name) + .map(|t| t.name.as_str()) .filter(|name| !name.eq(&SCHEMA_TYPE_NAME)) - .chain(interface_types.iter().map(|t| &t.name)) + .chain(interface_types.iter().map(|t| t.name.as_str())) .flat_map(|name| query_fields_for_type(name)) .collect::>(); let mut fulltext_fields = schema diff --git a/graphql/src/schema/ast.rs b/graphql/src/schema/ast.rs index f56aa3c19ce..b973aa0ca37 100644 --- a/graphql/src/schema/ast.rs +++ b/graphql/src/schema/ast.rs @@ -1,14 +1,17 @@ -use anyhow::anyhow; -use graph::data::graphql::ext::DirectiveFinder; +use graph::cheap_clone::CheapClone; use graphql_parser::Pos; use lazy_static::lazy_static; use std::ops::Deref; use std::str::FromStr; +use std::sync::Arc; -use crate::query::ast as qast; +use graph::data::graphql::ext::DirectiveFinder; use graph::data::graphql::{DocumentExt, ObjectOrInterface}; -use graph::prelude::s::{Value, *}; -use graph::prelude::*; +use graph::data::store; +use graph::prelude::anyhow::{anyhow, Context}; +use graph::prelude::{s, Entity, EntityKey, Error, ValueType}; + +use crate::query::ast as qast; pub(crate) enum FilterOp { Not, @@ -50,30 +53,81 @@ pub(crate) fn parse_field_as_filter(key: &str) -> (String, FilterOp) { (key.trim_end_matches(suffix).to_owned(), op) } -pub fn get_root_query_type_def(schema: &Document) -> Option<&TypeDefinition> { - 
schema.definitions.iter().find_map(|d| match d { - Definition::TypeDefinition(def @ TypeDefinition::Object(_)) => match def { - TypeDefinition::Object(t) if t.name == "Query" => Some(def), - _ => None, - }, - _ => None, - }) +/// An `ObjectType` with `Hash` and `Eq` derived from the name. +#[derive(Clone, Debug)] +pub struct ObjectType(Arc); + +impl Ord for ObjectType { + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + self.0.name.cmp(&other.0.name) + } +} + +impl PartialOrd for ObjectType { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.0.name.cmp(&other.0.name)) + } +} + +impl std::hash::Hash for ObjectType { + fn hash(&self, state: &mut H) { + self.0.name.hash(state) + } +} + +impl PartialEq for ObjectType { + fn eq(&self, other: &Self) -> bool { + self.0.name.eq(&other.0.name) + } +} + +impl Eq for ObjectType {} + +impl From> for ObjectType { + fn from(object: Arc) -> Self { + ObjectType(object) + } +} + +impl<'a> From<&'a ObjectType> for ObjectOrInterface<'a> { + fn from(cond: &'a ObjectType) -> Self { + ObjectOrInterface::Object(cond.0.as_ref()) + } +} + +impl Deref for ObjectType { + type Target = s::ObjectType; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl CheapClone for ObjectType {} + +impl ObjectType { + pub fn name(&self) -> &str { + &self.0.name + } } /// Returns all type definitions in the schema. -pub fn get_type_definitions(schema: &Document) -> Vec<&TypeDefinition> { +pub fn get_type_definitions(schema: &s::Document) -> Vec<&s::TypeDefinition> { schema .definitions .iter() .filter_map(|d| match d { - Definition::TypeDefinition(typedef) => Some(typedef), + s::Definition::TypeDefinition(typedef) => Some(typedef), _ => None, }) .collect() } /// Returns the object type with the given name. 
-pub fn get_object_type_mut<'a>(schema: &'a mut Document, name: &str) -> Option<&'a mut ObjectType> { +pub fn get_object_type_mut<'a>( + schema: &'a mut s::Document, + name: &str, +) -> Option<&'a mut s::ObjectType> { use graphql_parser::schema::TypeDefinition::*; get_named_type_definition_mut(schema, name).and_then(|type_def| match type_def { @@ -84,9 +138,9 @@ pub fn get_object_type_mut<'a>(schema: &'a mut Document, name: &str) -> Option<& /// Returns the interface type with the given name. pub fn get_interface_type_mut<'a>( - schema: &'a mut Document, + schema: &'a mut s::Document, name: &str, -) -> Option<&'a mut InterfaceType> { +) -> Option<&'a mut s::InterfaceType> { use graphql_parser::schema::TypeDefinition::*; get_named_type_definition_mut(schema, name).and_then(|type_def| match type_def { @@ -99,13 +153,13 @@ pub fn get_interface_type_mut<'a>( pub fn get_field<'a>( object_type: impl Into>, name: &str, -) -> Option<&'a Field> { +) -> Option<&'a s::Field> { lazy_static! { - pub static ref TYPENAME_FIELD: Field = Field { + pub static ref TYPENAME_FIELD: s::Field = s::Field { position: Pos::default(), description: None, name: "__typename".to_owned(), - field_type: Type::NonNullType(Box::new(Type::NamedType("String".to_owned()))), + field_type: s::Type::NonNullType(Box::new(s::Type::NamedType("String".to_owned()))), arguments: vec![], directives: vec![], }; @@ -123,66 +177,54 @@ pub fn get_field<'a>( } /// Returns the value type for a GraphQL field type. 
-pub fn get_field_value_type(field_type: &Type) -> Result { +pub fn get_field_value_type(field_type: &s::Type) -> Result { match field_type { - Type::NamedType(ref name) => ValueType::from_str(&name), - Type::NonNullType(inner) => get_field_value_type(&inner), - Type::ListType(_) => Err(anyhow!("Only scalar values are supported in this context")), + s::Type::NamedType(ref name) => ValueType::from_str(&name), + s::Type::NonNullType(inner) => get_field_value_type(&inner), + s::Type::ListType(_) => Err(anyhow!("Only scalar values are supported in this context")), } } /// Returns the value type for a GraphQL field type. -pub fn get_field_name(field_type: &Type) -> String { +pub fn get_field_name(field_type: &s::Type) -> String { match field_type { - Type::NamedType(name) => name.to_string(), - Type::NonNullType(inner) => get_field_name(&inner), - Type::ListType(inner) => get_field_name(&inner), + s::Type::NamedType(name) => name.to_string(), + s::Type::NonNullType(inner) => get_field_name(&inner), + s::Type::ListType(inner) => get_field_name(&inner), } } /// Returns a mutable version of the type with the given name. 
-pub fn get_named_type_definition_mut<'a>( - schema: &'a mut Document, +fn get_named_type_definition_mut<'a>( + schema: &'a mut s::Document, name: &str, -) -> Option<&'a mut TypeDefinition> { +) -> Option<&'a mut s::TypeDefinition> { schema .definitions .iter_mut() .filter_map(|def| match def { - Definition::TypeDefinition(typedef) => Some(typedef), + s::Definition::TypeDefinition(typedef) => Some(typedef), _ => None, }) .find(|typedef| match typedef { - TypeDefinition::Object(t) => &t.name == name, - TypeDefinition::Enum(t) => &t.name == name, - TypeDefinition::InputObject(t) => &t.name == name, - TypeDefinition::Interface(t) => &t.name == name, - TypeDefinition::Scalar(t) => &t.name == name, - TypeDefinition::Union(t) => &t.name == name, + s::TypeDefinition::Object(t) => &t.name == name, + s::TypeDefinition::Enum(t) => &t.name == name, + s::TypeDefinition::InputObject(t) => &t.name == name, + s::TypeDefinition::Interface(t) => &t.name == name, + s::TypeDefinition::Scalar(t) => &t.name == name, + s::TypeDefinition::Union(t) => &t.name == name, }) } /// Returns the name of a type. -pub fn get_type_name(t: &TypeDefinition) -> &str { - match t { - TypeDefinition::Enum(t) => &t.name, - TypeDefinition::InputObject(t) => &t.name, - TypeDefinition::Interface(t) => &t.name, - TypeDefinition::Object(t) => &t.name, - TypeDefinition::Scalar(t) => &t.name, - TypeDefinition::Union(t) => &t.name, - } -} - -/// Returns the description of a type. 
-pub fn get_type_description(t: &TypeDefinition) -> &Option { +pub fn get_type_name(t: &s::TypeDefinition) -> &str { match t { - TypeDefinition::Enum(t) => &t.description, - TypeDefinition::InputObject(t) => &t.description, - TypeDefinition::Interface(t) => &t.description, - TypeDefinition::Object(t) => &t.description, - TypeDefinition::Scalar(t) => &t.description, - TypeDefinition::Union(t) => &t.description, + s::TypeDefinition::Enum(t) => &t.name, + s::TypeDefinition::InputObject(t) => &t.name, + s::TypeDefinition::Interface(t) => &t.name, + s::TypeDefinition::Object(t) => &t.name, + s::TypeDefinition::Scalar(t) => &t.name, + s::TypeDefinition::Union(t) => &t.name, } } @@ -190,13 +232,13 @@ pub fn get_type_description(t: &TypeDefinition) -> &Option { pub fn get_argument_definitions<'a>( object_type: impl Into>, name: &str, -) -> Option<&'a Vec> { +) -> Option<&'a Vec> { lazy_static! { - pub static ref NAME_ARGUMENT: Vec = vec![InputValue { + pub static ref NAME_ARGUMENT: Vec = vec![s::InputValue { position: Pos::default(), description: None, name: "name".to_owned(), - value_type: Type::NonNullType(Box::new(Type::NamedType("String".to_owned()))), + value_type: s::Type::NonNullType(Box::new(s::Type::NamedType("String".to_owned()))), default_value: None, directives: vec![], }]; @@ -210,28 +252,23 @@ pub fn get_argument_definitions<'a>( } } -/// Returns the type definition that a field type corresponds to. -pub fn get_type_definition_from_field<'a>( - schema: &'a Document, - field: &Field, -) -> Option<&'a TypeDefinition> { - get_type_definition_from_type(schema, &field.field_type) -} - /// Returns the type definition for a type. 
pub fn get_type_definition_from_type<'a>( - schema: &'a Document, - t: &Type, -) -> Option<&'a TypeDefinition> { + schema: &'a s::Document, + t: &s::Type, +) -> Option<&'a s::TypeDefinition> { match t { - Type::NamedType(name) => schema.get_named_type(name), - Type::ListType(inner) => get_type_definition_from_type(schema, inner), - Type::NonNullType(inner) => get_type_definition_from_type(schema, inner), + s::Type::NamedType(name) => schema.get_named_type(name), + s::Type::ListType(inner) => get_type_definition_from_type(schema, inner), + s::Type::NonNullType(inner) => get_type_definition_from_type(schema, inner), } } /// Looks up a directive in a object type, if it is provided. -pub fn get_object_type_directive(object_type: &ObjectType, name: String) -> Option<&Directive> { +pub fn get_object_type_directive( + object_type: &s::ObjectType, + name: String, +) -> Option<&s::Directive> { object_type .directives .iter() @@ -239,9 +276,9 @@ pub fn get_object_type_directive(object_type: &ObjectType, name: String) -> Opti } // Returns true if the given type is a non-null type. -pub fn is_non_null_type(t: &Type) -> bool { +pub fn is_non_null_type(t: &s::Type) -> bool { match t { - Type::NonNullType(_) => true, + s::Type::NonNullType(_) => true, _ => false, } } @@ -250,46 +287,42 @@ pub fn is_non_null_type(t: &Type) -> bool { /// /// Uses the algorithm outlined on /// https://facebook.github.io/graphql/draft/#IsInputType(). 
-pub fn is_input_type(schema: &Document, t: &Type) -> bool { - use graphql_parser::schema::TypeDefinition::*; - +pub fn is_input_type(schema: &s::Document, t: &s::Type) -> bool { match t { - Type::NamedType(name) => { + s::Type::NamedType(name) => { let named_type = schema.get_named_type(name); named_type.map_or(false, |type_def| match type_def { - Scalar(_) | Enum(_) | InputObject(_) => true, + s::TypeDefinition::Scalar(_) + | s::TypeDefinition::Enum(_) + | s::TypeDefinition::InputObject(_) => true, _ => false, }) } - Type::ListType(inner) => is_input_type(schema, inner), - Type::NonNullType(inner) => is_input_type(schema, inner), + s::Type::ListType(inner) => is_input_type(schema, inner), + s::Type::NonNullType(inner) => is_input_type(schema, inner), } } -pub fn is_entity_type(schema: &Document, t: &Type) -> bool { - use graphql_parser::schema::Type::*; - +pub fn is_entity_type(schema: &s::Document, t: &s::Type) -> bool { match t { - NamedType(name) => schema + s::Type::NamedType(name) => schema .get_named_type(&name) .map_or(false, is_entity_type_definition), - ListType(inner_type) => is_entity_type(schema, inner_type), - NonNullType(inner_type) => is_entity_type(schema, inner_type), + s::Type::ListType(inner_type) => is_entity_type(schema, inner_type), + s::Type::NonNullType(inner_type) => is_entity_type(schema, inner_type), } } -pub fn is_entity_type_definition(type_def: &TypeDefinition) -> bool { - use graphql_parser::schema::TypeDefinition::*; - +pub fn is_entity_type_definition(type_def: &s::TypeDefinition) -> bool { match type_def { // Entity types are obvious - Object(object_type) => { + s::TypeDefinition::Object(object_type) => { get_object_type_directive(object_type, String::from("entity")).is_some() } // For now, we'll assume that only entities can implement interfaces; // thus, any interface type definition is automatically an entity type - Interface(_) => true, + s::TypeDefinition::Interface(_) => true, // Everything else (unions, scalars, enums) are 
not considered entity // types for now @@ -297,62 +330,284 @@ pub fn is_entity_type_definition(type_def: &TypeDefinition) -> bool { } } -pub fn is_list_or_non_null_list_field(field: &Field) -> bool { - use graphql_parser::schema::Type::*; - +pub fn is_list_or_non_null_list_field(field: &s::Field) -> bool { match &field.field_type { - ListType(_) => true, - NonNullType(inner_type) => match inner_type.deref() { - ListType(_) => true, + s::Type::ListType(_) => true, + s::Type::NonNullType(inner_type) => match inner_type.deref() { + s::Type::ListType(_) => true, _ => false, }, _ => false, } } -fn unpack_type<'a>(schema: &'a Document, t: &Type) -> Option<&'a TypeDefinition> { - use graphql_parser::schema::Type::*; - +fn unpack_type<'a>(schema: &'a s::Document, t: &s::Type) -> Option<&'a s::TypeDefinition> { match t { - NamedType(name) => schema.get_named_type(&name), - ListType(inner_type) => unpack_type(schema, inner_type), - NonNullType(inner_type) => unpack_type(schema, inner_type), + s::Type::NamedType(name) => schema.get_named_type(&name), + s::Type::ListType(inner_type) => unpack_type(schema, inner_type), + s::Type::NonNullType(inner_type) => unpack_type(schema, inner_type), } } pub fn get_referenced_entity_type<'a>( - schema: &'a Document, - field: &Field, -) -> Option<&'a TypeDefinition> { + schema: &'a s::Document, + field: &s::Field, +) -> Option<&'a s::TypeDefinition> { unpack_type(schema, &field.field_type).filter(|ty| is_entity_type_definition(ty)) } -pub fn get_input_object_definitions(schema: &Document) -> Vec { - schema - .definitions - .iter() - .filter_map(|d| match d { - Definition::TypeDefinition(TypeDefinition::InputObject(t)) => Some(t.clone()), - _ => None, - }) - .collect() -} - /// If the field has a `@derivedFrom(field: "foo")` directive, obtain the /// name of the field (e.g. 
`"foo"`) -pub fn get_derived_from_directive<'a>(field_definition: &Field) -> Option<&Directive> { +pub fn get_derived_from_directive<'a>(field_definition: &s::Field) -> Option<&s::Directive> { field_definition.find_directive("derivedFrom") } pub fn get_derived_from_field<'a>( object_type: impl Into>, - field_definition: &'a Field, -) -> Option<&'a Field> { + field_definition: &'a s::Field, +) -> Option<&'a s::Field> { get_derived_from_directive(field_definition) .and_then(|directive| qast::get_argument_value(&directive.arguments, "field")) .and_then(|value| match value { - Value::String(s) => Some(s), + s::Value::String(s) => Some(s), _ => None, }) .and_then(|derived_from_field_name| get_field(object_type, derived_from_field_name)) } + +fn scalar_value_type(schema: &s::Document, field_type: &s::Type) -> ValueType { + use s::TypeDefinition as t; + match field_type { + s::Type::NamedType(name) => { + ValueType::from_str(&name).unwrap_or_else(|_| match schema.get_named_type(name) { + Some(t::Object(_)) | Some(t::Interface(_)) | Some(t::Enum(_)) => ValueType::String, + Some(t::Scalar(_)) => unreachable!("user-defined scalars are not used"), + Some(t::Union(_)) => unreachable!("unions are not used"), + Some(t::InputObject(_)) => unreachable!("inputObjects are not used"), + None => unreachable!("names of field types have been validated"), + }) + } + s::Type::NonNullType(inner) => scalar_value_type(schema, inner), + s::Type::ListType(inner) => scalar_value_type(schema, inner), + } +} + +pub fn is_list(field_type: &s::Type) -> bool { + match field_type { + s::Type::NamedType(_) => false, + s::Type::NonNullType(inner) => is_list(inner), + s::Type::ListType(_) => true, + } +} + +fn is_assignable(value: &store::Value, scalar_type: &ValueType, is_list: bool) -> bool { + match (value, scalar_type) { + (store::Value::String(_), ValueType::String) + | (store::Value::BigDecimal(_), ValueType::BigDecimal) + | (store::Value::BigInt(_), ValueType::BigInt) + | (store::Value::Bool(_), 
ValueType::Boolean) + | (store::Value::Bytes(_), ValueType::Bytes) + | (store::Value::Int(_), ValueType::Int) + | (store::Value::Null, _) => true, + (store::Value::List(values), _) if is_list => values + .iter() + .all(|value| is_assignable(value, scalar_type, false)), + _ => false, + } +} + +pub fn validate_entity( + schema: &s::Document, + key: &EntityKey, + entity: &Entity, +) -> Result<(), anyhow::Error> { + let object_type_definitions = schema.get_object_type_definitions(); + let object_type = object_type_definitions + .iter() + .find(|object_type| key.entity_type.as_str() == &object_type.name) + .with_context(|| { + format!( + "Entity {}[{}]: unknown entity type `{}`", + key.entity_type, key.entity_id, key.entity_type + ) + })?; + + for field in &object_type.fields { + let is_derived = field.is_derived(); + match (entity.get(&field.name), is_derived) { + (Some(value), false) => { + let scalar_type = scalar_value_type(schema, &field.field_type); + if is_list(&field.field_type) { + // Check for inhomogeneous lists to produce a better + // error message for them; other problems, like + // assigning a scalar to a list will be caught below + if let store::Value::List(elts) = value { + for (index, elt) in elts.iter().enumerate() { + if !is_assignable(elt, &scalar_type, false) { + anyhow::bail!( + "Entity {}[{}]: field `{}` is of type {}, but the value `{}` \ + contains a {} at index {}", + key.entity_type, + key.entity_id, + field.name, + &field.field_type, + value, + elt.type_name(), + index + ); + } + } + } + } + if !is_assignable(value, &scalar_type, is_list(&field.field_type)) { + anyhow::bail!( + "Entity {}[{}]: the value `{}` for field `{}` must have type {} but has type {}", + key.entity_type, + key.entity_id, + value, + field.name, + &field.field_type, + value.type_name() + ); + } + } + (None, false) => { + if is_non_null_type(&field.field_type) { + anyhow::bail!( + "Entity {}[{}]: missing value for non-nullable field `{}`", + key.entity_type, +
key.entity_id, + field.name, + ); + } + } + (Some(_), true) => { + anyhow::bail!( + "Entity {}[{}]: field `{}` is derived and can not be set", + key.entity_type, + key.entity_id, + field.name, + ); + } + (None, true) => { + // derived fields should not be set + } + } + } + Ok(()) +} + +#[test] +fn entity_validation() { + use graph::prelude::DeploymentHash; + + fn make_thing(name: &str) -> Entity { + let mut thing = Entity::new(); + thing.set("id", name); + thing.set("name", name); + thing.set("stuff", "less"); + thing.set("favorite_color", "red"); + thing.set("things", store::Value::List(vec![])); + thing + } + + fn check(thing: Entity, errmsg: &str) { + const DOCUMENT: &str = " + enum Color { red, yellow, blue } + interface Stuff { id: ID!, name: String! } + type Cruft @entity { + id: ID!, + thing: Thing! + } + type Thing @entity { + id: ID!, + name: String!, + favorite_color: Color, + stuff: Stuff, + things: [Thing!]! + # Make sure we do not validate derived fields; it's ok + # to store a thing with a null Cruft + cruft: Cruft! 
@derivedFrom(field: \"thing\") + }"; + let subgraph = DeploymentHash::new("doesntmatter").unwrap(); + let schema = + graph::prelude::Schema::parse(DOCUMENT, subgraph).expect("Failed to parse test schema"); + let id = thing.id().unwrap_or("none".to_owned()); + let key = EntityKey::data( + DeploymentHash::new("doesntmatter").unwrap(), + "Thing".to_owned(), + id.to_owned(), + ); + + let err = validate_entity(&schema.document, &key, &thing); + if errmsg == "" { + assert!( + err.is_ok(), + "checking entity {}: expected ok but got {}", + id, + err.unwrap_err() + ); + } else { + if let Err(e) = err { + assert_eq!(errmsg, e.to_string(), "checking entity {}", id); + } else { + panic!( + "Expected error `{}` but got ok when checking entity {}", + errmsg, id + ); + } + } + } + + let mut thing = make_thing("t1"); + thing.set("things", store::Value::from(vec!["thing1", "thing2"])); + check(thing, ""); + + let thing = make_thing("t2"); + check(thing, ""); + + let mut thing = make_thing("t3"); + thing.remove("name"); + check( + thing, + "Entity Thing[t3]: missing value for non-nullable field `name`", + ); + + let mut thing = make_thing("t4"); + thing.remove("things"); + check( + thing, + "Entity Thing[t4]: missing value for non-nullable field `things`", + ); + + let mut thing = make_thing("t5"); + thing.set("name", store::Value::Int(32)); + check( + thing, + "Entity Thing[t5]: the value `32` for field `name` must \ + have type String! 
but has type Int", + ); + + let mut thing = make_thing("t6"); + thing.set( + "things", + store::Value::List(vec!["thing1".into(), 17.into()]), + ); + check( + thing, + "Entity Thing[t6]: field `things` is of type [Thing!]!, \ + but the value `[thing1, 17]` contains a Int at index 1", + ); + + let mut thing = make_thing("t7"); + thing.remove("favorite_color"); + thing.remove("stuff"); + check(thing, ""); + + let mut thing = make_thing("t8"); + thing.set("cruft", "wat"); + check( + thing, + "Entity Thing[t8]: field `cruft` is derived and can not be set", + ); +} diff --git a/graphql/src/store/mod.rs b/graphql/src/store/mod.rs index 135a40bcb26..85ceb4275d7 100644 --- a/graphql/src/store/mod.rs +++ b/graphql/src/store/mod.rs @@ -2,5 +2,5 @@ mod prefetch; mod query; mod resolver; -pub use self::query::{build_query, parse_subgraph_id}; +pub use self::query::parse_subgraph_id; pub use self::resolver::StoreResolver; diff --git a/graphql/src/store/prefetch.rs b/graphql/src/store/prefetch.rs index 8229b44c540..06a63858d63 100644 --- a/graphql/src/store/prefetch.rs +++ b/graphql/src/store/prefetch.rs @@ -3,13 +3,12 @@ use anyhow::{anyhow, Error}; use graph::constraint_violation; +use graph::data::value::Object; use graph::prelude::{r, CacheWeight}; use graph::slog::warn; use graph::util::cache_weight; -use indexmap::IndexMap; use lazy_static::lazy_static; -use std::collections::{BTreeMap, HashMap, HashSet}; -use std::iter::once; +use std::collections::BTreeMap; use std::rc::Rc; use std::time::Instant; @@ -17,17 +16,17 @@ use graph::{components::store::EntityType, data::graphql::*}; use graph::{ data::graphql::ext::DirectiveFinder, prelude::{ - q, s, ApiSchema, AttributeNames, BlockNumber, ChildMultiplicity, EntityCollection, + s, ApiSchema, AttributeNames, BlockNumber, ChildMultiplicity, EntityCollection, EntityFilter, EntityLink, EntityOrder, EntityWindow, Logger, ParentLink, QueryExecutionError, QueryStore, StoreError, Value as StoreValue, WindowAttribute, }, }; -use 
crate::execution::{ExecutionContext, Resolver}; -use crate::query::ast::{self as qast, get_argument_value}; +use crate::execution::{ast as a, ExecutionContext, Resolver}; use crate::runner::ResultSizeMetrics; use crate::schema::ast as sast; -use crate::store::{build_query, StoreResolver}; +use crate::store::query::build_query; +use crate::store::StoreResolver; lazy_static! { static ref ARG_FIRST: String = String::from("first"); @@ -48,48 +47,6 @@ lazy_static! { .unwrap_or(std::usize::MAX); } -type GroupedFieldSet<'a> = IndexMap<&'a str, CollectedResponseKey<'a>>; - -/// Used for associating objects or interfaces and the field names used in `orderBy` query field -/// attributes. -type ComplementaryFields<'a> = BTreeMap, String>; - -/// An `ObjectType` with `Hash` and `Eq` derived from the name. -#[derive(Clone, Debug)] -pub struct ObjectCondition<'a>(&'a s::ObjectType); - -impl<'a> Ord for ObjectCondition<'a> { - fn cmp(&self, other: &Self) -> std::cmp::Ordering { - self.0.name.cmp(&other.0.name) - } -} - -impl<'a> PartialOrd for ObjectCondition<'a> { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.0.name.cmp(&other.0.name)) - } -} - -impl std::hash::Hash for ObjectCondition<'_> { - fn hash(&self, state: &mut H) { - self.0.name.hash(state) - } -} - -impl PartialEq for ObjectCondition<'_> { - fn eq(&self, other: &Self) -> bool { - self.0.name.eq(&other.0.name) - } -} - -impl Eq for ObjectCondition<'_> {} - -impl<'a> From<&'a s::ObjectType> for ObjectCondition<'a> { - fn from(object: &'a s::ObjectType) -> Self { - ObjectCondition(object) - } -} - /// Intermediate data structure to hold the results of prefetching entities /// and their nested associations. For each association of `entity`, `children` /// has an entry mapping the response key to the list of nodes. 
@@ -208,7 +165,7 @@ impl From for r::Value { for (key, nodes) in node.children.into_iter() { map.insert(format!("prefetch:{}", key), node_list_as_value(nodes)); } - r::Value::Object(map) + r::Value::object(map) } } @@ -409,25 +366,17 @@ impl<'a> Join<'a> { /// Construct a `Join` based on the parent field pointing to the child fn new( schema: &'a ApiSchema, - parent_type: ObjectOrInterface<'a>, + parent_type: &'a s::ObjectType, child_type: ObjectOrInterface<'a>, field_name: &str, ) -> Self { - let parent_types = parent_type - .object_types(schema.schema()) - .expect("the name of the parent type is valid"); let child_types = child_type .object_types(schema.schema()) .expect("the name of the child type is valid"); - let conds = parent_types + let conds = child_types .iter() - .flat_map::, _>(|parent_type| { - child_types - .iter() - .map(|child_type| JoinCond::new(parent_type, child_type, field_name)) - .collect() - }) + .map(|child_type| JoinCond::new(parent_type, child_type, field_name)) .collect(); Join { child_type, conds } @@ -536,13 +485,12 @@ impl<'a> Join<'a> { pub fn run( resolver: &StoreResolver, ctx: &ExecutionContext, - selection_set: &q::SelectionSet, + selection_set: &a::SelectionSet, result_size: &ResultSizeMetrics, ) -> Result> { execute_root_selection_set(resolver, ctx, selection_set).map(|nodes| { result_size.observe(nodes.weight()); - let map = BTreeMap::default(); - r::Value::Object(nodes.into_iter().fold(map, |mut map, node| { + r::Value::Object(nodes.into_iter().fold(Object::default(), |mut map, node| { // For root nodes, we only care about the children for (key, nodes) in node.children.into_iter() { map.insert(format!("prefetch:{}", key), node_list_as_value(nodes)); @@ -556,21 +504,10 @@ pub fn run( fn execute_root_selection_set( resolver: &StoreResolver, ctx: &ExecutionContext, - selection_set: &q::SelectionSet, + selection_set: &a::SelectionSet, ) -> Result, Vec> { - // Obtain the root Query type and fail if there isn't one - let query_type = 
ctx.query.schema.query_type.as_ref().into(); - let (grouped_field_set, _complementary_fields) = - collect_fields(ctx, query_type, once(selection_set)); - // Execute the root selection set against the root query type - execute_selection_set( - resolver, - ctx, - make_root_node(), - grouped_field_set, - ComplementaryFields::new(), - ) + execute_selection_set(resolver, ctx, make_root_node(), selection_set) } fn check_result_size(logger: &Logger, size: usize) -> Result<(), QueryExecutionError> { @@ -587,14 +524,13 @@ fn execute_selection_set<'a>( resolver: &StoreResolver, ctx: &'a ExecutionContext, mut parents: Vec, - grouped_field_set: GroupedFieldSet<'a>, - mut complementary_fields: ComplementaryFields<'a>, + selection_set: &a::SelectionSet, ) -> Result, Vec> { let schema = &ctx.query.schema; let mut errors: Vec = Vec::new(); // Process all field groups in order - for (response_key, collected_fields) in grouped_field_set { + for (object_type, fields) in selection_set.interior_fields() { if let Some(deadline) = ctx.deadline { if deadline < Instant::now() { errors.push(QueryExecutionError::Timeout); @@ -602,38 +538,34 @@ fn execute_selection_set<'a>( } } - for (type_cond, fields) in collected_fields { - // Filter out parents that do not match the type condition. - let mut parents: Vec<&mut Node> = if is_root_node(parents.iter()) { - parents.iter_mut().collect() - } else { - parents - .iter_mut() - .filter(|p| type_cond.matches(p.typename(), schema.types_for_interface())) - .collect() - }; + // Filter out parents that do not match the type condition. + let mut parents: Vec<&mut Node> = if is_root_node(parents.iter()) { + parents.iter_mut().collect() + } else { + parents + .iter_mut() + .filter(|p| object_type.name == p.typename()) + .collect() + }; - if parents.is_empty() { - continue; - } + if parents.is_empty() { + continue; + } - // Unwrap: The query was validated to contain only valid fields, - // and `collect_fields` will skip introspection fields. 
- let field = type_cond.field(&fields[0].name).unwrap(); + for field in fields { + let field_type = object_type + .field(&field.name) + .expect("field names are valid"); let child_type = schema - .document() - .object_or_interface(field.field_type.get_base_type()) + .object_or_interface(field_type.field_type.get_base_type()) .expect("we only collect fields that are objects or interfaces"); let join = Join::new( ctx.query.schema.as_ref(), - type_cond, + object_type, child_type, &field.name, ); - // Group fields with the same response key, so we can execute them together - let (mut grouped_field_set, new_complementary_fields) = - collect_fields(ctx, child_type, fields.iter().map(|f| &f.selection_set)); // "Select by Specific Attribute Names" is an experimental feature and can be disabled completely. // If this environment variable is set, the program will use an empty collection that, @@ -641,34 +573,24 @@ fn execute_selection_set<'a>( // queries. let collected_columns = if *DISABLE_EXPERIMENTAL_FEATURE_SELECT_BY_SPECIFIC_ATTRIBUTE_NAMES { - BTreeMap::new() + SelectedAttributes(BTreeMap::new()) } else { - let mut collected = - CollectedAttributeNames::consolidate_column_names(&mut grouped_field_set); - collected.populate_complementary_fields(&mut complementary_fields); - collected.resolve_interfaces(&ctx.query.schema.types_for_interface()) + SelectedAttributes::for_field(field)? 
}; match execute_field( resolver, &ctx, - type_cond, &parents, &join, - &fields[0], field, + field_type, collected_columns, ) { Ok(children) => { - match execute_selection_set( - resolver, - ctx, - children, - grouped_field_set, - new_complementary_fields, - ) { + match execute_selection_set(resolver, ctx, children, &field.selection_set) { Ok(children) => { - Join::perform(&mut parents, children, response_key); + Join::perform(&mut parents, children, field.response_key()); let weight = parents.iter().map(|parent| parent.weight()).sum::(); check_result_size(&ctx.logger, weight)?; @@ -683,23 +605,6 @@ fn execute_selection_set<'a>( } } - // Confidence check: all complementary fields must be consumed, otherwise constructed SQL - // queries will be malformed. - if !*DISABLE_EXPERIMENTAL_FEATURE_SELECT_BY_SPECIFIC_ATTRIBUTE_NAMES { - complementary_fields - .into_iter() - .for_each(|(parent, complementary_field)| { - errors.push( - constraint_violation!( - "Complementary field \"{}\" was not prefetched by its parent: {}", - complementary_field, - parent.name().to_string(), - ) - .into(), - ) - }); - } - if errors.is_empty() { Ok(parents) } else { @@ -707,283 +612,16 @@ fn execute_selection_set<'a>( } } -/// If the top-level selection is on an object, there will be a single entry in `obj_types` with all -/// the collected fields. -/// -/// The interesting case is if the top-level selection is an interface. `iface_cond` will be the -/// interface type and `iface_fields` the selected fields on the interface. `obj_types` are the -/// fields selected on objects by fragments. In `collect_fields`, the `iface_fields` will then be -/// merged into each entry in `obj_types`. 
See also: e0d6da3e-60cf-41a5-b83c-b60a7a766d4a -#[derive(Default, Debug)] -struct CollectedResponseKey<'a> { - iface_cond: Option<&'a s::InterfaceType>, - iface_fields: Vec<&'a q::Field>, - obj_types: IndexMap, Vec<&'a q::Field>>, - collected_column_names: CollectedAttributeNames<'a>, -} - -impl<'a> CollectedResponseKey<'a> { - fn collect_field( - &mut self, - document: &s::Document, - object_or_interface: ObjectOrInterface<'a>, - field: &'a q::Field, - ) { - let schema_field = object_or_interface.field(&field.name); - schema_field - .and_then(|field_def| sast::get_type_definition_from_field(document, field_def)) - .map(|type_def| match type_def { - // Only consider fields that point to objects or interfaces, and ignore nonexistent fields. - s::TypeDefinition::Interface(_) | s::TypeDefinition::Object(_) => { - match object_or_interface { - ObjectOrInterface::Interface(i) => { - // `collect_fields` will never call this with two different interfaces types. - assert!( - self.iface_cond.is_none() - || self.iface_cond.map(|x| &x.name) == Some(&i.name) - ); - self.iface_cond = Some(i); - self.iface_fields.push(field); - } - ObjectOrInterface::Object(o) => { - self.obj_types - .entry(ObjectCondition(o)) - .or_default() - .push(field); - } - } - } - s::TypeDefinition::Scalar(_) | s::TypeDefinition::Enum(_) => {} - s::TypeDefinition::Union(_) | s::TypeDefinition::InputObject(_) => {} - }); - - // collect the column name if field exists in schema - if schema_field.is_some() { - self.collected_column_names - .update(object_or_interface, &field) - } - } -} - -impl<'a> IntoIterator for CollectedResponseKey<'a> { - type Item = (ObjectOrInterface<'a>, Vec<&'a q::Field>); - type IntoIter = Box + 'a>; - - fn into_iter(self) -> Self::IntoIter { - // Make sure the interface fields are processed first. 
- // See also: e0d6da3e-60cf-41a5-b83c-b60a7a766d4a - let iface_fields = self.iface_fields; - Box::new( - self.iface_cond - .map(|cond| (ObjectOrInterface::Interface(cond), iface_fields)) - .into_iter() - .chain( - self.obj_types - .into_iter() - .map(|(c, f)| (ObjectOrInterface::Object(c.0), f)), - ), - ) - } -} - -/// Collects fields of a selection set. The resulting map indicates for each -/// response key from which types to fetch what fields to express the effect -/// of fragment spreads -fn collect_fields<'a>( - ctx: &'a ExecutionContext, - parent_ty: ObjectOrInterface<'a>, - selection_sets: impl Iterator, -) -> (GroupedFieldSet<'a>, ComplementaryFields<'a>) { - let mut grouped_fields = IndexMap::new(); - let mut complementary_fields = ComplementaryFields::new(); - - for selection_set in selection_sets { - collect_fields_inner( - ctx, - parent_ty, - selection_set, - &mut HashSet::new(), - &mut grouped_fields, - &mut complementary_fields, - ); - } - - // For interfaces, if a response key occurs both under the interface and under concrete types, - // we want to add the fields selected at the interface level to the selections in the specific - // concrete types, effectively merging the selection sets. - // See also: e0d6da3e-60cf-41a5-b83c-b60a7a766d4a - for collected_response_key in grouped_fields.values_mut() { - for obj_type_fields in collected_response_key.obj_types.values_mut() { - obj_type_fields.extend_from_slice(&collected_response_key.iface_fields) - } - } - - (grouped_fields, complementary_fields) -} - -// When querying an object type, `type_condition` will always be that object type, even if it passes -// through fragments for interfaces which that type implements. -// -// When querying an interface, `type_condition` will start as the interface itself at the root, and -// change to an implementing object type if it passes to a fragment with a concrete type condition. 
-fn collect_fields_inner<'a>( - ctx: &'a ExecutionContext, - type_condition: ObjectOrInterface<'a>, - selection_set: &'a q::SelectionSet, - visited_fragments: &mut HashSet<&'a str>, - output: &mut GroupedFieldSet<'a>, - complementary_fields: &mut ComplementaryFields<'a>, -) { - fn collect_fragment<'a>( - ctx: &'a ExecutionContext, - outer_type_condition: ObjectOrInterface<'a>, - frag_ty_condition: Option<&'a q::TypeCondition>, - frag_selection_set: &'a q::SelectionSet, - visited_fragments: &mut HashSet<&'a str>, - output: &mut GroupedFieldSet<'a>, - complementary_fields: &mut ComplementaryFields<'a>, - ) { - let schema = &ctx.query.schema.document(); - let fragment_ty = match frag_ty_condition { - // Unwrap: Validation ensures this interface exists. - Some(q::TypeCondition::On(ty_name)) if outer_type_condition.is_interface() => { - schema.object_or_interface(ty_name).unwrap() - } - _ => outer_type_condition, - }; - - // The check above makes any type condition on an outer object type redunant. - // A type condition on the same interface as the outer one is also redundant. - let redundant_condition = fragment_ty.name() == outer_type_condition.name(); - if redundant_condition || fragment_ty.is_object() { - collect_fields_inner( - ctx, - fragment_ty, - &frag_selection_set, - visited_fragments, - output, - complementary_fields, - ); - } else { - // This is an interface fragment in the root selection for an interface. - // We deal with this by expanding the fragment into one fragment for - // each type in the intersection between the root interface and the - // interface in the fragment type condition. 
- let types_for_interface = ctx.query.schema.types_for_interface(); - let root_tys = &types_for_interface[&outer_type_condition.into()]; - let fragment_tys = &types_for_interface[&fragment_ty.into()]; - let intersection_tys = root_tys.iter().filter(|root_ty| { - fragment_tys - .iter() - .map(|t| &t.name) - .any(|t| *t == root_ty.name) - }); - for ty in intersection_tys { - collect_fields_inner( - ctx, - ty.into(), - &frag_selection_set, - visited_fragments, - output, - complementary_fields, - ); - } - } - } - - // Only consider selections that are not skipped and should be included - let selections = selection_set - .items - .iter() - .filter(|selection| !qast::skip_selection(selection, &ctx.query.variables)) - .filter(|selection| qast::include_selection(selection, &ctx.query.variables)); - - for selection in selections { - match selection { - q::Selection::Field(ref field) => { - let response_key = qast::get_response_key(field); - output.entry(response_key).or_default().collect_field( - &ctx.query.schema.document(), - type_condition, - field, - ); - - // Collect complementary fields used in the `orderBy` query attribute, if present. 
- if !*DISABLE_EXPERIMENTAL_FEATURE_SELECT_BY_SPECIFIC_ATTRIBUTE_NAMES { - if let Some(arguments) = get_argument_value(&field.arguments, "orderBy") { - let schema_field = type_condition.field(&field.name).expect(&format!( - "the field {:?} to exist in {:?}", - &field.name, - &type_condition.name() - )); - let field_name = sast::get_field_name(&schema_field.field_type); - let object_or_interface_for_field = ctx - .query - .schema - .document() - .object_or_interface(&field_name) - .expect(&format!( - "The field {:?} to exist in the Document", - field_name - )); - match arguments { - graphql_parser::schema::Value::Enum(complementary_field_name) => { - complementary_fields.insert( - object_or_interface_for_field, - complementary_field_name.clone(), - ); - } - _ => unimplemented!("unsure on what to do about other variants"), - } - } - } - } - - q::Selection::FragmentSpread(spread) => { - // Only consider the fragment if it hasn't already been included, - // as would be the case if the same fragment spread ...Foo appeared - // twice in the same selection set - if visited_fragments.insert(&spread.fragment_name) { - let fragment = ctx.query.get_fragment(&spread.fragment_name); - collect_fragment( - ctx, - type_condition, - Some(&fragment.type_condition), - &fragment.selection_set, - visited_fragments, - output, - complementary_fields, - ); - } - } - - q::Selection::InlineFragment(fragment) => { - collect_fragment( - ctx, - type_condition, - fragment.type_condition.as_ref(), - &fragment.selection_set, - visited_fragments, - output, - complementary_fields, - ); - } - } - } -} - /// Executes a field. 
fn execute_field( resolver: &StoreResolver, ctx: &ExecutionContext, - object_type: ObjectOrInterface<'_>, parents: &Vec<&mut Node>, join: &Join<'_>, - field: &q::Field, + field: &a::Field, field_definition: &s::Field, - collected_column_names: AttributeNamesByObjectType<'_>, + selected_attrs: SelectedAttributes, ) -> Result, Vec> { - let argument_values = crate::execution::coerce_argument_values(&ctx.query, object_type, field)?; let multiplicity = if sast::is_list_or_non_null_list_field(field_definition) { ChildMultiplicity::Many } else { @@ -995,14 +633,14 @@ fn execute_field( resolver.store.as_ref(), parents, &join, - argument_values, + field, multiplicity, ctx.query.schema.types_for_interface(), resolver.block_number(), ctx.max_first, ctx.max_skip, ctx.query.query_id.clone(), - collected_column_names, + selected_attrs, ) .map_err(|e| vec![e]) } @@ -1015,23 +653,23 @@ fn fetch( store: &(impl QueryStore + ?Sized), parents: &Vec<&mut Node>, join: &Join<'_>, - arguments: HashMap<&str, r::Value>, + field: &a::Field, multiplicity: ChildMultiplicity, types_for_interface: &BTreeMap>, block: BlockNumber, max_first: u32, max_skip: u32, query_id: String, - collected_column_names: AttributeNamesByObjectType<'_>, + selected_attrs: SelectedAttributes, ) -> Result, QueryExecutionError> { let mut query = build_query( join.child_type, block, - &arguments, + field, types_for_interface, max_first, max_skip, - collected_column_names, + selected_attrs, )?; query.query_id = Some(query_id); @@ -1042,7 +680,7 @@ fn fetch( } query.logger = Some(logger); - if let Some(r::Value::String(id)) = arguments.get(ARG_ID.as_str()) { + if let Some(r::Value::String(id)) = field.argument_value(ARG_ID.as_str()) { query.filter = Some( EntityFilter::Equal(ARG_ID.to_owned(), StoreValue::from(id.to_owned())) .and_maybe(query.filter), @@ -1063,126 +701,52 @@ fn fetch( .map(|entities| entities.into_iter().map(|entity| entity.into()).collect()) } -/// Represents a finished column collection operation, 
mapping each object type to the final set of -/// selected SQL columns. -type AttributeNamesByObjectType<'a> = BTreeMap, AttributeNames>; - #[derive(Debug, Default, Clone)] -struct CollectedAttributeNames<'a>(HashMap, AttributeNames>); - -impl<'a> CollectedAttributeNames<'a> { - fn update(&mut self, object_or_interface: ObjectOrInterface<'a>, field: &q::Field) { - self.0 - .entry(object_or_interface) - .or_insert(AttributeNames::All) - .add(field); - } - - /// Injects complementary fields that were collected priviously in upper hierarchical levels of - /// the query into `self`. - fn populate_complementary_fields( - &mut self, - complementary_fields: &mut ComplementaryFields<'a>, - ) { - for (object_or_interface, selected_attributes) in self.0.iter_mut() { - if let Some(complementary_field_name) = - complementary_fields.remove(&object_or_interface) - { - selected_attributes.add_str(&complementary_field_name) - } +pub(crate) struct SelectedAttributes(BTreeMap); + +impl SelectedAttributes { + /// Extract the attributes we should select from `selection_set`. In + /// particular, disregard derived fields since they are not stored + fn for_field(field: &a::Field) -> Result> { + let mut map = BTreeMap::new(); + for (object_type, fields) in field.selection_set.fields() { + let column_names = fields + .filter(|field| { + // Keep fields that are not derived and for which we + // can find the field type + sast::get_field(object_type, &field.name) + .map(|field_type| !field_type.is_derived()) + .unwrap_or(false) + }) + .map(|field| field.name.clone()) + .collect(); + map.insert( + object_type.name().to_string(), + AttributeNames::Select(column_names), + ); } - } - - /// Consume this instance and transform it into a mapping from - /// `ObjectTypes` to `AttributeNames`, while resolving all interfaces - /// into the `ObjectType`s that implement them. 
- fn resolve_interfaces( - mut self, - types_for_interface: &'a BTreeMap>, - ) -> AttributeNamesByObjectType { - let mut map: AttributeNamesByObjectType = BTreeMap::new(); - for (object_or_interface, column_names) in self.0.drain() { - match object_or_interface { - ObjectOrInterface::Object(object) => { - CollectedAttributeNames::upsert(&mut map, object, column_names); - } - ObjectOrInterface::Interface(interface) => { - for object in &types_for_interface[&EntityType::from(interface)] { - CollectedAttributeNames::upsert(&mut map, object, column_names.clone()); - } + // We need to also select the `orderBy` field if there is one. + // Because of how the API Schema is set up, `orderBy` can only have + // an enum value + match field.argument_value("orderBy") { + None => { /* nothing to do */ } + Some(r::Value::Enum(e)) => { + for columns in map.values_mut() { + columns.add_str(e); } } - } - map - } - - /// Helper function for handling insertion on the `AttributeNamesByObjectType` struct. - fn upsert( - map: &mut BTreeMap, AttributeNames>, - object: &'a s::ObjectType, - column_names: AttributeNames, - ) { - use std::collections::btree_map::Entry; - let key = ObjectCondition(object); - let column_names = filter_derived_fields(column_names, object); - match map.entry(key) { - Entry::Occupied(mut entry) => { - entry.get_mut().extend(column_names); - } - Entry::Vacant(entry) => { - entry.insert(column_names); - } - } - } - - /// Creates a new, combined `CollectedAttributeNames` using drained values from `CollectedAttributeNames` - /// scattered across different `CollectedResponseKey`s. 
- fn consolidate_column_names<'schema, 'collection>( - grouped_field_set: &'collection mut GroupedFieldSet<'schema>, - ) -> CollectedAttributeNames<'schema> { - let mut map: HashMap = HashMap::new(); - for (_, collected_response_key) in grouped_field_set.into_iter() { - for (object_or_interface, column_names) in - collected_response_key.collected_column_names.0.drain() - { - match map.entry(object_or_interface) { - std::collections::hash_map::Entry::Vacant(entry) => { - entry.insert(column_names); - } - std::collections::hash_map::Entry::Occupied(mut entry) => { - entry.get_mut().extend(column_names); - } - } + Some(v) => { + return Err(vec![constraint_violation!( + "'orderBy' attribute must be an enum but is {:?}", + v + ) + .into()]); } } - CollectedAttributeNames(map) + Ok(SelectedAttributes(map)) } -} -/// Removes all derived fields from a `AttributeNames` collection based on a referential `ObjectType`. -fn filter_derived_fields( - column_names_type: AttributeNames, - object: &s::ObjectType, -) -> AttributeNames { - match column_names_type { - AttributeNames::All => column_names_type, - AttributeNames::Select(sql_column_names) => { - let mut filtered = AttributeNames::All; - sql_column_names - .into_iter() - .filter_map(|column_name| { - if let Some(schema_field) = sast::get_field(object, &column_name) { - if !schema_field.is_derived() { - Some(column_name) // field exists and is not derived - } else { - None // field exists and is derived - } - } else { - None // field does not exist - } - }) - .for_each(|col| filtered.add_str(&col)); - filtered - } + pub fn get(&mut self, obj_type: &s::ObjectType) -> AttributeNames { + self.0.remove(&obj_type.name).unwrap_or(AttributeNames::All) } } diff --git a/graphql/src/store/query.rs b/graphql/src/store/query.rs index b32d3c17f8c..8459eb9c87e 100644 --- a/graphql/src/store/query.rs +++ b/graphql/src/store/query.rs @@ -1,11 +1,14 @@ -use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet, VecDeque}; +use 
std::collections::{BTreeMap, BTreeSet, HashSet, VecDeque}; use std::mem::discriminant; +use graph::data::value::Object; use graph::prelude::*; use graph::{components::store::EntityType, data::graphql::ObjectOrInterface}; +use crate::execution::ast as a; use crate::schema::ast as sast; -use crate::store::prefetch::ObjectCondition; + +use super::prefetch::SelectedAttributes; #[derive(Debug)] enum OrderDirection { @@ -16,42 +19,38 @@ enum OrderDirection { /// Builds a EntityQuery from GraphQL arguments. /// /// Panics if `entity` is not present in `schema`. -pub fn build_query<'a>( +pub(crate) fn build_query<'a>( entity: impl Into>, block: BlockNumber, - arguments: &HashMap<&str, r::Value>, + field: &a::Field, types_for_interface: &'a BTreeMap>, max_first: u32, max_skip: u32, - mut column_names: BTreeMap, AttributeNames>, + mut column_names: SelectedAttributes, ) -> Result { let entity = entity.into(); let entity_types = EntityCollection::All(match &entity { ObjectOrInterface::Object(object) => { - let selected_columns = column_names - .remove(&(*object).into()) - .unwrap_or(AttributeNames::All); + let selected_columns = column_names.get(object); vec![((*object).into(), selected_columns)] } ObjectOrInterface::Interface(interface) => types_for_interface [&EntityType::from(*interface)] .iter() .map(|o| { - let selected_columns = column_names - .remove(&o.into()) - .unwrap_or(AttributeNames::All); + let selected_columns = column_names.get(o); (o.into(), selected_columns) }) .collect(), }); let mut query = EntityQuery::new(parse_subgraph_id(entity)?, block, entity_types) - .range(build_range(arguments, max_first, max_skip)?); - if let Some(filter) = build_filter(entity, arguments)? { + .range(build_range(field, max_first, max_skip)?); + if let Some(filter) = build_filter(entity, field)? 
{ query = query.filter(filter); } let order = match ( - build_order_by(entity, arguments)?, - build_order_direction(arguments)?, + build_order_by(entity, field)?, + build_order_direction(field)?, ) { (Some((attr, value_type)), OrderDirection::Ascending) => { EntityOrder::Ascending(attr, value_type) @@ -67,11 +66,11 @@ pub fn build_query<'a>( /// Parses GraphQL arguments into a EntityRange, if present. fn build_range( - arguments: &HashMap<&str, r::Value>, + field: &a::Field, max_first: u32, max_skip: u32, ) -> Result { - let first = match arguments.get("first") { + let first = match field.argument_value("first") { Some(r::Value::Int(n)) => { let n = *n; if n > 0 && n <= (max_first as i64) { @@ -86,7 +85,7 @@ fn build_range( _ => unreachable!("first is an Int with a default value"), }; - let skip = match arguments.get("skip") { + let skip = match field.argument_value("skip") { Some(r::Value::Int(n)) => { let n = *n; if n >= 0 && n <= (max_skip as i64) { @@ -110,12 +109,12 @@ fn build_range( /// Parses GraphQL arguments into an EntityFilter, if present. fn build_filter( entity: ObjectOrInterface, - arguments: &HashMap<&str, r::Value>, + field: &a::Field, ) -> Result, QueryExecutionError> { - match arguments.get("where") { + match field.argument_value("where") { Some(r::Value::Object(object)) => build_filter_from_object(entity, object), Some(r::Value::Null) => Ok(None), - None => match arguments.get("text") { + None => match field.argument_value("text") { Some(r::Value::Object(filter)) => build_fulltext_filter_from_object(filter), None => Ok(None), _ => Err(QueryExecutionError::InvalidFilterError), @@ -125,7 +124,7 @@ fn build_filter( } fn build_fulltext_filter_from_object( - object: &BTreeMap, + object: &Object, ) -> Result, QueryExecutionError> { object.iter().next().map_or( Err(QueryExecutionError::FulltextQueryRequiresFilter), @@ -145,7 +144,7 @@ fn build_fulltext_filter_from_object( /// Parses a GraphQL input object into an EntityFilter, if present. 
fn build_filter_from_object( entity: ObjectOrInterface, - object: &BTreeMap, + object: &Object, ) -> Result, QueryExecutionError> { Ok(Some(EntityFilter::And({ object @@ -217,9 +216,9 @@ fn list_values(value: Value, filter_type: &str) -> Result, QueryExecu /// Parses GraphQL arguments into an field name to order by, if present. fn build_order_by( entity: ObjectOrInterface, - arguments: &HashMap<&str, r::Value>, + field: &a::Field, ) -> Result, QueryExecutionError> { - match arguments.get("orderBy") { + match field.argument_value("orderBy") { Some(r::Value::Enum(name)) => { let field = sast::get_field(entity, name).ok_or_else(|| { QueryExecutionError::EntityFieldError(entity.name().to_owned(), name.clone()) @@ -233,7 +232,7 @@ fn build_order_by( ) }) } - _ => match arguments.get("text") { + _ => match field.argument_value("text") { Some(r::Value::Object(filter)) => build_fulltext_order_by_from_object(filter), None => Ok(None), _ => Err(QueryExecutionError::InvalidFilterError), @@ -242,7 +241,7 @@ fn build_order_by( } fn build_fulltext_order_by_from_object( - object: &BTreeMap, + object: &Object, ) -> Result, QueryExecutionError> { object.iter().next().map_or( Err(QueryExecutionError::FulltextQueryRequiresFilter), @@ -257,11 +256,9 @@ fn build_fulltext_order_by_from_object( } /// Parses GraphQL arguments into a EntityOrder, if present. -fn build_order_direction( - arguments: &HashMap<&str, r::Value>, -) -> Result { - Ok(arguments - .get("orderDirection") +fn build_order_direction(field: &a::Field) -> Result { + Ok(field + .argument_value("orderDirection") .map(|value| match value { r::Value::Enum(name) if name == "asc" => OrderDirection::Ascending, r::Value::Enum(name) if name == "desc" => OrderDirection::Descending, @@ -291,10 +288,10 @@ pub fn parse_subgraph_id<'a>( } /// Recursively collects entities involved in a query field as `(subgraph ID, name)` tuples. 
-pub fn collect_entities_from_query_field( - schema: &s::Document, - object_type: &s::ObjectType, - field: &q::Field, +pub(crate) fn collect_entities_from_query_field( + schema: &ApiSchema, + object_type: sast::ObjectType, + field: &a::Field, ) -> BTreeSet { // Output entities let mut entities = HashSet::new(); @@ -305,11 +302,10 @@ pub fn collect_entities_from_query_field( while let Some((object_type, field)) = queue.pop_front() { // Check if the field exists on the object type - if let Some(field_type) = sast::get_field(object_type, &field.name) { + if let Some(field_type) = sast::get_field(&object_type, &field.name) { // Check if the field type corresponds to a type definition (in a valid schema, // this should always be the case) - if let Some(type_definition) = sast::get_type_definition_from_field(schema, field_type) - { + if let Some(type_definition) = schema.get_type_definition_from_field(field_type) { // If the field's type definition is an object type, extract that type if let s::TypeDefinition::Object(object_type) = type_definition { // Only collect whether the field's type has an @entity directive @@ -325,10 +321,9 @@ pub fn collect_entities_from_query_field( // If the query field has a non-empty selection set, this means we // need to recursively process it - for selection in field.selection_set.items.iter() { - if let q::Selection::Field(sub_field) = selection { - queue.push_back((object_type, sub_field)) - } + let object_type = schema.object_type(object_type).into(); + for sub_field in field.selection_set.fields_for(&object_type) { + queue.push_back((object_type.cheap_clone(), sub_field)) } } } @@ -345,14 +340,20 @@ pub fn collect_entities_from_query_field( mod tests { use graph::{ components::store::EntityType, - prelude::s::{Directive, Field, InputValue, ObjectType, Type, Value as SchemaValue}, + data::value::Object, + prelude::{ + r, AttributeNames, EntityCollection, EntityFilter, EntityRange, Value, ValueType, + BLOCK_NUMBER_MAX, + }, + prelude::{ 
+ s::{self, Directive, Field, InputValue, ObjectType, Type, Value as SchemaValue}, + EntityOrder, + }, }; use graphql_parser::Pos; - use std::collections::{BTreeMap, HashMap}; - - use graph::prelude::*; + use std::{collections::BTreeMap, iter::FromIterator, sync::Arc}; - use super::build_query; + use super::{a, build_query}; fn default_object() -> ObjectType { let subgraph_id_argument = ( @@ -417,13 +418,34 @@ mod tests { } } - fn default_arguments<'a>() -> HashMap<&'a str, r::Value> { - let mut map = HashMap::new(); - let first = "first"; - let skip = "skip"; - map.insert(first, r::Value::Int(100.into())); - map.insert(skip, r::Value::Int(0.into())); - map + fn default_field() -> a::Field { + let arguments = vec![ + ("first".to_string(), r::Value::Int(100.into())), + ("skip".to_string(), r::Value::Int(0.into())), + ]; + let obj_type = Arc::new(object("SomeType")).into(); + a::Field { + position: Default::default(), + alias: None, + name: "aField".to_string(), + arguments, + directives: vec![], + selection_set: a::SelectionSet::new(vec![obj_type]), + } + } + + fn default_field_with(arg_name: &str, arg_value: r::Value) -> a::Field { + let mut field = default_field(); + field.arguments.push((arg_name.to_string(), arg_value)); + field + } + + fn default_field_with_vec(args: Vec<(&str, r::Value)>) -> a::Field { + let mut field = default_field(); + for (name, value) in args { + field.arguments.push((name.to_string(), value)); + } + field } #[test] @@ -432,7 +454,7 @@ mod tests { build_query( &object("Entity1"), BLOCK_NUMBER_MAX, - &default_arguments(), + &default_field(), &BTreeMap::new(), std::u32::MAX, std::u32::MAX, @@ -446,7 +468,7 @@ mod tests { build_query( &object("Entity2"), BLOCK_NUMBER_MAX, - &default_arguments(), + &default_field(), &BTreeMap::new(), std::u32::MAX, std::u32::MAX, @@ -464,7 +486,7 @@ mod tests { build_query( &default_object(), BLOCK_NUMBER_MAX, - &default_arguments(), + &default_field(), &BTreeMap::new(), std::u32::MAX, std::u32::MAX, @@ 
-478,14 +500,12 @@ mod tests { #[test] fn build_query_parses_order_by_from_enum_values_correctly() { - let order_by = "orderBy".to_string(); - let mut args = default_arguments(); - args.insert(&order_by, r::Value::Enum("name".to_string())); + let field = default_field_with("orderBy", r::Value::Enum("name".to_string())); assert_eq!( build_query( &default_object(), BLOCK_NUMBER_MAX, - &args, + &field, &BTreeMap::new(), std::u32::MAX, std::u32::MAX, @@ -496,13 +516,12 @@ mod tests { EntityOrder::Ascending("name".to_string(), ValueType::String) ); - let mut args = default_arguments(); - args.insert(&order_by, r::Value::Enum("email".to_string())); + let field = default_field_with("orderBy", r::Value::Enum("email".to_string())); assert_eq!( build_query( &default_object(), BLOCK_NUMBER_MAX, - &args, + &field, &BTreeMap::new(), std::u32::MAX, std::u32::MAX, @@ -516,14 +535,12 @@ mod tests { #[test] fn build_query_ignores_order_by_from_non_enum_values() { - let order_by = "orderBy".to_string(); - let mut args = default_arguments(); - args.insert(&order_by, r::Value::String("name".to_string())); + let field = default_field_with("orderBy", r::Value::String("name".to_string())); assert_eq!( build_query( &default_object(), BLOCK_NUMBER_MAX, - &args, + &field, &BTreeMap::new(), std::u32::MAX, std::u32::MAX, @@ -534,13 +551,12 @@ mod tests { EntityOrder::Default ); - let mut args = default_arguments(); - args.insert(&order_by, r::Value::String("email".to_string())); + let field = default_field_with("orderBy", r::Value::String("email".to_string())); assert_eq!( build_query( &default_object(), BLOCK_NUMBER_MAX, - &args, + &field, &BTreeMap::new(), std::u32::MAX, std::u32::MAX, @@ -554,16 +570,15 @@ mod tests { #[test] fn build_query_parses_order_direction_from_enum_values_correctly() { - let order_by = "orderBy".to_string(); - let order_direction = "orderDirection".to_string(); - let mut args = default_arguments(); - args.insert(&order_by, r::Value::Enum("name".to_string())); - 
args.insert(&order_direction, r::Value::Enum("asc".to_string())); + let field = default_field_with_vec(vec![ + ("orderBy", r::Value::Enum("name".to_string())), + ("orderDirection", r::Value::Enum("asc".to_string())), + ]); assert_eq!( build_query( &default_object(), BLOCK_NUMBER_MAX, - &args, + &field, &BTreeMap::new(), std::u32::MAX, std::u32::MAX, @@ -574,14 +589,15 @@ mod tests { EntityOrder::Ascending("name".to_string(), ValueType::String) ); - let mut args = default_arguments(); - args.insert(&order_by, r::Value::Enum("name".to_string())); - args.insert(&order_direction, r::Value::Enum("desc".to_string())); + let field = default_field_with_vec(vec![ + ("orderBy", r::Value::Enum("name".to_string())), + ("orderDirection", r::Value::Enum("desc".to_string())), + ]); assert_eq!( build_query( &default_object(), BLOCK_NUMBER_MAX, - &args, + &field, &BTreeMap::new(), std::u32::MAX, std::u32::MAX, @@ -592,17 +608,18 @@ mod tests { EntityOrder::Descending("name".to_string(), ValueType::String) ); - let mut args = default_arguments(); - args.insert(&order_by, r::Value::Enum("name".to_string())); - args.insert( - &order_direction, - r::Value::Enum("descending...".to_string()), - ); + let field = default_field_with_vec(vec![ + ("orderBy", r::Value::Enum("name".to_string())), + ( + "orderDirection", + r::Value::Enum("descending...".to_string()), + ), + ]); assert_eq!( build_query( &default_object(), BLOCK_NUMBER_MAX, - &args, + &field, &BTreeMap::new(), std::u32::MAX, std::u32::MAX, @@ -614,16 +631,15 @@ mod tests { ); // No orderBy -> EntityOrder::Default - let mut args = default_arguments(); - args.insert( - &order_direction, + let field = default_field_with( + "orderDirection", r::Value::Enum("descending...".to_string()), ); assert_eq!( build_query( &default_object(), BLOCK_NUMBER_MAX, - &args, + &field, &BTreeMap::new(), std::u32::MAX, std::u32::MAX, @@ -635,54 +651,13 @@ mod tests { ); } - #[test] - fn build_query_ignores_order_direction_from_non_enum_values() { - let 
order_by = "orderBy".to_string(); - let order_direction = "orderDirection".to_string(); - let mut args = default_arguments(); - args.insert(&order_by, r::Value::Enum("name".to_string())); - args.insert(&order_direction, r::Value::String("asc".to_string())); - assert_eq!( - build_query( - &default_object(), - BLOCK_NUMBER_MAX, - &args, - &BTreeMap::new(), - std::u32::MAX, - std::u32::MAX, - Default::default() - ) - .unwrap() - .order, - EntityOrder::Ascending("name".to_string(), ValueType::String) - ); - - let mut args = default_arguments(); - args.insert(&order_by, r::Value::Enum("name".to_string())); - args.insert(&order_direction, r::Value::String("desc".to_string())); - assert_eq!( - build_query( - &default_object(), - BLOCK_NUMBER_MAX, - &args, - &BTreeMap::new(), - std::u32::MAX, - std::u32::MAX, - Default::default() - ) - .unwrap() - .order, - EntityOrder::Ascending("name".to_string(), ValueType::String) - ); - } - #[test] fn build_query_yields_default_range_if_none_is_present() { assert_eq!( build_query( &default_object(), BLOCK_NUMBER_MAX, - &default_arguments(), + &default_field(), &BTreeMap::new(), std::u32::MAX, std::u32::MAX, @@ -696,14 +671,14 @@ mod tests { #[test] fn build_query_yields_default_first_if_only_skip_is_present() { - let skip = "skip".to_string(); - let mut args = default_arguments(); - args.insert(&skip, r::Value::Int(50)); + let mut field = default_field(); + field.arguments = vec![("skip".to_string(), r::Value::Int(50))]; + assert_eq!( build_query( &default_object(), BLOCK_NUMBER_MAX, - &args, + &field, &BTreeMap::new(), std::u32::MAX, std::u32::MAX, @@ -720,11 +695,9 @@ mod tests { #[test] fn build_query_yields_filters() { - let whre = "where".to_string(); - let mut args = default_arguments(); - args.insert( - &whre, - r::Value::Object(BTreeMap::from_iter(vec![( + let query_field = default_field_with( + "where", + r::Value::Object(Object::from_iter(vec![( "name_ends_with".to_string(), r::Value::String("ello".to_string()), )])), @@ 
-736,7 +709,7 @@ mod tests { ..default_object() }, BLOCK_NUMBER_MAX, - &args, + &query_field, &BTreeMap::new(), std::u32::MAX, std::u32::MAX, diff --git a/graphql/src/store/resolver.rs b/graphql/src/store/resolver.rs index 3b32585d7d3..950a0894d50 100644 --- a/graphql/src/store/resolver.rs +++ b/graphql/src/store/resolver.rs @@ -1,7 +1,8 @@ -use std::collections::{BTreeMap, HashMap}; +use std::collections::BTreeMap; use std::result; use std::sync::Arc; +use graph::data::value::Object; use graph::data::{ graphql::{object, ObjectOrInterface}, schema::META_FIELD_TYPE, @@ -9,6 +10,7 @@ use graph::data::{ use graph::prelude::*; use graph::{components::store::*, data::schema::BLOCK_FIELD_TYPE}; +use crate::execution::ast as a; use crate::query::ext::BlockConstraint; use crate::runner::ResultSizeMetrics; use crate::schema::ast as sast; @@ -220,7 +222,7 @@ impl StoreResolver { "__typename".to_string(), r::Value::String(META_FIELD_TYPE.to_string()), ); - return Ok((None, Some(r::Value::Object(map)))); + return Ok((None, Some(r::Value::object(map)))); } Ok((prefetched_object, None)) } @@ -237,7 +239,7 @@ impl Resolver for StoreResolver { fn prefetch( &self, ctx: &ExecutionContext, - selection_set: &q::SelectionSet, + selection_set: &a::SelectionSet, ) -> Result, Vec> { super::prefetch::run(self, ctx, selection_set, &self.result_size).map(Some) } @@ -245,10 +247,9 @@ impl Resolver for StoreResolver { fn resolve_objects( &self, prefetched_objects: Option, - field: &q::Field, + field: &a::Field, _field_definition: &s::Field, object_type: ObjectOrInterface<'_>, - _arguments: &HashMap<&str, r::Value>, ) -> Result { if let Some(child) = prefetched_objects { Ok(child) @@ -265,10 +266,9 @@ impl Resolver for StoreResolver { fn resolve_object( &self, prefetched_object: Option, - field: &q::Field, + field: &a::Field, field_definition: &s::Field, object_type: ObjectOrInterface<'_>, - _arguments: &HashMap<&str, r::Value>, ) -> Result { let (prefetched_object, meta) = 
self.handle_meta(prefetched_object, &object_type)?; if let Some(meta) = meta { @@ -301,11 +301,12 @@ impl Resolver for StoreResolver { fn resolve_field_stream( &self, - schema: &s::Document, + schema: &ApiSchema, object_type: &s::ObjectType, - field: &q::Field, + field: &a::Field, ) -> result::Result { // Collect all entities involved in the query field + let object_type = schema.object_type(object_type).into(); let entities = collect_entities_from_query_field(schema, object_type, field); // Subscribe to the store and return the entity change stream @@ -328,8 +329,9 @@ impl Resolver for StoreResolver { // or a different field queried under the response key `_meta`. ErrorPolicy::Deny => { let data = result.take_data(); - let meta = data.and_then(|mut d| d.remove_entry("_meta")); - result.set_data(meta.map(|m| BTreeMap::from_iter(Some(m)))); + let meta = + data.and_then(|d| d.get("_meta").map(|m| ("_meta".to_string(), m.clone()))); + result.set_data(meta.map(|m| Object::from_iter(Some(m)))); } ErrorPolicy::Allow => (), } diff --git a/graphql/src/subscription/mod.rs b/graphql/src/subscription/mod.rs index ccc6a19427d..6cc2b40bd01 100644 --- a/graphql/src/subscription/mod.rs +++ b/graphql/src/subscription/mod.rs @@ -1,5 +1,3 @@ -use std::collections::HashMap; -use std::iter; use std::result::Result; use std::time::{Duration, Instant}; @@ -8,6 +6,7 @@ use graph::{components::store::SubscriptionManager, prelude::*}; use crate::runner::ResultSizeMetrics; use crate::{ + execution::ast as a, execution::*, prelude::{BlockConstraint, StoreResolver}, schema::api::ErrorPolicy, @@ -106,35 +105,29 @@ fn create_source_event_stream( .as_ref() .ok_or(QueryExecutionError::NoRootSubscriptionObjectType)?; - let grouped_field_set = collect_fields( - &ctx, - &subscription_type, - iter::once(ctx.query.selection_set.as_ref()), - ); - - if grouped_field_set.is_empty() { + let field = if ctx.query.selection_set.is_empty() { return 
Err(SubscriptionError::from(QueryExecutionError::EmptyQuery)); - } else if grouped_field_set.len() > 1 { - return Err(SubscriptionError::from( - QueryExecutionError::MultipleSubscriptionFields, - )); - } - - let fields = grouped_field_set.get_index(0).unwrap(); - let field = fields.1[0]; - let argument_values = coerce_argument_values(&ctx.query, subscription_type.as_ref(), field)?; + } else { + match ctx.query.selection_set.single_field() { + Some(field) => field, + None => { + return Err(SubscriptionError::from( + QueryExecutionError::MultipleSubscriptionFields, + )); + } + } + }; - resolve_field_stream(&ctx, &subscription_type, field, argument_values) + resolve_field_stream(&ctx, &subscription_type, field) } fn resolve_field_stream( ctx: &ExecutionContext, object_type: &s::ObjectType, - field: &q::Field, - _argument_values: HashMap<&str, r::Value>, + field: &a::Field, ) -> Result { ctx.resolver - .resolve_field_stream(&ctx.query.schema.document(), object_type, field) + .resolve_field_stream(&ctx.query.schema, object_type, field) .map_err(SubscriptionError::from) } @@ -226,7 +219,7 @@ async fn execute_subscription_event( execute_root_selection_set( ctx.cheap_clone(), ctx.query.selection_set.cheap_clone(), - subscription_type, + subscription_type.into(), block_ptr, ) .await diff --git a/graphql/src/values/coercion.rs b/graphql/src/values/coercion.rs index eb82d56494f..35a16501e6b 100644 --- a/graphql/src/values/coercion.rs +++ b/graphql/src/values/coercion.rs @@ -1,7 +1,8 @@ use crate::schema; use graph::prelude::s::{EnumType, InputValue, ScalarType, Type, TypeDefinition}; use graph::prelude::{q, r, QueryExecutionError}; -use std::collections::{BTreeMap, HashMap}; +use std::collections::BTreeMap; +use std::convert::TryFrom; /// A GraphQL value that can be coerced according to a type. pub trait MaybeCoercible { @@ -58,22 +59,21 @@ impl MaybeCoercible for q::Value { /// On error, the `value` is returned as `Err(value)`. 
fn coerce_to_definition<'a>( - value: q::Value, + value: r::Value, definition: &str, resolver: &impl Fn(&str) -> Option<&'a TypeDefinition>, - variables: &HashMap, -) -> Result { +) -> Result { match resolver(definition).ok_or_else(|| value.clone())? { // Accept enum values if they match a value in the enum type - TypeDefinition::Enum(t) => value.coerce(t), + TypeDefinition::Enum(t) => value.coerce_enum(t), // Try to coerce Scalar values - TypeDefinition::Scalar(t) => value.coerce(t), + TypeDefinition::Scalar(t) => value.coerce_scalar(t), // Try to coerce InputObject values TypeDefinition::InputObject(t) => match value { - q::Value::Object(object) => { - let object_for_error = q::Value::Object(object.clone()); + r::Value::Object(object) => { + let object_for_error = r::Value::Object(object.clone()); let mut coerced_object = BTreeMap::new(); for (name, value) in object { let def = t @@ -83,13 +83,13 @@ fn coerce_to_definition<'a>( .ok_or_else(|| object_for_error.clone())?; coerced_object.insert( name.clone(), - match coerce_input_value(Some(value), def, resolver, variables) { + match coerce_input_value(Some(value), def, resolver) { Err(_) | Ok(None) => return Err(object_for_error), Ok(Some(v)) => v, }, ); } - Ok(r::Value::Object(coerced_object)) + Ok(r::Value::object(coerced_object)) } _ => Err(value), }, @@ -103,17 +103,25 @@ fn coerce_to_definition<'a>( /// /// `Ok(None)` happens when no value is found for a nullable type. pub(crate) fn coerce_input_value<'a>( - mut value: Option, + mut value: Option, def: &InputValue, resolver: &impl Fn(&str) -> Option<&'a TypeDefinition>, - variable_values: &HashMap, ) -> Result, QueryExecutionError> { - if let Some(q::Value::Variable(name)) = value { - value = variable_values.get(&name).cloned().map(Into::into); - }; - // Use the default value if necessary and present. 
- value = value.or(def.default_value.clone()); + value = match value { + Some(value) => Some(value), + None => def + .default_value + .clone() + .map(|val| r::Value::try_from(val)) + .transpose() + .map_err(|value| { + QueryExecutionError::Panic(format!( + "internal error: failed to convert default value {:?}", + value + )) + })?, + }; // Extract value, checking for null or missing. let value = match value { @@ -131,22 +139,21 @@ pub(crate) fn coerce_input_value<'a>( }; Ok(Some( - coerce_value(value, &def.value_type, resolver, variable_values).map_err(|val| { - QueryExecutionError::InvalidArgumentError(def.position, def.name.to_owned(), val) + coerce_value(value, &def.value_type, resolver).map_err(|val| { + QueryExecutionError::InvalidArgumentError(def.position, def.name.to_owned(), val.into()) })?, )) } /// On error, the `value` is returned as `Err(value)`. pub(crate) fn coerce_value<'a>( - value: q::Value, + value: r::Value, ty: &Type, resolver: &impl Fn(&str) -> Option<&'a TypeDefinition>, - variable_values: &HashMap, -) -> Result { +) -> Result { match (ty, value) { // Null values cannot be coerced into non-null types. - (Type::NonNullType(_), q::Value::Null) => Err(q::Value::Null), + (Type::NonNullType(_), r::Value::Null) => Err(r::Value::Null), // Non-null values may be coercible into non-null types (Type::NonNullType(_), val) => { @@ -156,11 +163,11 @@ pub(crate) fn coerce_value<'a>( Type::NonNullType(ty) => ty, _ => unreachable!(), }; - coerce_value(val, t, resolver, variable_values) + coerce_value(val, t, resolver) } // Nullable types can be null. 
- (_, q::Value::Null) => Ok(r::Value::Null), + (_, r::Value::Null) => Ok(r::Value::Null), // Resolve named types, then try to coerce the value into the resolved type (Type::NamedType(_), val) => { @@ -168,12 +175,12 @@ pub(crate) fn coerce_value<'a>( Type::NamedType(name) => name, _ => unreachable!(), }; - coerce_to_definition(val, name, resolver, variable_values) + coerce_to_definition(val, name, resolver) } // List values are coercible if their values are coercible into the // inner type. - (Type::ListType(_), q::Value::List(values)) => { + (Type::ListType(_), r::Value::List(values)) => { let t = match ty { Type::ListType(ty) => ty, _ => unreachable!(), @@ -182,7 +189,7 @@ pub(crate) fn coerce_value<'a>( // Coerce the list values individually for value in values { - coerced_values.push(coerce_value(value, t, resolver, variable_values)?); + coerced_values.push(coerce_value(value, t, resolver)?); } Ok(r::Value::List(coerced_values)) @@ -195,10 +202,9 @@ pub(crate) fn coerce_value<'a>( #[cfg(test)] mod tests { - use graph::prelude::{q, r::Value}; + use graph::prelude::r::Value; use graphql_parser::schema::{EnumType, EnumValue, ScalarType, TypeDefinition}; use graphql_parser::Pos; - use std::collections::HashMap; use super::coerce_to_definition; @@ -220,43 +226,27 @@ mod tests { // We can coerce from Value::Enum -> TypeDefinition::Enum if the variant is valid assert_eq!( - coerce_to_definition( - q::Value::Enum("ValidVariant".to_string()), - "", - &resolver, - &HashMap::new() - ), + coerce_to_definition(Value::Enum("ValidVariant".to_string()), "", &resolver,), Ok(Value::Enum("ValidVariant".to_string())) ); // We cannot coerce from Value::Enum -> TypeDefinition::Enum if the variant is invalid - assert!(coerce_to_definition( - q::Value::Enum("InvalidVariant".to_string()), - "", - &resolver, - &HashMap::new() - ) - .is_err()); + assert!( + coerce_to_definition(Value::Enum("InvalidVariant".to_string()), "", &resolver,) + .is_err() + ); // We also support going from 
Value::String -> TypeDefinition::Scalar(Enum) assert_eq!( - coerce_to_definition( - q::Value::String("ValidVariant".to_string()), - "", - &resolver, - &HashMap::new() - ), + coerce_to_definition(Value::String("ValidVariant".to_string()), "", &resolver,), Ok(Value::Enum("ValidVariant".to_string())), ); // But we don't support invalid variants - assert!(coerce_to_definition( - q::Value::String("InvalidVariant".to_string()), - "", - &resolver, - &HashMap::new() - ) - .is_err()); + assert!( + coerce_to_definition(Value::String("InvalidVariant".to_string()), "", &resolver,) + .is_err() + ); } #[test] @@ -271,37 +261,21 @@ mod tests { // We can coerce from Value::Boolean -> TypeDefinition::Scalar(Boolean) assert_eq!( - coerce_to_definition(q::Value::Boolean(true), "", &resolver, &HashMap::new()), + coerce_to_definition(Value::Boolean(true), "", &resolver), Ok(Value::Boolean(true)) ); assert_eq!( - coerce_to_definition(q::Value::Boolean(false), "", &resolver, &HashMap::new()), + coerce_to_definition(Value::Boolean(false), "", &resolver), Ok(Value::Boolean(false)) ); // We don't support going from Value::String -> TypeDefinition::Scalar(Boolean) - assert!(coerce_to_definition( - q::Value::String("true".to_string()), - "", - &resolver, - &HashMap::new() - ) - .is_err()); - assert!(coerce_to_definition( - q::Value::String("false".to_string()), - "", - &resolver, - &HashMap::new() - ) - .is_err()); + assert!(coerce_to_definition(Value::String("true".to_string()), "", &resolver,).is_err()); + assert!(coerce_to_definition(Value::String("false".to_string()), "", &resolver,).is_err()); // We don't support going from Value::Float -> TypeDefinition::Scalar(Boolean) - assert!( - coerce_to_definition(q::Value::Float(1.0), "", &resolver, &HashMap::new()).is_err() - ); - assert!( - coerce_to_definition(q::Value::Float(0.0), "", &resolver, &HashMap::new()).is_err() - ); + assert!(coerce_to_definition(Value::Float(1.0), "", &resolver).is_err()); + 
assert!(coerce_to_definition(Value::Float(0.0), "", &resolver).is_err()); } #[test] @@ -311,56 +285,37 @@ mod tests { // We can coerce from Value::Float -> TypeDefinition::Scalar(BigDecimal) assert_eq!( - coerce_to_definition(q::Value::Float(23.7), "", &resolver, &HashMap::new()), + coerce_to_definition(Value::Float(23.7), "", &resolver), Ok(Value::String("23.7".to_string())) ); assert_eq!( - coerce_to_definition(q::Value::Float(-5.879), "", &resolver, &HashMap::new()), + coerce_to_definition(Value::Float(-5.879), "", &resolver), Ok(Value::String("-5.879".to_string())) ); // We can coerce from Value::String -> TypeDefinition::Scalar(BigDecimal) assert_eq!( - coerce_to_definition( - q::Value::String("23.7".to_string()), - "", - &resolver, - &HashMap::new() - ), + coerce_to_definition(Value::String("23.7".to_string()), "", &resolver,), Ok(Value::String("23.7".to_string())) ); assert_eq!( - coerce_to_definition( - q::Value::String("-5.879".to_string()), - "", - &resolver, - &HashMap::new() - ), + coerce_to_definition(Value::String("-5.879".to_string()), "", &resolver,), Ok(Value::String("-5.879".to_string())), ); // We can coerce from Value::Int -> TypeDefinition::Scalar(BigDecimal) assert_eq!( - coerce_to_definition(q::Value::Int(23.into()), "", &resolver, &HashMap::new()), + coerce_to_definition(Value::Int(23.into()), "", &resolver), Ok(Value::String("23".to_string())) ); assert_eq!( - coerce_to_definition( - q::Value::Int((-5 as i32).into()), - "", - &resolver, - &HashMap::new() - ), + coerce_to_definition(Value::Int((-5 as i32).into()), "", &resolver,), Ok(Value::String("-5".to_string())), ); // We don't support going from Value::Boolean -> TypeDefinition::Scalar(Boolean) - assert!( - coerce_to_definition(q::Value::Boolean(true), "", &resolver, &HashMap::new()).is_err() - ); - assert!( - coerce_to_definition(q::Value::Boolean(false), "", &resolver, &HashMap::new()).is_err() - ); + assert!(coerce_to_definition(Value::Boolean(true), "", &resolver).is_err()); + 
assert!(coerce_to_definition(Value::Boolean(false), "", &resolver).is_err()); } #[test] @@ -370,39 +325,21 @@ mod tests { // We can coerce from Value::String -> TypeDefinition::Scalar(String) assert_eq!( - coerce_to_definition( - q::Value::String("foo".to_string()), - "", - &resolver, - &HashMap::new() - ), + coerce_to_definition(Value::String("foo".to_string()), "", &resolver,), Ok(Value::String("foo".to_string())) ); assert_eq!( - coerce_to_definition( - q::Value::String("bar".to_string()), - "", - &resolver, - &HashMap::new() - ), + coerce_to_definition(Value::String("bar".to_string()), "", &resolver,), Ok(Value::String("bar".to_string())) ); // We don't support going from Value::Boolean -> TypeDefinition::Scalar(String) - assert!( - coerce_to_definition(q::Value::Boolean(true), "", &resolver, &HashMap::new()).is_err() - ); - assert!( - coerce_to_definition(q::Value::Boolean(false), "", &resolver, &HashMap::new()).is_err() - ); + assert!(coerce_to_definition(Value::Boolean(true), "", &resolver).is_err()); + assert!(coerce_to_definition(Value::Boolean(false), "", &resolver).is_err()); // We don't support going from Value::Float -> TypeDefinition::Scalar(String) - assert!( - coerce_to_definition(q::Value::Float(23.7), "", &resolver, &HashMap::new()).is_err() - ); - assert!( - coerce_to_definition(q::Value::Float(-5.879), "", &resolver, &HashMap::new()).is_err() - ); + assert!(coerce_to_definition(Value::Float(23.7), "", &resolver).is_err()); + assert!(coerce_to_definition(Value::Float(-5.879), "", &resolver).is_err()); } #[test] @@ -412,46 +349,28 @@ mod tests { // We can coerce from Value::String -> TypeDefinition::Scalar(ID) assert_eq!( - coerce_to_definition( - q::Value::String("foo".to_string()), - "", - &resolver, - &HashMap::new() - ), + coerce_to_definition(Value::String("foo".to_string()), "", &resolver,), Ok(Value::String("foo".to_string())) ); assert_eq!( - coerce_to_definition( - q::Value::String("bar".to_string()), - "", - &resolver, - &HashMap::new() 
- ), + coerce_to_definition(Value::String("bar".to_string()), "", &resolver,), Ok(Value::String("bar".to_string())) ); // And also from Value::Int assert_eq!( - coerce_to_definition(q::Value::Int(1234.into()), "", &resolver, &HashMap::new()), + coerce_to_definition(Value::Int(1234.into()), "", &resolver), Ok(Value::String("1234".to_string())) ); // We don't support going from Value::Boolean -> TypeDefinition::Scalar(ID) - assert!( - coerce_to_definition(q::Value::Boolean(true), "", &resolver, &HashMap::new()).is_err() - ); + assert!(coerce_to_definition(Value::Boolean(true), "", &resolver).is_err()); - assert!( - coerce_to_definition(q::Value::Boolean(false), "", &resolver, &HashMap::new()).is_err() - ); + assert!(coerce_to_definition(Value::Boolean(false), "", &resolver).is_err()); // We don't support going from Value::Float -> TypeDefinition::Scalar(ID) - assert!( - coerce_to_definition(q::Value::Float(23.7), "", &resolver, &HashMap::new()).is_err() - ); - assert!( - coerce_to_definition(q::Value::Float(-5.879), "", &resolver, &HashMap::new()).is_err() - ); + assert!(coerce_to_definition(Value::Float(23.7), "", &resolver).is_err()); + assert!(coerce_to_definition(Value::Float(-5.879), "", &resolver).is_err()); } #[test] @@ -461,27 +380,17 @@ mod tests { // We can coerce from Value::String -> TypeDefinition::Scalar(BigInt) assert_eq!( - coerce_to_definition( - q::Value::String("1234".to_string()), - "", - &resolver, - &HashMap::new() - ), + coerce_to_definition(Value::String("1234".to_string()), "", &resolver,), Ok(Value::String("1234".to_string())) ); // And also from Value::Int assert_eq!( - coerce_to_definition(q::Value::Int(1234.into()), "", &resolver, &HashMap::new()), + coerce_to_definition(Value::Int(1234.into()), "", &resolver), Ok(Value::String("1234".to_string())) ); assert_eq!( - coerce_to_definition( - q::Value::Int((-1234 as i32).into()), - "", - &resolver, - &HashMap::new() - ), + coerce_to_definition(Value::Int((-1234 as i32).into()), "", 
&resolver,), Ok(Value::String("-1234".to_string())) ); } @@ -493,12 +402,7 @@ mod tests { // We can coerce from Value::String -> TypeDefinition::Scalar(Bytes) assert_eq!( - coerce_to_definition( - q::Value::String("0x21f".to_string()), - "", - &resolver, - &HashMap::new() - ), + coerce_to_definition(Value::String("0x21f".to_string()), "", &resolver,), Ok(Value::String("0x21f".to_string())) ); } @@ -509,21 +413,11 @@ mod tests { let resolver = |_: &str| Some(&int_type); assert_eq!( - coerce_to_definition( - q::Value::Int(13289123.into()), - "", - &resolver, - &HashMap::new() - ), + coerce_to_definition(Value::Int(13289123.into()), "", &resolver,), Ok(Value::Int(13289123.into())) ); assert_eq!( - coerce_to_definition( - q::Value::Int((-13289123 as i32).into()), - "", - &resolver, - &HashMap::new() - ), + coerce_to_definition(Value::Int((-13289123 as i32).into()), "", &resolver,), Ok(Value::Int((-13289123 as i32).into())) ); } diff --git a/graphql/tests/introspection.rs b/graphql/tests/introspection.rs index bf7cd613795..a6a306eff4d 100644 --- a/graphql/tests/introspection.rs +++ b/graphql/tests/introspection.rs @@ -1,16 +1,15 @@ #[macro_use] extern crate pretty_assertions; -use std::collections::HashMap; use std::sync::Arc; use graph::data::graphql::{object, object_value, ObjectOrInterface}; use graph::prelude::{ - async_trait, o, q, r, s, slog, tokio, ApiSchema, DeploymentHash, Logger, Query, + async_trait, o, r, s, slog, tokio, ApiSchema, DeploymentHash, Logger, Query, QueryExecutionError, QueryResult, Schema, }; use graph_graphql::prelude::{ - api_schema, execute_query, ExecutionContext, Query as PreparedQuery, QueryExecutionOptions, + a, api_schema, execute_query, ExecutionContext, Query as PreparedQuery, QueryExecutionOptions, Resolver, }; use test_store::LOAD_MANAGER; @@ -26,7 +25,7 @@ impl Resolver for MockResolver { fn prefetch( &self, _: &ExecutionContext, - _: &q::SelectionSet, + _: &a::SelectionSet, ) -> Result, Vec> { Ok(None) } @@ -34,10 +33,9 @@ impl 
Resolver for MockResolver { fn resolve_objects<'a>( &self, _: Option, - _field: &q::Field, + _field: &a::Field, _field_definition: &s::Field, _object_type: ObjectOrInterface<'_>, - _arguments: &HashMap<&str, r::Value>, ) -> Result { Ok(r::Value::Null) } @@ -45,10 +43,9 @@ impl Resolver for MockResolver { fn resolve_object( &self, __: Option, - _field: &q::Field, + _field: &a::Field, _field_definition: &s::Field, _object_type: ObjectOrInterface<'_>, - _arguments: &HashMap<&str, r::Value>, ) -> Result { Ok(r::Value::Null) } @@ -120,8 +117,8 @@ fn expected_mock_schema_introspection() -> r::Value { description: r::Value::Null, fields: r::Value::Null, inputFields: r::Value::Null, - enumValues: r::Value::Null, interfaces: r::Value::Null, + enumValues: r::Value::Null, possibleTypes: r::Value::Null, }; @@ -131,8 +128,8 @@ fn expected_mock_schema_introspection() -> r::Value { description: r::Value::Null, fields: r::Value::Null, inputFields: r::Value::Null, - enumValues: r::Value::Null, interfaces: r::Value::Null, + enumValues: r::Value::Null, possibleTypes: r::Value::Null, }; @@ -142,8 +139,8 @@ fn expected_mock_schema_introspection() -> r::Value { description: r::Value::Null, fields: r::Value::Null, inputFields: r::Value::Null, - enumValues: r::Value::Null, interfaces: r::Value::Null, + enumValues: r::Value::Null, possibleTypes: r::Value::Null, }; @@ -153,8 +150,8 @@ fn expected_mock_schema_introspection() -> r::Value { description: r::Value::Null, fields: r::Value::Null, inputFields: r::Value::Null, - enumValues: r::Value::Null, interfaces: r::Value::Null, + enumValues: r::Value::Null, possibleTypes: r::Value::Null, }; @@ -164,6 +161,7 @@ fn expected_mock_schema_introspection() -> r::Value { description: r::Value::Null, fields: r::Value::Null, inputFields: r::Value::Null, + interfaces: r::Value::Null, enumValues: r::Value::List(vec![ object! 
{ @@ -179,7 +177,6 @@ fn expected_mock_schema_introspection() -> r::Value { deprecationReason: r::Value::Null, }, ]), - interfaces: r::Value::Null, possibleTypes: r::Value::Null, }; @@ -192,6 +189,7 @@ fn expected_mock_schema_introspection() -> r::Value { r::Value::List(vec![object_value(vec![ ("name", r::Value::String("id".to_string())), ("description", r::Value::Null), + ("args", r::Value::List(vec![])), ( "type", object_value(vec![ @@ -207,14 +205,13 @@ fn expected_mock_schema_introspection() -> r::Value { ), ]), ), - ("args", r::Value::List(vec![])), - ("deprecationReason", r::Value::Null), ("isDeprecated", r::Value::Boolean(false)), + ("deprecationReason", r::Value::Null), ])]), ), ("inputFields", r::Value::Null), - ("enumValues", r::Value::Null), ("interfaces", r::Value::Null), + ("enumValues", r::Value::Null), ( "possibleTypes", r::Value::List(vec![object_value(vec![ @@ -231,6 +228,7 @@ fn expected_mock_schema_introspection() -> r::Value { ("description", r::Value::Null), ("fields", r::Value::Null), ("inputFields", r::Value::Null), + ("interfaces", r::Value::Null), ( "enumValues", r::Value::List(vec![ @@ -248,7 +246,6 @@ fn expected_mock_schema_introspection() -> r::Value { ]), ]), ), - ("interfaces", r::Value::Null), ("possibleTypes", r::Value::Null), ]); @@ -263,10 +260,6 @@ fn expected_mock_schema_introspection() -> r::Value { object_value(vec![ ("name", r::Value::String("name_eq".to_string())), ("description", r::Value::Null), - ( - "defaultValue", - r::Value::String("\"default name\"".to_string()), - ), ( "type", object_value(vec![ @@ -275,11 +268,14 @@ fn expected_mock_schema_introspection() -> r::Value { ("ofType", r::Value::Null), ]), ), + ( + "defaultValue", + r::Value::String("\"default name\"".to_string()), + ), ]), object_value(vec![ ("name", r::Value::String("name_not".to_string())), ("description", r::Value::Null), - ("defaultValue", r::Value::Null), ( "type", object_value(vec![ @@ -288,11 +284,12 @@ fn expected_mock_schema_introspection() -> 
r::Value { ("ofType", r::Value::Null), ]), ), + ("defaultValue", r::Value::Null), ]), ]), ), - ("enumValues", r::Value::Null), ("interfaces", r::Value::Null), + ("enumValues", r::Value::Null), ("possibleTypes", r::Value::Null), ]); @@ -372,7 +369,6 @@ fn expected_mock_schema_introspection() -> r::Value { ]), ), ("inputFields", r::Value::Null), - ("enumValues", r::Value::Null), ( "interfaces", r::Value::List(vec![object_value(vec![ @@ -381,6 +377,7 @@ fn expected_mock_schema_introspection() -> r::Value { ("ofType", r::Value::Null), ])]), ), + ("enumValues", r::Value::Null), ("possibleTypes", r::Value::Null), ]); @@ -398,9 +395,8 @@ fn expected_mock_schema_introspection() -> r::Value { "args", r::Value::List(vec![ object_value(vec![ - ("defaultValue", r::Value::Null), - ("description", r::Value::Null), ("name", r::Value::String("orderBy".to_string())), + ("description", r::Value::Null), ( "type", object_value(vec![ @@ -409,11 +405,11 @@ fn expected_mock_schema_introspection() -> r::Value { ("ofType", r::Value::Null), ]), ), + ("defaultValue", r::Value::Null), ]), object_value(vec![ - ("defaultValue", r::Value::Null), - ("description", r::Value::Null), ("name", r::Value::String("filter".to_string())), + ("description", r::Value::Null), ( "type", object_value(vec![ @@ -422,6 +418,7 @@ fn expected_mock_schema_introspection() -> r::Value { ("ofType", r::Value::Null), ]), ), + ("defaultValue", r::Value::Null), ]), ]), ), @@ -456,9 +453,8 @@ fn expected_mock_schema_introspection() -> r::Value { ( "args", r::Value::List(vec![object_value(vec![ - ("defaultValue", r::Value::String("99".to_string())), - ("description", r::Value::Null), ("name", r::Value::String("age".to_string())), + ("description", r::Value::Null), ( "type", object_value(vec![ @@ -467,6 +463,7 @@ fn expected_mock_schema_introspection() -> r::Value { ("ofType", r::Value::Null), ]), ), + ("defaultValue", r::Value::String("99".to_string())), ])]), ), ( @@ -498,8 +495,8 @@ fn expected_mock_schema_introspection() 
-> r::Value { ]), ), ("inputFields", r::Value::Null), - ("enumValues", r::Value::Null), ("interfaces", r::Value::List(vec![])), + ("enumValues", r::Value::Null), ("possibleTypes", r::Value::Null), ]); @@ -528,7 +525,6 @@ fn expected_mock_schema_introspection() -> r::Value { r::Value::List(vec![object_value(vec![ ("name", r::Value::String("language".to_string())), ("description", r::Value::Null), - ("defaultValue", r::Value::String("\"English\"".to_string())), ( "type", object_value(vec![ @@ -537,6 +533,7 @@ fn expected_mock_schema_introspection() -> r::Value { ("ofType", r::Value::Null), ]), ), + ("defaultValue", r::Value::String("\"English\"".to_string())), ])]), ), ])]); diff --git a/graphql/tests/query.rs b/graphql/tests/query.rs index 021f8dc016f..16499f344b4 100644 --- a/graphql/tests/query.rs +++ b/graphql/tests/query.rs @@ -1,13 +1,14 @@ #[macro_use] extern crate pretty_assertions; +use graph::data::value::Object; use graphql_parser::Pos; use std::convert::TryFrom; use std::iter::FromIterator; use std::sync::Arc; use std::time::{Duration, Instant}; use std::{ - collections::{BTreeMap, BTreeSet, HashMap}, + collections::{BTreeSet, HashMap}, marker::PhantomData, }; @@ -365,7 +366,6 @@ fn can_query_one_to_one_relationship() { r::Value::List(vec![ object_value(vec![ ("id", r::Value::String(String::from("s1"))), - ("played", r::Value::Int(10)), ( "song", object_value(vec![ @@ -373,10 +373,10 @@ fn can_query_one_to_one_relationship() { ("title", r::Value::String(String::from("Cheesy Tune"))) ]) ), + ("played", r::Value::Int(10)), ]), object_value(vec![ ("id", r::Value::String(String::from("s2"))), - ("played", r::Value::Int(15)), ( "song", object_value(vec![ @@ -384,6 +384,7 @@ fn can_query_one_to_one_relationship() { ("title", r::Value::String(String::from("Rock Tune"))) ]) ), + ("played", r::Value::Int(15)), ]) ]) ) @@ -573,6 +574,30 @@ fn can_query_many_to_many_relationship() { }) } +#[test] +fn root_fragments_are_expanded() { + run_test_sequentially(|store| 
async move { + let deployment = setup(store.as_ref()); + let query = graphql_parser::parse_query( + r#" + fragment Musicians on Query { + musicians(first: 100, where: { name: "Tom" }) { + name + } + } + query MyQuery { + ...Musicians + }"#, + ) + .expect("invalid test query") + .into_static(); + + let result = execute_query_document_with_variables(&deployment.hash, query, None).await; + let exp = object! { musicians: vec![ object! { name: "Tom" }]}; + assert_eq!(extract_data!(result), Some(exp)); + }) +} + #[test] fn query_variables_are_used() { run_test_sequentially(|store| async move { @@ -1396,6 +1421,111 @@ fn can_use_nested_filter() { }) } +// see: graphql-bug-compat +#[test] +fn ignores_invalid_field_arguments() { + run_test_sequentially(|store| async move { + let deployment = setup(store.as_ref()); + // This query has to return all the musicians since `id` is not a + // valid argument for the `musicians` field and must therefore be + // ignored + let result = execute_query_document( + &deployment.hash, + graphql_parser::parse_query("query { musicians(id: \"m1\") { id } } ") + .expect("invalid test query") + .into_static(), + ) + .await; + + let data = extract_data!(result).unwrap(); + match data { + r::Value::Object(obj) => match obj.get("musicians").unwrap() { + r::Value::List(lst) => { + assert_eq!(4, lst.len()); + } + _ => panic!("expected a list of values"), + }, + _ => { + panic!("expected an object") + } + } + }) +} + +// see: graphql-bug-compat +#[test] +fn leaf_selection_mismatch() { + run_test_sequentially(|store| async move { + let deployment = setup(store.as_ref()); + let result = execute_query_document( + &deployment.hash, + // 'name' is a string and doesn't admit a selection + graphql_parser::parse_query("query { musician(id: \"m1\") { id name { wat }} } ") + .expect("invalid test query") + .into_static(), + ) + .await; + let exp = object! { + musician: object! 
{ + id: "m1", + name: "John" + } + }; + let data = extract_data!(result).unwrap(); + assert_eq!(exp, data); + + let result = execute_query_document( + &deployment.hash, + // 'mainBand' is an object and requires a selection; it is ignored + graphql_parser::parse_query("query { musician(id: \"m1\") { id name mainBand } } ") + .expect("invalid test query") + .into_static(), + ) + .await; + let data = extract_data!(result).unwrap(); + assert_eq!(exp, data); + }) +} + +// see: graphql-bug-compat +#[test] +fn missing_variable() { + run_test_sequentially(|store| async move { + let deployment = setup(store.as_ref()); + let result = execute_query_document( + &deployment.hash, + // '$first' is not defined, use its default from the schema + graphql_parser::parse_query("query { musicians(first: $first, skip: $skip) { id } }") + .expect("invalid test query") + .into_static(), + ) + .await; + // We silently set `$first` to 100 and `$skip` to 0, and therefore + // get everything + let exp = object! { + musicians: vec![ + object! { id: "m1" }, + object! { id: "m2" }, + object! { id: "m3" }, + object! { id: "m4" }, + ] + }; + let data = extract_data!(result).unwrap(); + assert_eq!(exp, data); + + let result = execute_query_document( + &deployment.hash, + // '$where' is not defined but nullable, ignore the argument + graphql_parser::parse_query("query { musicians(where: $where) { id } }") + .expect("invalid test query") + .into_static(), + ) + .await; + let data = extract_data!(result).unwrap(); + assert_eq!(exp, data); + }) +} + async fn check_musicians_at( id: &DeploymentHash, query: &str, @@ -1538,7 +1668,7 @@ fn query_at_block_with_vars() { check_musicians_at(&deployment.hash, query, var, expected.clone(), qid).await; let query = "query by_nr($block: Block_height!) 
{ musicians(block: $block) { id } }"; - let mut map = BTreeMap::new(); + let mut map = Object::new(); map.insert("number".to_owned(), number); let block = r::Value::Object(map); let var = Some(("block", block)); @@ -1676,12 +1806,12 @@ fn can_query_meta() { let result = execute_query_document(&deployment.hash, query).await; let exp = object! { _meta: object! { + deployment: "graphqlTestsQuery", block: object! { hash: "0x8511fa04b64657581e3f00e14543c1d522d5d7e771b54aa3060b662ade47da13", number: 1, __typename: "_Block_" }, - deployment: "graphqlTestsQuery", __typename: "_Meta_" }, }; @@ -1696,11 +1826,11 @@ fn can_query_meta() { let result = execute_query_document(&deployment.hash, query).await; let exp = object! { _meta: object! { + deployment: "graphqlTestsQuery", block: object! { hash: r::Value::Null, number: 0 }, - deployment: "graphqlTestsQuery" }, }; assert_eq!(extract_data!(result), Some(exp)); @@ -1715,11 +1845,11 @@ fn can_query_meta() { let result = execute_query_document(&deployment.hash, query).await; let exp = object! { _meta: object! { + deployment: "graphqlTestsQuery", block: object! 
{ hash: "0xbd34884280958002c51d3f7b5f853e6febeba33de0f40d15b0363006533c924f", number: 0 }, - deployment: "graphqlTestsQuery" }, }; assert_eq!(extract_data!(result), Some(exp)); diff --git a/server/http/src/request.rs b/server/http/src/request.rs index 69f925f313e..4bc280201a7 100644 --- a/server/http/src/request.rs +++ b/server/http/src/request.rs @@ -69,9 +69,12 @@ impl Future for GraphQLRequest { mod tests { use graphql_parser; use hyper; - use std::collections::{BTreeMap, HashMap}; + use std::collections::HashMap; - use graph::{data::query::QueryTarget, prelude::*}; + use graph::{ + data::{query::QueryTarget, value::Object}, + prelude::*, + }; use super::GraphQLRequest; @@ -173,7 +176,7 @@ mod tests { (String::from("string"), r::Value::String(String::from("s"))), ( String::from("map"), - r::Value::Object(BTreeMap::from_iter( + r::Value::Object(Object::from_iter( vec![(String::from("k"), r::Value::String(String::from("v")))].into_iter(), )), ), diff --git a/server/http/src/service.rs b/server/http/src/service.rs index 0902d30cfe7..91f39666db0 100644 --- a/server/http/src/service.rs +++ b/server/http/src/service.rs @@ -387,10 +387,10 @@ where #[cfg(test)] mod tests { + use graph::data::value::Object; use http::status::StatusCode; use hyper::service::Service; use hyper::{Body, Method, Request}; - use std::collections::BTreeMap; use graph::data::{ graphql::effort::LoadManager, @@ -426,7 +426,7 @@ mod tests { } async fn run_query(self: Arc, _query: Query, _target: QueryTarget) -> QueryResults { - QueryResults::from(BTreeMap::from_iter( + QueryResults::from(Object::from_iter( vec![( String::from("name"), r::Value::String(String::from("Jordi")), diff --git a/server/http/tests/response.rs b/server/http/tests/response.rs index 17bb76dfb13..c007fc78605 100644 --- a/server/http/tests/response.rs +++ b/server/http/tests/response.rs @@ -1,11 +1,11 @@ +use graph::data::value::Object; use graph::data::{graphql::object, query::QueryResults}; use graph::prelude::*; use 
graph_server_http::test_utils; -use std::collections::BTreeMap; #[test] fn generates_200_for_query_results() { - let data = BTreeMap::new(); + let data = Object::new(); let query_result = QueryResults::from(data).as_http_response(); test_utils::assert_expected_headers(&query_result); test_utils::assert_successful_response(query_result); @@ -13,7 +13,7 @@ fn generates_200_for_query_results() { #[test] fn generates_valid_json_for_an_empty_result() { - let data = BTreeMap::new(); + let data = Object::new(); let query_result = QueryResults::from(data).as_http_response(); test_utils::assert_expected_headers(&query_result); let data = test_utils::assert_successful_response(query_result); @@ -43,10 +43,11 @@ fn canonical_serialization() { // Value::Variable: nothing to check, not used in a response - // Value::Object: Insertion order of keys does not matter + // Value::Object: Insertion order of keys matters let first_second = r#"{"data":{"first":"first","second":"second"}}"#; + let second_first = r#"{"data":{"second":"second","first":"first"}}"#; assert_resp!(first_second, object! { first: "first", second: "second" }); - assert_resp!(first_second, object! { second: "second", first: "first" }); + assert_resp!(second_first, object! { second: "second", first: "first" }); // Value::List assert_resp!(r#"{"data":{"ary":[1,2]}}"#, object! { ary: vec![1,2] }); @@ -81,6 +82,6 @@ fn canonical_serialization() { // Value::Boolean assert_resp!( r#"{"data":{"no":false,"yes":true}}"#, - object! { yes: true, no: false } + object! 
{ no: false, yes: true } ); } diff --git a/server/http/tests/server.rs b/server/http/tests/server.rs index 460f6dc92ed..cde7b3d4c95 100644 --- a/server/http/tests/server.rs +++ b/server/http/tests/server.rs @@ -1,11 +1,11 @@ use http::StatusCode; use hyper::{Body, Client, Request}; -use std::collections::BTreeMap; use std::time::Duration; use graph::data::{ graphql::effort::LoadManager, query::{QueryResults, QueryTarget}, + value::Object, }; use graph::prelude::*; @@ -46,11 +46,11 @@ impl GraphQlRunner for TestGraphQlRunner { .unwrap() == &r::Value::String(String::from("John")) { - BTreeMap::from_iter( + Object::from_iter( vec![(String::from("name"), r::Value::String(String::from("John")))].into_iter(), ) } else { - BTreeMap::from_iter( + Object::from_iter( vec![( String::from("name"), r::Value::String(String::from("Jordi")), diff --git a/server/index-node/src/request.rs b/server/index-node/src/request.rs index 31ed015e836..73e17891173 100644 --- a/server/index-node/src/request.rs +++ b/server/index-node/src/request.rs @@ -70,9 +70,9 @@ impl Future for IndexNodeRequest { mod tests { use graphql_parser; use hyper; - use std::collections::{BTreeMap, HashMap}; + use std::collections::HashMap; - use graph::prelude::*; + use graph::{data::value::Object, prelude::*}; use super::IndexNodeRequest; @@ -169,7 +169,7 @@ mod tests { (String::from("string"), r::Value::String(String::from("s"))), ( String::from("map"), - r::Value::Object(BTreeMap::from_iter( + r::Value::Object(Object::from_iter( vec![(String::from("k"), r::Value::String(String::from("v")))].into_iter(), )), ), diff --git a/server/index-node/src/resolver.rs b/server/index-node/src/resolver.rs index 3cccfb1ad6f..e2f694fe2b9 100644 --- a/server/index-node/src/resolver.rs +++ b/server/index-node/src/resolver.rs @@ -1,6 +1,6 @@ use either::Either; use graph::blockchain::{Blockchain, BlockchainKind}; -use std::collections::{BTreeMap, HashMap}; +use graph::data::value::Object; use 
graph::data::subgraph::features::detect_features; use graph::data::subgraph::{status, MAX_SPEC_VERSION}; @@ -9,7 +9,7 @@ use graph::{ components::store::StatusStore, data::graphql::{IntoValue, ObjectOrInterface, ValueMap}, }; -use graph_graphql::prelude::{ExecutionContext, Resolver}; +use graph_graphql::prelude::{a, ExecutionContext, Resolver}; use std::convert::TryInto; use web3::types::{Address, H256}; @@ -42,12 +42,9 @@ where } } - fn resolve_indexing_statuses( - &self, - arguments: &HashMap<&str, r::Value>, - ) -> Result { - let deployments = arguments - .get("subgraphs") + fn resolve_indexing_statuses(&self, field: &a::Field) -> Result { + let deployments = field + .argument_value("subgraphs") .map(|value| match value { r::Value::List(ids) => ids .into_iter() @@ -68,12 +65,12 @@ where fn resolve_indexing_statuses_for_subgraph_name( &self, - arguments: &HashMap<&str, r::Value>, + field: &a::Field, ) -> Result { // Get the subgraph name from the arguments; we can safely use `expect` here // because the argument will already have been validated prior to the resolver // being called - let subgraph_name = arguments + let subgraph_name = field .get_required::("subgraphName") .expect("subgraphName not provided"); @@ -90,21 +87,18 @@ where Ok(infos.into_value()) } - fn resolve_proof_of_indexing( - &self, - argument_values: &HashMap<&str, r::Value>, - ) -> Result { - let deployment_id = argument_values + fn resolve_proof_of_indexing(&self, field: &a::Field) -> Result { + let deployment_id = field .get_required::("subgraph") .expect("Valid subgraphId required"); - let block_number: u64 = argument_values + let block_number: u64 = field .get_required::("blockNumber") .expect("Valid blockNumber required") .try_into() .unwrap(); - let block_hash = argument_values + let block_hash = field .get_required::("blockHash") .expect("Valid blockHash required") .try_into() @@ -112,7 +106,7 @@ where let block = BlockPtr::from((block_hash, block_number)); - let indexer = 
argument_values + let indexer = field .get_optional::
("indexer") .expect("Invalid indexer"); @@ -139,13 +133,13 @@ where fn resolve_indexing_status_for_version( &self, - arguments: &HashMap<&str, r::Value>, + field: &a::Field, // If `true` return the current version, if `false` return the pending version. current_version: bool, ) -> Result { // We can safely unwrap because the argument is non-nullable and has been validated. - let subgraph_name = arguments.get_required::("subgraphName").unwrap(); + let subgraph_name = field.get_required::("subgraphName").unwrap(); debug!( self.logger, @@ -168,10 +162,10 @@ where async fn resolve_subgraph_features( &self, - arguments: &HashMap<&str, r::Value>, + field: &a::Field, ) -> Result { // We can safely unwrap because the argument is non-nullable and has been validated. - let subgraph_id = arguments.get_required::("subgraphId").unwrap(); + let subgraph_id = field.get_required::("subgraphId").unwrap(); // TODO: // @@ -244,7 +238,7 @@ where // We then bulid a GraphqQL `Object` value that contains the feature detection and // validation results and send it back as a response. 
- let mut response: BTreeMap = BTreeMap::new(); + let mut response = Object::new(); response.insert("features".to_string(), features); response.insert("errors".to_string(), errors); response.insert("network".to_string(), network); @@ -376,7 +370,7 @@ where fn prefetch( &self, _: &ExecutionContext, - _: &q::SelectionSet, + _: &a::SelectionSet, ) -> Result, Vec> { Ok(None) } @@ -385,17 +379,16 @@ where fn resolve_scalar_value( &self, parent_object_type: &s::ObjectType, - field: &q::Field, + field: &a::Field, scalar_type: &s::ScalarType, value: Option, - argument_values: &HashMap<&str, r::Value>, ) -> Result { // Check if we are resolving the proofOfIndexing bytes if &parent_object_type.name == "Query" && &field.name == "proofOfIndexing" && &scalar_type.name == "Bytes" { - return self.resolve_proof_of_indexing(argument_values); + return self.resolve_proof_of_indexing(field); } // Fallback to the same as is in the default trait implementation. There @@ -408,20 +401,19 @@ where fn resolve_objects( &self, prefetched_objects: Option, - field: &q::Field, + field: &a::Field, _field_definition: &s::Field, object_type: ObjectOrInterface<'_>, - arguments: &HashMap<&str, r::Value>, ) -> Result { match (prefetched_objects, object_type.name(), field.name.as_str()) { // The top-level `indexingStatuses` field (None, "SubgraphIndexingStatus", "indexingStatuses") => { - self.resolve_indexing_statuses(arguments) + self.resolve_indexing_statuses(field) } // The top-level `indexingStatusesForSubgraphName` field (None, "SubgraphIndexingStatus", "indexingStatusesForSubgraphName") => { - self.resolve_indexing_statuses_for_subgraph_name(arguments) + self.resolve_indexing_statuses_for_subgraph_name(field) } // Resolve fields of `Object` values (e.g. 
the `chains` field of `ChainIndexingStatus`) @@ -432,26 +424,23 @@ where fn resolve_object( &self, prefetched_object: Option, - field: &q::Field, + field: &a::Field, _field_definition: &s::Field, _object_type: ObjectOrInterface<'_>, - arguments: &HashMap<&str, r::Value>, ) -> Result { match (prefetched_object, field.name.as_str()) { // The top-level `indexingStatusForCurrentVersion` field (None, "indexingStatusForCurrentVersion") => { - self.resolve_indexing_status_for_version(arguments, true) + self.resolve_indexing_status_for_version(field, true) } // The top-level `indexingStatusForPendingVersion` field (None, "indexingStatusForPendingVersion") => { - self.resolve_indexing_status_for_version(arguments, false) + self.resolve_indexing_status_for_version(field, false) } // The top-level `indexingStatusForPendingVersion` field - (None, "subgraphFeatures") => { - graph::block_on(self.resolve_subgraph_features(arguments)) - } + (None, "subgraphFeatures") => graph::block_on(self.resolve_subgraph_features(field)), // Resolve fields of `Object` values (e.g. 
the `latestBlock` field of `EthereumBlock`) (value, _) => Ok(value.unwrap_or(r::Value::Null)), diff --git a/store/postgres/tests/store.rs b/store/postgres/tests/store.rs index 42d1207d4bd..57e68ae3a40 100644 --- a/store/postgres/tests/store.rs +++ b/store/postgres/tests/store.rs @@ -1,3 +1,4 @@ +use graph::data::graphql::ext::TypeDefinitionExt; use graph_chain_ethereum::{Mapping, MappingABI}; use graph_mock::MockMetricsRegistry; use hex_literal::hex; @@ -1474,13 +1475,12 @@ fn subgraph_schema_types_have_subgraph_id_directive() { .api_schema(&deployment.hash) .expect("test subgraph should have a schema"); for typedef in schema - .document() - .definitions - .iter() + .definitions() .filter_map(|def| match def { s::Definition::TypeDefinition(typedef) => Some(typedef), _ => None, }) + .filter(|typedef| !typedef.is_introspection()) { // Verify that all types have a @subgraphId directive on them let directive = match typedef {