diff --git a/website/src/docs/docs.json b/website/src/docs/docs.json index 8eb64644bfa..5ed445238e1 100644 --- a/website/src/docs/docs.json +++ b/website/src/docs/docs.json @@ -6,6 +6,336 @@ "versions": [ { "path": "", + "title": "v13", + "items": [ + { + "path": "index", + "title": "Introduction" + }, + { + "path": "get-started", + "title": "Get Started" + }, + { + "path": "defining-a-schema", + "title": "Defining a schema", + "items": [ + { + "path": "index", + "title": "Overview" + }, + { + "path": "queries", + "title": "Queries" + }, + { + "path": "mutations", + "title": "Mutations" + }, + { + "path": "subscriptions", + "title": "Subscriptions" + }, + { + "path": "object-types", + "title": "Object Types" + }, + { + "path": "scalars", + "title": "Scalars" + }, + { + "path": "arguments", + "title": "Arguments" + }, + { + "path": "input-object-types", + "title": "Input Object Types" + }, + { + "path": "lists", + "title": "Lists" + }, + { + "path": "non-null", + "title": "Non-Null" + }, + { + "path": "enums", + "title": "Enums" + }, + { + "path": "interfaces", + "title": "Interfaces" + }, + { + "path": "unions", + "title": "Unions" + }, + { + "path": "extending-types", + "title": "Extending Types" + }, + { + "path": "directives", + "title": "Directives" + }, + { + "path": "documentation", + "title": "Documentation" + }, + { + "path": "versioning", + "title": "Versioning" + }, + { + "path": "relay", + "title": "Relay" + } + ] + }, + { + "path": "fetching-data", + "title": "Fetching data", + "items": [ + { + "path": "index", + "title": "Overview" + }, + { + "path": "resolvers", + "title": "Resolvers" + }, + { + "path": "fetching-from-databases", + "title": "Fetching from Databases" + }, + { + "path": "fetching-from-rest", + "title": "Fetching from REST" + }, + { + "path": "dataloader", + "title": "DataLoader" + }, + { + "path": "pagination", + "title": "Pagination" + }, + { + "path": "filtering", + "title": "Filtering" + }, + { + "path": "sorting", + "title": "Sorting" + }, + { + "path": "projections", + "title": "Projections" + } + ] + }, + { + "path": "execution-engine", + "title": "Execution Engine", + "items": [ + { + "path": "index", + "title": "Overview" + }, + { + "path": "field-middleware", + "title": "Field middleware" + } + ] + }, + { + "path": "integrations", + "title": "Integrations", + "items": [ + { + "path": "index", + "title": "Overview" + }, + { + "path": "entity-framework", + "title": "Entity Framework" + }, + { + "path": "mongodb", + "title": "MongoDB" + }, + { + "path": "neo4j", + "title": "Neo4J" + }, + { + "path": "spatial-data", + "title": "Spatial Data" + } + ] + }, + { + "path": "server", + "title": "Server", + "items": [ + { + "path": "index", + "title": "Overview" + }, + { + "path": "endpoints", + "title": "Endpoints" + }, + { + "path": "dependency-injection", + "title": "Dependency Injection" + }, + { + "path": "interceptors", + "title": "Interceptors" + }, + { + "path": "global-state", + "title": "Global State" + }, + { + "path": "introspection", + "title": "Introspection" + }, + { + "path": "files", + "title": "Files" + }, + { + "path": "instrumentation", + "title": "Instrumentation" + } + ] + }, + { + "path": "distributed-schema", + "title": "Distributed Schemas", + "items": [ + { + "path": "index", + "title": "Overview" + }, + { + "path": "schema-stitching", + "title": "Schema Stitching" + }, + { + "path": "schema-federations", + "title": "Schema Federations" + }, + { + "path": "schema-configuration", + "title": "Schema Configuration" + } + ] + }, + { + 
"path": "performance", + "title": "Performance", + "items": [ + { + "path": "index", + "title": "Overview" + }, + { + "path": "persisted-queries", + "title": "Persisted queries" + }, + { + "path": "automatic-persisted-queries", + "title": "Automatic persisted queries" + } + ] + }, + { + "path": "security", + "title": "Security", + "items": [ + { + "path": "index", + "title": "Overview" + }, + { + "path": "authentication", + "title": "Authentication" + }, + { + "path": "authorization", + "title": "Authorization" + }, + { + "path": "operation-complexity", + "title": "Operation Complexity" + } + ] + }, + { + "path": "api-reference", + "title": "API Reference", + "items": [ + { + "path": "index", + "title": "Overview" + }, + { + "path": "custom-attributes", + "title": "Custom Attributes" + }, + { + "path": "language", + "title": "Language" + }, + { + "path": "extending-filtering", + "title": "Extending Filtering" + }, + { + "path": "visitors", + "title": "Visitors" + }, + { + "path": "aspnetcore", + "title": "ASP.NET Core" + }, + { + "path": "apollo-federation", + "title": "Apollo Federation" + }, + { + "path": "executable", + "title": "Executable" + } + ] + }, + { + "path": "migrating", + "title": "Migrating", + "items": [ + { + "path": "migrate-from-12-to-13", + "title": "Migrate from 12 to 13" + }, + { + "path": "migrate-from-11-to-12", + "title": "Migrate from 11 to 12" + }, + { + "path": "migrate-from-10-to-11", + "title": "Migrate from 10.5 to 11.0" + } + ] + } + ] + }, + { + "path": "v12", "title": "v12", "items": [ { diff --git a/website/src/docs/hotchocolate/api-reference/migrate-from-10-to-11.md b/website/src/docs/hotchocolate/migrating/migrate-from-10-to-11.md similarity index 100% rename from website/src/docs/hotchocolate/api-reference/migrate-from-10-to-11.md rename to website/src/docs/hotchocolate/migrating/migrate-from-10-to-11.md diff --git a/website/src/docs/hotchocolate/api-reference/migrate-from-11-to-12.md b/website/src/docs/hotchocolate/migrating/migrate-from-11-to-12.md similarity index 100% rename from website/src/docs/hotchocolate/api-reference/migrate-from-11-to-12.md rename to website/src/docs/hotchocolate/migrating/migrate-from-11-to-12.md diff --git a/website/src/docs/hotchocolate/migrating/migrate-from-12-to-13.md b/website/src/docs/hotchocolate/migrating/migrate-from-12-to-13.md new file mode 100644 index 00000000000..3514ef63037 --- /dev/null +++ b/website/src/docs/hotchocolate/migrating/migrate-from-12-to-13.md @@ -0,0 +1,3 @@ +--- +title: Migrate from Hot Chocolate GraphQL server 12 to 13 +--- diff --git a/website/src/docs/hotchocolate/v12/api-reference/apollo-federation.md b/website/src/docs/hotchocolate/v12/api-reference/apollo-federation.md new file mode 100644 index 00000000000..8df45b2c14b --- /dev/null +++ b/website/src/docs/hotchocolate/v12/api-reference/apollo-federation.md @@ -0,0 +1,7 @@ +--- +title: Apollo Federation Subgraph Support +--- + +> Note: Apollo Federation Support is coming with Hot Chocolate 12.6 + +## Example subgraphs diff --git a/website/src/docs/hotchocolate/v12/api-reference/aspnetcore.md b/website/src/docs/hotchocolate/v12/api-reference/aspnetcore.md new file mode 100644 index 00000000000..05dfa135d85 --- /dev/null +++ b/website/src/docs/hotchocolate/v12/api-reference/aspnetcore.md @@ -0,0 +1,394 @@ +--- +title: ASP.NET Core +--- + +Hot Chocolate comes with integration to the ASP.NET Core endpoints API. The middleware implementation follows the current GraphQL over HTTP Spec. 
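+
+Before the middleware can be mapped, a GraphQL server has to be registered with the dependency injection container. The following is only a minimal sketch; `Query` stands in for whatever root type your application defines:
+
+```csharp
+public void ConfigureServices(IServiceCollection services)
+{
+    // Registers the GraphQL server and a root query type.
+    // `Query` is a placeholder for your own query class.
+    services
+        .AddGraphQLServer()
+        .AddQueryType<Query>();
+}
+```
+
+The middleware itself is then mapped onto endpoint routing in `Configure`: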
+ +```csharp +public void Configure(IApplicationBuilder app, IWebHostEnvironment env) +{ + app.UseRouting() + + app.UseEndpoints(endpoints => + { + endpoints.MapGraphQL(); + }); +} +``` + +# GraphQL over HTTP Spec + +The following GraphQL requests follow the current GraphQL over HTTP spec draft. + +If no path is specified, the GraphQL middleware will follow the spec recommendation to map the endpoint to `/graphql`. + +`http://example.com/graphql` + +`http://product.example.com/graphql` + +`http://example.com/product/graphql` + +## GraphQL HTTP POST requests + +The GraphQL HTTP POST request is the most commonly used variant for GraphQL requests over HTTP and is specified [here](https://github.com/graphql/graphql-over-http/blob/master/spec/GraphQLOverHTTP.md#post). + +**request:** + +```http +POST /graphql +HOST: foo.example +Content-Type: application/json + +{ + "query": "query($id: ID!){user(id:$id){name}}", + "variables": { "id": "QVBJcy5ndXJ1" } +} +``` + +**response:** + +```http +HTTP/1.1 200 OK +Content-Type: application/json + +{ + "data": { + "user": { + "name": "Jon Doe" + } + } +} +``` + +## GraphQL HTTP GET request + +GraphQL can also be served through an HTTP GET request. You have the same options as the HTTP POST request, just that the request properties are provided as query parameters. GraphQL HTTP GET requests can be a good choice if you are looking to cache GraphQL requests. + +For example, if we wanted to execute the following GraphQL query: + +```graphql +query ($id: ID!) { + user(id: $id) { + name + } +} +``` + +With the following query variables: + +```json +{ + "id": "QVBJcy5ndXJ1" +} +``` + +This request could be sent via an HTTP GET as follows: + +**request:** + +```http +GET /graphql?query=query(%24id%3A%20ID!)%7Buser(id%3A%24id)%7Bname%7D%7D&variables=%7B%22id%22%3A%22QVBJcy5ndXJ1%22%7D` +HOST: foo.example +``` + +**response:** + +```http +HTTP/1.1 200 OK +Content-Type: application/json + +{ + "data": { + "user": { + "name": "Jon Doe" + } + } +} +``` + +> Note: {query} and {operationName} parameters are encoded as raw strings in the query component. Therefore if the query string contained operationName=null then it should be interpreted as the {operationName} being the string "null". If a literal null is desired, the parameter (e.g. {operationName}) should be omitted. + +The GraphQL HTTP GET request is specified [here](https://github.com/graphql/graphql-over-http/blob/master/spec/GraphQLOverHTTP.md#get). + +By default, Hot Chocolate will only serve query operations when HTTP GET requests are used. You can change this default by specifying the GraphQL server options. + +```csharp +public void Configure(IApplicationBuilder app, IWebHostEnvironment env) +{ + app.UseRouting() + + app.UseEndpoints(endpoints => + { + endpoints + .MapGraphQL() + .WithOptions(new GraphQLServerOptions + { + AllowedGetOperations = AllowedGetOperations.QueryAndMutation + }); + }); +} +``` + +You can also entirely deactivate HTTP GET request handling. 
+ +```csharp +public void Configure(IApplicationBuilder app, IWebHostEnvironment env) +{ + app.UseRouting() + + app.UseEndpoints(endpoints => + { + endpoints + .MapGraphQL() + .WithOptions(new GraphQLServerOptions + { + EnableGetRequests = false + }); + }); +} +``` + +## Incremental Delivery over HTTP + +The Hot Chocolate GraphQL server supports incremental delivery over HTTP, which essentially uses HTTP chunked transfer encoding combined with the [specification of multipart content defined by the W3 in rfc1341](https://www.w3.org/Protocols/rfc1341/7_2_Multipart.html). + +The incremental delivery is at the moment at the RFC stage and is specified [here](https://github.com/graphql/graphql-over-http/blob/master/rfcs/IncrementalDelivery.md). + +Incremental delivery is used with `@defer`, `@stream`, and with request batching. + +# Additional Requests + +Apart from the requests defined by the GraphQL over HTTP spec, Hot Chocolate allows you to batch requests, download the GraphQL SDL, and many more things. + +> Many of the request types stated in this section are on their way into the GraphQL over HTTP spec, and we will update this document as the spec, and its RFCs change. + +## GraphQL Schema request + +Although you can access and query the schema definition through introspection, we support fetching the GraphQL schema SDL as a file. The GraphQL schema SDL is richer with more information and easier to read. + +**request:** + +```http +GET /graphql?sdl +HOST: foo.example +``` + +**response:** + +```http +HTTP/1.1 200 OK +Content-Type: application/graphql + +type Query { + hello: String! +} +``` + +## GraphQL HTTP POST batching request + +We support two kinds of batching variants. + +The first variant to batch GraphQL requests is by sending in an array of GraphQL requests. Hot Chocolate will execute them in order. + +```http +POST /graphql +HOST: foo.example +Content-Type: application/json + +[ + { + # The query document. + "query": "query getHero { hero { name } }", + + # The name of the operation that shall be executed. + "operationName": "getHero", + + # A key under which a query document was saved on the server. + "id": "W5vrrAIypCbniaIYeroNnw==", + + # The variable values for this request. + "variables": { + "a": 1, + "b": "abc" + }, + + # Custom properties that can be passed to the execution engine context data. + "extensions": { + "a": 1, + "b": "abc" + } + }, + { + # The query document. + "query": "query getHero { hero { name } }", + + # The name of the operation that shall be executed. + "operationName": "getHero", + + # A key under which a query document was saved on the server. + "id": "W5vrrAIypCbniaIYeroNnw==", + + # The variable values for this request. + "variables": { + "a": 1, + "b": "abc" + }, + + # Custom properties that can be passed to the execution engine context data. + "extensions": { + "a": 1, + "b": "abc" + } + }, +] +``` + +The second GraphQL batching variant is called operation batching, where you send in one GraphQL request document with multiple operations. The operation execution order is then specified as a query param. + +```http +POST /graphql?batchOperations=[a,b] +HOST: foo.example +Content-Type: application/json + +{ + # The query document. + "query": "query a { hero { name } } query b { hero { name } }", + + # The name of the operation that shall be executed. + "operationName": "getHero", + + # A key under which a query document was saved on the server. + "id": "W5vrrAIypCbniaIYeroNnw==", + + # The variable values for this request. 
+ "variables": { + "a": 1, + "b": "abc" + }, + + # Custom properties that can be passed to the execution engine context data. + "extensions": { + "a": 1, + "b": "abc" + } +} +``` + +By default, the GraphQL server will use the **incremental delivery over HTTP** specification to write the stream results as soon as they are available. This means that depending on your client implementation; you can start using the results as they appear in order. + +The serialization defaults can be changed like the following: + +```csharp +services.AddHttpResultSerializer( + batchSerialization: HttpResultSerialization.JsonArray, + deferSerialization: HttpResultSerialization.MultiPartChunked) +``` + +> More about batching can be found [here](/docs/hotchocolate/v10/execution-engine/batching). + +# Subscription Transport + +Subscriptions are by default delivered over WebSocket. We have implemented the [GraphQL over WebSocket Protocol](https://github.com/apollographql/subscriptions-transport-ws/blob/master/PROTOCOL.md) specified by Apollo. + +## Alternative Transport Protocols + +With version 11.1, we will add alternative transport protocols like the [new proposal for the GraphQL over HTTP spec](https://github.com/graphql/graphql-over-http/pull/140). + +Moreover, we are working on allowing this protocol to be used over SignalR, which gives more flexibility to use subscriptions. + +# Tooling + +The Hot Chocolate GraphQL server comes right out of the gate with excellent tooling. By default, we are mapping our GraphQL IDE Banana Cake Pop to the GraphQL endpoint. This means you just need to open your browser and navigate to the configured endpoint to send requests to your server, explore your schema, or build-up tests. + +![GraphQL IDE](../../../images/get-started-bcp-query.png) + +The GraphQL IDE can be disabled by specifying tool options: + +```csharp +endpoints + .MapGraphQL() + .WithOptions( + new GraphQLServerOptions + { + Tool = { Enable = false } + })); +``` + +# Serialization + +The Hot Chocolate GraphQL server has abstracted the result serialization with the `IHttpResultSerializer` interface. The server uses the registered implementation to resolve the HTTP status code, the HTTP content type, and the serialized response from a GraphQL execution result. + +```csharp +/// +/// This interface specifies how a GraphQL result is serialized to a HTTP response. +/// +public interface IHttpResultSerializer +{ + /// + /// Gets the HTTP content type for the specified execution result. + /// + /// + /// The GraphQL execution result. + /// + /// + /// Returns a string representing the content type, + /// eg. "application/json; charset=utf-8". + /// + string GetContentType(IExecutionResult result); + + /// + /// Gets the HTTP status code for the specified execution result. + /// + /// + /// The GraphQL execution result. + /// + /// + /// Returns the HTTP status code, eg. . + /// + HttpStatusCode GetStatusCode(IExecutionResult result); + + /// + /// Serializes the specified execution result. + /// + /// + /// The GraphQL execution result. + /// + /// + /// The HTTP response stream. + /// + /// + /// The request cancellation token. + /// + ValueTask SerializeAsync( + IExecutionResult result, + Stream stream, + CancellationToken cancellationToken); +} +``` + +We have a default implementation (`DefaultHttpResultSerializer`) that can be used to built custom logic on top of the original implementation to make extensibility easier. By default, we are using `System.Text.Json` to serialize GraphQL execution results to JSON. 
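+
+As an illustration, if you only wanted to report a different content type while keeping the default JSON serialization, a small sketch could derive from the default implementation and override `GetContentType`. The class name and media type here are purely illustrative, and this assumes `GetContentType` can be overridden in the same way as `GetStatusCode` in the example further down:
+
+```csharp
+public class GraphQLResponseContentTypeSerializer : DefaultHttpResultSerializer
+{
+    public override string GetContentType(IExecutionResult result)
+    {
+        // Report the GraphQL-over-HTTP media type for regular query results
+        // and fall back to the default behavior for everything else.
+        if (result is IQueryResult)
+        {
+            return "application/graphql-response+json; charset=utf-8";
+        }
+
+        return base.GetContentType(result);
+    }
+}
+```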
+ +A custom implementation of the result serializer is registered like the following: + +```csharp +services.AddHttpResultSerializer(); +``` + +If you, for instance, wanted to add some special error code handling when some error happened during execution, you could implement this like the following: + +```csharp +public class MyCustomHttpResultSerializer : DefaultHttpResultSerializer +{ + public override HttpStatusCode GetStatusCode(IExecutionResult result) + { + if (result is IQueryResult queryResult && + queryResult.Errors?.Count > 0 && + queryResult.Errors.Any(error => error.Code == "SOME_AUTH_ISSUE")) + { + return HttpStatusCode.Forbidden; + } + + return base.GetStatusCode(result); + } +} +``` diff --git a/website/src/docs/hotchocolate/v12/api-reference/custom-attributes.md b/website/src/docs/hotchocolate/v12/api-reference/custom-attributes.md new file mode 100644 index 00000000000..aec989975fe --- /dev/null +++ b/website/src/docs/hotchocolate/v12/api-reference/custom-attributes.md @@ -0,0 +1,192 @@ +--- +title: "Custom Attributes" +--- + +Hot Chocolate allows to define a schema in various ways. When defining schemas with pure .NET types and custom attributes we need a way to access advanced features like custom field middleware that we have at our disposal with schema types. + +```csharp +public class QueryType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor.Field(t => t.Strings).UsePaging(); + } +} +``` + +This is where descriptor attributes come in. Descriptor attributes allow us to package descriptor configurations into an attribute that can be used to decorate our .NET types. Descriptor attributes act like an interceptor into the configuration of the inferred schema type. + +# Built-In Attributes + +We have prepared the following set of built-in descriptor attributes. + +> ⚠️ **Note:** As middleware comprises the stages of a sequential _pipeline_, the ordering is important. The correct order to use is `UsePaging`, `UseFiltering`, `UseSorting`. + +## UsePagingAttribute + +The `UsePagingAttribute` allows us to use the paging middleware by annotating it to a property or method. + +```csharp +public class Query +{ + [UsePaging] + public IQueryable GetFoos() + { + ... + } +} +``` + +## UseFilteringAttribute + +The `UseFilteringAttribute` allows us to apply the filtering middleware to a property or method. + +```csharp +public class Query +{ + [UseFiltering] + public IQueryable GetFoos() + { + ... + } +} +``` + +> ⚠️ **Note**: Be sure to install the `HotChocolate.Types.Filters` NuGet package. + +## UseSortingAttribute + +The `UseSortingAttribute` allows us to apply the sorting middleware to a property or method. + +```csharp +public class Query +{ + [UseSorting] + public IQueryable GetFoos() + { + ... + } +} +``` + +> ⚠️ **Note**: Be sure to install the `HotChocolate.Types.Sorting` NuGet package. + +## AuthorizeAttribute + +The `AuthorizeAttribute` allows to apply the authorize directives to a class, struct, interface, property or method. The attribute will only be applied if the inferred type is an object type. + +```csharp +public class Query +{ + [Authorize(Policy = "MyPolicy")] + public IQueryable GetFoos() + { + ... + } +} +``` + +# Attribute Chaining + +Attributes can by default be chained, meaning that the attributes are applied in order from the top one to the bottom one. + +The following code ... + +```csharp +public class Query +{ + [UsePaging] + [UseFiltering] + [UseSorting] + public IQueryable GetFoos() + { + ... 
+ } +} +``` + +... would translate to: + +```csharp +public class QueryType + : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor.Field(t => t.Foos) + .UsePaging>() + .UseFiltering() + .UseSorting(); + } +} +``` + +# Custom Descriptor Attributes + +It is super simple to create custom descriptor attributes and package complex functionality in simple to use attributes. + +```csharp +public class SomeMiddlewareAttribute + : ObjectFieldDescriptorAttribute +{ + public override void OnConfigure( + IDescriptorContext context, + IObjectFieldDescriptor descriptor, + MemberInfo member) + { + descriptor.Use(next => context => ...); + } +} +``` + +Within the `OnConfigure` method you can do what you actually would do in the `Configure` method of a type. + +But you also get some context information about where the configuration was applied to, like you get the member to which the attribute was applied to and you get the descriptor context. + +We have one descriptor base class for each first-class descriptor type. + +- EnumTypeDescriptorAttribute +- EnumValueDescriptorAttribute +- InputObjectTypeDescriptorAttribute +- InputFieldDescriptorAttribute +- InterfaceTypeDescriptorAttribute +- InterfaceFieldDescriptorAttribute +- ObjectTypeDescriptorAttribute +- ObjectFieldDescriptorAttribute +- UnionTypeDescriptorAttribute +- ArgumentDescriptorAttribute + +All of these attribute base classes have already the allowed attribute targets applied. That means that we pre-configured the `ObjectFieldDescriptorAttribute` for instance to be only valid on methods and properties. + +If you want to build more complex attributes that can be applied to multiple targets like an interface type and an object type at the same time then you can use our `DescriptorAttribute` base class. This base class is not pre-configured and lets you probe for configuration types. + +```csharp +[AttributeUsage( + AttributeTargets.Property | AttributeTargets.Method, + Inherited = true, + AllowMultiple = true)] +public sealed class MyCustomAttribute : DescriptorAttribute +{ + protected override void TryConfigure( + IDescriptorContext context, + IDescriptor descriptor, + ICustomAttributeProvider element) + { + if(element is MemberInfo member) + { + switch(descriptor) + { + case IInterfaceFieldDescriptor interfaceField: + // do something ... + break; + + case IObjectFieldDescriptor interfaceField: + // do something ... + break; + } + } + } +} +``` + +It is simple to use these attributes. Just annotating a type or a property with an attribute will add the packaged functionality. The types can be used in conjunction with schema types or without. diff --git a/website/src/docs/hotchocolate/v12/api-reference/custom-context-data.md b/website/src/docs/hotchocolate/v12/api-reference/custom-context-data.md new file mode 100644 index 00000000000..b582d1b3885 --- /dev/null +++ b/website/src/docs/hotchocolate/v12/api-reference/custom-context-data.md @@ -0,0 +1,93 @@ +--- +title: Custom Context Data +--- + +When implementing custom middleware, it can be useful to be able to store some custom state on the context. This could be to build up a cache or other state data. Hot Chocolate has two types of context stores that we can use. + +# Global Context Data + +The global context data is a thread-safe dictionary that is available though the `IQueryContext` and the `IResolverContext`. This means we are able to share context data between query middleware components and field middleware components. 
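+
+The examples below read the shared state with a direct cast. When a value might not have been set yet by an earlier component, a defensive read with `TryGetValue` is an option; this sketch reuses the same hypothetical `Foo` state and middleware shape shown below:
+
+```csharp
+builder.Use(next => context =>
+{
+    // Read "Foo" defensively instead of casting directly,
+    // in case an earlier component has not set it yet.
+    if (context.ContextData.TryGetValue("Foo", out var value) && value is Foo foo)
+    {
+        // work with foo ...
+    }
+
+    return next.Invoke(context);
+});
+```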
+ +One common use case is to aggregate some state when the GraphQL request is created and use it in field middleware or in the resolver. + +In order to intercept the request creation we can add an `IQueryRequestInterceptor` to our services and there build up our custom state. + +```csharp +services.AddQueryRequestInterceptor((ctx, builder, ct) => +{ + builder.SetProperty("Foo", new Foo()); + return Task.CompletedTask; +}); +``` + +We can access the initial provided data in a query middleware, field middleware or our resolver. + +Query Middleware Example: + +```csharp +builder.Use(next => context => +{ + // access data + var foo = (Foo)context.ContextData["Foo"]; + + // set new data + context.ContextData["Bar"] = new Bar(); + + return next.Invoke(context); +}); +``` + +Field Middleware Example: + +```csharp +SchemaBuilder.New() + .Use(next => context => + { + // access data + var foo = (Foo)context.ContextData["Foo"]; + + // set new data + context.ContextData["Bar"] = new Bar(); + + return next.Invoke(context); + }) + .Create(); +``` + +Resolver Example: + +```csharp +public Task MyResolver([State("Foo")]Foo foo) +{ + ... +} +``` + +# Scoped Context Data + +The scoped context data is a immutable dictionary and is only available through the `IResolverContext`. + +Scoped state allows us to aggregate state for our child field resolvers. + +Let's say we have the following query: + +```graphql +{ + a { + b { + c + } + } + d { + e { + f + } + } +} +``` + +If the `a`-resolver would put something on the scoped context its sub-tree could access that data. This means, `b` and `c` could access the data but `d`, `e` and `f` would _NOT_ be able to access the data, their dictionary is still unchanged. + +```csharp +context.ScopedContextData = context.ScopedContextData.SetItem("foo", "bar"); +``` diff --git a/website/src/docs/hotchocolate/v12/api-reference/error-filter.md b/website/src/docs/hotchocolate/v12/api-reference/error-filter.md new file mode 100644 index 00000000000..89cb7103774 --- /dev/null +++ b/website/src/docs/hotchocolate/v12/api-reference/error-filter.md @@ -0,0 +1,80 @@ +--- +title: Error Filter +--- + +GraphQL errors in Hot Chocolate are passed to the query result by returning an instance of `IError` or an enumerable of `IError` in a field resolver. + +Moreover, you can throw a `QueryException` that will be be caught by the query engine and translated to a field error. + +One further way to raise an error are non-terminating field errors. This can be raised by using `IResolverContext.RaiseError`. So, with this you can provide a result and raise an error for your current field. + +> If you do want to log errors head over to our diagnostic source [documentation](/docs/hotchocolate/v10/execution-engine/instrumentation) and see how you can hook up your logging framework of choice to it. + +# Error Builder + +Since, errors can have a lot of properties depending on your case we have introduced a new error builder which provides a nice API without thousands of overloads. + +```csharp +return ErrorBuilder.New() + .SetMessage("This is my error.") + .SetCode("FOO_BAR") + .Build(); +``` + +# Exceptions + +If some other exception is thrown during the query execution, then the execution engine will create an instance of `IError` with the message **Unexpected Execution Error** and the actual exception assigned to the error. However, the exception details will not be serialized so by default the user will only see the error message **Unexpected Execution Error**. 
+ +If you want to translate exceptions into errors with useful information then you can write an `IErrorFilter`. + +An error filter has to be registered with the execution builder or with your dependency injection. + +```csharp +IQueryExecuter executer = schema.MakeExecutable(builder => + builder.UseDefaultPipeline(options) + .AddErrorFilter()); +``` + +OR + +```csharp +services.AddErrorFilter(); +``` + +It is also possible to just register the error filter as a delegate like the following. + +```csharp +IQueryExecuter executer = schema.MakeExecutable(builder => + builder.UseDefaultPipeline(options) + .AddErrorFilter(error => + { + if (error.Exception is NullReferenceException) + { + return error.WithCode("NullRef"); + } + return error; + })); +``` + +Since errors are immutable we have added some helper functions like `WithMessage`, `WithCode` and so on that create a new error with the desired properties. Moreover, you can create an error builder from an error and modify multiple properties and then rebuild the error object. + +```csharp +return ErrorBuilder.FromError(error) + .SetMessage("This is my error.") + .SetCode("FOO_BAR") + .Build(); +``` + +# Exception Details + +In order to automatically add exception details to your GraphQL error you can switch the execution option to include exception details. By default we will switch this on if the debugger is attached. You can overwrite the behavior by setting the option. + +```csharp +SchemaBuilder.New() + ... + .Create() + .MakeExecutable(new QueryExecutionOptions + { + IncludeExceptionDetails = true + }); +``` diff --git a/website/src/docs/hotchocolate/v12/api-reference/executable.md b/website/src/docs/hotchocolate/v12/api-reference/executable.md new file mode 100644 index 00000000000..bad91256aa0 --- /dev/null +++ b/website/src/docs/hotchocolate/v12/api-reference/executable.md @@ -0,0 +1,127 @@ +--- +title: Executable +--- + +The `IExecutable` and `IExecutable` interfaces are intended to be used by data providers. +These interfaces can abstract any kind of data source. +The data or domain layer can wrap data in an executable and pass it to the GraphQL layer. +A GraphQL resolver that returns an `IExecutable` is recognized as a list. + +```csharp +public class User +{ + public string Name { get; } +} + +public interface IUserRepostiory +{ + public IExecutable FindAll(); +} + +public class Query +{ + public IExecutable GetUsers([Service] IUserRepostiory repo) => + repo.FindAll(); +} +``` + +```sdl +type Query { + users: [User!]! +} +``` + +This abstraction can be used to completely decouple the GraphQL layer form the database-specific knowledge. + +Filtering, sorting, projections et al, can pick up the executable and apply logic to it. There is still +a database-specific provider needed for these features, but it is opaque to the GraphQL layer. + +The `IExecutable` is known to the execution engine. The engine calls `ToListAsync`, `FirstOrDefault` or +`SingleOrDefault` on the executable. The executable shall execute it in the most efficient way for the +database. + +# API + +## Source + +```csharp + object Source { get; } +``` + +The source property stores the current state of the executable + +In the EnittyFramework executable this property holds the `IQueryable`. In the `MongoExecutable` it is the +`DbSet` or the `IAggregateFluent`. `Source` is deliberately read-only. 
If you have a custom implementation +of `IExecutable` and you want to set the `Source`, you should create a method that returns a new executable +with the new source + +## ToListAsync + +```csharp + ValueTask ToListAsync(CancellationToken cancellationToken); +``` + +Should return a list of ``. + +## FirstOrDefault + +```csharp + ValueTask FirstOrDefault(CancellationToken cancellationToken); +``` + +Should return the first element of a sequence, or a default value if the sequence contains no elements. + +## SingleOrDefault + +```csharp + ValueTask SingleOrDefault(CancellationToken cancellationToken); +``` + +Should return the only element of a default value if no such element exists. This method +should throw an exception if more than one element satisfies the condition. + +## Print + +```csharp +string Print(); +``` + +Prints the executable in its current state + +# Example + +```csharp +public class EntityFrameworkExecutable : QueryableExecutable +{ + public IQueryable Source { get; } + + object IExecutable.Source => Source; + + public EntityFrameworkExecutable(IQueryable queryable) : base(queryable) + { + } + + /// + /// Returns a new enumerable executable with the provided source + /// + /// The source that should be set + /// The new instance of an enumerable executable + public QueryableExecutable WithSource(IQueryable source) + { + return new QueryableExecutable(source); + } + + public override async ValueTask ToListAsync(CancellationToken cancellationToken) => + await Source.ToListAsync(cancellationToken).ConfigureAwait(false); + + public override async ValueTask FirstOrDefaultAsync( + CancellationToken cancellationToken) => + await Source.FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); + + public override async ValueTask SingleOrDefaultAsync( + CancellationToken cancellationToken) => + await Source.SingleOrDefaultAsync(cancellationToken).ConfigureAwait(false); + + public override string Print() => Source.ToQueryString(); +} +``` diff --git a/website/src/docs/hotchocolate/v12/api-reference/extending-filtering.md b/website/src/docs/hotchocolate/v12/api-reference/extending-filtering.md new file mode 100644 index 00000000000..f0682dcd2df --- /dev/null +++ b/website/src/docs/hotchocolate/v12/api-reference/extending-filtering.md @@ -0,0 +1,419 @@ +--- +title: Extending Filtering +--- + +> **Work in progress**: This documentation is not yet complete. + +The `HotChocolate.Data` package works with all databases that support `IQueryable`. Included in the +default settings, are all filter operations that work over `IQueryable` on all databases. +Sometimes this is not enough. Some databases might not support `IQueryable`. Some other databases may have +technology-specific operations (e.g. SQL Like). Filtering was designed with extensibility in mind. + +Filtering can be broken down into two basic parts. Schema building and execution. In schema building, +the input types are created. In execution, the data passed by the user is analyzed and translated to a +database query. Both parts can be configured over a convention. + +In theory, you are free to design the structure of filters as it suits you best. +Usually, it makes sense to divide the structure into two parts. The _field_ and the _operation_. + +The query below returns all movies where the franchise is equal to "Star Wars". The _field_ `franchise` where the filter +is applied to and the _operation_ equals (`eq`) that should operate on this field. 
+ +```graphql +{ + movies(where: { franchise: { eq: "Star Wars" } }) { + name + } +} +``` + +Fields can also form paths. In the query below there are two _fields_ `genre` and `totalMovieCount` and one operation equals +`eq` + +```graphql +{ + movies(where: { genre: { totalMovieCount: { eq: 100 } } }) { + name + } +} +``` + +The two queries above show the difference between _fields_ and _operations_ well. A field is always context-specific. +Even when two fields have the same name, like the description of a movie and the description of a genre, they have different meanings. +One field refers to the description of a movie and the other description refers to the description of a genre. +Same name, different meanings. An operation on the other hand, has always the same meaning. +The equals operation (`eq`) do always mean that the value of the selected field, should +be equals to the value that was provided in the query. +Operations can be applied in different contexts, but the operation itself, stays the same. +The name of the operation should be consistent. There should only be one operation that checks for equality. +This operation should always have the same name. + +With this in mind, we can have a deeper dive into filtering. Buckle up, this might get exciting. + +# How everything fits together + +At the core of the configuration API of filtering there sits a convention. The convention holds the whole +configuration that filtering needs to create filter types and to translate them to the database. +During schema creation, the schema builder asks the convention how the schema should look like. +The convention defines the names and descriptions of types and fields and also what the type should be used for properties. +The convention also defines what provider should be used to translate a GraphQL query to a database query. +The provider is the only thing that is used after the schema is built. +Every field or operation in a filter type has a handler annotated. +During schema initialization, these handlers are bound, to the GraphQL fields. The provider can specify which handler should be bound to which field. +During execution, the provider visits the incoming value node and executes the handler on the fields. +This loose coupling allows defining the provider independently of the convention. + +# Filter Convention + +A filter convention is a dotnet class that has to implement the interface `IFilterConvention`. +Instead of writing a convention completely new, it is recommended to extend the base convention `FilterConvention` +This convention is also configurable with a fluent interface, so in most cases you can probably just use the descriptor API. + +## Descriptor + +Most of the capabilities of the descriptor are already documented under `Fetching Data -> Filtering`. +If you have not done this already, it is now the right time to head over to [Filtering](/docs/hotchocolate/v12/fetching-data/filtering) and read the parts about the `FilterConventions` + +There are two things on this descriptor that are not documented in `Fetching Data`: + +### Operation + +```csharp + IFilterOperationConventionDescriptor Operation(int operationId); +``` + +Operations are configured globally. Each operation has a unique identifier. You can find the build-in identifiers in `DefaultFilterOperations`. +This identifier is used in the `FilterInputType`'s to bind operations on a type. Filter operations can also be configured with a fluent interface. +You can specify the name and the description of the operation. 
This configuration is applied to all operation fields a `FilterInputType` defines. + +```csharp +conventionDescriptor + .Operation(DefaultFilterOperations.Equals) + .Name("equals") + .Description("Compares the value of the input to the value of the field"); +``` + +With this configuration, all equals operations are now no longer names `eq` but `equals` and have a description. + +If you want to create your own operations, you have to choose an identifier. +To make sure to not collide with the framework, choose a number that is higher than 1024. +If you are a framework developer and want to create an extension for HotChocolate, talk to us. +We can assign you a range of operations so you do not collide with the operations defined by users. + +You will need this identifier later, so it probably makes sense to store it somewhere on a class + +```csharp +public static class CustomOperations +{ + public const int Like = 1025; +} + +public static class CustomerFilterConventionExtensions +{ + public static IFilterConventionDescriptor AddInvariantComparison( + this IFilterConventionDescriptor conventionDescriptor) => + conventionDescriptor + .Operation(CustomOperations.Like) + .Name("like"); +} +``` + +To apply this configuration to operations types, you can use the Configure method + +```csharp + conventionDescriptor.Configure( + x => x.Operation(CustomOperations.Like)) +``` + +### Provider + +```csharp + IFilterConventionDescriptor Provider() + where TProvider : class, IFilterProvider; + IFilterConventionDescriptor Provider(TProvider provider) + where TProvider : class, IFilterProvider; + IFilterConventionDescriptor Provider(Type provider); +``` + +On the convention, you can also specify what provider should be used. For now you need just to know +that you can configure the provider here. We will have a closer look at the provider later. + +```csharp +conventionDescriptor.Provider(); +``` + +## Custom Conventions + +Most of the time the descriptor API should satisfy your needs. It is recommended to build extensions +based on the descriptor API, rather than creating a custom convention. +However, if you want to have full control over naming and type creation, you can also override the methods +you need on the `FilterConvention`. + +You can also override the configure method to have a (probably) familiar API experience. + +```csharp +public class CustomConvention : FilterConvention +{ + protected override void Configure(IFilterConventionDescriptor descriptor) + { + desciptor.AddDefaults(); + } + + public override NameString GetTypeName(Type runtimeType) => + base.GetTypeName(runtimeType) + "Suffix"; +} +``` + +# Providers + +Like the convention, a provider can be configured over a fluent interface. +Every filter field or operation has a specific handler defined. The handler translates the operation to the database. +These handlers are stored on the provider. After the schema is initialized, an interceptor visits the filter types and requests a handler from the provider. +The handler is annotated directly on the field. +The provider translates an incoming query into a database query by traversing an input object and executing the handlers on the fields. + +The output of a translation is always some kind of _filter definition_. In case, of `IQueryable` this is an expression. +In case, of MongoDB this is a `FilterDefinition`. Provider, visitor context and handler, operate on and produce this _filter definition_. + +To inspect and analyze the input object, the provider uses a visitor. 
+ +What a visitor is and how you can write you own visitor is explained [here](/docs/hotchocolate/v12/api-reference/visitors) + +Visitors are a powerful yet complex concept, we tried our best to abstract it away. +For most cases, you will not need to create a custom visitor. + +## Provider Descriptor + +The descriptor of a provider is simple. It only has one method: + +```csharp + IFilterProviderDescriptor AddFieldHandler() + where TFieldHandler : IFilterFieldHandler; +``` + +With this method you can register field handlers on the provider. + +## Field Handler + +Every field or operation is annotated with an instance of a `FilterFieldHandler`. When the provider is asked for a handler for a field, it iterates sequentially through the list of existing field handlers and calls the `CanHandle` method. +The first field handler that can handle the field, is annotated on the field. +As the visitor traverses the input object, it calls `TryHandleEnter` as it enters the input field and `TryHandleLeave` as it leaves it. + +> A field handler supports constructor injection and is a singleton. Do not store data on the field handler. use the `context` of the visitor for state management. + +### CanHandle + +```csharp + bool CanHandle( + ITypeCompletionContext context, + IFilterInputTypeDefinition typeDefinition, + IFilterFieldDefinition fieldDefinition); +``` + +Tests if this field handler can handle a field. If it can handle the field it will be attached to it. + +### TryHandleEnter + +```csharp +bool TryHandleEnter( + TContext context, + IFilterField field, + ObjectFieldNode node, + [NotNullWhen(true)] out ISyntaxVisitorAction? action); +``` + +This method is called when the visitor encounters a field. + +- `context` is the context of the visitor +- `field` is the instance of the field that is currently visited +- `node` is the field node of the input object. `node.Value` contains the value of the field. +- `action` If `TryHandleEnter` returns true, the action is used for further processing by the visitor. + +### TryHandleLeave + +```csharp +bool TryHandleLeave( + TContext context, + IFilterField field, + ObjectFieldNode node, + [NotNullWhen(true)] out ISyntaxVisitorAction? action); +``` + +This method is called when the visitor leave the field it previously entered. + +- `context` is the context of the visitor +- `field` is the instance of the field that is currently visited +- `node` is the field node of the input object. `node.Value` contains the value of the field. +- `action` If `TryHandleLeave` returns true, the action is used for further processing by the visitor. + +## Filter Operation Handlers + +There is only one kind of field handler. To make it easier to handle operations, there also exists `FilterOperationHandler`, a more specific abstraction. +You can override `TryHandleOperation` to handle operations. + +## The Context + +As the visitor and the field handlers are singletons, a context object is passed along with the traversation of input objects. +Field handlers can push data on this context, to make it available for other handlers further down in the tree. + +The context contains `Types`, `Operations`, `Errors` and `Scopes`. It is very provider-specific what data you need to store in the context. +In the case of the `IQueryable` provider, it also contains `RuntimeTypes` and knows if the source is `InMemory` or a database call. + +With `Scopes` it is possible to add multiple logical layers to a context. 
In the case of `IQuerable` this is needed, whenever a new closure starts + +```csharp +// /------------------------ SCOPE 1 -----------------------------\ +// /----------- SCOPE 2 -------------\ +users.Where(x => x.Company.Addresses.Any(y => y.Street == "221B Baker Street")) +``` + +A filter statement that produces the expression above would look like this + +```graphql +{ + users( + where: { + company: { addresses: { any: { street: { eq: "221B Baker Street" } } } } + } + ) { + name + } +} +``` + +A little simplified this is what happens during visitation: + +```graphql +{ + users( + # level[0] = [] + # instance[0] = x + # Create SCOPE 1 with parameter x of type User + where: { + # Push property User.Company onto the scope + # instance[1] = x.Company + # level[1] = [] + company: { + # Push property Company.Addresses onto the scope + # instance[2] x.Company.Addresses + # level[2] = [] + addresses: { + # Create SCOPE 2 with parameter y of type Address + # instance[0] = y + # level[0] = [] + any: { + # Push poperty Address.Street onto the scope + # instance[1] = y.Street + # level[1] = [] + street: { + # Create and push the operation onto the scope + # instance[2] = y.Street + # level[2] = [y.Street == "221B Baker Street"] + eq: "221B Baker Street" + } + # Combine everything of the current level and pop the porperty street from the instance + # instance[1] = y.Street + # level[1] = [y.Street == "221B Baker Street"] + } + # Combine everything of the current level, create the any operation and exit SCOPE 2 + # instance[2] = x.Company.Addresses + # level[2] = [x.Company.Addresses.Any(y => y.Street == "221B Baker Street")] + } + # Combine everything of the current level and pop the porperty street from the instance + # instance[1] = x.Company + # level[1] = [x.Company.Addresses.Any(y => y.Street == "221B Baker Street")] + } + # Combine everything of the current level and pop the porperty street from the instance + # instance[0] = x + # level[0] = [x.Company.Addresses.Any(y => y.Street == "221B Baker Street")] + } + ) { + name + } +} +``` + +# Extending IQueryable + +The default filtering implementation uses `IQueryable` under the hood. You can customize the translation of queries by registering handlers on the `QueryableFilterProvider`. + +The following example creates a `StringOperationHandler` that supports case insensitive filtering: + +```csharp +// The QueryableStringOperationHandler already has an implemenation of CanHandle +// It checks if the field is declared in a string operation type and also checks if +// the operation of this field uses the `Operation` specified in the override property further +// below +public class QueryableStringInvariantEqualsHandler : QueryableStringOperationHandler +{ + // For creating a expression tree we need the `MethodInfo` of the `ToLower` method of string + private static readonly MethodInfo _toLower = typeof(string) + .GetMethods() + .Single( + x => x.Name == nameof(string.ToLower) && + x.GetParameters().Length == 0); + + // This is used to match the handler to all `eq` fields + protected override int Operation => DefaultFilterOperations.Equals; + + public override Expression HandleOperation( + QueryableFilterContext context, + IFilterOperationField field, + IValueNode value, + object parsedValue) + { + // We get the instance of the context. This is the expression path to the propert + // e.g. ~> y.Street + Expression property = context.GetInstance(); + + // the parsed value is what was specified in the query + // e.g. 
~> eq: "221B Baker Street" + if (parsedValue is string str) + { + // Creates and returnes the operation + // e.g. ~> y.Street.ToLower() == "221b baker street" + return Expression.Equal( + Expression.Call(property, _toLower), + Expression.Constant(str.ToLower())); + } + + // Something went wrong 😱 + throw new InvalidOperationException(); + } +} +``` + +This operation handler can be registered on the convention: + +```csharp +public class CustomFilteringConvention : FilterConvention +{ + protected override void Configure(IFilterConventionDescriptor descriptor) + { + descriptor.AddDefaults(); + descriptor.Provider( + new QueryableFilterProvider( + x => x + .AddDefaultFieldHandlers() + .AddFieldHandler())); + } +} + +// and then +services.AddGraphQLServer() + .AddFiltering(); +``` + +To make this registration easier, Hot Chocolate also supports convention and provider extensions. +Instead of creating a custom `FilterConvention`, you can also do the following: + +```csharp +services + .AddGraphQLServer() + .AddFiltering() + .AddConvention( + new FilterConventionExtension( + x => x.AddProviderExtension( + new QueryableFilterProviderExtension( + y => y.AddFieldHandler())))); +``` diff --git a/website/src/docs/hotchocolate/v12/api-reference/filtering.md b/website/src/docs/hotchocolate/v12/api-reference/filtering.md new file mode 100644 index 00000000000..57864585889 --- /dev/null +++ b/website/src/docs/hotchocolate/v12/api-reference/filtering.md @@ -0,0 +1,2031 @@ +--- +title: Filtering +--- + +**What are filters?** + +With Hot Chocolate filters, you can expose complex filter objects through your GraphQL API that translates to native database queries. + +The default filter implementation translates filters to expression trees and applies these on `IQueryable`. + +# Overview + +Filters by default work on `IQueryable` but you can also easily customize them to use other interfaces. + +Hot Chocolate by default will inspect your .NET model and infer the possible filter operations from it. + +The following type would yield the following filter operations: + +```csharp +public class Foo +{ + public string Bar { get; set; } +} +``` + +```graphql +input FooFilter { + bar: String + bar_contains: String + bar_ends_with: String + bar_in: [String] + bar_not: String + bar_not_contains: String + bar_not_ends_with: String + bar_not_in: [String] + bar_not_starts_with: String + bar_starts_with: String + AND: [FooFilter!] + OR: [FooFilter!] +} +``` + +**So how can we get started with filters?** + +Getting started with filters is very easy, especially if you do not want to explicitly define filters or customize anything. + +Hot Chocolate will infer the filters directly from your .Net Model and then use a Middleware to apply filters to `IQueryable` or `IEnumerable` on execution. + +> ⚠️ **Note:** If you use more than middleware, keep in mind that **ORDER MATTERS**. + +> ⚠️ **Note:** Be sure to install the `HotChocolate.Types.Filters` NuGet package. + +In the following example, the person resolver returns the `IQueryable` representing the data source. The `IQueryable` represents a not executed database query on which Hot Chocolate can apply filters. + +**Code First** + +The next thing to note is the `UseFiltering` extension method which adds the filter argument to the field and a middleware that can apply those filters to the `IQueryable`. The execution engine will, in the end, execute the `IQueryable` and fetch the data. 
+ +```csharp +public class QueryType + : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor.Field(t => t.GetPersons(default)) + .Type>>() + .UseFiltering(); + } +} + +public class Query +{ + public IQueryable GetPersons([Service]IPersonRepository repository) + { + repository.GetPersons(); + } +} +``` + +**Pure Code First** + +The field descriptor attribute `[UseFiltering]` does apply the extension method `UseFiltering()` on the field descriptor. + +```csharp +public class Query +{ + [UseFiltering] + public IQueryable GetPersons([Service]IPersonRepository repository) + { + repository.GetPersons(); + } +} +``` + +**Schema First** + +> ⚠️ **Note:** Schema first does currently not support filtering! + +# Customizing Filters + +A `FilterInputType` defines a GraphQL input type, that Hot Chocolate uses for filtering. You can customize these similar to a normal input type. You can change the name of the type; add, remove, or change operations or directive; and configure the binding behavior. To define and customize a filter we must inherit from `FilterInputType` and configure it like any other type by overriding the `Configure` method. + +```csharp +public class PersonFilterType + : FilterInputType +{ + protected override void Configure( + IFilterInputTypeDescriptor descriptor) + { + descriptor + .BindFieldsExplicitly() + .Filter(t => t.Name) + .BindOperationsExplicitly() + .AllowEquals().Name("equals").And() + .AllowContains().Name("contains").And() + .AllowIn().Name("in"); + } +} +``` + +The above filter type defines explicitly which fields allow filtering and what operations these filters allow. Additionally, the filter type changes the name of the equals operation of the filter of the field `Name` to `equals`. + +To make use of the configuration in this filter type, you can provide it to the `UseFiltering` extension method as the generic type argument. + +```csharp +public class QueryType + : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor.Field(t => t.GetPerson(default)) + .Type>>(); + .UseFiltering() + } +} +``` + +# Sorting + +Like with filter support you can add sorting support to your database queries. + +```csharp +public class QueryType + : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor.Field(t => t.GetPerson(default)) + .Type>>(); + .UseSorting() + } +} +``` + +> ⚠️ **Note**: Be sure to install the `HotChocolate.Types.Sorting` NuGet package. + +If you want to combine for instance paging, filtering, and sorting make sure that the order is like follows: + +```csharp +public class QueryType + : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor.Field(t => t.GetPerson(default)) + .UsePaging() + .UseFiltering() + .UseSorting(); + } +} +``` + +**Why is order important?** + +Paging, filtering, and sorting are modular middlewares that form the field resolver pipeline. + +The above example forms the following pipeline: + +`Paging -> Filtering -> Sorting -> Field Resolver` + +The paging middleware will first delegate to the next middleware, which is filtering. + +The filtering middleware will also first delegate to the next middleware, which is sorting. + +The sorting middleware will again first delegate to the next middleware, which is the actual field resolver. + +The field resolver will call `GetPerson` which returns in this example an `IQueryable`. 
The queryable represents a not yet executed database query. + +After the resolver has been executed and puts its result onto the middleware context the sorting middleware will apply for the sort order on the query. + +After the sorting middleware has been executed and updated the result on the middleware context the filtering middleware will apply its filters on the queryable and updates the result on the middleware context. + +After the paging middleware has been executed and updated the result on the middleware context the paging middleware will slice the data and execute the queryable which will then actually pull in data from the data source. + +So, if we, for instance, applied paging as our last middleware the data set would have been sliced first and then filtered which in most cases is not what we actually want. + +# Filter & Operations Kinds + +You can break down filtering into different kinds of filters that then have different operations. +The filter kind is bound to the type. A string is fundamentally something different than an array or an object. +Each filter kind has different operations that you can apply to it. Some operations are unique to a filter and some operations are shared across multiple filter +e.g. A string filter has string specific operations like `Contains` or `EndsWith` but still shares the operations `Equals` and `NotEquals` with the boolean filter. + +## Filter Kinds + +Hot Chocolate knows following filter kinds + +| Kind | Operations | +| ---------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| String | Equals, In, EndsWith, StartsWith, Contains, NotEquals, NotIn, NotEndsWith, NotStartsWith, NotContains | +| Bool | Equals, NotEquals | +| Object | Equals | +| Array | Some, Any, All, None | +| Comparable | Equals, In, GreaterThan, GreaterThanOrEqual, LowerThan, LowerThanOrEqual, NotEquals, NotIn, NotGreaterThan, NotGreaterThanOrEqual, NotLowerThan, NotLowerThanOrEqual | + +## Operations Kinds + +Hot Chocolate knows following operation kinds + +| Kind | Operations | +| ---------------------- | ----------------------------------------------------------------------------------------------------- | +| Equals | Compares the equality of input value and property value | +| NotEquals | negation of Equals | +| In | Checks if the property value is contained in a given list of input values | +| NotIn | negation of In | +| GreaterThan | checks if the input value is greater than the property value | +| NotGreaterThan | negation of GreaterThan | +| GreaterThanOrEquals | checks if the input value is greater than or equal to the property value | +| NotGreaterThanOrEquals | negation of GreaterThanOrEquals | +| LowerThan | checks if the input value is lower than the property value | +| NotLowerThan | negation of LowerThan | +| LowerThanOrEquals | checks if the input value is lower than or equal to the property value | +| NotLowerThanOrEquals | negation of LowerThanOrEquals | +| EndsWith | checks if the property value ends with the input value | +| NotEndsWith | negation of EndsWith | +| StartsWith | checks if the property value starts with the input value | +| NotStartsWith | negation of StartsWith | +| Contains | checks if the input value is contained in the property value | +| NotContains | negation of Contains | +| Some | checks if at least one element in the collection exists | +| Some | checks if at least one element of the property value meets the 
condition provided by the input value | +| None | checks if no element of the property value meets the condition provided by the input value | +| All | checks if all least one element of the property value meets the condition provided by the input value | + +## Boolean Filter + +In this example, we look at the filter configuration of a Boolean filter. +As an example, we will use the following model: + +```csharp +public class User +{ + public bool IsOnline {get;set;} +} + +public class Query : ObjectType +{ + [UseFiltering] + public IQueryable GetUsers([Service]UserService users ) + => users.AsQueryable(); +} + +``` + +The produced GraphQL SDL will look like the following: + +```graphql +type Query { + users(where: UserFilter): [User] +} + +type User { + isOnline: Boolean +} + +input UserFilter { + isOnline: Boolean + isOnline_not: Boolean + AND: [UserFilter!] + OR: [UserFilter!] +} +``` + +### Boolean Operation Descriptor + +The example above showed that configuring the operations is optional. +If you want to have access to the actual field input types or allow only a subset of Boolean filters for a given property, you can configure the operation over the `IFilterInputTypeDescriptor` + +```csharp +public class UserFilterType : FilterInputType +{ + protected override void Configure( + IFilterInputTypeDescriptor descriptor) + { + descriptor.BindFieldsExplicitly(); + descriptor.Filter(x => x.Name) + .AllowEquals().And() + .AllowNotEquals(); + } +} +``` + +## Comparable Filter + +In this example, we look at the filter configuration of a comparable filter. + +A comparable filter is generated for all values that implement IComparable except string and boolean. +e.g. `csharp±enum`, `csharp±int`, `csharp±DateTime`... + +As an example, we will use the following model: + +```csharp +public class User +{ + public int LoggingCount {get;set;} +} + +public class Query : ObjectType +{ + [UseFiltering] + public IQueryable GetUsers([Service]UserService users ) + => users.AsQueryable(); +} + +``` + +The produced GraphQL SDL will look like the following: + +```graphql +type Query { + users(where: UserFilter): [User] +} + +type User { + loggingCount: Int +} + +input UserFilter { + loggingCount: Int + loggingCount_gt: Int + loggingCount_gte: Int + loggingCount_in: [Int!] + loggingCount_lt: Int + loggingCount_lte: Int + loggingCount_not: Int + loggingCount_not_gt: Int + loggingCount_not_gte: Int + loggingCount_not_in: [Int!] + loggingCount_not_lt: Int + loggingCount_not_lte: Int + AND: [UserFilter!] + OR: [UserFilter!] +} +``` + +### Comparable Operation Descriptor + +The example above showed that configuring the operations is optional. +If you want to have access to the actual field input types or allow only a subset of comparable filters for a given property, you can configure the operation over the `IFilterInputTypeDescriptor` + +```csharp +public class UserFilterType : FilterInputType +{ + protected override void Configure( + IFilterInputTypeDescriptor descriptor) + { + descriptor.BindFieldsExplicitly(); + descriptor.Filter(x => x.Name) + .AllowEquals().And() + .AllowNotEquals().And() + .AllowGreaterThan().And() + .AllowNotGreaterThan().And() + .AllowGreaterThanOrEqals().And() + .AllowNotGreaterThanOrEqals().And() + .AllowLowerThan().And() + .AllowNotLowerThan().And() + .AllowLowerThanOrEqals().And() + .AllowNotLowerThanOrEqals().And() + .AllowIn().And() + .AllowNotIn(); + } +} +``` + +## String Filter + +In this example, we look at the filter configuration of a String filter. 
+As an example, we will use the following model: + +```csharp +public class User +{ + public string Name {get;set;} +} + +public class Query : ObjectType +{ + [UseFiltering] + public IQueryable GetUsers([Service]UserService users ) + => users.AsQueryable(); +} + +``` + +The produced GraphQL SDL will look like the following: + +```graphql +type Query { + users(where: UserFilter): [User] +} + +type User { + name: String +} + +input UserFilter { + name: String + name_contains: String + name_ends_with: String + name_in: [String] + name_not: String + name_not_contains: String + name_not_ends_with: String + name_not_in: [String] + name_not_starts_with: String + name_starts_with: String + AND: [UserFilter!] + OR: [UserFilter!] +} +``` + +### String Operation Descriptor + +The example above showed that configuring the operations is optional. +If you want to have access to the actual field input types or allow only a subset of string filters for a given property, you can configure the operation over the `IFilterInputTypeDescriptor` + +```csharp +public class UserFilterType : FilterInputType +{ + protected override void Configure( + IFilterInputTypeDescriptor descriptor) + { + descriptor.BindFieldsExplicitly(); + descriptor.Filter(x => x.Name) + .AllowEquals().And() + .AllowNotEquals().And() + .AllowContains().And() + .AllowNotContains().And() + .AllowStartsWith().And() + .AllowNotStartsWith().And() + .AllowEndsWith().And() + .AllowNotEndsWith().And() + .AllowIn().And() + .AllowNotIn(); + } +} +``` + +## Object Filter + +In this example, we look at the filter configuration of an object filter. + +Hot Chocolate generated object filters for all objects. Since Version 11, Hot Chocolate also generates filter types for nested objects. You can also use object filters to filter over database relations. + +As an example, we will use the following model: + +```csharp +public class User +{ + public Address Address {get;set;} +} + +public class Address +{ + public string Street {get;set;} + + public bool IsPrimary {get;set;} +} + +public class Query : ObjectType +{ + [UseFiltering] + public IQueryable GetUsers([Service]UserService users ) + => users.AsQueryable(); +} + +``` + +The produced GraphQL SDL will look like the following: + +```graphql +type Query { + users(where: UserFilter): [User] +} + +type User { + address: Address +} + +type Address { + isPrimary: Boolean + street: String +} + +input UserFilter { + address: AddressFilter + AND: [UserFilter!] + OR: [UserFilter!] +} + +input AddressFilter { + is_primary: Boolean + is_primary_not: Boolean + street: String + street_contains: String + street_ends_with: String + street_in: [String] + street_not: String + street_not_contains: String + street_not_ends_with: String + street_not_in: [String] + street_not_starts_with: String + street_starts_with: String + AND: [AddressFilter!] + OR: [AddressFilter!] +} +``` + +### Object Operation Descriptor + +The example above showed that configuring the operations is optional. 
+If you want to have access to the actual field input types or allow only a subset of object filters for a given property, you can configure the operation over the `IFilterInputTypeDescriptor`.
+
+```csharp
+public class UserFilterType : FilterInputType<User>
+{
+    protected override void Configure(
+        IFilterInputTypeDescriptor<User> descriptor)
+    {
+        descriptor.BindFieldsExplicitly();
+        descriptor.Object(x => x.Address);
+    }
+}
+```
+
+**Configuring a custom nested filter type:**
+
+```csharp
+public class UserFilterType : FilterInputType<User>
+{
+    protected override void Configure(
+        IFilterInputTypeDescriptor<User> descriptor)
+    {
+        descriptor.BindFieldsExplicitly();
+        descriptor.Object(x => x.Address).AllowObject<AddressFilterType>();
+    }
+}
+
+public class AddressFilterType : FilterInputType<Address>
+{
+    protected override void Configure(
+        IFilterInputTypeDescriptor<Address> descriptor)
+    {
+        descriptor.BindFieldsExplicitly();
+        descriptor.Filter(x => x.IsPrimary);
+    }
+}
+
+// or inline
+
+public class UserFilterType : FilterInputType<User>
+{
+    protected override void Configure(
+        IFilterInputTypeDescriptor<User> descriptor)
+    {
+        descriptor.BindFieldsExplicitly();
+        descriptor.Object(x => x.Address)
+            .AllowObject(
+                y => y.BindFieldsExplicitly().Filter(z => z.IsPrimary));
+    }
+}
+```
+
+## List Filter
+
+In this example, we look at the filter configuration of a list filter.
+
+Hot Chocolate can also generate filters for IEnumerables. Like object filters, Hot Chocolate generates filters for the whole object tree. List filters address scalars and object values differently.
+If the field is a scalar value, Hot Chocolate creates an object type to address the different operations of this scalar. e.g. If you specify filters for a list of strings, Hot Chocolate creates an object type that contains all operations of the string filter.
+In case the list holds a complex object, it generates an object filter for this object instead.
+
+Hot Chocolate implicitly generates filters for all properties that implement `IEnumerable`.
+e.g. `csharp±string[]`, `csharp±List<string>`, `csharp±IEnumerable<string>`...
+
+As an example, we will use the following model:
+
+```csharp
+public class User
+{
+    public string[] Roles {get;set;}
+
+    public IEnumerable<Address> Addresses {get;set;}
+}
+
+public class Address
+{
+    public string Street {get;set;}
+
+    public bool IsPrimary {get;set;}
+}
+
+public class Query : ObjectType
+{
+    [UseFiltering]
+    public IQueryable<User> GetUsers([Service]UserService users)
+        => users.AsQueryable();
+}
+```
+
+The produced GraphQL SDL will look like the following:
+
+```graphql
+type Query {
+  users(where: UserFilter): [User]
+}
+
+type User {
+  addresses: [Address]
+  roles: [String]
+}
+
+type Address {
+  isPrimary: Boolean
+  street: String
+}
+
+input UserFilter {
+  addresses_some: AddressFilter
+  addresses_all: AddressFilter
+  addresses_none: AddressFilter
+  addresses_any: Boolean
+  roles_some: ISingleFilterOfStringFilter
+  roles_all: ISingleFilterOfStringFilter
+  roles_none: ISingleFilterOfStringFilter
+  roles_any: Boolean
+  AND: [UserFilter!]
+  OR: [UserFilter!]
+}
+
+input AddressFilter {
+  is_primary: Boolean
+  is_primary_not: Boolean
+  street: String
+  street_contains: String
+  street_ends_with: String
+  street_in: [String]
+  street_not: String
+  street_not_contains: String
+  street_not_ends_with: String
+  street_not_in: [String]
+  street_not_starts_with: String
+  street_starts_with: String
+  AND: [AddressFilter!]
+  OR: [AddressFilter!]
+}
+
+input ISingleFilterOfStringFilter {
+  AND: [ISingleFilterOfStringFilter!]
+  element: String
+  element_contains: String
+  element_ends_with: String
+  element_in: [String]
+  element_not: String
+  element_not_contains: String
+  element_not_ends_with: String
+  element_not_in: [String]
+  element_not_starts_with: String
+  element_starts_with: String
+  OR: [ISingleFilterOfStringFilter!]
+}
+```
+
+### Array Operation Descriptor
+
+The example above showed that configuring the operations is optional.
+If you want to have access to the actual field input types or allow only a subset of array filters for a given property, you can configure the operation over the `IFilterInputTypeDescriptor`.
+
+```csharp
+public class UserFilterType : FilterInputType<User>
+{
+    protected override void Configure(
+        IFilterInputTypeDescriptor<User> descriptor)
+    {
+        descriptor.BindFieldsExplicitly();
+        descriptor.List(x => x.Addresses)
+            .AllowSome().And()
+            .AllowAny().And()
+            .AllowAll().And()
+            .AllowNone();
+        descriptor.List(x => x.Roles)
+            .AllowSome().And()
+            .AllowAny().And()
+            .AllowAll().And()
+            .AllowNone();
+    }
+}
+```
+
+# Naming Conventions
+
+Hot Chocolate already provides two naming schemes for filters. If you would like to define your own naming scheme or extend existing ones, have a look at the documentation of TODO:Link-Filtering
+
+## Snake Case
+
+**Configuration**
+You can configure snake case with the `UseSnakeCase` extension method on the `IFilterConventionDescriptor`.
+
+```csharp
+public class CustomConvention : FilterConvention
+{
+    protected override void Configure(IFilterConventionDescriptor descriptor)
+    {
+        descriptor.UseSnakeCase();
+    }
+}
+
+SchemaBuilder.New().AddConvention();
+//
+SchemaBuilder.New().AddConvention(new FilterConvention(x => x.UseSnakeCase()));
+```
+
+```graphql
+input FooBarFilter {
+  AND: [FooBarFilter!]
+  nested: String
+  nested_contains: String
+  nested_ends_with: String
+  nested_in: [String]
+  nested_not: String
+  nested_not_contains: String
+  nested_not_ends_with: String
+  nested_not_in: [String]
+  nested_not_starts_with: String
+  nested_starts_with: String
+  OR: [FooBarFilter!]
+}
+
+input FooFilter {
+  AND: [FooFilter!]
+ bool: Boolean + bool_not: Boolean + comparable: Short + comparableEnumerable_all: ISingleFilterOfInt16Filter + comparableEnumerable_any: Boolean + comparableEnumerable_none: ISingleFilterOfInt16Filter + comparableEnumerable_some: ISingleFilterOfInt16Filter + comparable_gt: Short + comparable_gte: Short + comparable_in: [Short!] + comparable_lt: Short + comparable_lte: Short + comparable_not: Short + comparable_not_gt: Short + comparable_not_gte: Short + comparable_not_in: [Short!] + comparable_not_lt: Short + comparable_not_lte: Short + object: FooBarFilter + OR: [FooFilter!] +} + +input ISingleFilterOfInt16Filter { + AND: [ISingleFilterOfInt16Filter!] + element: Short + element_gt: Short + element_gte: Short + element_in: [Short!] + element_lt: Short + element_lte: Short + element_not: Short + element_not_gt: Short + element_not_gte: Short + element_not_in: [Short!] + element_not_lt: Short + element_not_lte: Short + OR: [ISingleFilterOfInt16Filter!] +} +``` + +## Pascal Case + +**Configuration** +You can configure the Pascal Case with the `UsePascalCase` extension method convention on the `IFilterConventionDescriptor` + +```csharp +public class CustomConvention : FilterConvention +{ + protected override void Configure(IFilterConventionDescriptor descriptor) + { + descriptor.UsePascalCase() + } +} + +SchemaBuilder.New().AddConvention(); +// +SchemaBuilder.New().AddConvention(new FilterConvention(x => x.UsePascalCase()) +``` + +```graphql +input FooBarFilter { + AND: [FooBarFilter!] + Nested: String + Nested_Contains: String + Nested_EndsWith: String + Nested_In: [String] + Nested_Not: String + Nested_Not_Contains: String + Nested_Not_EndsWith: String + Nested_Not_In: [String] + Nested_Not_StartsWith: String + Nested_StartsWith: String + OR: [FooBarFilter!] +} + +input FooFilter { + AND: [FooFilter!] + Bool: Boolean + Bool_Not: Boolean + Comparable: Short + ComparableEnumerable_All: ISingleFilterOfInt16Filter + ComparableEnumerable_Any: Boolean + ComparableEnumerable_None: ISingleFilterOfInt16Filter + ComparableEnumerable_Some: ISingleFilterOfInt16Filter + Comparable_Gt: Short + Comparable_Gte: Short + Comparable_In: [Short!] + Comparable_Lt: Short + Comparable_Lte: Short + Comparable_Not: Short + Comparable_Not_Gt: Short + Comparable_Not_Gte: Short + Comparable_Not_In: [Short!] + Comparable_Not_Lt: Short + Comparable_Not_Lte: Short + Object: FooBarFilter + OR: [FooFilter!] +} + +input ISingleFilterOfInt16Filter { + AND: [ISingleFilterOfInt16Filter!] + Element: Short + Element_Gt: Short + Element_Gte: Short + Element_In: [Short!] + Element_Lt: Short + Element_Lte: Short + Element_Not_Gt: Short + Element_Not: Short + Element_Not_Gte: Short + Element_Not_In: [Short!] + Element_Not_Lt: Short + Element_Not_Lte: Short + OR: [ISingleFilterOfInt16Filter!] +} +``` + +# Customizing Filter + +Hot Chocolate provides different APIs to customize filtering. You can write custom filter input types, customize the inference behavior of .NET Objects, customize the generated expression, or create a custom visitor, and attach your exotic database. 
+ +**As this can be a bit overwhelming the following questionnaire might help:** + +| | | +| --------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------- | +| _You do not want all the generated filters and only allow a specific set of filters in a specific case?_ | Custom FilterInputType | +| _You want to change the name of a field or a whole type?_ | Custom FilterInputType | +| _You want to change the name of the `where` argument?_ | Filter Conventions ArgumentName | +| _You want to configure how *Hot Chocolate* generates the name and the description of filters in globally? e.g. `PascalCaseFilterType`?_ | Filter Conventions | +| _You want to configure what the different types of filters are allowed globally?_ | Filter Conventions | +| _Your database provider does not support certain operations of `IQueryable`_ | Filter Conventions | +| _You want to change the naming of a specific lar filter type? e.g._ `foo_contains` _should be_ `foo_like` | Filter Conventions | +| _You want to customize the expression a filter is generating: e.g._ `_equals` _should not be case sensitive?_ | Expression Visitor  | +| _You want to create your own filter types with custom parameters and custom expressions? e.g. GeoJson?_ | Filter Conventions | +| _You have a database client that does not support `IQueryable` and wants to generate filters for it?_ | Custom Visitor | + +# Custom FilterInputType + +Under the hood, filtering is based on top of normal Hot Chocolate input types. You can easily customize them with a very familiar fluent interface. The filter input types follow the same `descriptor` scheme as you are used to from the normal filter input types. Just extend the base class `FilterInputType` and override the descriptor method. + +```csharp +public class User +{ + public string Name {get; set; } + + public string LastName {get; set; } +} + +public class UserFilterType + : FilterInputType +{ + protected override void Configure( IFilterInputTypeDescriptor descriptor) { + + } +} +``` + +`IFilterInputTypeDescriptor` supports most of the methods of `IInputTypeDescriptor` and adds the configuration interface for the filters. By default, Hot Chocolate generates filters for all properties of the type. +If you do want to specify the filters by yourself you can change this behavior with `BindFields`, `BindFieldsExplicitly` or `BindFieldsImplicitly`. + +```csharp +public class UserFilterType + : FilterInputType +{ + protected override void Configure( IFilterInputTypeDescriptor descriptor) { + descriptor.BindFieldsExplicitly(); + descriptor.Filter(x => x.Name); + } +} +``` + +```graphql +input UserFilter { + name: String + name_contains: String + name_ends_with: String + name_in: [String] + name_not: String + name_not_contains: String + name_not_ends_with: String + name_not_in: [String] + name_not_starts_with: String + name_starts_with: String + AND: [UserFilter!] + OR: [UserFilter!] +} +``` + +To add or customize a filter you must use `Filter(x => x.Foo)` for scalars `List(x => x.Bar)` for lists and `Object(x => x.Baz)` for nested objects. +These methods will return fluent interfaces to configure the filter for the selected field. + +A field has different filter operations that you can configure. You will find more about filter types and filter operations here TODO:Link +When fields are bound implicitly, meaning filters are added for all properties, you may want to hide a few fields. 
You can do this with `Ignore(x => Bar)`. +Operations on fields can again be bound implicitly or explicitly. By default, Hot Chocolate generates operations for all fields of the type. +If you do want to specify the operations by yourself you can change this behavior with `BindFilters`, `BindFiltersExplicitly` or `BindFiltersImplicitly`. + +It is also possible to customize the GraphQL field of the operation further. You can change the name, add a description or directive. + +```csharp +public class UserFilterType + : FilterInputType +{ + protected override void Configure( IFilterInputTypeDescriptor descriptor) { + // descriptor.BindFieldsImplicitly(); <- is already the default + descriptor.Filter(x => x.Name) + .BindFilterExplicitly() + .AllowContains() + .Description("Checks if the provided string is contained in the `Name` of a User") + .And() + .AllowEquals() + .Name("exits_with_name") + .Directive("name"); + descriptor.Ignore(x => x.Bar); + } +} +``` + +```graphql +input UserFilter { + exits_with_name: String @name + """ + Checks if the provided string is contained in the `Name` of a User + """ + name_contains: String + AND: [UserFilter!] + OR: [UserFilter!] +} +``` + +**API Documentation** + +| Method | Description | +| -------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------- | +| `csharp±BindFields(BindingBehavior bindingBehavior)` | Defines the filter binding behavior. `Explicitly`or `Implicitly`. Default is `Implicitly` | +| `csharp±BindFieldsExplicitly` | Defines that all filters have to be specified explicitly. This means that only the filters are applied that are added with `Filter(x => x.Foo)` | +| `csharp±BindFieldsImplicitly` | The filter type will add filters for all compatible fields. | +| `csharp±Description(string value)` | Adds explanatory text of the `FilterInputType` that can be accessed via introspection. | +| `csharp±Name(NameString value)` | Defines the _GraphQL_ name of the `FilterInputType`. | +| `csharp±Ignore( Expression> property);` | Ignore the specified property. | +| `csharp±Filter( Expression> property)` | Defines a string filter for the selected property. | +| `csharp±Filter( Expression> property)` | Defines a bool filter for the selected property. | +| `csharp±Filter( Expression> property)` | Defines a comparable filter for the selected property. | +| `csharp±Object( Expression> property)` | Defines a object filter for the selected property. | +| `csharp±List( Expression>> property)` | Defines an array string filter for the selected property. | +| `csharp±List( Expression>> property)` | Defines an array bool filter for the selected property. | +| `csharp±List( Expression>> property)` | Defines an array comarable filter for the selected property. | +| `csharp±Filter( Expression>> property)` | Defines an array object filter for the selected property. | +| `csharp±Directive(TDirective directiveInstance)` | Add directive `directiveInstance` to the type | +| `csharp±Directive(TDirective directiveInstance)` | Add directive of type `TDirective` to the type | +| `csharp±Directive(NameString name, params ArgumentNode[] arguments)` | Add directive of type `TDirective` to the type | + +# Filter Conventions + +The customization of filters with `FilterInputTypes` works if you only want to customize one specific filter type. 
+If you want to change the behavior of all filter types, you want to create a convention for your filters. The filter convention comes with a fluent interface that is close to a type descriptor. +You can see the convention as a configuration object that holds the state that is used by the type system or the execution engine. + +## Get Started + +To use a filter convention, you can extend `FilterConvention` and override the `Configure` method. Alternatively, you can directly configure the convention over the constructor argument. +You then must register your custom convention on the schema builder with `AddConvention`. + +```csharp +public class CustomConvention + : FilterConvention +{ + protected override void Configure(IFilterConventionDescriptor descriptor) { } +} + +SchemaBuilder.New().AddConvention(); +// +SchemaBuilder.New().AddConvention(new FilterConvention(x => /* Config */)); +``` + +## Convention Descriptor Basics + +In this section, we will take a look at the basic features of the filter convention. +The documentation will reference often to `descriptor`. Imagine this `descriptor` as the parameter of the Configure method of the filter convention in the following context: + +```csharp {5} +public class CustomConvention + : FilterConvention +{ + protected override void Configure( + IFilterConventionDescriptor descriptor + ) { } +} + +SchemaBuilder.New().AddConvention(); +``` + +
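+
+Each of the following subsections shows a single call on this descriptor. These calls can also be combined in one `Configure` method; the following is a minimal sketch (the chosen argument and element names are only illustrative):
+
+```csharp
+public class CustomConvention : FilterConvention
+{
+    protected override void Configure(IFilterConventionDescriptor descriptor)
+    {
+        // Renames the filter argument (default: "where").
+        descriptor.ArgumentName("filter");
+
+        // Renames the element field of scalar list filters.
+        descriptor.ElementName("item");
+    }
+}
+```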
+ +### Argument Name + +With the convention descriptor, you can easily change the argument name of the `FilterInputType`. + +**Configuration** + +```csharp +descriptor.ArgumentName("example_argument_name"); +``` + +**Result** + +```graphql +type Query { + users(example_argument_name: UserFilter): [User] +} +``` + +### Change Name of Scalar List Type Element + +You can change the name of the element of the list type. + +**Configuration** + +```csharp +descriptor.ElementName("example_element_name"); +``` + +**Result** + +```graphql +input ISingleFilterOfInt16Filter { + AND: [ISingleFilterOfInt16Filter!] + example_element_name: Short + example_element_name_gt: Short + example_element_name_gte: Short + example_element_name_in: [Short!] + example_element_name_lt: Short + example_element_name_lte: Short + example_element_name_not: Short + example_element_name_not_gt: Short + example_element_name_not_gte: Short + example_element_name_not_in: [Short!] + example_element_name_not_lt: Short + example_element_name_not_lte: Short + OR: [ISingleFilterOfInt16Filter!] +} +``` + +### Configure Filter Type Name Globally + +You can change the way Hot Chocolate names the types by supplying a delegate. + +This delgate must be of the following type: + +```csharp +public delegate NameString GetFilterTypeName( + IDescriptorContext context, + Type entityType); +``` + +**Configuration** + +```csharp +descriptor.TypeName((context,types) => + context.Naming.GetTypeName(entityType, TypeKind.Object) + "Custom"); +``` + +**Result** + +```graphql +type Query { + users(where: UserCustom): [User] +} +``` + +### Configure Filter Description Globally + +To change the way filter types are named, you have to exchange the factory. + +You have to provide a delegate of the following type: + +```csharp +public delegate string GetFilterTypeDescription( + IDescriptorContext context, + Type entityType); +``` + +**Configuration** + +```csharp +descriptor.TypeName((context,types) => + context.Naming.GetTypeDescription(entityType, TypeKind.Object); + "Custom"); +``` + +**Result** + +```graphql +""" +Custom +""" +input UserFilter { + AND: [UserFilter!] + isOnline: Boolean + isOnline_not: Boolean + OR: [UserFilter!] +} +``` + +### Reset Configuration + +Hot Chocolate shippes with well-defined defaults. To start from scratch, you need to call `Reset()`first. + +**Configuration** + +```csharp +descriptor.Reset(); +``` + +**Result** + +> **⚠ Note:** You will need to add a complete configuration, otherwise the filter will not work as desired! + +## Describe with convention + +With the filter convention descriptor, you have full control over what filters are inferred, their names, operations, and a lot more. +The convention provides a familiar interface to the type configuration. We recommended to first take a look at `Filter & Operations` to understand the concept of filters. This will help you understand how the filter configuration works. + +Filtering has two core components at its heart. First, you have the inference of filters based on .NET types. The second part is an interceptor that translates the filters to the desired output and applies it to the resolver pipeline. These two parts can (and have to) be configured completely independently. With this separation, it is possible to easily extend the behavior. The descriptor is designed to be extendable by extension methods. + +**It's fluent** + +Filter conventions are a completely fluent experience. You can write a whole configuration as a chain of method calls. 
+This provides a very clean interface, but can, on the other hand, get messy quickly. We recommend using indentation to keep the configuration comprehensible. +You can drill up with `And()`. + +```csharp + descriptor.Operation(FilterOperationKind.Equals).Description("has to be equal"); + descriptor.Operation(FilterOperationKind.NotEquals).Description("has not to be equal"); + descriptor.Type(FilterKind.Comparable).Operation(FilterOperationKind.NotEquals).Description("has to be comparable and not equal") + + + descriptor + .Operation(FilterOperationKind.Equals) + .Description("has to be equal") + .And() + .Operation(FilterOperationKind.NotEquals) + .Description("has not to be equal") + .And() + .Type(FilterKind.Comparable) + .Operation(FilterOperationKind.NotEquals) + .Description("has to be comparable and not equal") +``` + +### Configuration of the type system + +In this section, we will focus on the generation of the schema. If you are interested in changing how filters translate to the database, you have to look here TODO:Link + +#### Configure Filter Operations + +There are two ways to configure Operations. + +You can configure a default configuration that applies to all operations of this kind. In this case the configuration for `FilterOperationKind.Equals` would be applied to all `FilterKind` that specify this operation. + +```csharp + descriptor.Operation(FilterOperationKind.Equals) +``` + +If you want to configure a more specific Operation e.g. `FilterOperationKind.Equal` of kind `FilterKind.String`, you can override the default behavior. + +```csharp + descriptor.Type(FilterKind.String).Operation(FilterOperationKind.Equals) +``` + +The operation descriptor allows you to configure the name, the description or even ignore an operation completely + +In this example, we will look at the following input type: + +```graphql +input UserFilter { + loggingCount: Int + loggingCount_gt: Int + loggingCount_gte: Int + loggingCount_in: [Int!] + loggingCount_lt: Int + loggingCount_lte: Int + loggingCount_not: Int + loggingCount_not_gt: Int + loggingCount_not_gte: Int + loggingCount_not_in: [Int!] + loggingCount_not_lt: Int + loggingCount_not_lte: Int + name: String + name_contains: String + name_ends_with: String + name_in: [String] + name_not: String + name_not_contains: String + name_not_ends_with: String + name_not_in: [String] + name_not_starts_with: String + name_starts_with: String + AND: [UserFilter!] + OR: [UserFilter!] +} +``` + +##### Change the name of an operation + +To change the name of an operation you need to specify a delegate of the following type: + +```csharp +public delegate NameString CreateFieldName( + FilterFieldDefintion definition, + FilterOperationKind kind); +``` + +**Configuration** + +```csharp {1, 6} + // (A) + // specifies that all not equals operations should be extended with _nada + descriptor + .Operation(FilterOperationKind.NotEquals) + .Name((def, kind) => def.Name + "_nada" ); + // (B) + // specifies that the not equals operations should be extended with _niente. + // this overrides (A) + descriptor + .Type(FilterKind.Comparable) + .Operation(FilterOperationKind.NotEquals) + .Name((def, kind) => def.Name + "_niente" ) +``` + +**result** + +```graphql {8,18} +input UserFilter { + loggingCount: Int + loggingCount_gt: Int + loggingCount_gte: Int + loggingCount_in: [Int!] + loggingCount_lt: Int + loggingCount_lte: Int + loggingCount_niente: Int <-- (B) + loggingCount_not_gt: Int + loggingCount_not_gte: Int + loggingCount_not_in: [Int!] 
+ loggingCount_not_lt: Int + loggingCount_not_lte: Int + name: String + name_contains: String + name_ends_with: String + name_in: [String] + name_nada: String <-- (A) + name_not_contains: String + name_not_ends_with: String + name_not_in: [String] + name_not_starts_with: String + name_starts_with: String + AND: [UserFilter!] + OR: [UserFilter!] +} +``` + +##### Change the description of an operation + +In the same way, you can configure names you can also configure the description of operations. +You can either set the description for all operations of this kind or only for a specific one in combination with a filter kind. + +**Configuration** + +```csharp + descriptor + .Operation(FilterOperationKind.Equals) + .Description("has to be equal") + .And() + .Operation(FilterOperationKind.NotEquals) + .Description("has not to be equal") + .And() + .Type(FilterKind.Comparable) + .Operation(FilterOperationKind.NotEquals) + .Description("has to be comparable and not equal") +``` + +**result** + +```graphql {2-4,11-14, 20-22,27-29} +input UserFilter { + """ + has to be equal + """ + loggingCount: Int + loggingCount_gt: Int + loggingCount_gte: Int + loggingCount_in: [Int!] + loggingCount_lt: Int + loggingCount_lte: Int + """ + has to be comparable and not equal + """ + loggingCount_not: Int + loggingCount_not_gt: Int + loggingCount_not_gte: Int + loggingCount_not_in: [Int!] + loggingCount_not_lt: Int + loggingCount_not_lte: Int + """ + has to be equal + """ + name: String + name_contains: String + name_ends_with: String + name_in: [String] + """ + has not to be equal + """ + name_not: String + name_not_contains: String + name_not_ends_with: String + name_not_in: [String] + name_not_starts_with: String + name_starts_with: String + AND: [UserFilter!] + OR: [UserFilter!] +} +``` + +##### Hide Operations + +Hot Chocolate comes preconfigured with a set of operations. If you like to hide operations globally, you can use `Ignore` for it. +If your database provider does not support certain `IQueryable` methods you can just ignore the operation. Ignored operations do not generate filter input types. + +There are multiple ways to ignore an operation: + +**Configuration** + +```csharp + descriptor + .Ignore(FilterOperationKind.Equals) + .Operation(FilterOperationKind.NotEquals) + .Ignore() + .And() + .Type(FilterKind.Comparable) + .Operation(FilterOperationKind.GreaterThanOrEqual) + .Ignore(); +``` + +**result** + +```graphql {2,4, 8,14,18} +input UserFilter { + ↵ + loggingCount_gt: Int + ↵ + loggingCount_in: [Int!] + loggingCount_lt: Int + loggingCount_lte: Int + ↵ + loggingCount_not_gt: Int + loggingCount_not_gte: Int + loggingCount_not_in: [Int!] + loggingCount_not_lt: Int + loggingCount_not_lte: Int + ↵ + name_contains: String + name_ends_with: String + name_in: [String] + ↵ + name_not_contains: String + name_not_ends_with: String + name_not_in: [String] + name_not_starts_with: String + name_starts_with: String + AND: [UserFilter!] + OR: [UserFilter!] +} +``` + +##### Configure Implicit Filter + +The default binding behavior of Hot Chocolate is implicit. Filter types are no exception. +This first may seem like magic, but unfortunately, there is none. It is just code. With `AddImplicitFilter` you can add this pinch of magic to your extension too. +Hot Chocolate creates the filters as it builds the input type. The type iterates over a list of factories sequentially and tries to create a definition for each property. The first factory that can handle the property wins and creates a definition for the filter. 
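+
+Custom factories are added to this list with the convention's `AddImplicitFilter` method. The following is a minimal sketch of such a registration (the `TryCreateCustomFilter` handler is only a placeholder; the delegate it has to satisfy is shown next):
+
+```csharp
+public class CustomConvention : FilterConvention
+{
+    protected override void Configure(IFilterConventionDescriptor descriptor)
+    {
+        // Adds one more factory that is probed for every property
+        // when the filter input type is built.
+        descriptor.AddImplicitFilter(TryCreateCustomFilter);
+    }
+}
+```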
+ +To configure you have to use the following delegate: + +```csharp + public delegate bool TryCreateImplicitFilter( + IDescriptorContext context, + Type type, + PropertyInfo property, + IFilterConvention filterConventions, + [NotNullWhen(true)] out FilterFieldDefintion? definition); +``` + +| parameter | type | description | +| ------------------- | --------------------------- | --------------------------------------------------------------------------------------------------------- | +| _context_ | `IDescriptorContext` | The context of the type descriptor | +| _type_ | `Type` | The type of the property. `Nullable` is already unwrapped (typeof(T)) | +| _property_ | `PropertyInfo` | The property | +| _filterConventions_ | `IFilterConvention` | The instance of the `IFilterContention`. | +| _definition_ | `out FilterFieldDefintion?` | The generated definition for the property. Return null if the current factory cannot handle the property. | + +If you just want to build your extension for implicit bindings, you can just out a custom `FilterFieldDefinition`. + +It makes sense to encapsulate that logic in a FilterFieldDescriptor though. You can reuse this descriptor also for the fluent configuration interface. + +**Example** + +```csharp +private static bool TryCreateStringFilter( + IDescriptorContext context, + Type type, + PropertyInfo property, + IFilterConvention filterConventions, + [NotNullWhen(true)] out FilterFieldDefintion? definition) +{ + if (type == typeof(string)) + { + var field = new StringFilterFieldDescriptor(context, property, filterConventions); + definition = field.CreateDefinition(); + return true; + } + + definition = null; + return false; +} +``` + +##### Creating a fluent filter extension + +Hot Chocolate provides fluent interfaces for all its APIs. If you want to create an extension that integrates seamlessly with Hot Chocolate it makes sense to also provide fluent interfaces. It makes sense to briefly understand how `Type -> Descriptor -> Definition` work. You can read more about it here //TODO LINK + +Here a quick introduction: + +_Type_ + +A type is a description of a GraphQL Type System Object. Hot Chocolate builds types during schema creation. Types specify how a GraphQL Type looks like. It holds, for example, the definition, fields, interfaces, and all life cycle methods. Type do only exist on startup; they do not exist on runtime. + +_Type Definition_ + +Each type has a definition that describes the type. It holds, for example, the name, description, the CLR type and the field definitions. The field definitions describe the fields that are on the type. + +_Type Descriptor_ + +A type descriptor is a fluent interface to describe the type over the definition. The type descriptor does not have access to the type itself. It operates solely on the definition. + +In the case of filtering, this works nearly the same. The `FilterInputType` is just an extension of the `InputObjectType`. It also has the same _Definition_. The `FilterInputType` stores `FilterOperationField` on this definition. These are extensions of the normal `InputField`'s and extend it by a `FilterOperationKind`. + +With a normal `InputTypeDescriptor` you declare a field by selecting a member. The filter descriptor works a little differently. You declare the `FilterKind` of a member by selecting it and then you declare the operations on this filter. These operations are the input field configuration. 
+ +```csharp +InputTypeDescriptor inputDesc; +inputDesc.Field(x => x.Name) + .Description("This is the name") + + +FilterInputTypeDescriptor inputDesc; +inputDesc.Filter(x => x.Name).AllowEqual().Description("This is the name") +``` + +We have a few case studies that will show you how you can change the inference: + +1. String "\_like" shows an example of how you can easily add a "\_like" operation to the string filter +2. DateTime "from", "to" +3. NetTopologySuite + +> The configuration you see in this case study only shows how you add an operation to an already-existing filter. After this, the job is only half way done. To create a working filter, you must also change the expression visitor. Check the documentation for //TODO: ExpressionVisitor + +##### Case Study: String "\_like" + +**Situation** +The customer has requested a full-text search of the description field of a product. The product owner has promised the feature to the customer two sprints ago and it has still not been shipped. The UX guru of your company has, slightly under pressure, worked out a solution, and together with the frontend team they have already build a prototype. In the heat of the moment, they did not read the user story correctly and, unfortunately, realized last minute that the current filtering API does not fit their needs. The customer does also has to be able to create complex search queries. `This%Test` should match `This is a Test`. As you come back from lunch a hysterical product owner explains the situation to you. To you, it is immediately clear that this can be easily done by using the SQL `like` operator. + +In your codebase you use the `UseFiltering` middleware extensively. In some cases, you also have customized filter types. To cover all possible cases you need + +1. Implicit Binding: `[UseFiltering]` should automagically create the "\_like" filter for every string filter +2. Explicity Binding: `desc.Filter(x => x.Description).AllowLike())` +3. Expression Visitor: You want to directly filter on the database. You use EF Core. + +**Implicit Binding** +With the conventions, it is easy to add operations on already existing filters. We will first look into the configuration for filter inference and in a second step into the code first extension. + +You just need to navigate to the filter you like to modify. `descriptor.Type(FilterKind.String)`. Just add the operation you need with `.Operation(FilterOperationKind.Like)`. The next step is to add factories for the name and the description. + +Altogether this looks like this: + +```csharp +public class CustomConvention : FilterConvention +{ + protected override void Configure(IFilterConventionDescriptor descriptor) + { + descriptor + .Type(FilterKind.String) + .Operation(FilterOperationKind.GreaterThanOrEqual) + .Name((def, kind) => def.Name + "_like" ); + .Description("Full text search. Use % as a placeholder for any symbol"); + } +} +``` + +**Explicit Binding** +By extending the filter descriptor of the string filter you can add a fluent extension that seamlessly integrated with the Hot Chocolate API. + +//TODO: currently there `StringFilterOperationDescriptor` requires `StringFilterFieldDescriptor` instead of `StringFilterFieldDescriptor` and there is no way to `Allow` +//TODO: TYPO ! FilterFieldDefintion +//TODO: Move RewriteType to convention . 
+//TODO: Move up CreateFieldName + +```csharp +public static class StringLikeFilterExtension +{ + public static IStringFilterOperationDescriptor AllowLike( + IStringFilterFieldDescriptor descriptor) + { + return descriptor.Allow( + FilterOperationKind.ArrayAll, + (ctx, definition) => + { + var operation = new FilterOperation( + typeof(string), FilterOperationKind.ArrayAll, definition.Property); + + return StringFilterOperationDescriptor.New( + ctx, + descriptor, + ctx.GetFilterConvention().CreateFieldName(FilterOperationKind.ArrayAll), + ctx.GetFilterConvention().RewriteType(FilterOperationKind.ArrayAll), + operation); + } + ) + } +} +``` + +--- + +##### Case Study: DateTime "from", "to" + +**Situation** + +1. Implicit Binding: `[UseFiltering]` should automagically create `DateTimeFilter` and the corresponding "\_from" and "\_to". +2. Explicity Binding: `desc.Filter(x => x.OrderedAt).AllowFrom().AllowTo())` +3. Expression Visitor: You want to directly filter on the database. You use EF Core. + +**Configuration** + +It is slightly more complex to create a custom filter than just modifying existing operations. There are a few different parts that must come together to make this work. Implicit and Explicit Bindings are coming together in this example. + +Let's start with the configuration of the convention. By splitting the configuration up into a set of extension methods that can be applied to the convention, it is possible to easily replace sub-components of the extension. e.g. some users might want to use an expression visitor, some others might want to use MognoDB Native. + +- `UseDateTimeFilter` adds support for date-time filters and registers the expression visitor for it. Abstraction for `UseDateTimeFilterImplicitly().UseDateTimeExpression()` + +- `UseDateTimeFilterImplicitly` only registers the configuration of the schema building part of the extension + +- `UseDateTimeExpression` only registers the expression visitor configuration. + +With this separation, a user that prefers to use a custom visitor, can just register the types and skip the expression visitor configuration + +TODO: UseExpressionVisitor should return expression visitor if it already exists +TODO: Reference Definition from Filter Operation instead of property. 
This way we could reduce complexity further and improve extensibility + +```csharp +public static class DateTimeFilterConventionExtensions +{ + public static IFilterConventionDescriptor UseDateTimeFilter( + this IFilterConventionDescriptor descriptor) => + descriptor.UseDateTimeFilterImplicitly() + .UseDateTimeFilterExpression(); + + public static IFilterConventionDescriptor UseDateTimeFilterImplicitly( + this IFilterConventionDescriptor descriptor) => + descriptor.AddImplicitFilter(TryCreateDateTimeFilter) + .Type(FilterKind.DateTime) + .Operation(FilterOperationKind.GreaterThanOrEquals) + .Name((def, _) => def.Name + "_from") + .Description("") + .And() + .Operation(FilterOperationKind.LowerThanOrEquals) + .Name((def, _) => def.Name + "_to") + .Description("") + .And() + .And(); + + public static IFilterConventionDescriptor UseDateTimeFilterExpression( + this IFilterConventionDescriptor descriptor) => + descriptor.UseExpressionVisitor() + .Kind(FilterKind.DateTime) + .Operation(FilterOperationKind.LowerThanOrEquals) + .Handler(ComparableOperationHandlers.LowerThanOrEquals).And() + .Operation(FilterOperationKind.GreaterThanOrEquals) + .Handler(ComparableOperationHandlers.GreaterThanOrEquals).And() + .And() + .And(); +} +``` + +**Create Date Time Filter Implicitly** + +`DateTime` is a new filter. Hot Chocolate is only aware of its existence because of the delegate passed to `AddImplicitFilter` + +```csharp +private static bool TryCreateDateTimeFilter( + IDescriptorContext context, + Type type, + PropertyInfo property, + IFilterConvention filterConventions, + [NotNullWhen(true)] out FilterFieldDefintion? definition) +{ + if (type == typeof(DateTime)) + { + var field = new DateTimeFilterFieldDescriptor( + context, property, filterConventions); + definition = field.CreateDefinition(); + return true; + } + + definition = null; + return false; +} +``` + +TODO: make filters name based +**Filter Field** + +A filter field is a collection of operations. It holds the configuration of the different operations like _“from”_ and _“to”_. In classic Hot Chocolate fashion there is a descriptor that describes these collections. Hot Chocolate provides the base class `FilterFieldDescriptorBase` you can use as an extension point. There is quite a lot of boilerplate code you need to write. e.g. it makes sense to define an interface for the descriptor. +You find an example here: //TODO LINK + +For the explicit binding, we need to override `CreateOperationDefinition`. In case the filter is bound implicitly, this method is invoked for each operation. +TODO: I think there is an issue with AllowNotEndsWith. + +```csharp +// We override this method for implicity binding +protected override FilterOperationDefintion CreateOperationDefinition( + FilterOperationKind operationKind) => + CreateOperation(operationKind).CreateDefinition(); +``` + +For the implicit binding, we only need to add the methods `AllowFrom` and `AllowTo`. 
+ +```csharp +// The following to methods are for adding the filters explicitly +public IDateTimeFilterOperationDescriptor AllowFrom() => + GetOrCreateOperation(FilterOperationKind.GreaterThanOrEqual); + +public IDateTimeFilterOperationDescriptor AllowTo() => + GetOrCreateOperation(FilterOperationKind.LowerThanOrEqual); + +// This is just a little helper that reduces code duplication +private DateTimeFilterOperationDescriptor GetOrCreateOperation( + FilterOperationKind operationKind) => + Filters.GetOrAddOperation(operationKind, + () => CreateOperation(operationKind)); +``` + +All the methods described above call `CreateOperation`. This method creates the operation descriptor. The `FitlerOperation` that is created here, will also be available for the expression visitor. + +```csharp +// This helper method creates the operation. +private DateTimeFilterOperationDescriptor CreateOperation( + FilterOperationKind operationKind) + { + // This operation is also available in execution. + var operation = new FilterOperation( + typeof(DateTime), + Definition.Kind, + operationKind, + Definition.Property); + + return DateTimeOffsetFilterOperationDescriptor.New( + Context, + this, + CreateFieldName(operationKind), + RewriteType(operationKind), + operation, + FilterConvention); + } +``` + +**Filter Operation** + +In this example; there are two filter operations _"form"_ and _"to"_. The configuration with a descriptor combines explicit and implicit binding. As a base class, you can use `FilterOperationDescriptorBase`. +Here is the interface that is used in this example: + +```csharp +public interface IDateTimeFilterOperationDescriptor + : IDescriptor + , IFluent + { + /// Define filter operations for another field. + IDateTimeFilterFieldDescriptor And(); + + /// Specify the name of the filter operation. + IDateTimeFilterOperationDescriptor Name(NameString value); + + /// Specify the description of the filter operation. + IDateTimeFilterOperationDescriptor Description(string value); + + /// Annotate the operation filter field with a directive. + IDateTimeFilterOperationDescriptor Directive(T directiveInstance) + where T : class; + IDateTimeFilterOperationDescriptor Directive() + where T : class, new(); + IDateTimeFilterOperationDescriptor Directive( + NameString name, + params ArgumentNode[] arguments); + } +``` + +You can find the implementation of this interface here: //TODO link + +**Filter Type Extension** +The last missing piece to complete the integration into Hot Chocolate is an extension of `FilterInputType`. This can again be done as a extension method. + +```csharp +public IStringFilterFieldDescriptor Filter( + Expression> property) +{ + if (property.ExtractMember() is PropertyInfo p) + { + return Fields.GetOrAddDescriptor(p, + () => new StringFilterFieldDescriptor(Context, p)); + } + + throw new ArgumentException( + FilterResources.FilterInputTypeDescriptor_OnlyProperties, + nameof(property)); +} +``` + +//TODO Open this api + +--- + +##### Case Study: Filters for NetTopologySuite + +**Situation** + +> **Note:** If you are searching for `NetTopologySuite`, they are already implemented. Have a look at//TODO LINK + +1. Implicit Binding: `[UseFiltering]` should automagically create `Point` and the corresponding "\_distance" +2. Explicity Binding: `desc.Filter(x => x.Location).AllowDistance()` +3. Expression Visitor: You want to directly filter on the database. You use EF Core. + +Things are different in this case, as there is no longer a 1:1 mapping of input type to method or property. 
Imagine you want to fetch all bakeries that are near you. In C# you would write something like `dbContext.Bakeries.Where(x => x.Location.Distance(me.Location) < 5)`. This cannot be translated to a _GraphQL_ input type directly. + +A _GraphQL_ query might look like this. + +```graphql +{ + bakeries( + where: { location: { distance: { from: { x: 32, y: 15 }, is_lt: 5 } } } + ) { + name + } +} +``` + +_GraphQL_ input fields cannot have arguments. To work around this issue a data structure is needed that combines the filter payload and the operation. The input type for this example has the following structure. + +```csharp +public class FilterDistance +{ + + public FilterDistance( + FilterPointData from) + { + From = from; + } + /// contains the x and y coordinates. + public FilterPointData From { get; } + + public double Is { get; set; } +} +``` + +```graphql +input FilterDistanceInput { + from: FilterPointDataInput! + is: Float + is_gt: Float + is_gte: Float + is_lt: Float + is_lte: Float + is_in: Float + is_not: Float + is_not_gt: Float + is_not_gte: Float + is_not_lt: Float + is_not_lte: Float + is_not_in: Float +} +``` + +//TODO: Add skip / inopfield! + +Hot Chocolate would generate nested filters for the payload property "From" by default. This can be avoided by declaring the field as input payload. + +```csharp +public class DistanceFilterType + : FilterInputType +{ + protected override void Configure( + IFilterInputTypeDescriptor descriptor) + { + descriptor.Input(x => x.From); + descriptor.Filter(x => x.Is); + } +} +``` + +**Convention & Implicit Factory & Type Descriptor** + +The configuration of the convention, the implicit type factory and the descirptors are very similar to the the two examples before. To not bloat the documentation with duplication we just refere to these two examples and to the reference implementation here //TODO LINK + +--- + +## Translating Filters + +Hot Chocolate can translate incoming filters requests directly onto collections or even on to the database. In the default implementation, the output of this translation is a Linq expression that can be applied to `IQueryable` and `IEnumerable`. You can choose to change the expression that is generated or can even create custom output. Hot Chocolate is using visitors to translate input objects. + +[Learn more about visitors here](/docs/hotchocolate/v12/api-reference/visitors). + +### Expression Filters + +Filter conventions make it easier to change how an expression should be generated. There are three different extension points you can use to change the behavior of the expression visitor. You do not have to worry about the visiting of the input object itself. + +##### Describe the Expression Visitor + +The expression visitor descriptor is accessible through the filter convention. By calling `UseExpressionVisitor` on the convention descriptor you gain access. The expression visitor has the default set of expressions preconfigured. + +```csharp +public class CustomConvention : FilterConvention +{ + protected override void Configure( + IFilterConventionDescriptor descriptor) + { + descriptor.UseExpressionVisitor() + } +} +``` + +The descriptor provides a fluent interface that is very similar to the one of the convention descriptor itself. You have to specify what _operation_ on which _filter kind_ you want to configure. 
You can drill with `Kind` and `Operation` and go back up by calling `And()`: + +```csharp +public class CustomConvention : FilterConvention +{ + protected override void Configure( + IFilterConventionDescriptor descriptor) + { + descriptor + .UseExpressionVisitor() + .Kind(FilterKind.String) + .Operation(FilterKind.Equals) + .And() + .And() + .Kind(FilterKind.Comparable) + .Operation(FilterKind.In) + } +} +``` + +**Visitation Flow** + +The expression visitor starts as any other visitor at the node you pass in. Usually, this is the node object value node of the filter argument. It then starts the visitation. Every time the visitor _enters_ or _leaves_ an object field, it looks for a matching configuration. If there is no special _enter_ behavior of a field, the visitor generates the expression for the combination of _kind_ and _operation_. + +The next two paragraphs show how the algorithm works in detail. + +_Enter_ + +On _entering_ a field, the visitor tries to get a `FilterFieldEnter` delegate for the `FilterKind` of the current field. If a delegate was found, executed, and the execution return true, the `Enter` method returns the _action_ specified by the delegate. In all other cases, the visitor tries to execute an `OperationHandler` for the combination `FilterKind` and `OperationKind`. If the handler returns true, the expression returned by the handler is added to the context. + +1. Let _field_ be the field that is visited +1. Let _kind_ be the `FilterKind` of _field_ +1. Let _operation_ be the `FilterOperationKind` of _field_ +1. Let _convention_ be the `FilterConvention` used by this visitor +1. Let _enterField_ be the `FilterFieldEnter` delegate for _kind_ on _convention_ +1. If _enterField_ is not null: + 1. Let _action_ be the visitor action of _enterField_ + 1. If _enterField_ returns true: + 1. **return** _action_ +1. Let _operationHander_ be the `FilterOperationHandler` delegate for (_kind_, _operation_) on _convention_ +1. If _operationHandler_ is not null: + 1. Let _expression_ be the expression generated by _operationHandler_ + 1. If _enterField_ returns true: + 1. enqueue _expression_ +1. **return** `SkipAndLeave` + +_Leave_ + +On _entering_ a field, the visitor tries to get and execute a `FilterFieldLeave` delegate for the `FilterKind` of the current field. + +1. Let _field_ be the field that is visited +1. Let _kind_ be the `FilterKind` of _field_ +1. Let _operation_ be the `FilterOperationKind` of _field_ +1. Let _convention_ be the `FilterConvention` used by this visitor +1. Let _leaveField_ be the `FilterFieldLeave` delegate for _kind_ on _convention_ +1. If _leaveField_ is not null: + 1. Execute _leaveField_ + +**Operations** + +The operation descriptor provides you with the method `Handler`. With this method, you can configure, how the expression for the _operation_ of this filter _kind_ is generated. You have to pass a delegate of the following type: + +```csharp +public delegate bool FilterOperationHandler( + FilterOperation operation, + IInputType type, + IValueNode value, + IQueryableFilterVisitorContext context, + [NotNullWhen(true)]out Expression? result); +``` + +This delegate might seem intimidating first, but it is not bad as it looks. If this delegate `true` the `out Expression?` is enqueued on the filters. This means that the visitor will pick it up as it composes the filters. 
+ +| Parameter | Description | +| ---------------------------------------- | --------------------------------------- | +| `FilterOperation operation` | The operation of the current field | +| `IInputType type` | The input type of the current field | +| `IValueNode value` | The AST value node of the current field | +| `IQueryableFilterVisitorContext context` | The context that builds up the state | +| `out Expression? result` | The generated expression | + +Operations handlers can be configured like the following: + +```csharp {10,13} +public class CustomConvention : FilterConvention +{ + protected override void Configure( + IFilterConventionDescriptor descriptor) + { + descriptor + .UseExpressionVisitor() + .Kind(FilterKind.String) + .Operation(FilterKind.Equals) + .Handler(YourVeryOwnHandler.HandleEquals) + .And() + .Operation(FilterKind.NotEquals) + .Handler(YourVeryOwnHandler.HandleNotEquals) + } +} +``` + +TODO: add example + +**Kind** + +There are two extension points on each _filter kind_. You can alter the _entering_ of a filter and the _leaving_. + +**Enter** +You can configure the entering with the following delegate: + +```csharp +public delegate bool FilterFieldEnter( + FilterOperationField field, + ObjectFieldNode node, + IQueryableFilterVisitorContext context, + [NotNullWhen(true)]out ISyntaxVisitorAction? action); +``` + +If this field returns _true_ the filter visitor will continue visitation with the specified _action_ in the out parameter `action`. [Check out the visitor documentation for all possible actions](http://addlinkshere). +If the field does not return true and a visitor action, the visitor will continue and search for a _operation handler_. After this, the visitor will continue with `SkipAndLeave`. + +| Parameter | Description | +| ---------------------------------------- | ------------------------------------ | +| `FilterOperationField field` | The current field | +| `ObjectFieldNode node` | The object node of the current field | +| `IQueryableFilterVisitorContext context` | The context that builds up the state | +| `out ISyntaxVisitorAction? action` | The visitor action | + +**Leave** +You can configure the entering with the following delegate: + +```csharp +public delegate void FilterFieldLeave( + FilterOperationField field, + ObjectFieldNode node, + IQueryableFilterVisitorContext context); +``` + +| Parameter | Description | +| ---------------------------------------- | ------------------------------------ | +| `FilterOperationField field` | The current field | +| `ObjectFieldNode node` | The object node of the current field | +| `IQueryableFilterVisitorContext context` | The context that builds up the state | diff --git a/website/src/docs/hotchocolate/v12/api-reference/index.md b/website/src/docs/hotchocolate/v12/api-reference/index.md new file mode 100644 index 00000000000..4a75b5e9079 --- /dev/null +++ b/website/src/docs/hotchocolate/v12/api-reference/index.md @@ -0,0 +1,7 @@ +--- +title: "Overview" +--- + +> We are still working on the documentation for Hot Chocolate so help us by finding typos, missing things or write some additional docs with us. + +The API Reference provides deep dives into specific subjects like [ASP.NET Core](/docs/hotchocolate/v12/api-reference/aspnetcore) configuration or how you can [extend filtering](/docs/hotchocolate/v12/api-reference/extending-filtering). 
diff --git a/website/src/docs/hotchocolate/v12/api-reference/language.md b/website/src/docs/hotchocolate/v12/api-reference/language.md new file mode 100644 index 00000000000..5282926684f --- /dev/null +++ b/website/src/docs/hotchocolate/v12/api-reference/language.md @@ -0,0 +1,86 @@ +--- +title: "Language" +--- + +# Abstract Syntax Tree (AST) + +Hot Chocolate seems to focus solely around `ObjectType`, `InputType` et al. These types work as an interface to configure the _GraphQL_ schema. This schema is used to parse and validate incoming requests. Under the hood, every `query`, `mutation` or `subscription` request is parsed into a so-called abstract syntax tree. Each node of this tree denotes a part of the incoming _GraphQL_ query. + +```graphql +query Users { + userName + address { + street + nr + } +} +``` + +```mermaid +graph TD; + OperationDefinitionNode --> s1["SelectionSetNode"] + s1["SelectionSetNode"] --> id5["FieldNode (userName)"] + s1["SelectionSetNode"] --> id1["FieldNode (address)"] + id1["FieldNode (address)"] --> s2["SelectionSetNode"] + s2["SelectionSetNode"] --> id3["FieldNode (street)"] + s2["SelectionSetNode"] --> id4["FieldNode (nr)"] + +``` + +--- + +# Syntax Node + +Every node in a syntax tree implements `ISyntaxNode`. + +> 💡 The `ToString` method of a syntax node prints the corresponding _GraphQL_ syntax. + +This interface defines the `NodeKind` of the node. + +**Node Kinds:** + +| Name | Description (Spec Link) | Context | Example | +| ------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------- | ------------------------------- | +| Name | [All names. e.g. Field, Argument ... ](http://spec.graphql.org/June2018/#sec-Names) | Both | foo | +| NamedType | [Denotes a reference to a type](https://spec.graphql.org/June2018/#NamedType) | Both | Foo | +| ListType | [Definition of a list](https://spec.graphql.org/June2018/#ListType) | Both | \[Foo] | +| NonNullType | [Definition of type that cannot be null](https://spec.graphql.org/June2018/#NonNullType) | Both | Foo! | +| Argument | [Representation of an argument. 
Has a _Name_ and a _Value_](https://spec.graphql.org/June2018/#sec-Language.Arguments) | Both | foo: "bar" | +| Directive | [Denotes a directive ](https://spec.graphql.org/June2018/#sec-Language.Directives) | Query | @foo | +| Document | [Describes a complete file or request a _GraphQL_ service operates on.](http://spec.graphql.org/June2018/#sec-Language.Document) | Query (out) | | +| OperationDefinition | [Describes a graphql operation like `query` `mutation` or `subscription`](http://spec.graphql.org/June2018/#sec-Language.Document) | Query (out) | query Foo {} | +| VariableDefinition | [The variables defined by an operation](http://spec.graphql.org/June2018/#VariableDefinitions) | Query (out) | (\$foo: String) | +| Variable | [A variable](https://spec.graphql.org/June2018/#sec-Language.Variables) | Query (out) | \$foo | +| SelectionSet | [specifies a selection of _Field_, _FragmentSpread_ or _InlineFragment_](http://spec.graphql.org/June2018/#sec-Selection-Sets) | Query (out) | {foo bar} | +| Field | [Describes a field as a part of a selection set](http://spec.graphql.org/June2018/#sec-Language.Fields) | Query (out) | foo | +| FragmentSpread | [Denotes a spread of a `FragemntDefinition`](https://spec.graphql.org/June2018/#FragmentSpread) | Query (out) | ...f1 | +| InlineFragment | [Denotes an inline fragment](https://spec.graphql.org/June2018/#sec-Inline-Fragments) | Query (out) | ... on Foo { bar} | +| FragmentDefinition | [Defines the definition of a fragment](https://spec.graphql.org/June2018/#FragmentDefinition) | Query (out) | fragment f1 on Foo {} | +| IntValue | [Denotes a `int` value](https://spec.graphql.org/June2018/#sec-Int-Value) | Query (in) | 1 | +| StringValue | [ Denotes a `string` value](https://spec.graphql.org/June2018/#sec-String-Value) | Query (in) | "bar" | +| BooleanValue | [Denotes a `boolean` value ](https://spec.graphql.org/June2018/#sec-Boolean-Value) | Query (in) | true | +| NullValue | [Denotes a `null` value ](https://spec.graphql.org/June2018/#sec-Null-Value) | Query (in) | null | +| EnumValue | [Denotes a `enum` value ](https://spec.graphql.org/June2018/#sec-Enum-Value) | Query (in) | FOO | +| FloatValue | [Denotes a _Float_ value](https://spec.graphql.org/June2018/#sec-Float-Value) | Query (in) | 0.2 | +| ListValue | [Denotes a _List_ value](https://spec.graphql.org/June2018/#sec-List-Value) | Query (in) | \["string"] | +| ObjectValue | [Denotes a _ObjectValue_ value ](https://spec.graphql.org/June2018/#sec-Input-Object-Values) | Query (in) | {foo: "bar" } | +| ObjectField | [Denotes a field of am input object type](https://spec.graphql.org/June2018/#ObjectField) | Query (in) | foo: "bar" | +| SchemaDefinition | [Definition of a schema](https://spec.graphql.org/June2018/#sec-Schema) | Schema | schema {} | +| OperationTypeDefinition | [This defines one of the root operations `Query`, `Mutation` or `Subscription` on the schema-definiton](https://spec.graphql.org/June2018/#RootOperationTypeDefinition) | Schema | query:FooQuery | +| ScalarTypeDefinition | [Definition of a scalar ](https://spec.graphql.org/June2018/#sec-Scalars) | Schema | scalar JSON | +| ObjectTypeDefinition | [Definition of an object type](https://spec.graphql.org/June2018/#sec-Objects) | Schema | type Foo{} | +| FieldDefinition | [Definition of a field](https://spec.graphql.org/June2018/#FieldDefinition) | Schema | bar:String | +| InputValueDefinition | [Definition of a input value of an argument](https://spec.graphql.org/June2018/#sec-Field-Arguments) | Schema | x: Float | +| 
InterfaceTypeDefinition | [Definition of an interface](https://spec.graphql.org/June2018/#sec-Interfaces) | Schema | interface NamedEntity {} | +| UnionTypeDefinition | [Definition of an union](https://spec.graphql.org/June2018/#sec-Unions) | Schema | union Ex = Foo \| Bar | +| EnumTypeDefinition | [Definition of an enum](https://spec.graphql.org/June2018/#sec-Enums) | Schema | enum Foo {BAR} | +| EnumValueDefinition | [Definition of an enum value](https://spec.graphql.org/June2018/#sec-Enum) | Schema | BAR | +| InputObjectTypeDefinition | [Definition of an input type definition](https://spec.graphql.org/June2018/#sec-Input-Objects) | Schema | input FooInput {} | +| SchemaExtension | [Definition of a schema extension](https://spec.graphql.org/June2018/#sec-Schema-Extension) | Schema | extend schema {} | +| ScalarTypeExtension | [Definition of a scalar extension](https://spec.graphql.org/June2018/#sec-Scalar-Extensions) | Schema | extend scalar Foo @bar | +| ObjectTypeExtension | [Definition of an object type extension](https://spec.graphql.org/June2018/#sec-Object-Extensions) | Schema | extend type Foo { name} | +| InterfaceTypeExtension | [Definition of an interface type extension](https://spec.graphql.org/June2018/#sec-Interface-Extensions) | Schema | extend interface NamedEntity {} | +| UnionTypeExtension | [Definition of an union type extension](https://spec.graphql.org/June2018/#sec-Union-Extensions) | Schema | extend union Ex = Foo{} | +| EnumTypeExtension | [Definition of an enum type extension](https://spec.graphql.org/June2018/#sec-Enum-Extensions) | Schema | extend enum foo{} | +| InputObjectTypeExtension | [Definition of an input types](https://spec.graphql.org/June2018/#sec-Input-Object-Extensions) | Schema | input foo {} | +| DirectiveDefinition | [Definition of a directive](https://spec.graphql.org/June2018/#sec-Type-System.Directives) | Schema | directive @foo on | diff --git a/website/src/docs/hotchocolate/v12/api-reference/migrate-from-10-to-11.md b/website/src/docs/hotchocolate/v12/api-reference/migrate-from-10-to-11.md new file mode 100644 index 00000000000..82a8258b77e --- /dev/null +++ b/website/src/docs/hotchocolate/v12/api-reference/migrate-from-10-to-11.md @@ -0,0 +1,792 @@ +--- +title: Migrate from Hot Chocolate GraphQL server 10 to 11 +--- + +This guide will walk you through the manual migration steps to get you Hot Chocolate GraphQL server to version 11. + +As a general preparation, we recommend removing all HotChocolate.\* package references from your project. Then start by adding the `HotChocolate.AspNetCore` package. The server package now contains most of the needed packages. + +When do I need to add other Hot Chocolate packages explicitly? + +We have now added the most common packages to the Hot Chocolate core. But there are certain areas where we still need to add some additional packages. + +| Package | Topic | +| ---------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| HotChocolate.AspNetCore.Authorization | The authorization package adds the authorization directive and integrates with Microsoft Authorization Policies | +| HotChocolate.Data | The new data package represents our integration with all kinds of data sources. This package provides the fundamentals for filtering, sorting, and projection logic. | +| HotChocolate.Types.Spatial | This package provides GeoJson spatial types. 
| +| HotChocolate.Data.Spatial | The package integrates the spatial types with the data package to allow for spatial filtering, sorting, and projections. | +| HotChocolate.Subscriptions.Redis | The in-memory subscription provider, is now integrated by default. To have an integration with Redis, you need to add this package. | +| HotChocolate.PersistedQueries.FileSystem | This package provides a persisted query storage for the file system. | +| HotChocolate.PersistedQueries.Redis | This package provides a persisted query storage for Redis. | + +# ASP.NET Core + +One of the main focuses of version 11 was to create a new configuration API that brings all our builders together into one unified API. This also means that we had to introduce breaking changes to the way we +configure schemas. + +After you have cleaned up your packages, head over to the `Startup.cs` to start with the new configuration API migration. + +## ConfigureServices + +In your `Startup.cs` head over to the `ConfigureServices` methods. +The configuration of a schema has slightly changed, and the new configuration API has replaced the `SchemaBuilder`. + +We now start with `AddGraphQLServer` to define a new GraphQL server, `AddGraphQLServer`, returns the new `IRequestExecutorBuilder` that lets us apply all the configuration methods that used to be on the `SchemaBuilder`, `StitchingBuilder` and the `QueryExecutionBuilder`. + +**Old:** + +```csharp +services.AddGraphQL(sp => + SchemaBuilder.New() + .AddServices(sp) + .AddQueryType() + .AddMutationType() + ... + .Create()); +``` + +**New:** + +```csharp +services + .AddGraphQLServer() + .AddQueryType() + .AddMutationType() + ... +``` + +If you were using the `QueryRequestBuilder` to configure request options or change the request pipeline, you need to add those things to the configuration chain of the ```IRequestExecutorBuilder`. + +```csharp +services + .AddGraphQLServer() + .AddQueryType() + .AddMutationType() + ... + .ModifyRequestOptions(o => o.ExecutionTimeout = TimeSpan.FromSeconds(180)); +``` + +## Configure + +After migrating the schema configuration, the next area that has fundamentally changed is the schema middleware. + +Hot Chocolate server now embraces the new endpoint routing API from ASP.NET core and with that brings a lot of new features. Head over [here](/docs/hotchocolate/v12/api-reference/aspnetcore) to read more about the ASP.NET Core integration. + +**Old:** + +```csharp +app.UseGraphQL(); +``` + +**New:** + +```csharp +app.UseRouting(); + +// routing area + +app.UseEndpoints(x => x.MapGraphQL()); +``` + +## Request Interceptor + +The query request interceptor was reworked and we renamed it to `IHttpRequestInterceptor`. + +```csharp +public interface IHttpRequestInterceptor +{ + ValueTask OnCreateAsync( + HttpContext context, + IRequestExecutor requestExecutor, + IQueryRequestBuilder requestBuilder, + CancellationToken cancellationToken); +} +``` + +**Old:** + +```csharp +services.AddQueryRequestInterceptor( + (context, builder, ct) => + { + // your code + }); +``` + +**New:** + +```csharp +services.AddGraphQLServer() + ... + .AddHttpRequestInterceptor( + (context, executor, builder, ct) => + { + // your code + }); +``` + +You can also extend `DefaultHttpRequestInterceptor` and inject it like the following. + +```csharp +services.AddGraphQLServer() + ... + .AddHttpRequestInterceptor(); +``` + +> A request interceptor is a service that is used by all hosted schemas. 
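+
+To give an idea of what such a derived interceptor can look like, here is a small sketch that copies a request header into the request state. The header name and property key are made up, and `SetProperty` is simply one way to pass the value along to the execution.
+
+```csharp
+public class MyHttpRequestInterceptor : DefaultHttpRequestInterceptor
+{
+    public override ValueTask OnCreateAsync(
+        HttpContext context,
+        IRequestExecutor requestExecutor,
+        IQueryRequestBuilder requestBuilder,
+        CancellationToken cancellationToken)
+    {
+        // Illustration only: forward an assumed custom header to the request state.
+        if (context.Request.Headers.TryGetValue("x-tenant", out var tenant))
+        {
+            requestBuilder.SetProperty("tenant", tenant.ToString());
+        }
+
+        return base.OnCreateAsync(
+            context, requestExecutor, requestBuilder, cancellationToken);
+    }
+}
+```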
+ +## Entity Framework Serial Execution + +The serial execution for Entity Framework compatibility is gone. If you use Entity Framework Core we recommend using version 5 and the new context factory in combination with context pooling. This allows the execution engine to execute in parallel and still be memory efficient since context objects are pooled. + +Another variant here is to use our scoped service feature that scopes services for the resolver pipeline. This is explained in our GraphQL Workshop project. + +https://github.com/ChilliCream/graphql-workshop + +# Schema / Resolvers + +### Field ordering + +Hot Chocolate 11 follows the spec and returns the fields in the order they were defined. This feature +makes migrations harder because the schema snapshot looks different compared to version 11. You can change this behavior with the following setting. + +```csharp + builder.ModifyOptions(x => x.SortFieldsByName = true) +``` + +## DataLoaders + +With Hot Chocolate server 11, we have embraced the new DataLoader spec version 2. With that, we have decoupled the scheduler from the DataLoader itself, meaning you now have to pass on the `IBatchScheduler` to the base implementation of the DataLoader. +Apart from that, DataLoader now uses `ValueTask` instead of `Task` when doing async work. + +If you were adding the `DataLoaderRegistry` to the services, remove that code since `service.AddDataLoaderRegistry` is no longer needed. + +**Old:** + +```csharp +public class FooDataLoader : DataLoaderBase +{ + private readonly IFooRepository _fooRepository; + + public FooDataLoader(IFooRepository fooRepository) + { + _fooRepository = fooRepository; + } + + + protected override async Task>> FetchAsync( + IReadOnlyList keys, + CancellationToken cancellationToken) + { + .... + } +} +``` + +**New:** + +```csharp +public class FooDataLoader : DataLoaderBase +{ + private readonly IFooRepository _fooRepository; + + public FooDataLoader( + // ▼ + IBatchScheduler scheduler, + IFooRepository fooRepository) + : base(scheduler) + { + _fooRepository = fooRepository; + } + + + // ▼ + protected override async ValueTask>> FetchAsync( + IReadOnlyList keys, + CancellationToken cancellationToken) + { + + .... +} +``` + +## Node Resolver + +With version 11, we have reworked how Relay node types are defined. Furthermore, we added pure code-first (annotation-based) support. + +**Old:** + +```csharp +descriptor + .AsNode() + .IdField(d => d.Id) + .NodeResolver(async (ctx, id) => await ctx + .DataLoader() + .LoadAsync(id, ctx.RequestAborted)) +``` + +**New:** + +The following example essentially aligns very closely to the old variant. + +```csharp +descriptor + .ImplementsNode() + .IdField(d => d.Id) + .ResolveNode(async (ctx, id) => await ctx + .DataLoader() + .LoadAsync(id, ctx.RequestAborted)) +``` + +But, we can now also use an external resolver like with standard resolvers. This allows us to write better testable code that takes advantage of the method parameter injection we use in everyday resolvers. + +```csharp +descriptor + .ImplementsNode() + .IdField(d => d.Id) + .ResolveNodeWith(t => t.GetNodeAsync(default, default)); +``` + +But we can go even further now with pure code-first (annotation-based) support. By just annotating the entity with the `NodeAttribute`, we essentially told the schema builder that this is a node. The type initialization can then try to infer the node resolver directly from the type. 
+ +```csharp +[Node] +public class MyEntity +{ + public string Id { get; set; } + + public async Task GetAsync(....) + { + .... + } +} +``` + +Often, however, we want the repository logic decoupled from our domain object/entity. In this case, we can specify the entity resolver type. + +```csharp +[Node(NodeResolverType = typeof(MyEntityResolver))] +public class MyEntity +{ + public string Id { get; set; } +} + +public class MyEntityResolver +{ + public async Task GetAsync(....) + { + .... + } +} +``` + +There are more variants possible, but to give an impression of the new convenience and flexibility around nodes. As a side note, if you do not want the node attribute on the domain objects, you can also now add your very own attribute or interface to mark this and rewrite that in the schema building process to the `NodeAttribute`. + +## Pagination + +The first thing to note around pagination is that we listened to a lot of feedback and have removed the `PaginationAmountType`. + +Moreover, we have introduced new PagingOptions, which can be set with the new configuration API on the schema level. With the new options, you can configure the `MaxPageSize`, `DefaultPageSize` and whether the total count shall be included `IncludeTotalCount`. + +```csharp +builder.SetPagingOptions( + new PagingOptions() + { + MaxPageSize = searchOptions.PaginationAmount, + DefaultPageSize = searchOptions.PaginationAmount, + IncludeTotalCount = true + }); +``` + +Further, you can override the paging option on the resolver level. + +```csharp +[UsePaging(MaxPageSize = 100)] +``` + +```csharp +descriptor.Field(...).UsePaging(maxPageSize = 100)... +``` + +## Projections + +The selection middleware, that was available in `HotChocolate.Types.Selections` was replaced by the projection middleware from `HotChocolate.Data`. + +**Old:** + +```csharp +descriptor.Field(...).UseSelection()... +``` + +**New:** + +```csharp +descriptor.Field(...).UseProjection()... +``` + +Similarly, the attribute `[UseSelection]` was replaced by `[UseProjection]`. + +To use projections with your GraphQL endpoint you have to register it on the schema: + +```csharp +services.AddGraphQLServer() + // Your schema configuration + .AddProjections(); +``` + +## Enum Type + +Hot Chocolate server 11 now follows the spec recommendation with the new enum name conventions and formats the enum values by default as UPPER_SNAIL_CASE. + +To avoid breaking changes to your schema, you will have to override the naming convention: + +**Configuration:** + +```csharp + builder + .AddConvention(new CompatibilityNamingConvention()) +``` + +**Convention:** + +```csharp + public class CompatibilityNamingConvention + : DefaultNamingConventions + { + public override NameString GetEnumValueName(object value) + { + if (value == null) + { + throw new ArgumentNullException(nameof(value)); + } + + return value.ToString().ToUpperInvariant(); + } + } +``` + +## IResolverContext.Source + +The source result stack was removed from the resolver context for performance reasons. If you need such a functionality, you can write a middleware that aggregates the resulting path on the scoped context. 
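+
+One possible shape of such a middleware is sketched below, before the **Old**/**New** comparison; the dictionary key and the choice to store the parent per path segment are purely illustrative.
+
+```csharp
+services
+    .AddGraphQLServer()
+    // ... your schema configuration ...
+    .UseField(next => async context =>
+    {
+        // Illustration only: remember the parent object under the current path
+        // so resolvers further down can look it up from the scoped context.
+        context.ScopedContextData = context.ScopedContextData
+            .SetItem(context.Path.ToString(), context.Parent<object>());
+
+        await next(context);
+    });
+```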
+ +**Old:** + +```csharp + public class FooType : ObjectType + { + private static readonly object _empty = new object(); + + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor + .Field("bar") + .Type>() + .Resolver(_empty); + } + } + + public class BarType : ObjectType + { + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor + .Field("baz") + .Type() + .Resolve(ctx => + { + Foo foo = (Foo)ctx.Source.Pop().Peek(); + return foo.Baz; + }); + } + } + +``` + +**New:** + +```csharp + public class FooType : ObjectType + { + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor + .Field("bar") + .Type>() + .Resolve( + ctx => + { + ctx.ScopedContextData = + ctx.ScopedContextData.SetItem(nameof(Foo), ctx.Parent()); + return new object(); + }); + } + } + + public class BarType : ObjectType + { + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor + .Field("baz") + .Type() + .Resolve( + ctx => + { + if (ctx.ScopedContextData.TryGetValue(nameof(Foo), out object? potentialFoo) && + potentialFoo is Foo foo) + { + return foo.Baz; + } + + throw new GraphQLException( + ErrorBuilder.New() + .AddLocation(ctx.Field.SyntaxNode) + .SetMessage("Foo was not pushed down.") + .SetPath(ctx.Path) + .Build()); + }); + } + } +``` + +## Authorization + +If you use authorization, you need to add a package reference to `HotChocolate.AspNetCore.Authorization`. + +**Old:** + +```csharp + builder.AddAuthorizeDirectiveType() +``` + +**New:** + +```csharp + builder.AddAuthorization() +``` + +## TypeBinding + +We have renamed the binding method from `BindClrType` to `BindRuntimeType` to make it more clear what it does. + +**Old:** + +```csharp + builder.BindClrType() +``` + +**New:** + +```csharp + builder.BindRuntimeType() +``` + +## FieldMiddleware + +Since all configuration APIs were integrated into one, we needed to make it more specific for what a middleware is defined. `UseField` defines a middleware that is applied to the resolver pipeline / field pipeline whereas `UseRequest` defines a middleware that is defined for the request processing. + +**Old:** + +```csharp + builder.Use() +``` + +**New:** + +```csharp + builder.UseField() +``` + +# Stitching + +The schema stitching configuration API has been completely integrated into the new configuration API. This means that a Gateway is nothing more than a GraphQL schema, which will make it easier for new users. However, you will need to completely rewire your stitching configuration. + +## Configuration + +The stitching builder no longer exists in version 11 and you need to use the new configuration API to configure your gateway. + +**Old:** + +```csharp + services.AddStitchedSchema(x => ....); +``` + +**New:** + +```csharp + services.AddGraphQLServer().... +``` + +### AddSchemaFromHttp + +Registering a remote schema has slightly changed in version 11 to make it more clear that we are adding a remote schema into the local gateway schema. Removing, root types and importing a remote schema can be done in one go now. + +**Old:** + +```csharp + builder.AddSchemaFromHttp("SomeSchema").IgnoreRootTypes("SomeSchema"); +``` + +**New:** + +```csharp + builder.AddRemoteSchema("SomeSchema", ignoreRootTypes: true); +``` + +## AddSchemaConfiguration + +In version 11 it is now much easier to configure the gateway schema. 
+ +**Old:** + +```csharp + services.AddStitchedSchema(x => x.AddSchemaConfiguration(y => y.RegisterType())); +``` + +**New:** + +```csharp + services + .AddGraphQLServer() + .AddType(); +``` + +## IgnoreField + +The order of the parameters in ignore field and ignore type has changed since we moved optional parameters to the end. + +**Old:** + +```csharp + services.AddStitchedSchema(x => x.IgnoreField("SchemaName", "TypeName, "FieldName")); +``` + +**New:** + +```csharp + services + .AddGraphQLServer() + .IgnoreField("TypeName, "FieldName", "SchemaName") +``` + +## SetExecutionOptions + +Execution options can now be configured on the root schema directly like for any other schema: + +**Old:** + +```csharp + services.AddStitchedSchema( + x => x.SetExecutionOptions( + new QueryExecutionOptions + { + TracingPreference = TracingPreference.OnDemand + })); +``` + +**New:** + +```csharp + services + .AddGraphQLServer() + .ModifyRequestOptions(x => x.TracingPreference = TracingPreference.OnDemand); +``` + +## Configuring a downstream schema + +In case you want to configure a downstream schema, you can now just use the new configuration API since all downstream schemas have an in-memory representation. + +```csharp + services + .AddGraphQLServer() + .AddRemoteSchema("SomeSchema"); + + services + .AddGraphQL("SomeSchema") + .AddType(new IntType("SpecialIntegerType")); +``` + +## PaginationAmount + +The `PaginationAmount` scalar was removed since it caused a lot of issues with clients and only provided limited benefit. The arguments `first` and `last` use now `Int` as a type. To avoid breaking schemas on a stitched schema, you can add a rewriter that rewrites all +`first: Int` and `last: Int` on a connection to `first: PaginationAmount` and `last: PaginationAmount`. +You also have to make sure that you register a new `IntType` on the root schema and rewrite all +downstream schemas. + +**Configuration:** + +```csharp + services + .AddGraphQLServer() + .AddRemoteSchema("SomeSchema") + .ConfigureSchema(x => + x.AddType(new IntType()) + .AddType(new IntType("PaginationAmount"))) + .AddMergedDocumentRewriter( + d => (DocumentNode)new PagingAmountRewriter().Rewrite(d, null)); + + services + .AddGraphQL("SomeSchema") + .ConfigureSchema(x => + x.AddType(new IntType()) + .AddType(new IntType("PaginationAmount"))); +``` + +**PagingAmountRewriter:** + +```csharp + internal class PagingAmountRewriter : SchemaSyntaxRewriter + { + protected override FieldDefinitionNode RewriteFieldDefinition( + FieldDefinitionNode node, + object? 
context) + { + if (node.Type.NamedType().Name.Value.EndsWith("Connection") && + (node.Arguments.Any( + t => t.Name.Value.EqualsOrdinal("first") && + t.Type.NamedType().Name.Value.EqualsOrdinal("Int")) + || node.Arguments.Any( + t => t.Name.Value.EqualsOrdinal("last") && + t.Type.NamedType().Name.Value.EqualsOrdinal("Int")) + )) + { + var arguments = node.Arguments.ToList(); + + InputValueDefinitionNode first = + arguments.FirstOrDefault(t => t.Name.Value.EqualsOrdinal("first")); + + InputValueDefinitionNode last = + arguments.FirstOrDefault(t => t.Name.Value.EqualsOrdinal("last")); + + if (first != null) arguments[arguments.IndexOf(first)] = first.WithType(RewriteType(first.Type, "PaginationAmount")); + + if (last != null) arguments[arguments.IndexOf(last)] = last.WithType(RewriteType(last.Type, "PaginationAmount")); + + node = node.WithArguments(arguments); + } + + return base.RewriteFieldDefinition(node, context); + } + + private static ITypeNode RewriteType(ITypeNode type, NameString name) + { + if (type is NonNullTypeNode nonNullType) + { + return new NonNullTypeNode( + (INullableTypeNode)RewriteType(nonNullType.Type, name)); + } + + if (type is ListTypeNode listType) + { + return new ListTypeNode(RewriteType(listType.Type, name)); + } + + return new NamedTypeNode(name); + } + } + + internal static class StringExtensions + { + public static bool EqualsOrdinal(this string value, string other) => + string.Equals(value, other, StringComparison.Ordinal); + } +``` + +## Batch responses + +In v10, responses to batched operations were returned as a JsonArray. In v11 the default is to return MultiPartChunked responses. To switch back to JsonArray, configure the HttpResult serializer as follows: + +```csharp +services.AddHttpResultSerializer( + batchSerialization: HttpResultSerialization.JsonArray +); +``` + +# Testing + +We have added a couple of test helpers to make the transition to the new configuration API easier. 
+ +## Schema Snapshot Tests + +**Old:** + +```csharp + SchemaBuilder.New() + .AddQueryType() + .Create() + .ToString() + .MatchSnapshot(); +``` + +**New:** + +```csharp + ISchema schema = + await new ServiceCollection() + .AddGraphQL() + .AddQueryType() + .BuildSchemaAsync(); + + schema.Print().MatchSnapshot(); +``` + +## Request Tests + +**Old:** + +```csharp + IQueryExecutor executor = + SchemaBuilder.New() + .AddQueryType() + .Create() + .MakeExecutable(); +``` + +**New:** + +```csharp + IRequestExecutor executor = + await new ServiceCollection() + .AddGraphQL() + .AddQueryType() + .BuildRequestExecutorAsync(); + + IExecutionResult result = + await executor.ExecuteAsync("{ __typename }"); + + result.ToJson().MatchSnapshot(); +``` + +Or you can directly build and execute: + +```csharp + IExecutionResult result = + await new ServiceCollection() + .AddGraphQL() + .AddQueryType() + .ExecuteRequestAsync("{ __typename }"); + + result.ToJson().MatchSnapshot(); +``` + +## DataLoader Testing + +Due to the changed constructor you now need to also create a scheduler for the dataloaders + +Old + +```csharp + FooDataLoader dataLoader = new FooDataLoader( fooRepoMock.Object); +``` + +New + +```csharp + var scheduler = new BatchScheduler(); + FooDataLoader dataLoader = new FooDataLoader( + scheduler, + fooRepoMock.Object); +``` + +// TODO : Type Converter diff --git a/website/src/docs/hotchocolate/v12/api-reference/migrate-from-11-to-12.md b/website/src/docs/hotchocolate/v12/api-reference/migrate-from-11-to-12.md new file mode 100644 index 00000000000..f7aed60c37b --- /dev/null +++ b/website/src/docs/hotchocolate/v12/api-reference/migrate-from-11-to-12.md @@ -0,0 +1,323 @@ +--- +title: Migrate from Hot Chocolate GraphQL server 11 to 12 +--- + +This guide will walk you through the manual migration steps to get your Hot Chocolate GraphQL server to version 12. + +# Resolvers + +We have reworked the resolver compiler and are now demanding that the `ParentAttribute` is used when an argument is referring to the parent object. +This is done since in some cases people want to get the parent object which is the same runtime type as an argument value. + +**v11** + +```csharp +public string MyResolver(Person parent, string additionalInput) +{ + // Code omitted for brevity +} +``` + +**v12** + +```csharp +public string MyResolver([Parent] Person parent, string additionalInput) +{ + // Code omitted for brevity +} +``` + +# Scalars + +We changed some defaults around scalars. These new defaults can break your existing schema but are, in general, better for newcomers and align better with the overall GraphQL ecosystem. Of course, you can naturally opt out of these new defaults to preserve your current schema's integrity. + +## UUID + +We changed the name of the UUID scalar from `Uuid` to `UUID`. To maintain the old name, register the type manually like the following: + +```csharp +services + .AddGraphQLServer() + .AddType(() => new UuidType("Uuid")); +``` + +Further, we changed the default serialization of UUID values from format `N` (`nnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnn`) to format `D` (`nnnnnnnn-nnnn-nnnn-nnnn-nnnnnnnnnnnn`). While the format `N` saved a few payload characters, new users, in general, often had issues with that format and some other tooling. New users will now, by default, have a better experience when using non-ChilliCream tooling. + +To preserve the old format, you can directly provide the format in the scalar. 
+ +```csharp +services + .AddGraphQLServer() + .AddType(() => new UuidType(defaultFormat: 'N')); +``` + +In order to fully preserve version 11 behavior do: + +```csharp +services + .AddGraphQLServer() + .AddType(() => new UuidType("Uuid", defaultFormat: 'N')); +``` + +## URL + +We changed the name of the URL scalar from `Url` to `URL`. To maintain the old name, register the type manually like the following: + +```csharp +services + .AddGraphQLServer() + .AddType(() => new UrlType("Url")); +``` + +# Pagination + +## ConnectionType + +In v12 we have removed the `ConnectionType` and `ConnectionType`. + +**v11** + +```csharp +descriptor + .Field("users") + .UsePaging() + .Type>() + .Resolver(context => + { + // Omitted code for brevity + }); +``` + +**v12** + +```csharp +descriptor + .Field("users") + .UsePaging() + .Resolver(context => + { + // Omitted code for brevity + }); +``` + +## Connection naming + +We have changed the way we infer the name for the connection type when using cursor-based pagination. By default, the connection name is now inferred from the field name instead of the type name. + +```sdl +type Person { + friends: [Person] +} +``` + +In version 11, we would have created a connection named `PersonConnection`. + +```sdl +type Person { + friends(first: Int, last: Int, after: String, before: String): PersonConnection +} +``` + +In version 12, we now will infer the connection name as `FriendsConnection`. + +```sdl +type Person { + friends(first: Int, last: Int, after: String, before: String): FriendsConnection +} +``` + +To keep your schema stable when you migrate, you can switch the behavior back to how you did in version 11. + +```csharp +services + .AddGraphQLServer() + .SetPagingOptions(new PagingOptions{ InferConnectionNameFromField = false }) + ... +``` + +Moreover, you now can explicitly define the connection name per field. + +```csharp +public class Person +{ + [UsePaging(ConnectionName = "Persons")] + public IQueryable GetFriends() => ... +} +``` + +[Reference](/docs/hotchocolate/v12/fetching-data/pagination#naming) + +## MongoDB Paging + +In version 11 we had the `UseMongoDbPagingAttribute` and the `UseMongoDbOffsetPagingAttribute`, which we removed with version 11. In version 12 you now can use the standard attributes `UsePagingAttribute` and `UseOffsetPagingAttribute`. + +To use these attributes with mongo, you need to register the mongo paging provider with your GraphQL configuration: + +```csharp +services + .AddGraphQLServer() + .AddMongoDbPagingProviders() + ... +``` + +[Reference](/docs/hotchocolate/v12/fetching-data/pagination#providers) + +# Records + +With version 11, we added support for records and added the ability to infer attributes from parameters. This, in the end, leads to more errors than benefits. With version 12, we removed this feature. Use the official' property' keyword to write records in C# short-hand syntax when annotating properties. + +```csharp +public record Foo([property: ID] string Id); +``` + +# Instrumentation + +We added more instrumentation events and generalized more how one can tap into our internal events. The class `DiagnosticEventListener` is now obsolete and replaced with `ExecutionDiagnosticEventListener`. This is due to new event listener classes like `DataLoaderDiagnosticEventListener`. Most virtual methods previously returning IActivityScope now return IDisposable. 
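+
+As a rough sketch of what a migrated listener can look like (the timing logic and the `RequestScope` type are placeholders; the exact set of virtual methods is covered in the instrumentation docs linked below):
+
+```csharp
+public class MyExecutionEventListener : ExecutionDiagnosticEventListener
+{
+    // Scopes are now plain IDisposable instead of IActivityScope.
+    public override IDisposable ExecuteRequest(IRequestContext context)
+    {
+        var stopwatch = Stopwatch.StartNew();
+        return new RequestScope(stopwatch);
+    }
+
+    private sealed class RequestScope : IDisposable
+    {
+        private readonly Stopwatch _stopwatch;
+
+        public RequestScope(Stopwatch stopwatch) => _stopwatch = stopwatch;
+
+        public void Dispose() =>
+            Console.WriteLine($"Request took {_stopwatch.ElapsedMilliseconds} ms");
+    }
+}
+```
+
+The listener is then registered on the request executor builder, e.g. `services.AddGraphQLServer().AddDiagnosticEventListener<MyExecutionEventListener>()`.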
+ +[Learn more about instrumentation](/docs/hotchocolate/v12/server/instrumentation) + +# Relay + +Previously the configuration of the Relay integration was focused around the `EnableRelaySupport()` method. It allowed you to enable Global Object Identification and automatically adding a query field to mutation payloads. + +The problem is that `EnableRelaySupport()` always enabled the Global Object Identification feature. This is not obviously implied by the name and also prevents you from using the other feature in isolation. + +Therefore we introduced two separate APIs to give you more explicit control over which parts of the Relay integration you want to enable. + +## Global Object Identification + +**v11** + +```csharp +services + .AddGraphQLServer() + .EnableRelaySupport(); +``` + +**v12** + +```csharp +services + .AddGraphQLServer() + .AddGlobalObjectIdentification(); +``` + +[Learn more about Global Object Identification](/docs/hotchocolate/v12/defining-a-schema/relay#global-object-identification) + +## Query field in Mutation payloads + +**v11** + +```csharp +services + .AddGraphQLServer() + .EnableRelaySupport(new RelayOptions + { + AddQueryFieldToMutationPayloads = true, + QueryFieldName = "rootQuery", + MutationPayloadPredicate = type => type.Name.Value.EndsWith("Result") + }); +``` + +**v12** + +```csharp +sevices + .AddGraphQL() + .AddQueryFieldToMutationPayloads(options => + { + options.QueryFieldName = "rootQuery"; + options.MutationPayloadPredicate = + type => type.Name.Value.EndsWith("Result"); + }); +``` + +If you just want to enable the feature without further configuration, you can omit the `options =>` action. + +> ⚠️ Note: Since `EnableRelaySupport()` previously always implied the usage of Global Object Identification, you might have to enable Global Object Identification separately as well. + +[Learn more about Query field in Mutation payloads](/docs/hotchocolate/v12/defining-a-schema/relay#query-field-in-mutation-payloads) + +# DataLoader + +We have consolidated the DataLoader base classes into the GreenDonut package which has no dependency on any HotChocolate packages. This allows for people using DataLoader in their business layer without having to reference GraphQL related packages. In your DataLoader classes the namespace `HotChocolate.Fetching` and `HotChocolate.DataLOader` are no longer needed. + +Second, we optimized memory usage of DataLoader and it is now best practice to let the DI inject the DataLoaderOptions into the DataLoader. + +**v11** + +```csharp +public class CustomBatchDataLoader : BatchDataLoader +{ + public CustomBatchDataLoader(IBatchScheduler batchScheduler) + : base(batchScheduler) + { + + } + + // code omitted for brevity. +} +``` + +**v12** + +```csharp +public class CustomBatchDataLoader : BatchDataLoader +{ + public CustomBatchDataLoader(IBatchScheduler batchScheduler, DataLoaderOptions options) + : base(batchScheduler, options) + { + + } + + // code omitted for brevity. +} +``` + +Allowing the DI to inject the options will allow the DataLoader to use the new shared pooled cache objects. + +# Custom naming conventions + +If you're using a custom naming convention and have xml documentation enabled, you'll need to modify the way the naming convention is hooked up +else your comments will disappear from your schema. 
+ +**v11** + +```csharp +public class CustomNamingConventions : DefaultNamingConventions +{ + public CustomNamingConventions() + : base() { } +} + +services + .AddGraphQLServer() + .AddConvention(sp => new CustomNamingConventions()) // or + .AddConvention(); +``` + +**v12** + +```csharp +public class CustomNamingConventions : DefaultNamingConventions +{ + public CustomNamingConventions(IDocumentationProvider documentationProvider) + : base(documentationProvider) { } +} + +IReadOnlySchemaOptions capturedSchemaOptions; +services + .AddGraphQLServer() + .ModifyOptions(opt => capturedSchemaOptions = opt) + .AddConvention(sp => new CustomNamingConventions( + new XmlDocumentationProvider( + new XmlDocumentationFileResolver( + capturedSchemaOptions.ResolveXmlDocumentationFileName), + sp.GetApplicationService>() + ?? new NoOpStringBuilderPool()))); +``` + +# Miscellaneous + +* `IObjectField` + * If you were using `IObjectField.Member`, you'll likely want to move to `IObjectField.ResolverMember` (as `.Member` can be `null` in some cases now where it previously wasn't; and `.ResolverMember` will fall back to `.Member`). diff --git a/website/src/docs/hotchocolate/v12/api-reference/options.md b/website/src/docs/hotchocolate/v12/api-reference/options.md new file mode 100644 index 00000000000..d2e45a4a348 --- /dev/null +++ b/website/src/docs/hotchocolate/v12/api-reference/options.md @@ -0,0 +1,24 @@ +--- +title: Schema Options +--- + +Hot Chocolate distinguishes between schema and execution options. Schema options relate to the type system and execution options to the query engine. + +| Member | Type | Default | Description | +| ---------------------- | -------- | -------------- | --------------------------------------------------------------------------- | +| `QueryTypeName` | `string` | `Query` | The name of the query type. | +| `MutationTypeName` | `string` | `Mutation` | The name of the mutation type. | +| `SubscriptionTypeName` | `string` | `Subscription` | The name of the subscription type. | +| `StrictValidation` | `bool` | `true` | Defines if the schema is allowed to have errors like missing resolvers etc. | + +The schema options allow to alter the overall execution behaviour. The options can be set during schema creation. + +```csharp +SchemaBuilder.New() + .ModifyOptions(opt => + { + opt.QueryTypeName = "Foo"; + }) + ... + .Create() +``` diff --git a/website/src/docs/hotchocolate/v12/api-reference/visitors.md b/website/src/docs/hotchocolate/v12/api-reference/visitors.md new file mode 100644 index 00000000000..3b04f1f7ec9 --- /dev/null +++ b/website/src/docs/hotchocolate/v12/api-reference/visitors.md @@ -0,0 +1,149 @@ +--- +title: "Visitors" +--- + +Hot Chocolate creates an abstract syntax tree for every incoming request. The execution engine evaluates this syntax tree in many different ways. Validation is a good example. Every incoming request has to be validated. The execution engine has to be sure that the semantic of the requested document is correct. A set of rules is applied to the syntax tree, to find potential semantic flaws. + +Usually, you do not have to access the _AST_ directly. The AST only becomes significant, when you want to change execution behavior based on the structure of the query. For example features like _Filtering_, _Sorting_, or _Selection_, analyze the incoming query and generate expressions based on it. + +Hot Chocolate provides you with different APIs that support you to traverse these trees. 
The `SyntaxWalker` is a visitor that has built-in all the logic to _walk down a syntax tree_. + +The `SyntaxWalker` is completely stateless. All the state is on a context object that is passed along. The generic argument `TContext` of `SyntaxWalker` denotes the type of the context. + +To start the visitation of a _GraphQL_ syntax tree, you have to pass the node and the context the visitation should start from to the visitors `Visit` method. + +--- + +# Visitation + +To start the visitation of a _GraphQL_ syntax tree, you have to pass the node and the context the visitation should start from to the visitors `Visit` method. On its way down the syntax tree, the visitor _enters_ a node. The visitor then gets the children of the current node and _enters_ its children. Once the visitor reached a leaf node, it starts walking back up the tree and _leaves_ all the nodes. The visitor provides a virtual `Enter` and a virtual `Leave` method for all _GraphQL_ AST nodes. These methods are called from the visitor as it _enters_ or _leaves_ a node. + +The syntax walker provides a few methods in addition to the `Enter` and `Leave` methods. For these two methods, there are also convenience methods that are called right _before_ and _after_ the method call. Namely, `OnBeforeEnter`, `OnAfterEnter`, `OnBeforeLeave`, `OnAfterLeave`. +These methods can modify the current `TContext`. These _before_ and _after_ methods are good places to initialize state that is used in the main _enter_ or _leave_ method. e.g. before entering a `FieldNode`, you may want to peek the latest type from the context and get the instance of the `ObjectField` corresponding to `FieldNode` of this type. You may also want to push this type onto the context to then use it in the `Enter` method. + +> **⚠️ NOTE:** In the following sequence diagram the participants do **NOT** represent any object instances. Furthermore, many steps are hidden in this example. The visualization below should just give you provide you visual insight on the order of the methods being called. + +```graphql +query GetFoos { + foo { + bar + } +} +``` + +```mermaid +sequenceDiagram +autonumber + Root->>Root: OnBeforeEnter `query GetFoos` + Root->>Root: Enter `query GetFoos` + Root->>Root: OnAfterEnter `query GetFoos` + Root->>Foo: VisitChildren + Foo->>Foo: OnBeforeEnter foo + Foo->>Foo: Enter foo + Foo->>Foo: OnAfterEnter foo + Foo->>Bar: VisitChildren + Note right of Bar: ... + Bar-->Foo: - + Foo->>Foo: OnBeforeLeave foo + Foo->>Foo: Leave foo + Foo->>Foo: OnAfterLeave foo + Foo-->Root: - + Root->>Root: OnBeforeLeave `query GetFoos` + Root->>Root: Leave `query GetFoos` + Root->>Root: OnAfterLeave `query GetFoos` +``` + +1. We start walking down the tree and _enter_.
Call the `csharp±OnBeforeEnter(OperationDefinitionNode node, TContext context)` +2. Call the `csharp±Enter(OperationDefinitionNode node, TContext context)` +3. Call the `csharp±OnAfterEnter(OperationDefinitionNode node, TContext context)` +4. Call the `csharp±VisitChildren(OperationDefinitionNode node, TContext context)` +5. Call the `csharp±OnBeforeEnter(ObjectFieldNode node, TContext context)` +6. Call the `csharp±Enter(ObjectFieldNode node, TContext context)` +7. Call the `csharp±OnAfterEnter(ObjectFieldNode node, TContext context)` +8. Call the `csharp±VisitChildren(ObjectFieldNode node, TContext context)` +9. We walk back up the tree and _leave_ +10. Call the `csharp±OnBeforeLeave(ObjectFieldNode node, TContext context)` +11. Call the `csharp±Leave(ObjectFieldNode node, TContext context)` +12. Call the `csharp±OnAfterLeave(ObjectFieldNode node, TContext context)` +13. We walk back up the tree and _leave_. +14. Call the `csharp±OnBeforeLeave(OperationDefinitionNode node, TContext context)` +15. Call the `csharp±Leave(OperationDefinitionNode node, TContext context)` +16. Call the `csharp±OnAfterLeave(OperationDefinitionNode node, TContext context)` + +--- + +# Visitor Actions + +The _Enter_ and _Leave_ methods return visitor actions. These methods control the visitors' next step in the visitation. Visitor actions can be used to _skip_ further visitation and step back up, or to _continue_ and walk the current branch of the tree further down. + +## Continue + +If `Continue` is returned from the `Enter` or `Leave` method visitation on the current branch continues. + +In the following example `Continue` is returned from the onEnter method. The visitor calls `VisitChildren` and continues by _entering_ the selection set. + +```graphql {4} +query { + foo { + bar + baz @onEnter(return: CONTINUE) { + quux + } + qux + } +} +``` + +## Skip + +If `Skip` is returned from the `Enter` or `Leave` method, further visitation on this node stops. + +In the following example `Skip` is returned from the onEnter method. The visitor skips the field _baz_. It continues visitation by _entering_ the field _qux_. + +```graphql {4} +query { + foo { + bar + baz @onEnter(return: SKIP) { + quux + } + qux + } +} +``` + +## SkipAndLeave + +If `SkipAndLeave` is returned from the `Enter` method, further visitation on this node stops. Instead of directly calling the next `Enter` method. The visitor calls the `Leave` method of the current node first. + +In the following example `SkipAndLeave` is returned from the onEnter method. The visitor skips the field _baz_. Before it continues visitation with the field _qux_ it _leaves_ the field _baz_ by calling `Leave` + +```graphql {4} +query { + foo { + bar + baz @onEnter(return: SKIPANDLEAVE) { + quux + } + qux + } +} +``` + +## Break + +If `Break` is returned from the `Enter` or `Leave` method, further visitation on this branch stops. + +In the following example `Break` is returned from the onEnter method. The visitor immediately starts walking back up. The visitor calls the `Leave` on `foo` instead of visiting the selections set of _baz_ it skips _baz_ and _qux_. 
+ +```graphql {4} +query { + foo { + bar + baz @onEnter(return: BREAK) { + quux + } + qux + } +} +``` diff --git a/website/src/docs/hotchocolate/v12/defining-a-schema/arguments.md b/website/src/docs/hotchocolate/v12/defining-a-schema/arguments.md new file mode 100644 index 00000000000..256cc15a6cc --- /dev/null +++ b/website/src/docs/hotchocolate/v12/defining-a-schema/arguments.md @@ -0,0 +1,124 @@ +--- +title: "Arguments" +--- + +import { ExampleTabs, Annotation, Code, Schema } from "../../../components/mdx/example-tabs" + +GraphQL allows us to specify arguments on a field and access their values in the field's resolver. + +```sdl +type Query { + user(id: ID!): User +} +``` + +Clients can specify arguments like the following. + +```graphql +{ + user(id: "123") { + username + } +} +``` + +Often times arguments will be specified using variables. + +```graphql +query ($userId: ID!) { + user(id: $userId) { + username + } +} +``` + +Learn more about arguments [here](https://graphql.org/learn/schema/#arguments) and variables [here](https://graphql.org/learn/queries/#variables). + +# Usage + +Arguments can be defined like the following. + + + + +```csharp +public class Query +{ + public User GetUser(string username) + { + // Omitted code for brevity + } +} +``` + +We can also change the name of the argument used in the schema. + +```csharp +public class Query +{ + public User GetUser([GraphQLName("name")] string username) + { + // Omitted code for brevity + } +} +``` + + + + +```csharp +public class QueryType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor.Name(OperationTypeNames.Query); + + descriptor + .Field("user") + .Argument("username", a => a.Type>()) + .Resolve(context => + { + var username = context.ArgumentValue("username"); + + // Omitted code for brevity + }); + } +} +``` + +We can also access nullable values through an `Optional`. + +```csharp +var username = context.ArgumentOptional("username"); + +if (username.HasValue) +{ + // use username.Value +} +``` + + + + +```csharp +services + .AddGraphQLServer() + .AddDocumentFromString(@" + type Query { + user(username: String!): User + } + ") + .AddResolver("Query", "user", (context) => + { + var username = context.ArgumentValue("username"); + + // Omitted code for brevity + }); +``` + + + + +Arguments can be made required by using the non-null type. Learn more about [non-null](/docs/hotchocolate/v12/defining-a-schema/non-null) + +If we need to provide complex objects as arguments, we can use [input object types](/docs/hotchocolate/v12/defining-a-schema/input-object-types). diff --git a/website/src/docs/hotchocolate/v12/defining-a-schema/directives.md b/website/src/docs/hotchocolate/v12/defining-a-schema/directives.md new file mode 100644 index 00000000000..71c78a4a588 --- /dev/null +++ b/website/src/docs/hotchocolate/v12/defining-a-schema/directives.md @@ -0,0 +1,410 @@ +--- +title: "Directives" +--- + +Directives provide a way to add metadata for client tools such as code generators and IDEs or alternate a GraphQL server's runtime execution and type validation behavior. + +There are two kinds of directives, executable directives to annotate executable parts of GraphQL documents and type-system directives to annotate SDL types. + +Typically, any GraphQL server implementation should provide the following directives `@skip`, `@include`, and `@deprecated`. 
`@skip` and `@include`, for example, are executable directives used in GraphQL documents to exclude or include fields, whereas `@deprecated` is a type-system directive used in SDL types to inform client tools that a particular part such as a field is deprecated. + +# Structure + +Directives consist of a name and zero or more arguments. `@skip`, for example, has the name **skip** and a mandatory argument named **if**. Also, `@skip` carries a piece of hidden information only examinable in SDL, namely the location, which specifies where a directive is applicable. Let's take a look at the SDL of the `@skip` directive. + +```sdl +directive @skip(if: Boolean!) on + | FIELD + | FRAGMENT_SPREAD + | INLINE_FRAGMENT +``` + +The `directive` keyword in SDL indicates that we're dealing with a directive type declaration. The `@` sign also indicates that this is a directive but more from a usage perspective. + +The word `skip` represents the directive's name followed by a pair of parentheses that includes a list of arguments, consisting, in our case, of one argument named `if` of type non-nullable boolean (meaning it is required). + +The `on` keyword indicates the location where or at which part a directive is applicable, followed by a list of exact locations separated by pipes `|`. In the case of `@skip`, we can see that we're dealing with an executable directive because this directive is only applicable to fields, fragment-spreads, and inline-fragments. + +# Usage + +Let's say we have a GraphQL document and want to exclude details under certain circumstances; it would probably look something like this. + +```graphql +query me($excludeDetails: Boolean!) { + me { + id + name + ...Details @skip(if: $excludeDetails) + } +} + +fragment Details on User { + mobileNumber + phoneNumber +} +``` + +With `@skip`, we've successfully altered the GraphQL's runtime execution behavior. If `$excludeDetails` is set to `true`, the execution engine will exclude the fields `mobileNumber` and `phoneNumber`; the response would look like this. + +```json +{ + "data": { + "me": { + "id": "VXNlcgox", + "name": "Henry" + } + } +} +``` + +Now that we know how to use directives in GraphQL, let's head over to the next section, which is about one crucial aspect of directives. + +## Order Matters + +**The order of directives is significant**, because the execution is in **sequential order**, which means one after the other. If we have something like the following example, we can see how directives can affect each other. + +```graphql +query me { + me { + name @skip(if: true) @include(if: true) + } +} +``` + +Since we excluded the field `name` in the first place, `@include` does not affect the field `name` anymore. We then just get an empty `me` object in return. + +```json +{ + "data": { + "me": {} + } +} +``` + +> **Note:** We will have a deep dive on directives' order under the [Middleware](#order) section. + +Now that we have a basic understanding of what directives are, how they work, and what we can do with them, let's create a custom directive. + +# Custom Directives + +To create a directive, we need to create a new class that inherits from `DirectiveType` and also to override the `Configure` method. + +```csharp +public class MyDirectiveType : DirectiveType +{ + protected override void Configure(IDirectiveTypeDescriptor descriptor) + { + descriptor.Name("my"); + descriptor.Location(DirectiveLocation.Field); + } +} +``` + +[Learn more about Locations](#locations) + +We also have to register the directive explicitly. 
+ +```csharp +services + .AddGraphQLServer() + .AddDirectiveType(); +``` + +Let's recap! We have registered a new directive named `my` without any arguments and limited the usage to fields only. A GraphQL query request with our new directive could look like this. + +```graphql +query foo { + bar @my +} +``` + +As of now, our custom directive provides no functionality. We will handle that part in the [Middleware](#middleware) section. But before that, let's talk about repeatable directives and arguments. + +## Repeatable + +By default, directives are not repeatable, which means directives are unique and can only be applied once at a specific location. For example, if we use the `my` directive twice at the field `bar`, we will encounter a validation error. So the following GraphQL query request results in an error if the directive is not repeatable. + +```graphql +query foo { + bar @my @my +} +``` + +We can enable repeatability like the following. + +```csharp +public class MyDirectiveType : DirectiveType +{ + protected override void Configure(IDirectiveTypeDescriptor descriptor) + { + descriptor.Name("my"); + descriptor.Location(DirectiveLocation.Field); + descriptor.Repeatable(); + } +} +``` + +This configuration will translate into the following SDL. + +```sdl +directive @my repeatable on FIELD +``` + +## Arguments + +A directive can provide additional information through arguments. +They might also come in handy, in combination with repeatable directives, for reusability purposes. + +We can add an argument like the following. + +```csharp +public class MyDirective +{ + public string Name { get; set; } +} + +public class MyDirectiveType : DirectiveType +{ + protected override void Configure( + IDirectiveTypeDescriptor descriptor) + { + descriptor.Name("my"); + descriptor.Location(DirectiveLocation.FieldDefinition); + + // The 'Name' property is included as an argument implicitly + + // descriptor + // .Argument(f => f.ChangeMe) + // .Type>() + // .Name("differentName"); + // descriptor.Ignore(f => f.IgnoreMe); + } +} +``` + +If we prefer to not use a backing POCO (``) we an also use the `Argument()` method on the `descriptor`. + +```csharp +public class MyDirectiveType : DirectiveType +{ + protected override void Configure(IDirectiveTypeDescriptor descriptor) + { + descriptor.Name("my"); + descriptor.Location(DirectiveLocation.Field); + + descriptor + .Argument("name") + .Type>(); + } +} +``` + +This configuration will translate into the following SDL. + +```sdl +directive @my(name: String!) on FIELD +``` + +## Usage within Types + +We could associate the `MyDirectiveType` with an object type like the following. + +```csharp +public class FooType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor.Name("Foo"); + descriptor.Directive("my", new ArgumentNode("name", "bar")); + } +} +``` + +> Note: For this to work the `MyDirectiveType` directive needs to have the appropriate location within the schema. In this example it would be `DirectiveLocation.Object`. + +Referencing directives using their name is not type-safe and could lead to runtime errors, which are avoidable by using our generic variant of the directive type. + +Once we have defined our directive using `DirectiveType`, we can pass an instance of the backing POCO (``) instead of the name of the directive and an `ArgumentNode`. 
+ +```csharp +public class FooType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor.Name("Foo"); + descriptor.Directive(new MyDirective { Name = "bar" }); + } +} +``` + +Since the directive instance that we have added to our type is now a strong .NET type, we don't have to fear changes to the directive structure or name anymore. + +## Locations + +A directive can define one or multiple locations, where it can be applied. Multiple locations are seperated by a pipe `|`. + +```csharp +descriptor.Location(DirectiveLocation.Field | DirectiveLocation.Object); +``` + +Generally we distinguish between two types of locations: Type system and executable locations. + +### Type System Locations + +Type system locations specify where we can place a specific directive in the schema. The arguments of directives specified in these locations are fixed. We can query such directives through introspection. + +The following schema shows where type system directives can be applied. + +```sdl +directive @schema on SCHEMA +directive @object on OBJECT +directive @argumentDefinition on ARGUMENT_DEFINITION +directive @fieldDefinition on FIELD_DEFINITION +directive @inputObject on INPUT_OBJECT +directive @inputFieldDefinition on INPUT_FIELD_DEFINITION +directive @interface on INTERFACE +directive @enum on ENUM +directive @enumValue on ENUM_VALUE +directive @union on UNION +directive @scalar on SCALAR +schema @schema { + query: Query +} +type Query @object { + search(by: SearchInput! @argumentDefinition): SearchResult @fieldDefinition +} +input SearchInput @inputObject { + searchTerm: String @inputFieldDefinition +} +interface HasDescription @interface { + description: String +} +type Product implements HasDescription { + added: DateTime + description: String +} +enum UserKind @enum { + Administrator @enumValue + Moderator +} +type User { + name: String + userKind: UserKind +} +union SearchResult @union = Product | User +scalar DateTime @scalar +``` + +### Executable Locations + +Executable locations specify where a client can place a specific directive, when executing an operation. + +Our server defines the following directives. + +```sdl +directive @query on QUERY +directive @field on FIELD +directive @fragmentSpread on FRAGMENT_SPREAD +directive @inlineFragment on INLINE_FRAGMENT +directive @fragmentDefinition on FRAGMENT_DEFINITION +directive @mutation on MUTATION +directive @subscription on SUBSCRIPTION +``` + +The following request document shows where we, as a client, can apply these directives. + +```graphql +query getUsers @query { + search(by: { searchTerm: "Foo" }) @field { + ...DescriptionFragment @fragmentSpread + ... on User @inlineFragment { + userKind + } + } +} + +fragment DescriptionFragment on HasDescription @fragmentDefinition { + description +} + +mutation createNewUser @mutation { + createUser(input: { name: "Ada Lovelace" }) { + user { + name + } + } +} + +subscription subscribeToUser @subscription { + onUserChanged(id: 1) { + user { + name + } + } +} +``` + +## Middleware + +What makes directives in Hot Chocolate very useful is the ability to associate a middleware with it. A middleware can alternate the result, or even produce the result, of a field. A directive middleware is only added to a field middleware pipeline when the directive was annotated to the object definition, the field definition or the field. 
+ +Moreover, if the directive is repeatable the middleware will be added multiple times to the middleware allowing to build a real pipeline with it. + +In order to add a middleware to a directive we could declare it with the descriptor as a delegate. + +```csharp +public class MyDirectiveType : DirectiveType +{ + protected override void Configure( + IDirectiveTypeDescriptor descriptor) + { + descriptor.Name("my"); + descriptor.Location(DirectiveLocation.Object); + + descriptor.Use(next => context => + { + context.Result = "Bar"; + return next.Invoke(context); + }); + } +} +``` + +Directives with middleware or executable directives can be put on object types and on their field definitions or on the field selection in a query. Executable directives on an object type will replace the field resolver of every field of the annotated object type. + +### Order + +In GraphQL the order of directives is significant and with our middleware we use this order to create a resolver pipeline through which the result flows. + +The resolver pipeline consists of a sequence of directive delegates, called one after the other. + +Each delegate can perform operations before and after the next delegate. A delegate can also decide to not pass a resolver request to the next delegate, which is called short-circuiting the resolver pipeline. +Short-circuiting is often desirable because it avoids unnecessary work. + +The order of the middleware pipeline is defined by the order of the directives. Since executable directives will flow from the object type to its field definitions, the directives of the type would be called first in the order that they were annotated. + +```sdl +type Query { + foo: Bar +} + +type Bar @a @b { + baz: String @c @d +} +``` + +So, the directives in the above example would be called in the following order `a, b, c, d`. + +If there were more directives in the query, they would be appended to the directives from the type. + +```graphql +{ + foo { + baz @e @f + } +} +``` + +So, now the order would be like the following: `a, b, c, d, e, f`. + +Every middleware can execute the original resolver function by calling `ResolveAsync()` on the `IDirectiveContext`. diff --git a/website/src/docs/hotchocolate/v12/defining-a-schema/documentation.md b/website/src/docs/hotchocolate/v12/defining-a-schema/documentation.md new file mode 100644 index 00000000000..fdb2ba7179a --- /dev/null +++ b/website/src/docs/hotchocolate/v12/defining-a-schema/documentation.md @@ -0,0 +1,248 @@ +--- +title: Documentation +--- + +import { ExampleTabs, Annotation, Code, Schema } from "../../../components/mdx/example-tabs" + +Documentation allows us to enrich our schema with additional information that is useful for a consumer of our API. + +In GraphQL we can do this by providing descriptions to our types, fields, etc. + +```sdl +type Query { + "A query field" + user("An argument" username: String): User +} + +"An object type" +type User { + "A field" + username: String +} + +"An enum" +enum UserRole { + "An enum value" + ADMINISTRATOR +} +``` + +# Usage + +We can define descriptions like the following. 
+ + + + +```csharp +[GraphQLDescription("An object type")] +public class User +{ + [GraphQLDescription("A field")] + public string Username { get; set; } +} + +[GraphQLDescription("An enum")] +public enum UserRole +{ + [GraphQLDescription("An enum value")] + Administrator +} + +public class Query +{ + [GraphQLDescription("A query field")] + public User GetUser( + [GraphQLDescription("An argument")] string username) + { + // Omitted code for brevity + } +} +``` + +If the description provided to the `GraphQLDescriptionAttribute` is `null` or made up of only white space, XML documentation comments are used as a fallback. + +Learn more about XML documentation below. + + + + +```csharp +public class UserType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor.Name("User"); + descriptor.Description("An object type"); + + descriptor + .Field(f => f.Username) + .Description("A field"); + } +} + +public class UserRoleType : EnumType +{ + protected override void Configure(IEnumTypeDescriptor descriptor) + { + descriptor.Name("UserRole"); + descriptor.Description("An enum"); + + descriptor + .Value(UserRole.Administrator) + .Description("An enum value"); + } +} + +public class QueryType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor.Name(OperationTypeNames.Query); + + descriptor + .Field("user") + .Description("A query field") + .Argument("username", a => a.Type() + .Description("An argument")) + .Resolve(context => + { + // Omitted code for brevity + }); + } +} +``` + +The `Description()` methods take precedence over all other forms of documentation. This is true, even if the provided value is `null` or only white space. + + + + +```csharp +services + .AddGraphQLServer() + .AddDocumentFromString(@" + type Query { + """""" + A query field + """""" + user(""An argument"" username: String): User + } + + """""" + An object type + """""" + type User { + ""A field"" + username: String + } + + """""" + An enum + """""" + enum UserRole { + ""An enum value"" + ADMINISTRATOR + } + ") + // Omitted code for brevity +``` + + + + +# XML Documentation + +Hot Chocolate provides the ability to automatically generate API documentation from our existing [XML documentation](https://docs.microsoft.com/dotnet/csharp/codedoc). + +The following will produce the same schema descriptions we declared above. + +```csharp +/// +/// An object type +/// +public class User +{ + /// + /// A field + /// + public string Username { get; set; } +} + +/// +/// An enum +/// +public enum UserRole +{ + /// + /// An enum value + /// + Administrator +} + +public class Query +{ + /// + /// A query field + /// + /// An argument + public User GetUser(string username) + { + // Omitted code for brevity + } +} +``` + +To make the XML documentation available to Hot Chocolate, we have to enable `GenerateDocumentationFile` in our `.csproj` file. + +```xml + + true + $(NoWarn);1591 + +``` + +> Note: The `` element is optional. It prevents the compiler from emitting warnings for missing documentation strings. + +If we do not want to include XML documentation in our schema, we can set the `UseXmlDocumentation` property on the schema's `ISchemaOptions`. 
+ +```csharp +services + .AddGraphQLServer() + .ModifyOptions(opt => opt.UseXmlDocumentation = false); +``` + +## With a custom naming convention + +If you want to use a custom naming convention and XML documentation, ensure you give the convention an instance of the `XmlDocumentationProvider` as demonstrated below; otherwise the comments won't appear in your schema. + +```csharp +public class CustomNamingConventions : DefaultNamingConventions +{ + // Before + public CustomNamingConventions() + : base() { } + + // After + public CustomNamingConventions(IDocumentationProvider documentationProvider) + : base(documentationProvider) { } +} + +// Startup +// Before +.AddConvention(sp => new CustomNamingConventions()); + +// After +IReadOnlySchemaOptions capturedSchemaOptions; + +services + .AddGraphQLServer() + .ModifyOptions(opt => capturedSchemaOptions = opt) + .AddConvention(sp => new CustomNamingConventions( + new XmlDocumentationProvider( + new XmlDocumentationFileResolver( + capturedSchemaOptions.ResolveXmlDocumentationFileName), + sp.GetApplicationService>() + ?? new NoOpStringBuilderPool()))); +``` + diff --git a/website/src/docs/hotchocolate/v12/defining-a-schema/enums.md b/website/src/docs/hotchocolate/v12/defining-a-schema/enums.md new file mode 100644 index 00000000000..c068562d604 --- /dev/null +++ b/website/src/docs/hotchocolate/v12/defining-a-schema/enums.md @@ -0,0 +1,377 @@ +--- +title: "Enums" +--- + +import { ExampleTabs, Annotation, Code, Schema } from "../../../components/mdx/example-tabs" + +An Enum is a special kind of [scalar](/docs/hotchocolate/v12/defining-a-schema/scalars) that is restricted to a particular set of allowed values. It can be used as both an input and an output type. + +```sdl +enum UserRole { + GUEST, + DEFAULT, + ADMINISTRATOR +} + +type Query { + role: UserRole + usersByRole(role: UserRole): [User] +} +``` + +# Usage + +Given is the schema from above. + +When querying a field returning an enum type, the enum value will be serialized as a string. + +**Request** + +```graphql +{ + role +} +``` + +**Response** + +```json +{ + "data": { + "role": "STANDARD" + } +} +``` + +When using an enum value as an argument, it is represented as a literal and **not** a string. + +**Request** + +```graphql +{ + usersByRole(role: ADMINISTRATOR) { + id + } +} +``` + +When used as a type for a variable, it is represented as a string in the variables object, since JSON does not offer support for literals. + +**Request** + +Operation: + +```graphql +query ($role: UserRole) { + usersByRole(role: $role) { + id + } +} +``` + +Variables: + +```json +{ + "role": "ADMINISTRATOR" +} +``` + +# Definition + +We can define enums like the following. 
+ + + + +```csharp +public enum UserRole +{ + Guest, + Standard, + Administrator +} + +public class Query +{ + public User[] GetUsers(UserRole role) + { + // Omitted code for brevity + } +} +``` + + + + +```csharp +public enum UserRole +{ + Guest, + Standard, + Administrator +} + +public class UserRoleType : EnumType +{ +} + +public class QueryType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor.Name(OperationTypeNames.Query); + + descriptor + .Field("users") + .Argument("role", a => a.Type()) + .Resolve(context => + { + var role = context.ArgumentValue("role"); + + // Omitted code for brevity + }); + } +} +``` + +Since there could be multiple enum types inheriting from `EnumType`, but differing in their name and values, it is not certain which of these types should be used when we return a `UserRole` CLR type from one of our resolvers. + +**Therefore it's important to note that Code-first enum types are not automatically inferred. They need to be explicitly specified or registered.** + +We can either [explicitly specify the type on a per-resolver basis](/docs/hotchocolate/v12/defining-a-schema/object-types#explicit-types) or we can register the type once globally: + +```csharp +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + .AddType(); + } +} +``` + +With this configuration each `UserRole` CLR type we return from our resovlers would be assumed to be a `UserRoleType`. + + + + +```csharp +services + .AddGraphQLServer() + .AddDocumentFromString(@" + type Query { + user(role: UserRole): User + } + + enum UserRole { + GUEST, + DEFAULT, + ADMINISTRATOR + } + ") + .AddResolver("Query", "user", (context) =>- + { + var role = context.ArgumentValue("role"); + + // Omitted code for brevity + }) +``` + + + + +## Non-enum values + +In Code-first we can also bind the enum type to any other .NET type, for example a `string`. + +```csharp +public class UserRoleType : EnumType +{ + protected override void Configure(IEnumTypeDescriptor descriptor) + { + // we need to specify a name or otherwise we will get a conflict + // with the built-in StringType + descriptor.Name("UserRole"); + + descriptor + .Value("Default") + .Name("STANDARD"); + } +} + +public class QueryType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor.Name(OperationTypeNames.Query); + + descriptor + .Field("users") + .Argument("role", a => a.Type()) + .Resolve(context => + { + var role = context.ArgumentValue("role"); + + // Omitted code for brevity + }); + } +} +``` + +# Binding behavior + +In the Annotation-based approach all enum values are implicitly included on the schema enum type. The same is true for `T` of `EnumType` when using the Code-first approach. + +In the Code-first approach we can also enable explicit binding, where we have to opt-in enum values we want to include instead of them being implicitly included. + + + +We can configure our preferred binding behavior globally like the following. + +```csharp +services + .AddGraphQLServer() + .ModifyOptions(options => + { + options.DefaultBindingBehavior = BindingBehavior.Explicit; + }); +``` + +> ⚠️ Note: This changes the binding behavior for all types, not only enum types. 
+ +We can also override it on a per type basis: + +```csharp +public class UserRoleType : EnumType +{ + protected override void Configure(IEnumTypeDescriptor descriptor) + { + descriptor.BindValues(BindingBehavior.Implicit); + + // We could also use the following methods respectively + // descriptor.BindValuesExplicitly(); + // descriptor.BindValuesImplicitly(); + } +} +``` + +## Ignoring values + + + + +In the Annotation-based approach we can ignore values using the `[GraphQLIgnore]` attribute. + +```csharp +public enum UserRole +{ + [GraphQLIgnore] + Guest, + Standard, + Administrator +} +``` + + + + +In the Code-first approach we can ignore values using the `Ignore` method on the `IEnumTypeDescriptor`. This is only necessary, if the binding behavior of the enum type is implicit. + +```csharp +public class UserRoleType : EnumType +{ + protected override void Configure(IEnumTypeDescriptor descriptor) + { + descriptor.Ignore(UserRole.Guest); + } +} +``` + + + + +We do not have to ignore values in the Schema-first approach. + + + + +## Including values + +In the Code-first approach we can explicitly include values using the `Value` method on the `IEnumTypeDescriptor`. This is only necessary, if the binding behavior of the enum type is explicit. + +```csharp +public class UserRoleType : EnumType +{ + protected override void Configure(IEnumTypeDescriptor descriptor) + { + descriptor.BindValuesExplicitly(); + + descriptor.Value(UserRole.Guest); + } +} +``` + +# Naming + +Unless specified explicitly, Hot Chocolate automatically infers the names of enums and their values. Per default the name of the enum becomes the name of the enum type. When using `EnumType` in Code-first, the name of `T` is chosen as the name for the enum type. + +Enum values are automatically formatted to the UPPER_SNAIL_CASE according to the GraphQL specification: + +- `Guest` becomes `GUEST` +- `HeadOfDepartment` becomes `HEAD_OF_DEPARTMENT` + +If we need to we can override these inferred names. + + + + +The `[GraphQLName]` attribute allows us to specify an explicit name. + +```csharp +[GraphQLName("Role")] +public enum UserRole +{ + [GraphQLName("VISITOR")] + Guest, + Standard, + Administrator +} +``` + + + + +The `Name` method on the `IEnumTypeDescriptor` / `IEnumValueDescriptor` allows us to specify an explicit name. + +```csharp +public class UserRoleType : EnumType +{ + protected override void Configure(IEnumTypeDescriptor descriptor) + { + descriptor.Name("Role"); + + descriptor.Value(UserRole.Guest).Name("VISITOR"); + } +} +``` + + + + +Simply change the names in the schema. + + + + +This would produce the following `Role` schema enum type: + +```sdl +enum Role { + VISITOR, + STANDARD, + ADMINISTRATOR +} +``` diff --git a/website/src/docs/hotchocolate/v12/defining-a-schema/extending-types.md b/website/src/docs/hotchocolate/v12/defining-a-schema/extending-types.md new file mode 100644 index 00000000000..89e2898d447 --- /dev/null +++ b/website/src/docs/hotchocolate/v12/defining-a-schema/extending-types.md @@ -0,0 +1,372 @@ +--- +title: "Extending Types" +--- + +import { ExampleTabs, Annotation, Code, Schema } from "../../../components/mdx/example-tabs" + +Type extensions allow us to add, remove or replace fields on existing types, without necessarily needing access to these types. + +Because of these capabilities, they also allow for better organization of our types. We could for example have classes that encapsulate part of our domain and extend our `Query` type with these functionalities. 
+ +Type extensions are especially useful if we want to modify third-party types, such as types that live in a separate assembly and are therefore not directly modifiable by us. + + + +> ⚠️ Note: Type extensions do not produce the [extend type syntax that GraphQL offers](http://spec.graphql.org/draft/#sec-Object-Extensions), since it would unnecessarily complicate the resulting schema. Instead, Hot Chocolate's type extensions are directly merged with the original type definition to create a single type at runtime. + +# Object Types + +Consider we have the following entity that we want to extend with functionality. + +```csharp +public class Book +{ + public int Id { get; set; } + + public string Title { get; set; } + + public int AuthorId { get; set; } +} +``` + +## Adding fields + +We can easily add new fields to our existing `Book` type. + + + + +```csharp +[ExtendObjectType(typeof(Book))] +public class BookExtensions +{ + public IEnumerable GetGenres([Parent] Book book) + { + // Omitted code for brevity + } +} + +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + .AddTypeExtension(); + } +} +``` + +One of the most common use-cases for this would be adding new resolvers to one of our root types. + +```csharp +[ExtendObjectType(typeof(Query))] +public class QueryBookResolvers +{ + public IEnumerable GetBooks() + { + // Omitted code for brevity + } +} + +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + .AddTypeExtension(); + } +} +``` + + + + +```csharp +public class BookTypeExtensions : ObjectTypeExtension +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor + .Field("genres") + .Type>() + .Resolve(context => + { + var parent = context.Parent(); + + // Omitted code for brevity + }); + } +} + +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + .AddTypeExtension(); + } +} +``` + +One of the most common use-cases for this would be adding new resolvers to one of our root types. + +```csharp +public class QueryTypeBookResolvers : ObjectTypeExtension +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor + .Field("books") + .Type>() + .Resolve(context => + { + // Omitted code for brevity + }); + } +} + +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + .AddTypeExtension(); + } +} +``` + + + + +Simply add a new field to the existing type. + + + + +## Removing fields + +We can also ignore fields of the type we are extending. + + + + +```csharp +[ExtendObjectType(typeof(Book), + IgnoreProperties = new[] { nameof(Book.AuthorId) })] +public class BookExtensions +{ +} + +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + .AddTypeExtension(); + } +} +``` + + + + +```csharp +public class BookTypeExtensions : ObjectTypeExtension +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor.Ignore(f => f.AuthorId); + } +} + +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + .AddTypeExtension(); + } +} +``` + + + + +Simply remove the field from the existing type. 
+ + + + +## Replacing fields + +We might have an `Id` field, which we want to replace with a field that resolves the actual type the `Id` is pointing to. + +In this example we replace the `authorId` field with an `author` field. + + + + +```csharp +[ExtendObjectType(typeof(Book))] +public class BookExtensions +{ + [BindMember(nameof(Book.AuthorId))] + public Author GetAuthor([Parent] Book book) + { + // Omitted code for brevity + } +} + +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + .AddTypeExtension(); + } +} +``` + + + + +**This is currently not working ([#3776](https://github.com/ChilliCream/hotchocolate/issues/3776))** + +```csharp +public class BookTypeExtensions : ObjectTypeExtension +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor + .Field(f => f.AuthorId) + .Type() + .Name("author") + .Resolve(context => + { + var parent = context.Parent(); + + // Omitted code for brevity + }); + } +} + +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + .AddTypeExtension(); + } +} +``` + + + + +Simply replace the field on the existing type. + + + + +## Extending by name + +If we can not reference a type, we can still extend it by specifying its name. + + + + +```csharp +[ExtendObjectType("Foo")] +public class FooExtensions +{ + // Omitted code for brevity +} +``` + + + + +```csharp +public class FooTypeExtensions : ObjectTypeExtension +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor.Name("Foo"); + + // Omitted code for brevity + } +} +``` + + + + +⚠️ Schema-first does not currently support extending types by their name + + + + +When extending root types, we can make use of the constants in `OperationTypeNames`. We can for example use `OperationTypeNames.Query` instead of writing `"Query"` everywhere. + +## Extending base types + +We can also extend multiple types at once, but still dedicate specific resolvers to specific types. + +```csharp +// this extends every type that inherits from object (essentially every type) +[ExtendObjectType(typeof(object))] +public class ObjectExtensions +{ + // this field is added to every object type + public string NewField() + { + // Omitted code for brevity + } + + // this field is only added to the Book type + public Author GetAuthor([Parent] Book book) + { + // Omitted code for brevity + } + + // this field is only added to the Author type + public IEnumerable GetBooks([Parent] Author author) + { + // Omitted code for brevity + } +} +``` + +We can also modify all object types that are connected by a base type, like an interface. + +```csharp +[InterfaceType] +public interface IPost +{ + string Title { get; set; } +} + +// this extends every type that implements the IPost interface, +// not the interface type itself +[ExtendObjectType(typeof(IPost))] +public class PostExtensions +{ + public string NewField([Parent] IPost post) + { + // Omitted code for brevity + } +} +``` + +> Note: The `IPost` is annotated with `[InterfaceType]` to include it in the GraphQL schema, but that isn't necessary for the type extension to work. +> We can use any base type, like `object` or an `abstract` base class, as an extension point without necessarily exposing the base type in our GraphQL schema. 
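As with the single-type extensions earlier on this page, a base-type extension only takes effect once it is registered. Below is a minimal registration sketch, assuming the `PostExtensions` class from the example above (the `ObjectExtensions` class from the first example is registered the same way).

```csharp
public class Startup
{
    public void ConfigureServices(IServiceCollection services)
    {
        services
            .AddGraphQLServer()
            // newField is added to every object type that implements IPost
            .AddTypeExtension<PostExtensions>();
    }
}
```

Registration does not change depending on whether the extension targets a single type or a base type; only the type passed to `[ExtendObjectType]` differs.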
diff --git a/website/src/docs/hotchocolate/v12/defining-a-schema/index.md new file mode 100644 index 00000000000..a5a14540ef5 --- /dev/null +++ b/website/src/docs/hotchocolate/v12/defining-a-schema/index.md @@ -0,0 +1,89 @@ +--- +title: "Overview" +--- + +In this section we will learn everything that is needed to build an expressive GraphQL schema. + +# Operations + +First we will look at the three root types, often called _Operations_, that represent entry points to our schema: + +- Queries allow us to _query_ our graph and retrieve data in a read-only manner.
[Learn more about queries](/docs/hotchocolate/v12/defining-a-schema/queries) + +- Mutations allow us to _mutate_ our graph entities in the form of adding, removing or updating entities.
[Learn more about mutations](/docs/hotchocolate/v12/defining-a-schema/mutations) + +- Subscriptions allow us to _subscribe_ to events in our system and be notified in real-time of their occurrence.
[Learn more about subscriptions](/docs/hotchocolate/v12/defining-a-schema/subscriptions) + +# Types + +Each GraphQL schema is made up of two basic building blocks: + +- Object types contain fields and describe our entities.
[Learn more about object types](/docs/hotchocolate/v12/defining-a-schema/object-types) + +- Scalars are the primitives of our GraphQL schema: `String`, `Int`, etc.
We can also define custom scalars to more precisely describe our business domain.
[Learn more about scalars](/docs/hotchocolate/v12/defining-a-schema/scalars) + +There are also more advanced types: + +- Enums are a special kind of scalar, restricted to a particular set of allowed values.
[Learn more about enums](/docs/hotchocolate/v12/defining-a-schema/enums) +- Interfaces represent a shared contract that other types can implement.
[Learn more about interfaces](/docs/hotchocolate/v12/defining-a-schema/interfaces) +- Unions represent a set of object types, without the need for a shared contract.
[Learn more about unions](/docs/hotchocolate/v12/defining-a-schema/unions) + +# Type Modifiers + +Besides regular types, like scalars and object types, there are also _type modifiers_. + +A non-null field, for example, indicates that a client can always expect a non-null value to be returned from the field. + +[Learn more about non-null](/docs/hotchocolate/v12/defining-a-schema/non-null) + +List fields indicate to a client that the field will return a list in the specified shape. + +[Learn more about lists](/docs/hotchocolate/v12/defining-a-schema/lists) + +# Arguments + +We can pass arguments to individual fields on an object type and access their values inside the field's resolver. + +[Learn more about arguments](/docs/hotchocolate/v12/defining-a-schema/arguments) + +Nested object types can also be used as arguments by declaring so-called input object types. These are most commonly used when passing a payload to a mutation. + +[Learn more about input object types](/docs/hotchocolate/v12/defining-a-schema/input-object-types) + +# Extending Types + +Hot Chocolate allows us to extend existing types, helping us keep our code organized. + +Rather than adding more and more fields to the Query type in the same class, for instance, we can _extend_ the Query type with a new field from another location in our codebase where that field logically should live. + +[Learn more about extending types](/docs/hotchocolate/v12/defining-a-schema/extending-types) + +# Directives + +Directives allow us to decorate parts of our GraphQL schema with additional configuration. + +This configuration can be used as metadata for client tools or to alter our GraphQL server's runtime execution and type validation behavior. + +[Learn more about directives](/docs/hotchocolate/v12/defining-a-schema/directives) + +# Schema evolution + +As our data graph and the number of developers/clients grow, we need to ensure that the graph is understood by everyone. Therefore, our schema should expose as much information to consumers of our API as possible. + +[Learn more about schema documentation](/docs/hotchocolate/v12/defining-a-schema/documentation) + +[Learn more about versioning](/docs/hotchocolate/v12/defining-a-schema/versioning) + +# Relay + +[Relay](https://relay.dev) proposes some schema design principles for GraphQL servers in order to more efficiently fetch, refetch and cache entities on the client. Since these principles make for a better schema, we encourage all users, not only those of Relay, to consider them. + +[Learn more about Relay-compatible schema design](/docs/hotchocolate/v12/defining-a-schema/relay) + +# Automatic type registration + +Starting with Hot Chocolate 12.7, we introduced a new source generator that automatically registers types and DataLoaders with your GraphQL configuration builder. Watch on YouTube how you can simplify your Hot Chocolate configuration code.
+ + diff --git a/website/src/docs/hotchocolate/v12/defining-a-schema/input-object-types.md b/website/src/docs/hotchocolate/v12/defining-a-schema/input-object-types.md new file mode 100644 index 00000000000..a3f8c98ba99 --- /dev/null +++ b/website/src/docs/hotchocolate/v12/defining-a-schema/input-object-types.md @@ -0,0 +1,506 @@ +--- +title: "Input Object Types" +--- + +import { ExampleTabs, Annotation, Code, Schema } from "../../../components/mdx/example-tabs" + +We already looked at [arguments](/docs/hotchocolate/v12/defining-a-schema/arguments), which allow us to use simple [scalars](/docs/hotchocolate/v12/defining-a-schema/scalars) like `String` to pass data into a field. GraphQL defines input object types to allow us to use objects as arguments on our fields. + +Input object type definitions differ from [object types](/docs/hotchocolate/v12/defining-a-schema/object-types) only in the used keyword and in that their fields can not have arguments. + +```sdl +input BookInput { + title: String + author: String +} +``` + +# Usage + +Input object types can be defined like the following. + + + + +```csharp +public class BookInput +{ + public string Title { get; set; } + + public string Author { get; set; } +} + +public class Mutation +{ + public async Task AddBook(BookInput input) + { + // Omitted code for brevity + } +} +``` + +> Note: If a class is used as an argument to a resolver and it does not end in `Input`, Hot Chocolate (by default) will append `Input` to the type name in the resulting schema. + +We can also use a class both as an output- and an input-type. + +```csharp +public class Book +{ + public string Title { get; set; } + + public string Author { get; set; } +} + +public class Mutation +{ + public async Task AddBook(Book input) + { + // Omitted code for brevity + } +} +``` + +This will produce the following schema. + +```sdl +type Book { + title: String + author: String +} + +input BookInput { + title: String + author: String +} + +type Mutation { + addBook(input: BookInput): Book +} +``` + +> Note: While it is possible, it is not encouraged, as it complicates future extensions of either type. + + + + +```csharp +public class BookInput +{ + public string Title { get; set; } + + public string Author { get; set; } +} + +public class BookInputType : InputObjectType +{ + protected override void Configure( + IInputObjectTypeDescriptor descriptor) + { + // Omitted code for brevity + } +} + +public class MutationType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor.Name(OperationTypeNames.Mutation); + + descriptor + .Field("addBook") + .Argument("input", a => a.Type()) + .Resolve(context => + { + var input = context.ArgumentValue("input"); + + // Omitted code for brevity + }); + } +} +``` + +The `IInputTypeDescriptor` is really similar to the `IObjectTypeDescriptor` and provides almost the same capabilities. 
+ +[Learn more about object types](/docs/hotchocolate/v12/defining-a-schema/object-types) + + + + +```csharp +public class BookInput +{ + public string Title { get; set; } + + public string Author { get; set; } +} + +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + .AddDocumentFromString(@" + input BookInput { + title: String + author: String + } + + type Mutation { + addBook(input: BookInput): Book + } + ") + .BindRuntimeType() + .AddResolver( "Mutation", "addBook", (context) => + { + var input = context.ArgumentValue("input"); + + // Omitted code for brevity + }); + } +} +``` + + + + +## Immutable types + +If we want our input type classes to be immutable, or we are using [nullable reference types](https://docs.microsoft.com/dotnet/csharp/nullable-references), we can provide a non-empty constructor and Hot Chocolate will instead use that when instantiating the input. Just note that + +1. The type of the argument must exactly match the property's type +2. The name of the argument must match the property name (bar a lowercase first letter) +3. No setters will be called, so you need to provide arguments for all the properties. + +Hot Chocolate validates any custom input constructor at schema build time, so we don't need to worry about breaking things during refactoring! + +```csharp +public class BookInput +{ + // No need for the setters now + public string Title { get; } + public string Author { get; } + + public BookingInput(string title, string author) + { + Title = title; + Author = author; + } +} +``` + +We can also use record types, if we're on C# 9.0+. The equivalent to the above would be: + +```csharp +public record BookingInput(string Title, string Author); +``` + +## Optional Properties + +If we want our input type classes to contain optional properties, we can use the `Optional` type or mark the properties of the class as `nullable`. It is important to also define a default value for any non-nullable property that is using the `Optional` type by adding the `[DefaultValue]` attribute, otherwise the field will still be required when defining the input. + +```csharp +public class BookInput +{ + [DefaultValue("")] + public Optional Title { get; set; } + public string Author { get; set; } + + public BookInput(string title, string author) + { + Title = title; + Author = author; + } +} + +``` + +Also with record types, the equivalent of the above would be: + +```csharp +public record BookInput([property:DefaultValue("")]Optional Title, string Author); + +``` + +## Oneof Input Objects + +Oneof Input Objects are a special variant of Input Objects where the type system asserts that exactly one of the fields must be set and non-null, all others being omitted. This is represented in introspection with the \_\_Type.oneField: Boolean field, and in SDL via the @oneOf directive on the input object. + +> ⚠️ Note: Oneof Input Objects is currently a draft feature to the GraphQL spec. https://github.com/graphql/graphql-spec/pull/825 + + + +This introduces a form of input polymorphism to GraphQL. 
For example, the following PetInput input object lets you choose between a number of potential input types: + +```sdl +input PetInput @oneOf { + cat: CatInput + dog: DogInput + fish: FishInput +} + +input CatInput { name: String!, numberOfLives: Int } +input DogInput { name: String!, wagsTail: Boolean } +input FishInput { name: String!, bodyLengthInMm: Int } + +type Mutation { + addPet(pet: PetInput!): Pet +} +``` + +Since the Oneof Input Objects RFC is not yet in the draft stage it is still an opt-in feature. In order to activate it set the schema options to enable it. + +```csharp +builder.Services + .AddGraphQLServer() + ... + .ModifyOptions(o => o.EnableOneOf = true); +``` + +Once activate you can create Oneof Input Objects like the following: + + + + +```csharp +[OneOf] +public class PetInput +{ + public Dog? Dog { get; set; } + + public Cat? Cat { get; set; } +} + +public interface IPet +{ + string Name { get; } +} + +public class Dog : IPet +{ + public string Name { get; set; } +} + +public class Cat : IPet +{ + public string Name { get; set; } +} + +public class Mutation +{ + public Task CreatePetAsync(PetInput input) + { + // Omitted code for brevity + } +} +``` + +This will produce the following schema. + +```sdl +input PetInput @oneOf { + dog: DogInput + cat: CatInput +} + +input DogInput { + name: String! +} + +input CatInput { + name: String! +} + +interface Pet { + name: String! +} + +type Dog implements Pet { + name: String! +} + +type Cat implements Pet { + name: String! +} + +type Mutation { + createPet(input: PetInput): Pet +} +``` + + + + +```csharp +public class PetInput +{ + public Dog? Dog { get; set; } + + public Cat? Cat { get; set; } +} + +public class Dog +{ + public string Name { get; set; } +} + +public class Cat +{ + public string Name { get; set; } +} + +public class PetType : InterfaceType +{ + protected override void Configure( + IInterfaceTypeDescriptor descriptor) + { + descriptor + .Name("Pet") + .Field("name") + .Type("String!"); + } +} + +public class PetInputType : InputObjectType +{ + protected override void Configure( + IInputObjectTypeDescriptor descriptor) + { + descriptor.OneOf(); + } +} + +public class MutationType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor.Name(OperationTypeNames.Mutation); + + descriptor + .Field("createPet") + .Argument("input", a => a.Type()) + .Resolve(context => + { + var input = context.ArgumentValue("input"); + + // Omitted code for brevity + }); + } +} +``` + +This will produce the following schema. + +```sdl +input PetInput @oneOf { + dog: DogInput + cat: CatInput +} + +input DogInput { + name: String! +} + +input CatInput { + name: String! +} + +interface Pet { + name: String! +} + +type Dog implements Pet { + name: String! +} + +type Cat implements Pet { + name: String! +} + +type Mutation { + createPet(input: PetInput): Pet +} +``` + + + + +```csharp +public class PetInput +{ + public Dog? Dog { get; set; } + + public Cat? 
Cat { get; set; } +} + +public interface IPet +{ + string Name { get; } +} + +public class Dog : IPet +{ + public string Name { get; set; } +} + +public class Cat : IPet +{ + public string Name { get; set; } +} + +public class Mutation +{ + public Task CreatePetAsync(PetInput input) + { + // Omitted code for brevity + } +} + +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + .AddDocumentFromString(@" + input PetInput @oneOf { + dog: DogInput + cat: CatInput + } + + input DogInput { + name: String! + } + + input CatInput { + name: String! + } + + interface Pet { + name: String! + } + + type Dog implements Pet { + name: String! + } + + type Cat implements Pet { + name: String! + } + + type Mutation { + createPet(input: PetInput): Pet + } + ") + .BindRuntimeType() + .BindRuntimeType() + .BindRuntimeType() + .ModifyOptions(o => o.EnableOneOf = true); + } +} +``` + + + diff --git a/website/src/docs/hotchocolate/v12/defining-a-schema/interfaces.md b/website/src/docs/hotchocolate/v12/defining-a-schema/interfaces.md new file mode 100644 index 00000000000..79682978eb5 --- /dev/null +++ b/website/src/docs/hotchocolate/v12/defining-a-schema/interfaces.md @@ -0,0 +1,638 @@ +--- +title: "Interfaces" +--- + +import { ExampleTabs, Annotation, Code, Schema } from "../../../components/mdx/example-tabs" + +An interface is an abstract type that defines a certain set of fields that an object type or another interface must include to implement the interface. Interfaces can only be used as output types, meaning we can't use interfaces as arguments or as fields on input object types. + +```sdl +interface Message { + author: User! + createdAt: DateTime! +} + +type TextMessage implements Message { + author: User! + createdAt: DateTime! + content: String! +} + +type Query { + messages: [Message]! +} +``` + +# Usage + +GIven is the schema from above. + +When querying a field returning an interface, we can query the fields defined in the interface like we would query a regular object type. + +```graphql +{ + messages { + createdAt + } +} +``` + +If we need to access fields that are part of an object type implementing the interface, we can do so using [fragments](https://graphql.org/learn/queries/#fragments). + +```graphql +{ + messages { + createdAt + ... on TextMessage { + content + } + } +} +``` + +# Definition + +Interfaces can be defined like the following. + + + + +```csharp +[InterfaceType("Message")] +public interface IMessage +{ + User Author { get; set; } + + DateTime CreatedAt { get; set; } +} + +public class TextMessage : IMessage +{ + public User Author { get; set; } + + public DateTime CreatedAt { get; set; } + + public string Content { get; set; } +} + +public class Query +{ + public IMessage[] GetMessages() + { + // Omitted code for brevity + } +} + +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + .AddQueryType() + .AddType(); + } +} +``` + +We can also use classes to define an interface. + +```csharp +[InterfaceType] +public abstract class Message +{ + public User SendBy { get; set; } + + public DateTime CreatedAt { get; set; } +} + +public class TextMessage : Message +{ + public string Content { get; set; } +} + +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + // ... 
+ .AddType(); + } +} +``` + + + + +```csharp +public interface IMessage +{ + User Author { get; set; } + + DateTime CreatedAt { get; set; } +} + +public class MessageType : InterfaceType +{ + protected override void Configure( + IInterfaceTypeDescriptor descriptor) + { + descriptor.Name("Message"); + } +} + +public class TextMessage : IMessage +{ + public User Author { get; set; } + + public DateTime CreatedAt { get; set; } + + public string Content { get; set; } +} + +public class TextMessageType : ObjectType +{ + protected override void Configure( + IObjectTypeDescriptor descriptor) + { + descriptor.Name("TextMessage"); + + // The interface that is being implemented + descriptor.Implements(); + } +} + +public class Query +{ + public IMessage[] GetMessages() + { + // Omitted code for brevity + } +} + +public class QueryType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor + .Field(f => f.GetMessages(default)); + } +} + +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + .AddQueryType() + .AddType(); + } +} +``` + + + + +```csharp +public interface IMessage +{ + User Author { get; set; } + + DateTime CreatedAt { get; set; } +} + +public class TextMessage : IMessage +{ + public User Author { get; set; } + + public DateTime CreatedAt { get; set; } + + public string Content { get; set; } +} + +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + .AddDocumentFromString(@" + type Query { + messages: [Message] + } + + interface Message { + author: User! + createdAt: DateTime! + } + + type TextMessage implements Message { + author: User! + createdAt: DateTime! + content: String! + } + ") + .BindRuntimeType() + .AddResolver("Query", "messages", (context) => + { + // Omitted code for brevity + }); + } +} +``` + + + + +> Note: We have to explicitly register the interface implementations: +> +> ```csharp +> services.AddGraphQLServer().AddType() +> ``` + +# Binding behavior + +In the Annotation-based approach all public properties and methods are implicitly mapped to fields on the schema interface type. The same is true for `T` of `InterfaceType` when using the Code-first approach. + +In the Code-first approach we can also enable explicit binding, where we have to opt-in properties and methods we want to include instead of them being implicitly included. + + + +We can configure our preferred binding behavior globally like the following. + +```csharp +services + .AddGraphQLServer() + .ModifyOptions(options => + { + options.DefaultBindingBehavior = BindingBehavior.Explicit; + }); +``` + +> ⚠️ Note: This changes the binding behavior for all types, not only interface types. + +We can also override it on a per type basis: + +```csharp +public class MessageType : InterfaceType +{ + protected override void Configure( + IInterfaceTypeDescriptor descriptor) + { + descriptor.BindFields(BindingBehavior.Implicit); + + // We could also use the following methods respectively + // descriptor.BindFieldsExplicitly(); + // descriptor.BindFieldsImplicitly(); + } +} +``` + +## Ignoring fields + + + + +In the Annotation-based approach we can ignore fields using the `[GraphQLIgnore]` attribute. 
+ +```csharp +public interface IMessage +{ + [GraphQLIgnore] + User Author { get; set; } + + DateTime CreatedAt { get; set; } +} +``` + + + + +In the Code-first approach we can ignore fields using the `Ignore` method on the `IInterfaceTypeDescriptor`. This is only necessary, if the binding behavior of the interface type is implicit. + +```csharp +public class MessageType : InterfaceType +{ + protected override void Configure( + IInterfaceTypeDescriptor descriptor) + { + descriptor.Ignore(f => f.Author); + } +} + +``` + + + + +We do not have to ignore fields in the Schema-first approach. + + + + +## Including fields + +In the Code-first approach we can explicitly include properties of our POCO using the `Field` method on the `IInterfaceTypeDescriptor`. This is only necessary, if the binding behavior of the interface type is explicit. + +```csharp +public class MessageType : InterfaceType +{ + protected override void Configure( + IInterfaceTypeDescriptor descriptor) + { + descriptor.BindFieldsExplicitly(); + + descriptor.Field(f => f.Title); + } +} +``` + +# Naming + +Unless specified explicitly, Hot Chocolate automatically infers the names of interface types and their fields. Per default the name of the interface / abstract class becomes the name of the interface type. When using `InterfaceType` in Code-first, the name of `T` is chosen as the name for the interface type. The names of methods and properties on the respective interface / abstract class are chosen as names of the fields of the interface type + +If we need to we can override these inferred names. + + + + +The `[GraphQLName]` attribute allows us to specify an explicit name. + +```csharp +[GraphQLName("Post")] +public interface IMessage +{ + User Author { get; set; } + + [GraphQLName("addedAt")] + DateTime CreatedAt { get; set; } +} +``` + +We can also specify a name for the interface type using the `[InterfaceType]` attribute. + +```csharp +[InterfaceType("Post")] +public interface IMessage +``` + + + + +The `Name` method on the `IInterfaceTypeDescriptor` / `IInterfaceFieldDescriptor` allows us to specify an explicit name. + +```csharp +public class MessageType : InterfaceType +{ + protected override void Configure( + IInterfaceTypeDescriptor descriptor) + { + descriptor.Name("Post"); + + descriptor + .Field(f => f.CreatedAt) + .Name("addedAt"); + } +} +``` + + + + +Simply change the names in the schema. + + + + +This would produce the following `Post` schema interface type: + +```sdl +interface Post { + author: User! + addedAt: DateTime! +} +``` + +# Interfaces implementing interfaces + +Interfaces can also implement other interfaces. + +```sdl +interface Message { + author: User +} + +interface DatedMessage implements Message { + createdAt: DateTime! + author: User +} + +type TextMessage implements DatedMessage & Message { + author: User + createdAt: DateTime! + content: String +} +``` + +We can implement this like the following. 
+ + + + +```csharp +[InterfaceType("Message")] +public interface IMessage +{ + User Author { get; set; } +} + +[InterfaceType("DatedMessage")] +public interface IDatedMessage : IMessage +{ + DateTime CreatedAt { get; set; } +} + +public class TextMessage : IDatedMessage +{ + public User Author { get; set; } + + public DateTime CreatedAt { get; set; } + + public string Content { get; set; } +} + +public class Query +{ + public IMessage[] GetMessages() + { + // Omitted code for brevity + } +} + +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + .AddQueryType() + .AddType() + .AddType(); + } +} +``` + + + + +```csharp +public interface IMessage +{ + User Author { get; set; } +} + +public class MessageType : InterfaceType +{ + protected override void Configure( + IInterfaceTypeDescriptor descriptor) + { + descriptor.Name("Message"); + } +} + +public interface IDatedMessage : IMessage +{ + DateTime CreatedAt { get; set; } +} + +public class DatedMessageType : InterfaceType +{ + protected override void Configure( + IInterfaceTypeDescriptor descriptor) + { + descriptor.Name("DatedMessage"); + + descriptor.Implements(); + } +} + +public class TextMessage : IDatedMessage +{ + public User Author { get; set; } + + public DateTime CreatedAt { get; set; } + + public string Content { get; set; } +} + +public class TextMessageType : ObjectType +{ + protected override void Configure( + IObjectTypeDescriptor descriptor) + { + descriptor.Name("TextMessage"); + + // The interface that is being implemented + descriptor.Implements(); + } +} + +public class Query +{ + public IMessage[] GetMessages() + { + // Omitted code for brevity + } +} + +public class QueryType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor + .Field(f => f.GetMessages(default)); + } +} + +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + .AddQueryType() + .AddType() + .AddType(); + } +} +``` + + + + +```csharp +public interface IMessage +{ + User Author { get; set; } +} + +public interface IDatedMessage : IMessage +{ + DateTime CreatedAt { get; set; } +} + +public class TextMessage : IDatedMessage +{ + public User Author { get; set; } + + public DateTime CreatedAt { get; set; } + + public string Content { get; set; } +} + +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + .AddDocumentFromString(@" + type Query { + messages: [Message] + } + + interface Message { + author: User + } + + interface DatedMessage implements Message { + createdAt: DateTime! + author: User + } + + type TextMessage implements DatedMessage & Message { + author: User + createdAt: DateTime! 
+ content: String + } + ") + .BindRuntimeType() + .AddResolver("Query", "messages", (context) => + { + // Omitted code for brevity + }); + } +} +``` + + + + +> Note: We also have to register the `DatedMessage` interface manually, if we do not expose it through a field directly: +> +> ```csharp +> services.AddGraphQLServer().AddType() +> ``` diff --git a/website/src/docs/hotchocolate/v12/defining-a-schema/lists.md b/website/src/docs/hotchocolate/v12/defining-a-schema/lists.md new file mode 100644 index 00000000000..c6b3be74408 --- /dev/null +++ b/website/src/docs/hotchocolate/v12/defining-a-schema/lists.md @@ -0,0 +1,105 @@ +--- +title: "Lists" +--- + +import { ExampleTabs, Annotation, Code, Schema } from "../../../components/mdx/example-tabs" + +GraphQL allows us to return lists of elements from our fields. + +```sdl +type Query { + users: [User] +} +``` + +Clients can query list fields like any other field. + +```graphql +{ + users { + id + name + } +} +``` + +Querying a list field will result in an ordered list containing elements with the specified subselection of fields. + +Learn more about lists [here](https://graphql.org/learn/schema/#lists-and-non-null). + +# Usage + +Lists can be defined like the following. + + + + +If our field resolver returns a list type, e.g. `IEnumerable` or `IQueryable`, it will automatically be treated as a list type in the schema. + +```csharp +public class Query +{ + public List GetUsers() + { + // Omitted code for brevity + } +} +``` + + + + +If our field resolver returns a list type, e.g. `IEnumerable` or `IQueryable`, it will automatically be treated as a list type in the schema. + +```csharp +public class QueryType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor.Name(OperationTypeNames.Query); + + descriptor + .Field("users") + .Resolve(context => + { + List users = null; + + // Omitted code for brevity + + return users; + }); + } +} +``` + +We can also be more explicit by specifying a `ListType` as the return type. + +```csharp +public class QueryType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor.Name(OperationTypeNames.Query); + + descriptor + .Field("users") + .Type>() + .Resolve(context => + { + // Omitted code for brevity + }); + } +} +``` + + + + +```sdl +type Query { + users: [User] +} +``` + + + diff --git a/website/src/docs/hotchocolate/v12/defining-a-schema/mutations.md b/website/src/docs/hotchocolate/v12/defining-a-schema/mutations.md new file mode 100644 index 00000000000..a6758a8454a --- /dev/null +++ b/website/src/docs/hotchocolate/v12/defining-a-schema/mutations.md @@ -0,0 +1,1054 @@ +--- +title: "Mutations" +--- + +import { ExampleTabs, Annotation, Code, Schema } from "../../../components/mdx/example-tabs" + +The mutation type in GraphQL is used to mutate/change data. This means that when we are doing mutations, we are intending to cause side-effects in the system. + +GraphQL defines mutations as top-level fields on the mutation type. Meaning only the fields on the mutation root type itself are mutations. Everything that is returned from a mutation field represents the changed state of the server. + +```sdl +type Mutation { + addBook(input: AddBookInput!): AddBookPayload! + publishBook(input: PublishBookInput!): PublishBookPayload! +} +``` + +Clients can execute one or more mutations through the mutation type. 
+ +```graphql +mutation { + addBook(input: { title: "C# in depth" }) { + book { + id + title + } + } + publishBook(input: { id: 1 }) { + book { + publishDate + } + } +} +``` + +Each of these mutations is executed serially one by one whereas their child selection sets are executed possibly in parallel since only top-level mutation fields (those directly under `mutation`) are allowed to cause side-effects in GraphQL. + +# Usage + +A mutation type can be defined like the following. + + + + +```csharp +public class Mutation +{ + public async Task AddBook(Book book) + { + // Omitted code for brevity + } +} + +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + .AddMutationType(); + } + + // Omitted code for brevity +} +``` + + + + +```csharp +public class Mutation +{ + public async Task AddBook(Book book) + { + // Omitted code for brevity + } +} + +public class MutationType : ObjectType +{ + protected override void Configure( + IObjectTypeDescriptor descriptor) + { + descriptor.Field(f => f.AddBook(default)); + } +} + +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + .AddMutationType(); + } +} +``` + + + + +```csharp +public class Mutation +{ + public async Task AddBook(Book book) + { + // Omitted code for brevity + } +} + +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + .AddDocumentFromString(@" + type Mutation { + addBook(input: BookInput): Book + } + + input BookInput { + title: String + author: String + } + + type Book { + title: String + author: String + } + ") + .BindRuntimeType(); + } +} +``` + + + + +> ⚠️ Note: Only **one** mutation type can be registered using `AddMutationType()`. If we want to split up our mutation type into multiple classes, we can do so using type extensions. +> +> [Learn more about extending types](/docs/hotchocolate/v12/defining-a-schema/extending-types) + +A mutation type is just a regular object type, so everything that applies to an object type also applies to the mutation type (this is true for all root types). + +[Learn more about object types](/docs/hotchocolate/v12/defining-a-schema/object-types) + +# Transactions + +With multiple mutations executed serially in one request it can be useful to wrap these in a transaction that we can control. + +Hot Chocolate provides for this the `ITransactionScopeHandler` which is used by the operation execution middleware to create transaction scopes for mutation requests. + +Hot Chocolate provides a default implementation based on the `System.Transactions.TransactionScope` which works with Microsoft ADO.NET data provider and hence can be used in combination with Entity Framework. + +The default transaction scope handler can be added like the following. + +```csharp +services + .AddGraphQLServer() + .AddDefaultTransactionScopeHandler(); +``` + +This is how the default implementation looks like: + +```csharp +/// +/// Represents the default mutation transaction scope handler implementation. +/// +public class DefaultTransactionScopeHandler : ITransactionScopeHandler +{ + /// + /// Creates a new transaction scope for the current + /// request represented by the . + /// + /// + /// The GraphQL request context. + /// + /// + /// Returns a new . 
+ /// + public virtual ITransactionScope Create(IRequestContext context) + { + return new DefaultTransactionScope( + context, + new TransactionScope( + TransactionScopeOption.Required, + new TransactionOptions + { + IsolationLevel = IsolationLevel.ReadCommitted + })); + } +} +``` + +If we implement a custom transaction scope handler or if we choose to extend upon the default transaction scope handler, we can add it like the following. + +```csharp +services + .AddGraphQLServer() + .AddTransactionScopeHandler(); +``` + +# Conventions + +In GraphQL, it is best practice to have a single argument on mutations called `input`, and each mutation should return a payload object. +The payload object allows to read the changes of the mutation or to access the domain errors caused by a mutation. + +```sdl +type Mutation { + updateUserName(input: UpdateUserNameInput!): UpdateUserNamePayload! +} + +input UpdateUserNameInput { + userId: ID! + username: String! +} + +type UpdateUserNamePayload { + user: User +} +``` + +Following this pattern helps to keep the schema evolvable but requires a lot of boilerplate code to realize. + +## Input and Payload + +HotChocolate has built-in conventions for mutations to minimize boilerplate code. + +The HotChocolate mutation conventions are opt-in and can be enabled like the following: + +```csharp +service + .AddGraphQLServer() + .AddMutationConventions() + ... +``` + +With the mutation conventions enabled, we can define the described mutation pattern with minimal code by just annotating a field with `UseMutationConvention`. + + + + +```csharp +public class Mutation +{ + [UseMutationConvention] + public User? UpdateUserNameAsync([ID] Guid userId, string username) + { + //... + } +} +``` + + + + +```csharp +public class Mutation +{ + public User UpdateUserNameAsync( + Guid userId, + string username) + => ... +} + +public class MutationType : ObjectType +{ + protected override void Configure( + IObjectTypeDescriptor descriptor) + { + descriptor + .Field(f => f.UpdateUserNameAsync(default, default)) + .Argument("userId", a => a.ID()) + .UseMutationConvention(); + } +} +``` + + + + +```sdl +type Mutation { + updateUserName(userId: ID!, username: String!) : User @useMutationConvention +} +``` + + + + +We also can configure the mutation conventions to be applied to all mutations by default. + +```csharp +service + .AddGraphQLServer() + .AddMutationConventions(applyToAllMutations: true) + ... +``` + +In the case that the conventions are applied by default we no longer need any annotation. + + + + +```csharp +public class Mutation +{ + public User? UpdateUserNameAsync([ID] Guid userId, string username) + { + //... + } +} +``` + + + + +```csharp +public class Mutation +{ + public User UpdateUserNameAsync( + Guid userId, + string username) + => ... +} + +public class MutationType : ObjectType +{ + protected override void Configure( + IObjectTypeDescriptor descriptor) + { + descriptor + .Field(f => f.UpdateUserNameAsync(default, default)) + .Argument("userId", a => a.ID()); + } +} +``` + + + + +```sdl +type Mutation { + updateUserName(userId: ID!, username: String!) : User +} +``` + + + + +## Errors + +The mutation conventions also allow you to create mutations that follow the error +[stage 6a Pattern Marc-Andre Giroux layed out](https://xuorig.medium.com/a-guide-to-graphql-errors-bb9ba9f15f85) with minimal effort. + +The basic concept here is to keep the resolver clean of any error handling code and use exceptions to signal an error state. 
The field will simply expose which exceptions are domain errors that shall be exposed to the schema. All other exceptions will still cause runtime errors. + + + + +```csharp +public class Mutation +{ + [Error(typeof(UserNameTakenException))] + [Error(typeof(InvalidUserNameException))] + public User? UpdateUserNameAsync([ID] Guid userId, string username) + { + //... + } +} +``` + + + + +```csharp +public class Mutation +{ + public User? UpdateUserNameAsync(Guid userId, string username) + { + // ... + } +} + +public class MutationType : ObjectType +{ + protected override void Configure( + IObjectTypeDescriptor descriptor) + { + descriptor + .Field(f => f.UpdateUserNameAsync(default)) + .Error() + .Error(); + } +} +``` + + + + +```sdl +type Mutation { + updateUserName(userId: ID!, username: String!): User +} +``` + +```csharp +public class Mutation +{ + [Error(typeof(UserNameTakenException))] + [Error(typeof(InvalidUserNameException))] + public User? UpdateUserNameAsync(Guid userId, string username) + { + //... + } +} +``` + + + + +The HotChocolate schema is automatically rewritten, and an error middleware will catch all the exceptions that represent domain errors and rewrite them into the correct error object. + +The configuration above emits the following schema: + +```sdl +type Mutation { + updateUserName(input: UpdateUserNameInput!): UpdateUserNamePayload! +} + +input UpdateUserNameInput { + userId: ID! + username: String! +} + +type UpdateUserNamePayload { + user: User + errors: [UpdateUserNameError!] +} + +type User { + username: String +} + +interface Error { + message: String! +} + +type UserNameTakenError implements Error { + message: String! +} + +type InvalidUserNameError implements Error { + message: String! +} + +union UpdateUserNameError = UserNameTakenError | InvalidUserNameError +``` + +There are three ways to map an exception to a user error. + +1. Map the exception directly +2. Map with a factory method (`CreateErrorFrom`) +3. Map with a constructor + +> Note: You can use AggregateExceptions to return multiple errors at once. + +### Map exceptions directly + +The quickest way to define a user error, is to map the exception directly into the graph. You can just annotate the exception directly on the resolver. +If the exception is thrown and is caught in the error middleware, it will be rewritten into an user error that is exposed on the mutation payload. + +> The name of the exception will be rewritten. `Exception` is replaced with `Error` to follow the common GraphQL naming conventions. + + + + +```csharp +public class UserNameTakenException : Exception +{ + public UserNameTakenException(string username) + : base($"The username {username} is already taken.") + { + } +} + +public class Mutation +{ + [Error(typeof(UserNameTakenException))] + public User? UpdateUserNameAsync([ID] Guid userId, string username) + { + //... + } +} +``` + + + + +```csharp +public class UserNameTakenException : Exception +{ + public UserNameTakenException(string username) + : base($"The username {username} is already taken.") + { + } +} + +public class Mutation +{ + public User? UpdateUserNameAsync(Guid userId, string username) + { + // ... 
+ } +} + +public class MutationType : ObjectType +{ + protected override void Configure( + IObjectTypeDescriptor descriptor) + { + descriptor + .Field(f => f.UpdateUserNameAsync(default)) + .Error(); + } +} +``` + + + + +```csharp +public class UserNameTakenException : Exception +{ + public UserNameTakenException(string username) + : base($"The username {username} is already taken.") + { + } +} +``` + +```sdl +type Mutation { + updateUserName(userId: ID!, username: String!): User +} +``` + +```csharp +public class Mutation +{ + [Error(typeof(UserNameTakenException))] + public User? UpdateUserNameAsync(Guid userId, string username) + { + //... + } +} +``` + + + + +### Map with a factory method + +Often there is a need to control the error shape and ensure that not too many details are exposed. In these cases, we can use a custom error class representing the user error in our schema. + +The error instance and the translation of the exception can be done by an error factory. The error factory method receives an exception and returns the error object. + +Add a `public` `static` method called `CreateErrorFrom` that takes an exception and returns the error object. + + + + +```csharp +public class UserNameTakenError +{ + private UserNameTakenError(string username) + { + Message = $"The username {username} is already taken."; + } + + public static MyCustomError CreateErrorFrom(UserNameTakenException ex) + { + return new MyCustomError(ex.Username); + } + + public static MyCustomError CreateErrorFrom(OtherException ex) + { + return new MyCustomError(ex.Username); + } + + public string Message { get; } +} + +public class UserNameTakenException : Exception +{ + public UserNameTakenException(string username) + { + Username = username; + } + + public string Username { get; } +} + +public class Mutation +{ + [Error(typeof(UserNameTakenError))] + public User? UpdateUserNameAsync([ID] Guid userId, string username) + { + //... + } +} +``` + + + + +```csharp +public class UserNameTakenError +{ + private UserNameTakenError(string username) + { + Message = $"The username {username} is already taken."; + } + + public static MyCustomError CreateErrorFrom(UserNameTakenException ex) + { + return new MyCustomError(ex.Username); + } + + public static MyCustomError CreateErrorFrom(OtherException ex) + { + return new MyCustomError(ex.Username); + } + + public string Message { get; } +} + +public class UserNameTakenException : Exception +{ + public UserNameTakenException(string username) + { + Username = username; + } + + public string Username { get; } +} + +public class Mutation +{ + public User? UpdateUserNameAsync(Guid userId, string username) + { + // ... + } +} + +public class MutationType : ObjectType +{ + protected override void Configure( + IObjectTypeDescriptor descriptor) + { + descriptor + .Field(f => f.UpdateUserNameAsync(default)) + .Error(); + } +} +``` + + + + +```csharp +public class UserNameTakenError +{ + private UserNameTakenError(string username) + { + Message = $"The username {username} is already taken."; + } + + public static MyCustomError CreateErrorFrom(UserNameTakenException ex) + { + return new MyCustomError(ex.Username); + } + + public static MyCustomError CreateErrorFrom(OtherException ex) + { + return new MyCustomError(ex.Username); + } + + public string Message { get; } +} +``` + +```sdl +type Mutation { + updateUserName(userId: ID!, username: String!): User +} +``` + +```csharp +public class Mutation +{ + [Error(typeof(UserNameTakenError))] + public User? 
UpdateUserNameAsync(Guid userId, string username) + { + //... + } +} +``` + + + + +Error factories can also be located in a dedicated class. + +```csharp +public static class CreateUserErrorFactory +{ + public static MyCustomErrorA CreateErrorFrom(DomainExceptionA ex) + { + return new MyCustomError(); + } + + public static MyCustomErrorB CreateErrorFrom(DomainExceptionB ex) + { + return new MyCustomError(); + } +} + +public class Mutation +{ + [Error(typeof(CreateUserErrorFactory))] + public CreateUserPayload CreateUser(CreateUserInput input) + { + // ... + } +} +``` + +Further the error factory methods do not have to be static. +You can also use the `IPayloadErrorFactory` interface, to define instance error factory methods. This also enables you to use dependency injection with your factory class. + +```csharp +public class CreateUserErrorFactory + : IPayloadErrorFactory + , IPayloadErrorFactory +{ + public MyCustomErrorA CreateErrorFrom(DomainExceptionA ex) + { + return new MyCustomError(); + } + + public MyCustomErrorB CreateErrorFrom(DomainExceptionB ex) + { + return new MyCustomError(); + } +} + +public class Mutation +{ + [Error(typeof(CreateUserErrorFactory))] + public CreateUserPayload CreateUser(CreateUserInput input) + { + // ... + } +} +``` + +### Map with a constructor + +Lastly, we can also use the constructor of an error class to consume an exception. Essentially the constructor in this case represents the factory that we described earlier. + + + + +```csharp +public class UserNameTakenError +{ + private UserNameTakenError(UserNameTakenException ex) + { + Message = $"The username {ex.Username} is already taken."; + } + + public string Message { get; } +} + +public class UserNameTakenException : Exception +{ + public UserNameTakenException(string username) + { + Username = username; + } + + public string Username { get; } +} + +public class Mutation +{ + [Error(typeof(UserNameTakenError))] + public User? UpdateUserNameAsync([ID] Guid userId, string username) + { + //... + } +} +``` + + + + +```csharp +public class UserNameTakenError +{ + private UserNameTakenError(UserNameTakenException ex) + { + Message = $"The username {ex.Username} is already taken."; + } + + public string Message { get; } +} + +public class UserNameTakenException : Exception +{ + public UserNameTakenException(string username) + { + Username = username; + } + + public string Username { get; } +} + +public class Mutation +{ + public User? UpdateUserNameAsync(Guid userId, string username) + { + // ... + } +} + +public class MutationType : ObjectType +{ + protected override void Configure( + IObjectTypeDescriptor descriptor) + { + descriptor + .Field(f => f.UpdateUserNameAsync(default)) + .Error(); + } +} +``` + + + + +```csharp +public class UserNameTakenError +{ + private UserNameTakenError(UserNameTakenException ex) + { + Message = $"The username {ex.Username} is already taken."; + } + + public string Message { get; } +} + +public class UserNameTakenException : Exception +{ + public UserNameTakenException(string username) + { + Username = username; + } + + public string Username { get; } +} +``` + +```sdl +type Mutation { + updateUserName(userId: ID!, username: String!): User +} +``` + +```csharp +public class Mutation +{ + [Error(typeof(UserNameTakenError))] + public User? UpdateUserNameAsync(Guid userId, string username) + { + //... + } +} +``` + + + + +> Note: errors and error factories can be shared between multiple mutations. 
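+
+To see how a declared error surfaces at runtime, here is a minimal sketch of a resolver raising one of its domain errors; the `IUserService` and its members are hypothetical and only serve as an illustration.
+
+```csharp
+public class Mutation
+{
+    // UserNameTakenException is declared as a domain error, so the error
+    // middleware rewrites it into a UserNameTakenError on the payload
+    // instead of surfacing it as a GraphQL runtime error.
+    [Error(typeof(UserNameTakenException))]
+    public async Task<User?> UpdateUserNameAsync(
+        [ID] Guid userId,
+        string username,
+        [Service] IUserService userService)
+    {
+        if (await userService.IsUserNameTakenAsync(username))
+        {
+            throw new UserNameTakenException(username);
+        }
+
+        return await userService.UpdateUserNameAsync(userId, username);
+    }
+}
+```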
+ +## Customization + +While the mutation conventions strictly follow the outlined mutation and error patterns they still can be customized. + +### Naming + +The naming patterns for inputs, payloads and errors can be adjusted globally as well as on a per mutation basis. + +In order to change the global mutation naming patterns you can pass in the `MutationConventionOptions` into the `AddMutationConventions` configuration method. + +```csharp +services + .AddGraphQL() + .AddMutationConventions( + new MutationConventionOptions + { + InputArgumentName = "input", + InputTypeNamePattern = "{MutationName}Input", + PayloadTypeNamePattern = "{MutationName}Payload", + PayloadErrorTypeNamePattern = "{MutationName}Error", + PayloadErrorsFieldName = "errors", + ApplyToAllMutations = true + }) + ... +``` + +To override the global mutation settings on a mutation use the `UseMutationConvention` annotation. + +```csharp +[UseMutationConvention( + InputTypeName = "FooInput", + InputArgumentName = "foo", + PayloadTypeName = "FooPayload", + PayloadFieldName = "bar")] +public User? UpdateUserNameAsync(Guid userId, string username) +{ + //... +} +``` + +### Opting Out + +Often we want to infer everything and only opt-out for exceptional cases, and the mutation convention allows us to do that in an effortless way. + +The first way to opt out of the global conventions is to use the `UseMutationConvention` annotation. With `UseMutationConvention` we can tell the type system initialization to disable the convention on certain mutations. + +```csharp +[UseMutationConvention(Disable = true)] +public User? UpdateUserNameAsync(Guid userId, string username) +{ + //... +} +``` + +In many cases, we do not want to entirely opt-out but rather override the global settings since we wish for a more complex payload or input. We can simply add our own payload or input type in these cases, and the schema initialization will recognize that. Essentially if we follow the naming pattern for either input or payload, the initialization will not rewrite that part that already follows the global convention. + +```csharp +public UpdateUserNamePayload UpdateUserNameAsync(UpdateUserNameInput input) +{ + //... +} +``` + +You can also partially opt-out: + +```csharp +public User UpdateUserNameAsync(UpdateUserNameInput input) +{ + //... +} +``` + +### Custom error interface + +Lastly, we can customize the error interface we want to use with our mutation convention. The error interface is shared across all error types that the schema defines and provides the minimum shape that all errors have to fulfill. + +By default, this error interface type is called `Error` and defines a non-nullable field `message`. + +```sdl +interface Error { + message: String! +} +``` + +Often we also want to provide an error code so that the GUI components can more easily implement error handling logic. In such a case, we could provide our own error interface. + +> Note: All your error types have to implement the contract that the interface declares! Your errors/exceptions do not have to implement the common interface, but they have to declare all the interface's members. + + + + +```csharp +[GraphQLName("UserError")] +public interface IUserError +{ + string Message { get; } + + string Code { get; } +} + +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + // ... 
Omitted code for brevity + .AddErrorInterfaceType(); + } +} +``` + + + + +```csharp +public class CustomErrorInterfaceType : InterfaceType +{ + protected override void Configure(IInterfaceTypeDescriptor descriptor) + { + descriptor.Name("UserError"); + descriptor.Field("message").Type>(); + descriptor.Field("code").Type>(); + } +} + +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + // ... Omitted code for brevity + .AddErrorInterfaceType(); + } +} +``` + + + + +```sdl +interface UserError @errorInterface { + message: String! + code: String! +} +``` + + + + +```sdl +interface UserError { + message: String! + code: String! +} +``` diff --git a/website/src/docs/hotchocolate/v12/defining-a-schema/non-null.md b/website/src/docs/hotchocolate/v12/defining-a-schema/non-null.md new file mode 100644 index 00000000000..e184e55695d --- /dev/null +++ b/website/src/docs/hotchocolate/v12/defining-a-schema/non-null.md @@ -0,0 +1,155 @@ +--- +title: "Non-Null" +--- + +import { ExampleTabs, Annotation, Code, Schema } from "../../../components/mdx/example-tabs" + +Per default all fields on an object type can be either `null` or the specified type. + +```sdl +type User { + name: String +} +``` + +In the above example `name` can either be `null` or a `String`. + +Being nullable does not make sense for every field though. Maybe we have some database constraint which enforces the `name` to never be `null`. +GraphQL allows us to be specific about this, by marking a field as non-null. + +```sdl +type User { + name: String! +} +``` + +The exclamation mark (`!`) denotes that the field can never be `null`. +This is also enforced by the execution engine. If we were to return a `null` value in the `name` resolver, the execution engine would throw an error. This prevents unexpected `null` values from causing issues in the consuming applications. + + + +# Implicit nullability + +Hot Chocolate automatically infers the nullability of the schema type from the nullability of the used CLR type. + +[Value types](https://docs.microsoft.com/dotnet/csharp/language-reference/builtin-types/value-types) are non-null per default, unless they have been marked as nullable. + +| CLR Type | Schema Type | +| ----------------------- | ----------- | +| int | Int! | +| int? | Int | +| Nullable<int> | Int | + +[Reference types](https://docs.microsoft.com/dotnet/csharp/language-reference/keywords/reference-types) are always nullable, unless we have enabled [nullable reference types](https://docs.microsoft.com/dotnet/csharp/nullable-references). With nullable reference types enabled all fields are non-null per default. + +We strongly encourage the use of nullable reference types. + +# Explicit nullability + +We can also be explicit about the nullability of our fields. 
+ + + + +```csharp +public class Query +{ + [GraphQLNonNullType] + public Book GetBook() + { + return new Book { Title = "C# in depth", Author = "Jon Skeet" }; + } +} + +public class Book +{ + [GraphQLNonNullType] + public string Title { get; set; } + + public string Author { get; set; } +} +``` + + + + +```csharp +public class QueryType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor + .Field(f => f.GetBook()) + .Type>(); + } +} + +public class BookType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor + .Field(f => f.Title) + .Type>(); + + descriptor + .Field(f => f.Author) + .Type(); + } +} +``` + + + + +```sdl +type Book { + title: String! + nullableTitle: String +} +``` + + + + +The inner type of a list can be made non-null like the following. + + + + +```csharp +public class Book +{ + [GraphQLType(typeof(ListType>))] + public List Genres { get; set; } +} +``` + + + + +```csharp +public class BookType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor + .Field(f => f.Genres) + .Type>>(); + } +} +``` + + + + +```sdl +type Book { + genres: [String!] +} +``` + + + diff --git a/website/src/docs/hotchocolate/v12/defining-a-schema/object-types.md b/website/src/docs/hotchocolate/v12/defining-a-schema/object-types.md new file mode 100644 index 00000000000..11d035efd51 --- /dev/null +++ b/website/src/docs/hotchocolate/v12/defining-a-schema/object-types.md @@ -0,0 +1,526 @@ +--- +title: "Object Types" +--- + +import { ExampleTabs, Annotation, Code, Schema } from "../../../components/mdx/example-tabs" + +The most important type in a GraphQL schema is the object type. It contains fields that can return simple scalars like `String`, `Int`, or again object types. + +```sdl +type Author { + name: String +} + +type Book { + title: String + author: Author +} +``` + +Learn more about object types [here](https://graphql.org/learn/schema/#object-types-and-fields). + +# Definition + +Object types can be defined like the following. + + + + +In the Annotation-based approach we are essentially just creating regular C# classes. + +```csharp +public class Author +{ + public string Name { get; set; } +} +``` + + + + +In the Code-first approach we create a new class inheriting from `ObjectType` to map our POCO `Author` to an object type. + +```csharp +public class Author +{ + public string Name { get; set; } +} + +public class AuthorType : ObjectType +{ +} +``` + +We can override the `Configure` method to have access to an `IObjectTypeDescriptor` through which we can configure the object type. + +```csharp +public class AuthorType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + + } +} +``` + +The `IObjectTypeDescriptor` gives us the ability to configure the object type. We will cover how to use it in the following chapters. + +Since there could be multiple types inheriting from `ObjectType`, but differing in their name and fields, it is not certain which of these types should be used when we return an `Author` CLR type from one of our resolvers. + +**Therefore it's important to note that Code-first object types are not automatically inferred. 
They need to be explicitly specified or registered.**
+
+We can either [explicitly specify the type on a per-resolver basis](#explicit-types) or we can register the type once globally:
+
+```csharp
+public class Startup
+{
+    public void ConfigureServices(IServiceCollection services)
+    {
+        services
+            .AddGraphQLServer()
+            .AddType<AuthorType>();
+    }
+}
+```
+
+With this configuration every `Author` CLR type we return from our resolvers would be assumed to be an `AuthorType`.
+
+We can also create schema object types without a backing POCO.
+
+```csharp
+public class AuthorType : ObjectType
+{
+    protected override void Configure(IObjectTypeDescriptor descriptor)
+    {
+
+    }
+}
+```
+
+Head over [here](#additional-fields) to learn how to add fields to such a type.
+
+```csharp
+public class Author
+{
+    public string Name { get; set; }
+}
+
+public class Startup
+{
+    public void ConfigureServices(IServiceCollection services)
+    {
+        services
+            .AddGraphQLServer()
+            .AddDocumentFromString(@"
+                type Author {
+                  name: String
+                }
+            ")
+            .BindRuntimeType<Author>();
+    }
+}
+```
+
+# Binding behavior
+
+In the Annotation-based approach all public properties and methods are implicitly mapped to fields on the schema object type. The same is true for `T` of `ObjectType<T>` when using the Code-first approach.
+
+In the Code-first approach we can also enable explicit binding, where we have to opt in the properties and methods we want to include, instead of them being included implicitly.
+
+We can configure our preferred binding behavior globally like the following.
+
+```csharp
+services
+    .AddGraphQLServer()
+    .ModifyOptions(options =>
+    {
+        options.DefaultBindingBehavior = BindingBehavior.Explicit;
+    });
+```
+
+> ⚠️ Note: This changes the binding behavior for all types, not only object types.
+
+We can also override it on a per-type basis:
+
+```csharp
+public class BookType : ObjectType<Book>
+{
+    protected override void Configure(IObjectTypeDescriptor<Book> descriptor)
+    {
+        descriptor.BindFields(BindingBehavior.Implicit);
+
+        // We could also use the following methods respectively
+        // descriptor.BindFieldsExplicitly();
+        // descriptor.BindFieldsImplicitly();
+    }
+}
+```
+
+## Ignoring fields
+
+In the Annotation-based approach we can ignore fields using the `[GraphQLIgnore]` attribute.
+
+```csharp
+public class Book
+{
+    [GraphQLIgnore]
+    public string Title { get; set; }
+
+    public Author Author { get; set; }
+}
+```
+
+In the Code-first approach we can ignore fields of our POCO using the `Ignore` method on the `IObjectTypeDescriptor`. This is only necessary if the binding behavior of the object type is implicit.
+
+```csharp
+public class BookType : ObjectType<Book>
+{
+    protected override void Configure(IObjectTypeDescriptor<Book> descriptor)
+    {
+        descriptor.Ignore(f => f.Title);
+    }
+}
+```
+
+We do not have to ignore fields in the Schema-first approach.
+
+## Including fields
+
+In the Code-first approach we can explicitly include properties of our POCO using the `Field` method on the `IObjectTypeDescriptor`. This is only necessary if the binding behavior of the object type is explicit.
+
+```csharp
+public class BookType : ObjectType<Book>
+{
+    protected override void Configure(IObjectTypeDescriptor<Book> descriptor)
+    {
+        descriptor.BindFieldsExplicitly();
+
+        descriptor.Field(f => f.Title);
+    }
+}
+```
+
+# Naming
+
+Unless specified explicitly, Hot Chocolate automatically infers the names of object types and their fields. Per default the name of the class becomes the name of the object type.
When using `ObjectType` in Code-first, the name of `T` is chosen as the name for the object type. The names of methods and properties on the respective class are chosen as names of the fields of the object type. + +The following conventions are applied when transforming C# method and property names into SDL types and fields: + +- **Get prefixes are removed:** The get operation is implied and therefore redundant information. +- **Async postfixes are removed:** The `Async` is an implementation detail and therefore not relevant to the schema. +- **The first letter is lowercased:** This is not part of the specification, but a widely agreed upon standard in the GraphQL world. + +If we need to we can override these inferred names. + + + + +The `[GraphQLName]` attribute allows us to specify an explicit name. + +```csharp +[GraphQLName("BookAuthor")] +public class Author +{ + [GraphQLName("fullName")] + public string Name { get; set; } +} +``` + + + + +The `Name` method on the `IObjectTypeDescriptor` / `IObjectFieldDescriptor` allows us to specify an explicit name. + +```csharp +public class AuthorType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor.Name("BookAuthor"); + + descriptor + .Field(f => f.Name) + .Name("fullName"); + } +} +``` + + + + +Simply change the names in the schema. + + + + +This would produce the following `BookAuthor` schema object type: + +```sdl +type BookAuthor { + fullName: String +} +``` + +If only one of our clients requires specific names, it is better to use [aliases](https://graphql.org/learn/queries/#aliases) in this client's operations than changing the entire schema. + +```graphql +{ + MyUser: user { + Username: name + } +} +``` + +# Explicit types + +Hot Chocolate will, most of the time, correctly infer the schema types of our fields. Sometimes we might have to be explicit about it though. For example when we are working with custom scalars or Code-first types in general. + + + + +In the annotation-based approach we can use the `[GraphQLType]` attribute. + +```csharp +public class Author +{ + [GraphQLType(typeof(StringType))] + public string Name { get; set; } +} +``` + + + + +In the Code-first approach we can use the `Type` method on the `IObjectFieldDescriptor`. + +```csharp +public class AuthorType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor + .Field(f => f.Name) + .Type(); + } +} +``` + + + + +Simply change the field type in the schema. + + + + +# Additional fields + +We can add additional (dynamic) fields to our schema types, without adding new properties to our backing class. + + + + +```csharp +public class Author +{ + public string Name { get; set; } + + public DateTime AdditionalField() + { + // Omitted code for brevity + } +} +``` + + + + +In the Code-first approach we can use the `Resolve` method on the `IObjectFieldDescriptor`. + +```csharp +public class AuthorType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor + .Field("AdditionalField") + .Resolve(context => + { + // Omitted code for brevity + }) + } +} +``` + + + + +```csharp +public class Author +{ + public string Name { get; set; } +} + +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + .AddDocumentFromString(@" + type Author { + name: String + additionalField: DateTime! 
+ } + ") + .BindRuntimeType() + .AddResolver("Author", "additionalField", (context) => + { + // Omitted code for brevity + }); + } +} +``` + + + + +What we have just created is a resolver. Hot Chocolate automatically creates resolvers for our properties, but we can also define them ourselves. + +[Learn more about resolvers](/docs/hotchocolate/v12/fetching-data/resolvers) + +# Generics + +> Note: Read about [interfaces](/docs/hotchocolate/v12/defining-a-schema/interfaces) and [unions](/docs/hotchocolate/v12/defining-a-schema/unions) before resorting to generic object types. + +In the Code-first approach we can define generic object types. + +```csharp +public class Response +{ + public string Status { get; set; } + + public object Payload { get; set; } +} + +public class ResponseType : ObjectType + where T : class, IOutputType +{ + protected override void Configure( + IObjectTypeDescriptor descriptor) + { + descriptor.Field(f => f.Status); + + descriptor + .Field(f => f.Payload) + .Type(); + } +} + +public class Query +{ + public Response GetResponse() + { + return new Response + { + Status = "OK", + Payload = 123 + }; + } +} + +public class QueryType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor + .Field(f => f.GetResponse()) + .Type>(); + } +} +``` + +This will produce the following schema types. + +```sdl +type Query { + response: Response +} + +type Response { + status: String! + payload: Int +} +``` + +We have used an `object` as the generic field above, but we can also make `Response` generic and add another generic parameter to the `ResponseType`. + +```csharp +public class Response +{ + public string Status { get; set; } + + public T Payload { get; set; } +} + +public class ResponseType + : ObjectType> + where TSchemaType : class, IOutputType +{ + protected override void Configure( + IObjectTypeDescriptor> descriptor) + { + descriptor.Field(f => f.Status); + + descriptor + .Field(f => f.Payload) + .Type(); + } +} +``` + +## Naming + +If we were to use the above type with two different generic arguments, we would get an error, since both `ResponseType` have the same name. + +We can change the name of our generic object type depending on the used generic type. + +```csharp +public class ResponseType : ObjectType + where T : class, IOutputType +{ + protected override void Configure( + IObjectTypeDescriptor descriptor) + { + descriptor + .Name(dependency => dependency.Name + "Response") + .DependsOn(); + + descriptor.Field(f => f.Status); + + descriptor + .Field(f => f.Payload) + .Type(); + } +} +``` diff --git a/website/src/docs/hotchocolate/v12/defining-a-schema/queries.md b/website/src/docs/hotchocolate/v12/defining-a-schema/queries.md new file mode 100644 index 00000000000..13ff1562070 --- /dev/null +++ b/website/src/docs/hotchocolate/v12/defining-a-schema/queries.md @@ -0,0 +1,149 @@ +--- +title: "Queries" +--- + +import { ExampleTabs, Annotation, Code, Schema } from "../../../components/mdx/example-tabs" + +The query type in GraphQL represents a read-only view of all of our entities and ways to retrieve them. A query type is required for every GraphQL server. + +```sdl +type Query { + books: [Book!]! + author(id: Int!): Author +} +``` + +Clients can query one or more fields through the query type. + +```graphql +query { + books { + title + author + } + author(id: 1) { + name + } +} +``` + +Queries are expected to be side-effect free and are therefore parallelized by the execution engine. 
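+
+As a small illustration of this, two independent asynchronous root fields can be resolved concurrently by the execution engine. The repository services used below are hypothetical placeholders.
+
+```csharp
+public class Query
+{
+    // Both root fields are side-effect free, so the execution engine may
+    // resolve them in parallel within a single query operation.
+    public async Task<IEnumerable<Book>> GetBooksAsync(
+        [Service] IBookRepository bookRepository,
+        CancellationToken cancellationToken)
+        => await bookRepository.GetBooksAsync(cancellationToken);
+
+    public async Task<Author?> GetAuthorAsync(
+        int id,
+        [Service] IAuthorRepository authorRepository,
+        CancellationToken cancellationToken)
+        => await authorRepository.GetAuthorAsync(id, cancellationToken);
+}
+```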
+ +# Usage + +A query type can be defined like the following. + + + + +```csharp +public class Query +{ + public Book GetBook() + { + return new Book { Title = "C# in depth", Author = "Jon Skeet" }; + } +} + +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + .AddQueryType(); + } +} +``` + + + + +```csharp +public class Query +{ + public Book GetBook() + { + return new Book { Title = "C# in depth", Author = "Jon Skeet" }; + } +} + +public class QueryType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor + .Field(f => f.GetBook()) + .Type(); + } +} + +public class BookType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor + .Field(f => f.Title) + .Type(); + + descriptor + .Field(f => f.Author) + .Type(); + } +} + +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + .AddQueryType(); + } +} +``` + + + + +```csharp +public class Query +{ + public Book GetBook() + { + return new Book { Title = "C# in depth", Author = "Jon Skeet" }; + } +} + +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + .AddDocumentFromString(@" + type Query { + book: Book + } + + type Book { + title: String + author: String + } + ") + .BindRuntimeType() + .BindRuntimeType(); + } +} +``` + + + + +> ⚠️ Note: Only **one** query type can be registered using `AddQueryType()`. If we want to split up our query type into multiple classes, we can do so using type extensions. +> +> [Learn more about extending types](/docs/hotchocolate/v12/defining-a-schema/extending-types) + +A query type is just a regular object type, so everything that applies to an object type also applies to the query type (this is true for all root types). + +[Learn more about object types](/docs/hotchocolate/v12/defining-a-schema/object-types) diff --git a/website/src/docs/hotchocolate/v12/defining-a-schema/relay.md b/website/src/docs/hotchocolate/v12/defining-a-schema/relay.md new file mode 100644 index 00000000000..53386aeab28 --- /dev/null +++ b/website/src/docs/hotchocolate/v12/defining-a-schema/relay.md @@ -0,0 +1,548 @@ +--- +title: "Relay" +--- + +import { ExampleTabs, Annotation, Code, Schema } from "../../../components/mdx/example-tabs" + +> Note: Even though they originated in Relay, the design principles described in this document are not exclusive to Relay. They lead to an overall better schema design, which is why we recommend them to **all** users of Hot Chocolate. + +[Relay](https://relay.dev) is a JavaScript framework for building data-driven React applications with GraphQL, which is developed and used by _Facebook_. + +As part of a specification Relay proposes some schema design principles for GraphQL servers in order to more efficiently fetch, refetch and cache entities on the client. In order to get the most performance out of Relay our GraphQL server needs to abide by these principles. + +[Learn more about the Relay GraphQL Server Specification](https://relay.dev/docs/guides/graphql-server-specification) + + + +# Global identifiers + +If an output type contains an `id: ID!` field, [Relay](https://relay.dev) and other GraphQL clients will consider this the unique identifier of the entity and might use it to construct a flat cache. This can be problematic, since we could have the same identifier for two of our types. 
When using a database for example, a `Foo` and `Bar` entity could both contain a row with the identifier `1` in their respective tables. + +We could try and enforce unique identifiers for our Ids. Still, as soon as we introduce another data source to our schema, we might be facing identifier collisions between entities of our various data sources. + +Fortunately there is an easier, more integrated way to go about solving this problem in Hot Chocolate: Global identifiers. + +With Global Identifiers, Hot Chocolate adds a middleware that automatically serializes our identifiers to be unique within the schema. The concern of globally unique identifiers is therefore kept separate from our business domain and we can continue using the "real" identifiers within our business code, without worrying about uniqueness for a client. + +## Usage in Output Types + +Id fields can be opted in to the global identifier behavior using the `ID` middleware. + +Hot Chocolate automatically combines the value of fields annotated as `ID` with another value to form a global identifier. Per default, this additional value is the name of the type the Id belongs to. Since type names are unique within a schema, this ensures that we are returning a unique Id within the schema. If our GraphQL server serves multiple schemas, the schema name is also included in this combined Id. The resulting Id is then Base64 encoded to make it opaque. + + + + +```csharp +public class Product +{ + [ID] + public int Id { get; set; } +} +``` + +If no arguments are passed to the `[ID]` attribute, it will use the name of the output type, in this case `Product`, to serialize the Id. + +The `[ID]` attribute can be used on primary key fields and on fields that act as foreign keys. For these, we have to specify the name of the type they are referencing manually. In the below example, a type named `Foo` is being referenced using its Id. + +```csharp +[ID("Foo")] +public int FooId { get; set; } +``` + + + + +```csharp +public class Product +{ + public string Id { get; set; } +} + +public class ProductType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor.Field(f => f.Id).ID(); + } +} +``` + +If no arguments are passed to `ID()`, it will use the name of the output type, in this case `Product`, to serialize the Id. + +The `ID()` can not only be used on primary key fields but also on fields that act as foreign keys. For these, we have to specify the name of the type they are referencing manually. In the below example, a type named `Foo` is being referenced using its Id. + +```csharp +descriptor.Field(f => f.FooId).ID("Foo"); +``` + + + + +The approach of either Annotation-based or Code-first can be used in conjunction with Schema-first. + + + + +The type of fields specified as `ID` is also automatically rewritten to the ID scalar. + +[Learn more about the ID scalar](/docs/hotchocolate/v12/defining-a-schema/scalars#id) + +## Usage in Input Types + +If our `Product` output type returns a serialized Id, all arguments and fields on input object types, accepting a `Product` Id, need to be able to interpret the serialized Id. +Therefore we also need to define them as `ID`, in order to deserialize the serialized Id to the actual Id. + + + + +```csharp +public class Query +{ + public Product GetProduct([ID] int id) + { + // Omitted code for brevity + } +} +``` + +In input object types we can use the `[ID]` attribute on specific fields. 
+ +```csharp +public class ProductInput +{ + [ID] + public int ProductId { get; set; } +} +``` + +Per default all serialized Ids are accepted. If we want to only accept Ids that have been serialized for the `Product` output type, we can specify the type name as argument to the `[ID]` attribute. + +```csharp +public Product GetProduct([ID(nameof(Product))] int id) +``` + +This will result in an error if an Id, serialized using a different type name than `Product`, is used as input. + + + + +```csharp +descriptor + .Field("product") + .Argument("id", a => a.Type>().ID()) + .Type() + .Resolve(context => + { + var id = context.ArgumentValue("id"); + + // Omitted code for brevity + }); +``` + +> Note: `ID()` can only be used on fields and arguments with a concrete type. Otherwise type modifiers like non-null or list can not be correctly rewritten. + +In input object types we can use `ID()` on specific fields. + +```csharp +descriptor + .Field("id") + .Type>() + .ID(); +``` + +Per default all serialized Ids are accepted. If we want to only accept Ids that have been serialized for the `Product` output type, we can specify the type name as argument to `ID()`. + +```csharp +.Argument("id", a => a.Type>().ID(nameof(Product))) +``` + +This will result in an error if an Id, serialized using a different type name than `Product`, is used as input. + + + + +The approach of either Annotation-based or Code-first can be used in conjunction with Schema-first. + + + + +## Id Serializer + +Unique (or global) Ids are generated using the `IIdSerializer`. We can access it like any other service and use it to serialize or deserialize global Ids ourselves. + +```csharp +public class Query +{ + public string Example([Service] IIdSerializer serializer) + { + string serializedId = serializer.Serialize(null, "Product", "123"); + + IdValue deserializedIdValue = serializer.Deserialize(serializedId); + object deserializedId = deserializedIdValue.Value; + + // Omitted code for brevity + } +} +``` + +The `Serialize()` method takes the schema name as a first argument, followed by the type name and lastly the actual Id. + +[Learn more about accessing services](/docs/hotchocolate/v12/fetching-data/resolvers#injecting-services) + +# Global Object Identification + +Global Object Identification, as the name suggests, is about being able to uniquely identify an object within our schema. Moreover, it allows consumers of our schema to refetch an object in a standardized way. This capability allows client applications, such as [Relay](https://relay.dev), to automatically refetch types. + +To identify types that can be refetched, a new `Node` interface type is introduced. + +```sdl +interface Node { + id: ID! +} +``` + +Implementing this type signals to client applications, that the implementing type can be refetched. Implementing it also enforces the existence of an `id` field, a unique identifier, needed for the refetch operation. + +To refetch the types implementing the `Node` interface, a new `node` field is added to the query. + +```sdl +type Query { + node(id: ID!): Node +} +``` + +While it is not part of the specification, it is recommended to add the ability for plural fetches. That's why Hot Chocolate adds a `nodes` field allowing us to refetch multiple objects in one round trip. + +```sdl +type Query { + node(id: ID!): Node + nodes(ids: [ID!]!): [Node]! +} +``` + +## Usage + +In Hot Chocolate we can enable Global Object Identification, by calling `AddGlobalObjectIdentification()` on the `IRequestExecutorBuilder`. 
+
+```csharp
+public class Startup
+{
+    public void ConfigureServices(IServiceCollection services)
+    {
+        services
+            .AddGraphQLServer()
+            .AddGlobalObjectIdentification()
+            .AddQueryType<Query>();
+    }
+}
+```
+
+This registers the `Node` interface type and adds the `node(id: ID!): Node` and the `nodes(ids: [ID!]!): [Node]!` fields to our query type. At least one type in our schema needs to implement the `Node` interface or an exception is raised.
+
+> ⚠️ Note: Using `AddGlobalObjectIdentification()` in two upstream stitched services does not currently work out of the box.
+
+Next we need to extend our object types with the `Global Object Identification` functionality. Therefore three criteria need to be fulfilled:
+
+1. The type needs to implement the `Node` interface.
+2. On the type an `id` field needs to be present to properly implement the contract of the `Node` interface.
+3. A method responsible for refetching an object based on its `id` needs to be defined.
+
+To declare an object type as refetchable, we need to annotate it using the `[Node]` attribute. This in turn causes the type to implement the `Node` interface and, if present, automatically turns the `id` field into a [global identifier](#global-identifiers).
+
+There also needs to be a method, a _node resolver_, responsible for the actual refetching of the object. Assuming our class is called `Product`, Hot Chocolate looks for a static method with one of the following names:
+
+- `Get`
+- `GetAsync`
+- `GetProduct`
+- `GetProductAsync`
+
+The method is expected to have a return type of either `Product` or `Task<Product>`. Furthermore, the first argument of this method is expected to be of the same type as the `Id` property. At runtime Hot Chocolate will invoke this method with the `id` of the object that should be refetched. Special types, such as services, can be injected as arguments as well.
+
+```csharp
+[Node]
+public class Product
+{
+    public string Id { get; set; }
+
+    public static async Task<Product> Get(string id,
+        [Service] ProductService service)
+    {
+        Product product = await service.GetByIdAsync(id);
+
+        return product;
+    }
+}
+```
+
+If we need to influence the global identifier generation, we can annotate the `Id` property manually.
+
+```csharp
+[ID("Example")]
+public string Id { get; set; }
+```
+
+If the `Id` property of our class is not called `id`, we can either [rename it](/docs/hotchocolate/v12/defining-a-schema/object-types#naming) or specify the name of the property that should be the `id` field through the `[Node]` attribute. Hot Chocolate will then automatically rename this property to `id` in the schema to properly implement the contract of the `Node` interface.
+
+```csharp
+[Node(IdField = nameof(ProductId))]
+public class Product
+{
+    public string ProductId { get; set; }
+
+    // Omitted code for brevity
+}
+```
+
+If our _node resolver_ method doesn't follow the naming conventions laid out above, we can annotate it using the `[NodeResolver]` attribute to let Hot Chocolate know that this should be the method used for refetching the object.
+
+```csharp
+[NodeResolver]
+public static Product OtherMethod(string id)
+{
+    // Omitted code for brevity
+}
+```
+
+If we want to resolve the object using another class, we can reference the class/method like the following.
+ +```csharp +[Node(NodeResolverType = typeof(ProductNodeResolver), + NodeResolver = nameof(ProductNodeResolver.MethodName))] +public class Product +{ + public string ProductId { get; set; } +} + +public class ProductNodeResolver +{ + public static Product MethodName(string id) + { + // Omitted code for brevity + } +} +``` + +When placing the `Node` functionality in an extension type, it is important to keep in mind that the `[Node]` attribute needs to be defined on the class extending the original type. + +```csharp +[Node] +[ExtendObjectType(typeof(Product))] +public class ProductExtensions +{ + public Product GetProductAsync(string id) + { + // Omitted code for brevity + } +} +``` + +[Learn more about extending types](/docs/hotchocolate/v12/defining-a-schema/extending-types) + + + + +In the Code-first approach, we have multiple APIs on the `IObjectTypeDescriptor` to fulfill these criteria: + +- `ImplementsNode`: Implements the `Node` interface. +- `IdField`: Selects the property that represents the unique identifier of the object. +- `ResolveNode` / `ResolveNodeWith`: Method that refetches the object by its Id, also called the _node resolver_. If these methods are chained after `IdField`, they automatically infer the correct type of the `id` argument. + +```csharp +public class Product +{ + public string Id { get; set; } +} + +public class ProductType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor + .ImplementsNode() + .IdField(f => f.Id) + .ResolveNode(async (context, id) => + { + Product product = + await context.Service().GetByIdAsync(id); + + return product; + }); + } +} +``` + +> ⚠️ Note: When using middleware such as `UseDbContext` it needs to be chained after the `ResolveNode` call. The order of middleware still matters. + +If the `Id` property of our class is not called `id`, we can either [rename it](/docs/hotchocolate/v12/defining-a-schema/object-types#naming) or specify it through the `IdField` method on the `IObjectTypeDescriptor`. Hot Chocolate will then automatically rename this property to `id` in the schema to properly implement the contract of the `Node` interface. + +```csharp +public class Product +{ + public string ProductId { get; set; } +} + +public class ProductType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor + .ImplementsNode() + .IdField(f => f.ProductId) + .ResolveNode((context, id) => + { + // Omitted code for brevity + }); + } +} +``` + +In case we want to resolve the object using another class, we can do so using `ResolveNodeWith`. + +```csharp +public class ProductType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor + .ImplementsNode() + .IdField(f => f.ProductId) + .ResolveNodeWith(r => + r.GetProductAsync(default)); + } +} + +public class ProductNodeResolver +{ + public async Task GetProductAsync(string id) + { + // Omitted code for brevity + } +} +``` + + + + +The approach of either Annotation-based or Code-first can be used in conjunction with Schema-first. + + + + +Since node resolvers resolve entities by their Id, they are the perfect place to start utilizing DataLoaders. + +[Learn more about DataLoaders](/docs/hotchocolate/v12/fetching-data/dataloader) + +# Connections + +_Connections_ are a standardized way to expose pagination capabilities. 
+ +```sdl +type Query { + users(first: Int after: String last: Int before: String): UsersConnection +} + +type UsersConnection { + pageInfo: PageInfo! + edges: [UsersEdge!] + nodes: [User!] +} + +type UsersEdge { + cursor: String! + node: User! +} + +type PageInfo { + hasNextPage: Boolean! + hasPreviousPage: Boolean! + startCursor: String + endCursor: String +} +``` + +[Learn more about Connections](/docs/hotchocolate/v12/fetching-data/pagination#connections) + +# Query field in Mutation payloads + +It's a common best practice to return a payload type from mutations containing the affected entity as a field. + +```sdl +type Mutation { + likePost(id: ID!): LikePostPayload +} + +type LikePostPayload { + post: Post +} +``` + +This allows us to immediately process the affected entity in the client application responsible for the mutation. + +Sometimes a mutation might affect other parts of our application as well. Maybe the `likePost` mutation needs to update an Activity Feed. + +For this scenario, we can expose a `query` field on our payload type to allow the client application to fetch everything it needs to update its state in one round trip. + +```sdl +type LikePostPayload { + post: Post + query: Query +} +``` + +A resulting mutation request could look like the following. + +```graphql +mutation { + likePost(id: 1) { + post { + id + content + likes + } + query { + ...ActivityFeed_Fragment + } + } +} +``` + +## Usage + +Hot Chocolate allows us to automatically add this `query` field to all of our mutation payload types: + +```csharp +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + .AddQueryFieldToMutationPayloads(); + } +} +``` + +By default, this will add a field of type `Query` called `query` to each top-level mutation field type, whose name ends in `Payload`. + +Of course these defaults can be tweaked: + +```csharp +services + .AddGraphQLServer() + .AddQueryFieldToMutationPayloads(options => + { + options.QueryFieldName = "rootQuery"; + options.MutationPayloadPredicate = + (type) => type.Name.Value.EndsWith("Result"); + }); +``` + +This would add a field of type `Query` with the name of `rootQuery` to each top-level mutation field type, whose name ends in `Result`. + +> ⚠️ Note: This feature currently doesn't work on a stitching gateway, however this will be addressed in a future release focused on stitching. It's tracked as [#3158](https://github.com/ChilliCream/hotchocolate/issues/3158). diff --git a/website/src/docs/hotchocolate/v12/defining-a-schema/scalars.md b/website/src/docs/hotchocolate/v12/defining-a-schema/scalars.md new file mode 100644 index 00000000000..5d8fc5ef7cd --- /dev/null +++ b/website/src/docs/hotchocolate/v12/defining-a-schema/scalars.md @@ -0,0 +1,657 @@ +--- +title: "Scalars" +--- + +import { ExampleTabs, Annotation, Code, Schema } from "../../../components/mdx/example-tabs" + +Scalar types are the primitives of our schema and can hold a specific type of data. They are leaf types, meaning we cannot use e.g. `{ fieldname }` to further drill down into the type. The main purpose of a scalar is to define how a value is serialized and deserialized. + +Besides basic scalars like `String` and `Int`, we can also create custom scalars like `CreditCardNumber` or `SocialSecurityNumber`. These custom scalars can greatly enhance the expressiveness of our schema and help new developers to get a grasp of our API. + +# GraphQL scalars + +The GraphQL specification defines the following scalars. 
+ +## String + +```sdl +type Product { + description: String; +} +``` + +This scalar represents an UTF-8 character sequence. + +It is automatically inferred from the usage of the .NET [string type](https://docs.microsoft.com/dotnet/csharp/language-reference/builtin-types/reference-types#the-string-type). + +## Boolean + +```sdl +type Product { + purchasable: Boolean; +} +``` + +This scalar represent a Boolean value, which can be either `true` or `false`. + +It is automatically inferred from the usage of the .NET [bool type](https://docs.microsoft.com/dotnet/csharp/language-reference/builtin-types/bool). + +## Int + +```sdl +type Product { + quantity: Int; +} +``` + +This scalar represents a signed 32-bit numeric non-fractional value. + +It is automatically inferred from the usage of the .NET [int type](https://docs.microsoft.com/dotnet/api/system.int32). + +## Float + +```sdl +type Product { + price: Float; +} +``` + +This scalar represents double-precision fractional values, as specified by IEEE 754. + +It is automatically inferred from the usage of the .NET [float](https://docs.microsoft.com/dotnet/api/system.single) or [double type](https://docs.microsoft.com/dotnet/api/system.double). + +> Note: We introduced a separate `Decimal` scalar to handle `decimal` values. + +## ID + +```sdl +type Product { + id: ID!; +} +``` + +This scalar is used to facilitate technology-specific Ids, like `int`, `string` or `Guid`. + +It is **not** automatically inferred and the `IdType` needs to be [explicitly specified](/docs/hotchocolate/v12/defining-a-schema/object-types#explicit-types). + +`ID` values are always represented as a [String](#string) in client-server communication, but can be coerced to their expected type on the server. + + + + +```csharp +public class Product +{ + [GraphQLType(typeof(IdType))] + public int Id { get; set; } +} + +public class Query +{ + public Product GetProduct([GraphQLType(typeof(IdType))] int id) + { + // Omitted code for brevity + } +} +``` + + + + +```csharp +public class Product +{ + public int Id { get; set; } +} + +public class ProductType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor.Name("Product"); + + descriptor.Field(f => f.Id).Type(); + } +} + +public class QueryType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor.Name(OperationTypeNames.Query); + + descriptor + .Field("product") + .Argument("id", a => a.Type()) + .Type() + .Resolve(context => + { + var id = context.ArgumentValue("id"); + + // Omitted code for brevity + }); + } +} +``` + + + + +```csharp +public class Product +{ + public int Id { get; set; } +} + +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + .AddDocumentFromString(@" + type Query { + product(id: ID): Product + } + + type Product { + id: ID + } + ") + .BindRuntimeType() + .AddResolver("Query", "product", context => + { + var id = context.ArgumentValue("id"); + + // Omitted code for brevity + }); + } +} +``` + + + + +Notice how our code uses `int` for the `Id`, but in a request / response it would be serialized as a `string`. This allows us to switch the CLR type of our `Id`, without affecting the schema and our clients. + +# GraphQL Community Scalars + +The website https://www.graphql-scalars.com/ hosts specifications for GraphQL scalars defined by the community. 
The community scalars use the `@specifiedBy` directive to point to the spec that is implemented. + +```sdl +scalar UUID @specifiedBy(url: "https://tools.ietf.org/html/rfc4122") +``` + +## DateTime Type + +A custom GraphQL scalar which represents an exact point in time. This point in time is specified by having an offset to UTC and does not use time zone. + +The DateTime scalar is based RFC3339. + +```sdl +scalar DateTime @specifiedBy(url: "https://www.graphql-scalars.com/date-time/") +``` + + + +# .NET Scalars + +In addition to the scalars defined by the specification, Hot Chocolate also supports the following set of scalar types: + +| Type | Description | +| ----------- | ------------------------------------------------------------ | +| `Byte` | Byte | +| `ByteArray` | Base64 encoded array of bytes | +| `Short` | Signed 16-bit numeric non-fractional value | +| `Long` | Signed 64-bit numeric non-fractional value | +| `Decimal` | .NET Floating Point Type | +| `Url` | Url | +| `Date` | ISO-8601 date | +| `TimeSpan` | ISO-8601 duration | +| `Uuid` | GUID | +| `Any` | This type can be anything, string, int, list or object, etc. | + +## Uuid Type + +The `Uuid` scalar supports the following serialization formats. + +| Specifier | Format | +| ----------- | -------------------------------------------------------------------- | +| N (default) | 00000000000000000000000000000000 | +| D | 00000000-0000-0000-0000-000000000000 | +| B | {00000000-0000-0000-0000-000000000000} | +| P | (00000000-0000-0000-0000-000000000000) | +| X | {0x00000000,0x0000,0x0000,{0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00}} | + +The `UuidType` will always return the value in the specified format. In case it is used as an input type, it will first try to parse the result in the specified format. If the parsing does not succeed, it will try to parse the value in other formats. + +To change the default format we have to register the `UuidType` with the specfier on the schema: + +```csharp +services + .AddGraphQLServer() + .AddType(new UuidType('D')); +``` + +## Any Type + +The `Any` scalar is a special type that can be compared to `object` in C#. +`Any` allows us to specify any literal or return any output type. + +Consider the following type: + +```sdl +type Query { + foo(bar: Any): String +} +``` + +Since our field `foo` specifies an argument `bar` of type `Any` all of the following queries would be valid: + +```graphql +{ + a: foo(bar: 1) + b: foo(bar: [1, 2, 3, 4, 5]) + a: foo(bar: "abcdef") + a: foo(bar: true) + a: foo(bar: { a: "foo", b: { c: 1 } }) + a: foo(bar: [{ a: "foo", b: { c: 1 } }, { a: "foo", b: { c: 1 } }]) +} +``` + +The same goes for the output side. `Any` can return a structure of data although it is a scalar type. + +If we want to access the data we can either fetch data as an object or you can ask the context to provide it as a specific object. + +```csharp +object foo = context.ArgumentValue("bar"); +Foo foo = context.ArgumentValue("bar"); +``` + +We can also ask the context which kind the current argument is: + +```csharp +ValueKind kind = context.ArgumentKind("bar"); +``` + +The value kind will tell us by which kind of literal the argument is represented. + +> An integer literal can still contain a long value and a float literal could be a decimal but it also could just be a float. 
+ +```csharp +public enum ValueKind +{ + String, + Integer, + Float, + Boolean, + Enum, + Object, + Null +} +``` + +If we want to access an object dynamically without serializing it to a strongly typed model we can get it as `IReadOnlyDictionary` or as `ObjectValueNode`. + +Lists can be accessed generically by getting them as `IReadOnlyList` or as `ListValueNode`. + +# Additional Scalars + +We also offer a separate package with scalars for more specific usecases. + +To use these scalars we have to add the `HotChocolate.Types.Scalars` package. + +```bash +dotnet add package HotChocolate.Types.Scalars +``` + +> ⚠️ Note: All `HotChocolate.*` packages need to have the same version. + +**Available Scalars:** + +| Type | Description | +| ---------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------- | +| EmailAddress | Email address, represented as UTF-8 character sequences, as defined in [RFC5322](https://tools.ietf.org/html/rfc5322) | +| HexColor | HEX color code | +| Hsl | CSS HSL color as defined [here][1] | +| Hsla | CSS HSLA color as defined [here][1] | +| IPv4 | IPv4 address as defined [here](https://en.wikipedia.org/wiki/IPv4) | +| IPv6 | IPv6 address as defined in [RFC8064](https://tools.ietf.org/html/rfc8064) | +| Isbn | ISBN-10 or ISBN-13 number as defined [here](https://en.wikipedia.org/wiki/International_Standard_Book_Number) | +| Latitude | Decimal degrees latitude number | +| Longitude | Decimal degrees longitude number | +| LocalCurrency | Currency string | +| LocalDate | ISO date string, represented as UTF-8 character sequences yyyy-mm-dd, as defined in [RFC3339][2] | +| LocalTime | Local time string (i.e., with no associated timezone) in 24-hr `HH:mm:ss` | +| MacAddress | IEEE 802 48-bit (MAC-48/EUI-48) and 64-bit (EUI-64) Mac addresses, represented as UTF-8 character sequences, as defined in [RFC7042][3] and [RFC7043][4] | +| NegativeFloat | Double‐precision fractional value less than 0 | +| NegativeInt | Signed 32-bit numeric non-fractional with a maximum of -1 | +| NonEmptyString | Non empty textual data, represented as UTF‐8 character sequences with at least one character | +| NonNegativeFloat | Double‐precision fractional value greater than or equal to 0 | +| NonNegativeInt | Unsigned 32-bit numeric non-fractional value greater than or equal to 0 | +| NonPositiveFloat | Double‐precision fractional value less than or equal to 0 | +| NonPositiveInt | Signed 32-bit numeric non-fractional value less than or equal to 0 | +| PhoneNumber | A value that conforms to the standard E.164 format as defined [here](https://en.wikipedia.org/wiki/E.164) | +| PositiveInt | Signed 32‐bit numeric non‐fractional value of at least the value 1 | +| PostalCode | Postal code | +| Port | TCP port within the range of 0 to 65535 | +| Rgb | CSS RGB color as defined [here](https://developer.mozilla.org/docs/Web/CSS/color_value#rgb_colors) | +| Rgba | CSS RGBA color as defined [here](https://developer.mozilla.org/docs/Web/CSS/color_value#rgb_colors) | +| SignedByte | Signed 8-bit numeric non‐fractional value greater than or equal to -127 and smaller than or equal to 128. | +| UnsignedInt | Unsigned 32‐bit numeric non‐fractional value greater than or equal to 0 | +| UnsignedLong | Unsigned 64‐bit numeric non‐fractional value greater than or equal to 0 | +| UnsignedShort | Unsigned 16‐bit numeric non‐fractional value greater than or equal to 0 and smaller or equal to 65535. 
| +| UtcOffset | A value of format `±hh:mm` | + +[1]: https://developer.mozilla.org/docs/Web/CSS/color_value#hsl_colors +[2]: https://tools.ietf.org/html/rfc3339 +[3]: https://tools.ietf.org/html/rfc7042#page-19 +[4]: https://tools.ietf.org/html/rfc7043 + +Most of these scalars are built on top of native .NET types. An Email Address for example is represented as a `string`, but just returning a `string` from our resolver would result in Hot Chocolate interpreting it as a `StringType`. We need to explicitly specify that the returned type (`string`) should be treated as an `EmailAddressType`. + +```csharp +[GraphQLType(typeof(EmailAddressType))] +public string GetEmail() => "test@example.com"; +``` + +[Learn more about explicitly specifying GraphQL types](/docs/hotchocolate/v12/defining-a-schema/object-types#explicit-types) + +## NodaTime + +We also offer a package specifically for [NodaTime](https://github.com/nodatime/nodatime). + +It can be installed like the following. + +```bash +dotnet add package HotChocolate.Types.NodaTime +``` + +> ⚠️ Note: All `HotChocolate.*` packages need to have the same version. + +**Available Scalars:** + +| Type | Description | Example | +| -------------- | ----------------------------------------------------------------------------------------- | --------------------------------------------- | +| DateTimeZone | A [NodaTime DateTimeZone](https://nodatime.org/TimeZones) | `"Europe/Rome"` | +| Duration | A [NodaTime Duration](https://nodatime.org/3.0.x/userguide/duration-patterns) | `"-123:07:53:10.019"` | +| Instant | A [NodaTime Instant](https://nodatime.org/3.0.x/userguide/instant-patterns) | `"2020-02-20T17:42:59Z"` | +| IsoDayOfWeek | A [NodaTime IsoDayOfWeek](https://nodatime.org/3.0.x/api/NodaTime.IsoDayOfWeek.html) | `7` | +| LocalDate | A [NodaTime LocalDate](https://nodatime.org/3.0.x/userguide/localdate-patterns) | `"2020-12-25"` | +| LocalDateTime | A [NodaTime LocalDateTime](https://nodatime.org/3.0.x/userguide/localdatetime-patterns) | `"2020-12-25T13:46:78"` | +| LocalTime | A [NodaTime LocalTime](https://nodatime.org/3.0.x/userguide/localtime-patterns) | `"12:42:13.03101"` | +| OffsetDateTime | A [NodaTime OffsetDateTime](https://nodatime.org/3.0.x/userguide/offsetdatetime-patterns) | `"2020-12-25T13:46:78+02:35"` | +| OffsetDate | A [NodaTime OffsetDate](https://nodatime.org/3.0.x/userguide/offsetdate-patterns) | `"2020-12-25+02:35"` | +| OffsetTime | A [NodaTime OffsetTime](https://nodatime.org/3.0.x/userguide/offsettime-patterns) | `"13:46:78+02:35"` | +| Offset | A [NodeTime Offset](https://nodatime.org/3.0.x/userguide/offset-patterns) | `"+02:35"` | +| Period | A [NodeTime Period](https://nodatime.org/3.0.x/userguide/period-patterns) | `"P-3W3DT139t"` | +| ZonedDateTime | A [NodaTime ZonedDateTime](https://nodatime.org/3.0.x/userguide/zoneddatetime-patterns) | `"2020-12-31T19:40:13 Asia/Kathmandu +05:45"` | + +When returning a NodaTime type from one of our resolvers, for example a `NodaTime.Duration`, we also need to explicitly register the corresponding scalar type. In the case of a `NodaTime.Duration` this would be the `DurationType` scalar. + +```csharp +public class Query +{ + public Duration GetDuration() => Duration.FromMinutes(3); +} + +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + .AddQueryType() + .AddType(); + } +} +``` + +This package was originally developed by [@shoooe](https://github.com/shoooe). 
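The registration requirement works the same way when a NodaTime type is used as an argument. A minimal sketch, assuming the `LocalDateType` scalar listed in the table above follows the same registration pattern as `DurationType`:

```csharp
public class Query
{
    // The LocalDate argument arrives as an ISO date string (e.g. "2020-12-25")
    // and is parsed into a NodaTime LocalDate for us.
    public string GetDayOfWeek(LocalDate date) => date.DayOfWeek.ToString();
}

public class Startup
{
    public void ConfigureServices(IServiceCollection services)
    {
        services
            .AddGraphQLServer()
            .AddQueryType<Query>()
            // Without this registration the LocalDate argument cannot be bound.
            .AddType<LocalDateType>();
    }
}
```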
+ +# Binding behavior + +Hot Chocolate binds most of the native .NET types automatically. +A `System.String` is for example automatically mapped to a `StringType` in the schema. + +We can override these mappings by explicitly specifying type bindings. + +```csharp +services + .AddGraphQLServer() + .BindRuntimeType(); +``` + +Furthermore, we can also bind scalars to arrays or type structures: + +```csharp +services + .AddGraphQLServer() + .BindRuntimeType(); +``` + +Hot Chocolate only exposes the used scalars in the generated schema, keeping it simple and clean. + +# Custom Converters + +We can reuse existing scalar types and bind them to different runtime types by specifying converters. + +We could for example register converters between [NodaTime](https://nodatime.org/)'s `OffsetDateTime` and .NET's `DateTimeOffset` to reuse the existing `DateTimeType`. + +```csharp +public class Query +{ + public OffsetDateTime GetDateTime(OffsetDateTime offsetDateTime) + { + return offsetDateTime; + } +} + +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + .AddQueryType() + .BindRuntimeType() + .AddTypeConverter( + x => x.ToDateTimeOffset()) + .AddTypeConverter( + x => OffsetDateTime.FromDateTimeOffset(x)); + } +} +``` + +# Scalar Options + +Some scalars like `TimeSpan` or `Uuid` have options like their serialization format. + +We can specify these options by registering the scalar explictly. + +```csharp +services + .AddGraphQLServer() + .AddType(new UuidType('D')); +``` + +# Custom Scalars + +All scalars in Hot Chocolate are defined through a `ScalarType`. +The easiest way to create a custom scalar is to extend `ScalarType`. +This base class already includes basic serialization and parsing logic. + +```csharp +public sealed class CreditCardNumberType : ScalarType +{ + private readonly ICreditCardValidator _validator; + + // we can inject services that have been registered + // with the DI container + public CreditCardNumberType(ICreditCardValidator validator) + : base("CreditCardNumber") + { + _validator = validator; + + Description = "Represents a credit card number"; + } + + // is another StringValueNode an instance of this scalar + protected override bool IsInstanceOfType(StringValueNode valueSyntax) + => IsInstanceOfType(valueSyntax.Value); + + // is another string .NET type an instance of this scalar + protected override bool IsInstanceOfType(string runtimeValue) + => _validator.ValidateCreditCard(runtimeValue); + + public override IValueNode ParseResult(object? resultValue) + => ParseValue(resultValue); + + // define how a value node is parsed to the string .NET type + protected override string ParseLiteral(StringValueNode valueSyntax) + => valueSyntax.Value; + + // define how the string .NET type is parsed to a value node + protected override StringValueNode ParseValue(string runtimeValue) + => new StringValueNode(runtimeValue); + + public override bool TryDeserialize(object? resultValue, + out object? runtimeValue) + { + runtimeValue = null; + + if (resultValue is string s && _validator.ValidateCreditCard(s)) + { + runtimeValue = s; + return true; + } + + return false; + } + + public override bool TrySerialize(object? runtimeValue, + out object? resultValue) + { + resultValue = null; + + if (runtimeValue is string s && _validator.ValidateCreditCard(s)) + { + resultValue = s; + return true; + } + + return false; + } +} +``` + +By extending `ScalarType` we have full control over serialization and parsing. 
+ +```csharp +public class CreditCardNumberType : ScalarType +{ + private readonly ICreditCardValidator _validator; + + public CreditCardNumberType(ICreditCardValidator validator) + : base("CreditCardNumber") + { + _validator = validator; + + Description = "Represents a credit card number"; + } + + // define which .NET type represents your type + public override Type RuntimeType { get; } = typeof(string); + + // define which value nodes this type can be parsed from + public override bool IsInstanceOfType(IValueNode valueSyntax) + { + if (valueSyntax == null) + { + throw new ArgumentNullException(nameof(valueSyntax)); + } + + return valueSyntax is StringValueNode stringValueNode && + _validator.ValidateCreditCard(stringValueNode.Value); + } + + // define how a value node is parsed to the native .NET type + public override object ParseLiteral(IValueNode valueSyntax, + bool withDefaults = true) + { + if (valueSyntax is StringValueNode stringLiteral && + _validator.ValidateCreditCard(stringLiteral.Value)) + { + return stringLiteral.Value; + } + + throw new SerializationException( + "The specified value has to be a credit card number in the format " + + "XXXX XXXX XXXX XXXX", + this); + } + + // define how the .NET type is parsed to a value node + public override IValueNode ParseValue(object? runtimeValue) + { + if (runtimeValue is string s && + _validator.ValidateCreditCard(s)) + { + return new StringValueNode(null, s, false); + } + + throw new SerializationException( + "The specified value has to be a credit card number in the format " + + "XXXX XXXX XXXX XXXX", + this); + } + + public override IValueNode ParseResult(object? resultValue) + { + if (resultValue is string s && + _validator.ValidateCreditCard(s)) + { + return new StringValueNode(null, s, false); + } + + throw new SerializationException( + "The specified value has to be a credit card number in the format " + + "XXXX XXXX XXXX XXXX", + this); + } + + public override bool TrySerialize(object? runtimeValue, + out object? resultValue) + { + resultValue = null; + + if (runtimeValue is string s && + _validator.ValidateCreditCard(s)) + { + resultValue = s; + return true; + } + + return false; + } + + public override bool TryDeserialize(object? resultValue, + out object? runtimeValue) + { + runtimeValue = null; + + if (resultValue is string s && + _validator.ValidateCreditCard(s)) + { + runtimeValue = s; + return true; + } + + return false; + } +} +``` + +The implementation of [Hot Chocolate's own scalars](https://github.com/ChilliCream/hotchocolate/tree/main/src/HotChocolate/Core/src/Types.Scalars) can be used as a reference for writing custom scalars. diff --git a/website/src/docs/hotchocolate/v12/defining-a-schema/subscriptions.md b/website/src/docs/hotchocolate/v12/defining-a-schema/subscriptions.md new file mode 100644 index 00000000000..6f89b9c5dc7 --- /dev/null +++ b/website/src/docs/hotchocolate/v12/defining-a-schema/subscriptions.md @@ -0,0 +1,293 @@ +--- +title: "Subscriptions" +--- + +import { ExampleTabs, Annotation, Code, Schema } from "../../../components/mdx/example-tabs" + +The subscription type in GraphQL is used to add real-time capabilities to our applications. Clients can subscribe to events and receive the event data in real-time, as soon as the server publishes it. + +```sdl +type Subscription { + bookAdded: Book! + bookPublished(author: String!): Book! +} +``` + +Subscribing to an event is like writing a standard query. The only difference is the operation keyword and that we are only allowed to have one root field. 
+ +```graphql +subscription { + bookAdded { + title + } +} +``` + +Hot Chocolate implements subscriptions via WebSockets and uses the pub/sub approach of [Apollo](https://www.apollographql.com/docs/apollo-server/data/subscriptions/#the-pubsub-class) for triggering subscriptions. + +# Usage + +A subscription type can be defined like the following. + + + + +```csharp +public class Subscription +{ + [Subscribe] + public Book BookAdded([EventMessage] Book book) => book; +} + +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + .AddSubscriptionType(); + } + + // Omitted code for brevity +} +``` + + + + +```csharp +public class SubscriptionType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor + .Field("bookAdded") + .Type() + .Resolve(context => context.GetEventMessage()) + .Subscribe(async context => + { + var receiver = context.Service(); + + ISourceStream stream = + await receiver.SubscribeAsync("bookAdded"); + + return stream; + }); + } +} + + +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + .AddSubscriptionType(); + } + + // Omitted code for brevity +} +``` + + + + +```csharp +public class Subscription +{ + [Subscribe] + public Book BookAdded([EventMessage] Book book) => book; +} + +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + .AddDocumentFromString(@" + type Subscription { + bookAdded: Book! + } + + type Book { + title: String + author: String + } + ") + .BindRuntimeType(); + } + + // Omitted code for brevity +} +``` + + + + +> ⚠️ Note: Only **one** subscription type can be registered using `AddSubscriptionType()`. If we want to split up our subscription type into multiple classes, we can do so using type extensions. +> +> [Learn more about extending types](/docs/hotchocolate/v12/defining-a-schema/extending-types) + +A subscription type is just a regular object type, so everything that applies to an object type also applies to the subscription type (this is true for all all root types). + +[Learn more about object types](/docs/hotchocolate/v12/defining-a-schema/object-types) + +# Transport + +After defining the subscription type, we need to add the WebSockets middleware to our request pipeline. + +```csharp +public class Startup +{ + public void Configure(IApplicationBuilder app, IWebHostEnvironment env) + { + app.UseRouting(); + + app.UseWebSockets(); + + app.UseEndpoints(endpoints => + { + endpoints.MapGraphQL(); + }); + } + + // Omitted code for brevity +} +``` + +To make pub/sub work, we also have to register a subscription provider. A subscription provider represents a pub/sub implementation used to handle events. Out of the box we support two subscription providers. + +## In-Memory Provider + +The In-Memory subscription provider does not need any configuration and is easily setup. + +```csharp +services.AddInMemorySubscriptions(); +``` + +## Redis Provider + +The Redis subscription provider enables us to run multiple instances of our Hot Chocolate GraphQL server and handle subscription events reliably. + +In order to use the Redis provider we have to add the `HotChocolate.Subscriptions.Redis` package. + +```bash +dotnet add package HotChocolate.Subscriptions.Redis +``` + +> ⚠️ Note: All `HotChocolate.*` packages need to have the same version. 
After we have added the package, we can set up the Redis subscription provider.

```csharp
services.AddRedisSubscriptions((sp) =>
    ConnectionMultiplexer.Connect("host:port"));
```

Our Redis subscription provider uses the [StackExchange.Redis](https://github.com/StackExchange/StackExchange.Redis) Redis client underneath.

# Publishing Events

To publish events and trigger subscriptions, we can use the `ITopicEventSender`. The `ITopicEventSender` is an abstraction for the registered event publishing provider. Using this abstraction allows us to seamlessly switch between subscription providers when necessary.

Most of the time we will be publishing events for successful mutations. Therefore we can simply inject the `ITopicEventSender` into our mutations like we would with any other service. Of course we can not only publish events from mutations, but from anywhere we have access to the `ITopicEventSender` through the DI container.

```csharp
public class Mutation
{
    public async Task<Book> AddBook(Book book, [Service] ITopicEventSender sender)
    {
        await sender.SendAsync("BookAdded", book);

        // Omitted code for brevity
    }
}
```

In this example `"BookAdded"` is the topic we want to publish to, and `book` is our payload. Even though we have used a string as the topic, we do not have to. Any other type works just fine.

But where is the connection between `"BookAdded"` as a topic and the subscription type? By default, Hot Chocolate will try to map the topic to a field of the subscription type. If we want to make this binding less error-prone, we could do the following.

```csharp
await sender.SendAsync(nameof(Subscription.BookAdded), book);
```

If we do not want to use the method name, we could use the `Topic` attribute.

```csharp
public class Subscription
{
    [Subscribe]
    [Topic("ExampleTopic")]
    public Book BookAdded([EventMessage] Book book) => book;
}

public async Task<Book> AddBook(Book book, [Service] ITopicEventSender sender)
{
    await sender.SendAsync("ExampleTopic", book);

    // Omitted code for brevity
}
```

## Dynamic Topics

We can even use the `Topic` attribute on dynamic arguments of the subscription field.

```csharp
public class Subscription
{
    [Subscribe]
    public Book BookPublished([Topic] string author, [EventMessage] Book book)
        => book;
}

public async Task<Book> PublishBook(Book book, [Service] ITopicEventSender sender)
{
    await sender.SendAsync(book.Author, book);

    // Omitted code for brevity
}
```

## ITopicEventReceiver

If more complex topics are required, we can use the `ITopicEventReceiver`.

```csharp
public class Subscription
{
    [SubscribeAndResolve]
    public ValueTask<ISourceStream<Book>> BookPublished(string author,
        [Service] ITopicEventReceiver receiver)
    {
        var topic = $"{author}_PublishedBook";

        return receiver.SubscribeAsync<string, Book>(topic);
    }
}

public async Task<Book> PublishBook(Book book, [Service] ITopicEventSender sender)
{
    await sender.SendAsync($"{book.Author}_PublishedBook", book);

    // Omitted code for brevity
}
```

If we do not want to mix the subscription logic with our resolver, we can also use the `With` argument on the `Subscribe` attribute to specify a separate method that handles the event subscription.
+ +```csharp +public class Subscription +{ + public ValueTask> SubscribeToBooks( + [Service] ITopicEventReceiver receiver) + => receiver.SubscribeAsync("ExampleTopic"); + + [Subscribe(With = nameof(SubscribeToBooks))] + public Book BookAdded([EventMessage] Book book) + => book; +} +``` diff --git a/website/src/docs/hotchocolate/v12/defining-a-schema/unions.md b/website/src/docs/hotchocolate/v12/defining-a-schema/unions.md new file mode 100644 index 00000000000..a10067eabcc --- /dev/null +++ b/website/src/docs/hotchocolate/v12/defining-a-schema/unions.md @@ -0,0 +1,196 @@ +--- +title: "Unions" +--- + +import { ExampleTabs, Annotation, Code, Schema } from "../../../components/mdx/example-tabs" + +A union type represents a set of object types. It is very similar to an [interface](/docs/hotchocolate/v12/defining-a-schema/interfaces), except that there is no requirement for common fields between the specified types. + +```sdl +type TextContent { + text: String! +} + +type ImageContent { + imageUrl: String! + height: Int! +} + +union PostContent = TextContent | ImageContent +``` + +Clients can query fields returning a union like the following. + +```graphql +{ + content { + ... on TextContent { + text + } + ... on ImageContent { + imageUrl + } + } +} +``` + +Learn more about unions [here](https://graphql.org/learn/schema/#union-types). + +# Usage + +Unions can be defined like the following. + + + + +We can use a marker interface to define object types as part of a union. + +```csharp +[UnionType("PostContent")] +public interface IPostContent +{ +} + +public class TextContent : IPostContent +{ + public string Text { get; set; } +} + +public class ImageContent : IPostContent +{ + public string ImageUrl { get; set; } + + public int Height { get; set; } +} + +public class Query +{ + public IPostContent GetContent() + { + // Omitted code for brevity + } +} + +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + .AddQueryType() + .AddType() + .AddType(); + } +} +``` + + + + +```csharp +public class TextContent +{ + public string Text { get; set; } +} + +public class TextContentType : ObjectType +{ +} + +public class ImageContent +{ + public string ImageUrl { get; set; } + + public int Height { get; set; } +} + +public class ImageContentType : ObjectType +{ +} + +public class PostContentType : UnionType +{ + protected override void Configure(IUnionTypeDescriptor descriptor) + { + descriptor.Name("PostContent"); + + // The object types that belong to this union + descriptor.Type(); + descriptor.Type(); + } +} + +public class Query +{ + public object GetContent() + { + // Omitted code for brevity + } +} + + +public class QueryType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor + .Field(f => f.GetContent(default)) + .Type(); + } +} +``` + +Since the types are already registered within the union, we do not have to register them again in our `Startup` class. 
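For completeness, a minimal registration for this code-first setup might look like the following sketch; only the query type and the union type are added explicitly, since the member types are discovered through the union's `Configure` method:

```csharp
public class Startup
{
    public void ConfigureServices(IServiceCollection services)
    {
        services
            .AddGraphQLServer()
            .AddQueryType<QueryType>()
            // TextContentType and ImageContentType do not need to be added here;
            // the union type above already references them.
            .AddType<PostContentType>();
    }
}
```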
+ +We can use a marker interface, as in the annotation-based approach, to type our union definition: `UnionType` + + + + +```csharp +public class TextContent +{ + public string Text { get; set; } +} + +public class ImageContent +{ + public string ImageUrl { get; set; } + + public int Height { get; set; } +} + +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + .AddDocumentFromString(@" + type Query { + content: PostContent + } + + type TextContent { + text: String! + } + + type ImageContent { + imageUrl: String! + height: Int! + } + + union PostContent = TextContent | ImageContent + ") + .BindRuntimeType() + .BindRuntimeType() + .AddResolver("Query", "content", (context) => + { + // Omitted code for brevity + }); + } +} +``` + + + diff --git a/website/src/docs/hotchocolate/v12/defining-a-schema/versioning.md b/website/src/docs/hotchocolate/v12/defining-a-schema/versioning.md new file mode 100644 index 00000000000..9c3004ff242 --- /dev/null +++ b/website/src/docs/hotchocolate/v12/defining-a-schema/versioning.md @@ -0,0 +1,75 @@ +--- +title: "Versioning" +--- + +import { ExampleTabs, Annotation, Code, Schema } from "../../../components/mdx/example-tabs" + +Whilst we could version our GraphQL API similar to REST, i.e. `/graphql/v1`, it is not a best practice and often unnecessary. + +Many changes to a GraphQL schema are non-breaking. We can freely add new types and extend existing types with new fields. This does not break existing queries. +However removing a field or changing its nullability does. + +Instead of removing a field immediately and possibly breaking existing consumers of our API, fields can be marked as deprecated in our schema. This signals to consumers that the field will be removed in the future and they need to adapt before then. + +```sdl +type Query { + users: [User] @deprecated("Use the `authors` field instead") + authors: [User] +} + +``` + +# Deprecating fields + +Fields can be deprecated like the following. + + + + +```csharp +public class Query +{ + [GraphQLDeprecated("Use the `authors` field instead")] + public User[] GetUsers() + { + // Omitted code for brevity + } +} +``` + + + + +```csharp +public class QueryType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor + .Field("users") + .Deprecated("Use the `authors` field instead") + .Resolve(context => + { + // Omitted code for brevity + }); + } +} +``` + + + + +```csharp +services + .AddGraphQLServer() + .AddDocumentFromString(@" + type Query { + users: [User] @deprecated(""Use the `authors` field instead"") + } + "); +``` + + + + +> Note: It is currently not possible to deprecate input values, such as arguments. diff --git a/website/src/docs/hotchocolate/v12/distributed-schema/index.md b/website/src/docs/hotchocolate/v12/distributed-schema/index.md new file mode 100644 index 00000000000..3379f49da59 --- /dev/null +++ b/website/src/docs/hotchocolate/v12/distributed-schema/index.md @@ -0,0 +1,280 @@ +--- +title: "Distributed Schemas" +--- + +The strongly typed nature of a GraphQL schema makes it ideal for distribution. +It is possible to have specialized services that expose a GraphQL schema and bundle them together on a gateway. +There are two common approaches to schema distribution: schema stitching and federated schemas. + +Both approaches allow us to integrate services into one schema by folding types into one another and even renaming or removing parts. 
You can also extend types and delegate resolvers to a specific backend service.

With this, we can create a consistent GraphQL schema that hides the implementation details of our backend services and provides the consumer of our endpoint with the capability to fetch the data they need with one call, with no under- or over-fetching and, most importantly, no repeated fetching.

Every Hot Chocolate GraphQL server can be used for schema stitching or federations.
You can also use both approaches on the same gateway.
You can, for example, stitch third-party services and use federations for domain services.

_Schema of the Person Service_

```sdl
type Query {
  persons: Person
}

type Person {
  id: ID!
  name: String!
}
```

_Schema of the Address Service_

```sdl
type Query {
  addressOfPerson(personId: ID!): Address
}

type Address {
  street: String!
  city: String!
}
```

_Gateway Service_

```sdl
type Query {
  persons: Person
}

type Person {
  id: ID!
  name: String!
  address: Address
}

type Address {
  street: String!
  city: String!
}
```

# Schema Stitching

In the schema stitching scenario, the gateway schema is configured on the gateway itself.
The gateway pulls the schema from the downstream services and can be extended with SDL.
The benefit of this approach is that no configuration on the downstream services is needed.
With this approach, it is also possible to stitch third-party GraphQL services.

Gateway:

```sdl
extend type Person {
  address: Address @delegate(schema: "Address", path: "addressOfPerson(personId: $fields:id)")
}
```

```mermaid
graph TD;
    Gateway-->Address;
    Gateway-->Person;
```

# Federated Schema

The major difference between schema federation and schema stitching is where the extensions are stored.
In the federated approach, the schema extensions are defined on the federated service.
The domain services push their schema to a cache (like Redis).
The gateway is subscribed to changes from this cache and can hot-reload the schema if the configuration of a domain service changes.
It is also possible to set up federations in a pull setup.
In this setup, the downstream services expose a field on their schema to fetch the stitching information.
The gateway then fetches the federation information from the downstream services on startup.
With this setup, you do not need a Redis instance, but you lose the ability to hot-reload the schema when a downstream service changes.

The benefit of this approach is that the domain services define where they extend the gateway schema.
This might seem odd at first, and you could argue that a domain service should not have any knowledge of the gateway schema.
In practice, however, many companies see this as a feature rather than a limitation.
As the gateway schema grows, the configuration of the schema on the gateway with schema stitching becomes more and more complex.
A change to a downstream service then also requires a change to the gateway, which makes development and deployment a lot more complex.
If teams have sovereignty over different domain services, these teams can work completely independently when you use federations.
+ +Address Service: + +```sdl +extend type Person { + address: Address @delegate(schema: "Address", path:"addressOfPerson(personId: $fields:Id")) +} +``` + +```mermaid +graph TD; + Gateway-->Address; + Gateway-->Person; + Address-->Redis; + Person-->Redis; + Redis-->Gateway +``` + +# A real world example + +To showcase how schema stitching works and what the problems are let us assume we have a service like Twitter, where a user can post messages. + +Moreover, let us assume we have three teams working on internal micro-/domain-services that handle certain aspects of that service. + +The first service is handling the message stream and has the following schema: + +```sdl +type Query { + messages(userId: ID!): [Message!] + message(messageId: ID!): Message +} + +type Mutation { + newMessage(input: NewMessageInput!): NewMessagePayload! +} + +type Message { + id: ID! + text: String! + createdBy: ID! + createdAt: DateTime! + tags: [String!] +} + +type NewMessageInput { + text: String! + tags: [String!] +} + +type NewMessagePayload { + message: Message +} +``` + +The second service is handling the users of the services and has the following schema: + +```sdl +type Query { + user(userId: ID!): User! + users: [User!] +} + +type Mutation { + newUser(input: NewUserInput!): NewUserPayload! + resetPassword(input: ResetPasswordInput!): ResetPasswordPayload! +} + +type NewUserInput { + username: String! + password: String! +} + +type ResetPasswordInput { + username: String! + password: String! +} + +type NewUserPayload { + user: User +} + +type ResetPasswordPayload { + user: User +} + +type User { + id: ID! + username: String! +} +``` + +Last but not least we have a third service handling the message analytics. In our example case we keep it simple and our analytics service just tracks three different counters per message. The schema for this service looks like the following: + +```sdl +type Query { + analytics(messageId: ID!, type: CounterType!): MessageAnalytics +} + +type MessageAnalytics { + id: ID! + messageId: ID! + count: Int! + type: CounterType! +} + +enum CounterType { + VIEWS + LIKES + REPLIES +} +``` + +With those three separate schemas our UI team would have to fetch from multiple endpoints. + +Even worse for our UI team, in order to build a stream view that shows the message text and the name of the user who posted the message, they would have to first fetch all the messages and could only then fetch the names of the users. + +This is actually one of the very things GraphQL tries to solve. + +With schema stitching or federations the services can be bundled together into one schema: + +```sdl +type Query { + me: User! + messages(userId: ID!): [Message!] + message(messageId: ID!): Message + user(userId: ID!): User! + users: [User!] +} + +type Mutation { + newMessage(input: NewMessageInput!): NewMessagePayload! + newUser(input: NewUserInput!): NewUserPayload! + resetPassword(input: ResetPasswordInput!): ResetPasswordPayload! +} + +type Message { + id: ID! + text: String! + createdBy: User + createdById: ID! + createdAt: DateTime! + tags: [String!] + views: Int! + likes: Int! + replies: Int! +} + +type NewMessageInput { + text: String! + tags: [String!] +} + +type NewMessagePayload { + message: Message +} + +type NewUserInput { + username: String! + password: String! +} + +type ResetPasswordInput { + username: String! + password: String! +} + +type NewUserPayload { + user: User +} + +type ResetPasswordPayload { + user: User +} + +type User { + id: ID! + username: String! + messages: [Message!] 
+} +``` diff --git a/website/src/docs/hotchocolate/v12/distributed-schema/schema-configuration.md b/website/src/docs/hotchocolate/v12/distributed-schema/schema-configuration.md new file mode 100644 index 00000000000..f85e12b6a7b --- /dev/null +++ b/website/src/docs/hotchocolate/v12/distributed-schema/schema-configuration.md @@ -0,0 +1,537 @@ +--- +title: "Schema Configuration" +--- + +Schema stitching and federations do have a lot more potential than just merging root types. +You can remove and rename types and fields, extend types with new resolvers and delegate these resolvers to a domain service. + +# Schema Transformation + +## Rename Types + +The name of a GraphQL type has to be unique. +When you build a standalone GraphQL server, the schema validation will make sure that no name is duplicated. +In case a name is duplicated, an exception is thrown and the schema will not compile. + +This behaviour is good for the standalone server but can be an issue in distributed schemas. +Even with domain services covering domain-specific topics, a type may be duplicated. + +To avoid an invalid schema, Hot Chocolate will prefix duplicated types with the schema name and auto resolves name collisions if they are not structurally equal. + +Let us assume we have a product and an inventory service. Both define a type called `Category`: + +```sdl +type Category { + name: String +} +``` + +```sdl +type Category { + name: String + subCategories: [Category!]! +} +``` + +The collision resolver of Hot Chocolate will resolve the following on the stitching layer: + +```sdl +type Category @source(name: "Category", schema: "products") { + name: String! + subCategories: [Category!]! +} + +type inventory_Category @source(name: "Category", schema: "inventory") { + name: String! +} +``` + +Hot Chocolate allows you to rename types to avoid collision auto resolving: + +```sdl +type Category @source(name: "Category", schema: "inventory") { + name: String! +} + +type ProductCategory @source(name: "Category", schema: "products") { + name: String! + subCategories: [ProductCategory!]! +} +``` + +### Schema Stitching + +In schema stitching type renames can be defined on the gateway: + +```csharp +services + .AddGraphQLServer() + .AddRemoteSchema(Products) + .AddRemoteSchema(Inventiory) + .RenameType("Category","ProductCategory", Products); +``` + +### Schema Federations + +In a federated approach, type renames can be done on the domain service: + +```csharp +services + .AddSingleton(ConnectionMultiplexer.Connect("stitching-redis.services.local")) + .AddGraphQLServer() + .AddQueryType() + .InitializeOnStartup() + .PublishSchemaDefinition( + c => c + .SetName("products") + .RenameType("Category", "ProductCategory") + .AddTypeExtensionsFromFile("./Stitching.graphql") + .PublishToRedis( + "Demo", + sp => sp.GetRequiredService())); +``` + +## Rename Fields + +Similar to type names, also fields can collide. A type can only declare a field once. +When you bundle domain services together, multiple domain services may declare the same field on the query type. + +Let us assume we have a product and an inventory service. Both define a type field called `categories`: + +```sdl +type Query { + categories: [Category!]! +} +``` + +```sdl +type Query { + categories: [ProductCategory!]! +} +``` + +Hot Chocolate will autoresolve the nameing conflict by prefixing the field with the schema name: + +```sdl +type Query { + categories: [ProductCategory!]! @delegate(schema: "products") + inventory_categories: [Category!]! 
@delegate(schema: "inventory", path: "categories") +} +``` + +Hot Chocolate allows you to rename fields to avoid collision auto resolving: + +```sdl +type Query { + productCategories: [ProductCategory!]! @source(name: "categories", schema: "products") @delegate(schema: "products") + categories: [Category!]! @delegate(schema: "inventory") +} +``` + +### Schema Stitching + +In schema stitching field renames can be defined on the gateway: + +```csharp +services + .AddGraphQLServer() + .AddRemoteSchema(Products) + .AddRemoteSchema(Inventiory) + .RenameField("Query", "categories", "productCategories", schemaName: Products) +``` + +### Schema Federations + +In a federated approach, type renames can be done on the domain service: + +```csharp +services + .AddSingleton(ConnectionMultiplexer.Connect("stitching-redis.services.local")) + .AddGraphQLServer() + .AddQueryType() + .InitializeOnStartup() + .PublishSchemaDefinition( + c => c + .SetName("products") + .RenameField("Query", "categories", "productCategories") + .AddTypeExtensionsFromFile("./Stitching.graphql") + .PublishToRedis( + "Demo", + sp => sp.GetRequiredService())); +``` + +## Ignore Types + +By default, all types of remote schemas are added to the gateway schema. +This can produce types that are not reachable. +You can remove all not reachable types on the gateway: + +```csharp +services + .AddGraphQLServer() + .AddQueryType(d => d.Name("Query")) + .AddRemoteSchemasFromRedis("Demo", sp => sp.GetRequiredService()) + .ModifyOptions(x => x.RemoveUnreachableTypes = true) +``` + +If you want to remove a specific type from the schema you can also use `IgnoreType` + +### Schema Stitching + +```csharp +services + .AddGraphQLServer() + .AddRemoteSchema(Products) + .AddRemoteSchema(Inventiory) + .IgnoreType("Category", schemaName: Products); +``` + +### Schema Federations + +```csharp +services + .AddSingleton(ConnectionMultiplexer.Connect("stitching-redis.services.local")) + .AddGraphQLServer() + .AddQueryType() + .InitializeOnStartup() + .PublishSchemaDefinition( + c => c + .SetName("products") + .IgnoreType("Category") + .AddTypeExtensionsFromFile("./Stitching.graphql") + .PublishToRedis( + "Demo", + sp => sp.GetRequiredService())); +``` + +## Ignore Field + +Hot Chocolate has a convenience API to ignore fields of types. +This can be useful when you want to merge root fields of domain services, but ignore some specific fields + +### Schema Stitching + +```csharp +services + .AddGraphQLServer() + .AddRemoteSchema(Products) + .AddRemoteSchema(Inventiory) + .IgnoreField("Query", "categories", Products) + .IgnoreField("Query", "categories", Inventory); +``` + +# Delegation of Resolvers + +The real power of schema stitching is the delegation of resolvers. +You can extend types with fields and redirect calls to a domain service + +Let us assume we have a product and an inventory service. + +The product service defines the following types + +```sdl +type Product { + upc: Int! + name: String! + price: Int! + weight: Int! +} + +type Query { + products: [Product!]! +} +``` + +The inventory service defines the following types + +```sdl +type InventoryInfo { + upc: Int! + isInStock: bool +} + +type Query { + inventoryInfo(upc: Int!): InventoryInfo! + shippingEsitmate(price: Int!, weight: Int!): InventoryInfo! +} +``` + +Resolver delegation allows us to combine these schemas into one cohesive schema. 
+ +We can extend the product type with `inStock` and `shippingEstimate` + +```sdl +extend type Product { + inStock: Boolean @delegate(schema:"inventory", path: "inventoryInfo(upc: $fields:upc).isInStock") + shippingEstimate: Int @delegate(schema:"inventory", path: "shippingEstimate(price: $fields:price weight: $fields:weight)") +} +``` + +This results in the following schema: + +```sdl +type Product { + upc: Int! + name: String! + price: Int! + weight: Int! + inStock: Boolean + shippingEstimate: Int +} + +type Query { + products: [Product!]! +} +``` + +## Delegate Directive + +The `@delegate` directive describes where the remote data is found. + +```sdl +directive @delegate( + "The name of the schema to which this field shall be delegated to" + schema: String + "The path on the schema where delegation points to" + path: String! +) on FIELD_DEFINITION +``` + +The `path` argument can contain references to context data or fields. + +### Field Reference ($fields) + +```sdl +@delegate(path: "inventoryInfo(upc: $fields:upc).isInStock") +``` + +With the `$fields` variable, you can access fields of the type you extend. + +```sdl +type Product { + upc: Int! + name: String! +} + +extend type Product { + inStock: Boolean @delegate(schema:"inventory", path: "inventoryInfo(upc: $fields:upc).isInStock") +} +``` + +### Argument Reference ($arguments) + +```sdl +@delegate(path: "inventoryInfo(upc: $arguments:sku).isInStock") +``` + +With the `$fields` variable you can access fields of the type you extend. + +```sdl +extend type Query { + isProductInStock(sku:String!): Boolean @delegate(schema:"inventory", path: "inventoryInfo(upc: $arguments:upc)") +} +``` + +### Context Data Reference ($contextData) + +Every request contains context data. Context data can be set in resolvers or with a `IHttpRequestInterceptor` + +```sdl +extend type Query { + me: User! @delegate(schema: "users", path: "user(id: $contextData:UserId)") +} +``` + +**UseRequest** + +```csharp +services + .AddGraphQLServer() + .UseRequest(next => context => + { + context.ContextData["UserId"] = context.GetLoggedInUserId(); + return next(context); + }) + ... +``` + +**RequestInterceptor** + +```csharp +public class RequestInterceptor : DefaultHttpRequestInterceptor +{ + public ValueTask OnCreateAsync( + HttpContext context, + IRequestExecutor requestExecutor, + IQueryRequestBuilder requestBuilder, + CancellationToken cancellationToken) + { + string userId = context.GetLoggedInUserId(); + requestBuilder.SetProperty("UserId", userId); + + return base.OnCreateAsync( + context, requestExecutor, requestBuilder, cancellationToken); + } +} +``` + +```csharp +services + .AddGraphQLServer() + .AddHttpRequestInterceptor() + ... +``` + +**Static Context Data** +Context data can also be set directly on the schema builder. + +```csharp +services + .AddGraphQLServer() + .SetContextData("foo", "bar") + ... +``` + +### Scoped Context Data Reference ($scopedContext) + +Scoped context data can be set in a resolver and will be available in all resolvers in the subtree. +You have to use scoped context data when a resolver depends on a field that is higher up than just the parent. +You can use field middlewares to set scoped context data. + +Let's assume you have a message and account service. +The message holds a field `messageInfo` and knows the id of the creator of the message. +You want to extend the `messageInfo` with the user from the account service. + +**Schema** + +```sdl +type Message { + content: String! + createdById: ID! + messageInfo: MessageInfo! 
+} + +type MessageInfo { + createdAt: DateTime! +} +``` + +**Extensions** + +```sdl +extend type MessageInfo { + createdBy: User @delegate(schema:"accounts", path: "userById(upc: $scopedContextData:upc).isInStock") +} +``` + +**UseField** + +This middleware is executed for each field. + +```csharp +services + .AddGraphQLServer() + .UseField(next => async context => + { + if(context.Field.Type.NamedType() is ObjectType objectType && + objectType.Name.Equals("Message") && + context.Result is IDictionary data && + data.TryGetValue("createdById", out object value)) + { + context.ScopedContextData = + context.ScopedContextData.SetItem("createdById", value); + } + + await next.Invoke(context); + }) +``` + +**Type Interceptor** + +The middleware of `UseField` is executed on each field and created overhead. +It would be better if the middleware is only applied to the field that needs it. +You can use a schema interceptor to apply the middleware to the fields that use it. + +```csharp +public class MessageMiddlwareInterceptor : TypeInterceptor +{ + public override bool CanHandle(ITypeSystemObjectContext context) + { + return context.Type is INamedType { Name: { Value: "Message" } }; + } + public override void OnBeforeCompleteType( + ITypeCompletionContext completionContext, + DefinitionBase? definition, + IDictionary contextData) + { + if (definition is ObjectTypeDefinition otd) + { + var field = otd.Fields + .FirstOrDefault(x => x.Name.Value == "messageInfo"); + if (field is { } messageInfo) + { + messageInfo.MiddlewareComponents.Insert( + 0, + next => async context => + { + if(context.Result is IDictionary data && + data.TryGetValue("createdById", out object value)) + { + context.ScopedContextData = + context.ScopedContextData.SetItem("createdById", value); + } + + await next.Invoke(context); + }); + } + } + } +} +``` + +## Configuration + +You can configure the schema extensions either on the gateway or on the domain service if you use federations. +Type extensions can either be strings, files or resources + +- `AddTypeExtensionFromFile("./Stitching.graphql");` +- `AddTypeExtensionFromResource(assembly, key);` +- `AddTypeExtensionFromString("extend type Product {foo : String}");` + +### Schema Stitching + +**Gateway:** + +```csharp +services + .AddGraphQLServer() + .AddRemoteSchema(Products) + .AddRemoteSchema(Inventory) + // Adds a type extension. + .AddTypeExtensionsFromFile("./Stitching.graphql") +``` + +### Schema Federations + +**Inventory Domain Service:** + +```csharp +services + .AddSingleton(ConnectionMultiplexer.Connect("stitching-redis.services.local")) + .AddGraphQLServer() + .AddQueryType() + .InitializeOnStartup() + .PublishSchemaDefinition( + c => c + .SetName("inventory") + // Ignores the root types. This removes `inStock` and `shippingEsitmate` + // from the `Query` type of the Gateway + .IgnoreRootTypes() + // Adds a type extension. + .AddTypeExtensionsFromFile("./Stitching.graphql") + .PublishToRedis( + "Demo", + sp => sp.GetRequiredService())); +``` + +If you use the `@delegate` directive in federations you can omit the `schema:` argument. 
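For example, the inventory extension file from above could be reduced to the following sketch when it is published by the inventory domain service itself; the fields are the same as before, only the `schema:` argument is dropped:

```sdl
extend type Product {
  inStock: Boolean @delegate(path: "inventoryInfo(upc: $fields:upc).isInStock")
  shippingEstimate: Int @delegate(path: "shippingEstimate(price: $fields:price weight: $fields:weight)")
}
```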
diff --git a/website/src/docs/hotchocolate/v12/distributed-schema/schema-federations.md b/website/src/docs/hotchocolate/v12/distributed-schema/schema-federations.md new file mode 100644 index 00000000000..0e4680dd077 --- /dev/null +++ b/website/src/docs/hotchocolate/v12/distributed-schema/schema-federations.md @@ -0,0 +1,204 @@ +--- +title: "Schema Federations" +--- + +In schema federations, the extension points of the gateway schema are defined on the downstream services. +Therefore you need to configure federations in two places: the gateway schema and the downstream service. + +The schemas can either be pushed to a Redis cache and then pulled from the gateway or directly be pulled by the gateway from the downstream service. + +# Federation with Redis + +Hot Chocolate uses the Redis cache as a pub/sub system to signal changes on the downstream services. +With a cache, the gateway schema is also more stable and faster in bootstrapping, because it does not require to call all downstream services on startup. + +You will need to add a package reference to `HotChocolate.Stitching.Redis` to all your services: + +```bash +dotnet add package HotChocolate.Stitching.Redis +``` + +> ⚠️ Note: All `HotChocolate.*` packages need to have the same version. + +## Configuration of a domain service + +A domain service has to _publish the schema definition_. +The schema is published on the initialization of the schema. +By default, a schema is lazy and only initialized when the first request is sent. +You can also initialize the schema on startup with `IntitializeOnStartup`. +Every schema requires a unique name. This name is used in several places to reference the schema. +By calling `PublishSchemaDefinition` you can configure how the schema should be published. + +Schemas are published to Redis under a configuration name. The gateway is subscribed to this configuration. +All schemas that are registered under this name, will be discovered by the gateway + +```csharp +public void ConfigureServices(IServiceCollection services) +{ + services + // This is the connection multiplexer that redis will use + .AddSingleton(ConnectionMultiplexer.Connect("stitching-redis.services.local")) + .AddGraphQLServer() + .AddQueryType() + // We initialize the schema on startup so it is published to the redis as soon as possible + .InitializeOnStartup() + // We configure the publish definition + .PublishSchemaDefinition(c => c + // The name of the schema. This name should be unique + .SetName("accounts") + .PublishToRedis( + // The configuration name under which the schema should be published + "Demo", + // The connection multiplexer that should be used for publishing + sp => sp.GetRequiredService())); +} +``` + +## Configuration of the gateway + +The gateway needs HttpClients to fetch the data from the domain services. +You have to register them on the service collection. +The name of the HttpClient has to be the same as the name of the schema it refers to. +As you may use the schema names in several places, it is good practise to store them as constant. 
+ +```csharp +public static class WellKnownSchemaNames +{ + public const string Accounts = "accounts"; + public const string Inventory = "inventory"; + public const string Products = "products"; + public const string Reviews = "reviews"; +} +``` + +```csharp +services.AddHttpClient(Accounts, c => c.BaseAddress = new Uri("http://accounts.service.local/graphql")); +services.AddHttpClient(Inventory, c => c.BaseAddress = new Uri("http://inventory.service.local/graphql")); +services.AddHttpClient(Products, c => c.BaseAddress = new Uri("http://products.service.local/graphql")); +services.AddHttpClient(Reviews, c => c.BaseAddress = new Uri("http://reviews.service.local/graphql")); +``` + +The gateway is subscribed to the Redis cache. +As soon as the domain service has published its schema, the gateway grab the changes and update its own schema. + +```csharp +services + // This is the connection multiplexer that redis will use + .AddSingleton(ConnectionMultiplexer.Connect("stitching-redis.services.local")) + .AddGraphQLServer() + .AddRemoteSchemasFromRedis("Demo", sp => sp.GetRequiredService()); +``` + +## Example + +You can find a full schema federation example here [Federated Schema with Redis](https://github.com/ChilliCream/hotchocolate-examples/tree/master/misc/Stitching/federated-with-hot-reload) + +# Federation with schema polling + +You can also use federations without a Redis cache. In this case, you cannot hot reload the schema. +The configuration is very much the same as in Redis except the `PublishToRedis` part. +Your schema will expose an additional field. This field is used by the Gateway to fetch the schema definition. + +You will need to add a package reference to `HotChocolate.Stitching` to all your services: + +```cli +dotnet add package HotChocolate.Stitching +``` + +> ⚠️ Note: All `HotChocolate.*` packages need to have the same version. + +## Configuration of a domain service + +```csharp +public void ConfigureServices(IServiceCollection services) +{ + + services + .AddGraphQLServer() + .AddQueryType() + // We initialize the schema on startup so it is published to the redis as soon as possible + .InitializeOnStartup() + // We configure the publish definition + .PublishSchemaDefinition(c => c + // The name of the schema. This name should be unique + .SetName("accounts")); +} +``` + +## Configuration of the gateway + +With the polling approach, we need to make the schema aware of the domain services. +We can just add the schema with `AddRemoteSchema`. 
+ +```csharp +public static class WellKnownSchemaNames +{ + public const string Accounts = "accounts"; + public const string Inventory = "inventory"; + public const string Products = "products"; + public const string Reviews = "reviews"; +} +``` + +```csharp +public void ConfigureServices(IServiceCollection services) +{ + // register the http clients th + services.AddHttpClient(Accounts, c => c.BaseAddress = new Uri("http://accounts.service.local/graphql")); + services.AddHttpClient(Inventory, c => c.BaseAddress = new Uri("http://inventory.service.local/graphql")); + services.AddHttpClient(Products, c => c.BaseAddress = new Uri("http://products.service.local/graphql")); + services.AddHttpClient(Reviews, c => c.BaseAddress = new Uri("http://reviews.service.local/graphql")); + + services + .AddGraphQLServer() + // add the remote schemas + .AddRemoteSchema(Accounts) + .AddRemoteSchema(Inventory) + .AddRemoteSchema(Products) + .AddRemoteSchema(Reviews); +``` + +## Example + +You can find a full schema federation with polling example here [Federated Schema with polling](https://github.com/ChilliCream/hotchocolate-examples/tree/master/misc/Stitching/federated-with-pull) + +# Configuration + +By default, all the fields that are declared on `Mutation` and `Query` are exposed on the gateway. +In case the schema you do not want to expose the root fields and prefer to define the extension points in an extension file, you can also ignore the root types for a schema on the domain service. + +```csharp +public void ConfigureServices(IServiceCollection services) +{ + services + // This is the connection multiplexer that redis will use + .AddSingleton(ConnectionMultiplexer.Connect("stitching-redis.services.local")) + .AddGraphQLServer() + .AddQueryType() + .PublishSchemaDefinition(c => c + .SetName("accounts") + // Ignore the root types of accounts + .IgnoreRootTypes() + // Declares where the type extension is used + .AddTypeExtensionsFromFile("./Stitching.graphql") + .PublishToRedis( + // The configuration name under which the schema should be published + "Demo", + // The connection multiplexer that should be used for publishing + sp => sp.GetRequiredService())); +} +``` + +In case you choose to ignore the root types, make sure to add a `Query` and `Mutation` type to the gateway. +If there are no root types registered on the gateway the schema will be invalid. + +```csharp +services + // This is the connection multiplexer that redis will use + .AddSingleton(ConnectionMultiplexer.Connect("stitching-redis.services.local")) + .AddGraphQLServer() + .AddQueryType(d => d.Name("Query")) + .AddRemoteSchemasFromRedis("Demo", sp => sp.GetRequiredService()); +``` + +For further configuration with extension files, have a look at [Schema Configuration](/docs/hotchocolate/v12/distributed-schema/schema-configuration) diff --git a/website/src/docs/hotchocolate/v12/distributed-schema/schema-stitching.md b/website/src/docs/hotchocolate/v12/distributed-schema/schema-stitching.md new file mode 100644 index 00000000000..9fed6736a7b --- /dev/null +++ b/website/src/docs/hotchocolate/v12/distributed-schema/schema-stitching.md @@ -0,0 +1,65 @@ +--- +title: "Schema Stitching" +--- + +In the centralized approach of schema distribution, all the configuration is done on the gateway. + +Hot Chocolate uses the schema name as an identifier for schemas. This schema name is used to create HTTP clients and references the schema in various places. It is good practice to store these schema names as a constant. 
+
+You will need to add a package reference to `HotChocolate.Stitching` to your gateway:
+
+```bash
+dotnet add package HotChocolate.Stitching
+```
+
+> ⚠️ Note: All `HotChocolate.*` packages need to have the same version.
+
+```csharp
+public static class WellKnownSchemaNames
+{
+    public const string Accounts = "accounts";
+    public const string Inventory = "inventory";
+    public const string Products = "products";
+    public const string Reviews = "reviews";
+}
+```
+
+The schema names are used to create an `HttpClient` for each downstream service. You have to register these clients with `AddHttpClient`.
+
+```csharp
+services.AddHttpClient(Accounts, c => c.BaseAddress = new Uri("http://accounts.service.local/graphql"));
+services.AddHttpClient(Inventory, c => c.BaseAddress = new Uri("http://inventory.service.local/graphql"));
+services.AddHttpClient(Products, c => c.BaseAddress = new Uri("http://products.service.local/graphql"));
+services.AddHttpClient(Reviews, c => c.BaseAddress = new Uri("http://reviews.service.local/graphql"));
+```
+
+To make your schema aware of the downstream services, you have to add them to the schema with `AddRemoteSchema`.
+
+```csharp
+services
+    .AddGraphQLServer()
+    .AddRemoteSchema(Accounts)
+    .AddRemoteSchema(Inventory)
+    .AddRemoteSchema(Products)
+    .AddRemoteSchema(Reviews);
+```
+
+By default, all the fields that are declared on `Mutation` and `Query` are exposed on the gateway.
+In case you do not want to expose the root fields of a schema and prefer to define the extension points in an extension file, you can ignore the root types for that schema.
+
+```csharp
+services
+    .AddGraphQLServer()
+    .AddQueryType(d => d.Name("Query"))
+    .AddRemoteSchema(Accounts, ignoreRootTypes: true)
+    .AddRemoteSchema(Inventory, ignoreRootTypes: true)
+    .AddRemoteSchema(Products, ignoreRootTypes: true)
+    .AddRemoteSchema(Reviews, ignoreRootTypes: true)
+    .AddTypeExtensionsFromFile("./Stitching.graphql");
+```
+
+For further configuration with extension files, have a look at [Schema Configuration](/docs/hotchocolate/v12/distributed-schema/schema-configuration).
+
+# Example
+
+You can find a full schema stitching example here: [Centralized Schema Stitching](https://github.com/ChilliCream/hotchocolate-examples/tree/master/misc/Stitching/centralized)
diff --git a/website/src/docs/hotchocolate/v12/distributed-schema/subscriptions.md b/website/src/docs/hotchocolate/v12/distributed-schema/subscriptions.md
new file mode 100644
index 00000000000..95518dec65a
--- /dev/null
+++ b/website/src/docs/hotchocolate/v12/distributed-schema/subscriptions.md
@@ -0,0 +1,67 @@
+---
+title: "Subscriptions"
+---
+
+A Subscription type cannot be stitched from downstream services, so it must be defined directly in the gateway schema.
+
+> [Learn more about defining a Subscription type](/docs/hotchocolate/v12/defining-a-schema/subscriptions)
+
+> ⚠️ Note: Subscription stitching is coming in v13
+
+After adding a Subscription type to the gateway service, you may encounter an error when building the gateway schema.
+
+```
+1. The schema builder was unable to identify the query type of the schema. Either specify which type is the query type or set the schema builder to non-strict validation mode.
+```
+
+If you turn off strict validation and generate the schema, the `schema` element won't include a `query` field, despite a Query type being defined.
+
+```csharp
+services
+    .AddGraphQLServer()
+    .ModifyOptions(o =>
+    {
+        o.StrictValidation = false;
+    });
+```
+
+```sdl
+schema {
+  subscription: Subscription
+}
+
+type Query {
+  messages: [Message!]!
+}
+```
+
+To resolve this issue, use the schema options to specify the `QueryTypeName` and `MutationTypeName`.
+
+```csharp
+services
+    .AddGraphQLServer()
+    .ModifyOptions(o =>
+    {
+        o.QueryTypeName = "Query";
+        o.MutationTypeName = "Mutation";
+    });
+```
+
+Generating the schema again results in a valid schema.
+
+```sdl
+schema {
+  query: Query
+  subscription: Subscription
+}
+
+type Query {
+  messages: [Message!]!
+}
+
+type Subscription {
+  onMessagePosted: Message!
+}
+```
\ No newline at end of file
diff --git a/website/src/docs/hotchocolate/v12/execution-engine/field-middleware.md b/website/src/docs/hotchocolate/v12/execution-engine/field-middleware.md
new file mode 100644
index 00000000000..c699881f16c
--- /dev/null
+++ b/website/src/docs/hotchocolate/v12/execution-engine/field-middleware.md
@@ -0,0 +1,299 @@
+---
+title: Field middleware
+---
+
+The field middleware is one of the fundamental components in Hot Chocolate. It allows you to create reusable logic that can be run before or after a field resolver. Field middleware is composable, so you can specify multiple middleware and they will be executed in order. The field resolver is always the last element in this middleware chain.
+
+Each field middleware only knows about the next element in the chain and can choose to
+
+- execute logic before it
+- execute logic after all later components (including the field resolver) have been run
+- not execute the next component
+
+Each field middleware also has access to an `IMiddlewareContext`. It implements the `IResolverContext` interface, so you can use all of the `IResolverContext` APIs in your middleware, similarly to how you would use them in your resolver. There are also some special properties, like `Result`, which holds the result computed by the resolver or a middleware.
+
+# Middleware order
+
+If you have used Hot Chocolate's data middleware before, you might have encountered warnings about the order of middleware. The order is important, since it determines in which order the middleware are executed and therefore in which order the resolver result is processed.
+
+Take the `UsePagination` and `UseFiltering` middleware for example: Does it make sense to first paginate and then filter? No. The result should first be filtered and then paginated. That's why the correct order is `UsePagination` > `UseFiltering`.
+
+```csharp
+descriptor
+    .UsePagination()
+    .UseFiltering()
+    .Resolve(context =>
+    {
+        // Omitted code for brevity
+    });
+```
+
+But hold up, isn't this the opposite order of what we've just described?
+
+Let's visualize the middleware chain to understand why it is indeed the correct order.
+
+```mermaid
+sequenceDiagram
+    UsePagination->>UseFiltering: next(context)
+    UseFiltering->>Resolver: next(context)
+    Resolver->>UseFiltering: Result of the Resolver
+    UseFiltering->>UsePagination: Result of UseFiltering
+```
+
+As you can see, the result of the resolver flows backwards through the middleware chain. The middleware are invoked in the order they were defined, but the result produced by the last element, the field resolver, is passed back through the middleware in reverse order.
+
+# Definition
+
+Field middleware can be defined either as a delegate or as a separate type.
In both cases we gain access to a `FieldDelegate`, which allows us to invoke the next middleware, and the `IMiddlewareContext`.
+
+By awaiting the `FieldDelegate` we are waiting for the completion of all of the middleware that might come after the current middleware, including the actual field resolver.
+
+## Field middleware delegate
+
+A field middleware delegate can be defined using Code-first APIs.
+
+```csharp
+public class QueryType : ObjectType
+{
+    protected override void Configure(IObjectTypeDescriptor descriptor)
+    {
+        descriptor
+            .Field("example")
+            .Use(next => async context =>
+            {
+                // Code up here is executed before the following middleware
+                // and the actual field resolver
+
+                // This invokes the next middleware
+                // or if we are at the last middleware the field resolver
+                await next(context);
+
+                // Code down here is executed after all later middleware
+                // and the actual field resolver has finished executing
+            })
+            .Resolve(context =>
+            {
+                // Omitted for brevity
+            });
+    }
+}
+```
+
+### Reusing the middleware delegate
+
+As shown above, the middleware is only applied to the `example` field on the `Query` type, but what if you want to use this middleware in multiple places?
+
+You can simply create an extension method for the `IObjectFieldDescriptor`.
+
+```csharp
+public static class MyMiddlewareObjectFieldDescriptorExtension
+{
+    public static IObjectFieldDescriptor UseMyMiddleware(
+        this IObjectFieldDescriptor descriptor)
+    {
+        return descriptor
+            .Use(next => async context =>
+            {
+                // Omitted code for brevity
+
+                await next(context);
+
+                // Omitted code for brevity
+            });
+    }
+}
+```
+
+> Note: We recommend sticking to the convention of prepending `Use` to your extension method to indicate that it is applying a middleware.
+
+You can now use this middleware in different places throughout your schema definition.
+
+```csharp
+public class QueryType : ObjectType
+{
+    protected override void Configure(IObjectTypeDescriptor descriptor)
+    {
+        descriptor
+            .Field("example")
+            .UseMyMiddleware()
+            .Resolve(context =>
+            {
+                // Omitted for brevity
+            });
+    }
+}
+```
+
+## Field middleware as a class
+
+If you do not like using a delegate, you can also create a dedicated class for your middleware.
+
+```csharp
+public class MyMiddleware
+{
+    private readonly FieldDelegate _next;
+
+    public MyMiddleware(FieldDelegate next)
+    {
+        _next = next;
+    }
+
+    // this method must be called InvokeAsync or Invoke
+    public async Task InvokeAsync(IMiddlewareContext context)
+    {
+        // Code up here is executed before the following middleware
+        // and the actual field resolver
+
+        // This invokes the next middleware
+        // or if we are at the last middleware the field resolver
+        await _next(context);
+
+        // Code down here is executed after all later middleware
+        // and the actual field resolver has finished executing
+    }
+}
+```
+
+If you need to access services, you can either inject them via the constructor, if they are singletons, or as an argument of the `InvokeAsync` method, if they have a scoped or transient lifetime.
+
+```csharp
+public class MyMiddleware
+{
+    private readonly FieldDelegate _next;
+    private readonly IMySingletonService _singletonService;
+
+    public MyMiddleware(FieldDelegate next, IMySingletonService singletonService)
+    {
+        _next = next;
+        _singletonService = singletonService;
+    }
+
+    public async Task InvokeAsync(IMiddlewareContext context,
+        IMyScopedService scopedService)
+    {
+        // Omitted code for brevity
+    }
+}
+```
+
+The ability to add additional arguments to the `InvokeAsync` method is the reason why there isn't a contract like an interface or a base class for field middleware.
+
+### Usage
+
+Now that you've defined the middleware as a class, we still need to apply it to a field.
+
+```csharp
+public class QueryType : ObjectType
+{
+    protected override void Configure(IObjectTypeDescriptor descriptor)
+    {
+        descriptor
+            .Field("example")
+            .Use<MyMiddleware>()
+            .Resolve(context =>
+            {
+                // Omitted for brevity
+            });
+    }
+}
+```
+
+While an extension method like `UseMyMiddleware` on the `IObjectFieldDescriptor` doesn't make as much sense for `Use<MyMiddleware>` in contrast to the middleware delegate, we still recommend creating one as shown [here](#reusing-the-middleware-delegate). The reason is that you can make changes to this middleware more easily in the future, without potentially having to change all places this middleware is being used in.
+
+If you need to pass an additional custom argument to the middleware, you can do so using the factory overload of the `Use` method.
+
+```csharp
+descriptor
+    .Field("example")
+    .Use((provider, next) => new MyMiddleware(next, "custom",
+        provider.GetRequiredService<IMySingletonService>()));
+```
+
+# Usage as an attribute
+
+Up until now we have only worked with Code-first APIs to create the field middleware. What if you want to apply your middleware to a field resolver defined using the Annotation-based approach?
+
+You can create a new attribute inheriting from `ObjectFieldDescriptorAttribute` and call or create your middleware inside of the `OnConfigure` method.
+
+> Note: Attribute order is not guaranteed in C#, so in the case of middleware attributes we use the `CallerLineNumberAttribute` to inject the C# line number at compile time. The line number is used as the order. We do not recommend inheriting middleware attributes from a base method or property, since this can lead to confusion about ordering. Look at the example below to see how we infer the order. When inheriting from middleware attributes, always pass through the order argument. Further, indicate with the `Use` verb that your attribute is a middleware attribute.
+
+```csharp
+public class UseMyMiddlewareAttribute : ObjectFieldDescriptorAttribute
+{
+    public UseMyMiddlewareAttribute([CallerLineNumber] int order = 0)
+    {
+        Order = order;
+    }
+
+    public override void OnConfigure(IDescriptorContext context,
+        IObjectFieldDescriptor descriptor, MemberInfo member)
+    {
+        descriptor.UseMyMiddleware();
+    }
+}
+```
+
+The attribute can then be used like the following.
+
+```csharp
+public class Query
+{
+    [UseMyMiddleware]
+    public string MyResolver()
+    {
+        // Omitted code for brevity
+    }
+}
+```
+
+# Accessing the resolver result
+
+The `IMiddlewareContext` conveniently contains a `Result` property that can be used to access the field resolver result.
+
+```csharp
+descriptor
+    .Use(next => async context =>
+    {
+        await next(context);
+
+        // It only makes sense to access the result after calling
+        // next(context), i.e. after the field resolver and any later
+        // middleware has finished executing.
+        object? result = context.Result;
+
+        // If needed you can now narrow down the type of the result
+        // using pattern matching and continue with the typed result
+        if (result is string stringResult)
+        {
+            // Work with the stringResult
+        }
+    });
+```
+
+A middleware can also set or override the result by assigning the `context.Result` property.
+
+> Note: The field resolver will only execute if no result has been produced by one of the preceding field middleware. If any middleware has set the `Result` property on the `IMiddlewareContext`, the field resolver will be skipped.
+
+# Short-circuiting
+
+In some cases we might want to short-circuit the execution of field middleware / the field resolver. For this we can simply not call the `FieldDelegate` (`next`).
+
+```csharp
+descriptor
+    .Use(next => context =>
+    {
+        if (context.Parent<object>() is IDictionary<string, object> dict)
+        {
+            context.Result = dict[context.Field.Name];
+
+            // We are not executing any of the later middleware
+            // or the field resolver
+            return Task.CompletedTask;
+        }
+        else
+        {
+            return next(context);
+        }
+    });
+```
diff --git a/website/src/docs/hotchocolate/v12/execution-engine/index.md b/website/src/docs/hotchocolate/v12/execution-engine/index.md
new file mode 100644
index 00000000000..5210a7bd60c
--- /dev/null
+++ b/website/src/docs/hotchocolate/v12/execution-engine/index.md
@@ -0,0 +1,28 @@
+---
+title: Overview
+---
+
+In this section we will learn about the Hot Chocolate execution engine.
+
+# Request Middleware
+
+The GraphQL execution is abstracted into a request pipeline composed of many request middleware. Each request middleware represents one part of executing a GraphQL request, like parsing the GraphQL request document or validating it semantically.
+
+# Field middleware
+
+Field middleware allows us to create reusable logic that is run before or after a resolver. It also allows us to access or even modify the result produced by a resolver.
+
+[Learn more about field middleware](/docs/hotchocolate/v12/execution-engine/field-middleware)
+
+# Resolver Compiler
+
+The resolver compiler compiles an optimized resolver pipeline for each resolver. It can be customized by providing parameter expression builders.
+
diff --git a/website/src/docs/hotchocolate/v12/fetching-data/dataloader.md b/website/src/docs/hotchocolate/v12/fetching-data/dataloader.md
new file mode 100644
index 00000000000..cc0ac22fe4c
--- /dev/null
+++ b/website/src/docs/hotchocolate/v12/fetching-data/dataloader.md
@@ -0,0 +1,283 @@
+---
+title: "DataLoader"
+---
+
+import { ExampleTabs, Annotation, Code, Schema } from "../../../components/mdx/example-tabs"
+
+> If you want to read more about data loaders in general, you can head over to Facebook's [GitHub repository](https://github.com/facebook/dataloader).
+
+Every data fetching technology suffers from the _n+1_ problem.
+The difference between GraphQL and e.g. REST is that the _n+1_ problem occurs on the server, rather than on the client.
+The clear benefit is that we only have to deal with this problem once, on the server, rather than on every client.
+
+To depict the issue that data loaders solve in this context, let's assume we have this schema:
+
+```sdl
+type Query {
+  person(id: ID): Person
+}
+
+type Person {
+  id: ID
+  name: String
+  friends: [Person]
+}
+```
+
+The above schema allows us to fetch a person by their internal identifier, and each person has a list of friends that is represented by a list of persons.
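+
+For reference, here is a minimal sketch of a runtime type such a schema could be backed by (the exact model used by the examples below is assumed, not shown in the original):
+
+```csharp
+public class Person
+{
+    // Maps to the `id: ID` field
+    public string Id { get; set; }
+
+    // Maps to the `name: String` field
+    public string Name { get; set; }
+
+    // Identifiers of this person's friends, which a resolver could load via a data loader
+    public IReadOnlyList<string> FriendIds { get; set; }
+}
+```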
+
+A query against the above schema could look like the following:
+
+```graphql
+{
+  a: person(id: "a") {
+    name
+  }
+
+  b: person(id: "b") {
+    name
+  }
+}
+```
+
+The above request fetches two persons in one go without the need to call the backend twice. The problem on the GraphQL backend is that field resolvers are atomic and do not have any knowledge about the query as a whole. So, a field resolver does not know that it will be called multiple times in parallel to fetch similar or equal data from the same data source.
+
+The idea of a data loader is to batch these two requests into one call to the database.
+
+Let's look at some code to understand what data loaders are doing. First, let's have a look at how we would write our field resolver without data loaders:
+
+```csharp
+public async Task<Person> GetPerson(string id, [Service] IPersonRepository repository)
+{
+    return await repository.GetPersonById(id);
+}
+```
+
+The above example would result in two calls to the person repository that would then fetch the persons one by one from our data source.
+
+If you think that through, you see that each GraphQL request would cause multiple requests to our data source, resulting in sluggish performance and unnecessary round-trips.
+
+This means that we reduced the round-trips from our client to our server with GraphQL, but still have the round-trips between the data sources and the service layer.
+
+With data loaders we can now centralise the data fetching and reduce the number of round trips to our data source.
+
+Instead of fetching the data from the repository directly, we fetch the data from the data loader.
+The data loader batches all the requests together into one request to the database.
+
+```csharp
+// This is one way of implementing a data loader. You will find the different ways of declaring
+// data loaders further down the page.
+public class PersonBatchDataLoader : BatchDataLoader<string, Person>
+{
+    private readonly IPersonRepository _repository;
+
+    public PersonBatchDataLoader(
+        IPersonRepository repository,
+        IBatchScheduler batchScheduler,
+        DataLoaderOptions? options = null)
+        : base(batchScheduler, options)
+    {
+        _repository = repository;
+    }
+
+    protected override async Task<IReadOnlyDictionary<string, Person>> LoadBatchAsync(
+        IReadOnlyList<string> keys,
+        CancellationToken cancellationToken)
+    {
+        // instead of fetching one person, we fetch multiple persons
+        var persons = await _repository.GetPersonByIds(keys);
+        return persons.ToDictionary(x => x.Id);
+    }
+}
+
+public class Query
+{
+    public async Task<Person> GetPerson(
+        string id,
+        PersonBatchDataLoader dataLoader)
+        => await dataLoader.LoadAsync(id);
+}
+```
+
+# Execution
+
+With a data loader, you can fetch entities by a key.
+These are the two generics you have in the class data loaders:
+
+```csharp
+public class BatchDataLoader<TId, TEntity>
+```
+
+`TId` is used as an identifier of `TEntity`. `TId` is the type of the values you put into `LoadAsync`.
+
+The execution engine of Hot Chocolate tries to batch as much as possible.
+It executes resolvers until the queue is empty and then triggers the data loader to resolve the data for the waiting resolvers.
+
+# Data Consistency
+
+Data loaders do not only batch calls to the database, they also cache the database response.
+A data loader guarantees data consistency within a single request.
+If you load an entity with a data loader more than once in your request, it is guaranteed that these two entities are equivalent.
+
+Data loaders do not fetch an entity if there is already an entity with the requested key in the cache.
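+
+As a minimal sketch of this behavior (reusing the `PersonBatchDataLoader` from above; the resolver name is made up for illustration), loading the same key twice within one request results in only a single batch call:
+
+```csharp
+public class Query
+{
+    public async Task<Person> GetPersonTwice(
+        string id,
+        PersonBatchDataLoader dataLoader)
+    {
+        // Both calls use the same key, so only one batch request is sent
+        // to the data source; the second call is served from the
+        // per-request cache.
+        Person first = await dataLoader.LoadAsync(id);
+        Person second = await dataLoader.LoadAsync(id);
+
+        // Within a single request, both variables refer to the same
+        // cached entity.
+        return first;
+    }
+}
+```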
+ +# Types of Data loaders + +In Hot Chocolate you can declare data loaders in two different ways. +You can separate the data loading concern into separate classes or you can use a delegate in the resolver to define data loaders on the fly. +Below you will find the different types of data loaders with examples for class and delegate definition. + +## Batch DataLoader + +> One - To - One, usually used for fields like `personById` or one to one relations + +The batch data loader collects requests for entities and sends them as a batch request to the data source. Moreover, the data loader caches the retrieved entries within a request. + +The batch data loader gets the keys as `IReadOnlyList` and returns an `IReadOnlyDictionary`. + +### Class + +```csharp +public class PersonBatchDataLoader : BatchDataLoader +{ + private readonly IPersonRepository _repository; + + public PersonBatchDataLoader( + IPersonRepository repository, + IBatchScheduler batchScheduler, + DataLoaderOptions? options = null) + : base(batchScheduler, options) + { + _repository = repository; + } + + protected override async Task> LoadBatchAsync( + IReadOnlyList keys, + CancellationToken cancellationToken) + { + // instead of fetching one person, we fetch multiple persons + var persons = await _repository.GetPersonByIds(keys); + return persons.ToDictionary(x => x.Id); + } +} + + +public class Query +{ + public async Task GetPerson( + string id, + PersonBatchDataLoader dataLoader) + => await dataLoader.LoadAsync(id); +} +``` + +### Delegate + +```csharp +public Task GetPerson( + string id, + IResolverContext context, + [Service] IPersonRepository repository) +{ + return context.BatchDataLoader( + async (keys, ct) => + { + var result = await repository.GetPersonByIds(keys); + return result.ToDictionary(x => x.Id); + }) + .LoadAsync(id); +} +``` + +_An example with the **Batch DataLoader** can be found [here](https://github.com/ChilliCream/graphql-workshop/blob/master/code/complete/GraphQL/DataLoader/TrackByIdDataLoader.cs)._ + +## Group DataLoader + +> One - To - Many, usually used for fields like `personsByLastName` or one to many relations + +The group data loader is also a batch data loader but instead of returning one entity per key, it returns multiple entities per key. As with the batch data loader retrieved collections are cached within a request. + +The group data loader gets the keys as `IReadOnlyList` and returns an `ILookup`. + +### Class + +```csharp +public class PersonsByLastNameDataloader + : GroupedDataLoader +{ + private readonly IPersonRepository _repository; + + public PersonsByLastNameDataloader( + IPersonRepository repository, + IBatchScheduler batchScheduler, + DataLoaderOptions? 
options = null) + : base(batchScheduler, options) + { + _repository = repository; + } + + + protected override async Task> LoadGroupedBatchAsync( + IReadOnlyList names, + CancellationToken cancellationToken) + { + var persons = await _repository.GetPersonsByLastName(names); + return persons.ToLookup(x => x.LastName); + } +} + +public class Query +{ + public async Task> GetPersonByLastName( + string lastName, + PersonsByLastNameDataloader dataLoader) + => await dataLoader.LoadAsync(lastName); +} +``` + +### Delegate + +```csharp +public Task> GetPersonByLastName( + string lastName, + IResolverContext context, + [Service]IPersonRepository repository) +{ + return context.GroupDataLoader( + async (keys, ct) => + { + var result = await repository.GetPersonsByLastName(keys); + return result.ToLookup(t => t.LastName); + }) + .LoadAsync(lastName); +} +``` + +## Cache DataLoader + +> No batching, just caching. This data loader is used rarely. You most likely want to use the batch data loader. + +The cache data loader is the easiest to implement since there is no batching involved. You can just use the initial `GetPersonById` method. We do not get the benefits of batching with this one, but if in a query graph the same entity is resolved twice we will load it only once from the data source. + +```csharp +public Task GetPerson(string id, IResolverContext context, [Service]IPersonRepository repository) +{ + return context.CacheDataLoader("personById", keys => repository.GetPersonById(keys)).LoadAsync(id); +} +``` + +# Stacked DataLoader Calls + +This is more like an edge case that is supported than a certain type of data loader. Sometimes we have more complex resolvers that might first fetch data from one data loader and use that to fetch data from the next. + +```csharp +public Task> GetCustomers( + string personId, + PersonByIdDataLoader personByIdDataLoader, + CustomerByIdsDataLoader customerByIdsDataloader) +{ + Person person = await personByIdDataLoader.LoadAsync(personId); + return await customerByIdsDataloader.LoadAsync(person.CustomerIds); +} +``` diff --git a/website/src/docs/hotchocolate/v12/fetching-data/fetching-from-databases.md b/website/src/docs/hotchocolate/v12/fetching-data/fetching-from-databases.md new file mode 100644 index 00000000000..e8d6f2b788b --- /dev/null +++ b/website/src/docs/hotchocolate/v12/fetching-data/fetching-from-databases.md @@ -0,0 +1,165 @@ +--- +title: "Fetching from Databases" +--- + +import { ExampleTabs, Annotation, Code, Schema } from "../../../components/mdx/example-tabs" + +In this section, you find a simple example on how you can fetch data from a database and expose it as a GraphQL API. + +**Hot Chocolate is not bound to a specific database, pattern or architecture.** +[We do have a few integrations](/docs/hotchocolate/v12/integrations), that help with a variety of databases, though these are just additions on top of HotChocolate. +You can couple your business logic close to the GraphQL server, or cleanly decouple your domain layer from the GraphQL layer over abstractions. +The GraphQL server only knows its schema, types and resolvers, what you do in these resolvers and what types you expose, is up to you. + +In this example, we will directly fetch data from MongoDB in a resolver. + +# Setting up the Query + +The query type in a GraphQL schema is the root type. Each field defined on this type is available at the root of a query. +If a field is requested, the resolver of the field is called. +The data of this resolver is used for further execution. 
+If you return a scalar, value (e.g. `string`, `int` ...) the value is serialized and added to the response. +If you return an object, this object is the parent of the resolver in the subtree. + + + + +```csharp +// Query.cs +public class Query +{ + public Task GetBookById( + [Service] IMongoCollection collection, + Guid id) + { + return collection.Find(x => x.Id == id).FirstOrDefaultAsync(); + } +} + +// Book.cs +public class Book +{ + public string Title { get; set; } + + public string Author { get; set; } +} + +// Startup.cs +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + .AddQueryType(); + } + + // Omitted code for brevity +} +``` + + + + +```csharp +// Query.cs +public class Query +{ + public Task GetBookById( + [Service] IMongoCollection collection, + Guid id) + { + return collection.Find(x => x.Id == id).FirstOrDefaultAsync(); + } +} + +// QueryType.cs +public class QueryType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor + .Field(f => f.GetBookById(default!, default!)) + .Type(); + } +} + +// Book.cs +public class Book +{ + public string Title { get; set; } + + public string Author { get; set; } +} + +// BookType.cs +public class BookType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor + .Field(f => f.Title) + .Type(); + + descriptor + .Field(f => f.Author) + .Type(); + } +} + + +// Startup.cs +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + .AddQueryType(); + } + + // Omitted code for brevity +} +``` + + + + +```csharp +// Query.cs +public class Query +{ + public Task GetBookById( + [Service] IMongoCollection collection, + Guid id) + { + return collection.Find(x => x.Id == id).FirstOrDefaultAsync(); + } +} + +// Startup.cs +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + .AddDocumentFromString(@" + type Query { + bookById(id: Uuid): Book + } + + type Book { + title: String + author: String + } + ") + .BindRuntimeType(); + } + + // Omitted code for brevity +} +``` + + + diff --git a/website/src/docs/hotchocolate/v12/fetching-data/fetching-from-rest.md b/website/src/docs/hotchocolate/v12/fetching-data/fetching-from-rest.md new file mode 100644 index 00000000000..f13730552c0 --- /dev/null +++ b/website/src/docs/hotchocolate/v12/fetching-data/fetching-from-rest.md @@ -0,0 +1,260 @@ +--- +title: "Fetching from REST" +--- + +import { ExampleTabs, Annotation, Code, Schema } from "../../../components/mdx/example-tabs" + +In this section, we will cover how you can easily integrate a REST API into your GraphQL API. + +If you want to have an outlook into the upcoming native REST integration with Hot Chocolate 13 you can head over to YouTube and have a look. + + + +GraphQL has a strongly-typed type system and therefore also has to know the dotnet runtime types of the data it returns in advance. + +The easiest way to integrate a REST API is, to define an OpenAPI specification for it. +OpenAPI describes what data a REST endpoint returns. +You can automatically generate a dotnet client for this API and integrate it into your schema. + +# OpenAPI in .NET + +If you do not have an OpenAPI specification for your REST endpoint yet, you can easily add it to your API. 
+There are two major OpenAPI implementations in dotnet: [NSwag](http://nswag.org) and [Swashbuckle](https://github.com/domaindrivendev/Swashbuckle.AspNetCore). +Head over to the [official ASP.NET Core](https://docs.microsoft.com/aspnet/core/tutorials/web-api-help-pages-using-swagger) documentation to see how it is done. + +In this example, we will use [the official example of Swashbuckle](https://github.com/dotnet/AspNetCore.Docs/tree/main/aspnetcore/tutorials/web-api-help-pages-using-swagger/samples/3.0/TodoApi.Swashbuckle). +When you start this project, you can navigate to the [Swagger UI](http://localhost:5000/swagger). + +This REST API covers a simple Todo app. +We will expose `todos` and `todoById` in our GraphQL API. + +# Generating a client + +Every REST endpoint that supports OpenAPI, can easily be wrapped with a fully typed client. +Again, you have several options on how you generate your client. +You can generate your client from the OpenAPI specification of your endpoint, during build or even with external tools with GUI. +Have a look here and see what fits your use case the best: + +- [NSwag Code Generation](https://docs.microsoft.com/aspnet/core/tutorials/getting-started-with-nswag?tabs=visual-studio#code-generation) + +In this example, we will use the NSwag dotnet tool. +First, we need to create a tool manifest. +Switch to your GraphQL project and execute + +```bash +dotnet new tool-manifest +``` + +Then we install the NSwag tool + +```bash +dotnet tool install NSwag.ConsoleCore --version 13.10.9 +``` + +You then have to get the `swagger.json` from your REST endpoint + +```bash +curl -o swagger.json http://localhost:5000/swagger/v1/swagger.json +``` + +Now you can generate the client from the `swagger.json`. + +```bash +dotnet nswag swagger2csclient /input:swagger.json /classname:TodoService /namespace:TodoReader /output:TodoService.cs +``` + +The code generator generated a new file called `TodoService.cs`. +In this file, you will find the client for your REST API. + +The generated needs `Newtonsoft.Json`. +Make sure to also add this package by executing: + +```bash +dotnet add package Newtonsoft.Json +``` + +# Exposing the API + +You will have to register the client in the dependency injection of your GraphQL service. +To expose the API you can inject the generated client into your resolvers. 
+ + + + +```csharp +// Query.cs +public class Query +{ + public Task> GetTodosAsync( + [Service]TodoService service, + CancellationToken cancellationToken) + { + return service.GetAllAsync(cancellationToken); + } + + public Task GetTodoByIdAsync( + [Service]TodoService service, + long id, + CancellationToken cancellationToken) + { + return service.GetByIdAsync(id, cancellationToken); + } +} + +// Startup.cs +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services.AddHttpClient(); + services + .AddGraphQLServer() + .AddQueryType(); + } + + // Omitted code for brevity +} +``` + + + + +```csharp +// Query.cs +public class Query +{ + public Task> GetTodosAsync( + [Service]TodoService service, + CancellationToken cancellationToken) + { + return service.GetAllAsync(cancellationToken); + } + + public Task GetTodoByIdAsync( + [Service]TodoService service, + long id, + CancellationToken cancellationToken) + { + return service.GetByIdAsync(id, cancellationToken); + } +} + +// QueryType.cs +public class QueryType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor + .Field(f => f.GetTodoByIdAsync(default!, default!, default!)) + .Type(); + + descriptor + .Field(f => f.GetTodosAsync(default!, default!)) + .Type>(); + } +} + +// TodoType.cs +public class TodoType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor + .Field(f => f.Id) + .Type(); + + descriptor + .Field(f => f.Name) + .Type(); + + descriptor + .Field(f => f.IsComplete) + .Type(); + } +} + + +// Startup.cs +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + .AddQueryType(); + } + + // Omitted code for brevity +} +``` + + + + +```csharp +// Query.cs +public class Query +{ + public Task> GetTodosAsync( + [Service]TodoService service, + CancellationToken cancellationToken) + { + return service.GetAllAsync(cancellationToken); + } + + public Task GetTodoByIdAsync( + [Service]TodoService service, + long id, + CancellationToken cancellationToken) + { + return service.GetByIdAsync(id, cancellationToken); + } +} + +// Startup.cs +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + .AddDocumentFromString(@" + type Query { + todos: [TodoItem!]! + todoById(id: Uuid): TodoItem + } + + type TodoItem { + id: Long + name: String + isCompleted: Boolean + } + ") + .BindRuntimeType(); + } + + // Omitted code for brevity +} +``` + + + + +You can now head over to your Banana Cake Pop on your GraphQL Server (/graphql) and query todos: + +```graphql +{ + todoById(id: 1) { + id + isComplete + name + } + todos { + id + isComplete + name + } +} +``` diff --git a/website/src/docs/hotchocolate/v12/fetching-data/filtering.md b/website/src/docs/hotchocolate/v12/fetching-data/filtering.md new file mode 100644 index 00000000000..8b2ef0355f5 --- /dev/null +++ b/website/src/docs/hotchocolate/v12/fetching-data/filtering.md @@ -0,0 +1,740 @@ +--- +title: Filtering +--- + +import { ExampleTabs, Annotation, Code, Schema } from "../../../components/mdx/example-tabs" + +With Hot Chocolate filters, you can expose complex filter objects through your GraphQL API that translates to native database queries. The default filter implementation translates filters to expression trees that are applied to `IQueryable`. 
+Hot Chocolate by default will inspect your .NET model and infer the possible filter operations from it. +Filters use `IQueryable` (`IEnumerable`) by default, but you can also easily customize them to use other interfaces. + +The following type would yield the following filter operations: + +```csharp +public class Foo +{ + public string Bar { get; set; } +} +``` + +```sdl +input FooFilterInput { + and: [FooFilterInput!] + or: [FooFilterInput!] + name: StringOperationFilterInput +} + +input StringOperationFilterInput { + and: [StringOperationFilterInput!] + or: [StringOperationFilterInput!] + eq: String + neq: String + contains: String + ncontains: String + in: [String] + nin: [String] + startsWith: String + nstartsWith: String + endsWith: String + nendsWith: String +} +``` + +# Getting started + +Filtering is part of the `HotChocolate.Data` package. You can add the dependency with the `dotnet` cli + +```bash +dotnet add package HotChocolate.Data +``` + +> ⚠️ Note: All `HotChocolate.*` packages need to have the same version. + +To use filtering you need to register it on the schema: + +```csharp +services.AddGraphQLServer() + // Your schema configuration + .AddFiltering(); +``` + +Hot Chocolate will infer the filters directly from your .Net Model and then use a Middleware to apply filters to `IQueryable` or `IEnumerable` on execution. + + + + +```csharp +public class Query +{ + [UseFiltering] + public IQueryable GetUsers([Service] IUserRepository repository) + => repository.GetUsers(); +} +``` + + + + +```csharp +public class Query +{ + public IQueryable GetUsers([Service] IUserRepository repository) + => repository.GetUsers(); +} + +public class QueryType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor + .Field(f => f.GetUsers(default)) + .Type>>() + .UseFiltering(); + } +} +``` + + + + +⚠️ Schema-first does currently not support filtering! + + + + +> ⚠️ **Note:** If you use more than one middleware, keep in mind that **ORDER MATTERS**. The correct order is UsePaging > UseProjections > UseFiltering > UseSorting + +# Customization + +Under the hood, filtering is based on top of normal Hot Chocolate input types. You can easily customize them with a very familiar fluent interface. The filter input types follow the same `descriptor` scheme as you are used to from the normal input types. Just extend the base class `FilterInputType` and override the descriptor method. + +`IFilterInputTypeDescriptor` supports most of the methods of `IInputTypeDescriptor`. By default filters for all fields of the type are generated. +If you do want to specify the filters by yourself you can change this behavior with `BindFields`, `BindFieldsExplicitly` or `BindFieldsImplicitly`. +When fields are bound implicitly, meaning filters are added for all properties, you may want to hide a few fields. You can do this with `Ignore(x => Bar)`. +It is also possible to customize the GraphQL field of the operation further. You can change the name, add a description or directive. + +```csharp +public class UserFilterType : FilterInputType +{ + protected override void Configure( + IFilterInputTypeDescriptor descriptor) + { + descriptor.BindFieldsExplicitly(); + descriptor.Field(f => f.Name).Name("custom_name"); + } +} +``` + +If you want to limit the operations on a field, you need to declare you own operation type. 
+Given you want to only allow `eq` and `neq` on a string field, this could look like this + +```csharp {7} +public class UserFilterType : FilterInputType +{ + protected override void Configure( + IFilterInputTypeDescriptor descriptor) + { + descriptor.BindFieldsExplicitly(); + descriptor.Field(f => f.Name).Type(); + } +} + +public class CustomerOperationFilterInput : StringOperationFilterInput +{ + protected override void Configure(IFilterInputTypeDescriptor descriptor) + { + descriptor.Operation(DefaultFilterOperations.Equals).Type(); + descriptor.Operation(DefaultFilterOperations.NotEquals).Type(); + } +} +``` + +```sdl +input UserFilterInput { + and: [UserFilterInput!] + or: [UserFilterInput!] + name: CustomerOperationFilterInput +} + +input CustomerOperationFilterInput { + and: [CustomerOperationFilterInput!] + or: [CustomerOperationFilterInput!] + eq: String + neq: String +} +``` + +To apply this filter type we just have to provide it to the `UseFiltering` extension method with as the generic type argument. + + + + +```csharp +public class Query +{ + [UseFiltering(typeof(UserFilterType))] + public IQueryable GetUsers([Service] IUserRepository repository) + => repository.GetUsers(); +} +``` + + + + +```csharp +public class QueryType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor.Field(f => f.GetUsers(default)) + .Type>>(); + .UseFiltering() + } +} +``` + + + + +⚠️ Schema-first does currently not support filtering! + + + + +# "and" / "or" Filter + +There are two built in fields. + +- `and`: Every condition has to be valid +- `or` : At least one condition has to be valid + +Example: + +```graphql +query { + posts( + first: 5 + where: { + or: [{ title: { contains: "Doe" } }, { title: { contains: "John" } }] + } + ) { + edges { + node { + id + title + } + } + } +} +``` + +**⚠️ `or` does not work when you use it like this: ** + +```graphql +query { + posts( + first: 5 + where: { title: { contains: "John", or: { title: { contains: "Doe" } } } } + ) { + edges { + node { + id + title + } + } + } +} +``` + +In this case the filters are applied like `title.Contains("John") && title.Contains("Doe")` rather than `title.Contains("John") || title.Contains("Doe")` how you probably intended it. + +## Removing "and" / "or" + +If you do not want to expose `and` and `or` you can remove these fields with the descriptor API: + +```csharp +public class UserFilterType : FilterInputType +{ + protected override void Configure( + IFilterInputTypeDescriptor descriptor) + { + descriptor.AllowAnd(false).AllowOr(false); + } +} +``` + +# Filter Types + +## Boolean Filter + +Defined the filter operations of a `bool` field. + +```csharp +public class User +{ + public bool IsOnline { get; set; } +} + +public class Query +{ + [UseFiltering] + public IQueryable GetUsers([Service] IUserRepository repository) + => repository.GetUsers(); +} + +``` + +```sdl +type Query { + users(where: UserFilterInput): [User] +} + +input BooleanOperationFilterInput { + eq: Boolean + neq: Boolean +} + +input UserFilterInput { + and: [UserFilterInput!] + or: [UserFilterInput!] 
+ isOnline: BooleanOperationFilterInput +} +``` + +## Comparable Filter + +Defines filters for `IComparables` like: `bool`, `byte`, `shot`, `int`, `long`, `float`, `double` `decimal`, `Guid`, `DateTime`, `DateTimeOffset` and `TimeSpan` + +```csharp +public class User +{ + public int LoginAttempts { get; set; } +} + +public class Query +{ + [UseFiltering] + public IQueryable GetUsers([Service] IUserRepository repository) + => repository.GetUsers(); +} + +``` + +```sdl +type Query { + users(where: UserFilterInput): [User] +} + +input ComparableOperationInt32FilterInput { + eq: Int + neq: Int + in: [Int!] + nin: [Int!] + gt: Int + ngt: Int + gte: Int + ngte: Int + lt: Int + nlt: Int + lte: Int + nlte: Int +} + +input UserFilterInput { + and: [UserFilterInput!] + or: [UserFilterInput!] + loginAttempts: ComparableOperationInt32FilterInput +} +``` + +## String Filter + +Defines filters for `string` + +```csharp +public class User +{ + public string Name { get; set; } +} + +public class Query +{ + [UseFiltering] + public IQueryable GetUsers([Service] IUserRepository repository) + => repository.GetUsers(); +} + +``` + +```sdl +type Query { + users(where: UserFilterInput): [User] +} + +input StringOperationFilterInput { + and: [StringOperationFilterInput!] + or: [StringOperationFilterInput!] + eq: String + neq: String + contains: String + ncontains: String + in: [String] + nin: [String] + startsWith: String + nstartsWith: String + endsWith: String + nendsWith: String +} + +input UserFilterInput { + and: [UserFilterInput!] + or: [UserFilterInput!] + name: StringOperationFilterInput +} +``` + +## Enum Filter + +Defines filters for C# enums + +```csharp +public enum Role { + Default, + Moderator, + Admin +} + +public class User +{ + public Role Role { get; set; } +} + +public class Query +{ + [UseFiltering] + public IQueryable GetUsers([Service] IUserRepository repository) + => repository.GetUsers(); +} + +``` + +```sdl +type Query { + users(where: UserFilterInput): [User] +} + +input RoleOperationFilterInput { + eq: Role + neq: Role + in: [Role!] + nin: [Role!] +} + +input UserFilterInput { + and: [UserFilterInput!] + or: [UserFilterInput!] + kind: RoleOperationFilterInput +} +``` + +## Object Filter + +An object filter is generated for all nested objects. The object filter can also be used to filter over database relations. +For each nested object, filters are generated. + +```csharp +public class User +{ + public Address Address { get; set; } +} + +public class Address +{ + public string Street { get; set; } + + public bool IsPrimary { get; set; } +} + +public class Query +{ + [UseFiltering] + public IQueryable GetUsers([Service] IUserRepository repository) + => repository.GetUsers(); +} + +``` + +```sdl +type Query { + users(where: UserFilterInput): [User] +} + +input AddressFilterInput { + and: [AddressFilterInput!] + or: [AddressFilterInput!] + street: StringOperationFilterInput + isPrimary: BooleanOperationFilterInput +} + +input BooleanOperationFilterInput { + eq: Boolean + neq: Boolean +} + +input StringOperationFilterInput { + and: [StringOperationFilterInput!] + or: [StringOperationFilterInput!] + eq: String + neq: String + contains: String + ncontains: String + in: [String] + nin: [String] + startsWith: String + nstartsWith: String + endsWith: String + nendsWith: String +} + +input UserFilterInput { + and: [UserFilterInput!] + or: [UserFilterInput!] + address: AddressFilterInput +} +``` + +## List Filter + +List filters are generated for all nested enumerations. 
+
+```csharp
+public class User
+{
+    public string[] Roles { get; set; }
+
+    public IEnumerable<Address>
Addresses { get; set; } +} + +public class Address +{ + public string Street { get; set; } + + public bool IsPrimary { get; set; } +} + +public class Query +{ + [UseFiltering] + public IQueryable GetUsers([Service] IUserRepository repository) + => repository.GetUsers(); +} + +``` + +```sdl +type Query { + users(where: UserFilterInput): [User] +} + +input AddressFilterInput { + and: [AddressFilterInput!] + or: [AddressFilterInput!] + street: StringOperationFilterInput + isPrimary: BooleanOperationFilterInput +} + +input BooleanOperationFilterInput { + eq: Boolean + neq: Boolean +} + +input ListAddressFilterInput { + all: AddressFilterInput + none: AddressFilterInput + some: AddressFilterInput + any: Boolean +} + +input ListStringOperationFilterInput { + all: StringOperationFilterInput + none: StringOperationFilterInput + some: StringOperationFilterInput + any: Boolean +} + +input StringOperationFilterInput { + and: [StringOperationFilterInput!] + or: [StringOperationFilterInput!] + eq: String + neq: String + contains: String + ncontains: String + in: [String] + nin: [String] + startsWith: String + nstartsWith: String + endsWith: String + nendsWith: String +} + +input UserFilterInput { + and: [UserFilterInput!] + or: [UserFilterInput!] + roles: ListStringOperationFilterInput + addresses: ListAddressFilterInput +} +``` + +# Filter Conventions + +If you want to change the behavior filtering globally, you want to create a convention for your filters. The filter convention comes with a fluent interface that is close to a type descriptor. + +## Get Started + +To use a filter convention you can extend `FilterConvention` and override the `Configure` method. Alternatively, you can directly configure the convention over the constructor argument. +You then have to register your custom convention on the schema builder with `AddConvention`. +By default a new convention is empty. To add the default behaviour you have to add `AddDefaults`. + +```csharp +public class CustomConvention : FilterConvention +{ + protected override void Configure(IFilterConventionDescriptor descriptor) + { + descriptor.AddDefaults(); + } +} + +services.AddGraphQLServer() + .AddConvention(); +// or +services.AddGraphQLServer() + .AddConvention(new FilterConvention(x => + x.AddDefaults())) +``` + +Often you just want to extend the default behaviour of filtering. If this is the case, you can also use `FilterConventionExtension` + +```csharp +public class CustomConventionExtension : FilterConventionExtension +{ + protected override void Configure(IFilterConventionDescriptor descriptor) + { + // config + } +} + +services.AddGraphQLServer() + .AddConvention(); +// or +services.AddGraphQLServer() + .AddConvention(new FilterConventionExtension(x => + { + // config + })); +``` + +## Argument Name + +With the convention descriptor, you can easily change the argument name of the `FilterInputType`. + +**Configuration** + +```csharp +descriptor.ArgumentName("example_argument_name"); +``` + +**Result** + +```sdl +type Query { + users(example_argument_name: UserFilter): [User] +} +``` + +## Binding of FilterTypes + +`FilterInputType`'s **cannot** just be registered on the schema. You have to bind them to the runtime type on the convention. 
+ +**Configuration** + +```csharp +public class UserFilterInput : FilterInputType +{ + protected override void Configure( + IFilterInputTypeDescriptor descriptor) + { + descriptor.Field(x => x.Name).Description("This is the name"); + } +} + +public class CustomStringOperationFilterInput : StringOperationFilterInput +{ + protected override void Configure(IFilterInputTypeDescriptor descriptor) + { + descriptor + .Operation(DefaultFilterOperations.Equals) + .Type(); + descriptor + .Operation(DefaultFilterOperations.NotEquals) + .Type(); + } +} + +descriptor.BindRuntimeType(); +descriptor.BindRuntimeType(); +``` + +**Result** + +```sdl +type Query { + users(where: UserFilterInput): [User] +} + +type User { + name: String! +} + +input CustomStringOperationFilterInput { + and: [CustomStringOperationFilterInput!] + or: [CustomStringOperationFilterInput!] + eq: String + neq: String +} + +input UserFilterInput { + and: [UserFilterInput!] + or: [UserFilterInput!] + "This is the name" + name: CustomStringOperationFilterInput +} +``` + +## Extend FilterTypes + +Instead of defining your own operation type, you can also just change the configuration of the built +in ones. +You can use `Configure()` to alter the configuration of a type. + +```csharp + descriptor.Configure( + x => x.Operation(DefaultFilterOperations.Equals).Description("Equals")) +``` + +```sdl +input StringOperationFilterInput { + and: [StringOperationFilterInput!] + or: [StringOperationFilterInput!] + "Equals" + eq: String + neq: String + contains: String + ncontains: String + in: [String] + nin: [String] + startsWith: String + nstartsWith: String + endsWith: String + nendsWith: String +} +``` diff --git a/website/src/docs/hotchocolate/v12/fetching-data/index.md b/website/src/docs/hotchocolate/v12/fetching-data/index.md new file mode 100644 index 00000000000..f856310e78d --- /dev/null +++ b/website/src/docs/hotchocolate/v12/fetching-data/index.md @@ -0,0 +1,49 @@ +--- +title: Overview +--- + +In this section we will learn everything about fetching data with Hot Chocolate. + +# Resolvers + +Resolvers are the main building blocks when it comes to fetching data. Every field in our GraphQL schema is backed by such a resolver function, responsible for returning the field's value. Since a resolver is just a function, we can use it to retrieve data from a database, a REST service, or any other data source as needed. + +[Learn more about resolvers](/docs/hotchocolate/v12/fetching-data/resolvers) + +Even though we can connect Hot Chocolate to any data source, most of the time it will be either a database or a REST service. + +[Learn how to fetch data from a database](/docs/hotchocolate/v12/fetching-data/fetching-from-databases) + +[Learn how to fetch data from a REST service](/docs/hotchocolate/v12/fetching-data/fetching-from-rest) + +# DataLoader + +DataLoaders provide a way to deduplicate and batch requests to data sources. They can significantly improve the performance of our queries and ease the load on our data sources. + +[Learn more about DataLoaders](/docs/hotchocolate/v12/fetching-data/dataloader) + +# Pagination + +Hot Chocolate provides pagination capabilities out of the box. They allow us to expose pagination in a standardized way and can even take care of crafting the necessary pagination queries to our databases. 
+ +[Learn more about pagination](/docs/hotchocolate/v12/fetching-data/pagination) + +# Filtering + +When returning a list of entites, we often need to filter them using operations like `equals`, `contains`, `startsWith`, etc. Hot Chocolate takes away a lot of the boilerplate, by handling the generation of necessary input types and even translating the applied filters into native database queries. + +[Learn more about filtering](/docs/hotchocolate/v12/fetching-data/filtering) + +# Sorting + +Similar to filtering, Hot Chocolate can also autogenerate input types related to sorting. They allow us to specify by which fields and in which direction our entities should be sorted. These can also be translated into native database queries automatically. + +[Learn more about sorting](/docs/hotchocolate/v12/fetching-data/sorting) + +# Projections + +Projections allow Hot Chocolate to transform an incoming GraphQL query with a subselection of fields into an optimized database operation. + +For example, if the client only requests the `name` and `id` of a user in their GraphQL query, Hot Chocolate will only query the database for those two columns. + +[Learn more about projections](/docs/hotchocolate/v12/fetching-data/projections) diff --git a/website/src/docs/hotchocolate/v12/fetching-data/pagination.md b/website/src/docs/hotchocolate/v12/fetching-data/pagination.md new file mode 100644 index 00000000000..5173ffb2096 --- /dev/null +++ b/website/src/docs/hotchocolate/v12/fetching-data/pagination.md @@ -0,0 +1,933 @@ +--- +title: "Pagination" +--- + +import { ExampleTabs, Annotation, Code, Schema } from "../../../components/mdx/example-tabs"; + +Pagination is one of the most common problems that we have to solve when implementing our backend. Often, sets of data are too large to pass them directly to the consumer of our service. + +Pagination solves this problem by giving the consumer the ability to fetch a set in chunks. + +# Connections + +_Connections_ are a standardized way to expose pagination to clients. + +Instead of returning a list of entries, we return a _Connection_. + +```sdl +type Query { + users(first: Int after: String last: Int before: String): UsersConnection +} + +type UsersConnection { + pageInfo: PageInfo! + edges: [UsersEdge!] + nodes: [User!] +} + +type UsersEdge { + cursor: String! + node: User! +} + +type PageInfo { + hasNextPage: Boolean! + hasPreviousPage: Boolean! + startCursor: String + endCursor: String +} +``` + +You can learn more about this in the [GraphQL Cursor Connections Specification](https://relay.dev/graphql/connections.htm). + +> Note: _Connections_ are often associated with _cursor-based_ pagination, due to the use of a _cursor_. Nonetheless, since the specification describes the _cursor_ as opaque, it can be used to facilitate an _offset_ as well. + +## Definition + +Adding pagination capabilties to our fields is a breeze. All we have to do is add the `UsePaging` middleware. + + + + +```csharp +public class Query +{ + [UsePaging] + public IEnumerable GetUsers([Service] IUserRespository repository) + => repository.GetUsers(); +} +``` + + + + +```csharp +public class QueryType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor + .Field("users") + .UsePaging() + .Resolve(context => + { + var repository = context.Service(); + + return repository.GetUsers(); + }); + } +} +``` + + + + +In the Schema-first approach we define the resolver in the same way we would in the Annotation-based approach. 
+ +To make our life easier, we do not have to write out the _Connection_ types in our schema, we can simply return a list of our type, e.g. `[User]`. If the resolver for this field is annotated to use pagination, Hot Chocolate will automatically rewrite the field to return a proper _Connection_ type. + +```csharp +public class Query +{ + [UsePaging] + public IEnumerable GetUsers([Service] IUserRespository repository) + => repository.GetUsers(); +} + +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + .AddDocumentFromString(@" + type Query { + users : [User!]! + } + ") + .AddResolver(); + } +} +``` + + + + +For the `UsePaging` middleware to work, our resolver needs to return an `IEnumerable` or an `IQueryable`. The middleware will then apply the pagination arguments to what we have returned. In the case of an `IQueryable` this means that the pagination operations can be directly translated to native database queries. + +We also offer pagination integrations for some database technologies that do not use `IQueryable`. + +[Learn more about pagination providers](#providers) + +## Naming + +The name of the _Connection_ and Edge type is automatically inferred from the field name. If our field is called `users`, a `UsersConnection` and `UsersEdge` type is automatically generated. + +We can also specify a custom name for our _Connection_ like the following. + + + + +```csharp +public class Query +{ + [UsePaging(ConnectionName = "CustomUsers")] + public IEnumerable GetUsers([Service] IUserRespository repository) + { + // Omitted code for brevity + } +} +``` + + + + +```csharp +public class QueryType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor + .Field("users") + .UsePaging(connectionName: "CustomUsers") + .Resolve(context => + { + // Omitted code for brevity + }); + } +} +``` + + + + +Take a look at the Annotation-based or Code-first example. + + + + +The strings `Connection` and `Edge` are automatically appended to this user specified value to form the names of the _Connection_ and Edge types. + +## Options + +We can define a number of options on a per-field basis. + + + + +In the Annotation-based approach we can define these options using properties on the `[UsePaging]` attribute. + +```csharp +[UsePaging(MaxPageSize = 100)] +``` + + + + +In the Code-first approach we can pass an instance of `PagingOptions` to the `UsePaging` middleware. + +```csharp +descriptor.Field("users").UsePaging(options: new PagingOptions +{ + MaxPageSize = 100 +}); +``` + + + + +Take a look at the Annotation-based or Code-first example. + + + + +[Learn more about the possible PagingOptions](#pagingoptions) + +## Changing the node type + +Lets say we are returning a collection of `string` from our pagination resolver, but we want these `string` to be represented in the schema using the `ID` scalar. + +For this we can specifically tell the `UsePaging` middleware, which type to use in the schema for representation of the returned CLR type. 
+ + + + +```csharp +public class Query +{ + [UsePaging(typeof(IdType))] + public IEnumerable GetIds() + { + // Omitted code for brevity + } +} +``` + + + + +```csharp +public class QueryType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor + .Field("ids") + .UsePaging() + .Resolve(context => + { + // Omitted code for brevity + }); + } +} +``` + + + + +Take a look at the Annotation-based or Code-first example.. + + + + +The same applies of course, if we are returning a collection of `User` from our pagination resolver, but we want to use the `UserType` for representation in the schema. + +## Custom pagination logic + +If we need more control over the pagination process we can do so, by returning a `Connection`. + + + + +```csharp +public class Query +{ + [UsePaging] + public Connection GetUsers(string? after, int? first, string sortBy) + { + // get users using the above arguments + IEnumerable users = null; + + var edges = users.Select(user => new Edge(user, user.Id)) + .ToList(); + var pageInfo = new ConnectionPageInfo(false, false, null, null); + + var connection = new Connection(edges, pageInfo, + ct => ValueTask.FromResult(0)); + + return connection; + } +} +``` + + + + +```csharp +public class QueryType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor + .Field("users") + .UsePaging() + .Argument("sortBy", a => a.Type>()) + .Resolve(context => + { + var after = context.ArgumentValue("after"); + var first = context.ArgumentValue("first"); + var sortBy = context.ArgumentValue("sortBy"); + + // get users using the above arguments + IEnumerable users = null; + + var edges = users.Select(user => new Edge(user, user.Id)) + .ToList(); + var pageInfo = new ConnectionPageInfo(false, false, null, null); + + var connection = new Connection(edges, pageInfo, + ct => ValueTask.FromResult(0)); + + return connection; + }); + } +} +``` + + + + +Take a look at the Annotation-based or Code-first example. + + + + +## Adding fields to an Edge + +We can add new fields to an Edge type, by creating a type extension that targets the Edge type by its name. + +If our Edge is named `UsersEdge`, we can add a new field to it like the following. + +```csharp +[ExtendObjectType("UsersEdge")] +public class UsersEdge +{ + public string NewField([Parent] Edge edge) + { + var cursor = edge.Cursor; + var user = edge.Node; + + // Omitted code for brevity + } +} +``` + +[Learn more about extending types](/docs/hotchocolate/v12/defining-a-schema/extending-types) + +## Adding fields to a Connection + +We can add new fields to a _Connection_ type, by creating a type extension that targets the _Connection_ type by its name. + +If our _Connection_ is named `UsersConnection`, we can add a new field to it like the following. + +```csharp +[ExtendObjectType("UsersConnection")] +public class UsersConnectionExtension +{ + public string NewField() + { + // Omitted code for brevity + } +} +``` + +[Learn more about extending types](/docs/hotchocolate/v12/defining-a-schema/extending-types) + +These additional fields are great to perform aggregations either on the entire dataset, by for example issuing a second database call, or on top of the paginated result. 
+ +We can access the pagination result like the following: + +```csharp +[ExtendObjectType("UsersConnection")] +public class UsersConnectionExtension +{ + public string NewField([Parent] Connection connection) + { + var result = connection.Edges.Sum(e => e.Node.SomeField); + + // Omitted code for brevity + } +} +``` + +> Note: If you are using [Projections](/docs/hotchocolate/v12/fetching-data/projections), be aware that some properties on your model might not be set, depending on what the user queried for. + +## Total count + +Sometimes we might want to return the total number of pageable entries. + +For this to work we need to enable the `IncludeTotalCount` flag on the `UsePaging` middleware. + + + + +```csharp +[UsePaging(IncludeTotalCount = true)] +``` + + + + +```csharp +descriptor.UsePaging(options: new PagingOptions +{ + IncludeTotalCount = true +}); +``` + + + + +Take a look at the Annotation-based or Code-first example. + + + + +This will add a new field called `totalCount` to our _Connection_. + +```sdl +type UsersConnection { + pageInfo: PageInfo! + edges: [UsersEdge!] + nodes: [User!] + totalCount: Int! +} +``` + +If our resolver returns an `IEnumerable` or an `IQueryable` the `totalCount` will be automatically computed, if it has been specified as a subfield in the query. + +If we have customized our pagination and our resolver now returns a `Connection`, we have to explicitly declare how the `totalCount` value is computed. + +```csharp +var connection = new Connection( + edges, + pageInfo, + getTotalCount: cancellationToken => ValueTask.FromResult(0)); +``` + +# Offset Pagination + +> Note: While we support _offset-based_ pagination, we highly encourage the use of [_Connections_](#connections) instead. _Connections_ provide an abstraction which makes it easier to switch to another pagination mechanism later on. + +Besides _Connections_ we can also expose a more traditional _offset-based_ pagination. + +```sdl +type Query { + users(skip: Int take: Int): UserCollectionSegment +} + +type UserCollectionSegment { + items: [User!] + pageInfo: CollectionSegmentInfo! +} + +type CollectionSegmentInfo { + hasNextPage: Boolean! + hasPreviousPage: Boolean! +} +``` + +## Definition + +To add _offset-based_ pagination capabilties to our fields we have to add the `UseOffsetPaging` middleware. + + + + +```csharp +public class Query +{ + [UseOffsetPaging] + public IEnumerable GetUsers([Service] IUserRespository repository) + => repository.GetUsers(); +} +``` + + + + +```csharp +public class QueryType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor + .Field("users") + .UseOffsetPaging() + .Resolve(context => + { + var repository = context.Service(); + + return repository.GetUsers(); + }); + } +} +``` + + + + +Take a look at the Annotation-based or Code-first example. + + + + +For the `UseOffsetPaging` middleware to work, our resolver needs to return an `IEnumerable` or an `IQueryable`. The middleware will then apply the pagination arguments to what we have returned. In the case of an `IQueryable` this means that the pagination operations can be directly translated to native database queries. + +We also offer pagination integrations for some database technologies that do not use `IQueryable`. + +[Learn more about pagination providers](#providers) + +## Naming + +The name of the CollectionSegment type is inferred from the item type name. 
If our field returns a collection of `UserType` and the name of this type is `User`, the CollectionSegment will be called `UserCollectionSegment`. + +## Options + +We can define a number of options on a per-field basis. + + + + +In the Annotation-based approach we can define these options using properties on the `[UseOffsetPaging]` attribute. + +```csharp +[UseOffsetPaging(MaxPageSize = 100)] +``` + + + + +In the Code-first approach we can pass an instance of `PagingOptions` to the `UseOffsetPaging` middleware. + +```csharp +descriptor.Field("users").UseOffsetPaging(options: new PagingOptions +{ + MaxPageSize = 100 +}); +``` + + + + +Take a look at the Annotation-based or Code-first example. + + + + +[Learn more about the possible PagingOptions](#pagingoptions) + +## Changing the item type + +Lets say we are returning a collection of `string` from our pagination resolver, but we want these `string` to be represented in the schema using the `ID` scalar. + +For this we can specifically tell the `UseOffsetPaging` middleware, which type to use in the schema for representation of the returned CLR type. + + + + +```csharp +public class Query +{ + [UseOffsetPaging(typeof(IdType))] + public IEnumerable GetIds() + { + // Omitted code for brevity + } +} +``` + + + + +```csharp +public class QueryType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor + .Field("ids") + .UseOffsetPaging() + .Resolve(context => + { + // Omitted code for brevity + }); + } +} +``` + + + + +Take a look at the Annotation-based or Code-first example.. + + + + +The same applies of course, if we are returning a collection of `User` from our pagination resolver, but we want to use the `UserType` for representation in the schema. + +## Custom pagination logic + +If we need more control over the pagination process we can do so, by returning a `CollectionSegment`. + + + + +```csharp +public class Query +{ + [UseOffsetPaging] + public CollectionSegment GetUsers(int? skip, int? take, string sortBy) + { + /// get users using the above arguments + IEnumerable users = null; + + var pageInfo = new CollectionSegmentInfo(false, false); + + var collectionSegment = new CollectionSegment( + users, + pageInfo, + ct => ValueTask.FromResult(0)); + + return collectionSegment; + } +} +``` + + + + +```csharp +public class QueryType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor + .Field("users") + .UseOffsetPaging() + .Argument("sortBy", a => a.Type>()) + .Resolve(context => + { + var skip = context.ArgumentValue("skip"); + var take = context.ArgumentValue("take"); + var sortBy = context.ArgumentValue("sortBy"); + + // get users using the above arguments + IEnumerable users = null; + + var pageInfo = new CollectionSegmentInfo(false, false); + + var collectionSegment = new CollectionSegment( + users, + pageInfo, + ct => ValueTask.FromResult(0)); + + return collectionSegment; + }); + } +} +``` + + + + +Take a look at the Annotation-based or Code-first example.. + + + + +## Addings fields to a CollectionSegment + +We can add new fields to a CollectionSegment type, by creating a type extension that targets the CollectionSegment by its name. + +If our CollectionSegment is named `UserCollectionSegment`, we can add a new field to it like the following. 
+ +```csharp +[ExtendObjectType("UserCollectionSegment")] +public class UserCollectionSegmentExtension +{ + public string NewField() + { + // Omitted code for brevity + } +} +``` + +[Learn more about extending types](/docs/hotchocolate/v12/defining-a-schema/extending-types) + +These additional fields are great to perform aggregations either on the entire dataset, by for example issuing a second database call, or on top of the paginated result. + +We can access the pagination result like the following: + +```csharp +[ExtendObjectType("UserCollectionSegment")] +public class UserCollectionSegmentExtension +{ + public string NewField([Parent] CollectionSegment collectionSegment) + { + var result = collectionSegment.Items.Sum(i => i.SomeField); + + // Omitted code for brevity + } +} +``` + +> Note: If you are using [Projections](/docs/hotchocolate/v12/fetching-data/projections), be aware that some properties on your model might not be set, depending on what the user queried for. + +## Total count + +Sometimes we might want to return the total number of pageable entries. + +For this to work we need to enable the `IncludeTotalCount` flag on the `UseOffsetPaging` middleware. + + + + +```csharp +[UseOffsetPaging(IncludeTotalCount = true)] +``` + + + + +```csharp +descriptor.UseOffsetPaging(options: new PagingOptions +{ + IncludeTotalCount = true +}); +``` + + + + +Take a look at the Annotation-based or Code-first example. + + + + +This will add a new field called `totalCount` to our _CollectionSegment_. + +```sdl +type UserCollectionSegment { + pageInfo: CollectionSegmentInfo! + items: [User!] + totalCount: Int! +} +``` + +If our resolver returns an `IEnumerable` or an `IQueryable` the `totalCount` will be automatically computed, if it has been specified as a subfield in the query. + +If we have customized our pagination and our resolver now returns a `CollectionSegment`, we have to explicitly declare how the `totalCount` value is computed. + +```csharp +var collectionSegment = new CollectionSegment( + items, + pageInfo, + getTotalCount: cancellationToken => ValueTask.FromResult(0)); +``` + +# Providers + +The `UsePaging` and `UseOffsetPaging` middleware provide a unified way of applying pagination to our resolvers. Depending on the data source used within the resolver the pagination mechanism needs to be different though. Hot Chocolate includes so called paging providers that allow us to use the same API, e.g. `UsePaging`, but for different data sources, e.g. MongoDB and SQL. + +Paging providers can be registered using various methods on the `IRequestExecutorBuilder`. For example the MongoDB paging provider can be registered like the following. + +```csharp +services + .AddGraphQLServer() + .AddMongoDbPagingProviders(); +``` + +[Consult the specific integration documentation for more details](/docs/hotchocolate/v12/integrations) + +When registering paging providers we can name them to be able to explicitly reference them. + +```csharp +services + .AddGraphQLServer() + .AddMongoDbPagingProviders(providerName: "MongoDB"); +``` + +They can then be referenced like the following. + + + + +```csharp +[UsePaging(ProviderName = "MongoDB")] +public IEnumerable GetUsers() +``` + + + + +```csharp +descriptor + .Field("users") + .UsePaging(options: new PagingOptions + { + ProviderName = "MongoDB" + }) +``` + + + + +Take a look at the Annotation-based or Code-first example. + + + + +If no `ProviderName` is specified, the correct provider is selected based on the return type of the resolver. 
If the provider to use can't be inferred from the return type, the first (default) provider is used automatically. If needed we can mark a paging provider as the explicit default. + +```csharp +services + .AddGraphQLServer() + .AddMongoDbPagingProviders(defaultProvider: true); +``` + +If no paging providers have been registered, a default paging provider capable of handling `IEnumerable` and `IQueryable` is used. + +# PagingOptions + +`PagingOptions` can either be defined on a per-field basis or [globally](#pagination-defaults). + +The following options can be configured. + +| Property | Default | Description | +| ------------------------------ | ------- | ----------------------------------------------------------------------------------- | +| `MaxPageSize` | `50` | Maximum number of items a client can request via `first`, `last` or `take`. | +| `DefaultPageSize` | `10` | The default number of items, if a client does not specify`first`, `last` or `take`. | +| `IncludeTotalCount` | `false` | Add a `totalCount` field for clients to request the total number of items. | +| `AllowBackwardPagination` | `true` | Include `before` and `last` arguments on the _Connection_. | +| `RequirePagingBoundaries` | `false` | Clients need to specify either `first`, `last` or `take`. | +| `InferConnectionNameFromField` | `true` | Infer the name of the _Connection_ from the field name rather than its type. | +| `ProviderName` | `null` | The name of the pagination provider to use. | + +# Pagination defaults + +If we want to enforce consistent pagination defaults throughout our app, we can do so by setting the global `PagingOptions`. + +```csharp +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + .SetPagingOptions(new PagingOptions + { + MaxPageSize = 100 + }); + } +} +``` + +[Learn more about possible PagingOptions](#pagingoptions) + +# Types of pagination + +In this section we will look at the most common pagination approaches and their downsides. There are mainly two concepts we find today: _offset-based_ and _cursor-based_ pagination. + +> Note: This section is intended as a brief overview and should not be treated as a definitive guide or recommendation. + +## Offset Pagination + +_Offset-based_ pagination is found in many server implementations whether the backend is implemented in SOAP, REST or GraphQL. + +It is so common, since it is the simplest form of pagination we can implement. All it requires is an `offset` (start index) and a `limit` (number of entries) argument. + +```sql +SELECT * FROM Users +ORDER BY Id +LIMIT %limit OFFSET %offset +``` + +### Problems + +But whilst _offset-based_ pagination is simple to implement and works relatively well, there are also some problems: + +- Using `OFFSET` on the database-side does not scale well for large datasets. Most databases work with an index instead of numbered rows. This means the database always has to count _offset + limit_ rows, before discarding the _offset_ and only returning the requested number of rows. + +- If new entries are written to or removed from our database at high frequency, the _offset_ becomes unreliable, potentially skipping or returning duplicate entries. + +## Cursor Pagination + +Contrary to the _offset-based_ pagination, where we identify the position of an entry using an _offset_, _cursor-based_ pagination works by returning the pointer to the next entry in our pagination. 
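+
+Because the _cursor_ handed to the client is just an opaque string, a common approach is to encode the key of the last entry on the current page. The following is a minimal, purely illustrative sketch of such an encoding (it is not Hot Chocolate's internal cursor format):
+
+```csharp
+using System;
+using System.Text;
+
+public static class Cursor
+{
+    // Encode the key of the last returned entry as an opaque string for the client.
+    public static string Encode(int id) =>
+        Convert.ToBase64String(Encoding.UTF8.GetBytes(id.ToString()));
+
+    // Decode the cursor sent back by the client into the key we can filter on.
+    public static int Decode(string cursor) =>
+        int.Parse(Encoding.UTF8.GetString(Convert.FromBase64String(cursor)));
+}
+```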
+ +To understand this concept better, let's look at an example: We want to paginate over the users in our application. + +First we execute the following to receive our first page: + +```sql +SELECT * FROM Users +ORDER BY Id +LIMIT %limit +``` + +`%limit` is actually `limit + 1`. We are doing this to know wether there are more entries in our dataset and to receive the _cursor_ of the next entry (in this case its `Id`). This additional entry will not be returned to the consumer of our pagination. + +To now receive the second page, we execute: + +```sql +SELECT * FROM Users +WHERE Id >= %cursor +ORDER BY Id +LIMIT %limit +``` + +Using `WHERE` instead of `OFFSET` is great, since now we can leverage the index of the `Id` field and the database does not have to compute an _offset_. + +For this to work though, our _cursor_ needs to be **unique** and **sequential**. Most of the time the _Id_ field will be the best fit. + +But what if we need to sort by a field that does not have the aforementioned properties? We can simply combine the field with another field, which has the needed properties (like `Id`), to form a _cursor_. + +Let's look at another example: We want to paginate over the users sorted by their birthday. + +After receiving the first page, we create a combined _cursor_, like `"1435+2020-12-31"` (`Id` + `Birthday`), of the next entry. To receive the second page, we convert the _cursor_ to its original values (`Id` + `Birthday`) and use them in our query: + +```sql +SELECT * FROM Users +WHERE (Birthday >= %cursorBirthday +OR (Birthday = %cursorBirthday AND Id >= %cursorId)) +ORDER BY Birthday, Id +LIMIT %limit +``` + +### Problems + +Even though _cursor-based_ pagination can be more performant than _offset-based_ pagination, it comes with some downsides as well: + +- When using `WHERE` and `ORDER BY` on a field without an index, it can be slower than using `ORDER BY` with `OFFSET`. + +- Since we now only know of the next entry, there is no more concept of pages. If we have a feed or only _Next_ and _Previous_ buttons, this works great, but if we depend on page numbers, we are in a tight spot. diff --git a/website/src/docs/hotchocolate/v12/fetching-data/projections.md b/website/src/docs/hotchocolate/v12/fetching-data/projections.md new file mode 100644 index 00000000000..71bd51af3e3 --- /dev/null +++ b/website/src/docs/hotchocolate/v12/fetching-data/projections.md @@ -0,0 +1,315 @@ +--- +title: Projections +--- + +import { ExampleTabs, Annotation, Code, Schema } from "../../../components/mdx/example-tabs" + +Every GraphQL request specifies exactly what data should be returned. Over or under fetching can be reduced +or even eliminated. Hot Chocolate projections leverage this concept and directly projects incoming queries +to the database. + +Projections operate on `IQueryable` by default, but it is possible to create custom providers for projections +to support a specific database driver. + +> ⚠️ **Note:** Projections currently need a public setter on fields they operate on in order to function correctly. Otherwise the default constructed value will be returned upon query. + +```graphql +{ + users { + email + address { + street + } + } +} +``` + +```sql +SELECT "u"."Email", "a"."Id" IS NOT NULL, "a"."Street" +FROM "Users" AS "u" +LEFT JOIN "Address" AS "a" ON "u"."AddressId" = "a"."Id" +``` + +# Getting Started + +Filtering is part of the `HotChocolate.Data` package. 
You can add the dependency with the `dotnet` cli + +```bash +dotnet add package HotChocolate.Data +``` + +> ⚠️ Note: All `HotChocolate.*` packages need to have the same version. + +To use projections with your GraphQL endpoint you have to register projections on the schema: + +```csharp +services.AddGraphQLServer() + // Your schema configuration + .AddProjections(); +``` + +Projections can be registered on a field. A middleware will apply the selected fields on the result. +Support for `IQueryable` comes out of the box. +The projection middleware will create a projection for the whole subtree of its field. Only fields that +are members of a type will be projected. Fields that define a customer resolver cannot be projected +to the database. If the middleware encounters a field that specifies `UseProjection()` this field will be skipped. + + + + +```csharp +public class Query +{ + [UseProjection] + public IQueryable GetUsers([Service] IUserRepository repository) + => repository.GetUsers(); +} +``` + + + + +```csharp +public class QueryType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor.Field(t => t.GetUsers(default)).UseProjection(); + } +} + +public class Query +{ + public IQueryable GetUsers([Service] IUserRepository repository) + => repository.GetUsers(); +} +``` + + + + +⚠️ Schema-first does currently not support projections! + + + + +> ⚠️ **Note:** If you use more than one middleware, keep in mind that **ORDER MATTERS**. The correct order is UsePaging > UseProjection > UseFiltering > UseSorting + +# FirstOrDefault / SingleOrDefault + +If you want to limit the response to a single result, you would have to declare a resolver. +Without returning an `IQueryable<>` you lose the ability to use filtering. + +There are two extensions you can use to leverage `collection.FirstOrDefault()` and `collection.SingleOrDefault()` to +the GraphQL layer. The extensions will rewrite the response type to the element type of the collection apply the behavior. + +```csharp + public class Query + { + [UseFirstOrDefault] + [UseProjection] + [UseFiltering] + public IQueryable GetUsers([ScopedService] SomeDbContext someDbContext) + { + return someDbContext.Users; + } + } +``` + +```sdl +type Query { + users(where: UserFilterInput): User +} + +type User { + id: Int! + name: String! + email: String! +} +``` + +# Sorting Filtering and Paging + +Projections can be used together with sorting, filtering and paging. The order of the middlewares must be correct. +Make sure to have the following order: UsePaging > UseProjection > UseFiltering > UseSorting + +Filtering and sorting can be projected over relations. Projections **cannot** project paging over relations. + +```csharp +public class Query +{ + [UsePaging] + [UseProjection] + [UseFiltering] + [UseSorting] + public IQueryable GetUsers([ScopedService] SomeDbContext someDbContext) + { + return someDbContext.Users; + } +} + +public class User +{ + public int Id { get; set; } + + public string Name { get; set; } + + public string Email { get; set; } + + [UseFiltering] + [UseSorting] + public ICollection
Addresses { get; set; } +} +``` + +```graphql +{ + users( + where: { name: { eq: "ChilliCream" } } + order: [{ name: DESC }, { email: DESC }] + ) { + nodes { + email + addresses(where: { street: { eq: "Sesame Street" } }) { + street + } + } + pageInfo { + endCursor + hasNextPage + hasPreviousPage + startCursor + } + } +} +``` + +```sql +SELECT "t"."Email", "t"."Id", "a"."Street", "a"."Id" +FROM ( + SELECT "u"."Email", "u"."Id", "u"."Name" + FROM "Users" AS "u" + WHERE "u"."Name" = @__p_0 + ORDER BY "u"."Name" DESC, "u"."Email" DESC + LIMIT @__p_1 +) AS "t" +LEFT JOIN "Address" AS "a" ON "t"."Id" = "a"."UserId" +ORDER BY "t"."Name" DESC, "t"."Email" DESC, "t"."Id", "a"."Id" +``` + +# Always Project Fields + +Resolvers on types often access data of the parent, e.g. uses the `Email` member of the parent to fetch some +related data from another service. With projections, this resolver could only work when the user also queries +for the `email` field. To ensure a field is always projected you have to use `IsProjected(true)`. + + + + +```csharp +public class User +{ + public int Id { get; set; } + public string Name { get; set; } + [IsProjected(true)] + public string Email { get; set; } + public Address Address { get; set; } +} +``` + + + + +```csharp +public class UserType : ObjectType +{ + protected override void Configure( + IObjectTypeDescriptor descriptor) + { + descriptor.Field(f => f.Email).IsProjected(true); + } +} +``` + + + + +⚠️ Schema-first does currently not support projections! + + + + +```graphql +{ + users { + address { + street + } + } +} +``` + +```sql +SELECT "u"."Email", "a"."Id" IS NOT NULL, "a"."Street" +FROM "Users" AS "u" +LEFT JOIN "Address" AS "a" ON "u"."AddressId" = "a"."Id" +``` + +# Exclude fields + +If a projected field is requested, the whole subtree is processed. Sometimes you want to opt out of projections. +The projections middleware skips a field in two cases. Either the visitor encounters a field that is a `UseProjection` field +itself, or it defines `IsProjected(false)`. + + + + +```csharp +public class User +{ + public int Id { get; set; } + public string Name { get; set; } + [IsProjected(false)] + public string Email { get; set; } + public Address Address { get; set; } +} +``` + + + + +```csharp +public class UserType : ObjectType +{ + protected override void Configure( + IObjectTypeDescriptor descriptor) + { + descriptor.Field(f => f.Email).IsProjected(false); + } +} +``` + + + + +⚠️ Schema-first does currently not support projections! + + + + +```graphql +{ + users { + email + address { + street + } + } +} +``` + +```sql +SELECT "a"."Id" IS NOT NULL, "a"."Street" +FROM "Users" AS "u" +LEFT JOIN "Address" AS "a" ON "u"."AddressId" = "a"."Id" +``` diff --git a/website/src/docs/hotchocolate/v12/fetching-data/resolvers.md b/website/src/docs/hotchocolate/v12/fetching-data/resolvers.md new file mode 100644 index 00000000000..8b3838550bd --- /dev/null +++ b/website/src/docs/hotchocolate/v12/fetching-data/resolvers.md @@ -0,0 +1,446 @@ +--- +title: "Resolvers" +--- + +import { ExampleTabs, Annotation, Code, Schema } from "../../../components/mdx/example-tabs" + +When it comes to fetching data in a GraphQL server, it will always come down to a resolver. + +**A resolver is a generic function that fetches data from an arbitrary data source for a particular field.** + +We can think of each field in our query as a method of the previous type which returns the next type. 
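+
+To make this mental model concrete, here is a minimal sketch in which the `User` and `Company` classes are purely illustrative: a query such as `{ me { company { name } } }` can be read as calling a method on each type to get to the next one.
+
+```csharp
+public class Query
+{
+    // Resolver for the root field `me`; it returns the next type in the query.
+    public User GetMe() => new User { Name = "Ada" };
+}
+
+public class User
+{
+    public string Name { get; set; }
+
+    // Resolver for the `company` field on the `User` type.
+    public Company GetCompany() => new Company { Name = "ChilliCream" };
+}
+
+public class Company
+{
+    public string Name { get; set; }
+}
+```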
+ +## Resolver Tree + +A resolver tree is a projection of a GraphQL operation that is prepared for execution. + +For better understanding, let's imagine we have a simple GraphQL query like the following, where we select some fields of the currently logged-in user. + +```graphql +query { + me { + name + company { + id + name + } + } +} +``` + +In Hot Chocolate, this query results in the following resolver tree. + +```mermaid +graph LR + A(query: QueryType) --> B(me: UserType) + B --> C(name: StringType) + B --> D(company: CompanyType) + D --> E(id: IdType) + D --> F(name: StringType) +``` + +This tree will be traversed by the execution engine, starting with one or more root resolvers. In the above example the `me` field represents the only root resolver. + +Field resolvers that are subselections of a field, can only be executed after a value has been resolved for their _parent_ field. In the case of the above example this means that the `name` and `company` resolvers can only run, after the `me` resolver has finished. Resolvers of field subselections can and will be executed in parallel. + +**Because of this it is important that resolvers, with the exception of top level mutation field resolvers, do not contain side-effects, since their execution order may vary.** + +The execution of a request finishes, once each resolver of the selected fields has produced a result. + +_This is of course an oversimplification that differs from the actual implementation._ + +# Defining a Resolver + +Resolvers can be defined in a way that should feel very familiar to C# developers, especially in the Annotation-based approach. + +## Properties + +Hot Chocolate automatically converts properties with a public get accessor to a resolver that simply returns its value. + +Properties are also covered in detail by the [object type documentation](/docs/hotchocolate/v12/defining-a-schema/object-types). + +## Regular Resolver + +A regular resolver is just a simple method, which returns a value. + + + + +```csharp +public class Query +{ + public string Foo() => "Bar"; +} + +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + .AddQueryType(); + } +} +``` + + + + +```csharp +public class Query +{ + public string Foo() => "Bar"; +} + +public class QueryType: ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor + .Field(f => f.Foo()) + .Type>(); + } +} + +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + .AddQueryType(); + } +} +``` + +We can also provide a resolver delegate by using the `Resolve` method. + +```csharp +descriptor + .Field("foo") + .Resolve(context => + { + return "Bar"; + }); +``` + + + + +```csharp +public class Query +{ + public string Foo() => "Bar"; +} + +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + .AddDocumentFromString(@" + type Query { + foo: String! + } + ") + .BindRuntimeType(); + } +} +``` + +We can also add a resolver by calling `AddResolver()` on the `IRequestExecutorBuilder`. + +```csharp +services + .AddGraphQLServer() + .AddDocumentFromString(@" + type Query { + foo: String! + } + ") + .AddResolver("Query", "foo", (context) => "Bar"); +``` + + + + +## Async Resolver + +Most data fetching operations, like calling a service or communicating with a database, will be asynchronous. 
+ +In Hot Chocolate, we can simply mark our resolver methods and delegates as `async` or return a `Task` and it becomes an async-capable resolver. + +We can also add a `CancellationToken` argument to our resolver. Hot Chocolate will automatically cancel this token if the request has been aborted. + +```csharp +public class Query +{ + public async Task Foo(CancellationToken ct) + { + // Omitted code for brevity + } +} +``` + +When using a delegate resolver, the `CancellationToken` is passed as second argument to the delegate. + +```csharp +descriptor + .Field("foo") + .Resolve((context, ct) => + { + // Omitted code for brevity + }); +``` + +The `CancellationToken` can also be accessed through the `IResolverContext`. + +```csharp +descriptor + .Field("foo") + .Resolve(context => + { + CancellationToken ct = context.RequestAborted; + + // Omitted code for brevity + }); +``` + +## ResolveWith + +Thus far we have looked at two ways to specify resolvers in Code-first: + +- Add new methods to the CLR type, e.g. the `T` type of `ObjectType` +- Add new fields to the schema type in the form of delegates + ```csharp + descriptor.Field("foo").Resolve(context => ) + ``` + +But there's a third way. We can describe our field using the `descriptor`, but instead of a resolver delegate, we can point to a method on another class, responsible for resolving this field. + +```csharp +public class FooResolvers +{ + public string GetFoo(string arg, [Service] FooService service) + { + // Omitted code for brevity + } +} + +public class QueryType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor + .Field("foo") + .Argument("arg", a => a.Type>()) + .ResolveWith(r => r.GetFoo(default, default)); + } +} +``` + +# Arguments + +We can access arguments we defined for our resolver like regular arguments of a function. + +There are also specific arguments that will be automatically populated by Hot Chocolate when the resolver is executed. These include [Dependency injection services](#injecting-services), [DataLoaders](/docs/hotchocolate/v12/fetching-data/dataloader), state, or even context like a [_parent_](#accessing-parent-values) value. + +[Learn more about arguments](/docs/hotchocolate/v12/defining-a-schema/arguments) + +# Injecting Services + +Let's assume we have created a `UserService` and registered it as a service. + +```csharp +var builder = WebApplication.CreateBuilder(args); + +builder.Services.AddSingleton() + +builder.Services + .AddGraphQLServer() + .AddQueryType(); +``` + +We can now access it like the following in our resolvers. + +```csharp +public class Query +{ + public List GetUsers([Service] UserService userService) + => userService.GetUsers(); +} +``` + +[Learn more about dependency injection](/docs/hotchocolate/v12/server/dependency-injection) + +# Accessing the HttpContext + +The [IHttpContextAccessor](https://docs.microsoft.com/dotnet/api/microsoft.aspnetcore.http.ihttpcontextaccessor) allows us to access the [HttpContext](https://docs.microsoft.com/dotnet/api/microsoft.aspnetcore.http.httpcontext) of the current request from within our resolvers. This is useful, if we for example need to set a header or cookie. + +First we need to add the `IHttpContextAccessor` as a service. 
+ +```csharp +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services.AddHttpContextAccessor(); + + // Omitted code for brevity + } +} +``` + +After this we can inject it into our resolvers and make use of the the `HttpContext` property. + +```csharp +public string Foo(string id, [Service] IHttpContextAccessor httpContextAccessor) +{ + if (httpContextAccessor.HttpContext is not null) + { + // Omitted code for brevity + } +} +``` + +# Accessing parent values + +The resolver of each field on a type has access to the value that was resolved for said type. + +Let's look at an example. We have the following schema. + +```sdl +type Query { + me: User!; +} + +type User { + id: ID!; + friends: [User!]!; +} +``` + +The `User` schema type is represented by an `User` CLR type. The `id` field is an actual property on this CLR type. + +```csharp +public class User +{ + public string Id { get; set; } +} +``` + +`friends` on the other hand is a resolver i.e. method we defined. It depends on the user's `Id` property to compute its result. +From the point of view of this `friends` resolver, the `User` CLR type is its _parent_. + +We can access this so called _parent_ value like the following. + + + + +In the Annotation-based approach we can just access the properties using the `this` keyword. + +```csharp +public class User +{ + public string Id { get; set; } + + public List GetFriends() + { + var currentUserId = this.Id; + + // Omitted code for brevity + } +} +``` + +There's also a `[Parent]` attribute that injects the parent into the resolver. + +```csharp +public class User +{ + public string Id { get; set; } + + public List GetFriends([Parent] User parent) + { + // Omitted code for brevity + } +} +``` + +This is especially useful when using [type extensions](/docs/hotchocolate/v12/defining-a-schema/extending-types). + + + + +```csharp +public class User +{ + public string Id { get; set; } + + public List GetFriends([Parent] User parent) + { + // Omitted code for brevity + } +} +``` + +When using the `Resolve` method, we can access the parent through the `IResolverContext`. + +```csharp +public class User +{ + public string Id { get; set; } +} + +public class UserType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor + .Field("friends") + .Resolve(context => + { + User parent = context.Parent(); + + // Omitted code for brevity + }); + } +} +``` + + + + +```csharp +public class User +{ + public string Id { get; set; } + + public List GetFriends([Parent] User parent) + { + // Omitted code for brevity + } +} +``` + +When using `AddResolver()`, we can access the parent through the `IResolverContext`. + +```csharp +services + .AddGraphQLServer() + .AddDocumentFromString(@" + type User { + friends: [User!]! + } + ") + .AddResolver("User", "friends", (context) => + { + User parent = context.Parent(); + + // Omitted code for brevity + }); +``` + + + diff --git a/website/src/docs/hotchocolate/v12/fetching-data/sorting.md b/website/src/docs/hotchocolate/v12/fetching-data/sorting.md new file mode 100644 index 00000000000..af3a73c4baf --- /dev/null +++ b/website/src/docs/hotchocolate/v12/fetching-data/sorting.md @@ -0,0 +1,450 @@ +--- +title: Sorting +--- + +import { ExampleTabs, Annotation, Code, Schema } from "../../../components/mdx/example-tabs" + +# What is sorting + +Ordering results of a query dynamically is a common case. 
With Hot Chocolate sorting, you can expose a sorting argument that abstracts the complexity of ordering logic. +With little configuration, your GraphQL API has sorting capabilities, which translates to native database queries. +The default sort implementation translates sorting statements to expression trees that are applied to `IQueryable`. +Hot Chocolate by default will inspect your .NET model and infer the possible filter operations from it. +Sorting uses `IQueryable` (`IEnumerable`) by default, but you can also easily customize them to use other interfaces. + +The following type would yield the following sorting operation + +```csharp + public class User + { + public string Name { get; set; } + + public Address Address { get; set; } + } + + public class Address + { + public string Street { get; set; } + } + +``` + +```sdl +type Query { + users(order: [UserSortInput]): [User] +} + +type User { + name: String! + address: Address! +} + +input AddressSortInput { + street: SortEnumType +} + +input UserSortInput { + name: SortEnumType + address: AddressSortInput +} + +enum SortEnumType { + ASC + DESC +} +``` + +# Getting started + +Sorting is part of the `HotChocolate.Data` package. You can add the dependency with the `dotnet` cli + +```bash +dotnet add package HotChocolate.Data +``` + +> ⚠️ Note: All `HotChocolate.*` packages need to have the same version. + +To use sorting you need to register it on the schema: + +```csharp +services.AddGraphQLServer() + // Your schema configuration + .AddSorting(); +``` + +Hot Chocolate will infer the sorting types directly from your .Net Model and then use a Middleware to apply the order to `IQueryable` or `IEnumerable` on execution. + + + + +```csharp +public class Query +{ + [UseSorting] + public IQueryable GetUsers([Service] IUserRepository repository) + => repository.GetUsers(); +} +``` + + + + +```csharp +public class QueryType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor.Field(f => f.GetUsers(default)).UseSorting(); + } +} + +public class Query +{ + public IQueryable GetUsers([Service] IUserRepository repository) + => repository.GetUsers(); +} +``` + + + + +⚠️ Schema-first does currently not support sorting! + + + + +> ⚠️ **Note:** If you use more than one middleware, keep in mind that **ORDER MATTERS**. The correct order is UsePaging > UseProjections > UseFiltering > UseSorting + +The type can be sorted using the `order` field in the query: + +```graphql +query { + users(order: [{name: ASC}]) { + name + address { + street + } + } +} +``` + +Properties of nested objects can be sorted as well: + +```graphql +query { + users(order: [{address: {street: ASC}}]) { + name + address { + street + } + } +} +``` + +Note that it is possible to sort on a field and then by another field: + +```graphql +query { + users(order: [{name: ASC}, {address: {street: DESC}}]) { + name + address { + street + } + } +} +``` + +# Customization + +Under the hood, sorting is based on top of normal Hot Chocolate input types. You can easily customize them with a very familiar fluent interface. The sorting input types follow the same `descriptor` scheme as you are used to from the normal input types. Just extend the base class `SortInputType` and override the descriptor method. + +`ISortInputTypeDescriptor` supports most of the methods of `IInputTypeDescriptor`. By default, operations are generated for all fields of the type. +Members that are collections are skipped because you cannot order based on lists. 
+If you do want to specify the sorting types by yourself, you can change this behavior with `BindFields`, `BindFieldsExplicitly`, or `BindFieldsImplicitly`. +When fields are bound implicitly, meaning sorting is added for all valid properties, you may want to hide a few fields. You can do this with `Ignore(x => Bar)`. +It is also possible to customize the GraphQL field of the operation further. You can change the name or add a description or directive. + +```csharp +public class UserSortType : SortInputType +{ + protected override void Configure(ISortInputTypeDescriptor descriptor) + { + descriptor.BindFieldsExplicitly(); + descriptor.Field(f => f.Name).Name("custom_name"); + } +} +``` + +If you want to change the sorting operations on a field, you need to declare your own operation enum type. + +```csharp {7} +public class UserSortType : SortInputType +{ + protected override void Configure(ISortInputTypeDescriptor descriptor) + { + descriptor.BindFieldsExplicitly(); + descriptor.Field(f => f.Name).Type(); + } +} + +public class AscOnlySortEnumType : DefaultSortEnumType +{ + protected override void Configure(ISortEnumTypeDescriptor descriptor) + { + descriptor.Operation(DefaultSortOperations.Ascending); + } +} + +``` + +```sdl +type Query { + users(order: [UserSortInput]): [User] +} + +type User { + name: String! + address: Address! +} + +input UserSortInput { + name: AscOnlySortEnumType +} + +enum AscOnlySortEnumType { + ASC +} +``` + +To apply this sorting type, we just have to provide it to the `UseSorting` extension method as the generic type argument. + + + + +```csharp +public class Query +{ + [UseSorting(typeof(UserSortType))] + public IQueryable GetUsers([Service] IUserRepository repository) + => repository.GetUsers(); +} +``` + + + + +```csharp +public class QueryType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor.Field(f => f.GetUsers(default)).UseSorting(); + } +} + +public class Query +{ + public IQueryable GetUsers([Service] IUserRepository repository) + => repository.GetUsers(); +} +``` + + + + +⚠️ Schema-first does currently not support sorting! + + + + +# Sorting Conventions + +If you want to change the behavior of sorting globally, you want to create a convention for sorting. The sorting convention comes with a fluent interface that is close to a type descriptor. + +## Get Started + +To use a sort convention, you have to extend `SortConvention` and override the `Configure` method. Alternatively, you can directly configure the convention over the constructor argument. +You then have to register your custom convention on the schema builder with `AddConvention`. +By default, a new convention is empty. To add the default behavior, you have to add `AddDefaults`. + +```csharp +public class CustomConvention + : SortConvention +{ + protected override void Configure(ISortConventionDescriptor descriptor) + { + descriptor.AddDefaults(); + } +} + +services.AddGraphQLServer() + .AddConvention(); +// or +services.AddGraphQLServer() + .AddConvention(new Convention(x => + x.AddDefaults())) +``` + +Often you just want to extend the default behavior of sorting. 
If this is the case, you can also use `SortConventionExtension` + +```csharp +public class CustomConventionExtension + : SortConventionExtension +{ + protected override void Configure(ISortConventionDescriptor descriptor) + { + // config + } +} + +services.AddGraphQLServer() + .AddConvention(); +// or +services.AddGraphQLServer() + .AddConvention(new FilterConventionExtension(x => + { + // config + })) +``` + +## Argument Name + +With the convention descriptor, you can easily change the argument name of the `FilterInputType`. + +**Configuration** + +```csharp +descriptor.ArgumentName("example_argument_name"); +``` + +**Result** + +```sdl +type Query { + users(example_argument_name: [UserSortInput]): [User] +} +``` + +## Binding of SortTypes + +`SortInputType`'s **cannot** just be registered on the schema. You have to bind them to the runtime type on the convention. + +### SortInputType bindings + +By default, only the `string` type is bound explicitly. If you want to configure sorting globally, you are free to bind additional types. + +**Configuration** + +```csharp +public class CustomSortInputType + : SortInputType +{ + protected override void Configure(ISortInputTypeDescriptor descriptor) + { + descriptor.Name("CustomSortInputType"); + } +} +public class CustomConvention + : SortConvention +{ + protected override void Configure(ISortConventionDescriptor descriptor) + { + descriptor.AddDefaults().BindRuntimeType(); + } +} +``` + +**Result** + +```sdl + +type Query { + users(order: [CustomSortInputType!]): [User] +} + +type User { + name: String! +} + +input CustomSortInputType { + name: SortEnumType +} + +enum SortEnumType { + ASC + DESC +} +``` + +### Default bindings + +For fields all fields where no explicit binding is found, a default is applied. This default is `DefaultSortEnumType`. +This can be configured with the method `DefaultBinding`. + +**Configuration** + +```csharp +public class CustomConvention : SortConvention +{ + protected override void Configure(ISortConventionDescriptor descriptor) + { + descriptor.AddDefaults().DefaultBinding(); + } +} +``` + +**Result** + +```sdl +type Query { + users(order: [UserSortInput]): [User] +} + +type User { + logonCount: Int! +} + +input UserSortInput { + logonCount: AscOnlySortEnumType +} + +enum AscOnlySortEnumType { + ASC +} +``` + +## Extend Types + +### SortEnumType + +When you build extensions for sorting, you may want to modify or extend the `DefaultSortEnumType`. + +```csharp +descriptor.ConfigureEnum( + x => x.Operation(CustomOperations.NULL_FIRST).Name("NULL_FIRST)); +``` + +```sdl +enum SortEnumType { + ASC + DESC + NULL_FIRST +} +``` + +### SortType + +In case you want to change a specific sort type, you can do this too. +You can use `Configure()` to alter the configuration of a type. + +```csharp +descriptor.Configure( + x => x.Description("This is my custom description")); +``` + +```sdl +"This is my customer description" +input CustomSortInputType { + name: SortEnumType +} +``` diff --git a/website/src/docs/hotchocolate/v12/get-started.md b/website/src/docs/hotchocolate/v12/get-started.md new file mode 100644 index 00000000000..cc823081958 --- /dev/null +++ b/website/src/docs/hotchocolate/v12/get-started.md @@ -0,0 +1,275 @@ +--- +title: "Get started with Hot Chocolate" +--- + +import { ApiChoiceTabs } from "../../components/mdx/api-choice-tabs" +import { InputChoiceTabs } from "../../components/mdx/input-choice-tabs" + +In this tutorial, we will walk you through the basics of creating a GraphQL server with Hot Chocolate. 
+ + + +# Setup + +If you are integrating Hot Chocolate into an existing project using ASP.NET Core, you can skip step 1. + +## 1. Create a new ASP.NET Core project + + + + +```bash +dotnet new web -n Demo +``` + +This will create a new directory called "Demo" containing your project's files. + +You can now open the "Demo" directory or the "Demo.csproj" file in your favorite code editor. + + + + +Create a new project from within Visual Studio using the "ASP.NET Core Empty" template. + +[Learn how you can create a new project within Visual Studio](https://docs.microsoft.com/visualstudio/ide/create-new-project) + + + + +## 2. Add the HotChocolate.AspNetCore package + +This package includes everything that's needed to get your GraphQL server up and running. + + + + +```bash +dotnet add package HotChocolate.AspNetCore +``` + + + + +You can add the `HotChocolate.AspNetCore` package using the NuGet Package Manager within Visual Studio. + +[Learn how you can use the NuGet Package Manager to install a package](https://docs.microsoft.com/nuget/quickstart/install-and-use-a-package-in-visual-studio#nuget-package-manager) + + + + +> ⚠️ Note: Additional `HotChocolate.*` packages need to have the same version. + +## 3. Define the types + +Next, we need to define the types our GraphQL schema should contain. These types and their fields define what consumers can query from our GraphQL API. + +For starters we can define two object types that we want to expose through our schema. + +```csharp +public class Book +{ + public string Title { get; set; } + + public Author Author { get; set; } +} + +public class Author +{ + public string Name { get; set; } +} +``` + +## 4. Add a Query type + +Now we need to define a Query type that exposes the types we have just created through a field. + +```csharp +public class Query +{ + public Book GetBook() => + new Book + { + Title = "C# in depth.", + Author = new Author + { + Name = "Jon Skeet" + } + }; +} +``` + +The field in question is called `GetBook`, but the name will be shortened to just `book` in the resulting schema. + +## 5. Add GraphQL services + +Next, we need to add the services required by Hot Chocolate to our Dependency Injection container. + + + + +```csharp +var builder = WebApplication.CreateBuilder(args); + +builder.Services + .AddGraphQLServer() + .AddQueryType(); +``` + + + + +```csharp +public void ConfigureServices(IServiceCollection services) +{ + services + .AddGraphQLServer() + .AddQueryType(); +} +``` + + + + +The `AddGraphQLServer` returns an `IRequestExecutorBuilder`, which has many extension methods, similar to an `IServiceCollection`, that can be used to configure the GraphQL schema. In the above example we are specifying the Query type that should be exposed by our GraphQL server. + +## 6. Map the GraphQL endpoint + +Now that we've added the necessary services, we need to expose our GraphQL server at an endpoint. Hot Chocolate comes with an ASP.NET Core middleware that is used to serve up the GraphQL server. + + + + +```csharp +var app = builder.Build(); + +app.MapGraphQL(); + +app.Run(); +``` + + + + +```csharp +public void Configure(IApplicationBuilder app, IWebHostEnvironment env) +{ + app.UseRouting(); + + app.UseEndpoints(endpoints => + { + endpoints.MapGraphQL(); + }); +} +``` + + + + +And this is it - you have successfully setup a Hot Chocolate GraphQL server! 🚀 + + + +# Executing a query + +First off we have to run the project. 
+ + + + +```bash +dotnet run +``` + + + + +The Project can be started by either pressing `Ctrl + F5` or clicking the green "Debug" button in the Visual Studio toolbar. + + + + +If you have setup everything correctly, you should be able to open http://localhost:5000/graphql (the port might be different for you) in your browser and be greeted by our GraphQL IDE [Banana Cake Pop](/docs/bananacakepop). + +![GraphQL IDE](../../images/get-started-bcp.png) + +Next click on "Create document". You will be presented with a settings dialog for this new tab, pictured below. Make sure the "Schema Endpoint" input field has the correct URL under which your GraphQL endpoint is available. If it is correct you can just go ahead and click the "Apply" button in the bottom right. + +![GraphQL IDE: Setup](../../images/get-started-bcp-setup.png) + +Now you should be seeing an editor like the one pictured below. If your GraphQL server has been correctly setup you should be seeing a green "online" in the top right corner of the editor. + +![GraphQL IDE: Editor](../../images/get-started-bcp-editor.png) + +The view is split into four panes. The top left pane is where you enter the queries you wish to send to the GraphQL server - the result will be displayed in the top right pane. Variables and headers can be modified in the bottom left pane and recent queries can be viewed in the bottom right pane. + +Okay, so let's send a query to your GraphQL server. Paste the below query into the top left pane of the editor: + +```graphql +{ + book { + title + author { + name + } + } +} +``` + +To execute the query, simply press the "Run" button. The result should be displayed as JSON in the top right pane as shown below: + +![GraphQL IDE: Executing a query](../../images/get-started-bcp-query.png) + +You can also view and browse the schema from within Banana Cake Pop. Click on the "Schema Reference" tab next to "Operations" in order to browse the schema. There's also a "Schema Definition" tab, pictured below, which shows the schema using the raw SDL (Schema Definition Language). + +![GraphQL IDE: Schema](../../images/get-started-bcp-schema.png) + +Congratulations, you've built your first Hot Chocolate GraphQL server and sent a query using the Banana Cake Pop GraphQL IDE 🎉🚀 + +# Additional resources + +Now that you've setup a basic GraphQL server, what should be your next steps? + +If this is your first time using GraphQL, we recommend [this guide](https://graphql.org/learn/) that walks you through the basic concepts of GraphQL. + +If you want to get an overview of Hot Chocolate's features, we recommend reading the _Overview_ pages in each section of the documentation. They can be found in the sidebar to your left. + +For a guided tutorial that explains how you can setup your GraphQL server beyond this basic example, checkout [our workshop](https://github.com/ChilliCream/graphql-workshop). Here we will dive deeper into several topics around Hot Chocolate and GraphQL in general. + +You can also jump straight into our documentation and learn more about
[Defining a GraphQL schema](/docs/hotchocolate/v12/defining-a-schema). diff --git a/website/src/docs/hotchocolate/v12/index.md b/website/src/docs/hotchocolate/v12/index.md new file mode 100644 index 00000000000..b3c664979d0 --- /dev/null +++ b/website/src/docs/hotchocolate/v12/index.md @@ -0,0 +1,23 @@ +--- +title: "Introduction" +--- + +> We are still working on the documentation for Hot Chocolate so help us by finding typos, missing things or write some additional docs with us. + +Hot Chocolate is an open-source GraphQL server for the Microsoft .NET platform that is compliant with the newest GraphQL October 2021 spec + Drafts, which makes Hot Chocolate compatible to all GraphQL compliant clients like Strawberry Shake, Relay, Apollo Client, and various other clients and tools. + +Hot Chocolate takes the complexity away from building a fully-fledged GraphQL server and lets you focus on delivering the next big thing. + +![Platform](../../images/platform.png) + +You can use Hot Chocolate Server as: + +- Stand-alone ASP.NET Core GraphQL Server. +- Serverless Azure Function or Amazon Lambda that serves up a GraphQL server. +- GraphQL Gateway for a federated data graph that pulls all your data sources together to create the one source of truth. + +Hot Chocolate is very easy to set up and takes the clutter away from writing GraphQL schemas. We update Hot Chocolate continuously and implement new spec features as they hit draft status. This lets you pick up new GraphQL features incrementally to open up new development opportunities for your ideas. + +Let's [get started](/docs/hotchocolate/v12/get-started) with Hot Chocolate! + +Join us on [YouTube](https://youtube.chillicream.com) for Hot Chocolate deep dives. diff --git a/website/src/docs/hotchocolate/v12/integrations/entity-framework.md b/website/src/docs/hotchocolate/v12/integrations/entity-framework.md new file mode 100644 index 00000000000..2b29f27e77a --- /dev/null +++ b/website/src/docs/hotchocolate/v12/integrations/entity-framework.md @@ -0,0 +1,217 @@ +--- +title: Entity Framework Core +--- + +import { ExampleTabs, Annotation, Code, Schema } from "../../../components/mdx/example-tabs" + +[Entity Framework Core](https://docs.microsoft.com/ef/core/) is a powerful object-relational mapping framework that has become a staple when working with SQL-based Databases in .NET Core applications. + +When working with Entity Framework Core's [DbContext](https://docs.microsoft.com/dotnet/api/system.data.entity.dbcontext), it is most commonly registered as a scoped service. + +```csharp +var builder = WebApplication.CreateBuilder(args); + +builder.Services.AddDbContext( + options => options.UseSqlServer("YOUR_CONNECTION_STRING")); +``` + +If you have read our [guidance on dependency injection](/docs/hotchocolate/v12/server/dependency-injection#resolver-injection) you might be inclined to simply inject your `DbContext` using the `HotChocolate.ServiceAttribute`. + +```csharp +public Foo GetFoo([Service] ApplicationDbContext dbContext) + => // Omitted code for brevity +``` + +While this is usually the correct way to inject services and it may appear to work initially, it has a fatal flaw: Entity Framework Core doesn't support [multiple parallel operations being run on the same context instance](https://docs.microsoft.com/ef/core/miscellaneous/async). + +Lets take a look at an example to understand why this can lead to issues. 
Both the `foo` and `bar` field in the below query are backed by a resolver that injects a scoped `DbContext` instance and performs a database query using it. + +```graphql +{ + foo + bar +} +``` + +Since Hot Chocolate parallelizes the execution of query fields, and both of the resolvers will receive the same scoped `DbContext` instance, two database queries are likely to be ran through this scoped `DbContext` instance in parallel. This will then lead to one of the following exceptions being thrown: + +- `A second operation started on this context before a previous operation completed.` +- `Cannot access a disposed object.` + +# Resolver injection of a DbContext + +In order to ensure that resolvers do not access the same scoped `DbContext` instance in parallel, you can inject it using the `ServiceKind.Synchronized`. + +```csharp +public Foo GetFoo( + [Service(ServiceKind.Synchronized)] ApplicationDbContext dbContext) + => // Omitted code for brevity +``` + +[Learn more about `ServiceKind.Synchronized`](/docs/hotchocolate/v12/server/dependency-injection#servicekindsynchronized) + +Since this is a lot of code to write, just to inject a `DbContext`, you can use [`RegisterDbContext`](#registerdbcontext) to simplify the injection. + +# RegisterDbContext + +In order to simplify the injection of a `DbContext` we have introduced a method called `RegisterDbContext`, similar to the [`RegisterService`](/docs/hotchocolate/v12/server/dependency-injection#registerservice) method for regular services. This method is part of the `HotChocolate.Data.EntityFramework` package, which you'll have to install. + +```bash +dotnet add package HotChocolate.Data.EntityFramework +``` + +> ⚠️ Note: All `HotChocolate.*` packages need to have the same version. + +Once installed you can simply call the `RegisterDbContext` method on the `IRequestExecutorBuilder`. The Hot Chocolate Resolver Compiler will then take care of correctly injecting your scoped `DbContext` instance into your resolvers and also ensuring that the resolvers using it are never run in parallel. + +```csharp +var builder = WebApplication.CreateBuilder(args); + +builder.Services.AddDbContext( + options => options.UseSqlServer("YOUR_CONNECTION_STRING")); + +builder.Services + .AddGraphQLServer() + .RegisterDbContext() + .AddQueryType(); + +public class Query +{ + public Foo GetFoo(ApplicationDbContext dbContext) + => // Omitted code for brevity +} +``` + +> ⚠️ Note: You still have to register your `DbContext` in the actual dependency injection container, by calling `services.AddDbContext`. `RegisterDbContext` on its own is not enough. + +You can also specify a [DbContextKind](#dbcontextkind) as argument to the `RegisterDbContext` method, to change how the `DbContext` should be injected. + +```csharp +builder.Services + .AddGraphQLServer() + .RegisterDbContext(DbContextKind.Pooled) +``` + +# DbContextKind + +When registering a `DbContext` you can specify a `DbContextKind` to instruct Hot Chocolate to use a certain strategy when injecting the `DbContext`. For the most part the `DbContextKind` is really similar to the [ServiceKind](/docs/hotchocolate/v12/server/dependency-injection#servicekind), with the exception of the [DbContextKind.Pooled](#dbcontextkindpooled). + +## DbContextKind.Synchronized + +This injection mechanism ensures that resolvers injecting the specified `DbContext` are never run in parallel. 
It is the default for the [`RegisterDbContext`](#registerdbcontext) method and behaves in the same fashion as [ServiceKind.Synchronized](/docs/hotchocolate/v12/server/dependency-injection#servicekindsynchronized) does for regular services. + +## DbContextKind.Resolver + +This injection mechanism will resolve the scoped `DbContext` from a resolver-scoped [`IServiceScope`](https://docs.microsoft.com/dotnet/api/microsoft.extensions.dependencyinjection.iservicescope). It behaves in the same fashion as [ServiceKind.Resolver](/docs/hotchocolate/v12/server/dependency-injection#servicekindresolver) does for regular services. Since a different `DbContext` instance is resolved for each resolver invocation, Hot Chocolate can parallelize the execution of resolvers using this `DbContext`. + +## DbContextKind.Pooled + +This injection mechanism will require your `DbContext` to be registered as a [pooled](https://docs.microsoft.com/ef/core/performance/advanced-performance-topics?tabs=with-constant#dbcontext-pooling) `IDbContextFactory`. + +```csharp +var builder = WebApplication.CreateBuilder(args); + +builder.Services.AddPooledDbContextFactory( + options => options.UseSqlServer("YOUR_CONNECTION_STRING")); + +builder.Services + .AddGraphQLServer() + .RegisterDbContext(DbContextKind.Pooled) + .AddQueryType(); + +public class Query +{ + public Foo GetFoo(ApplicationDbContext dbContext) + => // Omitted code for brevity +} +``` + +When injecting a `DbContext` using the `DbContextKind.Pool`, Hot Chocolate will retrieve one `DbContext` instance from the pool for each invocation of a resolver. Once the resolver has finished executing, the instance will be returned to the pool. + +Since each resolver invocation is therefore working with a "transient" `DbContext` instance, Hot Chocolate can parallelize the execution of resolvers using this `DbContext`. + +# Working with a pooled DbContext + +If you have registered your `DbContext` using [DbContextKind.Pooled](#dbcontextkindpooled) you are on your way to squeeze the most performance out of your GraphQL server, but unfortunately it also changes how you have to use the `DbContext`. + +For example you need to move all of the configuration from the `OnConfiguring` method inside your `DbContext` into the configuration action on the `AddPooledDbContextFactory` call. + +You also need to access your `DbContext` differently. In the following chapters we will take a look at some of the changes you have to make. + +## DataLoaders + +When creating DataLoaders that need access to your `DbContext`, you need to inject the `IDbContextFactory` using the constructor. + +The `DbContext` should only be created **and disposed** in the `LoadBatchAsync` method. + +```csharp +public class FooByIdDataLoader : BatchDataLoader +{ + private readonly IDbContextFactory _dbContextFactory; + + public FooByIdDataLoader( + IDbContextFactory dbContextFactory, + IBatchScheduler batchScheduler, DataLoaderOptions options) + : base(batchScheduler, options) + { + _dbContextFactory = dbContextFactory; + } + + protected override async Task> + LoadBatchAsync(IReadOnlyList keys, CancellationToken ct) + { + await using ApplicationDbContext dbContext = + _dbContextFactory.CreateDbContext(); + + return await dbContext.Foos + .Where(s => keys.Contains(s.Id)) + .ToDictionaryAsync(t => t.Id, ct); + } +} +``` + +> ⚠️ Note: It is important that you dispose the `DbContext` to return it to the pool. In the above example we are using `await using` to dispose the `DbContext` after it is no longer required. 
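To show how such a DataLoader might be consumed, here is a minimal sketch of a resolver that delegates to the `FooByIdDataLoader` defined above. The `Query` type and the `GetFooByIdAsync` method are illustrative assumptions and not part of the original example.

```csharp
public class Query
{
    // The DataLoader collects all Foo lookups of the current request and
    // resolves them in a single batched database round trip.
    public async Task<Foo> GetFooByIdAsync(
        Guid id,
        FooByIdDataLoader fooById,
        CancellationToken cancellationToken)
        => await fooById.LoadAsync(id, cancellationToken);
}
```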
## Services

When creating services, they now need to inject the `IDbContextFactory` instead of the `DbContext` directly. Your services also need to be of a transient lifetime. Otherwise you could run into the `DbContext` concurrency issue again, if the same `DbContext` instance is accessed by two resolvers through our service in parallel.

```csharp
var builder = WebApplication.CreateBuilder(args);

builder.Services.AddPooledDbContextFactory<ApplicationDbContext>(
    options => options.UseSqlServer("YOUR_CONNECTION_STRING"));

builder.Services.AddTransient<FooService>();

builder.Services
    .AddGraphQLServer()
    .RegisterService<FooService>()
    .AddQueryType<Query>();

public class Query
{
    public Foo GetFoo(FooService fooService)
        => // Omitted code for brevity
}

public class FooService : IAsyncDisposable
{
    private readonly ApplicationDbContext _dbContext;

    public FooService(IDbContextFactory<ApplicationDbContext> dbContextFactory)
    {
        _dbContext = dbContextFactory.CreateDbContext();
    }

    public Foo GetFoo()
        => _dbContext.Foos.FirstOrDefault();

    public ValueTask DisposeAsync()
    {
        return _dbContext.DisposeAsync();
    }
}
```

> ⚠️ Note: It is important that you dispose the `DbContext` to return it to the pool, once your transient service is disposed. In the above example we are implementing `IAsyncDisposable` and disposing the created `DbContext` in the `DisposeAsync` method. This method will be invoked by the dependency injection system.
diff --git a/website/src/docs/hotchocolate/v12/integrations/index.md b/website/src/docs/hotchocolate/v12/integrations/index.md new file mode 100644 index 00000000000..332a0cdc586 --- /dev/null +++ b/website/src/docs/hotchocolate/v12/integrations/index.md @@ -0,0 +1,30 @@
---
title: "Integrations"
---

In this section we will look at different technologies and how you can integrate them into your GraphQL server.

# Entity Framework Core

Using Entity Framework Core requires some additional setup to play nicely with the concurrent nature of GraphQL resolvers. You will learn how to correctly use your `DbContext` in different scenarios and also how to tune Entity Framework Core for maximum throughput of your GraphQL server.

[Learn more about our Entity Framework Core integration](/docs/hotchocolate/v12/integrations/entity-framework)

# MongoDB

You will learn how to access MongoDB from within your resolvers and how to translate our pagination, projection, filtering and sorting capabilities to native MongoDB queries.

[Learn more about our MongoDB integration](/docs/hotchocolate/v12/integrations/mongodb)

# Neo4J

You will learn how to access Neo4J from within your resolvers and how to translate our pagination, projection, filtering and sorting capabilities to native Neo4J queries.

[Learn more about our Neo4J integration](/docs/hotchocolate/v12/integrations/neo4j)

# Spatial Data

You will learn how you can expose [NetTopologySuite types](https://github.com/NetTopologySuite/NetTopologySuite) in the form of [GeoJSON](https://geojson.org/) and how to integrate them with our data APIs.

[Learn more about our Spatial Data integration](/docs/hotchocolate/v12/integrations/spatial-data)
diff --git a/website/src/docs/hotchocolate/v12/integrations/mongodb.md b/website/src/docs/hotchocolate/v12/integrations/mongodb.md new file mode 100644 index 00000000000..5c402ccb346 --- /dev/null +++ b/website/src/docs/hotchocolate/v12/integrations/mongodb.md @@ -0,0 +1,267 @@
---
title: MongoDB
---

Hot Chocolate has a data integration for MongoDB.
+With this integration, you can translate paging, filtering, sorting, and projections, directly into native MongoDB queries. + +You can find a example project in [Hot Chocolate Examples](https://github.com/ChilliCream/hotchocolate-examples/tree/master/misc/MongoDB) + +# Get Started + +To use the MongoDB integration, you need to install the package `HotChocolate.Data.MongoDb`. + +```bash +dotnet add package HotChocolate.Data.MongoDb +``` + +> ⚠️ Note: All `HotChocolate.*` packages need to have the same version. + +# MongoExecutable + +The whole integration builds around `IExecutable`. +The integration provides you the extension method `AsExecutable` on `IMongoCollection`, `IAggregateFluent` and `IFindFluent` +The execution engine picks up the `IExecutable` and executes it efficiently. +You are free to use any form of aggregation or find a pipeline before you execute `AsExecutable` + +```csharp +[UsePaging] +[UseProjection] +[UseSorting] +[UseFiltering] +public IExecutable GetPersons([Service] IMongoCollection collection) +{ + return collection.AsExecutable(); +} + +[UseFirstOrDefault] +public IExecutable GetPersonById( + [Service] IMongoCollection collection, + Guid id) +{ + return collection.Find(x => x.Id == id).AsExecutable(); +} +``` + +# Filtering + +To use MongoDB filtering you need to register the convention on the schema builder: + +```csharp +services + .AddGraphQLServer() + .AddQueryType() + .AddMongoDbFiltering(); +``` + +> To use MongoDB filtering alongside with `IQueryable`/`IEnumerable`, you have to register the MongoDB convention under a different scope. +> You can specify the scope on the schema builder by executing `AddMongoDbFiltering("yourScope")`. +> You then have to specify this scope on each method you use MongoDb filtering: `[UseFiltering(Scope = "yourScope")]` or `UseFiltering(scope = "yourScope")` + +Your filters are now converted to `BsonDocument`s and applied to the executable. + +_GraphQL Query:_ + +```graphql +query GetPersons { + persons( + where: { + name: { eq: "Yorker Shorton" } + addresses: { some: { street: { eq: "04 Leroy Trail" } } } + } + ) { + name + addresses { + street + city + } + } +} +``` + +_Mongo Query_ + +```json +{ + "find": "person", + "filter": { + "Name": { "$eq": "Yorker Shorton" }, + "Addresses": { "$elemMatch": { "Street": { "$eq": "04 Leroy Trail" } } } + } +} +``` + +# Sorting + +To use MongoDB sorting you need to register the convention on the schema builder: + +```csharp +services + .AddGraphQLServer() + .AddQueryType() + .AddMongoDbSorting(); +``` + +> To use MongoDB Sorting alongside with `IQueryable`/`IEnumerable`, you have to register the MongoDB convention under a different scope. +> You can specify the scope on the schema builder by executing `AddMongoDbSorting("yourScope")`. +> You then have to specify this scope on each method you use MongoDb Sorting: `[UseSorting(Scope = "yourScope")]` or `UseSorting(scope = "yourScope")` + +Your sorting is now converted to `BsonDocument`s and applied to the executable. 
_GraphQL Query:_

```graphql
query GetPersons {
  persons(order: [{ name: ASC }, { mainAddress: { city: DESC } }]) {
    name
    addresses {
      street
      city
    }
  }
}
```

_Mongo Query_

```json
{
  "find": "person",
  "filter": {},
  "sort": { "Name": 1, "MainAddress.City": -1 }
}
```

# Projections

To use MongoDB projections you need to register the convention on the schema builder:

```csharp
services
    .AddGraphQLServer()
    .AddQueryType<Query>()
    .AddMongoDbProjections();
```

> To use MongoDB projections alongside `IQueryable`/`IEnumerable`, you have to register the MongoDB convention under a different scope.
> You can specify the scope on the schema builder by executing `AddMongoDbProjections("yourScope")`.
> You then have to specify this scope on each method on which you use MongoDB projections: `[UseProjections(Scope = "yourScope")]` or `UseProjections(scope = "yourScope")`

Projections do not always lead to a performance increase.
Even though MongoDB processes and transfers less data, projections more often than not harm query performance.
This [Medium article by Tek Loon](https://betterprogramming.pub/improve-mongodb-performance-using-projection-c08c38334269) explains how and when to use projections well.

_GraphQL Query:_

```graphql
query GetPersons {
  persons {
    name
    addresses {
      city
    }
  }
}
```

_Mongo Query_

```json
{
  "find": "person",
  "filter": {},
  "projection": { "Addresses.City": 1, "Name": 1 }
}
```

# Paging

In order to use pagination with MongoDB, we have to register the MongoDB-specific pagination providers.

```csharp
services
    .AddGraphQLServer()
    .AddMongoDbPagingProviders();
```

[Learn more about pagination providers](/docs/hotchocolate/v12/fetching-data/pagination#providers)

## Cursor Pagination

To use cursor-based pagination, annotate your resolver with `[UsePaging]` or `.UsePaging()`.

```csharp
[UsePaging]
public IExecutable<Person> GetPersons([Service] IMongoCollection<Person> collection)
{
    return collection.AsExecutable();
}
```

You can then execute queries like the following one:

```graphql
query GetPersons {
  persons(first: 50, after: "OTk=") {
    nodes {
      name
      addresses {
        city
      }
    }
    pageInfo {
      endCursor
      hasNextPage
      hasPreviousPage
      startCursor
    }
  }
}
```

## Offset Pagination

To use offset-based pagination, annotate your resolver with `[UseOffsetPaging]` or `.UseOffsetPaging()`.

```csharp
[UseOffsetPaging]
public IExecutable<Person> GetPersons([Service] IMongoCollection<Person> collection)
{
    return collection.AsExecutable();
}
```

You can then execute queries like the following one:

```graphql
query GetPersons {
  persons(skip: 50, take: 50) {
    items {
      name
      addresses {
        city
      }
    }
    pageInfo {
      hasNextPage
      hasPreviousPage
    }
  }
}
```

# FirstOrDefault / SingleOrDefault

Sometimes you may want to return only a single object of a collection.
To limit the response to one element you can use the `UseFirstOrDefault` or `UseSingleOrDefault` middleware.
Hot Chocolate will rewrite the type of the field from a list type to an object type.
```csharp
[UseFirstOrDefault]
public IExecutable<Person> GetPersonById(
    [Service] IMongoCollection<Person> collection,
    Guid id)
{
    return collection.Find(x => x.Id == id).AsExecutable();
}
```
diff --git a/website/src/docs/hotchocolate/v12/integrations/neo4j.md b/website/src/docs/hotchocolate/v12/integrations/neo4j.md new file mode 100644 index 00000000000..cf679f6be59 --- /dev/null +++ b/website/src/docs/hotchocolate/v12/integrations/neo4j.md @@ -0,0 +1,225 @@
---
title: Neo4J Database
---

Hot Chocolate has a data integration for Neo4J.
With this integration, you can translate paging, filtering, sorting, and projections directly into native Cypher queries.

You can find an example project in [Hot Chocolate Examples](https://github.com/ChilliCream/graphql-workshop-neo4j).

# Get Started

To use the Neo4J integration, you need to install the package `HotChocolate.Data.Neo4J`.

```bash
dotnet add package HotChocolate.Data.Neo4J
```

> ⚠️ Note: All `HotChocolate.*` packages need to have the same version.

# Neo4JExecutable

The whole integration builds around `IExecutable`.
The execution engine picks up the `IExecutable` and executes it efficiently.

```csharp
[UseNeo4JDatabase("neo4j")]
[UsePaging]
[UseProjection]
[UseSorting]
[UseFiltering]
public IExecutable<Person> GetPersons([ScopedService] IAsyncSession session) => new Neo4JExecutable<Person>(session);
```

# Filtering

To use Neo4J filtering you need to register the convention on the schema builder:

```csharp
services
    .AddGraphQLServer()
    .AddQueryType<Query>()
    .AddNeo4JFiltering();
```

> To use Neo4J filtering alongside `IQueryable`/`IEnumerable`, you have to register the Neo4J convention under a different scope.
> You can specify the scope on the schema builder by executing `AddNeo4JFiltering("yourScope")`.
> You then have to specify this scope on each method on which you use Neo4J filtering: `[UseFiltering(Scope = "yourScope")]` or `UseFiltering(scope = "yourScope")`

Your filters are now converted to Cypher and applied to the executable.

_GraphQL Query:_

```graphql
query GetPersons {
  persons(
    where: {
      name: { eq: "Yorker Shorton" }
      addresses: { some: { street: { eq: "04 Leroy Trail" } } }
    }
  ) {
    name
    addresses {
      street
      city
    }
  }
}
```

_Cypher Query_

```cypher
MATCH (person:Person)
WHERE person.name = 'Yorker Shorton'
RETURN person {.name}
```

# Sorting

To use Neo4J sorting you need to register the convention on the schema builder:

```csharp
services
    .AddGraphQLServer()
    .AddQueryType<Query>()
    .AddNeo4JSorting();
```

> To use Neo4J sorting alongside `IQueryable`/`IEnumerable`, you have to register the Neo4J convention under a different scope.
> You can specify the scope on the schema builder by executing `AddNeo4JSorting("yourScope")`.
> You then have to specify this scope on each method on which you use Neo4J sorting: `[UseSorting(Scope = "yourScope")]` or `UseSorting(scope = "yourScope")`

Your sorting is now converted to Cypher and applied to the executable.
+ +_GraphQL Query:_ + +```graphql +query GetPersons { + persons(order: [{ name: ASC }]) { + name + addresses { + street + city + } + } +} +``` + +_Cypher Query_ + +```cypher +MATCH (person:Person) +WHERE person.name = 'Yorker Shorton" AND +RETURN person {.name} +``` + +# Projections + +To use Neo4J projections you need to register the convention on the schema builder: + +```csharp +services + .AddGraphQLServer() + .AddQueryType() + .AddNeo4JProjections(); +``` + +> To use Neo4J Projections alongside with `IQueryable`/`IEnumerable`, you have to register the Neo4J convention under a different scope. +> You can specify the scope on the schema builder by executing `AddNeo4JProjections("yourScope")`. +> You then have to specify this scope on each method you use Neo4J Projections: `[UseProjections(Scope = "yourScope")]` or `UseProjections(scope = "yourScope")` + +_GraphQL Query:_ + +```graphql +query GetPersons { + persons { + name + addresses { + city + } + } +} +``` + +_Cypher Query_ + +```cypher +MATCH (person:Person) +WHERE person.name = 'Yorker Shorton" AND +RETURN person {.name} +``` + +# Paging + +In order to use pagination with Neo4J, we have to register the Neo4J specific pagination providers. + +```csharp +services + .AddGraphQLServer() + .AddNeo4JPagingProviders(); +``` + +[Learn more about pagination providers](/docs/hotchocolate/v12/fetching-data/pagination#providers) + +## Cursor Pagination + +To use cursor based pagination annotate you resolver with `[UseNeo4JPaging]` or `.UseNeo4JPaging()` + +```csharp +[UseNeo4JDatabase("neo4j")] +[UsePaging] +[UseProjection] +public IExecutable GetPersons([ScopedService] IAsyncSession session) => new Neo4JExecutable(session); +``` + +You can then execute queries like the following one: + +```graphql +query GetPersons { + persons(first: 50, after: "OTk=") { + nodes { + name + addresses { + city + } + } + pageInfo { + endCursor + hasNextPage + hasPreviousPage + startCursor + } + } +} +``` + +## Offset Pagination + +To use cursor based pagination annotate you resolver with `[UseNeo4JPaging]` or `.UseNeo4JPaging()` + +```csharp +[UseNeo4JDatabase("neo4j")] +[UseOffsetPaging] +[UseProjection] +public IExecutable GetPersons([ScopedService] IAsyncSession session) => new Neo4JExecutable(session); +``` + +You can then execute queries like the following one: + +```graphql +query GetPersons { + persons(skip: 50, take: 50) { + items { + name + addresses { + city + } + } + pageInfo { + hasNextPage + hasPreviousPage + } + } +} +``` diff --git a/website/src/docs/hotchocolate/v12/integrations/spatial-data.md b/website/src/docs/hotchocolate/v12/integrations/spatial-data.md new file mode 100644 index 00000000000..dde25f32e97 --- /dev/null +++ b/website/src/docs/hotchocolate/v12/integrations/spatial-data.md @@ -0,0 +1,610 @@ +--- +title: Spatial Data +--- + +> ⚠️ Experimental Warning: This feature is not yet finished nor polished. +> +> Spatial types is a community-driven feature. As the core team has little experience with spatial data, we need your feedback to decide the next steps for this feature. It is important for us to deliver you the best experience, so reach out to us if you run into issues or have ideas to improve it. +> +> We try not to introduce breaking changes, but we save ourselves the possibility to make changes to the API in future releases, if we find flaws in the current design. + +Spatial data describes locations or shapes in form of objects. Many database providers have support +for storing this type of data. 
APIs often use GeoJSON to send spatial data over the network. + +The most common library used for spatial data in .NET is [NetTopologySuite](https://github.com/NetTopologySuite/NetTopologySuite). +Entity Framework supports [Spatial Data](https://docs.microsoft.com/en-gb/ef/core/modeling/spatial) and uses +NetToplogySuite as its data representation. + +The package `HotChocolate.Spatial` integrates NetTopologySuite into HotChocolate. With this package your resolvers +can return NetTopologySuite shapes and they will be transformed into GeoJSON. + +# Getting Started + +You first need to add the `HotChocolate.Spatial` package reference to your project. You can do this with the `dotnet` cli: + +```bash +dotnet add package HotChocolate.Spatial +``` + +> ⚠️ Note: All `HotChocolate.*` packages need to have the same version. + +To make the schema recognize the spatial types you need to register them on the schema builder. + +```csharp +services + .AddGraphQLServer() + .AddSpatialTypes(); +``` + +If you are using our data extensions to project data from a database you also need to add the package `HotChocolate.Data.Spatial` to your project. + +```bash +dotnet add package HotChocolate.Data.Spatial +``` + +In order to use the data extensions in your resolvers you need to register them with the GraphQL configuration builder. + +```csharp +services + .AddGraphQLServer() + .AddSpatialTypes() + .AddFiltering() + .AddProjections() + .AddSpatialFiltering() + .AddSpatialProjections(); +``` + +All NetToplogySuite runtime types are now bound to the corresponding GeoJSON type. + +```csharp +public class Pub +{ + public int Id { get; set; } + + public string Name { get; set; } + + public Point Location { get; set; } +} + +public class Query +{ + // we use ef in this example + [UseDbContext(typeof(SomeDbContext))] + public IQueryable GetPubs([ScopedService] SomeDbContext someDbContext) + { + return someDbContext.Pubs; + } +} +``` + +```sdl +type Pub { + id: Int! + name: String! + location: GeoJSONPointType! +} + +type Query { + pubs: [Pub!]! +} +``` + +```graphql +{ + pubs { + id + location { + __typename + bbox + coordinates + crs + type + } + name + } +} +``` + +```json +{ + "data": { + "pubs": [ + { + "id": 1, + "location": { + "__typename": "GeoJSONPointType", + "bbox": [12, 12, 12, 12], + "coordinates": [[12, 12]], + "crs": 4326, + "type": "Point" + }, + "name": "The Winchester" + }, + { + "id": 2, + "location": { + "__typename": "GeoJSONPointType", + "bbox": [43, 534, 43, 534], + "coordinates": [[43, 534]], + "crs": 4326, + "type": "Point" + }, + "name": "Fountains Head" + } + ] + } +} +``` + +# Spatial Types + +Hot Chocolate supports GeoJSON input and output types. There is also a GeoJSON scalar to make generic inputs possible. + +## Output Types + +The following mappings are available by default: + +| NetTopologySuite | GraphQL | +| ---------------- | -------------------------- | +| Point | GeoJSONPointType | +| MultiPoint | GeoJSONMultiPointType | +| LineString | GeoJSONLineStringType | +| MultiLineString | GeoJSONMultiLineStringType | +| Polygon | GeoJSONPolygonType | +| MultiPolygon | GeoJSONMultiPolygonType | +| Geometry | GeoJSONInterface | + +All GeoJSON output types implement the following interface. + +```sdl +interface GeoJSONInterface { + "The geometry type of the GeoJson object" + type: GeoJSONGeometryType! 
+ "The minimum bounding box around the geometry object" + bbox: [Float] + "The coordinate reference system integer identifier" + crs: Int +} +``` + +A `NetTopologySuite.Gemeotry` is mapped to this interface by default. + +## Input Types + +For each output type there is a corresponding input type + +| NetTopologySuite | GraphQL | +| ---------------- | --------------------------- | +| Point | GeoJSONPointInput | +| MultiPoint | GeoJSONMultiPointInput | +| LineString | GeoJSONLineStringInput | +| MultiLineString | GeoJSONMultiLineStringInput | +| Polygon | GeoJSONPolygonInput | +| MultiPolygon | GeoJSONMultiPolygonInput | + +## Scalar + +With interfaces or unions it is possible to have multiple possible return types. +Input types do not yet have a way of defining multiple possibilities. +As an addition to output and input types there is the `Geometry` scalar, which closes this gap. +When a resolver expects any `Geometry` type as an input, you can use this scalar. +This scalar should be used with caution. Input and output types are much more expressive than a custom scalar. + +```sdl +scalar Geometry +``` + +# Projections + +To project spatial types, a special handler is needed. This handler can be registered on the schema with `.AddSpatialProjections()` + +```csharp + services + .AddGraphQLServer() + .AddProjections() + .AddSpatialTypes() + .AddSpatialProjections() +``` + +The projection middleware will use this handler to project the spatial data directly to the database + +```csharp +[UseDbContext(typeof(SomeDbContext))] +[UseProjection] +public IQueryable GetPubs([ScopedService] SomeDbContext someDbContext) +{ + return someDbContext.Pubs; +} +``` + +```graphql +{ + pubs { + id + location { + __typename + bbox + coordinates + crs + type + } + name + } +} +``` + +```sql +SELECT p."Id", p."Location", p."Name" +FROM "Pubs" AS p +``` + +# Filtering + +Entity framework supports filtering on NetTopologySuite objects. +`HotChocolate.Spatial` provides handlers for filtering spatial types on `IQueryable`. +These handlers can be registered on the schema with `.AddSpatialFiltering()` + +```csharp + services + .AddGraphQLServer() + .AddProjections() + .AddSpatialTypes() + .AddSpatialFiltering() +``` + +After the registration of the handlers `UseFiltering()` will infer the possible filter types +for all `Geometry` based types. + +```csharp +[UseDbContext(typeof(SomeDbContext))] +[UseFiltering] +public IQueryable GetPubs([ScopedService] SomeDbContext someDbContext) +{ + return someDbContext.Pubs; +} +``` + +```sdl {10} +type Query { + pubs(where: PubFilterInput): [Pub!]! +} + +input PubFilterInput { + and: [PubFilterInput!] + or: [PubFilterInput!] + id: ComparableInt32OperationFilterInput + name: StringOperationFilterInput + location: PointFilterInput +} + +input PointFilterInput { + and: [PointFilterInput!] + or: [PointFilterInput!] 
+ m: ComparableDoubleOperationFilterInput + x: ComparableDoubleOperationFilterInput + y: ComparableDoubleOperationFilterInput + z: ComparableDoubleOperationFilterInput + area: ComparableDoubleOperationFilterInput + boundary: GeometryFilterInput + centroid: PointFilterInput + dimension: DimensionOperationFilterInput + envelope: GeometryFilterInput + geometryType: StringOperationFilterInput + interiorPoint: PointFilterInput + isSimple: BooleanOperationFilterInput + isValid: BooleanOperationFilterInput + length: ComparableDoubleOperationFilterInput + numPoints: ComparableInt32OperationFilterInput + ogcGeometryType: OgcGeometryTypeOperationFilterInput + pointOnSurface: PointFilterInput + srid: ComparableInt32OperationFilterInput + contains: GeometryContainsOperationFilterInput + distance: GeometryDistanceOperationFilterInput + intersects: GeometryIntersectsOperationFilterInput + overlaps: GeometryOverlapsOperationFilterInput + touches: GeometryTouchesOperationFilterInput + within: GeometryWithinOperationFilterInput + ncontains: GeometryContainsOperationFilterInput + ndistance: GeometryDistanceOperationFilterInput + nintersects: GeometryIntersectsOperationFilterInput + noverlaps: GeometryOverlapsOperationFilterInput + ntouches: GeometryTouchesOperationFilterInput + nwithin: GeometryWithinOperationFilterInput +} +``` + +## Distance + +The `distance` filter is an implementation of [`Geometry.Within`](http://nettopologysuite.github.io/NetTopologySuite/api/NetTopologySuite.Geometries.Geometry.html#NetTopologySuite_Geometries_Geometry_Within_NetTopologySuite_Geometries_Geometry_) + +The filter requires an input geometry. You can optionally buffer this geometry with the input field buffer. +The filter also has all comparable filters. + +```sdl +input GeometryDistanceOperationFilterInput { + geometry: Geometry! + buffer: Float + eq: Float + neq: Float + in: [Float!] + nin: [Float!] + gt: Float + ngt: Float + gte: Float + ngte: Float + lt: Float + nlt: Float + lte: Float + nlte: Float +} +``` + +```graphql +{ + pubs( + where: { + location: { + within: { geometry: { type: Point, coordinates: [1, 1] }, lt: 120 } + } + } + ) { + id + name + location + } +} +``` + +```sql +SELECT c."Id", c."Name", c."Area" +FROM "Counties" AS c +WHERE ST_Within(c."Area", @__p_0) +``` + +The negation of this operation is `nwithin` + +```sql +SELECT c."Id", c."Name", c."Area" +FROM "Counties" AS c +WHERE NOT ST_Within(c."Area", @__p_0) +``` + +## Contains + +The `contains` filter is an implementation of [`Geometry.Contains`](http://nettopologysuite.github.io/NetTopologySuite/api/NetTopologySuite.Geometries.Geometry.html#NetTopologySuite_Geometries_Geometry_Contains_NetTopologySuite_Geometries_Geometry) + +The filter requires an input geometry. You can optionally buffer this geometry with the input field buffer. + +```sdl +input GeometryContainsOperationFilterInput { + geometry: Geometry! 
+ buffer: Float +} +``` + +```graphql +{ + counties( + where: { + area: { contains: { geometry: { type: Point, coordinates: [1, 1] } } } + } + ) { + id + name + area + } +} +``` + +```sql +SELECT c."Id", c."Name", c."Area" +FROM "Counties" AS c +WHERE ST_Contains(c."Area", @__p_0) +``` + +The negation of this operation is `ncontains` + +```sql +SELECT c."Id", c."Name", c."Area" +FROM "Counties" AS c +WHERE NOT ST_Contains(c."Area", @__p_0) +``` + +## Touches + +The `touches` filter is an implementation of [`Geometry.Touches`](http://nettopologysuite.github.io/NetTopologySuite/api/NetTopologySuite.Geometries.Geometry.html#NetTopologySuite_Geometries_Geometry_Touches_NetTopologySuite_Geometries_Geometry_) + +The filter requires an input geometry. You can optionally buffer this geometry with the input field buffer. + +```sdl +input GeometryTouchesOperationFilterInput { + geometry: Geometry! + buffer: Float +} +``` + +```graphql +{ + counties( + where: { + area: { + touches: { + geometry: { + type: Polygon, + coordinates: [[1, 1], ....] + } + } + } + }){ + id + name + area + } +} +``` + +```sql +SELECT c."Id", c."Name", c."Area" +FROM "Counties" AS c +WHERE ST_Touches(c."Area", @__p_0) +``` + +The negation of this operation is `ntouches` + +```sql +SELECT c."Id", c."Name", c."Area" +FROM "Counties" AS c +WHERE NOT ST_Touches(c."Area", @__p_0) +``` + +## Intersects + +The `intersects` filter is an implementation of [`Geometry.Intersects`](http://nettopologysuite.github.io/NetTopologySuite/api/NetTopologySuite.Geometries.Geometry.html#NetTopologySuite_Geometries_Geometry_Intersects_NetTopologySuite_Geometries_Geometry_) + +The filter requires an input geometry. You can optionally buffer this geometry with the input field buffer. + +```sdl +input GeometryIntersectsOperationFilterInput { + geometry: Geometry! + buffer: Float +} +``` + +```graphql +{ + roads( + where: { + road: { + intersects: { + geometry: { + type: LineString, + coordinates: [[1, 1], ....] + } + } + } + }){ + id + name + road + } +} +``` + +```sql +SELECT r."Id", r."Name", r."Road" +FROM "Roads" AS r +WHERE ST_Intersects(r."Road", @__p_0) +``` + +The negation of this operation is `nintersects` + +```sql +SELECT r."Id", r."Name", r."Road" +FROM "Roads" AS r +WHERE NOT ST_Intersects(r."Road", @__p_0) +``` + +## Overlaps + +The `overlaps` filter is an implementation of [`Geometry.Overlaps`](http://nettopologysuite.github.io/NetTopologySuite/api/NetTopologySuite.Geometries.Geometry.html#NetTopologySuite_Geometries_Geometry_Overlaps_NetTopologySuite_Geometries_Geometry_) + +```sdl +input GeometryOverlapsOperationFilterInput { + geometry: Geometry! + buffer: Float +} +``` + +```graphql +{ + county( + where: { + area: { + overlaps: { + geometry: { + type: Polygon, + coordinates: [[1, 1], ....] + } + } + } + }){ + id + name + area + } +} +``` + +```sql +SELECT c."Id", c."Name", c."Area" +FROM "Counties" AS c +WHERE ST_Overlaps(c."Area", @__p_0) +``` + +The negation of this operation is `noverlaps` + +```sql +SELECT c."Id", c."Name", c."Area" +FROM "Counties" AS c +WHERE NOT ST_Overlaps(c."Area", @__p_0) +``` + +## Within + +The `within` filter is an implementation of [`Geometry.Within`](http://nettopologysuite.github.io/NetTopologySuite/api/NetTopologySuite.Geometries.Geometry.html#NetTopologySuite_Geometries_Geometry_Within_NetTopologySuite_Geometries_Geometry_) + +```sdl +input GeometryWithinOperationFilterInput { + geometry: Geometry! 
+ buffer: Float +} +``` + +```graphql +{ + pubs( + where: { + location: { + within: { geometry: { type: Point, coordinates: [1, 1] }, buffer: 200 } + } + } + ) { + id + name + location + } +} +``` + +```sql +SELECT c."Id", c."Name", c."Area" +FROM "Counties" AS c +WHERE ST_Within(c."Area", @__p_0) +``` + +The negation of this operation is `nwithin` + +```sql +SELECT c."Id", c."Name", c."Area" +FROM "Counties" AS c +WHERE NOT ST_Within(c."Area", @__p_0) +``` + +# What's next? + +In upcoming releases spatial data will get reprojection features and sorting capabilities. + +## Reprojection + +At the moment the coordinate reference system (crs) is fixed. The user has to know the crs of the backend +to do spatial filtering. The API will furthermore always return the data in the crs it was stored in the database. + +We want to improve this. The user should be able to send data to the backend without knowing what the crs. The +backend should reproject the incoming data automatically to the correct crs. + +Additionally we want to provide a way for users, to specify in what CRS they want to receive the data. + +## Sorting + +Currently we only support filtering for spatial data. We also want to provide a way for users to sort results. +This can e.g. be used to find the nearest result for a given point. diff --git a/website/src/docs/hotchocolate/v12/performance/automatic-persisted-queries.md b/website/src/docs/hotchocolate/v12/performance/automatic-persisted-queries.md new file mode 100644 index 00000000000..24e8183a19f --- /dev/null +++ b/website/src/docs/hotchocolate/v12/performance/automatic-persisted-queries.md @@ -0,0 +1,303 @@ +--- +title: "Automatic persisted queries" +--- + +This guide will walk you through how automatic persisted queries work and how you can set them up with the Hot Chocolate GraphQL server. + +# How it works + +The automatic persisted queries protocol was originally specified by Apollo and represent an evolution of the persisted query feature that many GraphQL servers implement. Instead of storing persisted queries ahead of time, the client can store queries dynamically. This preserves the original proposal's performance benefits but removes the friction of setting up build processes that post-process the client applications source code. + +When the client makes a request to the server, it will optimistically send a short cryptographic hash instead of the full query text. + +## Optimized Path + +Hot Chocolate server will inspect the incoming request for a query id or a full GraphQL query. If the request has only a query id the execution engine will first try to resolve the full query from the query storage. If the query storage contains a query that matches the provided query id, the request will be upgraded to a fully valid GraphQL request and will be executed. + +## New Query Path + +If the query storage does not contain a query that matches the sent query id, the Hot Chocolate server will return an error result that indicates that the query was not found (this will only happen the first time a client asks for a certain query). The client application will then send in a second request with the specified query id and the complete GraphQL query. This will trigger Hot Chocolate server to store this new query in its query storage and, at the same time, execute the query and returning the result. + +# Setup + +In the following tutorial, we will walk you through creating a Hot Chocolate GraphQL server and configuring it to support automatic persisted queries. 
+ +## Step 1: Create a GraphQL server project + +Open your preferred terminal and select a directory where you want to add the code of this tutorial. + +1. Install the Hot Chocolate GraphQL server template. + +```bash +dotnet new -i HotChocolate.Templates.Server +``` + +2. Create a new Hot Chocolate GraphQL server project. + +```bash +dotnet new graphql +``` + +3. Add the in-memory query storage to your project. + +```bash +dotnet add package HotChocolate.PersistedQueries.InMemory +``` + +> ⚠️ Note: All `HotChocolate.*` packages need to have the same version. + +## Step 2: Configure automatic persisted queries + +Next, we want to configure our GraphQL server to be able to handle automatic persisted query requests. For this, we need to register the in-memory query storage and configure the automatic persisted query request pipeline. + +1. Configure GraphQL server to use the automatic persisted query pipeline. + +```csharp +public void ConfigureServices(IServiceCollection services) +{ + services + .AddGraphQLServer() + .AddQueryType() + .UseAutomaticPersistedQueryPipeline(); +} +``` + +2. Next, register the in-memory query storage. + +```csharp +public void ConfigureServices(IServiceCollection services) +{ + services + .AddGraphQLServer() + .AddQueryType() + .UseAutomaticPersistedQueryPipeline() + .AddInMemoryQueryStorage(); +} +``` + +3. Last but not least, we need to add the Microsoft Memory Cache, which the in-memory query storage will use as the in-memory key-value store. + +```csharp +public void ConfigureServices(IServiceCollection services) +{ + services + // Global Services + .AddMemoryCache() + + // GraphQL server configuration + .AddGraphQLServer() + .AddQueryType() + .UseAutomaticPersistedQueryPipeline() + .AddInMemoryQueryStorage(); +} +``` + +## Step 3: Verify server setup + +Now that our server is set up with automatic persisted queries, let us verify that it works as expected. We can do that by just using our console and a tool called `curl`. For our example, we will use a dummy query `{__typename}` with an MD5 hash serialized to base64 as a query id `71yeex4k3iYWQgg9TilDIg==`. We will test the full automatic persisted query flow and walk you through the responses. + +1. Start the GraphQL server. + +```bash +dotnet run +``` + +2. First, we will ask our GraphQL server to execute our query with the optimized request containing only the query hash. At this point, the server will not know this query and hast to return an error indicating this. + +**Request** + +```bash +curl -g 'http://localhost:5000/graphql/?extensions={"persistedQuery":{"version":1,"md5Hash":"71yeex4k3iYWQgg9TilDIg=="}}' +``` + +**Response** + +The response indicates, as expected, that this query is unknown so far. + +```json +{ + "errors": [ + { + "message": "PersistedQueryNotFound", + "extensions": { "code": "HC0020" } + } + ] +} +``` + +3. Next, we want to store our dummy query on the server. We will send in the hash as before but now also provide the query parameter with the full GraphQL query string. + +**Request** + +```bash +curl -g 'http://localhost:5000/graphql/?query={__typename}&extensions={"persistedQuery":{"version":1,"md5Hash":"71yeex4k3iYWQgg9TilDIg=="}}' +``` + +**Response** + +Our GraphQL server will respond with the query result and indicate that the query was stored on the server `"persisted": true`. + +```json +{ + "data": { "__typename": "Query" }, + "extensions": { + "persistedQuery": { + "md5Hash": "71yeex4k3iYWQgg9TilDIg==", + "persisted": true + } + } +} +``` + +4. 
Last but not least, we will verify that we can now use our optimized request by executing our initial request containing only the query hash. + +**Request** + +```bash +curl -g 'http://localhost:5000/graphql/?extensions={"persistedQuery":{"version":1,"md5Hash":"71yeex4k3iYWQgg9TilDIg=="}}' +``` + +**Response** + +This time the server knows the query and will respond with the simple result of this query. + +```json +{ "data": { "__typename": "Query" } } +``` + +> In this example, we used GraphQL HTTP GET requests, which are also useful in caching scenarios with CDNs. But the automatic persisted query flow can also be used with GraphQL HTTP POST requests. + +## Step 4: Configure the hashing algorithm + +Hot Chocolate server is configured to use by default the MD5 hashing algorithm, which is serialized to a base64 string. Hot Chocolate server comes out of the box with support for MD5, SHA1, and SHA256 and can serialize the hash to base64 or hex. In this step, we will walk you through changing the hashing algorithm to SHA256 with a hex serialization. + +1. Add the SHA256 document hash provider to your Hot Chocolate GraphQL server's global services. + +```csharp +public void ConfigureServices(IServiceCollection services) +{ + services + // Global Services + .AddMemoryCache() + .AddSha256DocumentHashProvider(HashFormat.Hex) + + // GraphQL server configuration + .AddGraphQLServer() + .AddQueryType() + .UseAutomaticPersistedQueryPipeline() + .AddInMemoryQueryStorage(); +} +``` + +2. Start the GraphQL server. + +```bash +dotnet run +``` + +3. Next, let us verify that our server now operates with the new hash provider and the new hash serialization format. For this we will store again a query on the server, but this time our hash string will look like the following: `7f56e67dd21ab3f30d1ff8b7bed08893f0a0db86449836189b361dd1e56ddb4b`. + +**Request** + +```bash +curl -g 'http://localhost:5000/graphql/?query={__typename}&extensions={"persistedQuery":{"version":1,"sha256Hash":"7f56e67dd21ab3f30d1ff8b7bed08893f0a0db86449836189b361dd1e56ddb4b"}}' +``` + +**Response** + +```json +{ + "data": { "__typename": "Query" }, + "extensions": { + "persistedQuery": { + "sha256Hash": "7f56e67dd21ab3f30d1ff8b7bed08893f0a0db86449836189b361dd1e56ddb4b", + "persisted": true + } + } +} +``` + +## Step 4: Use Redis as a query storage + +If you run multiple Hot Chocolate server instances and want to preserve stored queries after a server restart, you can opt to use a file system based query storage or opt to use a Redis cache. Hot Chocolate server supports both. + +1. Setup a Redis docker container. + +```bash +docker run --name redis-stitching -p 7000:6379 -d redis +``` + +2. Add the Redis persisted query storage package to your server. + +```bash +dotnet add package HotChocolate.PersistedQueries.Redis +``` + +> ⚠️ Note: All `HotChocolate.*` packages need to have the same version. + +3. Next, we need to configure the server to use Redis as query storage. + +```csharp +public void ConfigureServices(IServiceCollection services) +{ + services + // Global Services + .AddSha256DocumentHashProvider(HashFormat.Hex) + + // GraphQL server configuration + .AddGraphQLServer() + .AddQueryType() + .UseAutomaticPersistedQueryPipeline() + .AddRedisQueryStorage(services => ConnectionMultiplexer.Connect("localhost:7000").GetDatabase()); +} +``` + +4. Start the GraphQL server. + +```bash +dotnet run +``` + +5. Now, let us verify again if our server works correctly by storing our query first. 
+ +**Request** + +```bash +curl -g 'http://localhost:5000/graphql/?query={__typename}&extensions={"persistedQuery":{"version":1,"sha256Hash":"7f56e67dd21ab3f30d1ff8b7bed08893f0a0db86449836189b361dd1e56ddb4b"}}' +``` + +**Response** + +```json +{ + "data": { "__typename": "Query" }, + "extensions": { + "persistedQuery": { + "sha256Hash": "7f56e67dd21ab3f30d1ff8b7bed08893f0a0db86449836189b361dd1e56ddb4b", + "persisted": true + } + } +} +``` + +6. Stop your GraphQL server. + +7. Start your GraphQL server again. + +```bash +dotnet run +``` + +8. Let us execute the optimized query to see if our query was correctly stored on our Redis cache. + +**Request** + +```bash +curl -g 'http://localhost:5000/graphql/?extensions={"persistedQuery":{"version":1,"sha256Hash":"7f56e67dd21ab3f30d1ff8b7bed08893f0a0db86449836189b361dd1e56ddb4b"}}' +``` + +**Response** + +```json +{ "data": { "__typename": "Query" } } +``` diff --git a/website/src/docs/hotchocolate/v12/performance/index.md b/website/src/docs/hotchocolate/v12/performance/index.md new file mode 100644 index 00000000000..4a29630178e --- /dev/null +++ b/website/src/docs/hotchocolate/v12/performance/index.md @@ -0,0 +1,50 @@ +--- +title: "Overview" +--- + +In this section we will look at some ways of how we can improve the performance of our Hot Chocolate GraphQL server. + +# Startup performance + +The first GraphQL request issued against a Hot Chocolate server will most of the time take a little longer than subsequent requests. This is because Hot Chocolate has to build up the GraphQL schema and prepare for the execution of requests. + +We can however delegate this task to the startup of the application instead of the first request, by call `InitializeOnStartup()` on the `IRequestExecutorBuilder`. + +```csharp +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + .InitializeOnStartup() + } +} +``` + +This will create the schema and warmup the request executor as soon as the app starts. This also brings the added benefit that schema errors are surfaced at app startup and not on the first request. + +# Persisted queries + +The size of individual GraphQL requests can become a major pain point. This is not only true for the transport but also the server, since large requests need to be parsed and validated often. To solve this problem, Hot Chocolate implements persisted queries. With persisted queries, we can store queries on the server in a key-value store. When we want to execute a persisted query, we can send the key under which the query is stored instead of the query itself. This saves precious bandwidth and also improves execution time since the server will validate, parse, and compile persisted queries just once. + +Hot Chocolate supports two flavors of persisted queries. + +## Regular persisted queries + +The first approach is to store queries ahead of time (ahead of deployment). +This can be done by extracting the queries from our client applications at build time. This will reduce the size of the requests and the bundle size of our application since queries can be removed from the client code at build time and are replaced with query hashes. + +Strawberry Shake, [Relay](https://relay.dev/docs/guides/persisted-queries/), and [Apollo](https://www.apollographql.com/docs/react/api/link/persisted-queries/) client all support this approach. 
+ +[Learn more about persisted queries](/docs/hotchocolate/v12/performance/persisted-queries) + +## Automatic persisted queries + +Automatic persisted queries allow us to store queries dynamically on the server at runtime. With this approach, we can give our applications the same performance benefits as with persisted queries without having to opt in to a more complex build process. + +However, we do not get any bundle size improvements for our applications since the queries are still needed at runtime. + +Both Strawberry Shake and [Apollo](https://www.apollographql.com/docs/apollo-server/performance/apq/) client support this approach. + +[Learn more about automatic persisted queries](/docs/hotchocolate/v12/performance/automatic-persisted-queries) diff --git a/website/src/docs/hotchocolate/v12/performance/persisted-queries.md b/website/src/docs/hotchocolate/v12/performance/persisted-queries.md new file mode 100644 index 00000000000..c61d437e239 --- /dev/null +++ b/website/src/docs/hotchocolate/v12/performance/persisted-queries.md @@ -0,0 +1,158 @@ +--- +title: "Persisted queries" +--- + +Persisted queries allow us to pre-register all required queries of our clients. This can be done by extracting the queries of our client applications at build time and placing them in the server's query storage. + +Extracting queries is supported by client libraries like [Relay](https://relay.dev/docs/guides/persisted-queries/) and in the case of [Strawberry Shake](/docs/strawberryshake) we do not have to do any additional work. + +> Note: While this feature is called persisted _queries_ it works for all other GraphQL operations as well. + + + +# How it works + +- All queries our client(s) will execute are extracted during their build process. Individual queries are hashed to generate a unique identifier for each query. +- Before our server is deployed, the extracted queries are placed in the server's query storage. +- After the server has been deployed, clients can execute persisted queries, by specifying the query id (hash) in their requests. +- If Hot Chocolate can find a query that matches the specified hash in the query storage it will execute it and return the result to the client. + +> Note: There are also [automatic persisted queries](/docs/hotchocolate/v12/performance/automatic-persisted-queries), which allow clients to persist queries at runtime. They might be a better fit, if our API is used by many clients with different requirements. + +# Benefits + + + +**Performance** + +- Only a hash and optionally variables need to be sent to the server, reducing network traffic. +- Queries no longer need to be embeded into the client code, reducing the bundle size in the case of websites. +- Hot Chocolate can optimize the execution of persisted queries, as they will always be the same. + + + +# Usage + +First we have to instruct our server to handle persisted queries. We can do so by calling `UsePersistedQueryPipeline()` on the `IRequestExecutorBuilder`. + +```csharp +public void ConfigureServices(IServiceCollection services) +{ + services + .AddGraphQLServer() + .AddQueryType() + .UsePersistedQueryPipeline(); +} +``` + +## Storage mechanisms + +Hot Chocolate supports two query storages for regular persisted queries. + +### Filesystem + +To load persisted queries from the filesystem, we have to add the following package. + +```bash +dotnet add package HotChocolate.PersistedQueries.FileSystem +``` + +> ⚠️ Note: All `HotChocolate.*` packages need to have the same version. 
+ +After this we need to specify where the persisted queries are located. The argument of `AddReadOnlyFileSystemQueryStorage()` specifies the directory in which the persisted queries are stored. + +```csharp +public void ConfigureServices(IServiceCollection services) +{ + services + .AddGraphQLServer() + .AddQueryType() + .UsePersistedQueryPipeline() + .AddReadOnlyFileSystemQueryStorage("./persisted_queries"); +} +``` + +When presented with a query hash, Hot Chocolate will now check the specified folder for a file in the following format: `{Hash}.graphql`. + +Example: `0c95d31ca29272475bf837f944f4e513.graphql` + +This file is expected to contain the query the hash was generated from. + +> ⚠️ Note: Do not forget to ensure that the server has access to the directory. + +### Redis + +To load persisted queries from Redis, we have to add the following package. + +```bash +dotnet add package HotChocolate.PersistedQueries.Redis +``` + +> ⚠️ Note: All `HotChocolate.*` packages need to have the same version. + +After this we need to specify where the persisted queries are located. Using `AddReadOnlyRedisQueryStorage()` we can point to a specific Redis database in which the persisted queries are stored. + +```csharp +public void ConfigureServices(IServiceCollection services) +{ + services + .AddGraphQLServer() + .AddQueryType() + .UsePersistedQueryPipeline() + .AddReadOnlyRedisQueryStorage(services => + ConnectionMultiplexer.Connect("host:port").GetDatabase()); +} +``` + +Keys in the specified Redis database are expected to be a query id (hash) and contain the actual query as the value. + +## Hashing algorithms + +Per default Hot Chocolate uses the MD5 hashing algorithm, but we can override this default by specifying a `DocumentHashProvider`. + +```csharp +public void ConfigureServices(IServiceCollection services) +{ + services + // choose one of the following providers + .AddMD5DocumentHashProvider() + .AddSha256DocumentHashProvider() + .AddSha1DocumentHashProvider() + + // GraphQL server configuration + .AddGraphQLServer() + .AddQueryType() + .UsePersistedQueryPipeline() + .AddReadOnlyFileSystemQueryStorage("./persisted_queries"); +} +``` + +We can also configure how these hashes are encoded, by specifying a `HashFormat` as argument: + +```csharp +AddSha256DocumentHashProvider(HashFormat.Hex) +AddSha256DocumentHashProvider(HashFormat.Base64) +``` + +> Note: [Relay](https://relay.dev) uses the MD5 hashing algorithm - no additional Hot Chocolate configuration is required. + +# Client expectations + +A client is expected to send an `id` field containing the query hash instead of a `query` field. + +**HTTP POST** + +```json +{ + "id": "0c95d31ca29272475bf837f944f4e513", + "variables": { + // ... + } +} +``` + +> Note: [Relay's persisted queries documentation](https://relay.dev/docs/guides/persisted-queries/#network-layer-changes) uses `doc_id` instead of `id`, be sure to change it to `id`. diff --git a/website/src/docs/hotchocolate/v12/security/authentication.md b/website/src/docs/hotchocolate/v12/security/authentication.md new file mode 100644 index 00000000000..f38c12a1e3b --- /dev/null +++ b/website/src/docs/hotchocolate/v12/security/authentication.md @@ -0,0 +1,161 @@ +--- +title: Authentication +--- + +import { ExampleTabs, Annotation, Code, Schema } from "../../../components/mdx/example-tabs" + +Authentication allows us to determine a user's identity. This is of course a prerequisite for authorization, but it also allows us to access the authenticated user in our resolvers. 
This is useful, if we for example want to build a `me` field that fetches details about the authenticated user. + +Hot Chocolate fully embraces the authentication capabilities of ASP.NET Core, making it easy to reuse existing authentication configuration and integrating a variety of authentication providers. + +[Learn more about authentication in ASP.NET Core](https://docs.microsoft.com/aspnet/core/security/authentication) + +# Setup + +Setting up authentication is largely the same as in any other ASP.NET Core application. + +**In the following example we are using JWTs, but we could use any other authentication scheme supported by ASP.NET Core.** + +1. Install the `Microsoft.AspNetCore.Authentication.JwtBearer` package + +```bash +dotnet add package Microsoft.AspNetCore.Authentication.JwtBearer +``` + +2. Register the JWT authentication scheme + +```csharp +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + var signingKey = new SymmetricSecurityKey( + Encoding.UTF8.GetBytes("MySuperSecretKey")); + + services + .AddAuthentication(JwtBearerDefaults.AuthenticationScheme) + .AddJwtBearer(options => + { + options.TokenValidationParameters = + new TokenValidationParameters + { + ValidIssuer = "https://auth.chillicream.com", + ValidAudience = "https://graphql.chillicream.com", + ValidateIssuerSigningKey = true, + IssuerSigningKey = signingKey + }; + }); + } +} +``` + +> ⚠️ Note: This is an example configuration that's not intended for use in a real world application. + +3. Register the ASP.NET Core authentication middleware with the request pipeline by calling `UseAuthentication` + +```csharp +public class Startup +{ + public void Configure(IApplicationBuilder app, IWebHostEnvironment env) + { + app.UseRouting(); + + app.UseAuthentication(); + + app.UseEndpoints(endpoints => + { + endpoints.MapGraphQL(); + }); + } +} +``` + +The above takes care of parsing and validating an incoming HTTP request. + +In order to make the authentication result available to our resolvers, we need to complete some additional, Hot Chocolate specific steps. + +1. Install the `HotChocolate.AspNetCore.Authorization` package + +```bash +dotnet add package HotChocolate.AspNetCore.Authorization +``` + +> ⚠️ Note: All `HotChocolate.*` packages need to have the same version. + +2. Call `AddAuthorization()` on the `IRequestExecutorBuilder` + +```csharp +services + .AddGraphQLServer() + .AddAuthorization() + .AddQueryType(); +``` + +All of this does not yet lock out unauthenticated users. It only exposes the identity of the authenticated user to our application through a `ClaimsPrincipal`. If we want to prevent certain users from querying our graph, we need to utilize authorization. + +[Learn more about authorization](/docs/hotchocolate/v12/security/authorization) + +# Accessing the ClaimsPrincipal + +The [ClaimsPrincipal](https://docs.microsoft.com/dotnet/api/system.security.claims.claimsprincipal) of an authenticated user can be accessed in our resolvers like the following. 
+ + + + +```csharp +public class Query +{ + public User GetMe(ClaimsPrincipal claimsPrincipal) + { + // Omitted code for brevity + } +} +``` + + + + +```csharp +public class QueryType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor + .Field("me") + .Resolve(context => + { + var claimsPrincipal = context.GetUser(); + + // Omitted code for brevity + }); + } +} +``` + + + + +```csharp +services + .AddGraphQLServer() + .AddDocumentFromString(@" + type Query { + me: User + } + ") + .AddResolver("Query", "me", (context) => + { + var claimsPrincipal = context.GetUser(); + + // Omitted code for brevity + }) +``` + + + + +With the authenticated user's `ClaimsPrincipal`, we can now access their claims. + +```csharp +var userId = claimsPrincipal.FindFirstValue(ClaimTypes.NameIdentifier); +``` diff --git a/website/src/docs/hotchocolate/v12/security/authorization.md b/website/src/docs/hotchocolate/v12/security/authorization.md new file mode 100644 index 00000000000..d8c4c8648b6 --- /dev/null +++ b/website/src/docs/hotchocolate/v12/security/authorization.md @@ -0,0 +1,411 @@ +--- +title: Authorization +--- + +import { ExampleTabs, Annotation, Code, Schema } from "../../../components/mdx/example-tabs" + +Authorization allows us to determine a user's permissions within our system. We can for example limit access to resources or only allow certain users to execute specific mutations. + +Authentication is a prerequisite of Authorization, as we first need to validate a user's "authenticity" before we can evaluate his authorization claims. + +[Learn how to setup authentication](/docs/hotchocolate/v12/security/authentication) + +# Setup + +After we have successfully setup authentication, there are only a few things left to do. + +1. Install the `HotChocolate.AspNetCore.Authorization` package + +```bash +dotnet add package HotChocolate.AspNetCore.Authorization +``` + +> ⚠️ Note: All `HotChocolate.*` packages need to have the same version. + +2. Register the necessary ASP.NET Core services + +```csharp +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services.AddAuthorization(); + + // Omitted code for brevity + + services + .AddGraphQLServer() + .AddAuthorization() + .AddQueryType(); + } +} +``` + +> ⚠️ Note: We need to call `AddAuthorization()` on the `IServiceCollection`, to register the services needed by ASP.NET Core, and on the `IRequestExecutorBuilder` to register the `@authorize` directive and middleware. + +3. Register the ASP.NET Core authorization middleware with the request pipeline by calling `UseAuthorization` + +```csharp +public class Startup +{ + public void Configure(IApplicationBuilder app, IWebHostEnvironment env) + { + app.UseRouting(); + + app.UseAuthentication(); + app.UseAuthorization(); + + app.UseEndpoints(endpoints => + { + endpoints.MapGraphQL(); + }); + } +} +``` + +# Usage + +At the core of authorization with Hot Chocolate is the `@authorize` directive. It can be applied to fields and types to denote that they require authorization. + + + + +In the Annotation-based approach we can use the `[Authorize]` attribute to add the `@authorize` directive. + +```csharp +[Authorize] +public class User +{ + public string Name { get; set; } + + [Authorize] + public Address Address { get; set; } +} +``` + +> ⚠️ Note: We need to use the `HotChocolate.AspNetCore.Authorization.AuthorizeAttribute` instead of the `Microsoft.AspNetCore.AuthorizationAttribute`. 
+ + + + +```csharp +public class UserType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor.Authorize(); + + descriptor.Field(f => f.Address).Authorize(); + } +} +``` + + + + +```sdl +type User @authorize { + name: String! + address: Address! @authorize +} +``` + + + + +Specified on a type the `@authorize` directive will be applied to each field of that type. Its authorization logic is executed once for each individual field, depending on whether it was selected by the requestor or not. If the directive is placed on an individual field, it overrules the one on the type. + +If we do not specify any arguments to the `@authorize` directive, it will only enforce that the requestor is authenticated, nothing more. If he is not and tries to access an authorized field, a GraphQL error will be raised and the field result set to `null`. + +> ⚠️ Note: Using the @authorize directive, all unauthorized requests by default will return status code 200 and a payload like this: + +```json +{ + "errors": [ + { + "message": "The current user is not authorized to access this resource.", + "locations": [ + { + "line": 2, + "column": 3 + } + ], + "path": ["welcome"], + "extensions": { + "code": "AUTH_NOT_AUTHENTICATED" + } + } + ], + "data": { + "welcome": null + } +} +``` + +## Roles + +Roles provide a very intuitive way of dividing our users into groups with different access rights. + +When building our `ClaimsPrincipal`, we just have to add one or more role claims. + +```csharp +claims.Add(new Claim(ClaimTypes.Role, "Administrator")); +``` + +We can then check whether an authenticated user has these role claims. + + + + +```csharp +[Authorize(Roles = new [] { "Guest", "Administrator" })] +public class User +{ + public string Name { get; set; } + + [Authorize(Roles = new[] { "Administrator" })] + public Address Address { get; set; } +} +``` + + + + +```csharp +public class UserType : ObjectType +{ + protected override Configure(IObjectTypeDescriptor descriptor) + { + descriptor.Authorize(new[] { "Guest", "Administrator" }); + + descriptor.Field(t => t.Address).Authorize(new[] { "Administrator" }); + } +} +``` + + + + +```sdl +type User @authorize(roles: [ "Guest", "Administrator" ]) { + name: String! + address: Address! @authorize(roles: "Administrator") +} +``` + + + + +> ⚠️ Note: If multiple roles are specified, a user only has to match one of the specified roles, in order to be able to execute the resolver. + +[Learn more about role-based authorization in ASP.NET Core](https://docs.microsoft.com/aspnet/core/security/authorization/roles) + +## Policies + +Policies allow us to create richer validation logic and decouple the authorization rules from our GraphQL resolvers. + +A policy consists of an [IAuthorizationRequirement](https://docs.microsoft.com/aspnet/core/security/authorization/policies#requirements) and an [AuthorizationHandler<T>](https://docs.microsoft.com/aspnet/core/security/authorization/policies#authorization-handlers). + +Once defined, we can register our policies like the following. 
+ +```csharp +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services.AddAuthorization(options => + { + options.AddPolicy("AtLeast21", policy => + policy.Requirements.Add(new MinimumAgeRequirement(21))); + + options.AddPolicy("HasCountry", policy => + policy.RequireAssertion(context => + context.User.HasClaim(c => c.Type == ClaimTypes.Country))); + }); + + services.AddSingleton(); + + // Omitted code for brevity + + services + .AddGraphQLServer() + .AddAuthorization() + .AddQueryType(); + } +} +``` + +We can then use these policies to restrict access to our fields. + + + + +```csharp +[Authorize(Policy = "AllEmployees")] +public class User +{ + public string Name { get; } + + [Authorize(Policy = "SalesDepartment")] + public Address Address { get; } +} +``` + + + + +```csharp +public class UserType : ObjectType +{ + protected override Configure(IObjectTypeDescriptor descriptor) + { + descriptor.Authorize("AllEmployees"); + + descriptor.Field(t => t.Address).Authorize("SalesDepartment"); + } +} +``` + + + + +```sdl +type User @authorize(policy: "AllEmployees") { + name: String! + address: Address! @authorize(policy: "SalesDepartment") +} +``` + + + + +This essentially uses the provided policy and runs it against the `ClaimsPrincipal` that is associated with the current request. + +The `@authorize` directive is also repeatable, which means that we are able to chain the directive and a user is only allowed to access the field if they meet all of the specified conditions. + + + + +```csharp +[Authorize(Policy = "AtLeast21")] +[Authorize(Policy = "HasCountry")] +public class User +{ + public string Name { get; set; } +} +``` + + + + +```csharp +public class UserType : ObjectType +{ + protected override Configure(IObjectTypeDescriptor descriptor) + { + descriptor + .Authorize("AtLeast21") + .Authorize("HasCountry"); + } +} +``` + + + + +```sdl +type User + @authorize(policy: "AtLeast21") + @authorize(policy: "HasCountry") { + name: String! +} +``` + + + + +[Learn more about policy-based authorization in ASP.NET Core](https://docs.microsoft.com/aspnet/core/security/authorization/policies) + +### IResolverContext within an AuthorizationHandler + +If we need to, we can also access the `IResolverContext` in our `AuthorizationHandler`. + +```csharp +public class MinimumAgeHandler + : AuthorizationHandler +{ + protected override Task HandleRequirementAsync( + AuthorizationHandlerContext context, + MinimumAgeRequirement requirement, + IResolverContext resolverContext) + { + // Omitted code for brevity + } +} +``` + +# Global authorization + +We can also apply authorization to our entire GraphQL endpoint. To do this, simply call `RequireAuthorization()` on the `GraphQLEndpointConventionBuilder`. + +```csharp +public class Startup +{ + public void Configure(IApplicationBuilder app, IWebHostEnvironment env) + { + app.UseRouting(); + + app.UseAuthentication(); + app.UseAuthorization(); + + app.UseEndpoints(endpoints => + { + endpoints.MapGraphQL().RequireAuthorization(); + }); + } +} +``` + +This method also accepts [roles](#roles) and [policies](#policies) as arguments, similiar to the `Authorize` attribute / methods. + +> ⚠️ Note: Unlike the `@authorize directive` this will return status code 401 and prevent unauthorized access to all middleware included in `MapGraphQL`. This includes our GraphQL IDE Banana Cake Pop. 
If we do not want to block unauthorized access to Banana Cake Pop, we can split up the `MapGraphQL` middleware and for example only apply the `RequireAuthorization` to the `MapGraphQLHttp` middleware. + +[Learn more about available middleware](/docs/hotchocolate/v12/server/endpoints) + +# Modifying the ClaimsPrincipal + +Sometimes we might want to add additional [ClaimsIdentity](https://docs.microsoft.com/dotnet/api/system.security.claims.claimsidentity) to our `ClaimsPrincipal` or modify the default identity. + +Hot Chocolate provides the ability to register an `IHttpRequestInterceptor`, allowing us to modify the incoming HTTP request, before it is passed along to the execution engine. + +```csharp +public class HttpRequestInterceptor : DefaultHttpRequestInterceptor +{ + public override ValueTask OnCreateAsync(HttpContext context, + IRequestExecutor requestExecutor, IQueryRequestBuilder requestBuilder, + CancellationToken cancellationToken) + { + var identity = new ClaimsIdentity(); + identity.AddClaim(new Claim(ClaimTypes.Country, "us")); + + context.User.AddIdentity(identity); + + return base.OnCreateAsync(context, requestExecutor, requestBuilder, + cancellationToken); + } +} + +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + .AddHttpRequestInterceptor(); + + // Omitted code for brevity + } +} +``` + +[Learn more about interceptors](/docs/hotchocolate/v12/server/interceptors) diff --git a/website/src/docs/hotchocolate/v12/security/index.md b/website/src/docs/hotchocolate/v12/security/index.md new file mode 100644 index 00000000000..4b210bd66e7 --- /dev/null +++ b/website/src/docs/hotchocolate/v12/security/index.md @@ -0,0 +1,68 @@ +--- +title: "Overview" +--- + +In this section we will learn how to secure our GraphQL endpoint. + +When we think about API security, we, in many cases, only think about authorization. With GraphQL, we need to think further about security concepts to ensure that our GraphQL API performs predictably and malicious actors do not bring our service down or degrade performance for others. + +# Authentication + +Authentication in Hot Chocolate is built around the official authentication mechanisms in ASP.NET Core, allowing us to fully embrace their customizability and variety of authentication providers. + +[Learn more about authentication](/docs/hotchocolate/v12/security/authentication) + +# Authorization + +Authorization is one of the most basic security concepts. It builds on top of authentication and allows us to restrict access to types and fields, based on whether a user is authenticated, assigned specific roles or satisfies one or more policies. Hot Chocolate closely matches and nicely integrates with the official ASP.NET Core authorization APIs. + +[Learn more about authorization](/docs/hotchocolate/v12/security/authorization) + +# Persisted Queries + +Depending on our setup and requirements, the simplest way to make our server secure and control the request impact is to use persisted queries. With this approach, we can export the request from our client applications at development time and only allow the set of known queries to be executed in our production environment. + +[Learn more about persisted queries](/docs/hotchocolate/v12/performance/persisted-queries) + +# Introspection + +Introspection is one of the GraphQL's core features and powers many GraphQL IDEs and developer tools. But introspection can also produce large results, which can degrade the performance of our server. 
Apart from the performance aspect, we might also want to limit who can introspect our GraphQL server. Hot Chocolate allows us to control who can access introspection fields by using query validation rules.

[Learn more about restricting introspection](/docs/hotchocolate/v12/server/introspection#disabling-introspection).

# Pagination

Pagination is another topic we often forget when thinking about securing our GraphQL API. Hot Chocolate, by default, applies strict pagination defaults so that APIs only allow a certain number of nodes per connection. These defaults might not be the right ones for your environment, though, and could still permit more load than you want.

[Learn more about pagination](/docs/hotchocolate/v12/fetching-data/pagination)

# Operation complexity

With technologies like REST, it was easy to scale servers and measure the impact of a single request on our server infrastructure. With GraphQL, we need to do a bit more to enforce that requests have a consistent impact on our servers. Hot Chocolate can track the cost of fields and deny the execution of requests that exceed the allowed impact on our system.

[Learn more about the operation complexity analyzer](/docs/hotchocolate/v12/security/operation-complexity).

# FIPS compliance

Per default Hot Chocolate uses MD5 to create a unique document hash. Since MD5 is not FIPS compliant, this might lead to issues if you are trying to run Hot Chocolate on a device that is in FIPS compliance mode.

Fortunately, we offer the option to use the FIPS-compliant SHA256 hashing algorithm to create document hashes.

```csharp
public void ConfigureServices(IServiceCollection services)
{
    services.AddSha256DocumentHashProvider();
}
```

[Learn more about document hashing providers](/docs/hotchocolate/v12/performance/persisted-queries#hashing-algorithms) diff --git a/website/src/docs/hotchocolate/v12/security/operation-complexity.md b/website/src/docs/hotchocolate/v12/security/operation-complexity.md new file mode 100644 index 00000000000..2da57247706 --- /dev/null +++ b/website/src/docs/hotchocolate/v12/security/operation-complexity.md @@ -0,0 +1,216 @@
---
title: Operation Complexity
---

The operation complexity analyzer is a useful tool for securing your API. By default, it assigns every field a complexity of `1`. The combined complexity of all fields in any single operation of a GraphQL request is not allowed to exceed the maximum permitted operation complexity.

# Static Request Analysis

This sounds fairly simple at first, but the more you think about it, the more questions come up. Does every field really have the same complexity?

In a data graph, not every field is the same. Fields that fetch data are more expensive than fields that just complete already resolved data.

```graphql
type Query {
  books(take: Int = 10): [Book]
}

type Book {
  title: String
  author: Author
}

type Author {
  name: String
}
```

In the above example, executing the `books` field on the `Query` type might go to the database and fetch the `Book` objects. This means that the cost of the `books` field is probably higher than the cost of the `title` field, whose impact is mostly limited to memory and transport. For `title`, the default cost of `1` is OK. But for `books`, we might want to go with a higher cost of `10` since we are getting a list of books from our database.
Moreover, we have the field `author` on the book, which might go to the database as well to fetch the `Author` object. Since we are only fetching a single item here, we might want to apply a cost of `5` to this field.

```graphql
type Query {
  books(take: Int = 10): [Book] @cost(complexity: 10)
}

type Book {
  title: String
  author: Author @cost(complexity: 5)
}

type Author {
  name: String
}
```

If we run the following query against our data graph, we will come up with a cost of `11`.

```graphql
query {
  books {
    title
  }
}
```

When drilling in further, a cost of `17` occurs.

```graphql
query {
  books {
    title
    author {
      name
    }
  }
}
```

This kind of analysis is entirely static and could just be done by inspecting the query syntax tree. The impact on the overall execution performance is very low. But with this static approach, we only get a very rough idea of the performance. Is it correct to always apply a cost of `10`, even though we might get one or one hundred books back?

# Full Request Analysis

The Hot Chocolate operation complexity analyzer can also take arguments into account when analyzing operation complexity.

If we look at our data graph, we can see that the `books` field actually has an argument that defines how many books are returned. The `take` argument, in this case, specifies the maximum number of books that the field will return.

When measuring the field's impact, we can take the argument `take` into account as a multiplier of our cost. This means we might want to lower the cost to `5`, since we now get a more fine-grained cost calculation by multiplying the complexity of the field with the `take` argument.

```graphql
type Query {
  books(take: Int = 10): [Book] @cost(complexity: 5, multipliers: [take])
}

type Book {
  title: String
  author: Author @cost(complexity: 5)
}

type Author {
  name: String
}
```

With the multiplier in place, we now get a cost of `60` for the request, since the multiplier is applied to the `books` field and the child fields' cost.

Cost calculation: `(5 * 10) + (1 * 10)`

```graphql
query {
  books {
    title
  }
}
```

When drilling in further, the cost will go up to `110`, since we are now also pulling in the author and thereby causing a second database call.

Cost calculation: `(5 * 10) + ((1 + 5) * 10)`

```graphql
query {
  books {
    title
    author {
      name
    }
  }
}
```

```csharp
services
    .AddGraphQL()
    .ModifyRequestOptions(o =>
    {
        o.Complexity.Enable = true;
        o.Complexity.MaximumAllowed = 1500;
    });
```

# Default Complexity Rules

Hot Chocolate will automatically apply multipliers to fields that enable pagination. Moreover, explicit resolvers and resolvers compiled from async resolvers are by default weighted with `5` to mark them as having more impact than fields that do not fetch data.

These defaults can be configured.

```csharp
services
    .AddGraphQL()
    .ModifyRequestOptions(o =>
    {
        o.Complexity.ApplyDefaults = true;
        o.Complexity.DefaultComplexity = 1;
        o.Complexity.DefaultDataResolverComplexity = 5;
    });
```

# Advanced

Often we not only want to make sure that a consumer of our API does not send overly complex queries, but also that they do not issue too many complex queries within a given time window. For this reason, the complexity analyzer stores the calculated query complexity in the request context data.
+ +The context data key can be configured like the following: + +```csharp +services + .AddGraphQL() + .ModifyRequestOptions(o => + { + o.Complexity.ContextDataKey = "MyContextDataKey"; + }); +``` + +With this, it is possible to add a request middleware and aggregate the complexity over time on something like _Redis_ and fail a request if the allowed complexity was used up. + +## Custom Complexity Calculation + +The default complexity calculation is fairly basic and can be customized to fit your needs. + +```csharp +services + .AddGraphQL() + .ModifyRequestOptions(o => + { + o.Complexity.Calculation = context => + { + if (context.Multipliers.Count == 0) + { + return context.Complexity + context.ChildComplexity; + } + + var cost = context.Complexity + context.ChildComplexity; + bool needsDefaultMultiplier = true; + + foreach (MultiplierPathString multiplier in context.Multipliers) + { + if (context.TryGetArgumentValue(multiplier, out int value)) + { + cost *= value; + needsDefaultMultiplier = false; + } + } + + if(needsDefaultMultiplier && context.DefaultMultiplier.HasValue) + { + cost *= context.DefaultMultiplier.Value; + } + + return cost; + }); + }); +``` + +**Complexity Context** + +| Member | Description | +| ------------------- | --------------------------------------------------------------------- | +| Field | The `IOutputField` for which the complexity is calculated. | +| Selection | The field selection node in the query syntax tree. | +| Complexity | The field`s base complexity. | +| ChildComplexity | The calculated complexity of all child fields. | +| Multipliers | The multiplier argument names. | +| Multipliers | The default multiplier value when no multiplier argument has a value. | +| FieldDepth | The field depth in the query. | +| NodeDepth | The syntax node depth in the query syntax tree. | +| TryGetArgumentValue | Helper to get the coerced argument value of a multiplier. | diff --git a/website/src/docs/hotchocolate/v12/security/query-depth.md b/website/src/docs/hotchocolate/v12/security/query-depth.md new file mode 100644 index 00000000000..cd9752748ca --- /dev/null +++ b/website/src/docs/hotchocolate/v12/security/query-depth.md @@ -0,0 +1,3 @@ +--- +title: "Query depth" +--- diff --git a/website/src/docs/hotchocolate/v12/server/dependency-injection.md b/website/src/docs/hotchocolate/v12/server/dependency-injection.md new file mode 100644 index 00000000000..66165b3db18 --- /dev/null +++ b/website/src/docs/hotchocolate/v12/server/dependency-injection.md @@ -0,0 +1,275 @@ +--- +title: Dependency Injection +--- + +import { ExampleTabs, Annotation, Code, Schema } from "../../../components/mdx/example-tabs" + +If you are unfamiliar with the term "dependency injection", we recommend the following articles to get you started: + +- [Dependency injection in .NET](https://docs.microsoft.com/dotnet/core/extensions/dependency-injection) +- [Dependency injection in ASP.NET Core](https://docs.microsoft.com/aspnet/core/fundamentals/dependency-injection) + +Dependency injection with Hot Chocolate works almost the same as with a regular ASP.NET Core application. For instance, nothing changes about how you add services to the dependency injection container. + +```csharp +var builder = WebApplication.CreateBuilder(args); + +builder.Services + .AddSingleton() + .AddScoped() + .AddTransient(); +``` + +Injecting these services into Hot Chocolate resolvers works a bit different though. 
+ +# Resolver injection + +The correct way to inject dependencies into your resolvers is by injecting them into your resolver method as an argument. + +[Learn more about why constructor injection into GraphQL types is a bad idea](#constructor-injection) + +Injecting dependencies at the method-level has a couple of benefits: + +- The resolver can be optimized and the execution strategy can be adjusted depending on the needs of a specific service. +- Refactoring, i.e. moving the resolver method between classes, becomes easier, since the resolver does not have any dependencies on its outer class. + +You might have already encountered this concept in regular ASP.NET Core applications in form of the [`Microsoft.AspNetCore.Mvc.FromServicesAttribute`](https://docs.microsoft.com/dotnet/api/microsoft.aspnetcore.mvc.fromservicesattribute). + +While you can also use this attribute to inject services into Hot Chocolate resolvers, we recommend using our own `HotChocolate.ServiceAttribute`. + +```csharp +public class Query +{ + public Foo GetFoo(string bar, [Service] FooService fooService) + => // Omitted code for brevity +} +``` + +Our own attribute also accepts a [ServiceKind](#servicekind) which can be used to specify the strategy with which the service should be injected. + +```csharp +public Foo GetFoo([Service(ServiceKind.Synchronized)] Service service) + => // Omitted code for brevity +``` + +If you want to avoid cluttering your resolvers with too many attributes, you can also [register your services as well-known services](#registerservice), allowing you to omit the `ServiceAttribute`. + +If you are working with the `IResolverContext`, for example in the `Resolve()` callback, you can use the `Service` method to access your dependencies. + +```csharp +descriptor + .Field("foo") + .Resolve(context => + { + FooService service = context.Service(); + + // Omitted code for brevity + }); +``` + +If you are trying to inject a Entity Framework Core `DbContext`, be sure to checkout our [guidance on working with Entity Framework Core](/docs/hotchocolate/v12/integrations/entity-framework). + +# Constructor injection + +When starting out with Hot Chocolate you might be inclined to inject dependencies into your GraphQL type definitions using the constructor. + +You should avoid doing this, because + +- GraphQL type definitions are singleton and your injected dependency will therefore also become a singleton. +- access to this dependency can not be synchronized by Hot Chocolate during the execution of a request. + +Of course this does not apply within your own dependencies. Your `ServiceA` class can still inject `ServiceB` through the constructor. + +When you need to access dependency injection services in your resolvers, try to stick to the [method-level dependency injection approach](#resolver-injection) outlined above. + +# RegisterService + +Having to specify an attribute to inject a service can become quite tedious when said service is injected into multiple resolvers. + +If you want to omit the attribute, you can simply call `RegisterService` on the `IRequestExecutorBuilder`. The Hot Chocolate Resolver Compiler will then take care of wiring up all of the `T` in the method signature of your resolvers to the dependency injection mechanism. 
+ +```csharp +var builder = WebApplication.CreateBuilder(args); + +builder.Services.AddTransient(); + +builder.Services + .AddGraphQLServer() + .RegisterService() + .AddQueryType(); + +public class Query +{ + public Foo GetFoo(FooService FooService) + => // Omitted code for brevity +} +``` + +> ⚠️ Note: You still have to register the service with a lifetime in the actual dependency injection container, for example by calling `services.AddTransient`. `RegisterService` on its own is not enough. + +You can also specify a [ServiceKind](#servicekind) as argument to the `RegisterService` method. + +```csharp +services + .AddGraphQLServer() + .RegisterService(ServiceKind.Synchronized); +``` + +If you are registering an interface, you need to call `RegisterService` with the interface as the generic type parameter. + +```csharp +var builder = WebApplication.CreateBuilder(args); + +builder.Services.AddTransient(); + +builder.Services + .AddGraphQLServer() + .RegisterService() + .AddQueryType(); + +public class Query +{ + public Foo GetFoo(IFooService FooService) + => // Omitted code for brevity +} +``` + +# UseServiceScope + +Per default scoped services are scoped to the current request. If you want to resolve the services for a particular resolver using a dedicated [`IServiceScope`](https://docs.microsoft.com/dotnet/api/microsoft.extensions.dependencyinjection.iservicescope), you can use the `UseServiceScope` middleware. + + + + +```csharp +public class Query +{ + [UseServiceScope] + public Foo GetFoo([Service] Service1 service1, [Service] Service2 service2) + => // Omitted code for brevity +} +``` + + + + +```csharp +descriptor.Field("foo") + .UseServiceScope() + .Resolve(context => + { + Service1 service1 = context.Service(); + Service2 service2 = context.Service(); + + // Omitted code for brevity + }); +``` + + + + +Take a look at the Annotation-based or Code-first example. + + + + +If `Service1` and `Service2` are scoped services they will both be resolved from the same [`IServiceScope`](https://docs.microsoft.com/dotnet/api/microsoft.extensions.dependencyinjection.iservicescope) that only exists for this particular resolver. If the resolver is invoked multiple times, the [`IServiceScope`](https://docs.microsoft.com/dotnet/api/microsoft.extensions.dependencyinjection.iservicescope) will be different each time. The resolver-scoped [`IServiceScope`](https://docs.microsoft.com/dotnet/api/microsoft.extensions.dependencyinjection.iservicescope) and all services resolved with it, are disposed as soon as the resolver has been executed. + +# ServiceKind + +When injecting a service you can specify a `ServiceKind` to instruct Hot Chocolate to use a certain strategy when injecting the service. + +## ServiceKind.Default + +The services are injected according to their [service lifetime](https://docs.microsoft.com/dotnet/core/extensions/dependency-injection#service-lifetimes). + +- Singleton: The same instance of the service is injected into the resolver throughout the lifetime of the GraphQL server. +- Scoped: The same instance of the service is injected into the resolver throughout the lifetime of a request, since the service is being resolved from a request-scoped [`IServiceScope`](https://docs.microsoft.com/dotnet/api/microsoft.extensions.dependencyinjection.iservicescope). +- Transient: A new instance of the service is injected into the resolver for each resolver invocation. + +## ServiceKind.Synchronized + +Per default (most) resolvers are executed in parallel. 
Your service might not support being accessed concurrently. If this is the case, you can inject the service using `ServiceKind.Synchronized`. This will cause the resolver to run serially, which means that no other resolver will be executed while this resolver is still running.

> ⚠️ Note: This synchronization only applies within the same request. If your service is a Singleton, `ServiceKind.Synchronized` does not prevent the resolver from running concurrently in two separate requests.

## ServiceKind.Resolver

If the service is scoped, it will be resolved from a resolver-scoped [`IServiceScope`](https://docs.microsoft.com/dotnet/api/microsoft.extensions.dependencyinjection.iservicescope), similar to how the [`UseServiceScope`](#useservicescope) middleware works, except that only this specific service, not other services accessed by the resolver, is provided using this resolver-scoped [`IServiceScope`](https://docs.microsoft.com/dotnet/api/microsoft.extensions.dependencyinjection.iservicescope).

If two scoped services within the same resolver are injected using `ServiceKind.Resolver`, they will be resolved from the same resolver-scoped [`IServiceScope`](https://docs.microsoft.com/dotnet/api/microsoft.extensions.dependencyinjection.iservicescope). If the [`UseServiceScope`](#useservicescope) middleware is already applied to the resolver, services injected using `ServiceKind.Resolver` will be resolved from this resolver-scoped [`IServiceScope`](https://docs.microsoft.com/dotnet/api/microsoft.extensions.dependencyinjection.iservicescope).

The resolver-scoped [`IServiceScope`](https://docs.microsoft.com/dotnet/api/microsoft.extensions.dependencyinjection.iservicescope) and all services resolved with it are disposed as soon as the resolver has been executed.

## ServiceKind.Pooled

If your service is registered as an `ObjectPool<T>` and injected using `ServiceKind.Pooled`, one instance of the service will be resolved from the pool for each invocation of the resolver and returned to the pool after the resolver has finished executing.

```csharp
var builder = WebApplication.CreateBuilder(args);

// Create a pool of FooService instances using the default pooling policy.
ObjectPool<FooService> pool =
    new DefaultObjectPool<FooService>(
        new DefaultPooledObjectPolicy<FooService>());

builder.Services.AddSingleton<ObjectPool<FooService>>(pool);

builder.Services
    .AddGraphQLServer()
    .AddQueryType<Query>();

public class Query
{
    public Foo GetFoo([Service(ServiceKind.Pooled)] FooService service)
        => // Omitted code for brevity
}
```

[Learn more about `ObjectPool`](https://docs.microsoft.com/dotnet/api/microsoft.extensions.objectpool.objectpool-1)

# Switching the service provider

While Hot Chocolate's internals rely heavily on Microsoft's dependency injection container, you are not required to manage your own dependencies using this container. Per default Hot Chocolate uses the request-scoped [`HttpContext.RequestServices`](https://docs.microsoft.com/dotnet/api/microsoft.aspnetcore.http.httpcontext.requestservices) `IServiceProvider` to provide services to your resolvers.

You can switch out the service provider used for GraphQL requests, as long as your dependency injection container implements the [`IServiceProvider`](https://docs.microsoft.com/dotnet/api/system.iserviceprovider) interface.
+ +To switch out the service provider you need to call [`SetServices`](/docs/hotchocolate/v12/server/interceptors#setservices) on the [`IQueryRequestBuilder`](/docs/hotchocolate/v12/server/interceptors#iqueryrequestbuilder) in both the [`IHttpRequestInterceptor`](/docs/hotchocolate/v12/server/interceptors#ihttprequestinterceptor) and the [`ISocketSessionInterceptor`](/docs/hotchocolate/v12/server/interceptors#isocketsessioninterceptor). + +```csharp +public class HttpRequestInterceptor : DefaultHttpRequestInterceptor +{ + public override async ValueTask OnCreateAsync(HttpContext context, + IRequestExecutor requestExecutor, IQueryRequestBuilder requestBuilder, + CancellationToken cancellationToken) + { + // keeping this line is important! + await base.OnCreateAsync(context, requestExecutor, requestBuilder, + cancellationToken); + + requestBuilder.SetServices(YOUR_SERVICE_PROVIDER); + } +} + +public class SocketSessionInterceptor : DefaultSocketSessionInterceptor +{ + public override async ValueTask OnRequestAsync(ISocketConnection connection, + IQueryRequestBuilder requestBuilder, + CancellationToken cancellationToken) + { + // keeping this line is important! + await base.OnRequestAsync(connection, requestBuilder, + cancellationToken); + + requestBuilder.SetServices(YOUR_SERVICE_PROVIDER); + } +} +``` + +You also need to register these interceptors for them to take effect. + +```csharp +services + .AddGraphQLServer() + .AddHttpRequestInterceptor() + .AddSocketSessionInterceptor(); +``` + +[Learn more about interceptors](/docs/hotchocolate/v12/server/interceptors) diff --git a/website/src/docs/hotchocolate/v12/server/endpoints.md b/website/src/docs/hotchocolate/v12/server/endpoints.md new file mode 100644 index 00000000000..391a9f46384 --- /dev/null +++ b/website/src/docs/hotchocolate/v12/server/endpoints.md @@ -0,0 +1,370 @@ +--- +title: Endpoints +--- + +Hot Chocolate comes with a set of ASP.NET Core middleware used for making the GraphQL server available via HTTP and WebSockets. There are also middleware for hosting our GraphQL IDE [Banana Cake Pop](/docs/bananacakepop) as well as an endpoint used for downloading the schema in its SDL representation. + +# MapGraphQL + +We can call `MapGraphQL()` on the `IEndpointRouteBuilder` to register all of the middleware a standard GraphQL server requires. + +```csharp +public class Startup +{ + public void Configure(IApplicationBuilder app, IWebHostEnvironment env) + { + app.UseRouting(); + + app.UseEndpoints(endpoints => + { + endpoints.MapGraphQL(); + }); + } +} +``` + +If you are using .NET 6 Minimal APIs, you can also call `MapGraphQL()` on the `app` builder directly, since it implements `IEndpointRouteBuilder`: + +```csharp +var builder = WebApplication.CreateBuilder(args); + +// Omitted code for brevity + +var app = builder.Build(); + +app.MapGraphQL(); + +app.Run(); +``` + +The middleware registered by `MapGraphQL` makes the GraphQL server available at `/graphql` per default. + +We can customize the endpoint at which the GraphQL server is hosted like the following. 
+ +```csharp +endpoints.MapGraphQL("/my/graphql/endpoint"); +``` + +Calling `MapGraphQL()` will enable the following functionality on the specified endpoint: + +- HTTP GET and HTTP POST GraphQL requests are handled (Multipart included) +- WebSocket GraphQL requests are handled (if the ASP.NET Core WebSocket Middleware has been registered) +- Including the query string `?sdl` after the endpoint will download the GraphQL schema +- Accessing the endpoint from a browser will load our GraphQL IDE [Banana Cake Pop](/docs/bananacakepop) + +We can customize the combined middleware using `GraphQLServerOptions` as shown below or we can only include the parts of the middleware we need and configure them explicitly. + +The following middleware are available: + +- [MapBananaCakePop](#mapbananacakepop) +- [MapGraphQLHttp](#mapgraphqlhttp) +- [MapGraphQLWebsocket](#mapgraphqlwebsocket) +- [MapGraphQLSchema](#mapgraphqlschema) + +## GraphQLServerOptions + +We can influence the behavior of the middleware registered by `MapGraphQL` using `GraphQLServerOptions`. + +### EnableSchemaRequests + +```csharp +endpoints.MapGraphQL().WithOptions(new GraphQLServerOptions +{ + EnableSchemaRequests = false +}); +``` + +This setting controls whether the schema of the GraphQL server can be downloaded by appending `?sdl` to the endpoint. + +### EnableGetRequests + +```csharp +endpoints.MapGraphQL().WithOptions(new GraphQLServerOptions +{ + EnableGetRequests = false +}); +``` + +This setting controls whether the GraphQL server is able to handle GraphQL operations sent via the query string in a HTTP GET request. + +### AllowedGetOperations + +```csharp +endpoints.MapGraphQL().WithOptions(new GraphQLServerOptions +{ + AllowedGetOperations = AllowedGetOperations.Query +}); +``` + +If [EnableGetRequests](#enablegetrequests) is `true` we can control the allowed operations for HTTP GET requests using the `AllowedGetOperations` setting. + +Per default only queries are accepted via HTTP GET. We can also allow mutations by setting `AllowedGetOperations` to `AllowedGetOperations.QueryAndMutation`. + +### EnableMultipartRequests + +```csharp +endpoints.MapGraphQL().WithOptions(new GraphQLServerOptions +{ + EnableMultipartRequests = false +}); +``` + +This setting controls whether the GraphQL server is able to handle HTTP Multipart forms, i.e. file uploads. + +[Learn more about uploading files](/docs/hotchocolate/v12/server/files#upload-scalar) + +### Tool + +We can specify options for the Banana Cake Pop GraphQL IDE using the `Tool` property. + +We could for example only enable Banana Cake Pop during development. + +```csharp +public class Startup +{ + public void Configure(IApplicationBuilder app, IWebHostEnvironment env) + { + app.UseRouting(); + + app.UseEndpoints(endpoints => + { + endpoints.MapGraphQL().WithOptions(new GraphQLServerOptions + { + Tool = { + Enable = env.IsDevelopment() + } + }); + }); + } +} +``` + +[Learn more about possible GraphQLToolOptions](#graphqltooloptions) + +# MapBananaCakePop + +We can call `MapBananaCakePop()` on the `IEndpointRouteBuilder` to serve [Banana Cake Pop](/docs/bananacakepop) on a different endpoint than the actual GraphQL endpoint. + +```csharp +public class Startup +{ + public void Configure(IApplicationBuilder app, IWebHostEnvironment env) + { + app.UseRouting(); + + app.UseEndpoints(endpoints => + { + endpoints.MapBananaCakePop("/graphql/ui"); + }); + } +} +``` + +This would make Banana Cake Pop accessible via a Web Browser at the `/graphql/ui` endpoint. 
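Since `MapBananaCakePop()` only serves the IDE, it is usually combined with one of the other middleware that serve the actual GraphQL endpoint. The following is a minimal sketch (the routes are only examples) that serves the HTTP transport at `/graphql` and Banana Cake Pop at `/graphql/ui`, using the `GraphQLEndpoint` option described below to point the IDE at the HTTP endpoint.

```csharp
public class Startup
{
    public void Configure(IApplicationBuilder app, IWebHostEnvironment env)
    {
        app.UseRouting();

        app.UseEndpoints(endpoints =>
        {
            // Handles HTTP GET / POST GraphQL requests.
            endpoints.MapGraphQLHttp("/graphql");

            // Serves Banana Cake Pop and points new documents
            // at the HTTP endpoint registered above.
            endpoints.MapBananaCakePop("/graphql/ui").WithOptions(
                new GraphQLToolOptions
                {
                    GraphQLEndpoint = "/graphql"
                });
        });
    }
}
```

Splitting the middleware like this is also what allows us to protect the HTTP endpoint with `RequireAuthorization()` while leaving the IDE accessible, as mentioned in the authorization documentation.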
+ +## GraphQLToolOptions + +We can configure Banana Cake Pop using `GraphQLToolOptions`. + +### Enable + +```csharp +endpoints.MapBananaCakePop("/ui").WithOptions(new GraphQLToolOptions +{ + Enable = false +}); +``` + +This setting controls whether Banana Cake Pop should be served or not. + +### GraphQLEndpoint + +```csharp +endpoints.MapBananaCakePop("/ui").WithOptions(new GraphQLToolOptions +{ + GraphQLEndpoint = "/my/graphql/endpoint" +}); +``` + +This setting sets the GraphQL endpoint to use when creating new documents within Banana Cake Pop. + +### UseBrowserUrlAsGraphQLEndpoint + +```csharp +endpoints.MapBananaCakePop("/ui").WithOptions(new GraphQLToolOptions +{ + UseBrowserUrlAsGraphQLEndpoint = true +}); +``` + +If set to `true` the current Web Browser URL is treated as the GraphQL endpoint when creating new documents within Banana Cake Pop. + +> ⚠️ Note: [GraphQLEndpoint](#graphqlendpoint) takes precedence over this setting. + +### Document + +```csharp +endpoints.MapBananaCakePop("/ui").WithOptions(new GraphQLToolOptions +{ + Document = "{ __typename }" +}); +``` + +This setting allows us to set a default GraphQL document that should be a placeholder for each new document created using Banana Cake Pop. + +### HttpMethod + +```csharp +endpoints.MapBananaCakePop("/ui").WithOptions(new GraphQLToolOptions +{ + HttpMethod = DefaultHttpMethod.Get +}); +``` + +This setting controls the default HTTP method used to execute GraphQL operations when creating new documents within Banana Cake Pop. + +### HttpHeaders + +```csharp +endpoints.MapBananaCakePop("/ui").WithOptions(new GraphQLToolOptions +{ + HttpHeaders = new HeaderDictionary + { + { "Content-Type", "application/json" } + } +}); +``` + +This setting allows us to specify default HTTP Headers that will be added to each new document created using Banana Cake Pop. + +### IncludeCookies + +```csharp +endpoints.MapBananaCakePop("/ui").WithOptions(new GraphQLToolOptions +{ + IncludeCookies = true +}); +``` + +This setting specifies the default for including cookies in cross-origin when creating new documents within Banana Cake Pop. + +### Title + +```csharp +endpoints.MapBananaCakePop("/ui").WithOptions(new GraphQLToolOptions +{ + Title = "My GraphQL explorer" +}); +``` + +This setting controls the tab name, when Banana Cake Pop is opened inside of a Web Browser. + +### DisableTelemetry + +```csharp +endpoints.MapBananaCakePop("/ui").WithOptions(new GraphQLToolOptions +{ + DisableTelemetry = true +}); +``` + +This setting allows us to disable telemetry events. + +### GaTrackingId + +```csharp +endpoints.MapBananaCakePop("/ui").WithOptions(new GraphQLToolOptions +{ + GaTrackingId = "google-analytics-id" +}); +``` + +This setting allows us to set a custom Google Analytics Id, which in turn allows us to gain insights into the usage of Banana Cake Pop hosted as part of our GraphQL server. + +The following information is collected: + +| Name | Description | +| -------------------- | --------------------------------------------------------------------- | +| `deviceId` | Random string generated on a per-device basis | +| `operatingSystem` | Name of the operating system: `Windows`, `macOS`, `Linux` & `Unknown` | +| `userAgent` | `User-Agent` header | +| `applicationType` | The type of application: `app` (Electron) or `middleware` | +| `applicationVersion` | Version of Banana Cake Pop | + +# MapGraphQLHttp + +We can call `MapGraphQLHttp()` on the `IEndpointRouteBuilder` to make our GraphQL server available via HTTP at a specific endpoint. 
+ +```csharp +public class Startup +{ + public void Configure(IApplicationBuilder app, IWebHostEnvironment env) + { + app.UseRouting(); + + app.UseEndpoints(endpoints => + { + endpoints.MapGraphQLHttp("/graphql/http"); + }); + } +} +``` + +With the above configuration we could now issue HTTP GET / POST requests against the `/graphql/http` endpoint. + +## GraphQLHttpOptions + +The HTTP endpoint can be configured using `GraphQLHttpOptions`. + +```csharp +endpoints.MapGraphQLHttp("/graphql/http").WithOptions(new GraphQLHttpOptions +{ + EnableGetRequests = false +}); +``` + +The `GraphQLHttpOptions` are the same as the `GraphQLServerOptions` except that there are no `Tool` and `EnableSchemaRequests` properties. + +[Learn more about GraphQLServerOptions](#graphqlserveroptions) + +# MapGraphQLWebsocket + +We can call `MapGraphQLWebSocket()` on the `IEndpointRouteBuilder` to make our GraphQL server available via WebSockets at a specific endpoint. + +```csharp +public class Startup +{ + public void Configure(IApplicationBuilder app, IWebHostEnvironment env) + { + app.UseRouting(); + + app.UseEndpoints(endpoints => + { + endpoints.MapGraphQLWebSocket("/graphql/ws"); + }); + } +} +``` + +With the above configuration we could now issue GraphQL subscription requests via WebSocket against the `/graphql/ws` endpoint. + +# MapGraphQLSchema + +We can call `MapGraphQLSchema()` on the `IEndpointRouteBuilder` to make our GraphQL schema available at a specific endpoint. + +```csharp +public class Startup +{ + public void Configure(IApplicationBuilder app, IWebHostEnvironment env) + { + app.UseRouting(); + + app.UseEndpoints(endpoints => + { + endpoints.MapGraphQLSchema("/graphql/schema"); + }); + } +} +``` + +With the above configuration we could now download our `schema.graphql` file from the `/graphql/schema` endpoint. diff --git a/website/src/docs/hotchocolate/v12/server/files.md b/website/src/docs/hotchocolate/v12/server/files.md new file mode 100644 index 00000000000..195027d1b69 --- /dev/null +++ b/website/src/docs/hotchocolate/v12/server/files.md @@ -0,0 +1,311 @@ +--- +title: Files +--- + +import { ExampleTabs, Annotation, Code, Schema } from "../../../components/mdx/example-tabs"; + +Handling files is traditionally not a concern of a GraphQL server, which is also why the [GraphQL over HTTP](https://github.com/graphql/graphql-over-http/blob/main/spec/GraphQLOverHTTP.md) specification doesn't mention it. + +That being said, we recognize that at some point in the development of a new application you'll likely have to deal with files in some way or another. Which is why we want to give you some guidance on this topic. + +# Uploading files + +When it comes to uploading files there are a couple of options we have. + +## Completely decoupled + +We could handle file uploads completely decoupled from our GraphQL server, for example using a dedicated web application offering a HTTP endpoint for us to upload our files to. + +This however has a couple of downsides: + +- Authentication and authorization need to be handled by this dedicated endpoint as well. +- The process of uploading a file would need to be documented outside of our GraphQL schema. + +## Upload scalar + +Hot Chocolate implements the [GraphQL multipart request specification](https://github.com/jaydenseric/graphql-multipart-request-spec) which adds a new `Upload` scalar and allows our GraphQL server to handle file upload streams. + + + +> ⚠️ Note: Files can not yet be uploaded through a gateway to stitched services using the `Upload` scalar. 
+ +### Usage + +In order to use file upload streams in our input types or as an argument register the `Upload` scalar like the following: + +```csharp +services + .AddGraphQLServer() + .AddType(); +``` + +> Note: The `Upload` scalar can only be used as an input type and does not work on output types. + +We can use the `Upload` scalar as an argument like the following: + + + + +```csharp +public class Mutation +{ + public async Task UploadFileAsync(IFile file) + { + var fileName = file.Name; + var fileSize = file.Length; + + await using Stream stream = file.OpenReadStream(); + + // We can now work with standard stream functionality of .NET + // to handle the file. + } +} +``` + + + + +```csharp +public class MutationType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor + .Field("uploadFile") + .Argument("file", a => a.Type()) + .Resolve(async context => + { + var file = context.ArgumentValue("file"); + + var fileName = file.Name; + var fileSize = file.Length; + + await using Stream stream = file.OpenReadStream(); + + // We can now work with standard stream functionality of .NET + // to handle the file. + }); + } +} +``` + + + + +Take a look at the Annotation-based or Code-first example. + + + + +[Learn more about arguments](/docs/hotchocolate/v12/defining-a-schema/arguments) + +In input object types it can be used like the following. + + + + +```csharp +public class ExampleInput +{ + [GraphQLType(typeof(NonNullType))] + public IFile File { get; set; } +} +``` + + + + +```csharp +public class ExampleInput +{ + public IFile File { get; set; } +} + +public class ExampleInputType : InputObjectType +{ + protected override void Configure(IInputObjectTypeDescriptor descriptor) + { + descriptor.Field(f => f.File).Type(); + } +} +``` + + + + +Take a look at the Annotation-based or Code-first example. + + + + +[Learn more about input object types](/docs/hotchocolate/v12/defining-a-schema/input-object-types) + +If you need to upload a list of files, it works exactly as you would expect. You just use a `List` or `ListType`. + +[Learn more about lists](/docs/hotchocolate/v12/defining-a-schema/lists) + +### Client usage + +When performing a mutation with the `Upload` scalar, we need to use variables. + +An example mutation could look like the following: + +```graphql +mutation ($file: Upload!) { + uploadFile(file: $file) { + success + } +} +``` + +If we now want to send this request to our GraphQL server, we need to do so using HTTP multipart: + +```bash +curl localhost:5000/graphql \ + -F operations='{ "query": "mutation ($file: Upload!) { uploadFile(file: $file) { success } }", "variables": { "file": null } }' \ + -F map='{ "0": ["variables.file"] }' \ + -F 0=@file.txt + +``` + +> Note: The `$file` variable is intentionally `null`. It is filled in by Hot Chocolate on the server. + +[More examples can be found here](https://github.com/jaydenseric/graphql-multipart-request-spec#examples) + +You can check if your GraphQL client supports the specification [here](https://github.com/jaydenseric/graphql-multipart-request-spec#client). + +Both Relay and Apollo support this specification through community packages: + +- [react-relay-network-modern](https://github.com/relay-tools/react-relay-network-modern) using the `uploadMiddleware` +- [apollo-upload-client](https://github.com/jaydenseric/apollo-upload-client) + +> ⚠️ Note: [Strawberry Shake](/docs/strawberryshake) does not yet support the `Upload` scalar. 
+ +### Options + +If you need to upload larger files or set custom upload size limits, you can configure those by registering custom [`FormOptions`](https://docs.microsoft.com/dotnet/api/microsoft.aspnetcore.http.features.formoptions). + +```csharp +services.Configure(options => +{ + // Set the limit to 256 MB + options.MultipartBodyLengthLimit = 268435456; +}); +``` + +Based on our WebServer we might need to configure these limits elsewhere as well. [Kestrel](https://docs.microsoft.com/aspnet/core/mvc/models/file-uploads#kestrel-maximum-request-body-size) and [IIS](https://docs.microsoft.com/aspnet/core/mvc/models/file-uploads#iis) are covered in the ASP.NET Core Documentation. + +## Presigned upload URLs + +The arguably best solution for uploading files is a hybrid of the above. Our GraphQL server still provides a mutation for uploading files, **but** the mutation is only used to setup a file upload. The actual file upload is done through a dedicated endpoint. + +We can accomplish this by returning _presigned upload URLs_ from our mutations. _Presigned upload URLs_ are URLs that point to an endpoint, through which we can upload our files. Files can only be uploaded to this endpoint, if the URL to this endpoint contains a valid token. Our mutation generates said token, appends the token to the upload URL and returns the _presigned_ URL to the client. + +Let's take a look at a quick example. We have built the following mutation resolver: + +```csharp +public record ProfilePictureUploadPayload(string UploadUrl); + +public class Mutation +{ + [Authorize] + public ProfilePictureUploadPayload UploadProfilePicture() + { + var baseUrl = "https://blob.chillicream.com/upload"; + + // Here we can handle our authorization logic + + // If the user is allowed to upload the profile picture + // we generate the token + var token = "myuploadtoken"; + + var uploadUrl = QueryHelpers.AddQueryString(baseUrl, "token", token); + + return new(uploadUrl); + } +} +``` + +If you are using any of the big cloud providers for storing your BLOBs, chances are they already come with support for _presigned upload URLs_: + +- [Azure Storage shared access signatures](https://docs.microsoft.com/azure/storage/common/storage-sas-overview) +- [AWS presigned URLS](https://docs.aws.amazon.com/AmazonS3/latest/userguide/PresignedUrlUploadObject.html) +- [GCP signed URLs](https://cloud.google.com/storage/docs/access-control/signed-urls) + +If you need to implement the file upload endpoint yourself, you can research best practices for creating _presigned upload URLs_. + +Let's take a look at how a client would upload a new profile picture. + +**Request** + +```graphql +mutation { + uploadProfilePicture { + uploadUrl + } +} +``` + +**Response** + +```json +{ + "data": { + "uploadProfilePicture": { + "uploadUrl": "https://blob.chillicream.com/upload?token=myuploadtoken" + } + } +} +``` + +Given the `uploadUrl` our client can now HTTP POST the file to this endpoint to upload his profile picture. + +This solution offers the following benefits: + +- Uploading files is treated as a separate concern and our GraphQL server is kept _pure_ in a sense. +- The GraphQL server maintains control over authorization and all of the business logic regarding granting a file upload stays in one place. +- The action of uploading a profile picture is described by the schema and therefore more discoverable for developers. + +There is still some uncertainty about how the actual file upload happens, e.g. 
which HTTP verb to use or which headers to send using the `uploadUrl`. These additional parameters can either be documented somewhere or be made queryable using our mutation. + +# Serving files + +Let's imagine we want to expose the file we just uploaded as the user's profile picture. How would we query for this file? + +We _could_ make the profile picture a queryable field in our graph that returns the Base64 encoded image. While this _can_ work it has a number of downsides: + +- Since the image is part of the JSON serialized GraphQL response, caching is incredibly hard. +- A query for the user's name might take a couple of milliseconds to transfer from the server to the client. Additionally querying for the image data might increase the response time by seconds. +- Let's not even think about how video playback, i.e. streaming, would work... + +The recommended solution is to serve files through a different HTTP endpoint and only referencing this endpoint in our GraphQL response. So instead of querying for the profile picture we would query for an URL that points to the profile picture. + +**Request** + +```graphql +{ + user { + name + imageUrl + } +} +``` + +**Response** + +```json +{ + "data": { + "user": { + "name": "John Doe", + "imageUrl": "https://blob.chillicream.com/john-doe.png" + } + } +} +``` + +Serving the file through a dedicated HTTP endpoint makes caching a lot easier and also allows for features like streaming video. Ultimately it gives control to the client on how a resource should be handled, given its URL. In the case of a web application we can pass the `imageUrl` as `src` to a HTML `img` element and let the browser handle the fetching and caching of the image. + +If you are using a cloud provider for file storage, chances are you are already accessing the files using an URL and you can simply expose this URL as a `String` field in your graph. If infrastructure for serving files is not already in place, you can look into how files can be served using ASP.NET Core or how to setup a dedicated web server like nginx to serve the files. diff --git a/website/src/docs/hotchocolate/v12/server/global-state.md b/website/src/docs/hotchocolate/v12/server/global-state.md new file mode 100644 index 00000000000..44e693e4d07 --- /dev/null +++ b/website/src/docs/hotchocolate/v12/server/global-state.md @@ -0,0 +1,126 @@ +--- +title: Global State +--- + +import { ExampleTabs, Annotation, Code, Schema } from "../../../components/mdx/example-tabs" + +Global State allows us to define properties on a per-request basis to be made available to all resolvers and middleware. + +# Initializing Global State + +We can add Global State using the `SetProperty` method on the `IQueryRequestBuilder`. This method takes a `key` and a `value` as an argument. While the `key` needs to be a `string` the value can be of any type. + +Using an interceptor allows us to initialize the Global State before the request is being executed. 
+ +```csharp +public class HttpRequestInterceptor : DefaultHttpRequestInterceptor +{ + public override ValueTask OnCreateAsync(HttpContext context, + IRequestExecutor requestExecutor, IQueryRequestBuilder requestBuilder, + CancellationToken cancellationToken) + { + string userId = + context.User.FindFirst(ClaimTypes.NameIdentifier)?.Value; + + requestBuilder.SetProperty("UserId", userId); + // requestBuilder.SetProperty("IntegerValue", int.Parse(userId)); + // requestBuilder.SetProperty("ObjectValue", new User { Id = userId }); + + return base.OnCreateAsync(context, requestExecutor, requestBuilder, + cancellationToken); + } +} +``` + +[Learn more about interceptors](/docs/hotchocolate/v12/server/interceptors) + +# Accessing Global State + +We can access the Global State in our resolvers like the following. + + + + +```csharp +public class Query +{ + public string Example1([GlobalState("UserId")] string userId) + { + // Omitted code for brevity + } + + public string Example2([GlobalState("ObjectValue")] User user) + { + // Omitted code for brevity + } +} +``` + +The `GlobalStateAttribute` accepts the `key` of the Global State `value` as an argument. An exception is thrown if no Global State value exists for the specified `key` or if the `value` can not be coerced to the type of the argument. + +It's a good practice to create a new attribute inheriting from `GlobalStateAttribute`. + +```csharp +public class UserIdAttribute : GlobalStateAttribute +{ + public UserIdAttribute() : base("UserId") + { + + } +} + +public class Query +{ + public string Example([UserId] string userId) + { + // Omitted code for brevity + } +} +``` + + + + +```csharp +public class QueryType : ObjectType +{ + protected override void Configure(IObjectTypeDescriptor descriptor) + { + descriptor + .Field("example") + .Resolve(context => + { + var userId = context.GetGlobalValue("UserId"); + + // Omitted code for brevity + }); + } +} +``` + +> ⚠️ Note: If no value exists for the specified `key` a default value is returned an no exception is thrown. + +We can also access the Global State through the `ContextData` dictionary on the `IResolverContext`. + +```csharp +descriptor + .Field("example") + .Resolve(context => + { + if (!context.ContextData.TryGetValue("UserId", out var value) + || value is not string userId) + { + // handle failed assertion + } + + // Omitted code for brevity + }); +``` + + + + +Take a look at the Annotation-based or Code-first example. + + + diff --git a/website/src/docs/hotchocolate/v12/server/index.md b/website/src/docs/hotchocolate/v12/server/index.md new file mode 100644 index 00000000000..3c62e3ece08 --- /dev/null +++ b/website/src/docs/hotchocolate/v12/server/index.md @@ -0,0 +1,49 @@ +--- +title: Overview +--- + +In this section, you will learn how you can configure your GraphQL server and we will look at some transport protocol-related details. + +# Endpoints + +Hot Chocolate comes with ASP.NET Core endpoint middleware for accepting HTTP / WebSocket GraphQL requests, downloading the GraphQL schema, or serving the [Banana Cake Pop](/docs/bananacakepop) GraphQL IDE. + +[Learn more about endpoints](/docs/hotchocolate/v12/server/endpoints) + +# Dependency injection + +Hot Chocolate allows you to access dependency injection services inside your resolvers. We will take a look at the different ways you can inject services and also how you can switch out the dependency injection provider. 
+ +[Learn more about Dependency Injection](/docs/hotchocolate/v12/server/dependency-injection) + +# Interceptors + +Interceptors allow you to intercept GraphQL requests before they are executed. There are interceptors for both GraphQL requests sent via HTTP as well as via WebSockets. + +In the case of WebSockets, the interceptor also allows you to handle life cycle events, such as when a client first connects. + +[Learn more about interceptors](/docs/hotchocolate/v12/server/interceptors) + +# Global State + +With Global State you can define properties on a per-request basis to be made available to all resolvers and middleware. + +[Learn more about Global State](/docs/hotchocolate/v12/server/global-state) + +# Introspection + +Introspection allows you to query the type system of your GraphQL server using regular GraphQL queries. While this is a powerful feature, enabling all sorts of amazing developer tooling, it can also be used as an attack vector. We will take a look at how you can control who is allowed to issue introspection queries against your GraphQL server. + +[Learn more about introspection](/docs/hotchocolate/v12/server/introspection) + +# Files + +Though not considered one of the responsibilities of a GraphQL server, for convenience, Hot Chocolate provides file upload support. We will also take a look at what other options you have when it comes to uploading and serving files. + +[Learn more about handling files](/docs/hotchocolate/v12/server/files) + +# Instrumentation + +Hot Chocolate allows you to gather instrumentation data about your GraphQL server, by hooking into various events in the execution process of a GraphQL request. You will also learn how to setup our OpenTelemetry integration and how to utilize _Apollo Tracing_. + +[Learn more about instrumentation](/docs/hotchocolate/v12/server/instrumentation) diff --git a/website/src/docs/hotchocolate/v12/server/instrumentation.md b/website/src/docs/hotchocolate/v12/server/instrumentation.md new file mode 100644 index 00000000000..851f4f31dc4 --- /dev/null +++ b/website/src/docs/hotchocolate/v12/server/instrumentation.md @@ -0,0 +1,489 @@ +--- +title: Instrumentation +--- + +Hot Chocolate allows us to create custom diagnostic event listeners, tapping into internal instrumentation events and further processing them. This abstraction allows us to use the logging or tracing infrastructure that we want to use. Further, we provide a default implementation for open telemetry. + +# Diagnostic events + +Currently, we can implement diagnostic event listeners for the following event types: + +- [Server events](#server-events) +- [Execution events](#execution-events) +- [DataLoader events](#dataloader-events) + +We will learn more about creating diagnostic event listeners for these event types in their respective sections. + +After creating a diagnostic event listener for any event type, we can register it by calling `AddDiagnosticEventListener` on the `IRequestExecutorBuilder`, specifying the newly developed diagnostic event listener as the generic type parameter. + +```csharp +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + .AddDiagnosticEventListener(); + } +} +``` + +If we need to access services within our event handlers, we can inject them using the constructor. Please note that injected services are effectively singleton since the diagnostic event listener is instantiated once. 
+ +```csharp +public class MyExecutionEventListener : ExecutionDiagnosticEventListener +{ + private readonly ILogger _logger; + + public MyExecutionEventListener(ILogger logger) + => _logger = logger; + + public override void RequestError(IRequestContext context, + Exception exception) + { + _logger.LogError(exception, "A request error occured!"); + } +} +``` + +> ⚠️ Note: Diagnostic event handlers are executed synchronously as part of the GraphQL request. Long-running operations inside a diagnostic event handler will negatively impact the query performance. Expensive operations should only be enqueued from within the handler and processed by a background service. + +## Scopes + +Most diagnostic event handlers have a return type of `void`, but some return an `IDisposable`. These event handlers enclose a specific operation, sort of like a scope. This scope is instantiated at the start of the operation and disposed of at the end of the operation. + +We can simply create a class implementing `IDisposable` to create a scope. + +```csharp +public class MyExecutionEventListener : ExecutionDiagnosticEventListener +{ + private readonly ILogger _logger; + + public MyExecutionEventListener(ILogger logger) + => _logger = logger; + + // this is invoked at the start of the `ExecuteRequest` operation + public override IDisposable ExecuteRequest(IRequestContext context) + { + var start = DateTime.UtcNow; + + return new RequestScope(start, _logger); + } +} + +public class RequestScope : IDisposable +{ + private readonly ILogger _logger; + private readonly DateTime _start; + + public RequestScope(DateTime start, ILogger logger) + { + _start = start; + _logger = logger; + } + + // this is invoked at the end of the `ExecuteRequest` operation + public void Dispose() + { + var end = DateTime.UtcNow; + var elapsed = end - _start; + + _logger.LogInformation("Request finished after {Ticks} ticks", + elapsed.Ticks); + } +} +``` + +If we are not interested in the scope of a specific diagnostic event handler, we can return an `EmptyScope`. Returning an empty scope where we do not need to track a span will reduce the performance impact of triggering your event. + +```csharp +public override IDisposable ExecuteRequest(IRequestContext context) +{ + _logger.LogInformation("Request execution started!"); + + return EmptyScope; +} +``` + +## Server Events + +We can instrument server events of the Hot Chocolate transport layer by creating a class inheriting from `ServerDiagnosticEventListener`. + +```csharp +public class MyServerEventListener : ServerDiagnosticEventListener +{ + public override IDisposable ExecuteHttpRequest(IRequestContext context) + { + // Omitted code for brevity + } +} +``` + +| Method name | Description | +| -------------------------- | ---------------------------------------------------------------------------------------------------------------------------- | +| ExecuteHttpRequest | Called when starting to execute a GraphQL over HTTP request in the transport layer. | +| StartSingleRequest | Called within the ExecuteHttpRequest scope and signals that a single GraphQL request will be executed. | +| StartBatchRequest | Called within the ExecuteHttpRequest scope and signals that a GraphQL batch request will be executed. | +| StartOperationBatchRequest | Called within the ExecuteHttpRequest scope and signals that a GraphQL batch request will be executed. | +| HttpRequestError | Called within the ExecuteHttpRequest scope and signals that a error occurred while processing the GraphQL over HTTP request. 
| +| ParseHttpRequest | Called when starting to parse a GraphQL HTTP request. | +| ParserErrors | Called within the ParseHttpRequest scope and signals that a error occurred while parsing the GraphQL request. | +| FormatHttpResponse | Called when starting to format a GraphQL query result. | +| WebSocketSession | Called when starting to establish a GraphQL WebSocket session. | +| WebSocketSessionError | Called within the WebSocketSession scope and signals that a error occurred that terminated the session. | + +## Execution Events + +We can hook into execution events of the Hot Chocolate execution engine by creating a class inheriting from `ExecutionDiagnosticEventListener`. + +```csharp +public class MyExecutionEventListener : ExecutionDiagnosticEventListener +{ + public override IDisposable ExecuteRequest(IRequestContext context) + { + // Omitted code for brevity + } +} +``` + +The following methods can be overriden. + +| Method name | Description | +| ----------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------- | +| ExecuteRequest | Scope that encloses the entire GraphQL request execution. Also the first diagnostic event raised during a GraphQL request. | +| RequestError | Called if the GraphQL request produced an error. Called immediately before the scope of `ExecuteRequest` is disposed. | +| ExecuteSubscription | Scope that encloses the execution of a subscription query. Scope is created once a client subscribes and disposed once the subscription ends. | +| ParseDocument | Scope that encloses the parsing of a document. | +| SyntaxError | Called if a document could not be parsed due to a syntax error. | +| ValidateDocument | Scope that encloses the validation of a document. | +| ValidationErrors | Called if errors occured during the validation of the document. | +| AnalyzeOperationComplexity | Called when starting to analyze the operation complexity. | +| OperationComplexityAnalyzerCompiled | Called within AnalyzeOperationComplexity scope and reports that an analyzer was compiled. | +| OperationComplexityResult | Called within AnalyzeOperationComplexity scope and reports the outcome of the analyzer. | +| CoerceVariables | Called when starting to coerce variables for a request. | +| CompileOperation | Called when starting to compile the GraphQL operation from the syntax tree. | +| ExecuteOperation | Called when starting to execute the GraphQL operation and its resolvers. | +| ExecuteStream | Called within the execute operation scope if the result is a streamed result. | +| ExecuteDeferredTask | Called when starting to execute a deferred part an operation within the ExecuteStream scope or within the ExecuteSubscription scope. | +| StartProcessing | Scope that encloses the scheduling of some work, e.g. invoking a DataLoader or starting execution tasks. | +| StopProcessing | Called if the execution engine has to wait for resolvers to complete or whenever the execution has completed. | +| RunTask | Scope that encloses the execution of an execution task. A `ResolverExecutionTask` uses the `ResolveFieldValue` event instead. | +| TaskError | Called if an execution task produced an error. | +| ResolveFieldValue | Scope that encloses the execution of a specific field resolver. (\*) | +| ResolverError | Called if a specific field resolver produces an error. 
| +| OnSubscriptionEvent | Scope that encloses the computation of a subscription result, once the event stream has yielded a new payload. | +| SubscriptionEventResult | Called once the subscription result has been successfully computed. | +| SubscriptionEventError | Called if the computation of the subscription result produced an error. | +| SubscriptionTransportError | Called if a subscription result could not be delivered to a client due to a transport issue. | +| AddedDocumentToCache | Called once a document has been added to `DocumentCache`. | +| RetrievedDocumentFromCache | Called once a document has been retrieved from the `DocumentCache`. | +| AddedOperationToCache | Called once an operation has been added to the `OperationCache`. | +| RetrievedOperationFromCache | Called once an operation has been retrieved from the `OperationCache`. | +| RetrievedDocumentFromStorage | Called once a document has been retrieved from a persisted query storage. | +| ExecutorCreated | Called once a request executor has been created. Executors are created once for a schema (includes stitched schemas) during the first request. | +| ExecutorEvicted | Called once a request executor is evicted. This can happen if the schema or the configuration of the executor changes. | + +(\*): The `ResolveFieldValue` event is not invoked per default, as it would be too much overhead to execute the event for each resolver used within a query. We have to override the `EnableResolveFieldValue` property for the execution engine to invoke the event handler. + +```csharp +public class MyExecutionEventListener : ExecutionDiagnosticEventListener +{ + public override bool EnableResolveFieldValue => true; + + public override IDisposable ResolveFieldValue(IMiddlewareContext context) + { + // Omitted code for brevity + } +} +``` + +## DataLoader Events + +We can hook into DataLoader events by creating a class inheriting from `ExecutionDiagnosticEventListener`. + +```csharp +public class MyDataLoaderEventListener : DataLoaderDiagnosticEventListener +{ + public override IDisposable ExecuteBatch(IDataLoader dataLoader, + IReadOnlyList keys) + { + // Omitted code for brevity + } +} +``` + +The following methods can be overriden. + +| Method name | Description | +| --------------------- | --------------------------------------------------------------------------------------------------------------- | +| ExecuteBatch | Scope that encloses a batch operation, i.e. the resolution of a specific set of keys. | +| BatchResults | Called once a batch operation has been completed, i.e. all items for a specific set of keys have been resolved. | +| BatchError | Called if a batch operation has failed. | +| BatchItemError | Called for a specific item that contained an error within a batch operation. | +| ResolvedTaskFromCache | Called once a task to resolve an item by its key has been added or retrieved from the `TaskCache`. | + +# OpenTelemetry + +OpenTelemetry is an open-source project and unified standard for service instrumentation or a way of measuring performance. Sponsored by the Cloud Native Computing Foundation (CNCF), it replaces OpenTracing and OpenCensus. The goal is to standardize how you collect and send telemetry data to a backend platform. + +Hot Chocolate has implemented an OpenTelemetry integration, and you can easily opt into it instead of building a custom tracing integration. + + + +## Setup + +To get started, add the HotChocolate.Diagnostics package to your project. 
+ +```bash +dotnet add package HotChocolate.Diagnostics +``` + +> ⚠️ Note: All `HotChocolate.*` packages need to have the same version. + +Next, head over to your `Program.cs` and add `AddInstrumentation` to your GraphQL configuration. + +```csharp +builder.Services + .AddGraphQLServer() + .AddQueryType() + .AddInstrumentation(); +``` + +Now, we need to add OpenTelemetry to our project, and in the example here, we will use it with a _Jaeger_ exporter. + +Let's first add the needed packages: + +```bash +dotnet add package OpenTelemetry.Extensions.Hosting --version 1.0.0-rc8 +dotnet add package OpenTelemetry.Instrumentation.AspNetCore --version 1.0.0-rc8 +dotnet add package OpenTelemetry.Instrumentation.Http --version 1.0.0-rc8 +dotnet add package OpenTelemetry.Exporter.Jaeger --version 1.1.0 +``` + +Now add the OpenTelemetry setup code to the `Program.cs`: + +```csharp +builder.Logging.AddOpenTelemetry( + b => + { + b.IncludeFormattedMessage = true; + b.IncludeScopes = true; + b.ParseStateValues = true; + b.SetResourceBuilder(ResourceBuilder.CreateDefault().AddService("Demo")); + }); + +builder.Services.AddOpenTelemetryTracing( + b => + { + b.AddHttpClientInstrumentation(); + b.AddAspNetCoreInstrumentation(); + b.AddHotChocolateInstrumentation(); + b.AddJaegerExporter(); + }); +``` + +`AddHotChocolateInstrumentation` will register the Hot Chocolate instrumentation events with OpenTelemetry. + +Your `Program.cs` should look like the following: + +```csharp +using OpenTelemetry.Resources; +using OpenTelemetry.Trace; + +var builder = WebApplication.CreateBuilder(args); + +builder.Services + .AddGraphQLServer() + .AddQueryType() + .AddInstrumentation(); + +builder.Logging.AddOpenTelemetry( + b => b.SetResourceBuilder(ResourceBuilder.CreateDefault().AddService("Demo"))); + +builder.Services.AddOpenTelemetryTracing( + b => + { + b.AddHttpClientInstrumentation(); + b.AddAspNetCoreInstrumentation(); + b.AddHotChocolateInstrumentation(); + b.AddJaegerExporter(); + }); + +var app = builder.Build(); +app.MapGraphQL(); +app.Run(); +``` + +When running GraphQL requests, you can now inspect in _Jaeger_ how the request performed and look into the various parts of the execution telemetry. + +![Jaeger](../../shared/jaeger1.png) + +## Options + +By default, we have not instrumented all of our execution events. You can drill deeper into the execution telemetry by adding more instrumentation scopes. + +```csharp +builder.Services + .AddGraphQLServer() + .AddQueryType() + .AddInstrumentation(o => + { + o.Scopes = ActivityScopes.All; + }); +``` + +> Beware, adding more instrumentation scopes is not free and will add more performance overhead. + +![Jaeger](../../shared/jaeger2.png) + +Further, if you work with elastic and you want to give your root activity a name that is associated with the executed operation, you can quickly just tell the instrumentation to do just that for you. + +```csharp +builder.Services + .AddGraphQLServer() + .AddQueryType() + .AddInstrumentation(o => + { + o.RenameRootActivity = true; + }); +``` + +![Jaeger](../../shared/jaeger3.png) + +## Enriching Activities + +You can inherit from `ActivityEnricher` and override the enrich method for an Activity to add custom data or remove default data. 
+ +```csharp +public class CustomActivityEnricher : ActivityEnricher +{ + public CustomActivityEnricher( + ObjectPool stringBuilderPoolPool, + InstrumentationOptions options) + : base(stringBuilderPoolPool, options) + { + } + + public override void EnrichResolveFieldValue(IMiddlewareContext context, Activity activity) + { + base.EnrichResolveFieldValue(context, activity); + + activity.SetTag("custom", "data"); + } +} +``` + +Register the custom activity enricher as a singleton: + +```csharp +builder.Services.AddSingleton(); +``` + +![Jaeger](../../shared/jaeger4.png) + +# Apollo Tracing + +_Apollo Tracing_ is a [performance tracing specification](https://github.com/apollographql/apollo-tracing) for GraphQL servers. It works by returning tracing information about the current request alongside the computed data. While it is not part of the GraphQL specification itself, there is a common agreement in the GraphQL community that all GraphQL servers should support it. + +**Example** + +```graphql +{ + book(id: 1) { + name + author + } +} +``` + +The above request would result in the below response if _Apollo Tracing_ is enabled. + +```json +{ + "data": { + "book": { + "name": "C# in Depth", + "author": "Jon Skeet" + } + }, + "extensions": { + "tracing": { + "version": 1, + "startTime": "2021-09-25T15:31:41.6515774Z", + "endTime": "2021-09-25T15:31:43.1602255Z", + "duration": 1508648100, + "parsing": { "startOffset": 13335, "duration": 781 }, + "validation": { "startOffset": 17012, "duration": 323681 }, + "execution": { + "resolvers": [ + { + "path": ["book"], + "parentType": "Query", + "fieldName": "book", + "returnType": "Book", + "startOffset": 587048, + "duration": 1004748344 + }, + { + "path": ["book", "author"], + "parentType": "Book", + "fieldName": "author", + "returnType": "String", + "startOffset": 1005854823, + "duration": 500265020 + } + ] + } + } + } +} +``` + +## Enabling Apollo Tracing + +_Apollo Tracing_ needs to be explicitly enabled by calling `AddApolloTracing` on the `IRequestExecutorBuilder`. + +```csharp +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + .AddApolloTracing(); + } +} +``` + +Further, we can specify a `TracingPreference`. Per default, it is `TracingPreference.OnDemand`. + +```csharp +services + .AddGraphQLServer() + .AddApolloTracing(TracingPreference.Always); +``` + +There are three possible options for the `TracingPreference`. + +| Option | Description | +| ---------- | -------------------------------------------------------------------------------------------- | +| `Never` | _Apollo Tracing_ is disabled. Useful if we want to conditionally disable _Apollo Tracing_. | +| `OnDemand` | _Apollo Tracing_ only traces requests if a specific header is passed with the query request. | +| `Always` | _Apollo Tracing_ is always enabled, and all query requests are traced automatically. | + +## On Demand + +When _Apollo Tracing_ is added using the `TracingPreference.OnDemand`, we are required to pass one of the following HTTP headers with our query request in order to enable tracing for this specific request. + +- `GraphQL-Tracing=1` +- `X-Apollo-Tracing=1` + +When using `curl` this could look like the following. 
+
+```bash
+curl -X POST -H 'GraphQL-Tracing: 1' -H 'Content-Type: application/json' \
+  -d '{"query":"{\n book(id: 1) {\n name\n author\n }\n}\n"}' \
+  'http://localhost:5000/graphql'
+```
diff --git a/website/src/docs/hotchocolate/v12/server/interceptors.md b/website/src/docs/hotchocolate/v12/server/interceptors.md
new file mode 100644
index 00000000000..a76f0deabe0
--- /dev/null
+++ b/website/src/docs/hotchocolate/v12/server/interceptors.md
@@ -0,0 +1,239 @@
+---
+title: Interceptors
+---
+
+Interceptors allow us to hook into protocol-specific events. We can, for example, intercept an incoming HTTP request or a client connecting to or disconnecting from a WebSocket session.
+
+# IHttpRequestInterceptor
+
+Each GraphQL request sent via HTTP can be intercepted using an `IHttpRequestInterceptor` before it is executed. Per default, Hot Chocolate registers a `DefaultHttpRequestInterceptor` for this purpose.
+
+We can create a new class inheriting from `DefaultHttpRequestInterceptor` to provide our own logic for request interception.
+
+```csharp
+public class HttpRequestInterceptor : DefaultHttpRequestInterceptor
+{
+    public override ValueTask OnCreateAsync(HttpContext context,
+        IRequestExecutor requestExecutor, IQueryRequestBuilder requestBuilder,
+        CancellationToken cancellationToken)
+    {
+        return base.OnCreateAsync(context, requestExecutor, requestBuilder,
+            cancellationToken);
+    }
+}
+```
+
+Once we have defined our custom `HttpRequestInterceptor`, we also have to register it.
+
+```csharp
+services
+    .AddGraphQLServer()
+    .AddHttpRequestInterceptor<HttpRequestInterceptor>();
+```
+
+If needed, we can also inject services into our custom `HttpRequestInterceptor` using its constructor.
+
+## OnCreateAsync
+
+This method is invoked for **every** GraphQL request sent via HTTP. It is a great place to set global state variables, extend the identity of the authenticated user, or do anything else that we want to do on a per-request basis.
+
+```csharp
+public override ValueTask OnCreateAsync(HttpContext context,
+    IRequestExecutor requestExecutor, IQueryRequestBuilder requestBuilder,
+    CancellationToken cancellationToken)
+{
+    return base.OnCreateAsync(context, requestExecutor, requestBuilder,
+        cancellationToken);
+}
+```
+
+> ⚠️ Note: `base.OnCreateAsync` should always be invoked, since the default implementation takes care of adding the dependency injection services as well as some important global state variables, such as the `ClaimsPrincipal`. Not doing this can lead to unexpected issues.
+
+Most of the configuration will be done through the `IQueryRequestBuilder`, which is injected as an argument into this method.
+
+[Learn more about the IQueryRequestBuilder](#iqueryrequestbuilder)
+
+If we want to fail the request before it is executed, we can throw a `GraphQLException`. The middleware will then translate this exception into a proper GraphQL error response for the client.
+
+# ISocketSessionInterceptor
+
+Each GraphQL request sent over WebSockets can be intercepted using an `ISocketSessionInterceptor` before it is executed. Since WebSockets are long-lived connections, we can also intercept specific lifecycle events, such as connecting or disconnecting. Per default, Hot Chocolate registers a `DefaultSocketSessionInterceptor` for this purpose.
+
+We can create a new class inheriting from `DefaultSocketSessionInterceptor` to provide our own logic for request / lifecycle interception.
+
+```csharp
+public class SocketSessionInterceptor : DefaultSocketSessionInterceptor
+{
+    public override ValueTask<ConnectionStatus> OnConnectAsync(
+        ISocketConnection connection, InitializeConnectionMessage message,
+        CancellationToken cancellationToken)
+    {
+        return base.OnConnectAsync(connection, message, cancellationToken);
+    }
+
+    public override ValueTask OnRequestAsync(ISocketConnection connection,
+        IQueryRequestBuilder requestBuilder,
+        CancellationToken cancellationToken)
+    {
+        return base.OnRequestAsync(connection, requestBuilder,
+            cancellationToken);
+    }
+
+    public override ValueTask OnCloseAsync(ISocketConnection connection,
+        CancellationToken cancellationToken)
+    {
+        return base.OnCloseAsync(connection, cancellationToken);
+    }
+}
+```
+
+Once we have defined our custom `SocketSessionInterceptor`, we also have to register it.
+
+```csharp
+services
+    .AddGraphQLServer()
+    .AddSocketSessionInterceptor<SocketSessionInterceptor>();
+```
+
+If needed, we can also inject services into our custom `SocketSessionInterceptor` using its constructor.
+
+We do not have to override every method shown above; we can override only the ones we are interested in.
+
+## OnConnectAsync
+
+This method is invoked **once**, when a client attempts to initialize a WebSocket connection. We have the option to either accept or reject specific connection requests.
+
+```csharp
+public async override ValueTask<ConnectionStatus> OnConnectAsync(
+    ISocketConnection connection, InitializeConnectionMessage message,
+    CancellationToken cancellationToken)
+{
+    if (condition)
+    {
+        return ConnectionStatus.Reject("Connection rejected for X reason!");
+    }
+
+    return ConnectionStatus.Accept();
+}
+```
+
+We also get access to the `InitializeConnectionMessage`. If a client sends a payload with this message, for example an auth token, we can access the `Payload` like the following.
+
+```csharp
+public async override ValueTask<ConnectionStatus> OnConnectAsync(
+    ISocketConnection connection, InitializeConnectionMessage message,
+    CancellationToken cancellationToken)
+{
+    if (message.Payload?.TryGetValue("MyKey", out object? value) == true)
+    {
+        // ...
+    }
+
+    return ConnectionStatus.Accept();
+}
+```
+
+## OnRequestAsync
+
+This method is invoked for **every** GraphQL request a client sends using the already established WebSocket connection. It is a great place to set global state variables, extend the identity of the authenticated user, or do anything else that we want to do on a per-request basis.
+
+```csharp
+public override ValueTask OnRequestAsync(ISocketConnection connection,
+    IQueryRequestBuilder requestBuilder, CancellationToken cancellationToken)
+{
+    return base.OnRequestAsync(connection, requestBuilder, cancellationToken);
+}
+```
+
+> ⚠️ Note: `base.OnRequestAsync` should always be invoked, since the default implementation takes care of adding the dependency injection services as well as some important global state variables, such as the `ClaimsPrincipal`. Not doing this can lead to unexpected issues.
+
+Most of the configuration will be done through the `IQueryRequestBuilder`, which is injected as an argument into this method.
+
+[Learn more about the IQueryRequestBuilder](#iqueryrequestbuilder)
+
+If we want to fail the request before it is executed, we can throw a `GraphQLException`. The middleware will then translate this exception into a proper GraphQL error response for the client.
+
+## OnCloseAsync
+
+This method is invoked once a client closes the WebSocket connection or the connection is terminated in any other way.
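+
+For example, if our interceptor tracked per-connection state, this is where we would release it. The following is only a minimal sketch, assuming a hypothetical `_connectionRegistry` service that our interceptor received through its constructor; it merely illustrates where such cleanup code would go.
+
+```csharp
+public override ValueTask OnCloseAsync(ISocketConnection connection,
+    CancellationToken cancellationToken)
+{
+    // Hypothetical cleanup: remove the connection from our own bookkeeping.
+    _connectionRegistry.Unregister(connection);
+
+    return base.OnCloseAsync(connection, cancellationToken);
+}
+```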
+
+# IQueryRequestBuilder
+
+The `IQueryRequestBuilder` allows us to influence the execution of a GraphQL request.
+
+It has many capabilities, but most of them are only used internally. In the following, we are going to cover the methods that are most relevant to us as consumers.
+
+## Properties
+
+We can set `Properties`, also called Global State, on the `IQueryRequestBuilder`, which can then be referenced in middleware, field resolvers, etc.
+
+[Learn more about Global State](/docs/hotchocolate/v12/server/global-state)
+
+### SetProperty
+
+`SetProperty` allows us to add a key-value pair, where the key is a `string` and the value can be anything, i.e. an `object`.
+
+```csharp
+requestBuilder.SetProperty("name", "value");
+requestBuilder.SetProperty("name", 123);
+requestBuilder.SetProperty("name", new User { Name = "Joe" });
+```
+
+There is also `TryAddProperty`, which only adds the property if it hasn't yet been added.
+
+```csharp
+requestBuilder.TryAddProperty("name", 123);
+```
+
+### SetProperties
+
+`SetProperties` allows us to set all properties at once.
+
+```csharp
+var properties = new Dictionary<string, object?>
+{
+    { "name", "value" }
+};
+
+requestBuilder.SetProperties(properties);
+```
+
+> ⚠️ Note: This overwrites all previous properties, which is especially catastrophic when called after the default implementation of an interceptor has added properties.
+
+## SetServices
+
+`SetServices` allows us to set an `IServiceProvider` that should be used for dependency injection during the request.
+
+```csharp
+var provider = new ServiceCollection()
+    // MyService is a placeholder for whatever services the request should have access to
+    .AddSingleton<MyService>()
+    .BuildServiceProvider();
+
+requestBuilder.SetServices(provider);
+```
+
+There is also `TrySetServices`, which only sets the `IServiceProvider` if it hasn't yet been set.
+
+## AllowIntrospection
+
+If we have disabled introspection globally, `AllowIntrospection` allows us to enable it for specific requests.
+
+```csharp
+requestBuilder.AllowIntrospection();
+```
+
+## SkipComplexityAnalysis
+
+When using the [operation complexity feature](/docs/hotchocolate/v12/security/operation-complexity), we can skip the complexity analysis for specific requests.
+
+```csharp
+requestBuilder.SkipComplexityAnalysis();
+```
+
+## SetMaximumAllowedComplexity
+
+When using the [operation complexity feature](/docs/hotchocolate/v12/security/operation-complexity), we can overwrite the global complexity limit for specific requests.
+
+```csharp
+requestBuilder.SetMaximumAllowedComplexity(5000);
+```
diff --git a/website/src/docs/hotchocolate/v12/server/introspection.md b/website/src/docs/hotchocolate/v12/server/introspection.md
new file mode 100644
index 00000000000..b4bcdf04e4c
--- /dev/null
+++ b/website/src/docs/hotchocolate/v12/server/introspection.md
@@ -0,0 +1,177 @@
+---
+title: Introspection
+---
+
+Introspection is what enables GraphQL's rich tooling ecosystem as well as powerful IDEs like [Banana Cake Pop](/docs/bananacakepop) or GraphiQL.
+
+Every GraphQL server exposes a `__schema` and `__type` field on the query type as well as a `__typename` field on each type. These fields are used to gain insights into the schema of our GraphQL server.
+ +Using the `__schema` field, we could for example list the names of all types our GraphQL server contains: + +```graphql +{ + __schema { + types { + name + } + } +} +``` + +We could also request the fields plus their arguments of a specific type using the `__type` field: + +```graphql +{ + __type(name: "Book") { + fields { + name + args { + name + type { + name + } + } + } + } +} +``` + +The `__typename` field will most likely be the introspection feature we as regular developers will be using the most. When working with [unions](/docs/hotchocolate/v12/defining-a-schema/unions) for example it can tell us the name of the type that's being returned, allowing us to handle the result accordingly. + +```graphql +{ + posts { + __typename + ... on VideoPost { + videoUrl + } + ... on TextPost { + text + } + } +} +``` + +While these fields can be useful to us, they are mainly intended for use in developer tooling and as regular developers we are unlikely required to write our own introspection queries on a daily basis. + +[Learn more about introspection](https://graphql.org/learn/introspection) + +# Disabling introspection + +While introspection is a powerful feature that can tremendously improve our development workflow, it can also be used as an attack vector. A malicous user could for example request all details about all the types of our GraphQL server. Depending on the number of types this can degrade the performance of our GraphQL server. If our API should not be browsed by other developers we have the option to disable the introspection feature. + +We can disable introspection by calling `AllowIntrospection()` with a `false` argument on the `IRequestExecutorBuilder`. + +```csharp +services.AddGraphQLServer().AllowIntrospection(false); +``` + +While clients can still issue introspection queries, Hot Chocolate will now return an error response. + +But we most likely do not want to disable introspection while developing, so we can make use of the `IWebHostEnvironment` to toggle introspection based on the current hosting environment. + +```csharp +public class Startup +{ + private readonly IWebHostEnvironment _webHostEnvironment; + + public Startup(IWebHostEnvironment webHostEnvironment) + { + _webHostEnvironment = webHostEnvironment; + } + + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + .AllowIntrospection(_webHostEnvironment.IsDevelopment()); + } +} +``` + +## Allowlisting requests + +We can allow introspection on a per-request basis, while keeping it disabled for the majority of requests. In order to do this we need to create a request interceptor and determine based on the request, i.e. the `HttpContext`, whether we want to allow introspection or not. 
+ +```csharp +public class IntrospectionInterceptor : DefaultHttpRequestInterceptor +{ + public override ValueTask OnCreateAsync(HttpContext context, + IRequestExecutor requestExecutor, IQueryRequestBuilder requestBuilder, + CancellationToken cancellationToken) + { + if (context.Request.Headers.ContainsKey("X-Allow-Introspection")) + { + requestBuilder.AllowIntrospection(); + } + + return base.OnCreateAsync(context, requestExecutor, requestBuilder, + cancellationToken); + } +} + +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + services + .AddGraphQLServer() + // We disable introspection per default + .AllowIntrospection(false) + .AddHttpRequestInterceptor(); + } +} +``` + +[Learn more about interceptors](/docs/hotchocolate/v12/server/interceptors) + +## Custom error message + +If a client tries to execute an introspection query whilst introspection is not allowed, he will receive an error message similar to the following: + +```json +{ + "errors": [ + { + "message": "Introspection is not allowed for the current request.", + "locations": [ + { + "line": 2, + "column": 3 + } + ], + "extensions": { + "field": "__schema", + "code": "HC0046" + } + } + ] +} +``` + +If we need to customize the error message, we can do so in our request interceptor as well. + +```csharp +public class IntrospectionInterceptor : DefaultHttpRequestInterceptor +{ + public override ValueTask OnCreateAsync(HttpContext context, + IRequestExecutor requestExecutor, IQueryRequestBuilder requestBuilder, + CancellationToken cancellationToken) + { + if (context.Request.Headers.ContainsKey("X-Allow-Introspection")) + { + requestBuilder.AllowIntrospection(); + } + else + { + // the header is not present i.e. introspection continues + // to be disallowed + requestBuilder.SetIntrospectionNotAllowedMessage( + "Missing `X-Allow-Introspection` header"); + } + + return base.OnCreateAsync(context, requestExecutor, requestBuilder, + cancellationToken); + } +} +```
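+
+With an interceptor like the one above in place, a blocked introspection query would then return an error along these lines (the exact shape of the response may vary):
+
+```json
+{
+  "errors": [
+    {
+      "message": "Missing `X-Allow-Introspection` header",
+      "extensions": {
+        "field": "__schema",
+        "code": "HC0046"
+      }
+    }
+  ]
+}
+```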