diff --git a/.dialyzer_ignore.exs b/.dialyzer_ignore.exs new file mode 100644 index 0000000000..a32ac33b23 --- /dev/null +++ b/.dialyzer_ignore.exs @@ -0,0 +1,4 @@ +[ + {":0:unknown_function Function :persistent_term.get/1 does not exist."}, + {":0:unknown_function Function :persistent_term.put/2 does not exist."} +] diff --git a/.formatter.exs b/.formatter.exs index b5b727b1d8..834739349d 100644 --- a/.formatter.exs +++ b/.formatter.exs @@ -8,6 +8,8 @@ locals_without_parens = [ config: 1, deprecate: 1, description: 1, + directive: 1, + directive: 2, directive: 3, enum: 2, enum: 3, @@ -18,8 +20,10 @@ locals_without_parens = [ import_fields: 2, import_fields: 1, import_types: 1, + import_types: 2, + import_sdl: 1, + import_sdl: 2, input_object: 3, - instruction: 1, interface: 1, interface: 3, interfaces: 1, @@ -31,8 +35,7 @@ locals_without_parens = [ object: 3, on: 1, parse: 1, - record_object!: 4, - recordable!: 4, + repeatable: 1, resolve: 1, resolve_type: 1, scalar: 2, @@ -43,12 +46,13 @@ locals_without_parens = [ union: 3, value: 1, value: 2, + values: 1 ] [ inputs: ["mix.exs", "{config,lib,test}/**/*.{ex,exs}"], locals_without_parens: locals_without_parens, export: [ - locals_without_parens: locals_without_parens, + locals_without_parens: locals_without_parens ] ] diff --git a/.github/workflows/elixir.yml b/.github/workflows/elixir.yml new file mode 100644 index 0000000000..43fb166422 --- /dev/null +++ b/.github/workflows/elixir.yml @@ -0,0 +1,60 @@ +name: CI + +on: + push: + pull_request: + branches: + - master + +jobs: + test: + name: Elixir ${{matrix.elixir}} / OTP ${{matrix.otp}} + runs-on: ubuntu-latest + + strategy: + matrix: + elixir: + - '1.10' + - '1.11' + otp: + - '22' + - '23' + + steps: + - name: Checkout + uses: actions/checkout@v2 + + - name: Set up Elixir + uses: erlef/setup-elixir@v1 + with: + elixir-version: ${{ matrix.elixir }} + otp-version: ${{ matrix.otp }} + + - name: Restore deps cache + uses: actions/cache@v2 + with: + path: | + deps + 
_build + key: deps-${{ runner.os }}-${{ matrix.otp }}-${{ matrix.elixir }}-${{ hashFiles('**/mix.lock') }}-${{ github.sha }} + restore-keys: | + deps-${{ runner.os }}-${{ matrix.otp }}-${{ matrix.elixir }}-${{ hashFiles('**/mix.lock') }} + deps-${{ runner.os }}-${{ matrix.otp }}-${{ matrix.elixir }} + + - name: Install package dependencies + run: mix deps.get + + - name: Check Formatting + run: mix format --check-formatted + + - name: Run unit tests + run: | + mix clean + mix test + + - name: Run unit tests with persistent_term backend + run: | + mix clean + mix test + env: + SCHEMA_PROVIDER: persistent_term diff --git a/.gitignore b/.gitignore index 67e4b35de2..feaa0b52ca 100644 --- a/.gitignore +++ b/.gitignore @@ -9,4 +9,6 @@ erl_crash.dump src/*.erl .tool-versions* missing_rules.rb -.DS_Store \ No newline at end of file +.DS_Store +/priv/plts/*.plt +/priv/plts/*.plt.hash diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index b9e96ce3e6..0000000000 --- a/.travis.yml +++ /dev/null @@ -1,12 +0,0 @@ -language: elixir -elixir: - - 1.4.5 - - 1.5.1 -notifications: - recipients: - - bruce.williams@cargosense.com - - ben.wilson@cargosense.com -otp_release: - - 19.2 - - 20.0 -script: "MIX_ENV=test mix local.hex --force && MIX_ENV=test mix do deps.get, test" diff --git a/CHANGELOG.md b/CHANGELOG.md index 208fa623b4..8cc7a79b75 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,7 +1,132 @@ # Changelog -For changes pre v1.5 see the [v1.4](https://github.com/absinthe-graphql/absinthe/blob/v1.4/CHANGELOG.md) branch +## 1.7.0 -## v1.5.0-dev +- POTENTIALLY BREAKING Bug Fix: [Validate variable usage in according to spec](https://github.com/absinthe-graphql/absinthe/pull/1141). This could break incoming documents previously considered valid. Skip the `Absinthe.Phase.Document.Arguments.VariableTypesMatch` phase to avoid this check. See `Absinthe.Pipeline` on adjusting the document pipeline. 
-- Complete rewrite of schema internals
+- Feature: [Add GraphQL document formatter](https://github.com/absinthe-graphql/absinthe/pull/1114)
+- Bug Fix: [Fix Phase.Schema.Validation.InputOutputTypesCorrectlyPlaced not applied to SDL schemas](https://github.com/absinthe-graphql/absinthe/pull/1142/files)
+- Bug Fix: [Use inspect/1 to safely encode bad binary samples](https://github.com/absinthe-graphql/absinthe/pull/1121)
+- Bug Fix: [key :is_type_of not found on Interface](https://github.com/absinthe-graphql/absinthe/issues/1077)
+- Bug Fix: [Validate object/interfaces implement all transitive interfaces](https://github.com/absinthe-graphql/absinthe/pull/1127)
+- Bug Fix: [Fix check unknown types to also cover wrapped types](https://github.com/absinthe-graphql/absinthe/pull/1138) This could break incoming documents previously considered valid. Skip the `Absinthe.Phase.Validation.KnownTypeNames` phase to avoid this check. See `Absinthe.Pipeline` on adjusting the document pipeline.
+- Bug Fix: [Validate field names are unique to an object, interface or an input object](https://github.com/absinthe-graphql/absinthe/pull/1135)
+
+## 1.6.7 (Retired)
+
+Originally included the items from 1.7.0, but the spec validation fix was considered
+too impactful for a patch release.
+## 1.6.6 + +- Feature: [Update telemetry dependency to stable ~> 1.0](https://github.com/absinthe-graphql/absinthe/pull/1097) +- Feature: [Use makeup_graphql to get GraphQL syntax highlighting in docs](https://github.com/absinthe-graphql/absinthe/pull/1099) +- Bug Fix: [Fix exception when field name contains all invalid characters](https://github.com/absinthe-graphql/absinthe/pull/1096) + +## 1.6.5 + +- Bug Fix: [Fix interface compilation behavior difference between SDL & DSL](https://github.com/absinthe-graphql/absinthe/pull/1091) +- Bug Fix: [Allow SDL syntax to contain union without member types](https://github.com/absinthe-graphql/absinthe/pull/1085) +- Bug Fix: [Account for prototype schema when rendering SDL via mix task](https://github.com/absinthe-graphql/absinthe/pull/1086) +- Feature: Always inline functions when using persistent_term backend. +- Feature: [Support optional open ended scalars](https://github.com/absinthe-graphql/absinthe/pull/1069) + +## 1.6.4 + +- Feature: [Compress registry tables by default](https://github.com/absinthe-graphql/absinthe/pull/1058) +- Bug Fix: [Fix compilation deadlocks on type imports](https://github.com/absinthe-graphql/absinthe/pull/1056) +- Bug Fix: [Raise a better error when string serialization fails](https://github.com/absinthe-graphql/absinthe/pull/1062) + +## 1.6.3 + +- Bug Fix: [Fix unicode bug when encoding parse error](https://github.com/absinthe-graphql/absinthe/pull/1044) + +## 1.6.2 + +- Bug Fix: [Fix regression in SDL rendering for enum values](https://github.com/absinthe-graphql/absinthe/pull/1041) + +## 1.6.1 + +- Feature: [Improved serialization failure messages](https://github.com/absinthe-graphql/absinthe/pull/1033) +- Bug Fix: [Render null default values in SDL](https://github.com/absinthe-graphql/absinthe/pull/1032) +- Bug Fix: [Reduce memory usage of Schema.Manager](https://github.com/absinthe-graphql/absinthe/pull/1037) + +## 1.6.0 + +- Feature: [Interfaces can now implement 
Interfaces](https://github.com/absinthe-graphql/absinthe/pull/1012), matching the latest spec +- Feature: Support for the [`repeatable` directive](https://github.com/absinthe-graphql/absinthe/pull/999) +- Feature: Enable [rendering](https://github.com/absinthe-graphql/absinthe/pull/1010) of Type System Directives in SDL based schemas. +- Feature: Correctly match [Introspection type specs](https://github.com/absinthe-graphql/absinthe/pull/1017) +- Bug Fix: Restore dynamic [description support](https://github.com/absinthe-graphql/absinthe/pull/1005) (Note: the `description`s are evaluated once --- at compile time) +- Bug Fix: Restore dynamic [default_value support](https://github.com/absinthe-graphql/absinthe/pull/1026) (Note: the `default_value`s evaluated once --- at compile time) +- Bug Fix: Restore dynamic [Enum value support](https://github.com/absinthe-graphql/absinthe/pull/1023) (Note: the `value` is evaluated once --- at compile time) +- Bug Fix: [Interface nullability](https://github.com/absinthe-graphql/absinthe/pull/1009) corrections +- Bug Fix: Fix [field listing for Inputs](https://github.com/absinthe-graphql/absinthe/pull/1015) that import fields +- Bug Fix: Properly [trim all descriptions](https://github.com/absinthe-graphql/absinthe/pull/1014) no matter the mechanism used to specify them +- Bug Fix: Fix incorrect specification of [`__TypeKind`](https://github.com/absinthe-graphql/absinthe/pull/1019) +- Bug Fix: Better match [introspection schema specification](https://github.com/absinthe-graphql/absinthe/pull/1029) +- Bug Fix: Add missing value to [`__DirectiveLocation`](https://github.com/absinthe-graphql/absinthe/pull/1020) +- Bug Fix: Fix [compilation problems with `import_types`](https://github.com/absinthe-graphql/absinthe/pull/1022) +- Bug Fix: Reduce [memory consumption of Subscriptions](https://github.com/absinthe-graphql/absinthe/pull/1006) + +## 1.5.5 + +- Bug Fix: Fix for `nil` in [`ArgumentsOfCorrectType` 
suggestions](https://github.com/absinthe-graphql/absinthe/pull/1000) + +## 1.5.4 + +- Feature: Ensure [stable ordering in introspection results](https://github.com/absinthe-graphql/absinthe/pull/997). +- Bug Fix: Fix [rendering of interfaces in SDL](https://github.com/absinthe-graphql/absinthe/pull/979) +- Bug Fix: Properly [escape single line descriptions in SDL](https://github.com/absinthe-graphql/absinthe/pull/968) +- Bug Fix: Fix [`:meta` on fields](https://github.com/absinthe-graphql/absinthe/pull/973) +- Bug Fix: Validate that [DirectivesMustBeValid](https://github.com/absinthe-graphql/absinthe/pull/954) +- Bug Fix: Handle [default value rendering with partial field set](https://github.com/absinthe-graphql/absinthe/pull/998) + +## 1.5.3 + +- Bug Fix: Handle null propagation with `non_null(list_of(non_null(type)))` properly +- Bug Fix: Fix [double escaping issue](https://github.com/absinthe-graphql/absinthe/pull/962) with string literal arguments. + +## 1.5.2 + +- Bug Fix: Fix issue with persistent term backend. + +## 1.5.1 + +- Bug Fix: Enable hydrating resolve_type on unions. #938 +- Bug Fix: #922 + +## v1.5.0 (Rc) + +- Breaking Bug Fix: Variable types must align exactly with the argument type. Previously + Absinthe allowed variables of different types to be used by accident as long as the data parsed. +- Feature (Experimental): `:persistent_term` based schema backend +- Breaking Change: `telemetry` event keys [changed](https://github.com/absinthe-graphql/absinthe/pull/901) since the beta release. + +## v1.5.0 (Beta) + +- Feature: SDL directives, other improvements +- Feature: Output rendered SDL for a schema +- Feature: Substantially lower subscription memory usage. +- Documentation: Testing guide, numerous fixes and updates +- Breaking Change: Scalar outputs are now type checked and will raise exceptions if the result tries to send the wrong data type in the result. 
+- Breaking Change: `telemetry` event names [changed](https://github.com/absinthe-graphql/absinthe/pull/782) from the `alpha` to match an emerging naming convention for tracing.
+- Breaking Change: Added phase to check validity of field names according to GraphQL spec. Might break existing schemas. Remove the `Absinthe.Phase.Schema.Validation.NamesMustBeValid` from the schema pipeline if you want to ignore this.
+- Breaking Change: To match the GraphQL spec, we [no longer](https://github.com/absinthe-graphql/absinthe/pull/816) add a non-null error when a resolver on a non-null field explicitly returns its own error.
+- Breaking Change: Removed `Absinthe.Traversal` module
+
+## v1.5.0 (Alpha)
+
+Alpha 0 note: 1.5.0 alpha is safe to use on existing schemas. However, there are no schema validations at the moment, so when writing new ones you may get undefined behaviour if you write an invalid schema.
+
+- COLUMN NUMBERS! The Absinthe Lexer has been rewritten using `nimble_parsec` and now Absinthe includes column information.
+- Complete rewrite of schema internals. This fixes a number of long standing bugs, and provides a variety of new features
+- Feature: SDL support
+- Feature: Schema decorators
+- Feature: Customizable subscription de-duplication. See: https://github.com/absinthe-graphql/absinthe/blob/master/guides/subscriptions.md#de-duplicating-updates
+- Feature: Built-in `telemetry` instrumentation (https://github.com/beam-telemetry/telemetry)
+- Breaking Change: `default_value: DateTime.utc_now()` will have its time set at compile time. IE: DON'T DO THIS. It only worked by accident before anyway, and now it no longer works, which is correct.
+- Breaking change: added `node_name/0` callback to `Absinthe.Subscription.PubSub` behaviour. To retain old behaviour, implement this callback to return `Kernel.node/0`.
+
+## v1.4
+
+For changes pre-v1.5 see the [v1.4](https://github.com/absinthe-graphql/absinthe/blob/v1.4/CHANGELOG.md) branch.
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index da5816d822..8d5de76cca 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -24,7 +24,7 @@ Make your change. Add tests for your change. Make the tests pass: Push to your fork (preferably to a non-`master` branch) and [submit a pull request][pr]. -[pr]: https://github.com/CargoSense/absinthe/compare/ +[pr]: https://github.com/absinthe-graphql/absinthe/compare/ We'll review and answer your pull request as soon as possible. We may suggest some changes, improvements, or alternatives. Let's work through it together. diff --git a/ISSUE_TEMPLATE.md b/ISSUE_TEMPLATE.md new file mode 100644 index 0000000000..56dddc5d9a --- /dev/null +++ b/ISSUE_TEMPLATE.md @@ -0,0 +1,14 @@ +* Do not use the issues tracker for help or support (try Elixir Forum, Slack, IRC, etc.) +* Questions about how to contribute are fine. + +### Environment + +* Elixir & Erlang versions (elixir --version): +* Absinthe version `mix deps | grep absinthe` +* HTTP client version. IE for hackney do `mix deps | grep hackney` + +### Current behavior + +Include code samples, errors and stacktraces if appropriate. + +### Expected behavior \ No newline at end of file diff --git a/README.md b/README.md index 3ab41bdf22..4c1a15b233 100644 --- a/README.md +++ b/README.md @@ -1,13 +1,17 @@ # Absinthe -[GraphQL](https://facebook.github.io/graphql/) implementation for Elixir. 
+[![Build Status](https://github.com/absinthe-graphql/absinthe/workflows/CI/badge.svg)](https://github.com/absinthe-graphql/absinthe/actions?query=workflow%3ACI) +[![Version](https://img.shields.io/hexpm/v/absinthe.svg)](https://hex.pm/packages/absinthe) +[![Hex Docs](https://img.shields.io/badge/hex-docs-lightgreen.svg)](https://hexdocs.pm/absinthe/) +[![Download](https://img.shields.io/hexpm/dt/absinthe.svg)](https://hex.pm/packages/absinthe) +[![License](https://img.shields.io/badge/License-MIT-blue.svg)](https://opensource.org/licenses/MIT) +[![Last Updated](https://img.shields.io/github/last-commit/absinthe-graphql/absinthe.svg)](https://github.com/absinthe-graphql/absinthe/commits/master) -[![Hex pm](http://img.shields.io/hexpm/v/absinthe.svg?style=flat)](https://hex.pm/packages/absinthe)[![Build Status](https://secure.travis-ci.org/absinthe-graphql/absinthe.svg?branch=master -"Build Status")](https://travis-ci.org/absinthe-graphql/absinthe)[![License](https://img.shields.io/badge/License-MIT-blue.svg)](https://opensource.org/licenses/MIT) +[GraphQL](https://facebook.github.io/graphql/) implementation for Elixir. Goals: -- Complete implementation of the [GraphQL Working Draft](https://facebook.github.io/graphql). +- Complete implementation of the [GraphQL Working Draft](https://spec.graphql.org/draft/). - An idiomatic, readable, and comfortable API for Elixir developers - Extensibility based on small parts that do one thing well. - Detailed error messages and documentation. @@ -62,27 +66,17 @@ and [Relay](https://facebook.github.io/relay/) on the frontend. Of course we work out of the box with other frontend frameworks and GraphQL clients, too. -### An Active Community - -The project is under constant improvement by a growing list of -contributors, and your feedback is important. Please join us in Slack -(`#absinthe-graphql` under the Elixir Slack account) or the Elixir Forum -(tagged `absinthe`). 
- -Please remember that all interactions in our official spaces follow -our [Code of Conduct](./CODE_OF_CONDUCT.md). - ## Installation Install from [Hex.pm](https://hex.pm/packages/absinthe): ```elixir def deps do - [{:absinthe, "~> 1.4.0"}] + [{:absinthe, "~> 1.6.0"}] end ``` -Note: Absinthe requires Elixir 1.4 or higher. +Note: Absinthe requires Elixir 1.10 or higher. ## Upgrading @@ -90,7 +84,9 @@ See [CHANGELOG](./CHANGELOG.md) for upgrade steps between versions. ## Documentation -See [HexDocs](https://hexdocs.pm/absinthe) and . +- [Absinthe hexdocs](https://hexdocs.pm/absinthe). +- For the tutorial, guides, and general information about Absinthe-related + projects, see [http://absinthe-graphql.org](http://absinthe-graphql.org). ### Mix Tasks @@ -98,14 +94,24 @@ Absinthe includes a number of useful Mix tasks for extracting schema metadata. Run `mix help` in your project and look for tasks starting with `absinthe`. -### Roadmap - -See the [Roadmap project board](https://github.com/absinthe-graphql/absinthe/projects/2) for longer term plans. - ## Related Projects See the [GitHub organization](https://github.com/absinthe-graphql). +## Community + +The project is under constant improvement by a growing list of +contributors, and your feedback is important. Please join us in Slack +(`#absinthe-graphql` under the Elixir Slack account) or the Elixir Forum +(tagged `absinthe`). + +Please remember that all interactions in our official spaces follow +our [Code of Conduct](./CODE_OF_CONDUCT.md). + +## Contribution + +Please follow [contribution guide](./CONTRIBUTING.md). + ## License See [LICENSE.md](./LICENSE.md). diff --git a/guides/adapters.md b/guides/adapters.md index d26e3f31cd..bc97fbeef1 100644 --- a/guides/adapters.md +++ b/guides/adapters.md @@ -21,14 +21,7 @@ Absinthe ships with two adapters: * `Absinthe.Adapter.Passthrough`, which is a no-op adapter and makes no modifications. 
-To set the adapter, you can set an application configuration value: - -```elixir -config :absinthe, - adapter: Absinthe.Adapter.TheAdapterName -``` - -Or, you can provide it as an option to `Absinthe.run/3`: +To set the adapter, you provide it as an option to `Absinthe.run/3`: ```elixir Absinthe.run(query, MyAppWeb.Schema, @@ -39,7 +32,7 @@ Notably, this means you're able to switch adapters on case-by-case basis. In a Phoenix application, this means you could even support using different adapters for different clients. -A custom adapter module must merely implement the `Absinthe.Adapter` protocol, +A custom adapter module must merely implement the `Absinthe.Adapter` behaviour, in many cases with `use Absinthe.Adapter` and only overriding the desired functions. diff --git a/guides/batching.md b/guides/batching.md index 14512a803a..17af6ec3d9 100644 --- a/guides/batching.md +++ b/guides/batching.md @@ -1,7 +1,207 @@ # Batching Resolution -> We're sorry, this guide hasn't been written yet. -> -> You can help! Please fork the [absinthe](https://github.com/absinthe-graphql/absinthe) repository, edit `guides/batching.md`, and submit a [pull request](https://github.com/absinthe-graphql/absinthe/pulls). +## Avoiding N+1 Queries -See the documentation for [Absinthe.Middleware.Batch](Absinthe.Middleware.Batch.html) for more information. +In general, you want to make sure that when accessing Ecto associations that you +preload the data in the top level resolver functions to avoid N+1 queries. + +Imagine this scenario: You have posts and users. A `Post` has an `author` field, which +returns a user. 
You want to list all posts, and get the name of their author: + +```graphql +{ + posts { + author { + name + } + } +} +``` + +If you write your schema like this, you're going to have a _bad_ time due to issues with _N + 1_: + +```elixir +object :post do + @desc "Author of the post" + field :author, :user do + resolve fn post, _, _ -> + author = + post + |> Ecto.assoc(:author) + |> Repo.one + + {:ok, author} + end + end +end + +query do + field :posts, list_of(:post) do + resolve fn _, _ -> + {:ok, Post |> Repo.all} + end + end +end +``` + +What this schema will do when presented with the GraphQL query is +run `Post |> Repo.all`, which will retrieve _N_ posts. Then for each +post it will resolve child fields, which runs our `Repo.one` query +function, resulting in _N+1_ calls to the database. + +One way to handle this issue is with Absinthe's support for +batching. The idea with batching is that we're gonna aggregate all the +`author_id`s from each post, and then make one call to the user. + +Let's first make a function to get a model by ids: + +```elixir +defmodule MyAppWeb.Schema.Helpers do + def by_id(model, ids) do + import Ecto.Query + + ids = ids |> Enum.uniq + + model + |> where([m], m.id in ^ids) + |> Repo.all + |> Map.new(&{&1.id, &1}) + end +end +``` + +Now we can use this function to batch our author lookups: + +```elixir +object :post do + + @desc "Author of the post" + field :author, :user do + resolve fn post, _, _ -> + batch({MyAppWeb.Schema.Helpers, :by_id, User}, post.author_id, fn batch_results -> + {:ok, Map.get(batch_results, post.author_id)} + end) + end + end + +end +``` + +Now we make just two calls to the database. The first call loads all of the posts. +Then as Absinthe walks through each post and tries to get the author, it's instead +told to aggregate its information. That aggregate information is passed on to our `by_id/2` function from earlier. +It grabs ALL the users in just one database call, and creates a map of user ids +to users. 
+ +Absinthe then does a second pass and calls the `batch_results` function with that +map, letting us retrieve the individual author for each post. + +Not only is this a very efficient way to query the data, it's also 100% dynamic. +If a query document asks for authors, they're loaded efficiently. If it does not, +they aren't loaded at all. + +See the documentation for `Absinthe.Middleware.Batch` for more information. + +`Absinthe.Middleware.Batch` achieves a lot and, with some helpers, was the +standard way to solve this problem for a long time. While batching still has a +place, it has a few limitations that have driven the development of Dataloader. +There are small scale annoyances like the limitation of only being able to batch +one thing at a time in a field, or the fact that the API can get very verbose. + +There's also some larger scale issues however. Ecto has a fair number of quirks +that make it a difficult library to abstract access to. If you want the +concurrent test system to work, you need to add `self()` to all the batch keys +and do `Repo.all(caller: pid)` in every batch function so that it knows which +sandbox to use. It gets very easy for your GraphQL functions to become full of +direct database access, inevitably going around important data access rules you +may want to enforce in your contexts. Alternatively, your context functions can +end up with dozens of little functions that only exist to support batching items +by ID. + +In time, people involved in larger projects have been able to build some +abstractions, helpers, and conventions around the `Absinthe.Middleware.Batch` +plugin that have done a good job of addressing these issues. That effort has been +extracted into the Dataloader project, which also draws inspiration from similar +projects in the GraphQL world. + +We've made it easier and more flexible, however, with +Elixir's [dataloader](https://hex.pm/packages/dataloader) package. 
+ +### Dataloader + +Let's jump straight in to getting Dataloader working, and then we'll expand on +what's actually happening behind the scenes. + +Using Dataloader is as simple as doing: + +```elixir +alias MyApp.Blog # Dataloader source, see below +import Absinthe.Resolution.Helpers, only: [dataloader: 1] + +object :post do + field :posts, list_of(:post), resolve: dataloader(Blog) + + @desc "Author of the post" + field :author, :user do + resolve dataloader(Blog) + end +end +``` + +To make this work we need to setup a dataloader, add the `Blog` source to it, and +make sure our schema knows it needs to run the dataloader. + +Latest install instructions found here: https://github.com/absinthe-graphql/dataloader + +Let's start with a data source. Dataloader data sources are just structs that encode +a way of retrieving data in batches. In a Phoenix application you'll generally have one +source per context, so that each context can control how its data is loaded. + +Here is a hypothetical `Blog` context and a dataloader ecto source: + +```elixir +defmodule MyApp.Blog do + def data() do + Dataloader.Ecto.new(MyApp.Repo, query: &query/2) + end + + def query(queryable, _params) do + queryable + end +end +``` + +In this example, the query returned by `query/2` is used as a starting point by Dataloader to build the final query, which it does by traversing schema associations. +In other words, Dataloader can determine that an author has many posts, and that to retrieve posts it needs to get those with the relevant `author_id`. +If that's sufficient for your needs, `query/2` need not modify the query it's given. +But if you only want to load published posts, `query/2` can narrow the query accordingly. + +When integrating Dataloader with GraphQL, we want to place it in our context so +that we can access it in our resolvers. 
In your schema module add: + +```elixir +alias MyApp.{Blog} + +def context(ctx) do + loader = + Dataloader.new + |> Dataloader.add_source(Blog, Blog.data()) + + Map.put(ctx, :loader, loader) +end + +def plugins do + [Absinthe.Middleware.Dataloader] ++ Absinthe.Plugin.defaults() +end +``` + +That's it! If you run a GraphQL query that hits that field, it will be loaded efficiently without N+1. + +See the documentation for [Dataloader](dataloader.md) for more information. + +### Deprecated in v1.4: Batching with Absinthe.Ecto + +The batching helper functions present +in [absinthe_ecto](https://github.com/absinthe-graphql/absinthe_ecto) +provided some early support for making it easy to get data from Ecto. These batching features are considered *DEPRECATED* in favor of +Dataloader, described above. If you're on 1.4 or earlier absinthe version feel free to check the documentation about [`absinthe_ecto` basic usage](https://hexdocs.pm/absinthe_ecto/Absinthe.Ecto.html#module-basic-usage). diff --git a/guides/client/apollo.md b/guides/client/apollo.md index c23dc23a32..dd3c2be807 100644 --- a/guides/client/apollo.md +++ b/guides/client/apollo.md @@ -1,7 +1,248 @@ # Using with Apollo Client -> We're sorry, this guide hasn't been written yet. -> -> You can help! Please fork the [absinthe](https://github.com/absinthe-graphql/absinthe) repository, edit `guides/client/apollo.md`, and submit a [pull request](https://github.com/absinthe-graphql/absinthe/pulls). +An Apollo client manages its connection to the GraphQL server using [links](https://www.apollographql.com/docs/link/) -- which are essentially middleware that tell Apollo how to resolve each query. You can configure Apollo to connect to your Absinthe server via HTTP, websockets, or both. 
-For now, see the [@absinthe/socket-apollo-link](https://github.com/absinthe-graphql/absinthe-socket/tree/master/packages/socket-apollo-link) package README, which includes examples showing how to configure Apollo to connect to an Absinthe GraphQL server over HTTP, Websockets, or using a hybrid configuration. +## Using an HTTP link + +Using Apollo with an HTTP link does not require any Absinthe-specific configuration. You can create an HTTP link pointed at your Absinthe server as follows: + +```javascript +import ApolloClient from "apollo-client"; +import { createHttpLink } from "apollo-link-http"; +import { InMemoryCache } from "apollo-cache-inmemory"; + +// Create an HTTP link to the Absinthe server. +const link = createHttpLink({ + uri: "http://localhost:4000/graphql" +}); + +// Apollo also requires you to provide a cache implementation +// for caching query results. The InMemoryCache is suitable +// for most use cases. +const cache = new InMemoryCache(); + +// Create the client. +const client = new ApolloClient({ + link, + cache +}); +``` + +You may find that you need to modify the HTTP request that Apollo makes -- for example, if you wish to send the value of a particular cookie in the `Authorization` header. The `setContext` helper allows you to do this, and also demonstrates how links in Apollo can be chained. + +```javascript +import ApolloClient from "apollo-client"; +import { createHttpLink } from "apollo-link-http"; +import { setContext } from "apollo-link-context"; +import { InMemoryCache } from "apollo-cache-inmemory"; +import Cookies from "js-cookie"; + +// Create an HTTP link to the Absinthe server. +const httpLink = createHttpLink({ + uri: "http://localhost:4000/graphql" +}); + +// Use setContext to create a chainable link object that sets +// the token cookie to the Authorization header. +const authLink = setContext((_, { headers }) => { + // Get the authentication token from the cookie if it exists. 
+ const token = Cookies.get("token"); + + // Add the new Authorization header. + return { + headers: { + ...headers, + authorization: token ? `Bearer ${token}` : "" + } + }; +}); + +// Chain the HTTP link and the authorization link. +const link = authLink.concat(httpLink); + +// Apollo also requires you to provide a cache implementation +// for caching query results. The InMemoryCache is suitable +// for most use cases. +const cache = new InMemoryCache(); + +// Create the client. +const client = new ApolloClient({ + link, + cache +}); +``` + +## Using a websocket link + +An HTTP link is suitable for many basic use cases, but if you require two-way communication between the server and the client, you will need to use a websocket link. The most common use case is a client that needs to use GraphQL subscriptions to receive updates from the server when particular events occur. To implement a websocket link, we will need to use the [`@absinthe/socket`](https://www.npmjs.com/package/@absinthe/socket) and [`@absinthe/socket-apollo-link`](https://www.npmjs.com/package/@absinthe/socket-apollo-link) packages. + +```javascript +import ApolloClient from "apollo-client"; +import * as AbsintheSocket from "@absinthe/socket"; +import { createAbsintheSocketLink } from "@absinthe/socket-apollo-link"; +import { Socket as PhoenixSocket } from "phoenix"; +import { InMemoryCache } from "apollo-cache-inmemory"; +import Cookies from "js-cookie"; + +// Create a standard Phoenix websocket connection. If you need +// to provide additional params, like an authentication token, +// you can configure them in the `params` option. +const phoenixSocket = new PhoenixSocket("ws://localhost:4000/socket", { + params: () => { + if (Cookies.get("token")) { + return { token: Cookies.get("token") }; + } else { + return {}; + } + } +}); + +// Wrap the Phoenix socket in an AbsintheSocket. 
+const absintheSocket = AbsintheSocket.create(phoenixSocket); + +// Create an Apollo link from the AbsintheSocket instance. +const link = createAbsintheSocketLink(absintheSocket); + +// Apollo also requires you to provide a cache implementation +// for caching query results. The InMemoryCache is suitable +// for most use cases. +const cache = new InMemoryCache(); + +// Create the client. +const client = new ApolloClient({ + link, + cache +}); +``` + +### Reconnecting the websocket link + +You may find that you periodically need to reconnect the websocket with different parameters. The most common case for this is when a user logs in or logs out; you will then want to refresh their subscriptions to reflect the new authentication state. You can accomplish this by invoking `phoenixSocket.conn.close();` from your application code whenever the reconnection needs to happen. Phoenix will notice the closed connection and automatically reconnect. It is important that you provide a function that returns the websocket parameters to the socket `params` option. If you provide the parameters directly as an object, the new parameters will not be picked up when the websocket reconnects, but if you provide a function, Phoenix invokes the function on each connection to obtain the parameters. + +Note that this solution (reconnecting with `phoenixSocket.conn.close();`) is somewhat unstable because it relies upon an implementation detail of the Phoenix socket. Ideally, a future version of the Phoenix package might add a public API method to reconnect the websocket with new parameters. + +```javascript +import ApolloClient from "apollo-client"; +import * as AbsintheSocket from "@absinthe/socket"; +import { createAbsintheSocketLink } from "@absinthe/socket-apollo-link"; +import { Socket as PhoenixSocket } from "phoenix"; +import { InMemoryCache } from "apollo-cache-inmemory"; +import Cookies from "js-cookie"; + +// Create a standard Phoenix websocket connection. 
If you need +// to provide additional params, like an authentication token, +// you can configure them in the `params` option. +// +// If you plan to reconnect the socket with updated parameters, +// you must provide a function to the `params` option. If you +// provide the parameters directly as an object, the updated +// parameters will not be picked up when the socket reconnects. +const phoenixSocket = new PhoenixSocket("ws://localhost:4000/socket", { + params: () => { + if (Cookies.get("token")) { + return { token: Cookies.get("token") }; + } else { + return {}; + } + } +}); + +// Wrap the Phoenix socket in an AbsintheSocket. +const absintheSocket = AbsintheSocket.create(phoenixSocket); + +// Create an Apollo link from the AbsintheSocket instance. +const link = createAbsintheSocketLink(absintheSocket); + +// Apollo also requires you to provide a cache implementation +// for caching query results. The InMemoryCache is suitable +// for most use cases. +const cache = new InMemoryCache(); + +// Create the client. +const client = new ApolloClient({ + link, + cache +}); + +// Later in your application code, when you need to reconnect +// the socket. +phoenixSocket.conn.close(); +``` + +## Using both HTTP and websocket links + +A common configuration for Apollo client applications is to use both HTTP and websocket links -- HTTP for queries and mutations, and a websocket for subscriptions. We can implement this in our client using [directional composition with Apollo's `split` helper](https://www.apollographql.com/docs/link/composition#directional). 
+ +```javascript +import ApolloClient from "apollo-client"; +import { createHttpLink } from "apollo-link-http"; +import { setContext } from "apollo-link-context"; +import * as AbsintheSocket from "@absinthe/socket"; +import { createAbsintheSocketLink } from "@absinthe/socket-apollo-link"; +import { Socket as PhoenixSocket } from "phoenix"; +import { hasSubscription } from "@jumpn/utils-graphql"; +import { split } from "apollo-link"; +import { InMemoryCache } from "apollo-cache-inmemory"; +import Cookies from "js-cookie"; + +// Create an HTTP link to the Absinthe server. +const httpLink = createHttpLink({ + uri: "http://localhost:4000/graphql" +}); + +// Use setContext to create a chainable link object that sets +// the token cookie to the Authorization header. +const authLink = setContext((_, { headers }) => { + // Get the authentication token from the cookie if it exists. + const token = Cookies.get("token"); + + // Add the new Authorization header. + return { + headers: { + ...headers, + authorization: token ? `Bearer ${token}` : "" + } + }; +}); + +// Chain the HTTP link and the authorization link. +const authedHttpLink = authLink.concat(httpLink); + +// Create a standard Phoenix websocket connection. If you need +// to provide additional params, like an authentication token, +// you can configure them in the `params` option. +const phoenixSocket = new PhoenixSocket("ws://localhost:4000/socket", { + params: () => { + if (Cookies.get("token")) { + return { token: Cookies.get("token") }; + } else { + return {}; + } + } +}); + +// Wrap the Phoenix socket in an AbsintheSocket. +const absintheSocket = AbsintheSocket.create(phoenixSocket); + +// Create an Apollo link from the AbsintheSocket instance. +const websocketLink = createAbsintheSocketLink(absintheSocket); + +// If the query contains a subscription, send it through the +// websocket link. Otherwise, send it through the HTTP link. 
+const link = split( + operation => hasSubscription(operation.query), + websocketLink, + authedHttpLink +); + +// Apollo also requires you to provide a cache implementation +// for caching query results. The InMemoryCache is suitable +// for most use cases. +const cache = new InMemoryCache(); + +// Create the client. +const client = new ApolloClient({ + link, + cache +}); +``` diff --git a/guides/client/javascript.md b/guides/client/javascript.md index 26bee0a496..6f41d34da8 100644 --- a/guides/client/javascript.md +++ b/guides/client/javascript.md @@ -6,8 +6,8 @@ You can interact with an Absinthe GraphQL server via HTTP (thanks to We also have special support for configuring and working with specific JavaScript frameworks. You can see the guides here: -- [Apollo Client](apollo.html) -- [Relay](relay.html) +- [Apollo Client](apollo.md) +- [Relay](relay.md) ## Over HTTP @@ -33,4 +33,4 @@ fetch('http://localhost:4000/graphql', { See the [@absinthe/socket](https://github.com/absinthe-graphql/absinthe-socket/tree/master/packages/socket) NPM package for special support for Absinthe's use of Phoenix channels for GraphQL over websockets, including support for -[subscriptions](subscriptions.html). +[subscriptions](subscriptions.md). diff --git a/guides/client/relay.md b/guides/client/relay.md index d2d70c8274..df1c008f05 100644 --- a/guides/client/relay.md +++ b/guides/client/relay.md @@ -108,14 +108,13 @@ numeric) IDs to the global ID scheme -- an opaque string (like `"UWxf59AcjK="`) will be returned instead. -

-Important: the global ID is generated based on the object's -unique identifier, which by default is the value of its existing :id -field. This is convenient, because if you are using Ecto, the -primary key :id database field is typically enough to uniquely identify an -object of a given type. It also means, however, that the internal :id of a -node object will not be available to be queried as :id. -

+**Important:** the global ID is generated based on the object's +unique identifier, which by default is **the value of its existing `:id` +field**. This is convenient, because if you are using Ecto, the +primary key `:id` database field is typically enough to uniquely identify an +object of a given type. It also means, however, that *the internal `:id` of a +node object will not be available to be queried as `:id`.* + - If you wish to generate your global IDs based on something other than the existing `:id` field (if, for instance, your internal IDs are returned as `_id`), @@ -231,17 +230,17 @@ result. Thankfully `Absinthe.Relay` abstracts these details away from the schema designer, allowing them to focus on any _other_ arguments needed or results expected. -

- Important: Remember that input fields (and arguments in - general) cannot be of one of your object types. Use input_object to - model complex argument types. -

+ +**Important:** Remember that input fields (and arguments in +general) cannot be of one of your `object` types. Use `input_object` to +model complex argument types. + In this example, we accept a list of multiple `:person_input_object` values to insert people into a database. ```elixir -defmodule YourApp.Schema +defmodule YourApp.Schema do # ... input_object :person_input_object do @@ -290,7 +289,7 @@ manually. ```elixir def bulk_create(%{persons: new_persons, group: global_group_id}, _) do - {:ok, %{type: :group, id: internal_group_id}} = Absinthe.Relay.Node.from_global_id(global_group_id, YourApp.Schema)` + {:ok, %{type: :group, id: internal_group_id}} = Absinthe.Relay.Node.from_global_id(global_group_id, YourApp.Schema) # ... manipulate your DB using internal_group_id end ``` @@ -299,12 +298,12 @@ If, of course, your client knows the internal IDs (in a peer field to `:id`, eg, `:internal_id`), you can depends on that ID -- but we recommend that you use node IDs as they are opaque values and it's the more conventional practice. -

- Important: When using from_global_id, remember to always - match the :type value to ensure the internal ID is for the type you expect, - and a global ID for the wrong type of node hasn't been mistakenly sent to the - server. -

+ +**Important:** When using `from_global_id`, remember to always +match the `:type` value to ensure the internal ID is for the type you expect, +and a global ID for the wrong type of node hasn't been mistakenly sent to the +server. + ## Connections @@ -359,7 +358,7 @@ query { Check the [documentation](https://hexdocs.pm/absinthe_relay/Absinthe.Relay.Connection.html) for more details on connections. -

- Note: These features do not require using Relay on the client as Apollo - and other client implementations generally support Relay connection configuration. -

+ +**Note:** These features do not require using Relay on the client as Apollo +and other client implementations generally support Relay connection configuration. + diff --git a/guides/complexity-analysis.md b/guides/complexity-analysis.md index 8cc4e74f64..8fddec67c0 100644 --- a/guides/complexity-analysis.md +++ b/guides/complexity-analysis.md @@ -28,8 +28,8 @@ The maximum value, `50`, is compared to complexity values calculated for each re Here's how the complexity value is calculated: -By default each field in a query will increase the complexity by 1. However it -can be useful to customize how the complexity value for a field. This is done in your schema using the +By default each field in a query will increase the complexity by 1. However, it +can be useful to customize how the complexity value for a field is calculated. This is done in your schema using the `complexity/1` macro, which can accept a function or an explicit integer value. As an example, when a field is a list, the complexity is often correlated to the @@ -57,8 +57,6 @@ defmodule MyAppWeb.Schema do object :person do field :name, :string field :age, :integer - # constant complexity for this object - complexity 3 end end @@ -68,17 +66,19 @@ For a field, the first argument to the function you supply to `complexity/1` is -- just as a field's resolver can use user arguments to resolve its value, the complexity function that you provide can use the same arguments to calculate the field's complexity. -The second argument passed to your complexity function is the child (that is, -the result of the field); in the example above, `child_complexity` would be `3`, -as the field returns a list of `:person` objects, and the complexity of -`:person` is explicitly set to `3`. +The second argument passed to your complexity function is the sum of all the complexity scores +of all the fields nested below the current field. 
(If a complexity function accepts three arguments, the third will be an `%Absinthe.Resolution{}` struct, just as with resolvers.) -If the value of a document's `:limit` argument was `10`, the complexity of a single -`:people` field would be calculated as `30`; `10`, the value of `:limit`, times `3`, the complexity of -the `:person` type. +If the value of a document's `:limit` argument was `10` and both `name` and `age` were queried for, +the complexity of the `:people` field would be calculated as `20`: + +* `10`, the value of `:limit` +* times `2`, the sum of the complexity of the fields requested on the `:person` + +A field's complexity will default to `1` if it's not set explicitly. So this would be okay: @@ -94,7 +94,7 @@ But this, at a complexity of `60`, wouldn't: ```graphql { - people(limit: 20) { + people(limit: 30) { name } } diff --git a/guides/context-and-authentication.md b/guides/context-and-authentication.md index d1fc8f05dc..2f0e862c46 100644 --- a/guides/context-and-authentication.md +++ b/guides/context-and-authentication.md @@ -89,7 +89,7 @@ appropriate values into the connection. Let's use this mechanism to set our current_user from the previous example via an authentication header. We will use the same Schema as before. -First, our plug. We'll be checking the for the `authorization` header, and calling +First, our plug. We'll be checking the connection for the `authorization` header, and calling out to some unspecified authentication mechanism. ```elixir @@ -139,7 +139,7 @@ some other library that provides utilities for authenticating users you can use those here too, and just add their output to the context. If there is no current user it's better to simply not have the `:current_user` -key inside the map, instead of doing `%{current_user: nil}`. This way you an +key inside the map, instead of doing `%{current_user: nil}`. This way you can just pattern match for `%{current_user: user}` in your code and not need to worry about the nil case. 
diff --git a/guides/custom-scalars.md b/guides/custom-scalars.md index 27b0d46f21..e6d34c5d0a 100644 --- a/guides/custom-scalars.md +++ b/guides/custom-scalars.md @@ -6,7 +6,7 @@ its object types, but is present all the way down to the scalar value level. Sometimes it makes sense to build custom scalar types to better model your domain. Here's how to do it. -The GraphQL Specification doesn't define date and datetime types, but Absinthe ships with several pre-built for use via [import_types](importing-types.html). In this example we'll look at how `:datetime` is defined. +The GraphQL Specification doesn't define date and datetime types, but Absinthe ships with several pre-built for use via [import_types](importing-types.md). In this example we'll look at how `:datetime` is defined. ## Defining a scalar diff --git a/guides/dataloader.md b/guides/dataloader.md index 70b2c58792..b6e4438a71 100644 --- a/guides/dataloader.md +++ b/guides/dataloader.md @@ -23,80 +23,98 @@ that encodes a way of retrieving data. More info in the [Sources](#sources) sect Absinthe provides some dataloader helpers out of the box that you can import into your schema ```elixir - import Absinthe.Resolution.Helpers, only: [dataloader: 1] +import Absinthe.Resolution.Helpers, only: [dataloader: 1] ``` This is needed to use the various `dataloader` helpers to resolve a field: ```elixir -field(:teams, list_of(:team), resolve: dataloader(Nhl)) +field(:posts, list_of(:post), resolve: dataloader(Blog)) ``` -It also provides a plugin you need to add to help with resolution: +Let's start with a data source. Dataloader data sources are just structs that encode +a way of retrieving data in batches. In a Phoenix application you'll generally have one +source per context, so that each context can control how its data is loaded. 
+ +Here is a hypothetical `Blog` context and a dataloader ecto source: ```elixir -def plugins do - [Absinthe.Middleware.Dataloader] ++ Absinthe.Plugin.defaults() +defmodule MyApp.Blog do + def data() do + Dataloader.Ecto.new(MyApp.Repo, query: &query/2) + end + + def query(queryable, _params) do + queryable + end end ``` -Finally you need to make sure your loader is in your context: +In this example, the query returned by `query/2` is used as a starting point by Dataloader to build the final query, which it does by traversing schema associations. +In other words, Dataloader can determine that an author has many posts, and that to retrieve posts it needs to get those with the relevant `author_id`. +If that's sufficient for your needs, `query/2` need not modify the query it's given. +But if you only want to load published posts, `query/2` can narrow the query accordingly. + +When integrating Dataloader with GraphQL, we want to place it in our context so +that we can access it in our resolvers. In your schema module add: ```elixir +alias MyApp.{Blog, Foo} + def context(ctx) do loader = - Dataloader.new() - |> Dataloader.add_source(Nhl, Nhl.data()) + Dataloader.new + |> Dataloader.add_source(Blog, Blog.data()) + |> Dataloader.add_source(Foo, Foo.data()) # Foo source could be a Redis source Map.put(ctx, :loader, loader) end + +def plugins do + [Absinthe.Middleware.Dataloader] ++ Absinthe.Plugin.defaults() +end ``` -Putting all that together looks like this: +The `context/1` function is a callback specified by the `Absinthe.Schema` behaviour that gives +the schema itself an opportunity to set some values in the context that it may need in order to run. -```elixir -defmodule MyProject.Schema do - use Absinthe.Schema - use Absinthe.Schema.Notation +The `plugins/0` function has been around for a while, and specifies what plugins the schema needs to resolve. +See [the documentation](`c:Absinthe.Schema.plugins/0`) for more. 
- import Absinthe.Resolution.Helpers, only: [dataloader: 1] +#### Unpacking Dataloader - alias MyProject.Loaders.Nhl +The `data/0` function creates an Ecto data source, to which you pass your repo and a query function. This query function +is called every time you want to load something, and provides an opportunity to apply arguments or +set defaults. So for example if you always want to only load non-deleted posts you can do: - def context(ctx) do - loader = - Dataloader.new() - |> Dataloader.add_source(Nhl, Nhl.data()) +```elixir +def query(Post, _), do: from p in Post, where: is_nil(p.deleted_at) - Map.put(ctx, :loader, loader) - end +def query(queryable, _), do: queryable +``` - def plugins do - [Absinthe.Middleware.Dataloader] ++ Absinthe.Plugin.defaults() - end +Now any time you're loading posts, you'll just get posts that haven't been +deleted. - object :team do - field(:id, non_null(:id)) - field(:name, non_null(:string)) - field(:city, non_null(:string)) - end +We can also use the context to ensure access conditions, so we can only show deleted posts for admins: - query do - field(:teams, list_of(:team), resolve: dataloader(Nhl)) - field :team, :team do - arg(:id, non_null(:id)) - resolve(dataloader(Nhl)) - end - end -end +```elixir +def query(Post, %{has_admin_rights: true}), do: Post + +def query(Post, _), do: from p in Post, where: is_nil(p.deleted_at) + +def query(queryable, _), do: queryable ``` +Helpfully, those rules are defined within your context, helping ensure +that it has the final say about data access. + ### Sources Dataloader ships with two different built in sources: -* Ecto - for easily pulling out data with ecto -* KV - a simple KV key value source. +* `Dataloader.Ecto` - for easily pulling out data with ecto +* `Dataloader.KV` - a simple KV key value source. #### KV @@ -150,6 +168,8 @@ end ``` `Dataloader.KV` requires a load function that accepts a batch and args. It must return a map of values keyed by the args. 
-This is the purpose of the `fetch/2` function. The `dataloader` helper we imported above uses the field name as the batch, and a map where the argument name is the key. For example: `fetch(:team, [%{ id: 1 }])`
+This is the purpose of the `fetch/2` function. The `dataloader/1` helper we imported above uses the field name as the batch, and a map where the argument name is the key. For example: `fetch(:team, [%{ id: 1 }])`
+
+Pattern matching can be used to fetch differently depending on the batch. For example, when the :teams batch is requested, the args will actually be an empty map (i.e. `%{}`).
 
-Pattern matching can be used to fetch differently depending on the batch. For example, when the :teams batch is requested, the args will actually be an empty map (i.e. `%{}`).
\ No newline at end of file
+If you're interested in more generic use of Dataloader, see the [dataloader project source](https://github.com/absinthe-graphql/dataloader).
\ No newline at end of file
diff --git a/guides/deprecation.md b/guides/deprecation.md
index 00f90c781b..ee58150a3e 100644
--- a/guides/deprecation.md
+++ b/guides/deprecation.md
@@ -31,4 +31,4 @@ field :ssn, :string do
 end
 ```
 
-> Warning: Deprecated fields and enum values are not reported by default during [introspection](introspection.html).
+> Warning: Deprecated fields and enum values are not reported by default during [introspection](introspection.md).
diff --git a/guides/ecto.md b/guides/ecto.md
deleted file mode 100644
index cce8b71f00..0000000000
--- a/guides/ecto.md
+++ /dev/null
@@ -1,293 +0,0 @@
-# Ecto Best Practices
-
-## Avoiding N+1 Queries
-
-In general, you want to make sure that when accessing Ecto associations that you
-preload the data in the top level resolver functions to avoid N+1 queries.
-
-Imagine this scenario: You have posts and users. A `Post` has an `author` field, which
-returns a user. 
You want to list all posts, and get the name of their author: - -```graphql -{ - posts { - author { - name - } - } -} -``` - -If you write your schema like this, you're going to have a _bad_ time due to issues with _N + 1_: - -```elixir -object :post do - @desc "Author of the post" - field :author, :user do - resolve fn post, _, _ -> - author = - post - |> Ecto.assoc(:author) - |> Repo.one - - {:ok, author} - end - end -end - -query do - field :posts, list_of(:post) do - resolve fn _, _ -> - {:ok, Post |> Repo.all} - end - end -end -``` - -What this schema will do when presented with the GraphQL query is -run `Post |> Repo.all`, which will retrieve _N_ posts. Then for each -post it will resolve child fields, which runs our `Repo.one` query -function, resulting in _N+1_ calls to the database. - -One way to handle this issue is with Absinthe's support for -batching. The idea with batching is that we're gonna aggregate all the -`author_id`s from each post, and then make one call to the user. - -Let's first make a function to get a model by ids: - -```elixir -defmodule MyAppWeb.Schema.Helpers do - def by_id(model, ids) do - import Ecto.Query - - ids = ids |> Enum.uniq - - model - |> where([m], m.id in ^ids) - |> Repo.all - |> Map.new(&{&1.id, &1}) - end -end -``` - -Now we can use this function to batch our author lookups: - -```elixir -object :post do - - @desc "Author of the post" - field :author, :user do - resolve fn post, _, _ -> - batch({MyAppWeb.Schema.Helpers, :by_id, User}, post.author_id, fn batch_results -> - {:ok, Map.get(batch_results, post.author_id)} - end) - end - end - -end -``` - -Now we make just two calls to the database. The first call loads all of the posts. -Then as Absinthe walks through each post and tries to get the author, it's instead -told to aggregate its information. - -That aggregate information is passed on to our `by_id/2` function from earlier. -It grabs ALL the users in just one database call, and creates a map of user ids -to users. 
- -Absinthe then does a second pass and calls the `batch_results` function with that -map, letting us retrieve the individual author for each post. - -Not only is this a very efficient way to query the data, it's also 100% dynamic. -If a query document asks for authors, they're loaded efficiently. If it does not, -they aren't loaded at all. - -We've made it easier and more flexible, however, with -Elixir's [dataloader](https://hex.pm/packages/dataloader) package. - -### Dataloader - -`Absinthe.Middleware.Batch` achieves a lot and, with some helpers, was the -standard way to solve this problem for a long time. While batching still has a -place, it has a few limitations that have driven the development of Dataloader. -There are small scale annoyances like the limitation of only being able to batch -one thing at a time in a field, or the fact that the API can get very verbose. - -There's also some larger scale issues however. Ecto has a fair number of quirks -that make it a difficult library to abstract access to. If you want the -concurrent test system to work, you need to add `self()` to all the batch keys -and do `Repo.all(caller: pid)` in every batch function so that it knows which -sandbox to use. It gets very easy for your GraphQL functions to become full of -direct database access, inevitably going around important data access rules you -may want to enforce in your contexts. Alternatively, your context functions can -end up with dozens of little functions that only exist to support batching items -by ID. - -In time, people involved in larger projects have been able to build some -abstractions, helpers, and conventions around the `Absinthe.Middleware.Batch` -plugin that have done a good job of addressing these issues. That effort has been -extracted into the Dataloader project, which also draws inspiration from similar -projects in the GraphQL world. 
- -#### Getting Started - -Let's jump straight in to getting Dataloader working, and then we'll expand on -what's actually happening behind the scenes. - -Using Dataloader is as simple as doing: - -```elixir -import Absinthe.Resolution.Helpers, only: [dataloader: 1] - -object :author do - @desc "Author of the post" - field :posts, list_of(:post), resolve: dataloader(Blog) -end -``` - - -To make this work we need to setup a dataloader, add the `Blog` source to it, and -make sure our schema knows it needs to run the dataloader. - -First however make sure to include the dataloader dependency in your application: - -```elixir -{:dataloader, "~> 1.0.0"} -``` - -Latest install instructions found here: https://github.com/absinthe-graphql/dataloader - -Let's start with a data source. Dataloader data sources are just structs that encode -a way of retrieving data in batches. In a Phoenix application you'll generally have one -source per context, so that each context can control how its data is loaded. - -Here is a hypothetical `Blog` context and a dataloader ecto source: - -```elixir -defmodule MyApp.Blog do - def data() do - Dataloader.Ecto.new(MyApp.Repo, query: &query/2) - end - - def query(queryable, _params) do - queryable - end -end -``` - -When integrating Dataloader with GraphQL, we want to place it in our context so -that we can access it in our resolvers. In your schema module add: - -```elixir -alias MyApp.{Blog, Foo} - -def context(ctx) do - loader = - Dataloader.new - |> Dataloader.add_source(Blog, Blog.data()) - |> Dataloader.add_source(Foo, Foo.data()) # Foo source could be a Redis source - - Map.put(ctx, :loader, loader) -end - -def plugins do - [Absinthe.Middleware.Dataloader] ++ Absinthe.Plugin.defaults() -end -``` - -The `context/1` function is a callback specified by the `Absinthe.Schema` behaviour that gives -the schema itself an opportunity to set some values in the context that it may need in order to run. 
- -The `plugins/0` function has been around for a while, and specifies what plugins the schema needs to resolve. -See [the documentation](https://hexdocs.pm/absinthe/Absinthe.Schema.html#c:plugins/0) for more. - -That's it! If you run a GraphQL query that hits that field, it will be loaded efficiently without N+1. - -#### Unpacking Dataloader - -The `data/0` function creates an Ecto data source, to which you pass your repo and a query function. This query function -is called every time you want to load something, and provides an opportunity to apply arguments or -set defaults. So for example if you always want to only load non-deleted posts you can do: - -```elixir -def query(Post, _) do - from p in Post, where: is_nil(p.deleted_at) -end -def query(queryable, _) do - queryable -end -``` - -Now any time you're loading posts, you'll just get posts that haven't been -deleted. Helpfully, this rule is defined within your context, helping ensure -that it has the final say about data access. - -To actually use this data source we need to add a loader to your Absinthe -Context: - -```elixir -defmodule MyAppWeb.Context do - alias MyApp.Blog - def dataloader() do - Dataloader.new - |> Dataloader.add_source(Blog, Blog.data()) - end -end -``` - -### Deprecated in v1.4: Batching with Absinthe.Ecto - -The batching helper functions present -in [absinthe_ecto](https://github.com/absinthe-graphql/absinthe_ecto) -provided some early support for making it easy to get data from Ecto. - -These batching features are considered *DEPRECATED* in favor of -Dataloader, described above. - -> There are a number of useful features that may be added to absinthe_ecto in the -> future to support other integration concerns (schema definition, error handling), -> but the batching support will eventually be phased out. Please use Dataloader. 
- -Here's an example of use: - -```elixir -use Absinthe.Ecto, repo: MyApp.Repo - -object :post do - @desc "Author of the post" - field :author, :user, resolve: assoc(:author) -end -``` - -You can pass a function to it so that you can handle query arguments: - -```elixir -use Absinthe.Ecto, repo: MyApp.Repo -import Ecto.Query - -object :author do - @desc "posts by an author" - field :posts, list_of(:post) do - arg :category_id, :id - resolve assoc(:posts, fn query, args, _ctx -> - query |> where(category_id ^args.category_id) - end) - end -end -``` - -The issue here is that the resolvers become full of lots of on off SQL queries, -without providing your domain logic any easy opportunity to apply general rules -about how data should be accessed or loaded. - -Although Dataloader requires a little bit more setup, it is a lot more flexible -since it can handle non-Ecto data sources, and it lets each part of your code -focus on what it should be doing. Your resolvers handle translating GraphQL -specific concerns into function calls to your domain logic, and your domain -logic gets to focus on enforcing the rules you want, without getting cluttered -up with dozens and dozens of single purpose data loading functions. - -## Formatting Ecto.Changeset Errors - -You may want to look at the [errors](errors.html) guide and -the [kronky](https://hex.pm/packages/kronky) package. diff --git a/guides/errors.md b/guides/errors.md index a3abd952a5..eb77059dc5 100644 --- a/guides/errors.md +++ b/guides/errors.md @@ -50,4 +50,4 @@ Generic handler for interoperability with errors from other libraries: ## Ecto.Changeset Errors -You may want to look at the [kronky](https://hex.pm/packages/kronky) package. +You may want to look at the [Absinthe ErrorPayload](https://hex.pm/packages/absinthe_error_payload) package. 
diff --git a/guides/file-uploads.md b/guides/file-uploads.md index e0c1a7c55e..c815d25fa7 100644 --- a/guides/file-uploads.md +++ b/guides/file-uploads.md @@ -32,10 +32,10 @@ To send a mutation that includes a file upload, you need to use the `multipart/form-data` content type. For example, using `cURL`: ```shell -$ curl -X POST \\ --F query="mutation { uploadFile(users: \"users_csv\", metadata: \"metadata_json\")}" \\ --F users_csv=@users.csv \\ --F metadata_json=@metadata.json \\ +$ curl -X POST \ +-F query="mutation { uploadFile(users: \"users_csv\", metadata: \"metadata_json\")}" \ +-F users_csv=@users.csv \ +-F metadata_json=@metadata.json \ localhost:4000/graphql ``` @@ -48,5 +48,6 @@ we were merely putting them in the context as in other implementations. ## Integration with Client-side GraphQL Frameworks -* Apollo: [apollo-absinthe-upload-client](https://www.npmjs.com/package/apollo-absinthe-upload-client) (Note: does not support Relay Native as of v1.0.1) +* Apollo: [apollo-absinthe-upload-link](https://www.npmjs.com/package/apollo-absinthe-upload-link) +* Apollo (v1): [apollo-absinthe-upload-client](https://www.npmjs.com/package/apollo-absinthe-upload-client) (Note: does not support Relay Native as of v1.0.1) * Relay: _(None known. Please submit a pull request updating this information.)_ diff --git a/guides/importing-fields.md b/guides/importing-fields.md index 344f61f08a..6eecf3d089 100644 --- a/guides/importing-fields.md +++ b/guides/importing-fields.md @@ -79,7 +79,7 @@ end > Before you can import fields from another object type, make sure > that the type in question is available to your schema. See -> the [guide](importing-types.html) on importing types for information +> the [guide](importing-types.md) on importing types for information > on how that's done. Here's how those object types are defined. @@ -138,4 +138,4 @@ defmodule MyAppWeb.Schema.ContentTypes do end ``` -For more information on `import_types`, see [the guide](importing-types.html). 
+For more information on `import_types`, see [the guide](importing-types.md). diff --git a/guides/importing-types.md b/guides/importing-types.md index cdf329d572..76f7ec97fb 100644 --- a/guides/importing-types.md +++ b/guides/importing-types.md @@ -76,4 +76,4 @@ end This will look for a matching object type `:account_queries`, and pull its fields into the root query type. -For more information, see the [guide](importing-fields.html). +For more information, see the [guide](importing-fields.md). diff --git a/guides/introduction/installation.md b/guides/introduction/installation.md index d924639c1c..9cc0e0b367 100644 --- a/guides/introduction/installation.md +++ b/guides/introduction/installation.md @@ -6,7 +6,7 @@ To install Absinthe, just add an entry to your `mix.exs`: def deps do [ # ... - {:absinthe, "~> 1.4"} + {:absinthe, "~> 1.5"} ] end ``` @@ -23,7 +23,7 @@ Don't forget you can use the [:override](https://hexdocs.pm/mix/Mix.Tasks.Deps.h def deps do [ # ... - {:absinthe, "~> 1.4", override: true} + {:absinthe, "~> 1.5", override: true} ] end ``` @@ -32,4 +32,4 @@ end Most people use Absinthe to support an HTTP API. -You'll want to read the [Plug and Phoenix](../plug-phoenix.md) for specific installation and configuration options, including how you can run the handy, included GraphiQL tool directly from your application. +You'll want to read the [Plug and Phoenix](plug-phoenix.md) for specific installation and configuration options, including how you can run the handy, included GraphiQL tool directly from your application. 
diff --git a/guides/introduction/learning.md b/guides/introduction/learning.md index 875a993338..36a22ef2e2 100644 --- a/guides/introduction/learning.md +++ b/guides/introduction/learning.md @@ -4,7 +4,7 @@ The following are some Absinthe-specific educational resources that are availabl ## Books -* [Craft GraphQL APIs in Elixir with Absinthe](https://pragprog.com/book/wwgraphql/craft-graphql-apis-in-elixir-with-absinthe) by the creators of Absinthe (ebook in beta, print version out in late 2017/early 2018) +* [Craft GraphQL APIs in Elixir with Absinthe](https://pragprog.com/book/wwgraphql/craft-graphql-apis-in-elixir-with-absinthe) by the creators of Absinthe. ## Online Resources diff --git a/guides/introduction/overview.md b/guides/introduction/overview.md index a17bfc04a2..d5d5bc3ba7 100644 --- a/guides/introduction/overview.md +++ b/guides/introduction/overview.md @@ -17,17 +17,17 @@ Here are a few resources that might be helpful: Absinthe's functionality generally falls into two broad areas. You can read more about the details in the guides provided as part of this documentation and in the related packages/projects: -* [Defining Schemas](schemas.html). A schema: +* [Defining Schemas](schemas.md). A schema: * defines the structure of data entities and the relationships between, as well as the available queries, mutations, and subscriptions, using an elegant collection of declarative macros - * defines [custom scalar](custom-scalars.html) types - * declares any [deprecated](deprecation.html) definitions + * defines [custom scalar](custom-scalars.md) types + * declares any [deprecated](deprecation.md) definitions * defines resolution functions to access data, using a flexible and extensible middleware/plugin system * Executing Documents. 
A GraphQL document: * can be any standard GraphQL query, mutation, or subscription - * may include reusable [variable](variables.html) definitions - * can be analyzed for its [complexity](complexity-analysis.html) and be rejected if it's unsafe/too expensive - * has a [context](context-and-authentication.html) that you can integrate with authentication and authorization strategies - * can contain standard GraphQL [introspection](introspection.html) fields + * may include reusable [variable](variables.md) definitions + * can be analyzed for its [complexity](complexity-analysis.md) and be rejected if it's unsafe/too expensive + * has a [context](context-and-authentication.md) that you can integrate with authentication and authorization strategies + * can contain standard GraphQL [introspection](introspection.md) fields * can include multipart file uploads as GraphQL arguments (as part of the [absinthe_plug](https://hex.pm/packages/absinthe_plug) package) ## Integrations @@ -35,10 +35,10 @@ Absinthe's functionality generally falls into two broad areas. You can read more Absinthe integrates with a number of other important projects, both on the backend and frontend, to provide a better experience for developers. 
* Elixir - * Support for HTTP APIs using [Plug and Phoenix](plug-phoenix.html) via the [absinthe_plug](https://hex.pm/packages/absinthe_plug) and [absinthe_phoenix](https://hex.pm/packages/absinthe_phoenix) packages - * Support for [Ecto](ecto.html) via the [dataloader](https://github.com/absinthe-graphql/dataloader) package + * Support for HTTP APIs using [Plug and Phoenix](plug-phoenix.md) via the [absinthe_plug](https://hex.pm/packages/absinthe_plug) and [absinthe_phoenix](https://hex.pm/packages/absinthe_phoenix) packages + * Support for [Ecto](https://hex.pm/packages/ecto) via the [dataloader](https://github.com/absinthe-graphql/dataloader) package * JavaScript (client-side) - * Support for [Relay](relay.html) and [Apollo Client](apollo.html) + * Support for [Relay](relay.md) and [Apollo Client](apollo.md) * Support for Absinthe's channel-based subscriptions. See [absinthe-socket](https://github.com/absinthe-graphql/absinthe-socket). ## Guides diff --git a/guides/introspection.md b/guides/introspection.md index 00841daa95..4fe74344d3 100644 --- a/guides/introspection.md +++ b/guides/introspection.md @@ -88,16 +88,15 @@ Getting the name of the fields for a named type: } ``` -Note that you may have to nest several depths of type/ofType, as -type information includes any wrapping layers of List -and/or NonNull. +Note that you may have to nest several depths of `type`/`ofType`, as +type information includes any wrapping layers of [List](https://facebook.github.io/graphql/#sec-List) and/or [NonNull](https://facebook.github.io/graphql/#sec-Non-null). ## Using GraphiQL The [GraphiQL project](https://github.com/graphql/graphiql) is "an in-browser IDE for exploring GraphQL." -Absinthe provides GraphiQL via a plug in `absinthe_plug`. See the [Plug and Phoenix Guide](plug-phoenix.html) +Absinthe provides GraphiQL via a plug in `absinthe_plug`. See the [Plug and Phoenix Guide](plug-phoenix.md) for how to install that library. 
Once installed, usage is simple as: ```elixir @@ -105,7 +104,7 @@ plug Absinthe.Plug.GraphiQL, schema: MyAppWeb.Schema ``` If you want to use it at a particular path (in this case `graphiql` in your Phoenix -router, simply do: +router) simply do: ```elixir # filename: router.ex diff --git a/guides/middleware-and-plugins.md b/guides/middleware-and-plugins.md index 3cc271e923..815214530c 100644 --- a/guides/middleware-and-plugins.md +++ b/guides/middleware-and-plugins.md @@ -25,7 +25,7 @@ defmodule MyApp.Middlewares.HandleChangesetErrors do end ``` -The resolution struct has all kinds of useful values inside of it. You can access the Absinthe context, the root value, information about the current field's AST, and more. For more information on how the current user ends up in the context please see our full [authentication guide](context-and-authentication.html). +The resolution struct has all kinds of useful values inside of it. You can access the Absinthe context, the root value, information about the current field's AST, and more. For more information on how the current user ends up in the context please see our full [authentication guide](context-and-authentication.md). ## Using Middlewares Middleware can be placed on a field in few different ways: diff --git a/guides/plug-phoenix.md b/guides/plug-phoenix.md index 7ef86a9b79..e17742df25 100644 --- a/guides/plug-phoenix.md +++ b/guides/plug-phoenix.md @@ -1,14 +1,14 @@ # Plug and Phoenix Setup First, install Absinthe.Plug and a JSON codec of your choice, -eg, [Poison](https://hex.pm/packages/poison): +eg, [Jason](https://hex.pm/packages/jason): ```elixir # filename: mix.exs def deps do [ - {:absinthe_plug, "~> 1.4"}, - {:poison, "~> 2.1.0"}, + {:absinthe_plug, "~> 1.5"}, + {:jason, "~> 1.0"}, ] end ``` @@ -28,7 +28,7 @@ you should plug Absinthe.Plug after Plug.Parsers. 
```elixir plug Plug.Parsers, parsers: [:urlencoded, :multipart, :json, Absinthe.Plug.Parser], - json_decoder: Poison + json_decoder: Jason plug Absinthe.Plug, schema: MyAppWeb.Schema @@ -51,7 +51,7 @@ defmodule MyApp.Endpoint do plug Plug.Parsers, parsers: [:urlencoded, :multipart, :json], pass: ["*/*"], - json_decoder: Poison + json_decoder: Jason plug Absinthe.Plug, schema: MyAppWeb.Schema @@ -80,19 +80,19 @@ Now Absinthe.Plug will only serve GraphQL from the `/api` url. on to `Absinthe.run` as the context. This is how you should handle logic that uses headers -- most notably, Authentication. -For more information, see the [Context](context-and-authentication.html) guide. +For more information, see the [Context](context-and-authentication.md) guide. ## GraphiQL See the [absinthe_plug](https://github.com/absinthe-graphql/absinthe_plug) -project and the GraphiQL portion of the [Introspection](introspection.html) guide to +project and the GraphiQL portion of the [Introspection](introspection.md) guide to learn how to use the built-in `Absinthe.Plug.GraphiQL` plug. ## General Usage This plug supports requests in a number of ways: -### Via a GET +### Via a GET With a query string: @@ -173,7 +173,7 @@ configure `Plug.Parsers` (or equivalent) to parse the request body before `Absin plug Plug.Parsers, parsers: [:urlencoded, :multipart, :json], pass: ["*/*"], - json_decoder: Poison + json_decoder: Jason ``` For `application/graphql`, the POST body will be parsed as GraphQL query string, @@ -191,7 +191,7 @@ As a plug, `Absinthe.Plug` requires very little configuration. If you want to su plug Plug.Parsers, parsers: [:urlencoded, :multipart, :json], pass: ["*/*"], - json_decoder: Poison + json_decoder: Jason plug Absinthe.Plug, schema: MyApp.Linen.Schema @@ -201,3 +201,21 @@ plug Absinthe.Plug, It also takes several options. See [the documentation](https://hexdocs.pm/absinthe_plug/Absinthe.Plug.html#init/1) for the full listing. 
+ +## Inside Phoenix controllers + +You can use GraphQL as the datasource for your Phoenix controllers. For this +you'll need to add `absinthe_phoenix` to your dependencies. See [Absinthe Phoenix](https://github.com/absinthe-graphql/absinthe_phoenix) for installation instructions. + +```elixir +@graphql """ + query ($filter: UserFilter) { + users(filter: $filter, limit: 10) + } +""" +def index(conn, %{data: data}) do + render conn, "index.html", data +end +``` +The results of the query are now available in the "index.html" template. For +more information, see [`Absinthe.Phoenix.Controller`](https://hexdocs.pm/absinthe_phoenix/Absinthe.Phoenix.Controller.html) diff --git a/guides/schemas.md b/guides/schemas.md index 3afcb5425f..337f9a4d9b 100644 --- a/guides/schemas.md +++ b/guides/schemas.md @@ -29,10 +29,9 @@ defmodule MyAppWeb.Schema do end ``` -

- You may want to refer to the Absinthe API - documentation for more detailed information as you look this over.. -

+ +You may want to refer to the [Absinthe API documentation](https://hexdocs.pm/absinthe/) for more detailed information as you look this over. + Some macros and functions used here that are worth mentioning, pulled in automatically from `Absinthe.Schema.Notation` by `use Absinthe.Schema`: @@ -58,14 +57,14 @@ object :item do end ``` -Now, you can use Absinthe to execute a query document. Keep in mind that for +Now you can use Absinthe to execute a query document. Keep in mind that for HTTP, you'll probably want to use -[Absinthe.Plug](plug-phoenix.html) instead of executing +[Absinthe.Plug](plug-phoenix.md) instead of executing GraphQL query documents yourself. Absinthe doesn't know or care about HTTP, -but the `absinthe_plug` project does -- and handles the vagaries of interacting +but the `absinthe_plug` project does: it handles the vagaries of interacting with HTTP GraphQL clients so you don't have to. -If you _were_ executing query documents yourself (lets assume for a local tool), +If you _were_ executing query documents yourself (let's assume for a local tool), it would go something like this: ```elixir @@ -82,12 +81,26 @@ it would go something like this: {:ok, %{data: %{"item" => %{"name" => "Foo"}}}} ``` +Your schemas can be further customized using the options available to +`Absinthe.Schema.Notation.field/4` to help provide for a richer experience for +your users, customize the field names, or mark fields as deprecated. 
+ +```elixir +# filename: myapp/language_schema.ex +@desc "A Language" +object :language do + field :id, :id + field :iso_639_1, :string, description: "2 character ISO 639-1 code", name: "iso639" + field :name, :string, description: "English name of the language" +end +``` + ## Importing Types We could also move our type definitions out into a different module, for instance, `MyAppWeb.Schema.Types`, and then use `import_types` in our `MyAppWeb.Schema`: ```elixir -# filename: myapp/schema.ex +# filename: myapp/schema/types.ex defmodule MyAppWeb.Schema.Types do use Absinthe.Schema.Notation @@ -114,4 +127,4 @@ end It's a nice way of separating the top-level `query` and `mutation` information, which define the surface area of the API, with the actual types that it uses. -See [Importing Types](importing-types.html) for a full guide to importing types. +See [Importing Types](importing-types.md) for a full guide to importing types. diff --git a/guides/subscriptions.md b/guides/subscriptions.md index 3e8ed01870..39f7432603 100644 --- a/guides/subscriptions.md +++ b/guides/subscriptions.md @@ -8,15 +8,14 @@ At the moment however the most common and fully featured platform that you can r ### Absinthe.Phoenix Setup -Libraries you'll need: +Packages you'll need: ```elixir -{:absinthe, "~> 1.4.0"}, -{:absinthe_phoenix, "~> 1.4.0"}, +{:absinthe, "~> 1.5"}, +{:absinthe_phoenix, "~> 1.5"}, ``` - -You need to have a working phoenix pubsub configured. Here is what the default looks like if you create a new phoenix project: +You need to have a working Phoenix pubsub configured. 
Here is what the default looks like if you create a new Phoenix project: ```elixir config :my_app, MyAppWeb.Endpoint, @@ -25,28 +24,51 @@ config :my_app, MyAppWeb.Endpoint, adapter: Phoenix.PubSub.PG2] ``` -In your application supervisor add a line AFTER your existing endpoint supervision +In your application supervisor add a line _after_ your existing endpoint supervision line: +```elixir + # List all child processes to be supervised + children = [ + # Start the Ecto repository + MyAppWeb.Repo, + # Start the endpoint when the application starts + MyAppWeb.Endpoint, + {Absinthe.Subscription, MyAppWeb.Endpoint} + ] + + # See https://hexdocs.pm/elixir/Supervisor.html + # for other strategies and supported options + opts = [strategy: :one_for_one, name: MyAppWeb.Supervisor] + Supervisor.start_link(children, opts) +``` + +In older versions of Phoenix (pre 1.4) you might see a slightly different syntax, +in which case add Absinthe like this: + ```elixir [ # other children ... + MyAppWeb.Repo, supervisor(MyAppWeb.Endpoint, []), # this line should already exist - supervisor(Absinthe.Subscription, [MyAppWeb.Endpoint]), # add this line + supervisor(Absinthe.Subscription, [MyAppWeb.Endpoint]), # add this line # other children ... ] ``` Where `MyAppWeb.Endpoint` is the name of your application's Phoenix endpoint. -In your `MyApp.Web.Endpoint` module add: +In your `MyAppWeb.Endpoint` module add: + ```elixir use Absinthe.Phoenix.Endpoint ``` -In your socket add: +For your socket, different configurations are used in `MyAppWeb.UserSocket` depending on what version of +Phoenix you're using. + +#### Phoenix 1.3 and 1.4 -#### Phoenix 1.3 ```elixir use Absinthe.Phoenix.Socket, schema: MyAppWeb.Schema @@ -64,24 +86,38 @@ use Absinthe.Phoenix.Socket, Where `MyAppWeb.Schema` is the name of your Absinthe schema module.
+### GraphiQL (optional) + +If you're using the GraphiQL plug, in your `MyAppWeb.Router`, specify the `socket` option: + +```elixir +forward "/graphiql", + Absinthe.Plug.GraphiQL, + schema: MyAppWeb.Schema, + socket: MyAppWeb.UserSocket +``` + That is all that's required for setup on the server. ### Setting Options -Options like the context can be configured in the `def connect` callback of your -socket +Options like the context can be configured in the `connect/2` callback in your +socket module. + +> Note: The transport macro is deprecated in phoenix 1.4 and can be omitted. ```elixir -defmodule GitHunt.Web.UserSocket do +defmodule MyAppWeb.UserSocket do use Phoenix.Socket use Absinthe.Phoenix.Socket, - schema: MyApp.Web.Schema + schema: MyAppWeb.Schema + # Deprecated in Phoenix v1.4 transport :websocket, Phoenix.Transports.WebSocket def connect(params, socket) do current_user = current_user(params) - socket = Absinthe.Phoenix.Socket.put_opts(socket, context: %{ + socket = Absinthe.Phoenix.Socket.put_options(socket, context: %{ current_user: current_user }) {:ok, socket} @@ -97,8 +133,8 @@ end ### Schema -Example schema that lets you use subscriptions to get notified when a comment -is submitted to a github repo. 
+Here's an example schema that lets you use subscriptions to get notified when a comment +is submitted to a GitHub repository: ```elixir mutation do @@ -126,6 +162,9 @@ subscription do # subscription { # commentAdded(repoName: "elixir-lang/elixir") { content } # } + # + # If needed, you can also provide a list of topics: + # {:ok, topic: ["absinthe-graphql/absinthe", "elixir-lang/elixir"]} config fn args, _ -> {:ok, topic: args.repo_name} end @@ -150,7 +189,7 @@ subscription do end ``` -Concretely, if client A submits a subscription doc: +Concretely, if client A submits a subscription document: ```graphql subscription { @@ -160,9 +199,9 @@ subscription { } ``` -this tells Absinthe to subscribe client A in the `:comment_added` field on the `"absinthe-graphql/absinthe"` topic, because that's what comes back from the `setup` function. +This tells Absinthe to subscribe client A in the `:comment_added` field on the `"absinthe-graphql/absinthe"` topic, because that's what comes back from the `config` function. -Then if client B submits a mutation: +Then, if client B submits a mutation: ```graphql mutation { @@ -174,7 +213,7 @@ mutation { Client B will get the normal response to their mutation, and since they just ask for the `id` that's what they'll get. -Additionally, the `:submit_comment` mutation is configured as a trigger on the `:commented_added` subscription field, so the trigger function is called. That function returns `"absinthe-graphql/absinthe"` because that's the repository name the comment was on, and now Absinthe knows it needs to get all subscriptions on the `:comment_added` field that have the `"absinthe-graphql/absinthe"` topic, so client A gets back +Additionally, the `:submit_comment` mutation is configured as a trigger on the `:comment_added` subscription field, so the trigger function is called. 
That function returns `"absinthe-graphql/absinthe"` because that's the repository name for the comment, and now Absinthe knows it needs to get all subscriptions on the `:comment_added` field that have the `"absinthe-graphql/absinthe"` topic, so client A gets back: ```json {"data":{"commentAdded":{"content":"Great library!"}}} @@ -193,4 +232,47 @@ If you want to subscribe to mutations from within your application, you can do: MyAppWeb.Endpoint.subscribe(topic) ``` -This guide is up to date, but incomplete. Stay tuned for more content! +### De-duplicating Updates + +By default, Absinthe will resolve each outgoing publish once per individual subscription. This ensures: + +- Different GraphQL documents each receive the different fields they requested +- User-specific updates are sent out, in case `context` contains user-specific data + +To improve the scale at which your subscriptions operate, you may tell Absinthe when it is safe to de-duplicate updates. Simply return a `context_id` from your field's `config` function: + +```elixir +subscription do + field :news_article_published, :article do + config fn _, _ -> + {:ok, topic: "*", context_id: "global"} + end + end +end +``` + +Here we return a constant (`"global"`) because our `:article` type doesn't contain any user-specific fields on it. + +Given these three active subscriptions: + +```graphql +# user 1 +subscription { + newsArticlePublished { content } +} + +# user 2 +subscription { + newsArticlePublished { content author } +} + +# user 3 +subscription { + newsArticlePublished { content } +} +``` + +Since we provided a `context_id`, Absinthe will only run two documents per publish to this field: + +1. Once for *user 1* and *user 3* because they have the same context ID (`"global"`) and sent the same document. +2. Once for *user 2*. While *user 2* has the same context ID (`"global"`), they provided a different document, so it cannot be de-duplicated with the other two. 
diff --git a/guides/telemetry.md b/guides/telemetry.md new file mode 100644 index 0000000000..8eacb8bcb5 --- /dev/null +++ b/guides/telemetry.md @@ -0,0 +1,64 @@ +# Telemetry + +Absinthe 1.5 uses `telemetry` to instrument its activity. + +Call `:telemetry.attach/4` or `:telemetry.attach_many/4` to attach your +handler function to any of the following event names: + +- `[:absinthe, :execute, :operation, :start]` when the operation starts +- `[:absinthe, :execute, :operation, :stop]` when the operation finishes +- `[:absinthe, :subscription, :publish, :start]` when a subscription starts +- `[:absinthe, :subscription, :publish, :stop]` when a subscription finishes +- `[:absinthe, :resolve, :field, :start]` when field resolution starts +- `[:absinthe, :resolve, :field, :stop]` when field resolution finishes +- `[:absinthe, :middleware, :batch, :start]` when the batch processing starts +- `[:absinthe, :middleware, :batch, :stop]` when the batch processing finishes + +Telemetry handlers are called with `measurements` and `metadata`. For details on +what is passed, checkout `Absinthe.Phase.Telemetry`, `Absinthe.Middleware.Telemetry`, +and `Absinthe.Middleware.Batch`. + +For async, batch, and dataloader fields, Absinthe sends the final event when +it gets the results. That might be later than when the results are ready. If +you need to know how long the underlying operation took, you'll need to hook +telemetry up to that underlying operation. See, for example, the recommended +telemetry events in the documentation for `Ecto.Repo`. + +## Interactive Telemetry + +As an example, you could attach a handler in an `iex -S mix` shell. 
Paste in: + +```elixir +:telemetry.attach_many( + :demo, + [ + [:absinthe, :resolve, :field, :stop] + ], + fn event_name, measurements, metadata, _config -> + %{ + event_name: event_name, + measurements: measurements, + metadata: metadata + } + |> IO.inspect() + end, + [] +) +``` + +After a query is executed, you'll see something like: + +```elixir +%{ + event_name: [:absinthe, :resolve, :field, :stop], + measurements: %{duration: 14000}, + metadata: %{ + id: -576460752303351647, + middleware: [ + {{Absinthe.Resolution, :call}, &MyApp.Resolvers.resolve_field/3} + ], + resolution: :..., + start_time: 1565830447035742000 + } +} +``` diff --git a/guides/testing.md b/guides/testing.md new file mode 100644 index 0000000000..cebbc90365 --- /dev/null +++ b/guides/testing.md @@ -0,0 +1,100 @@ +# Testing + +There are three main approaches to testing GraphQL APIs built with Absinthe: + +1. Testing resolver functions, since they do most of the work. +2. Testing GraphQL document execution directly via `Absinthe.run/3`, for the bigger picture. +3. Outside-in, testing the full HTTP request/response cycle with [absinthe_plug](https://hexdocs.pm/absinthe_plug/Absinthe.Plug.html). + +This guide focuses on the third approach, which we generally recommend since it exercises more +of your application. + +## Testing with Absinthe Plug + +GraphQL is transport independent, but it's most often served over HTTP. To test HTTP requests with `absinthe` you'll also need `absinthe_plug`. This guide will also assume you're using Phoenix, although +it is possible to use Absinthe without it (see the [Plug and Phoenix Setup Guide](plug-phoenix.md)). 
+ +## Example + +Say we want to test the following schema: + +```elixir +defmodule MyAppWeb.Schema do + use Absinthe.Schema + + @fakedb %{ + "1" => %{name: "Bob", email: "bubba@foo.com"}, + "2" => %{name: "Fred", email: "fredmeister@foo.com"} + } + + query do + field :user, :user do + arg :id, non_null(:id) + + resolve &find_user/2 + end + end + + object :user do + field :name, :string + field :email, :string + end + + defp find_user(%{id: id}, _) do + {:ok, Map.get(@fakedb, id)} + end +end +``` + +Which we have exposed at the `/api` endpoint: + +```elixir +defmodule MyAppWeb.Router do + use Phoenix.Router + + scope "/api" do + forward "/", Absinthe.Plug, schema: MyAppWeb.Schema + end +end +``` + +The test could look something like this: + +```elixir +defmodule MyAppWeb.SchemaTest do + use MyAppWeb.ConnCase + + @user_query """ + query getUser($id: ID!) { + user(id: $id) { + name + email + } + } + """ + + test "query: user", %{conn: conn} do + conn = + post(conn, "/api", %{ + "query" => @user_query, + "variables" => %{id: 1} + }) + + assert json_response(conn, 200) == %{ + "data" => %{"user" => %{"email" => "bubba@foo.com", "name" => "Bob"}} + } + end +end + +``` + +Phoenix generates the `MyAppWeb.ConnCase` test helper module. This supplies the +`conn` variable containing the request and response. It also has helper functions +such as [`post/3`](https://hexdocs.pm/phoenix/Phoenix.ConnTest.html#post/3) +and [`json_response/2`](https://hexdocs.pm/phoenix/Phoenix.ConnTest.html#json_response/2). + +The query is stored in the `@user_query` module attribute. We post this document to +the GraphQL endpoint at `/api`, along with a map of variables which will be +transformed to arguments for the `getUser` query. + +The response to the query can then be directly asserted to be a JSON object of the right shape. 
diff --git a/guides/tutorial/complex-arguments.md b/guides/tutorial/complex-arguments.md index 0c6ff23d0a..505dd5fdb6 100644 --- a/guides/tutorial/complex-arguments.md +++ b/guides/tutorial/complex-arguments.md @@ -137,11 +137,11 @@ Everyone else gets an `"Access denied"` error for this field. Here's our mutation in action in GraphiQL. - + > Note we're sending a `Authorization` header to authenticate, which a > plug is handling. Make sure to read the -> related [guide](context-and-authentication.html) for more +> related [guide](context-and-authentication.md) for more > information on how to set-up authentication in your own > applications. > @@ -151,4 +151,4 @@ Here's our mutation in action in GraphiQL. ## Next Step -Now let's [wrap things up](conclusion.html). +Now let's [wrap things up](conclusion.md). diff --git a/guides/tutorial/conclusion.md b/guides/tutorial/conclusion.md index ea6cc0e698..4d10c64124 100644 --- a/guides/tutorial/conclusion.md +++ b/guides/tutorial/conclusion.md @@ -1,14 +1,14 @@ # Conclusion With this we have a basic GraphQL based API for a blog. Head on over -to [the github page](https://github.com/absinthe-graphql/absinthe_example) if +to [the github page](https://github.com/absinthe-graphql/absinthe_tutorial) if you want the final code. We hope to expand this tutorial to include a comment system that will acquaint you with Union types and Fragments in the coming days. Head on over to the topic guides for further reading, and see -the [community page](community.html) for information +the [community page](community.md) for information on how to get help, ask questions, or contribute! ## Please Help! 
diff --git a/guides/tutorial/dataloader.md new file mode 100644 index 0000000000..f116976c75 --- /dev/null +++ b/guides/tutorial/dataloader.md @@ -0,0 +1,122 @@ +# Dataloader + +Maybe you like good performance, or you realized that you are filling your objects with fields that need resolvers like + +```elixir +@desc "A user of the blog" + object :user do + field :id, :id + field :name, :string + field :contacts, list_of(:contact) + field :posts, list_of(:post) do + arg :date, :date + resolve &Resolvers.Content.list_posts/3 + end + end +``` + +This is going to get tedious and error-prone very quickly. What if we could support a query that supports associations, like + +```elixir +@desc "A user of the blog" + object :user do + field :id, :id + field :name, :string + field :contacts, list_of(:contact) + field :posts, list_of(:post) do + arg :date, :date + resolve dataloader(Content) + end + end +``` + +This way associations are all handled in the context under [business logic aware](https://github.com/absinthe-graphql/absinthe/issues/443#issuecomment-405929499) conditions. Supporting this is actually surprisingly simple. + +Since we had already set up users to load associated posts, we can change that to use dataloader to illustrate how much simpler this gets. + +Let's start by adding `dataloader` as a dependency in `mix.exs`: + +```elixir +defp deps do + [ + {:dataloader, "~> 1.0.7"} + << other deps >> + ] +``` + +Next, we need to set up dataloader in our context which allows us to load associations using rules: + +In `lib/blog/content.ex`: + +```elixir + def data(), do: Dataloader.Ecto.new(Repo, query: &query/2) + + def query(queryable, params) do + + queryable + end +``` + +This sets up a loader that can use pattern matching to load different rules for different queryables. Also note this function is passed in the context as the second parameter, and that can be used for further filtering.
+ +Then let's add a configuration to our schema (in `lib/blog_web/schema.ex`) so that we can allow Absinthe to use Dataloader: + +```elixir +defmodule BlogWeb.Schema do + use Absinthe.Schema + + def context(ctx) do + loader = + Dataloader.new() + |> Dataloader.add_source(Content, Content.data()) + + Map.put(ctx, :loader, loader) + end + + def plugins do + [Absinthe.Middleware.Dataloader | Absinthe.Plugin.defaults()] + end + + # << rest of the file>> +``` + +The loader is all set up, now let's modify the resolver to use Dataloader. In `lib/blog_web/schema/account_types.ex` modify the user object to look as follows: + +```elixir +@desc "A user of the blog" + object :user do + field :id, :id + field :name, :string + field :contacts, list_of(:contact) + field :posts, list_of(:post) do + arg :date, :date + resolve dataloader(Content) + end + end +``` + +That's it! You are now loading associations using [Dataloader](https://github.com/absinthe-graphql/dataloader) + +## More Examples +While the above examples are simple and straightforward we can use other strategies with loading associations consider the following: + +```elixir +object :user do + field :posts, list_of(:post), resolve: fn user, args, %{context: %{loader: loader}} -> + loader + |> Dataloader.load(Blog, :posts, user) + |> on_load(fn loader -> + {:ok, Dataloader.get(loader, Blog, :posts, user)} + end) + end +``` + +In this example, we are passing some args to the query in the context where our source lives. For example, this function now receives `args` as `params` meaning we can do now do fun stuff like apply rules to our queries like the following: + +```elixir +def query(query, %{has_admin_rights: true}), do: query + +def query(query, _), do: from(a in query, select_merge: %{street_number: nil}) +``` + +Check out the [docs](https://hexdocs.pm/dataloader/) for more non-trivial ways of using Dataloader. 
diff --git a/guides/tutorial/mutations.md b/guides/tutorial/mutations.md index eda0e98992..626e700e4c 100644 --- a/guides/tutorial/mutations.md +++ b/guides/tutorial/mutations.md @@ -48,24 +48,24 @@ end ``` > Obviously things can go wrong in a mutation. To learn more about the -> types of error results that Absinthe supports, read [the guide](errors.html). +> types of error results that Absinthe supports, read [the guide](errors.md). ## Authorization This resolver adds a new concept: authorization. The resolution struct -(that is, an [`Absinthe.Resolution`](Absinthe.Resolution.html)) +(that is, an `Absinthe.Resolution`) passed to the resolver as the third argument carries along with it the Absinthe context, a data structure that serves as the integration point with external mechanisms---like a Plug that authenticates the current user. You can learn more about how the context can be used in -the [Context and Authentication](context-and-authentication.html) +the [Context and Authentication](context-and-authentication.md) guide. Going back to the resolver code: - If the match for a current user is successful, the underlying `Blog.Content.create_post/2` function is invoked. It will return a - tuple suitable for return. (To read the Ecto-related nitty gritty, + tuple suitable for return. (To read the Ecto-related nitty-gritty, check out the [absinthe_tutorial](https://github.com/absinthe-graphql/absinthe_tutorial) repository.) - If the match for a current user isn't successful, the fall-through @@ -73,4 +73,4 @@ Going back to the resolver code: ## Next Step -Now let's take a look at [more complex arguments](complex-arguments.html). +Now let's take a look at [more complex arguments](complex-arguments.md) diff --git a/guides/tutorial/our-first-query.md b/guides/tutorial/our-first-query.md index 088d309642..3b1f16445e 100644 --- a/guides/tutorial/our-first-query.md +++ b/guides/tutorial/our-first-query.md @@ -16,7 +16,7 @@ support: To do this we're going to need a schema. 
Let's create some basic types for our schema, starting with a `:post`. GraphQL has several fundamental types on top of which all of our types will be -built. The [Object](Absinthe.Type.Object.html) type is the right one +built. The `Absinthe.Type.Object` type is the right one to use when representing a set of key value pairs. Since our `Post` Ecto schema lives in the `Blog.Content` Phoenix @@ -39,7 +39,7 @@ end > The GraphQL specification requires that type names be unique, TitleCased words. > Absinthe does this automatically for us, extrapolating from our type identifier -> (in this case `:post` gives us `"Post"`. If really needed, we could provide a +> (in this case `:post` gives us `"Post"`). If really needed, we could provide a > custom type name as a `:name` option to the `object` macro. If you're curious what the type `:id` is used by the `:id` field, see @@ -72,8 +72,8 @@ end ``` > For more information on the macros available to build a schema, see -> their definitions in [Absinthe.Schema](Absinthe.Schema.html) and -> [Absinthe.Schema.Notation](Absinthe.Schema.Notation.html). +> their definitions in `Absinthe.Schema` and +> `Absinthe.Schema.Notation`. This uses a resolver module we've created (again, to match the Phoenix context naming) at `blog_web/resolvers/content.ex`: @@ -104,7 +104,7 @@ which is where all the domain logic for posts lives, invoking its `list_posts/0` function, then returns the posts in an `:ok` tuple. > Resolvers can return a wide variety of results, to include errors and configuration -> for [advanced plugins](middleware-and-plugins.html) that further process the data. +> for [advanced plugins](middleware-and-plugins.md) that further process the data. 
> > If you're asking yourself what the implementation of the domain logic looks like, and exactly how > the related Ecto schemas are built, read through the code in the [absinthe_tutorial](http://github.com/absinthe-graphql/absinthe_tutorial) @@ -149,12 +149,12 @@ Absinthe does a number of sanity checks during compilation, so if you misspell a Once it's up-and-running, take a look at [http://localhost:4000/api/graphiql](http://localhost:4000/api/graphiql): - + Make sure that the `URL` is pointing to the correct place and press the play button. If everything goes according to plan, you should see something like this: - + ## Next Step -Now let's look at how we can [add arguments to our queries](query-arguments.html). +Now let's look at how we can [add arguments to our queries](query-arguments.md). diff --git a/guides/tutorial/query-arguments.md b/guides/tutorial/query-arguments.md index 72cbf5a4dc..3224a7bd55 100644 --- a/guides/tutorial/query-arguments.md +++ b/guides/tutorial/query-arguments.md @@ -103,7 +103,7 @@ defmodule BlogWeb.Schema do end ``` -Now lets use the argument in our resolver. In `blog_web/resolvers/accounts.ex`: +Now let's use the argument in our resolver. In `blog_web/resolvers/accounts.ex`: ```elixir defmodule BlogWeb.Resolvers.Accounts do @@ -151,7 +151,7 @@ type, `:published_at`. The GraphQL specification doesn't define any official date or time types, but it does support custom scalar types (you can read more -about them in the [related guide](custom-scalars.html), and +about them in the [related guide](custom-scalars.md)), and Absinthe ships with several built-in scalar types. We'll use `:naive_datetime` (which doesn't include timezone information) here. @@ -181,7 +181,7 @@ import_types Absinthe.Type.Custom ``` > For more information about how types are imported, -> read [the guide on the topic](importing-types.html). +> read [the guide on the topic](importing-types.md). 
> > For now, just remember that `import_types` should _only_ be > used in top-level schema module. (Think of it like a manifest.) @@ -227,7 +227,7 @@ end ``` For the resolver, we've added another function head to -`Resolvers.Content.find_posts/3`. This illustrates how you can use the +`Resolvers.Content.list_posts/3`. This illustrates how you can use the first argument to a resolver to match the parent object of a field. In this case, that parent object would be a `Blog.Accounts.User` Ecto schema: @@ -244,13 +244,13 @@ end ``` Here we pass on the user and arguments to the domain logic function, -`Blog.Content.list_posts/2`, which will find the posts for the user +`Blog.Content.list_posts/3`, which will find the posts for the user and date (if it's provided; the `:date` argument is optional). The resolver, just as when it's used for the top level query `:posts`, returns the posts in an `:ok` tuple. > Check out the full implementation of logic for -> `Blog.Content.list_posts/2`--and some simple seed data--in +> `Blog.Content.list_posts/3`--and some simple seed data--in > the > [absinthe_tutorial](https://github.com/absinthe-graphql/absinthe_tutorial) repository. @@ -261,8 +261,8 @@ with the query. It should look something like this: - + ## Next Step -Next up, we look at how to modify our data using [mutations](mutations.html). +Next up, we look at how to modify our data using [mutations](mutations.md). diff --git a/guides/tutorial/start.md b/guides/tutorial/start.md index 223caa38b2..54be0e9399 100644 --- a/guides/tutorial/start.md +++ b/guides/tutorial/start.md @@ -12,12 +12,12 @@ Before you start, it's a good idea to have some background into GraphQL in gener ## The Example - The tutorial expects you to have a properly set-up [Phoenix application](http://www.phoenixframework.org/docs/up-and-running) with absinthe and absinthe_plug added to the dependencies. 
+ The tutorial expects you to have a properly set-up [Phoenix application](https://hexdocs.pm/phoenix/installation.html) with [absinthe](https://hex.pm/packages/absinthe) and [absinthe_plug](https://hex.pm/packages/absinthe_plug) added to the dependencies. > If you'd like to cheat, you can find the finished code for the tutorial -> in the Absinthe Example +> in the [Absinthe Example](https://github.com/absinthe-graphql/absinthe_tutorial) > project on GitHub. ## First Step -Let's get started with [our first query](our-first-query.html)! +Let's get started with [our first query](our-first-query.md)! diff --git a/guides/tutorial/subscriptions.md b/guides/tutorial/subscriptions.md index a63aecda24..dadaafcdf7 100644 --- a/guides/tutorial/subscriptions.md +++ b/guides/tutorial/subscriptions.md @@ -22,7 +22,7 @@ In `mix.exs` ```elixir defp deps do [ - {:absinthe_phoenix, "~> 1.4.0"} + {:absinthe_phoenix, "~> 1.5"} << other deps >> ] ``` @@ -42,32 +42,32 @@ In `lib/blog/application.ex`: -The lets add a configuration to the phoenix endpoint so it can provide some callbacks Absinthe expects, please note while this guide uses phoenix Absinthes support for Subscriptions is good enough to be used without websockets even without a browser. +The lets add a configuration to the phoenix endpoint so it can provide some callbacks Absinthe expects, please note while this guide uses phoenix. Absinthe's support for Subscriptions is good enough to be used without websockets even without a browser. 
In `lib/blog_web/endpoint.ex`: ```elixir defmodule BlogWeb.Endpoint do - use Phoenix.Endpoint, otp_app: :blog - use Absinthe.Phoenix.Endpoint + use Phoenix.Endpoint, otp_app: :blog # this line should already exist + use Absinthe.Phoenix.Endpoint # add this line << rest of the file>> ``` -The pubsub stuff is now set up lets configure our sockets +The `PubSub` stuff is now set up, let's configure our sockets In `lib/blog_web/channels/user_socket.ex` ``` elixir defmodule BlogWeb.UserSocket do - use Phoenix.Socket - use Absinthe.Phoenix.Socket, schema: BlogWeb.Schema + use Phoenix.Socket # this line should already exist + use Absinthe.Phoenix.Socket, schema: BlogWeb.Schema # add << rest of file>> ``` -Lets not configure GraphQL to use this socket. +Let's now configure GraphQL to use this Socket. In `lib/blog_web/router.ex` : @@ -85,7 +85,7 @@ defmodule BlogWeb.Router do forward "/graphiql", Absinthe.Plug.GraphiQL, schema: BlogWeb.Schema, - socket: BlogWeb.UserSocket + socket: BlogWeb.UserSocket # add this line forward "/", Absinthe.Plug, @@ -96,7 +96,7 @@ end ``` -Now lets set up a subscription root object in our Schema to listen to an event. For this subscription we can set it up to listen every time a new post is created. +Now let/s set up a subscription root object in our Schema to listen for an event. For this subscription we can set it up to listen every time a new post is created. In `blog_web/schema.ex` : @@ -115,11 +115,10 @@ end ``` The `new_post` field is a pretty regular field only new thing here is the `config` macro, this is -here to help us know which clients have subscribed to which fields. Much like websockets subscriptions work by allowing t a client to subscribe to a topic. +here to help us know which clients have subscribed to which fields. Much like WebSockets subscriptions work by allowing t a client to subscribe to a topic. Topics are scoped to a field and for now we shall use `*` to indicate we care about all the posts, and that's it! 
- If you ran the request at this moment you would get a nice message telling you that your subscriptions will appear once after they are published but you create a post and alas! no data what cut? Once a subscription is set up it waits for a target event to get published in order for us to collect this information we need to publish to this subscription @@ -131,6 +130,7 @@ def create_post(_parent, args, %{context: %{current_user: user}}) do # Blog.Content.create_post(user, args) case Blog.Content.create_post(user, args) do {:ok, post} -> + # add this line in Absinthe.Subscription.publish(BlogWeb.Endpoint, post, new_post: "*" ) @@ -142,6 +142,6 @@ def create_post(_parent, args, %{context: %{current_user: user}}) do end ``` -With this open a tab and run the query at the top of this section, then open another tab and run a mutation to add a post you should see a result in the other tab have fun. +With this, open a tab and run the query at the top of this section. Then open another tab and run a mutation to add a post you should see a result in the other tab have fun. - + diff --git a/guides/upgrading/v1.4.md b/guides/upgrading/v1.4.md index 47f56ba692..c4385a2c53 100644 --- a/guides/upgrading/v1.4.md +++ b/guides/upgrading/v1.4.md @@ -2,7 +2,7 @@ > This information is extracted and expanded from the CHANGELOG. -This version included [subscriptions](subscriptions.html), and also came packaged with a number of improvements that required breaking changes. +This version included [subscriptions](subscriptions.md), and also came packaged with a number of improvements that required breaking changes. The breaking changes primarily affect middleware and plugin authors, but some changes (like `null` handling and changes to error messages) warrant review by all Absinthe users. @@ -15,7 +15,7 @@ Default middleware are now applied eagerly. Although a small change, this will a Before v1.4, the default middleware was applied "lazily". 
What this means is if you had a simple field like: ```elixir -object :user +object :user do field :name, :string end ``` @@ -93,7 +93,7 @@ def pipeline(pipeline, exec) do end ``` -The reason for this is that you can also access the `context` within the `exec` value. When using something like [Dataloader](dataloader.html), it's important to have easy to the context +The reason for this is that you can also access the `context` within the `exec` value. When using something like [Dataloader](dataloader.md), it's important to have easy to the context ## Calling All Resolvers: The Null Literal Has Arrived diff --git a/guides/upgrading/v1.5.md b/guides/upgrading/v1.5.md new file mode 100644 index 0000000000..a5c245c96e --- /dev/null +++ b/guides/upgrading/v1.5.md @@ -0,0 +1,38 @@ +# Upgrading to v1.5 + +> This information is extracted and expanded from the CHANGELOG. + +This version includes: +* Schema compilation phases +* SDL based schema definitions +* SDL rendering +* Telemetry based instrumentation + +Existing macro-based schemas will work as-is, but make sure to note that the schema pipeline executes at compile time. + +## Breaking changes + +### Default values evaluated at compile time + +Default values are evaluated at compile time. For example `default_value: DateTime.utc_now()` will have its time set at compile time. You probably don't want this :) + +### Scalar output validation + +Scalar outputs are now type checked and will raise exceptions if the result tries to send the wrong data type in the result. + +### Variable types validation + +Variable types must now align exactly with the argument type. Previously Absinthe allowed variables of different types to be used by accident as long as the data parsed. + +### Field name validation + +Added a schema phase to check the validity of field names according to GraphQL spec. Remove the `Absinthe.Phase.Schema.Validation.NamesMustBeValid` from the schema pipeline if you need to retain the previous behavior. 
+ +### `Absinthe.Subscription.PubSub` + +Added `node_name/0` callback to `Absinthe.Subscription.PubSub` behaviour. To retain old behaviour, implement this callback to return `Kernel.node/0`. + +### `Absinthe.Traversal` + +Removed the un-used `Absinthe.Traversal` module. + diff --git a/lib/absinthe.ex b/lib/absinthe.ex index 25eb6f3343..45177bae4e 100644 --- a/lib/absinthe.ex +++ b/lib/absinthe.ex @@ -7,6 +7,13 @@ defmodule Absinthe do includes guides, API information for important modules, and links to useful resources. """ + defmodule SerializationError do + @moduledoc """ + An error during serialization. + """ + defexception message: "serialization failed" + end + defmodule ExecutionError do @moduledoc """ An error during execution. @@ -29,12 +36,13 @@ defmodule Absinthe do | boolean | binary | atom + | result_selection_t | [result_selection_t] } @type result_error_t :: %{message: String.t()} - | %{message: String.t(), locations: [%{line: integer, column: integer}]} + | %{message: String.t(), locations: [%{line: pos_integer, column: integer}]} @type result_t :: %{data: nil | result_selection_t} @@ -83,6 +91,7 @@ defmodule Absinthe do root_value: term, operation_name: String.t(), analyze_complexity: boolean, + variables: %{optional(String.t()) => any()}, max_complexity: non_neg_integer | :infinity ] diff --git a/lib/absinthe/adapter.ex b/lib/absinthe/adapter.ex index 16da891d95..520302d4cc 100644 --- a/lib/absinthe/adapter.ex +++ b/lib/absinthe/adapter.ex @@ -8,12 +8,12 @@ defmodule Absinthe.Adapter do Adapters aren't a part of GraphQL, but a utility that Absinthe adds so that both client and server can use use conventions most natural to them. - Absinthe ships with two adapters: + Absinthe ships with four adapters: * `Absinthe.Adapter.LanguageConventions`, which expects schemas to be defined in `snake_case` (the standard Elixir convention), translating to/from `camelCase` for incoming query documents and outgoing results. (This is the default as of v0.3.) 
- * `Absinthe.Adapter.Underscore`, which is similar to the `LanguageConventions` + * `Absinthe.Adapter.Underscore`, which is similar to the `Absinthe.Adapter.LanguageConventions` adapter but converts all incoming identifiers to underscores and does not modify outgoing identifiers (since those are already expected to be underscores). Unlike `Absinthe.Adapter.Passthrough` this does not break @@ -21,6 +21,9 @@ defmodule Absinthe.Adapter do * `Absinthe.Adapter.Passthrough`, which is a no-op adapter and makes no modifications. (Note at the current time this does not support introspection if you're using camelized conventions). + * `Absinthe.Adapter.StrictLanguageConventions`, which expects schemas to be + defined in `snake_case`, translating to `camelCase` for outgoing results. + This adapter requires incoming query documents to use `camelCase`. To set an adapter, you pass a configuration option at runtime: @@ -108,7 +111,7 @@ defmodule Absinthe.Adapter do end ``` """ - @callback to_internal_name(binary, role_t) :: binary + @callback to_internal_name(binary | nil, role_t) :: binary | nil @doc """ Convert a name from an internal name to an external name. @@ -124,5 +127,5 @@ defmodule Absinthe.Adapter do end ``` """ - @callback to_external_name(binary, role_t) :: binary + @callback to_external_name(binary | nil, role_t) :: binary | nil end diff --git a/lib/absinthe/adapter/language_conventions.ex b/lib/absinthe/adapter/language_conventions.ex index b4ea44d3ef..cac3811986 100644 --- a/lib/absinthe/adapter/language_conventions.ex +++ b/lib/absinthe/adapter/language_conventions.ex @@ -6,11 +6,11 @@ defmodule Absinthe.Adapter.LanguageConventions do This defines an adapter that supports GraphQL query documents in their conventional (in JS) camelcase notation, while allowing the schema to be defined using conventional (in Elixir) underscore (snakecase) notation, and - tranforming the names as needed for lookups, results, and error messages. 
+ transforming the names as needed for lookups, results, and error messages. For example, this document: - ``` + ```graphql { myUser: createUser(userId: 2) { firstName @@ -47,6 +47,7 @@ defmodule Absinthe.Adapter.LanguageConventions do """ @doc "Converts a camelCase to snake_case" + @impl Absinthe.Adapter def to_internal_name(nil, _role) do nil end @@ -55,16 +56,13 @@ defmodule Absinthe.Adapter.LanguageConventions do "__" <> to_internal_name(camelized_name, role) end - def to_internal_name(camelized_name, :operation) do - camelized_name - end - - def to_internal_name(camelized_name, _role) do + def to_internal_name(camelized_name, _role) when is_binary(camelized_name) do camelized_name |> Macro.underscore() end @doc "Converts a snake_case name to camelCase" + @impl Absinthe.Adapter def to_external_name(nil, _role) do nil end @@ -77,7 +75,7 @@ defmodule Absinthe.Adapter.LanguageConventions do name |> Utils.camelize() end - def to_external_name(underscored_name, _role) do + def to_external_name(underscored_name, _role) when is_binary(underscored_name) do underscored_name |> Utils.camelize(lower: true) end diff --git a/lib/absinthe/adapter/strict_language_conventions.ex b/lib/absinthe/adapter/strict_language_conventions.ex new file mode 100644 index 0000000000..e4a10bbafe --- /dev/null +++ b/lib/absinthe/adapter/strict_language_conventions.ex @@ -0,0 +1,62 @@ +defmodule Absinthe.Adapter.StrictLanguageConventions do + @moduledoc """ + Strict version of `Absinthe.Adapter.LanguageConventions` that will reject + improperly formatted external names. + + For example, this document: + + ```graphql + { + create_user(user_id: 2) { + first_name + last_name + } + } + ``` + + Would result in name-mismatch errors returned to the client. + + The client should instead send the camelcase variant of the names: + + ```graphql + { + createUser(userId: 2) { + firstName + lastName + } + } + ``` + + See `Absinthe.Adapter.LanguageConventions` for more information. 
+ """ + + use Absinthe.Adapter + + @doc """ + Converts a camelCase to snake_case + + Returns `nil` if the converted internal name does not match the converted external name. + + See `Absinthe.Adapter.LanguageConventions.to_internal_name/2` + """ + @impl Absinthe.Adapter + def to_internal_name(external_name, role) do + internal_name = Absinthe.Adapter.LanguageConventions.to_internal_name(external_name, role) + + if external_name == Absinthe.Adapter.LanguageConventions.to_external_name(internal_name, role) do + internal_name + else + nil + end + end + + @doc """ + Converts a snake_case to camelCase + + See `Absinthe.Adapter.LanguageConventions.to_external_name/2` + """ + @impl Absinthe.Adapter + def to_external_name(internal_name, role) do + Absinthe.Adapter.LanguageConventions.to_external_name(internal_name, role) + end +end diff --git a/lib/absinthe/blueprint.ex b/lib/absinthe/blueprint.ex index 88baa1f452..ff66e5673d 100644 --- a/lib/absinthe/blueprint.ex +++ b/lib/absinthe/blueprint.ex @@ -9,32 +9,41 @@ defmodule Absinthe.Blueprint do alias __MODULE__ defstruct operations: [], - types: [], directives: [], fragments: [], name: nil, + schema_definitions: [], schema: nil, + prototype_schema: nil, adapter: nil, + initial_phases: [], # Added by phases + telemetry: %{}, flags: %{}, errors: [], input: nil, + source: nil, execution: %Blueprint.Execution{}, result: %{} @type t :: %__MODULE__{ operations: [Blueprint.Document.Operation.t()], - types: [Blueprint.Schema.t()], + schema_definitions: [Blueprint.Schema.SchemaDefinition.t()], directives: [Blueprint.Schema.DirectiveDefinition.t()], name: nil | String.t(), fragments: [Blueprint.Document.Fragment.Named.t()], schema: nil | Absinthe.Schema.t(), + prototype_schema: nil | Absinthe.Schema.t(), adapter: nil | Absinthe.Adapter.t(), # Added by phases + telemetry: map, errors: [Absinthe.Phase.Error.t()], flags: flags_t, + input: nil | Absinthe.Language.Document.t(), + source: nil | String.t() | Absinthe.Language.Source.t(), 
execution: Blueprint.Execution.t(), - result: result_t + result: result_t, + initial_phases: [Absinthe.Phase.t()] } @type result_t :: %{ @@ -44,7 +53,7 @@ defmodule Absinthe.Blueprint do } @type node_t :: - Blueprint.t() + t() | Blueprint.Directive.t() | Blueprint.Document.t() | Blueprint.Schema.t() @@ -80,6 +89,19 @@ defmodule Absinthe.Blueprint do found end + @doc false + # This is largely a debugging tool which replaces `schema_node` struct values + # with just the type identifier, rendering the blueprint tree much easier to read + def __compress__(blueprint) do + prewalk(blueprint, fn + %{schema_node: %{identifier: id}} = node -> + %{node | schema_node: id} + + node -> + node + end) + end + @spec fragment(t, String.t()) :: nil | Blueprint.Document.Fragment.Named.t() def fragment(blueprint, name) do Enum.find(blueprint.fragments, &(&1.name == name)) @@ -126,4 +148,75 @@ defmodule Absinthe.Blueprint do %{blueprint | operations: ops} end + + @doc """ + Append the given field or fields to the given type + """ + def extend_fields(blueprint = %Blueprint{}, ext_blueprint = %Blueprint{}) do + ext_types = types_by_name(ext_blueprint) + + schema_defs = + for schema_def = %{type_definitions: type_defs} <- blueprint.schema_definitions do + type_defs = + for type_def <- type_defs do + case ext_types[type_def.name] do + nil -> + type_def + + %{fields: new_fields} -> + %{type_def | fields: type_def.fields ++ new_fields} + end + end + + %{schema_def | type_definitions: type_defs} + end + + %{blueprint | schema_definitions: schema_defs} + end + + def extend_fields(blueprint, ext_blueprint) when is_atom(ext_blueprint) do + extend_fields(blueprint, ext_blueprint.__absinthe_blueprint__()) + end + + def add_field(blueprint = %Blueprint{}, type_def_name, new_field) do + schema_defs = + for schema_def = %{type_definitions: type_defs} <- blueprint.schema_definitions do + type_defs = + for type_def <- type_defs do + if type_def.name == type_def_name do + %{type_def | fields: 
type_def.fields ++ List.wrap(new_field)} + else + type_def + end + end + + %{schema_def | type_definitions: type_defs} + end + + %{blueprint | schema_definitions: schema_defs} + end + + def find_field(%{fields: fields}, name) do + Enum.find(fields, fn %{name: field_name} -> field_name == name end) + end + + @doc """ + Index the types by their name + """ + def types_by_name(blueprint = %Blueprint{}) do + for %{type_definitions: type_defs} <- blueprint.schema_definitions, + type_def <- type_defs, + into: %{} do + {type_def.name, type_def} + end + end + + def types_by_name(module) when is_atom(module) do + types_by_name(module.__absinthe_blueprint__()) + end + + defimpl Inspect do + defdelegate inspect(term, options), + to: Absinthe.Schema.Notation.SDL.Render + end end diff --git a/lib/absinthe/blueprint/directive.ex b/lib/absinthe/blueprint/directive.ex index 32fd9abca9..5f4198fbb4 100644 --- a/lib/absinthe/blueprint/directive.ex +++ b/lib/absinthe/blueprint/directive.ex @@ -12,30 +12,38 @@ defmodule Absinthe.Blueprint.Directive do # Added by phases schema_node: nil, flags: %{}, - errors: [] + errors: [], + __reference__: nil, + __private__: [] ] @type t :: %__MODULE__{ name: String.t(), arguments: [Blueprint.Input.Argument.t()], - source_location: nil | Blueprint.Document.SourceLocation.t(), + source_location: nil | Blueprint.SourceLocation.t(), schema_node: nil | Absinthe.Type.Directive.t(), flags: Blueprint.flags_t(), - errors: [Phase.Error.t()] + errors: [Phase.Error.t()], + __reference__: nil, + __private__: [] } @spec expand(t, Blueprint.node_t()) :: {t, map} - def expand(%__MODULE__{schema_node: %{expand: nil}}, node) do + def expand(%__MODULE__{schema_node: nil}, node) do node end - def expand(%__MODULE__{schema_node: %{expand: fun}} = directive, node) do + def expand(%__MODULE__{schema_node: type} = directive, node) do args = Blueprint.Input.Argument.value_map(directive.arguments) - fun.(args, node) - end - def expand(%__MODULE__{schema_node: nil}, node) do 
- node + case Absinthe.Type.function(type, :expand) do + nil -> + # Directive is a no-op + node + + expansion when is_function(expansion) -> + expansion.(args, node) + end end @doc """ @@ -47,8 +55,8 @@ defmodule Absinthe.Blueprint.Directive do def placement(%Blueprint.Document.Fragment.Named{}), do: :fragment_definition def placement(%Blueprint.Document.Fragment.Spread{}), do: :fragment_spread def placement(%Blueprint.Document.Fragment.Inline{}), do: :inline_fragment - def placement(%Blueprint.Document.Operation{}), do: :operation_definition def placement(%Blueprint.Schema.SchemaDefinition{}), do: :schema + def placement(%Blueprint.Schema.SchemaDeclaration{}), do: :schema def placement(%Blueprint.Schema.ScalarTypeDefinition{}), do: :scalar def placement(%Blueprint.Schema.ObjectTypeDefinition{}), do: :object def placement(%Blueprint.Schema.FieldDefinition{}), do: :field_definition diff --git a/lib/absinthe/blueprint/document/field.ex b/lib/absinthe/blueprint/document/field.ex index c7b9aaf860..c44f157e22 100644 --- a/lib/absinthe/blueprint/document/field.ex +++ b/lib/absinthe/blueprint/document/field.ex @@ -17,7 +17,10 @@ defmodule Absinthe.Blueprint.Document.Field do source_location: nil, type_conditions: [], schema_node: nil, - complexity: nil + complexity: nil, + # Set during resolution, this holds the concrete parent type + # as determined by the resolution phase. 
+ parent_type: nil ] @type t :: %__MODULE__{ @@ -27,7 +30,7 @@ defmodule Absinthe.Blueprint.Document.Field do directives: [Blueprint.Directive.t()], flags: Blueprint.flags_t(), errors: [Phase.Error.t()], - source_location: nil | Blueprint.Document.SourceLocation.t(), + source_location: nil | Blueprint.SourceLocation.t(), type_conditions: [Blueprint.TypeReference.Name], schema_node: Type.t(), complexity: nil | non_neg_integer diff --git a/lib/absinthe/blueprint/document/fragment/inline.ex b/lib/absinthe/blueprint/document/fragment/inline.ex index 0551d87afb..1c19422529 100644 --- a/lib/absinthe/blueprint/document/fragment/inline.ex +++ b/lib/absinthe/blueprint/document/fragment/inline.ex @@ -22,7 +22,7 @@ defmodule Absinthe.Blueprint.Document.Fragment.Inline do flags: Blueprint.flags_t(), selections: [Blueprint.Document.selection_t()], schema_node: nil | Absinthe.Type.t(), - source_location: nil | Blueprint.Document.SourceLocation.t(), + source_location: nil | Blueprint.SourceLocation.t(), type_condition: Blueprint.TypeReference.Name.t() } end diff --git a/lib/absinthe/blueprint/document/fragment/named.ex b/lib/absinthe/blueprint/document/fragment/named.ex index dae7ba5f6c..fc7e0b138e 100644 --- a/lib/absinthe/blueprint/document/fragment/named.ex +++ b/lib/absinthe/blueprint/document/fragment/named.ex @@ -24,7 +24,7 @@ defmodule Absinthe.Blueprint.Document.Fragment.Named do name: String.t(), selections: [Blueprint.Document.selection_t()], schema_node: nil | Absinthe.Type.t(), - source_location: nil | Blueprint.Document.SourceLocation.t(), + source_location: nil | Blueprint.SourceLocation.t(), flags: Blueprint.flags_t(), type_condition: Blueprint.TypeReference.Name.t() } diff --git a/lib/absinthe/blueprint/document/fragment/named/use.ex b/lib/absinthe/blueprint/document/fragment/named/use.ex index d363312e2c..108eacdd4d 100644 --- a/lib/absinthe/blueprint/document/fragment/named/use.ex +++ b/lib/absinthe/blueprint/document/fragment/named/use.ex @@ -11,6 +11,6 @@ 
defmodule Absinthe.Blueprint.Document.Fragment.Named.Use do @type t :: %__MODULE__{ name: String.t(), - source_location: nil | Blueprint.Document.SourceLocation.t() + source_location: nil | Blueprint.SourceLocation.t() } end diff --git a/lib/absinthe/blueprint/document/fragment/spread.ex b/lib/absinthe/blueprint/document/fragment/spread.ex index 2e74b5e7ce..07bd62c47c 100644 --- a/lib/absinthe/blueprint/document/fragment/spread.ex +++ b/lib/absinthe/blueprint/document/fragment/spread.ex @@ -19,6 +19,6 @@ defmodule Absinthe.Blueprint.Document.Fragment.Spread do errors: [Absinthe.Phase.Error.t()], name: String.t(), flags: Blueprint.flags_t(), - source_location: nil | Blueprint.Document.SourceLocation.t() + source_location: nil | Blueprint.SourceLocation.t() } end diff --git a/lib/absinthe/blueprint/document/operation.ex b/lib/absinthe/blueprint/document/operation.ex index 335c6443d7..7a972f202f 100644 --- a/lib/absinthe/blueprint/document/operation.ex +++ b/lib/absinthe/blueprint/document/operation.ex @@ -31,7 +31,7 @@ defmodule Absinthe.Blueprint.Document.Operation do variable_definitions: [Blueprint.Document.VariableDefinition.t()], variable_uses: [Blueprint.Input.Variable.Use.t()], fragment_uses: [Blueprint.Document.Fragment.Named.Use.t()], - source_location: nil | Blueprint.Document.SourceLocation.t(), + source_location: nil | Blueprint.SourceLocation.t(), schema_node: nil | Absinthe.Type.Object.t(), complexity: nil | non_neg_integer, provided_values: %{String.t() => nil | Blueprint.Input.t()}, diff --git a/lib/absinthe/blueprint/document/source_location.ex b/lib/absinthe/blueprint/document/source_location.ex deleted file mode 100644 index 3ff7df0644..0000000000 --- a/lib/absinthe/blueprint/document/source_location.ex +++ /dev/null @@ -1,25 +0,0 @@ -defmodule Absinthe.Blueprint.Document.SourceLocation do - @moduledoc false - - @enforce_keys [:line] - defstruct line: nil, - column: nil - - @type t :: %__MODULE__{ - line: integer, - column: nil | integer - } - - 
@doc """ - Easily generate a SourceLocation.t give a line and optional column. - """ - @spec at(integer) :: t - def at(line) do - %__MODULE__{line: line} - end - - @spec at(integer, integer) :: t - def at(line, column) do - %__MODULE__{line: line, column: column} - end -end diff --git a/lib/absinthe/blueprint/document/variable_definition.ex b/lib/absinthe/blueprint/document/variable_definition.ex index 21d0e1e103..09726b8a37 100644 --- a/lib/absinthe/blueprint/document/variable_definition.ex +++ b/lib/absinthe/blueprint/document/variable_definition.ex @@ -7,6 +7,7 @@ defmodule Absinthe.Blueprint.Document.VariableDefinition do defstruct [ :name, :type, + directives: [], default_value: nil, source_location: nil, # Added by phases @@ -19,8 +20,9 @@ defmodule Absinthe.Blueprint.Document.VariableDefinition do @type t :: %__MODULE__{ name: String.t(), type: Blueprint.TypeReference.t(), + directives: [Blueprint.Directive.t()], default_value: Blueprint.Input.t(), - source_location: nil | Blueprint.Document.SourceLocation.t(), + source_location: nil | Blueprint.SourceLocation.t(), provided_value: nil | Blueprint.Input.t(), errors: [Absinthe.Phase.Error.t()], flags: Blueprint.flags_t(), diff --git a/lib/absinthe/blueprint/execution.ex b/lib/absinthe/blueprint/execution.ex index 7de9778886..3152506c11 100644 --- a/lib/absinthe/blueprint/execution.ex +++ b/lib/absinthe/blueprint/execution.ex @@ -6,7 +6,7 @@ defmodule Absinthe.Blueprint.Execution do drive a document's execution. Here's how the execution flow works. Given a document like: - ``` + ```graphql { posts { title @@ -27,13 +27,13 @@ defmodule Absinthe.Blueprint.Execution do values within the resolution struct are pulled out and used to update the execution. 
""" + alias Absinthe.Blueprint.Result alias Absinthe.Phase @type acc :: map defstruct [ :adapter, - :root_value, :schema, fragments: %{}, fields_cache: %{}, @@ -56,7 +56,7 @@ defmodule Absinthe.Blueprint.Execution do | Result.Leaf def get(%{execution: %{result: nil} = exec} = bp_root, operation) do - result = %Absinthe.Blueprint.Result.Object{ + result = %Result.Object{ root_value: exec.root_value, emitter: operation } @@ -75,7 +75,7 @@ defmodule Absinthe.Blueprint.Execution do end def get_result(%__MODULE__{result: nil, root_value: root_value}, operation) do - %Absinthe.Blueprint.Result.Object{ + %Result.Object{ root_value: root_value, emitter: operation } diff --git a/lib/absinthe/blueprint/input/argument.ex b/lib/absinthe/blueprint/input/argument.ex index 717a9d5506..d8fc3f259f 100644 --- a/lib/absinthe/blueprint/input/argument.ex +++ b/lib/absinthe/blueprint/input/argument.ex @@ -19,7 +19,7 @@ defmodule Absinthe.Blueprint.Input.Argument do @type t :: %__MODULE__{ name: String.t(), input_value: Blueprint.Input.Value.t(), - source_location: Blueprint.Document.SourceLocation.t(), + source_location: Blueprint.SourceLocation.t(), schema_node: nil | Absinthe.Type.Argument.t(), value: any, flags: Blueprint.flags_t(), @@ -42,6 +42,6 @@ defmodule Absinthe.Blueprint.Input.Argument do arg -> arg end) - |> Map.new(&{&1.schema_node.__reference__.identifier, &1.value}) + |> Map.new(&{&1.schema_node.identifier, &1.value}) end end diff --git a/lib/absinthe/blueprint/input/boolean.ex b/lib/absinthe/blueprint/input/boolean.ex index 28a56ae8fb..121e8c2caa 100644 --- a/lib/absinthe/blueprint/input/boolean.ex +++ b/lib/absinthe/blueprint/input/boolean.ex @@ -17,7 +17,7 @@ defmodule Absinthe.Blueprint.Input.Boolean do value: true | false, flags: Blueprint.flags_t(), schema_node: nil | Absinthe.Type.t(), - source_location: Blueprint.Document.SourceLocation.t(), + source_location: Blueprint.SourceLocation.t(), errors: [Phase.Error.t()] } end diff --git 
a/lib/absinthe/blueprint/input/enum.ex b/lib/absinthe/blueprint/input/enum.ex index 4ab1b58120..13a36f22fa 100644 --- a/lib/absinthe/blueprint/input/enum.ex +++ b/lib/absinthe/blueprint/input/enum.ex @@ -17,7 +17,7 @@ defmodule Absinthe.Blueprint.Input.Enum do value: String.t(), flags: Blueprint.flags_t(), schema_node: nil | Absinthe.Type.t(), - source_location: Blueprint.Document.SourceLocation.t(), + source_location: Blueprint.SourceLocation.t(), errors: [Phase.Error.t()] } end diff --git a/lib/absinthe/blueprint/input/field.ex b/lib/absinthe/blueprint/input/field.ex index d47d1e44ef..aae10559a5 100644 --- a/lib/absinthe/blueprint/input/field.ex +++ b/lib/absinthe/blueprint/input/field.ex @@ -19,7 +19,7 @@ defmodule Absinthe.Blueprint.Input.Field do input_value: Blueprint.Input.Value.t(), flags: Blueprint.flags_t(), schema_node: nil | Type.Field.t(), - source_location: Blueprint.Document.SourceLocation.t(), + source_location: Blueprint.SourceLocation.t(), errors: [Absinthe.Phase.Error.t()] } end diff --git a/lib/absinthe/blueprint/input/float.ex b/lib/absinthe/blueprint/input/float.ex index d850022794..08a5e73899 100644 --- a/lib/absinthe/blueprint/input/float.ex +++ b/lib/absinthe/blueprint/input/float.ex @@ -16,7 +16,7 @@ defmodule Absinthe.Blueprint.Input.Float do @type t :: %__MODULE__{ value: float, flags: Blueprint.flags_t(), - source_location: Blueprint.Document.SourceLocation.t(), + source_location: Blueprint.SourceLocation.t(), schema_node: nil | Absinthe.Type.t(), errors: [Absinthe.Phase.Error.t()] } diff --git a/lib/absinthe/blueprint/input/integer.ex b/lib/absinthe/blueprint/input/integer.ex index adbbb141be..d6b259f25b 100644 --- a/lib/absinthe/blueprint/input/integer.ex +++ b/lib/absinthe/blueprint/input/integer.ex @@ -16,7 +16,7 @@ defmodule Absinthe.Blueprint.Input.Integer do @type t :: %__MODULE__{ value: integer, flags: Blueprint.flags_t(), - source_location: Blueprint.Document.SourceLocation.t(), + source_location: Blueprint.SourceLocation.t(), 
schema_node: nil | Absinthe.Type.t(), errors: [Phase.Error.t()] } diff --git a/lib/absinthe/blueprint/input/list.ex b/lib/absinthe/blueprint/input/list.ex index 2e18725ab5..068a45fddd 100644 --- a/lib/absinthe/blueprint/input/list.ex +++ b/lib/absinthe/blueprint/input/list.ex @@ -17,7 +17,7 @@ defmodule Absinthe.Blueprint.Input.List do items: [Blueprint.Input.Value.t()], flags: Blueprint.flags_t(), schema_node: nil | Absinthe.Type.t(), - source_location: Blueprint.Document.SourceLocation.t(), + source_location: Blueprint.SourceLocation.t(), errors: [Phase.Error.t()] } diff --git a/lib/absinthe/blueprint/input/null.ex b/lib/absinthe/blueprint/input/null.ex index d37f627ae5..e457e1ec64 100644 --- a/lib/absinthe/blueprint/input/null.ex +++ b/lib/absinthe/blueprint/input/null.ex @@ -14,7 +14,7 @@ defmodule Absinthe.Blueprint.Input.Null do @type t :: %__MODULE__{ flags: Blueprint.flags_t(), schema_node: nil | Absinthe.Type.t(), - source_location: Blueprint.Document.SourceLocation.t(), + source_location: Blueprint.SourceLocation.t(), errors: [Phase.Error.t()] } end diff --git a/lib/absinthe/blueprint/input/object.ex b/lib/absinthe/blueprint/input/object.ex index 80a7c83d55..943cecfcc8 100644 --- a/lib/absinthe/blueprint/input/object.ex +++ b/lib/absinthe/blueprint/input/object.ex @@ -20,7 +20,7 @@ defmodule Absinthe.Blueprint.Input.Object do nil | Absinthe.Type.InputObject.t() | Absinthe.Type.NonNull.t(Absinthe.Type.InputObject.t()), - source_location: Blueprint.Document.SourceLocation.t(), + source_location: Blueprint.SourceLocation.t(), errors: [Absinthe.Phase.Error.t()] } end diff --git a/lib/absinthe/blueprint/input/raw_value.ex b/lib/absinthe/blueprint/input/raw_value.ex index 4bc28bba4e..086d66876f 100644 --- a/lib/absinthe/blueprint/input/raw_value.ex +++ b/lib/absinthe/blueprint/input/raw_value.ex @@ -1,12 +1,14 @@ defmodule Absinthe.Blueprint.Input.RawValue do @moduledoc false + alias Absinthe.Blueprint.Input.Object + @enforce_keys [:content] defstruct [ 
:content ] @type t :: %__MODULE__{ - content: Absinthe.Blueprint.Input.t() + content: Object.t() } end diff --git a/lib/absinthe/blueprint/input/string.ex b/lib/absinthe/blueprint/input/string.ex index ed989353d1..0d29b9c1a0 100644 --- a/lib/absinthe/blueprint/input/string.ex +++ b/lib/absinthe/blueprint/input/string.ex @@ -17,7 +17,7 @@ defmodule Absinthe.Blueprint.Input.String do value: String.t(), flags: Blueprint.flags_t(), schema_node: nil | Absinthe.Type.t(), - source_location: Blueprint.Document.SourceLocation.t(), + source_location: Blueprint.SourceLocation.t(), errors: [Phase.Error.t()] } end diff --git a/lib/absinthe/blueprint/input/value.ex b/lib/absinthe/blueprint/input/value.ex index e41724152f..c35e4fcb6e 100644 --- a/lib/absinthe/blueprint/input/value.ex +++ b/lib/absinthe/blueprint/input/value.ex @@ -38,4 +38,14 @@ defmodule Absinthe.Blueprint.Input.Value do def valid?(%{normalized: %Absinthe.Blueprint.Input.Null{}}), do: true def valid?(%{normalized: nil}), do: false def valid?(%{normalized: _}), do: true + + def build(value) do + %Absinthe.Blueprint.Input.Value{ + data: value, + normalized: nil, + raw: %Absinthe.Blueprint.Input.RawValue{ + content: Absinthe.Blueprint.Input.parse(value) + } + } + end end diff --git a/lib/absinthe/blueprint/input/variable.ex b/lib/absinthe/blueprint/input/variable.ex index 0133589f08..d06cc08399 100644 --- a/lib/absinthe/blueprint/input/variable.ex +++ b/lib/absinthe/blueprint/input/variable.ex @@ -15,7 +15,7 @@ defmodule Absinthe.Blueprint.Input.Variable do @type t :: %__MODULE__{ name: String.t(), - source_location: nil | Blueprint.Document.SourceLocation.t(), + source_location: nil | Blueprint.SourceLocation.t(), # Added by phases flags: Blueprint.flags_t(), errors: [Phase.Error.t()] diff --git a/lib/absinthe/blueprint/input/variable/use.ex b/lib/absinthe/blueprint/input/variable/use.ex index b495969ceb..9bd773b9f7 100644 --- a/lib/absinthe/blueprint/input/variable/use.ex +++ 
b/lib/absinthe/blueprint/input/variable/use.ex @@ -11,6 +11,6 @@ defmodule Absinthe.Blueprint.Input.Variable.Use do @type t :: %__MODULE__{ name: String.t(), - source_location: nil | Blueprint.Document.SourceLocation.t() + source_location: nil | Blueprint.SourceLocation.t() } end diff --git a/lib/absinthe/blueprint/result/leaf.ex b/lib/absinthe/blueprint/result/leaf.ex index d7409d17fd..fb68c9c535 100644 --- a/lib/absinthe/blueprint/result/leaf.ex +++ b/lib/absinthe/blueprint/result/leaf.ex @@ -14,7 +14,7 @@ defmodule Absinthe.Blueprint.Result.Leaf do @type t :: %__MODULE__{ emitter: Blueprint.Document.Field.t(), - value: Blueprint.Document.Resolution.node_t(), + value: Blueprint.Execution.node_t(), errors: [Phase.Error.t()], flags: Blueprint.flags_t(), extensions: %{any => any} diff --git a/lib/absinthe/blueprint/result/list.ex b/lib/absinthe/blueprint/result/list.ex index af78c02ad5..40d41896e9 100644 --- a/lib/absinthe/blueprint/result/list.ex +++ b/lib/absinthe/blueprint/result/list.ex @@ -14,7 +14,7 @@ defmodule Absinthe.Blueprint.Result.List do @type t :: %__MODULE__{ emitter: Blueprint.Document.Field.t(), - values: [Blueprint.Document.Resolution.node_t()], + values: [Blueprint.Execution.node_t()], errors: [Phase.Error.t()], flags: Blueprint.flags_t(), extensions: %{any => any} diff --git a/lib/absinthe/blueprint/result/object.ex b/lib/absinthe/blueprint/result/object.ex index 4f4edf1ab1..745dc16062 100644 --- a/lib/absinthe/blueprint/result/object.ex +++ b/lib/absinthe/blueprint/result/object.ex @@ -15,7 +15,7 @@ defmodule Absinthe.Blueprint.Result.Object do @type t :: %__MODULE__{ emitter: Blueprint.Document.Field.t(), - fields: [Blueprint.Document.Resolution.node_t()], + fields: [Blueprint.Execution.node_t()], errors: [Phase.Error.t()], flags: Blueprint.flags_t(), extensions: %{any => any} diff --git a/lib/absinthe/blueprint/schema.ex b/lib/absinthe/blueprint/schema.ex index bf2bedbcdc..f3a8c40c1b 100644 --- a/lib/absinthe/blueprint/schema.ex +++ 
b/lib/absinthe/blueprint/schema.ex @@ -3,11 +3,250 @@ defmodule Absinthe.Blueprint.Schema do alias __MODULE__ + alias Absinthe.Blueprint + + @type directive_t :: Schema.DirectiveDefinition.t() + + @type type_t :: + Blueprint.Schema.EnumTypeDefinition.t() + | Blueprint.Schema.InputObjectTypeDefinition.t() + | Blueprint.Schema.InterfaceTypeDefinition.t() + | Blueprint.Schema.ObjectTypeDefinition.t() + | Blueprint.Schema.ScalarTypeDefinition.t() + | Blueprint.Schema.UnionTypeDefinition.t() + @type t :: - Schema.EnumTypeDefinition.t() - | Schema.InputObjectTypeDefinition.t() - | Schema.InterfaceTypeDefinition.t() - | Schema.ObjectTypeDefinition.t() - | Schema.ScalarTypeDefinition.t() - | Schema.UnionTypeDefinition.t() + Blueprint.Schema.EnumValueDefinition.t() + | Blueprint.Schema.InputValueDefinition.t() + | Blueprint.Schema.SchemaDeclaration.t() + | Blueprint.Schema.SchemaDefinition.t() + | type_t() + | directive_t() + + @doc """ + Lookup a type definition that is part of a schema. + """ + @spec lookup_type(Blueprint.t(), atom) :: nil | Blueprint.Schema.t() + def lookup_type(blueprint, identifier) do + blueprint.schema_definitions + |> List.first() + |> Map.get(:type_definitions) + |> Enum.find(fn + %{identifier: ^identifier} -> + true + + _ -> + false + end) + end + + @doc """ + Lookup a directive definition that is part of a schema. + """ + @spec lookup_directive(Blueprint.t(), atom) :: nil | Blueprint.Schema.directive_t() + def lookup_directive(blueprint, identifier) do + blueprint.schema_definitions + |> List.first() + |> Map.get(:directive_definitions) + |> Enum.find(fn + %{identifier: ^identifier} -> + true + + _ -> + false + end) + end + + def functions(module) do + if function_exported?(module, :functions, 0) do + module.functions + else + [] + end + end + + def build([%Absinthe.Blueprint{} = bp | attrs]) do + build_types(attrs, [bp], []) + end + + defp build_types([], [bp], buffer) do + if buffer != [] do + raise """ + Unused buffer! 
#{inspect(buffer)} + """ + end + + Map.update!(bp, :schema_definitions, &Enum.reverse/1) + end + + # this rather insane scheme lets interior macros get back out to exterior + # scopes so that they can define top level entities as necessary, and then + # return to the regularly scheduled programming. + defp build_types([:stash | rest], [head | tail], buff) do + build_types(rest, tail, [head | buff]) + end + + defp build_types([:pop | rest], remaining, [head | buff]) do + build_types(rest, [head | remaining], buff) + end + + defp build_types([%Schema.SchemaDefinition{} = schema | rest], stack, buff) do + build_types(rest, [schema | stack], buff) + end + + @simple_open [ + Schema.ScalarTypeDefinition, + Schema.ObjectTypeDefinition, + Schema.FieldDefinition, + Schema.EnumTypeDefinition, + Schema.DirectiveDefinition, + Schema.InputObjectTypeDefinition, + Schema.InputValueDefinition, + Schema.InterfaceTypeDefinition, + Schema.UnionTypeDefinition, + Schema.EnumValueDefinition + ] + + defp build_types([%module{} = type | rest], stack, buff) when module in @simple_open do + build_types(rest, [type | stack], buff) + end + + defp build_types([{:import_fields, criterion} | rest], [obj | stack], buff) do + build_types(rest, [push(obj, :imports, criterion) | stack], buff) + end + + defp build_types([{:desc, desc} | rest], [item | stack], buff) do + build_types(rest, [%{item | description: desc} | stack], buff) + end + + defp build_types([{:middleware, middleware} | rest], [field, obj | stack], buff) do + field = Map.update!(field, :middleware, &(middleware ++ &1)) + build_types(rest, [field, obj | stack], buff) + end + + defp build_types([{:config, config} | rest], [field | stack], buff) do + field = %{field | config: config} + build_types(rest, [field | stack], buff) + end + + defp build_types([{:directive, trigger} | rest], [field | stack], buff) do + field = Map.update!(field, :directives, &[trigger | &1]) + build_types(rest, [field | stack], buff) + end + + defp 
build_types([{:trigger, trigger} | rest], [field | stack], buff) do + field = Map.update!(field, :triggers, &[trigger | &1]) + build_types(rest, [field | stack], buff) + end + + defp build_types([{:interface, interface} | rest], [obj | stack], buff) do + obj = Map.update!(obj, :interfaces, &[interface | &1]) + build_types(rest, [obj | stack], buff) + end + + defp build_types([{:__private__, private} | rest], [entity | stack], buff) do + entity = Map.update!(entity, :__private__, &update_private(&1, private)) + build_types(rest, [entity | stack], buff) + end + + defp build_types([{:values, values} | rest], [enum | stack], buff) do + enum = Map.update!(enum, :values, &(List.wrap(values) ++ &1)) + build_types(rest, [enum | stack], buff) + end + + defp build_types([{:sdl, sdl_definitions} | rest], [schema | stack], buff) do + # TODO: Handle directives, etc + build_types(rest, [concat(schema, :type_definitions, sdl_definitions) | stack], buff) + end + + defp build_types([{:locations, locations} | rest], [directive | stack], buff) do + directive = Map.update!(directive, :locations, &(locations ++ &1)) + build_types(rest, [directive | stack], buff) + end + + defp build_types([{attr, value} | rest], [entity | stack], buff) do + entity = %{entity | attr => value} + build_types(rest, [entity | stack], buff) + end + + defp build_types([:close | rest], [%Schema.EnumValueDefinition{} = value, enum | stack], buff) do + build_types(rest, [push(enum, :values, value) | stack], buff) + end + + defp build_types([:close | rest], [%Schema.InputValueDefinition{} = arg, field | stack], buff) do + build_types(rest, [push(field, :arguments, arg) | stack], buff) + end + + defp build_types([:close | rest], [%Schema.FieldDefinition{} = field, obj | stack], buff) do + field = + field + |> Map.update!(:middleware, &Enum.reverse/1) + |> Map.update!(:arguments, &Enum.reverse/1) + |> Map.update!(:triggers, &{:%{}, [], &1}) + |> Map.put(:function_ref, {obj.identifier, field.identifier}) + + 
build_types(rest, [push(obj, :fields, field) | stack], buff) + end + + defp build_types([:close | rest], [%Schema.ObjectTypeDefinition{} = obj, schema | stack], buff) do + obj = Map.update!(obj, :fields, &Enum.reverse/1) + build_types(rest, [push(schema, :type_definitions, obj) | stack], buff) + end + + defp build_types( + [:close | rest], + [%Schema.InputObjectTypeDefinition{} = obj, schema | stack], + buff + ) do + obj = Map.update!(obj, :fields, &Enum.reverse/1) + build_types(rest, [push(schema, :type_definitions, obj) | stack], buff) + end + + defp build_types( + [:close | rest], + [%Schema.InterfaceTypeDefinition{} = iface, schema | stack], + buff + ) do + iface = Map.update!(iface, :fields, &Enum.reverse/1) + build_types(rest, [push(schema, :type_definitions, iface) | stack], buff) + end + + defp build_types([:close | rest], [%Schema.UnionTypeDefinition{} = union, schema | stack], buff) do + build_types(rest, [push(schema, :type_definitions, union) | stack], buff) + end + + defp build_types([:close | rest], [%Schema.DirectiveDefinition{} = dir, schema | stack], buff) do + build_types(rest, [push(schema, :directive_definitions, dir) | stack], buff) + end + + defp build_types([:close | rest], [%Schema.EnumTypeDefinition{} = type, schema | stack], buff) do + type = Map.update!(type, :values, &Enum.reverse/1) + schema = push(schema, :type_definitions, type) + build_types(rest, [schema | stack], buff) + end + + defp build_types([:close | rest], [%Schema.ScalarTypeDefinition{} = type, schema | stack], buff) do + schema = push(schema, :type_definitions, type) + build_types(rest, [schema | stack], buff) + end + + defp build_types([:close | rest], [%Schema.SchemaDefinition{} = schema, bp], buff) do + bp = push(bp, :schema_definitions, schema) + build_types(rest, [bp], buff) + end + + defp push(entity, key, value) do + Map.update!(entity, key, &[value | &1]) + end + + defp concat(entity, key, value) do + Map.update!(entity, key, &(&1 ++ value)) + end + + defp 
update_private(existing_private, private) do + Keyword.merge(existing_private, private, fn + _, v1, v2 -> + update_private(v1, v2) + end) + end end diff --git a/lib/absinthe/blueprint/schema/directive_definition.ex b/lib/absinthe/blueprint/schema/directive_definition.ex index 0c3e84fa62..1a56f146c9 100644 --- a/lib/absinthe/blueprint/schema/directive_definition.ex +++ b/lib/absinthe/blueprint/schema/directive_definition.ex @@ -6,11 +6,18 @@ defmodule Absinthe.Blueprint.Schema.DirectiveDefinition do @enforce_keys [:name] defstruct [ :name, + :module, + :identifier, description: nil, directives: [], arguments: [], locations: [], - errors: [] + repeatable: false, + source_location: nil, + expand: nil, + errors: [], + __reference__: nil, + __private__: [] ] @type t :: %__MODULE__{ @@ -18,6 +25,29 @@ defmodule Absinthe.Blueprint.Schema.DirectiveDefinition do description: nil, arguments: [Blueprint.Schema.InputValueDefinition.t()], locations: [String.t()], + repeatable: boolean(), + source_location: nil | Blueprint.SourceLocation.t(), errors: [Absinthe.Phase.Error.t()] } + + def build(type_def, schema) do + %Absinthe.Type.Directive{ + name: type_def.name, + identifier: type_def.identifier, + description: type_def.description, + args: Blueprint.Schema.ObjectTypeDefinition.build_args(type_def, schema), + locations: type_def.locations |> Enum.sort(), + definition: type_def.module, + repeatable: type_def.repeatable, + expand: type_def.expand + } + end + + @doc false + def functions(), do: [:expand] + + defimpl Inspect do + defdelegate inspect(term, options), + to: Absinthe.Schema.Notation.SDL.Render + end end diff --git a/lib/absinthe/blueprint/schema/enum_type_definition.ex b/lib/absinthe/blueprint/schema/enum_type_definition.ex index 008887a1f0..7afbb43248 100644 --- a/lib/absinthe/blueprint/schema/enum_type_definition.ex +++ b/lib/absinthe/blueprint/schema/enum_type_definition.ex @@ -6,19 +6,81 @@ defmodule Absinthe.Blueprint.Schema.EnumTypeDefinition do @enforce_keys 
[:name] defstruct [ :name, + :identifier, + :description, + :module, values: [], directives: [], + source_location: nil, # Added by phases, flags: %{}, - errors: [] + errors: [], + __reference__: nil, + __private__: [] ] @type t :: %__MODULE__{ name: String.t(), - values: [String.t()], + values: [Blueprint.Schema.EnumValueDefinition.t()], directives: [Blueprint.Directive.t()], + identifier: atom, + source_location: nil | Blueprint.SourceLocation.t(), # Added by phases flags: Blueprint.flags_t(), errors: [Absinthe.Phase.Error.t()] } + def build(type_def, _schema) do + %Absinthe.Type.Enum{ + identifier: type_def.identifier, + name: type_def.name, + values: values_by(type_def, :identifier), + values_by_internal_value: values_by(type_def, :value), + values_by_name: values_by(type_def, :name), + definition: type_def.module, + description: type_def.description + } + end + + def values_by(type_def, key) do + for value_def <- List.flatten(type_def.values), into: %{} do + case value_def do + %Blueprint.Schema.EnumValueDefinition{} -> + value = %Absinthe.Type.Enum.Value{ + name: value_def.name, + value: value_def.value, + enum_identifier: type_def.identifier, + __reference__: value_def.__reference__, + description: value_def.description, + deprecation: value_def.deprecation + } + + {Map.fetch!(value_def, key), value} + + # Values defined via dynamic function calls don't yet get converted + # into proper Blueprints, but it works for now. This will get refactored + # in the future as we build out a general solution for dynamic values. 
+ raw_value -> + name = raw_value |> to_string() |> String.upcase() + + value_def = %{ + name: name, + value: raw_value, + identifier: raw_value + } + + value = %Absinthe.Type.Enum.Value{ + name: name, + value: raw_value, + enum_identifier: type_def.identifier + } + + {Map.fetch!(value_def, key), value} + end + end + end + + defimpl Inspect do + defdelegate inspect(term, options), + to: Absinthe.Schema.Notation.SDL.Render + end end diff --git a/lib/absinthe/blueprint/schema/enum_value_definition.ex b/lib/absinthe/blueprint/schema/enum_value_definition.ex index e04ea22510..d726c5e3b6 100644 --- a/lib/absinthe/blueprint/schema/enum_value_definition.ex +++ b/lib/absinthe/blueprint/schema/enum_value_definition.ex @@ -6,19 +6,26 @@ defmodule Absinthe.Blueprint.Schema.EnumValueDefinition do @enforce_keys [:value] defstruct [ :value, + :name, + :identifier, deprecation: nil, directives: [], source_location: nil, + description: nil, # Added by phases flags: %{}, - errors: [] + module: nil, + errors: [], + __reference__: nil, + __private__: [] ] @type t :: %__MODULE__{ value: String.t(), + description: nil | String.t(), deprecation: nil | Blueprint.Schema.Deprecation.t(), directives: [Blueprint.Directive.t()], - source_location: nil | Blueprint.Document.SourceLocation.t(), + source_location: nil | Blueprint.SourceLocation.t(), # Added by phases flags: Blueprint.flags_t(), errors: [Absinthe.Phase.Error.t()] diff --git a/lib/absinthe/blueprint/schema/field_definition.ex b/lib/absinthe/blueprint/schema/field_definition.ex index 824cc5476c..4197ac3320 100644 --- a/lib/absinthe/blueprint/schema/field_definition.ex +++ b/lib/absinthe/blueprint/schema/field_definition.ex @@ -3,26 +3,46 @@ defmodule Absinthe.Blueprint.Schema.FieldDefinition do alias Absinthe.Blueprint - @enforce_keys [:name, :type] + @enforce_keys [:name] defstruct [ :name, + :identifier, :type, + :module, + description: nil, deprecation: nil, + config: nil, + triggers: [], + default_value: nil, arguments: [], 
directives: [], - # Added by phases + complexity: nil, + source_location: nil, + middleware: [], + function_ref: nil, flags: %{}, - errors: [] + errors: [], + __reference__: nil, + __private__: [] ] @type t :: %__MODULE__{ name: String.t(), + identifier: atom, + description: nil | String.t(), deprecation: nil | Blueprint.Schema.Deprecation.t(), arguments: [Blueprint.Schema.InputValueDefinition.t()], type: Blueprint.TypeReference.t(), directives: [Blueprint.Directive.t()], + source_location: nil | Blueprint.SourceLocation.t(), + # Added by DSL + description: nil | String.t(), + middleware: [any], # Added by phases flags: Blueprint.flags_t(), errors: [Absinthe.Phase.Error.t()] } + + @doc false + def functions(), do: [:config, :complexity, :middleware, :triggers] end diff --git a/lib/absinthe/blueprint/schema/input_object_type_definition.ex b/lib/absinthe/blueprint/schema/input_object_type_definition.ex index b67636b2cd..1cc1bee437 100644 --- a/lib/absinthe/blueprint/schema/input_object_type_definition.ex +++ b/lib/absinthe/blueprint/schema/input_object_type_definition.ex @@ -1,18 +1,23 @@ defmodule Absinthe.Blueprint.Schema.InputObjectTypeDefinition do @moduledoc false - alias Absinthe.Blueprint + alias Absinthe.{Blueprint, Type} @enforce_keys [:name] defstruct [ + :identifier, :name, + :module, description: nil, - interfaces: [], fields: [], + imports: [], directives: [], + source_location: nil, # Added by phases, flags: %{}, - errors: [] + errors: [], + __reference__: nil, + __private__: [] ] @type t :: %__MODULE__{ @@ -20,8 +25,42 @@ defmodule Absinthe.Blueprint.Schema.InputObjectTypeDefinition do description: nil | String.t(), fields: [Blueprint.Schema.InputValueDefinition.t()], directives: [Blueprint.Directive.t()], + source_location: nil | Blueprint.SourceLocation.t(), # Added by phases flags: Blueprint.flags_t(), errors: [Absinthe.Phase.Error.t()] } + + def build(type_def, schema) do + %Type.InputObject{ + identifier: type_def.identifier, + name: 
type_def.name, + fields: build_fields(type_def, schema), + description: type_def.description, + definition: type_def.module + } + end + + def build_fields(type_def, schema) do + for field_def <- type_def.fields, into: %{} do + field = %Type.Field{ + identifier: field_def.identifier, + deprecation: field_def.deprecation, + description: field_def.description, + name: field_def.name, + type: Blueprint.TypeReference.to_type(field_def.type, schema), + definition: type_def.module, + __reference__: field_def.__reference__, + __private__: field_def.__private__, + default_value: field_def.default_value + } + + {field.identifier, field} + end + end + + defimpl Inspect do + defdelegate inspect(term, options), + to: Absinthe.Schema.Notation.SDL.Render + end end diff --git a/lib/absinthe/blueprint/schema/input_value_definition.ex b/lib/absinthe/blueprint/schema/input_value_definition.ex index 5139ac20a0..32d8204bc0 100644 --- a/lib/absinthe/blueprint/schema/input_value_definition.ex +++ b/lib/absinthe/blueprint/schema/input_value_definition.ex @@ -3,26 +3,36 @@ defmodule Absinthe.Blueprint.Schema.InputValueDefinition do alias Absinthe.Blueprint - @enforce_keys [:name, :type] defstruct [ :name, + :identifier, :type, + :module, # InputValueDefinitions can have different placements depending on Whether # they model an argument definition or a value of an input object type # definition placement: :argument_definition, + description: nil, default_value: nil, + default_value_blueprint: nil, directives: [], + source_location: nil, # Added by phases flags: %{}, - errors: [] + errors: [], + __reference__: nil, + __private__: [], + deprecation: nil ] @type t :: %__MODULE__{ name: String.t(), + description: nil | String.t(), type: Blueprint.TypeReference.t(), default_value: Blueprint.Input.t(), + default_value_blueprint: Blueprint.Draft.t(), directives: [Blueprint.Directive.t()], + source_location: nil | Blueprint.SourceLocation.t(), # The struct module of the parent placement: 
:argument_definition | :input_field_definition, # Added by phases diff --git a/lib/absinthe/blueprint/schema/interface_type_definition.ex b/lib/absinthe/blueprint/schema/interface_type_definition.ex index fc502a3aa8..b1fd11be9e 100644 --- a/lib/absinthe/blueprint/schema/interface_type_definition.ex +++ b/lib/absinthe/blueprint/schema/interface_type_definition.ex @@ -2,16 +2,26 @@ defmodule Absinthe.Blueprint.Schema.InterfaceTypeDefinition do @moduledoc false alias Absinthe.Blueprint + alias Absinthe.Blueprint.Schema @enforce_keys [:name] defstruct [ + :identifier, :name, + :module, description: nil, fields: [], directives: [], + interfaces: [], + interface_blueprints: [], + source_location: nil, # Added by phases flags: %{}, - errors: [] + errors: [], + resolve_type: nil, + imports: [], + __reference__: nil, + __private__: [] ] @type t :: %__MODULE__{ @@ -19,8 +29,37 @@ defmodule Absinthe.Blueprint.Schema.InterfaceTypeDefinition do description: nil | String.t(), fields: [Blueprint.Schema.FieldDefinition.t()], directives: [Blueprint.Directive.t()], + interfaces: [String.t()], + interface_blueprints: [Blueprint.Draft.t()], + source_location: nil | Blueprint.SourceLocation.t(), # Added by phases flags: Blueprint.flags_t(), errors: [Absinthe.Phase.Error.t()] } + + def build(type_def, schema) do + %Absinthe.Type.Interface{ + name: type_def.name, + description: type_def.description, + fields: Blueprint.Schema.ObjectTypeDefinition.build_fields(type_def, schema), + identifier: type_def.identifier, + resolve_type: type_def.resolve_type, + definition: type_def.module, + interfaces: type_def.interfaces + } + end + + def find_implementors(iface, type_defs) do + for %Schema.ObjectTypeDefinition{} = obj <- type_defs, + iface.identifier in obj.interfaces, + do: obj.identifier + end + + @doc false + def functions(), do: [:resolve_type] + + defimpl Inspect do + defdelegate inspect(term, options), + to: Absinthe.Schema.Notation.SDL.Render + end end diff --git 
a/lib/absinthe/blueprint/schema/object_type_definition.ex b/lib/absinthe/blueprint/schema/object_type_definition.ex index 309aec1806..8e7bb9d7bd 100644 --- a/lib/absinthe/blueprint/schema/object_type_definition.ex +++ b/lib/absinthe/blueprint/schema/object_type_definition.ex @@ -1,28 +1,98 @@ defmodule Absinthe.Blueprint.Schema.ObjectTypeDefinition do @moduledoc false - alias Absinthe.Blueprint + alias Absinthe.{Blueprint, Type} @enforce_keys [:name] defstruct [ :name, + :identifier, + :module, description: nil, interfaces: [], + interface_blueprints: [], fields: [], directives: [], + is_type_of: nil, + source_location: nil, # Added by phases flags: %{}, - errors: [] + imports: [], + errors: [], + __reference__: nil, + __private__: [] ] @type t :: %__MODULE__{ name: String.t(), + identifier: atom, description: nil | String.t(), fields: [Blueprint.Schema.FieldDefinition.t()], interfaces: [String.t()], + interface_blueprints: [Blueprint.Draft.t()], directives: [Blueprint.Directive.t()], + source_location: nil | Blueprint.SourceLocation.t(), # Added by phases flags: Blueprint.flags_t(), - errors: [Absinthe.Phase.Error.t()] + errors: [Absinthe.Phase.Error.t()], + __private__: Keyword.t() } + + @doc false + def functions(), do: [:is_type_of] + + def build(type_def, schema) do + %Type.Object{ + identifier: type_def.identifier, + name: type_def.name, + description: type_def.description, + fields: build_fields(type_def, schema), + interfaces: type_def.interfaces, + definition: type_def.module, + is_type_of: type_def.is_type_of, + __private__: type_def.__private__ + } + end + + def build_fields(type_def, schema) do + for field_def <- type_def.fields, into: %{} do + field = %Type.Field{ + identifier: field_def.identifier, + middleware: field_def.middleware, + deprecation: field_def.deprecation, + description: field_def.description, + complexity: field_def.complexity, + config: field_def.complexity, + triggers: field_def.triggers, + name: field_def.name, + type: 
Blueprint.TypeReference.to_type(field_def.type, schema), + args: build_args(field_def, schema), + definition: field_def.module, + __reference__: field_def.__reference__, + __private__: field_def.__private__ + } + + {field.identifier, field} + end + end + + def build_args(field_def, schema) do + Map.new(field_def.arguments, fn arg_def -> + arg = %Type.Argument{ + identifier: arg_def.identifier, + name: arg_def.name, + description: arg_def.description, + type: Blueprint.TypeReference.to_type(arg_def.type, schema), + default_value: arg_def.default_value, + deprecation: arg_def.deprecation + } + + {arg_def.identifier, arg} + end) + end + + defimpl Inspect do + defdelegate inspect(term, options), + to: Absinthe.Schema.Notation.SDL.Render + end end diff --git a/lib/absinthe/blueprint/schema/scalar_type_definition.ex b/lib/absinthe/blueprint/schema/scalar_type_definition.ex index d190e362ab..f031aa7544 100644 --- a/lib/absinthe/blueprint/schema/scalar_type_definition.ex +++ b/lib/absinthe/blueprint/schema/scalar_type_definition.ex @@ -6,19 +6,48 @@ defmodule Absinthe.Blueprint.Schema.ScalarTypeDefinition do @enforce_keys [:name] defstruct [ :name, + :identifier, + :module, description: nil, + parse: nil, + serialize: nil, directives: [], + source_location: nil, + open_ended: false, # Added by phases flags: %{}, - errors: [] + errors: [], + __reference__: nil, + __private__: [] ] @type t :: %__MODULE__{ name: String.t(), description: nil | String.t(), directives: [Blueprint.Directive.t()], + source_location: nil | Blueprint.SourceLocation.t(), # Added by phases flags: Blueprint.flags_t(), errors: [Absinthe.Phase.Error.t()] } + + def build(type_def, _schema) do + %Absinthe.Type.Scalar{ + identifier: type_def.identifier, + name: type_def.name, + description: type_def.description, + definition: type_def.module, + serialize: type_def.serialize, + parse: type_def.parse, + open_ended: type_def.open_ended + } + end + + @doc false + def functions(), do: [:serialize, :parse] + + 
defimpl Inspect do + defdelegate inspect(term, options), + to: Absinthe.Schema.Notation.SDL.Render + end end diff --git a/lib/absinthe/blueprint/schema/schema_declaration.ex b/lib/absinthe/blueprint/schema/schema_declaration.ex new file mode 100644 index 0000000000..30b1e01ae8 --- /dev/null +++ b/lib/absinthe/blueprint/schema/schema_declaration.ex @@ -0,0 +1,32 @@ +defmodule Absinthe.Blueprint.Schema.SchemaDeclaration do + @moduledoc false + + alias Absinthe.Blueprint + + defstruct description: nil, + module: nil, + field_definitions: [], + directives: [], + source_location: nil, + # Added by phases + flags: %{}, + errors: [], + __reference__: nil, + __private__: [] + + @type t :: %__MODULE__{ + description: nil | String.t(), + module: nil | module(), + directives: [Blueprint.Directive.t()], + field_definitions: [Blueprint.Schema.FieldDefinition.t()], + source_location: nil | Blueprint.SourceLocation.t(), + # Added by phases + flags: Blueprint.flags_t(), + errors: [Absinthe.Phase.Error.t()] + } + + defimpl Inspect do + defdelegate inspect(term, options), + to: Absinthe.Schema.Notation.SDL.Render + end +end diff --git a/lib/absinthe/blueprint/schema/schema_definition.ex b/lib/absinthe/blueprint/schema/schema_definition.ex index eab9f5e007..cf33447966 100644 --- a/lib/absinthe/blueprint/schema/schema_definition.ex +++ b/lib/absinthe/blueprint/schema/schema_definition.ex @@ -4,16 +4,27 @@ defmodule Absinthe.Blueprint.Schema.SchemaDefinition do alias Absinthe.Blueprint defstruct description: nil, - fields: [], + module: nil, + type_definitions: [], + directive_definitions: [], + type_artifacts: [], + directive_artifacts: [], + type_extensions: [], directives: [], + source_location: nil, # Added by phases + schema_declaration: nil, flags: %{}, - errors: [] + imports: [], + errors: [], + __private__: [], + __reference__: nil @type t :: %__MODULE__{ description: nil | String.t(), - fields: [Blueprint.Schema.FieldDefinition.t()], + # types: 
[Blueprint.Schema.FieldDefinition.t], directives: [Blueprint.Directive.t()], + source_location: nil | Blueprint.SourceLocation.t(), # Added by phases flags: Blueprint.flags_t(), errors: [Absinthe.Phase.Error.t()] diff --git a/lib/absinthe/blueprint/schema/union_type_definition.ex b/lib/absinthe/blueprint/schema/union_type_definition.ex index 184ac37c1c..aed14f5a9b 100644 --- a/lib/absinthe/blueprint/schema/union_type_definition.ex +++ b/lib/absinthe/blueprint/schema/union_type_definition.ex @@ -1,17 +1,24 @@ defmodule Absinthe.Blueprint.Schema.UnionTypeDefinition do @moduledoc false - alias Absinthe.Blueprint + alias Absinthe.{Blueprint, Type} @enforce_keys [:name] defstruct [ + :identifier, :name, + :module, description: nil, + resolve_type: nil, + fields: [], directives: [], types: [], + source_location: nil, # Added by phases flags: %{}, - errors: [] + errors: [], + __reference__: nil, + __private__: [] ] @type t :: %__MODULE__{ @@ -19,8 +26,72 @@ defmodule Absinthe.Blueprint.Schema.UnionTypeDefinition do description: nil | String.t(), directives: [Blueprint.Directive.t()], types: [Blueprint.TypeReference.Name.t()], + source_location: nil | Blueprint.SourceLocation.t(), # Added by phases flags: Blueprint.flags_t(), errors: [Absinthe.Phase.Error.t()] } + + def build(type_def, schema) do + %Type.Union{ + name: type_def.name, + description: type_def.description, + identifier: type_def.identifier, + types: type_def.types |> atomize_types(schema), + fields: build_fields(type_def, schema), + definition: type_def.module, + resolve_type: type_def.resolve_type + } + end + + defp atomize_types(types, schema) do + types + |> Enum.map(&Blueprint.TypeReference.to_type(&1, schema)) + |> Enum.sort() + end + + def build_fields(type_def, schema) do + for field_def <- type_def.fields, into: %{} do + field = %Type.Field{ + identifier: field_def.identifier, + middleware: field_def.middleware, + deprecation: field_def.deprecation, + description: field_def.description, + complexity: 
field_def.complexity, + config: field_def.complexity, + triggers: field_def.triggers, + name: field_def.name, + type: Blueprint.TypeReference.to_type(field_def.type, schema), + args: build_args(field_def, schema), + definition: field_def.module, + __reference__: field_def.__reference__, + __private__: field_def.__private__ + } + + {field.identifier, field} + end + end + + def build_args(field_def, schema) do + Map.new(field_def.arguments, fn arg_def -> + arg = %Type.Argument{ + identifier: arg_def.identifier, + name: arg_def.name, + description: arg_def.description, + type: Blueprint.TypeReference.to_type(arg_def.type, schema), + default_value: arg_def.default_value, + deprecation: arg_def.deprecation + } + + {arg_def.identifier, arg} + end) + end + + @doc false + def functions(), do: [:resolve_type] + + defimpl Inspect do + defdelegate inspect(term, options), + to: Absinthe.Schema.Notation.SDL.Render + end end diff --git a/lib/absinthe/blueprint/source_location.ex b/lib/absinthe/blueprint/source_location.ex new file mode 100644 index 0000000000..58aad966bb --- /dev/null +++ b/lib/absinthe/blueprint/source_location.ex @@ -0,0 +1,30 @@ +defmodule Absinthe.Blueprint.SourceLocation do + @moduledoc false + + @enforce_keys [:line, :column] + defstruct [ + :line, + :column + ] + + @type t :: %__MODULE__{ + line: pos_integer, + column: pos_integer + } + + @doc """ + Generate a `SourceLocation.t()` given a location + """ + @spec at(loc :: Absinthe.Language.loc_t()) :: t + def at(loc) do + %__MODULE__{line: loc.line, column: loc.column} + end + + @doc """ + Generate a `SourceLocation.t()` given line and column numbers + """ + @spec at(line :: pos_integer, column :: pos_integer) :: t + def at(line, column) do + %__MODULE__{line: line, column: column} + end +end diff --git a/lib/absinthe/blueprint/transform.ex b/lib/absinthe/blueprint/transform.ex index 433d6866d0..d9ce8f3991 100644 --- a/lib/absinthe/blueprint/transform.ex +++ b/lib/absinthe/blueprint/transform.ex @@ -61,7 
+61,7 @@ defmodule Absinthe.Blueprint.Transform do defp pass(x, acc), do: {x, acc} nodes_with_children = %{ - Blueprint => [:fragments, :operations, :types, :directives], + Blueprint => [:fragments, :operations, :schema_definitions, :directives], Blueprint.Directive => [:arguments], Blueprint.Document.Field => [:selections, :arguments, :directives], Blueprint.Document.Operation => [:selections, :variable_definitions, :directives], @@ -70,23 +70,23 @@ defmodule Absinthe.Blueprint.Transform do Blueprint.Document.Fragment.Inline => [:selections, :directives], Blueprint.Document.Fragment.Named => [:selections, :directives], Blueprint.Document.Fragment.Spread => [:directives], - Blueprint.Document.VariableDefinition => [:type, :default_value], + Blueprint.Document.VariableDefinition => [:type, :default_value, :directives], Blueprint.Input.Argument => [:input_value], Blueprint.Input.Field => [:input_value], Blueprint.Input.Object => [:fields], Blueprint.Input.List => [:items], Blueprint.Input.RawValue => [:content], Blueprint.Input.Value => [:normalized], - Blueprint.Schema.DirectiveDefinition => [:directives, :types], + Blueprint.Schema.DirectiveDefinition => [:directives, :arguments], Blueprint.Schema.EnumTypeDefinition => [:directives, :values], Blueprint.Schema.EnumValueDefinition => [:directives], Blueprint.Schema.FieldDefinition => [:type, :arguments, :directives], - Blueprint.Schema.InputObjectTypeDefinition => [:interfaces, :fields, :directives], + Blueprint.Schema.InputObjectTypeDefinition => [:fields, :directives], Blueprint.Schema.InputValueDefinition => [:type, :default_value, :directives], - Blueprint.Schema.InterfaceTypeDefinition => [:fields, :directives], + Blueprint.Schema.InterfaceTypeDefinition => [:interfaces, :fields, :directives], Blueprint.Schema.ObjectTypeDefinition => [:interfaces, :fields, :directives], Blueprint.Schema.ScalarTypeDefinition => [:directives], - Blueprint.Schema.SchemaDefinition => [:directives, :fields], + 
Blueprint.Schema.SchemaDefinition => [:directive_definitions, :type_definitions, :directives], Blueprint.Schema.UnionTypeDefinition => [:directives, :types] } @@ -118,12 +118,6 @@ defmodule Absinthe.Blueprint.Transform do end for {node_name, children} <- nodes_with_children do - if :selections in children do - def maybe_walk_children(%unquote(node_name){flags: %{flat: _}} = node, acc, pre, post) do - node_with_children(node, unquote(children -- [:selections]), acc, pre, post) - end - end - def maybe_walk_children(%unquote(node_name){} = node, acc, pre, post) do node_with_children(node, unquote(children), acc, pre, post) end @@ -134,9 +128,7 @@ defmodule Absinthe.Blueprint.Transform do end defp node_with_children(node, children, acc, pre, post) do - {node, acc} = walk_children(node, children, acc, pre, post) - - post.(node, acc) + walk_children(node, children, acc, pre, post) end defp walk_children(node, children, acc, pre, post) do diff --git a/lib/absinthe/blueprint/type_reference.ex b/lib/absinthe/blueprint/type_reference.ex index a6ea22f89a..91cc22e72d 100644 --- a/lib/absinthe/blueprint/type_reference.ex +++ b/lib/absinthe/blueprint/type_reference.ex @@ -6,6 +6,7 @@ defmodule Absinthe.Blueprint.TypeReference do @type t :: TypeReference.List.t() | TypeReference.Name.t() + | TypeReference.Identifier.t() | TypeReference.NonNull.t() @wrappers [TypeReference.List, TypeReference.NonNull] @@ -18,7 +19,49 @@ defmodule Absinthe.Blueprint.TypeReference do value end + def unwrap(%TypeReference.Identifier{} = value) do + value + end + + def unwrap(value) when is_atom(value), do: value + def unwrap(%struct{of_type: inner}) when struct in @wrappers do unwrap(inner) end + + @doc """ + Get the GraphQL name for a (possibly wrapped) type reference. + + """ + def name(%__MODULE__.NonNull{of_type: type}) do + name(type) <> "!" 
+ end + + def name(%__MODULE__.List{of_type: type}) do + "[" <> name(type) <> "]" + end + + def name(%__MODULE__.Name{name: name}) do + name + end + + def to_type(%__MODULE__.NonNull{of_type: type}, schema) do + %Absinthe.Type.NonNull{of_type: to_type(type, schema)} + end + + def to_type(%__MODULE__.List{of_type: type}, schema) do + %Absinthe.Type.List{of_type: to_type(type, schema)} + end + + def to_type(%__MODULE__.Name{name: name}, schema) do + Enum.find(schema.type_definitions, &(&1.name == name)).identifier + end + + def to_type(%__MODULE__.Identifier{id: id}, _) when is_atom(id) do + id + end + + def to_type(value, _) when is_atom(value) do + value + end end diff --git a/lib/absinthe/blueprint/type_reference/identifier.ex b/lib/absinthe/blueprint/type_reference/identifier.ex new file mode 100644 index 0000000000..dccd0cae9b --- /dev/null +++ b/lib/absinthe/blueprint/type_reference/identifier.ex @@ -0,0 +1,17 @@ +defmodule Absinthe.Blueprint.TypeReference.Identifier do + @moduledoc false + + alias Absinthe.Phase + + @enforce_keys [:id] + defstruct [ + :id, + :schema_node, + errors: [] + ] + + @type t :: %__MODULE__{ + id: any(), + errors: [Phase.Error.t()] + } +end diff --git a/lib/absinthe/formatter.ex b/lib/absinthe/formatter.ex new file mode 100644 index 0000000000..5b273638f4 --- /dev/null +++ b/lib/absinthe/formatter.ex @@ -0,0 +1,36 @@ +defmodule Absinthe.Formatter do + @moduledoc """ + Formatter task for graphql + + Will format files with the extensions .graphql or .gql + + ## Example + ```elixir + Absinthe.Formatter.format("{ version }") + "{\n version\n}\n" + ``` + + + From Elixir 1.13 onwards the Absinthe.Formatter can be added to + the formatter as a plugin: + + ```elixir + # .formatter.exs + [ + # Define the desired plugins + plugins: [Absinthe.Formatter], + # Remember to update the inputs list to include the new extensions + inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}", "{lib, priv}/**/*.{gql,graphql}"] + ] + ``` + """ + + 
def features(_opts) do + [sigils: [], extensions: [".graphql", ".gql"]] + end + + def format(contents, _opts \\ []) do + {:ok, blueprint} = Absinthe.Phase.Parse.run(contents, []) + inspect(blueprint.input, pretty: true) + end +end diff --git a/lib/absinthe/introspection/directive_location.ex b/lib/absinthe/introspection/directive_location.ex new file mode 100644 index 0000000000..0156e5520a --- /dev/null +++ b/lib/absinthe/introspection/directive_location.ex @@ -0,0 +1,34 @@ +defmodule Absinthe.Introspection.DirectiveLocation do + @moduledoc false + + # https://spec.graphql.org/draft/#sec-Schema-Introspection + + @executable_directive_locations [ + :query, + :mutation, + :subscription, + :field, + :fragment_definition, + :fragment_spread, + :inline_fragment, + :variable_definition + ] + @type_system_directive_locations [ + :schema, + :scalar, + :object, + :field_definition, + :argument_definition, + :interface, + :union, + :enum, + :enum_value, + :input_object, + :input_field_definition + ] + + def values do + @executable_directive_locations ++ + @type_system_directive_locations + end +end diff --git a/lib/absinthe/introspection/field.ex b/lib/absinthe/introspection/field.ex deleted file mode 100644 index ff305ca8de..0000000000 --- a/lib/absinthe/introspection/field.ex +++ /dev/null @@ -1,76 +0,0 @@ -defmodule Absinthe.Introspection.Field do - @moduledoc false - - use Absinthe.Schema.Notation - - alias Absinthe.Schema - alias Absinthe.Type - - def meta("typename") do - %Type.Field{ - name: "__typename", - type: :string, - description: "The name of the object type currently being queried.", - middleware: [ - Absinthe.Resolution.resolver_spec(fn - _, %{parent_type: %Type.Object{} = type} -> - {:ok, type.name} - - _, %{source: source, parent_type: %Type.Interface{} = iface} = env -> - case Type.Interface.resolve_type(iface, source, env) do - nil -> - {:error, "Could not resolve type of concrete " <> iface.name} - - type -> - {:ok, type.name} - end - - _, %{source: 
source, parent_type: %Type.Union{} = union} = env -> - case Type.Union.resolve_type(union, source, env) do - nil -> - {:error, "Could not resolve type of concrete " <> union.name} - - type -> - {:ok, type.name} - end - end) - ] - } - end - - def meta("type") do - %Type.Field{ - name: "__type", - type: :__type, - description: "Represents scalars, interfaces, object types, unions, enums in the system", - args: %{ - name: %Type.Argument{ - name: "name", - type: non_null(:string), - description: "The name of the type to introspect", - __reference__: %{ - identifier: :name - } - } - }, - middleware: [ - Absinthe.Resolution.resolver_spec(fn %{name: name}, %{schema: schema} -> - {:ok, Schema.lookup_type(schema, name)} - end) - ] - } - end - - def meta("schema") do - %Type.Field{ - name: "__schema", - type: :__schema, - description: "Represents the schema", - middleware: [ - Absinthe.Resolution.resolver_spec(fn _, %{schema: schema} -> - {:ok, schema} - end) - ] - } - end -end diff --git a/lib/absinthe/introspection/kind.ex b/lib/absinthe/introspection/kind.ex deleted file mode 100644 index b7f6990220..0000000000 --- a/lib/absinthe/introspection/kind.ex +++ /dev/null @@ -1,26 +0,0 @@ -defmodule Absinthe.Introspection.Kind do - @moduledoc false - - defmacro __using__(_opts) do - quote do - @behaviour unquote(__MODULE__) - def kind do - __MODULE__ - |> Module.split() - |> List.last() - |> Absinthe.Introspection.Kind.upcase() - end - - defoverridable kind: 0 - end - end - - def upcase(name) do - Regex.scan(~r{[A-Z]+[a-z]+}, name) - |> List.flatten() - |> Enum.map(&String.upcase/1) - |> Enum.join("_") - end - - @callback kind :: binary -end diff --git a/lib/absinthe/introspection/type_kind.ex b/lib/absinthe/introspection/type_kind.ex new file mode 100644 index 0000000000..44b862e47f --- /dev/null +++ b/lib/absinthe/introspection/type_kind.ex @@ -0,0 +1,37 @@ +defmodule Absinthe.Introspection.TypeKind do + @moduledoc false + + # https://spec.graphql.org/draft/#sec-Type-Kinds + + 
defmacro __using__(kind) do + quote do + @behaviour unquote(__MODULE__) + def kind, do: unquote(kind) + end + end + + @type type_kind :: + :scalar + | :object + | :interface + | :union + | :enum + | :input_object + | :list + | :non_null + + @callback kind() :: type_kind() + + def values do + [ + :scalar, + :object, + :interface, + :union, + :enum, + :input_object, + :list, + :non_null + ] + end +end diff --git a/lib/absinthe/language.ex b/lib/absinthe/language.ex index c6ab87f140..1e8eb915a6 100644 --- a/lib/absinthe/language.ex +++ b/lib/absinthe/language.ex @@ -9,6 +9,7 @@ defmodule Absinthe.Language do Language.Argument.t() | Language.BooleanValue.t() | Language.Directive.t() + | Language.DirectiveDefinition.t() | Language.Document.t() | Language.EnumTypeDefinition.t() | Language.EnumValue.t() @@ -64,7 +65,7 @@ defmodule Absinthe.Language do | Language.InputObjectTypeDefinition.t() | Language.TypeExtensionDefinition.t() - @type loc_t :: %{start_line: nil | integer, end_line: nil | integer} + @type loc_t :: %{line: pos_integer, column: pos_integer} @type input_t :: Language.BooleanValue diff --git a/lib/absinthe/language/argument.ex b/lib/absinthe/language/argument.ex index c2c2266a2f..9904ffa03c 100644 --- a/lib/absinthe/language/argument.ex +++ b/lib/absinthe/language/argument.ex @@ -25,12 +25,6 @@ defmodule Absinthe.Language.Argument do end defp source_location(%{loc: nil}), do: nil - defp source_location(%{loc: loc}), do: Blueprint.Document.SourceLocation.at(loc.start_line) - end - - defimpl Absinthe.Traversal.Node do - def children(node, _schema) do - [node.value] - end + defp source_location(%{loc: loc}), do: Blueprint.SourceLocation.at(loc) end end diff --git a/lib/absinthe/language/boolean_value.ex b/lib/absinthe/language/boolean_value.ex index 5cdc83cbe6..77224edd9d 100644 --- a/lib/absinthe/language/boolean_value.ex +++ b/lib/absinthe/language/boolean_value.ex @@ -22,6 +22,6 @@ defmodule Absinthe.Language.BooleanValue do end defp source_location(%{loc: 
nil}), do: nil - defp source_location(%{loc: loc}), do: Blueprint.Document.SourceLocation.at(loc.start_line) + defp source_location(%{loc: loc}), do: Blueprint.SourceLocation.at(loc) end end diff --git a/lib/absinthe/language/directive.ex b/lib/absinthe/language/directive.ex index 43f0af32e8..890bfde35b 100644 --- a/lib/absinthe/language/directive.ex +++ b/lib/absinthe/language/directive.ex @@ -23,6 +23,6 @@ defmodule Absinthe.Language.Directive do end defp source_location(%{loc: nil}), do: nil - defp source_location(%{loc: loc}), do: Blueprint.Document.SourceLocation.at(loc.start_line) + defp source_location(%{loc: loc}), do: Blueprint.SourceLocation.at(loc) end end diff --git a/lib/absinthe/language/directive_definition.ex b/lib/absinthe/language/directive_definition.ex index 71180f12dd..a3233e5fc8 100644 --- a/lib/absinthe/language/directive_definition.ex +++ b/lib/absinthe/language/directive_definition.ex @@ -4,27 +4,38 @@ defmodule Absinthe.Language.DirectiveDefinition do alias Absinthe.{Blueprint, Language} defstruct name: nil, + description: nil, arguments: [], directives: [], locations: [], - loc: %{start_line: nil} + loc: %{line: nil}, + repeatable: false @type t :: %__MODULE__{ name: String.t(), + description: nil | String.t(), directives: [Language.Directive.t()], arguments: [Language.Argument.t()], locations: [String.t()], - loc: Language.loc_t() + loc: Language.loc_t(), + repeatable: boolean() } defimpl Blueprint.Draft do def convert(node, doc) do %Blueprint.Schema.DirectiveDefinition{ name: node.name, + identifier: Macro.underscore(node.name) |> String.to_atom(), + description: node.description, arguments: Absinthe.Blueprint.Draft.convert(node.arguments, doc), directives: Absinthe.Blueprint.Draft.convert(node.directives, doc), - locations: node.locations + locations: node.locations, + repeatable: node.repeatable, + source_location: source_location(node) } end + + defp source_location(%{loc: nil}), do: nil + defp source_location(%{loc: loc}), do: 
Blueprint.SourceLocation.at(loc) end end diff --git a/lib/absinthe/language/document.ex b/lib/absinthe/language/document.ex index d2483f20ca..4dd409f17b 100644 --- a/lib/absinthe/language/document.ex +++ b/lib/absinthe/language/document.ex @@ -1,16 +1,14 @@ defmodule Absinthe.Language.Document do @moduledoc false - require Logger - alias Absinthe.{Blueprint, Language} defstruct definitions: [], - loc: %{start_line: nil} + loc: %{line: nil} @typedoc false @type t :: %__MODULE__{ - definitions: [Absinthe.Traversal.Node.t()], + definitions: [Absinthe.Language.t()], loc: Language.loc_t() } @@ -74,7 +72,7 @@ defmodule Absinthe.Language.Document do end defp convert_definition(%struct{} = node, doc, blueprint) when struct in @types do - update_in(blueprint.types, &[Blueprint.Draft.convert(node, doc) | &1]) + update_in(blueprint.schema_definitions, &[Blueprint.Draft.convert(node, doc) | &1]) end defp convert_definition(%struct{} = node, doc, blueprint) when struct in @directives do @@ -86,7 +84,8 @@ defmodule Absinthe.Language.Document do end end - defimpl Absinthe.Traversal.Node do - def children(%{definitions: definitions}, _schema), do: definitions + defimpl Inspect do + defdelegate inspect(term, options), + to: Absinthe.Language.Render end end diff --git a/lib/absinthe/language/enum_type_definition.ex b/lib/absinthe/language/enum_type_definition.ex index 024ac18437..b120306f56 100644 --- a/lib/absinthe/language/enum_type_definition.ex +++ b/lib/absinthe/language/enum_type_definition.ex @@ -4,12 +4,14 @@ defmodule Absinthe.Language.EnumTypeDefinition do alias Absinthe.{Blueprint, Language} defstruct name: nil, + description: nil, values: [], directives: [], - loc: %{start_line: nil} + loc: %{line: nil} @type t :: %__MODULE__{ name: String.t(), + description: nil | String.t(), values: [String.t()], directives: [Language.Directive.t()], loc: Language.loc_t() @@ -19,9 +21,15 @@ defmodule Absinthe.Language.EnumTypeDefinition do def convert(node, doc) do 
%Blueprint.Schema.EnumTypeDefinition{ name: node.name, + description: node.description, + identifier: Macro.underscore(node.name) |> String.to_atom(), values: Absinthe.Blueprint.Draft.convert(node.values, doc), - directives: Absinthe.Blueprint.Draft.convert(node.directives, doc) + directives: Absinthe.Blueprint.Draft.convert(node.directives, doc), + source_location: source_location(node) } end + + defp source_location(%{loc: nil}), do: nil + defp source_location(%{loc: loc}), do: Blueprint.SourceLocation.at(loc) end end diff --git a/lib/absinthe/language/enum_value.ex b/lib/absinthe/language/enum_value.ex index a6c915bccc..dc50ab6c1f 100644 --- a/lib/absinthe/language/enum_value.ex +++ b/lib/absinthe/language/enum_value.ex @@ -4,7 +4,7 @@ defmodule Absinthe.Language.EnumValue do alias Absinthe.{Blueprint, Language} defstruct value: nil, - loc: %{start_line: nil} + loc: %{line: nil} @type t :: %__MODULE__{ value: any, @@ -20,6 +20,6 @@ defmodule Absinthe.Language.EnumValue do end defp source_location(%{loc: nil}), do: nil - defp source_location(%{loc: loc}), do: Blueprint.Document.SourceLocation.at(loc.start_line) + defp source_location(%{loc: loc}), do: Blueprint.SourceLocation.at(loc) end end diff --git a/lib/absinthe/language/enum_value_definition.ex b/lib/absinthe/language/enum_value_definition.ex index e3a1d88268..701cbc7e13 100644 --- a/lib/absinthe/language/enum_value_definition.ex +++ b/lib/absinthe/language/enum_value_definition.ex @@ -6,12 +6,14 @@ defmodule Absinthe.Language.EnumValueDefinition do @enforce_keys [:value] defstruct [ :value, + description: nil, directives: [], - loc: %{start_line: nil} + loc: %{line: nil, column: nil} ] @type t :: %__MODULE__{ value: String.t(), + description: nil | String.t(), directives: [Language.Directive.t()], loc: Language.loc_t() } @@ -19,13 +21,16 @@ defmodule Absinthe.Language.EnumValueDefinition do defimpl Blueprint.Draft do def convert(node, doc) do %Blueprint.Schema.EnumValueDefinition{ - value: node.value, + 
value: node.value |> Macro.underscore() |> String.to_atom(), + name: node.value, + identifier: node.value |> Macro.underscore() |> String.to_atom(), + description: node.description, directives: Absinthe.Blueprint.Draft.convert(node.directives, doc), source_location: source_location(node) } end defp source_location(%{loc: nil}), do: nil - defp source_location(%{loc: loc}), do: Blueprint.Document.SourceLocation.at(loc.start_line) + defp source_location(%{loc: loc}), do: Blueprint.SourceLocation.at(loc) end end diff --git a/lib/absinthe/language/field.ex b/lib/absinthe/language/field.ex index 80b4703553..3019731d67 100644 --- a/lib/absinthe/language/field.ex +++ b/lib/absinthe/language/field.ex @@ -8,7 +8,7 @@ defmodule Absinthe.Language.Field do arguments: [], directives: [], selection_set: nil, - loc: %{start_line: nil} + loc: %{line: nil} @type t :: %__MODULE__{ alias: nil | String.t(), @@ -32,7 +32,7 @@ defmodule Absinthe.Language.Field do end defp source_location(%{loc: nil}), do: nil - defp source_location(%{loc: loc}), do: Blueprint.Document.SourceLocation.at(loc.start_line) + defp source_location(%{loc: loc}), do: Blueprint.SourceLocation.at(loc) @spec selections(nil | Language.SelectionSet.t()) :: [ Language.Field.t() | Language.InlineFragment.t() | Language.FragmentSpread.t() @@ -40,11 +40,4 @@ defmodule Absinthe.Language.Field do defp selections(nil), do: [] defp selections(node), do: node.selections end - - defimpl Absinthe.Traversal.Node do - def children(node, _schema) do - [node.arguments, node.directives, node.selection_set |> List.wrap()] - |> Enum.concat() - end - end end diff --git a/lib/absinthe/language/field_definition.ex b/lib/absinthe/language/field_definition.ex index 7179df4112..8488a42b59 100644 --- a/lib/absinthe/language/field_definition.ex +++ b/lib/absinthe/language/field_definition.ex @@ -4,13 +4,16 @@ defmodule Absinthe.Language.FieldDefinition do alias Absinthe.{Blueprint, Language} defstruct name: nil, + description: nil, arguments: 
[], directives: [], type: nil, - loc: %{start_line: nil} + complexity: nil, + loc: %{line: nil} @type t :: %__MODULE__{ name: String.t(), + description: nil | String.t(), arguments: [Language.Argument.t()], directives: [Language.Directive.t()], type: Language.type_reference_t(), @@ -20,11 +23,18 @@ defmodule Absinthe.Language.FieldDefinition do defimpl Blueprint.Draft do def convert(node, doc) do %Blueprint.Schema.FieldDefinition{ - name: node.name, + name: node.name |> Macro.underscore(), + description: node.description, + identifier: node.name |> Macro.underscore() |> String.to_atom(), arguments: Absinthe.Blueprint.Draft.convert(node.arguments, doc), directives: Absinthe.Blueprint.Draft.convert(node.directives, doc), - type: Absinthe.Blueprint.Draft.convert(node.type, doc) + type: Absinthe.Blueprint.Draft.convert(node.type, doc), + complexity: node.complexity, + source_location: source_location(node) } end + + defp source_location(%{loc: nil}), do: nil + defp source_location(%{loc: loc}), do: Blueprint.SourceLocation.at(loc) end end diff --git a/lib/absinthe/language/float_value.ex b/lib/absinthe/language/float_value.ex index 0a25fb9a0a..334aef87b0 100644 --- a/lib/absinthe/language/float_value.ex +++ b/lib/absinthe/language/float_value.ex @@ -22,6 +22,6 @@ defmodule Absinthe.Language.FloatValue do end defp source_location(%{loc: nil}), do: nil - defp source_location(%{loc: loc}), do: Blueprint.Document.SourceLocation.at(loc.start_line) + defp source_location(%{loc: loc}), do: Blueprint.SourceLocation.at(loc) end end diff --git a/lib/absinthe/language/fragment.ex b/lib/absinthe/language/fragment.ex index 93c956ba2a..315f90e3d6 100644 --- a/lib/absinthe/language/fragment.ex +++ b/lib/absinthe/language/fragment.ex @@ -7,7 +7,7 @@ defmodule Absinthe.Language.Fragment do type_condition: nil, directives: [], selection_set: nil, - loc: %{start_line: nil} + loc: %{line: nil} @type t :: %__MODULE__{ name: String.t(), @@ -24,23 +24,16 @@ defmodule 
Absinthe.Language.Fragment do type_condition: Blueprint.Draft.convert(node.type_condition, doc), selections: Blueprint.Draft.convert(node.selection_set.selections, doc), directives: Blueprint.Draft.convert(node.directives, doc), - source_location: source_location(node.loc) + source_location: source_location(node) } end - defp source_location(nil) do + defp source_location(%{loc: nil}) do nil end - defp source_location(%{start_line: number}) do - Blueprint.Document.SourceLocation.at(number) - end - end - - defimpl Absinthe.Traversal.Node do - def children(node, _schema) do - [node.directives, List.wrap(node.selection_set)] - |> Enum.concat() + defp source_location(%{loc: loc}) do + Blueprint.SourceLocation.at(loc) end end end diff --git a/lib/absinthe/language/fragment_spread.ex b/lib/absinthe/language/fragment_spread.ex index 2f1eeb6efc..70612f8f4f 100644 --- a/lib/absinthe/language/fragment_spread.ex +++ b/lib/absinthe/language/fragment_spread.ex @@ -5,7 +5,7 @@ defmodule Absinthe.Language.FragmentSpread do defstruct name: nil, directives: [], - loc: %{start_line: nil} + loc: %{line: nil} @type t :: %__MODULE__{ name: String.t(), @@ -17,22 +17,16 @@ defmodule Absinthe.Language.FragmentSpread do %Blueprint.Document.Fragment.Spread{ name: node.name, directives: Blueprint.Draft.convert(node.directives, doc), - source_location: source_location(node.loc) + source_location: source_location(node) } end - defp source_location(nil) do + defp source_location(%{loc: nil}) do nil end - defp source_location(%{start_line: number}) do - Blueprint.Document.SourceLocation.at(number) - end - end - - defimpl Absinthe.Traversal.Node do - def children(node, _schema) do - node.directives + defp source_location(%{loc: loc}) do + Blueprint.SourceLocation.at(loc) end end end diff --git a/lib/absinthe/language/inline_fragment.ex b/lib/absinthe/language/inline_fragment.ex index a70c98f6ff..d595cef7a0 100644 --- a/lib/absinthe/language/inline_fragment.ex +++ 
b/lib/absinthe/language/inline_fragment.ex @@ -6,7 +6,7 @@ defmodule Absinthe.Language.InlineFragment do defstruct type_condition: nil, directives: [], selection_set: nil, - loc: %{start_line: nil} + loc: %{line: nil} @type t :: %__MODULE__{ type_condition: nil | Language.NamedType.t(), @@ -21,23 +21,16 @@ defmodule Absinthe.Language.InlineFragment do type_condition: Blueprint.Draft.convert(node.type_condition, doc), selections: Blueprint.Draft.convert(node.selection_set.selections, doc), directives: Blueprint.Draft.convert(node.directives, doc), - source_location: source_location(node.loc) + source_location: source_location(node) } end - defp source_location(nil) do + defp source_location(%{loc: nil}) do nil end - defp source_location(%{start_line: number}) do - Blueprint.Document.SourceLocation.at(number) - end - end - - defimpl Absinthe.Traversal.Node do - def children(node, _schema) do - [List.wrap(node.type_condition), node.directives, List.wrap(node.selection_set)] - |> Enum.concat() + defp source_location(%{loc: loc}) do + Blueprint.SourceLocation.at(loc) end end end diff --git a/lib/absinthe/language/input_object_type_definition.ex b/lib/absinthe/language/input_object_type_definition.ex index 6099da228a..12bbfea237 100644 --- a/lib/absinthe/language/input_object_type_definition.ex +++ b/lib/absinthe/language/input_object_type_definition.ex @@ -4,13 +4,15 @@ defmodule Absinthe.Language.InputObjectTypeDefinition do alias Absinthe.{Blueprint, Language} defstruct name: nil, + description: nil, fields: [], directives: [], - loc: %{start_line: nil}, + loc: %{line: nil}, errors: [] @type t :: %__MODULE__{ name: String.t(), + description: nil | String.t(), fields: [Language.InputValueDefinition.t()], directives: [Language.Directive.t()], loc: Language.loc_t() @@ -19,13 +21,19 @@ defmodule Absinthe.Language.InputObjectTypeDefinition do defimpl Blueprint.Draft do def convert(node, doc) do %Blueprint.Schema.InputObjectTypeDefinition{ + identifier: node.name |> 
Macro.underscore() |> String.to_atom(), name: node.name, + description: node.description, fields: for value <- Absinthe.Blueprint.Draft.convert(node.fields, doc) do %{value | placement: :input_field_definition} end, - directives: Absinthe.Blueprint.Draft.convert(node.directives, doc) + directives: Absinthe.Blueprint.Draft.convert(node.directives, doc), + source_location: source_location(node) } end + + defp source_location(%{loc: nil}), do: nil + defp source_location(%{loc: loc}), do: Blueprint.SourceLocation.at(loc) end end diff --git a/lib/absinthe/language/input_value_definition.ex b/lib/absinthe/language/input_value_definition.ex index 32ec15e251..143827dba8 100644 --- a/lib/absinthe/language/input_value_definition.ex +++ b/lib/absinthe/language/input_value_definition.ex @@ -7,13 +7,15 @@ defmodule Absinthe.Language.InputValueDefinition do defstruct [ :name, :type, + description: nil, default_value: nil, directives: [], - loc: %{start_line: nil} + loc: %{line: nil} ] @type t :: %__MODULE__{ name: String.t(), + description: nil | String.t(), type: Language.input_t(), default_value: Language.input_t(), directives: [Language.Directive.t()], @@ -23,11 +25,36 @@ defmodule Absinthe.Language.InputValueDefinition do defimpl Blueprint.Draft do def convert(node, doc) do %Blueprint.Schema.InputValueDefinition{ - name: node.name, + name: Macro.underscore(node.name), + description: node.description, type: Blueprint.Draft.convert(node.type, doc), - default_value: Blueprint.Draft.convert(node.default_value, doc), - directives: Blueprint.Draft.convert(node.directives, doc) + identifier: Macro.underscore(node.name) |> String.to_atom(), + default_value: to_term(node.default_value), + default_value_blueprint: Blueprint.Draft.convert(node.default_value, doc), + directives: Blueprint.Draft.convert(node.directives, doc), + source_location: source_location(node) } end + + defp source_location(%{loc: nil}), do: nil + defp source_location(%{loc: loc}), do: 
Blueprint.SourceLocation.at(loc) + + defp to_term(nil), + do: nil + + defp to_term(%Language.EnumValue{value: value}), + do: value |> Macro.underscore() |> String.to_atom() + + defp to_term(%Language.ListValue{values: values}), + do: Enum.map(values, &to_term/1) + + defp to_term(%Language.NullValue{}), + do: nil + + defp to_term(%Language.ObjectValue{fields: fields}), + do: Enum.into(fields, %{}, &{String.to_atom(&1.name), to_term(&1.value)}) + + defp to_term(%{value: value}), + do: value end end diff --git a/lib/absinthe/language/int_value.ex b/lib/absinthe/language/int_value.ex index fba8607531..0b786d775b 100644 --- a/lib/absinthe/language/int_value.ex +++ b/lib/absinthe/language/int_value.ex @@ -22,6 +22,6 @@ defmodule Absinthe.Language.IntValue do end defp source_location(%{loc: nil}), do: nil - defp source_location(%{loc: loc}), do: Blueprint.Document.SourceLocation.at(loc.start_line) + defp source_location(%{loc: loc}), do: Blueprint.SourceLocation.at(loc) end end diff --git a/lib/absinthe/language/interface_type_definition.ex b/lib/absinthe/language/interface_type_definition.ex index 326fe9bc68..444808963d 100644 --- a/lib/absinthe/language/interface_type_definition.ex +++ b/lib/absinthe/language/interface_type_definition.ex @@ -4,14 +4,18 @@ defmodule Absinthe.Language.InterfaceTypeDefinition do alias Absinthe.{Blueprint, Language} defstruct name: nil, + description: nil, fields: [], directives: [], - loc: %{start_line: nil} + interfaces: [], + loc: %{line: nil} @type t :: %__MODULE__{ name: String.t(), + description: nil | String.t(), fields: [Language.FieldDefinition.t()], directives: [Language.Directive.t()], + interfaces: [Language.NamedType.t()], loc: Language.loc_t() } @@ -19,9 +23,23 @@ defmodule Absinthe.Language.InterfaceTypeDefinition do def convert(node, doc) do %Blueprint.Schema.InterfaceTypeDefinition{ name: node.name, + description: node.description, + identifier: Macro.underscore(node.name) |> String.to_atom(), fields: 
Absinthe.Blueprint.Draft.convert(node.fields, doc), - directives: Absinthe.Blueprint.Draft.convert(node.directives, doc) + directives: Absinthe.Blueprint.Draft.convert(node.directives, doc), + interfaces: interfaces(node.interfaces, doc), + interface_blueprints: Absinthe.Blueprint.Draft.convert(node.interfaces, doc), + source_location: source_location(node) } end + + defp interfaces(interfaces, doc) do + interfaces + |> Absinthe.Blueprint.Draft.convert(doc) + |> Enum.map(&(&1.name |> Macro.underscore() |> String.to_atom())) + end + + defp source_location(%{loc: nil}), do: nil + defp source_location(%{loc: loc}), do: Blueprint.SourceLocation.at(loc) end end diff --git a/lib/absinthe/language/list_type.ex b/lib/absinthe/language/list_type.ex index a989b91539..dbd306b8f4 100644 --- a/lib/absinthe/language/list_type.ex +++ b/lib/absinthe/language/list_type.ex @@ -4,7 +4,7 @@ defmodule Absinthe.Language.ListType do alias Absinthe.{Blueprint, Language} defstruct type: nil, - loc: %{start_line: nil} + loc: %{line: nil} @type t :: %__MODULE__{ type: Language.type_reference_t(), diff --git a/lib/absinthe/language/list_value.ex b/lib/absinthe/language/list_value.ex index aebf97eebc..ef4f50de60 100644 --- a/lib/absinthe/language/list_value.ex +++ b/lib/absinthe/language/list_value.ex @@ -24,6 +24,6 @@ defmodule Absinthe.Language.ListValue do end defp source_location(%{loc: nil}), do: nil - defp source_location(%{loc: loc}), do: Blueprint.Document.SourceLocation.at(loc.start_line) + defp source_location(%{loc: loc}), do: Blueprint.SourceLocation.at(loc) end end diff --git a/lib/absinthe/language/named_type.ex b/lib/absinthe/language/named_type.ex index a88047c550..cd4c8cdbed 100644 --- a/lib/absinthe/language/named_type.ex +++ b/lib/absinthe/language/named_type.ex @@ -4,7 +4,7 @@ defmodule Absinthe.Language.NamedType do alias Absinthe.{Blueprint, Language} defstruct name: nil, - loc: %{start_line: nil} + loc: %{line: nil} @type t :: %__MODULE__{ name: String.t(), diff --git 
a/lib/absinthe/language/non_null_type.ex b/lib/absinthe/language/non_null_type.ex index 5406bc2056..6f8705b3bd 100644 --- a/lib/absinthe/language/non_null_type.ex +++ b/lib/absinthe/language/non_null_type.ex @@ -4,7 +4,7 @@ defmodule Absinthe.Language.NonNullType do alias Absinthe.{Blueprint, Language} defstruct type: nil, - loc: %{start_line: nil} + loc: %{line: nil} @type t :: %__MODULE__{ type: Language.type_reference_t(), diff --git a/lib/absinthe/language/null_value.ex b/lib/absinthe/language/null_value.ex index 0e46f31d97..0815b19b01 100644 --- a/lib/absinthe/language/null_value.ex +++ b/lib/absinthe/language/null_value.ex @@ -19,6 +19,6 @@ defmodule Absinthe.Language.NullValue do end defp source_location(%{loc: nil}), do: nil - defp source_location(%{loc: loc}), do: Blueprint.Document.SourceLocation.at(loc.start_line) + defp source_location(%{loc: loc}), do: Blueprint.SourceLocation.at(loc) end end diff --git a/lib/absinthe/language/object_field.ex b/lib/absinthe/language/object_field.ex index 33b77c606b..3643792b97 100644 --- a/lib/absinthe/language/object_field.ex +++ b/lib/absinthe/language/object_field.ex @@ -5,7 +5,7 @@ defmodule Absinthe.Language.ObjectField do defstruct name: nil, value: nil, - loc: %{start_line: nil} + loc: %{line: nil} @type t :: %__MODULE__{ name: String.t(), @@ -23,6 +23,6 @@ defmodule Absinthe.Language.ObjectField do end defp source_location(%{loc: nil}), do: nil - defp source_location(%{loc: loc}), do: Blueprint.Document.SourceLocation.at(loc.start_line) + defp source_location(%{loc: loc}), do: Blueprint.SourceLocation.at(loc) end end diff --git a/lib/absinthe/language/object_type_definition.ex b/lib/absinthe/language/object_type_definition.ex index 2d775e5c38..4ca8d55603 100644 --- a/lib/absinthe/language/object_type_definition.ex +++ b/lib/absinthe/language/object_type_definition.ex @@ -4,13 +4,15 @@ defmodule Absinthe.Language.ObjectTypeDefinition do alias Absinthe.{Blueprint, Language} defstruct name: nil, + description: 
nil, directives: [], interfaces: [], fields: [], - loc: %{start_line: nil} + loc: %{line: nil} @type t :: %__MODULE__{ name: String.t(), + description: nil | String.t(), directives: [Language.Directive.t()], interfaces: [Language.NamedType.t()], fields: [Language.FieldDefinition.t()], @@ -21,10 +23,23 @@ defmodule Absinthe.Language.ObjectTypeDefinition do def convert(node, doc) do %Blueprint.Schema.ObjectTypeDefinition{ name: node.name, + description: node.description, + identifier: Macro.underscore(node.name) |> String.to_atom(), fields: Absinthe.Blueprint.Draft.convert(node.fields, doc), - interfaces: Absinthe.Blueprint.Draft.convert(node.interfaces, doc), - directives: Absinthe.Blueprint.Draft.convert(node.directives, doc) + interfaces: interfaces(node.interfaces, doc), + interface_blueprints: Absinthe.Blueprint.Draft.convert(node.interfaces, doc), + directives: Absinthe.Blueprint.Draft.convert(node.directives, doc), + source_location: source_location(node) } end + + defp source_location(%{loc: nil}), do: nil + defp source_location(%{loc: loc}), do: Blueprint.SourceLocation.at(loc) + + defp interfaces(interfaces, doc) do + interfaces + |> Absinthe.Blueprint.Draft.convert(doc) + |> Enum.map(&(&1.name |> Macro.underscore() |> String.to_atom())) + end end end diff --git a/lib/absinthe/language/object_value.ex b/lib/absinthe/language/object_value.ex index a193556094..3e7f813e5d 100644 --- a/lib/absinthe/language/object_value.ex +++ b/lib/absinthe/language/object_value.ex @@ -20,6 +20,6 @@ defmodule Absinthe.Language.ObjectValue do end defp source_location(%{loc: nil}), do: nil - defp source_location(%{loc: loc}), do: Blueprint.Document.SourceLocation.at(loc.start_line) + defp source_location(%{loc: loc}), do: Blueprint.SourceLocation.at(loc) end end diff --git a/lib/absinthe/language/operation_definition.ex b/lib/absinthe/language/operation_definition.ex index ffd4d28254..b47b782621 100644 --- a/lib/absinthe/language/operation_definition.ex +++ 
b/lib/absinthe/language/operation_definition.ex @@ -8,7 +8,8 @@ defmodule Absinthe.Language.OperationDefinition do variable_definitions: [], directives: [], selection_set: nil, - loc: %{start_line: nil} + shorthand: false, + loc: %{line: nil} @type t :: %__MODULE__{ operation: :query | :mutation | :subscription, @@ -16,6 +17,7 @@ defmodule Absinthe.Language.OperationDefinition do variable_definitions: [Language.VariableDefinition.t()], directives: [Language.Directive.t()], selection_set: Language.SelectionSet.t(), + shorthand: boolean(), loc: Language.loc_t() } @@ -27,23 +29,11 @@ defmodule Absinthe.Language.OperationDefinition do directives: Absinthe.Blueprint.Draft.convert(node.directives, doc), variable_definitions: Blueprint.Draft.convert(node.variable_definitions, doc), selections: Blueprint.Draft.convert(node.selection_set.selections, doc), - source_location: source_location(node.loc) + source_location: source_location(node) } end - defp source_location(nil) do - nil - end - - defp source_location(%{start_line: number}) do - Blueprint.Document.SourceLocation.at(number) - end - end - - defimpl Absinthe.Traversal.Node do - def children(node, _schema) do - [node.variable_definitions, node.directives, List.wrap(node.selection_set)] - |> Enum.concat() - end + defp source_location(%{loc: nil}), do: nil + defp source_location(%{loc: loc}), do: Blueprint.SourceLocation.at(loc) end end diff --git a/lib/absinthe/language/render.ex b/lib/absinthe/language/render.ex new file mode 100644 index 0000000000..7abe22260e --- /dev/null +++ b/lib/absinthe/language/render.ex @@ -0,0 +1,279 @@ +defmodule Absinthe.Language.Render do + @moduledoc false + import Inspect.Algebra + import Absinthe.Utils.Render + + @line_width 120 + + def inspect(term, %{pretty: true}) do + term + |> render() + |> concat(line()) + |> format(@line_width) + |> to_string + end + + def inspect(term, options) do + Inspect.Any.inspect(term, options) + end + + defp render(bp) + + defp 
render(%Absinthe.Language.Document{} = doc) do + doc.definitions |> Enum.map(&render/1) |> join([line(), line()]) + end + + defp render(%Absinthe.Language.OperationDefinition{} = op) do + if op.shorthand do + concat(operation_definition(op), block(render_list(op.selection_set.selections))) + else + glue( + concat([to_string(op.operation), operation_definition(op)]), + block(render_list(op.selection_set.selections)) + ) + end + end + + defp render(%Absinthe.Language.Field{} = field) do + case field.selection_set do + nil -> + field_definition(field) + + selection_set -> + concat([ + field_definition(field), + " ", + block(render_list(selection_set.selections)) + ]) + end + end + + defp render(%Absinthe.Language.VariableDefinition{} = variable_definition) do + concat([ + "$", + variable_definition.variable.name, + ": ", + render(variable_definition.type), + default_value(variable_definition) + ]) + end + + defp render(%Absinthe.Language.NamedType{} = named_type) do + named_type.name + end + + defp render(%Absinthe.Language.NonNullType{} = non_null) do + concat(render(non_null.type), "!") + end + + defp render(%Absinthe.Language.Argument{} = argument) do + concat([argument.name, ": ", render(argument.value)]) + end + + defp render(%Absinthe.Language.Directive{} = directive) do + concat([" @", directive.name, arguments(directive.arguments)]) + end + + defp render(%Absinthe.Language.FragmentSpread{} = spread) do + concat(["...", spread.name, directives(spread.directives)]) + end + + defp render(%Absinthe.Language.InlineFragment{} = fragment) do + concat([ + "...", + inline_fragment_name(fragment), + directives(fragment.directives), + " ", + block(render_list(fragment.selection_set.selections)) + ]) + end + + defp render(%Absinthe.Language.Variable{} = variable) do + concat("$", variable.name) + end + + defp render(%Absinthe.Language.StringValue{value: value}) do + render_string_value(value) + end + + defp render(%Absinthe.Language.FloatValue{value: value}) do + 
"#{value}" + end + + defp render(%Absinthe.Language.ObjectField{} = object_field) do + concat([object_field.name, ": ", render(object_field.value)]) + end + + defp render(%Absinthe.Language.ObjectValue{fields: fields}) do + fields = fields |> Enum.map(&render(&1)) |> join(", ") + + concat(["{ ", fields, " }"]) + end + + defp render(%Absinthe.Language.NullValue{}) do + "null" + end + + defp render(%Absinthe.Language.ListType{type: type}) do + concat(["[", render(type), "]"]) + end + + defp render(%Absinthe.Language.ListValue{values: values}) do + values = values |> Enum.map(&render(&1)) |> join(", ") + + concat(["[", values, "]"]) + end + + defp render(%Absinthe.Language.Fragment{} = fragment) do + concat([ + "fragment ", + fragment.name, + " on ", + fragment.type_condition.name, + directives(fragment.directives) + ]) + |> block(render_list(fragment.selection_set.selections)) + end + + defp render(%{value: value}) do + to_string(value) + end + + defp operation_definition(%{name: nil} = op) do + case op.variable_definitions do + [] -> + concat( + variable_definitions(op.variable_definitions), + directives(op.directives) + ) + + _ -> + operation_definition(%{op | name: ""}) + end + end + + defp operation_definition(%{name: name} = op) do + concat([" ", name, variable_definitions(op.variable_definitions), directives(op.directives)]) + end + + defp variable_definitions([]) do + empty() + end + + defp variable_definitions(definitions) do + definitions = Enum.map(definitions, &render(&1)) + + concat([ + "(", + join(definitions, ", "), + ")" + ]) + end + + defp field_definition(field) do + concat([ + field_alias(field), + field.name, + arguments(field.arguments), + directives(field.directives) + ]) + end + + defp default_value(%{default_value: nil}) do + empty() + end + + defp default_value(%{default_value: value}) do + concat(" = ", render(value)) + end + + defp directives([]) do + empty() + end + + defp directives(directives) do + directives |> Enum.map(&render(&1)) |> 
join(" ") + end + + defp inline_fragment_name(%{type_condition: nil}) do + empty() + end + + defp inline_fragment_name(%{type_condition: %{name: name}}) do + " on #{name}" + end + + defp field_alias(%{alias: nil}) do + empty() + end + + defp field_alias(%{alias: alias}) do + concat(alias, ": ") + end + + defp arguments([]) do + empty() + end + + defp arguments(args) do + group( + glue( + nest( + glue( + "(", + "", + render_list(args, ", ") + ), + 2, + :break + ), + "", + ")" + ) + ) + end + + # Helpers + + defp block(docs) do + do_block(docs) + end + + defp block(:doc_nil, docs) do + do_block(docs) + end + + defp block(name, docs) do + glue( + name, + do_block(docs) + ) + end + + defp do_block(docs) do + group( + glue( + nest( + force_unfit( + glue( + "{", + "", + docs + ) + ), + 2, + :always + ), + "", + "}" + ) + ) + end + + defp render_list(items, separator \\ line()) do + List.foldr(items, :doc_nil, fn + item, :doc_nil -> render(item) + item, acc -> concat([render(item)] ++ [separator] ++ [acc]) + end) + end +end diff --git a/lib/absinthe/language/scalar_type_definition.ex b/lib/absinthe/language/scalar_type_definition.ex index 5594934bf3..318578d64b 100644 --- a/lib/absinthe/language/scalar_type_definition.ex +++ b/lib/absinthe/language/scalar_type_definition.ex @@ -4,11 +4,13 @@ defmodule Absinthe.Language.ScalarTypeDefinition do alias Absinthe.{Blueprint, Language} defstruct name: nil, + description: nil, directives: [], - loc: %{start_line: nil} + loc: %{line: nil} @type t :: %__MODULE__{ name: String.t(), + description: nil | String.t(), directives: [Language.Directive.t()], loc: Language.t() } @@ -17,8 +19,14 @@ defmodule Absinthe.Language.ScalarTypeDefinition do def convert(node, doc) do %Blueprint.Schema.ScalarTypeDefinition{ name: node.name, - directives: Absinthe.Blueprint.Draft.convert(node.directives, doc) + description: node.description, + identifier: Macro.underscore(node.name) |> String.to_atom(), + directives: 
Absinthe.Blueprint.Draft.convert(node.directives, doc), + source_location: source_location(node) } end + + defp source_location(%{loc: nil}), do: nil + defp source_location(%{loc: loc}), do: Blueprint.SourceLocation.at(loc) end end diff --git a/lib/absinthe/language/schema_declaration.ex b/lib/absinthe/language/schema_declaration.ex new file mode 100644 index 0000000000..7b7b330f4f --- /dev/null +++ b/lib/absinthe/language/schema_declaration.ex @@ -0,0 +1,31 @@ +defmodule Absinthe.Language.SchemaDeclaration do + @moduledoc false + + alias Absinthe.{Blueprint, Language} + + defstruct description: nil, + directives: [], + fields: [], + loc: %{line: nil} + + @type t :: %__MODULE__{ + description: nil | String.t(), + directives: [Language.Directive.t()], + fields: [Language.FieldDefinition.t()], + loc: Language.loc_t() + } + + defimpl Blueprint.Draft do + def convert(node, doc) do + %Blueprint.Schema.SchemaDeclaration{ + description: node.description, + field_definitions: Absinthe.Blueprint.Draft.convert(node.fields, doc), + directives: Absinthe.Blueprint.Draft.convert(node.directives, doc), + source_location: source_location(node) + } + end + + defp source_location(%{loc: nil}), do: nil + defp source_location(%{loc: loc}), do: Blueprint.SourceLocation.at(loc) + end +end diff --git a/lib/absinthe/language/schema_definition.ex b/lib/absinthe/language/schema_definition.ex index 146a61ca83..ec29ba92b8 100644 --- a/lib/absinthe/language/schema_definition.ex +++ b/lib/absinthe/language/schema_definition.ex @@ -3,11 +3,13 @@ defmodule Absinthe.Language.SchemaDefinition do alias Absinthe.{Blueprint, Language} - defstruct directives: [], + defstruct description: nil, + directives: [], fields: [], - loc: %{start_line: nil} + loc: %{line: nil} @type t :: %__MODULE__{ + description: nil | String.t(), directives: [Language.Directive.t()], fields: [Language.FieldDefinition.t()], loc: Language.loc_t() @@ -16,9 +18,14 @@ defmodule Absinthe.Language.SchemaDefinition do defimpl 
Blueprint.Draft do def convert(node, doc) do %Blueprint.Schema.SchemaDefinition{ - fields: Absinthe.Blueprint.Draft.convert(node.fields, doc), - directives: Absinthe.Blueprint.Draft.convert(node.directives, doc) + description: node.description, + type_definitions: Absinthe.Blueprint.Draft.convert(node.fields, doc), + directives: Absinthe.Blueprint.Draft.convert(node.directives, doc), + source_location: source_location(node) } end + + defp source_location(%{loc: nil}), do: nil + defp source_location(%{loc: loc}), do: Blueprint.SourceLocation.at(loc) end end diff --git a/lib/absinthe/language/selection_set.ex b/lib/absinthe/language/selection_set.ex index bd1cce063c..adcfed8b2c 100644 --- a/lib/absinthe/language/selection_set.ex +++ b/lib/absinthe/language/selection_set.ex @@ -4,7 +4,7 @@ defmodule Absinthe.Language.SelectionSet do alias Absinthe.Language defstruct selections: [], - loc: %{start_line: nil} + loc: %{line: nil} @type t :: %__MODULE__{ selections: [ @@ -12,10 +12,4 @@ defmodule Absinthe.Language.SelectionSet do ], loc: Language.loc_t() } - - defimpl Absinthe.Traversal.Node do - def children(node, _schema) do - node.selections - end - end end diff --git a/lib/absinthe/language/string_value.ex b/lib/absinthe/language/string_value.ex index bdabbeb3af..e6a5417893 100644 --- a/lib/absinthe/language/string_value.ex +++ b/lib/absinthe/language/string_value.ex @@ -22,6 +22,6 @@ defmodule Absinthe.Language.StringValue do end defp source_location(%{loc: nil}), do: nil - defp source_location(%{loc: loc}), do: Blueprint.Document.SourceLocation.at(loc.start_line) + defp source_location(%{loc: loc}), do: Blueprint.SourceLocation.at(loc) end end diff --git a/lib/absinthe/language/type_extension_definition.ex b/lib/absinthe/language/type_extension_definition.ex index 40a361fbd1..046566ec19 100644 --- a/lib/absinthe/language/type_extension_definition.ex +++ b/lib/absinthe/language/type_extension_definition.ex @@ -1,13 +1,25 @@ defmodule 
Absinthe.Language.TypeExtensionDefinition do @moduledoc false - alias Absinthe.Language + alias Absinthe.{Language, Blueprint} defstruct definition: nil, - loc: %{start_line: nil} + loc: %{line: nil} @type t :: %__MODULE__{ definition: Language.ObjectTypeDefinition.t(), loc: Language.loc_t() } + + defimpl Blueprint.Draft do + def convert(node, _doc) do + raise Absinthe.Schema.Notation.Error, + """ + \n + SDL Compilation failed: + ----------------------- + Keyword `extend` is not yet supported (#{inspect(node.loc)}) + """ + end + end end diff --git a/lib/absinthe/language/union_type_definition.ex b/lib/absinthe/language/union_type_definition.ex index e8e154c266..ccf787c0d0 100644 --- a/lib/absinthe/language/union_type_definition.ex +++ b/lib/absinthe/language/union_type_definition.ex @@ -4,12 +4,14 @@ defmodule Absinthe.Language.UnionTypeDefinition do alias Absinthe.{Blueprint, Language} defstruct name: nil, + description: nil, directives: [], types: [], - loc: %{start_line: nil} + loc: %{line: nil} @type t :: %__MODULE__{ name: String.t(), + description: nil | String.t(), directives: [Language.Directive.t()], types: [Language.NamedType.t()], loc: Language.loc_t() @@ -19,9 +21,15 @@ defmodule Absinthe.Language.UnionTypeDefinition do def convert(node, doc) do %Blueprint.Schema.UnionTypeDefinition{ name: node.name, + description: node.description, + identifier: Macro.underscore(node.name) |> String.to_atom(), types: Absinthe.Blueprint.Draft.convert(node.types, doc), - directives: Absinthe.Blueprint.Draft.convert(node.directives, doc) + directives: Absinthe.Blueprint.Draft.convert(node.directives, doc), + source_location: source_location(node) } end + + defp source_location(%{loc: nil}), do: nil + defp source_location(%{loc: loc}), do: Blueprint.SourceLocation.at(loc) end end diff --git a/lib/absinthe/language/variable.ex b/lib/absinthe/language/variable.ex index b987033ee7..a16bc0c135 100644 --- a/lib/absinthe/language/variable.ex +++ 
b/lib/absinthe/language/variable.ex @@ -4,7 +4,7 @@ defmodule Absinthe.Language.Variable do alias Absinthe.{Blueprint, Language} defstruct name: nil, - loc: %{start_line: nil} + loc: %{line: nil} @type t :: %__MODULE__{ name: String.t(), @@ -20,6 +20,6 @@ defmodule Absinthe.Language.Variable do end defp source_location(%{loc: nil}), do: nil - defp source_location(%{loc: loc}), do: Blueprint.Document.SourceLocation.at(loc.start_line) + defp source_location(%{loc: loc}), do: Blueprint.SourceLocation.at(loc) end end diff --git a/lib/absinthe/language/variable_definition.ex b/lib/absinthe/language/variable_definition.ex index cba7e7eeb0..882ddeea2c 100644 --- a/lib/absinthe/language/variable_definition.ex +++ b/lib/absinthe/language/variable_definition.ex @@ -5,8 +5,9 @@ defmodule Absinthe.Language.VariableDefinition do defstruct variable: nil, type: nil, + directives: [], default_value: nil, - loc: %{start_line: nil} + loc: %{line: nil} @type t :: %__MODULE__{ variable: Language.Variable.t(), @@ -20,9 +21,13 @@ defmodule Absinthe.Language.VariableDefinition do %Blueprint.Document.VariableDefinition{ name: node.variable.name, type: Blueprint.Draft.convert(node.type, doc), + directives: Absinthe.Blueprint.Draft.convert(node.directives, doc), default_value: Blueprint.Draft.convert(node.default_value, doc), - source_location: Blueprint.Document.SourceLocation.at(node.loc.start_line) + source_location: source_location(node) } end + + defp source_location(%{loc: nil}), do: nil + defp source_location(%{loc: loc}), do: Blueprint.SourceLocation.at(loc) end end diff --git a/lib/absinthe/lexer.ex b/lib/absinthe/lexer.ex new file mode 100644 index 0000000000..9a79f8fadd --- /dev/null +++ b/lib/absinthe/lexer.ex @@ -0,0 +1,370 @@ +defmodule Absinthe.Lexer do + import NimbleParsec + + # Codepoints + @horizontal_tab 0x0009 + @newline 0x000A + @carriage_return 0x000D + @space 0x0020 + @unicode_bom 0xFEFF + + # SourceCharacter :: /[\u0009\u000A\u000D\u0020-\uFFFF]/ + + any_unicode = 
utf8_char([]) + + # ## Ignored Tokens + + # UnicodeBOM :: "Byte Order Mark (U+FEFF)" + unicode_bom = utf8_char([@unicode_bom]) + + # WhiteSpace :: + # - "Horizontal Tab (U+0009)" + # - "Space (U+0020)" + whitespace = + ascii_char([ + @horizontal_tab, + @space + ]) + + # LineTerminator :: + # - "New Line (U+000A)" + # - "Carriage Return (U+000D)" [ lookahead ! "New Line (U+000A)" ] + # - "Carriage Return (U+000D)" "New Line (U+000A)" + line_terminator = + choice([ + ascii_char([@newline]), + ascii_char([@carriage_return]) + |> optional(ascii_char([@newline])) + ]) + + # Comment :: `#` CommentChar* + # CommentChar :: SourceCharacter but not LineTerminator + comment = + string("#") + |> repeat_while(any_unicode, {:not_line_terminator, []}) + + # Comma :: , + comma = ascii_char([?,]) + + # Ampersand :: & + ampersand = ascii_char([?&]) + + # Ignored :: + # - UnicodeBOM + # - WhiteSpace + # - LineTerminator + # - Comment + # - Comma + # - Ampersand + ignored = + choice([ + unicode_bom, + whitespace, + line_terminator, + comment, + comma, + ampersand + ]) + + # ## Lexical Tokens + + # - Punctuator + # - Name + # - IntValue + # - FloatValue + # - StringValue + + punctuator = + choice([ + ascii_char([ + ?!, + ?$, + ?(, + ?), + ?:, + ?=, + ?@, + ?[, + ?], + ?{, + ?|, + ?} + ]), + times(ascii_char([?.]), 3) + ]) + |> post_traverse({:atom_token, []}) + + boolean_value_or_name_or_reserved_word = + ascii_char([?_, ?A..?Z, ?a..?z]) + |> repeat(ascii_char([?_, ?0..?9, ?A..?Z, ?a..?z])) + |> post_traverse({:boolean_value_or_name_or_reserved_word, []}) + + # NegativeSign :: - + negative_sign = ascii_char([?-]) + + # Digit :: one of 0 1 2 3 4 5 6 7 8 9 + digit = ascii_char([?0..?9]) + + # NonZeroDigit :: Digit but not `0` + non_zero_digit = ascii_char([?1..?9]) + + # IntegerPart :: + # - NegativeSign? 0 + # - NegativeSign? 
NonZeroDigit Digit* + integer_part = + optional(negative_sign) + |> choice([ + ascii_char([?0]), + non_zero_digit |> repeat(digit) + ]) + + # IntValue :: IntegerPart + int_value = + empty() + |> concat(integer_part) + |> post_traverse({:labeled_token, [:int_value]}) + + # FractionalPart :: . Digit+ + fractional_part = + ascii_char([?.]) + |> times(digit, min: 1) + + # ExponentIndicator :: one of `e` `E` + exponent_indicator = ascii_char([?e, ?E]) + + # Sign :: one of + - + sign = ascii_char([?+, ?-]) + + # ExponentPart :: ExponentIndicator Sign? Digit+ + exponent_part = + exponent_indicator + |> optional(sign) + |> times(digit, min: 1) + + # FloatValue :: + # - IntegerPart FractionalPart + # - IntegerPart ExponentPart + # - IntegerPart FractionalPart ExponentPart + float_value = + choice([ + integer_part |> concat(fractional_part) |> concat(exponent_part), + integer_part |> post_traverse({:fill_mantissa, []}) |> concat(exponent_part), + integer_part |> concat(fractional_part) + ]) + |> post_traverse({:labeled_token, [:float_value]}) + + # EscapedUnicode :: /[0-9A-Fa-f]{4}/ + escaped_unicode = + times(ascii_char([?0..?9, ?A..?F, ?a..?f]), 4) + |> post_traverse({:unescape_unicode, []}) + + # EscapedCharacter :: one of `"` \ `/` b f n r t + escaped_character = + choice([ + ascii_char([?"]), + ascii_char([?\\]), + ascii_char([?/]), + ascii_char([?b]) |> replace(?\b), + ascii_char([?f]) |> replace(?\f), + ascii_char([?n]) |> replace(?\n), + ascii_char([?r]) |> replace(?\r), + ascii_char([?t]) |> replace(?\t) + ]) + + # StringCharacter :: + # - SourceCharacter but not `"` or \ or LineTerminator + # - \u EscapedUnicode + # - \ EscapedCharacter + string_character = + choice([ + ignore(string(~S(\u))) |> concat(escaped_unicode), + ignore(ascii_char([?\\])) |> concat(escaped_character), + any_unicode + ]) + + # BlockStringCharacter :: + # - SourceCharacter but not `"""` or `\"""` + # - `\"""` + + # Note: Block string values are interpreted to exclude blank initial and 
trailing + # lines and uniform indentation with {BlockStringValue()}. + block_string_character = + choice([ + ignore(ascii_char([?\\])) |> concat(times(ascii_char([?"]), 3)), + any_unicode + ]) + + # StringValue :: + # - `"` StringCharacter* `"` + # - `"""` BlockStringCharacter* `"""` + string_value = + ignore(ascii_char([?"])) + |> post_traverse({:mark_string_start, []}) + |> repeat_while(string_character, {:not_end_of_quote, []}) + |> ignore(ascii_char([?"])) + |> post_traverse({:string_value_token, []}) + + block_string_value = + ignore(string(~S("""))) + |> post_traverse({:mark_block_string_start, []}) + |> repeat_while(block_string_character, {:not_end_of_block_quote, []}) + |> ignore(string(~S("""))) + |> post_traverse({:block_string_value_token, []}) + + defp not_end_of_quote(<>, context, _, _) do + {:halt, context} + end + + defp not_end_of_quote(rest, context, current_line, current_offset) do + not_line_terminator(rest, context, current_line, current_offset) + end + + defp not_end_of_block_quote(<>, context, _, _) do + {:halt, context} + end + + defp not_end_of_block_quote(_, context, _, _) do + {:cont, context} + end + + @spec tokenize(binary()) :: {:ok, [any()]} | {:error, binary(), {integer(), non_neg_integer()}} + def tokenize(input) do + lines = String.split(input, ~r/\r?\n/) + + case do_tokenize(input) do + {:ok, tokens, "", _, _, _} -> + tokens = Enum.map(tokens, &convert_token_column(&1, lines)) + {:ok, tokens} + + {:ok, _, rest, _, {line, line_offset}, byte_offset} -> + byte_column = byte_offset - line_offset + 1 + {:error, rest, byte_loc_to_char_loc({line, byte_column}, lines)} + end + end + + defp convert_token_column({ident, loc, data}, lines) do + {ident, byte_loc_to_char_loc(loc, lines), data} + end + + defp convert_token_column({ident, loc}, lines) do + {ident, byte_loc_to_char_loc(loc, lines)} + end + + defp byte_loc_to_char_loc({line, byte_col}, lines) do + current_line = Enum.at(lines, line - 1) + byte_prefix = binary_part(current_line, 
0, byte_col) + char_col = String.length(byte_prefix) + {line, char_col} + end + + @spec do_tokenize(binary()) :: + {:ok, [any()], binary(), map(), {pos_integer(), pos_integer()}, pos_integer()} + defparsec( + :do_tokenize, + repeat( + choice([ + ignore(ignored), + comment, + punctuator, + block_string_value, + string_value, + float_value, + int_value, + boolean_value_or_name_or_reserved_word + ]) + ) + ) + + defp fill_mantissa(_rest, raw, context, _, _), do: {'0.' ++ raw, context} + + defp unescape_unicode(_rest, content, context, _loc, _) do + code = content |> Enum.reverse() + value = :erlang.list_to_integer(code, 16) + binary = :unicode.characters_to_binary([value]) + {[binary], context} + end + + @boolean_words ~w( + true + false + ) |> Enum.map(&String.to_charlist/1) + + @reserved_words ~w( + directive + enum + extend + fragment + implements + input + interface + mutation + null + on + ON + query + repeatable + scalar + schema + subscription + type + union + ) |> Enum.map(&String.to_charlist/1) + + defp boolean_value_or_name_or_reserved_word(rest, chars, context, loc, byte_offset) do + value = chars |> Enum.reverse() + do_boolean_value_or_name_or_reserved_word(rest, value, context, loc, byte_offset) + end + + defp do_boolean_value_or_name_or_reserved_word(_rest, value, context, loc, byte_offset) + when value in @boolean_words do + {[{:boolean_value, line_and_column(loc, byte_offset, length(value)), value}], context} + end + + defp do_boolean_value_or_name_or_reserved_word(_rest, value, context, loc, byte_offset) + when value in @reserved_words do + token_name = value |> List.to_atom() + {[{token_name, line_and_column(loc, byte_offset, length(value))}], context} + end + + defp do_boolean_value_or_name_or_reserved_word(_rest, value, context, loc, byte_offset) do + {[{:name, line_and_column(loc, byte_offset, length(value)), value}], context} + end + + defp labeled_token(_rest, chars, context, loc, byte_offset, token_name) do + value = chars |> Enum.reverse() + 
{[{token_name, line_and_column(loc, byte_offset, length(value)), value}], context} + end + + defp mark_string_start(_rest, chars, context, loc, byte_offset) do + {[chars], Map.put(context, :token_location, line_and_column(loc, byte_offset, 1))} + end + + defp mark_block_string_start(_rest, _chars, context, loc, byte_offset) do + {[], Map.put(context, :token_location, line_and_column(loc, byte_offset, 3))} + end + + defp block_string_value_token(_rest, chars, context, _loc, _byte_offset) do + value = '"""' ++ (chars |> Enum.reverse()) ++ '"""' + {[{:block_string_value, context.token_location, value}], Map.delete(context, :token_location)} + end + + defp string_value_token(_rest, chars, context, _loc, _byte_offset) do + value = '"' ++ tl(chars |> Enum.reverse()) ++ '"' + {[{:string_value, context.token_location, value}], Map.delete(context, :token_location)} + end + + defp atom_token(_rest, chars, context, loc, byte_offset) do + value = chars |> Enum.reverse() + token_atom = value |> List.to_atom() + {[{token_atom, line_and_column(loc, byte_offset, length(value))}], context} + end + + def line_and_column({line, line_offset}, byte_offset, column_correction) do + column = byte_offset - line_offset - column_correction + 1 + {line, column} + end + + defp not_line_terminator(<>, context, _, _), do: {:halt, context} + defp not_line_terminator(<>, context, _, _), do: {:halt, context} + defp not_line_terminator(_, context, _, _), do: {:cont, context} +end diff --git a/lib/absinthe/logger.ex b/lib/absinthe/logger.ex index dd05383f59..ba364dfd54 100644 --- a/lib/absinthe/logger.ex +++ b/lib/absinthe/logger.ex @@ -21,6 +21,9 @@ defmodule Absinthe.Logger do includes the terms `token`, `password`, or `secret`. The match is case sensitive. + Note that filtering only applies to GraphQL variables - the query itself is + logged before any parsing happens. + The default is `#{inspect(@default_filter_variables)}`. 
## Pipeline display @@ -77,7 +80,7 @@ defmodule Absinthe.Logger do end @doc false - @spec document(Absinthe.Pipeline.data_t()) :: iolist + @spec document(Absinthe.Pipeline.data_t()) :: binary def document(value) when value in ["", nil] do "[EMPTY]" end diff --git a/lib/absinthe/middleware.ex b/lib/absinthe/middleware.ex index 42c68d0732..38c58b2e2d 100644 --- a/lib/absinthe/middleware.ex +++ b/lib/absinthe/middleware.ex @@ -48,7 +48,7 @@ defmodule Absinthe.Middleware do Middleware can be placed on a field in three different ways: 1. Using the `Absinthe.Schema.Notation.middleware/2` - macro used inside a field definition + macro used inside a field definition. 2. Using the `middleware/3` callback in your schema. 3. Returning a `{:middleware, middleware_spec, config}` tuple from a resolution function. @@ -105,7 +105,7 @@ defmodule Absinthe.Middleware do end ``` - ## The `middleware/3` callback. + ## The `middleware/3` callback `middleware/3` is a function callback on a schema. When you `use Absinthe.Schema` a default implementation of this function is placed in your @@ -138,7 +138,7 @@ defmodule Absinthe.Middleware do ``` Given a document like: - ``` + ```graphql { lookupUser { name }} ``` @@ -150,8 +150,8 @@ defmodule Absinthe.Middleware do ``` YourSchema.middleware([{Absinthe.Resolution, #Function<20.52032458/0>}], lookup_user_field_of_root_query_object, root_query_object) - YourSchema.middleware([{Absinthe.Middleware.Map.Get, :name}], name_field_of_user, user_object) - YourSchema.middleware([{Absinthe.Middleware.Map.Get, :age}], age_field_of_user, user_object) + YourSchema.middleware([{Absinthe.Middleware.MapGet, :name}], name_field_of_user, user_object) + YourSchema.middleware([{Absinthe.Middleware.MapGet, :age}], age_field_of_user, user_object) ``` In the latter two cases we see that the middleware list is empty. 
In the first @@ -160,9 +160,9 @@ defmodule Absinthe.Middleware do ### Default Middleware - One use of `middleware/3` is setting the default middleware on a field + One use of `middleware/3` is setting the default middleware on a field. By default middleware is placed on a - field that looks up a field by its snake case identifier, ie `:resource_name` + field that looks up a field by its snake case identifier, ie `:resource_name`. Here is an example of how to change the default to use a camel cased string, IE, "resourceName". @@ -213,7 +213,7 @@ defmodule Absinthe.Middleware do function. If no middleware was applied in the function and it also returned `[]`, THEN Absinthe would apply the default. - This made it very easy to accidently break your schema if you weren't + This made it very easy to accidentally break your schema if you weren't particularly careful with your pattern matching. Now the defaults are applied FIRST by absinthe, and THEN passed to `middleware/3`. Consequently, the middleware list argument should always have at least one value. This is also @@ -277,4 +277,52 @@ defmodule Absinthe.Middleware do was passed to the `middleware` call that setup the middleware. 
""" @callback call(Absinthe.Resolution.t(), term) :: Absinthe.Resolution.t() + + @doc false + def shim(res, {object, field, middleware}) do + schema = res.schema + object = Absinthe.Schema.lookup_type(schema, object) + field = Map.fetch!(object.fields, field) + + middleware = expand(schema, middleware, field, object) + + %{res | middleware: middleware} + end + + @doc "For testing and inspection purposes" + def unshim([{{__MODULE__, :shim}, {object, field, middleware}}], schema) do + object = Absinthe.Schema.lookup_type(schema, object) + field = Map.fetch!(object.fields, field) + expand(schema, middleware, field, object) + end + + @doc false + def expand(schema, middleware, field, object) do + expanded = + middleware + |> Enum.flat_map(&get_functions/1) + |> Absinthe.Schema.Notation.__ensure_middleware__(field, object) + + case middleware do + [{:ref, Absinthe.Phase.Schema.Introspection, _}] -> + expanded + + [{:ref, Absinthe.Type.BuiltIns.Introspection, _}] -> + expanded + + [{:ref, Absinthe.Phase.Schema.DeprecatedDirectiveFields, _}] -> + expanded + + _ -> + schema.middleware(expanded, field, object) + end + end + + defp get_functions({:ref, module, identifier}) do + module.__absinthe_function__(identifier, :middleware) + end + + defp get_functions(val) do + List.wrap(val) + end end diff --git a/lib/absinthe/middleware/async.ex b/lib/absinthe/middleware/async.ex index 2e0cae8698..f5003ce17f 100644 --- a/lib/absinthe/middleware/async.ex +++ b/lib/absinthe/middleware/async.ex @@ -23,7 +23,7 @@ defmodule Absinthe.Middleware.Async do resolve fn _, _, _ -> task = Task.async(fn -> {:ok, long_time_consuming_function()} - end + end) {:middleware, #{__MODULE__}, task} end end @@ -50,8 +50,11 @@ defmodule Absinthe.Middleware.Async do # This function inserts additional middleware into the remaining middleware # stack for this field. On the next resolution pass, we need to `Task.await` the # task so we have actual data. Thus, we prepend this module to the middleware stack. 
- def call(%{state: :unresolved} = res, {fun, opts}) do - task_data = {Task.async(fun), opts} + def call(%{state: :unresolved} = res, {fun, opts}) when is_function(fun), + do: call(res, {Task.async(fun), opts}) + + def call(%{state: :unresolved} = res, {task, opts}) do + task_data = {task, opts} %{ res @@ -61,6 +64,8 @@ defmodule Absinthe.Middleware.Async do } end + def call(%{state: :unresolved} = res, %Task{} = task), do: call(res, {task, []}) + # This is the clause that gets called on the second pass. There's very little # to do here. We just need to await the task started in the previous pass. # @@ -68,7 +73,7 @@ defmodule Absinthe.Middleware.Async do # we handle the different tuple results. # # The `put_result` function handles setting the appropriate state. - # If the result is an `{:ok, value} | {:error, reasoon}` tuple it will set + # If the result is an `{:ok, value} | {:error, reason}` tuple it will set # the state to `:resolved`, and if it is another middleware tuple it will # set the state to unresolved. def call(%{state: :suspended} = res, {task, opts}) do diff --git a/lib/absinthe/middleware/batch.ex b/lib/absinthe/middleware/batch.ex index 3822c3bfdd..8e1f2add83 100644 --- a/lib/absinthe/middleware/batch.ex +++ b/lib/absinthe/middleware/batch.ex @@ -55,15 +55,6 @@ defmodule Absinthe.Middleware.Batch do are the second argument to the batching function. - `fn batch_results`: This function takes the results from the batching function. it should return one of the resolution function values. - - Clearly some of this could be derived for ecto functions. Check out the Absinthe.Ecto - library for something that provides this: - - ```elixir - field :author, :user, resolve: assoc(:author) - ``` - - Such a function could be easily built upon the API of this module. 
""" @behaviour Absinthe.Middleware @@ -142,17 +133,59 @@ defmodule Absinthe.Middleware.Batch do input |> Enum.group_by(&elem(&1, 0), &elem(&1, 1)) |> Enum.map(fn {{batch_fun, batch_opts}, batch_data} -> - {batch_opts, - Task.async(fn -> - {batch_fun, call_batch_fun(batch_fun, batch_data)} - end)} + system_time = System.system_time() + start_time_mono = System.monotonic_time() + + task = + Task.async(fn -> + {batch_fun, call_batch_fun(batch_fun, batch_data)} + end) + + metadata = emit_start_event(system_time, batch_fun, batch_opts, batch_data) + + {batch_opts, task, start_time_mono, metadata} end) - |> Map.new(fn {batch_opts, task} -> + |> Map.new(fn {batch_opts, task, start_time_mono, metadata} -> timeout = Keyword.get(batch_opts, :timeout, 5_000) - Task.await(task, timeout) + result = Task.await(task, timeout) + + duration = System.monotonic_time() - start_time_mono + emit_stop_event(duration, metadata, result) + + result end) end + @batch_start [:absinthe, :middleware, :batch, :start] + @batch_stop [:absinthe, :middleware, :batch, :stop] + defp emit_start_event(system_time, batch_fun, batch_opts, batch_data) do + id = :erlang.unique_integer() + + metadata = %{ + id: id, + telemetry_span_context: id, + batch_fun: batch_fun, + batch_opts: batch_opts, + batch_data: batch_data + } + + :telemetry.execute( + @batch_start, + %{system_time: system_time}, + metadata + ) + + metadata + end + + defp emit_stop_event(duration, metadata, result) do + :telemetry.execute( + @batch_stop, + %{duration: duration}, + Map.put(metadata, :result, result) + ) + end + defp call_batch_fun({module, fun}, batch_data) do call_batch_fun({module, fun, []}, batch_data) end diff --git a/lib/absinthe/middleware/dataloader.ex b/lib/absinthe/middleware/dataloader.ex index 63b56e108c..2f7b17b2b7 100644 --- a/lib/absinthe/middleware/dataloader.ex +++ b/lib/absinthe/middleware/dataloader.ex @@ -3,6 +3,7 @@ if Code.ensure_loaded?(Dataloader) do @behaviour Absinthe.Middleware @behaviour Absinthe.Plugin 
+ @impl Absinthe.Plugin def before_resolution(%{context: context} = exec) do context = with %{loader: loader} <- context do @@ -12,9 +13,12 @@ if Code.ensure_loaded?(Dataloader) do %{exec | context: context} end + @impl Absinthe.Middleware def call(%{state: :unresolved} = resolution, {loader, callback}) do if !Dataloader.pending_batches?(loader) do - get_result(resolution, callback) + resolution.context.loader + |> put_in(loader) + |> get_result(callback) else %{ resolution @@ -34,10 +38,12 @@ if Code.ensure_loaded?(Dataloader) do Absinthe.Resolution.put_result(resolution, value) end + @impl Absinthe.Plugin def after_resolution(exec) do exec end + @impl Absinthe.Plugin def pipeline(pipeline, exec) do with %{loader: loader} <- exec.context, true <- Dataloader.pending_batches?(loader) do diff --git a/lib/absinthe/middleware/telemetry.ex b/lib/absinthe/middleware/telemetry.ex new file mode 100644 index 0000000000..e85e4c9c72 --- /dev/null +++ b/lib/absinthe/middleware/telemetry.ex @@ -0,0 +1,60 @@ +defmodule Absinthe.Middleware.Telemetry do + @moduledoc """ + Gather and report telemetry about an individual field resolution + """ + @field_start [:absinthe, :resolve, :field, :start] + @field_stop [:absinthe, :resolve, :field, :stop] + + @behaviour Absinthe.Middleware + + @impl Absinthe.Middleware + def call(resolution, _) do + id = :erlang.unique_integer() + system_time = System.system_time() + start_time_mono = System.monotonic_time() + + :telemetry.execute( + @field_start, + %{system_time: system_time}, + %{id: id, telemetry_span_context: id, resolution: resolution} + ) + + %{ + resolution + | middleware: + resolution.middleware ++ + [ + {{__MODULE__, :on_complete}, + %{ + id: id, + start_time_mono: start_time_mono, + middleware: resolution.middleware + }} + ] + } + end + + def on_complete( + %{state: :resolved} = resolution, + %{ + id: id, + start_time_mono: start_time_mono, + middleware: middleware + } + ) do + end_time_mono = System.monotonic_time() + + 
:telemetry.execute( + @field_stop, + %{duration: end_time_mono - start_time_mono}, + %{ + id: id, + telemetry_span_context: id, + middleware: middleware, + resolution: resolution + } + ) + + resolution + end +end diff --git a/lib/absinthe/phase.ex b/lib/absinthe/phase.ex index fac23d2edd..18b26af833 100644 --- a/lib/absinthe/phase.ex +++ b/lib/absinthe/phase.ex @@ -14,7 +14,8 @@ defmodule Absinthe.Phase do | {:jump, any, t} | {:insert, any, t | [t]} | {:replace, any, t | [t]} - | {:error, String.t()} + | {:error, any} + | {:record_phases, any, (any, any -> any)} alias __MODULE__ alias Absinthe.Blueprint diff --git a/lib/absinthe/phase/debug.ex b/lib/absinthe/phase/debug.ex index 5f66e651fc..59bb374b7f 100644 --- a/lib/absinthe/phase/debug.ex +++ b/lib/absinthe/phase/debug.ex @@ -8,7 +8,7 @@ defmodule Absinthe.Phase.Debug do @spec run(any, Keyword.t()) :: {:ok, Blueprint.t()} def run(input, _options \\ []) do if System.get_env("DEBUG") do - IO.inspect(input) + IO.inspect(input, label: :debug_blueprint_output) end {:ok, input} diff --git a/lib/absinthe/phase/document/arguments/data.ex b/lib/absinthe/phase/document/arguments/data.ex index 0a6d2ae162..87ff4788af 100644 --- a/lib/absinthe/phase/document/arguments/data.ex +++ b/lib/absinthe/phase/document/arguments/data.ex @@ -53,7 +53,12 @@ defmodule Absinthe.Phase.Document.Arguments.Data do def handle_node(%Input.Value{normalized: %Input.Object{fields: fields}} = node) do data = for field <- fields, include_field?(field), into: %{} do - {field.schema_node.__reference__.identifier, field.input_value.data} + # Scalar child nodes will not have a schema_node + if field.schema_node != nil do + {field.schema_node.identifier, field.input_value.data} + else + {field.name, field.input_value.data} + end end %{node | data: data} diff --git a/lib/absinthe/phase/document/arguments/flag_invalid.ex b/lib/absinthe/phase/document/arguments/flag_invalid.ex index f6d1166409..b1792377d0 100644 --- 
a/lib/absinthe/phase/document/arguments/flag_invalid.ex +++ b/lib/absinthe/phase/document/arguments/flag_invalid.ex @@ -5,7 +5,7 @@ defmodule Absinthe.Phase.Document.Arguments.FlagInvalid do # # This is later used by the ArgumentsOfCorrectType phase. - alias Absinthe.{Blueprint, Phase} + alias Absinthe.{Blueprint, Phase, Type} use Absinthe.Phase @@ -34,6 +34,10 @@ defmodule Absinthe.Phase.Document.Arguments.FlagInvalid do check_children(node, node.items |> Enum.map(& &1.normalized), :bad_list) end + defp handle_node(%Blueprint.Input.Object{schema_node: %Type.Scalar{open_ended: true}} = node) do + node + end + defp handle_node(%Blueprint.Input.Object{} = node) do check_children(node, node.fields, :bad_object) end diff --git a/lib/absinthe/phase/document/arguments/parse.ex b/lib/absinthe/phase/document/arguments/parse.ex index e5ed469227..5c1a3a72a0 100644 --- a/lib/absinthe/phase/document/arguments/parse.ex +++ b/lib/absinthe/phase/document/arguments/parse.ex @@ -12,10 +12,6 @@ defmodule Absinthe.Phase.Document.Arguments.Parse do {:ok, result} end - defp handle_node(%{schema_node: nil} = node, _context) do - {:halt, node} - end - defp handle_node(%{normalized: nil} = node, _context) do node end @@ -39,7 +35,8 @@ defmodule Absinthe.Phase.Document.Arguments.Parse do {:error, :non_null} end - defp build_value(normalized, %Type.Scalar{} = schema_node, context) do + defp build_value(%{__struct__: struct} = normalized, %Type.Scalar{} = schema_node, context) + when struct in [Input.Boolean, Input.Float, Input.Integer, Input.String, Input.Null] do case Type.Scalar.parse(schema_node, normalized, context) do :error -> {:error, :bad_parse} @@ -49,6 +46,28 @@ defmodule Absinthe.Phase.Document.Arguments.Parse do end end + defp build_value( + %Input.Object{} = normalized, + %Type.Scalar{open_ended: true} = schema_node, + context + ) do + case Type.Scalar.parse(schema_node, normalized, context) do + :error -> + {:error, :bad_parse} + + {:ok, val} -> + {:ok, val} + end + end + + 
defp build_value(_normalized, %Type.Scalar{}, _context) do + {:error, :bad_parse} + end + + defp build_value(%{value: value} = _normalized, nil = _schema_node, _context) do + {:ok, value} + end + defp build_value(%Input.Null{}, %Type.Enum{}, _) do {:ok, nil} end @@ -67,6 +86,11 @@ defmodule Absinthe.Phase.Document.Arguments.Parse do build_value(normalized, inner_type, context) end + defp build_value(%{__struct__: struct}, %Type.InputObject{}, _) + when struct in [Input.Boolean, Input.Float, Input.Integer, Input.String] do + {:error, :bad_parse} + end + defp build_value(_, _, _) do :not_leaf_node end diff --git a/lib/absinthe/phase/document/arguments/variable_types_match.ex b/lib/absinthe/phase/document/arguments/variable_types_match.ex new file mode 100644 index 0000000000..efa838b485 --- /dev/null +++ b/lib/absinthe/phase/document/arguments/variable_types_match.ex @@ -0,0 +1,192 @@ +defmodule Absinthe.Phase.Document.Arguments.VariableTypesMatch do + @moduledoc false + + # Implements: 5.8.5. All Variable Usages are Allowed + # Specifically, it implements "Variable usages must be compatible with the arguments they are passed to." + # See relevant counter-example: https://spec.graphql.org/draft/#example-2028e + + use Absinthe.Phase + + alias Absinthe.Blueprint + alias Absinthe.Blueprint.Document.{Operation, Fragment} + alias Absinthe.Type + + def run(blueprint, _) do + blueprint = + blueprint + |> check_operations() + |> check_fragments() + + {:ok, blueprint} + end + + def check_operations(%Blueprint{} = blueprint) do + blueprint + |> Map.update!(:operations, fn operations -> + Enum.map(operations, &check_variable_types/1) + end) + end + + # A single fragment may be used by multiple operations. + # Each operation may define its own variables. + # This checks that each fragment is simultaneously consistent with the + # variables defined in each of the operations which use that fragment. 
+ def check_fragments(%Blueprint{} = blueprint) do + blueprint + |> Map.update!(:fragments, fn fragments -> + fragments + |> Enum.map(fn fragment -> + blueprint.operations + |> Enum.filter(&Operation.uses?(&1, fragment)) + |> Enum.reduce(fragment, fn operation, fragment_acc -> + check_variable_types(operation, fragment_acc) + end) + end) + end) + end + + def check_variable_types(%Operation{} = op) do + variable_defs = Map.new(op.variable_definitions, &{&1.name, &1}) + Blueprint.prewalk(op, &check_variable_type(&1, op.name, variable_defs)) + end + + def check_variable_types(%Operation{} = op, %Fragment.Named{} = fragment) do + variable_defs = Map.new(op.variable_definitions, &{&1.name, &1}) + Blueprint.prewalk(fragment, &check_variable_type(&1, op.name, variable_defs)) + end + + defp check_variable_type(%{schema_node: nil} = node, _, _) do + {:halt, node} + end + + defp check_variable_type( + %Absinthe.Blueprint.Input.Argument{ + input_value: %Blueprint.Input.Value{ + raw: %{content: %Blueprint.Input.Variable{} = variable} + } + } = node, + operation_name, + variable_defs + ) do + location_type = node.input_value.schema_node + location_definition = node.schema_node + + case Map.get(variable_defs, variable.name) do + %{schema_node: variable_type} = variable_definition -> + if types_compatible?( + variable_type, + location_type, + variable_definition, + location_definition + ) do + node + else + variable = + put_error( + variable, + error(operation_name, variable, variable_definition, location_type) + ) + + {:halt, put_in(node.input_value.raw.content, variable)} + end + + _ -> + node + end + end + + defp check_variable_type(node, _, _) do + node + end + + def types_compatible?(type, type, _, _) do + true + end + + def types_compatible?( + %Type.NonNull{of_type: nullable_variable_type}, + location_type, + variable_definition, + location_definition + ) do + types_compatible?( + nullable_variable_type, + location_type, + variable_definition, + location_definition + ) + 
end + + def types_compatible?( + %Type.List{of_type: item_variable_type}, + %Type.List{ + of_type: item_location_type + }, + variable_definition, + location_definition + ) do + types_compatible?( + item_variable_type, + item_location_type, + variable_definition, + location_definition + ) + end + + # https://github.com/graphql/graphql-spec/blame/October2021/spec/Section%205%20--%20Validation.md#L1885-L1893 + # if argument has default value the variable can be nullable + def types_compatible?(nullable_type, %Type.NonNull{of_type: nullable_type}, _, %{ + default_value: default_value + }) + when not is_nil(default_value) do + true + end + + # https://github.com/graphql/graphql-spec/blame/main/spec/Section%205%20--%20Validation.md#L2000-L2005 + # This behavior is explicitly supported for compatibility with earlier editions of this specification. + def types_compatible?( + nullable_type, + %Type.NonNull{of_type: nullable_type}, + %{ + default_value: value + }, + _ + ) + when is_struct(value) do + true + end + + def types_compatible?(_, _, _, _) do + false + end + + defp error(operation_name, variable, variable_definition, location_type) do + # need to rely on the type reference here, since the schema node may not be available + # as the type could not exist in the schema + variable_name = Absinthe.Blueprint.TypeReference.name(variable_definition.type) + + %Absinthe.Phase.Error{ + phase: __MODULE__, + message: + error_message( + operation_name, + variable, + variable_name, + Absinthe.Type.name(location_type) + ), + locations: [variable.source_location] + } + end + + def error_message(op, variable, variable_name, location_type) do + start = + case op || "" do + "" -> "Variable" + op -> "In operation `#{op}`, variable" + end + + "#{start} `#{Blueprint.Input.inspect(variable)}` of type `#{variable_name}` found as input to argument of type `#{ + location_type + }`." 
+ end +end diff --git a/lib/absinthe/phase/document/complexity/analysis.ex b/lib/absinthe/phase/document/complexity/analysis.ex index 83d4004afa..5f93e0e497 100644 --- a/lib/absinthe/phase/document/complexity/analysis.ex +++ b/lib/absinthe/phase/document/complexity/analysis.ex @@ -3,7 +3,7 @@ defmodule Absinthe.Phase.Document.Complexity.Analysis do # Analyses document complexity. - alias Absinthe.{Blueprint, Phase, Complexity} + alias Absinthe.{Blueprint, Phase, Complexity, Type} use Absinthe.Phase @@ -49,6 +49,14 @@ defmodule Absinthe.Phase.Document.Complexity.Analysis do %{node | complexity: sum_complexity(fields)} end + def handle_node( + %Blueprint.Document.Fragment.Inline{selections: fields} = node, + _info, + _fragments + ) do + %{node | complexity: sum_complexity(fields)} + end + def handle_node( %Blueprint.Document.Field{ complexity: nil, @@ -74,6 +82,11 @@ defmodule Absinthe.Phase.Document.Complexity.Analysis do # pretty unwieldy. For now, simple types it is. child_complexity = sum_complexity(fields) + schema_node = %{ + schema_node + | complexity: Type.function(schema_node, :complexity) + } + case field_complexity(schema_node, args, child_complexity, info, node) do complexity when is_integer(complexity) and complexity >= 0 -> %{node | complexity: complexity} diff --git a/lib/absinthe/phase/document/complexity/result.ex b/lib/absinthe/phase/document/complexity/result.ex index 4bb27c7c12..11e99b704f 100644 --- a/lib/absinthe/phase/document/complexity/result.ex +++ b/lib/absinthe/phase/document/complexity/result.ex @@ -79,4 +79,8 @@ defmodule Absinthe.Phase.Document.Complexity.Result do defp describe_node(%Blueprint.Document.Fragment.Spread{name: name}) do "Spread #{name}" end + + defp describe_node(%Blueprint.Document.Fragment.Inline{}) do + "Inline Fragment" + end end diff --git a/lib/absinthe/phase/document/execution/resolution.ex b/lib/absinthe/phase/document/execution/resolution.ex index a1b619275a..bde57714c7 100644 --- 
a/lib/absinthe/phase/document/execution/resolution.ex +++ b/lib/absinthe/phase/document/execution/resolution.ex @@ -48,11 +48,27 @@ defmodule Absinthe.Phase.Document.Execution.Resolution do exec = plugins |> run_callbacks(:before_resolution, exec, run_callbacks?) - {result, exec} = + common = + Map.take(exec, [:adapter, :context, :acc, :root_value, :schema, :fragments, :fields_cache]) + + res = + %Absinthe.Resolution{ + path: nil, + source: nil, + parent_type: nil, + middleware: nil, + definition: nil, + arguments: nil + } + |> Map.merge(common) + + {result, res} = exec.result - |> walk_result(operation, operation.schema_node, exec, [operation]) + |> walk_result(operation, operation.schema_node, res, [operation]) |> propagate_null_trimming + exec = update_persisted_fields(exec, res) + exec = plugins |> run_callbacks(:after_resolution, exec, run_callbacks?) %{exec | result: result} @@ -68,48 +84,48 @@ defmodule Absinthe.Phase.Document.Execution.Resolution do This function walks through any existing results. 
If no results are found at a given node, it will call the requisite function to expand and build those results """ - def walk_result(%{fields: nil} = result, bp_node, _schema_type, exec, path) do - {fields, exec} = resolve_fields(bp_node, exec, result.root_value, path) - {%{result | fields: fields}, exec} + def walk_result(%{fields: nil} = result, bp_node, _schema_type, res, path) do + {fields, res} = resolve_fields(bp_node, res, result.root_value, path) + {%{result | fields: fields}, res} end - def walk_result(%{fields: fields} = result, bp_node, schema_type, exec, path) do - {fields, exec} = walk_results(fields, bp_node, schema_type, exec, [0 | path], []) + def walk_result(%{fields: fields} = result, bp_node, schema_type, res, path) do + {fields, res} = walk_results(fields, bp_node, schema_type, res, [0 | path], []) - {%{result | fields: fields}, exec} + {%{result | fields: fields}, res} end - def walk_result(%Result.Leaf{} = result, _, _, exec, _) do - {result, exec} + def walk_result(%Result.Leaf{} = result, _, _, res, _) do + {result, res} end - def walk_result(%{values: values} = result, bp_node, schema_type, exec, path) do - {values, exec} = walk_results(values, bp_node, schema_type, exec, [0 | path], []) - {%{result | values: values}, exec} + def walk_result(%{values: values} = result, bp_node, schema_type, res, path) do + {values, res} = walk_results(values, bp_node, schema_type, res, [0 | path], []) + {%{result | values: values}, res} end - def walk_result(%Absinthe.Resolution{} = res, _bp_node, _schema_type, exec, _path) do - res = update_persisted_fields(res, exec) - do_resolve_field(res, exec, res.source, res.path) + def walk_result(%Absinthe.Resolution{} = old_res, _bp_node, _schema_type, res, _path) do + res = update_persisted_fields(old_res, res) + do_resolve_field(res, res.source, res.path) end # walk list results - defp walk_results([value | values], bp_node, inner_type, exec, [i | sub_path] = path, acc) do - {result, exec} = walk_result(value, 
bp_node, inner_type, exec, path) - walk_results(values, bp_node, inner_type, exec, [i + 1 | sub_path], [result | acc]) + defp walk_results([value | values], bp_node, inner_type, res, [i | sub_path] = path, acc) do + {result, res} = walk_result(value, bp_node, inner_type, res, path) + walk_results(values, bp_node, inner_type, res, [i + 1 | sub_path], [result | acc]) end - defp walk_results([], _, _, exec, _, acc), do: {:lists.reverse(acc), exec} + defp walk_results([], _, _, res, _, acc), do: {:lists.reverse(acc), res} - defp resolve_fields(parent, exec, source, path) do + defp resolve_fields(parent, res, source, path) do # parent is the parent field, we need to get the return type of that field # that return type could be an interface or union, so let's make it concrete parent |> get_return_type - |> get_concrete_type(source, exec) + |> get_concrete_type(source, res) |> case do nil -> - {[], exec} + {[], res} parent_type -> {fields, fields_cache} = @@ -117,13 +133,13 @@ defmodule Absinthe.Phase.Document.Execution.Resolution do parent.selections, parent_type, path, - exec.fields_cache, - exec + res.fields_cache, + res ) - exec = %{exec | fields_cache: fields_cache} + res = %{res | fields_cache: fields_cache} - do_resolve_fields(fields, exec, source, parent_type, path, []) + do_resolve_fields(fields, res, source, parent_type, path, []) end end @@ -137,43 +153,42 @@ defmodule Absinthe.Phase.Document.Execution.Resolution do defp get_return_type(type), do: type - defp get_concrete_type(%Type.Union{} = parent_type, source, exec) do - Type.Union.resolve_type(parent_type, source, exec) + defp get_concrete_type(%Type.Union{} = parent_type, source, res) do + Type.Union.resolve_type(parent_type, source, res) end - defp get_concrete_type(%Type.Interface{} = parent_type, source, exec) do - Type.Interface.resolve_type(parent_type, source, exec) + defp get_concrete_type(%Type.Interface{} = parent_type, source, res) do + Type.Interface.resolve_type(parent_type, source, res) end - 
defp get_concrete_type(parent_type, _source, _exec) do + defp get_concrete_type(parent_type, _source, _res) do parent_type end - defp do_resolve_fields([field | fields], exec, source, parent_type, path, acc) do - {result, exec} = resolve_field(field, exec, source, parent_type, [field | path]) - do_resolve_fields(fields, exec, source, parent_type, path, [result | acc]) + defp do_resolve_fields([field | fields], res, source, parent_type, path, acc) do + field = %{field | parent_type: parent_type} + {result, res} = resolve_field(field, res, source, parent_type, [field | path]) + do_resolve_fields(fields, res, source, parent_type, path, [result | acc]) end - defp do_resolve_fields([], exec, _, _, _, acc), do: {:lists.reverse(acc), exec} + defp do_resolve_fields([], res, _, _, _, acc), do: {:lists.reverse(acc), res} - def resolve_field(field, exec, source, parent_type, path) do - exec + def resolve_field(field, res, source, parent_type, path) do + res |> build_resolution_struct(field, source, parent_type, path) - |> do_resolve_field(exec, source, path) + |> do_resolve_field(source, path) end # bp_field needs to have a concrete schema node, AKA no unions or interfaces - defp do_resolve_field(res, exec, source, path) do + defp do_resolve_field(res, source, path) do res |> reduce_resolution |> case do %{state: :resolved} = res -> - exec = update_persisted_fields(exec, res) - build_result(res, exec, source, path) + build_result(res, source, path) %{state: :suspended} = res -> - exec = update_persisted_fields(exec, res) - {res, exec} + {res, res} final_res -> raise """ @@ -188,19 +203,25 @@ defmodule Absinthe.Phase.Document.Execution.Resolution do %{dest | acc: acc, context: context, fields_cache: cache} end - defp build_resolution_struct(exec, bp_field, source, parent_type, path) do - common = - Map.take(exec, [:adapter, :context, :acc, :root_value, :schema, :fragments, :fields_cache]) - - %Absinthe.Resolution{ - path: path, - source: source, - parent_type: parent_type, - 
middleware: bp_field.schema_node.middleware, - definition: bp_field, - arguments: bp_field.argument_data + defp build_resolution_struct( + res, + %{argument_data: args, schema_node: %{middleware: middleware}} = bp_field, + source, + parent_type, + path + ) do + %{ + res + | path: path, + state: :unresolved, + value: nil, + errors: [], + source: source, + parent_type: parent_type, + middleware: middleware, + definition: bp_field, + arguments: args } - |> Map.merge(common) end defp reduce_resolution(%{middleware: []} = res), do: res @@ -231,14 +252,16 @@ defmodule Absinthe.Phase.Document.Execution.Resolution do fun.(res, []) end - defp build_result(%{errors: errors} = res, exec, source, path) do + defp build_result(res, source, path) do %{ value: value, definition: bp_field, - extensions: extensions + extensions: extensions, + schema: schema, + errors: errors } = res - full_type = Type.expand(bp_field.schema_node.type, exec.schema) + full_type = Type.expand(bp_field.schema_node.type, schema) bp_field = put_in(bp_field.schema_node.type, full_type) @@ -254,26 +277,26 @@ defmodule Absinthe.Phase.Document.Execution.Resolution do value |> to_result(bp_field, full_type, extensions) |> add_errors(Enum.reverse(errors), &put_result_error_value(&1, &2, bp_field, source, path)) - |> walk_result(bp_field, full_type, exec, path) + |> walk_result(bp_field, full_type, res, path) |> propagate_null_trimming end - defp maybe_add_non_null_error(errors, nil, %Type.NonNull{}) do - ["Cannot return null for non-nullable field" | errors] + defp maybe_add_non_null_error([], nil, %Type.NonNull{}) do + ["Cannot return null for non-nullable field"] end defp maybe_add_non_null_error(errors, _, _) do errors end - defp propagate_null_trimming({%{values: values} = node, exec}) do + defp propagate_null_trimming({%{values: values} = node, res}) do values = Enum.map(values, &do_propagate_null_trimming/1) node = %{node | values: values} - {do_propagate_null_trimming(node), exec} + 
{do_propagate_null_trimming(node), res} end - defp propagate_null_trimming({node, exec}) do - {do_propagate_null_trimming(node), exec} + defp propagate_null_trimming({node, res}) do + {do_propagate_null_trimming(node), res} end defp do_propagate_null_trimming(node) do @@ -327,12 +350,19 @@ defmodule Absinthe.Phase.Document.Execution.Resolution do true end + defp non_null_list_violation?(%{ + value: nil, + emitter: %{ + schema_node: %{type: %Type.NonNull{of_type: %Type.List{of_type: %Type.NonNull{}}}} + } + }) do + true + end + defp non_null_list_violation?(_) do false end - # defp maybe_add_non_null_error(errors, nil, %) - defp add_errors(result, errors, fun) do Enum.reduce(errors, result, fun) end @@ -342,13 +372,19 @@ defmodule Absinthe.Phase.Document.Execution.Resolution do {[], _} -> raise Absinthe.Resolution.result_error(error_value, bp_field, source) + {[message: message, path: error_path], extra} -> + put_error( + result, + error(bp_field, message, Enum.reverse(error_path) ++ path, Map.new(extra)) + ) + {[message: message], extra} -> put_error(result, error(bp_field, message, path, Map.new(extra))) end end defp split_error_value(error_value) when is_list(error_value) or is_map(error_value) do - Keyword.split(Enum.to_list(error_value), [:message]) + Keyword.split(Enum.to_list(error_value), [:message, :path]) end defp split_error_value(error_value) when is_binary(error_value) do diff --git a/lib/absinthe/phase/document/missing_literals.ex b/lib/absinthe/phase/document/missing_literals.ex index 60917697d1..ffc9ce2d78 100644 --- a/lib/absinthe/phase/document/missing_literals.ex +++ b/lib/absinthe/phase/document/missing_literals.ex @@ -110,7 +110,7 @@ defmodule Absinthe.Phase.Document.MissingLiterals do nodes |> Enum.filter(& &1.schema_node) |> Enum.reduce(schema_nodes, fn - %{schema_node: %{__reference__: %{identifier: id}}}, acc -> + %{schema_node: %{identifier: id}}, acc -> Map.delete(acc, id) _, acc -> diff --git a/lib/absinthe/phase/document/override_root.ex 
b/lib/absinthe/phase/document/override_root.ex new file mode 100644 index 0000000000..57bacfa89a --- /dev/null +++ b/lib/absinthe/phase/document/override_root.ex @@ -0,0 +1,9 @@ +defmodule Absinthe.Phase.Document.OverrideRoot do + @moduledoc false + + @behaviour Absinthe.Phase + + def run(blueprint, root_value: new_root) do + {:ok, put_in(blueprint.execution.root_value, new_root)} + end +end diff --git a/lib/absinthe/phase/document/result.ex b/lib/absinthe/phase/document/result.ex index 99d80c3ab6..14414bc171 100644 --- a/lib/absinthe/phase/document/result.ex +++ b/lib/absinthe/phase/document/result.ex @@ -15,6 +15,9 @@ defmodule Absinthe.Phase.Document.Result do defp process(blueprint) do result = case blueprint.execution do + %{validation_errors: [], result: nil} -> + {:ok, data(%{value: nil}, [])} + %{validation_errors: [], result: result} -> {:ok, data(result, [])} @@ -25,10 +28,6 @@ defmodule Absinthe.Phase.Document.Result do format_result(result) end - defp format_result(:execution_failed) do - %{data: nil} - end - defp format_result({:ok, {data, []}}) do %{data: data} end @@ -43,10 +42,6 @@ defmodule Absinthe.Phase.Document.Result do %{errors: errors} end - defp format_result({:parse_failed, error}) do - %{errors: [format_error(error)]} - end - defp data(%{errors: [_ | _] = field_errors}, errors), do: {nil, field_errors ++ errors} # Leaf @@ -56,7 +51,22 @@ defmodule Absinthe.Phase.Document.Result do value = case Type.unwrap(emitter.schema_node.type) do %Type.Scalar{} = schema_node -> - Type.Scalar.serialize(schema_node, value) + try do + Type.Scalar.serialize(schema_node, value) + rescue + _e in [Absinthe.SerializationError, Protocol.UndefinedError] -> + raise( + Absinthe.SerializationError, + """ + Could not serialize term #{inspect(value)} as type #{schema_node.name} + + When serializing the field: + #{emitter.parent_type.name}.#{emitter.schema_node.name} (#{ + emitter.schema_node.__reference__.location.file + 
}:#{emitter.schema_node.__reference__.location.line}) + """ + ) + end %Type.Enum{} = schema_node -> Type.Enum.serialize(schema_node, value) diff --git a/lib/absinthe/phase/document/validation/arguments_of_correct_type.ex b/lib/absinthe/phase/document/validation/arguments_of_correct_type.ex index 5640742b18..f97c3faefc 100644 --- a/lib/absinthe/phase/document/validation/arguments_of_correct_type.ex +++ b/lib/absinthe/phase/document/validation/arguments_of_correct_type.ex @@ -3,7 +3,7 @@ defmodule Absinthe.Phase.Document.Validation.ArgumentsOfCorrectType do # Validates document to ensure that all arguments are of the correct type. - alias Absinthe.{Blueprint, Phase, Schema, Type} + alias Absinthe.{Blueprint, Phase, Phase.Document.Validation.Utils, Schema, Type} use Absinthe.Phase @@ -13,6 +13,7 @@ defmodule Absinthe.Phase.Document.Validation.ArgumentsOfCorrectType do @spec run(Blueprint.t(), Keyword.t()) :: Phase.result_t() def run(input, _options \\ []) do result = Blueprint.prewalk(input, &handle_node(&1, input.schema)) + {:ok, result} end @@ -79,7 +80,15 @@ defmodule Absinthe.Phase.Document.Validation.ArgumentsOfCorrectType do node.fields |> Enum.flat_map(fn %{flags: %{invalid: _}, schema_node: nil} = child -> - [unknown_field_error_message(child.name)] + field_suggestions = + case Type.unwrap(node.schema_node) do + %Type.Scalar{} -> [] + %Type.Enum{} -> [] + nil -> [] + _ -> suggested_field_names(node.schema_node, child.name) + end + + [unknown_field_error_message(child.name, field_suggestions)] %{flags: %{invalid: _}} = child -> child_type_name = @@ -113,6 +122,13 @@ defmodule Absinthe.Phase.Document.Validation.ArgumentsOfCorrectType do [] end + defp suggested_field_names(schema_node, name) do + schema_node.fields + |> Map.values() + |> Enum.map(& &1.name) + |> Absinthe.Utils.Suggestion.sort_list(name) + end + # Generate the error for the node @spec error(Blueprint.node_t(), String.t()) :: Phase.Error.t() defp error(node, message) do @@ -142,10 +158,17 @@ 
defmodule Absinthe.Phase.Document.Validation.ArgumentsOfCorrectType do ~s(In field "#{id}": ) <> expected_type_error_message(expected_type_name, inspected_value) end - def unknown_field_error_message(field_name) do + def unknown_field_error_message(field_name, suggestions \\ []) + + def unknown_field_error_message(field_name, []) do ~s(In field "#{field_name}": Unknown field.) end + def unknown_field_error_message(field_name, suggestions) do + ~s(In field "#{field_name}": Unknown field.) <> + Utils.MessageSuggestions.suggest_message(suggestions) + end + defp expected_type_error_message(expected_type_name, inspected_value) do ~s(Expected type "#{expected_type_name}", found #{inspected_value}.) end diff --git a/lib/absinthe/phase/document/validation/fields_on_correct_type.ex b/lib/absinthe/phase/document/validation/fields_on_correct_type.ex index a2945c947d..9d36d59107 100644 --- a/lib/absinthe/phase/document/validation/fields_on_correct_type.ex +++ b/lib/absinthe/phase/document/validation/fields_on_correct_type.ex @@ -3,7 +3,7 @@ defmodule Absinthe.Phase.Document.Validation.FieldsOnCorrectType do # Validates document to ensure that all fields are provided on the correct type. 
- alias Absinthe.{Blueprint, Phase, Schema, Type} + alias Absinthe.{Blueprint, Phase, Phase.Document.Validation.Utils, Schema, Type} use Absinthe.Phase @@ -120,7 +120,7 @@ defmodule Absinthe.Phase.Document.Validation.FieldsOnCorrectType do %Type.Object{identifier: identifier} -> [identifier] - %Type.Interface{__reference__: %{identifier: identifier}} -> + %Type.Interface{identifier: identifier} -> schema.__absinthe_interface_implementors__ |> Map.fetch!(identifier) @@ -152,8 +152,6 @@ defmodule Absinthe.Phase.Document.Validation.FieldsOnCorrectType do } end - @suggest 5 - @doc """ Generate an error for a field """ @@ -166,13 +164,12 @@ defmodule Absinthe.Phase.Document.Validation.FieldsOnCorrectType do def error_message(field_name, type_name, [], field_suggestions) do error_message(field_name, type_name) <> - " Did you mean " <> to_quoted_or_list(field_suggestions |> Enum.take(@suggest)) <> "?" + Utils.MessageSuggestions.suggest_message(field_suggestions) end def error_message(field_name, type_name, type_suggestions, []) do error_message(field_name, type_name) <> - " Did you mean to use an inline fragment on " <> - to_quoted_or_list(type_suggestions |> Enum.take(@suggest)) <> "?" 
+ Utils.MessageSuggestions.suggest_fragment_message(type_suggestions) end def error_message(field_name, type_name, type_suggestions, _) do @@ -180,22 +177,35 @@ defmodule Absinthe.Phase.Document.Validation.FieldsOnCorrectType do end defp suggested_type_names(external_field_name, type, blueprint) do - internal_field_name = blueprint.adapter.to_internal_name(external_field_name, :field) + internal_field_name = + case blueprint.adapter.to_internal_name(external_field_name, :field) do + nil -> external_field_name + internal_field_name -> internal_field_name + end + possible_types = find_possible_types(internal_field_name, type, blueprint.schema) possible_interfaces = find_possible_interfaces(internal_field_name, possible_types, blueprint.schema) - Enum.map(possible_interfaces, & &1.name) ++ Enum.map(possible_types, & &1.name) + possible_interfaces + |> Enum.map(& &1.name) + |> Enum.concat(Enum.map(possible_types, & &1.name)) + |> Enum.sort() end defp suggested_field_names(external_field_name, %{fields: _} = type, blueprint) do - internal_field_name = blueprint.adapter.to_internal_name(external_field_name, :field) + internal_field_name = + case blueprint.adapter.to_internal_name(external_field_name, :field) do + nil -> external_field_name + internal_field_name -> internal_field_name + end Map.values(type.fields) |> Enum.map(& &1.name) |> Absinthe.Utils.Suggestion.sort_list(internal_field_name) |> Enum.map(&blueprint.adapter.to_external_name(&1, :field)) + |> Enum.sort() end defp suggested_field_names(_, _, _) do @@ -255,21 +265,4 @@ defmodule Absinthe.Phase.Document.Validation.FieldsOnCorrectType do defp type_with_field?(_, _) do false end - - defp to_quoted_or_list([a]), do: ~s("#{a}") - defp to_quoted_or_list([a, b]), do: ~s("#{a}" or "#{b}") - defp to_quoted_or_list(other), do: to_longer_quoted_or_list(other) - - defp to_longer_quoted_or_list(list, acc \\ "") - defp to_longer_quoted_or_list([word], acc), do: acc <> ~s(, or "#{word}") - - defp 
to_longer_quoted_or_list([word | rest], "") do - rest - |> to_longer_quoted_or_list(~s("#{word}")) - end - - defp to_longer_quoted_or_list([word | rest], acc) do - rest - |> to_longer_quoted_or_list(acc <> ~s(, "#{word}")) - end end diff --git a/lib/absinthe/phase/validation/known_directives.ex b/lib/absinthe/phase/document/validation/known_directives.ex similarity index 88% rename from lib/absinthe/phase/validation/known_directives.ex rename to lib/absinthe/phase/document/validation/known_directives.ex index 3731cfc3ec..f1934afc6b 100644 --- a/lib/absinthe/phase/validation/known_directives.ex +++ b/lib/absinthe/phase/document/validation/known_directives.ex @@ -1,4 +1,4 @@ -defmodule Absinthe.Phase.Validation.KnownDirectives do +defmodule Absinthe.Phase.Document.Validation.KnownDirectives do @moduledoc false alias Absinthe.{Blueprint, Phase} @@ -29,7 +29,8 @@ defmodule Absinthe.Phase.Validation.KnownDirectives do end defp handle_node(%{directives: _} = node) do - check_directives(node) + node + |> check_directives |> inherit_invalid(node.directives, :bad_directive) end @@ -63,7 +64,7 @@ defmodule Absinthe.Phase.Validation.KnownDirectives do defp error_unknown(node) do %Phase.Error{ phase: __MODULE__, - message: "Unknown directive.", + message: "Unknown directive `#{node.name}'.", locations: [node.source_location] } end @@ -74,7 +75,7 @@ defmodule Absinthe.Phase.Validation.KnownDirectives do %Phase.Error{ phase: __MODULE__, - message: "May not be used on #{placement_name}.", + message: "Directive `#{node.name}' may not be used on #{placement_name}.", locations: [node.source_location] } end diff --git a/lib/absinthe/phase/document/validation/no_fragment_cycles.ex b/lib/absinthe/phase/document/validation/no_fragment_cycles.ex index ac7c309fb9..43a30ab284 100644 --- a/lib/absinthe/phase/document/validation/no_fragment_cycles.ex +++ b/lib/absinthe/phase/document/validation/no_fragment_cycles.ex @@ -45,7 +45,8 @@ defmodule 
Absinthe.Phase.Document.Validation.NoFragmentCycles do graph |> :digraph_utils.topsort() |> Enum.reverse() - |> Enum.map(&Map.fetch!(fragments, &1)) + |> Enum.map(&Map.get(fragments, &1)) + |> Enum.reject(&is_nil/1) {fragments, 0} end diff --git a/lib/absinthe/phase/document/validation/no_unused_variables.ex b/lib/absinthe/phase/document/validation/no_unused_variables.ex index c32a2be3a2..aaacf2b04b 100644 --- a/lib/absinthe/phase/document/validation/no_unused_variables.ex +++ b/lib/absinthe/phase/document/validation/no_unused_variables.ex @@ -52,7 +52,7 @@ defmodule Absinthe.Phase.Document.Validation.NoUnusedVariables do %Phase.Error{ phase: __MODULE__, message: error_message(node.name, operation.name), - locations: [node.source_location, operation.source_location] + locations: Enum.uniq([node.source_location, operation.source_location]) } end diff --git a/lib/absinthe/phase/document/validation/provided_non_null_arguments.ex b/lib/absinthe/phase/document/validation/provided_non_null_arguments.ex index bb37835dca..2d99267c4c 100644 --- a/lib/absinthe/phase/document/validation/provided_non_null_arguments.ex +++ b/lib/absinthe/phase/document/validation/provided_non_null_arguments.ex @@ -12,7 +12,11 @@ defmodule Absinthe.Phase.Document.Validation.ProvidedNonNullArguments do """ @spec run(Blueprint.t(), Keyword.t()) :: Phase.result_t() def run(input, _options \\ []) do - result = Blueprint.prewalk(input, &handle_node(&1, input.schema)) + result = + Blueprint.update_current(input, fn op -> + Blueprint.prewalk(op, &handle_node(&1, input.schema)) + end) + {:ok, result} end diff --git a/lib/absinthe/phase/document/validation/provided_non_null_variables.ex b/lib/absinthe/phase/document/validation/provided_non_null_variables.ex index e2c9e06199..efe69c4bc7 100644 --- a/lib/absinthe/phase/document/validation/provided_non_null_variables.ex +++ b/lib/absinthe/phase/document/validation/provided_non_null_variables.ex @@ -14,7 +14,11 @@ defmodule 
Absinthe.Phase.Document.Validation.ProvidedNonNullVariables do """ @spec run(Blueprint.t(), Keyword.t()) :: Phase.result_t() def run(input, _options \\ []) do - result = Blueprint.prewalk(input, &handle_node(&1, input.schema)) + result = + Blueprint.update_current(input, fn op -> + Blueprint.prewalk(op, &handle_node(&1, input.schema)) + end) + {:ok, result} end diff --git a/lib/absinthe/phase/document/validation/repeatable_directives.ex b/lib/absinthe/phase/document/validation/repeatable_directives.ex new file mode 100644 index 0000000000..d84288333e --- /dev/null +++ b/lib/absinthe/phase/document/validation/repeatable_directives.ex @@ -0,0 +1,86 @@ +defmodule Absinthe.Phase.Document.Validation.RepeatableDirectives do + @moduledoc false + + alias Absinthe.{Blueprint, Phase} + + use Absinthe.Phase + use Absinthe.Phase.Validation + + @doc """ + Run the validation. + """ + @spec run(Blueprint.t(), Keyword.t()) :: Phase.result_t() + def run(input, _options \\ []) do + result = Blueprint.postwalk(input, &handle_node/1) + {:ok, result} + end + + defp handle_node(%Blueprint.Directive{} = node) do + node + end + + defp handle_node(%{directives: []} = node) do + node + end + + defp handle_node(%{directives: _} = node) do + node + |> check_directives + |> inherit_invalid(node.directives, :bad_directive) + end + + defp handle_node(node) do + node + end + + defp check_directives(node) do + directives = + for directive <- node.directives do + case directive do + %{schema_node: nil} -> + directive + + %{schema_node: %{repeatable: true}} -> + directive + + directive -> + check_duplicates( + directive, + Enum.filter( + node.directives, + &compare_directive_schema_node(directive.schema_node, &1.schema_node) + ) + ) + end + end + + %{node | directives: directives} + end + + defp compare_directive_schema_node(_, nil), do: false + + defp compare_directive_schema_node(%{identifier: identifier}, %{identifier: identifier}), + do: true + + defp compare_directive_schema_node(_, _), do: 
false + + # Generate the error for the node + @spec error_repeated(Blueprint.node_t()) :: Phase.Error.t() + defp error_repeated(node) do + %Phase.Error{ + phase: __MODULE__, + message: "Directive `#{node.name}' cannot be applied repeatedly.", + locations: [node.source_location] + } + end + + defp check_duplicates(directive, [_single]) do + directive + end + + defp check_duplicates(directive, _multiple) do + directive + |> flag_invalid(:duplicate_directive) + |> put_error(error_repeated(directive)) + end +end diff --git a/lib/absinthe/phase/document/validation/scalar_leafs.ex b/lib/absinthe/phase/document/validation/scalar_leafs.ex index d297f5359d..bb32fcfac0 100644 --- a/lib/absinthe/phase/document/validation/scalar_leafs.ex +++ b/lib/absinthe/phase/document/validation/scalar_leafs.ex @@ -71,7 +71,7 @@ defmodule Absinthe.Phase.Document.Validation.ScalarLeafs do end defp process(%{selections: s} = node, %unwrapped{}, type) - when s != [] and not (unwrapped in @has_subfields) do + when s != [] and unwrapped not in @has_subfields do bad_node(node, type, :bad_subfields) end diff --git a/lib/absinthe/phase/document/validation/selected_current_operation.ex b/lib/absinthe/phase/document/validation/selected_current_operation.ex index ac5f1aafbd..29c83a03f2 100644 --- a/lib/absinthe/phase/document/validation/selected_current_operation.ex +++ b/lib/absinthe/phase/document/validation/selected_current_operation.ex @@ -12,13 +12,15 @@ defmodule Absinthe.Phase.Document.Validation.SelectedCurrentOperation do Run the validation. 
""" @spec run(Blueprint.t(), Keyword.t()) :: Phase.result_t() - def run(input, _options \\ []) do + def run(input, options \\ []) do result = case {Blueprint.current_operation(input), length(input.operations)} do - {nil, count} when count > 1 -> + {nil, count} when count >= 1 -> + operation_name = Keyword.get(options, :operation_name) + input |> flag_invalid(:no_current_operation) - |> put_error(error()) + |> put_error(error(operation_name, count)) _ -> input @@ -28,15 +30,35 @@ defmodule Absinthe.Phase.Document.Validation.SelectedCurrentOperation do end # Generate the error - @spec error :: Phase.Error.t() - defp error do + @spec error(String.t(), integer()) :: Phase.Error.t() + defp error(operation_name, operation_count) do %Phase.Error{ phase: __MODULE__, - message: error_message() + message: error_message(operation_name, operation_count) } end - def error_message do - ~s(Must provide a valid operation name if query contains multiple operations.) + def error_message(nil, _) do + """ + Must provide a valid operation name if query contains multiple operations. + + No operation name was given. + """ + end + + def error_message(operation_name, 1) do + """ + The provided operation name did not match the operation in the query. + + The provided operation name was: #{inspect(operation_name)} + """ + end + + def error_message(operation_name, _) do + """ + Must provide a valid operation name if query contains multiple operations. 
+ + The provided operation name was: #{inspect(operation_name)} + """ end end diff --git a/lib/absinthe/phase/document/validation/utils/message_suggestions.ex b/lib/absinthe/phase/document/validation/utils/message_suggestions.ex new file mode 100644 index 0000000000..a605096b40 --- /dev/null +++ b/lib/absinthe/phase/document/validation/utils/message_suggestions.ex @@ -0,0 +1,33 @@ +defmodule Absinthe.Phase.Document.Validation.Utils.MessageSuggestions do + @moduledoc false + @suggest 5 + + @doc """ + Generate an suggestions message for a incorrect field + """ + def suggest_message(suggestions) do + " Did you mean " <> to_quoted_or_list(suggestions |> Enum.take(@suggest)) <> "?" + end + + def suggest_fragment_message(suggestions) do + " Did you mean to use an inline fragment on " <> + to_quoted_or_list(suggestions |> Enum.take(@suggest)) <> "?" + end + + defp to_quoted_or_list([a]), do: ~s("#{a}") + defp to_quoted_or_list([a, b]), do: ~s("#{a}" or "#{b}") + defp to_quoted_or_list(other), do: to_longer_quoted_or_list(other) + + defp to_longer_quoted_or_list(list, acc \\ "") + defp to_longer_quoted_or_list([word], acc), do: acc <> ~s(, or "#{word}") + + defp to_longer_quoted_or_list([word | rest], "") do + rest + |> to_longer_quoted_or_list(~s("#{word}")) + end + + defp to_longer_quoted_or_list([word | rest], acc) do + rest + |> to_longer_quoted_or_list(acc <> ~s(, "#{word}")) + end +end diff --git a/lib/absinthe/phase/document/validation/variables_are_input_types.ex b/lib/absinthe/phase/document/validation/variables_are_input_types.ex index 21d91e26c2..66d97923c0 100644 --- a/lib/absinthe/phase/document/validation/variables_are_input_types.ex +++ b/lib/absinthe/phase/document/validation/variables_are_input_types.ex @@ -20,19 +20,22 @@ defmodule Absinthe.Phase.Document.Validation.VariablesAreInputTypes do # Find variable definitions @spec handle_node(Blueprint.node_t(), Schema.t()) :: Blueprint.node_t() - defp 
handle_node(%Blueprint.Document.VariableDefinition{schema_node: nil} = node, _) do - node - end - defp handle_node(%Blueprint.Document.VariableDefinition{} = node, schema) do - type = Schema.lookup_type(schema, node.schema_node) - - if Type.input_type?(Type.unwrap(type)) do - node - else - node - |> flag_invalid(:non_input_type) - |> put_error(error(node, Type.name(node.schema_node))) + schema + |> Schema.lookup_type(node.schema_node) + |> Type.unwrap() + |> case do + nil -> + node + + type -> + if Type.input_type?(type) do + node + else + node + |> flag_invalid(:non_input_type) + |> put_error(error(node, Type.name(node.schema_node))) + end end end diff --git a/lib/absinthe/phase/document/variables.ex b/lib/absinthe/phase/document/variables.ex index 349a4b6a91..538e1429a4 100644 --- a/lib/absinthe/phase/document/variables.ex +++ b/lib/absinthe/phase/document/variables.ex @@ -10,7 +10,7 @@ defmodule Absinthe.Phase.Document.Variables do # # Given a GraphQL document that looks like: # - # ``` + # ```graphql # query Item($id: ID!, $text = String = "Another") { # item(id: $id, category: "Things") { # name @@ -25,9 +25,9 @@ defmodule Absinthe.Phase.Document.Variables do # `` # # - The operation's `variables` field would have an `"id"` value set to - # `%Blueprint.Input.StringValue{value: "1234"}` + # `%Blueprint.Input.String{value: "1234"}` # - The operation's `variables` field would have an `"text"` value set to - # `%Blueprint.Input.StringValue{value: "Another"}` + # `%Blueprint.Input.String{value: "Another"}` # # ``` # run(blueprint, %{}) @@ -36,7 +36,7 @@ defmodule Absinthe.Phase.Document.Variables do # - The operation's `variables` field would have an `"id"` value set to # `nil` # - The operation's `variables` field would have an `"text"` value set to - # `%Blueprint.Input.StringValue{value: "Another"}` + # `%Blueprint.Input.String{value: "Another"}` # # Note that no validation occurs in this phase. 
diff --git a/lib/absinthe/phase/error.ex b/lib/absinthe/phase/error.ex index 0e83b5898a..aeafca6571 100644 --- a/lib/absinthe/phase/error.ex +++ b/lib/absinthe/phase/error.ex @@ -10,7 +10,7 @@ defmodule Absinthe.Phase.Error do path: [] ] - @type loc_t :: %{optional(any) => any, line: integer, column: nil | integer} + @type loc_t :: %{optional(any) => any, line: pos_integer, column: pos_integer} @type t :: %__MODULE__{ message: String.t(), diff --git a/lib/absinthe/phase/init.ex b/lib/absinthe/phase/init.ex new file mode 100644 index 0000000000..4e628d289b --- /dev/null +++ b/lib/absinthe/phase/init.ex @@ -0,0 +1,23 @@ +defmodule Absinthe.Phase.Init do + @moduledoc false + + use Absinthe.Phase + + alias Absinthe.{Blueprint, Language, Phase} + + @spec run(String.t() | Language.Source.t() | Blueprint.t(), Keyword.t()) :: Phase.result_t() + def run(input, _options \\ []) do + {:record_phases, make_blueprint(input), + fn bp, phases -> + %{bp | initial_phases: phases} + end} + end + + defp make_blueprint(%Absinthe.Blueprint{} = blueprint) do + blueprint + end + + defp make_blueprint(input) do + %Blueprint{input: input} + end +end diff --git a/lib/absinthe/phase/parse.ex b/lib/absinthe/phase/parse.ex index 8ac2976f87..7b04faf130 100644 --- a/lib/absinthe/phase/parse.ex +++ b/lib/absinthe/phase/parse.ex @@ -3,9 +3,12 @@ defmodule Absinthe.Phase.Parse do use Absinthe.Phase - alias Absinthe.{Language, Phase} + alias Absinthe.{Blueprint, Language, Phase} - @spec run(Language.Source.t(), Keyword.t()) :: Phase.result_t() + # This is because Dialyzer is telling us tokenizing can never fail, + # but we know it's possible. 
+ @dialyzer {:no_match, run: 2} + @spec run(Language.Source.t() | %Blueprint{}, Keyword.t()) :: Phase.result_t() def run(input, options \\ []) def run(%Absinthe.Blueprint{} = blueprint, options) do @@ -26,6 +29,9 @@ defmodule Absinthe.Phase.Parse do run(%Absinthe.Blueprint{input: input}, options) end + # This is because Dialyzer is telling us tokenizing can never fail, + # but we know it's possible. + @dialyzer {:no_unused, add_validation_error: 2} defp add_validation_error(bp, error) do put_in(bp.execution.validation_errors, [error]) end @@ -38,28 +44,28 @@ defmodule Absinthe.Phase.Parse do {:error, blueprint} end - @spec tokenize(binary) :: {:ok, [tuple]} | {:error, binary} - defp tokenize(input) do - chars = :erlang.binary_to_list(input) + @spec tokenize(binary) :: {:ok, [tuple]} | {:error, String.t()} + def tokenize(input) do + case Absinthe.Lexer.tokenize(input) do + {:error, rest, loc} -> + {:error, format_raw_parse_error({:lexer, rest, loc})} - case :absinthe_lexer.string(chars) do - {:ok, tokens, _line_count} -> - {:ok, tokens} - - {:error, raw_error, _} -> - {:error, format_raw_parse_error(raw_error)} + other -> + other end end - @spec parse(binary) :: {:ok, Language.Document.t()} | {:error, tuple} - @spec parse(Language.Source.t()) :: {:ok, Language.Document.t()} | {:error, tuple} + # This is because Dialyzer is telling us tokenizing can never fail, + # but we know it's possible. 
+ @dialyzer {:no_match, parse: 1} + @spec parse(binary | Language.Source.t()) :: {:ok, Language.Document.t()} | {:error, tuple} defp parse(input) when is_binary(input) do parse(%Language.Source{body: input}) end defp parse(input) do try do - case input.body |> tokenize do + case tokenize(input.body) do {:ok, []} -> {:ok, %Language.Document{}} @@ -81,16 +87,28 @@ defmodule Absinthe.Phase.Parse do end end - @spec format_raw_parse_error({integer, :absinthe_parser, [charlist]}) :: Phase.Error.t() + @spec format_raw_parse_error({{integer, integer}, :absinthe_parser, [charlist]}) :: + Phase.Error.t() + defp format_raw_parse_error({{line, column}, :absinthe_parser, msgs}) do + message = msgs |> Enum.map(&to_string/1) |> Enum.join("") + %Phase.Error{message: message, locations: [%{line: line, column: column}], phase: __MODULE__} + end + + @spec format_raw_parse_error({integer, :absinthe_parser, [charlist]}) :: + Phase.Error.t() defp format_raw_parse_error({line, :absinthe_parser, msgs}) do message = msgs |> Enum.map(&to_string/1) |> Enum.join("") %Phase.Error{message: message, locations: [%{line: line, column: 0}], phase: __MODULE__} end - @spec format_raw_parse_error({integer, :absinthe_lexer, {atom, charlist}}) :: Phase.Error.t() - defp format_raw_parse_error({line, :absinthe_lexer, {problem, field}}) do - message = "#{problem}: #{field}" - %Phase.Error{message: message, locations: [%{line: line, column: 0}], phase: __MODULE__} + @spec format_raw_parse_error({:lexer, String.t(), {line :: pos_integer, column :: pos_integer}}) :: + Phase.Error.t() + defp format_raw_parse_error({:lexer, rest, {line, column}}) do + sample_slice = String.slice(rest, 0, 10) + sample = if String.valid?(sample_slice), do: sample_slice, else: inspect(sample_slice) + + message = "Parsing failed at `#{sample}`" + %Phase.Error{message: message, locations: [%{line: line, column: column}], phase: __MODULE__} end @unknown_error_msg "An unknown error occurred during parsing" diff --git 
a/lib/absinthe/phase/schema.ex b/lib/absinthe/phase/schema.ex index 70924e9b6c..5e4e620244 100644 --- a/lib/absinthe/phase/schema.ex +++ b/lib/absinthe/phase/schema.ex @@ -3,7 +3,7 @@ defmodule Absinthe.Phase.Schema do # Populate all schema nodes and the adapter for the blueprint tree. If the # blueprint tree is a _schema_ tree, this schema is the meta schema (source of - # IDL directives, etc). + # SDL directives, etc). # # Note that no validation occurs in this phase. @@ -22,17 +22,29 @@ defmodule Absinthe.Phase.Schema do # Thus at each node we need only concern ourselves with immediate children. @spec run(Blueprint.t(), Keyword.t()) :: {:ok, Blueprint.t()} def run(input, options \\ []) do - schema = Keyword.fetch!(options, :schema) - adapter = Keyword.get(options, :adapter, Absinthe.Adapter.LanguageConventions) + {input, schema} = apply_settings(input, Map.new(options)) result = input |> update_context(schema) - |> Blueprint.prewalk(&handle_node(&1, schema, adapter)) + |> Blueprint.prewalk(&handle_node(&1, schema, input.adapter)) {:ok, result} end + # Set schema and adapter settings on the blueprint appropriate to whether we're + # applying a normal schema for a document or a prototype schema used to define + # a schema. 
+ defp apply_settings(input, %{prototype_schema: schema} = options) do + adapter = Map.get(options, :adapter, Absinthe.Adapter.LanguageConventions) + {%{input | prototype_schema: schema, adapter: adapter}, schema} + end + + defp apply_settings(input, options) do + adapter = Map.get(options, :adapter, Absinthe.Adapter.LanguageConventions) + {%{input | schema: options.schema, adapter: adapter}, options.schema} + end + defp update_context(input, nil), do: input defp update_context(input, schema) do @@ -41,7 +53,7 @@ defmodule Absinthe.Phase.Schema do end defp handle_node(%Blueprint{} = node, schema, adapter) do - set_children(%{node | schema: schema, adapter: adapter}, schema, adapter) + set_children(node, schema, adapter) end defp handle_node(%Absinthe.Blueprint.Document.VariableDefinition{} = node, _, _) do @@ -68,17 +80,12 @@ defmodule Absinthe.Phase.Schema do schema, _adapter ) do - schema_node = Absinthe.Schema.cached_lookup_type(schema, type_name) + schema_node = Absinthe.Schema.lookup_type(schema, type_name) %{node | schema_node: schema_node, type_condition: %{condition | schema_node: schema_node}} end defp set_schema_node(%Blueprint.Directive{name: name} = node, _parent, schema, adapter) do - schema_node = - name - |> adapter.to_internal_name(:directive) - |> schema.__absinthe_directive__ - - %{node | schema_node: schema_node} + %{node | schema_node: find_schema_directive(name, schema, adapter)} end defp set_schema_node( @@ -87,7 +94,7 @@ defmodule Absinthe.Phase.Schema do schema, _adapter ) do - %{node | schema_node: Absinthe.Schema.cached_lookup_type(schema, op_type)} + %{node | schema_node: Absinthe.Schema.lookup_type(schema, op_type)} end defp set_schema_node( @@ -97,7 +104,7 @@ defmodule Absinthe.Phase.Schema do schema, _adapter ) do - schema_node = Absinthe.Schema.cached_lookup_type(schema, type_name) + schema_node = Absinthe.Schema.lookup_type(schema, type_name) %{node | schema_node: schema_node, type_condition: %{condition | schema_node: schema_node}} 
end @@ -111,12 +118,7 @@ defmodule Absinthe.Phase.Schema do type_reference |> type_reference_to_type(schema) - wrapped - |> Type.unwrap() - |> case do - nil -> node - _ -> %{node | schema_node: wrapped} - end + %{node | schema_node: wrapped} end defp set_schema_node(node, %{schema_node: nil}, _, _) do @@ -152,7 +154,8 @@ defmodule Absinthe.Phase.Schema do end defp set_schema_node(%Blueprint.Input.Argument{name: name} = node, parent, _schema, adapter) do - %{node | schema_node: find_schema_argument(parent.schema_node, name, adapter)} + schema_node = find_schema_argument(parent.schema_node, name, adapter) + %{node | schema_node: schema_node} end defp set_schema_node(%Blueprint.Document.Fragment.Spread{} = node, _, _, _) do @@ -214,12 +217,17 @@ defmodule Absinthe.Phase.Schema do |> Enum.find(&match?(%{name: ^internal_name}, &1)) end + # Given a name, lookup a schema directive + @spec find_schema_directive(String.t(), Absinthe.Schema.t(), Absinthe.Adapter.t()) :: + nil | Type.Directive.t() + defp find_schema_directive(name, schema, adapter) do + internal_name = adapter.to_internal_name(name, :directive) + schema.__absinthe_directive__(internal_name) + end + # Given a schema type, lookup a child field definition @spec find_schema_field(nil | Type.t(), String.t(), Absinthe.Schema.t(), Absinthe.Adapter.t()) :: nil | Type.Field.t() - defp find_schema_field(_, "__" <> introspection_field, _, _) do - Absinthe.Introspection.Field.meta(introspection_field) - end defp find_schema_field(%{of_type: type}, name, schema, adapter) do find_schema_field(type, name, schema, adapter) diff --git a/lib/absinthe/phase/schema/apply_declaration.ex b/lib/absinthe/phase/schema/apply_declaration.ex new file mode 100644 index 0000000000..f0b4e589a2 --- /dev/null +++ b/lib/absinthe/phase/schema/apply_declaration.ex @@ -0,0 +1,116 @@ +defmodule Absinthe.Phase.Schema.ApplyDeclaration do + @moduledoc false + + use Absinthe.Phase + alias Absinthe.Blueprint + + @type operation :: :query | :mutation | 
:subscription

  @type root_mappings :: %{operation() => Blueprint.TypeReference.Name.t()}

  def run(blueprint, _opts) do
    blueprint = process(blueprint)
    {:ok, blueprint}
  end

  # Apply schema declaration to each schema definition
  @spec process(blueprint :: Blueprint.t()) :: Blueprint.t()
  defp process(blueprint = %Blueprint{}) do
    %{
      blueprint
      | schema_definitions: Enum.map(blueprint.schema_definitions, &process_schema_definition/1)
    }
  end

  # Strip the schema declaration out of the schema's type definitions and apply it.
  # Exactly one declaration is applied; zero is a no-op; more than one attaches an
  # error per extra declaration (the first is still dropped from type_definitions).
  defp process_schema_definition(schema_definition) do
    {declarations, type_defs} =
      Enum.split_with(
        schema_definition.type_definitions,
        &match?(%Blueprint.Schema.SchemaDeclaration{}, &1)
      )

    # Remove declaration
    schema_definition = %{schema_definition | type_definitions: type_defs}

    case declarations do
      [declaration] ->
        root_mappings =
          declaration
          |> extract_root_mappings

        %{
          schema_definition
          | type_definitions:
              Enum.map(schema_definition.type_definitions, &maybe_mark_root(&1, root_mappings)),
            schema_declaration: declaration
        }

      [] ->
        schema_definition

      [_first | extra_declarations] ->
        extra_declarations
        |> Enum.reduce(schema_definition, fn declaration, acc ->
          acc
          |> put_error(error(declaration))
        end)
    end
  end

  # Generate an error for extraneous schema declarations.
  # Fix: the message previously used mismatched quoting (`schema') — now
  # consistently backtick-quoted as `schema`.
  @spec error(declaration :: Blueprint.Schema.SchemaDeclaration.t()) :: Absinthe.Phase.Error.t()
  defp error(declaration) do
    %Absinthe.Phase.Error{
      message:
        "More than one schema declaration found. Only one instance of `schema` should be present in SDL.",
      locations: [declaration.__reference__.location],
      phase: __MODULE__
    }
  end

  # Extract the declared root type names
  @spec extract_root_mappings(declaration :: Blueprint.Schema.SchemaDeclaration.t()) ::
          root_mappings()
  defp extract_root_mappings(declaration) do
    for field_def <- declaration.field_definitions,
        field_def.identifier in ~w(query mutation subscription)a,
        into: %{} do
      {field_def.identifier, field_def.type}
    end
  end

  # If the type definition is declared as a root type, set the identifier appropriately
  @spec maybe_mark_root(type_def :: Blueprint.Schema.t(), root_mappings :: root_mappings()) ::
          Blueprint.Schema.t()
  defp maybe_mark_root(%Blueprint.Schema.ObjectTypeDefinition{} = type_def, root_mappings) do
    case operation_root_identifier(type_def, root_mappings) do
      nil ->
        type_def

      identifier ->
        %{type_def | identifier: identifier}
    end
  end

  defp maybe_mark_root(type_def, _root_mappings), do: type_def

  # Determine which, if any, root identifier should be applied to an object type definition
  @spec operation_root_identifier(
          type_def :: Blueprint.Schema.ObjectTypeDefinition.t(),
          root_mappings :: root_mappings()
        ) :: nil | operation()
  defp operation_root_identifier(type_def, root_mappings) do
    match_name = type_def.name

    Enum.find_value(root_mappings, fn
      {ident, %{name: ^match_name}} ->
        ident

      _ ->
        false
    end)
  end
end
diff --git a/lib/absinthe/phase/schema/arguments/data.ex b/lib/absinthe/phase/schema/arguments/data.ex
new file mode 100644
index 0000000000..205c07a80a
--- /dev/null
+++ b/lib/absinthe/phase/schema/arguments/data.ex
@@ -0,0 +1,46 @@
defmodule Absinthe.Phase.Schema.Arguments.Data do
  @moduledoc false

  # Populate all arguments in the SDL with their provided data values.
  #
  # See Absinthe.Phase.Document.Arguments.Data for a more expansive
  # explanation; this phase limits itself to arguments and values.
+ + alias Absinthe.Blueprint.Input + alias Absinthe.{Blueprint} + use Absinthe.Phase + + def run(input, _options \\ []) do + # By using a postwalk we can worry about leaf nodes first (scalars, enums), + # and then for list and objects merely grab the data values. + result = Blueprint.postwalk(input, &handle_node/1) + {:ok, result} + end + + def handle_node(%Input.Argument{input_value: input} = node) do + %{node | value: input.data} + end + + def handle_node(%Input.Value{normalized: %Input.List{items: items}} = node) do + data_list = for %{data: data} = item <- items, Input.Value.valid?(item), do: data + %{node | data: data_list} + end + + def handle_node(%Input.Value{normalized: %Input.Object{fields: fields}} = node) do + data = + for field <- fields, include_field?(field), into: %{} do + {field.schema_node.identifier, field.input_value.data} + end + + %{node | data: data} + end + + def handle_node(node) do + node + end + + defp include_field?(%{input_value: %{normalized: %Input.Null{}}}), do: true + defp include_field?(%{input_value: %{data: nil}}), do: false + defp include_field?(%{schema_node: nil}), do: false + defp include_field?(_), do: true +end diff --git a/lib/absinthe/phase/schema/arguments/normalize.ex b/lib/absinthe/phase/schema/arguments/normalize.ex new file mode 100644 index 0000000000..2c3d8019af --- /dev/null +++ b/lib/absinthe/phase/schema/arguments/normalize.ex @@ -0,0 +1,32 @@ +defmodule Absinthe.Phase.Schema.Arguments.Normalize do + @moduledoc false + + # Populate all arguments in the document with their provided values: + # + # - If a literal value is provided for an argument, set the `Argument.t`'s + # `normalized_value` field to that value. + # + # Note that no validation occurs in this phase. 
+ + use Absinthe.Phase + alias Absinthe.Blueprint + alias Absinthe.Blueprint.Input + + @spec run(Blueprint.t(), Keyword.t()) :: {:ok, Blueprint.t()} + def run(input, _options \\ []) do + node = Blueprint.prewalk(input, &handle_node/1) + {:ok, node} + end + + # Set provided value from the raw value + defp handle_node(%Input.RawValue{} = node) do + %Input.Value{ + normalized: node.content, + raw: node + } + end + + defp handle_node(node) do + node + end +end diff --git a/lib/absinthe/phase/schema/arguments/parse.ex b/lib/absinthe/phase/schema/arguments/parse.ex new file mode 100644 index 0000000000..435a1d7127 --- /dev/null +++ b/lib/absinthe/phase/schema/arguments/parse.ex @@ -0,0 +1,73 @@ +defmodule Absinthe.Phase.Schema.Arguments.Parse do + @moduledoc false + + # Parses Leaf Node inputs + + alias Absinthe.Blueprint.Input + alias Absinthe.{Blueprint, Type} + use Absinthe.Phase + + def run(input, options \\ []) do + result = Blueprint.prewalk(input, &handle_node(&1, options[:context] || %{})) + {:ok, result} + end + + # defp handle_node(%{schema_node: nil} = node, _context) do + # {:halt, node} + # end + + defp handle_node(%{normalized: nil} = node, _context) do + node + end + + defp handle_node(%Input.Value{normalized: normalized} = node, context) do + case build_value(normalized, node.schema_node, context) do + {:ok, value} -> + %{node | data: value} + + :not_leaf_node -> + node + + {:error, flag} -> + %{node | normalized: normalized |> flag_invalid(flag)} + end + end + + defp handle_node(node, _context), do: node + + defp build_value(%Input.Null{}, %Type.NonNull{}, _) do + {:error, :non_null} + end + + defp build_value(normalized, %Type.Scalar{} = schema_node, context) do + case Type.Scalar.parse(schema_node, normalized, context) do + :error -> + {:error, :bad_parse} + + {:ok, val} -> + {:ok, val} + end + end + + defp build_value(%Input.Null{}, %Type.Enum{}, _) do + {:ok, nil} + end + + defp build_value(normalized, %Type.Enum{} = schema_node, _) do + case 
Type.Enum.parse(schema_node, normalized) do + {:ok, %{value: value}} -> + {:ok, value} + + :error -> + {:error, :bad_parse} + end + end + + defp build_value(normalized, %Type.NonNull{of_type: inner_type}, context) do + build_value(normalized, inner_type, context) + end + + defp build_value(_, _, _) do + :not_leaf_node + end +end diff --git a/lib/absinthe/phase/schema/build.ex b/lib/absinthe/phase/schema/build.ex new file mode 100644 index 0000000000..da2f5a58c0 --- /dev/null +++ b/lib/absinthe/phase/schema/build.ex @@ -0,0 +1,41 @@ +defmodule Absinthe.Phase.Schema.Build do + @moduledoc false + + def run(blueprint, _opts) do + %{schema_definitions: [schema]} = blueprint + + types = build_types(blueprint) + directives = build_directives(blueprint) + + schema = %{schema | type_artifacts: types, directive_artifacts: directives} + + blueprint = %{blueprint | schema_definitions: [schema]} + + {:ok, blueprint} + end + + def build_types(%{schema_definitions: [schema]}) do + for %module{} = type_def <- schema.type_definitions do + type = module.build(type_def, schema) + + %{ + type + | __reference__: type_def.__reference__, + __private__: type_def.__private__ + } + end + end + + def build_directives(%{schema_definitions: [schema]}) do + for %module{} = type_def <- schema.directive_definitions do + type = module.build(type_def, schema) + + %{ + type + | definition: type_def.module, + __reference__: type_def.__reference__, + __private__: type_def.__private__ + } + end + end +end diff --git a/lib/absinthe/phase/schema/compile.ex b/lib/absinthe/phase/schema/compile.ex new file mode 100644 index 0000000000..ece2db7241 --- /dev/null +++ b/lib/absinthe/phase/schema/compile.ex @@ -0,0 +1,148 @@ +defmodule Absinthe.Phase.Schema.Compile do + @moduledoc false + + alias Absinthe.Blueprint.Schema + + def run(blueprint, opts) do + module_name = Module.concat(opts[:schema], Compiled) + + %{schema_definitions: [schema]} = blueprint + + type_ast = build_types(schema.type_artifacts) + 
directive_ast = build_directives(schema.directive_artifacts) + + type_list = + Map.new(schema.type_definitions, fn type_def -> + {type_def.identifier, type_def.name} + end) + + referenced_types = + for type_def <- schema.type_definitions, + type_def.__private__[:__absinthe_referenced__], + into: %{}, + do: {type_def.identifier, type_def.name} + + directive_list = + Map.new(schema.directive_definitions, fn type_def -> + {type_def.identifier, type_def.name} + end) + + prototype_schema = Keyword.fetch!(opts, :prototype_schema) + + metadata = build_metadata(schema) + + implementors = build_implementors(schema) + + body = + quote do + @moduledoc false + + unquote_splicing(type_ast) + unquote_splicing(directive_ast) + + def __absinthe_types__() do + __absinthe_types__(:referenced) + end + + def __absinthe_types__(:referenced) do + unquote(Macro.escape(referenced_types)) + end + + def __absinthe_types__(:all) do + unquote(Macro.escape(type_list)) + end + + def __absinthe_directives__() do + unquote(Macro.escape(directive_list)) + end + + def __absinthe_interface_implementors__() do + unquote(Macro.escape(implementors)) + end + + def __absinthe_prototype_schema__() do + unquote(Macro.escape(prototype_schema)) + end + + unquote_splicing(metadata) + end + + Module.create(module_name, body, Macro.Env.location(__ENV__)) + + {:ok, blueprint} + end + + def build_metadata(schema) do + for type <- schema.type_definitions do + quote do + def __absinthe_reference__(unquote(type.identifier)) do + unquote(Macro.escape(type.__reference__)) + end + end + end + end + + def build_types(types) do + for type <- types do + if !type.definition, + do: + raise(""" + No definition set! 
+ #{inspect(type)} + """) + + ast = Macro.escape(type, unquote: true) + + quote do + def __absinthe_type__(unquote(type.identifier)) do + unquote(ast) + end + + def __absinthe_type__(unquote(type.name)) do + unquote(ast) + end + end + end + |> Enum.concat([ + quote do + def __absinthe_type__(_type) do + nil + end + end + ]) + end + + def build_directives(directives) do + for type <- directives do + ast = Macro.escape(type) + + quote do + def __absinthe_directive__(unquote(type.identifier)) do + unquote(ast) + end + + def __absinthe_directive__(unquote(type.name)) do + unquote(ast) + end + end + end + |> Enum.concat([ + quote do + def __absinthe_directive__(_type) do + nil + end + end + ]) + end + + defp build_implementors(schema) do + schema.type_definitions + |> Enum.filter(&match?(%Schema.InterfaceTypeDefinition{}, &1)) + |> Map.new(fn iface -> + implementors = + Schema.InterfaceTypeDefinition.find_implementors(iface, schema.type_definitions) + + {iface.identifier, Enum.sort(implementors)} + end) + end +end diff --git a/lib/absinthe/phase/schema/deprecated_directive_fields.ex b/lib/absinthe/phase/schema/deprecated_directive_fields.ex new file mode 100644 index 0000000000..40b69633e4 --- /dev/null +++ b/lib/absinthe/phase/schema/deprecated_directive_fields.ex @@ -0,0 +1,58 @@ +defmodule Absinthe.Phase.Schema.DeprecatedDirectiveFields do + @moduledoc false + # The spec of Oct 2015 has the onOperation, onFragment and onField + # fields for directives (https://spec.graphql.org/October2015/#sec-Schema-Introspection) + # See https://github.com/graphql/graphql-spec/pull/152 for the rationale. + # These fields are deprecated and can be removed in the future. 
+ alias Absinthe.Blueprint + + use Absinthe.Schema.Notation + + @behaviour Absinthe.Phase + + def run(input, _options \\ []) do + blueprint = Blueprint.prewalk(input, &handle_node/1) + + {:ok, blueprint} + end + + defp handle_node(%Blueprint.Schema.ObjectTypeDefinition{identifier: :__directive} = node) do + [types] = __MODULE__.__absinthe_blueprint__().schema_definitions + + new_node = Enum.find(types.type_definitions, &(&1.identifier == :deprecated_directive_fields)) + + fields = node.fields ++ new_node.fields + + %{node | fields: fields} + end + + defp handle_node(node) do + node + end + + object :deprecated_directive_fields do + field :on_operation, :boolean do + deprecate "Check `locations` field for enum value OPERATION" + + resolve fn _, %{source: source} -> + {:ok, Enum.any?(source.locations, &Enum.member?([:query, :mutation, :subscription], &1))} + end + end + + field :on_fragment, :boolean do + deprecate "Check `locations` field for enum value FRAGMENT_SPREAD" + + resolve fn _, %{source: source} -> + {:ok, Enum.member?(source.locations, :fragment_spread)} + end + end + + field :on_field, :boolean do + deprecate "Check `locations` field for enum value FIELD" + + resolve fn _, %{source: source} -> + {:ok, Enum.member?(source.locations, :field)} + end + end + end +end diff --git a/lib/absinthe/phase/schema/directives.ex b/lib/absinthe/phase/schema/directives.ex new file mode 100644 index 0000000000..9418b64507 --- /dev/null +++ b/lib/absinthe/phase/schema/directives.ex @@ -0,0 +1,27 @@ +defmodule Absinthe.Phase.Schema.Directives do + @moduledoc false + + # Expand all directives in the document. + # + # Note that no validation occurs in this phase. 
+ + use Absinthe.Phase + alias Absinthe.Blueprint + + @spec run(Blueprint.t(), Keyword.t()) :: {:ok, Blueprint.t()} + def run(input, _options \\ []) do + node = Blueprint.prewalk(input, &handle_node/1) + {:ok, node} + end + + @spec handle_node(Blueprint.node_t()) :: Blueprint.node_t() + defp handle_node(%{directives: directives} = node) do + Enum.reduce(directives, node, fn directive, acc -> + Blueprint.Directive.expand(directive, acc) + end) + end + + defp handle_node(node) do + node + end +end diff --git a/lib/absinthe/phase/schema/field_imports.ex b/lib/absinthe/phase/schema/field_imports.ex new file mode 100644 index 0000000000..55fd5d9fbd --- /dev/null +++ b/lib/absinthe/phase/schema/field_imports.ex @@ -0,0 +1,57 @@ +defmodule Absinthe.Phase.Schema.FieldImports do + @moduledoc false + + use Absinthe.Phase + alias Absinthe.Blueprint + alias Absinthe.Blueprint.Schema + + def run(blueprint, _opts) do + blueprint = Blueprint.prewalk(blueprint, &handle_imports/1) + {:ok, blueprint} + end + + def handle_imports(%Schema.SchemaDefinition{} = schema) do + # Per Phase.Schema.ValidateTypeReferences, the types are already + # in the order they need to be in to accumulate imports properly. 
+ types = + Enum.reduce(schema.type_definitions, %{}, fn type, types -> + Map.put(types, type.identifier, import_fields(type, types)) + end) + + types = Enum.map(schema.type_definitions, &Map.fetch!(types, &1.identifier)) + {:halt, %{schema | type_definitions: types}} + end + + def handle_imports(node), do: node + + @can_import [ + Schema.ObjectTypeDefinition, + Schema.InputObjectTypeDefinition, + Schema.InterfaceTypeDefinition + ] + @exclude_fields [ + :__typename + ] + def import_fields(%def_type{} = type, types) when def_type in @can_import do + Enum.reduce(type.imports, type, fn {source, opts}, type -> + source_type = Map.fetch!(types, source) + + rejections = Keyword.get(opts, :except, []) ++ @exclude_fields + + fields = source_type.fields |> Enum.reject(&(&1.identifier in rejections)) + + fields = + case Keyword.fetch(opts, :only) do + {:ok, selections} -> + Enum.filter(fields, &(&1.identifier in selections)) + + _ -> + fields + end + + %{type | fields: fields ++ type.fields} + end) + end + + def import_fields(type, _), do: type +end diff --git a/lib/absinthe/phase/schema/hydrate.ex b/lib/absinthe/phase/schema/hydrate.ex new file mode 100644 index 0000000000..869486c949 --- /dev/null +++ b/lib/absinthe/phase/schema/hydrate.ex @@ -0,0 +1,242 @@ +defmodule Absinthe.Phase.Schema.Hydrate do + @moduledoc false + @behaviour Absinthe.Schema.Hydrator + + use Absinthe.Phase + alias Absinthe.Blueprint + + @hydrate [ + Blueprint.Schema.DirectiveDefinition, + Blueprint.Schema.EnumTypeDefinition, + Blueprint.Schema.EnumValueDefinition, + Blueprint.Schema.FieldDefinition, + Blueprint.Schema.InputObjectTypeDefinition, + Blueprint.Schema.InputValueDefinition, + Blueprint.Schema.InterfaceTypeDefinition, + Blueprint.Schema.ObjectTypeDefinition, + Blueprint.Schema.ScalarTypeDefinition, + Blueprint.Schema.SchemaDefinition, + Blueprint.Schema.UnionTypeDefinition + ] + + @impl Absinthe.Phase + def run(blueprint, opts \\ []) do + {:ok, schema} = Keyword.fetch(opts, :schema) + 
    hydrator = Keyword.get(opts, :hydrator, __MODULE__)
    blueprint = Blueprint.prewalk(blueprint, &handle_node(&1, [], schema, hydrator))
    {:ok, blueprint}
  end

  defp handle_node(%Blueprint{} = node, ancestors, schema, hydrator) do
    node
    |> hydrate_node(ancestors, schema, hydrator)
    |> set_children(ancestors, schema, hydrator)
  end

  defp handle_node(%node_module{} = node, ancestors, schema, hydrator)
       when node_module in @hydrate do
    node
    |> hydrate_node(ancestors, schema, hydrator)
    |> set_children(ancestors, schema, hydrator)
  end

  defp handle_node(node, ancestors, schema, hydrator) do
    set_children(node, ancestors, schema, hydrator)
  end

  # Recurse into direct children only: halt the prewalk at each child after
  # dispatching it through handle_node with an extended ancestor chain.
  defp set_children(parent, ancestors, schema, hydrator) do
    Blueprint.prewalk(parent, fn
      ^parent -> parent
      child -> {:halt, handle_node(child, [parent | ancestors], schema, hydrator)}
    end)
  end

  defp hydrate_node(%{} = node, ancestors, schema, hydrator) do
    hydrations = schema.hydrate(node, ancestors)
    apply_hydrations(node, hydrations, hydrator)
  end

  defp apply_hydrations(node, hydrations, hydrator) do
    hydrations
    |> List.wrap()
    |> Enum.reduce(node, fn hydration, node ->
      hydrator.apply_hydration(node, hydration)
    end)
  end

  @impl Absinthe.Schema.Hydrator

  # Each clause applies one hydration instruction to the matching node kind.
  def apply_hydration(
        node,
        {:meta, keyword_list}
      )
      when is_list(keyword_list) do
    %{node | __private__: Keyword.put(node.__private__, :meta, keyword_list)}
  end

  def apply_hydration(
        node,
        {:description, text}
      ) do
    %{node | description: text}
  end

  def apply_hydration(
        %Blueprint.Schema.FieldDefinition{} = node,
        {:resolve, resolver}
      ) do
    %{node | middleware: [{Absinthe.Resolution, resolver}]}
  end

  def apply_hydration(
        %Blueprint.Schema.FieldDefinition{} = node,
        {:middleware, {_module, _opts} = middleware}
      ) do
    %{node | middleware: [middleware]}
  end

  def apply_hydration(
        %Blueprint.Schema.FieldDefinition{} = node,
        {:complexity, complexity}
      )
      when is_integer(complexity) do
    %{node | complexity: complexity}
  end

  def apply_hydration(
        %Blueprint.Schema.ScalarTypeDefinition{} = node,
        {:parse, parse}
      )
      when is_function(parse) do
    %{node | parse: parse}
  end

  def apply_hydration(
        %Blueprint.Schema.ScalarTypeDefinition{} = node,
        {:serialize, serialize}
      )
      when is_function(serialize) do
    %{node | serialize: serialize}
  end

  def apply_hydration(
        %Blueprint.Schema.InterfaceTypeDefinition{} = node,
        {:resolve_type, resolve_type}
      )
      when is_function(resolve_type) do
    %{node | resolve_type: resolve_type}
  end

  def apply_hydration(
        %Blueprint.Schema.UnionTypeDefinition{} = node,
        {:resolve_type, resolve_type}
      )
      when is_function(resolve_type) do
    %{node | resolve_type: resolve_type}
  end

  def apply_hydration(
        %Blueprint.Schema.ObjectTypeDefinition{} = node,
        {:is_type_of, is_type_of}
      )
      when is_function(is_type_of) do
    %{node | is_type_of: is_type_of}
  end

  def apply_hydration(
        %Blueprint.Schema.EnumValueDefinition{} = node,
        {:as, value}
      ) do
    %{node | value: value}
  end

  # Nesting levels for map-shaped hydrations: schema root -> type -> field ->
  # argument.
  @hydration_level1 [
    Blueprint.Schema.DirectiveDefinition,
    Blueprint.Schema.EnumTypeDefinition,
    Blueprint.Schema.InputObjectTypeDefinition,
    Blueprint.Schema.InterfaceTypeDefinition,
    Blueprint.Schema.ObjectTypeDefinition,
    Blueprint.Schema.ScalarTypeDefinition,
    Blueprint.Schema.UnionTypeDefinition
  ]

  @hydration_level2 [
    Blueprint.Schema.FieldDefinition,
    Blueprint.Schema.EnumValueDefinition
  ]

  @hydration_level3 [
    Blueprint.Schema.InputValueDefinition
  ]

  def apply_hydration(%Absinthe.Blueprint{} = root, %{} = sub_hydrations) do
    {root, _} =
      Blueprint.prewalk(root, nil, fn
        %module{identifier: ident} = node, nil when module in @hydration_level1 ->
          case Map.fetch(sub_hydrations, ident) do
            :error ->
              {node, nil}

            {:ok, type_hydrations} ->
              {apply_hydrations(node, type_hydrations, __MODULE__), nil}
          end

        node, nil ->
          {node, nil}
      end)
+ + root + end + + def apply_hydration(%module{} = root, %{} = sub_hydrations) + when module in @hydration_level1 do + {root, _} = + Blueprint.prewalk(root, nil, fn + %module{identifier: ident} = node, nil when module in @hydration_level2 -> + case Map.fetch(sub_hydrations, ident) do + :error -> + {node, nil} + + {:ok, type_hydrations} -> + {apply_hydrations(node, type_hydrations, __MODULE__), nil} + end + + node, nil -> + {node, nil} + end) + + root + end + + def apply_hydration(%module{} = root, %{} = sub_hydrations) + when module in @hydration_level2 do + {root, _} = + Blueprint.prewalk(root, nil, fn + %module{identifier: ident} = node, nil when module in @hydration_level3 -> + case Map.fetch(sub_hydrations, ident) do + :error -> + {node, nil} + + {:ok, type_hydrations} -> + {apply_hydrations(node, type_hydrations, __MODULE__), nil} + end + + node, nil -> + {node, nil} + end) + + root + end + + def apply_hydration(root, result) do + raise ArgumentError, """ + Invalid hydration! + + #{inspect(result)} + + is not a valid way to hydrate + + #{inspect(root)} + """ + end +end diff --git a/lib/absinthe/phase/schema/inline_functions.ex b/lib/absinthe/phase/schema/inline_functions.ex new file mode 100644 index 0000000000..c5b4f9ef1d --- /dev/null +++ b/lib/absinthe/phase/schema/inline_functions.ex @@ -0,0 +1,83 @@ +defmodule Absinthe.Phase.Schema.InlineFunctions do + @moduledoc false + + use Absinthe.Phase + alias Absinthe.Blueprint + alias Absinthe.Blueprint.Schema + alias Absinthe.Type + + def run(blueprint, opts) do + blueprint = Blueprint.prewalk(blueprint, &inline_functions(&1, blueprint.schema, opts)) + + {:ok, blueprint} + end + + def inline_functions(%Schema.SchemaDefinition{} = schema_def, schema, opts) do + schema_def = %{ + schema_def + | type_artifacts: Enum.map(schema_def.type_artifacts, &inline_functions(&1, schema, opts)), + directive_artifacts: + Enum.map(schema_def.directive_artifacts, &inline_functions(&1, schema, opts)) + } + + {:halt, schema_def} + 
end + + def inline_functions(%type{identifier: _} = node, schema, opts) do + type + |> Schema.functions() + # middleware gets handled specially + |> Enum.reject(&(&1 in [:middleware])) + |> Enum.reduce(node, &inline_function(&1, &2, opts)) + |> inline_middleware(schema, opts) + end + + def inline_functions(node, _, _) do + node + end + + defp inline_function(attr, node, opts) do + function = Type.function(node, attr) + + if Absinthe.Utils.escapable?(function) || opts[:inline_always] do + %{node | attr => function} + else + node + end + end + + def inline_middleware(%type_name{} = type, schema, opts) + when type_name in [Type.Object, Type.Union, Type.Interface] do + Map.update!(type, :fields, fn fields -> + fields = + Enum.map(fields, fn {field_ident, field} -> + {field_ident, inline_functions(field, schema, opts)} + end) + + Map.new(fields, fn + {field_ident, %{middleware: middleware} = field} -> + expanded_middleware = Absinthe.Middleware.expand(schema, middleware, field, type) + + if Absinthe.Utils.escapable?(expanded_middleware) || opts[:inline_always] do + {field_ident, %{field | middleware: expanded_middleware}} + else + middleware_shim = { + {Absinthe.Middleware, :shim}, + {type.identifier, field.identifier, middleware} + } + + {field_ident, %{field | middleware: [middleware_shim]}} + end + + {field_ident, field} -> + middleware = Absinthe.Middleware.expand(schema, field.middleware, field, type) + + {field_ident, %{field | middleware: middleware}} + end) + end) + end + + def inline_middleware(type, _, _) do + type + end +end diff --git a/lib/absinthe/phase/schema/introspection.ex b/lib/absinthe/phase/schema/introspection.ex new file mode 100644 index 0000000000..b0e8081e7a --- /dev/null +++ b/lib/absinthe/phase/schema/introspection.ex @@ -0,0 +1,171 @@ +defmodule Absinthe.Phase.Schema.Introspection do + @moduledoc false + + use Absinthe.Phase + alias Absinthe.Blueprint + + alias Absinthe.Blueprint.Schema.FieldDefinition + alias 
 Absinthe.Blueprint.Schema.InputValueDefinition
  alias Absinthe.Blueprint.TypeReference.NonNull
  alias Absinthe.Blueprint.Schema.ObjectTypeDefinition
  alias Absinthe.Blueprint.Schema.ListTypeDefinition
  alias Absinthe.Blueprint.Schema.UnionTypeDefinition
  alias Absinthe.Blueprint.Schema.InterfaceTypeDefinition

  def __absinthe_function__(identifier, :middleware) do
    [{{Absinthe.Resolution, :call}, resolve_fn(identifier)}]
  end

  def run(blueprint, _opts) do
    blueprint = attach_introspection_fields(blueprint)
    {:ok, blueprint}
  end

  @doc """
  Append the given field or fields to the given type
  """
  def attach_introspection_fields(blueprint = %Blueprint{}) do
    %{blueprint | schema_definitions: update_schema_defs(blueprint.schema_definitions)}
  end

  def update_schema_defs(schema_definitions) do
    for schema_def = %{type_definitions: type_defs} <- schema_definitions do
      %{schema_def | type_definitions: update_type_defs(type_defs)}
    end
  end

  def update_type_defs(type_defs) do
    for type_def = %struct_type{} <- type_defs do
      cond do
        # Root query types additionally get __type and __schema.
        type_def.name in ["RootQueryType", "Query"] ->
          type_field = field_def(:type)
          schema_field = field_def(:schema)
          typename_field = field_def(:typename)
          %{type_def | fields: [type_field, schema_field, typename_field | type_def.fields]}

        struct_type in [
          ObjectTypeDefinition,
          ListTypeDefinition,
          UnionTypeDefinition,
          InterfaceTypeDefinition
        ] ->
          typename_field = field_def(:typename)
          %{type_def | fields: [typename_field | type_def.fields]}

        true ->
          type_def
      end
    end
  end

  def field_def(:typename) do
    %FieldDefinition{
      name: "__typename",
      identifier: :__typename,
      module: __MODULE__,
      type: :string,
      description: "The name of the object type currently being queried.",
      complexity: 0,
      triggers: %{},
      middleware: [
        {:ref, __MODULE__, :typename}
      ],
      flags: %{reserved_name: true},
      __reference__: Absinthe.Schema.Notation.build_reference(__ENV__)
    }
  end

  def field_def(:type) do
    %FieldDefinition{
      __reference__: Absinthe.Schema.Notation.build_reference(__ENV__),
      name: "__type",
      identifier: :__type,
      type: :__type,
      module: __MODULE__,
      description: "Represents scalars, interfaces, object types, unions, enums in the system",
      triggers: %{},
      arguments: [
        %InputValueDefinition{
          __reference__: Absinthe.Schema.Notation.build_reference(__ENV__),
          module: __MODULE__,
          identifier: :name,
          name: "name",
          type: %NonNull{of_type: :string},
          description: "The name of the type to introspect"
        }
      ],
      middleware: [
        {:ref, __MODULE__, :type}
      ],
      flags: %{reserved_name: true}
    }
  end

  def field_def(:schema) do
    %FieldDefinition{
      name: "__schema",
      identifier: :__schema,
      type: :__schema,
      module: __MODULE__,
      description: "Represents the schema",
      triggers: %{},
      middleware: [
        {:ref, __MODULE__, :schema}
      ],
      flags: %{reserved_name: true},
      __reference__: Absinthe.Schema.Notation.build_reference(__ENV__)
    }
  end

  # Resolvers for the three introspection entry points, referenced via the
  # {:ref, __MODULE__, ...} middleware above.
  def resolve_fn(:schema) do
    fn _, %{schema: schema} ->
      {:ok, schema}
    end
  end

  def resolve_fn(:type) do
    fn %{name: name}, %{schema: schema} ->
      type_def =
        case Absinthe.Schema.lookup_type(schema, name) do
          type_def = %{fields: fields} ->
            # Hide reserved __-prefixed fields from introspection output.
            %{type_def | fields: filter_fields(fields)}

          type_def ->
            type_def
        end

      {:ok, type_def}
    end
  end

  def resolve_fn(:typename) do
    fn
      _, %{parent_type: %Absinthe.Type.Object{} = type} ->
        {:ok, type.name}

      _, %{source: source, parent_type: %Absinthe.Type.Interface{} = iface} = env ->
        case Absinthe.Type.Interface.resolve_type(iface, source, env) do
          nil ->
            {:error, "Could not resolve type of concrete " <> iface.name}

          type ->
            {:ok, type.name}
        end

      _, %{source: source, parent_type: %Absinthe.Type.Union{} = union} = env ->
        case Absinthe.Type.Union.resolve_type(union, source, env) do
          nil ->
            {:error, "Could not resolve type of concrete " <> union.name}

          type ->
            {:ok, type.name}
        end
    end
  end

  def filter_fields(fields) do
    for {key, field = %{name: name}} <- fields, not String.starts_with?(name, "__"), into: %{} do
      {key, field}
    end
  end
end
diff --git a/lib/absinthe/phase/schema/mark_referenced.ex b/lib/absinthe/phase/schema/mark_referenced.ex
new file mode 100644
index 0000000000..f009252796
--- /dev/null
+++ b/lib/absinthe/phase/schema/mark_referenced.ex
@@ -0,0 +1,151 @@
defmodule Absinthe.Phase.Schema.MarkReferenced do
  @moduledoc false

  use Absinthe.Phase

  alias Absinthe.Blueprint.{Schema, TypeReference}

  def run(blueprint, _opts) do
    %{schema_definitions: [schema]} = blueprint

    schema =
      Map.update!(schema, :type_definitions, &mark_referenced(&1, schema.directive_definitions))

    {:ok, %{blueprint | schema_definitions: [schema]}}
  end

  @roots [:query, :mutation, :subscription]
  defp mark_referenced(type_defs, directive_defs) do
    # Index each type under BOTH its identifier and its name, since type
    # references may use either form.
    types_by_ref =
      Enum.reduce(type_defs, %{}, fn type_def, acc ->
        acc
        |> Map.put(type_def.identifier, type_def)
        |> Map.put(type_def.name, type_def)
      end)

    # Walk reachability from the root operation types and all directives.
    referenced_type_ids =
      @roots
      |> Enum.map(&Map.get(types_by_ref, &1))
      |> Enum.reject(&is_nil/1)
      |> Enum.concat(directive_defs)
      |> Enum.reduce(MapSet.new(), &referenced_types(&1, types_by_ref, &2))

    for type <- type_defs do
      if type.identifier in referenced_type_ids do
        put_in(type.__private__[:__absinthe_referenced__], true)
      else
        type
      end
    end
  end

  # Each clause accumulates the identifiers reachable from one node kind; the
  # `identifier in acc` guards break cycles.
  defp referenced_types(%Schema.InputValueDefinition{type: type}, types, acc) do
    referenced_types(type, types, acc)
  end

  defp referenced_types(%Schema.DirectiveDefinition{} = type, types, acc) do
    type.arguments
    |> Enum.reduce(acc, &referenced_types(&1, types, &2))
  end

  defp referenced_types(%Schema.EnumTypeDefinition{identifier: identifier}, _types, acc) do
    MapSet.put(acc, identifier)
  end

  defp referenced_types(%Schema.FieldDefinition{} = field, types, acc) do
    acc =
      field.arguments
      |> Enum.reduce(acc, &referenced_types(&1, types, &2))

    referenced_types(field.type, types, acc)
  end

  defp referenced_types(
         %Schema.InputObjectTypeDefinition{identifier: identifier} = input_object,
         types,
         acc
       ) do
    if identifier in acc do
      acc
    else
      acc = MapSet.put(acc, identifier)

      input_object.fields
      |> Enum.reduce(acc, &referenced_types(&1, types, &2))
    end
  end

  defp referenced_types(
         %Schema.InterfaceTypeDefinition{identifier: identifier} = interface,
         schema,
         acc
       ) do
    if identifier in acc do
      acc
    else
      acc = MapSet.put(acc, identifier)

      # Implementors of a referenced interface are themselves referenced.
      # NOTE(review): `schema` here is the identifier+name-keyed map, so each
      # type appears twice in Map.values/1 — presumably harmless since the
      # accumulator is a set; confirm.
      acc =
        interface
        |> Schema.InterfaceTypeDefinition.find_implementors(Map.values(schema))
        |> Enum.reduce(acc, &referenced_types(&1, schema, &2))

      interface.fields
      |> Enum.reduce(acc, &referenced_types(&1, schema, &2))
    end
  end

  defp referenced_types(%TypeReference.List{of_type: inner_type}, schema, acc) do
    referenced_types(inner_type, schema, acc)
  end

  defp referenced_types(%TypeReference.NonNull{of_type: inner_type}, schema, acc) do
    referenced_types(inner_type, schema, acc)
  end

  defp referenced_types(
         %Schema.ObjectTypeDefinition{identifier: identifier} = object,
         schema,
         acc
       ) do
    if identifier in acc do
      acc
    else
      acc = MapSet.put(acc, identifier)

      acc =
        object.fields
        |> Enum.reduce(acc, &referenced_types(&1, schema, &2))

      object.interfaces
      |> Enum.reduce(acc, &referenced_types(&1, schema, &2))
    end
  end

  defp referenced_types(%Schema.ScalarTypeDefinition{identifier: identifier}, _schema, acc) do
    MapSet.put(acc, identifier)
  end

  defp referenced_types(%Schema.UnionTypeDefinition{identifier: identifier} = union, schema, acc) do
    if identifier in acc do
      acc
    else
      acc = MapSet.put(acc, identifier)

      union.types
      |> Enum.reduce(acc, &referenced_types(&1, schema, &2))
    end
  end

  defp referenced_types(%TypeReference.Identifier{} = ref, schema, acc) do
    referenced_types(Map.fetch!(schema, ref.id), schema, acc)
  end

  defp referenced_types(%TypeReference.Name{} = ref, schema, acc) do
    referenced_types(Map.fetch!(schema, ref.name), schema, acc)
  end

  defp referenced_types(type, schema, acc) when is_atom(type) and type != nil do
    referenced_types(Map.fetch!(schema, type), schema, acc)
  end
end
diff --git a/lib/absinthe/phase/schema/populate_persistent_term.ex b/lib/absinthe/phase/schema/populate_persistent_term.ex
new file mode 100644
index 0000000000..d7b0b6dd18
--- /dev/null
+++ b/lib/absinthe/phase/schema/populate_persistent_term.ex
@@ -0,0 +1,93 @@
if Code.ensure_loaded?(:persistent_term) do
  defmodule Absinthe.Phase.Schema.PopulatePersistentTerm do
    @moduledoc false

    alias Absinthe.Blueprint.Schema

    def run(blueprint, opts) do
      %{schema_definitions: [schema]} = blueprint

      # NOTE(review): unlike Compile, this maps identifier => __reference__
      # here; the identifier => name map is built separately below — confirm
      # this asymmetry is intentional.
      type_list =
        for %{identifier: identifier} = type <- schema.type_definitions,
            into: %{},
            do: {identifier, type.__reference__}

      types_map =
        schema.type_artifacts
        |> Enum.flat_map(fn type -> [{type.identifier, type}, {type.name, type}] end)
        |> Map.new()

      referenced_types =
        for type_def <- schema.type_definitions,
            type_def.__private__[:__absinthe_referenced__],
            into: %{},
            do: {type_def.identifier, type_def.name}

      directive_list =
        Map.new(schema.directive_definitions, fn type_def ->
          {type_def.identifier, type_def.name}
        end)

      directives_map =
        schema.directive_artifacts
        |> Enum.flat_map(fn type -> [{type.identifier, type}, {type.name, type}] end)
        |> Map.new()

      prototype_schema = Keyword.fetch!(opts, :prototype_schema)

      metadata = build_metadata(schema)

      implementors = build_implementors(schema)

      schema_content = %{
        __absinthe_types__: %{
          referenced: referenced_types,
          all: type_list
        },
        __absinthe_directives__: directive_list,
        __absinthe_interface_implementors__: implementors,
        __absinthe_prototype_schema__: prototype_schema,
        __absinthe_type__: types_map,
        __absinthe_directive__: directives_map,
        __absinthe_reference__: metadata
defmodule Absinthe.Phase.Schema.ReformatDescriptions do
  @moduledoc false

  # Normalizes every description in the blueprint tree by stripping leading
  # and trailing whitespace.

  use Absinthe.Phase
  alias Absinthe.Blueprint

  @doc false
  @spec run(Blueprint.t(), Keyword.t()) :: {:ok, Blueprint.t()}
  def run(input, _options \\ []) do
    {:ok, Blueprint.prewalk(input, &trim_description/1)}
  end

  # Only nodes carrying a binary description are touched; everything else
  # (including nil descriptions) passes through unchanged.
  @spec trim_description(Blueprint.node_t()) :: Blueprint.node_t()
  defp trim_description(%{description: description} = node) when is_binary(description) do
    %{node | description: String.trim(description)}
  end

  defp trim_description(other), do: other
end
defmodule Absinthe.Phase.Schema.RegisterTriggers do
  @moduledoc false

  # Wires subscription triggers onto the corresponding mutation fields and
  # installs the subscription middleware on the mutation object.

  use Absinthe.Phase

  def run(blueprint, _opts) do
    %{schema_definitions: [schema]} = blueprint

    subscription_object = find_root(schema, :subscription)
    mutation_object = find_root(schema, :mutation)

    mutation_object =
      if subscription_object && mutation_object do
        mutation_object
        |> register_triggers(subscription_object.fields)
        |> setup_middleware()
      else
        # TODO: return errors if there isn't a mutation field that is on the
        # triggers list
        mutation_object
      end

    schema =
      Map.update!(schema, :type_definitions, fn definitions ->
        Enum.map(definitions, fn
          %{identifier: :subscription} -> subscription_object
          %{identifier: :mutation} -> mutation_object
          other -> other
        end)
      end)

    {:ok, %{blueprint | schema_definitions: [schema]}}
  end

  # Locate a root object (e.g. :mutation) among the schema's type definitions.
  defp find_root(schema, identifier) do
    Enum.find(schema.type_definitions, &(&1.identifier == identifier))
  end

  # For each mutation field, collect the identifiers of subscription fields
  # whose trigger mapping mentions that mutation.
  defp register_triggers(mutation_object, sub_fields) do
    update_fields(mutation_object, fn mut_field ->
      triggers =
        for sub_field <- sub_fields,
            sub_triggers = Absinthe.Type.function(sub_field, :triggers),
            is_map(sub_triggers),
            Map.has_key?(sub_triggers, mut_field.identifier),
            do: sub_field.identifier

      %{mut_field | triggers: triggers}
    end)
  end

  # Prepend the subscription middleware to each mutation field's chain.
  defp setup_middleware(mutation_object) do
    update_fields(mutation_object, fn field ->
      Map.update!(field, :middleware, &Absinthe.Subscription.add_middleware/1)
    end)
  end

  defp update_fields(object, fun) do
    Map.update!(object, :fields, &Enum.map(&1, fun))
  end
end
+ @moduledoc false + + use Absinthe.Phase + alias Absinthe.Blueprint + + alias Absinthe.Blueprint.Schema + + def run(blueprint, _opts) do + blueprint = Blueprint.prewalk(blueprint, &handle_imports/1) + {:ok, blueprint} + end + + @default_imports [ + {Absinthe.Type.BuiltIns.Scalars, []}, + {Absinthe.Type.BuiltIns.Directives, []}, + {Absinthe.Type.BuiltIns.Introspection, []} + ] + def handle_imports(%Schema.SchemaDefinition{} = schema) do + {types, schema} = + do_imports(@default_imports ++ schema.imports, schema.type_definitions, schema) + + # special casing the import of the built in directives + [builtins] = Absinthe.Type.BuiltIns.Directives.__absinthe_blueprint__().schema_definitions + directives = schema.directive_definitions ++ builtins.directive_definitions + {:halt, %{schema | type_definitions: types, directive_definitions: directives}} + end + + def handle_imports(node), do: node + + defp do_imports([], types, schema) do + {types, schema} + end + + defp do_imports([{module, opts} | rest], acc, schema) do + case ensure_compiled(module) do + {:module, module} -> + [other_def] = module.__absinthe_blueprint__.schema_definitions + + rejections = + MapSet.new([:query, :mutation, :subscription] ++ Keyword.get(opts, :except, [])) + + types = Enum.reject(other_def.type_definitions, &(&1.identifier in rejections)) + + types = + case Keyword.fetch(opts, :only) do + {:ok, selections} -> + Enum.filter(types, &(&1.identifier in selections)) + + _ -> + types + end + + do_imports(other_def.imports ++ rest, types ++ acc, schema) + + {:error, reason} -> + do_imports(rest, acc, schema |> put_error(error(module, reason))) + end + end + + # Elixir v1.12 includes a Code.ensure_compiled!/1 that tells + # the compiler it should only continue if the module is available. + # This gives the Elixir compiler more information to address + # deadlocks. + # TODO: Remove the else clause once we require Elixir v1.12+. 
defmodule Absinthe.Phase.Schema.Validation.DefaultEnumValuePresent do
  @moduledoc false

  # Validates that any default value declared for an enum-typed argument or
  # field is one of the enum's declared values.

  use Absinthe.Phase
  alias Absinthe.Blueprint
  alias Absinthe.Blueprint.Schema

  def run(blueprint, _opts) do
    {:ok, Blueprint.prewalk(blueprint, &validate_schema/1)}
  end

  def validate_schema(%Schema.SchemaDefinition{} = schema) do
    # Index enum definitions by identifier so defaults can be checked quickly.
    enums =
      for %Schema.EnumTypeDefinition{} = enum <- schema.type_definitions,
          into: %{},
          do: {enum.identifier, enum}

    {:halt, Blueprint.prewalk(schema, &validate_defaults(&1, enums))}
  end

  def validate_schema(node), do: node

  def validate_defaults(%{default_value: nil} = node, _) do
    node
  end

  def validate_defaults(%{default_value: default_value, type: type} = node, enums) do
    unwrapped = Blueprint.TypeReference.unwrap(type)

    case Map.fetch(enums, unwrapped) do
      {:ok, enum} ->
        values = Enum.map(enum.values, & &1.value)
        value_list = Enum.map(values, &"\n * #{inspect(&1)}")

        # Check against the full (possibly wrapped) type so list defaults are
        # validated element by element.
        case value_conforms_to_enum(node.type, default_value, values) do
          {:error, value} ->
            detail = %{
              value_list: value_list,
              type: unwrapped,
              default_value: value
            }

            put_error(node, error(node, detail))

          {:ok, _} ->
            node
        end

      _ ->
        node
    end
  end

  def validate_defaults(node, _) do
    node
  end

  # A list default: every element must conform; surface the first offender.
  defp value_conforms_to_enum(%Blueprint.TypeReference.List{of_type: of_type}, value, enum_values)
       when is_list(value) do
    value
    |> Enum.map(&value_conforms_to_enum(of_type, &1, enum_values))
    |> Enum.find({:ok, value}, &match?({:error, _}, &1))
  end

  # Unwrap any other wrapper (NonNull, or List around a scalar default).
  defp value_conforms_to_enum(%_{of_type: of_type}, value, enum_values) do
    value_conforms_to_enum(of_type, value, enum_values)
  end

  defp value_conforms_to_enum(_, value, enum_values) do
    if value in enum_values, do: {:ok, value}, else: {:error, value}
  end

  defp error(node, data) do
    %Absinthe.Phase.Error{
      message: explanation(data),
      locations: [node.__reference__.location],
      phase: __MODULE__,
      extra: data
    }
  end

  def explanation(%{default_value: default_value, type: type, value_list: value_list}) do
    """
    The default_value for an enum must be present in the enum values.

    Could not use default value of `#{inspect(default_value)}` for #{inspect(type)}.

    Valid values are:
    #{value_list}
    """
  end
end
+ """ + def run(bp, _) do + bp = Blueprint.prewalk(bp, &handle_schemas/1) + {:ok, bp} + end + + defp handle_schemas(%Blueprint.Schema.SchemaDefinition{} = schema) do + directive_definitions = Enum.map(schema.directive_definitions, &validate_directive(&1)) + {:halt, %{schema | directive_definitions: directive_definitions}} + end + + defp handle_schemas(obj) do + obj + end + + defp validate_directive(%Blueprint.Schema.DirectiveDefinition{locations: []} = directive) do + directive |> put_error(error_locations_absent(directive)) + end + + defp validate_directive(%Blueprint.Schema.DirectiveDefinition{locations: locations} = directive) do + Enum.reduce(locations, directive, fn location, directive -> + validate_location(directive, location) + end) + end + + defp validate_location(directive, location) when location in @directive_locations do + directive + end + + defp validate_location(directive, location) do + directive |> put_error(error_unknown_directive_location(directive, location)) + end + + defp error_unknown_directive_location(directive, location) do + %Absinthe.Phase.Error{ + message: explanation(directive, location), + locations: [directive.__reference__.location], + phase: __MODULE__, + extra: %{ + location: location + } + } + end + + defp error_locations_absent(directive) do + %Absinthe.Phase.Error{ + message: explanation(directive), + locations: [directive.__reference__.location], + phase: __MODULE__ + } + end + + defp explanation(directive, location) do + """ + Directive "#{directive.name}" must use a valid DirectiveLocation + + Found: #{inspect(location)} + + Expected one/multiple of: #{inspect(@directive_locations)} + + Reference: #{@spec_link} + """ + end + + defp explanation(directive) do + """ + Directive "#{directive.name}" must set DirectiveLocations + + Expected one/multiple of: #{inspect(@directive_locations)} + + Reference: #{@spec_link} + """ + end +end diff --git a/lib/absinthe/phase/schema/validation/input_output_types_correctly_placed.ex 
defmodule Absinthe.Phase.Schema.Validation.InputOutputTypesCorrectlyPlaced do
  @moduledoc false

  # Validates that arguments only reference input types and that fields of
  # output types (objects, interfaces, unions) only reference output types.

  use Absinthe.Phase
  alias Absinthe.Blueprint

  def run(bp, _) do
    {:ok, Blueprint.prewalk(bp, &handle_schemas/1)}
  end

  defp handle_schemas(%Blueprint.Schema.SchemaDefinition{} = schema) do
    types = Map.new(schema.type_definitions, &{&1.identifier, &1})
    {:halt, Blueprint.prewalk(schema, &validate_type(&1, types, schema))}
  end

  defp handle_schemas(node), do: node

  # An argument (input value) must reference an input type.
  defp validate_type(%Blueprint.Schema.InputValueDefinition{} = arg, types, schema) do
    ident =
      arg.type
      |> Blueprint.TypeReference.unwrap()
      |> Blueprint.TypeReference.to_type(schema)

    case Map.get(types, ident) do
      nil ->
        # Unknown references are handled by other validation phases.
        arg

      arg_type ->
        if wrong_type?(Blueprint.Schema.InputValueDefinition, arg_type) do
          detail = %{
            argument: arg.identifier,
            type: arg_type.identifier,
            struct: arg_type.__struct__
          }

          put_error(arg, error(arg.__reference__.location, detail))
        else
          arg
        end
    end
  end

  # Any type with fields: each field's type must suit the parent kind.
  defp validate_type(%struct{fields: fields} = type, types, schema) do
    checked =
      Enum.map(fields, fn
        %{type: _} = field ->
          ident =
            field.type
            |> Blueprint.TypeReference.unwrap()
            |> Blueprint.TypeReference.to_type(schema)

          field_type = Map.get(types, ident)

          if field_type && wrong_type?(struct, field_type) do
            detail = %{
              field: field.identifier,
              type: field_type.identifier,
              struct: field_type.__struct__,
              parent: struct
            }

            put_error(field, error(field.__reference__.location, detail))
          else
            field
          end

        field ->
          field
      end)

    %{type | fields: checked}
  end

  defp validate_type(type, _types, _schema), do: type

  @output_types [
    Blueprint.Schema.ObjectTypeDefinition,
    Blueprint.Schema.UnionTypeDefinition,
    Blueprint.Schema.InterfaceTypeDefinition
  ]
  defp wrong_type?(type, field_type) when type in @output_types do
    !output_type?(field_type)
  end

  @input_types [
    Blueprint.Schema.InputObjectTypeDefinition,
    Blueprint.Schema.InputValueDefinition
  ]
  defp wrong_type?(type, field_type) when type in @input_types do
    !input_type?(field_type)
  end

  defp error(location, data) do
    %Absinthe.Phase.Error{
      message: explanation(data),
      locations: [location],
      phase: __MODULE__,
      extra: data
    }
  end

  @description """
  Only input types may be used as inputs. Input types may not be used as output types

  Input types consist of Scalars, Enums, and Input Objects.
  """

  def explanation(%{argument: argument, type: type, struct: struct}) do
    struct = struct |> Module.split() |> List.last()

    """
    #{inspect(type)} is not a valid input type for argument #{inspect(argument)} because
    #{inspect(type)} is an #{Macro.to_string(struct)}. Arguments may only be input types.

    #{@description}
    """
  end

  def explanation(%{field: field, type: type, struct: struct, parent: parent}) do
    struct = struct |> Module.split() |> List.last()
    parent = parent |> Module.split() |> List.last()

    """
    #{inspect(type)} is not a valid type for field #{inspect(field)} because
    #{inspect(type)} is an #{struct}, and this field is part of an #{parent}.

    #{@description}
    """
  end

  defp input_type?(%Blueprint.Schema.ScalarTypeDefinition{}), do: true
  defp input_type?(%Blueprint.Schema.EnumTypeDefinition{}), do: true
  defp input_type?(%Blueprint.Schema.InputObjectTypeDefinition{}), do: true
  defp input_type?(_), do: false

  defp output_type?(%Blueprint.Schema.InputObjectTypeDefinition{}), do: false
  defp output_type?(_), do: true
end
defmodule Absinthe.Phase.Schema.Validation.KnownDirectives do
  @moduledoc false

  # Validates that every directive used in the document is known to the schema
  # and is used in a location where its definition permits it.

  alias Absinthe.{Blueprint, Phase}

  use Absinthe.Phase
  use Absinthe.Phase.Validation

  @doc """
  Run the validation.
  """
  @spec run(Blueprint.t(), Keyword.t()) :: Phase.result_t()
  def run(input, _options \\ []) do
    {:ok, Blueprint.postwalk(input, &handle_node/1)}
  end

  # A directive with no schema node is unknown to the schema.
  defp handle_node(%Blueprint.Directive{schema_node: nil} = node) do
    put_error(node, error_unknown(node))
  end

  defp handle_node(%Blueprint.Directive{} = node), do: node

  defp handle_node(%{directives: []} = node), do: node

  defp handle_node(%{directives: _} = node) do
    node
    |> check_directives()
    |> inherit_invalid(node.directives, :bad_directive)
  end

  defp handle_node(node), do: node

  # Flag any directive that is not allowed at this node's placement.
  defp check_directives(node) do
    placement = Blueprint.Directive.placement(node)

    directives =
      Enum.map(node.directives, fn directive ->
        cond do
          # Unknown directives are reported elsewhere; skip placement checks.
          !directive.schema_node ->
            directive

          placement in directive.schema_node.locations ->
            directive

          true ->
            directive
            |> put_error(error_misplaced(directive, placement))
            |> flag_invalid(:bad_placement)
        end
      end)

    %{node | directives: directives}
  end

  # Generate the error for the node
  @spec error_unknown(Blueprint.node_t()) :: Phase.Error.t()
  defp error_unknown(node) do
    %Phase.Error{
      phase: __MODULE__,
      message: "Unknown directive `#{node.name}'.",
      locations: [node.__reference__.location]
    }
  end

  @spec error_misplaced(Blueprint.node_t(), atom) :: Phase.Error.t()
  defp error_misplaced(node, placement) do
    placement_name = placement |> to_string |> String.upcase()

    %Phase.Error{
      phase: __MODULE__,
      message: "Directive `#{node.name}' may not be used on #{placement_name}.",
      locations: [node.__reference__.location]
    }
  end
end
@moduledoc false + + use Absinthe.Phase + alias Absinthe.Blueprint + alias Absinthe.Blueprint.Schema + + @valid_name_regex ~r/^[_A-Za-z][_0-9A-Za-z]*$/ + + def run(bp, _) do + bp = Blueprint.prewalk(bp, &validate_names/1) + {:ok, bp} + end + + defp validate_names(%{name: nil} = entity) do + entity + end + + defp validate_names(%struct{name: name} = entity) do + if valid_name?(name) do + entity + else + kind = struct_to_kind(struct) + detail = %{artifact: "#{kind} name", value: entity.name} + entity |> put_error(error(entity, detail)) + end + end + + defp validate_names(entity) do + entity + end + + defp valid_name?(name) do + Regex.match?(@valid_name_regex, name) + end + + defp error(object, data) do + %Absinthe.Phase.Error{ + message: explanation(data), + locations: [object.__reference__.location], + phase: __MODULE__, + extra: data + } + end + + defp struct_to_kind(Schema.InputValueDefinition), do: "argument" + defp struct_to_kind(Schema.FieldDefinition), do: "field" + defp struct_to_kind(Schema.DirectiveDefinition), do: "directive" + defp struct_to_kind(Schema.ScalarTypeDefinition), do: "scalar" + defp struct_to_kind(Schema.ObjectTypeDefinition), do: "object" + defp struct_to_kind(Schema.InputObjectTypeDefinition), do: "input object" + defp struct_to_kind(Schema.EnumValueDefinition), do: "enum value" + defp struct_to_kind(_), do: "type" + + @description """ + Name does not match possible #{inspect(@valid_name_regex)} regex. + + > Names in GraphQL are limited to this ASCII subset of possible characters to + > support interoperation with as many other systems as possible. + + Reference: https://graphql.github.io/graphql-spec/June2018/#sec-Names + + """ + + def explanation(%{artifact: artifact, value: value}) do + artifact_name = String.capitalize(artifact) + + """ + #{artifact_name} #{inspect(value)} has invalid characters. 
defmodule Absinthe.Phase.Schema.Validation.NoCircularFieldImports do
  @moduledoc false

  # Detects cycles in `import_fields` relationships between type definitions
  # and, when the import graph is acyclic, topologically sorts the types so
  # each import source precedes its importer.

  use Absinthe.Phase
  alias Absinthe.Blueprint
  alias Absinthe.Blueprint.Schema

  def run(blueprint, _opts) do
    {:ok, Blueprint.prewalk(blueprint, &validate_schema/1)}
  end

  def validate_schema(%Schema.SchemaDefinition{type_definitions: types} = schema) do
    {:halt, %{schema | type_definitions: sort_and_validate_types(types)}}
  end

  def validate_schema(node), do: node

  def sort_and_validate_types(types) do
    graph = :digraph.new([:cyclic])

    try do
      _ = build_import_graph(types, graph)

      {types_by_id, cycles?} = flag_cycles(types, graph)

      if cycles? do
        Map.values(types_by_id)
      else
        # No cycles: emit in reverse topological order so imports resolve first.
        graph
        |> :digraph_utils.topsort()
        |> Enum.reverse()
        |> Enum.flat_map(fn identifier ->
          case Map.fetch(types_by_id, identifier) do
            {:ok, type} -> [type]
            _ -> []
          end
        end)
      end
    after
      # Digraphs are ETS-backed; always release the tables.
      :digraph.delete(graph)
    end
  end

  # Attach a cycle error to every type participating in an import cycle.
  # Returns {map of identifier => type, whether any cycle was found}.
  defp flag_cycles(types, graph) do
    Enum.reduce(types, {%{}, false}, fn type, {acc, cycles?} ->
      case :digraph.get_cycle(graph, type.identifier) do
        false ->
          {Map.put(acc, type.identifier, type), cycles?}

        cycle ->
          {Map.put(acc, type.identifier, put_error(type, error(type, cycle))), true}
      end
    end)
  end

  defp error(type, deps) do
    %Absinthe.Phase.Error{
      message:
        String.trim("""
        Field Import Cycle Error

        Field Import in object `#{type.identifier}' `import_fields(#{inspect(type.imports)}) forms a cycle via: (#{inspect(deps)})
        """),
      locations: [type.__reference__.location],
      phase: __MODULE__,
      extra: type.identifier
    }
  end

  defp build_import_graph(types, graph) do
    Enum.each(types, &add_to_graph(&1, graph))
  end

  # Add the type and an edge to each of its import sources.
  defp add_to_graph(type, graph) do
    :digraph.add_vertex(graph, type.identifier)

    with %{imports: imports} <- type do
      for {ident, _} <- imports do
        :digraph.add_vertex(graph, ident)

        case :digraph.add_edge(graph, type.identifier, ident) do
          {:error, _} -> raise "edge failed"
          _ -> :ok
        end
      end
    end
  end
end
defmodule Absinthe.Phase.Schema.Validation.ObjectInterfacesMustBeValid do
  @moduledoc false

  # Ensures every entry in an object's (or interface's) `interfaces` list is a
  # known interface, and that interfaces implied transitively through those
  # interfaces are also declared on the object, per the GraphQL spec
  # (October 2021, Type System: objects must declare transitive interfaces).

  use Absinthe.Phase
  alias Absinthe.Blueprint

  def run(bp, _) do
    {:ok, Blueprint.prewalk(bp, &handle_schemas/1)}
  end

  defp handle_schemas(%Blueprint.Schema.SchemaDefinition{} = schema) do
    ifaces =
      schema.type_definitions
      |> Enum.filter(&match?(%Blueprint.Schema.InterfaceTypeDefinition{}, &1))
      |> Map.new(&{&1.identifier, &1})

    {:halt, Blueprint.prewalk(schema, &validate_objects(&1, ifaces))}
  end

  defp handle_schemas(node), do: node

  defp validate_objects(%struct{} = object, all_interfaces)
       when struct in [
              Blueprint.Schema.ObjectTypeDefinition,
              Blueprint.Schema.InterfaceTypeDefinition
            ] do
    check_transitive_interfaces(object, object.interfaces, all_interfaces, nil, [])
  end

  defp validate_objects(type, _), do: type

  # Walk the interface hierarchy: the object must itself declare every
  # interface reachable through the interfaces it lists. `visited` guards
  # against interface cycles.
  defp check_transitive_interfaces(object, [current | rest], all_interfaces, implemented_by, visited) do
    known = all_interfaces[current]

    if known && known.identifier in object.interfaces do
      case known do
        %{interfaces: parents} = interface ->
          # Skip already-walked interfaces to avoid cycling forever.
          unvisited = Enum.filter(parents, &(&1 not in visited))

          check_transitive_interfaces(
            object,
            rest ++ unvisited,
            all_interfaces,
            interface,
            [current | visited]
          )

        _ ->
          check_transitive_interfaces(object, rest, all_interfaces, implemented_by, [
            current | visited
          ])
      end
    else
      detail = %{
        object: object.identifier,
        interface: current,
        implemented_by: implemented_by
      }

      put_error(object, error(object, detail))
    end
  end

  defp check_transitive_interfaces(object, [], _, _, _), do: object

  defp error(object, data) do
    %Absinthe.Phase.Error{
      message: explanation(data),
      locations: [object.__reference__.location],
      phase: __MODULE__,
      extra: data
    }
  end

  @description """
  Only interfaces may be present in an Object's interface list.

  Reference: https://github.com/facebook/graphql/blob/master/spec/Section%203%20--%20Type%20System.md#interfaces
  """

  def explanation(%{object: obj, interface: interface, implemented_by: nil}) do
    """
    Type "#{obj}" must implement interface type "#{interface}"

    #{@description}
    """
  end

  def explanation(%{object: obj, interface: interface, implemented_by: implemented}) do
    """
    Type "#{obj}" must implement interface type "#{interface}" because it is implemented by "#{implemented.identifier}".

    #{@description}
    """
  end
end
validate_object(object, iface, types) do + case check_implements(iface, object, types) do + :ok -> + object + + {:error, invalid_fields} -> + detail = %{ + object: object.identifier, + interface: iface.identifier, + fields: invalid_fields + } + + object |> put_error(error(object, detail)) + end + end + + defp error(object, data) do + %Absinthe.Phase.Error{ + message: explanation(data), + locations: [object.__reference__.location], + phase: __MODULE__, + extra: data + } + end + + @moduledoc false + + @description """ + An object type must be a super-set of all interfaces it implements. + + * The object type must include a field of the same name for every field + defined in an interface. + * The object field must be of a type which is equal to or a sub-type of + the interface field (covariant). + * An object field type is a valid sub-type if it is equal to (the same + type as) the interface field type. + * An object field type is a valid sub-type if it is an Object type and the + interface field type is either an Interface type or a Union type and the + object field type is a possible type of the interface field type. + * An object field type is a valid sub-type if it is a List type and the + interface field type is also a List type and the list-item type of the + object field type is a valid sub-type of the list-item type of the + interface field type. + * An object field type is a valid sub-type if it is a Non-Null variant of a + valid sub-type of the interface field type. + * The object field must include an argument of the same name for every + argument defined in the interface field. + * The object field argument must accept the same type (invariant) as the + interface field argument. + * The object field may include additional arguments not defined in the + interface field, but any additional argument must not be required. 
+ + Reference: https://github.com/facebook/graphql/blob/master/spec/Section%203%20--%20Type%20System.md#object-type-validation + """ + + def explanation(%{object: obj, interface: interface, fields: fields}) do + """ + Type "#{obj}" does not fully implement interface type "#{interface}" \ + for fields #{inspect(fields)} + + #{@description} + """ + end + + def check_implements(interface, type, types) do + check_covariant(interface, type, nil, types) + end + + defp check_covariant( + %Blueprint.Schema.InterfaceTypeDefinition{fields: ifields}, + %{fields: type_fields}, + _field_ident, + types + ) do + Enum.reduce(ifields, [], fn %{identifier: ifield_ident} = ifield, invalid_fields -> + case Enum.find(type_fields, &(&1.identifier == ifield_ident)) do + nil -> + [ifield_ident | invalid_fields] + + field -> + case check_covariant(ifield.type, field.type, ifield_ident, types) do + :ok -> + invalid_fields + + {:error, invalid_field} -> + [invalid_field | invalid_fields] + end + end + end) + |> case do + [] -> + :ok + + invalid_fields -> + {:error, invalid_fields} + end + end + + defp check_covariant( + %Blueprint.Schema.InterfaceTypeDefinition{identifier: interface_ident}, + interface_ident, + _field_ident, + _types + ) do + :ok + end + + defp check_covariant( + %Blueprint.Schema.InterfaceTypeDefinition{identifier: interface_ident}, + field_type, + field_ident, + types + ) do + %{interfaces: field_type_interfaces} = Map.get(types, field_type) + (interface_ident in field_type_interfaces && :ok) || {:error, field_ident} + end + + defp check_covariant( + %wrapper{of_type: inner_type1}, + %wrapper{of_type: inner_type2}, + field_ident, + types + ) do + check_covariant(inner_type1, inner_type2, field_ident, types) + end + + defp check_covariant( + itype, + %Absinthe.Blueprint.TypeReference.NonNull{of_type: inner_type}, + field_ident, + types + ) do + check_covariant(itype, inner_type, field_ident, types) + end + + defp check_covariant(%{identifier: identifier}, %{identifier: 
identifier}, _field_ident, _types) do + :ok + end + + defp check_covariant( + %Blueprint.TypeReference.Name{name: name}, + %Blueprint.TypeReference.Name{name: name}, + _field_ident, + _types + ) do + :ok + end + + defp check_covariant( + %Blueprint.TypeReference.Name{name: iface_name}, + %Blueprint.TypeReference.Name{name: type_name}, + field_ident, + types + ) do + {_, itype} = Enum.find(types, fn {_, %{name: name}} -> name == iface_name end) + {_, type} = Enum.find(types, fn {_, %{name: name}} -> name == type_name end) + + check_covariant(itype, type, field_ident, types) + end + + defp check_covariant(nil, _, field_ident, _), do: {:error, field_ident} + defp check_covariant(_, nil, field_ident, _), do: {:error, field_ident} + + defp check_covariant(itype, type, field_ident, types) when is_atom(itype) do + itype = Map.get(types, itype) + check_covariant(itype, type, field_ident, types) + end + + defp check_covariant(itype, type, field_ident, types) when is_atom(type) do + type = Map.get(types, type) + check_covariant(itype, type, field_ident, types) + end + + defp check_covariant(_, _, field_ident, _types) do + {:error, field_ident} + end +end diff --git a/lib/absinthe/phase/schema/validation/query_type_must_be_object.ex b/lib/absinthe/phase/schema/validation/query_type_must_be_object.ex new file mode 100644 index 0000000000..6e09285dad --- /dev/null +++ b/lib/absinthe/phase/schema/validation/query_type_must_be_object.ex @@ -0,0 +1,65 @@ +defmodule Absinthe.Phase.Schema.Validation.QueryTypeMustBeObject do + use Absinthe.Phase + alias Absinthe.Blueprint + + def run(bp, _) do + bp = Blueprint.prewalk(bp, &validate_schemas/1) + {:ok, bp} + end + + defp validate_schemas(%Blueprint.Schema.SchemaDefinition{} = schema) do + case Enum.find( + schema.type_definitions, + &match?(%Blueprint.Schema.ObjectTypeDefinition{identifier: :query}, &1) + ) do + nil -> + schema |> put_error(error(schema)) + + _ -> + schema + end + end + + defp validate_schemas(node), do: node + + defp 
error(schema) do + %Absinthe.Phase.Error{ + message: explanation(nil), + locations: [schema.__reference__.location], + phase: __MODULE__ + } + end + + @moduledoc false + + @description """ + + # Example + defmodule MyApp.Schema do + use Absinthe.Schema + + query do + # Fields go here + end + end + + -------------------------------------- + From the graphql schema specification + + A GraphQL schema includes types, indicating where query and mutation + operations start. This provides the initial entry points into the type system. + The query type must always be provided, and is an Object base type. The + mutation type is optional; if it is null, that means the system does not + support mutations. If it is provided, it must be an object base type. + + Reference: https://facebook.github.io/graphql/#sec-Initial-types + """ + + def explanation(_value) do + """ + The root query type must be implemented and be of type Object + + #{@description} + """ + end +end diff --git a/lib/absinthe/phase/schema/validation/result.ex b/lib/absinthe/phase/schema/validation/result.ex new file mode 100644 index 0000000000..e916fda801 --- /dev/null +++ b/lib/absinthe/phase/schema/validation/result.ex @@ -0,0 +1,38 @@ +defmodule Absinthe.Phase.Schema.Validation.Result do + @moduledoc false + + alias Absinthe.{Blueprint, Phase} + + use Absinthe.Phase + + @doc """ + Run the validation. 
+ """ + @spec run(Blueprint.t(), Keyword.t()) :: Phase.result_t() + def run(input, _opts) do + {input, errors} = Blueprint.prewalk(input, [], &handle_node/2) + errors = errors |> :lists.reverse() |> Enum.uniq() + + case errors do + [] -> + {:ok, input} + + _ -> + {:error, errors} + end + end + + # Collect the validation errors from nodes + @spec handle_node(Blueprint.node_t(), [Phase.Error.t()]) :: + {Blueprint.node_t(), [Phase.Error.t()]} + defp handle_node(%{errors: errs} = node, errors) do + {node, :lists.reverse(errs) ++ errors} + end + + defp handle_node(%{raw: raw} = node, errors) do + {_, errors} = Blueprint.prewalk(raw, errors, &handle_node/2) + {node, errors} + end + + defp handle_node(node, acc), do: {node, acc} +end diff --git a/lib/absinthe/phase/schema/validation/type_names_are_reserved.ex b/lib/absinthe/phase/schema/validation/type_names_are_reserved.ex new file mode 100644 index 0000000000..bc4bc1ad2c --- /dev/null +++ b/lib/absinthe/phase/schema/validation/type_names_are_reserved.ex @@ -0,0 +1,101 @@ +defmodule Absinthe.Phase.Schema.Validation.TypeNamesAreReserved do + @moduledoc false + + use Absinthe.Phase + alias Absinthe.Blueprint + alias Absinthe.Blueprint.Schema + + def run(bp, _) do + bp = Blueprint.prewalk(bp, &validate_reserved/1) + {:ok, bp} + end + + def allow_reserved(node = %{flags: nil}) do + allow_reserved(%{node | flags: %{}}) + end + + def allow_reserved(node = %{flags: flags}) do + flags = + flags + |> Map.put(:reserved_name, true) + + %{node | flags: flags} + end + + def make_reserved(node = %{name: "__" <> _}) do + allow_reserved(node) + end + + def make_reserved(node = %{name: name, identifier: identifier}) do + node = %{ + node + | name: name |> String.replace_prefix("", "__"), + identifier: + identifier |> to_string() |> String.replace_prefix("", "__") |> String.to_atom() + } + + allow_reserved(node) + end + + defp validate_reserved(%struct{name: "__" <> _} = entity) do + reserved_ok = + 
Absinthe.Type.built_in_module?(entity.__reference__.module) || + reserved_name_ok_flag?(entity) + + if reserved_ok do + entity + else + kind = struct_to_kind(struct) + + detail = %{artifact: "#{kind} name", value: entity.name} + + entity |> put_error(error(entity, detail)) + end + end + + defp validate_reserved(entity) do + entity + end + + defp reserved_name_ok_flag?(%{flags: flags}) do + flags[:reserved_name] + end + + defp reserved_name_ok_flag?(_) do + false + end + + defp error(object, data) do + %Absinthe.Phase.Error{ + message: explanation(data), + locations: [object.__reference__.location], + phase: __MODULE__, + extra: data + } + end + + defp struct_to_kind(Schema.InputValueDefinition), do: "argument" + defp struct_to_kind(Schema.FieldDefinition), do: "field" + defp struct_to_kind(Schema.DirectiveDefinition), do: "directive" + defp struct_to_kind(_), do: "type" + + @description """ + Type system artifacts must not begin with two leading underscores. + + > GraphQL type system authors must not define any types, fields, arguments, + > or any other type system artifact with two leading underscores. + + Reference: https://github.com/facebook/graphql/blob/master/spec/Section%204%20--%20Introspection.md#naming-conventions + + """ + + def explanation(%{artifact: artifact, value: value}) do + artifact_name = String.capitalize(artifact) + + """ + #{artifact_name} #{inspect(value)} starts with two leading underscores. 
+ + #{@description} + """ + end +end diff --git a/lib/absinthe/phase/schema/validation/type_names_are_unique.ex b/lib/absinthe/phase/schema/validation/type_names_are_unique.ex new file mode 100644 index 0000000000..827537c629 --- /dev/null +++ b/lib/absinthe/phase/schema/validation/type_names_are_unique.ex @@ -0,0 +1,105 @@ +defmodule Absinthe.Phase.Schema.Validation.TypeNamesAreUnique do + use Absinthe.Phase + alias Absinthe.Blueprint + + def run(bp, _) do + bp = + bp + |> Blueprint.prewalk(&handle_schemas(&1, :identifier)) + |> Blueprint.prewalk(&handle_schemas(&1, :name)) + + {:ok, bp} + end + + defp handle_schemas(%Blueprint.Schema.SchemaDefinition{} = schema, key) do + if Enum.any?(schema.type_definitions, fn + %Blueprint.Schema.SchemaDefinition{} -> + true + + _ -> + false + end) do + raise "SchemaDefinition Inside Schema Definition" + end + + types = Enum.group_by(schema.type_definitions, &Map.fetch!(&1, key)) + directives = Enum.group_by(schema.directive_definitions, &Map.fetch!(&1, key)) + + types = Map.merge(types, directives) + + schema = Blueprint.prewalk(schema, &validate_types(&1, types, key)) + {:halt, schema} + end + + defp handle_schemas(obj, _) do + obj + end + + @types [ + Blueprint.Schema.DirectiveDefinition, + Blueprint.Schema.EnumTypeDefinition, + Blueprint.Schema.InputObjectTypeDefinition, + Blueprint.Schema.InterfaceTypeDefinition, + Blueprint.Schema.ObjectTypeDefinition, + Blueprint.Schema.ScalarTypeDefinition, + Blueprint.Schema.UnionTypeDefinition + ] + defp validate_types(%type{} = object, types, key) when type in @types do + ident = Map.fetch!(object, key) + + case Map.fetch!(types, ident) do + [_] -> + object + + others -> + detail = %{ + value: ident, + artifact: + case key do + :identifier -> "Absinthe type identifier" + :name -> "Type name" + end + } + + object |> put_error(error(detail, others)) + end + end + + defp validate_types(type, _, _) do + type + end + + defp error(data, types) do + %Absinthe.Phase.Error{ + message: 
explanation(data), + locations: types |> Enum.map(& &1.__reference__.location), + phase: __MODULE__, + extra: data + } + end + + @moduledoc false + + @description """ + References to types must be unique. + + > All types within a GraphQL schema must have unique names. No two provided + > types may have the same name. No provided type may have a name which + > conflicts with any built in types (including Scalar and Introspection + > types). + + Reference: https://github.com/facebook/graphql/blob/master/spec/Section%203%20--%20Type%20System.md#type-system + """ + + def explanation(%{artifact: artifact, value: name}) do + """ + #{artifact} #{inspect(name)} is not unique. + + #{@description} + """ + end + + # This rule is only used for its explanation. Error details are added during + # compilation. + def check(_), do: [] +end diff --git a/lib/absinthe/phase/schema/validation/type_references_exist.ex b/lib/absinthe/phase/schema/validation/type_references_exist.ex new file mode 100644 index 0000000000..ba4920eec9 --- /dev/null +++ b/lib/absinthe/phase/schema/validation/type_references_exist.ex @@ -0,0 +1,119 @@ +defmodule Absinthe.Phase.Schema.Validation.TypeReferencesExist do + @moduledoc false + + use Absinthe.Phase + alias Absinthe.Blueprint + alias Absinthe.Blueprint.Schema + + def run(blueprint, _opts) do + blueprint = Blueprint.prewalk(blueprint, &validate_schema/1) + + {:ok, blueprint} + end + + def validate_schema(%Schema.SchemaDefinition{} = schema) do + types = + schema.type_definitions + |> Enum.flat_map(&[&1.name, &1.identifier]) + |> MapSet.new() + + schema = Blueprint.prewalk(schema, &validate_types(&1, types)) + {:halt, schema} + end + + def validate_schema(node), do: node + + def validate_types(%Blueprint.Schema.FieldDefinition{} = field, types) do + check_or_error(field, field.type, types) + end + + def validate_types(%Blueprint.Schema.ObjectTypeDefinition{} = object, types) do + object + |> check_types(:interfaces, &check_or_error(&2, &1, types)) + |> 
check_types(:imports, fn {type, _}, obj -> check_or_error(obj, type, types) end) + end + + def validate_types(%Blueprint.Schema.InterfaceTypeDefinition{} = interface, types) do + check_types(interface, :interfaces, &check_or_error(&2, &1, types)) + end + + def validate_types(%Blueprint.Schema.InputObjectTypeDefinition{} = object, types) do + check_types(object, :imports, fn {type, _}, obj -> check_or_error(obj, type, types) end) + end + + def validate_types(%Blueprint.Schema.InputValueDefinition{} = input, types) do + check_or_error(input, input.type, types) + end + + def validate_types(%Blueprint.Schema.UnionTypeDefinition{} = union, types) do + check_types(union, :types, &check_or_error(&2, &1, types)) + end + + @no_types [ + Blueprint.Schema.DirectiveDefinition, + Blueprint.Schema.EnumTypeDefinition, + Blueprint.Schema.EnumValueDefinition, + Blueprint.Schema.InterfaceTypeDefinition, + Blueprint.Schema.ObjectTypeDefinition, + Blueprint.Schema.ScalarTypeDefinition, + Blueprint.Schema.SchemaDefinition, + Blueprint.TypeReference.NonNull, + Blueprint.TypeReference.ListOf, + Absinthe.Blueprint.TypeReference.Name + ] + def validate_types(%struct{} = type, _) when struct in @no_types do + type + end + + def validate_types(type, _) do + type + end + + defp check_types(entity, key, fun) do + entity + |> Map.fetch!(key) + |> Enum.reduce(entity, fun) + end + + defp check_or_error(thing, type, types) do + type = unwrap(type) + + if type in types do + thing + else + put_error(thing, error(thing, type)) + end + end + + defp unwrap(value) when is_binary(value) or is_atom(value) do + value + end + + defp unwrap(%Absinthe.Blueprint.TypeReference.Name{name: name}) do + name + end + + defp unwrap(type) do + unwrap_type = Absinthe.Blueprint.TypeReference.unwrap(type) + + if unwrap_type == type do + type + else + unwrap(unwrap_type) + end + end + + defp error(thing, type) do + artifact_name = String.capitalize(thing.name) + + %Absinthe.Phase.Error{ + message: """ + In 
#{artifact_name}, #{inspect(type)} is not defined in your schema. + + Types must exist if referenced. + """, + locations: [thing.__reference__.location], + phase: __MODULE__ + } + end +end diff --git a/lib/absinthe/phase/schema/validation/unique_field_names.ex b/lib/absinthe/phase/schema/validation/unique_field_names.ex new file mode 100644 index 0000000000..40a9774520 --- /dev/null +++ b/lib/absinthe/phase/schema/validation/unique_field_names.ex @@ -0,0 +1,69 @@ +defmodule Absinthe.Phase.Schema.Validation.UniqueFieldNames do + @moduledoc false + + @behaviour Absinthe.Phase + alias Absinthe.Blueprint + + def run(bp, _) do + bp = + bp + |> Blueprint.prewalk(&handle_schemas(&1, :name)) + + {:ok, bp} + end + + defp handle_schemas(%Blueprint.Schema.SchemaDefinition{} = schema, key) do + schema = Blueprint.prewalk(schema, &validate_types(&1, key)) + {:halt, schema} + end + + defp handle_schemas(obj, _) do + obj + end + + defp validate_types(%type{} = object, key) + when type in [ + Blueprint.Schema.InputObjectTypeDefinition, + Blueprint.Schema.InterfaceTypeDefinition, + Blueprint.Schema.ObjectTypeDefinition + ] do + fields = + for field <- object.fields do + name_counts = Enum.frequencies_by(object.fields, &Map.get(&1, key)) + + if duplicate?(name_counts, field, key) do + Absinthe.Phase.put_error(field, error(field, object)) + else + field + end + end + + %{object | fields: fields} + end + + defp validate_types(type, _) do + type + end + + defp duplicate?(name_counts, field, key) do + field_identifier = Map.get(field, key) + Map.get(name_counts, field_identifier, 0) > 1 + end + + defp error(field, object) do + %Absinthe.Phase.Error{ + message: explanation(field, object), + locations: [field.__reference__.location], + phase: __MODULE__, + extra: field + } + end + + def explanation(field, object) do + """ + The field #{inspect(field.name)} is not unique in type #{inspect(object.name)}. 
+ + The field must have a unique name within that Object type; no two fields may share the same name. + """ + end +end diff --git a/lib/absinthe/phase/subscription/subscribe_self.ex b/lib/absinthe/phase/subscription/subscribe_self.ex index c21635ba39..ffb86860a6 100644 --- a/lib/absinthe/phase/subscription/subscribe_self.ex +++ b/lib/absinthe/phase/subscription/subscribe_self.ex @@ -19,14 +19,20 @@ defmodule Absinthe.Phase.Subscription.SubscribeSelf do context = blueprint.execution.context pubsub = ensure_pubsub!(context) - hash = :erlang.phash2(blueprint) - doc_id = "__absinthe__:doc:#{hash}" - %{selections: [field]} = op - with {:ok, field_key} <- get_field_key(field, context) do - Absinthe.Subscription.subscribe(pubsub, field_key, doc_id, blueprint) - {:replace, blueprint, [{Phase.Subscription.Result, topic: doc_id}]} + with {:ok, config} <- get_config(field, context, blueprint) do + field_keys = get_field_keys(field, config) + subscription_id = get_subscription_id(config, blueprint, options) + + for field_key <- field_keys, + do: Absinthe.Subscription.subscribe(pubsub, field_key, subscription_id, blueprint) + + {:replace, blueprint, + [ + {Phase.Subscription.Result, topic: subscription_id}, + {Phase.Telemetry, Keyword.put(options, :event, [:execute, :operation, :stop])} + ]} else {:error, error} -> blueprint = update_in(blueprint.execution.validation_errors, &[error | &1]) @@ -39,13 +45,17 @@ defmodule Absinthe.Phase.Subscription.SubscribeSelf do end end - defp get_field_key(%{schema_node: schema_node, argument_data: argument_data} = field, context) do + defp get_config( + %{schema_node: schema_node, argument_data: argument_data} = field, + context, + blueprint + ) do name = schema_node.identifier config = - case schema_node.config do + case Absinthe.Type.function(schema_node, :config) do fun when is_function(fun, 2) -> - apply(fun, [argument_data, %{context: context}]) + apply(fun, [argument_data, %{context: context, document: blueprint}]) fun when 
is_function(fun, 1) -> IO.write( @@ -61,8 +71,7 @@ defmodule Absinthe.Phase.Subscription.SubscribeSelf do case config do {:ok, config} -> - key = find_key!(config) - {:ok, {name, key}} + {:ok, config} {:error, msg} -> error = %Phase.Error{ @@ -77,23 +86,18 @@ defmodule Absinthe.Phase.Subscription.SubscribeSelf do raise """ Invalid return from config function! - Config function must returne `{:ok, config}` or `{:error, msg}`. You returned: + A config function must return `{:ok, config}` or `{:error, msg}`. You returned: #{inspect(val)} """ end end - defp find_key!(config) do - topic = - config[:topic] || - raise """ - Subscription config must include a non null topic! - - #{inspect(config)} - """ + defp get_field_keys(%{schema_node: schema_node} = _field, config) do + name = schema_node.identifier - to_string(topic) + find_field_keys!(config) + |> Enum.map(fn key -> {name, key} end) end defp ensure_pubsub!(context) do @@ -109,4 +113,54 @@ defmodule Absinthe.Phase.Subscription.SubscribeSelf do """ end end + + defp find_field_keys!(config) do + topic = + config[:topic] || + raise """ + Subscription config must include a non null topic! + + #{inspect(config)} + """ + + case topic do + [] -> + raise """ + Subscription config must not provide an empty list of topics! 
+ + #{inspect(config)} + """ + + val -> + List.wrap(val) + |> Enum.map(&to_string/1) + end + end + + defp get_subscription_id(config, blueprint, options) do + context_id = get_context_id(config) + document_id = get_document_id(config, blueprint, options) + + "__absinthe__:doc:#{context_id}:#{document_id}" + end + + defp get_context_id(config) do + context_id = config[:context_id] || :erlang.unique_integer() + to_string(context_id) + end + + defp get_document_id(config, blueprint, options) do + case config[:document_id] do + nil -> + binary = + {blueprint.source || blueprint.input, options[:variables] || %{}} + |> :erlang.term_to_binary() + + :crypto.hash(:sha256, binary) + |> Base.encode16() + + val -> + val + end + end end diff --git a/lib/absinthe/phase/telemetry.ex b/lib/absinthe/phase/telemetry.ex new file mode 100644 index 0000000000..797da6d8ce --- /dev/null +++ b/lib/absinthe/phase/telemetry.ex @@ -0,0 +1,82 @@ +defmodule Absinthe.Phase.Telemetry do + @moduledoc """ + Gather and report telemetry about an operation. 
+ """ + @operation_start [:absinthe, :execute, :operation, :start] + @operation_stop [:absinthe, :execute, :operation, :stop] + + @subscription_start [:absinthe, :subscription, :publish, :start] + @subscription_stop [:absinthe, :subscription, :publish, :stop] + + use Absinthe.Phase + + def run(blueprint, options) do + event = Keyword.fetch!(options, :event) + do_run(blueprint, event, options) + end + + defp do_run(blueprint, [:execute, :operation, :start], options) do + id = :erlang.unique_integer() + system_time = System.system_time() + start_time_mono = System.monotonic_time() + + :telemetry.execute( + @operation_start, + %{system_time: system_time}, + %{id: id, telemetry_span_context: id, blueprint: blueprint, options: options} + ) + + {:ok, + %{ + blueprint + | source: blueprint.input, + telemetry: %{id: id, start_time_mono: start_time_mono} + }} + end + + defp do_run(blueprint, [:subscription, :publish, :start], options) do + id = :erlang.unique_integer() + system_time = System.system_time() + start_time_mono = System.monotonic_time() + + :telemetry.execute( + @subscription_start, + %{system_time: system_time}, + %{id: id, telemetry_span_context: id, blueprint: blueprint, options: options} + ) + + {:ok, + %{ + blueprint + | telemetry: %{id: id, start_time_mono: start_time_mono} + }} + end + + defp do_run(blueprint, [:subscription, :publish, :stop], options) do + end_time_mono = System.monotonic_time() + + with %{id: id, start_time_mono: start_time_mono} <- blueprint.telemetry do + :telemetry.execute( + @subscription_stop, + %{duration: end_time_mono - start_time_mono}, + %{id: id, telemetry_span_context: id, blueprint: blueprint, options: options} + ) + end + + {:ok, blueprint} + end + + defp do_run(blueprint, [:execute, :operation, :stop], options) do + end_time_mono = System.monotonic_time() + + with %{id: id, start_time_mono: start_time_mono} <- blueprint.telemetry do + :telemetry.execute( + @operation_stop, + %{duration: end_time_mono - start_time_mono}, + 
%{id: id, telemetry_span_context: id, blueprint: blueprint, options: options} + ) + end + + {:ok, blueprint} + end +end diff --git a/lib/absinthe/phase/validation.ex b/lib/absinthe/phase/validation.ex index bc22d5a235..0ee7629cb2 100644 --- a/lib/absinthe/phase/validation.ex +++ b/lib/absinthe/phase/validation.ex @@ -10,6 +10,8 @@ defmodule Absinthe.Phase.Validation do end defmodule Helpers do + @moduledoc false + @spec any_invalid?([Blueprint.node_t()]) :: boolean def any_invalid?(nodes) do Enum.any?(nodes, fn diff --git a/lib/absinthe/phase/validation/known_type_names.ex b/lib/absinthe/phase/validation/known_type_names.ex index 46d623f131..7e6efd9b6b 100644 --- a/lib/absinthe/phase/validation/known_type_names.ex +++ b/lib/absinthe/phase/validation/known_type_names.ex @@ -12,6 +12,7 @@ defmodule Absinthe.Phase.Validation.KnownTypeNames do # ``` alias Absinthe.{Blueprint, Phase} + alias Absinthe.Phase.Document.Validation.Utils use Absinthe.Phase use Absinthe.Phase.Validation @@ -33,16 +34,18 @@ defmodule Absinthe.Phase.Validation.KnownTypeNames do |> put_error(error(node, name)) end - defp handle_node(%Blueprint.Document.VariableDefinition{schema_node: nil} = node, schema) do + defp handle_node(%Blueprint.Document.VariableDefinition{} = node, schema) do name = Blueprint.TypeReference.unwrap(node.type).name inner_schema_type = schema.__absinthe_lookup__(name) if inner_schema_type do node else + suggestions = suggested_type_names(schema, name) + node |> flag_invalid(:bad_type_name) - |> put_error(error(node, name)) + |> put_error(error(node, name, suggestions)) end end @@ -50,12 +53,27 @@ defmodule Absinthe.Phase.Validation.KnownTypeNames do node end + defp suggested_type_names(schema, name) do + schema + |> Absinthe.Schema.referenced_types() + |> Enum.map(& &1.name) + |> Absinthe.Utils.Suggestion.sort_list(name) + end + @spec error(Blueprint.node_t(), String.t()) :: Phase.Error.t() - defp error(node, name) do + defp error(node, name, suggestions \\ []) do 
%Phase.Error{ phase: __MODULE__, - message: ~s(Unknown type "#{name}".), + message: message(name, suggestions), locations: [node.source_location] } end + + defp message(name, []) do + ~s(Unknown type "#{name}".) + end + + defp message(name, suggestions) do + ~s(Unknown type "#{name}".) <> Utils.MessageSuggestions.suggest_message(suggestions) + end end diff --git a/lib/absinthe/pipeline.ex b/lib/absinthe/pipeline.ex index 089bc8d40c..3ee2079a89 100644 --- a/lib/absinthe/pipeline.ex +++ b/lib/absinthe/pipeline.ex @@ -4,12 +4,16 @@ defmodule Absinthe.Pipeline do A pipeline is merely a list of phases. This module contains functions for building, modifying, and executing pipelines of phases. + + Pipelines are used to build, validate and manipulate GraphQL documents or schema's. + + * See [`Absinthe.Plug`](https://hexdocs.pm/absinthe_plug/Absinthe.Plug.html) on adjusting the document pipeline for GraphQL over http requests. + * See [`Absinthe.Phoenix`](https://hexdocs.pm/absinthe_phoenix/) on adjusting the document pipeline for GraphQL over Phoenix channels. + * See `Absinthe.Schema` on adjusting the schema pipeline for schema manipulation. 
""" alias Absinthe.Phase - require Logger - @type data_t :: any @type phase_config_t :: Phase.t() | {Phase.t(), Keyword.t()} @@ -40,10 +44,15 @@ defmodule Absinthe.Pipeline do @spec for_document(Absinthe.Schema.t()) :: t @spec for_document(Absinthe.Schema.t(), Keyword.t()) :: t + @doc """ + The default document pipeline + """ def for_document(schema, options \\ []) do options = options(Keyword.put(options, :schema, schema)) [ + Phase.Init, + {Phase.Telemetry, Keyword.put(options, :event, [:execute, :operation, :start])}, # Parse Document {Phase.Parse, options}, # Convert to Blueprint @@ -56,12 +65,11 @@ defmodule Absinthe.Pipeline do # Validate Document Structure {Phase.Document.Validation.NoFragmentCycles, options}, Phase.Document.Validation.LoneAnonymousOperation, - Phase.Document.Validation.SelectedCurrentOperation, + {Phase.Document.Validation.SelectedCurrentOperation, options}, Phase.Document.Validation.KnownFragmentNames, Phase.Document.Validation.NoUndefinedVariables, Phase.Document.Validation.NoUnusedVariables, - # TODO: uncomment in 1.5 - # Phase.Document.Validation.NoUnusedFragments + Phase.Document.Validation.NoUnusedFragments, Phase.Document.Validation.UniqueFragmentNames, Phase.Document.Validation.UniqueOperationNames, Phase.Document.Validation.UniqueVariableNames, @@ -74,6 +82,7 @@ defmodule Absinthe.Pipeline do {Phase.Schema, options}, # Ensure Types Phase.Validation.KnownTypeNames, + Phase.Document.Arguments.VariableTypesMatch, # Process Arguments Phase.Document.Arguments.CoerceEnums, Phase.Document.Arguments.CoerceLists, @@ -82,7 +91,8 @@ defmodule Absinthe.Pipeline do Phase.Document.MissingLiterals, Phase.Document.Arguments.FlagInvalid, # Validate Full Document - Phase.Validation.KnownDirectives, + Phase.Document.Validation.KnownDirectives, + Phase.Document.Validation.RepeatableDirectives, Phase.Document.Validation.ScalarLeafs, Phase.Document.Validation.VariablesAreInputTypes, Phase.Document.Validation.ArgumentsOfCorrectType, @@ -105,34 +115,76 @@ 
defmodule Absinthe.Pipeline do {Phase.Subscription.SubscribeSelf, options}, {Phase.Document.Execution.Resolution, options}, # Format Result - Phase.Document.Result + Phase.Document.Result, + {Phase.Telemetry, Keyword.put(options, :event, [:execute, :operation, :stop])} ] end - @defaults [ - adapter: Absinthe.Adapter.LanguageConventions - ] + @default_prototype_schema Absinthe.Schema.Prototype @spec for_schema(nil | Absinthe.Schema.t()) :: t @spec for_schema(nil | Absinthe.Schema.t(), Keyword.t()) :: t - def for_schema(prototype_schema, options \\ []) do + @doc """ + The default schema pipeline + """ + def for_schema(schema, options \\ []) do options = - @defaults - |> Keyword.merge(Keyword.put(options, :schema, prototype_schema)) + options + |> Enum.reject(fn {_, v} -> is_nil(v) end) + |> Keyword.put(:schema, schema) + |> Keyword.put_new(:prototype_schema, @default_prototype_schema) [ - Phase.Parse, - Phase.Blueprint, + Phase.Schema.TypeImports, + Phase.Schema.DeprecatedDirectiveFields, + Phase.Schema.ApplyDeclaration, + Phase.Schema.Introspection, + {Phase.Schema.Hydrate, options}, + Phase.Schema.Arguments.Normalize, {Phase.Schema, options}, - Phase.Validation.KnownTypeNames, - Phase.Validation.KnownDirectives + Phase.Schema.Validation.TypeNamesAreUnique, + Phase.Schema.Validation.TypeReferencesExist, + Phase.Schema.Validation.TypeNamesAreReserved, + # This phase is run once now because a lot of other + # validations aren't possible if type references are invalid. 
+ Phase.Schema.Validation.NoCircularFieldImports, + {Phase.Schema.Validation.Result, pass: :initial}, + Phase.Schema.FieldImports, + Phase.Schema.Validation.KnownDirectives, + Phase.Document.Validation.KnownArgumentNames, + {Phase.Schema.Arguments.Parse, options}, + Phase.Schema.Arguments.Data, + Phase.Schema.Directives, + Phase.Schema.Validation.DefaultEnumValuePresent, + Phase.Schema.Validation.DirectivesMustBeValid, + Phase.Schema.Validation.InputOutputTypesCorrectlyPlaced, + Phase.Schema.Validation.InterfacesMustResolveTypes, + Phase.Schema.Validation.ObjectInterfacesMustBeValid, + Phase.Schema.Validation.ObjectMustImplementInterfaces, + Phase.Schema.Validation.NoInterfaceCyles, + Phase.Schema.Validation.QueryTypeMustBeObject, + Phase.Schema.Validation.NamesMustBeValid, + Phase.Schema.Validation.UniqueFieldNames, + Phase.Schema.RegisterTriggers, + Phase.Schema.MarkReferenced, + Phase.Schema.ReformatDescriptions, + # This phase is run again now after additional validations + {Phase.Schema.Validation.Result, pass: :final}, + Phase.Schema.Build, + Phase.Schema.InlineFunctions, + {Phase.Schema.Compile, options} ] end @doc """ Return the part of a pipeline before a specific phase. + + ## Examples + + iex> Pipeline.before([A, B, C], B) + [A] """ - @spec before(t, atom) :: t + @spec before(t, phase_config_t) :: t def before(pipeline, phase) do result = List.flatten(pipeline) @@ -149,6 +201,11 @@ defmodule Absinthe.Pipeline do @doc """ Return the part of a pipeline after (and including) a specific phase. 
+ + ## Examples + + iex> Pipeline.from([A, B, C], B) + [B, C] """ @spec from(t, atom) :: t def from(pipeline, phase) do @@ -197,12 +254,10 @@ defmodule Absinthe.Pipeline do replacement {_, opts} -> - case is_atom(replacement) do - true -> - {replacement, opts} - - false -> - replacement + if is_atom(replacement) do + {replacement, opts} + else + replacement end end @@ -215,38 +270,93 @@ defmodule Absinthe.Pipeline do # Whether a phase configuration is for a given phase @spec match_phase?(Phase.t(), phase_config_t) :: boolean defp match_phase?(phase, phase), do: true - defp match_phase?(phase, {phase, _}), do: true + defp match_phase?(phase, {phase, _}) when is_atom(phase), do: true defp match_phase?(_, _), do: false @doc """ Return the part of a pipeline up to and including a specific phase. + + ## Examples + + iex> Pipeline.upto([A, B, C], B) + [A, B] """ - @spec upto(t, atom) :: t + @spec upto(t, phase_config_t) :: t def upto(pipeline, phase) do beginning = before(pipeline, phase) item = get_in(pipeline, [Access.at(length(beginning))]) beginning ++ [item] end + @doc """ + Return the pipeline with the supplied phase removed. + + ## Examples + + iex> Pipeline.without([A, B, C], B) + [A, C] + """ @spec without(t, Phase.t()) :: t def without(pipeline, phase) do pipeline |> Enum.filter(&(not match_phase?(phase, &1))) end + @doc """ + Return the pipeline with the phase/list of phases inserted before + the supplied phase. + + ## Examples + + Add one phase before another: + + iex> Pipeline.insert_before([A, C, D], C, B) + [A, B, C, D] + + Add list of phase before another: + + iex> Pipeline.insert_before([A, D, E], D, [B, C]) + [A, B, C, D, E] + + """ @spec insert_before(t, Phase.t(), phase_config_t | [phase_config_t]) :: t def insert_before(pipeline, phase, additional) do beginning = before(pipeline, phase) beginning ++ List.wrap(additional) ++ (pipeline -- beginning) end + @doc """ + Return the pipeline with the phase/list of phases inserted after + the supplied phase. 
+ + ## Examples + + Add one phase after another: + + iex> Pipeline.insert_after([A, C, D], A, B) + [A, B, C, D] + + Add list of phases after another: + + iex> Pipeline.insert_after([A, D, E], A, [B, C]) + [A, B, C, D, E] + + """ @spec insert_after(t, Phase.t(), phase_config_t | [phase_config_t]) :: t def insert_after(pipeline, phase, additional) do beginning = upto(pipeline, phase) beginning ++ List.wrap(additional) ++ (pipeline -- beginning) end - @spec reject(t, Regex.t() | (Module.t() -> boolean)) :: t + @doc """ + Return the pipeline with the phases matching the regex removed. + + ## Examples + + iex> Pipeline.reject([A, B, C], ~r/A|B/) + [C] + """ + @spec reject(t, Regex.t() | (module -> boolean)) :: t def reject(pipeline, %Regex{} = pattern) do reject(pipeline, fn phase -> Regex.match?(pattern, Atom.to_string(phase)) @@ -268,10 +378,14 @@ defmodule Absinthe.Pipeline do {:ok, input, done} end - def run_phase([phase_config | todo], input, done) do + def run_phase([phase_config | todo] = all_phases, input, done) do {phase, options} = phase_invocation(phase_config) case phase.run(input, options) do + {:record_phases, result, fun} -> + result = fun.(result, all_phases) + run_phase(todo, result, [phase | done]) + {:ok, result} -> run_phase(todo, result, [phase | done]) diff --git a/lib/absinthe/pipeline/batch_resolver.ex b/lib/absinthe/pipeline/batch_resolver.ex index 615e304f32..8e1c59c6ba 100644 --- a/lib/absinthe/pipeline/batch_resolver.ex +++ b/lib/absinthe/pipeline/batch_resolver.ex @@ -8,6 +8,7 @@ defmodule Absinthe.Pipeline.BatchResolver do def run([], _), do: [] def run([bp | _] = blueprints, options) do + {initial_phases, options} = Keyword.pop(options, :initial_phases, []) schema = Keyword.fetch!(options, :schema) plugins = schema.plugins() @@ -25,16 +26,15 @@ defmodule Absinthe.Pipeline.BatchResolver do } resolution_phase = {Execution.Resolution, [plugin_callbacks: false] ++ options} + phases = initial_phases ++ [resolution_phase] - 
do_resolve(blueprints, [resolution_phase], exec, plugins, resolution_phase, options) + do_resolve(blueprints, phases, exec, plugins, resolution_phase, options) end defp init(blueprints, attr) do Enum.reduce(blueprints, %{}, &Map.merge(Map.fetch!(&1.execution, attr), &2)) end - # defp update() - defp do_resolve(blueprints, phases, exec, plugins, resolution_phase_template, options) do exec = Enum.reduce(plugins, exec, fn plugin, exec -> @@ -93,7 +93,7 @@ defmodule Absinthe.Pipeline.BatchResolver do {:ok, blueprint} rescue e -> - pipeline_error(e) + pipeline_error(e, __STACKTRACE__) :error end @@ -101,9 +101,9 @@ defmodule Absinthe.Pipeline.BatchResolver do %{bp | execution: %{execution | acc: acc, context: ctx}} end - def pipeline_error(exception) do + def pipeline_error(exception, trace) do message = Exception.message(exception) - stacktrace = System.stacktrace() |> Exception.format_stacktrace() + stacktrace = trace |> Exception.format_stacktrace() Logger.error(""" #{message} diff --git a/lib/absinthe/pipeline/error_result.ex b/lib/absinthe/pipeline/error_result.ex deleted file mode 100644 index 2cf81d825b..0000000000 --- a/lib/absinthe/pipeline/error_result.ex +++ /dev/null @@ -1,20 +0,0 @@ -defmodule Absinthe.Pipeline.ErrorResult do - @moduledoc """ - A basic struct that wraps phase errors for - reporting to the user. - """ - - alias Absinthe.Phase - - defstruct errors: [] - - @type t :: %__MODULE__{ - errors: [Phase.Error.t()] - } - - @doc "Generate a new ErrorResult for one or more phase errors" - @spec new(Phase.Error.t() | [Phase.Error.t()]) :: t - def new(errors) do - %__MODULE__{errors: List.wrap(errors)} - end -end diff --git a/lib/absinthe/plugin.ex b/lib/absinthe/plugin.ex index 225fd1d376..ed916c2b25 100644 --- a/lib/absinthe/plugin.ex +++ b/lib/absinthe/plugin.ex @@ -15,7 +15,8 @@ defmodule Absinthe.Plugin do NOTE: This function is given the full accumulator. Namespacing is suggested to avoid conflicts. 
""" - @callback before_resolution(execution :: Document.Execution.t()) :: Document.Execution.t() + @callback before_resolution(execution :: Absinthe.Blueprint.Execution.t()) :: + Absinthe.Blueprint.Execution.t() @doc """ callback to do something with the resolution accumulator after @@ -24,7 +25,8 @@ defmodule Absinthe.Plugin do NOTE: This function is given the full accumulator. Namespacing is suggested to avoid conflicts. """ - @callback after_resolution(execution :: Document.Execution.t()) :: Document.Execution.t() + @callback after_resolution(execution :: Absinthe.Blueprint.Execution.t()) :: + Absinthe.Blueprint.Execution.t() @doc """ callback used to specify additional phases to run. @@ -36,7 +38,10 @@ defmodule Absinthe.Plugin do NOTE: This function is given the whole pipeline to be inserted after the current phase completes. """ - @callback pipeline(next_pipeline :: Absinthe.Pipeline.t(), execution :: Document.Execution.t()) :: + @callback pipeline( + next_pipeline :: Absinthe.Pipeline.t(), + execution :: Absinthe.Blueprint.Execution.t() + ) :: Absinthe.Pipeline.t() @doc """ diff --git a/lib/absinthe/resolution.ex b/lib/absinthe/resolution.ex index 38081929f0..9808232d71 100644 --- a/lib/absinthe/resolution.ex +++ b/lib/absinthe/resolution.ex @@ -21,7 +21,7 @@ defmodule Absinthe.Resolution do When a `%Resolution{}` is accessed via middleware, you may want to update the context (e.g. to cache a dataloader instance or the result of an ecto query). Updating the context can be done simply by using the map updating syntax (or - `Map.put/4`): + `Map.put/3`): ```elixir %{resolution | context: new_context} @@ -112,7 +112,7 @@ defmodule Absinthe.Resolution do ## Examples Given some query: - ``` + ```graphql {users { email }} ``` @@ -146,7 +146,7 @@ defmodule Absinthe.Resolution do ## Example Given a document like: - ``` + ```graphql { user { id name }} ``` @@ -162,7 +162,7 @@ defmodule Absinthe.Resolution do `child_fields` will be `["id", "name"]`. 
It correctly handles fragments, so for example if you had the document: - ``` + ```graphql { user { ... on User { @@ -218,6 +218,16 @@ defmodule Absinthe.Resolution do Instead got: #{inspect(resolution_function)} + Resolving field: + + #{res.definition.name} + + Defined at: + + #{res.definition.schema_node.__reference__.location.file}:#{ + res.definition.schema_node.__reference__.location.line + } + Info: #{inspect(res)} """ end diff --git a/lib/absinthe/resolution/helpers.ex b/lib/absinthe/resolution/helpers.ex index 6163ab6e37..94a42e19de 100644 --- a/lib/absinthe/resolution/helpers.ex +++ b/lib/absinthe/resolution/helpers.ex @@ -16,9 +16,27 @@ defmodule Absinthe.Resolution.Helpers do This is a helper function for using the `Absinthe.Middleware.Async`. Forbidden in mutation fields. (TODO: actually enforce this) + + ## Options + - `:timeout` default: `30_000`. The maximum timeout to wait for running + the task. + + ## Example + + Using the `Absinthe.Resolution.Helpers.async/1` helper function: + ```elixir + field :time_consuming, :thing do + resolve fn _, _, _ -> + async(fn -> + {:ok, long_time_consuming_function()} + end) + end + end + ``` """ @spec async((() -> term)) :: {:middleware, Middleware.Async, term} - @spec async((() -> term), Keyword.t()) :: {:middleware, Middleware.Async, term} + @spec async((() -> term), opts :: [{:timeout, pos_integer}]) :: + {:middleware, Middleware.Async, term} def async(fun, opts \\ []) do {:middleware, Middleware.Async, {fun, opts}} end @@ -28,7 +46,12 @@ defmodule Absinthe.Resolution.Helpers do Helper function for creating `Absinthe.Middleware.Batch` - # Example + ## Options + - `:timeout` default: `5_000`. The maximum timeout to wait for running + a batch. 
+ + ## Example + Raw usage: ```elixir object :post do @@ -49,13 +72,13 @@ defmodule Absinthe.Resolution.Helpers do ``` """ @spec batch(Middleware.Batch.batch_fun(), term, Middleware.Batch.post_batch_fun()) :: - {:plugin, Middleware.Batch, term} + {:middleware, Middleware.Batch, term} @spec batch( Middleware.Batch.batch_fun(), term, Middleware.Batch.post_batch_fun(), - opts :: Keyword.t() - ) :: {:plugin, Middleware.Batch, term} + opts :: [{:timeout, pos_integer}] + ) :: {:middleware, Middleware.Batch, term} def batch(batch_fun, batch_data, post_batch_fun, opts \\ []) do batch_config = {batch_fun, batch_data, post_batch_fun, opts} {:middleware, Middleware.Batch, batch_config} @@ -88,7 +111,7 @@ defmodule Absinthe.Resolution.Helpers do reports = loader |> Dataloader.get(SourceName, :automatic_reports, shipment) - |> Enum.concat(Dataloader.load(loader, SourceName, :manual_reports, shipment)) + |> Enum.concat(Dataloader.get(loader, SourceName, :manual_reports, shipment)) |> Enum.sort_by(&reported_at/1) {:ok, reports} end) @@ -106,7 +129,10 @@ defmodule Absinthe.Resolution.Helpers do Absinthe.Resolution.arguments(), Absinthe.Resolution.t() -> {any, map}) - @type dataloader_opt :: {:args, map} | {:use_parent, true | false} + @type dataloader_opt :: + {:args, map} + | {:use_parent, true | false} + | {:callback, (map(), map(), map() -> any())} @doc """ Resolve a field with a dataloader source. @@ -131,11 +157,50 @@ defmodule Absinthe.Resolution.Helpers do field :author, :user, resolve: dataloader(Blog, :author, []) ``` """ - @spec dataloader(Dataloader.source_name()) :: dataloader_tuple + @spec dataloader(Dataloader.source_name()) :: dataloader_key_fun() def dataloader(source) do + dataloader(source, []) + end + + @doc """ + Resolve a field with a dataloader source. + + This function is not imported by default. 
To make it available in your module do + + ``` + import Absinthe.Resolution.Helpers + ``` + + Same as `dataloader/3`, but it infers the resource name from the field name. For `opts` see + `dataloader/3` on what options can be passed in. + + ## Examples + + ``` + object :user do + field :posts, list_of(:post), + resolve: dataloader(Blog, args: %{deleted: false}) + + field :organization, :organization do + resolve dataloader(Accounts, use_parent: false) + end + + field(:account_active, non_null(:boolean), resolve: dataloader( + Accounts, callback: fn account, _parent, _args -> + {:ok, account.active} + end + ) + ) + end + ``` + + """ + @dialyzer {:no_contracts, dataloader: 2} + @spec dataloader(Dataloader.source_name(), [dataloader_opt]) :: dataloader_key_fun() + def dataloader(source, opts) when is_list(opts) do fn parent, args, %{context: %{loader: loader}} = res -> resource = res.definition.schema_node.identifier - do_dataloader(loader, source, resource, args, parent, []) + do_dataloader(loader, source, {resource, args}, parent, opts) end end @@ -203,14 +268,16 @@ defmodule Absinthe.Resolution.Helpers do - `:args` default: `%{}`. Any arguments you want to always pass into the `Dataloader.load/4` call. Resolver arguments are merged into this value and, in the event of a conflict, the resolver arguments win. - - `:callback` default: `default_callback/3`. Callback that is run with result - of dataloader. It receives the result as the first argument, and the parent - and args as second and third. Can be used to e.g. compute fields on the return - value of the loader. Should return an ok or error tuple. - - `:use_parent` default: `true`. This option affects whether or not the `dataloader/2` + - `:callback` default: return result wrapped in ok or error tuple. + Callback that is run with result of dataloader. It receives the result as + the first argument, and the parent and args as second and third. Can be used + to e.g. compute fields on the return value of the loader. 
Should return an + ok or error tuple. + - `:use_parent` default: `false`. This option affects whether or not the `dataloader/2` helper will use any pre-existing value on the parent. IE if you return `%{author: %User{...}}` from a blog post the helper will by default simply use - the pre-existing author. Set it to false if you always want it to load it fresh. + the pre-existing author. Set it to true if you want to opt into using the + pre-existing value instead of loading it fresh. Ultimately, this helper calls `Dataloader.load/4` using the loader in your context, the source you provide, the tuple `{resource, args}` @@ -226,53 +293,82 @@ defmodule Absinthe.Resolution.Helpers do {:ok, Dataloader.get(loader, source_name, {resource, args}, parent)} end) end + end ``` """ def dataloader(source, fun, opts \\ []) + @spec dataloader(Dataloader.source_name(), any) :: dataloader_key_fun @spec dataloader(Dataloader.source_name(), dataloader_key_fun | any, [dataloader_opt]) :: - dataloader_tuple + dataloader_key_fun def dataloader(source, fun, opts) when is_function(fun, 3) do fn parent, args, %{context: %{loader: loader}} = res -> - {resource, args} = fun.(parent, args, res) - do_dataloader(loader, source, resource, args, parent, opts) + {batch_key, parent} = + case fun.(parent, args, res) do + {resource, args} -> {{resource, args}, parent} + %{batch: batch, item: item} -> {batch, item} + end + + do_dataloader(loader, source, batch_key, parent, opts) end end def dataloader(source, resource, opts) do fn parent, args, %{context: %{loader: loader}} -> - do_dataloader(loader, source, resource, args, parent, opts) + do_dataloader(loader, source, {resource, args}, parent, opts) end end - defp use_parent(loader, source, resource, parent, args, opts) do + defp use_parent(loader, source, batch_key, parent, opts) when is_map(parent) do + resource = + case batch_key do + {_cardinality, resource, _args} -> resource + {resource, _args} -> resource + end + with true <- Keyword.get(opts, 
:use_parent, false), - {:ok, val} <- is_map(parent) && Map.fetch(parent, resource) do - Dataloader.put(loader, source, {resource, args}, parent, val) + {:ok, val} <- Map.fetch(parent, resource) do + Dataloader.put(loader, source, batch_key, parent, val) else _ -> loader end end - defp do_dataloader(loader, source, resource, args, parent, opts) do - args = - opts - |> Keyword.get(:args, %{}) - |> Map.merge(args) + defp use_parent(loader, _source, _batch_key, _parent, _opts), do: loader + + defp do_dataloader(loader, source, batch_key, parent, opts) do + args_from_opts = Keyword.get(opts, :args, %{}) + + {batch_key, args} = + case batch_key do + {cardinality, resource, args} -> + args = Map.merge(args_from_opts, args) + {{cardinality, resource, args}, args} + + {resource, args} -> + args = Map.merge(args_from_opts, args) + {{resource, args}, args} + end loader - |> use_parent(source, resource, parent, args, opts) - |> Dataloader.load(source, {resource, args}, parent) + |> use_parent(source, batch_key, parent, opts) + |> Dataloader.load(source, batch_key, parent) |> on_load(fn loader -> - callback = Keyword.get(opts, :callback, &default_callback/3) + callback = Keyword.get(opts, :callback, default_callback(loader)) loader - |> Dataloader.get(source, {resource, args}, parent) + |> Dataloader.get(source, batch_key, parent) |> callback.(parent, args) end) end - defp default_callback(result, _parent, _args), do: {:ok, result} + defp default_callback(%{options: loader_options}) do + if loader_options[:get_policy] == :tuples do + fn result, _parent, _args -> result end + else + fn result, _parent, _args -> {:ok, result} end + end + end end end diff --git a/lib/absinthe/resolution/projector.ex b/lib/absinthe/resolution/projector.ex index 46a91f3b5d..967ecbbdf4 100644 --- a/lib/absinthe/resolution/projector.ex +++ b/lib/absinthe/resolution/projector.ex @@ -9,11 +9,15 @@ defmodule Absinthe.Resolution.Projector do Projection amounts to collecting the next set of fields to 
operate on, based on the current field. This is a non trivial operation because you have to handle the various type conditions that come along with fragments / inline fragments, - field merging, and other wondeful stuff like that. + field merging, and other wonderful stuff like that. """ - def project(selections, %{identifier: identifier} = parent_type, path, cache, exec) do - path_names = for %{name: name, alias: alias} <- path, name, do: alias || name - key = {identifier, path_names} + def project(selections, %{identifier: parent_ident} = parent_type, path, cache, exec) do + path = + for %{parent_type: %{identifier: i}, name: name, alias: alias} <- path do + {i, alias || name} + end + + key = [parent_ident | path] case Map.fetch(cache, key) do {:ok, fields} -> @@ -113,7 +117,6 @@ defmodule Absinthe.Resolution.Projector do defp conditionally_collect(condition, selections, fragments, parent_type, schema, index, acc) do condition - |> Type.unwrap() |> normalize_condition(schema) |> passes_type_condition?(parent_type) |> case do @@ -137,12 +140,11 @@ defmodule Absinthe.Resolution.Projector do normalize_condition(condition, schema) end - defp normalize_condition(%{} = condition, _schema) do - condition - end - defp normalize_condition(condition, schema) do - Absinthe.Schema.lookup_type(schema, condition) + case Type.unwrap(condition) do + %{} = condition -> condition + value -> Absinthe.Schema.lookup_type(schema, value) + end end defp passes_type_condition?(%Type.Object{name: name}, %Type.Object{name: name}) do diff --git a/lib/absinthe/schema.ex b/lib/absinthe/schema.ex index 0289825544..b13c321657 100644 --- a/lib/absinthe/schema.ex +++ b/lib/absinthe/schema.ex @@ -1,154 +1,120 @@ defmodule Absinthe.Schema do - import Absinthe.Schema.Notation + alias Absinthe.Type + alias __MODULE__ - @moduledoc """ - Define a GraphQL schema. 
+ @type t :: module - See also `Absinthe.Schema.Notation` for a reference of the macros imported by - this module available to build types for your schema. + @moduledoc """ + Build GraphQL Schemas - ## Basic Usage + ## Custom Schema Manipulation (in progress) + In Absinthe 1.5 schemas are built using the same process by which queries are + executed. All the macros in this module and in `Notation` build up an intermediary tree of structs in the + `%Absinthe.Blueprint{}` namespace, which we generally call "Blueprint structs". - To define a schema, `use Absinthe.Schema` within - a module. This marks your module as adhering to the - `Absinthe.Schema` behaviour, and sets up some macros - and utility functions for your use: + At the top you've got a `%Blueprint{}` struct which holds onto some schema + definitions that look a bit like this: ``` - defmodule App.Schema do - use Absinthe.Schema - - # ... define it here! - - end + %Blueprint.Schema.SchemaDefinition{ + type_definitions: [ + %Blueprint.Schema.ObjectTypeDefinition{identifier: :query, ...}, + %Blueprint.Schema.ObjectTypeDefinition{identifier: :mutation, ...}, + %Blueprint.Schema.ObjectTypeDefinition{identifier: :user, ...}, + %Blueprint.Schema.EnumTypeDefinition{identifier: :sort_order, ...}, + ] + } ``` - Now, define a `query` (and optionally, `mutation` - and `subscription`). - - We'll define a `query` that has one field, `item`, to support - querying for an item record by its ID: + You can see what your schema's blueprint looks like by calling + `__absinthe_blueprint__` on any schema or type definition module. ``` - # Just for the example. You're probably using Ecto or - # something much more interesting than a module attribute-based - # database! 
- @fake_db %{ - "foo" => %{id: "foo", name: "Foo", value: 4}, - "bar" => %{id: "bar", name: "Bar", value: 5} - } - - query do - @desc "Get an item by ID" - field :item, :item do + defmodule MyAppWeb.Schema do + use Absinthe.Schema - @desc "The ID of the item" - arg :id, type: non_null(:id) + query do - resolve fn %{id: id}, _ -> - {:ok, Map.get(@fake_db, id)} - end end end - ``` - For more information on object types (especially how the `resolve` - function works above), see `Absinthe.Type.Object`. + > MyAppWeb.Schema.__absinthe_blueprint__ + #=> %Absinthe.Blueprint{...} + ``` - You may also notice we've declared that the resolved value of the field - to be of `type: :item`. We now need to define exactly what an `:item` is, - and what fields it contains. + These blueprints are manipulated by phases, which validate and ultimately + construct a schema. This pipeline of phases you can hook into like you do for + queries. ``` - @desc "A valuable Item" - object :item do - field :id, :id + defmodule MyAppWeb.Schema do + use Absinthe.Schema - @desc "The item's name" - field :name, :string, + @pipeline_modifier MyAppWeb.CustomSchemaPhase - field :value, :integer, description: "Recently appraised value" - end - ``` + query do - We can also load types from other modules using the `import_types` - macro: + end - ``` - defmodule App.Schema do - use Absinthe.Schema + end - import_types App.Schema.Scalars - import_types App.Schema.Objects + defmodule MyAppWeb.CustomSchemaPhase do + alias Absinthe.{Phase, Pipeline, Blueprint} - # ... schema definition + # Add this module to the pipeline of phases + # to run on the schema + def pipeline(pipeline) do + Pipeline.insert_after(pipeline, Phase.Schema.TypeImports, __MODULE__) + end + # Here's the blueprint of the schema, do whatever you want with it. 
+ def run(blueprint, _) do + {:ok, blueprint} + end end ``` - Our `:item` type above could then move into `App.Schema.Objects`: + The blueprint structs are pretty complex, but if you ever want to figure out + how to construct something in blueprints you can always just create the thing + in the normal AST and then look at the output. Let's see what interfaces look + like for example: ``` - defmodule App.Schema.Objects do + defmodule Foo do use Absinthe.Schema.Notation - object :item do - # ... type definition + interface :named do + field :name, :string end - - # ... other objects! - end + + Foo.__absinthe_blueprint__ #=> ... ``` """ - @typedoc """ - A module defining a schema. - """ - @type t :: module + defmacro __using__(opts) do + Module.register_attribute(__CALLER__.module, :pipeline_modifier, + accumulate: true, + persist: true + ) - alias Absinthe.Type - alias Absinthe.Language - alias __MODULE__ + Module.register_attribute(__CALLER__.module, :prototype_schema, persist: true) - defmacro __using__(opts \\ []) do - quote(generated: true) do + quote do use Absinthe.Schema.Notation, unquote(opts) import unquote(__MODULE__), only: :macros - import_types Absinthe.Type.BuiltIns - @after_compile unquote(__MODULE__) - @behaviour unquote(__MODULE__) + @before_compile unquote(__MODULE__) + @prototype_schema Absinthe.Schema.Prototype - @doc false - def __absinthe_middleware__(middleware, field, %{identifier: :mutation} = object) do - # mutation objects should run publication triggers - middleware - |> Absinthe.Schema.__ensure_middleware__(field, object) - |> Absinthe.Subscription.add_middleware() - |> __do_absinthe_middleware__(field, object) - end + @schema_provider Absinthe.Schema.Compiled - def __absinthe_middleware__(middleware, field, object) do - __do_absinthe_middleware__(middleware, field, object) + def __absinthe_lookup__(name) do + __absinthe_type__(name) end - defp __do_absinthe_middleware__(middleware, field, object) do - # run field against user supplied 
function - middleware - |> Absinthe.Schema.__ensure_middleware__(field, object) - |> __MODULE__.middleware(field, object) - |> case do - [] -> - raise """ - Middleware callback must return a non empty list of middleware! - """ - - middleware -> - middleware - end - end + @behaviour Absinthe.Schema @doc false def middleware(middleware, _field, _object) do @@ -156,121 +122,33 @@ defmodule Absinthe.Schema do end @doc false - def context(context) do - context + def plugins do + Absinthe.Plugin.defaults() end @doc false - def __absinthe_lookup__(key) do - key - |> __absinthe_type__ - |> case do - %Absinthe.Type.Object{} = object -> - fields = - Map.new(object.fields, fn {identifier, field} -> - {identifier, - %{field | middleware: __absinthe_middleware__(field.middleware, field, object)}} - end) - - %{object | fields: fields} - - type -> - type - end + def context(context) do + context end @doc false - def plugins do - Absinthe.Plugin.defaults() + def hydrate(_node, _ancestors) do + [] end - defoverridable middleware: 3, plugins: 0, context: 1 + defoverridable(context: 1, middleware: 3, plugins: 0, hydrate: 2) end end - @doc false - def __ensure_middleware__([], _field, %{identifier: :subscription}) do - [Absinthe.Middleware.PassParent] - end - - def __ensure_middleware__([], %{identifier: identifier}, _) do - [{Absinthe.Middleware.MapGet, identifier}] - end - - def __ensure_middleware__(middleware, _field, _object) do - middleware - end - - @doc """ - Run the introspection query on a schema. - - Convenience function. 
- """ - @spec introspect(schema :: t, opts :: Absinthe.run_opts()) :: Absinthe.run_result() - def introspect(schema, opts \\ []) do - [:code.priv_dir(:absinthe), "graphql", "introspection.graphql"] - |> Path.join() - |> File.read!() - |> Absinthe.run(schema, opts) - end - - @doc """ - Replace the default middleware - - ## Examples - Replace the default for all fields with a string lookup instead of an atom lookup: - ``` - def middleware(middleware, field, object) do - new_middleware = {Absinthe.Middleware.MapGet, to_string(field.identifier)} - middleware - |> Absinthe.Schema.replace_default(new_middleware, field, object) - end - ``` - """ - def replace_default(middleware_list, new_middleware, %{identifier: identifer}, _object) do - Enum.map(middleware_list, fn middleware -> - case middleware do - {Absinthe.Middleware.MapGet, ^identifer} -> - new_middleware - - middleware -> - middleware - end - end) + def child_spec(schema) do + %{ + id: {__MODULE__, schema}, + start: {__MODULE__.Manager, :start_link, [schema]}, + type: :worker + } end - @doc """ - List of Plugins to run before / after resolution. - - Plugins are modules that implement the `Absinthe.Plugin` behaviour. These modules - have the opportunity to run callbacks before and after the resolution of the entire - document, and have access to the resolution accumulator. - - Plugins must be specified by the schema, so that Absinthe can make sure they are - all given a chance to run prior to resolution. - """ - @callback plugins() :: [Absinthe.Plugin.t()] - @callback middleware([Absinthe.Middleware.spec(), ...], Type.Field.t(), Type.Object.t()) :: [ - Absinthe.Middleware.spec(), - ... 
- ] - @callback context(map) :: map - - @doc false - def __after_compile__(env, _bytecode) do - [ - env.module.__absinthe_errors__, - Schema.Rule.check(env.module) - ] - |> List.flatten() - |> case do - [] -> - nil - - details -> - raise Absinthe.Schema.Error, details - end - end + @object_type Absinthe.Blueprint.Schema.ObjectTypeDefinition @default_query_name "RootQueryType" @doc """ @@ -284,10 +162,8 @@ defmodule Absinthe.Schema do attrs = raw_attrs |> Keyword.put_new(:name, @default_query_name) - |> Keyword.put(:identifier, :query) - Absinthe.Schema.Notation.scope(env, :object, :query, attrs, block) - Absinthe.Schema.Notation.desc_attribute_recorder(:query) + Absinthe.Schema.Notation.record!(env, @object_type, :query, attrs, block) end @default_mutation_name "RootMutationType" @@ -313,10 +189,8 @@ defmodule Absinthe.Schema do attrs = raw_attrs |> Keyword.put_new(:name, @default_mutation_name) - |> Keyword.put(:identifier, :mutation) - Absinthe.Schema.Notation.scope(env, :object, :mutation, attrs, block) - Absinthe.Schema.Notation.desc_attribute_recorder(:query) + Absinthe.Schema.Notation.record!(env, @object_type, :mutation, attrs, block) end @default_subscription_name "RootSubscriptionType" @@ -327,12 +201,12 @@ defmodule Absinthe.Schema do outlines what data they want to receive in the event of particular updates. For a full walk through of how to setup your project with subscriptions and - Phoenix see the Absinthe.Phoenix project moduledoc. + `Phoenix` see the `Absinthe.Phoenix` project moduledoc. When you push a mutation, you can have selections on that mutation result to get back data you need, IE - ``` + ```graphql mutation { createUser(accountId: 1, name: "bob") { id @@ -344,7 +218,7 @@ defmodule Absinthe.Schema do However, what if you want to know when OTHER people create a new user, so that your UI can update as well. This is the point of subscriptions. 
- ``` + ```graphql subscription { newUsers { id @@ -366,7 +240,7 @@ defmodule Absinthe.Schema do field :new_users, :user do arg :account_id, non_null(:id) - config fn args,_info -> + config fn args, _info -> {:ok, topic: args.account_id} end end @@ -416,33 +290,250 @@ defmodule Absinthe.Schema do attrs = raw_attrs |> Keyword.put_new(:name, @default_subscription_name) - |> Keyword.put(:identifier, :subscription) - Absinthe.Schema.Notation.scope(env, :object, :subscription, attrs, block) - Absinthe.Schema.Notation.desc_attribute_recorder(:query) + Absinthe.Schema.Notation.record!(env, @object_type, :subscription, attrs, block) + end + + defmacro __before_compile__(_) do + quote do + @doc false + def __absinthe_pipeline_modifiers__ do + [@schema_provider] ++ @pipeline_modifier + end + + def __absinthe_schema_provider__ do + @schema_provider + end + + def __absinthe_type__(name) do + @schema_provider.__absinthe_type__(__MODULE__, name) + end + + def __absinthe_directive__(name) do + @schema_provider.__absinthe_directive__(__MODULE__, name) + end + + def __absinthe_types__() do + @schema_provider.__absinthe_types__(__MODULE__) + end + + def __absinthe_types__(group) do + @schema_provider.__absinthe_types__(__MODULE__, group) + end + + def __absinthe_directives__() do + @schema_provider.__absinthe_directives__(__MODULE__) + end + + def __absinthe_interface_implementors__() do + @schema_provider.__absinthe_interface_implementors__(__MODULE__) + end + + def __absinthe_prototype_schema__() do + @prototype_schema + end + end end - # Lookup a directive that in used by/available to a schema + @spec apply_modifiers(Absinthe.Pipeline.t(), t) :: Absinthe.Pipeline.t() + def apply_modifiers(pipeline, schema) do + Enum.reduce(schema.__absinthe_pipeline_modifiers__, pipeline, fn + {module, function}, pipeline -> + apply(module, function, [pipeline]) + + module, pipeline -> + module.pipeline(pipeline) + end) + end + + def __after_compile__(env, _) do + prototype_schema = + env.module 
+ |> Module.get_attribute(:prototype_schema) + + pipeline = + env.module + |> Absinthe.Pipeline.for_schema(prototype_schema: prototype_schema) + |> apply_modifiers(env.module) + + env.module.__absinthe_blueprint__ + |> Absinthe.Pipeline.run(pipeline) + |> case do + {:ok, _, _} -> + [] + + {:error, errors, _} -> + raise Absinthe.Schema.Error, phase_errors: List.wrap(errors) + end + end + + ### Helpers + @doc """ - Lookup a directive. + Run the introspection query on a schema. + + Convenience function. """ - @spec lookup_directive(t, atom | binary) :: Type.Directive.t() | nil - def lookup_directive(schema, name) do - schema.__absinthe_directive__(name) + @spec introspect(schema :: t, opts :: Absinthe.run_opts()) :: Absinthe.run_result() + def introspect(schema, opts \\ []) do + [:code.priv_dir(:absinthe), "graphql", "introspection.graphql"] + |> Path.join() + |> File.read!() + |> Absinthe.run(schema, opts) + end + + @doc """ + Replace the default middleware. + + ## Examples + + Replace the default for all fields with a string lookup instead of an atom lookup: + + ``` + def middleware(middleware, field, object) do + new_middleware = {Absinthe.Middleware.MapGet, to_string(field.identifier)} + middleware + |> Absinthe.Schema.replace_default(new_middleware, field, object) + end + ``` + """ + def replace_default(middleware_list, new_middleware, %{identifier: identifier}, _object) do + Enum.map(middleware_list, fn middleware -> + case middleware do + {Absinthe.Middleware.MapGet, ^identifier} -> + new_middleware + + middleware -> + middleware + end + end) end @doc """ - Lookup a type by name, identifier, or by unwrapping. + Used to define the list of plugins to run before and after resolution. + + Plugins are modules that implement the `Absinthe.Plugin` behaviour. These modules + have the opportunity to run callbacks before and after the resolution of the entire + document, and have access to the resolution accumulator. 
+ + Plugins must be specified by the schema, so that Absinthe can make sure they are + all given a chance to run prior to resolution. """ - @spec lookup_type(atom, Type.wrapping_t() | Type.t() | Type.identifier_t(), Keyword.t()) :: - Type.t() | nil + @callback plugins() :: [Absinthe.Plugin.t()] + + @doc """ + Used to apply middleware on all or a group of fields based on pattern matching. + + It is passed the existing middleware for a field, the field itself, and the object + that the field is a part of. + + ## Examples + + Adding a `HandleChangesetError` middleware only to mutations: + + ``` + # if it's a field for the mutation object, add this middleware to the end + def middleware(middleware, _field, %{identifier: :mutation}) do + middleware ++ [MyAppWeb.Middleware.HandleChangesetErrors] + end + + # if it's any other object keep things as is + def middleware(middleware, _field, _object), do: middleware + ``` + """ + @callback middleware([Absinthe.Middleware.spec(), ...], Type.Field.t(), Type.Object.t()) :: [ + Absinthe.Middleware.spec(), + ... + ] + + @doc """ + Used to set some values in the context that it may need in order to run. + + ## Examples + + Setup dataloader: + + ``` + def context(context) do + loader = + Dataloader.new + |> Dataloader.add_source(Blog, Blog.data()) + + Map.put(context, :loader, loader) + end + ``` + """ + @callback context(map) :: map + + @doc """ + Used to hydrate the schema with dynamic attributes. + + While this is normally used to add resolvers, etc, to schemas + defined using `import_sdl/1` and `import_sdl2`, it can also be + used in schemas defined using other macros. + + The function is passed the blueprint definition node as the first + argument and its ancestors in a list (with its parent node as the + head) as its second argument. + + See the `Absinthe.Phase.Schema.Hydrate` implementation of + `Absinthe.Schema.Hydrator` callbacks to see what hydration + values can be returned. 
+ + ## Examples + + Add a resolver for a field: + + ``` + def hydrate(%Absinthe.Blueprint.Schema.FieldDefinition{identifier: :health}, [%Absinthe.Blueprint.Schema.ObjectTypeDefinition{identifier: :query} | _]) do + {:resolve, &__MODULE__.health/3} + end + + # Resolver implementation: + def health(_, _, _), do: {:ok, "alive!"} + ``` + + Note that the values provided must be macro-escapable; notably, anonymous functions cannot + be used. + + You can, of course, omit the struct names for brevity: + + ``` + def hydrate(%{identifier: :health}, [%{identifier: :query} | _]) do + {:resolve, &__MODULE__.health/3} + end + ``` + + Add a description to a type: + + ``` + def hydrate(%Absinthe.Blueprint.Schema.ObjectTypeDefinition{identifier: :user}, _) do + {:description, "A user"} + end + ``` + + If you define `hydrate/2`, don't forget to include a fallback, e.g.: + + ``` + def hydrate(_node, _ancestors), do: [] + ``` + """ + @callback hydrate( + node :: Absinthe.Blueprint.Schema.t(), + ancestors :: [Absinthe.Blueprint.Schema.t()] + ) :: Absinthe.Schema.Hydrator.hydration() + + def lookup_directive(schema, name) do + schema.__absinthe_directive__(name) + end + def lookup_type(schema, type, options \\ [unwrap: true]) do cond do is_atom(type) -> - cached_lookup_type(schema, type) + schema.__absinthe_lookup__(type) is_binary(type) -> - cached_lookup_type(schema, type) + schema.__absinthe_lookup__(type) Type.wrapped?(type) -> if Keyword.get(options, :unwrap) do @@ -456,50 +547,6 @@ defmodule Absinthe.Schema do end end - @doc false - def cached_lookup_type(schema, type) do - # Originally, schema types were entirely literals, and very fast to lookup. - # Fast lookup types are assumed throughout the codebase, as it is often mandatory - # to lookup a type in several different places. - # - # Now, type/field imports, middleware logic, and other things means they aren't - # literals anymore, and aren't as fast as they should be. 
Thus the use of the pdict - # to make sure we only pay this cost once. - # - # Ideal solution: mandate that types are macro-escapable, and then we can turn - # them back into literals. The main issue there is resolution functions. - - case :erlang.get({schema, type}) do - :undefined -> - result = schema.__absinthe_lookup__(type) - :erlang.put({schema, type}, result) - result - - result -> - result - end - end - - @doc """ - List all types on a schema - """ - @spec types(t) :: [Type.t()] - def types(schema) do - schema.__absinthe_types__ - |> Map.keys() - |> Enum.map(&lookup_type(schema, &1)) - end - - @doc """ - Get all introspection types - """ - @spec introspection_types(t) :: [Type.t()] - def introspection_types(schema) do - schema - |> Schema.types() - |> Enum.filter(&Type.introspection?/1) - end - @doc """ Get all concrete types for union, interface, or object """ @@ -523,15 +570,20 @@ defmodule Absinthe.Schema do @doc """ Get all types that are used by an operation """ + @deprecated "Use Absinthe.Schema.referenced_types/1 instead" @spec used_types(t) :: [Type.t()] def used_types(schema) do - [:query, :mutation, :subscription] - |> Enum.map(&lookup_type(schema, &1)) - |> Enum.concat(directives(schema)) - |> Enum.filter(&(!is_nil(&1))) - |> Enum.flat_map(&Type.referenced_types(&1, schema)) - |> MapSet.new() - |> Enum.map(&Schema.lookup_type(schema, &1)) + referenced_types(schema) + end + + @doc """ + Get all types that are referenced by an operation + """ + @spec referenced_types(t) :: [Type.t()] + def referenced_types(schema) do + schema + |> Schema.types() + |> Enum.filter(&(!Type.introspection?(&1))) end @doc """ @@ -544,6 +596,34 @@ defmodule Absinthe.Schema do |> Enum.map(&lookup_directive(schema, &1)) end + @doc """ + Converts a schema to an SDL string + + Per the spec, only types that are actually referenced directly or transitively from + the root query, subscription, or mutation objects are included. 
+ + ## Example + + Absinthe.Schema.to_sdl(MyAppWeb.Schema) + "schema { + query {...} + }" + """ + @spec to_sdl(schema :: t) :: String.t() + def to_sdl(schema) do + pipeline = + schema + |> Absinthe.Pipeline.for_schema(prototype_schema: schema.__absinthe_prototype_schema__) + |> Absinthe.Pipeline.upto({Absinthe.Phase.Schema.Validation.Result, pass: :final}) + |> apply_modifiers(schema) + + # we can be assertive here, since this same pipeline was already used to + # successfully compile the schema. + {:ok, bp, _} = Absinthe.Pipeline.run(schema.__absinthe_blueprint__, pipeline) + + inspect(bp, pretty: true) + end + @doc """ List all implementors of an interface on a schema """ @@ -554,30 +634,27 @@ defmodule Absinthe.Schema do |> Enum.map(&lookup_type(schema, &1)) end - def implementors(schema, %Type.Interface{} = iface) do - implementors(schema, iface.__reference__.identifier) + def implementors(schema, %Type.Interface{identifier: identifier}) do + implementors(schema, identifier) end - @doc false - @spec type_from_ast(t, Language.type_reference_t()) :: Absinthe.Type.t() | nil - def type_from_ast(schema, %Language.NonNullType{type: inner_type}) do - case type_from_ast(schema, inner_type) do - nil -> nil - type -> %Type.NonNull{of_type: type} - end - end - - def type_from_ast(schema, %Language.ListType{type: inner_type}) do - case type_from_ast(schema, inner_type) do - nil -> nil - type -> %Type.List{of_type: type} - end + @doc """ + List all types on a schema + """ + @spec types(t) :: [Type.t()] + def types(schema) do + schema.__absinthe_types__ + |> Map.keys() + |> Enum.map(&lookup_type(schema, &1)) end - def type_from_ast(schema, ast_type) do - Schema.types(schema) - |> Enum.find(fn %{name: name} -> - name == ast_type.name - end) + @doc """ + Get all introspection types + """ + @spec introspection_types(t) :: [Type.t()] + def introspection_types(schema) do + schema + |> Schema.types() + |> Enum.filter(&Type.introspection?/1) end end diff --git 
a/lib/absinthe/schema/compiled.ex b/lib/absinthe/schema/compiled.ex new file mode 100644 index 0000000000..a6d7a8bea4 --- /dev/null +++ b/lib/absinthe/schema/compiled.ex @@ -0,0 +1,33 @@ +defmodule Absinthe.Schema.Compiled do + @moduledoc false + + @behaviour Absinthe.Schema.Provider + + def pipeline(pipeline) do + pipeline + end + + def __absinthe_type__(schema_mod, name) do + Module.concat([schema_mod, Compiled]).__absinthe_type__(name) + end + + def __absinthe_directive__(schema_mod, name) do + Module.concat([schema_mod, Compiled]).__absinthe_directive__(name) + end + + def __absinthe_types__(schema_mod) do + Module.concat([schema_mod, Compiled]).__absinthe_types__ + end + + def __absinthe_types__(schema_mod, group) do + Module.concat([schema_mod, Compiled]).__absinthe_types__(group) + end + + def __absinthe_directives__(schema_mod) do + Module.concat([schema_mod, Compiled]).__absinthe_directives__ + end + + def __absinthe_interface_implementors__(schema_mod) do + Module.concat([schema_mod, Compiled]).__absinthe_interface_implementors__ + end +end diff --git a/lib/absinthe/schema/error.ex b/lib/absinthe/schema/error.ex index f25ad1cd82..a4dc51c76d 100644 --- a/lib/absinthe/schema/error.ex +++ b/lib/absinthe/schema/error.ex @@ -2,30 +2,36 @@ defmodule Absinthe.Schema.Error do @moduledoc """ Exception raised when a schema is invalid """ - defexception message: "Invalid schema", details: [] + defexception phase_errors: [] - @type detail_t :: %{ - rule: Absinthe.Schema.Rule.t(), - location: %{file: binary, line: integer}, - data: any - } + def message(error) do + details = + error.phase_errors + |> Enum.map(fn %{message: message, locations: locations} -> + locations = + locations + |> Enum.map(fn + %{line: line, file: file} -> "#{file}:#{line}" + %{column: column, line: line} -> "Column #{column}, Line #{line}" + end) + |> Enum.sort() + |> Enum.join("\n") - def exception(details) do - detail = Enum.map(details, &format_detail/1) |> Enum.join("\n") - 
%__MODULE__{message: "Invalid schema:\n" <> detail <> "\n", details: details} - end + message = String.trim(message) - def format_detail(detail) do - explanation = indent(detail.rule.explanation(detail)) - "#{detail.location.file}:#{detail.location.line}: #{explanation}\n" - end + """ + --------------------------------------- + ## Locations + #{locations} + + #{message} + """ + end) + |> Enum.join() - defp indent(text) do - text - |> String.trim() - |> String.split("\n") - |> Enum.map(&" #{&1}") - |> Enum.join("\n") - |> String.trim_leading() + """ + Compilation failed: + #{details} + """ end end diff --git a/lib/absinthe/schema/hydrator.ex b/lib/absinthe/schema/hydrator.ex new file mode 100644 index 0000000000..1fdf7d2d49 --- /dev/null +++ b/lib/absinthe/schema/hydrator.ex @@ -0,0 +1,8 @@ +defmodule Absinthe.Schema.Hydrator do + @type hydration :: any + + @callback apply_hydration( + node :: Absinthe.Blueprint.Schema.t(), + hydration :: hydration + ) :: Absinthe.Blueprint.Schema.t() +end diff --git a/lib/absinthe/schema/manager.ex b/lib/absinthe/schema/manager.ex new file mode 100644 index 0000000000..b9a17e21e4 --- /dev/null +++ b/lib/absinthe/schema/manager.ex @@ -0,0 +1,28 @@ +defmodule Absinthe.Schema.Manager do + use GenServer + + def start_link(schema) do + GenServer.start_link(__MODULE__, schema, []) + end + + def init(schema_module) do + prototype_schema = schema_module.__absinthe_prototype_schema__ + + pipeline = + schema_module + |> Absinthe.Pipeline.for_schema(prototype_schema: prototype_schema) + |> Absinthe.Schema.apply_modifiers(schema_module) + + schema_module.__absinthe_blueprint__ + |> Absinthe.Pipeline.run(pipeline) + |> case do + {:ok, _, _} -> + [] + + {:error, errors, _} -> + raise Absinthe.Schema.Error, phase_errors: List.wrap(errors) + end + + {:ok, schema_module, :hibernate} + end +end diff --git a/lib/absinthe/schema/notation.ex b/lib/absinthe/schema/notation.ex index 9594d82e17..cf61b25605 100644 --- a/lib/absinthe/schema/notation.ex +++ 
b/lib/absinthe/schema/notation.ex @@ -1,18 +1,35 @@ defmodule Absinthe.Schema.Notation do + alias Absinthe.Blueprint.Schema + alias Absinthe.Utils + @moduledoc """ - This module contains macros used to build GraphQL types. + Provides a set of macros to use when creating a schema. Especially useful + when moving definitions out into a different module than the schema itself. + + ## Example + + defmodule MyAppWeb.Schema.Types do + use Absinthe.Schema.Notation + + object :item do + field :id, :id + field :name, :string + end + + # ... + + end - See `Absinthe.Schema` for a rough overview of schema building from scratch. """ - alias Absinthe.Utils - alias Absinthe.Type - alias Absinthe.Schema.Notation.Scope + Module.register_attribute(__MODULE__, :placement, accumulate: true) - defmacro __using__(opts \\ []) do - import_opts = opts |> Keyword.put(:only, :macros) - Module.register_attribute(__CALLER__.module, :absinthe_definitions, accumulate: true) - Module.register_attribute(__CALLER__.module, :absinthe_descriptions, accumulate: true) + defmacro __using__(import_opts \\ [only: :macros]) do + Module.register_attribute(__CALLER__.module, :absinthe_blueprint, accumulate: true) + Module.register_attribute(__CALLER__.module, :absinthe_desc, accumulate: true) + put_attr(__CALLER__.module, %Absinthe.Blueprint{schema: __CALLER__.module}) + Module.put_attribute(__CALLER__.module, :absinthe_scope_stack, [:schema]) + Module.put_attribute(__CALLER__.module, :absinthe_scope_stack_stash, []) quote do import Absinthe.Resolution.Helpers, @@ -23,33 +40,28 @@ defmodule Absinthe.Schema.Notation do batch: 4 ] - import unquote(__MODULE__), unquote(import_opts) - @before_compile unquote(__MODULE__).Writer - @desc nil - end - end - - Module.register_attribute(__MODULE__, :placement, accumulate: true) - - @doc false - # Return a quote that records the current @desc value for a given identifier - def desc_attribute_recorder(identifier) do - quote do - @absinthe_descriptions 
{unquote(identifier), @desc} + Module.register_attribute(__MODULE__, :__absinthe_type_import__, accumulate: true) @desc nil + import unquote(__MODULE__), unquote(import_opts) + @before_compile unquote(__MODULE__) end end - @doc false - defmacro resolver(_) do - raise "`resolver/1` is not a function, did you mean `resolve` ?" - end + ### Macro API ### @placement {:config, [under: [:field]]} @doc """ Configure a subscription field. - ## Example + The first argument to the config function is the field arguments passed in the subscription. + The second argument is an `Absinthe.Resolution` struct, which includes information + like the context and other execution data. + + ## Placement + + #{Utils.placement_docs(@placement)} + + ## Examples ```elixir config fn args, %{context: context} -> @@ -61,26 +73,56 @@ defmodule Absinthe.Schema.Notation do end ``` + Alternatively can provide a list of topics: + + ```elixir + config fn _, _ -> + {:ok, topic: ["topic_one", "topic_two", "topic_three"]} + end + ``` + + Using `context_id` option to allow de-duplication of updates: + + ```elixir + config fn _, %{context: context} -> + if authorized?(context) do + {:ok, topic: "topic_one", context_id: "authorized"} + else + {:ok, topic: "topic_one", context_id: "not-authorized"} + end + end + ``` + See `Absinthe.Schema.subscription/1` for details """ defmacro config(config_fun) do - env = __CALLER__ - recordable!(env, :config, @placement[:config]) - Scope.put_attribute(env.module, :config, config_fun) - [] + __CALLER__ + |> recordable!(:config, @placement[:config]) + |> record_config!(config_fun) end @placement {:trigger, [under: [:field]]} @doc """ - Set a trigger for a subscription field. + Sets triggers for a subscription, and configures which topics to publish to when that subscription + is triggered. - It accepts one or more mutation field names, and can be called more than once. + A trigger is the name of a mutation. 
When that mutation runs, data is pushed to the clients + who are subscribed to the subscription. - ``` + A subscription can have many triggers, and a trigger can push to many topics. + + ## Placement + + #{Utils.placement_docs(@placement)} + + ## Example + + ```elixir mutation do field :gps_event, :gps_event field :user_checkin, :user end + subscription do field :location_update, :user do arg :user_id, non_null(:id) @@ -89,12 +131,14 @@ defmodule Absinthe.Schema.Notation do {:ok, topic: args.user_id} end - trigger :gps_event, topic: fn event -> - event.user_id + trigger :gps_event, topic: fn gps_event -> + gps_event.user_id end - trigger :user_checkin, topic: fn user -> - [user.id, user.parent_id] + # Trigger on a list of mutations + trigger [:user_checkin], topic: fn user -> + # Returning a list of topics triggers the subscription for each of the topics in the list. + [user.id, user.friend.id] end end end @@ -103,13 +147,12 @@ defmodule Absinthe.Schema.Notation do Trigger functions are only called once per event, so database calls within them do not present a significant burden. 
- See the `subscription/2` macro docs for additional details + See the `Absinthe.Schema.subscription/2` macro docs for additional details """ defmacro trigger(mutations, attrs) do - env = __CALLER__ - recordable!(env, :trigger, @placement[:trigger]) - Scope.put_attribute(env.module, :triggers, {List.wrap(mutations), attrs}, accumulate: true) - :ok + __CALLER__ + |> recordable!(:trigger, @placement[:trigger]) + |> record_trigger!(List.wrap(mutations), attrs) end # OBJECT @@ -147,25 +190,44 @@ defmodule Absinthe.Schema.Notation do defmacro object(identifier, _attrs, _block) when identifier in @reserved_identifiers do raise Absinthe.Schema.Notation.Error, - "Invalid schema notation: cannot create an `object` with reserved identifier `#{ - identifier - }`" + "Invalid schema notation: cannot create an `object` " <> + "with reserved identifier `#{identifier}`" end defmacro object(identifier, attrs, do: block) do - __CALLER__ - |> recordable!(:object, @placement[:object]) - |> record_object!(identifier, attrs, block) + block = + for {identifier, args} <- build_directives(attrs) do + quote do + directive(unquote(identifier), unquote(args)) + end + end ++ block - desc_attribute_recorder(identifier) - end + {attrs, block} = + case Keyword.pop(attrs, :meta) do + {nil, attrs} -> + {attrs, block} - def record_object!(env, identifier, attrs, block) do - attrs = Keyword.put(attrs, :identifier, identifier) - scope(env, :object, identifier, attrs, block) + {meta, attrs} -> + meta_ast = + quote do + meta unquote(meta) + end + + block = [meta_ast, block] + {attrs, block} + end + + __CALLER__ + |> recordable!(:object, @placement[:object]) + |> record!( + Schema.ObjectTypeDefinition, + identifier, + attrs |> Keyword.update(:description, nil, &wrap_in_unquote/1), + block + ) end - @placement {:interfaces, [under: :object]} + @placement {:interfaces, [under: [:object, :interface]]} @doc """ Declare implemented interfaces for an object. 
@@ -191,14 +253,7 @@ defmodule Absinthe.Schema.Notation do |> record_interfaces!(ifaces) end - @doc false - # Record a list of implemented interfaces in the current scope - def record_interfaces!(env, ifaces) do - Enum.each(ifaces, &record_interface!(env, &1)) - :ok - end - - @placement {:resolve, [under: [:field]]} + @placement {:deprecate, [under: [:field]]} @doc """ Mark a field as deprecated @@ -234,13 +289,6 @@ defmodule Absinthe.Schema.Notation do |> record_deprecate!(msg) end - @doc false - # Record a deprecation in the current scope - def record_deprecate!(env, msg) do - Scope.put_attribute(env.module, :deprecate, msg) - :ok - end - @doc """ Declare an implemented interface for an object. @@ -258,25 +306,13 @@ defmodule Absinthe.Schema.Notation do end ``` """ - @placement {:interface_attribute, [under: :object]} + @placement {:interface_attribute, [under: [:object, :interface]]} defmacro interface(identifier) do __CALLER__ - |> recordable!( - :interface_attribute, - @placement[:interface_attribute], - as: "`interface` (as an attribute)" - ) + |> recordable!(:interface_attribute, @placement[:interface_attribute]) |> record_interface!(identifier) end - @doc false - # Record an implemented interface in the current scope - def record_interface!(env, identifier) do - Scope.put_attribute(env.module, :interfaces, identifier, accumulate: true) - Scope.recorded!(env.module, :attr, :interface) - :ok - end - # INTERFACES @placement {:interface, [toplevel: true]} @@ -308,16 +344,7 @@ defmodule Absinthe.Schema.Notation do defmacro interface(identifier, attrs \\ [], do: block) do __CALLER__ |> recordable!(:interface, @placement[:interface]) - |> record_interface!(identifier, attrs, block) - - desc_attribute_recorder(identifier) - end - - @doc false - # Record an interface type - def record_interface!(env, identifier, attrs, block) do - attrs = Keyword.put(attrs, :identifier, identifier) - scope(env, :interface, identifier, attrs, block) + |> 
record!(Schema.InterfaceTypeDefinition, identifier, attrs, block) end @placement {:resolve_type, [under: [:interface, :union]]} @@ -352,12 +379,59 @@ defmodule Absinthe.Schema.Notation do |> record_resolve_type!(func_ast) end - @doc false - # Record a type resolver in the current scope - def record_resolve_type!(env, func_ast) do - Scope.put_attribute(env.module, :resolve_type, func_ast) - Scope.recorded!(env.module, :attr, :resolve_type) - :ok + defp handle_field_attrs(attrs, caller) do + block = + for {identifier, arg_attrs} <- Keyword.get(attrs, :args, []) do + quote do + arg unquote(identifier), unquote(arg_attrs) + end + end + + block = + for {identifier, args} <- build_directives(attrs) do + quote do + directive(unquote(identifier), unquote(args)) + end + end ++ block + + block = + case Keyword.get(attrs, :meta) do + nil -> + block + + meta -> + meta_ast = + quote do + meta unquote(meta) + end + + [meta_ast, block] + end + + {func_ast, attrs} = Keyword.pop(attrs, :resolve) + + block = + if func_ast do + [ + quote do + resolve unquote(func_ast) + end + ] + else + [] + end ++ block + + attrs = + attrs + |> expand_ast(caller) + |> Keyword.delete(:deprecate) + |> Keyword.delete(:directives) + |> Keyword.delete(:args) + |> Keyword.delete(:meta) + |> Keyword.update(:description, nil, &wrap_in_unquote/1) + |> Keyword.update(:default_value, nil, &wrap_in_unquote/1) + + {attrs, block} end # FIELDS @@ -367,22 +441,21 @@ defmodule Absinthe.Schema.Notation do See `field/4` """ - defmacro field(identifier, do: block) do - __CALLER__ - |> recordable!(:field, @placement[:field]) - |> record_field!(identifier, [], block) - end defmacro field(identifier, attrs) when is_list(attrs) do + {attrs, block} = handle_field_attrs(attrs, __CALLER__) + __CALLER__ |> recordable!(:field, @placement[:field]) - |> record_field!(identifier, attrs, nil) + |> record!(Schema.FieldDefinition, identifier, attrs, block) end defmacro field(identifier, type) do + {attrs, block} = 
handle_field_attrs([type: type], __CALLER__) + __CALLER__ |> recordable!(:field, @placement[:field]) - |> record_field!(identifier, [type: type], nil) + |> record!(Schema.FieldDefinition, identifier, attrs, block) end @doc """ @@ -391,21 +464,28 @@ defmodule Absinthe.Schema.Notation do See `field/4` """ defmacro field(identifier, attrs, do: block) when is_list(attrs) do + {attrs, more_block} = handle_field_attrs(attrs, __CALLER__) + block = more_block ++ List.wrap(block) + __CALLER__ |> recordable!(:field, @placement[:field]) - |> record_field!(identifier, attrs, block) + |> record!(Schema.FieldDefinition, identifier, attrs, block) end defmacro field(identifier, type, do: block) do + {attrs, _} = handle_field_attrs([type: type], __CALLER__) + __CALLER__ |> recordable!(:field, @placement[:field]) - |> record_field!(identifier, [type: type], block) + |> record!(Schema.FieldDefinition, identifier, attrs, block) end defmacro field(identifier, type, attrs) do + {attrs, block} = handle_field_attrs(Keyword.put(attrs, :type, type), __CALLER__) + __CALLER__ |> recordable!(:field, @placement[:field]) - |> record_field!(identifier, Keyword.put(attrs, :type, type), nil) + |> record!(Schema.FieldDefinition, identifier, attrs, block) end @doc """ @@ -430,15 +510,13 @@ defmodule Absinthe.Schema.Notation do ``` """ defmacro field(identifier, type, attrs, do: block) do + attrs = Keyword.put(attrs, :type, type) + {attrs, more_block} = handle_field_attrs(attrs, __CALLER__) + block = more_block ++ List.wrap(block) + __CALLER__ |> recordable!(:field, @placement[:field]) - |> record_field!(identifier, Keyword.put(attrs, :type, type), block) - end - - @doc false - # Record a field in the current scope - def record_field!(env, identifier, attrs, block) do - scope(env, :field, identifier, attrs, block) + |> record!(Schema.FieldDefinition, identifier, attrs, block) end @placement {:resolve, [under: [:field]]} @@ -521,65 +599,51 @@ defmodule Absinthe.Schema.Notation do end end - @doc false - 
# Record a resolver in the current scope - def record_resolve!(env, func_ast) do - Scope.put_attribute(env.module, :resolve, func_ast) - Scope.recorded!(env.module, :attr, :resolve) - :ok - end - @placement {:complexity, [under: [:field]]} - defmacro complexity(func_ast) do - __CALLER__ - |> recordable!(:complexity, @placement[:complexity]) - |> record_complexity!(func_ast) - end - - @doc false - # Record a complexity analyzer in the current scope - def record_complexity!(env, func_ast) do - Scope.put_attribute(env.module, :complexity, func_ast) - Scope.recorded!(env.module, :attr, :complexity) - :ok - end - - @placement {:middleware, [under: [:field]]} - defmacro middleware(new_middleware, opts \\ []) do - env = __CALLER__ + @doc """ + Set the complexity of a field - new_middleware = Macro.expand(new_middleware, env) + For a field, the first argument to the function you supply to `complexity/1` is the user arguments -- just as a field's resolver can use user arguments to resolve its value, the complexity function that you provide can use the same arguments to calculate the field's complexity. - middleware = - Scope.current(env.module).attrs - |> Keyword.get(:middleware, []) + The second argument passed to your complexity function is the sum of all the complexity scores of all the fields nested below the current field. - new_middleware = - case new_middleware do - {module, fun} -> - {:{}, [], [{module, fun}, opts]} + An optional third argument is passed an `Absinthe.Complexity` struct, which includes information + like the context passed to `Absinthe.run/3`. - atom when is_atom(atom) -> - case Atom.to_string(atom) do - "Elixir." 
<> _ -> - {:{}, [], [{atom, :call}, opts]} + ## Placement - _ -> - {:{}, [], [{env.module, atom}, opts]} - end + #{Utils.placement_docs(@placement)} - val -> - val + ## Examples + ``` + query do + field :people, list_of(:person) do + arg :limit, :integer, default_value: 10 + complexity fn %{limit: limit}, child_complexity -> + # set complexity based on maximum number of items in the list and + # complexity of a child. + limit * child_complexity end + end + end + ``` + """ + defmacro complexity(func_ast) do + __CALLER__ + |> recordable!(:complexity, @placement[:complexity]) + |> record_complexity!(func_ast) + end - Scope.put_attribute(env.module, :middleware, [new_middleware | middleware]) - nil + @placement {:middleware, [under: [:field]]} + defmacro middleware(new_middleware, opts \\ []) do + __CALLER__ + |> recordable!(:middleware, @placement[:middleware]) + |> record_middleware!(new_middleware, opts) end @placement {:is_type_of, [under: [:object]]} @doc """ - ## Placement #{Utils.placement_docs(@placement)} @@ -590,14 +654,6 @@ defmodule Absinthe.Schema.Notation do |> record_is_type_of!(func_ast) end - @doc false - # Record a type checker in the current scope - def record_is_type_of!(env, func_ast) do - Scope.put_attribute(env.module, :is_type_of, func_ast) - Scope.recorded!(env.module, :attr, :is_type_of) - :ok - end - @placement {:arg, [under: [:directive, :field]]} # ARGS @doc """ @@ -612,14 +668,17 @@ defmodule Absinthe.Schema.Notation do ``` field do arg :size, :integer - arg :name, :string, description: "The desired name" + arg :name, non_null(:string), description: "The desired name" + arg :public, :boolean, default_value: true end ``` """ defmacro arg(identifier, type, attrs) do + {attrs, block} = handle_arg_attrs(identifier, type, attrs) + __CALLER__ |> recordable!(:arg, @placement[:arg]) - |> record_arg!(identifier, Keyword.put(attrs, :type, type), nil) + |> record!(Schema.InputValueDefinition, identifier, attrs, block) end @doc """ @@ -628,21 +687,19 
@@ defmodule Absinthe.Schema.Notation do See `arg/3` """ defmacro arg(identifier, attrs) when is_list(attrs) do + {attrs, block} = handle_arg_attrs(identifier, nil, attrs) + __CALLER__ |> recordable!(:arg, @placement[:arg]) - |> record_arg!(identifier, attrs, nil) + |> record!(Schema.InputValueDefinition, identifier, attrs, block) end defmacro arg(identifier, type) do + {attrs, block} = handle_arg_attrs(identifier, type, []) + __CALLER__ |> recordable!(:arg, @placement[:arg]) - |> record_arg!(identifier, [type: type], nil) - end - - @doc false - # Record an argument in the current scope - def record_arg!(env, identifier, attrs, block) do - scope(env, :arg, identifier, attrs, block) + |> record!(Schema.InputValueDefinition, identifier, attrs, block) end # SCALARS @@ -669,8 +726,6 @@ defmodule Absinthe.Schema.Notation do __CALLER__ |> recordable!(:scalar, @placement[:scalar]) |> record_scalar!(identifier, attrs, block) - - desc_attribute_recorder(identifier) end @doc """ @@ -682,23 +737,12 @@ defmodule Absinthe.Schema.Notation do __CALLER__ |> recordable!(:scalar, @placement[:scalar]) |> record_scalar!(identifier, [], block) - - desc_attribute_recorder(identifier) end defmacro scalar(identifier, attrs) do __CALLER__ |> recordable!(:scalar, @placement[:scalar]) |> record_scalar!(identifier, attrs, nil) - - desc_attribute_recorder(identifier) - end - - @doc false - # Record a scalar type - def record_scalar!(env, identifier, attrs, block) do - attrs = Keyword.put(attrs, :identifier, identifier) - scope(env, :scalar, identifier, attrs, block) end @placement {:serialize, [under: [:scalar]]} @@ -718,14 +762,6 @@ defmodule Absinthe.Schema.Notation do |> record_serialize!(func_ast) end - @doc false - # Record a serialize function in the current scope - def record_serialize!(env, func_ast) do - Scope.put_attribute(env.module, :serialize, func_ast) - Scope.recorded!(env.module, :attr, :serialize) - :ok - end - @placement {:private, [under: [:field, :object, :input_object, 
:enum, :scalar, :interface, :union]]} @doc false @@ -798,27 +834,6 @@ defmodule Absinthe.Schema.Notation do |> record_private!(:meta, keyword_list) end - @doc false - # Record private values - def record_private!(env, owner, keyword_list) when is_list(keyword_list) do - owner = expand(owner, env) - keyword_list = expand(keyword_list, env) - - keyword_list - |> Enum.each(fn {k, v} -> do_record_private!(env, owner, k, v) end) - end - - defp do_record_private!(env, owner, key, value) do - new_attrs = - Scope.current(env.module).attrs - |> Keyword.put_new(:__private__, []) - |> update_in([:__private__, owner], &List.wrap(&1)) - |> put_in([:__private__, owner, key], value) - - Scope.put_attribute(env.module, :__private__, new_attrs[:__private__]) - :ok - end - @placement {:parse, [under: [:scalar]]} @doc """ Defines a parse function for a `scalar` type @@ -836,65 +851,107 @@ defmodule Absinthe.Schema.Notation do __CALLER__ |> recordable!(:parse, @placement[:parse]) |> record_parse!(func_ast) - - [] - end - - @doc false - # Record a parse function in the current scope - def record_parse!(env, func_ast) do - Scope.put_attribute(env.module, :parse, func_ast) - Scope.recorded!(env.module, :attr, :parse) - :ok end # DIRECTIVES @placement {:directive, [toplevel: true]} + @placement {:applied_directive, + [ + under: [ + :arg, + :enum, + :field, + :input_object, + :interface, + :object, + :scalar, + :union, + :value + ] + ]} + @doc """ - Defines a directive + Defines or applies a directive - ## Placement + ## Defining a directive + ### Placement - #{Utils.placement_docs(@placement)} + #{Utils.placement_docs(@placement, :directive)} - ## Examples + ### Examples - ``` + ```elixir directive :mydirective do - arg :if, non_null(:boolean), description: "Skipped when true." 
+ on [:field, :fragment_spread, :inline_fragment] + + expand fn + %{if: true}, node -> + Blueprint.put_flag(node, :skip, __MODULE__) + _, node -> + node + end + end + ``` + + ## Applying a type system directive + Directives can be applied in your schema. E.g. by default the `@deprecated` + directive is available to be applied to fields and enum values. + + You can define your own type system directives. See `Absinthe.Schema.Prototype` + for more information. + + ### Placement + + #{Utils.placement_docs(@placement, :applied_directive)} + + ### Examples + + When you have a type system directive named `:feature` it can be applied as + follows: - on Language.FragmentSpread - on Language.Field - on Language.InlineFragment + ```elixir + object :post do + directive :feature, name: ":object" - instruction fn - %{if: true} -> - :skip - _ -> - :include + field :name, :string do + deprecate "Bye" end + end + scalar :sweet_scalar do + directive :feature, name: ":scalar" + parse &Function.identity/1 + serialize &Function.identity/1 end ``` """ - defmacro directive(identifier, attrs \\ [], do: block) do + defmacro directive(identifier, attrs, do: block) when is_list(attrs) when not is_nil(block) do __CALLER__ |> recordable!(:directive, @placement[:directive]) |> record_directive!(identifier, attrs, block) + end - desc_attribute_recorder(identifier) + defmacro directive(identifier, do: block) when not is_nil(block) do + __CALLER__ + |> recordable!(:directive, @placement[:directive]) + |> record_directive!(identifier, [], block) end - @doc false - # Record a directive - def record_directive!(env, identifier, attrs, block) do - attrs = Keyword.put(attrs, :identifier, identifier) - scope(env, :directive, identifier, attrs, block) + defmacro directive(identifier, attrs) when is_list(attrs) do + __CALLER__ + |> recordable!(:directive, @placement[:applied_directive]) + |> record_applied_directive!(identifier, attrs) end - @placement {:on, [under: :directive]} + defmacro 
directive(identifier) do + __CALLER__ + |> recordable!(:directive, @placement[:applied_directive]) + |> record_applied_directive!(identifier, []) + end + + @placement {:on, [under: [:directive]]} @doc """ Declare a directive as operating an a AST node type @@ -910,67 +967,35 @@ defmodule Absinthe.Schema.Notation do |> record_locations!(ast_node) end - @doc false - # Record directive AST nodes in the current scope - def record_locations!(env, ast_node) do - ast_node - |> List.wrap() - |> Enum.each(fn value -> - Scope.put_attribute( - env.module, - :locations, - value, - accumulate: true - ) - - Scope.recorded!(env.module, :attr, :locations) - end) - - :ok - end - - @placement {:instruction, [under: :directive]} + @placement {:expand, [under: [:directive]]} @doc """ - Calculate the instruction for a directive + Define the expansion for a directive ## Placement #{Utils.placement_docs(@placement)} """ - defmacro instruction(func_ast) do + defmacro expand(func_ast) do __CALLER__ - |> recordable!(:instruction, @placement[:instruction]) - |> record_instruction!(func_ast) - end - - @doc false - # Record a directive instruction function in the current scope - def record_instruction!(env, func_ast) do - Scope.put_attribute(env.module, :instruction, func_ast) - Scope.recorded!(env.module, :attr, :instruction) - :ok + |> recordable!(:expand, @placement[:expand]) + |> record_expand!(func_ast) end - @placement {:expand, [under: :directive]} + @placement {:repeatable, [under: [:directive]]} @doc """ - Define the expansion for a directive + Set whether the directive can be applied multiple times + an entity. 
+ + If omitted, defaults to `false` ## Placement #{Utils.placement_docs(@placement)} """ - defmacro expand(func_ast) do + defmacro repeatable(bool) do __CALLER__ - |> recordable!(:expand, @placement[:expand]) - |> record_expand!(func_ast) - end - - @doc false - # Record a directive expand function in the current scope - def record_expand!(env, func_ast) do - Scope.put_attribute(env.module, :expand, func_ast) - Scope.recorded!(env.module, :attr, :expand) - :ok + |> recordable!(:repeatable, @placement[:repeatable]) + |> record_repeatable!(bool) end # INPUT OBJECTS @@ -995,16 +1020,12 @@ defmodule Absinthe.Schema.Notation do defmacro input_object(identifier, attrs \\ [], do: block) do __CALLER__ |> recordable!(:input_object, @placement[:input_object]) - |> record_input_object!(identifier, attrs, block) - - desc_attribute_recorder(identifier) - end - - @doc false - # Record an input object type - def record_input_object!(env, identifier, attrs, block) do - attrs = Keyword.put(attrs, :identifier, identifier) - scope(env, :input_object, identifier, attrs, block) + |> record!( + Schema.InputObjectTypeDefinition, + identifier, + attrs |> Keyword.update(:description, nil, &wrap_in_unquote/1), + block + ) end # UNIONS @@ -1035,16 +1056,12 @@ defmodule Absinthe.Schema.Notation do defmacro union(identifier, attrs \\ [], do: block) do __CALLER__ |> recordable!(:union, @placement[:union]) - |> record_union!(identifier, attrs, block) - - desc_attribute_recorder(identifier) - end - - @doc false - # Record a union type - def record_union!(env, identifier, attrs, block) do - attrs = Keyword.put(attrs, :identifier, identifier) - scope(env, :union, identifier, attrs, block) + |> record!( + Schema.UnionTypeDefinition, + identifier, + attrs |> Keyword.update(:description, nil, &wrap_in_unquote/1), + block + ) end @placement {:types, [under: [:union]]} @@ -1063,14 +1080,6 @@ defmodule Absinthe.Schema.Notation do |> record_types!(types) end - @doc false - # Record a list of member types 
for a union in the current scope - def record_types!(env, types) do - Scope.put_attribute(env.module, :types, List.wrap(types)) - Scope.recorded!(env.module, :attr, :types) - :ok - end - # ENUMS @placement {:enum, [toplevel: true]} @@ -1125,11 +1134,11 @@ defmodule Absinthe.Schema.Notation do """ defmacro enum(identifier, attrs, do: block) do + attrs = handle_enum_attrs(attrs, __CALLER__) + __CALLER__ |> recordable!(:enum, @placement[:enum]) - |> record_enum!(identifier, attrs, block) - - desc_attribute_recorder(identifier) + |> record!(Schema.EnumTypeDefinition, identifier, attrs, block) end @doc """ @@ -1140,25 +1149,22 @@ defmodule Absinthe.Schema.Notation do defmacro enum(identifier, do: block) do __CALLER__ |> recordable!(:enum, @placement[:enum]) - |> record_enum!(identifier, [], block) - - desc_attribute_recorder(identifier) + |> record!(Schema.EnumTypeDefinition, identifier, [], block) end defmacro enum(identifier, attrs) do + attrs = handle_enum_attrs(attrs, __CALLER__) + __CALLER__ |> recordable!(:enum, @placement[:enum]) - |> record_enum!(identifier, attrs, nil) - - desc_attribute_recorder(identifier) + |> record!(Schema.EnumTypeDefinition, identifier, attrs, []) end - @doc false - # Record an enum type - def record_enum!(env, identifier, attrs, block) do - attrs = expand(attrs, env) - attrs = Keyword.put(attrs, :identifier, identifier) - scope(env, :enum, identifier, attrs, block) + defp handle_enum_attrs(attrs, env) do + attrs + |> expand_ast(env) + |> Keyword.update(:values, [], &[wrap_in_unquote(&1)]) + |> Keyword.update(:description, nil, &wrap_in_unquote/1) end @placement {:value, [under: [:enum]]} @@ -1172,23 +1178,11 @@ defmodule Absinthe.Schema.Notation do #{Utils.placement_docs(@placement)} """ defmacro value(identifier, raw_attrs \\ []) do + attrs = expand_ast(raw_attrs, __CALLER__) + __CALLER__ |> recordable!(:value, @placement[:value]) - |> record_value!(identifier, raw_attrs) - end - - @doc false - # Record an enum value in the current 
scope - def record_value!(env, identifier, raw_attrs) do - attrs = - raw_attrs - |> Keyword.put(:value, Keyword.get(raw_attrs, :as, identifier)) - |> Keyword.delete(:as) - |> add_description(env) - - Scope.put_attribute(env.module, :values, {identifier, attrs}, accumulate: true) - Scope.recorded!(env.module, :attr, :value) - :ok + |> record_value!(identifier, attrs) end # GENERAL ATTRIBUTES @@ -1212,104 +1206,29 @@ defmodule Absinthe.Schema.Notation do |> record_description!(text) end - defp reformat_description(text), do: String.trim(text) - - @doc false - # Record a description in the current scope - def record_description!(env, text_block) do - text = reformat_description(text_block) - Scope.put_attribute(env.module, :description, text) - Scope.recorded!(env.module, :attr, :description) - :ok - end - - # IMPORTS - - @placement {:import_types, [toplevel: true]} + # TYPE UTILITIES @doc """ - Import types from another module - - Very frequently your schema module will simply have the `query` and `mutation` - blocks, and you'll want to break out your other types into other modules. 
This - macro imports those types for use the current module - - ## Placement - - #{Utils.placement_docs(@placement)} - - ## Examples - ``` - import_types MyApp.Schema.Types + Marks a type reference as non null - import_types MyApp.Schema.Types.{TypesA, TypesB, SubTypes.TypesC} - ``` + See `field/3` for examples """ - defmacro import_types(type_module_ast) do - env = __CALLER__ - type_module_ast - |> Macro.expand(env) - |> do_import_types(env) - - :ok - end - - defp do_import_types({{:., _, [root_ast, :{}]}, _, modules_ast_list}, env) do - {:__aliases__, meta, root} = root_ast - - for {_, _, leaves} <- modules_ast_list do - type_module = Macro.expand({:__aliases__, meta, root ++ leaves}, env) - - if Code.ensure_compiled?(type_module) do - do_import_types(type_module, env) - else - raise ArgumentError, "module #{type_module} is not available" - end - end + defmacro non_null({:non_null, _, _}) do + raise Absinthe.Schema.Notation.Error, + "Invalid schema notation: `non_null` must not be nested" end - defp do_import_types(type_module, env) when is_atom(type_module) do - imports = Module.get_attribute(env.module, :absinthe_imports) || [] - _ = Module.put_attribute(env.module, :absinthe_imports, [type_module | imports]) - - types = - for {ident, name} <- type_module.__absinthe_types__, - ident in type_module.__absinthe_exports__ do - put_definition(env.module, %Absinthe.Schema.Notation.Definition{ - category: :type, - source: type_module, - identifier: ident, - attrs: [name: name], - file: env.file, - line: env.line - }) - - ident - end - - directives = - for {ident, name} <- type_module.__absinthe_directives__, - ident in type_module.__absinthe_exports__ do - put_definition(env.module, %Absinthe.Schema.Notation.Definition{ - category: :directive, - source: type_module, - identifier: ident, - attrs: [name: name], - file: env.file, - line: env.line - }) - end - - {:ok, types: types, directives: directives} + defmacro non_null(type) do + 
%Absinthe.Blueprint.TypeReference.NonNull{of_type: expand_ast(type, __CALLER__)} end - defp do_import_types(type_module, _) do - raise ArgumentError, """ - `#{Macro.to_string(type_module)}` is not a module + @doc """ + Marks a type reference as a list of the given type - This macro must be given a literal module name or a macro which expands to a - literal module name. Variables are not supported at this time. - """ + See `field/3` for examples + """ + defmacro list_of(type) do + %Absinthe.Blueprint.TypeReference.List{of_type: expand_ast(type, __CALLER__)} end @placement {:import_fields, [under: [:input_object, :interface, :object]]} @@ -1358,340 +1277,828 @@ defmodule Absinthe.Schema.Notation do end ``` """ - defmacro import_fields(type_name, opts \\ []) do - __CALLER__ - |> recordable!(:import_fields, @placement[:import_fields]) - |> record_field_import!(type_name, opts) - end + defmacro import_fields(source_criteria, opts \\ []) do + source_criteria = expand_ast(source_criteria, __CALLER__) - defp record_field_import!(env, type_name, opts) do - Scope.put_attribute(env.module, :field_imports, {type_name, opts}, accumulate: true) + put_attr(__CALLER__.module, {:import_fields, {source_criteria, opts}}) end - # TYPE UTILITIES + @placement {:import_types, [toplevel: true]} @doc """ - Marks a type reference as non null + Import types from another module - See `field/3` for examples + Very frequently your schema module will simply have the `query` and `mutation` + blocks, and you'll want to break out your other types into other modules. This + macro imports those types for use the current module. + + To selectively import types you can use the `:only` and `:except` opts. 
+ + ## Placement + + #{Utils.placement_docs(@placement)} + + ## Examples + ``` + import_types MyApp.Schema.Types + + import_types MyApp.Schema.Types.{TypesA, TypesB} + + import_types MyApp.Schema.Types, only: [:foo] + + import_types MyApp.Schema.Types, except: [:bar] + ``` """ - defmacro non_null(type) do - quote do - %Absinthe.Type.NonNull{of_type: unquote(type)} - end + defmacro import_types(type_module_ast, opts \\ []) do + env = __CALLER__ + + type_module_ast + |> Macro.expand(env) + |> do_import_types(env, opts) end + @placement {:import_sdl, [toplevel: true]} + @type import_sdl_option :: {:path, String.t() | Macro.t()} @doc """ - Marks a type reference as a list of the given type + Import types defined using the Schema Definition Language (SDL). - See `field/3` for examples + TODO: Explain handlers + + ## Placement + + #{Utils.placement_docs(@placement)} + + ## Examples + + Directly embedded SDL: + + ``` + import_sdl \""" + type Query { + posts: [Post] + } + + type Post { + title: String! + body: String! 
+ } + \""" + ``` + + Loaded from a file location (supporting recompilation on change): + + ``` + import_sdl path: "/path/to/sdl.graphql" + ``` + + TODO: Example for dynamic loading during init """ - defmacro list_of(type) do - quote do - %Absinthe.Type.List{of_type: unquote(type)} - end + @spec import_sdl([import_sdl_option(), ...]) :: Macro.t() + defmacro import_sdl(opts) when is_list(opts) do + __CALLER__ + |> do_import_sdl(nil, opts) + end + + @spec import_sdl(String.t() | Macro.t(), [import_sdl_option()]) :: Macro.t() + defmacro import_sdl(sdl, opts \\ []) do + __CALLER__ + |> do_import_sdl(sdl, opts) end - # NOTATION UTILITIES + defmacro values(values) do + __CALLER__ + |> record_values!(values) + end - defp handle_meta(attrs) do - {meta, attrs} = Keyword.pop(attrs, :meta) + ### Recorders ### + ################# - if meta do - Keyword.update(attrs, :__private__, [meta: meta], fn private -> - Keyword.update(private, :meta, meta, fn existing_meta -> - meta |> Enum.into(existing_meta) - end) - end) + @scoped_types [ + Schema.ObjectTypeDefinition, + Schema.FieldDefinition, + Schema.ScalarTypeDefinition, + Schema.EnumTypeDefinition, + Schema.EnumValueDefinition, + Schema.InputObjectTypeDefinition, + Schema.InputValueDefinition, + Schema.UnionTypeDefinition, + Schema.InterfaceTypeDefinition, + Schema.DirectiveDefinition + ] + + def record!(env, type, identifier, attrs, block) when type in @scoped_types do + attrs = expand_ast(attrs, env) + scoped_def(env, type, identifier, attrs, block) + end + + defp build_directives(attrs) do + if attrs[:deprecate] do + directive = {:deprecated, reason(attrs[:deprecate])} + + directives = Keyword.get(attrs, :directives, []) + [directive | directives] else - attrs + Keyword.get(attrs, :directives, []) end end - # Define a notation scope that will accept attributes - @doc false - def scope(env, kind, identifier, attrs, block) do - attrs = attrs |> handle_meta - open_scope(kind, env, identifier, attrs) + defp reason(true), do: [] + 
defp reason(msg) when is_binary(msg), do: [reason: msg] + defp reason(msg), do: raise(ArgumentError, "Invalid reason: #{msg}") - # this is probably too simple for now. - block |> expand(env) + def handle_arg_attrs(identifier, type, raw_attrs) do + block = + for {identifier, args} <- build_directives(raw_attrs) do + quote do + directive(unquote(identifier), unquote(args)) + end + end - close_scope(kind, env, identifier) - Scope.recorded!(env.module, kind, identifier) - end + attrs = + raw_attrs + |> Keyword.put_new(:name, to_string(identifier)) + |> Keyword.put_new(:type, type) + |> Keyword.delete(:directives) + |> Keyword.delete(:deprecate) + |> Keyword.update(:description, nil, &wrap_in_unquote/1) + |> Keyword.update(:default_value, nil, &wrap_in_unquote/1) - defp expand(ast, env) do - Macro.prewalk(ast, fn - {:@, _, [{:desc, _, [desc]}]} -> - Module.put_attribute(env.module, :__absinthe_desc__, desc) + {attrs, block} + end - {_, _, _} = node -> - Macro.expand(node, env) + @doc false + # Record a directive expand function in the current scope + def record_expand!(env, func_ast) do + put_attr(env.module, {:expand, func_ast}) + end - node -> - node - end) + @doc false + def record_repeatable!(env, bool) do + put_attr(env.module, {:repeatable, bool}) end @doc false - # Add a `__reference__` to a generated struct - def add_reference(attrs, env, identifier) do - attrs - |> Keyword.put( - :__reference__, - Macro.escape(%{ - module: env.module, - identifier: identifier, - location: %{ - file: env.file, - line: env.line - } - }) - ) + # Record directive AST nodes in the current scope + def record_locations!(env, locations) do + locations = expand_ast(locations, env) + put_attr(env.module, {:locations, List.wrap(locations)}) end - # After verifying it is valid in the current context, open a new notation - # scope, setting any provided attributes. 
- defp open_scope(kind, env, identifier, attrs) do + @doc false + # Record a directive + def record_directive!(env, identifier, attrs, block) do attrs = attrs - |> add_reference(env, identifier) - |> add_description(env) + |> Keyword.put(:identifier, identifier) + |> Keyword.put_new(:name, to_string(identifier)) + |> Keyword.update(:description, nil, &wrap_in_unquote/1) - Scope.open(kind, env.module, attrs) + scoped_def(env, Schema.DirectiveDefinition, identifier, attrs, block) end - defp add_description(attrs, env) do - case Module.get_attribute(env.module, :__absinthe_desc__) do - nil -> - attrs + @doc false + # Record a parse function in the current scope + def record_parse!(env, fun_ast) do + put_attr(env.module, {:parse, fun_ast}) + end - desc -> - desc = Macro.expand(desc, env) - Module.put_attribute(env.module, :__absinthe_desc__, nil) - Keyword.put(attrs, :description, reformat_description(desc)) - end + @doc false + # Record private values + def record_private!(env, owner, keyword_list) when is_list(keyword_list) do + keyword_list = expand_ast(keyword_list, env) + + put_attr(env.module, {:__private__, [{owner, keyword_list}]}) + end + + @doc false + # Record a serialize function in the current scope + def record_serialize!(env, fun_ast) do + put_attr(env.module, {:serialize, fun_ast}) end - # CLOSE SCOPE HOOKS + @doc false + # Record a type checker in the current scope + def record_is_type_of!(env, func_ast) do + put_attr(env.module, {:is_type_of, func_ast}) + # :ok + end + + @doc false + # Record a complexity analyzer in the current scope + def record_complexity!(env, func_ast) do + put_attr(env.module, {:complexity, func_ast}) + # :ok + end + + @doc false + # Record a type resolver in the current scope + def record_resolve_type!(env, func_ast) do + put_attr(env.module, {:resolve_type, func_ast}) + # :ok + end - @unexported_identifiers ~w(query mutation subscription)a + @doc false + # Record an implemented interface in the current scope + def 
record_interface!(env, identifier) do + put_attr(env.module, {:interface, identifier}) + # Scope.put_attribute(env.module, :interfaces, identifier, accumulate: true) + # Scope.recorded!(env.module, :attr, :interface) + # :ok + end + + @doc false + # Record a deprecation in the current scope + def record_deprecate!(env, msg) do + msg = expand_ast(msg, env) + + record_applied_directive!(env, :deprecated, reason: msg) + end - # Close the current scope and return the appropriate - # quoted result for the type of operation. - defp close_scope(:enum, env, identifier) do - close_scope_and_define_type(Type.Enum, env, identifier) + @doc false + # Record a list of implemented interfaces in the current scope + def record_interfaces!(env, ifaces) do + Enum.each(ifaces, &record_interface!(env, &1)) end - defp close_scope(:object, env, identifier) do - close_scope_and_define_type( - Type.Object, + @doc false + # Record a list of member types for a union in the current scope + def record_types!(env, types) do + put_attr(env.module, {:types, types}) + end + + @doc false + # Record an enum type + def record_enum!(env, identifier, attrs, block) do + attrs = expand_ast(attrs, env) + attrs = Keyword.put(attrs, :identifier, identifier) + scoped_def(env, :enum, identifier, attrs, block) + end + + @doc false + # Record a description in the current scope + def record_description!(env, text_block) do + text = wrap_in_unquote(text_block) + + put_attr(env.module, {:desc, text}) + end + + @doc false + # Record a scalar + def record_scalar!(env, identifier, attrs, block_or_nil) do + record!( env, + Schema.ScalarTypeDefinition, identifier, - export: !Enum.member?(@unexported_identifiers, identifier) + attrs |> Keyword.update(:description, nil, &wrap_in_unquote/1), + block_or_nil ) end - defp close_scope(:interface, env, identifier) do - close_scope_and_define_type(Type.Interface, env, identifier) + def handle_enum_value_attrs(identifier, raw_attrs, env) do + value = Keyword.get(raw_attrs, :as, 
identifier) + + block = + for {identifier, args} <- build_directives(raw_attrs) do + quote do + directive(unquote(identifier), unquote(args)) + end + end + + attrs = + raw_attrs + |> expand_ast(env) + |> Keyword.delete(:directives) + |> Keyword.put(:identifier, identifier) + |> Keyword.put(:value, wrap_in_unquote(value)) + |> Keyword.put_new(:name, String.upcase(to_string(identifier))) + |> Keyword.delete(:as) + |> Keyword.delete(:deprecate) + |> Keyword.update(:description, nil, &wrap_in_unquote/1) + + {attrs, block} end - defp close_scope(:union, env, identifier) do - close_scope_and_define_type(Type.Union, env, identifier) + @doc false + # Record an enum value in the current scope + def record_value!(env, identifier, raw_attrs) do + {attrs, block} = handle_enum_value_attrs(identifier, raw_attrs, env) + record!(env, Schema.EnumValueDefinition, identifier, attrs, block) end - defp close_scope(:input_object, env, identifier) do - close_scope_and_define_type(Type.InputObject, env, identifier) + @doc false + # Record an enum value in the current scope + def record_values!(env, values) do + values = + values + |> expand_ast(env) + |> wrap_in_unquote + + put_attr(env.module, {:values, values}) end - defp close_scope(:field, env, identifier) do - close_scope_and_accumulate_attribute(:fields, env, identifier) + def record_config!(env, fun_ast) do + put_attr(env.module, {:config, fun_ast}) end - defp close_scope(:arg, env, identifier) do - close_scope_and_accumulate_attribute(:args, env, identifier) + def record_trigger!(env, mutations, attrs) do + for mutation <- mutations do + put_attr(env.module, {:trigger, {mutation, attrs}}) + end end - defp close_scope(:scalar, env, identifier) do - close_scope_and_define_type(Type.Scalar, env, identifier) + def record_applied_directive!(env, name, attrs) do + name = Atom.to_string(name) + + attrs = + attrs + |> expand_ast(env) + |> build_directive_arguments(env) + |> Keyword.put(:name, name) + |> put_reference(env) + + directive = 
struct!(Absinthe.Blueprint.Directive, attrs) + put_attr(env.module, {:directive, directive}) end - defp close_scope(:directive, env, identifier) do - close_scope_and_define_directive(env, identifier) + defp build_directive_arguments(attrs, env) do + arguments = + attrs + |> Enum.map(fn {name, value} -> + value = expand_ast(value, env) + + attrs = [ + name: Atom.to_string(name), + value: value, + input_value: Absinthe.Blueprint.Input.Value.build(value), + source_location: Absinthe.Blueprint.SourceLocation.at(env.line, 0) + ] + + struct!(Absinthe.Blueprint.Input.Argument, attrs) + end) + + [arguments: arguments] + end + + def record_middleware!(env, new_middleware, opts) do + new_middleware = + case expand_ast(new_middleware, env) do + {module, fun} -> + {:{}, [], [{module, fun}, opts]} + + atom when is_atom(atom) -> + case Atom.to_string(atom) do + "Elixir." <> _ -> + {:{}, [], [{atom, :call}, opts]} + + _ -> + {:{}, [], [{env.module, atom}, opts]} + end + + val -> + val + end + + put_attr(env.module, {:middleware, [new_middleware]}) end - defp close_scope(_, env, _) do - Scope.close(env) + # We wrap the value (from the user) in an `unquote` call, so that when the schema `blueprint` is + # placed into `__absinthe_blueprint__` via `unquote(Macro.escape(blueprint, unquote: true))` the + # value gets unquoted. This allows us to evaluate function calls in the scope of the schema + # module. 
+ defp wrap_in_unquote(value) do + {:unquote, [], [value]} end - defp close_scope_with_name(mod, identifier, opts \\ []) do - Scope.close(mod).attrs - |> add_name(identifier, opts) + # ------------------------------ + + @doc false + defmacro pop() do + module = __CALLER__.module + popped = pop_stack(module, :absinthe_scope_stack_stash) + push_stack(module, :absinthe_scope_stack, popped) + put_attr(__CALLER__.module, :pop) end - defp close_scope_and_define_directive(env, identifier, def_opts \\ []) do - definition = %Absinthe.Schema.Notation.Definition{ - category: :directive, - builder: Absinthe.Type.Directive, - identifier: identifier, - attrs: close_scope_with_name(env.module, identifier), - opts: def_opts, - file: env.file, - line: env.line - } + @doc false + defmacro stash() do + module = __CALLER__.module + popped = pop_stack(module, :absinthe_scope_stack) + push_stack(module, :absinthe_scope_stack_stash, popped) + put_attr(module, :stash) + end - put_definition(env.module, definition) + @doc false + defmacro close_scope() do + put_attr(__CALLER__.module, :close) + pop_stack(__CALLER__.module, :absinthe_scope_stack) end - defp close_scope_and_define_type(type_module, env, identifier, def_opts \\ []) do - attrs = close_scope_with_name(env.module, identifier, title: true) + def put_reference(attrs, env) do + Keyword.put(attrs, :__reference__, build_reference(env)) + end - definition = %Absinthe.Schema.Notation.Definition{ - category: :type, - builder: type_module, - identifier: identifier, - attrs: attrs, - opts: def_opts, - file: env.file, - line: env.line + def build_reference(env) do + %{ + module: env.module, + location: %{ + file: env.file, + line: env.line + } } + end + + @scope_map %{ + Schema.ObjectTypeDefinition => :object, + Schema.FieldDefinition => :field, + Schema.ScalarTypeDefinition => :scalar, + Schema.EnumTypeDefinition => :enum, + Schema.EnumValueDefinition => :value, + Schema.InputObjectTypeDefinition => :input_object, + 
Schema.InputValueDefinition => :arg, + Schema.UnionTypeDefinition => :union, + Schema.InterfaceTypeDefinition => :interface, + Schema.DirectiveDefinition => :directive + } + defp scoped_def(caller, type, identifier, attrs, body) do + attrs = + attrs + |> Keyword.put(:identifier, identifier) + |> Keyword.put_new(:name, default_name(type, identifier)) + |> Keyword.put(:module, caller.module) + |> put_reference(caller) + + definition = struct!(type, attrs) + + ref = put_attr(caller.module, definition) - put_definition(env.module, definition) + push_stack(caller.module, :absinthe_scope_stack, Map.fetch!(@scope_map, type)) + + [ + get_desc(ref), + body, + quote(do: unquote(__MODULE__).close_scope()) + ] end - defp put_definition(module, definition) do - Module.put_attribute(module, :absinthe_definitions, definition) + defp get_desc(ref) do + quote do + unquote(__MODULE__).put_desc(__MODULE__, unquote(ref)) + end end - defp close_scope_and_accumulate_attribute(attr_name, env, identifier) do - Scope.put_attribute( - env.module, - attr_name, - {identifier, close_scope_with_name(env.module, identifier)}, - accumulate: true - ) + defp push_stack(module, key, val) do + stack = Module.get_attribute(module, key) + stack = [val | stack] + Module.put_attribute(module, key, stack) end - @doc false - # Add the default name, if needed, to a struct - def add_name(attrs, identifier, opts \\ []) do - update_in(attrs, [:name], fn value -> - default_name(identifier, value, opts) - end) + defp pop_stack(module, key) do + [popped | stack] = Module.get_attribute(module, key) + Module.put_attribute(module, key, stack) + popped end - # Find the name, or default as necessary - defp default_name(identifier, nil, opts) do - if opts[:title] do - identifier |> Atom.to_string() |> Utils.camelize() - else - identifier |> Atom.to_string() - end + def put_attr(module, thing) do + ref = :erlang.unique_integer() + Module.put_attribute(module, :absinthe_blueprint, {ref, thing}) + ref end - defp 
default_name(_, name, _) do - name + defp default_name(Schema.FieldDefinition, identifier) do + identifier + |> Atom.to_string() end - @doc false - # Get a value at a path - @spec get_in_private(atom, [atom]) :: any - def get_in_private(mod, path) do - Enum.find_value(Scope.on(mod), fn %{attrs: attrs} -> - get_in(attrs, [:__private__ | path]) - end) + defp default_name(_, identifier) do + identifier + |> Atom.to_string() + |> Absinthe.Utils.camelize() end - @doc false - # Ensure the provided operation can be recorded in the current environment, - # in the current scope context - def recordable!(env, usage) do - recordable!(env, usage, Keyword.get(@placement, usage, [])) + defp do_import_types({{:., _, [{:__MODULE__, _, _}, :{}]}, _, modules_ast_list}, env, opts) do + for {_, _, leaf} <- modules_ast_list do + type_module = Module.concat([env.module | leaf]) + + do_import_types(type_module, env, opts) + end end - def recordable!(env, usage, kw_rules, opts \\ []) do - do_recordable!(env, usage, Enum.into(List.wrap(kw_rules), %{}), opts) + defp do_import_types( + {{:., _, [{:__aliases__, _, [{:__MODULE__, _, _} | tail]}, :{}]}, _, modules_ast_list}, + env, + opts + ) do + root_module = Module.concat([env.module | tail]) + + for {_, _, leaf} <- modules_ast_list do + type_module = Module.concat([root_module | leaf]) + + do_import_types(type_module, env, opts) + end end - defp do_recordable!(env, usage, %{under: parents} = rules, opts) do - case Scope.current(env.module) do - %{name: name} -> - if Enum.member?(List.wrap(parents), name) do - do_recordable!(env, usage, Map.delete(rules, :under), opts) - else - raise Absinthe.Schema.Notation.Error, only_within(usage, parents, opts) - end + defp do_import_types({{:., _, [{:__aliases__, _, root}, :{}]}, _, modules_ast_list}, env, opts) do + root_module = Module.concat(root) + root_module_with_alias = Keyword.get(env.aliases, root_module, root_module) + + for {_, _, leaf} <- modules_ast_list do + type_module = 
Module.concat([root_module_with_alias | leaf]) - _ -> - raise Absinthe.Schema.Notation.Error, only_within(usage, parents, opts) + do_import_types(type_module, env, opts) end end - defp do_recordable!(env, usage, %{toplevel: true} = rules, opts) do - case Scope.current(env.module) do - nil -> - do_recordable!(env, usage, Map.delete(rules, :toplevel), opts) + defp do_import_types(module, env, opts) do + Module.put_attribute(env.module, :__absinthe_type_imports__, [ + {module, opts} | Module.get_attribute(env.module, :__absinthe_type_imports__) || [] + ]) - _ -> - ref = opts[:as] || "`#{usage}`" + [] + end + @spec do_import_sdl(Macro.Env.t(), nil | String.t() | Macro.t(), [import_sdl_option()]) :: + Macro.t() + defp do_import_sdl(env, nil, opts) do + case Keyword.fetch(opts, :path) do + {:ok, path} -> + [ + quote do + @__absinthe_import_sdl_path__ unquote(path) + end, + do_import_sdl( + env, + quote do + File.read!(@__absinthe_import_sdl_path__) + end, + opts + ), + quote do + @external_resource @__absinthe_import_sdl_path__ + end + ] + + :error -> raise Absinthe.Schema.Notation.Error, - "Invalid schema notation: #{ref} must only be used toplevel" + "Must provide `:path` option to `import_sdl` unless passing a raw SDL string as the first argument" end end - defp do_recordable!(env, usage, %{toplevel: false} = rules, opts) do - case Scope.current(env.module) do - nil -> - ref = opts[:as] || "`#{usage}`" + defp do_import_sdl(env, sdl, opts) do + ref = build_reference(env) - raise Absinthe.Schema.Notation.Error, - "Invalid schema notation: #{ref} must not be used toplevel" + quote do + with {:ok, definitions} <- + unquote(__MODULE__).SDL.parse( + unquote(sdl), + __MODULE__, + unquote(Macro.escape(ref)), + unquote(Macro.escape(opts)) + ) do + @__absinthe_sdl_definitions__ definitions ++ + (Module.get_attribute( + __MODULE__, + :__absinthe_sdl_definitions__ + ) || []) + else + {:error, error} -> + raise Absinthe.Schema.Notation.Error, "`import_sdl` could not parse 
SDL:\n#{error}" + end + end + end + + def put_desc(module, ref) do + Module.put_attribute(module, :absinthe_desc, {ref, Module.get_attribute(module, :desc)}) + Module.put_attribute(module, :desc, nil) + end - _ -> - do_recordable!(env, usage, Map.delete(rules, :toplevel), opts) + def noop(_desc) do + :ok + end + + defmacro __before_compile__(env) do + module_attribute_descs = + env.module + |> Module.get_attribute(:absinthe_desc) + |> Map.new() + + attrs = + env.module + |> Module.get_attribute(:absinthe_blueprint) + |> List.insert_at(0, :close) + |> reverse_with_descs(module_attribute_descs) + + imports = + (Module.get_attribute(env.module, :__absinthe_type_imports__) || []) + |> Enum.uniq() + |> Enum.map(fn + module when is_atom(module) -> {module, []} + other -> other + end) + + schema_def = %Schema.SchemaDefinition{ + imports: imports, + module: env.module, + __reference__: %{ + location: %{file: env.file, line: 0} + } + } + + blueprint = + attrs + |> List.insert_at(1, schema_def) + |> Absinthe.Blueprint.Schema.build() + + # TODO: handle multiple schemas + [schema] = blueprint.schema_definitions + + {schema, functions} = lift_functions(schema, env.module) + + sdl_definitions = + (Module.get_attribute(env.module, :__absinthe_sdl_definitions__) || []) + |> List.flatten() + |> Enum.map(fn definition -> + Absinthe.Blueprint.prewalk(definition, fn + %{module: _} = node -> + %{node | module: env.module} + + node -> + node + end) + end) + + {sdl_directive_definitions, sdl_type_definitions} = + Enum.split_with(sdl_definitions, fn + %Absinthe.Blueprint.Schema.DirectiveDefinition{} -> + true + + _ -> + false + end) + + schema = + schema + |> Map.update!(:type_definitions, &(sdl_type_definitions ++ &1)) + |> Map.update!(:directive_definitions, &(sdl_directive_definitions ++ &1)) + + blueprint = %{blueprint | schema_definitions: [schema]} + + quote do + unquote(__MODULE__).noop(@desc) + + def __absinthe_blueprint__ do + unquote(Macro.escape(blueprint, unquote: true)) + end 
+ + unquote_splicing(functions) end end - defp do_recordable!(env, usage, %{private_lookup: address} = rules, opts) - when is_list(address) do - case get_in_private(env.module, address) do - nil -> - ref = opts[:as] || "`#{usage}`" + def lift_functions(schema, origin) do + Absinthe.Blueprint.prewalk(schema, [], &lift_functions(&1, &2, origin)) + end + + def lift_functions(node, acc, origin) do + {node, ast} = functions_for_type(node, origin) + {node, ast ++ acc} + end + + defp functions_for_type(%Schema.FieldDefinition{} = type, origin) do + grab_functions( + origin, + type, + {Schema.FieldDefinition, type.function_ref}, + Schema.functions(Schema.FieldDefinition) + ) + end + + defp functions_for_type(%module{identifier: identifier} = type, origin) do + grab_functions(origin, type, {module, identifier}, Schema.functions(module)) + end + + defp functions_for_type(type, _) do + {type, []} + end + + def grab_functions(origin, type, identifier, attrs) do + {ast, type} = + Enum.flat_map_reduce(attrs, type, fn attr, type -> + value = Map.fetch!(type, attr) + + ast = + quote do + def __absinthe_function__(unquote(identifier), unquote(attr)) do + unquote(value) + end + end + + ref = {:ref, origin, identifier} + + type = + Map.update!(type, attr, fn + value when is_list(value) -> + [ref] + + _ -> + ref + end) + + {[ast], type} + end) + + {type, ast} + end + + @doc false + def __ensure_middleware__([], _field, %{identifier: :subscription}) do + [Absinthe.Middleware.PassParent] + end + + def __ensure_middleware__([], %{identifier: identifier}, _) do + [{Absinthe.Middleware.MapGet, identifier}] + end + + # Don't install Telemetry middleware for Introspection fields + @introspection [Absinthe.Phase.Schema.Introspection, Absinthe.Type.BuiltIns.Introspection] + def __ensure_middleware__(middleware, %{definition: definition}, _object) + when definition in @introspection do + middleware + end + + # Install Telemetry middleware + def __ensure_middleware__(middleware, _field, _object) 
do + [{Absinthe.Middleware.Telemetry, []} | middleware] + end - message = - "Invalid schema notation: #{ref} failed a private value lookup for `#{ - address |> List.last() - }'" + defp reverse_with_descs(attrs, descs, acc \\ []) - raise Absinthe.Schema.Notation.Error, message + defp reverse_with_descs([], _descs, acc), do: acc - _ -> - do_recordable!(env, usage, Map.delete(rules, :private_lookup), opts) + defp reverse_with_descs([{ref, attr} | rest], descs, acc) do + if desc = Map.get(descs, ref) do + reverse_with_descs(rest, descs, [attr, {:desc, desc} | acc]) + else + reverse_with_descs(rest, descs, [attr | acc]) end end - defp do_recordable!(env, _, rules, _) when map_size(rules) == 0 do - env + defp reverse_with_descs([attr | rest], descs, acc) do + reverse_with_descs(rest, descs, [attr | acc]) + end + + defp expand_ast(ast, env) do + Macro.prewalk(ast, fn + # We don't want to expand `@bla` into `Module.get_attribute(module, @bla)` because this + # function call will fail if the module is already compiled. Remember that the ast gets put + # into a generated `__absinthe_blueprint__` function which is called at "__after_compile__" + # time. This will be after a module has been compiled if there are multiple modules in the + # schema (in the case of an `import_types`). 
+ # + # Also see test "test/absinthe/type/import_types_test.exs" + # "__absinthe_blueprint__ is callable at runtime even if there is a module attribute" + # and it's comment for more information + {:@, _, _} = node -> + node + + {_, _, _} = node -> + Macro.expand(node, env) + + node -> + node + end) end @doc false - # Get the placement information for a macro - @spec placement(atom) :: Keyword.t() - def placement(usage) do - Keyword.get(@placement, usage, []) + # Ensure the provided operation can be recorded in the current environment, + # in the current scope context + def recordable!(env, usage, placement) do + [scope | _] = Module.get_attribute(env.module, :absinthe_scope_stack) + + unless recordable?(placement, scope) do + raise Absinthe.Schema.Notation.Error, invalid_message(placement, usage, scope) + end + + env end - # The error message when a macro can only be used within a certain set of - # parent scopes. - defp only_within(usage, parents, opts) do - ref = opts[:as] || "`#{usage}`" + defp recordable?([under: under], scope), do: scope in under + defp recordable?([toplevel: true], scope), do: scope == :schema + defp recordable?([toplevel: false], scope), do: scope != :schema - parts = - List.wrap(parents) - |> Enum.map(&"`#{&1}`") - |> Enum.join(", ") + defp invalid_message([under: under], usage, scope) do + allowed = under |> Enum.map(&"`#{&1}`") |> Enum.join(", ") + + "Invalid schema notation: `#{usage}` must only be used within #{allowed}. #{used_in(scope)}" + end + + defp invalid_message([toplevel: true], usage, scope) do + "Invalid schema notation: `#{usage}` must only be used toplevel. #{used_in(scope)}" + end + + defp invalid_message([toplevel: false], usage, scope) do + "Invalid schema notation: `#{usage}` must not be used toplevel. #{used_in(scope)}" + end - "Invalid schema notation: #{ref} must only be used within #{parts}" + defp used_in(scope) do + scope = Atom.to_string(scope) + "Was used in `#{scope}`." 
end end diff --git a/lib/absinthe/schema/notation/definition.ex b/lib/absinthe/schema/notation/definition.ex deleted file mode 100644 index ae0ace9be2..0000000000 --- a/lib/absinthe/schema/notation/definition.ex +++ /dev/null @@ -1,11 +0,0 @@ -defmodule Absinthe.Schema.Notation.Definition do - @moduledoc false - defstruct category: nil, - source: nil, - identifier: nil, - builder: nil, - attrs: [], - opts: [], - file: nil, - line: nil -end diff --git a/lib/absinthe/schema/notation/scope.ex b/lib/absinthe/schema/notation/scope.ex deleted file mode 100644 index 0edf21a9b3..0000000000 --- a/lib/absinthe/schema/notation/scope.ex +++ /dev/null @@ -1,121 +0,0 @@ -defmodule Absinthe.Schema.Notation.Scope do - @moduledoc false - - @stack :absinthe_notation_scopes - - defstruct name: nil, recordings: [], attrs: [] - - use Absinthe.Type.Fetch - - def open(name, mod, attrs \\ []) do - Module.put_attribute(mod, @stack, [%__MODULE__{name: name, attrs: attrs} | on(mod)]) - end - - def close(mod) do - {current, rest} = split(mod) - Module.put_attribute(mod, @stack, rest) - current - end - - def split(mod) do - case on(mod) do - [] -> - {nil, []} - - [current | rest] -> - {current, rest} - end - end - - def current(mod) do - {c, _} = split(mod) - c - end - - def recorded!(mod, kind, identifier) do - update_current(mod, fn - %{recordings: recs} = scope -> - %{scope | recordings: [{kind, identifier} | recs]} - - nil -> - # Outside any scopes, ignore - nil - end) - end - - @doc """ - Check if a certain operation has been recorded in the current scope. 
- - ## Examples - - See if an input object with the identifier `:input` has been defined from - this scope: - - ``` - recorded?(mod, :input_object, :input) - ``` - - See if the `:description` attribute has been - - ``` - recorded?(mod, :attr, :description) - ``` - """ - @spec recorded?(atom, atom, atom) :: boolean - def recorded?(mod, kind, identifier) do - scope = current(mod) - - case kind do - :attr -> - # Supports attributes passed directly to the macro that - # created the scope, usually (?) short-circuits the need to - # check the scope recordings. - scope.attrs[identifier] || recording_marked?(scope, kind, identifier) - - _ -> - recording_marked?(scope, kind, identifier) - end - end - - # Check the list of recordings for `recorded?/3` - defp recording_marked?(scope, kind, identifier) do - scope.recordings - |> Enum.find(fn - {^kind, ^identifier} -> - true - - _ -> - false - end) - end - - def put_attribute(mod, key, value, opts \\ [accumulate: false]) do - if opts[:accumulate] do - update_current(mod, fn scope -> - new_attrs = update_in(scope.attrs, [key], &[value | &1 || []]) - %{scope | attrs: new_attrs} - end) - else - update_current(mod, fn scope -> - %{scope | attrs: Keyword.put(scope.attrs, key, value)} - end) - end - end - - defp update_current(mod, fun) do - {current, rest} = split(mod) - updated = fun.(current) - Module.put_attribute(mod, @stack, [updated | rest]) - end - - def on(mod) do - case Module.get_attribute(mod, @stack) do - nil -> - Module.put_attribute(mod, @stack, []) - [] - - value -> - value - end - end -end diff --git a/lib/absinthe/schema/notation/sdl.ex b/lib/absinthe/schema/notation/sdl.ex new file mode 100644 index 0000000000..2fdc7225fe --- /dev/null +++ b/lib/absinthe/schema/notation/sdl.ex @@ -0,0 +1,88 @@ +defmodule Absinthe.Schema.Notation.SDL do + @moduledoc false + + @doc """ + Parse definitions from SDL source + """ + + alias Absinthe.{Blueprint, Language.Source} + + @spec parse(sdl :: Source.t() | Blueprint.t(), module(), 
map(), Keyword.t()) :: + {:ok, [Blueprint.Schema.t()]} | {:error, String.t()} + def parse(sdl, module, ref, opts) do + with {:ok, doc} <- Absinthe.Phase.Parse.run(sdl) do + definitions = + doc.input.definitions + |> Enum.map(&Absinthe.Blueprint.Draft.convert(&1, doc)) + |> Enum.map(&put_ref(&1, ref, opts)) + |> Enum.map(fn type -> %{type | module: module} end) + + {:ok, definitions} + else + {:error, %Blueprint{execution: %{validation_errors: [_ | _] = errors}}} -> + error = + errors + |> Enum.map(&"#{&1.message} (#{inspect(&1.locations)})") + |> Enum.join("\n") + + {:error, error} + + other -> + other + end + end + + defp put_ref(%{fields: fields, directives: directives} = node, ref, opts) do + %{ + node + | fields: Enum.map(fields, &put_ref(&1, ref, opts)), + directives: Enum.map(directives, &put_ref(&1, ref, opts)) + } + |> do_put_ref(ref, opts) + end + + defp put_ref(%{fields: fields} = node, ref, opts) do + %{node | fields: Enum.map(fields, &put_ref(&1, ref, opts))} + |> do_put_ref(ref, opts) + end + + defp put_ref(%{arguments: args, directives: directives} = node, ref, opts) do + %{ + node + | arguments: Enum.map(args, &put_ref(&1, ref, opts)), + directives: Enum.map(directives, &put_ref(&1, ref, opts)) + } + |> do_put_ref(ref, opts) + end + + defp put_ref(%{arguments: args} = node, ref, opts) do + %{node | arguments: Enum.map(args, &put_ref(&1, ref, opts))} + |> do_put_ref(ref, opts) + end + + defp put_ref(%{directives: directives} = node, ref, opts) do + %{node | directives: Enum.map(directives, &put_ref(&1, ref, opts))} + |> do_put_ref(ref, opts) + end + + defp put_ref(node, ref, opts), do: do_put_ref(node, ref, opts) + + defp do_put_ref(%{__reference__: nil} = node, ref, opts) do + ref = + case opts[:path] do + nil -> + ref + + path -> + put_in(ref.location, %{ + file: {:unquote, [], [path]}, + line: node.source_location.line, + column: node.source_location.column + }) + end + + %{node | __reference__: ref} + end + + defp do_put_ref(node, _ref, _opts), 
do: node +end diff --git a/lib/absinthe/schema/notation/sdl_render.ex b/lib/absinthe/schema/notation/sdl_render.ex new file mode 100644 index 0000000000..abc85a2629 --- /dev/null +++ b/lib/absinthe/schema/notation/sdl_render.ex @@ -0,0 +1,493 @@ +defmodule Absinthe.Schema.Notation.SDL.Render do + @moduledoc false + import Inspect.Algebra + import Absinthe.Utils.Render + + alias Absinthe.Blueprint + + @line_width 120 + + def inspect(term, %{pretty: true}) do + term + |> render() + |> concat(line()) + |> format(@line_width) + |> to_string + end + + def inspect(term, options) do + Inspect.Any.inspect(term, options) + end + + @skip_modules [ + Absinthe.Phase.Schema.Introspection, + Absinthe.Type.BuiltIns.Directives, + Absinthe.Type.BuiltIns.Scalars, + Absinthe.Type.BuiltIns.Introspection + ] + defp render(bp, type_definitions \\ []) + + defp render(%Blueprint{} = bp, _) do + %{ + schema_definitions: [ + %Blueprint.Schema.SchemaDefinition{ + type_definitions: type_definitions, + directive_definitions: directive_definitions, + schema_declaration: schema_declaration + } + ] + } = bp + + schema_declaration = + schema_declaration || + %{ + query: Enum.find(type_definitions, &(&1.identifier == :query)), + mutation: Enum.find(type_definitions, &(&1.identifier == :mutation)), + subscription: Enum.find(type_definitions, &(&1.identifier == :subscription)), + description: Enum.find(type_definitions, &(&1.identifier == :__schema)).description + } + + directive_definitions = + directive_definitions + |> Enum.reject(&(&1.module in @skip_modules)) + + all_type_definitions = + type_definitions + |> Enum.reject(&(&1.__struct__ == Blueprint.Schema.SchemaDeclaration)) + + types_to_render = + all_type_definitions + |> Enum.reject(&(&1.module in @skip_modules)) + |> Enum.filter(& &1.__private__[:__absinthe_referenced__]) + + ([schema_declaration] ++ directive_definitions ++ types_to_render) + |> Enum.map(&render(&1, all_type_definitions)) + |> Enum.reject(&(&1 == empty())) + |> 
join([line(), line()]) + end + + defp render(%Blueprint.Schema.SchemaDeclaration{} = schema, type_definitions) do + block( + concat([ + "schema", + directives(schema.directives, type_definitions) + ]), + render_list(schema.field_definitions, type_definitions) + ) + |> description(schema.description) + end + + defp render( + %{ + query: query_type, + mutation: mutation_type, + subscription: subscription_type, + description: description + }, + _type_definitions + ) do + schema_type_docs = + [ + query_type && concat("query: ", string(query_type.name)), + mutation_type && concat("mutation: ", string(mutation_type.name)), + subscription_type && concat("subscription: ", string(subscription_type.name)) + ] + |> Enum.reject(&is_nil/1) + |> join([line()]) + + block( + "schema", + schema_type_docs + ) + |> description(description) + end + + @adapter Absinthe.Adapter.LanguageConventions + defp render(%Blueprint.Schema.InputValueDefinition{} = input_value, type_definitions) do + concat([ + string(@adapter.to_external_name(input_value.name, :argument)), + ": ", + render(input_value.type, type_definitions), + default(input_value.default_value_blueprint), + directives(input_value.directives, type_definitions) + ]) + |> description(input_value.description) + end + + defp render(%Blueprint.Schema.FieldDefinition{} = field, type_definitions) do + concat([ + string(@adapter.to_external_name(field.name, :field)), + arguments(field.arguments, type_definitions), + ": ", + render(field.type, type_definitions), + directives(field.directives, type_definitions) + ]) + |> description(field.description) + end + + defp render(%Blueprint.Schema.ObjectTypeDefinition{} = object_type, type_definitions) do + block( + "type", + concat([ + string(object_type.name), + implements(object_type, type_definitions), + directives(object_type.directives, type_definitions) + ]), + render_list(object_type.fields, type_definitions) + ) + |> description(object_type.description) + end + + defp 
render(%Blueprint.Schema.InputObjectTypeDefinition{} = input_object_type, type_definitions) do + block( + concat([ + "input ", + string(input_object_type.name), + directives(input_object_type.directives, type_definitions) + ]), + render_list(input_object_type.fields, type_definitions) + ) + |> description(input_object_type.description) + end + + defp render(%Blueprint.Schema.UnionTypeDefinition{} = union_type, type_definitions) do + Enum.map(union_type.types, fn + identifier when is_atom(identifier) -> + render(%Blueprint.TypeReference.Identifier{id: identifier}, type_definitions) + + %Blueprint.TypeReference.Name{} = ref -> + render(ref, type_definitions) + + %Blueprint.TypeReference.Identifier{} = ref -> + render(ref, type_definitions) + end) + |> case do + [] -> + concat([ + "union ", + string(union_type.name), + directives(union_type.directives, type_definitions) + ]) + + types -> + concat([ + "union ", + string(union_type.name), + directives(union_type.directives, type_definitions), + " = ", + join(types, " | ") + ]) + end + |> description(union_type.description) + end + + defp render(%Blueprint.Schema.InterfaceTypeDefinition{} = interface_type, type_definitions) do + block( + "interface", + concat([ + string(interface_type.name), + implements(interface_type, type_definitions), + directives(interface_type.directives, type_definitions) + ]), + render_list(interface_type.fields, type_definitions) + ) + |> description(interface_type.description) + end + + defp render(%Blueprint.Schema.EnumTypeDefinition{} = enum_type, type_definitions) do + block( + concat([ + "enum ", + string(enum_type.name), + directives(enum_type.directives, type_definitions) + ]), + render_list(List.flatten(enum_type.values), type_definitions) + ) + |> description(enum_type.description) + end + + defp render(%Blueprint.Schema.EnumValueDefinition{} = enum_value, type_definitions) do + concat([ + string(enum_value.name), + directives(enum_value.directives, type_definitions) + ]) + |> 
description(enum_value.description) + end + + defp render(%Blueprint.Schema.ScalarTypeDefinition{} = scalar_type, type_definitions) do + concat([ + "scalar ", + string(scalar_type.name), + directives(scalar_type.directives, type_definitions) + ]) + |> description(scalar_type.description) + end + + defp render(%Blueprint.Schema.DirectiveDefinition{} = directive, type_definitions) do + locations = directive.locations |> Enum.map(&String.upcase(to_string(&1))) + + concat([ + "directive ", + "@", + string(directive.name), + arguments(directive.arguments, type_definitions), + repeatable(directive.repeatable), + " on ", + join(locations, " | ") + ]) + |> description(directive.description) + end + + defp render(%Blueprint.Directive{} = directive, type_definitions) do + concat([ + " @", + directive.name, + directive_arguments(directive.arguments, type_definitions) + ]) + end + + defp render(%Blueprint.Input.Argument{} = argument, _type_definitions) do + concat([ + argument.name, + ": ", + render_value(argument.input_value) + ]) + end + + defp render(%Blueprint.TypeReference.Name{name: name}, _type_definitions) do + string(name) + end + + defp render(%Blueprint.TypeReference.Identifier{id: id}, type_definitions) do + type = Enum.find(type_definitions, &(&1.identifier == id)) + + if type do + string(type.name) + else + all_type_ids = Enum.map(type_definitions, & &1.identifier) + + raise """ + No type found for identifier #{inspect(id)} in #{inspect(all_type_ids)} + """ + end + end + + defp render(%Blueprint.TypeReference.List{of_type: of_type}, type_definitions) do + concat(["[", render(of_type, type_definitions), "]"]) + end + + defp render(%Blueprint.TypeReference.NonNull{of_type: of_type}, type_definitions) do + concat([render(of_type, type_definitions), "!"]) + end + + defp render(nil, _) do + raise "Unexpected nil" + end + + defp render(identifier, type_definitions) when is_atom(identifier) do + render(%Blueprint.TypeReference.Identifier{id: identifier}, 
type_definitions) + end + + # SDL Syntax Helpers + + defp directives([], _) do + empty() + end + + defp directives(directives, type_definitions) do + concat(Enum.map(directives, &render(&1, type_definitions))) + end + + defp directive_arguments([], _) do + empty() + end + + defp directive_arguments(arguments, type_definitions) do + args = Enum.map(arguments, &render(&1, type_definitions)) + + concat([ + "(", + join(args, ", "), + ")" + ]) + end + + defp arguments([], _) do + empty() + end + + defp arguments(args, type_definitions) do + any_descriptions? = Enum.any?(args, & &1.description) + + group( + glue( + nest( + multiline( + glue( + "(", + "", + render_list(args, type_definitions, ", ") + ), + any_descriptions? + ), + 2, + :break + ), + "", + ")" + ) + ) + end + + defp default(nil) do + empty() + end + + defp default(default_value) do + concat([" = ", render_value(default_value)]) + end + + defp description(docs, nil) do + docs + end + + defp description(docs, description) do + concat([ + render_string_value(description, 0), + line(), + docs + ]) + end + + defp implements(%{interface_blueprints: [], interfaces: []}, _) do + empty() + end + + defp implements(interface, type_definitions) do + interface_names = + case interface do + %{interface_blueprints: [], interfaces: identifiers} -> + Enum.map(identifiers, fn identifier -> + Enum.find_value(type_definitions, fn + %{identifier: ^identifier, name: name} -> name + _ -> nil + end) + end) + + %{interface_blueprints: blueprints} -> + Enum.map(blueprints, & &1.name) + end + + concat([ + " implements ", + join(interface_names, " & ") + ]) + end + + defp repeatable(true), do: " repeatable" + defp repeatable(_), do: empty() + + # Render Helpers + + defp render_list(items, type_definitions, seperator \\ line()) + + # Workaround for `values` macro which temporarily defines + # values as raw atoms to support dynamic schemas + defp render_list([first | _] = items, type_definitions, seperator) when is_atom(first) do + 
items + |> Enum.map( + &%Blueprint.Schema.EnumValueDefinition{ + value: &1, + name: String.upcase(to_string(&1)) + } + ) + |> render_list(type_definitions, seperator) + end + + defp render_list(items, type_definitions, seperator) do + items = Enum.reject(items, &(&1.module in @skip_modules)) + + splitter = + items + |> Enum.any?(&(&1.description not in ["", nil])) + |> case do + true -> [nest(line(), :reset), line()] + false -> [seperator] + end + + items + |> Enum.reverse() + |> Enum.reduce(:start, fn + item, :start -> render(item, type_definitions) + item, acc -> concat([render(item, type_definitions)] ++ splitter ++ [acc]) + end) + end + + defp render_value(%Blueprint.Input.String{value: value}), + do: render_string_value(value) + + defp render_value(%Blueprint.Input.RawValue{content: content}), + do: render_value(content) + + defp render_value(%Blueprint.Input.Value{raw: raw}), + do: render_value(raw) + + defp render_value(%Blueprint.Input.Null{}), + do: "null" + + defp render_value(%Blueprint.Input.Object{fields: fields}) do + default_fields = Enum.map(fields, &render_value/1) + concat(["{", join(default_fields, ", "), "}"]) + end + + defp render_value(%Blueprint.Input.List{items: items}) do + default_list = Enum.map(items, &render_value/1) + concat(["[", join(default_list, ", "), "]"]) + end + + defp render_value(%Blueprint.Input.Field{name: name, input_value: value}), + do: concat([name, ": ", render_value(value)]) + + defp render_value(%{value: value}), + do: to_string(value) + + # Algebra Helpers + + defp multiline(docs, true) do + force_unfit(docs) + end + + defp multiline(docs, false) do + docs + end + + defp block(kind, name, docs) do + glue( + kind, + block(name, docs) + ) + end + + defp block(name, docs) do + glue( + name, + group( + glue( + nest( + force_unfit( + glue( + "{", + "", + docs + ) + ), + 2, + :always + ), + "", + "}" + ) + ) + ) + end +end diff --git a/lib/absinthe/schema/notation/writer.ex b/lib/absinthe/schema/notation/writer.ex deleted 
file mode 100644 index 85e5c1fe29..0000000000 --- a/lib/absinthe/schema/notation/writer.ex +++ /dev/null @@ -1,262 +0,0 @@ -defmodule Absinthe.Schema.Notation.Writer do - @moduledoc false - - defstruct [ - :env, - type_map: %{}, - directive_map: %{}, - errors: [], - type_functions: [], - directive_functions: [], - exports: [], - implementors: %{} - ] - - defmacro __before_compile__(env) do - info = build_info(env) - - errors = Macro.escape(info.errors) - exports = Macro.escape(info.exports) - type_map = Macro.escape(info.type_map) - implementors = Macro.escape(info.implementors) - directive_map = Macro.escape(info.directive_map) - - [ - quote do - def __absinthe_types__, do: unquote(type_map) - end, - info.type_functions, - quote do - def __absinthe_type__(_), do: nil - end, - quote do - def __absinthe_directives__, do: unquote(directive_map) - end, - info.directive_functions, - quote do - def __absinthe_directive__(_), do: nil - end, - quote do - def __absinthe_errors__, do: unquote(errors) - def __absinthe_interface_implementors__, do: unquote(implementors) - def __absinthe_exports__, do: unquote(exports) - end - ] - end - - defp init_implementors(nil) do - %{} - end - - defp init_implementors(modules) do - modules - |> Enum.map(& &1.__absinthe_interface_implementors__) - |> Enum.reduce(%{}, fn implementors, acc -> - Map.merge(implementors, acc, fn _k, v1, v2 -> - v1 ++ v2 - end) - end) - end - - def build_info(env) do - implementors = - env.module - |> Module.get_attribute(:absinthe_imports) - |> init_implementors - - descriptions = - env.module - |> Module.get_attribute(:absinthe_descriptions) - |> Map.new() - - definitions = - env.module - |> Module.get_attribute(:absinthe_definitions) - |> Enum.map(&update_description(&1, descriptions)) - - {definitions, errors} = - {definitions, []} - |> Absinthe.Schema.Rule.FieldImportsExist.check() - |> Absinthe.Schema.Rule.NoCircularFieldImports.check() - - info = %__MODULE__{ - env: env, - errors: errors, - implementors: 
implementors - } - - Enum.reduce(definitions, info, &do_build_info/2) - end - - defp type_functions(definition) do - ast = build(:type, definition) - identifier = definition.identifier - name = definition.attrs[:name] - - result = [ - quote(do: def(__absinthe_type__(unquote(name)), do: __absinthe_type__(unquote(identifier)))) - ] - - if definition.builder == Absinthe.Type.Object do - [ - quote do - def __absinthe_type__(unquote(identifier)) do - unquote(ast) - end - end, - result - ] - else - [ - quote do - def __absinthe_type__(unquote(identifier)), do: unquote(ast) - end, - result - ] - end - end - - defp directive_functions(definition) do - ast = build(:directive, definition) - identifier = definition.identifier - name = definition.attrs[:name] - - quote do - def __absinthe_directive__(unquote(identifier)), do: unquote(ast) - def __absinthe_directive__(unquote(name)), do: __absinthe_directive__(unquote(identifier)) - end - end - - # Type import reference - defp build(:type, %{source: source, builder: nil} = definition) do - quote bind_quoted: [source: source, identifier: definition.identifier] do - source.__absinthe_type__(identifier) - end - end - - # Directive import reference - defp build(:directive, %{source: source, builder: nil} = definition) do - quote bind_quoted: [source: source, identifier: definition.identifier] do - source.__absinthe_directive__(identifier) - end - end - - # Type/Directive definition - defp build(_, %{source: nil, builder: builder} = definition) do - builder.build(definition) - end - - defp directive_name_error(definition) do - %{ - rule: Absinthe.Schema.Rule.TypeNamesAreUnique, - location: %{file: definition.file, line: definition.line}, - data: %{artifact: "Absinthe directive identifier", value: definition.identifier} - } - end - - defp type_name_error(artifact, value, definition) do - %{ - rule: Absinthe.Schema.Rule.TypeNamesAreUnique, - location: %{file: definition.file, line: definition.line}, - data: %{artifact: artifact, 
value: value} - } - end - - defp directive_errors(definition, state) do - case Map.has_key?(state.directive_map, definition.identifier) do - true -> - [directive_name_error(definition)] - - false -> - [] - end - end - - defp type_errors(definition, state) do - [ - if Map.has_key?(state.type_map, definition.identifier) do - type_name_error("Absinthe type identifier", definition.identifier, definition) - end, - if Enum.member?(Map.values(state.type_map), definition.attrs[:name]) do - type_name_error("Type name", definition.attrs[:name], definition) - end - ] - |> Enum.reject(&is_nil/1) - end - - defp update_description(definition, descriptions) do - case Map.get(descriptions, definition.identifier) do - nil -> definition - desc -> Map.update!(definition, :attrs, &Keyword.put(&1, :description, desc)) - end - end - - defp do_build_info(%{category: :directive} = definition, info) do - errors = directive_errors(definition, info) - - info - |> update_directive_map(definition) - |> update_directive_functions(definition, errors) - |> update_exports(definition) - |> update_errors(errors) - end - - defp do_build_info(%{category: :type} = definition, info) do - errors = type_errors(definition, info) - - info - |> update_type_map(definition) - |> update_type_functions(definition, errors) - |> update_implementors(definition) - |> update_exports(definition) - |> update_errors(errors) - end - - defp update_directive_map(info, definition) do - Map.update!( - info, - :directive_map, - &Map.put(&1, definition.identifier, definition.attrs[:name]) - ) - end - - defp update_directive_functions(info, definition, []) do - Map.update!(info, :directive_functions, &[directive_functions(definition) | &1]) - end - - defp update_type_map(info, definition) do - Map.update!(info, :type_map, &Map.put(&1, definition.identifier, definition.attrs[:name])) - end - - defp update_type_functions(info, definition, []) do - Map.update!(info, :type_functions, &[type_functions(definition) | &1]) - end - - 
defp update_type_functions(info, _definition, _errors), do: info - - defp update_implementors(info, definition) do - implementors = - definition.attrs[:interfaces] - |> List.wrap() - |> Enum.reduce(info.implementors, fn iface, implementors -> - Map.update(implementors, iface, [definition.identifier], &[definition.identifier | &1]) - end) - - %{info | implementors: implementors} - end - - defp update_exports(info, definition) do - exports = - if Keyword.get(definition.opts, :export, definition.source != Absinthe.Type.BuiltIns) do - [definition.identifier | info.exports] - else - info.exports - end - - %{info | exports: exports} - end - - defp update_errors(info, errors) do - %{info | errors: errors ++ info.errors} - end -end diff --git a/lib/absinthe/schema/persistent_term.ex b/lib/absinthe/schema/persistent_term.ex new file mode 100644 index 0000000000..528d6975e0 --- /dev/null +++ b/lib/absinthe/schema/persistent_term.ex @@ -0,0 +1,113 @@ +if Code.ensure_loaded?(:persistent_term) do + defmodule Absinthe.Schema.PersistentTerm do + @moduledoc """ + Experimental: Persistent Term based Schema Backend + + By default, Absinthe schemas are stored in a generated module. If your schema + is called `MyAppWeb.Schema`, Absinthe creates a `MyAppWeb.Schema.Compiled` + module containing the structs and other data that Absinthe needs at runtime + to execute GraphQL operations against that schema. + + OTP introduced the `:persistent_term` module to provide many of the same + performance benefits of using compiled modules, without the downsides associated + with manipulating complex structures at compile time. + + This module is an experimental effort at using the `:persistent_term` module + as an Absinthe schema backend. This module has been tested against against + the entire Absinthe test suite and shown to perform perhaps even better + than the compiled module. 
+ + To use: + + In your schema module: + ``` + use Absinthe.Schema + @schema_provider Absinthe.Schema.PersistentTerm + ``` + + In your application's supervision tree, prior to anywhere where you'd use + the schema: + ``` + {Absinthe.Schema, MyAppWeb.Schema} + ``` + + where MyAppWeb.Schema is the name of your schema. + """ + + @behaviour Absinthe.Schema.Provider + + def pipeline(pipeline) do + Enum.map(pipeline, fn + Absinthe.Phase.Schema.InlineFunctions -> + {Absinthe.Phase.Schema.InlineFunctions, inline_always: true} + + {Absinthe.Phase.Schema.Compile, options} -> + {Absinthe.Phase.Schema.PopulatePersistentTerm, options} + + phase -> + phase + end) + end + + def __absinthe_type__(schema_mod, name) do + schema_mod + |> get() + |> Map.fetch!(:__absinthe_type__) + |> Map.get(name) + end + + def __absinthe_directive__(schema_mod, name) do + schema_mod + |> get() + |> Map.fetch!(:__absinthe_directive__) + |> Map.get(name) + end + + def __absinthe_types__(schema_mod) do + schema_mod + |> get() + |> Map.fetch!(:__absinthe_types__) + |> Map.fetch!(:referenced) + end + + def __absinthe_types__(schema_mod, group) do + schema_mod + |> get() + |> Map.fetch!(:__absinthe_types__) + |> Map.fetch!(group) + end + + def __absinthe_directives__(schema_mod) do + schema_mod + |> get() + |> Map.fetch!(:__absinthe_directives__) + end + + def __absinthe_interface_implementors__(schema_mod) do + schema_mod + |> get() + |> Map.fetch!(:__absinthe_interface_implementors__) + end + + @dialyzer {:nowarn_function, [get: 1]} + defp get(schema) do + :persistent_term.get({__MODULE__, schema}) + end + end +else + defmodule Absinthe.Schema.PersistentTerm do + @moduledoc false + + @error "Can't be used without OTP >= 21.2" + + def pipeline(_), do: raise(@error) + + def __absinthe_type__(_, _), do: raise(@error) + def __absinthe_directive__(_, _), do: raise(@error) + def __absinthe_types__(_), do: raise(@error) + def __absinthe_types__(_, _), do: raise(@error) + def __absinthe_directives__(_), do: 
raise(@error) + def __absinthe_interface_implementors__(_), do: raise(@error) + def __absinthe_prototype_schema__(), do: raise(@error) + end +end diff --git a/lib/absinthe/schema/prototype.ex b/lib/absinthe/schema/prototype.ex new file mode 100644 index 0000000000..efd81f5eb2 --- /dev/null +++ b/lib/absinthe/schema/prototype.ex @@ -0,0 +1,42 @@ +defmodule Absinthe.Schema.Prototype do + @moduledoc """ + Provides the directives available for SDL schema definitions. + + By default, the only directive provided is `@deprecated`, which supports + a `reason` argument (of GraphQL type `String`). This can be used to + mark a field + + To add additional schema directives, define your own prototype schema, e.g.: + + ``` + defmodule MyAppWeb.SchemaPrototype do + use Absinthe.Schema.Prototype + + directive :feature do + arg :name, non_null(:string) + on [:interface] + # Define `expand`, etc. + end + + # More directives... + end + ``` + + Then, set it as the prototype for your schema: + + ``` + defmodule MyAppWeb.Schema do + use Absinthe.Schema + + @prototype_schema MyAppWeb.SchemaPrototype + + # Use `import_sdl`, etc... + end + ``` + """ + use __MODULE__.Notation + + defmacro __using__(opts \\ []) do + __MODULE__.Notation.content(opts) + end +end diff --git a/lib/absinthe/schema/prototype/notation.ex b/lib/absinthe/schema/prototype/notation.ex new file mode 100644 index 0000000000..21bd02033b --- /dev/null +++ b/lib/absinthe/schema/prototype/notation.ex @@ -0,0 +1,54 @@ +defmodule Absinthe.Schema.Prototype.Notation do + @moduledoc false + + defmacro __using__(opts \\ []) do + content(opts) + end + + def content(_opts \\ []) do + quote do + use Absinthe.Schema + @schema_provider Absinthe.Schema.Compiled + @pipeline_modifier __MODULE__ + + directive :deprecated do + arg :reason, :string + + on [ + :field_definition, + :input_field_definition, + # Technically, argument deprecation is not yet defined by the GraphQL + # specification. 
Absinthe does provide some support, but deprecating + # arguments is not recommended. + # + # For more information, see: + # - https://github.com/graphql/graphql-spec/pull/525 + # - https://github.com/absinthe-graphql/absinthe/issues/207 + :argument_definition, + :enum_value + ] + + expand &__MODULE__.expand_deprecate/2 + end + + def pipeline(pipeline) do + pipeline + |> Absinthe.Pipeline.without(Absinthe.Phase.Schema.Validation.QueryTypeMustBeObject) + |> Absinthe.Pipeline.without(Absinthe.Phase.Schema.DeprecatedDirectiveFields) + end + + @doc """ + Add a deprecation (with an optional reason) to a node. + """ + @spec expand_deprecate( + arguments :: %{optional(:reason) => String.t()}, + node :: Absinthe.Blueprint.node_t() + ) :: Absinthe.Blueprint.node_t() + def expand_deprecate(arguments, node) do + %{node | deprecation: %Absinthe.Type.Deprecation{reason: arguments[:reason]}} + end + + defoverridable(pipeline: 1) + end + end +end diff --git a/lib/absinthe/schema/provider.ex b/lib/absinthe/schema/provider.ex new file mode 100644 index 0000000000..b7ef2d50bb --- /dev/null +++ b/lib/absinthe/schema/provider.ex @@ -0,0 +1,28 @@ +defmodule Absinthe.Schema.Provider do + @moduledoc """ + Experimental: Behaviour for providing schema data + + This behaviour is experimental and may change significantly in patch releases. 
+ """ + + @type schema_identifier :: term + @type type_group :: :all | :referenced + + @callback pipeline(Absinthe.Pipeline.t()) :: Absinthe.Pipeline.t() + + @callback __absinthe_type__(schema_identifier, Absinthe.Type.identifier_t()) :: + Absinthe.Type.custom_t() + + @callback __absinthe_directive__(schema_identifier, Absinthe.Type.identifier_t()) :: + Absinthe.Type.custom_t() + + @callback __absinthe_types__(schema_identifier) :: [{atom, binary}] + + @callback __absinthe_types__(schema_identifier, type_group) :: [ + {Absinthe.Type.identifier_t(), Absinthe.Type.identifier_t()} + ] + + @callback __absinthe_directives__(schema_identifier) :: Absinthe.Type.Directive.t() + + @callback __absinthe_interface_implementors__(schema_identifier) :: term +end diff --git a/lib/absinthe/schema/rule.ex b/lib/absinthe/schema/rule.ex deleted file mode 100644 index e7de809bfb..0000000000 --- a/lib/absinthe/schema/rule.ex +++ /dev/null @@ -1,40 +0,0 @@ -defmodule Absinthe.Schema.Rule do - @moduledoc false - - alias __MODULE__ - - defmacro __using__(_opts) do - quote do - @behaviour unquote(__MODULE__) - - def report(location, data) do - %{ - rule: __MODULE__, - location: location, - data: data - } - end - end - end - - @callback check(Absinthe.Schema.t()) :: [Absinthe.Schema.Error.detail_t()] - @callback explanation(Absinthe.Schema.Error.detail_t()) :: binary - - @type t :: module - - @rules [ - Rule.QueryTypeMustBeObject, - Rule.TypeNamesAreReserved, - Rule.TypeNamesAreValid, - Rule.ObjectInterfacesMustBeValid, - Rule.ObjectMustImplementInterfaces, - Rule.InterfacesMustResolveTypes, - Rule.InputOuputTypesCorrectlyPlaced, - Rule.DefaultEnumValuePresent - ] - - @spec check(Absinthe.Schema.t()) :: [Absinthe.Schema.Error.detail_t()] - def check(schema) do - Enum.flat_map(@rules, & &1.check(schema)) - end -end diff --git a/lib/absinthe/schema/rule/default_enum_value_present.ex b/lib/absinthe/schema/rule/default_enum_value_present.ex deleted file mode 100644 index 136cd0b8e8..0000000000 
--- a/lib/absinthe/schema/rule/default_enum_value_present.ex +++ /dev/null @@ -1,63 +0,0 @@ -defmodule Absinthe.Schema.Rule.DefaultEnumValuePresent do - use Absinthe.Schema.Rule - - alias Absinthe.{Schema, Type} - require IEx - - @moduledoc false - - def explanation(%{data: %{default_value: default_value, type: type, value_list: value_list}}) do - """ - The default_value for an enum must be present in the enum values. - - Could not use default value of "#{default_value}" for #{inspect(type)}. - - Valid values are: - #{value_list} - """ - end - - def check(schema) do - Schema.types(schema) - |> Enum.flat_map(&check_type(schema, &1)) - end - - defp check_type(schema, %Type.Object{fields: fields}) when not is_nil(fields) do - Enum.flat_map(fields, &check_field(schema, &1)) - end - - defp check_type(_schema, _type), do: [] - - defp check_field(schema, {_name, %{args: args}}) when not is_nil(args) do - Enum.flat_map(args, &check_args(schema, &1)) - end - - defp check_field(_schema, _type), do: [] - - defp check_args(schema, {_name, %{default_value: default_value, type: type}}) - when not is_nil(default_value) do - type = Schema.lookup_type(schema, type) - check_default_value_present(default_value, type) - end - - defp check_args(_schema, _args), do: [] - - defp check_default_value_present(default_value, %Type.Enum{} = type) do - values = Enum.map(type.values, &elem(&1, 1).value) - value_list = Enum.map(values, &"\n * #{&1}") - - if not (default_value in values) do - detail = %{ - value_list: value_list, - type: type.__reference__.identifier, - default_value: default_value - } - - [report(type.__reference__.location, detail)] - else - [] - end - end - - defp check_default_value_present(_default_value, _type), do: [] -end diff --git a/lib/absinthe/schema/rule/field_imports_exist.ex b/lib/absinthe/schema/rule/field_imports_exist.ex deleted file mode 100644 index 24aeae0849..0000000000 --- a/lib/absinthe/schema/rule/field_imports_exist.ex +++ /dev/null @@ -1,65 +0,0 @@ 
-defmodule Absinthe.Schema.Rule.FieldImportsExist do - @moduledoc false - # This has to be run prior to the module compilation, and is called - # from Notation.Writer instead of Rule - def check({definitions, errors}) do - definition_map = build_definition_map(definitions) - - errors = - Enum.reduce(definitions, errors, fn definition, errors -> - definition.attrs - |> Keyword.get(:field_imports) - |> case do - [_ | _] = imports -> - check_imports(definition, imports, definition_map, errors) - - _ -> - errors - end - end) - - {definitions, errors} - end - - defp check_imports(definition, imports, definition_map, errors) do - Enum.reduce(imports, errors, fn {ref, _}, errors -> - case Map.fetch(definition_map, ref) do - {:ok, _} -> - errors - - _ -> - [error(definition, ref) | errors] - end - end) - end - - defp build_definition_map(definitions) do - definitions - |> Enum.filter(&Map.get(&1, :identifier)) - |> Map.new(&{&1.identifier, &1}) - end - - def explanation(%{data: %{artifact: msg}}) do - """ - #{msg} - """ - |> String.trim() - end - - defp error(definition, ref) do - msg = - """ - Field Import Error - - Object #{inspect(definition.identifier)} imports fields from #{inspect(ref)} but - #{inspect(ref)} does not exist in the schema! - """ - |> String.trim() - - %{ - data: %{artifact: msg, value: ref}, - location: %{file: definition.file, line: definition.line}, - rule: __MODULE__ - } - end -end diff --git a/lib/absinthe/schema/rule/input_output_types_correctly_placed.ex b/lib/absinthe/schema/rule/input_output_types_correctly_placed.ex deleted file mode 100644 index 39d208445f..0000000000 --- a/lib/absinthe/schema/rule/input_output_types_correctly_placed.ex +++ /dev/null @@ -1,108 +0,0 @@ -defmodule Absinthe.Schema.Rule.InputOuputTypesCorrectlyPlaced do - use Absinthe.Schema.Rule - - alias Absinthe.Schema - alias Absinthe.Type - - @moduledoc false - - @description """ - Only input types may be used as inputs. 
Input types may not be used as output types - - Input types consist of Scalars, Enums, and Input Objects. - """ - - def explanation(%{data: %{argument: argument, type: type, struct: struct}}) do - """ - #{inspect(type)} is not a valid input type for argument #{inspect(argument)} because - it is an #{Macro.to_string(struct)} type. Arguments may only be input types. - - #{@description} - """ - end - - def explanation(%{data: %{field: field, type: type, struct: struct, parent: parent}}) do - """ - #{inspect(type)} is not a valid type for field #{inspect(field)} because - it is an #{Macro.to_string(struct)} type, and the parent of this field is an #{ - Macro.to_string(parent) - } type. - - #{@description} - """ - end - - def check(schema) do - Schema.types(schema) - |> Enum.flat_map(&check_type(schema, &1)) - end - - defp check_type(schema, %Type.Object{} = type) do - field_errors = - for {_, field} <- type.fields, - field_type = get_type(field, schema), - !output_type?(field_type) do - detail = %{ - field: field.identifier, - type: field_type.__reference__.identifier, - struct: field_type.__struct__, - parent: Type.Object - } - - report(type.__reference__.location, detail) - end - - argument_errors = - for {_, field} <- type.fields, - {_, arg} <- field.args, - type = get_type(arg, schema), - !input_type?(type) do - detail = %{ - argument: arg.__reference__.identifier, - type: type.__reference__.identifier, - struct: type.__struct__ - } - - report(type.__reference__.location, detail) - end - - field_errors ++ argument_errors - end - - defp check_type(schema, %Type.InputObject{} = type) do - for {_, field} <- type.fields, - field_type = get_type(field, schema), - !input_type?(field_type) do - detail = %{ - field: field.identifier, - type: field_type.__reference__.identifier, - struct: field_type.__struct__, - parent: Type.InputObject - } - - report(type.__reference__.location, detail) - end - end - - defp check_type(_, _) do - [] - end - - defp get_type(%{type: type}, 
schema) do - Type.expand(type, schema) - |> Type.unwrap() - end - - defp get_type(type, schema) do - Type.expand(type, schema) - |> Type.unwrap() - end - - defp input_type?(%Type.Scalar{}), do: true - defp input_type?(%Type.Enum{}), do: true - defp input_type?(%Type.InputObject{}), do: true - defp input_type?(_), do: false - - defp output_type?(%Type.InputObject{}), do: false - defp output_type?(_), do: true -end diff --git a/lib/absinthe/schema/rule/interfaces_must_resolve_types.ex b/lib/absinthe/schema/rule/interfaces_must_resolve_types.ex deleted file mode 100644 index efbdc2bc1e..0000000000 --- a/lib/absinthe/schema/rule/interfaces_must_resolve_types.ex +++ /dev/null @@ -1,44 +0,0 @@ -defmodule Absinthe.Schema.Rule.InterfacesMustResolveTypes do - use Absinthe.Schema.Rule - - alias Absinthe.Schema - alias Absinthe.Type - - @moduledoc false - - @description """ - An interface must be able to resolve the implementing types of results. - - > The interface type should have some way of determining which object a given - > result corresponds to. - - Reference: https://github.com/facebook/graphql/blob/master/spec/Section%203%20--%20Type%20System.md#interfaces - """ - - def explanation(%{data: interface}) do - """ - Interface type "#{interface}" either: - * Does not have a `resolve_type` function. - * Is missing a `is_type_of` function on all implementing types. 
- - #{@description} - """ - end - - def check(schema) do - Schema.types(schema) - |> Enum.flat_map(&check_type(schema, &1)) - end - - defp check_type(schema, %Type.Interface{} = type) do - if Type.Interface.type_resolvable?(schema, type) do - [] - else - [report(type.__reference__.location, type.name)] - end - end - - defp check_type(_, _) do - [] - end -end diff --git a/lib/absinthe/schema/rule/no_circular_field_imports.ex b/lib/absinthe/schema/rule/no_circular_field_imports.ex deleted file mode 100644 index 7f26d5be78..0000000000 --- a/lib/absinthe/schema/rule/no_circular_field_imports.ex +++ /dev/null @@ -1,71 +0,0 @@ -defmodule Absinthe.Schema.Rule.NoCircularFieldImports do - @moduledoc false - # This has to be run prior to the module compilation, and is called - # from Notation.Writer instead of Rule - def check({definitions, errors}) do - acc = [] - graph = :digraph.new([:acyclic]) - do_check(definitions, graph, errors, acc) - end - - defp do_check([], graph, errors, acc) do - :digraph.delete(graph) - {:lists.reverse(acc), errors} - end - - defp do_check([definition | rest], graph, errors, acc) do - {acc, errors} = - definition.attrs - |> Keyword.get(:field_imports) - |> case do - [_ | _] = imports -> - check_imports(definition, imports, graph, errors, acc) - - _ -> - {[definition | acc], errors} - end - - do_check(rest, graph, errors, acc) - end - - defp check_imports(definition, imports, graph, errors, acc) do - :digraph.add_vertex(graph, definition.identifier) - - Enum.reduce(imports, [], fn {ref, _}, errors -> - :digraph.add_vertex(graph, ref) - - case :digraph.add_edge(graph, definition.identifier, ref) do - {:error, {:bad_edge, path}} -> - # All just error generation logic - deps = - [definition.identifier | path] - |> Enum.map(&"`#{&1}'") - |> Enum.join(" => ") - - msg = - String.trim(""" - Field Import Cycle Error - - Field Import in object `#{definition.identifier}' `import_fields(#{inspect(ref)}) forms a cycle via: (#{ - deps - }) - """) - - error = 
%{ - rule: __MODULE__, - location: %{file: definition.file, line: definition.line}, - data: %{artifact: msg, value: ref} - } - - [error | errors] - - _ -> - errors - end - end) - |> case do - [] -> {[definition | acc], errors} - new_errors -> {acc, new_errors ++ errors} - end - end -end diff --git a/lib/absinthe/schema/rule/object_interfaces_must_be_valid.ex b/lib/absinthe/schema/rule/object_interfaces_must_be_valid.ex deleted file mode 100644 index 3b46179545..0000000000 --- a/lib/absinthe/schema/rule/object_interfaces_must_be_valid.ex +++ /dev/null @@ -1,48 +0,0 @@ -defmodule Absinthe.Schema.Rule.ObjectInterfacesMustBeValid do - use Absinthe.Schema.Rule - - alias Absinthe.Schema - alias Absinthe.Type - - @moduledoc false - @description """ - Only interfaces may be present in an Object's interface list. - - Reference: https://github.com/facebook/graphql/blob/master/spec/Section%203%20--%20Type%20System.md#interfaces - """ - - def explanation(%{data: %{object: obj, interface: interface}}) do - """ - Type "#{obj}" cannot implement non-interface type "#{interface}" - - #{@description} - """ - end - - def check(schema) do - Schema.types(schema) - |> Enum.flat_map(&check_type(schema, &1)) - end - - defp check_type(schema, %{interfaces: ifaces} = type) do - ifaces - |> Enum.map(&Schema.lookup_type(schema, &1)) - |> Enum.reduce([], fn - nil, _ -> - raise "No type found in #{inspect(ifaces)}" - - %Type.Interface{}, acc -> - acc - - iface_type, acc -> - [ - report(type.__reference__.location, %{object: type.name, interface: iface_type.name}) - | acc - ] - end) - end - - defp check_type(_, _) do - [] - end -end diff --git a/lib/absinthe/schema/rule/object_must_implement_interfaces.ex b/lib/absinthe/schema/rule/object_must_implement_interfaces.ex deleted file mode 100644 index c44ca9f635..0000000000 --- a/lib/absinthe/schema/rule/object_must_implement_interfaces.ex +++ /dev/null @@ -1,74 +0,0 @@ -defmodule Absinthe.Schema.Rule.ObjectMustImplementInterfaces do - use 
Absinthe.Schema.Rule - - alias Absinthe.Schema - alias Absinthe.Type - - @moduledoc false - - @description """ - An object type must be a super-set of all interfaces it implements. - - * The object type must include a field of the same name for every field - defined in an interface. - * The object field must be of a type which is equal to or a sub-type of - the interface field (covariant). - * An object field type is a valid sub-type if it is equal to (the same - type as) the interface field type. - * An object field type is a valid sub-type if it is an Object type and the - interface field type is either an Interface type or a Union type and the - object field type is a possible type of the interface field type. - * An object field type is a valid sub-type if it is a List type and the - interface field type is also a List type and the list-item type of the - object field type is a valid sub-type of the list-item type of the - interface field type. - * An object field type is a valid sub-type if it is a Non-Null variant of a - valid sub-type of the interface field type. - * The object field must include an argument of the same name for every - argument defined in the interface field. - * The object field argument must accept the same type (invariant) as the - interface field argument. - * The object field may include additional arguments not defined in the - interface field, but any additional argument must not be required. 
- - Reference: https://github.com/facebook/graphql/blob/master/spec/Section%203%20--%20Type%20System.md#object-type-validation - """ - - def explanation(%{data: %{object: obj, interface: interface}}) do - """ - Type "#{obj}" does not fully implement interface type "#{interface}" - - #{@description} - """ - end - - def check(schema) do - schema - |> Schema.types() - |> Enum.flat_map(&check_type(schema, &1)) - end - - defp check_type(schema, %{interfaces: ifaces} = type) do - ifaces - |> Enum.map(&Schema.lookup_type(schema, &1)) - |> Enum.reduce([], fn - %Type.Interface{} = iface_type, acc -> - if Type.Interface.implements?(iface_type, type, schema) do - acc - else - [ - report(type.__reference__.location, %{object: type.name, interface: iface_type.name}) - | acc - ] - end - - _, _ -> - # Handles by a different rule - [] - end) - end - - defp check_type(_, _) do - [] - end -end diff --git a/lib/absinthe/schema/rule/query_type_must_be_object.ex b/lib/absinthe/schema/rule/query_type_must_be_object.ex deleted file mode 100644 index b5788db06b..0000000000 --- a/lib/absinthe/schema/rule/query_type_must_be_object.ex +++ /dev/null @@ -1,50 +0,0 @@ -defmodule Absinthe.Schema.Rule.QueryTypeMustBeObject do - use Absinthe.Schema.Rule - - alias Absinthe.Schema - require IEx - - @moduledoc false - - @description """ - - #Example - defmodule MyApp.Schema do - use Absinthe.Schema - - query do - #Fields go here - end - end - - -------------------------------------- - From the graqhql schema specification - - A GraphQL schema includes types, indicating where query and mutation - operations start. This provides the initial entry points into the type system. - The query type must always be provided, and is an Object base type. The - mutation type is optional; if it is null, that means the system does not - support mutations. If it is provided, it must be an object base type. 
- - Reference: https://facebook.github.io/graphql/#sec-Initial-types - """ - - def explanation(_value) do - """ - The root query type must be implemented and be a of type Object - - #{@description} - """ - end - - def check(schema) do - case Schema.lookup_type(schema, :query) do - %Absinthe.Type.Object{} -> - [] - - # Real error message - _ -> - [report(%{file: schema, line: 0}, %{})] - end - end -end diff --git a/lib/absinthe/schema/rule/type_names_are_reserved.ex b/lib/absinthe/schema/rule/type_names_are_reserved.ex deleted file mode 100644 index 2980adfeb3..0000000000 --- a/lib/absinthe/schema/rule/type_names_are_reserved.ex +++ /dev/null @@ -1,67 +0,0 @@ -defmodule Absinthe.Schema.Rule.TypeNamesAreReserved do - use Absinthe.Schema.Rule - - alias Absinthe.Schema - - @moduledoc false - - @description """ - Type system artifacts must not begin with two leading underscores. - - > GraphQL type system authors must not define any types, fields, arguments, - > or any other type system artifact with two leading underscores. - - Reference: https://github.com/facebook/graphql/blob/master/spec/Section%204%20--%20Introspection.md#naming-conventions - - """ - - def explanation(%{data: %{artifact: artifact, value: value}}) do - artifact_name = String.capitalize(artifact) - - """ - #{artifact_name} #{inspect(value)} starts with two leading underscores. 
- - #{@description} - """ - end - - def check(schema) do - Enum.flat_map(Schema.types(schema), &check_type(schema, &1)) ++ - Enum.flat_map(Schema.directives(schema), &check_directive(schema, &1)) - end - - defp check_type(schema, %{fields: fields} = type) do - check_named(schema, type, "type", type) ++ - Enum.flat_map(fields |> Map.values(), &check_field(schema, type, &1)) - end - - defp check_type(schema, type) do - check_named(schema, type, "type", type) - end - - defp check_field(schema, type, field) do - check_named(schema, type, "field", field) ++ - Enum.flat_map(field.args |> Map.values(), &check_arg(schema, type, &1)) - end - - defp check_directive(schema, directive) do - check_named(schema, directive, "directive", directive) ++ - Enum.flat_map(directive.args |> Map.values(), &check_arg(schema, directive, &1)) - end - - defp check_arg(schema, type, arg) do - check_named(schema, type, "argument", arg) - end - - defp check_named(_schema, type, kind, %{name: "__" <> _} = entity) do - if Absinthe.Type.built_in?(type) do - [] - else - [report(entity.__reference__.location, %{artifact: "#{kind} name", value: entity.name})] - end - end - - defp check_named(_, _, _, _) do - [] - end -end diff --git a/lib/absinthe/schema/rule/type_names_are_unique.ex b/lib/absinthe/schema/rule/type_names_are_unique.ex deleted file mode 100644 index 917fb08e2b..0000000000 --- a/lib/absinthe/schema/rule/type_names_are_unique.ex +++ /dev/null @@ -1,28 +0,0 @@ -defmodule Absinthe.Schema.Rule.TypeNamesAreUnique do - use Absinthe.Schema.Rule - - @moduledoc false - - @description """ - References to types must be unique. - - > All types within a GraphQL schema must have unique names. No two provided - > types may have the same name. No provided type may have a name which - > conflicts with any built in types (including Scalar and Introspection - > types). 
- - Reference: https://github.com/facebook/graphql/blob/master/spec/Section%203%20--%20Type%20System.md#type-system - """ - - def explanation(%{data: %{artifact: artifact, value: name}}) do - """ - #{artifact} #{inspect(name)} is not unique. - - #{@description} - """ - end - - # This rule is only used for its explanation. Error details are added during - # compilation. - def check(_), do: [] -end diff --git a/lib/absinthe/schema/rule/type_names_are_valid.ex b/lib/absinthe/schema/rule/type_names_are_valid.ex deleted file mode 100644 index fac0bf39da..0000000000 --- a/lib/absinthe/schema/rule/type_names_are_valid.ex +++ /dev/null @@ -1,120 +0,0 @@ -defmodule Absinthe.Schema.Rule.TypeNamesAreValid do - use Absinthe.Schema.Rule - - alias Absinthe.Schema - alias Absinthe.Type - - @moduledoc false - - @description """ - Types must exist if referenced. - """ - - def explanation(%{data: %{identifier: identifier, parent: parent}}) do - artifact_name = String.capitalize(parent.name) - - """ - #{artifact_name} #{inspect(identifier)} is not defined in your schema. - - #{@description} - """ - end - - def check(schema) do - Enum.reduce(schema.__absinthe_types__, [], fn {identifier, name}, acc -> - schema - |> Schema.lookup_type(identifier) - |> case do - nil -> Schema.lookup_type(schema, name) - val -> val - end - |> check_type(acc, schema) - end) - end - - defp check_type(type, acc, schema) do - check_type(type, type, acc, schema) - end - - # I could do this in fewer clauses by simply matching on the inner properties - # that we care about, but by doing it this way you can easily scan the list - # and compare it to the modules in absinthe/type/*.ex to see it's complete. 
- defp check_type(identifier, parent, acc, schema) when is_atom(identifier) do - case schema.__absinthe_type__(identifier) do - nil -> - data = %{parent: parent, identifier: identifier} - [report(parent.__reference__.location, data) | acc] - - _ -> - acc - end - end - - defp check_type(%Type.Argument{} = arg, _, acc, schema) do - check_type(arg.type, arg, acc, schema) - end - - defp check_type(%Type.Directive{} = type, _, acc, schema) do - type.args - |> Map.values() - |> Enum.reduce(acc, &check_type(&1, type, &2, schema)) - end - - defp check_type(%Type.Enum{} = type, _, acc, schema) do - type.values - |> Map.values() - |> Enum.reduce(acc, &check_type(&1, type, &2, schema)) - end - - defp check_type(%Type.Enum.Value{}, _, acc, _schema) do - acc - end - - defp check_type(%Type.Field{} = field, _, acc, schema) do - acc = - field.args - |> Map.values() - |> Enum.reduce(acc, &check_type(&1, field, &2, schema)) - - check_type(field.type, field, acc, schema) - end - - defp check_type(%Type.InputObject{} = object, _, acc, schema) do - object.fields - |> Map.values() - |> Enum.reduce(acc, &check_type(&1, object, &2, schema)) - end - - defp check_type(%Type.Interface{} = interface, _, acc, schema) do - interface.fields - |> Map.values() - |> Enum.reduce(acc, &check_type(&1, interface, &2, schema)) - end - - defp check_type(%Type.List{of_type: inner_type}, parent, acc, schema) do - check_type(inner_type, parent, acc, schema) - end - - defp check_type(%Type.NonNull{of_type: inner_type}, parent, acc, schema) do - check_type(inner_type, parent, acc, schema) - end - - defp check_type(%Type.Object{} = object, _, acc, schema) do - object.fields - |> Map.values() - |> Enum.reduce(acc, &check_type(&1, object, &2, schema)) - end - - defp check_type(%Type.Reference{} = ref, _, acc, schema) do - check_type(ref.identifier, ref, acc, schema) - end - - defp check_type(%Type.Scalar{}, _, acc, _schema) do - acc - end - - defp check_type(%Type.Union{} = union, _, acc, schema) do - 
union.types - |> Enum.reduce(acc, &check_type(&1, union, &2, schema)) - end -end diff --git a/lib/absinthe/schema/verification.ex b/lib/absinthe/schema/verification.ex deleted file mode 100644 index 5d2aff4026..0000000000 --- a/lib/absinthe/schema/verification.ex +++ /dev/null @@ -1,36 +0,0 @@ -defmodule Absinthe.Schema.Verification do - @moduledoc false - - alias __MODULE__ - alias Absinthe.Traversal - alias Absinthe.Schema - - @spec setup(Schema.t()) :: Schema.t() - def setup(%{errors: []} = schema) do - errors = Traversal.reduce(schema, schema, schema.errors, &collect_errors/3) - - %{schema | errors: errors} - |> Verification.Unions.check() - end - - def setup(schema) do - schema - end - - # Don't allow anything named with a __ prefix - @spec collect_errors(Traversal.Node.t(), Traversal.t(), [binary]) :: Traversal.instruction_t() - defp collect_errors(%{__struct__: definition_type, name: "__" <> name}, traversal, errs) do - definition_name = definition_type |> Module.split() |> List.last() - errs = [format_error(:double_underscore, %{definition: definition_name, name: name}) | errs] - {:ok, errs, traversal} - end - - # No-op - defp collect_errors(_node, traversal, errs) do - {:ok, errs, traversal} - end - - defp format_error(:double_underscore, %{definition: definition, name: name}) do - "#{definition} `__#{name}': Must not define any types, fields, arguments, or any other type system artifact with two leading underscores." 
- end -end diff --git a/lib/absinthe/schema/verification/unions.ex b/lib/absinthe/schema/verification/unions.ex deleted file mode 100644 index f3ce430b89..0000000000 --- a/lib/absinthe/schema/verification/unions.ex +++ /dev/null @@ -1,54 +0,0 @@ -defmodule Absinthe.Schema.Verification.Unions do - @moduledoc false - - alias Absinthe.Schema - alias Absinthe.Type - - @spec check(Schema.t()) :: Schema.t() - def check(schema) do - schema - |> unions - |> Enum.reduce(schema, fn %{types: concrete_types} = union, acc -> - check_resolvers(union, concrete_types, acc) - end) - end - - # Find the union types - @spec unions(Schema.t()) :: [Type.Union.t()] - defp unions(schema) do - schema.types.by_identifier - |> Map.values() - |> Enum.filter(fn type -> match?(%Type.Union{}, type) end) - end - - defp check_resolvers(_union, [], schema) do - schema - end - - defp check_resolvers( - %{resolve_type: nil, __reference__: %{identifier: ident}} = union, - [concrete_type_ident | rest], - schema - ) do - case schema.types[concrete_type_ident] do - nil -> - err = "Could not find concrete type :#{concrete_type_ident} for union type :#{ident}" - check_resolvers(union, rest, %{schema | errors: [err | schema.errors]}) - - %{is_type_of: nil} -> - err = - "Union type :#{ident} does not provide a `resolve_type` function and concrete type :#{ - concrete_type_ident - } does not provide an `is_type_of` function. There is no way to resolve this concrete type during execution." 
- - check_resolvers(union, rest, %{schema | errors: [err | schema.errors]}) - - %{is_type_of: _} -> - check_resolvers(union, rest, schema) - end - end - - defp check_resolvers(%{resolve_type: _}, _concrete_types, schema) do - schema - end -end diff --git a/lib/absinthe/subscription.ex b/lib/absinthe/subscription.ex index fb557988cb..6eca6b9532 100644 --- a/lib/absinthe/subscription.ex +++ b/lib/absinthe/subscription.ex @@ -3,7 +3,7 @@ defmodule Absinthe.Subscription do Real time updates via GraphQL For a how to guide on getting started with Absinthe.Subscriptions in your phoenix - project see the Absinthe.Phoenix package. + project see the `Absinthe.Phoenix` package. Define in your schema via `Absinthe.Schema.subscription/2` @@ -28,11 +28,10 @@ defmodule Absinthe.Subscription do - More user control over back pressure / async balance. """ - alias Absinthe.Subscription.Registry - - require Logger alias __MODULE__ + alias Absinthe.Subscription.PipelineSerializer + @doc """ Add Absinthe.Subscription to your process tree. """ @@ -57,7 +56,7 @@ defmodule Absinthe.Subscription do ## Examples Note: As with all subscription examples if you're using Absinthe.Phoenix `pubsub` - will be `MyApp.Web.Endpoint`. + will be `MyAppWeb.Endpoint`. 
``` Absinthe.Subscription.publish(pubsub, user, [new_users: user.account_id]) @@ -87,21 +86,39 @@ defmodule Absinthe.Subscription do end defp get_subscription_fields(resolution_info) do - mut_field_name = resolution_info.definition.schema_node.identifier + mutation_field = resolution_info.definition.schema_node schema = resolution_info.schema - subscription = Absinthe.Schema.lookup_type(schema, :subscription) || %{} + subscription = Absinthe.Schema.lookup_type(schema, :subscription) || %{fields: []} + + subscription_fields = fetch_fields(subscription.fields, mutation_field.triggers) + + for {sub_field_id, sub_field} <- subscription_fields do + triggers = Absinthe.Type.function(sub_field, :triggers) + config = Map.fetch!(triggers, mutation_field.identifier) + {sub_field_id, config} + end + end - for {sub_field_name, sub_field} <- Map.get(subscription, :fields, []), - {mutation_names, config} <- sub_field.triggers, - mut_field_name in mutation_names, - do: {sub_field_name, config} + # TODO: normalize the `.fields` type. 
+ defp fetch_fields(fields, triggers) when is_map(fields) do + Map.take(fields, triggers) end + defp fetch_fields(_, _), do: [] + @doc false def subscribe(pubsub, field_key, doc_id, doc) do registry = pubsub |> registry_name - {:ok, _} = Registry.register(registry, field_key, {doc_id, doc}) + doc_value = { + doc_id, + %{ + initial_phases: PipelineSerializer.pack(doc.initial_phases), + source: doc.source + } + } + + {:ok, _} = Registry.register(registry, field_key, doc_value) {:ok, _} = Registry.register(registry, {self(), doc_id}, field_key) end @@ -123,8 +140,12 @@ defmodule Absinthe.Subscription do pubsub |> registry_name |> Registry.lookup(key) - |> Enum.map(&elem(&1, 1)) - |> Map.new() + |> Enum.map(fn match -> + {_, {doc_id, doc}} = match + doc = Map.update!(doc, :initial_phases, &PipelineSerializer.unpack/1) + + {doc_id, doc} + end) end @doc false diff --git a/lib/absinthe/subscription/local.ex b/lib/absinthe/subscription/local.ex index 31e5fe29e0..041896885c 100644 --- a/lib/absinthe/subscription/local.ex +++ b/lib/absinthe/subscription/local.ex @@ -1,5 +1,7 @@ defmodule Absinthe.Subscription.Local do - @moduledoc false + @moduledoc """ + This module handles broadcasting documents that are local to this node + """ require Logger @@ -8,37 +10,55 @@ defmodule Absinthe.Subscription.Local do # This module handles running and broadcasting documents that are local to this # node. + @doc """ + Publish a mutation to the local node only. 
+ + See also `Absinthe.Subscription.publish/3` + """ + @spec publish_mutation( + Absinthe.Subscription.Pubsub.t(), + term, + [Absinthe.Subscription.subscription_field_spec()] + ) :: :ok def publish_mutation(pubsub, mutation_result, subscribed_fields) do docs_and_topics = for {field, key_strategy} <- subscribed_fields, {topic, doc} <- get_docs(pubsub, field, mutation_result, key_strategy) do - {{topic, {field, key_strategy}}, put_in(doc.execution.root_value, mutation_result)} + {topic, key_strategy, doc} end - docs_by_context = group_by_context(docs_and_topics) + run_docset(pubsub, docs_and_topics, mutation_result) - for docset <- docs_by_context do - run_docset(pubsub, docset) - end + :ok end - defp group_by_context(docs_and_topics) do - docs_and_topics - |> Enum.group_by(fn {_, doc} -> doc.execution.context end) - |> Map.values() - end + alias Absinthe.{Phase, Pipeline} - defp run_docset(pubsub, docs_and_topics) do - {topics, docs} = Enum.unzip(docs_and_topics) - docs = BatchResolver.run(docs, schema: hd(docs).schema, abort_on_error: false) + defp run_docset(pubsub, docs_and_topics, mutation_result) do + for {topic, key_strategy, doc} <- docs_and_topics do + try do + pipeline = + doc.initial_phases + |> Pipeline.replace( + Phase.Telemetry, + {Phase.Telemetry, event: [:subscription, :publish, :start]} + ) + |> Pipeline.without(Phase.Subscription.SubscribeSelf) + |> Pipeline.insert_before( + Phase.Document.Execution.Resolution, + {Phase.Document.OverrideRoot, root_value: mutation_result} + ) + |> Pipeline.upto(Phase.Document.Execution.Resolution) - pipeline = [ - Absinthe.Phase.Document.Result - ] + pipeline = [ + pipeline, + [ + Absinthe.Phase.Document.Result, + {Absinthe.Phase.Telemetry, event: [:subscription, :publish, :stop]} + ] + ] - for {doc, {topic, key_strategy}} <- Enum.zip(docs, topics), doc != :error do - try do - {:ok, %{result: data}, _} = Absinthe.Pipeline.run(doc, pipeline) + {:ok, %{result: data}, _} = Absinthe.Pipeline.run(doc.source, pipeline) 
Logger.debug(""" Absinthe Subscription Publication @@ -50,7 +70,7 @@ defmodule Absinthe.Subscription.Local do :ok = pubsub.publish_subscription(topic, data) rescue e -> - BatchResolver.pipeline_error(e) + BatchResolver.pipeline_error(e, __STACKTRACE__) end end end diff --git a/lib/absinthe/subscription/pipeline_serializer.ex b/lib/absinthe/subscription/pipeline_serializer.ex new file mode 100644 index 0000000000..e93848b227 --- /dev/null +++ b/lib/absinthe/subscription/pipeline_serializer.ex @@ -0,0 +1,63 @@ +defmodule Absinthe.Subscription.PipelineSerializer do + @moduledoc """ + Serializer responsible for packing and unpacking pipeline stored in the Elixir registry. + + The purpose of this logic is saving memory by deduplicating repeating options - (ETS + backed registry stores them flat in the memory). + """ + + alias Absinthe.{Phase, Pipeline} + + @type options_label :: {:options, non_neg_integer()} + + @type packed_phase_config :: Phase.t() | {Phase.t(), options_label()} + + @type options_map :: %{options_label() => Keyword.t()} + + @type packed_pipeline :: {:packed, [packed_phase_config()], options_map()} + + @spec pack(Pipeline.t()) :: packed_pipeline() + def pack(pipeline) do + {packed_pipeline, options_reverse_map} = + pipeline + |> List.flatten() + |> Enum.map_reduce(%{}, &maybe_pack_phase/2) + + options_map = Map.new(options_reverse_map, fn {options, label} -> {label, options} end) + + {:packed, packed_pipeline, options_map} + end + + @spec unpack(Pipeline.t() | packed_pipeline()) :: Pipeline.t() + def unpack({:packed, pipeline, options_map}) do + Enum.map(pipeline, fn + {phase, {:options, _n} = options_label} -> + {phase, Map.fetch!(options_map, options_label)} + + phase -> + phase + end) + end + + def unpack([_ | _] = pipeline) do + pipeline + end + + defp maybe_pack_phase({phase, options}, options_reverse_map) do + if Map.has_key?(options_reverse_map, options) do + options_label = options_reverse_map[options] + + {{phase, options_label}, 
options_reverse_map} + else + new_index = map_size(options_reverse_map) + options_label = {:options, new_index} + options_reverse_map = Map.put(options_reverse_map, options, options_label) + + {{phase, options_label}, options_reverse_map} + end + end + + defp maybe_pack_phase(phase, options_reverse_map) do + {phase, options_reverse_map} + end +end diff --git a/lib/absinthe/subscription/proxy.ex b/lib/absinthe/subscription/proxy.ex index 242c21f1ec..4c16cb77b3 100644 --- a/lib/absinthe/subscription/proxy.ex +++ b/lib/absinthe/subscription/proxy.ex @@ -4,23 +4,38 @@ defmodule Absinthe.Subscription.Proxy do use GenServer defstruct [ - :pubsub + :pubsub, + :node, + :task_super ] + def child_spec([_, _, shard] = args) do + %{ + id: {__MODULE__, shard}, + start: {__MODULE__, :start_link, [args]} + } + end + alias Absinthe.Subscription - def start_link(pubsub, shard) do - GenServer.start_link(__MODULE__, {pubsub, shard}) + @gc_interval 5_000 + + def start_link(args) do + GenServer.start_link(__MODULE__, args) end def topic(shard), do: "__absinthe__:proxy:#{shard}" - def init({pubsub, shard}) do + def init([task_super, pubsub, shard]) do + node_name = pubsub.node_name() :ok = pubsub.subscribe(topic(shard)) - {:ok, %__MODULE__{pubsub: pubsub}} + Process.send_after(self(), :gc, @gc_interval) + {:ok, %__MODULE__{pubsub: pubsub, node: node_name, task_super: task_super}} end - def handle_info(%{node: src_node}, state) when src_node == node() do + def handle_info(:gc, state) do + :erlang.garbage_collect() + Process.send_after(self(), :gc, @gc_interval) {:noreply, state} end @@ -28,15 +43,13 @@ defmodule Absinthe.Subscription.Proxy do # There's no meaningful form of backpressure to have here, and we can't # bottleneck execution inside each proxy process - # TODO: This should maybe be supervised? I feel like the linking here isn't - # what it should be. 
- Task.start_link(fn -> - Subscription.Local.publish_mutation( + unless payload.node == state.pubsub.node_name() do + Task.Supervisor.start_child(state.task_super, Subscription.Local, :publish_mutation, [ state.pubsub, payload.mutation_result, payload.subscribed_fields - ) - end) + ]) + end {:noreply, state} end diff --git a/lib/absinthe/subscription/proxy_supervisor.ex b/lib/absinthe/subscription/proxy_supervisor.ex index 7f47f0a183..62821d2b81 100644 --- a/lib/absinthe/subscription/proxy_supervisor.ex +++ b/lib/absinthe/subscription/proxy_supervisor.ex @@ -3,17 +3,20 @@ defmodule Absinthe.Subscription.ProxySupervisor do use Supervisor - def start_link(pubsub, registry, pool_size) do + def start_link([pubsub, registry, pool_size]) do Supervisor.start_link(__MODULE__, {pubsub, registry, pool_size}) end - def init({pubsub, _registry, pool_size}) do + def init({pubsub, registry, pool_size}) do + task_super_name = Module.concat(registry, TaskSuper) + task_super = {Task.Supervisor, name: task_super_name} + # Shard numbers are generated by phash2 which is 0-based: - children = + proxies = for shard <- 0..(pool_size - 1) do - worker(Absinthe.Subscription.Proxy, [pubsub, shard], id: shard) + {Absinthe.Subscription.Proxy, [task_super_name, pubsub, shard]} end - supervise(children, strategy: :one_for_one) + Supervisor.init([task_super | proxies], strategy: :one_for_one) end end diff --git a/lib/absinthe/subscription/pubsub.ex b/lib/absinthe/subscription/pubsub.ex index ef1d8fd5ea..a701c68a3d 100644 --- a/lib/absinthe/subscription/pubsub.ex +++ b/lib/absinthe/subscription/pubsub.ex @@ -22,11 +22,22 @@ defmodule Absinthe.Subscription.Pubsub do @callback subscribe(topic :: binary) :: term @doc """ - An Absinthe.Subscription.Pubsub system may extend across multiple nodes in a - cluster. Processes need only subscribe to the pubsub process that + An Absinthe.Subscription.Pubsub system may extend across multiple nodes + connected by some mechanism. 
Regardless of this mechanism, all nodes should + have unique names. + + Absinthe invokes `node_name` function to get current node's name. If you + are running inside erlang cluster, you can use `Kernel.node/0` as a node + name. + """ + @callback node_name() :: binary + + @doc """ + An Absinthe.Subscription.Pubsub system may extend across multiple nodes. + Processes need only subscribe to the pubsub process that is running on their own node. - However, mutations can happen on any node in the custer and must to be + However, mutations can happen on any node in the cluster and must to be broadcast to other nodes so that they can also reevaluate their GraphQL subscriptions and notify subscribers on that node. @@ -38,7 +49,7 @@ defmodule Absinthe.Subscription.Pubsub do The message broadcast should be a map that contains, at least %{ - node: node_id, # probably from Kernel.node/0 + node: node_name, # should be equal to `node_name/0` mutation_result: โ€ฆ, # from arguments subscribed_fields: โ€ฆ # from arguments diff --git a/lib/absinthe/subscription/registry.ex b/lib/absinthe/subscription/registry.ex deleted file mode 100644 index 2d56d07aee..0000000000 --- a/lib/absinthe/subscription/registry.ex +++ /dev/null @@ -1,1042 +0,0 @@ -defmodule Absinthe.Subscription.Registry do - # This is a clone of the Elixir 1.5 Registry, included here for use to support - # Elixir 1.4 Registry. The `unregister_match/3` function doesn't exist until - # Elixir 1.5, but is essential for unsubscribing. 
- - alias __MODULE__ - - @moduledoc false - - @keys [:unique, :duplicate] - @all_info -1 - @key_info -2 - - @typedoc "The registry identifier" - @type registry :: atom - - @typedoc "The type of the registry" - @type keys :: :unique | :duplicate - - @typedoc "The type of keys allowed on registration" - @type key :: term - - @typedoc "The type of values allowed on registration" - @type value :: term - - @typedoc "The type of registry metadata keys" - @type meta_key :: atom | tuple - - @typedoc "The type of registry metadata values" - @type meta_value :: term - - ## Via callbacks - - @doc false - def whereis_name({registry, key}) do - case key_info!(registry) do - {:unique, partitions, key_ets} -> - key_ets = key_ets || key_ets!(registry, key, partitions) - - case safe_lookup_second(key_ets, key) do - {pid, _} -> - if Process.alive?(pid), do: pid, else: :undefined - - _ -> - :undefined - end - - {kind, _, _} -> - raise ArgumentError, ":via is not supported for #{kind} registries" - end - end - - @doc false - def register_name({registry, key}, pid) when pid == self() do - case register(registry, key, nil) do - {:ok, _} -> :yes - {:error, _} -> :no - end - end - - @doc false - def send({registry, key}, msg) do - case lookup(registry, key) do - [{pid, _}] -> Kernel.send(pid, msg) - [] -> :erlang.error(:badarg, [{registry, key}, msg]) - end - end - - @doc false - def unregister_name({registry, key}) do - unregister(registry, key) - end - - ## Registry API - - @doc """ - Starts the registry as a supervisor process. - - Manually it can be started as: - - Registry.start_link(keys: :unique, name: MyApp.Registry) - - In your supervisor tree, you would write: - - Supervisor.start_link([ - {Registry, keys: :unique, name: MyApp.Registry} - ]) - - For intensive workloads, the registry may also be partitioned (by specifying - the `:partitions` option). 
If partitioning is required then a good default is to - set the number of partitions to the number of schedulers available: - - Registry.start_link(keys: :unique, name: MyApp.Registry, - partitions: System.schedulers_online()) - - or: - - Supervisor.start_link([ - {Registry, keys: :unique, name: MyApp.Registry, - partitions: System.schedulers_online()} - ]) - - ## Options - - The registry requires the following keys: - - * `:keys` - choose if keys are `:unique` or `:duplicate` - * `:name` - the name of the registry and its tables - - The following keys are optional: - - * `:partitions` - the number of partitions in the registry. Defaults to `1`. - * `:listeners` - a list of named processes which are notified of `:register` - and `:unregister` events. The registered process must be monitored by the - listener if the listener wants to be notified if the registered process - crashes. - * `:meta` - a keyword list of metadata to be attached to the registry. - - """ - @spec start_link( - keys: keys, - name: registry, - partitions: pos_integer, - listeners: [atom], - meta: [{meta_key, meta_value}] - ) :: {:ok, pid} | {:error, term} - def start_link(options) do - keys = Keyword.get(options, :keys) - - unless keys in @keys do - raise ArgumentError, - "expected :keys to be given and be one of :unique or :duplicate, got: #{inspect(keys)}" - end - - name = Keyword.get(options, :name) - - unless is_atom(name) do - raise ArgumentError, "expected :name to be given and to be an atom, got: #{inspect(name)}" - end - - meta = Keyword.get(options, :meta, []) - - unless Keyword.keyword?(meta) do - raise ArgumentError, "expected :meta to be a keyword list, got: #{inspect(meta)}" - end - - partitions = Keyword.get(options, :partitions, 1) - - unless is_integer(partitions) and partitions >= 1 do - raise ArgumentError, - "expected :partitions to be a positive integer, got: #{inspect(partitions)}" - end - - listeners = Keyword.get(options, :listeners, []) - - unless is_list(listeners) and 
Enum.all?(listeners, &is_atom/1) do - raise ArgumentError, - "expected :listeners to be a list of named processes, got: #{inspect(listeners)}" - end - - # The @info format must be kept in sync with Registry.Partition optimization. - entries = [ - {@all_info, {keys, partitions, nil, nil, listeners}}, - {@key_info, {keys, partitions, nil}} | meta - ] - - Registry.Supervisor.start_link(keys, name, partitions, listeners, entries) - end - - @doc """ - Starts the registry as a supervisor process. - - Similar to `start_link/1` except the required options, - `keys` and `name` are given as arguments. - """ - @spec start_link(keys, registry, keyword) :: {:ok, pid} | {:error, term} - def start_link(keys, name, options \\ []) when keys in @keys and is_atom(name) do - start_link([keys: keys, name: name] ++ options) - end - - @doc false - def child_spec(opts) do - %{ - id: Registry, - start: {Registry, :start_link, [opts]}, - type: :supervisor - } - end - - @doc """ - Updates the value for `key` for the current process in the unique `registry`. - - Returns a `{new_value, old_value}` tuple or `:error` if there - is no such key assigned to the current process. - - If a non-unique registry is given, an error is raised. 
- - ## Examples - - iex> Registry.start_link(:unique, Registry.UpdateTest) - iex> {:ok, _} = Registry.register(Registry.UpdateTest, "hello", 1) - iex> Registry.lookup(Registry.UpdateTest, "hello") - [{self(), 1}] - iex> Registry.update_value(Registry.UpdateTest, "hello", & &1 + 1) - {2, 1} - iex> Registry.lookup(Registry.UpdateTest, "hello") - [{self(), 2}] - - """ - @spec update_value(registry, key, (value -> value)) :: - {new_value :: term, old_value :: term} | :error - def update_value(registry, key, callback) when is_atom(registry) and is_function(callback, 1) do - case key_info!(registry) do - {:unique, partitions, key_ets} -> - key_ets = key_ets || key_ets!(registry, key, partitions) - - try do - :ets.lookup_element(key_ets, key, 2) - catch - :error, :badarg -> :error - else - {pid, old_value} when pid == self() -> - new_value = callback.(old_value) - :ets.insert(key_ets, {key, {pid, new_value}}) - {new_value, old_value} - - {_, _} -> - :error - end - - {kind, _, _} -> - raise ArgumentError, "Registry.update_value/3 is not supported for #{kind} registries" - end - end - - @doc """ - Invokes the callback with all entries under `key` in each partition - for the given `registry`. - - The list of `entries` is a non-empty list of two-element tuples where - the first element is the pid and the second element is the value - associated to the pid. If there are no entries for the given key, - the callback is never invoked. - - If the registry is partitioned, the callback is invoked multiple times - per partition. If the registry is partitioned and `parallel: true` is - given as an option, the dispatching happens in parallel. In both cases, - the callback is only invoked if there are entries for that partition. - - See the module documentation for examples of using the `dispatch/3` - function for building custom dispatching or a pubsub system. 
- """ - @spec dispatch(registry, key, (entries :: [{pid, value}] -> term), keyword) :: :ok - def dispatch(registry, key, mfa_or_fun, opts \\ []) - when is_atom(registry) and is_function(mfa_or_fun, 1) - when is_atom(registry) and tuple_size(mfa_or_fun) == 3 do - case key_info!(registry) do - {:unique, partitions, key_ets} -> - (key_ets || key_ets!(registry, key, partitions)) - |> safe_lookup_second(key) - |> List.wrap() - |> apply_non_empty_to_mfa_or_fun(mfa_or_fun) - - {:duplicate, 1, key_ets} -> - key_ets - |> safe_lookup_second(key) - |> apply_non_empty_to_mfa_or_fun(mfa_or_fun) - - {:duplicate, partitions, _} -> - if Keyword.get(opts, :parallel, false) do - registry - |> dispatch_parallel(key, mfa_or_fun, partitions) - |> Enum.each(&Task.await(&1, :infinity)) - else - dispatch_serial(registry, key, mfa_or_fun, partitions) - end - end - - :ok - end - - defp dispatch_serial(_registry, _key, _mfa_or_fun, 0) do - :ok - end - - defp dispatch_serial(registry, key, mfa_or_fun, partition) do - partition = partition - 1 - - registry - |> key_ets!(partition) - |> safe_lookup_second(key) - |> apply_non_empty_to_mfa_or_fun(mfa_or_fun) - - dispatch_serial(registry, key, mfa_or_fun, partition) - end - - defp dispatch_parallel(_registry, _key, _mfa_or_fun, 0) do - [] - end - - defp dispatch_parallel(registry, key, mfa_or_fun, partition) do - partition = partition - 1 - parent = self() - - task = - Task.async(fn -> - registry - |> key_ets!(partition) - |> safe_lookup_second(key) - |> apply_non_empty_to_mfa_or_fun(mfa_or_fun) - - Process.unlink(parent) - :ok - end) - - [task | dispatch_parallel(registry, key, mfa_or_fun, partition)] - end - - defp apply_non_empty_to_mfa_or_fun([], _mfa_or_fun) do - :ok - end - - defp apply_non_empty_to_mfa_or_fun(entries, {module, function, args}) do - apply(module, function, [entries | args]) - end - - defp apply_non_empty_to_mfa_or_fun(entries, fun) do - fun.(entries) - end - - @doc """ - Finds the `{pid, value}` pair for the given `key` in 
`registry` in no particular order. - - An empty list if there is no match. - - For unique registries, a single partition lookup is necessary. For - duplicate registries, all partitions must be looked up. - - ## Examples - - In the example below we register the current process and look it up - both from itself and other processes: - - iex> Registry.start_link(:unique, Registry.UniqueLookupTest) - iex> Registry.lookup(Registry.UniqueLookupTest, "hello") - [] - iex> {:ok, _} = Registry.register(Registry.UniqueLookupTest, "hello", :world) - iex> Registry.lookup(Registry.UniqueLookupTest, "hello") - [{self(), :world}] - iex> Task.async(fn -> Registry.lookup(Registry.UniqueLookupTest, "hello") end) |> Task.await - [{self(), :world}] - - The same applies to duplicate registries: - - iex> Registry.start_link(:duplicate, Registry.DuplicateLookupTest) - iex> Registry.lookup(Registry.DuplicateLookupTest, "hello") - [] - iex> {:ok, _} = Registry.register(Registry.DuplicateLookupTest, "hello", :world) - iex> Registry.lookup(Registry.DuplicateLookupTest, "hello") - [{self(), :world}] - iex> {:ok, _} = Registry.register(Registry.DuplicateLookupTest, "hello", :another) - iex> Enum.sort(Registry.lookup(Registry.DuplicateLookupTest, "hello")) - [{self(), :another}, {self(), :world}] - - """ - @spec lookup(registry, key) :: [{pid, value}] - def lookup(registry, key) when is_atom(registry) do - case key_info!(registry) do - {:unique, partitions, key_ets} -> - key_ets = key_ets || key_ets!(registry, key, partitions) - - case safe_lookup_second(key_ets, key) do - {_, _} = pair -> - [pair] - - _ -> - [] - end - - {:duplicate, 1, key_ets} -> - safe_lookup_second(key_ets, key) - - {:duplicate, partitions, _key_ets} -> - for partition <- 0..(partitions - 1), - pair <- safe_lookup_second(key_ets!(registry, partition), key), - do: pair - end - end - - @doc """ - Returns `{pid, value}` pairs under the given `key` in `registry` that match `pattern`. 
- - Pattern must be an atom or a tuple that will match the structure of the - value stored in the registry. The atom `:_` can be used to ignore a given - value or tuple element, while :"$1" can be used to temporarily assign part - of pattern to a variable for a subsequent comparison. - - It is possible to pass list of guard conditions for more precise matching. - Each guard is a tuple, which describes check that should be passed by assigned part of pattern. - For example :"$1" > 1 guard condition would be expressed as {:>, :"$1", 1} tuple. - Please note that guard conditions will work only for assigned variables like :"$1", :"$2", etc. - Avoid usage of special match variables :"$_" and :"$$", because it might not work as expected. - - An empty list will be returned if there is no match. - - For unique registries, a single partition lookup is necessary. For - duplicate registries, all partitions must be looked up. - - ## Examples - - In the example below we register the current process under the same - key in a duplicate registry but with different values: - - iex> Registry.start_link(:duplicate, Registry.MatchTest) - iex> {:ok, _} = Registry.register(Registry.MatchTest, "hello", {1, :atom, 1}) - iex> {:ok, _} = Registry.register(Registry.MatchTest, "hello", {2, :atom, 2}) - iex> Registry.match(Registry.MatchTest, "hello", {1, :_, :_}) - [{self(), {1, :atom, 1}}] - iex> Registry.match(Registry.MatchTest, "hello", {2, :_, :_}) - [{self(), {2, :atom, 2}}] - iex> Registry.match(Registry.MatchTest, "hello", {:_, :atom, :_}) |> Enum.sort() - [{self(), {1, :atom, 1}}, {self(), {2, :atom, 2}}] - iex> Registry.match(Registry.MatchTest, "hello", {:"$1", :_, :"$1"}) |> Enum.sort() - [{self(), {1, :atom, 1}}, {self(), {2, :atom, 2}}] - iex> Registry.match(Registry.MatchTest, "hello", {:_, :_, :"$1"}, [{:>, :"$1", 1}]) - [{self(), {2, :atom, 2}}] - iex> Registry.match(Registry.MatchTest, "hello", {:_, :"$1", :_}, [{:is_atom, :"$1"}]) |> Enum.sort() - [{self(), {1, :atom, 1}}, 
{self(), {2, :atom, 2}}] - - """ - @spec match(registry, key, match_pattern :: atom() | tuple(), guards :: list()) :: [{pid, term}] - def match(registry, key, pattern, guards \\ []) when is_atom(registry) and is_list(guards) do - guards = [{:"=:=", {:element, 1, :"$_"}, {:const, key}} | guards] - spec = [{{:_, {:_, pattern}}, guards, [{:element, 2, :"$_"}]}] - - case key_info!(registry) do - {:unique, partitions, key_ets} -> - key_ets = key_ets || key_ets!(registry, key, partitions) - :ets.select(key_ets, spec) - - {:duplicate, 1, key_ets} -> - :ets.select(key_ets, spec) - - {:duplicate, partitions, _key_ets} -> - for partition <- 0..(partitions - 1), - pair <- :ets.select(key_ets!(registry, partition), spec), - do: pair - end - end - - @doc """ - Returns the known keys for the given `pid` in `registry` in no particular order. - - If the registry is unique, the keys are unique. Otherwise - they may contain duplicates if the process was registered - under the same key multiple times. The list will be empty - if the process is dead or it has no keys in this registry. 
- - ## Examples - - Registering under a unique registry does not allow multiple entries: - - iex> Registry.start_link(:unique, Registry.UniqueKeysTest) - iex> Registry.keys(Registry.UniqueKeysTest, self()) - [] - iex> {:ok, _} = Registry.register(Registry.UniqueKeysTest, "hello", :world) - iex> Registry.register(Registry.UniqueKeysTest, "hello", :later) # registry is :unique - {:error, {:already_registered, self()}} - iex> Registry.keys(Registry.UniqueKeysTest, self()) - ["hello"] - - Such is possible for duplicate registries though: - - iex> Registry.start_link(:duplicate, Registry.DuplicateKeysTest) - iex> Registry.keys(Registry.DuplicateKeysTest, self()) - [] - iex> {:ok, _} = Registry.register(Registry.DuplicateKeysTest, "hello", :world) - iex> {:ok, _} = Registry.register(Registry.DuplicateKeysTest, "hello", :world) - iex> Registry.keys(Registry.DuplicateKeysTest, self()) - ["hello", "hello"] - - """ - @spec keys(registry, pid) :: [key] - def keys(registry, pid) when is_atom(registry) and is_pid(pid) do - {kind, partitions, _, pid_ets, _} = info!(registry) - {_, pid_ets} = pid_ets || pid_ets!(registry, pid, partitions) - - keys = - try do - spec = [{{pid, :"$1", :"$2"}, [], [{{:"$1", :"$2"}}]}] - :ets.select(pid_ets, spec) - catch - :error, :badarg -> [] - end - - # Handle the possibility of fake keys - keys = gather_keys(keys, [], false) - - cond do - kind == :unique -> Enum.uniq(keys) - true -> keys - end - end - - defp gather_keys([{key, {_, remaining}} | rest], acc, _fake) do - gather_keys(rest, [key | acc], {key, remaining}) - end - - defp gather_keys([{key, _} | rest], acc, fake) do - gather_keys(rest, [key | acc], fake) - end - - defp gather_keys([], acc, {key, remaining}) do - List.duplicate(key, remaining) ++ Enum.reject(acc, &(&1 === key)) - end - - defp gather_keys([], acc, false) do - acc - end - - @doc """ - Unregisters all entries for the given `key` associated to the current - process in `registry`. 
- - Always returns `:ok` and automatically unlinks the current process from - the owner if there are no more keys associated to the current process. See - also `register/3` to read more about the "owner". - - ## Examples - - For unique registries: - - iex> Registry.start_link(:unique, Registry.UniqueUnregisterTest) - iex> Registry.register(Registry.UniqueUnregisterTest, "hello", :world) - iex> Registry.keys(Registry.UniqueUnregisterTest, self()) - ["hello"] - iex> Registry.unregister(Registry.UniqueUnregisterTest, "hello") - :ok - iex> Registry.keys(Registry.UniqueUnregisterTest, self()) - [] - - For duplicate registries: - - iex> Registry.start_link(:duplicate, Registry.DuplicateUnregisterTest) - iex> Registry.register(Registry.DuplicateUnregisterTest, "hello", :world) - iex> Registry.register(Registry.DuplicateUnregisterTest, "hello", :world) - iex> Registry.keys(Registry.DuplicateUnregisterTest, self()) - ["hello", "hello"] - iex> Registry.unregister(Registry.DuplicateUnregisterTest, "hello") - :ok - iex> Registry.keys(Registry.DuplicateUnregisterTest, self()) - [] - - """ - @spec unregister(registry, key) :: :ok - def unregister(registry, key) when is_atom(registry) do - self = self() - {kind, partitions, key_ets, pid_ets, listeners} = info!(registry) - {key_partition, pid_partition} = partitions(kind, key, self, partitions) - key_ets = key_ets || key_ets!(registry, key_partition) - {pid_server, pid_ets} = pid_ets || pid_ets!(registry, pid_partition) - - # Remove first from the key_ets because in case of crashes - # the pid_ets will still be able to clean up. The last step is - # to clean if we have no more entries. - true = :ets.match_delete(key_ets, {key, {self, :_}}) - true = :ets.delete_object(pid_ets, {self, key, key_ets}) - - unlink_if_unregistered(pid_server, pid_ets, self) - - for listener <- listeners do - Kernel.send(listener, {:unregister, registry, key, self}) - end - - :ok - end - - @doc """ - Unregister entries for a given key matching a pattern. 
- - ## Examples - - For unique registries it can be used to conditionally unregister a key on - the basis of whether or not it matches a particular value. - - iex> Registry.start_link(:unique, Registry.UniqueUnregisterMatchTest) - iex> Registry.register(Registry.UniqueUnregisterMatchTest, "hello", :world) - iex> Registry.keys(Registry.UniqueUnregisterMatchTest, self()) - ["hello"] - iex> Registry.unregister_match(Registry.UniqueUnregisterMatchTest, "hello", :foo) - :ok - iex> Registry.keys(Registry.UniqueUnregisterMatchTest, self()) - ["hello"] - iex> Registry.unregister_match(Registry.UniqueUnregisterMatchTest, "hello", :world) - :ok - iex> Registry.keys(Registry.UniqueUnregisterMatchTest, self()) - [] - - For duplicate registries: - - iex> Registry.start_link(:duplicate, Registry.DuplicateUnregisterMatchTest) - iex> Registry.register(Registry.DuplicateUnregisterMatchTest, "hello", :world_a) - iex> Registry.register(Registry.DuplicateUnregisterMatchTest, "hello", :world_b) - iex> Registry.register(Registry.DuplicateUnregisterMatchTest, "hello", :world_c) - iex> Registry.keys(Registry.DuplicateUnregisterMatchTest, self()) - ["hello", "hello", "hello"] - iex> Registry.unregister_match(Registry.DuplicateUnregisterMatchTest, "hello", :world_a) - :ok - iex> Registry.keys(Registry.DuplicateUnregisterMatchTest, self()) - ["hello", "hello"] - iex> Registry.lookup(Registry.DuplicateUnregisterMatchTest, "hello") - [{self(), :world_b}, {self(), :world_c}] - """ - def unregister_match(registry, key, pattern, guards \\ []) when is_list(guards) do - self = self() - - {kind, partitions, key_ets, pid_ets, listeners} = info!(registry) - {key_partition, pid_partition} = partitions(kind, key, self, partitions) - key_ets = key_ets || key_ets!(registry, key_partition) - {pid_server, pid_ets} = pid_ets || pid_ets!(registry, pid_partition) - - # Remove first from the key_ets because in case of crashes - # the pid_ets will still be able to clean up. 
The last step is - # to clean if we have no more entries. - - # Here we want to count all entries for this pid under this key, regardless - # of pattern. - underscore_guard = {:"=:=", {:element, 1, :"$_"}, {:const, key}} - total_spec = [{{:_, {self, :_}}, [underscore_guard], [true]}] - total = :ets.select_count(key_ets, total_spec) - - # We only want to delete things that match the pattern - delete_spec = [{{:_, {self, pattern}}, [underscore_guard | guards], [true]}] - - case :ets.select_delete(key_ets, delete_spec) do - # We deleted everything, we can just delete the object - ^total -> - true = :ets.delete_object(pid_ets, {self, key, key_ets}) - - unlink_if_unregistered(pid_server, pid_ets, self) - - for listener <- listeners do - Kernel.send(listener, {:unregister, registry, key, self}) - end - - 0 -> - :ok - - deleted -> - # There are still entries remaining for this pid. delete_object/2 with - # duplicate_bag tables will remove every entry, but we only want to - # remove those we have deleted. The solution is to introduce a temp_entry - # that indicates how many keys WILL be remaining after the delete operation. - remaining = total - deleted - temp_entry = {self, key, {key_ets, remaining}} - true = :ets.insert(pid_ets, temp_entry) - true = :ets.delete_object(pid_ets, {self, key, key_ets}) - real_keys = List.duplicate({self, key, key_ets}, remaining) - true = :ets.insert(pid_ets, real_keys) - # We've recreated the real remaining key entries, so we can now delete - # our temporary entry. - true = :ets.delete_object(pid_ets, temp_entry) - end - - :ok - end - - @doc """ - Registers the current process under the given `key` in `registry`. - - A value to be associated with this registration must also be given. - This value will be retrieved whenever dispatching or doing a key - lookup. - - This function returns `{:ok, owner}` or `{:error, reason}`. - The `owner` is the pid in the registry partition responsible for - the pid. 
The owner is automatically linked to the caller. - - If the registry has unique keys, it will return `{:ok, owner}` unless - the key is already associated to a pid, in which case it returns - `{:error, {:already_registered, pid}}`. - - If the registry has duplicate keys, multiple registrations from the - current process under the same key are allowed. - - ## Examples - - Registering under a unique registry does not allow multiple entries: - - iex> Registry.start_link(:unique, Registry.UniqueRegisterTest) - iex> {:ok, _} = Registry.register(Registry.UniqueRegisterTest, "hello", :world) - iex> Registry.register(Registry.UniqueRegisterTest, "hello", :later) - {:error, {:already_registered, self()}} - iex> Registry.keys(Registry.UniqueRegisterTest, self()) - ["hello"] - - Such is possible for duplicate registries though: - - iex> Registry.start_link(:duplicate, Registry.DuplicateRegisterTest) - iex> {:ok, _} = Registry.register(Registry.DuplicateRegisterTest, "hello", :world) - iex> {:ok, _} = Registry.register(Registry.DuplicateRegisterTest, "hello", :world) - iex> Registry.keys(Registry.DuplicateRegisterTest, self()) - ["hello", "hello"] - - """ - @spec register(registry, key, value) :: {:ok, pid} | {:error, {:already_registered, pid}} - def register(registry, key, value) when is_atom(registry) do - self = self() - {kind, partitions, key_ets, pid_ets, listeners} = info!(registry) - {key_partition, pid_partition} = partitions(kind, key, self, partitions) - key_ets = key_ets || key_ets!(registry, key_partition) - {pid_server, pid_ets} = pid_ets || pid_ets!(registry, pid_partition) - - # Notice we write first to the pid ets table because it will - # always be able to do the clean up. If we register first to the - # key one and the process crashes, the key will stay there forever. 
- Process.link(pid_server) - true = :ets.insert(pid_ets, {self, key, key_ets}) - - case register_key(kind, pid_server, key_ets, key, {key, {self, value}}) do - {:ok, _} = ok -> - for listener <- listeners do - Kernel.send(listener, {:register, registry, key, self, value}) - end - - ok - - {:error, {:already_registered, ^self}} = error -> - error - - {:error, _} = error -> - true = :ets.delete_object(pid_ets, {self, key, key_ets}) - unlink_if_unregistered(pid_server, pid_ets, self) - error - end - end - - defp register_key(:duplicate, pid_server, key_ets, _key, entry) do - true = :ets.insert(key_ets, entry) - {:ok, pid_server} - end - - defp register_key(:unique, pid_server, key_ets, key, entry) do - if :ets.insert_new(key_ets, entry) do - {:ok, pid_server} - else - # Notice we have to call register_key recursively - # because we are always at odds of a race condition. - case :ets.lookup(key_ets, key) do - [{^key, {pid, _}} = current] -> - if Process.alive?(pid) do - {:error, {:already_registered, pid}} - else - :ets.delete_object(key_ets, current) - register_key(:unique, pid_server, key_ets, key, entry) - end - - [] -> - register_key(:unique, pid_server, key_ets, key, entry) - end - end - end - - @doc """ - Reads registry metadata given on `start_link/3`. - - Atoms and tuples are allowed as keys. - - ## Examples - - iex> Registry.start_link(:unique, Registry.MetaTest, meta: [custom_key: "custom_value"]) - iex> Registry.meta(Registry.MetaTest, :custom_key) - {:ok, "custom_value"} - iex> Registry.meta(Registry.MetaTest, :unknown_key) - :error - - """ - @spec meta(registry, meta_key) :: {:ok, meta_value} | :error - def meta(registry, key) when is_atom(registry) and (is_atom(key) or is_tuple(key)) do - try do - :ets.lookup(registry, key) - catch - :error, :badarg -> - raise ArgumentError, "unknown registry: #{inspect(registry)}" - else - [{^key, value}] -> {:ok, value} - _ -> :error - end - end - - @doc """ - Stores registry metadata. 
- - Atoms and tuples are allowed as keys. - - ## Examples - - iex> Registry.start_link(:unique, Registry.PutMetaTest) - iex> Registry.put_meta(Registry.PutMetaTest, :custom_key, "custom_value") - :ok - iex> Registry.meta(Registry.PutMetaTest, :custom_key) - {:ok, "custom_value"} - iex> Registry.put_meta(Registry.PutMetaTest, {:tuple, :key}, "tuple_value") - :ok - iex> Registry.meta(Registry.PutMetaTest, {:tuple, :key}) - {:ok, "tuple_value"} - - """ - @spec put_meta(registry, meta_key, meta_value) :: :ok - def put_meta(registry, key, value) when is_atom(registry) and (is_atom(key) or is_tuple(key)) do - try do - :ets.insert(registry, {key, value}) - :ok - catch - :error, :badarg -> - raise ArgumentError, "unknown registry: #{inspect(registry)}" - end - end - - ## Helpers - - @compile {:inline, hash: 2} - - defp hash(term, limit) do - :erlang.phash2(term, limit) - end - - defp info!(registry) do - try do - :ets.lookup_element(registry, @all_info, 2) - catch - :error, :badarg -> - raise ArgumentError, "unknown registry: #{inspect(registry)}" - end - end - - defp key_info!(registry) do - try do - :ets.lookup_element(registry, @key_info, 2) - catch - :error, :badarg -> - raise ArgumentError, "unknown registry: #{inspect(registry)}" - end - end - - defp key_ets!(registry, key, partitions) do - :ets.lookup_element(registry, hash(key, partitions), 2) - end - - defp key_ets!(registry, partition) do - :ets.lookup_element(registry, partition, 2) - end - - defp pid_ets!(registry, key, partitions) do - :ets.lookup_element(registry, hash(key, partitions), 3) - end - - defp pid_ets!(registry, partition) do - :ets.lookup_element(registry, partition, 3) - end - - defp safe_lookup_second(ets, key) do - try do - :ets.lookup_element(ets, key, 2) - catch - :error, :badarg -> [] - end - end - - defp partitions(:unique, key, pid, partitions) do - {hash(key, partitions), hash(pid, partitions)} - end - - defp partitions(:duplicate, _key, pid, partitions) do - partition = hash(pid, 
partitions) - {partition, partition} - end - - defp unlink_if_unregistered(pid_server, pid_ets, self) do - unless :ets.member(pid_ets, self) do - Process.unlink(pid_server) - end - end -end - -defmodule Absinthe.Subscription.Registry.Supervisor do - @moduledoc false - use Supervisor - - alias Absinthe.Subscription.Registry - - def start_link(kind, registry, partitions, listeners, entries) do - Supervisor.start_link( - __MODULE__, - {kind, registry, partitions, listeners, entries}, - name: registry - ) - end - - def init({kind, registry, partitions, listeners, entries}) do - ^registry = :ets.new(registry, [:set, :public, :named_table, read_concurrency: true]) - true = :ets.insert(registry, entries) - - children = - for i <- 0..(partitions - 1) do - key_partition = Registry.Partition.key_name(registry, i) - pid_partition = Registry.Partition.pid_name(registry, i) - arg = {kind, registry, i, partitions, key_partition, pid_partition, listeners} - worker(Registry.Partition, [pid_partition, arg], id: pid_partition) - end - - supervise(children, strategy: strategy_for_kind(kind)) - end - - # Unique registries have their key partition hashed by key. - # This means that, if a pid partition crashes, it may have - # entries from all key partitions, so we need to crash all. - defp strategy_for_kind(:unique), do: :one_for_all - - # Duplicate registries have both key and pid partitions hashed - # by pid. This means that, if a pid partition crashes, all of - # its associated entries are in its sibling table, so we crash one. - defp strategy_for_kind(:duplicate), do: :one_for_one -end - -defmodule Absinthe.Subscription.Registry.Partition do - @moduledoc false - - # This process owns the equivalent key and pid ets tables - # and is responsible for monitoring processes that map to - # its own pid table. - use GenServer - @all_info -1 - @key_info -2 - - @doc """ - Returns the name of key partition table. 
- """ - @spec key_name(atom, non_neg_integer) :: atom - def key_name(registry, partition) do - Module.concat(registry, "KeyPartition" <> Integer.to_string(partition)) - end - - @doc """ - Returns the name of pid partition table. - """ - @spec pid_name(atom, non_neg_integer) :: atom - def pid_name(name, partition) do - Module.concat(name, "PIDPartition" <> Integer.to_string(partition)) - end - - @doc """ - Starts the registry partition. - - The process is only responsible for monitoring, demonitoring - and cleaning up when monitored processes crash. - """ - def start_link(registry, arg) do - GenServer.start_link(__MODULE__, arg, name: registry) - end - - ## Callbacks - - def init({kind, registry, i, partitions, key_partition, pid_partition, listeners}) do - Process.flag(:trap_exit, true) - key_ets = init_key_ets(kind, key_partition) - pid_ets = init_pid_ets(kind, pid_partition) - - # If we have only one partition, we do an optimization which - # is to write the table information alongside the registry info. - if partitions == 1 do - entries = [ - {@key_info, {kind, partitions, key_ets}}, - {@all_info, {kind, partitions, key_ets, {self(), pid_ets}, listeners}} - ] - - true = :ets.insert(registry, entries) - else - true = :ets.insert(registry, {i, key_ets, {self(), pid_ets}}) - end - - {:ok, pid_ets} - end - - # The key partition is a set for unique keys, - # duplicate bag for duplicate ones. - defp init_key_ets(:unique, key_partition) do - :ets.new(key_partition, [:set, :public, read_concurrency: true, write_concurrency: true]) - end - - defp init_key_ets(:duplicate, key_partition) do - :ets.new(key_partition, [ - :duplicate_bag, - :public, - read_concurrency: true, - write_concurrency: true - ]) - end - - # A process can always have multiple keys, so the - # pid partition is always a duplicate bag. 
- defp init_pid_ets(_, pid_partition) do - :ets.new(pid_partition, [ - :duplicate_bag, - :public, - read_concurrency: true, - write_concurrency: true - ]) - end - - def handle_call(:sync, _, state) do - {:reply, :ok, state} - end - - def handle_info({:EXIT, pid, _reason}, ets) do - entries = :ets.take(ets, pid) - - for {_pid, key, key_ets} <- entries do - key_ets = - case key_ets do - # In case the fake key ets is being used. See unregister_match/2. - {key_ets, _} -> - key_ets - - _ -> - key_ets - end - - try do - :ets.match_delete(key_ets, {key, {pid, :_}}) - catch - :error, :badarg -> :badarg - end - end - - {:noreply, ets} - end - - def handle_info(msg, state) do - super(msg, state) - end -end diff --git a/lib/absinthe/subscription/supervisor.ex b/lib/absinthe/subscription/supervisor.ex index b70c3e7106..c339ea8636 100644 --- a/lib/absinthe/subscription/supervisor.ex +++ b/lib/absinthe/subscription/supervisor.ex @@ -3,9 +3,16 @@ defmodule Absinthe.Subscription.Supervisor do use Supervisor - alias Absinthe.Subscription.Registry - def start_link(pubsub, pool_size \\ System.schedulers_online() * 2) do + pubsub = + case pubsub do + [module] when is_atom(module) -> + module + + module -> + module + end + Supervisor.start_link(__MODULE__, {pubsub, pool_size}) end @@ -14,14 +21,17 @@ defmodule Absinthe.Subscription.Supervisor do meta = [pool_size: pool_size] children = [ - supervisor(Registry, [ - :duplicate, - registry_name, - [partitions: System.schedulers_online(), meta: meta] - ]), - supervisor(Absinthe.Subscription.ProxySupervisor, [pubsub, registry_name, pool_size]) + {Registry, + [ + keys: :duplicate, + name: registry_name, + partitions: System.schedulers_online(), + meta: meta, + compressed: true + ]}, + {Absinthe.Subscription.ProxySupervisor, [pubsub, registry_name, pool_size]} ] - supervise(children, strategy: :one_for_one) + Supervisor.init(children, strategy: :one_for_one) end end diff --git a/lib/absinthe/traversal.ex b/lib/absinthe/traversal.ex deleted 
file mode 100644 index 981495398e..0000000000 --- a/lib/absinthe/traversal.ex +++ /dev/null @@ -1,69 +0,0 @@ -defmodule Absinthe.Traversal do - # Graph traversal utilities for dealing with ASTs and schemas using the - # `Absinthe.Traversal.Node` protocol. - # Note this really only exists to handle some Schema rules stuff and is generally - # considered legacy code. See `Absinthe.Blueprint.Transform` for information - # on how to walk blueprint trees. - - @moduledoc false - - alias __MODULE__ - alias Absinthe.Traversal.Node - - @type t :: %__MODULE__{context: any, seen: [Node.t()], path: [Node.t()]} - defstruct context: nil, seen: [], path: [] - - # Instructions defining behavior during traversal - # * `{:ok, value, traversal}`: The value of the node is `value`, and traversal - # should continue to children (using `traversal`) - # * `{:prune, value, traversal}`: The value of the node is `value` and - # traversal should NOT continue to children, but to siblings (using - # `traversal`) - # * `{:error, message}`: Bad stuff happened, explained by `message` - @type instruction_t :: {:ok, any, t} | {:prune, any, t} | {:error, any} - - # Traverse, reducing nodes using a given function to evaluate their value. 
- @doc false - @spec reduce(Node.t(), any, acc, (Node.t(), t, acc -> instruction_t)) :: acc when acc: var - def reduce(node, context, initial_value, node_evaluator) do - {result, _traversal} = - do_reduce(node, %Traversal{context: context}, initial_value, node_evaluator) - - result - end - - # Reduce using a traversal struct - @spec do_reduce(Node.t(), t, acc, (Node.t(), t, acc -> instruction_t)) :: {acc, t} when acc: var - defp do_reduce(node, traversal, initial_value, node_evaluator) do - if seen?(traversal, node) do - {initial_value, traversal} - else - case node_evaluator.(node, traversal, initial_value) do - {:ok, value, next_traversal} -> - reduce_children(node, next_traversal |> put_seen(node), value, node_evaluator) - - {:prune, value, next_traversal} -> - {value, next_traversal |> put_seen(node)} - end - end - end - - # Traverse a node's children - @spec reduce_children(Node.t(), t, acc, (Node.t(), t, acc -> instruction_t)) :: {acc, t} - when acc: var - defp reduce_children(node, traversal, initial, node_evalator) do - Enum.reduce(Node.children(node, traversal), {initial, traversal}, fn child, - {this_value, - this_traversal} -> - do_reduce(child, this_traversal, this_value, node_evalator) - end) - end - - @spec seen?(t, Node.t()) :: boolean - defp seen?(traversal, node), do: traversal.seen |> Enum.member?(node) - - @spec put_seen(t, Node.t()) :: t - defp put_seen(traversal, node) do - %{traversal | seen: [node | traversal.seen]} - end -end diff --git a/lib/absinthe/traversal/node.ex b/lib/absinthe/traversal/node.ex deleted file mode 100644 index d64c893b75..0000000000 --- a/lib/absinthe/traversal/node.ex +++ /dev/null @@ -1,35 +0,0 @@ -defprotocol Absinthe.Traversal.Node do - @moduledoc false - - @fallback_to_any true - - @spec children(any, Absinthe.Traversal.t()) :: [any] - def children(node, traversal) -end - -defimpl Absinthe.Traversal.Node, for: Any do - def children(_node, _traversal), do: [] -end - -defimpl Absinthe.Traversal.Node, for: Atom do - 
def children(node, %{context: schema}) do - if node == schema do - # Root schema node - [node.query, node.mutation, node.subscription] - |> Enum.reject(&is_nil/1) - else - # Type Reference - case Absinthe.Schema.lookup_type(schema, node) do - nil -> - [] - - type -> - [type] - end - end - end - - def children(_node, _traversal) do - [] - end -end diff --git a/lib/absinthe/type.ex b/lib/absinthe/type.ex index 55ff014d00..3cd11a7531 100644 --- a/lib/absinthe/type.ex +++ b/lib/absinthe/type.ex @@ -3,7 +3,10 @@ defmodule Absinthe.Type do alias __MODULE__ - alias Absinthe.{Introspection, Schema} + alias Absinthe.Schema + + @type function_identifier :: {module, any} + @type function_ref :: {:ref, module, function_identifier} # ALL TYPES @@ -35,14 +38,29 @@ defmodule Absinthe.Type do @type identifier_t :: atom @typedoc "A type reference" - @type reference_t :: identifier_t | t + @type reference_t :: identifier_t | binary | t + + def function(type, key) do + case Map.fetch!(type, key) do + {:ref, module, identifier} -> + module.__absinthe_function__(identifier, key) - def identifier(%{__reference__: %{identifier: ident}}) do - ident + function -> + function + end end - def identifier(_) do - nil + @doc false + # this is just for debugging + def expand(%module{} = type) do + module.functions() + |> Enum.reduce(type, fn + :middleware, type -> + type + + attr, type -> + Map.put(type, attr, Absinthe.Type.function(type, attr)) + end) end @doc "Lookup a custom metadata field on a type" @@ -72,10 +90,15 @@ defmodule Absinthe.Type do def equal?(_, _), do: false def built_in?(type) do - type.__reference__.module + type.definition + |> built_in_module?() + end + + def built_in_module?(module) do + module |> Module.split() |> Enum.take(3) - |> Module.safe_concat() == Absinthe.Type.BuiltIns + |> Module.concat() == Absinthe.Type.BuiltIns end # INPUT TYPES @@ -252,13 +275,12 @@ defmodule Absinthe.Type do def wrapped?(_), do: false @doc "Unwrap a type from a List or NonNull" - @spec 
unwrap(wrapping_t) :: custom_t - @spec unwrap(type) :: type when type: custom_t + @spec unwrap(custom_t | wrapping_t | map) :: reference_t | map | nil def unwrap(%{of_type: t}), do: unwrap(t) def unwrap(type), do: type @doc "Unwrap a type from NonNull" - @spec unwrap_non_null(Type.NonNull.t()) :: custom_t + @spec unwrap_non_null(Type.NonNull.t()) :: reference_t @spec unwrap_non_null(type) :: type when type: custom_t | Type.List.t() def unwrap_non_null(%Type.NonNull{of_type: t}), do: unwrap_non_null(t) def unwrap_non_null(type), do: type @@ -352,123 +374,4 @@ defmodule Absinthe.Type do def valid_input?(_, _) do true end - - def field(_type, "__" <> meta_name) do - Introspection.Field.meta(meta_name) - end - - def field(%{fields: fields}, name) do - fields - |> Map.get(name |> String.to_existing_atom()) - rescue - ArgumentError -> nil - end - - def field(_, _name) do - nil - end - - @spec referenced_types(t, Schema.t()) :: [t] - def referenced_types(type, schema) do - referenced_types(type, schema, MapSet.new()) - end - - defp referenced_types(%Type.Argument{type: type}, schema, acc) do - referenced_types(type, schema, acc) - end - - defp referenced_types(%Type.Directive{} = type, schema, acc) do - type.args - |> Map.values() - |> Enum.reduce(acc, &referenced_types(&1.type, schema, &2)) - end - - defp referenced_types(%Type.Enum{identifier: identifier}, _schema, acc) do - MapSet.put(acc, identifier) - end - - defp referenced_types(%Type.Field{} = field, schema, acc) do - acc = - field.args - |> Map.values() - |> Enum.reduce(acc, &referenced_types(&1, schema, &2)) - - referenced_types(field.type, schema, acc) - end - - defp referenced_types(%Type.InputObject{identifier: identifier} = input_object, schema, acc) do - if identifier in acc do - acc - else - acc = MapSet.put(acc, identifier) - - input_object.fields - |> Map.values() - |> Enum.reduce(acc, &referenced_types(&1, schema, &2)) - end - end - - defp referenced_types(%Type.Interface{identifier: identifier} = 
interface, schema, acc) do - if identifier in acc do - acc - else - acc = MapSet.put(acc, identifier) - - acc = - interface.fields - |> Map.values() - |> Enum.reduce(acc, &referenced_types(&1, schema, &2)) - - schema - |> Absinthe.Schema.implementors(identifier) - |> Enum.reduce(acc, &referenced_types(&1, schema, &2)) - end - end - - defp referenced_types(%Type.List{of_type: inner_type}, schema, acc) do - referenced_types(inner_type, schema, acc) - end - - defp referenced_types(%Type.NonNull{of_type: inner_type}, schema, acc) do - referenced_types(inner_type, schema, acc) - end - - defp referenced_types(%Type.Object{identifier: identifier} = object, schema, acc) do - if identifier in acc do - acc - else - acc = MapSet.put(acc, identifier) - - acc = - object.fields - |> Map.values() - |> Enum.reduce(acc, &referenced_types(&1, schema, &2)) - - object.interfaces - |> Enum.reduce(acc, &referenced_types(&1, schema, &2)) - end - end - - defp referenced_types(%Type.Reference{} = ref, schema, acc) do - referenced_types(ref.identifier, schema, acc) - end - - defp referenced_types(%Type.Scalar{identifier: identifier}, _schema, acc) do - MapSet.put(acc, identifier) - end - - defp referenced_types(%Type.Union{identifier: identifier} = union, schema, acc) do - if identifier in acc do - acc - else - acc = MapSet.put(acc, identifier) - - union.types - |> Enum.reduce(acc, &referenced_types(&1, schema, &2)) - end - end - - defp referenced_types(type, schema, acc) when is_atom(type) and type != nil do - referenced_types(Schema.lookup_type(schema, type), schema, acc) - end end diff --git a/lib/absinthe/type/argument.ex b/lib/absinthe/type/argument.ex index eaf207e7b4..3244f2597a 100644 --- a/lib/absinthe/type/argument.ex +++ b/lib/absinthe/type/argument.ex @@ -24,52 +24,16 @@ defmodule Absinthe.Type.Argument do default_value: any, deprecation: Type.Deprecation.t() | nil, description: binary | nil, + definition: module, __reference__: Type.Reference.t() } - defstruct name: nil, + 
defstruct identifier: nil, + name: nil, description: nil, type: nil, deprecation: nil, default_value: nil, + definition: nil, __reference__: nil - - @doc """ - Build an AST of the args map for inclusion in other types - - ## Examples - - ``` - iex> build([foo: [type: :string], bar: [type: :integer]]) - {:%{}, [], - [foo: {:%, [], - [{:__aliases__, [alias: false], [:Absinthe, :Type, :Argument]}, - {:%{}, [], [name: "foo", type: :string]}]}, - bar: {:%, [], - [{:__aliases__, [alias: false], [:Absinthe, :Type, :Argument]}, - {:%{}, [], [name: "bar", type: :integer]}]}]} - ``` - """ - def build(args) when is_list(args) do - ast = - for {arg_name, arg_attrs} <- args do - name = arg_name |> Atom.to_string() - arg_data = arg_attrs |> Keyword.put(:name, name) - - arg_ast = - quote do: %Absinthe.Type.Argument{ - unquote_splicing(arg_data |> Absinthe.Type.Deprecation.from_attribute()) - } - - {arg_name, arg_ast} - end - - quote do: %{unquote_splicing(ast)} - end - - defimpl Absinthe.Traversal.Node do - def children(node, _traversal) do - [node.type] - end - end end diff --git a/lib/absinthe/type/built_ins.ex b/lib/absinthe/type/built_ins.ex deleted file mode 100644 index c9a204ecbd..0000000000 --- a/lib/absinthe/type/built_ins.ex +++ /dev/null @@ -1,21 +0,0 @@ -defmodule Absinthe.Type.BuiltIns do - built_in_types = - [ - Absinthe.Type.BuiltIns.Scalars, - Absinthe.Type.BuiltIns.Directives, - Absinthe.Type.BuiltIns.Introspection - ] - |> Enum.map(&Absinthe.Utils.describe_builtin_module/1) - - @moduledoc """ - Built in data types - - #{built_in_types} - """ - - use Absinthe.Schema.Notation - - import_types Absinthe.Type.BuiltIns.Scalars - import_types Absinthe.Type.BuiltIns.Directives - import_types Absinthe.Type.BuiltIns.Introspection -end diff --git a/lib/absinthe/type/built_ins/directives.ex b/lib/absinthe/type/built_ins/directives.ex index 8f242aa552..74b0959d7e 100644 --- a/lib/absinthe/type/built_ins/directives.ex +++ b/lib/absinthe/type/built_ins/directives.ex @@ -1,18 
+1,20 @@ defmodule Absinthe.Type.BuiltIns.Directives do @moduledoc false - use Absinthe.Schema.Notation + alias Absinthe.Blueprint directive :include do description """ - Directs the executor to include this field or fragment only when the `if` argument is true." + Directs the executor to include this field or fragment only when the `if` argument is true. """ arg :if, non_null(:boolean), description: "Included when true." on [:field, :fragment_spread, :inline_fragment] + repeatable false + expand fn %{if: true}, node -> Blueprint.put_flag(node, :include, __MODULE__) @@ -27,6 +29,8 @@ defmodule Absinthe.Type.BuiltIns.Directives do Directs the executor to skip this field or fragment when the `if` argument is true. """ + repeatable false + arg :if, non_null(:boolean), description: "Skipped when true." on [:field, :fragment_spread, :inline_fragment] diff --git a/lib/absinthe/type/built_ins/introspection.ex b/lib/absinthe/type/built_ins/introspection.ex index 82ee105f10..2ff90d1c2b 100644 --- a/lib/absinthe/type/built_ins/introspection.ex +++ b/lib/absinthe/type/built_ins/introspection.ex @@ -6,9 +6,19 @@ defmodule Absinthe.Type.BuiltIns.Introspection do object :__schema do description "Represents a schema" - field :types, list_of(:__type) do + field :description, :string do + resolve(fn _, %{schema: schema} -> + {:ok, Absinthe.Schema.lookup_type(schema, :__schema).description} + end) + end + + field :types, non_null(list_of(non_null(:__type))) do resolve fn _, %{schema: schema} -> - {:ok, Absinthe.Schema.used_types(schema) ++ Absinthe.Schema.introspection_types(schema)} + types = + Absinthe.Schema.types(schema) + |> Enum.sort_by(& &1.identifier) + + {:ok, types} end end @@ -31,68 +41,53 @@ defmodule Absinthe.Type.BuiltIns.Introspection do end field :directives, - type: list_of(:__directive), + type: non_null(list_of(non_null(:__directive))), resolve: fn _, %{schema: schema} -> - {:ok, Absinthe.Schema.directives(schema)} + directives = + 
Absinthe.Schema.directives(schema) + |> Enum.sort_by(& &1.identifier) + + {:ok, directives} end end object :__directive do description "Represents a directive" - field :name, :string + field :name, non_null(:string) field :description, :string - field :args, - type: list_of(:__inputvalue), + field :is_repeatable, non_null(:boolean), resolve: fn _, %{source: source} -> - structs = source.args |> Map.values() - {:ok, structs} + {:ok, source.repeatable} end - field :on_operation, - deprecate: "Check `locations` field for enum value OPERATION", - type: :boolean, + field :args, + type: non_null(list_of(non_null(:__inputvalue))), resolve: fn _, %{source: source} -> - {:ok, Enum.any?(source.locations, &Enum.member?([:query, :mutation, :subscription], &1))} - end + args = + source.args + |> Map.values() + |> Enum.sort_by(& &1.identifier) - field :on_fragment, - deprecate: "Check `locations` field for enum value FRAGMENT_SPREAD", - type: :boolean, - resolve: fn _, %{source: source} -> - {:ok, Enum.member?(source.locations, :fragment_spread)} + {:ok, args} end - field :on_field, - type: :boolean, - deprecate: "Check `locations` field for enum value FIELD", - resolve: fn _, %{source: source} -> - {:ok, Enum.member?(source.locations, :field)} - end - - field :locations, list_of(:__directive_location) + field :locations, non_null(list_of(non_null(:__directive_location))) end + enum :__type_kind, + values: Absinthe.Introspection.TypeKind.values() + enum :__directive_location, - values: [ - # OPERATIONS - :query, - :mutation, - :subscription, - :field, - :fragment_definition, - :fragment_spread, - :inline_fragment - # TODO: Schema definitions to support Schema input - ] + values: Absinthe.Introspection.DirectiveLocation.values() object :__type do description "Represents scalars, interfaces, object types, unions, enums in the system" field :kind, - type: :string, + type: non_null(:__type_kind), resolve: fn _, %{source: %{__struct__: type}} -> {:ok, type.kind} end @@ -101,7 +96,7 
@@ defmodule Absinthe.Type.BuiltIns.Introspection do field :description, :string - field :fields, list_of(:__field) do + field :fields, list_of(non_null(:__field)) do arg :include_deprecated, :boolean, default_value: false resolve fn @@ -110,12 +105,18 @@ defmodule Absinthe.Type.BuiltIns.Introspection do result = fields |> Enum.flat_map(fn {_, %{deprecation: is_deprecated} = field} -> - if !is_deprecated || (is_deprecated && show_deprecated) do - [field] - else - [] + cond do + Absinthe.Type.introspection?(field) -> + [] + + !is_deprecated || (is_deprecated && show_deprecated) -> + [field] + + true -> + [] end end) + |> Enum.sort_by(& &1.identifier) {:ok, result} @@ -125,30 +126,32 @@ defmodule Absinthe.Type.BuiltIns.Introspection do end field :interfaces, - type: list_of(:__type), + type: list_of(non_null(:__type)), resolve: fn _, %{schema: schema, source: %{interfaces: interfaces}} -> - structs = + interfaces = interfaces - |> Enum.map(fn ident -> - Absinthe.Schema.lookup_type(schema, ident) - end) + |> Enum.map(&Absinthe.Schema.lookup_type(schema, &1)) + |> Enum.sort_by(& &1.identifier) - {:ok, structs} + {:ok, interfaces} _, _ -> {:ok, nil} end field :possible_types, - type: list_of(:__type), + type: list_of(non_null(:__type)), resolve: fn _, %{schema: schema, source: %{types: types}} -> - structs = types |> Enum.map(&Absinthe.Schema.lookup_type(schema, &1)) - {:ok, structs} + possible_types = + types + |> Enum.map(&Absinthe.Schema.lookup_type(schema, &1)) + |> Enum.sort_by(& &1.identifier) + + {:ok, possible_types} - _, - %{schema: schema, source: %Absinthe.Type.Interface{__reference__: %{identifier: ident}}} -> + _, %{schema: schema, source: %Absinthe.Type.Interface{identifier: ident}} -> {:ok, Absinthe.Schema.implementors(schema, ident)} _, _ -> @@ -156,7 +159,7 @@ defmodule Absinthe.Type.BuiltIns.Introspection do end field :enum_values, - type: list_of(:__enumvalue), + type: list_of(non_null(:__enumvalue)), args: [ include_deprecated: [ type: :boolean, @@ 
-174,6 +177,7 @@ defmodule Absinthe.Type.BuiltIns.Introspection do [] end end) + |> Enum.sort_by(& &1.value) {:ok, result} @@ -182,11 +186,15 @@ defmodule Absinthe.Type.BuiltIns.Introspection do end field :input_fields, - type: list_of(:__inputvalue), + type: list_of(non_null(:__inputvalue)), resolve: fn _, %{source: %Absinthe.Type.InputObject{fields: fields}} -> - structs = fields |> Map.values() - {:ok, structs} + input_fields = + fields + |> Map.values() + |> Enum.sort_by(& &1.identifier) + + {:ok, input_fields} _, %{source: _} -> {:ok, nil} @@ -205,7 +213,7 @@ defmodule Absinthe.Type.BuiltIns.Introspection do object :__field do field :name, - type: :string, + type: non_null(:string), resolve: fn _, %{adapter: adapter, source: source} -> {:ok, adapter.to_external_name(source.name, :field)} end @@ -213,13 +221,18 @@ defmodule Absinthe.Type.BuiltIns.Introspection do field :description, :string field :args, - type: list_of(:__inputvalue), - resolve: fn _, %{source: source} -> - {:ok, Map.values(source.args)} + type: non_null(list_of(non_null(:__inputvalue))), + resolve: fn _, %{source: %{args: args}} -> + args = + args + |> Map.values() + |> Enum.sort_by(& &1.identifier) + + {:ok, args} end field :type, - type: :__type, + type: non_null(:__type), resolve: fn _, %{schema: schema, source: source} -> result = case source.type do @@ -234,7 +247,7 @@ defmodule Absinthe.Type.BuiltIns.Introspection do end field :is_deprecated, - type: :boolean, + type: non_null(:boolean), resolve: fn _, %{source: %{deprecation: nil}} -> {:ok, false} @@ -256,7 +269,7 @@ defmodule Absinthe.Type.BuiltIns.Introspection do object :__inputvalue, name: "__InputValue" do field :name, - type: :string, + type: non_null(:string), resolve: fn _, %{adapter: adapter, source: source} -> {:ok, adapter.to_external_name(source.name, :field)} end @@ -264,7 +277,7 @@ defmodule Absinthe.Type.BuiltIns.Introspection do field :description, :string field :type, - type: :__type, + type: non_null(:__type), resolve: 
fn _, %{schema: schema, source: %{type: ident}} -> type = Absinthe.Schema.lookup_type(schema, ident, unwrap: false) {:ok, type} @@ -276,17 +289,8 @@ defmodule Absinthe.Type.BuiltIns.Introspection do _, %{source: %{default_value: nil}} -> {:ok, nil} - _, %{schema: schema, source: %{default_value: value, type: type}} -> - case Absinthe.Schema.lookup_type(schema, type, unwrap: true) do - %Absinthe.Type.Enum{values_by_internal_value: values} -> - {:ok, values[value].name} - - %{serialize: serializer} -> - {:ok, inspect(serializer.(value))} - - _ -> - {:ok, to_string(value)} - end + _, %{schema: schema, source: %{default_value: value, type: type}, adapter: adapter} -> + {:ok, render_default_value(schema, adapter, type, value)} _, %{source: _} -> {:ok, nil} @@ -294,12 +298,12 @@ defmodule Absinthe.Type.BuiltIns.Introspection do end object :__enumvalue, name: "__EnumValue" do - field :name, :string + field :name, non_null(:string) field :description, :string field :is_deprecated, - type: :boolean, + type: non_null(:boolean), resolve: fn _, %{source: %{deprecation: nil}} -> {:ok, false} @@ -318,4 +322,41 @@ defmodule Absinthe.Type.BuiltIns.Introspection do {:ok, dep.reason} end end + + def render_default_value(schema, adapter, type, value) do + case Absinthe.Schema.lookup_type(schema, type, unwrap: false) do + %Absinthe.Type.InputObject{fields: fields} -> + object_values = + fields + |> Map.take(Map.keys(value)) + |> Map.values() + |> Enum.map(&render_default_value(schema, adapter, &1, value)) + |> Enum.join(", ") + + "{#{object_values}}" + + %Absinthe.Type.List{of_type: type} -> + list_values = + value + |> List.wrap() + |> Enum.map(&render_default_value(schema, adapter, type, &1)) + |> Enum.join(", ") + + "[#{list_values}]" + + %Absinthe.Type.Field{type: type, name: name, identifier: identifier} -> + key = adapter.to_external_name(name, :field) + val = render_default_value(schema, adapter, type, value[identifier]) + "#{key}: #{val}" + + 
%Absinthe.Type.Enum{values_by_internal_value: values} -> + values[value].name + + %Absinthe.Type.NonNull{of_type: type} -> + render_default_value(schema, adapter, type, value) + + %Absinthe.Type.Scalar{} = sc -> + inspect(Absinthe.Type.Scalar.serialize(sc, value)) + end + end end diff --git a/lib/absinthe/type/built_ins/scalars.ex b/lib/absinthe/type/built_ins/scalars.ex index ec06edf4d3..8953d5ed32 100644 --- a/lib/absinthe/type/built_ins/scalars.ex +++ b/lib/absinthe/type/built_ins/scalars.ex @@ -11,10 +11,18 @@ defmodule Absinthe.Type.BuiltIns.Scalars do by [IEEE 754](http://en.wikipedia.org/wiki/IEEE_floating_point). """ - serialize & &1 + serialize &__MODULE__.serialize_integer/1 parse parse_with([Absinthe.Blueprint.Input.Integer], &parse_int/1) end + def serialize_integer(n) when is_integer(n), do: n + + def serialize_integer(n) do + raise Absinthe.SerializationError, """ + Value #{inspect(n)} is not a valid integer + """ + end + scalar :float do description """ The `Float` scalar type represents signed double-precision fractional @@ -22,7 +30,7 @@ defmodule Absinthe.Type.BuiltIns.Scalars do [IEEE 754](http://en.wikipedia.org/wiki/IEEE_floating_point). """ - serialize & &1 + serialize &__MODULE__.serialize_float/1 parse parse_with( [Absinthe.Blueprint.Input.Integer, Absinthe.Blueprint.Input.Float], @@ -30,6 +38,15 @@ defmodule Absinthe.Type.BuiltIns.Scalars do ) end + def serialize_float(n) when is_float(n), do: n + def serialize_float(n) when is_integer(n), do: n * 1.0 + + def serialize_float(n) do + raise Absinthe.SerializationError, """ + Value #{inspect(n)} is not a valid float + """ + end + scalar :string do description """ The `String` scalar type represents textual data, represented as UTF-8 @@ -37,7 +54,7 @@ defmodule Absinthe.Type.BuiltIns.Scalars do represent free-form human-readable text. 
""" - serialize &to_string/1 + serialize &String.Chars.to_string/1 parse parse_with([Absinthe.Blueprint.Input.String], &parse_string/1) end @@ -63,10 +80,19 @@ defmodule Absinthe.Type.BuiltIns.Scalars do The `Boolean` scalar type represents `true` or `false`. """ - serialize & &1 + serialize &__MODULE__.serialize_boolean/1 parse parse_with([Absinthe.Blueprint.Input.Boolean], &parse_boolean/1) end + def serialize_boolean(true), do: true + def serialize_boolean(false), do: false + + def serialize_boolean(val) do + raise Absinthe.SerializationError, """ + Value #{inspect(val)} is not a valid boolean + """ + end + # Integers are only safe when between -(2^53 - 1) and 2^53 - 1 due to being # encoded in JavaScript and represented in JSON as double-precision floating # point numbers, as specified by IEEE 754. diff --git a/lib/absinthe/type/custom.ex b/lib/absinthe/type/custom.ex index dd486d8fb4..01ed02a029 100644 --- a/lib/absinthe/type/custom.ex +++ b/lib/absinthe/type/custom.ex @@ -19,7 +19,7 @@ defmodule Absinthe.Type.Custom do The `DateTime` scalar type represents a date and time in the UTC timezone. The DateTime appears in a JSON response as an ISO8601 formatted string, including UTC timezone ("Z"). The parsed date and time string will - be converted to UTC and any UTC offset other than 0 will be rejected. + be converted to UTC if there is an offset. """ serialize &DateTime.to_iso8601/1 @@ -40,7 +40,7 @@ defmodule Absinthe.Type.Custom do scalar :date do description """ The `Date` scalar type represents a date. The Date appears in a JSON - response as an ISO8601 formatted string. + response as an ISO8601 formatted string, without a time component. """ serialize &Date.to_iso8601/1 @@ -50,7 +50,7 @@ defmodule Absinthe.Type.Custom do scalar :time do description """ The `Time` scalar type represents a time. The Time appears in a JSON - response as an ISO8601 formatted string. + response as an ISO8601 formatted string, without a date component. 
""" serialize &Time.to_iso8601/1 @@ -74,8 +74,7 @@ defmodule Absinthe.Type.Custom do @spec parse_datetime(Absinthe.Blueprint.Input.Null.t()) :: {:ok, nil} defp parse_datetime(%Absinthe.Blueprint.Input.String{value: value}) do case DateTime.from_iso8601(value) do - {:ok, datetime, 0} -> {:ok, datetime} - {:ok, _datetime, _offset} -> :error + {:ok, datetime, _} -> {:ok, datetime} _error -> :error end end diff --git a/lib/absinthe/type/custom/decimal.ex b/lib/absinthe/type/custom/decimal.ex index bd71bbcaf4..2cacb8223e 100644 --- a/lib/absinthe/type/custom/decimal.ex +++ b/lib/absinthe/type/custom/decimal.ex @@ -4,26 +4,31 @@ if Code.ensure_loaded?(Decimal) do defdelegate serialize(value), to: Decimal, as: :to_string - @spec parse(any) :: {:ok, Decimal.t()} | :error - @spec parse(Absinthe.Blueprint.Input.Null.t()) :: {:ok, nil} - def parse(%Absinthe.Blueprint.Input.String{value: value}) do + alias Absinthe.Blueprint.Input + + @dialyzer {:no_match, parse: 1} + @spec parse(any) :: {:ok, Decimal.t()} | {:ok, nil} | :error + def parse(%Input.String{value: value}) when is_binary(value) do case Decimal.parse(value) do + # Decimal V2 + {decimal, ""} -> {:ok, decimal} + # Decimal V1 {:ok, decimal} -> {:ok, decimal} _ -> :error end end - def parse(%Absinthe.Blueprint.Input.Float{value: value}) do - decimal = Decimal.new(value) + def parse(%Input.Float{value: value}) when is_float(value) do + decimal = Decimal.from_float(value) if Decimal.nan?(decimal), do: :error, else: {:ok, decimal} end - def parse(%Absinthe.Blueprint.Input.Integer{value: value}) do + def parse(%Input.Integer{value: value}) when is_integer(value) do decimal = Decimal.new(value) if Decimal.nan?(decimal), do: :error, else: {:ok, decimal} end - def parse(%Absinthe.Blueprint.Input.Null{}) do + def parse(%Input.Null{}) do {:ok, nil} end diff --git a/lib/absinthe/type/deprecation.ex b/lib/absinthe/type/deprecation.ex index 3d675664ef..bb81ce0342 100644 --- a/lib/absinthe/type/deprecation.ex +++ 
b/lib/absinthe/type/deprecation.ex @@ -3,29 +3,4 @@ defmodule Absinthe.Type.Deprecation do @type t :: %{reason: binary} defstruct reason: nil - - @doc """ - Build a Deprecation struct (or return `nil`) for a value. - """ - @spec build(nil | boolean | binary) :: nil | t - def build(nil), do: nil - def build(false), do: nil - - def build(true) do - quote do: %unquote(__MODULE__){} - end - - def build(reason) when is_binary(reason) do - quote do: %unquote(__MODULE__){reason: unquote(reason)} - end - - @doc """ - Convert a `:deprecate` attr to a Deprecation struct - """ - @spec from_attribute(Keyword.t()) :: Keyword.t() - def from_attribute(attrs) do - attrs - |> Keyword.put(:deprecation, build(attrs[:deprecate])) - |> Keyword.delete(:deprecate) - end end diff --git a/lib/absinthe/type/directive.ex b/lib/absinthe/type/directive.ex index 3b24ddb52b..27c973a5a9 100644 --- a/lib/absinthe/type/directive.ex +++ b/lib/absinthe/type/directive.ex @@ -8,16 +8,15 @@ defmodule Absinthe.Type.Directive do alias Absinthe.Type alias Absinthe.Language - use Absinthe.Introspection.Kind @typedoc """ A defined directive. - * `:name` - The name of the directivee. Should be a lowercase `binary`. Set automatically. + * `:name` - The name of the directive. Should be a lowercase `binary`. Set automatically. * `:description` - A nice description for introspection. - * `:args` - A map of `Absinthe.Type.Argument` structs. See `Absinthe.Schema.Notation.arg/1`. + * `:args` - A map of `Absinthe.Type.Argument` structs. See `Absinthe.Schema.Notation.arg/2`. * `:locations` - A list of places the directives can be used. - * `:instruction` - A function that, given an argument, returns an instruction for the correct action to take + * `:repeatable` - A directive may be defined as repeatable by including the "repeatable" keyword The `:__reference__` key is for internal use. 
""" @@ -27,8 +26,10 @@ defmodule Absinthe.Type.Directive do identifier: atom, args: map, locations: [location], - expand: nil | (Absinthe.Blueprint.node_t(), map -> {Absinthe.Blueprint.t(), map}), - instruction: (map -> atom), + expand: (map, Absinthe.Blueprint.node_t() -> atom), + definition: module, + repeatable: boolean, + __private__: Keyword.t(), __reference__: Type.Reference.t() } @@ -41,32 +42,13 @@ defmodule Absinthe.Type.Directive do args: nil, locations: [], expand: nil, - instruction: nil, + definition: nil, + repeatable: false, + __private__: [], __reference__: nil - def build(%{attrs: attrs}) do - args = - attrs - |> Keyword.get(:args, []) - |> Enum.map(fn {name, attrs} -> - {name, ensure_reference(attrs, attrs[:__reference__])} - end) - |> Type.Argument.build() - - attrs = Keyword.put(attrs, :args, args) - - quote do: %unquote(__MODULE__){unquote_splicing(attrs)} - end - - defp ensure_reference(arg_attrs, default_reference) do - case Keyword.has_key?(arg_attrs, :__reference__) do - true -> - arg_attrs - - false -> - Keyword.put(arg_attrs, :__reference__, default_reference) - end - end + @doc false + defdelegate functions, to: Absinthe.Blueprint.Schema.DirectiveDefinition # Whether the directive is active in `place` @doc false @@ -81,17 +63,17 @@ defmodule Absinthe.Type.Directive do defp do_on?(:fragment_definition, %Language.Fragment{}), do: true defp do_on?(:fragment_spread, %Language.FragmentSpread{}), do: true defp do_on?(:inline_fragment, %Language.InlineFragment{}), do: true - # TODO: Schema definitions to support Schema input + defp do_on?(:schema, %Language.SchemaDefinition{}), do: true + defp do_on?(:schema, %Language.SchemaDeclaration{}), do: true + defp do_on?(:scalar, %Language.ScalarTypeDefinition{}), do: true + defp do_on?(:object, %Language.ObjectTypeDefinition{}), do: true + defp do_on?(:field_definition, %Language.FieldDefinition{}), do: true + defp do_on?(:interface, %Language.InterfaceTypeDefinition{}), do: true + defp do_on?(:union, 
%Language.UnionTypeDefinition{}), do: true + defp do_on?(:enum, %Language.EnumTypeDefinition{}), do: true + defp do_on?(:enum_value, %Language.EnumValueDefinition{}), do: true + defp do_on?(:input_object, %Language.InputObjectTypeDefinition{}), do: true + defp do_on?(:argument_definition, %Language.InputValueDefinition{}), do: true + defp do_on?(:input_field_definition, %Language.InputValueDefinition{}), do: true defp do_on?(_, _), do: false - - # Check a directive and return an instruction - @doc false - @spec check(t, Language.t(), map) :: atom - def check(definition, place, args) do - if on?(definition, place) && definition.instruction do - definition.instruction.(args) - else - :ok - end - end end diff --git a/lib/absinthe/type/enum.ex b/lib/absinthe/type/enum.ex index feb8676084..8cc10ca75c 100644 --- a/lib/absinthe/type/enum.ex +++ b/lib/absinthe/type/enum.ex @@ -51,7 +51,7 @@ defmodule Absinthe.Type.Enum do """ - use Absinthe.Introspection.Kind + use Absinthe.Introspection.TypeKind, :enum alias Absinthe.{Blueprint, Type} @@ -73,6 +73,7 @@ defmodule Absinthe.Type.Enum do values: %{binary => Type.Enum.Value.t()}, identifier: atom, __private__: Keyword.t(), + definition: module, __reference__: Type.Reference.t() } @@ -83,28 +84,9 @@ defmodule Absinthe.Type.Enum do values_by_internal_value: %{}, values_by_name: %{}, __private__: [], + definition: nil, __reference__: nil - def build(%{attrs: attrs}) do - raw_values = attrs[:values] || [] - - values = Type.Enum.Value.build(raw_values) - internal_values = Type.Enum.Value.build(raw_values, :value) - values_by_name = Type.Enum.Value.build(raw_values, :name) - - attrs = - attrs - |> Keyword.put(:values, values) - |> Keyword.put(:values_by_internal_value, internal_values) - |> Keyword.put(:values_by_name, values_by_name) - - quote do - %unquote(__MODULE__){ - unquote_splicing(attrs) - } - end - end - # Get the internal representation of an enum value @doc false @spec parse(t, any) :: any diff --git 
a/lib/absinthe/type/enum/value.ex b/lib/absinthe/type/enum/value.ex index 1b5ea90dbf..8a8ec4e275 100644 --- a/lib/absinthe/type/enum/value.ex +++ b/lib/absinthe/type/enum/value.ex @@ -17,65 +17,23 @@ defmodule Absinthe.Type.Enum.Value do value that will be provided by query documents. * `:description` - A nice description for introspection. * `:value` - The raw, internal value that `:name` map to. This will be - provided as the argument value to resolve functions. - to `resolve` functions + provided as the argument value to `resolve` functions. * `:deprecation` - Deprecation information for a value, usually - set-up using the `Absinthe.Schema.Notation.deprecate/2` convenience + set-up using the `Absinthe.Schema.Notation.deprecate/1` convenience function. """ @type t :: %{ name: binary, description: binary, value: any, + enum_identifier: atom, deprecation: Type.Deprecation.t() | nil, __reference__: Type.Reference.t() } - defstruct name: nil, description: nil, value: nil, deprecation: nil, __reference__: nil - - @spec build(Keyword.t()) :: Macro.expr() - def build(raw_values) when is_list(raw_values) do - ast = - for {identifier, value_attrs} <- normalize(raw_values) do - value_data = value_data(identifier, value_attrs) - value_ast = quote do: %Absinthe.Type.Enum.Value{unquote_splicing(value_data)} - - {identifier, value_ast} - end - - quote do: %{unquote_splicing(ast)} - end - - def build(raw_values, key) when is_list(raw_values) do - ast = - for {identifier, value_attrs} <- normalize(raw_values) do - value_data = value_data(identifier, value_attrs) - value_ast = quote do: %Absinthe.Type.Enum.Value{unquote_splicing(value_data)} - - {value_data[key], value_ast} - end - - quote do: %{unquote_splicing(ast)} - end - - defp value_data(identifier, value_attrs) do - default_name = - identifier - |> Atom.to_string() - |> String.upcase() - - value_attrs - |> Keyword.put_new(:value, identifier) - |> Keyword.put_new(:name, default_name) - |> Type.Deprecation.from_attribute() - 
end - - # Normalize shorthand lists of atoms to the keyword list that `values` expects - @spec normalize([atom] | [{atom, Keyword.t()}]) :: [{atom, Keyword.t()}] - defp normalize(raw) do - if Keyword.keyword?(raw) do - raw - else - raw |> Enum.map(&{&1, []}) - end - end + defstruct name: nil, + description: nil, + value: nil, + deprecation: nil, + enum_identifier: nil, + __reference__: nil end diff --git a/lib/absinthe/type/field.ex b/lib/absinthe/type/field.ex index 37310459bd..9bfca934a1 100644 --- a/lib/absinthe/type/field.ex +++ b/lib/absinthe/type/field.ex @@ -11,7 +11,6 @@ defmodule Absinthe.Type.Field do alias Absinthe.Type alias Absinthe.Type.Deprecation - alias Absinthe.Schema use Type.Fetch @@ -71,7 +70,7 @@ defmodule Absinthe.Type.Field do The configuration for a field. * `:name` - The name of the field, usually assigned automatically by - the `Absinthe.Schema.Notation.field/1`. + the `Absinthe.Schema.Notation.field/4`. Including this option will bypass the snake_case to camelCase conversion. * `:description` - Description of a field, useful for introspection. * `:deprecation` - Deprecation information for a field, usually set-up using `Absinthe.Schema.Notation.deprecate/1`. @@ -104,7 +103,7 @@ defmodule Absinthe.Type.Field do ### Custom Resolution When accepting arguments, however, you probably need to use them for - something. Here's an example of definining a field that looks up a list of + something. 
Here's an example of defining a field that looks up a list of users for a given `location_id`: ``` query do @@ -195,6 +194,7 @@ defmodule Absinthe.Type.Field do middleware: [], complexity: complexity_t | nil, __private__: Keyword.t(), + definition: module, __reference__: Type.Reference.t() } @@ -206,100 +206,15 @@ defmodule Absinthe.Type.Field do args: %{}, # used by subscription fields config: nil, - # used by mutatino fields + # used by mutation fields triggers: [], middleware: [], complexity: nil, default_value: nil, __private__: [], + definition: nil, __reference__: nil - @doc """ - Build an AST of the field map for inclusion in other types - - ## Examples - - ``` - iex> build([foo: [type: :string], bar: [type: :integer]]) - {:%{}, [], - [foo: {:%, [], - [{:__aliases__, [alias: false], [:Absinthe, :Type, :Field]}, - {:%{}, [], [name: "Foo", type: :string]}]}, - bar: {:%, [], - [{:__aliases__, [alias: false], [:Absinthe, :Type, :Field]}, - {:%{}, [], [name: "Bar", type: :integer]}]}]} - ``` - """ - @spec build(Keyword.t()) :: tuple - def build(fields) when is_list(fields) do - quoted_empty_map = quote do: %{} - - ast = - for {field_name, field_attrs} <- fields do - name = field_name |> Atom.to_string() - default_ref = field_attrs[:__reference__] - - field_attrs = - case Keyword.pop(field_attrs, :resolve) do - {nil, field_attrs} -> - field_attrs - - {resolution_function_ast, field_attrs} -> - Keyword.put(field_attrs, :middleware, [ - {Absinthe.Resolution, resolution_function_ast} - ]) - end - - field_data = - field_attrs - |> Keyword.put_new(:name, name) - |> Keyword.put(:identifier, field_name) - |> Keyword.update(:middleware, [], &Enum.reverse/1) - |> Keyword.update(:args, quoted_empty_map, fn raw_args -> - args = - for {name, attrs} <- raw_args, - do: {name, ensure_reference(attrs, name, default_ref)} - - Type.Argument.build(args) - end) - - field_ast = - quote do: %Absinthe.Type.Field{ - unquote_splicing(field_data |> 
Absinthe.Type.Deprecation.from_attribute()) - } - - {field_name, field_ast} - end - - quote do: %{unquote_splicing(ast)} - end - - defp ensure_reference(arg_attrs, name, default_reference) do - case Keyword.has_key?(arg_attrs, :__reference__) do - true -> - arg_attrs - - false -> - # default_reference is map AST, hence the gymnastics to build it nicely. - {a, b, args} = default_reference - - Keyword.put(arg_attrs, :__reference__, {a, b, Keyword.put(args, :identifier, name)}) - end - end - - defimpl Absinthe.Traversal.Node do - def children(node, traversal) do - found = Schema.lookup_type(traversal.context, node.type) - - if found do - [found | node.args |> Map.values()] - else - type_names = traversal.context.types.by_identifier |> Map.keys() |> Enum.join(", ") - - raise "Unknown Absinthe type for field `#{node.name}': (#{node.type |> Type.unwrap()} not in available types, #{ - type_names - })" - end - end - end + @doc false + defdelegate functions, to: Absinthe.Blueprint.Schema.FieldDefinition end diff --git a/lib/absinthe/type/input_object.ex b/lib/absinthe/type/input_object.ex index 3f4ccbcbf8..64fa4b2bcb 100644 --- a/lib/absinthe/type/input_object.ex +++ b/lib/absinthe/type/input_object.ex @@ -31,7 +31,7 @@ defmodule Absinthe.Type.InputObject do ``` """ - use Absinthe.Introspection.Kind + use Absinthe.Introspection.TypeKind, :input_object use Absinthe.Type.Fetch alias Absinthe.Type @@ -42,16 +42,17 @@ defmodule Absinthe.Type.InputObject do * `:name` - The name of the input object type. Should be a TitleCased `binary`. Set automatically. * `:description` - A nice description for introspection. - * `:fields` - A map of `Absinthe.Type.Field` structs. Usually built via `Absinthe.Schema.Notation.field/1`. + * `:fields` - A map of `Absinthe.Type.Field` structs. Usually built via `Absinthe.Schema.Notation.field/4`. The `__private__` and `:__reference__` fields are for internal use. 
""" @type t :: %__MODULE__{ name: binary, description: binary, - fields: map | (() -> map), + fields: map, identifier: atom, __private__: Keyword.t(), + definition: module, __reference__: Type.Reference.t() } @@ -60,24 +61,6 @@ defmodule Absinthe.Type.InputObject do fields: %{}, identifier: nil, __private__: [], - __reference__: nil, - field_imports: [] - - def build(%{attrs: attrs}) do - fields = - attrs - |> Keyword.get(:fields, []) - |> Type.Field.build() - |> Type.Object.handle_imports(attrs[:field_imports]) - - attrs = Keyword.put(attrs, :fields, fields) - - quote do: %unquote(__MODULE__){unquote_splicing(attrs)} - end - - defimpl Absinthe.Traversal.Node do - def children(node, _traversal) do - Map.values(node.fields) - end - end + definition: nil, + __reference__: nil end diff --git a/lib/absinthe/type/interface.ex b/lib/absinthe/type/interface.ex index f6f9217533..246de79dc6 100644 --- a/lib/absinthe/type/interface.ex +++ b/lib/absinthe/type/interface.ex @@ -42,7 +42,7 @@ defmodule Absinthe.Type.Interface do ``` """ - use Absinthe.Introspection.Kind + use Absinthe.Introspection.TypeKind, :interface alias Absinthe.Type alias Absinthe.Schema @@ -50,7 +50,7 @@ defmodule Absinthe.Type.Interface do @typedoc """ * `:name` - The name of the interface type. Should be a TitleCased `binary`. Set automatically. * `:description` - A nice description for introspection. - * `:fields` - A map of `Absinthe.Type.Field` structs. See `Absinthe.Schema.Notation.field/1` and + * `:fields` - A map of `Absinthe.Type.Field` structs. See `Absinthe.Schema.Notation.field/4` and * `:args` - A map of `Absinthe.Type.Argument` structs. See `Absinthe.Schema.Notation.arg/2`. * `:resolve_type` - A function used to determine the implementing type of a resolved object. See also `Absinthe.Type.Object`'s `:is_type_of`. 
@@ -63,8 +63,9 @@ defmodule Absinthe.Type.Interface do description: binary, fields: map, identifier: atom, - resolve_type: (any, Absinthe.Resolution.t() -> atom | nil), + interfaces: [Absinthe.Type.Interface.t()], __private__: Keyword.t(), + definition: module, __reference__: Type.Reference.t() } @@ -73,61 +74,42 @@ defmodule Absinthe.Type.Interface do fields: nil, identifier: nil, resolve_type: nil, + interfaces: [], __private__: [], - __reference__: nil, - field_imports: [] + definition: nil, + __reference__: nil - def build(%{attrs: attrs}) do - fields = - (attrs[:fields] || []) - |> Type.Field.build() - |> Type.Object.handle_imports(attrs[:field_imports]) - - attrs = Keyword.put(attrs, :fields, fields) - - quote do - %unquote(__MODULE__){unquote_splicing(attrs)} - end - end + @doc false + defdelegate functions, to: Absinthe.Blueprint.Schema.InterfaceTypeDefinition @spec resolve_type(Type.Interface.t(), any, Absinthe.Resolution.t()) :: Type.t() | nil def resolve_type(type, obj, env, opts \\ [lookup: true]) - def resolve_type( - %{resolve_type: nil, __reference__: %{identifier: ident}}, - obj, - %{schema: schema}, - opts - ) do - implementors = Schema.implementors(schema, ident) - - type_name = - Enum.find(implementors, fn - %{is_type_of: nil} -> - false - - type -> - type.is_type_of.(obj) - end) - - if opts[:lookup] do - Absinthe.Schema.lookup_type(schema, type_name) + def resolve_type(interface, obj, %{schema: schema} = env, opts) do + if resolver = Type.function(interface, :resolve_type) do + case resolver.(obj, env) do + nil -> + nil + + ident when is_atom(ident) -> + if opts[:lookup] do + Absinthe.Schema.lookup_type(schema, ident) + else + ident + end + end else - type_name - end - end - - def resolve_type(%{resolve_type: resolver}, obj, %{schema: schema} = env, opts) do - case resolver.(obj, env) do - nil -> - nil - - ident when is_atom(ident) -> - if opts[:lookup] do - Absinthe.Schema.lookup_type(schema, ident) - else - ident - end + type_name = + 
Schema.implementors(schema, interface.identifier) + |> Enum.find(fn type -> + Absinthe.Type.function(type, :is_type_of).(obj) + end) + + if opts[:lookup] do + Absinthe.Schema.lookup_type(schema, type_name) + else + type_name + end end end @@ -147,49 +129,11 @@ defmodule Absinthe.Type.Interface do @doc false @spec member?(t, Type.t()) :: boolean - def member?(%{__reference__: %{identifier: ident}}, %{interfaces: ifaces}) do + def member?(%{identifier: ident}, %{interfaces: ifaces}) do ident in ifaces end def member?(_, _) do false end - - @spec implements?(Type.Interface.t(), Type.Object.t(), Type.Schema.t()) :: boolean - def implements?(interface, type, schema) do - covariant?(interface, type, schema) - end - - defp covariant?(%wrapper{of_type: inner_type1}, %wrapper{of_type: inner_type2}, schema) do - covariant?(inner_type1, inner_type2, schema) - end - - defp covariant?(%{name: name}, %{name: name}, _schema) do - true - end - - defp covariant?(%Type.Interface{fields: ifields}, %{fields: type_fields}, schema) do - Enum.all?(ifields, fn {field_ident, ifield} -> - case Map.get(type_fields, field_ident) do - nil -> - false - - field -> - covariant?(ifield.type, field.type, schema) - end - end) - end - - defp covariant?(nil, _, _), do: false - defp covariant?(_, nil, _), do: false - - defp covariant?(itype, type, schema) when is_atom(itype) do - itype = schema.__absinthe_type__(itype) - covariant?(itype, type, schema) - end - - defp covariant?(itype, type, schema) when is_atom(type) do - type = schema.__absinthe_type__(type) - covariant?(itype, type, schema) - end end diff --git a/lib/absinthe/type/list.ex b/lib/absinthe/type/list.ex index ebf4e51673..ccf9ff13da 100644 --- a/lib/absinthe/type/list.ex +++ b/lib/absinthe/type/list.ex @@ -18,7 +18,7 @@ defmodule Absinthe.Type.List do ``` """ - use Absinthe.Introspection.Kind + use Absinthe.Introspection.TypeKind, :list use Absinthe.Type.Fetch @typedoc " diff --git a/lib/absinthe/type/non_null.ex 
b/lib/absinthe/type/non_null.ex index 16eae01705..587104ad9a 100644 --- a/lib/absinthe/type/non_null.ex +++ b/lib/absinthe/type/non_null.ex @@ -6,6 +6,10 @@ defmodule Absinthe.Type.NonNull do By default, all types in GraphQL are nullable. To declare a type that disallows null, wrap it in a `Absinthe.Type.NonNull` struct. + Adding non_null/1 to a type is a breaking change, removing it is not. Client documents that specify non null on a + variable eg `query ($id: ID!)` are allowed to be passed to arguments which allow null. If the argument however is + non_null, then the variable type MUST be non null as well. + ## Examples Given a type, `:item`, to declare it as non-null, you could do the following: @@ -21,7 +25,7 @@ defmodule Absinthe.Type.NonNull do ``` """ - use Absinthe.Introspection.Kind + use Absinthe.Introspection.TypeKind, :non_null use Absinthe.Type.Fetch @typedoc """ diff --git a/lib/absinthe/type/object.ex b/lib/absinthe/type/object.ex index b91e1dcff2..f7595db742 100644 --- a/lib/absinthe/type/object.ex +++ b/lib/absinthe/type/object.ex @@ -31,7 +31,7 @@ defmodule Absinthe.Type.Object do Given we have a query that supports getting a person by name (see `Absinthe.Schema`), and a query document like the following: - ``` + ```graphql { person(name: "Joe") { name @@ -67,7 +67,7 @@ defmodule Absinthe.Type.Object do """ alias Absinthe.Type - use Absinthe.Introspection.Kind + use Absinthe.Introspection.TypeKind, :object @typedoc """ A defined object type. @@ -77,9 +77,9 @@ defmodule Absinthe.Type.Object do * `:name` - The name of the object type. Should be a TitleCased `binary`. Set automatically. * `:description` - A nice description for introspection. - * `:fields` - A map of `Absinthe.Type.Field` structs. Usually built via `Absinthe.Schema.Notation.field/1`. + * `:fields` - A map of `Absinthe.Type.Field` structs. Usually built via `Absinthe.Schema.Notation.field/4`. * `:interfaces` - A list of interfaces that this type guarantees to implement. 
See `Absinthe.Type.Interface`. - * `:is_type_of` - A function used to identify whether a resolved object belongs to this defined type. For use with `:interfaces` entry and `Absinthe.Type.Interface`. + * `:is_type_of` - A function used to identify whether a resolved object belongs to this defined type. For use with `:interfaces` entry and `Absinthe.Type.Interface`. This function will be passed one argument; the object whose type needs to be identified, and should return `true` when the object matches this type. The `__private__` and `:__reference__` keys are for internal use. """ @@ -89,8 +89,8 @@ defmodule Absinthe.Type.Object do description: binary, fields: map, interfaces: [Absinthe.Type.Interface.t()], - is_type_of: (any -> boolean), __private__: Keyword.t(), + definition: module, __reference__: Type.Reference.t() } @@ -99,49 +99,13 @@ defmodule Absinthe.Type.Object do description: nil, fields: nil, interfaces: [], - is_type_of: nil, __private__: [], + definition: nil, __reference__: nil, - field_imports: [] - - def build(%{attrs: attrs}) do - fields = - (attrs[:fields] || []) - |> Type.Field.build() - |> handle_imports(attrs[:field_imports]) - - attrs = Keyword.put(attrs, :fields, fields) - - quote do - %unquote(__MODULE__){ - unquote_splicing(attrs) - } - end - end + is_type_of: nil @doc false - def handle_imports(fields, []), do: fields - def handle_imports(fields, nil), do: fields - - def handle_imports(fields, imports) do - quote do - Enum.reduce( - unquote(imports), - unquote(fields), - &Absinthe.Type.Object.import_fields(__MODULE__, &1, &2) - ) - end - end - - def import_fields(schema, {type, _opts}, fields) do - case schema.__absinthe_type__(type) do - %{fields: new_fields} -> - Map.merge(fields, new_fields) - - _ -> - fields - end - end + defdelegate functions, to: Absinthe.Blueprint.Schema.ObjectTypeDefinition @doc false @spec field(t, atom) :: Absinthe.Type.Field.t() @@ -149,10 +113,4 @@ defmodule Absinthe.Type.Object do fields |> Map.get(identifier) 
end - - defimpl Absinthe.Traversal.Node do - def children(node, _traversal) do - Map.values(node.fields) ++ node.interfaces - end - end end diff --git a/lib/absinthe/type/scalar.ex b/lib/absinthe/type/scalar.ex index 17f094f696..b6e5372255 100644 --- a/lib/absinthe/type/scalar.ex +++ b/lib/absinthe/type/scalar.ex @@ -29,20 +29,19 @@ defmodule Absinthe.Type.Scalar do ``` """ - use Absinthe.Introspection.Kind + use Absinthe.Introspection.TypeKind, :scalar alias Absinthe.Type - def build(%{attrs: attrs}) do - quote do: %unquote(__MODULE__){unquote_splicing(attrs)} - end + @doc false + defdelegate functions(), to: Absinthe.Blueprint.Schema.ScalarTypeDefinition - def serialize(%{serialize: serializer}, value) do - serializer.(value) + def serialize(type, value) do + Type.function(type, :serialize).(value) end - def parse(%{parse: parser}, value, context \\ %{}) do - case parser do + def parse(type, value, context \\ %{}) do + case Type.function(type, :parse) do parser when is_function(parser, 1) -> parser.(value) @@ -66,20 +65,21 @@ defmodule Absinthe.Type.Scalar do @type t :: %__MODULE__{ name: binary, description: binary, - serialize: (value_t -> any), - parse: (any -> {:ok, value_t} | :error), identifier: atom, __private__: Keyword.t(), + definition: module, __reference__: Type.Reference.t() } defstruct name: nil, description: nil, - serialize: nil, - parse: nil, identifier: nil, __private__: [], - __reference__: nil + definition: nil, + __reference__: nil, + parse: nil, + serialize: nil, + open_ended: false @typedoc "The internal, canonical representation of a scalar value" @type value_t :: any diff --git a/lib/absinthe/type/union.ex b/lib/absinthe/type/union.ex index a566344399..53a497b5f7 100644 --- a/lib/absinthe/type/union.ex +++ b/lib/absinthe/type/union.ex @@ -23,7 +23,7 @@ defmodule Absinthe.Type.Union do ``` """ - use Absinthe.Introspection.Kind + use Absinthe.Introspection.TypeKind, :union alias Absinthe.{Schema, Type} @@ -42,27 +42,29 @@ defmodule 
Absinthe.Type.Union do name: binary, description: binary, types: [Type.identifier_t()], - resolve_type: (any, Absinthe.Resolution.t() -> atom | nil), identifier: atom, + fields: map, __private__: Keyword.t(), + definition: module, __reference__: Type.Reference.t() } defstruct name: nil, description: nil, - resolve_type: nil, identifier: nil, + resolve_type: nil, types: [], + fields: nil, __private__: [], + definition: nil, __reference__: nil - def build(%{attrs: attrs}) do - quote do: %unquote(__MODULE__){unquote_splicing(attrs)} - end + @doc false + defdelegate functions, to: Absinthe.Blueprint.Schema.UnionTypeDefinition @doc false @spec member?(t, Type.t()) :: boolean - def member?(%{types: types}, %{__reference__: %{identifier: ident}}) do + def member?(%{types: types}, %{identifier: ident}) do ident in types end @@ -74,43 +76,35 @@ defmodule Absinthe.Type.Union do @spec resolve_type(t, any, Absinthe.Resolution.t()) :: Type.t() | nil def resolve_type(type, object, env, opts \\ [lookup: true]) - def resolve_type(%{resolve_type: nil, types: types}, obj, %{schema: schema}, opts) do - type_name = - Enum.find(types, fn - %{is_type_of: nil} -> - false - - type -> - case Schema.lookup_type(schema, type) do - nil -> - false - - %{is_type_of: nil} -> - false - - %{is_type_of: check} -> - check.(obj) + def resolve_type(%{types: types} = union, obj, %{schema: schema} = env, opts) do + if resolver = Type.function(union, :resolve_type) do + case resolver.(obj, env) do + nil -> + nil + + ident when is_atom(ident) -> + if opts[:lookup] do + Absinthe.Schema.lookup_type(schema, ident) + else + ident end - end) - - if opts[:lookup] do - Schema.lookup_type(schema, type_name) + end else - type_name - end - end - - def resolve_type(%{resolve_type: resolver}, obj, %{schema: schema} = env, opts) do - case resolver.(obj, env) do - nil -> - nil - - ident when is_atom(ident) -> - if opts[:lookup] do - Absinthe.Schema.lookup_type(schema, ident) - else - ident - end + type_name = + 
Enum.find(types, fn + %{is_type_of: nil} -> + false + + type -> + type = Absinthe.Schema.lookup_type(schema, type) + Absinthe.Type.function(type, :is_type_of).(obj) + end) + + if opts[:lookup] do + Schema.lookup_type(schema, type_name) + else + type_name + end end end end diff --git a/lib/absinthe/utils.ex b/lib/absinthe/utils.ex index fc13218b59..c18a51012a 100644 --- a/lib/absinthe/utils.ex +++ b/lib/absinthe/utils.ex @@ -53,6 +53,22 @@ defmodule Absinthe.Utils do end @doc false + @spec escapable?(any()) :: boolean() + def escapable?(value) do + # if this doesn't blow up, the value can be escaped + _ = Macro.escape(value) + true + rescue + _ -> + false + end + + @doc false + def placement_docs(placements, name) do + placement = Enum.find(placements, &match?({^name, _}, &1)) + placement_docs([placement]) + end + def placement_docs([{_, placement} | _]) do placement |> do_placement_docs diff --git a/lib/absinthe/utils/render.ex b/lib/absinthe/utils/render.ex new file mode 100644 index 0000000000..4f1476023c --- /dev/null +++ b/lib/absinthe/utils/render.ex @@ -0,0 +1,72 @@ +defmodule Absinthe.Utils.Render do + @moduledoc false + + import Inspect.Algebra + + def join(docs, joiner) do + fold_doc(docs, fn doc, acc -> + concat([doc, concat(List.wrap(joiner)), acc]) + end) + end + + def render_string_value(string, indent \\ 2) do + string + |> String.trim() + |> String.split("\n") + |> case do + [string_line] -> + concat([~s("), escape_string(string_line), ~s(")]) + + string_lines -> + concat( + nest( + block_string([~s(""")] ++ string_lines), + indent, + :always + ), + concat(line(), ~s(""")) + ) + end + end + + @escaped_chars [?", ?\\, ?/, ?\b, ?\f, ?\n, ?\r, ?\t] + + defp escape_string(string) do + escape_string(string, []) + end + + defp escape_string(<>, acc) when char in @escaped_chars do + escape_string(rest, [acc | escape_char(char)]) + end + + defp escape_string(<>, acc) do + escape_string(rest, [acc | <>]) + end + + defp escape_string(<<>>, acc) do + 
to_string(acc) + end + + defp escape_char(?"), do: [?\\, ?"] + defp escape_char(?\\), do: [?\\, ?\\] + defp escape_char(?/), do: [?\\, ?/] + defp escape_char(?\b), do: [?\\, ?b] + defp escape_char(?\f), do: [?\\, ?f] + defp escape_char(?\n), do: [?\\, ?n] + defp escape_char(?\r), do: [?\\, ?r] + defp escape_char(?\t), do: [?\\, ?t] + + defp block_string([string]) do + string(string) + end + + defp block_string([string | rest]) do + string + |> string() + |> concat(block_string_line(rest)) + |> concat(block_string(rest)) + end + + defp block_string_line(["", _ | _]), do: nest(line(), :reset) + defp block_string_line(_), do: line() +end diff --git a/lib/mix/tasks/absinthe.schema.json.ex b/lib/mix/tasks/absinthe.schema.json.ex index 4627506b3f..f00cb47c34 100644 --- a/lib/mix/tasks/absinthe.schema.json.ex +++ b/lib/mix/tasks/absinthe.schema.json.ex @@ -6,88 +6,123 @@ defmodule Mix.Tasks.Absinthe.Schema.Json do @shortdoc "Generate a schema.json file for an Absinthe schema" @default_filename "./schema.json" - @default_codec_name "Poison" @moduledoc """ - Generate a schema.json file + Generate a `schema.json` file ## Usage - absinthe.schema.json [FILENAME] [OPTIONS] + mix absinthe.schema.json [OPTIONS] [FILENAME] + + The JSON codec to be used needs to be included in your `mix.exs` dependencies. If using the default codec, + see the Jason [installation instructions](https://hexdocs.pm/jason). ## Options - --schema The schema. Default: As configured for `:absinthe` `:schema` - --json-codec Sets JSON Codec. Default: #{@default_codec_name} - --pretty Whether to pretty-print. Default: false + * `--schema` - The name of the `Absinthe.Schema` module defining the schema to be generated. + Default: As [configured](https://hexdocs.pm/mix/Mix.Config.html) for `:absinthe` `:schema` + * `--json-codec` - Codec to use to generate the JSON file (see [Custom Codecs](#module-custom-codecs)). + Default: [`Jason`](https://hexdocs.pm/jason/) + * `--pretty` - Whether to pretty-print. 
+ Default: `false` ## Examples - Write to default path `#{@default_filename}` using the `:schema` configured for - the `:absinthe` application and the default `#{@default_codec_name}` JSON codec: + Write to default path `#{@default_filename}` using the `:schema` configured for the `:absinthe` application: + + mix absinthe.schema.json + + Write to default path `#{@default_filename}` using the `MySchema` schema: + + mix absinthe.schema.json --schema MySchema - $ mix absinthe.schema.json + Write to path `/path/to/schema.json` using the `MySchema` schema, with pretty-printing: - Write to default path `#{@default_filename}` using the `MySchema` schema and - the default `#{@default_codec_name}` JSON codec. + mix absinthe.schema.json --schema MySchema --pretty /path/to/schema.json - $ mix absinthe.schema.json --schema MySchema + Write to default path `#{@default_filename}` using the `MySchema` schema and a custom JSON codec, `MyCodec`: - Write to path `/path/to/schema.json` using the `MySchema` schema, using the - default `#{@default_codec_name}` JSON codec, and pretty-printing: + mix absinthe.schema.json --schema MySchema --json-codec MyCodec - $ mix absinthe.schema.json --schema MySchema --pretty /path/to/schema.json - Write to default path `#{@default_filename}` using the `MySchema` schema and - a custom JSON codec, `MyCodec`: + ## Custom Codecs - $ mix absinthe.schema.json --schema MySchema --json-codec MyCodec + Any module that provides `encode!/2` can be used as a custom codec: + encode!(value, options) + + * `value` will be provided as a Map containing the generated schema. + * `options` will be a keyword list with a `:pretty` boolean, indicating whether the user requested pretty-printing. + + The function should return a string to be written to the output file. 
""" - @introspection_graphql Path.join([:code.priv_dir(:absinthe), "graphql", "introspection.graphql"]) + defmodule Options do + @moduledoc false + + defstruct filename: nil, schema: nil, json_codec: nil, pretty: false + + @type t() :: %__MODULE__{ + filename: String.t(), + schema: module(), + json_codec: module(), + pretty: boolean() + } + end + @doc "Callback implementation for `Mix.Task.run/1`, which receives a list of command-line args." + @spec run(argv :: [binary()]) :: any() def run(argv) do Application.ensure_all_started(:absinthe) Mix.Task.run("loadpaths", argv) - Mix.Project.compile(argv) - - {opts, args, _} = OptionParser.parse(argv) - - schema = find_schema(opts) - json_codec = find_json(opts) - filename = args |> List.first() || @default_filename - - {:ok, query} = File.read(@introspection_graphql) + Mix.Task.run("compile", argv) - case Absinthe.run(query, schema) do - {:ok, result} -> - create_directory(Path.dirname(filename)) - content = json_codec.module.encode!(result, json_codec.opts) - create_file(filename, content, force: true) + opts = parse_options(argv) - {:error, error} -> - raise error + case generate_schema(opts) do + {:ok, content} -> write_schema(content, opts.filename) + {:error, error} -> raise error end end - defp find_json(opts) do - case Keyword.get(opts, :json_codec, Poison) do - module when is_atom(module) -> - %{module: module, opts: codec_opts(module, opts)} - - other -> - other + @doc false + @spec generate_schema(Options.t()) :: {:error, binary()} | {:ok, String.t()} + def generate_schema(%Options{ + pretty: pretty, + schema: schema, + json_codec: json_codec + }) do + with {:ok, result} <- Absinthe.Schema.introspect(schema) do + content = json_codec.encode!(result, pretty: pretty) + {:ok, content} + else + {:error, reason} -> {:error, reason} + error -> {:error, error} end end - defp codec_opts(Poison, opts) do - [pretty: Keyword.get(opts, :pretty, false)] + @doc false + @spec parse_options([String.t()]) :: Options.t() + def 
parse_options(argv) do + parse_options = [strict: [schema: :string, json_codec: :string, pretty: :boolean]] + {opts, args, _} = OptionParser.parse(argv, parse_options) + + %Options{ + filename: args |> List.first() || @default_filename, + schema: find_schema(opts), + json_codec: json_codec_as_atom(opts), + pretty: Keyword.get(opts, :pretty, false) + } end - defp codec_opts(_, _) do - [] + defp json_codec_as_atom(opts) do + opts + |> Keyword.fetch(:json_codec) + |> case do + {:ok, codec} -> Module.concat([codec]) + _ -> Jason + end end defp find_schema(opts) do @@ -99,4 +134,9 @@ defmodule Mix.Tasks.Absinthe.Schema.Json do [value] |> Module.safe_concat() end end + + defp write_schema(content, filename) do + create_directory(Path.dirname(filename)) + create_file(filename, content, force: true) + end end diff --git a/lib/mix/tasks/absinthe.schema.sdl.ex b/lib/mix/tasks/absinthe.schema.sdl.ex new file mode 100644 index 0000000000..c1702e182a --- /dev/null +++ b/lib/mix/tasks/absinthe.schema.sdl.ex @@ -0,0 +1,99 @@ +defmodule Mix.Tasks.Absinthe.Schema.Sdl do + use Mix.Task + import Mix.Generator + + @shortdoc "Generate a schema.graphql file for an Absinthe schema" + + @default_filename "./schema.graphql" + + @moduledoc """ + Generate a `schema.graphql` file. + + ## Usage + + mix absinthe.schema.sdl [OPTIONS] [FILENAME] + + ## Options + + * `--schema` - The name of the `Absinthe.Schema` module defining the schema to be generated. 
+ Default: As [configured](https://hexdocs.pm/mix/Mix.Config.html) for `:absinthe` `:schema` + + ## Examples + + Write to default path `#{@default_filename}` using the `:schema` configured for the `:absinthe` application: + + mix absinthe.schema.sdl + + Write to path `/path/to/schema.graphql` using the `MySchema` schema + + mix absinthe.schema.sdl --schema MySchema /path/to/schema.graphql + + """ + + defmodule Options do + @moduledoc false + defstruct filename: nil, schema: nil + + @type t() :: %__MODULE__{ + filename: String.t(), + schema: module() + } + end + + @impl Mix.Task + def run(argv) do + Application.ensure_all_started(:absinthe) + + Mix.Task.run("loadpaths", argv) + Mix.Task.run("compile", argv) + + opts = parse_options(argv) + + case generate_schema(opts) do + {:ok, content} -> write_schema(content, opts.filename) + {:error, error} -> raise error + end + end + + def generate_schema(%Options{schema: schema}) do + pipeline = + schema + |> Absinthe.Pipeline.for_schema(prototype_schema: schema.__absinthe_prototype_schema__()) + |> Absinthe.Pipeline.upto({Absinthe.Phase.Schema.Validation.Result, pass: :final}) + |> Absinthe.Schema.apply_modifiers(schema) + + with {:ok, blueprint, _phases} <- + Absinthe.Pipeline.run( + schema.__absinthe_blueprint__(), + pipeline + ) do + {:ok, inspect(blueprint, pretty: true)} + else + _ -> {:error, "Failed to render schema"} + end + end + + defp write_schema(content, filename) do + create_directory(Path.dirname(filename)) + create_file(filename, content, force: true) + end + + def parse_options(argv) do + {opts, args, _} = OptionParser.parse(argv, strict: [schema: :string]) + + %Options{ + filename: args |> List.first() || @default_filename, + schema: find_schema(opts) + } + end + + defp find_schema(opts) do + case Keyword.get(opts, :schema, Application.get_env(:absinthe, :schema)) do + nil -> + raise "No --schema given or :schema configured for the :absinthe application" + + value -> + [value] |> Module.safe_concat() + end 
+ end +end diff --git a/mix.exs b/mix.exs index d1ff038db8..116b175247 100644 --- a/mix.exs +++ b/mix.exs @@ -1,18 +1,22 @@ defmodule Absinthe.Mixfile do use Mix.Project - @version "1.5.0-dev" + @source_url "https://github.com/absinthe-graphql/absinthe" + @version "1.7.0" def project do [ app: :absinthe, version: @version, - elixir: "~> 1.4", + elixir: "~> 1.10", elixirc_paths: elixirc_paths(Mix.env()), build_embedded: Mix.env() == :prod, start_permanent: Mix.env() == :prod, package: package(), - source_url: "https://github.com/absinthe-graphql/absinthe", + source_url: @source_url, + preferred_cli_env: [ + dialyzer: :test + ], docs: [ source_ref: "v#{@version}", main: "overview", @@ -24,23 +28,36 @@ defmodule Absinthe.Mixfile do extras: extras(), groups_for_extras: groups_for_extras() ], - deps: deps() + deps: deps(), + dialyzer: [ + plt_core_path: "priv/plts", + plt_add_apps: [:mix, :dataloader, :decimal, :ex_unit] + ] ] end defp package do [ description: "GraphQL for Elixir", - files: ["lib", "src", "priv", "mix.exs", "README.md", "CHANGELOG.md", ".formatter.exs"], + files: [ + "lib", + "src/absinthe_parser.yrl", + "priv", + "mix.exs", + "README.md", + "CHANGELOG.md", + ".formatter.exs" + ], maintainers: [ "Bruce Williams", - "Ben Wilson" + "Ben Wilson", + "Vince Foley" ], licenses: ["MIT"], links: %{ Website: "https://absinthe-graphql.org", - Changelog: "https://github.com/absinthe-graphql/absinthe/blob/master/CHANGELOG.md", - GitHub: "https://github.com/absinthe-graphql/absinthe" + Changelog: "#{@source_url}/blob/master/CHANGELOG.md", + GitHub: @source_url } ] end @@ -49,18 +66,20 @@ defmodule Absinthe.Mixfile do defp elixirc_paths(_), do: ["lib"] def application do - [applications: [:logger]] + [extra_applications: [:crypto, :logger]] end defp deps do [ + {:nimble_parsec, "~> 0.5 or ~> 1.0"}, + {:telemetry, "~> 1.0 or ~> 0.4"}, {:dataloader, "~> 1.0.0", optional: true}, - {:ex_doc, "~> 0.14", only: :dev}, - {:benchfella, "~> 0.3.0", only: :dev}, - {:dialyze, 
"~> 0.2", only: :dev}, - {:decimal, "~> 1.0", optional: true}, - {:phoenix_pubsub, ">= 0.0.0", only: :test}, - {:mix_test_watch, "~> 0.4.1", only: [:test, :dev]} + {:decimal, "~> 1.0 or ~> 2.0", optional: true}, + {:ex_doc, "~> 0.22", only: :dev}, + {:benchee, ">= 1.0.0", only: :dev}, + {:dialyxir, "~> 1.1.0", only: [:dev, :test], runtime: false}, + {:mix_test_watch, "~> 1.0", only: :dev, runtime: false}, + {:makeup_graphql, "~> 0.1.0", only: :dev} ] end @@ -82,7 +101,6 @@ defmodule Absinthe.Mixfile do "guides/tutorial/conclusion.md", "guides/schemas.md", "guides/plug-phoenix.md", - "guides/ecto.md", "guides/middleware-and-plugins.md", "guides/errors.md", "guides/batching.md", @@ -94,14 +112,18 @@ defmodule Absinthe.Mixfile do "guides/importing-fields.md", "guides/variables.md", "guides/introspection.md", + "guides/telemetry.md", "guides/deprecation.md", "guides/adapters.md", "guides/complexity-analysis.md", "guides/file-uploads.md", + "guides/testing.md", "guides/client/javascript.md", "guides/client/apollo.md", "guides/client/relay.md", - "guides/upgrading/v1.4.md" + "guides/upgrading/v1.4.md", + "guides/upgrading/v1.5.md", + "CHANGELOG.md" ] end @@ -111,7 +133,8 @@ defmodule Absinthe.Mixfile do Tutorial: ~r/guides\/tutorial\/.*/, Topics: ~r/guides\/[^\/]+\.md/, "Client Guides": ~r/guides\/client\/.*/, - "Upgrade Guides": ~r/guides\/upgrading\/.*/ + "Upgrade Guides": ~r/guides\/upgrading\/.*/, + Changelog: "CHANGELOG.md" ] end @@ -122,12 +145,13 @@ defmodule Absinthe.Mixfile do [ "Schema Definition and Types": [ Absinthe.Schema, + Absinthe.Schema.Hydrator, Absinthe.Schema.Notation, + Absinthe.Schema.Prototype, Absinthe.Resolution.Helpers, Absinthe.Type, Absinthe.Type.Custom, Absinthe.Type.Argument, - Absinthe.Type.BuiltIns, Absinthe.Type.Custom, Absinthe.Type.Directive, Absinthe.Type.Enum, @@ -148,28 +172,31 @@ defmodule Absinthe.Mixfile do Absinthe.Middleware.Batch, Absinthe.Middleware.Dataloader, Absinthe.Middleware.MapGet, - Absinthe.Middleware.PassParent + 
Absinthe.Middleware.PassParent, + Absinthe.Middleware.Telemetry ], Subscriptions: [ Absinthe.Subscription, - Absinthe.Subscription.Pubsub + Absinthe.Subscription.Pubsub, + Absinthe.Subscription.Local, + Absinthe.Subscription.PipelineSerializer ], Extensibility: [ Absinthe.Pipeline, Absinthe.Phase, - Absinthe.Phase.Validation.Helpers, - Absinthe.Pipeline.ErrorResult + Absinthe.Phase.Document.Context, + Absinthe.Phase.Telemetry ], "Document Adapters": [ Absinthe.Adapter, Absinthe.Adapter.LanguageConventions, Absinthe.Adapter.Passthrough, + Absinthe.Adapter.StrictLanguageConventions, Absinthe.Adapter.Underscore ], Execution: [ Absinthe.Blueprint, Absinthe.Blueprint.Execution, - Absinthe.Traversal, Absinthe.Resolution, Absinthe.Complexity ], diff --git a/mix.lock b/mix.lock index 64e6f34d20..22bedc2e56 100644 --- a/mix.lock +++ b/mix.lock @@ -1,11 +1,18 @@ %{ - "benchfella": {:hex, :benchfella, "0.3.5", "b2122c234117b3f91ed7b43b6e915e19e1ab216971154acd0a80ce0e9b8c05f5", [:mix], [], "hexpm"}, - "dataloader": {:hex, :dataloader, "1.0.1", "7a4328683e3ab8608d1b77a3beb575defb0a70bdbb51d80890be3a90633a624e", [:mix], [{:ecto, ">= 0.0.0", [hex: :ecto, repo: "hexpm", optional: true]}], "hexpm"}, - "decimal": {:hex, :decimal, "1.4.1", "ad9e501edf7322f122f7fc151cce7c2a0c9ada96f2b0155b8a09a795c2029770", [:mix], [], "hexpm"}, - "dialyze": {:hex, :dialyze, "0.2.1", "9fb71767f96649020d769db7cbd7290059daff23707d6e851e206b1fdfa92f9d", [:mix], [], "hexpm"}, - "earmark": {:hex, :earmark, "1.2.4", "99b637c62a4d65a20a9fb674b8cffb8baa771c04605a80c911c4418c69b75439", [:mix], [], "hexpm"}, - "ex_doc": {:hex, :ex_doc, "0.18.3", "f4b0e4a2ec6f333dccf761838a4b253d75e11f714b85ae271c9ae361367897b7", [:mix], [{:earmark, "~> 1.1", [hex: :earmark, repo: "hexpm", optional: false]}], "hexpm"}, - "fs": {:hex, :fs, "0.9.2", "ed17036c26c3f70ac49781ed9220a50c36775c6ca2cf8182d123b6566e49ec59", [:rebar], [], "hexpm"}, - "mix_test_watch": {:hex, :mix_test_watch, "0.4.1", 
"a98a84c795623f1ba020324f4354cf30e7120ba4dab65f9c2ae300f830a25f75", [:mix], [{:fs, "~> 0.9.1", [hex: :fs, repo: "hexpm", optional: false]}], "hexpm"}, - "phoenix_pubsub": {:hex, :phoenix_pubsub, "1.0.2", "bfa7fd52788b5eaa09cb51ff9fcad1d9edfeb68251add458523f839392f034c1", [:mix], [], "hexpm"}, + "benchee": {:hex, :benchee, "1.0.1", "66b211f9bfd84bd97e6d1beaddf8fc2312aaabe192f776e8931cb0c16f53a521", [:mix], [{:deep_merge, "~> 1.0", [hex: :deep_merge, repo: "hexpm", optional: false]}], "hexpm", "3ad58ae787e9c7c94dd7ceda3b587ec2c64604563e049b2a0e8baafae832addb"}, + "dataloader": {:hex, :dataloader, "1.0.8", "114294362db98a613f231589246aa5b0ce847412e8e75c4c94f31f204d272cbf", [:mix], [{:ecto, ">= 3.4.3 and < 4.0.0", [hex: :ecto, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "eaf3c2aa2bc9dbd2f1e960561d616b7f593396c4754185b75904f6d66c82a667"}, + "decimal": {:hex, :decimal, "2.0.0", "a78296e617b0f5dd4c6caf57c714431347912ffb1d0842e998e9792b5642d697", [:mix], [], "hexpm", "34666e9c55dea81013e77d9d87370fe6cb6291d1ef32f46a1600230b1d44f577"}, + "deep_merge": {:hex, :deep_merge, "1.0.0", "b4aa1a0d1acac393bdf38b2291af38cb1d4a52806cf7a4906f718e1feb5ee961", [:mix], [], "hexpm", "ce708e5f094b9cd4e8f2be4f00d2f4250c4095be93f8cd6d018c753894885430"}, + "dialyxir": {:hex, :dialyxir, "1.1.0", "c5aab0d6e71e5522e77beff7ba9e08f8e02bad90dfbeffae60eaf0cb47e29488", [:mix], [{:erlex, ">= 0.2.6", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "07ea8e49c45f15264ebe6d5b93799d4dd56a44036cf42d0ad9c960bc266c0b9a"}, + "earmark_parser": {:hex, :earmark_parser, "1.4.19", "de0d033d5ff9fc396a24eadc2fcf2afa3d120841eb3f1004d138cbf9273210e8", [:mix], [], "hexpm", "527ab6630b5c75c3a3960b75844c314ec305c76d9899bb30f71cb85952a9dc45"}, + "erlex": {:hex, :erlex, "0.2.6", "c7987d15e899c7a2f34f5420d2a2ea0d659682c06ac607572df55a43753aa12e", [:mix], [], "hexpm", "2ed2e25711feb44d52b17d2780eabf998452f6efda104877a3881c2f8c0c0c75"}, + 
"ex_doc": {:hex, :ex_doc, "0.27.3", "d09ed7ab590b71123959d9017f6715b54a448d76b43cf909eb0b2e5a78a977b2", [:mix], [{:earmark_parser, "~> 1.4.19", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_elixir, "~> 0.14", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1", [hex: :makeup_erlang, repo: "hexpm", optional: false]}], "hexpm", "ee60b329d08195039bfeb25231a208749be4f2274eae42ce38f9be0538a2f2e6"}, + "file_system": {:hex, :file_system, "0.2.10", "fb082005a9cd1711c05b5248710f8826b02d7d1784e7c3451f9c1231d4fc162d", [:mix], [], "hexpm", "41195edbfb562a593726eda3b3e8b103a309b733ad25f3d642ba49696bf715dc"}, + "makeup": {:hex, :makeup, "1.0.5", "d5a830bc42c9800ce07dd97fa94669dfb93d3bf5fcf6ea7a0c67b2e0e4a7f26c", [:mix], [{:nimble_parsec, "~> 0.5 or ~> 1.0", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "cfa158c02d3f5c0c665d0af11512fed3fba0144cf1aadee0f2ce17747fba2ca9"}, + "makeup_elixir": {:hex, :makeup_elixir, "0.15.2", "dc72dfe17eb240552857465cc00cce390960d9a0c055c4ccd38b70629227e97c", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.1", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "fd23ae48d09b32eff49d4ced2b43c9f086d402ee4fd4fcb2d7fad97fa8823e75"}, + "makeup_erlang": {:hex, :makeup_erlang, "0.1.1", "3fcb7f09eb9d98dc4d208f49cc955a34218fc41ff6b84df7c75b3e6e533cc65f", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "174d0809e98a4ef0b3309256cbf97101c6ec01c4ab0b23e926a9e17df2077cbb"}, + "makeup_graphql": {:hex, :makeup_graphql, "0.1.2", "81e2939aab6d2b81d39ee5d9e13fae02599e9ca6e1152e0eeed737a98a5f96aa", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.1", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "3390ab04ba388d52a94bbe64ef62aa4d7923ceaffac43ec948f58f631440e8fb"}, + "mix_test_watch": {:hex, :mix_test_watch, "1.0.2", 
"34900184cbbbc6b6ed616ed3a8ea9b791f9fd2088419352a6d3200525637f785", [:mix], [{:file_system, "~> 0.2.1 or ~> 0.3", [hex: :file_system, repo: "hexpm", optional: false]}], "hexpm", "47ac558d8b06f684773972c6d04fcc15590abdb97aeb7666da19fcbfdc441a07"}, + "nimble_parsec": {:hex, :nimble_parsec, "1.2.0", "b44d75e2a6542dcb6acf5d71c32c74ca88960421b6874777f79153bbbbd7dccc", [:mix], [], "hexpm", "52b2871a7515a5ac49b00f214e4165a40724cf99798d8e4a65e4fd64ebd002c1"}, + "telemetry": {:hex, :telemetry, "0.4.3", "a06428a514bdbc63293cd9a6263aad00ddeb66f608163bdec7c8995784080818", [:rebar3], [], "hexpm", "eb72b8365ffda5bed68a620d1da88525e326cb82a75ee61354fc24b844768041"}, } diff --git a/priv/graphql/introspection.graphql b/priv/graphql/introspection.graphql index a3fb647ed7..3f8b0fd5e9 100644 --- a/priv/graphql/introspection.graphql +++ b/priv/graphql/introspection.graphql @@ -1,8 +1,15 @@ query IntrospectionQuery { __schema { - queryType { name } - mutationType { name } - subscriptionType { name } + description + queryType { + name + } + mutationType { + name + } + subscriptionType { + name + } types { ...FullType } @@ -10,12 +17,14 @@ query IntrospectionQuery { name description locations + isRepeatable args { ...InputValue } } } } + fragment FullType on __Type { kind name @@ -48,12 +57,16 @@ fragment FullType on __Type { ...TypeRef } } + fragment InputValue on __InputValue { name description - type { ...TypeRef } + type { + ...TypeRef + } defaultValue } + fragment TypeRef on __Type { kind name diff --git a/src/absinthe_lexer.xrl b/src/absinthe_lexer.xrl deleted file mode 100644 index 04cb57977c..0000000000 --- a/src/absinthe_lexer.xrl +++ /dev/null @@ -1,64 +0,0 @@ -% Absinthe Lexer -% -% See the spec reference http://facebook.github.io/graphql/#sec-Appendix-Grammar-Summary -% The relevant version is also copied into this repo - -Definitions. 
- -% Ignored tokens -WhiteSpace = [\x{0009}\x{000B}\x{000C}\x{0020}\x{00A0}] -_LineTerminator = \x{000A}\x{000D}\x{2028}\x{2029} -LineTerminator = [{_LineTerminator}] -Comment = #[^{_LineTerminator}]* -Comma = , -Ignored = {WhiteSpace}|{LineTerminator}|{Comment}|{Comma} - -% Lexical tokens -Punctuator = [!$():=@\[\]{|}]|\.\.\. -Name = [_A-Za-z][_0-9A-Za-z]* - -% Int Value -Digit = [0-9] -NonZeroDigit = [1-9] -NegativeSign = - -IntegerPart = {NegativeSign}?(0|{NonZeroDigit}{Digit}*) -IntValue = {IntegerPart} - -% Float Value -FractionalPart = \.{Digit}+ -Sign = [+\-] -ExponentIndicator = [eE] -ExponentPart = {ExponentIndicator}{Sign}?{Digit}+ -FloatValue = {IntegerPart}{FractionalPart}|{IntegerPart}{ExponentPart}|{IntegerPart}{FractionalPart}{ExponentPart} - -% Block String Value -EscapedBlockStringQuote = (\\""") -BlockStringCharacter = (\n|\t|\r|[^\x{0000}-\x{001F}]|{EscapedBlockStringQuote}) -BlockStringValue = """{BlockStringCharacter}*""" - -% String Value -HexDigit = [0-9A-Fa-f] -EscapedUnicode = u{HexDigit}{HexDigit}{HexDigit}{HexDigit} -EscapedCharacter = ["\\\/bfnrt] -StringCharacter = ([^\"{_LineTerminator}]|\\{EscapedUnicode}|\\{EscapedCharacter}) -StringValue = "{StringCharacter}*" - -% Boolean Value -BooleanValue = true|false - -% Reserved words -ReservedWord = query|mutation|subscription|fragment|on|implements|interface|union|scalar|enum|input|extend|type|directive|ON|null|schema - -Rules. - -{Ignored} : skip_token. -{Punctuator} : {token, {list_to_atom(TokenChars), TokenLine}}. -{ReservedWord} : {token, {list_to_atom(TokenChars), TokenLine}}. -{IntValue} : {token, {int_value, TokenLine, TokenChars}}. -{FloatValue} : {token, {float_value, TokenLine, TokenChars}}. -{BlockStringValue} : {token, {block_string_value, TokenLine, TokenChars}}. -{StringValue} : {token, {string_value, TokenLine, TokenChars}}. -{BooleanValue} : {token, {boolean_value, TokenLine, TokenChars}}. -{Name} : {token, {name, TokenLine, TokenChars}}. - -Erlang code. 
diff --git a/src/absinthe_parser.yrl b/src/absinthe_parser.yrl index 5f300e327d..d77a3115f8 100644 --- a/src/absinthe_parser.yrl +++ b/src/absinthe_parser.yrl @@ -8,7 +8,7 @@ Nonterminals EnumValueDefinitionList EnumValueDefinition DirectiveDefinition DirectiveDefinitionLocations SelectionSet Selections Selection - OperationType Name NameWithoutOn VariableDefinitions VariableDefinition Directives Directive + OperationType Name NameWithoutOn VariableDefinitions VariableDefinition DescriptionDefinition Directives Directive Field Alias Arguments ArgumentList Argument FragmentSpread FragmentName InlineFragment VariableDefinitionList Variable DefaultValue @@ -17,13 +17,13 @@ Nonterminals Terminals '{' '}' '(' ')' '[' ']' '!' ':' '@' '$' '=' '|' '...' - 'query' 'mutation' 'subscription' 'fragment' 'on' 'directive' + 'query' 'mutation' 'subscription' 'fragment' 'on' 'directive' 'repeatable' 'type' 'implements' 'interface' 'union' 'scalar' 'enum' 'input' 'extend' 'schema' name int_value float_value string_value block_string_value boolean_value null. Rootsymbol Document. -Document -> Definitions : build_ast_node('Document', #{'definitions' => '$1'}, #{'start_line' => extract_line('$1')}). +Document -> Definitions : build_ast_node('Document', #{'definitions' => '$1'}, extract_location('$1')). Definitions -> Definition : ['$1']. Definitions -> Definition Definitions : ['$1'|'$2']. @@ -32,43 +32,44 @@ Definition -> OperationDefinition : '$1'. Definition -> Fragment : '$1'. Definition -> TypeDefinition : '$1'. -OperationType -> 'query' : extract_atom('$1'). -OperationType -> 'mutation' : extract_atom('$1'). -OperationType -> 'subscription' : extract_atom('$1'). +OperationType -> 'query' : '$1'. +OperationType -> 'mutation' : '$1'. +OperationType -> 'subscription' : '$1'. -OperationDefinition -> SelectionSet : build_ast_node('OperationDefinition', #{'operation' => 'query', 'selection_set' => '$1'}, #{'start_line' => extract_child_line('$1')}). 
-OperationDefinition -> OperationType SelectionSet : build_ast_node('OperationDefinition', #{'operation' => '$1', 'selection_set' => '$2'}, #{'start_line' => extract_line('$2')}). -OperationDefinition -> OperationType VariableDefinitions SelectionSet : build_ast_node('OperationDefinition', #{'operation' => '$1', 'variable_definitions' => '$2', 'selection_set' => '$3'}, #{'start_line' => extract_child_line('$2')}). -OperationDefinition -> OperationType VariableDefinitions Directives SelectionSet : build_ast_node('OperationDefinition', #{'operation' => '$1', 'variable_definitions' => '$2', 'directives' => '$3', 'selection_set' => '$4'}, #{'start_line' => extract_child_line('$2')}). -OperationDefinition -> OperationType Name SelectionSet : build_ast_node('OperationDefinition', #{'operation' => '$1', 'name' => extract_binary('$2'), 'selection_set' => '$3'}, #{'start_line' => extract_line('$2')}). -OperationDefinition -> OperationType Name VariableDefinitions SelectionSet : build_ast_node('OperationDefinition', #{'operation' => '$1', 'name' => extract_binary('$2'), 'variable_definitions' => '$3', 'selection_set' => '$4'}, #{'start_line' => extract_line('$2')}). -OperationDefinition -> OperationType Name Directives SelectionSet : build_ast_node('OperationDefinition', #{'operation' => '$1', 'name' => extract_binary('$2'), 'directives' => '$3', 'selection_set' => '$4'}, #{'start_line' => extract_line('$2')}). -OperationDefinition -> OperationType Name VariableDefinitions Directives SelectionSet : build_ast_node('OperationDefinition', #{'operation' => '$1', 'name' => extract_binary('$2'), 'variable_definitions' => '$3', 'directives' => '$4', 'selection_set' => '$5'}, #{'start_line' => extract_line('$2')}). +OperationDefinition -> SelectionSet : build_ast_node('OperationDefinition', #{'operation' => 'query', 'selection_set' => '$1', 'shorthand' => true}, extract_child_location('$1')). 
+OperationDefinition -> OperationType SelectionSet : build_ast_node('OperationDefinition', #{'operation' => extract_atom('$1'), 'selection_set' => '$2'}, extract_location('$1')). +OperationDefinition -> OperationType VariableDefinitions SelectionSet : build_ast_node('OperationDefinition', #{'operation' => extract_atom('$1'), 'variable_definitions' => '$2', 'selection_set' => '$3'}, extract_child_location('$1')). +OperationDefinition -> OperationType VariableDefinitions Directives SelectionSet : build_ast_node('OperationDefinition', #{'operation' => extract_atom('$1'), 'variable_definitions' => '$2', 'directives' => '$3', 'selection_set' => '$4'}, extract_child_location('$1')). +OperationDefinition -> OperationType Name SelectionSet : build_ast_node('OperationDefinition', #{'operation' => extract_atom('$1'), 'name' => extract_binary('$2'), 'selection_set' => '$3'}, extract_location('$1')). +OperationDefinition -> OperationType Name VariableDefinitions SelectionSet : build_ast_node('OperationDefinition', #{'operation' => extract_atom('$1'), 'name' => extract_binary('$2'), 'variable_definitions' => '$3', 'selection_set' => '$4'}, extract_location('$1')). +OperationDefinition -> OperationType Name Directives SelectionSet : build_ast_node('OperationDefinition', #{'operation' => extract_atom('$1'), 'name' => extract_binary('$2'), 'directives' => '$3', 'selection_set' => '$4'}, extract_location('$1')). +OperationDefinition -> OperationType Name VariableDefinitions Directives SelectionSet : build_ast_node('OperationDefinition', #{'operation' => extract_atom('$1'), 'name' => extract_binary('$2'), 'variable_definitions' => '$3', 'directives' => '$4', 'selection_set' => '$5'}, extract_location('$1')). -Fragment -> 'fragment' FragmentName 'on' TypeCondition SelectionSet : build_ast_node('Fragment', #{'name' => '$2', 'type_condition' => '$4', 'selection_set' => '$5'}, #{'start_line' => extract_line('$1')}). 
-Fragment -> 'fragment' FragmentName 'on' TypeCondition Directives SelectionSet : build_ast_node('Fragment', #{'name' => '$2', 'type_condition' => '$4', 'directives' => '$5', 'selection_set' => '$6'}, #{'start_line' => extract_line('$1')}). +Fragment -> 'fragment' FragmentName 'on' TypeCondition SelectionSet : build_ast_node('Fragment', #{'name' => '$2', 'type_condition' => '$4', 'selection_set' => '$5'}, extract_location('$1')). +Fragment -> 'fragment' FragmentName 'on' TypeCondition Directives SelectionSet : build_ast_node('Fragment', #{'name' => '$2', 'type_condition' => '$4', 'directives' => '$5', 'selection_set' => '$6'}, extract_location('$1')). TypeCondition -> NamedType : '$1'. VariableDefinitions -> '(' VariableDefinitionList ')' : '$2'. VariableDefinitionList -> VariableDefinition : ['$1']. VariableDefinitionList -> VariableDefinition VariableDefinitionList : ['$1'|'$2']. -VariableDefinition -> Variable ':' Type : build_ast_node('VariableDefinition', #{'variable' => '$1', 'type' => '$3'}, #{'start_line' => extract_child_line('$1')}). -VariableDefinition -> Variable ':' Type DefaultValue : build_ast_node('VariableDefinition', #{'variable' => '$1', 'type' => '$3', 'default_value' => '$4'}, #{'start_line' => extract_child_line('$1')}). -Variable -> '$' NameWithoutOn : build_ast_node('Variable', #{'name' => extract_binary('$2')}, #{'start_line' => extract_line('$1')}). -Variable -> '$' 'on' : build_ast_node('Variable', #{'name' => extract_binary('$2')}, #{'start_line' => extract_line('$1')}). +VariableDefinition -> Variable ':' Type : build_ast_node('VariableDefinition', #{'variable' => '$1', 'type' => '$3'}, extract_child_location('$1')). +VariableDefinition -> Variable ':' Type DefaultValue : build_ast_node('VariableDefinition', #{'variable' => '$1', 'type' => '$3', 'default_value' => '$4'}, extract_child_location('$1')). 
+VariableDefinition -> Variable ':' Type DefaultValue Directives : build_ast_node('VariableDefinition', #{'variable' => '$1', 'type' => '$3', 'default_value' => '$4', 'directives' => '$5'}, extract_child_location('$1')). +Variable -> '$' NameWithoutOn : build_ast_node('Variable', #{'name' => extract_binary('$2')}, extract_location('$1')). +Variable -> '$' 'on' : build_ast_node('Variable', #{'name' => extract_binary('$2')}, extract_location('$1')). DefaultValue -> '=' Value : '$2'. Type -> NamedType : '$1'. Type -> ListType : '$1'. Type -> NonNullType : '$1'. -NamedType -> Name : build_ast_node('NamedType', #{'name' => extract_binary('$1')}, #{'start_line' => extract_line('$1')}). -ListType -> '[' Type ']' : build_ast_node('ListType', #{'type' => '$2'}, #{'start_line' => extract_line('$1')}). -NonNullType -> NamedType '!' : build_ast_node('NonNullType', #{'type' => '$1'}, #{'start_line' => extract_line('$1')}). -NonNullType -> ListType '!' : build_ast_node('NonNullType', #{'type' => '$1'}, #{'start_line' => extract_line('$1')}). +NamedType -> Name : build_ast_node('NamedType', #{'name' => extract_binary('$1')}, extract_location('$1')). +ListType -> '[' Type ']' : build_ast_node('ListType', #{'type' => '$2'}, extract_location('$1')). +NonNullType -> NamedType '!' : build_ast_node('NonNullType', #{'type' => '$1'}, extract_location('$1')). +NonNullType -> ListType '!' : build_ast_node('NonNullType', #{'type' => '$1'}, extract_location('$1')). -SelectionSet -> '{' Selections '}' : build_ast_node('SelectionSet', #{'selections' => '$2'}, #{'start_line' => extract_line('$1'), 'end_line' => extract_line('$3')}). +SelectionSet -> '{' Selections '}' : build_ast_node('SelectionSet', #{'selections' => '$2'}, extract_location('$1')). Selections -> Selection : ['$1']. Selections -> Selection Selections : ['$1'|'$2']. @@ -77,78 +78,77 @@ Selection -> Field : '$1'. Selection -> FragmentSpread : '$1'. Selection -> InlineFragment : '$1'. -FragmentSpread -> '...' 
FragmentName : build_ast_node('FragmentSpread', #{'name' => '$2'}, #{'start_line' => extract_line('$1')}). -FragmentSpread -> '...' FragmentName Directives : build_ast_node('FragmentSpread', #{'name' => '$2', 'directives' => '$3'}, #{'start_line' => extract_line('$1')}). +FragmentSpread -> '...' FragmentName : build_ast_node('FragmentSpread', #{'name' => '$2'}, extract_location('$1')). +FragmentSpread -> '...' FragmentName Directives : build_ast_node('FragmentSpread', #{'name' => '$2', 'directives' => '$3'}, extract_location('$1')). -InlineFragment -> '...' 'on' TypeCondition SelectionSet : build_ast_node('InlineFragment', #{'type_condition' => '$3', 'selection_set' => '$4'}, #{'start_line' => extract_line('$1')}). -InlineFragment -> '...' 'on' TypeCondition Directives SelectionSet : build_ast_node('InlineFragment', #{'type_condition' => '$3', 'directives' => '$4', 'selection_set' => '$5'}, #{'start_line' => extract_line('$1')}). -InlineFragment -> '...' Directives SelectionSet : build_ast_node('InlineFragment', #{'directives' => '$2', 'selection_set' => '$3'}, #{'start_line' => extract_line('$1')}). -InlineFragment -> '...' SelectionSet : build_ast_node('InlineFragment', #{'selection_set' => '$2'}, #{'start_line' => extract_line('$1')}). +InlineFragment -> '...' 'on' TypeCondition SelectionSet : build_ast_node('InlineFragment', #{'type_condition' => '$3', 'selection_set' => '$4'}, extract_location('$1')). +InlineFragment -> '...' 'on' TypeCondition Directives SelectionSet : build_ast_node('InlineFragment', #{'type_condition' => '$3', 'directives' => '$4', 'selection_set' => '$5'}, extract_location('$1')). +InlineFragment -> '...' Directives SelectionSet : build_ast_node('InlineFragment', #{'directives' => '$2', 'selection_set' => '$3'}, extract_location('$1')). +InlineFragment -> '...' SelectionSet : build_ast_node('InlineFragment', #{'selection_set' => '$2'}, extract_location('$1')). FragmentName -> NameWithoutOn : extract_binary('$1'). 
-Field -> Name : build_ast_node('Field', #{'name' => extract_binary('$1')}, #{'start_line' => extract_line('$1')}). -Field -> Name Arguments : build_ast_node('Field', #{'name' => extract_binary('$1'), 'arguments' => '$2'}, #{'start_line' => extract_line('$1')}). -Field -> Name Directives : build_ast_node('Field', #{'name' => extract_binary('$1'), 'directives' => '$2'}, #{'start_line' => extract_line('$1')}). -Field -> Name SelectionSet : build_ast_node('Field', #{'name' => extract_binary('$1'), 'selection_set' => '$2'}, #{'start_line' => extract_line('$1')}). -Field -> Name Directives SelectionSet : build_ast_node('Field', #{'name' => extract_binary('$1'), 'directives' => '$2', 'selection_set' => '$3'}, #{'start_line' => extract_line('$1')}). -Field -> Name Arguments SelectionSet : build_ast_node('Field', #{'name' => extract_binary('$1'), 'arguments' => '$2', 'selection_set' => '$3'}, #{'start_line' => extract_line('$1')}). -Field -> Name Arguments Directives : build_ast_node('Field', #{'name' => extract_binary('$1'), 'arguments' => '$2', 'directives' => '$3'}, #{'start_line' => extract_line('$1')}). -Field -> Name Arguments Directives SelectionSet : build_ast_node('Field', #{'name' => extract_binary('$1'), 'arguments' => '$2', 'directives' => '$3', 'selection_set' => '$4'}, #{'start_line' => extract_line('$1')}). -Field -> Alias Name : build_ast_node('Field', #{'alias' => extract_binary('$1'), 'name' => extract_binary('$2')}, #{'start_line' => extract_line('$1')}). -Field -> Alias Name Arguments : build_ast_node('Field', #{'alias' => extract_binary('$1'), 'name' => extract_binary('$2'), 'arguments' => '$3'}, #{'start_line' => extract_line('$1')}). -Field -> Alias Name SelectionSet : build_ast_node('Field', #{'alias' => extract_binary('$1'), 'name' => extract_binary('$2'), 'selection_set' => '$3'}, #{'start_line' => extract_line('$1')}). 
-Field -> Alias Name Arguments SelectionSet : build_ast_node('Field', #{'alias' => extract_binary('$1'), 'name' => extract_binary('$2'), 'arguments' => '$3', 'selection_set' => '$4'}, #{'start_line' => extract_line('$1')}). -Field -> Alias Name Directives : build_ast_node('Field', #{'alias' => extract_binary('$1'), 'name' => extract_binary('$2'), 'directives' => '$3'}, #{'start_line' => extract_line('$1')}). -Field -> Alias Name Arguments Directives : build_ast_node('Field', #{'alias' => extract_binary('$1'), 'name' => extract_binary('$2'), 'arguments' => '$3', 'directives' => '$4'}, #{'start_line' => extract_line('$1')}). -Field -> Alias Name Directives SelectionSet : build_ast_node('Field', #{'alias' => extract_binary('$1'), 'name' => extract_binary('$2'), 'directives' => '$3', 'selection_set' => '$4'}, #{'start_line' => extract_line('$1')}). -Field -> Alias Name Arguments Directives SelectionSet : build_ast_node('Field', #{'alias' => extract_binary('$1'), 'name' => extract_binary('$2'), 'arguments' => '$3', 'directives' => '$4', 'selection_set' => '$5'}, #{'start_line' => extract_line('$1')}). +Field -> Name : build_ast_node('Field', #{'name' => extract_binary('$1')}, extract_location('$1')). +Field -> Name Arguments : build_ast_node('Field', #{'name' => extract_binary('$1'), 'arguments' => '$2'}, extract_location('$1')). +Field -> Name Directives : build_ast_node('Field', #{'name' => extract_binary('$1'), 'directives' => '$2'}, extract_location('$1')). +Field -> Name SelectionSet : build_ast_node('Field', #{'name' => extract_binary('$1'), 'selection_set' => '$2'}, extract_location('$1')). +Field -> Name Directives SelectionSet : build_ast_node('Field', #{'name' => extract_binary('$1'), 'directives' => '$2', 'selection_set' => '$3'}, extract_location('$1')). +Field -> Name Arguments SelectionSet : build_ast_node('Field', #{'name' => extract_binary('$1'), 'arguments' => '$2', 'selection_set' => '$3'}, extract_location('$1')). 
+Field -> Name Arguments Directives : build_ast_node('Field', #{'name' => extract_binary('$1'), 'arguments' => '$2', 'directives' => '$3'}, extract_location('$1')). +Field -> Name Arguments Directives SelectionSet : build_ast_node('Field', #{'name' => extract_binary('$1'), 'arguments' => '$2', 'directives' => '$3', 'selection_set' => '$4'}, extract_location('$1')). +Field -> Alias Name : build_ast_node('Field', #{'alias' => extract_binary('$1'), 'name' => extract_binary('$2')}, extract_location('$1')). +Field -> Alias Name Arguments : build_ast_node('Field', #{'alias' => extract_binary('$1'), 'name' => extract_binary('$2'), 'arguments' => '$3'}, extract_location('$1')). +Field -> Alias Name SelectionSet : build_ast_node('Field', #{'alias' => extract_binary('$1'), 'name' => extract_binary('$2'), 'selection_set' => '$3'}, extract_location('$1')). +Field -> Alias Name Arguments SelectionSet : build_ast_node('Field', #{'alias' => extract_binary('$1'), 'name' => extract_binary('$2'), 'arguments' => '$3', 'selection_set' => '$4'}, extract_location('$1')). +Field -> Alias Name Directives : build_ast_node('Field', #{'alias' => extract_binary('$1'), 'name' => extract_binary('$2'), 'directives' => '$3'}, extract_location('$1')). +Field -> Alias Name Arguments Directives : build_ast_node('Field', #{'alias' => extract_binary('$1'), 'name' => extract_binary('$2'), 'arguments' => '$3', 'directives' => '$4'}, extract_location('$1')). +Field -> Alias Name Directives SelectionSet : build_ast_node('Field', #{'alias' => extract_binary('$1'), 'name' => extract_binary('$2'), 'directives' => '$3', 'selection_set' => '$4'}, extract_location('$1')). +Field -> Alias Name Arguments Directives SelectionSet : build_ast_node('Field', #{'alias' => extract_binary('$1'), 'name' => extract_binary('$2'), 'arguments' => '$3', 'directives' => '$4', 'selection_set' => '$5'}, extract_location('$1')). Alias -> Name ':' : '$1'. Arguments -> '(' ArgumentList ')' : '$2'. ArgumentList -> Argument : ['$1']. 
ArgumentList -> Argument ArgumentList : ['$1'|'$2']. -Argument -> NameWithoutOn ':' Value : build_ast_node('Argument', #{name => extract_binary('$1'), value => '$3'}, #{'start_line' => extract_line('$1')}). -Argument -> 'on' ':' Value : build_ast_node('Argument', #{name => extract_binary('$1'), value => '$3'}, #{'start_line' => extract_line('$1')}). +Argument -> NameWithoutOn ':' Value : build_ast_node('Argument', #{name => extract_binary('$1'), value => '$3'}, extract_location('$1')). +Argument -> 'on' ':' Value : build_ast_node('Argument', #{name => extract_binary('$1'), value => '$3'}, extract_location('$1')). Directives -> Directive : ['$1']. Directives -> Directive Directives : ['$1'|'$2']. -Directive -> '@' NameWithoutOn : build_ast_node('Directive', #{name => extract_binary('$2')}, #{'start_line' => extract_line('$1')}). -Directive -> '@' NameWithoutOn Arguments : build_ast_node('Directive', #{name => extract_binary('$2'), 'arguments' => '$3'}, #{'start_line' => extract_line('$1')}). -Directive -> '@' 'on' : build_ast_node('Directive', #{name => extract_binary('$2')}, #{'start_line' => extract_line('$1')}). -Directive -> '@' 'on' Arguments : build_ast_node('Directive', #{name => extract_binary('$2'), 'arguments' => '$3'}, #{'start_line' => extract_line('$1')}). +Directive -> '@' NameWithoutOn : build_ast_node('Directive', #{name => extract_binary('$2')}, extract_location('$1')). +Directive -> '@' NameWithoutOn Arguments : build_ast_node('Directive', #{name => extract_binary('$2'), 'arguments' => '$3'}, extract_location('$1')). +Directive -> '@' 'on' : build_ast_node('Directive', #{name => extract_binary('$2')}, extract_location('$1')). +Directive -> '@' 'on' Arguments : build_ast_node('Directive', #{name => extract_binary('$2'), 'arguments' => '$3'}, extract_location('$1')). NameWithoutOn -> 'name' : '$1'. -NameWithoutOn -> 'query' : extract_binary('$1'). -NameWithoutOn -> 'mutation' : extract_binary('$1'). 
-NameWithoutOn -> 'subscription' : extract_binary('$1'). -NameWithoutOn -> 'fragment' : extract_binary('$1'). -NameWithoutOn -> 'type' : extract_binary('$1'). -NameWithoutOn -> 'implements' : extract_binary('$1'). -NameWithoutOn -> 'interface' : extract_binary('$1'). -NameWithoutOn -> 'union' : extract_binary('$1'). -NameWithoutOn -> 'scalar' : extract_binary('$1'). -NameWithoutOn -> 'schema' : extract_binary('$1'). -NameWithoutOn -> 'enum' : extract_binary('$1'). -NameWithoutOn -> 'input' : extract_binary('$1'). -NameWithoutOn -> 'extend' : extract_binary('$1'). -NameWithoutOn -> 'directive' : extract_binary('$1'). +NameWithoutOn -> 'query' : '$1'. +NameWithoutOn -> 'mutation' : '$1'. +NameWithoutOn -> 'subscription' : '$1'. +NameWithoutOn -> 'fragment' : '$1'. +NameWithoutOn -> 'type' : '$1'. +NameWithoutOn -> 'implements' : '$1'. +NameWithoutOn -> 'interface' : '$1'. +NameWithoutOn -> 'union' : '$1'. +NameWithoutOn -> 'scalar' : '$1'. +NameWithoutOn -> 'schema' : '$1'. +NameWithoutOn -> 'enum' : '$1'. +NameWithoutOn -> 'input' : '$1'. +NameWithoutOn -> 'extend' : '$1'. +NameWithoutOn -> 'directive' : '$1'. Name -> NameWithoutOn : '$1'. Name -> 'on' : extract_binary('$1'). Value -> Variable : '$1'. -Value -> int_value : build_ast_node('IntValue', #{'value' => extract_integer('$1')}, #{'start_line' => extract_line('$1')}). -Value -> float_value : build_ast_node('FloatValue', #{'value' => extract_float('$1')}, #{'start_line' => extract_line('$1')}). -Value -> block_string_value : build_ast_node('StringValue', #{'value' => extract_quoted_block_string_token('$1')}, #{'start_line' => extract_line('$1')}). -Value -> string_value : build_ast_node('StringValue', #{'value' => extract_quoted_string_token('$1')}, #{'start_line' => extract_line('$1')}). -Value -> boolean_value : build_ast_node('BooleanValue', #{'value' => extract_boolean('$1')}, #{'start_line' => extract_line('$1')}). -Value -> null : build_ast_node('NullValue', #{}, #{'start_line' => extract_line('$1')}). 
-Value -> EnumValue : build_ast_node('EnumValue', #{'value' => '$1'}, #{'start_line' => extract_line('$1')}). -Value -> ListValue : build_ast_node('ListValue', #{'values' => '$1'}, #{'start_line' => extract_child_line('$1')}). -Value -> ObjectValue : build_ast_node('ObjectValue', #{'fields' => '$1'}, #{'start_line' => extract_child_line('$1')}). - +Value -> int_value : build_ast_node('IntValue', #{'value' => extract_integer('$1')}, extract_location('$1')). +Value -> float_value : build_ast_node('FloatValue', #{'value' => extract_float('$1')}, extract_location('$1')). +Value -> block_string_value : build_ast_node('StringValue', #{'value' => extract_quoted_block_string_token('$1')}, extract_location('$1')). +Value -> string_value : build_ast_node('StringValue', #{'value' => extract_quoted_string_token('$1')}, extract_location('$1')). +Value -> boolean_value : build_ast_node('BooleanValue', #{'value' => extract_boolean('$1')}, extract_location('$1')). +Value -> null : build_ast_node('NullValue', #{}, extract_location('$1')). +Value -> EnumValue : build_ast_node('EnumValue', #{'value' => '$1'}, extract_location('$1')). +Value -> ListValue : build_ast_node('ListValue', #{'values' => '$1'}, extract_child_location('$1')). +Value -> ObjectValue : build_ast_node('ObjectValue', #{'fields' => '$1'}, extract_child_location('$1')). EnumValue -> Name : extract_binary('$1'). @@ -161,7 +161,10 @@ ObjectValue -> '{' '}' : []. ObjectValue -> '{' ObjectFields '}' : '$2'. ObjectFields -> ObjectField : ['$1']. ObjectFields -> ObjectField ObjectFields : ['$1'|'$2']. -ObjectField -> Name ':' Value : build_ast_node('ObjectField', #{'name' => extract_binary('$1'), 'value' => '$3'}, #{'start_line' => extract_line('$1')}). +ObjectField -> Name ':' Value : build_ast_node('ObjectField', #{'name' => extract_binary('$1'), 'value' => '$3'}, extract_location('$1')). + +DescriptionDefinition -> string_value : extract_quoted_string_token('$1'). 
+DescriptionDefinition -> block_string_value : extract_quoted_block_string_token('$1'). TypeDefinition -> SchemaDefinition : '$1'. TypeDefinition -> ObjectTypeDefinition : '$1'. @@ -173,27 +176,54 @@ TypeDefinition -> InputObjectTypeDefinition : '$1'. TypeDefinition -> TypeExtensionDefinition : '$1'. TypeDefinition -> DirectiveDefinition : '$1'. +TypeDefinition -> DescriptionDefinition SchemaDefinition : put_description('$2', '$1'). +TypeDefinition -> DescriptionDefinition ObjectTypeDefinition : put_description('$2', '$1'). +TypeDefinition -> DescriptionDefinition InterfaceTypeDefinition : put_description('$2', '$1'). +TypeDefinition -> DescriptionDefinition UnionTypeDefinition : put_description('$2', '$1'). +TypeDefinition -> DescriptionDefinition ScalarTypeDefinition : put_description('$2', '$1'). +TypeDefinition -> DescriptionDefinition EnumTypeDefinition : put_description('$2', '$1'). +TypeDefinition -> DescriptionDefinition InputObjectTypeDefinition : put_description('$2', '$1'). +TypeDefinition -> DescriptionDefinition DirectiveDefinition : put_description('$2', '$1'). + DirectiveDefinition -> 'directive' '@' Name 'on' DirectiveDefinitionLocations : - build_ast_node('DirectiveDefinition', #{'name' => extract_binary('$3'), 'locations' =>'$5'}, #{'start_line' => extract_line('$1')}). + build_ast_node('DirectiveDefinition', #{'name' => extract_binary('$3'), 'locations' => extract_directive_locations('$5')}, extract_location('$1')). DirectiveDefinition -> 'directive' '@' Name ArgumentsDefinition 'on' DirectiveDefinitionLocations : - build_ast_node('DirectiveDefinition', #{'name' => extract_binary('$3'), 'arguments' => '$4', 'locations' =>'$6'}, #{'start_line' => extract_line('$1'), 'end_line' => extract_line('$1')}). + build_ast_node('DirectiveDefinition', #{'name' => extract_binary('$3'), 'arguments' => '$4', 'locations' => extract_directive_locations('$6')}, extract_location('$1')). 
DirectiveDefinition -> 'directive' '@' Name 'on' DirectiveDefinitionLocations Directives : - build_ast_node('DirectiveDefinition', #{'name' => extract_binary('$3'), 'directives' => '$6', 'locations' => '$5'}, #{'start_line' => extract_line('$1')}). + build_ast_node('DirectiveDefinition', #{'name' => extract_binary('$3'), 'directives' => '$6', 'locations' => extract_directive_locations('$5')}, extract_location('$1')). DirectiveDefinition -> 'directive' '@' Name ArgumentsDefinition 'on' DirectiveDefinitionLocations Directives : - build_ast_node('DirectiveDefinition', #{'name' => extract_binary('$3'), 'arguments' => '$4', 'directives' => '$7', 'locations' =>'$6'}, #{'start_line' => extract_line('$1'), 'end_line' => extract_line('$1')}). + build_ast_node('DirectiveDefinition', #{'name' => extract_binary('$3'), 'arguments' => '$4', 'directives' => '$7', 'locations' => extract_directive_locations('$6')}, extract_location('$1')). + +DirectiveDefinition -> 'directive' '@' Name 'repeatable' 'on' DirectiveDefinitionLocations : + build_ast_node('DirectiveDefinition', #{'name' => extract_binary('$3'), 'locations' => extract_directive_locations('$6'), 'repeatable' => true}, extract_location('$1')). +DirectiveDefinition -> 'directive' '@' Name ArgumentsDefinition 'repeatable' 'on' DirectiveDefinitionLocations : + build_ast_node('DirectiveDefinition', #{'name' => extract_binary('$3'), 'arguments' => '$4', 'locations' => extract_directive_locations('$7'), 'repeatable' => true}, extract_location('$1')). + +DirectiveDefinition -> 'directive' '@' Name 'repeatable' 'on' DirectiveDefinitionLocations Directives : + build_ast_node('DirectiveDefinition', #{'name' => extract_binary('$3'), 'directives' => '$7', 'locations' => extract_directive_locations('$6'), 'repeatable' => true}, extract_location('$1')). 
+DirectiveDefinition -> 'directive' '@' Name ArgumentsDefinition 'repeatable' 'on' DirectiveDefinitionLocations Directives : + build_ast_node('DirectiveDefinition', #{'name' => extract_binary('$3'), 'arguments' => '$4', 'directives' => '$8', 'locations' => extract_directive_locations('$7'), 'repeatable' => true}, extract_location('$1')). + + -SchemaDefinition -> 'schema' '{' FieldDefinitionList '}' : build_ast_node('SchemaDefinition', #{'fields' => '$3'}, #{'start_line' => extract_line('$1')}). -SchemaDefinition -> 'schema' Directives '{' FieldDefinitionList '}' : build_ast_node('SchemaDefinition', #{'directives' => '$2', 'fields' => '$4'}, #{'start_line' => extract_line('$1')}). +SchemaDefinition -> 'schema' : build_ast_node('SchemaDeclaration', #{}, extract_location('$1')). +SchemaDefinition -> 'schema' Directives : build_ast_node('SchemaDeclaration', #{'directives' => '$2'}, extract_location('$1')). +SchemaDefinition -> 'schema' '{' FieldDefinitionList '}' : build_ast_node('SchemaDeclaration', #{'fields' => '$3'}, extract_location('$1')). +SchemaDefinition -> 'schema' Directives '{' FieldDefinitionList '}' : build_ast_node('SchemaDeclaration', #{'directives' => '$2', 'fields' => '$4'}, extract_location('$1')). +ObjectTypeDefinition -> 'type' Name : + build_ast_node('ObjectTypeDefinition', #{'name' => extract_binary('$2')}, extract_location('$1')). +ObjectTypeDefinition -> 'type' Name Directives : + build_ast_node('ObjectTypeDefinition', #{'name' => extract_binary('$2'), 'directives' => '$3'}, extract_location('$1')). ObjectTypeDefinition -> 'type' Name '{' FieldDefinitionList '}' : - build_ast_node('ObjectTypeDefinition', #{'name' => extract_binary('$2'), 'fields' => '$4'}, #{'start_line' => extract_line('$1'), 'end_line' => extract_line('$5')}). + build_ast_node('ObjectTypeDefinition', #{'name' => extract_binary('$2'), 'fields' => '$4'}, extract_location('$1')). 
ObjectTypeDefinition -> 'type' Name Directives '{' FieldDefinitionList '}' : - build_ast_node('ObjectTypeDefinition', #{'name' => extract_binary('$2'), 'directives' => '$3', 'fields' => '$5'}, #{'start_line' => extract_line('$1'), 'end_line' => extract_line('$6')}). + build_ast_node('ObjectTypeDefinition', #{'name' => extract_binary('$2'), 'directives' => '$3', 'fields' => '$5'}, extract_location('$1')). ObjectTypeDefinition -> 'type' Name ImplementsInterfaces '{' FieldDefinitionList '}' : - build_ast_node('ObjectTypeDefinition', #{'name' => extract_binary('$2'), 'interfaces' => '$3', 'fields' => '$5'}, #{'start_line' => extract_line('$1'), 'end_line' => extract_line('$6')}). + build_ast_node('ObjectTypeDefinition', #{'name' => extract_binary('$2'), 'interfaces' => '$3', 'fields' => '$5'}, extract_location('$1')). ObjectTypeDefinition -> 'type' Name ImplementsInterfaces Directives '{' FieldDefinitionList '}' : - build_ast_node('ObjectTypeDefinition', #{'name' => extract_binary('$2'), 'interfaces' => '$3', 'directives' => '$4', 'fields' => '$6'}, #{'start_line' => extract_line('$1'), 'end_line' => extract_line('$7')}). + build_ast_node('ObjectTypeDefinition', #{'name' => extract_binary('$2'), 'interfaces' => '$3', 'directives' => '$4', 'fields' => '$6'}, extract_location('$1')). ImplementsInterfaces -> 'implements' NamedTypeList : '$2'. @@ -202,144 +232,159 @@ NamedTypeList -> NamedType NamedTypeList : ['$1'|'$2']. FieldDefinitionList -> FieldDefinition : ['$1']. FieldDefinitionList -> FieldDefinition FieldDefinitionList : ['$1'|'$2']. -FieldDefinition -> Name ':' Type : build_ast_node('FieldDefinition', #{'name' => extract_binary('$1'), 'type' => '$3'}, #{'start_line' => extract_line('$1')}). -FieldDefinition -> Name ':' Type Directives : build_ast_node('FieldDefinition', #{'name' => extract_binary('$1'), 'type' => '$3', 'directives' => '$4'}, #{'start_line' => extract_line('$1')}). 
-FieldDefinition -> Name ArgumentsDefinition ':' Type : build_ast_node('FieldDefinition', #{'name' => extract_binary('$1'), 'arguments' => '$2', 'type' => '$4'}, #{'start_line' => extract_line('$1')}). -FieldDefinition -> Name Directives ':' Type : build_ast_node('FieldDefinition', #{'name' => extract_binary('$1'), 'directives' => '$2', 'type' => '$4'}, #{'start_line' => extract_line('$1')}). -FieldDefinition -> Name ArgumentsDefinition ':' Type Directives : build_ast_node('FieldDefinition', #{'name' => extract_binary('$1'), 'arguments' => '$2', 'directives' => '$5', 'type' => '$4'}, #{'start_line' => extract_line('$1')}). +FieldDefinitionList -> DescriptionDefinition FieldDefinition : [put_description('$2', '$1')]. +FieldDefinitionList -> DescriptionDefinition FieldDefinition FieldDefinitionList : [put_description('$2', '$1')|'$3']. + +FieldDefinition -> Name ':' Type : build_ast_node('FieldDefinition', #{'name' => extract_binary('$1'), 'type' => '$3'}, extract_location('$1')). +FieldDefinition -> Name ':' Type Directives : build_ast_node('FieldDefinition', #{'name' => extract_binary('$1'), 'type' => '$3', 'directives' => '$4'}, extract_location('$1')). +FieldDefinition -> Name ArgumentsDefinition ':' Type : build_ast_node('FieldDefinition', #{'name' => extract_binary('$1'), 'arguments' => '$2', 'type' => '$4'}, extract_location('$1')). +FieldDefinition -> Name Directives ':' Type : build_ast_node('FieldDefinition', #{'name' => extract_binary('$1'), 'directives' => '$2', 'type' => '$4'}, extract_location('$1')). +FieldDefinition -> Name ArgumentsDefinition ':' Type Directives : build_ast_node('FieldDefinition', #{'name' => extract_binary('$1'), 'arguments' => '$2', 'directives' => '$5', 'type' => '$4'}, extract_location('$1')). ArgumentsDefinition -> '(' InputValueDefinitionList ')' : '$2'. InputValueDefinitionList -> InputValueDefinition : ['$1']. InputValueDefinitionList -> InputValueDefinition InputValueDefinitionList : ['$1'|'$2']. 
-InputValueDefinition -> Name ':' Type : build_ast_node('InputValueDefinition', #{'name' => extract_binary('$1'), 'type' => '$3'}, #{'start_line' => extract_line('$1')}). -InputValueDefinition -> Name ':' Type Directives : build_ast_node('InputValueDefinition', #{'name' => extract_binary('$1'), 'type' => '$3', 'directives' => '$4'}, #{'start_line' => extract_line('$1')}). -InputValueDefinition -> Name ':' Type DefaultValue : build_ast_node('InputValueDefinition', #{'name' => extract_binary('$1'), 'type' => '$3', 'default_value' => '$4'}, #{'start_line' => extract_line('$1')}). -InputValueDefinition -> Name ':' Type DefaultValue Directives : build_ast_node('InputValueDefinition', #{'name' => extract_binary('$1'), 'type' => '$3', 'default_value' => '$4', 'directives' => '$5'}, #{'start_line' => extract_line('$1')}). +InputValueDefinitionList -> DescriptionDefinition InputValueDefinition : [put_description('$2', '$1')]. +InputValueDefinitionList -> DescriptionDefinition InputValueDefinition InputValueDefinitionList : [put_description('$2', '$1')|'$3']. + +InputValueDefinition -> Name ':' Type : build_ast_node('InputValueDefinition', #{'name' => extract_binary('$1'), 'type' => '$3'}, extract_location('$1')). +InputValueDefinition -> Name ':' Type Directives : build_ast_node('InputValueDefinition', #{'name' => extract_binary('$1'), 'type' => '$3', 'directives' => '$4'}, extract_location('$1')). +InputValueDefinition -> Name ':' Type DefaultValue : build_ast_node('InputValueDefinition', #{'name' => extract_binary('$1'), 'type' => '$3', 'default_value' => '$4'}, extract_location('$1')). +InputValueDefinition -> Name ':' Type DefaultValue Directives : build_ast_node('InputValueDefinition', #{'name' => extract_binary('$1'), 'type' => '$3', 'default_value' => '$4', 'directives' => '$5'}, extract_location('$1')). +InterfaceTypeDefinition -> 'interface' Name : + build_ast_node('InterfaceTypeDefinition', #{'name' => extract_binary('$2')}, extract_location('$1')). 
+InterfaceTypeDefinition -> 'interface' Name Directives : + build_ast_node('InterfaceTypeDefinition', #{'name' => extract_binary('$2'), 'directives' => '$3'}, extract_location('$1')). InterfaceTypeDefinition -> 'interface' Name '{' FieldDefinitionList '}' : - build_ast_node('InterfaceTypeDefinition', #{'name' => extract_binary('$2'), 'fields' => '$4'}, #{'start_line' => extract_line('$1'), 'end_line' => extract_line('$5')}). + build_ast_node('InterfaceTypeDefinition', #{'name' => extract_binary('$2'), 'fields' => '$4'}, extract_location('$1')). InterfaceTypeDefinition -> 'interface' Name Directives '{' FieldDefinitionList '}' : - build_ast_node('InterfaceTypeDefinition', #{'name' => extract_binary('$2'), 'directives' => '$3', 'fields' => '$5'}, #{'start_line' => extract_line('$1'), 'end_line' => extract_line('$6')}). + build_ast_node('InterfaceTypeDefinition', #{'name' => extract_binary('$2'), 'directives' => '$3', 'fields' => '$5'}, extract_location('$1')). +InterfaceTypeDefinition -> 'interface' Name ImplementsInterfaces '{' FieldDefinitionList '}' : + build_ast_node('InterfaceTypeDefinition', #{'name' => extract_binary('$2'), 'interfaces' => '$3', 'fields' => '$5'}, extract_location('$1')). +InterfaceTypeDefinition -> 'interface' Name ImplementsInterfaces Directives '{' FieldDefinitionList '}' : + build_ast_node('InterfaceTypeDefinition', #{'name' => extract_binary('$2'), 'interfaces' => '$3', 'directives' => '$4', 'fields' => '$6'}, extract_location('$1')). + +UnionTypeDefinition -> 'union' Name : + build_ast_node('UnionTypeDefinition', #{'name' => extract_binary('$2')}, extract_location('$1')). +UnionTypeDefinition -> 'union' Name Directives : + build_ast_node('UnionTypeDefinition', #{'name' => extract_binary('$2'), 'directives' => '$3'}, extract_location('$1')). UnionTypeDefinition -> 'union' Name '=' UnionMembers : - build_ast_node('UnionTypeDefinition', #{'name' => extract_binary('$2'), 'types' => '$4'}, #{'start_line' => extract_line('$1')}). 
+ build_ast_node('UnionTypeDefinition', #{'name' => extract_binary('$2'), 'types' => '$4'}, extract_location('$1')). UnionTypeDefinition -> 'union' Name Directives '=' UnionMembers : - build_ast_node('UnionTypeDefinition', #{'name' => extract_binary('$2'), 'directives' => '$3', 'types' => '$5'}, #{'start_line' => extract_line('$1')}). + build_ast_node('UnionTypeDefinition', #{'name' => extract_binary('$2'), 'directives' => '$3', 'types' => '$5'}, extract_location('$1')). UnionMembers -> NamedType : ['$1']. UnionMembers -> NamedType '|' UnionMembers : ['$1'|'$3']. +UnionMembers -> '|' NamedType '|' UnionMembers : ['$2'|'$4']. -ScalarTypeDefinition -> 'scalar' Name : build_ast_node('ScalarTypeDefinition', #{'name' => extract_binary('$2')}, #{'start_line' => extract_line('$2')}). -ScalarTypeDefinition -> 'scalar' Name Directives : build_ast_node('ScalarTypeDefinition', #{'name' => extract_binary('$2'), 'directives' => '$3'}, #{'start_line' => extract_line('$2')}). +ScalarTypeDefinition -> 'scalar' Name : build_ast_node('ScalarTypeDefinition', #{'name' => extract_binary('$2')}, extract_location('$2')). +ScalarTypeDefinition -> 'scalar' Name Directives : build_ast_node('ScalarTypeDefinition', #{'name' => extract_binary('$2'), 'directives' => '$3'}, extract_location('$2')). +EnumTypeDefinition -> 'enum' Name : + build_ast_node('EnumTypeDefinition', #{'name' => extract_binary('$2')}, extract_location('$2')). +EnumTypeDefinition -> 'enum' Name Directives : + build_ast_node('EnumTypeDefinition', #{'name' => extract_binary('$2'), 'directives' => '$3'}, extract_location('$2')). EnumTypeDefinition -> 'enum' Name '{' EnumValueDefinitionList '}': - build_ast_node('EnumTypeDefinition', #{'name' => extract_binary('$2'), 'values' => '$4'}, #{'start_line' => extract_line('$2'), 'end_line' => extract_line('$5')}). + build_ast_node('EnumTypeDefinition', #{'name' => extract_binary('$2'), 'values' => '$4'}, extract_location('$2')). 
EnumTypeDefinition -> 'enum' Name Directives '{' EnumValueDefinitionList '}': - build_ast_node('EnumTypeDefinition', #{'name' => extract_binary('$2'), 'directives' => '$3', 'values' => '$5'}, #{'start_line' => extract_line('$2'), 'end_line' => extract_line('$6')}). + build_ast_node('EnumTypeDefinition', #{'name' => extract_binary('$2'), 'directives' => '$3', 'values' => '$5'}, extract_location('$2')). EnumValueDefinitionList -> EnumValueDefinition : ['$1']. EnumValueDefinitionList -> EnumValueDefinition EnumValueDefinitionList : ['$1'|'$2']. +EnumValueDefinitionList -> DescriptionDefinition EnumValueDefinition : [put_description('$2', '$1')]. +EnumValueDefinitionList -> DescriptionDefinition EnumValueDefinition EnumValueDefinitionList : [put_description('$2', '$1')|'$3']. + DirectiveDefinitionLocations -> Name : [extract_binary('$1')]. DirectiveDefinitionLocations -> Name '|' DirectiveDefinitionLocations : [extract_binary('$1')|'$3']. +DirectiveDefinitionLocations -> '|' Name '|' DirectiveDefinitionLocations : [extract_binary('$2')|'$4']. -EnumValueDefinition -> EnumValue : build_ast_node('EnumValueDefinition', #{'value' => extract_binary('$1')}, #{'start_line' => extract_line('$1')}). -EnumValueDefinition -> EnumValue Directives : build_ast_node('EnumValueDefinition', #{'value' => extract_binary('$1'), 'directives' => '$2'}, #{'start_line' => extract_line('$1')}). - +EnumValueDefinition -> EnumValue : build_ast_node('EnumValueDefinition', #{'value' => extract_binary('$1')}, extract_location('$1')). +EnumValueDefinition -> EnumValue Directives : build_ast_node('EnumValueDefinition', #{'value' => extract_binary('$1'), 'directives' => '$2'}, extract_location('$1')). +InputObjectTypeDefinition -> 'input' Name : + build_ast_node('InputObjectTypeDefinition', #{'name' => extract_binary('$2')}, extract_location('$2')). 
+InputObjectTypeDefinition -> 'input' Name Directives : + build_ast_node('InputObjectTypeDefinition', #{'name' => extract_binary('$2'), 'directives' => '$3'}, extract_location('$2')). InputObjectTypeDefinition -> 'input' Name '{' InputValueDefinitionList '}' : - build_ast_node('InputObjectTypeDefinition', #{'name' => extract_binary('$2'), 'fields' => '$4'}, #{'start_line' => extract_line('$2'), 'end_line' => extract_line('$5')}). + build_ast_node('InputObjectTypeDefinition', #{'name' => extract_binary('$2'), 'fields' => '$4'}, extract_location('$2')). InputObjectTypeDefinition -> 'input' Name Directives '{' InputValueDefinitionList '}' : - build_ast_node('InputObjectTypeDefinition', #{'name' => extract_binary('$2'), 'directives' => '$3', 'fields' => '$5'}, #{'start_line' => extract_line('$2'), 'end_line' => extract_line('$6')}). - - + build_ast_node('InputObjectTypeDefinition', #{'name' => extract_binary('$2'), 'directives' => '$3', 'fields' => '$5'}, extract_location('$2')). + +TypeExtensionDefinition -> 'extend' EnumTypeDefinition : + build_ast_node('TypeExtensionDefinition', #{'definition' => '$2'}, extract_location('$1')). +TypeExtensionDefinition -> 'extend' InputObjectTypeDefinition : + build_ast_node('TypeExtensionDefinition', #{'definition' => '$2'}, extract_location('$1')). +TypeExtensionDefinition -> 'extend' InterfaceTypeDefinition : + build_ast_node('TypeExtensionDefinition', #{'definition' => '$2'}, extract_location('$1')). TypeExtensionDefinition -> 'extend' ObjectTypeDefinition : - build_ast_node('TypeExtensionDefinition', #{'definition' => '$2'}, #{'start_line' => extract_line('$1')}). + build_ast_node('TypeExtensionDefinition', #{'definition' => '$2'}, extract_location('$1')). +TypeExtensionDefinition -> 'extend' ScalarTypeDefinition : + build_ast_node('TypeExtensionDefinition', #{'definition' => '$2'}, extract_location('$1')). 
+TypeExtensionDefinition -> 'extend' SchemaDefinition : + build_ast_node('TypeExtensionDefinition', #{'definition' => '$2'}, extract_location('$1')). +TypeExtensionDefinition -> 'extend' UnionTypeDefinition : + build_ast_node('TypeExtensionDefinition', #{'definition' => '$2'}, extract_location('$1')). +Expect 10. Erlang code. % Line-Level Utilities -extract_line({_Token, Line}) -> - Line; -extract_line({_Token, Line, _Value}) -> - Line; -extract_line(_) -> - nil. - -extract_child_line([Head|_]) -> - extract_child_line(Head); -extract_child_line(#{loc := #{'start_line' := Line}}) -> - Line; -extract_child_line(_) -> - nil. +extract_location({_Token, {Line, Column}}) -> + #{'line' => Line, 'column' => Column}; +extract_location({_Token, {Line, Column}, _Value}) -> + #{'line' => Line, 'column' => Column}; +extract_location(_Other) -> + #{'line' => nil, 'column' => nil}. +extract_child_location([Head|_]) -> + extract_child_location(Head); +extract_child_location(#{loc := #{'line' := Line, 'column' := Column}}) -> + #{'line' => Line, 'column' => Column}; +extract_child_location(_) -> + #{'line' => nil, 'column' => nil}. % Value-level Utilities -extract_atom({Value, _Line}) -> +extract_atom({Value, _Loc}) -> Value. extract_binary(Value) when is_binary(Value) -> Value; -extract_binary({Token, _Line}) -> +extract_binary({Token, _Loc}) -> list_to_binary(atom_to_list(Token)); -extract_binary({_Token, _Line, Value}) -> +extract_binary({_Token, _Loc, Value}) -> list_to_binary(Value). - % AST Generation -build_ast_node(Type, Node, #{'start_line' := nil}) -> +build_ast_node(Type, Node, #{'line' := nil, 'column' := nil}) -> build_ast_node(Type, Node, nil); build_ast_node(Type, Node, Loc) -> 'Elixir.Kernel':struct(list_to_atom("Elixir.Absinthe.Language." ++ atom_to_list(Type)), Node#{loc => Loc}). +% Descriptions -% String +put_description(Node, Description) -> + maps:put(description, Description, Node). 
-extract_quoted_string_token({_Token, _Line, Value}) -> - iolist_to_binary(process_string(lists:sublist(Value, 2, length(Value) - 2))). - -process_string(Escaped) -> - process_string(Escaped, []). - -process_string([], Acc) -> - lists:reverse(Acc); -process_string([$\\, $" | T], Acc) -> - process_string(T, [$" | Acc]); -process_string([$\\, $\\ | T], Acc) -> - process_string(T, [$\\ | Acc]); -process_string([$\\, $/ | T], Acc) -> - process_string(T, [$/ | Acc]); -process_string([$\\, $b | T], Acc) -> - process_string(T, [$\b | Acc]); -process_string([$\\, $f | T], Acc) -> - process_string(T, [$\f | Acc]); -process_string([$\\, $n | T], Acc) -> - process_string(T, [$\n | Acc]); -process_string([$\\, $r | T], Acc) -> - process_string(T, [$\r | Acc]); -process_string([$\\, $t | T], Acc) -> - process_string(T, [$\t | Acc]); -process_string([$\\, $u, A, B, C, D | T], Acc) -> - process_string(T, [hexlist_to_utf8_binary([A, B, C, D]) | Acc]); -process_string([H | T], Acc) -> - process_string(T, [H | Acc]). - -hexlist_to_utf8_binary(HexList) -> - unicode:characters_to_binary([httpd_util:hexlist_to_integer(HexList)]). +% String +extract_quoted_string_token({_Token, _Loc, Value}) -> + unicode:characters_to_binary(lists:sublist(Value, 2, length(Value) - 2)). % Block String -extract_quoted_block_string_token({_Token, _Line, Value}) -> - iolist_to_binary(process_block_string(lists:sublist(Value, 4, length(Value) - 6))). +extract_quoted_block_string_token({_Token, _Loc, Value}) -> + unicode:characters_to_binary(process_block_string(lists:sublist(Value, 4, length(Value) - 6))). -spec process_block_string(string()) -> string(). process_block_string(Escaped) -> @@ -354,7 +399,7 @@ process_block_string([H | T], Acc) -> process_block_string(T, [H | Acc]). -spec block_string_value(string()) -> string(). 
block_string_value(Value) -> - [FirstLine | Rest] = re:split(Value, "\n", [{return,list}]), + [FirstLine | Rest] = string:split(Value, "\n", all), Prefix = indentation_prefix(common_indent(Rest)), UnindentedLines = unindent(Rest, Prefix), Lines = trim_blank_lines([FirstLine | UnindentedLines]), @@ -436,22 +481,34 @@ leading_whitespace([_H | _T], N) -> is_blank(BlockStringValue) -> leading_whitespace(BlockStringValue) == length(BlockStringValue). - % Integer -extract_integer({_Token, _Line, Value}) -> +extract_integer({_Token, _Loc, Value}) -> {Int, []} = string:to_integer(Value), Int. - % Float -extract_float({_Token, _Line, Value}) -> +extract_float({_Token, _Loc, Value}) -> {Float, []} = string:to_float(Value), Float. - % Boolean -extract_boolean({_Token, _Line, "true"}) -> +extract_boolean({_Token, _Loc, "true"}) -> true; -extract_boolean({_Token, _Line, "false"}) -> +extract_boolean({_Token, _Loc, "false"}) -> false. + +% Directive Placement + +do_extract_directive_locations([], Converted) -> + Converted; +do_extract_directive_locations([LocationBinary | Tail], Converted) -> + LocationAtom = to_directive_location_atom(LocationBinary), + do_extract_directive_locations(Tail, [LocationAtom | Converted]). + +to_directive_location_atom(Name) -> + erlang:binary_to_existing_atom(string:lowercase(Name), utf8). + +extract_directive_locations(Locations) -> + Result = do_extract_directive_locations(Locations, []), + lists:sort(Result). 
\ No newline at end of file diff --git a/test/absinthe/adapters/language_conventions_test.exs b/test/absinthe/adapters/language_conventions_test.exs index 2df419aa44..cbac5113a7 100644 --- a/test/absinthe/adapters/language_conventions_test.exs +++ b/test/absinthe/adapters/language_conventions_test.exs @@ -11,6 +11,12 @@ defmodule Absinthe.Adapter.LanguageConventionsTest do test "converts external camelcase variable names to underscore" do assert "foo_bar" = LanguageConventions.to_internal_name("fooBar", :variable) end + + test "converts external field names that do not match internal name" do + assert "foo_bar" = LanguageConventions.to_internal_name("foo_bar", :field) + assert "foo_bar" = LanguageConventions.to_internal_name("FooBar", :field) + assert "foo_bar" = LanguageConventions.to_internal_name("FOO_BAR", :field) + end end describe "to_external_name/2" do diff --git a/test/absinthe/adapters/strict_language_conventions_test.exs b/test/absinthe/adapters/strict_language_conventions_test.exs new file mode 100644 index 0000000000..e25c69a1f8 --- /dev/null +++ b/test/absinthe/adapters/strict_language_conventions_test.exs @@ -0,0 +1,35 @@ +defmodule Absinthe.Adapter.StrictLanguageConventionsTest do + use Absinthe.Case, async: true + + alias Absinthe.Adapter.StrictLanguageConventions + + describe "to_internal_name/2" do + test "converts external camelcase directive names to underscore" do + assert "foo_bar" = StrictLanguageConventions.to_internal_name("fooBar", :directive) + end + + test "converts external camelcase field names to underscore" do + assert "foo_bar" = StrictLanguageConventions.to_internal_name("fooBar", :field) + end + + test "converts external camelcase variable names to underscore" do + assert "foo_bar" = StrictLanguageConventions.to_internal_name("fooBar", :variable) + end + + test "nullifies external field names that do not match internal name" do + assert is_nil(StrictLanguageConventions.to_internal_name("foo_bar", :field)) + assert 
is_nil(StrictLanguageConventions.to_internal_name("FooBar", :field)) + assert is_nil(StrictLanguageConventions.to_internal_name("FOO_BAR", :field)) + end + end + + describe "to_external_name/2" do + test "converts internal underscored field names to camelcase external field names" do + assert "fooBar" = StrictLanguageConventions.to_external_name("foo_bar", :field) + end + + test "converts internal underscored variable names to camelcase external variable names" do + assert "fooBar" = StrictLanguageConventions.to_external_name("foo_bar", :variable) + end + end +end diff --git a/test/absinthe/blueprint/type_reference_test.exs b/test/absinthe/blueprint/type_reference_test.exs index 293c9f00fe..1d3a4ed03b 100644 --- a/test/absinthe/blueprint/type_reference_test.exs +++ b/test/absinthe/blueprint/type_reference_test.exs @@ -2,6 +2,7 @@ defmodule Absinthe.Blueprint.TypeReferenceTest do use Absinthe.Case, async: true alias Absinthe.Blueprint + @moduletag :f describe ".unwrap of Name" do test "is left intact" do @@ -39,4 +40,41 @@ defmodule Absinthe.Blueprint.TypeReferenceTest do assert Blueprint.TypeReference.unwrap(list) == name end end + + describe "name/1 of Name" do + test "returns type name" do + name = %Blueprint.TypeReference.Name{name: "Foo"} + assert Blueprint.TypeReference.name(name) == "Foo" + end + end + + describe "name/1 of List" do + test "returns type name in list" do + name = %Blueprint.TypeReference.Name{name: "Foo"} + list = %Blueprint.TypeReference.List{of_type: name} + assert Blueprint.TypeReference.name(list) == "[Foo]" + end + + test "returns type name, in multiple lists" do + name = %Blueprint.TypeReference.Name{name: "Foo"} + list_1 = %Blueprint.TypeReference.List{of_type: name} + list_2 = %Blueprint.TypeReference.List{of_type: list_1} + assert Blueprint.TypeReference.name(list_2) == "[[Foo]]" + end + end + + describe "name/1 of NonNull" do + test "returns non null type name" do + name = %Blueprint.TypeReference.Name{name: "Foo"} + list = 
%Blueprint.TypeReference.NonNull{of_type: name} + assert Blueprint.TypeReference.name(list) == "Foo!" + end + + test "returns nested non null type name" do + name = %Blueprint.TypeReference.Name{name: "Foo"} + non_null = %Blueprint.TypeReference.NonNull{of_type: name} + list = %Blueprint.TypeReference.List{of_type: non_null} + assert Blueprint.TypeReference.name(list) == "[Foo!]" + end + end end diff --git a/test/absinthe/execution/arguments/input_object_test.exs b/test/absinthe/execution/arguments/input_object_test.exs index 8bba00615f..882cffef0c 100644 --- a/test/absinthe/execution/arguments/input_object_test.exs +++ b/test/absinthe/execution/arguments/input_object_test.exs @@ -155,4 +155,52 @@ defmodule Absinthe.Execution.Arguments.InputObjectTest do run(@graphql, @schema) ) end + + @graphql """ + query ($contact: ContactInput!) { + user(contact: $contact) + } + """ + + test "return field error with suggestion" do + assert_error_message_lines( + [ + ~s(Argument "contact" has invalid value $contact.), + ~s(In field "default_with_stream": Unknown field. Did you mean "default_with_string"?) + ], + run(@graphql, @schema, + variables: %{"contact" => %{"email" => "bubba@joe.com", "default_with_stream" => "asdf"}} + ) + ) + end + + test "return field error with multiple suggestions" do + assert_error_message_lines( + [ + ~s(Argument "contact" has invalid value $contact.), + ~s(In field "contact_typo": Unknown field. Did you mean "contact_type"?), + ~s(In field "default_with_stream": Unknown field. Did you mean "default_with_string"?) + ], + run(@graphql, @schema, + variables: %{ + "contact" => %{ + "email" => "bubba@joe.com", + "default_with_stream" => "asdf", + "contact_typo" => "foo" + } + } + ) + ) + end + + test "return field error with suggestion for non-null field" do + assert_error_message_lines( + [ + ~s(Argument "contact" has invalid value $contact.), + ~s(In field "email": Expected type "String!", found null.), + ~s(In field "mail": Unknown field. 
Did you mean "email"?) + ], + run(@graphql, @schema, variables: %{"contact" => %{"mail" => "bubba@joe.com"}}) + ) + end end diff --git a/test/absinthe/execution/arguments/list_test.exs b/test/absinthe/execution/arguments/list_test.exs index 7019027452..f680364ddd 100644 --- a/test/absinthe/execution/arguments/list_test.exs +++ b/test/absinthe/execution/arguments/list_test.exs @@ -23,7 +23,7 @@ defmodule Absinthe.Execution.Arguments.ListTest do end @graphql """ - query ($names: [Name!]!) { + query ($names: [InputName!]!) { names(names: $names) } """ @@ -35,7 +35,7 @@ defmodule Absinthe.Execution.Arguments.ListTest do end @graphql """ - query ($contacts: [ContactInput]) { + query ($contacts: [ContactInput]!) { contacts(contacts: $contacts) } """ diff --git a/test/absinthe/execution/arguments/scalar_test.exs b/test/absinthe/execution/arguments/scalar_test.exs index 6066bf6acc..e0f3226d66 100644 --- a/test/absinthe/execution/arguments/scalar_test.exs +++ b/test/absinthe/execution/arguments/scalar_test.exs @@ -1,6 +1,9 @@ defmodule Absinthe.Execution.Arguments.ScalarTest do use Absinthe.Case, async: true + alias Absinthe.Blueprint.Input + alias Absinthe.Fixtures.Scalar + @schema Absinthe.Fixtures.ArgumentsSchema @graphql """ @@ -20,4 +23,82 @@ defmodule Absinthe.Execution.Arguments.ScalarTest do test "works when passed to resolution" do assert_data(%{"something" => "bob"}, run(@graphql, @schema)) end + + @graphql """ + query { + raisingThing(name: {firstName: "bob"}) + } + """ + test "invalid scalar does not call parse" do + assert_error_message( + "Argument \"name\" has invalid value {firstName: \"bob\"}.\nIn field \"firstName\": Unknown field.", + run(@graphql, @schema) + ) + end + + @graphql """ + query ($scalarVar: InputNameRaising) { + raisingThing(name: $scalarVar) + } + """ + + @valid_scalars %{ + Input.Boolean => true, + Input.Float => 42.42, + Input.Integer => 42, + Input.String => "bob", + Input.Null => nil + } + test "valid scalar does call parse" do + for 
{expected_struct, value} <- @valid_scalars do + assert_raise( + RuntimeError, + "inputNameRaising scalar parse was called for #{expected_struct}", + fn -> + run(@graphql, @schema, variables: %{"scalarVar" => value}) + end + ) + end + end + + describe "scalar keyword description evaluation" do + Absinthe.Fixtures.FunctionEvaluationHelpers.function_evaluation_test_params() + |> Enum.each(fn %{ + test_label: test_label, + expected_value: expected_value + } -> + test "for #{test_label} (evaluates description to '#{expected_value}')" do + type = Scalar.TestSchemaDescriptionKeyword.__absinthe_type__(unquote(test_label)) + assert type.description == unquote(expected_value) + end + end) + end + + describe "scalar description attribute evaluation" do + Absinthe.Fixtures.FunctionEvaluationHelpers.function_evaluation_test_params() + |> Absinthe.Fixtures.FunctionEvaluationHelpers.filter_test_params_for_description_attribute() + |> Enum.each(fn %{ + test_label: test_label, + expected_value: expected_value + } -> + test "for #{test_label} (evaluates description to '#{expected_value}')" do + type = Scalar.TestSchemaDescriptionAttribute.__absinthe_type__(unquote(test_label)) + + assert type.description == unquote(expected_value) + end + end) + end + + describe "scalar description macro evaluation" do + Absinthe.Fixtures.FunctionEvaluationHelpers.function_evaluation_test_params() + |> Enum.each(fn %{ + test_label: test_label, + expected_value: expected_value + } -> + test "for #{test_label} (evaluates description to '#{expected_value}')" do + type = Scalar.TestSchemaDescriptionMacro.__absinthe_type__(unquote(test_label)) + assert type.description == unquote(expected_value) + end + end) + end end diff --git a/test/absinthe/execution/arguments_test.exs b/test/absinthe/execution/arguments_test.exs index 8eaffc6af8..94811fad0b 100644 --- a/test/absinthe/execution/arguments_test.exs +++ b/test/absinthe/execution/arguments_test.exs @@ -24,6 +24,58 @@ defmodule 
Absinthe.Execution.ArgumentsTest do ) end + describe "open ended scalar" do + @graphql """ + query { + entities(representations: [{__typename: "Product", id: "123"}]) + } + """ + test "supports passing an object directly" do + assert_data( + %{"entities" => [%{"__typename" => "Product", "id" => "123"}]}, + run(@graphql, @schema) + ) + end + + @graphql """ + query($representations: [Any!]!) { + entities(representations: $representations) + } + """ + test "supports passing an object through variables" do + assert_data( + %{"entities" => [%{"__typename" => "Product", "id" => "123"}]}, + run(@graphql, @schema, + variables: %{"representations" => [%{"__typename" => "Product", "id" => "123"}]} + ) + ) + end + + @graphql """ + query { + entities(representations: [{__typename: "Product", id: null}]) + } + """ + test "supports passing an object with a nested value of null" do + assert_data( + %{"entities" => [%{"__typename" => "Product", "id" => nil}]}, + run(@graphql, @schema) + ) + end + + @graphql """ + query { + entities(representations: [{__typename: "Product", contact_type: PHONE}]) + } + """ + test "supports passing an object with a nested value of ENUM" do + assert_data( + %{"entities" => [%{"__typename" => "Product", "contact_type" => "PHONE"}]}, + run(@graphql, @schema) + ) + end + end + describe "errors" do @graphql """ query FindUser { diff --git a/test/absinthe/execution/fragment_spread_test.exs b/test/absinthe/execution/fragment_spread_test.exs index 7c0f2eaecc..d5daf26863 100644 --- a/test/absinthe/execution/fragment_spread_test.exs +++ b/test/absinthe/execution/fragment_spread_test.exs @@ -34,8 +34,12 @@ defmodule Absinthe.Execution.FragmentSpreadTest do """ assert {:ok, - %{errors: [%{locations: [%{column: 0, line: 4}], message: "Unknown type \"Foo\"."}]}} == - Absinthe.run(query, Absinthe.Fixtures.ContactSchema) + %{ + errors: [ + %{locations: [%{column: 1, line: 4}], message: "Unknown type \"Foo\"."}, + %{locations: [%{column: 1, line: 4}], message: 
"Fragment \"F0\" is never used."} + ] + }} == Absinthe.run(query, Absinthe.Fixtures.ContactSchema) end test "errors properly when spreading fragments that don't exist" do @@ -50,7 +54,7 @@ defmodule Absinthe.Execution.FragmentSpreadTest do %{ errors: [ %{ - locations: [%{column: 0, line: 3}], + locations: [%{column: 3, line: 3}], message: "Unknown fragment \"NonExistentFragment\"" } ] diff --git a/test/absinthe/execution/subscription_test.exs b/test/absinthe/execution/subscription_test.exs index a988a2cc71..eabee18521 100644 --- a/test/absinthe/execution/subscription_test.exs +++ b/test/absinthe/execution/subscription_test.exs @@ -1,5 +1,5 @@ defmodule Absinthe.Execution.SubscriptionTest do - use ExUnit.Case + use Absinthe.Case import ExUnit.CaptureLog @@ -7,7 +7,11 @@ defmodule Absinthe.Execution.SubscriptionTest do @behaviour Absinthe.Subscription.Pubsub def start_link() do - Registry.start_link(:unique, __MODULE__) + Registry.start_link(keys: :unique, name: __MODULE__) + end + + def node_name() do + node() end def subscribe(topic) do @@ -81,6 +85,11 @@ defmodule Absinthe.Execution.SubscriptionTest do config fn args, _ -> {:ok, topic: args[:id] || "*"} end + + trigger :update_user, + topic: fn user -> + [user.id, "*"] + end end field :thing, :string do @@ -97,6 +106,41 @@ defmodule Absinthe.Execution.SubscriptionTest do } end end + + field :multiple_topics, :string do + config fn _, _ -> + {:ok, topic: ["topic_1", "topic_2", "topic_3"]} + end + end + + field :other_user, :user do + arg :id, :id + + config fn + args, %{context: %{context_id: context_id, document_id: document_id}} -> + {:ok, topic: args[:id] || "*", context_id: context_id, document_id: document_id} + + args, %{context: %{context_id: context_id}} -> + {:ok, topic: args[:id] || "*", context_id: context_id} + end + end + + field :relies_on_document, :string do + config fn _, %{document: %Absinthe.Blueprint{} = document} -> + %{type: :subscription, name: op_name} = 
Absinthe.Blueprint.current_operation(document) + {:ok, topic: "*", context_id: "*", document_id: op_name} + end + end + end + + mutation do + field :update_user, :user do + arg :id, non_null(:id) + + resolve fn _, %{id: id}, _ -> + {:ok, %{id: id, name: "foo"}} + end + end end end @@ -115,7 +159,7 @@ defmodule Absinthe.Execution.SubscriptionTest do client_id = "abc" assert {:ok, %{"subscribed" => topic}} = - run( + run_subscription( @query, Schema, variables: %{"clientId" => client_id}, @@ -133,6 +177,91 @@ defmodule Absinthe.Execution.SubscriptionTest do } == msg end + @query """ + subscription ($clientId: ID!) { + thing(clientId: $clientId) + } + """ + test "can unsubscribe the current process" do + client_id = "abc" + + assert {:ok, %{"subscribed" => topic}} = + run_subscription( + @query, + Schema, + variables: %{"clientId" => client_id}, + context: %{pubsub: PubSub} + ) + + Absinthe.Subscription.unsubscribe(PubSub, topic) + + Absinthe.Subscription.publish(PubSub, "foo", thing: client_id) + + refute_receive({:broadcast, _}) + end + + @query """ + subscription { + multipleTopics + } + """ + test "schema can provide multiple topics to subscribe to" do + assert {:ok, %{"subscribed" => topic}} = + run_subscription( + @query, + Schema, + variables: %{}, + context: %{pubsub: PubSub} + ) + + msg = %{ + event: "subscription:data", + result: %{data: %{"multipleTopics" => "foo"}}, + topic: topic + } + + Absinthe.Subscription.publish(PubSub, "foo", multiple_topics: "topic_1") + + assert_receive({:broadcast, ^msg}) + + Absinthe.Subscription.publish(PubSub, "foo", multiple_topics: "topic_2") + + assert_receive({:broadcast, ^msg}) + + Absinthe.Subscription.publish(PubSub, "foo", multiple_topics: "topic_3") + + assert_receive({:broadcast, ^msg}) + end + + @query """ + subscription { + multipleTopics + } + """ + test "unsubscription works when multiple topics are provided" do + assert {:ok, %{"subscribed" => topic}} = + run_subscription( + @query, + Schema, + variables: %{}, + 
context: %{pubsub: PubSub} + ) + + Absinthe.Subscription.unsubscribe(PubSub, topic) + + Absinthe.Subscription.publish(PubSub, "foo", multiple_topics: "topic_1") + + refute_receive({:broadcast, _}) + + Absinthe.Subscription.publish(PubSub, "foo", multiple_topics: "topic_2") + + refute_receive({:broadcast, _}) + + Absinthe.Subscription.publish(PubSub, "foo", multiple_topics: "topic_3") + + refute_receive({:broadcast, _}) + end + @query """ subscription ($clientId: ID!) { thing(clientId: $clientId, extra: 1) @@ -144,13 +273,54 @@ defmodule Absinthe.Execution.SubscriptionTest do %{ errors: [ %{ - locations: [%{column: 0, line: 2}], + locations: [%{column: 30, line: 2}], message: "Unknown argument \"extra\" on field \"thing\" of type \"RootSubscriptionType\"." } ] } - } == run(@query, Schema, variables: %{"clientId" => "abc"}, context: %{pubsub: PubSub}) + } == + run_subscription(@query, Schema, + variables: %{"clientId" => "abc"}, + context: %{pubsub: PubSub} + ) + end + + @query """ + subscription ($userId: ID!) { + user(id: $userId) { id name } + } + """ + test "subscription triggers work" do + id = "1" + + assert {:ok, %{"subscribed" => topic}} = + run_subscription( + @query, + Schema, + variables: %{"userId" => id}, + context: %{pubsub: PubSub} + ) + + mutation = """ + mutation ($userId: ID!) 
{ + updateUser(id: $userId) { id name } + } + """ + + assert {:ok, %{data: _}} = + run_subscription(mutation, Schema, + variables: %{"userId" => id}, + context: %{pubsub: PubSub} + ) + + assert_receive({:broadcast, msg}) + + assert %{ + event: "subscription:data", + result: %{data: %{"user" => %{"id" => "1", "name" => "foo"}}}, + topic: topic + } == msg end @query """ @@ -159,8 +329,8 @@ defmodule Absinthe.Execution.SubscriptionTest do } """ test "can return an error tuple from the topic function" do - assert {:ok, %{errors: [%{locations: [%{column: 0, line: 2}], message: "unauthorized"}]}} == - run( + assert {:ok, %{errors: [%{locations: [%{column: 3, line: 2}], message: "unauthorized"}]}} == + run_subscription( @query, Schema, variables: %{"clientId" => "abc"}, @@ -168,6 +338,16 @@ defmodule Absinthe.Execution.SubscriptionTest do ) end + @query """ + subscription Example { + reliesOnDocument + } + """ + test "topic function receives a document" do + assert {:ok, %{"subscribed" => _topic}} = + run_subscription(@query, Schema, context: %{pubsub: PubSub}) + end + @query """ subscription ($clientId: ID!) 
{ thing(clientId: $clientId) @@ -175,7 +355,10 @@ defmodule Absinthe.Execution.SubscriptionTest do """ test "stringifies topics" do assert {:ok, %{"subscribed" => topic}} = - run(@query, Schema, variables: %{"clientId" => "1"}, context: %{pubsub: PubSub}) + run_subscription(@query, Schema, + variables: %{"clientId" => "1"}, + context: %{pubsub: PubSub} + ) Absinthe.Subscription.publish(PubSub, "foo", thing: 1) @@ -189,8 +372,10 @@ defmodule Absinthe.Execution.SubscriptionTest do end test "isn't tripped up if one of the subscription docs raises" do - assert {:ok, %{"subscribed" => _}} = run("subscription { raises }", Schema) - assert {:ok, %{"subscribed" => topic}} = run("subscription { thing(clientId: \"*\")}", Schema) + assert {:ok, %{"subscribed" => _}} = run_subscription("subscription { raises }", Schema) + + assert {:ok, %{"subscribed" => topic}} = + run_subscription("subscription { thing(clientId: \"*\")}", Schema) error_log = capture_log(fn -> @@ -208,15 +393,16 @@ defmodule Absinthe.Execution.SubscriptionTest do assert String.contains?(error_log, "boom") end + @tag :pending test "different subscription docs are batched together" do opts = [context: %{test_pid: self()}] assert {:ok, %{"subscribed" => doc1}} = - run("subscription { user { group { name } id} }", Schema, opts) + run_subscription("subscription { user { group { name } id} }", Schema, opts) # different docs required for test, otherwise they get deduplicated from the start assert {:ok, %{"subscribed" => doc2}} = - run("subscription { user { group { name } id name} }", Schema, opts) + run_subscription("subscription { user { group { name } id name} }", Schema, opts) user = %{id: "1", name: "Alicia", group: %{name: "Elixir Users"}} @@ -236,12 +422,14 @@ defmodule Absinthe.Execution.SubscriptionTest do ctx1 = %{test_pid: self(), user: 1} assert {:ok, %{"subscribed" => doc1}} = - run("subscription { user { group { name } id} }", Schema, context: ctx1) + run_subscription("subscription { user { group { 
name } id} }", Schema, context: ctx1) ctx2 = %{test_pid: self(), user: 2} # different docs required for test, otherwise they get deduplicated from the start assert {:ok, %{"subscribed" => doc2}} = - run("subscription { user { group { name } id name} }", Schema, context: ctx2) + run_subscription("subscription { user { group { name } id name} }", Schema, + context: ctx2 + ) user = %{id: "1", name: "Alicia", group: %{name: "Elixir Users"}} @@ -257,10 +445,173 @@ defmodule Absinthe.Execution.SubscriptionTest do assert_receive(:batch_get_group) end - defp run(query, schema, opts \\ []) do + describe "subscription_ids" do + @query """ + subscription { + otherUser { id } + } + """ + test "subscriptions with the same context_id and same source document have the same subscription_id" do + assert {:ok, %{"subscribed" => doc1}} = + run_subscription(@query, Schema, context: %{context_id: "logged-in"}) + + assert {:ok, %{"subscribed" => doc2}} = + run_subscription(@query, Schema, context: %{context_id: "logged-in"}) + + assert doc1 == doc2 + end + + @query """ + subscription { + otherUser { id } + } + """ + test "subscriptions with different context_id but the same source document have different subscription_ids" do + assert {:ok, %{"subscribed" => doc1}} = + run_subscription(@query, Schema, context: %{context_id: "logged-in"}) + + assert {:ok, %{"subscribed" => doc2}} = + run_subscription(@query, Schema, context: %{context_id: "not-logged-in"}) + + assert doc1 != doc2 + end + + test "subscriptions with same context_id but different source document have different subscription_ids" do + assert {:ok, %{"subscribed" => doc1}} = + run_subscription("subscription { otherUser { id name } }", Schema, + context: %{context_id: "logged-in"} + ) + + assert {:ok, %{"subscribed" => doc2}} = + run_subscription("subscription { otherUser { id } }", Schema, + context: %{context_id: "logged-in"} + ) + + assert doc1 != doc2 + end + + test "subscriptions with different context_id and different 
source document have different subscription_ids" do + assert {:ok, %{"subscribed" => doc1}} = + run_subscription("subscription { otherUser { id name } }", Schema, + context: %{context_id: "logged-in"} + ) + + assert {:ok, %{"subscribed" => doc2}} = + run_subscription("subscription { otherUser { id } }", Schema, + context: %{context_id: "not-logged-in"} + ) + + assert doc1 != doc2 + end + + @query """ + subscription($id: ID!) { otherUser(id: $id) { id } } + """ + test "subscriptions with the same variables & document have the same subscription_ids" do + assert {:ok, %{"subscribed" => doc1}} = + run_subscription(@query, Schema, + variables: %{"id" => "123"}, + context: %{context_id: "logged-in"} + ) + + assert {:ok, %{"subscribed" => doc2}} = + run_subscription(@query, Schema, + variables: %{"id" => "123"}, + context: %{context_id: "logged-in"} + ) + + assert doc1 == doc2 + end + + @query """ + subscription($id: ID!) { otherUser(id: $id) { id } } + """ + test "subscriptions with different variables but same document have different subscription_ids" do + assert {:ok, %{"subscribed" => doc1}} = + run_subscription(@query, Schema, + variables: %{"id" => "123"}, + context: %{context_id: "logged-in"} + ) + + assert {:ok, %{"subscribed" => doc2}} = + run_subscription(@query, Schema, + variables: %{"id" => "456"}, + context: %{context_id: "logged-in"} + ) + + assert doc1 != doc2 + end + + test "document_id can be provided to override the default logic for deriving document_id" do + assert {:ok, %{"subscribed" => doc1}} = + run_subscription("subscription { otherUser { id name } }", Schema, + context: %{context_id: "logged-in", document_id: "abcdef"} + ) + + assert {:ok, %{"subscribed" => doc2}} = + run_subscription("subscription { otherUser { name id } }", Schema, + context: %{context_id: "logged-in", document_id: "abcdef"} + ) + + assert doc1 == doc2 + end + end + + @query """ + subscription ($clientId: ID!) 
{ + thing(clientId: $clientId) + } + """ + test "subscription executes telemetry events", context do + client_id = "abc" + + :telemetry.attach_many( + context.test, + [ + [:absinthe, :execute, :operation, :start], + [:absinthe, :execute, :operation, :stop], + [:absinthe, :subscription, :publish, :start], + [:absinthe, :subscription, :publish, :stop] + ], + fn event, measurements, metadata, config -> + send(self(), {event, measurements, metadata, config}) + end, + %{} + ) + + assert {:ok, %{"subscribed" => topic}} = + run_subscription( + @query, + Schema, + variables: %{"clientId" => client_id}, + context: %{pubsub: PubSub} + ) + + assert_receive {[:absinthe, :execute, :operation, :start], measurements, %{id: id}, _config} + assert System.convert_time_unit(measurements[:system_time], :native, :millisecond) + + assert_receive {[:absinthe, :execute, :operation, :stop], _, %{id: ^id}, _config} + + Absinthe.Subscription.publish(PubSub, "foo", thing: client_id) + assert_receive({:broadcast, msg}) + + assert %{ + event: "subscription:data", + result: %{data: %{"thing" => "foo"}}, + topic: topic + } == msg + + # Subscription events + assert_receive {[:absinthe, :subscription, :publish, :start], _, %{id: id}, _config} + assert_receive {[:absinthe, :subscription, :publish, :stop], _, %{id: ^id}, _config} + + :telemetry.detach(context.test) + end + + defp run_subscription(query, schema, opts \\ []) do opts = Keyword.update(opts, :context, %{pubsub: PubSub}, &Map.put(&1, :pubsub, PubSub)) - case Absinthe.run(query, schema, opts) do + case run(query, schema, opts) do {:ok, %{"subscribed" => topic}} = val -> PubSub.subscribe(topic) val diff --git a/test/absinthe/extensions_test.exs b/test/absinthe/extensions_test.exs index f0889a8a4c..374e0d111e 100644 --- a/test/absinthe/extensions_test.exs +++ b/test/absinthe/extensions_test.exs @@ -41,7 +41,7 @@ defmodule Absinthe.ExtensionsTest do assert bp.result == %{data: %{"foo" => "hello world"}, extensions: %{foo: 1}} end - test 
"Result phase doesn't clober the extensions" do + test "Result phase doesn't clobber the extensions" do doc = "{foo}" pipeline = diff --git a/test/absinthe/formatter_test.exs b/test/absinthe/formatter_test.exs new file mode 100644 index 0000000000..309dbc6870 --- /dev/null +++ b/test/absinthe/formatter_test.exs @@ -0,0 +1,12 @@ +defmodule Absinthe.FormatterTest do + use Absinthe.Case, async: true + + @query """ + { + version + } + """ + test "formats a document" do + assert Absinthe.Formatter.format(@query) == "{\n version\n}\n" + end +end diff --git a/test/absinthe/integration/README.md b/test/absinthe/integration/README.md deleted file mode 100644 index a7c3eb0462..0000000000 --- a/test/absinthe/integration/README.md +++ /dev/null @@ -1,25 +0,0 @@ -# Integration Tests - -Before adding integration tests, please read: - -- The module documentation for `Absinthe.IntegrationCase`, so you know - the expected format for `.graphql` and `.exs` files. -- The contents of `test/absinthe/integration_test.exs`, which sets the - default schema and contains custom assertion logic for some tests. - -## Directory Structure - -Top-level directories should be pretty self-explanatory. If your -integration test is focused on parsing or validation errors (ie, -documents that don't get executed), put it in `parsing/` or -`validation/`, respectively. If your integration test has a passing -(executed) scenario, put it in `execution/`. - -Try to keep the directory structure fairly flat under -`execution/`. Create subdirectories for type of thing, not specific testing -context. Try to keep to existing file naming conventions. - -Feel free to use GraphQL type names, prefixed with `type_` in -filenames (describing types longhand is too verbose/inexact). Use the -example type `T` to indicate any type. 
(Example: -`execution/input_types/null/literal_as_type_[T!]!_element.graphql`) diff --git a/test/absinthe/integration/execution/aliases/alias.exs b/test/absinthe/integration/execution/aliases/alias.exs deleted file mode 100644 index f04ac5692a..0000000000 --- a/test/absinthe/integration/execution/aliases/alias.exs +++ /dev/null @@ -1 +0,0 @@ -{:ok, %{data: %{"widget" => %{"name" => "Foo"}}}} diff --git a/test/absinthe/integration/execution/aliases/alias.graphql b/test/absinthe/integration/execution/aliases/alias.graphql deleted file mode 100644 index 6381d545be..0000000000 --- a/test/absinthe/integration/execution/aliases/alias.graphql +++ /dev/null @@ -1,5 +0,0 @@ -query { - widget: thing(id: "foo") { - name - } -} diff --git a/test/absinthe/integration/execution/aliases/alias_test.exs b/test/absinthe/integration/execution/aliases/alias_test.exs new file mode 100644 index 0000000000..690010434f --- /dev/null +++ b/test/absinthe/integration/execution/aliases/alias_test.exs @@ -0,0 +1,18 @@ +defmodule Elixir.Absinthe.Integration.Execution.Aliases.AliasTest do + use Absinthe.Case, async: true + + # LEAVE ME + + @query """ + query { + widget: thing(id: "foo") { + name + } + } + """ + + test "scenario #1" do + assert {:ok, %{data: %{"widget" => %{"name" => "Foo"}}}} == + Absinthe.run(@query, Absinthe.Fixtures.Things.MacroSchema, []) + end +end diff --git a/test/absinthe/integration/execution/aliases/all_caps_alias.exs b/test/absinthe/integration/execution/aliases/all_caps_alias.exs deleted file mode 100644 index 772a88e600..0000000000 --- a/test/absinthe/integration/execution/aliases/all_caps_alias.exs +++ /dev/null @@ -1 +0,0 @@ -{:ok, %{data: %{"thing" => %{"FOO" => "Foo"}}}} diff --git a/test/absinthe/integration/execution/aliases/all_caps_alias.graphql b/test/absinthe/integration/execution/aliases/all_caps_alias.graphql deleted file mode 100644 index 38cc9a6b0c..0000000000 --- a/test/absinthe/integration/execution/aliases/all_caps_alias.graphql +++ /dev/null @@ 
-1,5 +0,0 @@ -query { - thing(id: "foo") { - FOO: name - } -} diff --git a/test/absinthe/integration/execution/aliases/all_caps_alias_test.exs b/test/absinthe/integration/execution/aliases/all_caps_alias_test.exs new file mode 100644 index 0000000000..5f982500ba --- /dev/null +++ b/test/absinthe/integration/execution/aliases/all_caps_alias_test.exs @@ -0,0 +1,16 @@ +defmodule Elixir.Absinthe.Integration.Execution.Aliases.AllCapsAliasTest do + use Absinthe.Case, async: true + + @query """ + query { + thing(id: "foo") { + FOO: name + } + } + """ + + test "scenario #1" do + assert {:ok, %{data: %{"thing" => %{"FOO" => "Foo"}}}} == + Absinthe.run(@query, Absinthe.Fixtures.Things.MacroSchema, []) + end +end diff --git a/test/absinthe/integration/execution/aliases/different_selection_sets.exs b/test/absinthe/integration/execution/aliases/different_selection_sets.exs deleted file mode 100644 index 04ff511551..0000000000 --- a/test/absinthe/integration/execution/aliases/different_selection_sets.exs +++ /dev/null @@ -1 +0,0 @@ -{:ok, %{data: %{"thing1" => %{"id" => "foo"}, "thing2" => %{"name" => "Bar"}}}} diff --git a/test/absinthe/integration/execution/aliases/different_selection_sets.graphql b/test/absinthe/integration/execution/aliases/different_selection_sets.graphql deleted file mode 100644 index 90191e85f9..0000000000 --- a/test/absinthe/integration/execution/aliases/different_selection_sets.graphql +++ /dev/null @@ -1,8 +0,0 @@ -query { - thing1: thing(id: "foo") { - id - } - thing2: thing(id: "bar") { - name - } -} diff --git a/test/absinthe/integration/execution/aliases/different_selection_sets_test.exs b/test/absinthe/integration/execution/aliases/different_selection_sets_test.exs new file mode 100644 index 0000000000..24038c9426 --- /dev/null +++ b/test/absinthe/integration/execution/aliases/different_selection_sets_test.exs @@ -0,0 +1,19 @@ +defmodule Elixir.Absinthe.Integration.Execution.Aliases.DifferentSelectionSetsTest do + use Absinthe.Case, async: true + 
+ @query """ + query { + thing1: thing(id: "foo") { + id + } + thing2: thing(id: "bar") { + name + } + } + """ + + test "scenario #1" do + assert {:ok, %{data: %{"thing1" => %{"id" => "foo"}, "thing2" => %{"name" => "Bar"}}}} == + Absinthe.run(@query, Absinthe.Fixtures.Things.MacroSchema, []) + end +end diff --git a/test/absinthe/integration/execution/aliases/leading_underscore.exs b/test/absinthe/integration/execution/aliases/leading_underscore.exs deleted file mode 100644 index 1be89eb653..0000000000 --- a/test/absinthe/integration/execution/aliases/leading_underscore.exs +++ /dev/null @@ -1 +0,0 @@ -{:ok, %{data: %{"_thing123" => %{"name" => "Foo"}}}} diff --git a/test/absinthe/integration/execution/aliases/leading_underscore.graphql b/test/absinthe/integration/execution/aliases/leading_underscore.graphql deleted file mode 100644 index 7aaa508cc8..0000000000 --- a/test/absinthe/integration/execution/aliases/leading_underscore.graphql +++ /dev/null @@ -1,5 +0,0 @@ -query { - _thing123: thing(id: "foo") { - name - } -} diff --git a/test/absinthe/integration/execution/aliases/leading_underscore_test.exs b/test/absinthe/integration/execution/aliases/leading_underscore_test.exs new file mode 100644 index 0000000000..7034865155 --- /dev/null +++ b/test/absinthe/integration/execution/aliases/leading_underscore_test.exs @@ -0,0 +1,16 @@ +defmodule Elixir.Absinthe.Integration.Execution.Aliases.LeadingUnderscoreTest do + use Absinthe.Case, async: true + + @query """ + query { + _thing123: thing(id: "foo") { + name + } + } + """ + + test "scenario #1" do + assert {:ok, %{data: %{"_thing123" => %{"name" => "Foo"}}}} == + Absinthe.run(@query, Absinthe.Fixtures.Things.MacroSchema, []) + end +end diff --git a/test/absinthe/integration/execution/aliases/weird.exs b/test/absinthe/integration/execution/aliases/weird.exs deleted file mode 100644 index f1e094173f..0000000000 --- a/test/absinthe/integration/execution/aliases/weird.exs +++ /dev/null @@ -1 +0,0 @@ -{:ok, %{data: 
%{"thing" => %{"fOO_Bar_baz" => "Foo"}}}} diff --git a/test/absinthe/integration/execution/aliases/weird.graphql b/test/absinthe/integration/execution/aliases/weird.graphql deleted file mode 100644 index 3382f0362e..0000000000 --- a/test/absinthe/integration/execution/aliases/weird.graphql +++ /dev/null @@ -1,5 +0,0 @@ -query { - thing(id: "foo") { - fOO_Bar_baz: name - } -} diff --git a/test/absinthe/integration/execution/aliases/weird_test.exs b/test/absinthe/integration/execution/aliases/weird_test.exs new file mode 100644 index 0000000000..5ecd7ba435 --- /dev/null +++ b/test/absinthe/integration/execution/aliases/weird_test.exs @@ -0,0 +1,16 @@ +defmodule Elixir.Absinthe.Integration.Execution.Aliases.WeirdTest do + use Absinthe.Case, async: true + + @query """ + query { + thing(id: "foo") { + fOO_Bar_baz: name + } + } + """ + + test "scenario #1" do + assert {:ok, %{data: %{"thing" => %{"fOO_Bar_baz" => "Foo"}}}} == + Absinthe.run(@query, Absinthe.Fixtures.Things.MacroSchema, []) + end +end diff --git a/test/absinthe/integration/execution/aliases/with_errors.exs b/test/absinthe/integration/execution/aliases/with_errors.exs deleted file mode 100644 index 083bc8f06d..0000000000 --- a/test/absinthe/integration/execution/aliases/with_errors.exs +++ /dev/null @@ -1 +0,0 @@ -{:ok, %{data: %{"foo" => nil}, errors: [%{code: 42, message: "Custom Error", path: ["foo"]}]}} diff --git a/test/absinthe/integration/execution/aliases/with_errors.graphql b/test/absinthe/integration/execution/aliases/with_errors.graphql deleted file mode 100644 index 9dada7ae93..0000000000 --- a/test/absinthe/integration/execution/aliases/with_errors.graphql +++ /dev/null @@ -1 +0,0 @@ -mutation { foo: failingThing(type: WITH_CODE) { name } } diff --git a/test/absinthe/integration/execution/aliases/with_errors_test.exs b/test/absinthe/integration/execution/aliases/with_errors_test.exs new file mode 100644 index 0000000000..f94d20ac9b --- /dev/null +++ 
b/test/absinthe/integration/execution/aliases/with_errors_test.exs @@ -0,0 +1,52 @@ +defmodule Elixir.Absinthe.Integration.Execution.Aliases.WithErrorsTest do + use Absinthe.Case, async: true + + @query """ + mutation { foo: failingThing(type: WITH_CODE) { name } } + """ + + test "scenario #1" do + assert {:ok, + %{ + data: %{"foo" => nil}, + errors: [ + %{ + code: 42, + message: "Custom Error", + path: ["foo"], + locations: [%{column: 12, line: 1}] + } + ] + }} == Absinthe.run(@query, Absinthe.Fixtures.Things.MacroSchema, []) + end + + @query """ + mutation { + foo: failingThing(type: WITH_CODE) { name } + bar: failingThing(type: WITH_CODE) { name } + } + """ + + test "multiple aliases" do + assert { + :ok, + %{ + data: %{"foo" => nil, "bar" => nil}, + errors: [ + %{ + code: 42, + locations: [%{column: 3, line: 3}], + message: "Custom Error", + path: ["bar"] + }, + %{ + code: 42, + locations: [%{column: 3, line: 2}], + message: "Custom Error", + path: ["foo"] + } + ] + } + } == Absinthe.run(@query, Absinthe.Fixtures.Things.MacroSchema, []) + end +end diff --git a/test/absinthe/integration/execution/context.exs b/test/absinthe/integration/execution/context.exs deleted file mode 100644 index 4de15b8246..0000000000 --- a/test/absinthe/integration/execution/context.exs +++ /dev/null @@ -1,4 +0,0 @@ -{ - [context: %{thing: "bar"}], - {:ok, %{data: %{"thingByContext" => %{"name" => "Bar"}}}} -} diff --git a/test/absinthe/integration/execution/context.graphql b/test/absinthe/integration/execution/context.graphql deleted file mode 100644 index b5233b416e..0000000000 --- a/test/absinthe/integration/execution/context.graphql +++ /dev/null @@ -1,5 +0,0 @@ -query { - thingByContext { - name - } -} diff --git a/test/absinthe/integration/execution/context_test.exs b/test/absinthe/integration/execution/context_test.exs new file mode 100644 index 0000000000..62c76c4b6f --- /dev/null +++ b/test/absinthe/integration/execution/context_test.exs @@ -0,0 +1,16 @@ +defmodule 
Elixir.Absinthe.Integration.Execution.ContextTest do + use Absinthe.Case, async: true + + @query """ + query { + thingByContext { + name + } + } + """ + + test "scenario #1" do + assert {:ok, %{data: %{"thingByContext" => %{"name" => "Bar"}}}} == + Absinthe.run(@query, Absinthe.Fixtures.Things.MacroSchema, context: %{thing: "bar"}) + end +end diff --git a/test/absinthe/integration/execution/custom_types/basic.exs b/test/absinthe/integration/execution/custom_types/basic.exs deleted file mode 100644 index 039ec12492..0000000000 --- a/test/absinthe/integration/execution/custom_types/basic.exs +++ /dev/null @@ -1 +0,0 @@ -{:ok, %{data: %{"customTypesQuery" => %{"datetime" => "2017-01-27T20:31:55Z"}}}} diff --git a/test/absinthe/integration/execution/custom_types/basic.graphql b/test/absinthe/integration/execution/custom_types/basic.graphql deleted file mode 100644 index 09ce40d2d7..0000000000 --- a/test/absinthe/integration/execution/custom_types/basic.graphql +++ /dev/null @@ -1,4 +0,0 @@ -# Schema: CustomTypesSchema -query { - customTypesQuery { datetime } -} diff --git a/test/absinthe/integration/execution/custom_types/basic_test.exs b/test/absinthe/integration/execution/custom_types/basic_test.exs new file mode 100644 index 0000000000..6e4bb8a67b --- /dev/null +++ b/test/absinthe/integration/execution/custom_types/basic_test.exs @@ -0,0 +1,14 @@ +defmodule Elixir.Absinthe.Integration.Execution.CustomTypes.BasicTest do + use Absinthe.Case, async: true + + @query """ + query { + customTypesQuery { datetime } + } + """ + + test "scenario #1" do + assert {:ok, %{data: %{"customTypesQuery" => %{"datetime" => "2017-01-27T20:31:55Z"}}}} == + Absinthe.run(@query, Absinthe.Fixtures.CustomTypesSchema, []) + end +end diff --git a/test/absinthe/integration/execution/custom_types/datetime/input_object.exs b/test/absinthe/integration/execution/custom_types/datetime/input_object.exs deleted file mode 100644 index bc7aec6149..0000000000 --- 
a/test/absinthe/integration/execution/custom_types/datetime/input_object.exs +++ /dev/null @@ -1 +0,0 @@ -{:ok, %{data: %{"customTypesMutation" => %{"message" => "ok"}}}} diff --git a/test/absinthe/integration/execution/custom_types/datetime/input_object.graphql b/test/absinthe/integration/execution/custom_types/datetime/input_object.graphql deleted file mode 100644 index 798c37d918..0000000000 --- a/test/absinthe/integration/execution/custom_types/datetime/input_object.graphql +++ /dev/null @@ -1,6 +0,0 @@ -# Schema: CustomTypesSchema -mutation { - customTypesMutation(args: { datetime: "2017-01-27T20:31:55Z" }) { - message - } -} diff --git a/test/absinthe/integration/execution/custom_types/datetime/input_object_test.exs b/test/absinthe/integration/execution/custom_types/datetime/input_object_test.exs new file mode 100644 index 0000000000..a6be085f1f --- /dev/null +++ b/test/absinthe/integration/execution/custom_types/datetime/input_object_test.exs @@ -0,0 +1,16 @@ +defmodule Elixir.Absinthe.Integration.Execution.CustomTypes.Datetime.InputObjectTest do + use Absinthe.Case, async: true + + @query """ + mutation { + customTypesMutation(args: { datetime: "2017-01-27T20:31:55Z" }) { + message + } + } + """ + + test "scenario #1" do + assert {:ok, %{data: %{"customTypesMutation" => %{"message" => "ok"}}}} == + Absinthe.run(@query, Absinthe.Fixtures.CustomTypesSchema, []) + end +end diff --git a/test/absinthe/integration/execution/escape_sequence_test.exs b/test/absinthe/integration/execution/escape_sequence_test.exs new file mode 100644 index 0000000000..bdff9c5221 --- /dev/null +++ b/test/absinthe/integration/execution/escape_sequence_test.exs @@ -0,0 +1,72 @@ +defmodule Absinthe.Phase.Document.Execution.EscapeSequenceTest do + use Absinthe.Case, async: true + + defmodule Schema do + use Absinthe.Schema + + query do + field :echo, :string do + arg :value, :string + + resolve(fn %{value: input_string}, _ -> + {:ok, input_string} + end) + end + end + end + + test "one 
slash" do + assert Absinthe.run( + ~S""" + { + echo(value: "\FOO") + } + """, + Schema + ) == {:ok, %{data: %{"echo" => ~S"\FOO"}}} + end + + test "two slashes" do + assert Absinthe.run( + ~S""" + { + echo(value: "\\FOO") + } + """, + Schema + ) == {:ok, %{data: %{"echo" => ~S"\FOO"}}} + end + + test "four slashes" do + assert Absinthe.run( + ~S""" + { + echo(value: "\\\\FOO") + } + """, + Schema + ) == {:ok, %{data: %{"echo" => ~S"\\FOO"}}} + end + + test "eight slashes" do + assert Absinthe.run( + ~S""" + { + echo(value: "\\\\\\\\FOO") + } + """, + Schema + ) == {:ok, %{data: %{"echo" => ~S"\\\\FOO"}}} + end + + test "literal slash n" do + assert Absinthe.run( + ~S""" + { + echo(value: "\\nFOO") + } + """, + Schema + ) == {:ok, %{data: %{"echo" => ~S"\nFOO"}}} + end +end diff --git a/test/absinthe/integration/execution/fragments/basic.exs b/test/absinthe/integration/execution/fragments/basic.exs deleted file mode 100644 index 33a57e609e..0000000000 --- a/test/absinthe/integration/execution/fragments/basic.exs +++ /dev/null @@ -1 +0,0 @@ -{:ok, %{data: %{"person" => %{"name" => "Bruce"}}}} diff --git a/test/absinthe/integration/execution/fragments/basic.graphql b/test/absinthe/integration/execution/fragments/basic.graphql deleted file mode 100644 index 6e7e329aa6..0000000000 --- a/test/absinthe/integration/execution/fragments/basic.graphql +++ /dev/null @@ -1,9 +0,0 @@ -# Schema: ContactSchema -query Q { - person { - ...NamedPerson - } -} -fragment NamedPerson on Person { - name -} diff --git a/test/absinthe/integration/execution/fragments/basic_root_type.exs b/test/absinthe/integration/execution/fragments/basic_root_type.exs deleted file mode 100644 index fbc57eb991..0000000000 --- a/test/absinthe/integration/execution/fragments/basic_root_type.exs +++ /dev/null @@ -1 +0,0 @@ -{:ok, %{data: %{"thing" => %{"name" => "Foo"}}}} diff --git a/test/absinthe/integration/execution/fragments/basic_root_type.graphql 
b/test/absinthe/integration/execution/fragments/basic_root_type.graphql deleted file mode 100644 index f477a5e6dc..0000000000 --- a/test/absinthe/integration/execution/fragments/basic_root_type.graphql +++ /dev/null @@ -1,9 +0,0 @@ -query { - ... Fields -} - -fragment Fields on RootQueryType { - thing(id: "foo") { - name - } -} diff --git a/test/absinthe/integration/execution/fragments/basic_root_type_test.exs b/test/absinthe/integration/execution/fragments/basic_root_type_test.exs new file mode 100644 index 0000000000..ea97aaf7cd --- /dev/null +++ b/test/absinthe/integration/execution/fragments/basic_root_type_test.exs @@ -0,0 +1,20 @@ +defmodule Elixir.Absinthe.Integration.Execution.Fragments.BasicRootTypeTest do + use Absinthe.Case, async: true + + @query """ + query { + ... Fields + } + + fragment Fields on RootQueryType { + thing(id: "foo") { + name + } + } + """ + + test "scenario #1" do + assert {:ok, %{data: %{"thing" => %{"name" => "Foo"}}}} == + Absinthe.run(@query, Absinthe.Fixtures.Things.MacroSchema, []) + end +end diff --git a/test/absinthe/integration/execution/fragments/basic_test.exs b/test/absinthe/integration/execution/fragments/basic_test.exs new file mode 100644 index 0000000000..c749ae589b --- /dev/null +++ b/test/absinthe/integration/execution/fragments/basic_test.exs @@ -0,0 +1,19 @@ +defmodule Elixir.Absinthe.Integration.Execution.Fragments.BasicTest do + use Absinthe.Case, async: true + + @query """ + query Q { + person { + ...NamedPerson + } + } + fragment NamedPerson on Person { + name + } + """ + + test "scenario #1" do + assert {:ok, %{data: %{"person" => %{"name" => "Bruce"}}}} == + Absinthe.run(@query, Absinthe.Fixtures.ContactSchema, []) + end +end diff --git a/test/absinthe/integration/execution/fragments/introspection.exs b/test/absinthe/integration/execution/fragments/introspection.exs deleted file mode 100644 index 385441c02f..0000000000 --- a/test/absinthe/integration/execution/fragments/introspection.exs +++ /dev/null @@ -1,3 
+0,0 @@ -# Handled in a custom assertion match -# See `Absinthe.IntegrationTest.assert_integration/2` -:custom_assertion diff --git a/test/absinthe/integration/execution/fragments/introspection.graphql b/test/absinthe/integration/execution/fragments/introspection.graphql deleted file mode 100644 index 8b9f450645..0000000000 --- a/test/absinthe/integration/execution/fragments/introspection.graphql +++ /dev/null @@ -1,15 +0,0 @@ -# Schema: ContactSchema -query Q { - __type(name: "ProfileInput") { - name - kind - fields { - name - } - ...Inputs - } -} - -fragment Inputs on __Type { - inputFields { name } -} diff --git a/test/absinthe/integration/execution/fragments/introspection_test.exs b/test/absinthe/integration/execution/fragments/introspection_test.exs new file mode 100644 index 0000000000..2ac96c6f5e --- /dev/null +++ b/test/absinthe/integration/execution/fragments/introspection_test.exs @@ -0,0 +1,40 @@ +defmodule Elixir.Absinthe.Integration.Execution.Fragments.IntrospectionTest do + use Absinthe.Case, async: true + + @query """ + query Q { + __type(name: "ProfileInput") { + name + kind + fields { + name + } + ...Inputs + } + } + + fragment Inputs on __Type { + inputFields { name } + } + """ + + test "scenario #1" do + result = Absinthe.run(@query, Absinthe.Fixtures.ContactSchema, []) + + assert {:ok, + %{ + data: %{ + "__type" => %{ + "name" => "ProfileInput", + "kind" => "INPUT_OBJECT", + "fields" => nil, + "inputFields" => input_fields + } + } + }} = result + + correct = [%{"name" => "code"}, %{"name" => "name"}, %{"name" => "age"}] + sort = & &1["name"] + assert Enum.sort_by(input_fields, sort) == Enum.sort_by(correct, sort) + end +end diff --git a/test/absinthe/integration/execution/input_object.exs b/test/absinthe/integration/execution/input_object.exs deleted file mode 100644 index 9a902bda65..0000000000 --- a/test/absinthe/integration/execution/input_object.exs +++ /dev/null @@ -1 +0,0 @@ -{:ok, %{data: %{"updateThing" => %{"name" => "Foo", "value" => 
100}}}} diff --git a/test/absinthe/integration/execution/input_object.graphql b/test/absinthe/integration/execution/input_object.graphql deleted file mode 100644 index 7fc8d47d57..0000000000 --- a/test/absinthe/integration/execution/input_object.graphql +++ /dev/null @@ -1,6 +0,0 @@ -mutation { - updateThing(id: "foo", thing: {value: 100}) { - name - value - } -} diff --git a/test/absinthe/integration/execution/input_object_test.exs b/test/absinthe/integration/execution/input_object_test.exs new file mode 100644 index 0000000000..2bd1b7ae75 --- /dev/null +++ b/test/absinthe/integration/execution/input_object_test.exs @@ -0,0 +1,46 @@ +defmodule Elixir.Absinthe.Integration.Execution.InputObjectTest do + use Absinthe.Case, async: true + + @query """ + mutation { + updateThing(id: "foo", thing: {value: 100}) { + name + value + } + } + """ + + test "scenario #1" do + assert {:ok, %{data: %{"updateThing" => %{"name" => "Foo", "value" => 100}}}} == + Absinthe.run(@query, Absinthe.Fixtures.Things.MacroSchema, []) + end + + @query """ + mutation ($input: Boolean) { + updateThing(id: "foo", thing: $input) { + name + value + } + } + """ + + test "errors if an invalid type is passed" do + assert {:ok, + %{ + errors: [ + %{ + locations: [%{column: 26, line: 2}], + message: "Argument \"thing\" has invalid value $input." + }, + %{ + locations: [%{column: 33, line: 2}], + message: + "Variable `$input` of type `Boolean` found as input to argument of type `InputThing!`." 
+ } + ] + }} == + Absinthe.run(@query, Absinthe.Fixtures.Things.MacroSchema, + variables: %{"input" => true} + ) + end +end diff --git a/test/absinthe/integration/execution/input_types/enum/default_value_test.exs b/test/absinthe/integration/execution/input_types/enum/default_value_test.exs new file mode 100644 index 0000000000..90a9248e83 --- /dev/null +++ b/test/absinthe/integration/execution/input_types/enum/default_value_test.exs @@ -0,0 +1,55 @@ +defmodule Elixir.Absinthe.Integration.Execution.InputTypes.Enum.DefaultValueTest do + use Absinthe.Case, async: true + + @query """ + query { + default: info { + name + value + } + defaults: infos { + name + value + } + } + """ + test "default values" do + assert {:ok, + %{ + data: %{ + "default" => %{"name" => "RED", "value" => 100}, + "defaults" => [ + %{"name" => "RED", "value" => 100}, + %{"name" => "GREEN", "value" => 200} + ] + } + }} == Absinthe.run(@query, Absinthe.Fixtures.ColorSchema, []) + end + + test "Introspection renders default value properly" do + {:ok, %{data: data}} = + """ + { + __schema { + queryType { + fields { + name + args { + name + defaultValue + } + } + } + } + } + """ + |> run(Absinthe.Fixtures.ColorSchema) + + fields = get_in(data, ["__schema", "queryType", "fields"]) + + assert %{ + "args" => [%{"defaultValue" => "[RED]", "name" => "channels"}], + "name" => "moreInfos" + } in fields + end +end diff --git a/test/absinthe/integration/execution/input_types/enum/literal.exs b/test/absinthe/integration/execution/input_types/enum/literal.exs deleted file mode 100644 index dfdb6c2cd8..0000000000 --- a/test/absinthe/integration/execution/input_types/enum/literal.exs +++ /dev/null @@ -1,9 +0,0 @@ -{:ok, - %{ - data: %{ - "red" => %{"name" => "RED", "value" => 100}, - "green" => %{"name" => "GREEN", "value" => 200}, - "blue" => %{"name" => "BLUE", "value" => 300}, - "puce" => %{"name" => "PUCE", "value" => -100} - } - }} diff --git 
a/test/absinthe/integration/execution/input_types/enum/literal.graphql b/test/absinthe/integration/execution/input_types/enum/literal.graphql deleted file mode 100644 index dd3d27e8fd..0000000000 --- a/test/absinthe/integration/execution/input_types/enum/literal.graphql +++ /dev/null @@ -1,19 +0,0 @@ -# Schema: ColorSchema -query { - red: info(channel: RED) { - name - value - } - green: info(channel: GREEN) { - name - value - } - blue: info(channel: BLUE) { - name - value - } - puce: info(channel: PUCE) { - name - value - } -} diff --git a/test/absinthe/integration/execution/input_types/enum/literal_test.exs b/test/absinthe/integration/execution/input_types/enum/literal_test.exs new file mode 100644 index 0000000000..7e5e5201b2 --- /dev/null +++ b/test/absinthe/integration/execution/input_types/enum/literal_test.exs @@ -0,0 +1,36 @@ +defmodule Elixir.Absinthe.Integration.Execution.InputTypes.Enum.LiteralTest do + use Absinthe.Case, async: true + + @query """ + query { + red: info(channel: RED) { + name + value + } + green: info(channel: GREEN) { + name + value + } + blue: info(channel: BLUE) { + name + value + } + puce: info(channel: PUCE) { + name + value + } + } + """ + + test "scenario #1" do + assert {:ok, + %{ + data: %{ + "blue" => %{"name" => "BLUE", "value" => 300}, + "green" => %{"name" => "GREEN", "value" => 200}, + "puce" => %{"name" => "PUCE", "value" => -100}, + "red" => %{"name" => "RED", "value" => 100} + } + }} == Absinthe.run(@query, Absinthe.Fixtures.ColorSchema, []) + end +end diff --git a/test/absinthe/integration/execution/input_types/id/literal.exs b/test/absinthe/integration/execution/input_types/id/literal.exs deleted file mode 100644 index e326a16f7d..0000000000 --- a/test/absinthe/integration/execution/input_types/id/literal.exs +++ /dev/null @@ -1 +0,0 @@ -{:ok, %{data: %{"item" => %{"id" => "foo", "name" => "Foo"}}}} diff --git a/test/absinthe/integration/execution/input_types/id/literal.graphql 
b/test/absinthe/integration/execution/input_types/id/literal.graphql deleted file mode 100644 index 0e53a2c5ae..0000000000 --- a/test/absinthe/integration/execution/input_types/id/literal.graphql +++ /dev/null @@ -1,7 +0,0 @@ -# Schema: IdTestSchema -{ - item(id: "foo") { - id - name - } -} diff --git a/test/absinthe/integration/execution/input_types/id/literal_test.exs b/test/absinthe/integration/execution/input_types/id/literal_test.exs new file mode 100644 index 0000000000..5c5580d4d7 --- /dev/null +++ b/test/absinthe/integration/execution/input_types/id/literal_test.exs @@ -0,0 +1,17 @@ +defmodule Elixir.Absinthe.Integration.Execution.InputTypes.Id.LiteralTest do + use Absinthe.Case, async: true + + @query """ + { + item(id: "foo") { + id + name + } + } + """ + + test "scenario #1" do + assert {:ok, %{data: %{"item" => %{"id" => "foo", "name" => "Foo"}}}} == + Absinthe.run(@query, Absinthe.Fixtures.IdTestSchema, []) + end +end diff --git a/test/absinthe/integration/execution/input_types/null/literal_to_element_of_type_[T]!.exs b/test/absinthe/integration/execution/input_types/null/literal_to_element_of_type_[T]!.exs deleted file mode 100644 index 14a3ff82e2..0000000000 --- a/test/absinthe/integration/execution/input_types/null/literal_to_element_of_type_[T]!.exs +++ /dev/null @@ -1,13 +0,0 @@ -{ - :ok, - %{ - data: %{ - "nonNullableList" => %{ - "length" => 2, - "content" => [nil, 1], - "nullCount" => 1, - "nonNullCount" => 1 - } - } - } -} diff --git a/test/absinthe/integration/execution/input_types/null/literal_to_element_of_type_[T]!.graphql b/test/absinthe/integration/execution/input_types/null/literal_to_element_of_type_[T]!.graphql deleted file mode 100644 index 73b191e1f2..0000000000 --- a/test/absinthe/integration/execution/input_types/null/literal_to_element_of_type_[T]!.graphql +++ /dev/null @@ -1,9 +0,0 @@ -# Schema: NullListsSchema -query { - nonNullableList(input: [null, 1]) { - length - content - nonNullCount - nullCount - } -} diff --git 
a/test/absinthe/integration/execution/input_types/null/literal_to_element_of_type_[T].exs b/test/absinthe/integration/execution/input_types/null/literal_to_element_of_type_[T].exs deleted file mode 100644 index a5fbb65753..0000000000 --- a/test/absinthe/integration/execution/input_types/null/literal_to_element_of_type_[T].exs +++ /dev/null @@ -1,13 +0,0 @@ -{ - :ok, - %{ - data: %{ - "nullableList" => %{ - "length" => 2, - "content" => [nil, 1], - "nullCount" => 1, - "nonNullCount" => 1 - } - } - } -} diff --git a/test/absinthe/integration/execution/input_types/null/literal_to_element_of_type_[T].graphql b/test/absinthe/integration/execution/input_types/null/literal_to_element_of_type_[T].graphql deleted file mode 100644 index 91ae5eed7b..0000000000 --- a/test/absinthe/integration/execution/input_types/null/literal_to_element_of_type_[T].graphql +++ /dev/null @@ -1,9 +0,0 @@ -# Schema: NullListsSchema -query { - nullableList(input: [null, 1]) { - length - content - nonNullCount - nullCount - } -} diff --git a/test/absinthe/integration/execution/input_types/null/literal_to_element_of_type_list_of_T_test.exs b/test/absinthe/integration/execution/input_types/null/literal_to_element_of_type_list_of_T_test.exs new file mode 100644 index 0000000000..1416a29404 --- /dev/null +++ b/test/absinthe/integration/execution/input_types/null/literal_to_element_of_type_list_of_T_test.exs @@ -0,0 +1,28 @@ +defmodule Elixir.Absinthe.Integration.Execution.InputTypes.Null.LiteralToElementOfTypeListOf_TTest do + use Absinthe.Case, async: true + + @query """ + query { + nullableList(input: [null, 1]) { + length + content + nonNullCount + nullCount + } + } + """ + + test "scenario #1" do + assert {:ok, + %{ + data: %{ + "nullableList" => %{ + "content" => [nil, 1], + "length" => 2, + "nonNullCount" => 1, + "nullCount" => 1 + } + } + }} == Absinthe.run(@query, Absinthe.Fixtures.NullListsSchema, []) + end +end diff --git 
a/test/absinthe/integration/execution/input_types/null/literal_to_element_of_type_non_null_list_of_T_test.exs b/test/absinthe/integration/execution/input_types/null/literal_to_element_of_type_non_null_list_of_T_test.exs new file mode 100644 index 0000000000..7d7d3df7ac --- /dev/null +++ b/test/absinthe/integration/execution/input_types/null/literal_to_element_of_type_non_null_list_of_T_test.exs @@ -0,0 +1,28 @@ +defmodule Elixir.Absinthe.Integration.Execution.InputTypes.Null.LiteralToElementOfTypeNonNullListOf_TTest do + use Absinthe.Case, async: true + + @query """ + query { + nonNullableList(input: [null, 1]) { + length + content + nonNullCount + nullCount + } + } + """ + + test "scenario #1" do + assert {:ok, + %{ + data: %{ + "nonNullableList" => %{ + "content" => [nil, 1], + "length" => 2, + "nonNullCount" => 1, + "nullCount" => 1 + } + } + }} == Absinthe.run(@query, Absinthe.Fixtures.NullListsSchema, []) + end +end diff --git a/test/absinthe/integration/execution/input_types/null/literal_to_type_T!.exs b/test/absinthe/integration/execution/input_types/null/literal_to_type_T!.exs deleted file mode 100644 index bd396de61e..0000000000 --- a/test/absinthe/integration/execution/input_types/null/literal_to_type_T!.exs +++ /dev/null @@ -1,9 +0,0 @@ -{:ok, - %{ - errors: [ - %{ - message: - "Argument \"input\" has invalid value {base: null}.\nIn field \"base\": Expected type \"Int!\", found null." 
- } - ] - }} diff --git a/test/absinthe/integration/execution/input_types/null/literal_to_type_T!.graphql b/test/absinthe/integration/execution/input_types/null/literal_to_type_T!.graphql deleted file mode 100644 index 0d5e0e1ee9..0000000000 --- a/test/absinthe/integration/execution/input_types/null/literal_to_type_T!.graphql +++ /dev/null @@ -1,4 +0,0 @@ -# Schema: ObjectTimesSchema -query { - times: objTimes(input: {base: null}) -} diff --git a/test/absinthe/integration/execution/input_types/null/literal_to_type_T_overrides_default_value.exs b/test/absinthe/integration/execution/input_types/null/literal_to_type_T_overrides_default_value.exs deleted file mode 100644 index f977adb1e9..0000000000 --- a/test/absinthe/integration/execution/input_types/null/literal_to_type_T_overrides_default_value.exs +++ /dev/null @@ -1 +0,0 @@ -{:ok, %{data: %{"times" => 4}}} diff --git a/test/absinthe/integration/execution/input_types/null/literal_to_type_T_overrides_default_value.graphql b/test/absinthe/integration/execution/input_types/null/literal_to_type_T_overrides_default_value.graphql deleted file mode 100644 index 416b46f253..0000000000 --- a/test/absinthe/integration/execution/input_types/null/literal_to_type_T_overrides_default_value.graphql +++ /dev/null @@ -1,4 +0,0 @@ -# Schema: ObjectTimesSchema -query { - times: objTimes(input: {base: 4, multiplier: null}) -} diff --git a/test/absinthe/integration/execution/input_types/null/literal_to_type_T_overrides_default_value_test.exs b/test/absinthe/integration/execution/input_types/null/literal_to_type_T_overrides_default_value_test.exs new file mode 100644 index 0000000000..ad8a1f3cab --- /dev/null +++ b/test/absinthe/integration/execution/input_types/null/literal_to_type_T_overrides_default_value_test.exs @@ -0,0 +1,14 @@ +defmodule Elixir.Absinthe.Integration.Execution.InputTypes.Null.LiteralToType_TOverridesDefaultValueTest do + use Absinthe.Case, async: true + + @query """ + query { + times: objTimes(input: {base: 4, 
multiplier: null}) + } + """ + + test "scenario #1" do + assert {:ok, %{data: %{"times" => 4}}} == + Absinthe.run(@query, Absinthe.Fixtures.ObjectTimesSchema, []) + end +end diff --git a/test/absinthe/integration/execution/input_types/null/literal_to_type_[T!]!.exs b/test/absinthe/integration/execution/input_types/null/literal_to_type_[T!]!.exs deleted file mode 100644 index da693d4cb0..0000000000 --- a/test/absinthe/integration/execution/input_types/null/literal_to_type_[T!]!.exs +++ /dev/null @@ -1,6 +0,0 @@ -{ - :ok, - %{ - errors: [%{message: "Argument \"input\" has invalid value null."}] - } -} diff --git a/test/absinthe/integration/execution/input_types/null/literal_to_type_[T!]!.graphql b/test/absinthe/integration/execution/input_types/null/literal_to_type_[T!]!.graphql deleted file mode 100644 index 68f61c15a7..0000000000 --- a/test/absinthe/integration/execution/input_types/null/literal_to_type_[T!]!.graphql +++ /dev/null @@ -1,9 +0,0 @@ -# Schema: NullListsSchema -query { - nonNullableListOfNonNullableType(input: null) { - length - content - nonNullCount - nullCount - } -} diff --git a/test/absinthe/integration/execution/input_types/null/literal_to_type_[T!]!_element.exs b/test/absinthe/integration/execution/input_types/null/literal_to_type_[T!]!_element.exs deleted file mode 100644 index d4fd7f0b72..0000000000 --- a/test/absinthe/integration/execution/input_types/null/literal_to_type_[T!]!_element.exs +++ /dev/null @@ -1,11 +0,0 @@ -{ - :ok, - %{ - errors: [ - %{ - message: - "Argument \"input\" has invalid value [null, 1].\nIn element #1: Expected type \"Int!\", found null." 
- } - ] - } -} diff --git a/test/absinthe/integration/execution/input_types/null/literal_to_type_[T!]!_element.graphql b/test/absinthe/integration/execution/input_types/null/literal_to_type_[T!]!_element.graphql deleted file mode 100644 index 57a5d0d403..0000000000 --- a/test/absinthe/integration/execution/input_types/null/literal_to_type_[T!]!_element.graphql +++ /dev/null @@ -1,9 +0,0 @@ -# Schema: NullListsSchema -query { - nonNullableListOfNonNullableType(input: [null, 1]) { - length - content - nonNullCount - nullCount - } -} diff --git a/test/absinthe/integration/execution/input_types/null/literal_to_type_[T!].exs b/test/absinthe/integration/execution/input_types/null/literal_to_type_[T!].exs deleted file mode 100644 index 0712a2b428..0000000000 --- a/test/absinthe/integration/execution/input_types/null/literal_to_type_[T!].exs +++ /dev/null @@ -1,8 +0,0 @@ -{ - :ok, - %{ - data: %{ - "nullableListOfNonNullableType" => nil - } - } -} diff --git a/test/absinthe/integration/execution/input_types/null/literal_to_type_[T!].graphql b/test/absinthe/integration/execution/input_types/null/literal_to_type_[T!].graphql deleted file mode 100644 index 93c89e1c58..0000000000 --- a/test/absinthe/integration/execution/input_types/null/literal_to_type_[T!].graphql +++ /dev/null @@ -1,9 +0,0 @@ -# Schema: NullListsSchema -query { - nullableListOfNonNullableType(input: null) { - length - content - nonNullCount - nullCount - } -} diff --git a/test/absinthe/integration/execution/input_types/null/literal_to_type_[T!]_element.exs b/test/absinthe/integration/execution/input_types/null/literal_to_type_[T!]_element.exs deleted file mode 100644 index d4fd7f0b72..0000000000 --- a/test/absinthe/integration/execution/input_types/null/literal_to_type_[T!]_element.exs +++ /dev/null @@ -1,11 +0,0 @@ -{ - :ok, - %{ - errors: [ - %{ - message: - "Argument \"input\" has invalid value [null, 1].\nIn element #1: Expected type \"Int!\", found null." 
- } - ] - } -} diff --git a/test/absinthe/integration/execution/input_types/null/literal_to_type_[T!]_element.graphql b/test/absinthe/integration/execution/input_types/null/literal_to_type_[T!]_element.graphql deleted file mode 100644 index d77f9b3ed9..0000000000 --- a/test/absinthe/integration/execution/input_types/null/literal_to_type_[T!]_element.graphql +++ /dev/null @@ -1,9 +0,0 @@ -# Schema: NullListsSchema -{ - nullableListOfNonNullableType(input: [null, 1]) { - length - content - nonNullCount - nullCount - } -} diff --git a/test/absinthe/integration/execution/input_types/null/literal_to_type_[T]!.exs b/test/absinthe/integration/execution/input_types/null/literal_to_type_[T]!.exs deleted file mode 100644 index 54005dff7c..0000000000 --- a/test/absinthe/integration/execution/input_types/null/literal_to_type_[T]!.exs +++ /dev/null @@ -1 +0,0 @@ -{:ok, %{errors: [%{message: "Argument \"input\" has invalid value null."}]}} diff --git a/test/absinthe/integration/execution/input_types/null/literal_to_type_[T]!.graphql b/test/absinthe/integration/execution/input_types/null/literal_to_type_[T]!.graphql deleted file mode 100644 index 4b856a3348..0000000000 --- a/test/absinthe/integration/execution/input_types/null/literal_to_type_[T]!.graphql +++ /dev/null @@ -1,9 +0,0 @@ -# Schema: NullListsSchema -query { - nonNullableList(input: null) { - length - content - nonNullCount - nullCount - } -} diff --git a/test/absinthe/integration/execution/input_types/null/literal_to_type_[T].exs b/test/absinthe/integration/execution/input_types/null/literal_to_type_[T].exs deleted file mode 100644 index cef0229ea1..0000000000 --- a/test/absinthe/integration/execution/input_types/null/literal_to_type_[T].exs +++ /dev/null @@ -1 +0,0 @@ -{:ok, %{data: %{"nullableList" => nil}}} diff --git a/test/absinthe/integration/execution/input_types/null/literal_to_type_[T].graphql b/test/absinthe/integration/execution/input_types/null/literal_to_type_[T].graphql deleted file mode 100644 index 
34a10bb96f..0000000000 --- a/test/absinthe/integration/execution/input_types/null/literal_to_type_[T].graphql +++ /dev/null @@ -1,9 +0,0 @@ -# Schema: NullListsSchema -query { - nullableList(input: null) { - length - content - nonNullCount - nullCount - } -} diff --git a/test/absinthe/integration/execution/input_types/null/literal_to_type_list_of_T_test.exs b/test/absinthe/integration/execution/input_types/null/literal_to_type_list_of_T_test.exs new file mode 100644 index 0000000000..ca6f341631 --- /dev/null +++ b/test/absinthe/integration/execution/input_types/null/literal_to_type_list_of_T_test.exs @@ -0,0 +1,19 @@ +defmodule Elixir.Absinthe.Integration.Execution.InputTypes.Null.LiteralToTypeListOf_TTest do + use Absinthe.Case, async: true + + @query """ + query { + nullableList(input: null) { + length + content + nonNullCount + nullCount + } + } + """ + + test "scenario #1" do + assert {:ok, %{data: %{"nullableList" => nil}}} == + Absinthe.run(@query, Absinthe.Fixtures.NullListsSchema, []) + end +end diff --git a/test/absinthe/integration/execution/input_types/null/literal_to_type_non_null_T_test.exs b/test/absinthe/integration/execution/input_types/null/literal_to_type_non_null_T_test.exs new file mode 100644 index 0000000000..9c66d551dd --- /dev/null +++ b/test/absinthe/integration/execution/input_types/null/literal_to_type_non_null_T_test.exs @@ -0,0 +1,22 @@ +defmodule Elixir.Absinthe.Integration.Execution.InputTypes.Null.LiteralToTypeNonNull_TTest do + use Absinthe.Case, async: true + + @query """ + query { + times: objTimes(input: {base: null}) + } + """ + + test "scenario #1" do + assert {:ok, + %{ + errors: [ + %{ + message: + "Argument \"input\" has invalid value {base: null}.\nIn field \"base\": Expected type \"Int!\", found null.", + locations: [%{column: 19, line: 2}] + } + ] + }} == Absinthe.run(@query, Absinthe.Fixtures.ObjectTimesSchema, []) + end +end diff --git 
a/test/absinthe/integration/execution/input_types/null/literal_to_type_non_null_list_of_T_element_test.exs b/test/absinthe/integration/execution/input_types/null/literal_to_type_non_null_list_of_T_element_test.exs new file mode 100644 index 0000000000..c6ab3ed36b --- /dev/null +++ b/test/absinthe/integration/execution/input_types/null/literal_to_type_non_null_list_of_T_element_test.exs @@ -0,0 +1,27 @@ +defmodule Elixir.Absinthe.Integration.Execution.InputTypes.Null.LiteralToTypeNonNullListOf_TElementTest do + use Absinthe.Case, async: true + + @query """ + { + nullableListOfNonNullableType(input: [null, 1]) { + length + content + nonNullCount + nullCount + } + } + """ + + test "scenario #1" do + assert {:ok, + %{ + errors: [ + %{ + message: + "Argument \"input\" has invalid value [null, 1].\nIn element #1: Expected type \"Int!\", found null.", + locations: [%{column: 33, line: 2}] + } + ] + }} == Absinthe.run(@query, Absinthe.Fixtures.NullListsSchema, []) + end +end diff --git a/test/absinthe/integration/execution/input_types/null/literal_to_type_non_null_list_of_T_test.exs b/test/absinthe/integration/execution/input_types/null/literal_to_type_non_null_list_of_T_test.exs new file mode 100644 index 0000000000..f381ba57f7 --- /dev/null +++ b/test/absinthe/integration/execution/input_types/null/literal_to_type_non_null_list_of_T_test.exs @@ -0,0 +1,19 @@ +defmodule Elixir.Absinthe.Integration.Execution.InputTypes.Null.LiteralToTypeNonNullListOf_TTest do + use Absinthe.Case, async: true + + @query """ + query { + nullableListOfNonNullableType(input: null) { + length + content + nonNullCount + nullCount + } + } + """ + + test "scenario #1" do + assert {:ok, %{data: %{"nullableListOfNonNullableType" => nil}}} == + Absinthe.run(@query, Absinthe.Fixtures.NullListsSchema, []) + end +end diff --git a/test/absinthe/integration/execution/input_types/null/literal_to_type_non_null_list_of_non_null_T_element_test.exs 
b/test/absinthe/integration/execution/input_types/null/literal_to_type_non_null_list_of_non_null_T_element_test.exs new file mode 100644 index 0000000000..8f9f8289bf --- /dev/null +++ b/test/absinthe/integration/execution/input_types/null/literal_to_type_non_null_list_of_non_null_T_element_test.exs @@ -0,0 +1,27 @@ +defmodule Elixir.Absinthe.Integration.Execution.InputTypes.Null.LiteralToTypeNonNullListOfNonNull_TElementTest do + use Absinthe.Case, async: true + + @query """ + query { + nonNullableListOfNonNullableType(input: [null, 1]) { + length + content + nonNullCount + nullCount + } + } + """ + + test "scenario #1" do + assert {:ok, + %{ + errors: [ + %{ + message: + "Argument \"input\" has invalid value [null, 1].\nIn element #1: Expected type \"Int!\", found null.", + locations: [%{column: 36, line: 2}] + } + ] + }} == Absinthe.run(@query, Absinthe.Fixtures.NullListsSchema, []) + end +end diff --git a/test/absinthe/integration/execution/input_types/null/literal_to_type_non_null_list_of_non_null_T_test.exs b/test/absinthe/integration/execution/input_types/null/literal_to_type_non_null_list_of_non_null_T_test.exs new file mode 100644 index 0000000000..12900cb313 --- /dev/null +++ b/test/absinthe/integration/execution/input_types/null/literal_to_type_non_null_list_of_non_null_T_test.exs @@ -0,0 +1,26 @@ +defmodule Elixir.Absinthe.Integration.Execution.InputTypes.Null.LiteralToTypeNonNullListOfNonNull_TTest do + use Absinthe.Case, async: true + + @query """ + query { + nonNullableListOfNonNullableType(input: null) { + length + content + nonNullCount + nullCount + } + } + """ + + test "scenario #1" do + assert {:ok, + %{ + errors: [ + %{ + message: "Argument \"input\" has invalid value null.", + locations: [%{column: 36, line: 2}] + } + ] + }} == Absinthe.run(@query, Absinthe.Fixtures.NullListsSchema, []) + end +end diff --git a/test/absinthe/integration/execution/input_types/null/variable_to_type_T!.exs 
b/test/absinthe/integration/execution/input_types/null/variable_to_type_T!.exs deleted file mode 100644 index 5a2bda0614..0000000000 --- a/test/absinthe/integration/execution/input_types/null/variable_to_type_T!.exs +++ /dev/null @@ -1,14 +0,0 @@ -[ - {[variables: %{"value" => nil}], - {:ok, - %{ - errors: [ - %{ - message: - "Argument \"input\" has invalid value {base: $value}.\nIn field \"base\": Expected type \"Int!\", found $value." - }, - %{message: "Variable \"value\": Expected non-null, found null."} - ] - }}}, - {[variables: %{"value" => 8}], {:ok, %{data: %{"times" => 16}}}} -] diff --git a/test/absinthe/integration/execution/input_types/null/variable_to_type_T!.graphql b/test/absinthe/integration/execution/input_types/null/variable_to_type_T!.graphql deleted file mode 100644 index 34503e3989..0000000000 --- a/test/absinthe/integration/execution/input_types/null/variable_to_type_T!.graphql +++ /dev/null @@ -1,2 +0,0 @@ -# Schema: ObjectTimesSchema -query ($value: Int!) { times: objTimes(input: {base: $value}) } diff --git a/test/absinthe/integration/execution/input_types/null/variable_to_type_T.exs b/test/absinthe/integration/execution/input_types/null/variable_to_type_T.exs deleted file mode 100644 index c397afb785..0000000000 --- a/test/absinthe/integration/execution/input_types/null/variable_to_type_T.exs +++ /dev/null @@ -1,4 +0,0 @@ -[ - {[variables: %{"value" => nil}], {:ok, %{data: %{"times" => 4}}}}, - {[variables: %{"value" => 8}], {:ok, %{data: %{"times" => 32}}}} -] diff --git a/test/absinthe/integration/execution/input_types/null/variable_to_type_T.graphql b/test/absinthe/integration/execution/input_types/null/variable_to_type_T.graphql deleted file mode 100644 index 187ed3899f..0000000000 --- a/test/absinthe/integration/execution/input_types/null/variable_to_type_T.graphql +++ /dev/null @@ -1,4 +0,0 @@ -# Schema: ObjectTimesSchema -query ($value: Int) { - times: objTimes(input: {base: 4, multiplier: $value}) -} diff --git 
a/test/absinthe/integration/execution/input_types/null/variable_to_type_T_overrides_default_value.exs b/test/absinthe/integration/execution/input_types/null/variable_to_type_T_overrides_default_value.exs deleted file mode 100644 index 841936cbed..0000000000 --- a/test/absinthe/integration/execution/input_types/null/variable_to_type_T_overrides_default_value.exs +++ /dev/null @@ -1 +0,0 @@ -{[variables: %{"multiplier" => nil}], {:ok, %{data: %{"times" => 4}}}} diff --git a/test/absinthe/integration/execution/input_types/null/variable_to_type_T_overrides_default_value.graphql b/test/absinthe/integration/execution/input_types/null/variable_to_type_T_overrides_default_value.graphql deleted file mode 100644 index 23353ae9d1..0000000000 --- a/test/absinthe/integration/execution/input_types/null/variable_to_type_T_overrides_default_value.graphql +++ /dev/null @@ -1,4 +0,0 @@ -# Schema: ObjectTimesSchema -query ($multiplier: Int) { - times: objTimes(input: {base: 4, multiplier: $multiplier}) -} diff --git a/test/absinthe/integration/execution/input_types/null/variable_to_type_T_overrides_default_value_test.exs b/test/absinthe/integration/execution/input_types/null/variable_to_type_T_overrides_default_value_test.exs new file mode 100644 index 0000000000..d836206b85 --- /dev/null +++ b/test/absinthe/integration/execution/input_types/null/variable_to_type_T_overrides_default_value_test.exs @@ -0,0 +1,18 @@ +defmodule Elixir.Absinthe.Integration.Execution.InputTypes.Null.VariableToType_TOverridesDefaultValueTest do + use Absinthe.Case, async: true + + @query """ + query ($multiplier: Int) { + times: objTimes(input: {base: 4, multiplier: $multiplier}) + } + """ + + test "scenario #1" do + assert {:ok, %{data: %{"times" => 4}}} == + Absinthe.run( + @query, + Absinthe.Fixtures.ObjectTimesSchema, + variables: %{"multiplier" => nil} + ) + end +end diff --git a/test/absinthe/integration/execution/input_types/null/variable_to_type_T_test.exs 
b/test/absinthe/integration/execution/input_types/null/variable_to_type_T_test.exs new file mode 100644 index 0000000000..aaf2f88d4f --- /dev/null +++ b/test/absinthe/integration/execution/input_types/null/variable_to_type_T_test.exs @@ -0,0 +1,23 @@ +defmodule Elixir.Absinthe.Integration.Execution.InputTypes.Null.VariableToType_TTest do + use Absinthe.Case, async: true + + @query """ + query ($value: Int) { + times: objTimes(input: {base: 4, multiplier: $value}) + } + """ + + test "scenario #1" do + assert {:ok, %{data: %{"times" => 4}}} == + Absinthe.run( + @query, + Absinthe.Fixtures.ObjectTimesSchema, + variables: %{"value" => nil} + ) + end + + test "scenario #2" do + assert {:ok, %{data: %{"times" => 32}}} == + Absinthe.run(@query, Absinthe.Fixtures.ObjectTimesSchema, variables: %{"value" => 8}) + end +end diff --git a/test/absinthe/integration/execution/input_types/null/variable_to_type_[T!]!.exs b/test/absinthe/integration/execution/input_types/null/variable_to_type_[T!]!.exs deleted file mode 100644 index f559b69586..0000000000 --- a/test/absinthe/integration/execution/input_types/null/variable_to_type_[T!]!.exs +++ /dev/null @@ -1,10 +0,0 @@ -{[variables: %{"value" => nil}], - { - :ok, - %{ - errors: [ - %{message: "Argument \"input\" has invalid value $value."}, - %{message: "Variable \"value\": Expected non-null, found null."} - ] - } - }} diff --git a/test/absinthe/integration/execution/input_types/null/variable_to_type_[T!]!.graphql b/test/absinthe/integration/execution/input_types/null/variable_to_type_[T!]!.graphql deleted file mode 100644 index 6a2c25ad08..0000000000 --- a/test/absinthe/integration/execution/input_types/null/variable_to_type_[T!]!.graphql +++ /dev/null @@ -1,9 +0,0 @@ -# Schema: NullListsSchema -query ($value: [Int!]!) 
{ - nonNullableListOfNonNullableType(input: $value) { - length - content - nonNullCount - nullCount - } -} diff --git a/test/absinthe/integration/execution/input_types/null/variable_to_type_[T!]!_element.exs b/test/absinthe/integration/execution/input_types/null/variable_to_type_[T!]!_element.exs deleted file mode 100644 index b677e92cd8..0000000000 --- a/test/absinthe/integration/execution/input_types/null/variable_to_type_[T!]!_element.exs +++ /dev/null @@ -1,12 +0,0 @@ -{[variables: %{"value" => [nil, 1]}], - { - :ok, - %{ - errors: [ - %{ - message: - "Argument \"input\" has invalid value $value.\nIn element #1: Expected type \"Int!\", found null." - } - ] - } - }} diff --git a/test/absinthe/integration/execution/input_types/null/variable_to_type_[T!]!_element.graphql b/test/absinthe/integration/execution/input_types/null/variable_to_type_[T!]!_element.graphql deleted file mode 100644 index 6a2c25ad08..0000000000 --- a/test/absinthe/integration/execution/input_types/null/variable_to_type_[T!]!_element.graphql +++ /dev/null @@ -1,9 +0,0 @@ -# Schema: NullListsSchema -query ($value: [Int!]!) 
{ - nonNullableListOfNonNullableType(input: $value) { - length - content - nonNullCount - nullCount - } -} diff --git a/test/absinthe/integration/execution/input_types/null/variable_to_type_[T!].exs b/test/absinthe/integration/execution/input_types/null/variable_to_type_[T!].exs deleted file mode 100644 index a84a4a4ccd..0000000000 --- a/test/absinthe/integration/execution/input_types/null/variable_to_type_[T!].exs +++ /dev/null @@ -1,9 +0,0 @@ -{[variables: %{"value" => nil}], - { - :ok, - %{ - data: %{ - "nullableListOfNonNullableType" => nil - } - } - }} diff --git a/test/absinthe/integration/execution/input_types/null/variable_to_type_[T!].graphql b/test/absinthe/integration/execution/input_types/null/variable_to_type_[T!].graphql deleted file mode 100644 index cec85a9a84..0000000000 --- a/test/absinthe/integration/execution/input_types/null/variable_to_type_[T!].graphql +++ /dev/null @@ -1,9 +0,0 @@ -# Schema: NullListsSchema -query ($value: [Int!]) { - nullableListOfNonNullableType(input: $value) { - length - content - nonNullCount - nullCount - } -} diff --git a/test/absinthe/integration/execution/input_types/null/variable_to_type_[T!]_element.exs b/test/absinthe/integration/execution/input_types/null/variable_to_type_[T!]_element.exs deleted file mode 100644 index b677e92cd8..0000000000 --- a/test/absinthe/integration/execution/input_types/null/variable_to_type_[T!]_element.exs +++ /dev/null @@ -1,12 +0,0 @@ -{[variables: %{"value" => [nil, 1]}], - { - :ok, - %{ - errors: [ - %{ - message: - "Argument \"input\" has invalid value $value.\nIn element #1: Expected type \"Int!\", found null." 
- } - ] - } - }} diff --git a/test/absinthe/integration/execution/input_types/null/variable_to_type_[T!]_element.graphql b/test/absinthe/integration/execution/input_types/null/variable_to_type_[T!]_element.graphql deleted file mode 100644 index cec85a9a84..0000000000 --- a/test/absinthe/integration/execution/input_types/null/variable_to_type_[T!]_element.graphql +++ /dev/null @@ -1,9 +0,0 @@ -# Schema: NullListsSchema -query ($value: [Int!]) { - nullableListOfNonNullableType(input: $value) { - length - content - nonNullCount - nullCount - } -} diff --git a/test/absinthe/integration/execution/input_types/null/variable_to_type_[T]!.exs b/test/absinthe/integration/execution/input_types/null/variable_to_type_[T]!.exs deleted file mode 100644 index f559b69586..0000000000 --- a/test/absinthe/integration/execution/input_types/null/variable_to_type_[T]!.exs +++ /dev/null @@ -1,10 +0,0 @@ -{[variables: %{"value" => nil}], - { - :ok, - %{ - errors: [ - %{message: "Argument \"input\" has invalid value $value."}, - %{message: "Variable \"value\": Expected non-null, found null."} - ] - } - }} diff --git a/test/absinthe/integration/execution/input_types/null/variable_to_type_[T]!.graphql b/test/absinthe/integration/execution/input_types/null/variable_to_type_[T]!.graphql deleted file mode 100644 index 4e27317dd0..0000000000 --- a/test/absinthe/integration/execution/input_types/null/variable_to_type_[T]!.graphql +++ /dev/null @@ -1,9 +0,0 @@ -# Schema: NullListsSchema -query ($value: [Int]!) 
{ - nonNullableList(input: $value) { - length - content - nonNullCount - nullCount - } -} diff --git a/test/absinthe/integration/execution/input_types/null/variable_to_type_[T]!_element.exs b/test/absinthe/integration/execution/input_types/null/variable_to_type_[T]!_element.exs deleted file mode 100644 index 9af48410a6..0000000000 --- a/test/absinthe/integration/execution/input_types/null/variable_to_type_[T]!_element.exs +++ /dev/null @@ -1,14 +0,0 @@ -{[variables: %{"value" => [nil, 1]}], - { - :ok, - %{ - data: %{ - "nonNullableList" => %{ - "length" => 2, - "content" => [nil, 1], - "nullCount" => 1, - "nonNullCount" => 1 - } - } - } - }} diff --git a/test/absinthe/integration/execution/input_types/null/variable_to_type_[T]!_element.graphql b/test/absinthe/integration/execution/input_types/null/variable_to_type_[T]!_element.graphql deleted file mode 100644 index 4e27317dd0..0000000000 --- a/test/absinthe/integration/execution/input_types/null/variable_to_type_[T]!_element.graphql +++ /dev/null @@ -1,9 +0,0 @@ -# Schema: NullListsSchema -query ($value: [Int]!) 
{ - nonNullableList(input: $value) { - length - content - nonNullCount - nullCount - } -} diff --git a/test/absinthe/integration/execution/input_types/null/variable_to_type_[T].exs b/test/absinthe/integration/execution/input_types/null/variable_to_type_[T].exs deleted file mode 100644 index 345f68adcd..0000000000 --- a/test/absinthe/integration/execution/input_types/null/variable_to_type_[T].exs +++ /dev/null @@ -1 +0,0 @@ -{[variables: %{"value" => nil}], {:ok, %{data: %{"nullableList" => nil}}}} diff --git a/test/absinthe/integration/execution/input_types/null/variable_to_type_[T].graphql b/test/absinthe/integration/execution/input_types/null/variable_to_type_[T].graphql deleted file mode 100644 index a27037d785..0000000000 --- a/test/absinthe/integration/execution/input_types/null/variable_to_type_[T].graphql +++ /dev/null @@ -1,9 +0,0 @@ -# Schema: NullListsSchema -query ($value: [Int]) { - nullableList(input: $value) { - length - content - nonNullCount - nullCount - } -} diff --git a/test/absinthe/integration/execution/input_types/null/variable_to_type_[T]_element.exs b/test/absinthe/integration/execution/input_types/null/variable_to_type_[T]_element.exs deleted file mode 100644 index b175775e9b..0000000000 --- a/test/absinthe/integration/execution/input_types/null/variable_to_type_[T]_element.exs +++ /dev/null @@ -1,14 +0,0 @@ -{[variables: %{"value" => [nil, 1]}], - { - :ok, - %{ - data: %{ - "nullableList" => %{ - "length" => 2, - "content" => [nil, 1], - "nullCount" => 1, - "nonNullCount" => 1 - } - } - } - }} diff --git a/test/absinthe/integration/execution/input_types/null/variable_to_type_[T]_element.graphql b/test/absinthe/integration/execution/input_types/null/variable_to_type_[T]_element.graphql deleted file mode 100644 index a27037d785..0000000000 --- a/test/absinthe/integration/execution/input_types/null/variable_to_type_[T]_element.graphql +++ /dev/null @@ -1,9 +0,0 @@ -# Schema: NullListsSchema -query ($value: [Int]) { - nullableList(input: 
$value) { - length - content - nonNullCount - nullCount - } -} diff --git a/test/absinthe/integration/execution/input_types/null/variable_to_type_list_of_T_element_test.exs b/test/absinthe/integration/execution/input_types/null/variable_to_type_list_of_T_element_test.exs new file mode 100644 index 0000000000..4d529e9f86 --- /dev/null +++ b/test/absinthe/integration/execution/input_types/null/variable_to_type_list_of_T_element_test.exs @@ -0,0 +1,33 @@ +defmodule Elixir.Absinthe.Integration.Execution.InputTypes.Null.VariableToTypeListOf_TElementTest do + use Absinthe.Case, async: true + + @query """ + query ($value: [Int]) { + nullableList(input: $value) { + length + content + nonNullCount + nullCount + } + } + """ + + test "scenario #1" do + assert {:ok, + %{ + data: %{ + "nullableList" => %{ + "content" => [nil, 1], + "length" => 2, + "nonNullCount" => 1, + "nullCount" => 1 + } + } + }} == + Absinthe.run( + @query, + Absinthe.Fixtures.NullListsSchema, + variables: %{"value" => [nil, 1]} + ) + end +end diff --git a/test/absinthe/integration/execution/input_types/null/variable_to_type_list_of_T_test.exs b/test/absinthe/integration/execution/input_types/null/variable_to_type_list_of_T_test.exs new file mode 100644 index 0000000000..9b63d6558e --- /dev/null +++ b/test/absinthe/integration/execution/input_types/null/variable_to_type_list_of_T_test.exs @@ -0,0 +1,19 @@ +defmodule Elixir.Absinthe.Integration.Execution.InputTypes.Null.VariableToTypeListOf_TTest do + use Absinthe.Case, async: true + + @query """ + query ($value: [Int]) { + nullableList(input: $value) { + length + content + nonNullCount + nullCount + } + } + """ + + test "scenario #1" do + assert {:ok, %{data: %{"nullableList" => nil}}} == + Absinthe.run(@query, Absinthe.Fixtures.NullListsSchema, variables: %{"value" => nil}) + end +end diff --git a/test/absinthe/integration/execution/input_types/null/variable_to_type_non_null_T_test.exs 
b/test/absinthe/integration/execution/input_types/null/variable_to_type_non_null_T_test.exs new file mode 100644 index 0000000000..03c9bf87b0 --- /dev/null +++ b/test/absinthe/integration/execution/input_types/null/variable_to_type_non_null_T_test.exs @@ -0,0 +1,34 @@ +defmodule Elixir.Absinthe.Integration.Execution.InputTypes.Null.VariableToTypeNonNull_TTest do + use Absinthe.Case, async: true + + @query """ + query ($value: Int!) { times: objTimes(input: {base: $value}) } + """ + + test "scenario #1" do + assert {:ok, + %{ + errors: [ + %{ + message: + "Argument \"input\" has invalid value {base: $value}.\nIn field \"base\": Expected type \"Int!\", found $value.", + locations: [%{column: 40, line: 1}] + }, + %{ + message: "Variable \"value\": Expected non-null, found null.", + locations: [%{column: 8, line: 1}] + } + ] + }} == + Absinthe.run( + @query, + Absinthe.Fixtures.ObjectTimesSchema, + variables: %{"value" => nil} + ) + end + + test "scenario #2" do + assert {:ok, %{data: %{"times" => 16}}} == + Absinthe.run(@query, Absinthe.Fixtures.ObjectTimesSchema, variables: %{"value" => 8}) + end +end diff --git a/test/absinthe/integration/execution/input_types/null/variable_to_type_non_null_list_of_T_element_test.exs b/test/absinthe/integration/execution/input_types/null/variable_to_type_non_null_list_of_T_element_test.exs new file mode 100644 index 0000000000..22058e908a --- /dev/null +++ b/test/absinthe/integration/execution/input_types/null/variable_to_type_non_null_list_of_T_element_test.exs @@ -0,0 +1,32 @@ +defmodule Elixir.Absinthe.Integration.Execution.InputTypes.Null.VariableToTypeNonNullListOf_TElementTest do + use Absinthe.Case, async: true + + @query """ + query ($value: [Int!]) { + nullableListOfNonNullableType(input: $value) { + length + content + nonNullCount + nullCount + } + } + """ + + test "scenario #1" do + assert {:ok, + %{ + errors: [ + %{ + message: + "Argument \"input\" has invalid value $value.\nIn element #1: Expected type \"Int!\", found 
null.", + locations: [%{column: 33, line: 2}] + } + ] + }} == + Absinthe.run( + @query, + Absinthe.Fixtures.NullListsSchema, + variables: %{"value" => [nil, 1]} + ) + end +end diff --git a/test/absinthe/integration/execution/input_types/null/variable_to_type_non_null_list_of_T_test.exs b/test/absinthe/integration/execution/input_types/null/variable_to_type_non_null_list_of_T_test.exs new file mode 100644 index 0000000000..a9505ff521 --- /dev/null +++ b/test/absinthe/integration/execution/input_types/null/variable_to_type_non_null_list_of_T_test.exs @@ -0,0 +1,19 @@ +defmodule Elixir.Absinthe.Integration.Execution.InputTypes.Null.VariableToTypeNonNullListOf_TTest do + use Absinthe.Case, async: true + + @query """ + query ($value: [Int!]) { + nullableListOfNonNullableType(input: $value) { + length + content + nonNullCount + nullCount + } + } + """ + + test "scenario #1" do + assert {:ok, %{data: %{"nullableListOfNonNullableType" => nil}}} == + Absinthe.run(@query, Absinthe.Fixtures.NullListsSchema, variables: %{"value" => nil}) + end +end diff --git a/test/absinthe/integration/execution/input_types/null/variable_to_type_non_null_list_of_non_null_T_element_test.exs b/test/absinthe/integration/execution/input_types/null/variable_to_type_non_null_list_of_non_null_T_element_test.exs new file mode 100644 index 0000000000..d637361cf4 --- /dev/null +++ b/test/absinthe/integration/execution/input_types/null/variable_to_type_non_null_list_of_non_null_T_element_test.exs @@ -0,0 +1,32 @@ +defmodule Elixir.Absinthe.Integration.Execution.InputTypes.Null.VariableToTypeNonNullListOfNonNull_TElementTest do + use Absinthe.Case, async: true + + @query """ + query ($value: [Int!]!) 
{ + nonNullableListOfNonNullableType(input: $value) { + length + content + nonNullCount + nullCount + } + } + """ + + test "scenario #1" do + assert {:ok, + %{ + errors: [ + %{ + message: + "Argument \"input\" has invalid value $value.\nIn element #1: Expected type \"Int!\", found null.", + locations: [%{column: 36, line: 2}] + } + ] + }} == + Absinthe.run( + @query, + Absinthe.Fixtures.NullListsSchema, + variables: %{"value" => [nil, 1]} + ) + end +end diff --git a/test/absinthe/integration/execution/input_types/null/variable_to_type_non_null_list_of_non_null_T_test.exs b/test/absinthe/integration/execution/input_types/null/variable_to_type_non_null_list_of_non_null_T_test.exs new file mode 100644 index 0000000000..3b7693bbd4 --- /dev/null +++ b/test/absinthe/integration/execution/input_types/null/variable_to_type_non_null_list_of_non_null_T_test.exs @@ -0,0 +1,31 @@ +defmodule Elixir.Absinthe.Integration.Execution.InputTypes.Null.VariableToTypeNonNullListOfNonNull_TTest do + use Absinthe.Case, async: true + + @query """ + query ($value: [Int!]!) 
{ + nonNullableListOfNonNullableType(input: $value) { + length + content + nonNullCount + nullCount + } + } + """ + + test "scenario #1" do + assert {:ok, + %{ + errors: [ + %{ + message: "Argument \"input\" has invalid value $value.", + locations: [%{column: 36, line: 2}] + }, + %{ + message: "Variable \"value\": Expected non-null, found null.", + locations: [%{column: 8, line: 1}] + } + ] + }} == + Absinthe.run(@query, Absinthe.Fixtures.NullListsSchema, variables: %{"value" => nil}) + end +end diff --git a/test/absinthe/integration/execution/input_types/null/variable_to_variable_type_T!.exs b/test/absinthe/integration/execution/input_types/null/variable_to_variable_type_T!.exs deleted file mode 100644 index 29f9e1d1fd..0000000000 --- a/test/absinthe/integration/execution/input_types/null/variable_to_variable_type_T!.exs +++ /dev/null @@ -1,9 +0,0 @@ -[ - # Passed, causes error - {[variables: %{"mult" => nil}], - {:ok, %{errors: [%{message: "Variable \"mult\": Expected non-null, found null."}]}}}, - # Not passed, causes error - {:ok, %{errors: [%{message: "Variable \"mult\": Expected non-null, found null."}]}}, - # Control - {[variables: %{"mult" => 2}], {:ok, %{data: %{"times" => 8}}}} -] diff --git a/test/absinthe/integration/execution/input_types/null/variable_to_variable_type_T!.graphql b/test/absinthe/integration/execution/input_types/null/variable_to_variable_type_T!.graphql deleted file mode 100644 index c6f8e9e854..0000000000 --- a/test/absinthe/integration/execution/input_types/null/variable_to_variable_type_T!.graphql +++ /dev/null @@ -1,4 +0,0 @@ -# Schema: TimesSchema -query ($mult: Int!) 
{ - times(base: 4, multiplier: $mult) -} diff --git a/test/absinthe/integration/execution/input_types/null/variable_to_variable_type_non_null_T_test.exs b/test/absinthe/integration/execution/input_types/null/variable_to_variable_type_non_null_T_test.exs new file mode 100644 index 0000000000..b23092c6fe --- /dev/null +++ b/test/absinthe/integration/execution/input_types/null/variable_to_variable_type_non_null_T_test.exs @@ -0,0 +1,38 @@ +defmodule Elixir.Absinthe.Integration.Execution.InputTypes.Null.VariableToVariableTypeNonNull_TTest do + use Absinthe.Case, async: true + + @query """ + query ($mult: Int!) { + times(base: 4, multiplier: $mult) + } + """ + + test "scenario #1" do + assert {:ok, + %{ + errors: [ + %{ + message: "Variable \"mult\": Expected non-null, found null.", + locations: [%{column: 8, line: 1}] + } + ] + }} == Absinthe.run(@query, Absinthe.Fixtures.TimesSchema, variables: %{"mult" => nil}) + end + + test "scenario #2" do + assert {:ok, + %{ + errors: [ + %{ + message: "Variable \"mult\": Expected non-null, found null.", + locations: [%{column: 8, line: 1}] + } + ] + }} == Absinthe.run(@query, Absinthe.Fixtures.TimesSchema, []) + end + + test "scenario #3" do + assert {:ok, %{data: %{"times" => 8}}} == + Absinthe.run(@query, Absinthe.Fixtures.TimesSchema, variables: %{"mult" => 2}) + end +end diff --git a/test/absinthe/integration/execution/input_types/null/variable_to_variable_with_default_value.exs b/test/absinthe/integration/execution/input_types/null/variable_to_variable_with_default_value.exs deleted file mode 100644 index aa2e70a19c..0000000000 --- a/test/absinthe/integration/execution/input_types/null/variable_to_variable_with_default_value.exs +++ /dev/null @@ -1,6 +0,0 @@ -[ - # Not provided, retains default - {:ok, %{data: %{"times" => 24}}}, - # Provided, overrides default - {[variables: %{"mult" => nil}], {:ok, %{data: %{"times" => 4}}}} -] diff --git 
a/test/absinthe/integration/execution/input_types/null/variable_to_variable_with_default_value.graphql b/test/absinthe/integration/execution/input_types/null/variable_to_variable_with_default_value.graphql deleted file mode 100644 index 24d8573f20..0000000000 --- a/test/absinthe/integration/execution/input_types/null/variable_to_variable_with_default_value.graphql +++ /dev/null @@ -1,4 +0,0 @@ -# Schema: TimesSchema -query ($mult: Int = 6) { - times(base: 4, multiplier: $mult) -} diff --git a/test/absinthe/integration/execution/input_types/null/variable_to_variable_with_default_value_test.exs b/test/absinthe/integration/execution/input_types/null/variable_to_variable_with_default_value_test.exs new file mode 100644 index 0000000000..a6131d1c2b --- /dev/null +++ b/test/absinthe/integration/execution/input_types/null/variable_to_variable_with_default_value_test.exs @@ -0,0 +1,19 @@ +defmodule Elixir.Absinthe.Integration.Execution.InputTypes.Null.VariableToVariableWithDefaultValueTest do + use Absinthe.Case, async: true + + @query """ + query ($mult: Int = 6) { + times(base: 4, multiplier: $mult) + } + """ + + test "scenario #1" do + assert {:ok, %{data: %{"times" => 24}}} == + Absinthe.run(@query, Absinthe.Fixtures.TimesSchema, []) + end + + test "scenario #2" do + assert {:ok, %{data: %{"times" => 4}}} == + Absinthe.run(@query, Absinthe.Fixtures.TimesSchema, variables: %{"mult" => nil}) + end +end diff --git a/test/absinthe/integration/execution/introspection/default_value_enum.exs b/test/absinthe/integration/execution/introspection/default_value_enum.exs deleted file mode 100644 index 327d3d5e73..0000000000 --- a/test/absinthe/integration/execution/introspection/default_value_enum.exs +++ /dev/null @@ -1,9 +0,0 @@ -{:ok, - %{ - data: %{ - "__type" => %{ - "name" => "ChannelInput", - "inputFields" => [%{"name" => "channel", "defaultValue" => "RED"}] - } - } - }} diff --git a/test/absinthe/integration/execution/introspection/default_value_enum.graphql 
b/test/absinthe/integration/execution/introspection/default_value_enum.graphql deleted file mode 100644 index fe70699594..0000000000 --- a/test/absinthe/integration/execution/introspection/default_value_enum.graphql +++ /dev/null @@ -1,10 +0,0 @@ -# Schema: ColorSchema -query { - __type(name: "ChannelInput") { - name - inputFields { - name - defaultValue - } - } -} diff --git a/test/absinthe/integration/execution/introspection/default_value_enum_test.exs b/test/absinthe/integration/execution/introspection/default_value_enum_test.exs new file mode 100644 index 0000000000..adbbf09cd7 --- /dev/null +++ b/test/absinthe/integration/execution/introspection/default_value_enum_test.exs @@ -0,0 +1,27 @@ +defmodule Elixir.Absinthe.Integration.Execution.Introspection.DefaultValueEnumTest do + use Absinthe.Case, async: true + + @query """ + query { + __type(name: "ChannelInput") { + name + inputFields { + name + defaultValue + } + } + } + """ + + test "scenario #1" do + assert {:ok, + %{ + data: %{ + "__type" => %{ + "inputFields" => [%{"defaultValue" => "RED", "name" => "channel"}], + "name" => "ChannelInput" + } + } + }} == Absinthe.run(@query, Absinthe.Fixtures.ColorSchema, []) + end +end diff --git a/test/absinthe/integration/execution/introspection/directives.exs b/test/absinthe/integration/execution/introspection/directives.exs deleted file mode 100644 index ecb653b940..0000000000 --- a/test/absinthe/integration/execution/introspection/directives.exs +++ /dev/null @@ -1,41 +0,0 @@ -{:ok, - %{ - data: %{ - "__schema" => %{ - "directives" => [ - %{ - "args" => [ - %{ - "name" => "if", - "type" => %{ - "kind" => "NON_NULL", - "ofType" => %{"kind" => "SCALAR", "name" => "Boolean"} - } - } - ], - "name" => "include", - "locations" => ["INLINE_FRAGMENT", "FRAGMENT_SPREAD", "FIELD"], - "onField" => true, - "onFragment" => true, - "onOperation" => false - }, - %{ - "args" => [ - %{ - "name" => "if", - "type" => %{ - "kind" => "NON_NULL", - "ofType" => %{"kind" => "SCALAR", 
"name" => "Boolean"} - } - } - ], - "name" => "skip", - "locations" => ["INLINE_FRAGMENT", "FRAGMENT_SPREAD", "FIELD"], - "onField" => true, - "onFragment" => true, - "onOperation" => false - } - ] - } - } - }} diff --git a/test/absinthe/integration/execution/introspection/directives.graphql b/test/absinthe/integration/execution/introspection/directives.graphql deleted file mode 100644 index 2da53d7a4e..0000000000 --- a/test/absinthe/integration/execution/introspection/directives.graphql +++ /dev/null @@ -1,13 +0,0 @@ -# Schema: ContactSchema -query { - __schema { - directives { - name - args { name type { kind ofType { name kind } } } - locations - onField - onFragment - onOperation - } - } -} diff --git a/test/absinthe/integration/execution/introspection/directives_test.exs b/test/absinthe/integration/execution/introspection/directives_test.exs new file mode 100644 index 0000000000..389fdfbba1 --- /dev/null +++ b/test/absinthe/integration/execution/introspection/directives_test.exs @@ -0,0 +1,65 @@ +defmodule Elixir.Absinthe.Integration.Execution.Introspection.DirectivesTest do + use Absinthe.Case, async: true + + @query """ + query { + __schema { + directives { + name + args { name type { kind ofType { name kind } } } + locations + isRepeatable + onField + onFragment + onOperation + } + } + } + """ + + test "scenario #1" do + assert {:ok, + %{ + data: %{ + "__schema" => %{ + "directives" => [ + %{ + "args" => [ + %{ + "name" => "if", + "type" => %{ + "kind" => "NON_NULL", + "ofType" => %{"kind" => "SCALAR", "name" => "Boolean"} + } + } + ], + "locations" => ["FIELD", "FRAGMENT_SPREAD", "INLINE_FRAGMENT"], + "name" => "include", + "onField" => true, + "onFragment" => true, + "onOperation" => false, + "isRepeatable" => false + }, + %{ + "args" => [ + %{ + "name" => "if", + "type" => %{ + "kind" => "NON_NULL", + "ofType" => %{"kind" => "SCALAR", "name" => "Boolean"} + } + } + ], + "locations" => ["FIELD", "FRAGMENT_SPREAD", "INLINE_FRAGMENT"], + "name" => "skip", + 
"onField" => true, + "onFragment" => true, + "onOperation" => false, + "isRepeatable" => false + } + ] + } + } + }} == Absinthe.run(@query, Absinthe.Fixtures.ContactSchema, []) + end +end diff --git a/test/absinthe/integration/execution/introspection/full.exs b/test/absinthe/integration/execution/introspection/full.exs deleted file mode 100644 index 385441c02f..0000000000 --- a/test/absinthe/integration/execution/introspection/full.exs +++ /dev/null @@ -1,3 +0,0 @@ -# Handled in a custom assertion match -# See `Absinthe.IntegrationTest.assert_integration/2` -:custom_assertion diff --git a/test/absinthe/integration/execution/introspection/full.graphql b/test/absinthe/integration/execution/introspection/full.graphql deleted file mode 100644 index bc8e476aa9..0000000000 --- a/test/absinthe/integration/execution/introspection/full.graphql +++ /dev/null @@ -1,89 +0,0 @@ -# Schema: ContactSchema -query IntrospectionQuery { - __schema { - queryType { name } - mutationType { name } - subscriptionType { name } - types { - ...FullType - } - directives { - name - description - locations - args { - ...InputValue - } - } - } -} -fragment FullType on __Type { - kind - name - description - fields(includeDeprecated: true) { - name - description - args { - ...InputValue - } - type { - ...TypeRef - } - isDeprecated - deprecationReason - } - inputFields { - ...InputValue - } - interfaces { - ...TypeRef - } - enumValues(includeDeprecated: true) { - name - description - isDeprecated - deprecationReason - } - possibleTypes { - ...TypeRef - } -} -fragment InputValue on __InputValue { - name - description - type { ...TypeRef } - defaultValue -} -fragment TypeRef on __Type { - kind - name - ofType { - kind - name - ofType { - kind - name - ofType { - kind - name - ofType { - kind - name - ofType { - kind - name - ofType { - kind - name - ofType { - kind - name - } - } - } - } - } - } - } -} diff --git a/test/absinthe/integration/execution/introspection/full_test.exs 
b/test/absinthe/integration/execution/introspection/full_test.exs new file mode 100644 index 0000000000..e4fa380a9f --- /dev/null +++ b/test/absinthe/integration/execution/introspection/full_test.exs @@ -0,0 +1,30 @@ +defmodule Elixir.Absinthe.Integration.Execution.Introspection.FullTest do + use Absinthe.Case, async: true + + test "scenario #1" do + result = Absinthe.Schema.introspect(Absinthe.Fixtures.ContactSchema) + {:ok, %{data: %{"__schema" => schema}}} = result + + assert schema["description"] == "Represents a schema" + assert schema["queryType"] + assert schema["mutationType"] + assert schema["subscriptionType"] + assert schema["types"] + assert schema["directives"] + end + + defmodule MiddlewareSchema do + use Absinthe.Schema + + query do + end + + def middleware(_, _, _) do + raise "this should not be called when introspecting" + end + end + + test "middleware callback does not apply to introspection fields" do + assert Absinthe.Schema.introspect(MiddlewareSchema) + end +end diff --git a/test/absinthe/integration/execution/introspection/interface_typename.exs b/test/absinthe/integration/execution/introspection/interface_typename.exs deleted file mode 100644 index ea7e590e10..0000000000 --- a/test/absinthe/integration/execution/introspection/interface_typename.exs +++ /dev/null @@ -1 +0,0 @@ -{:ok, %{data: %{"contact" => %{"entity" => %{"name" => "Bruce", "__typename" => "Person"}}}}} diff --git a/test/absinthe/integration/execution/introspection/interface_typename.graphql b/test/absinthe/integration/execution/introspection/interface_typename.graphql deleted file mode 100644 index ff50be67eb..0000000000 --- a/test/absinthe/integration/execution/introspection/interface_typename.graphql +++ /dev/null @@ -1,2 +0,0 @@ -# Schema: ContactSchema -query { contact { entity { __typename name } } } diff --git a/test/absinthe/integration/execution/introspection/interface_typename_alias.exs 
b/test/absinthe/integration/execution/introspection/interface_typename_alias.exs deleted file mode 100644 index f1d07dbc3b..0000000000 --- a/test/absinthe/integration/execution/introspection/interface_typename_alias.exs +++ /dev/null @@ -1 +0,0 @@ -{:ok, %{data: %{"contact" => %{"entity" => %{"name" => "Bruce", "kind" => "Person"}}}}} diff --git a/test/absinthe/integration/execution/introspection/interface_typename_alias.graphql b/test/absinthe/integration/execution/introspection/interface_typename_alias.graphql deleted file mode 100644 index d6a58ce8af..0000000000 --- a/test/absinthe/integration/execution/introspection/interface_typename_alias.graphql +++ /dev/null @@ -1,2 +0,0 @@ -# Schema: ContactSchema -query { contact { entity { kind: __typename name } } } diff --git a/test/absinthe/integration/execution/introspection/interface_typename_alias_test.exs b/test/absinthe/integration/execution/introspection/interface_typename_alias_test.exs new file mode 100644 index 0000000000..494a937a08 --- /dev/null +++ b/test/absinthe/integration/execution/introspection/interface_typename_alias_test.exs @@ -0,0 +1,12 @@ +defmodule Elixir.Absinthe.Integration.Execution.Introspection.InterfaceTypenameAliasTest do + use Absinthe.Case, async: true + + @query """ + query { contact { entity { kind: __typename name } } } + """ + + test "scenario #1" do + assert {:ok, %{data: %{"contact" => %{"entity" => %{"kind" => "Person", "name" => "Bruce"}}}}} == + Absinthe.run(@query, Absinthe.Fixtures.ContactSchema, []) + end +end diff --git a/test/absinthe/integration/execution/introspection/interface_typename_test.exs b/test/absinthe/integration/execution/introspection/interface_typename_test.exs new file mode 100644 index 0000000000..a5d6e3bf9e --- /dev/null +++ b/test/absinthe/integration/execution/introspection/interface_typename_test.exs @@ -0,0 +1,13 @@ +defmodule Elixir.Absinthe.Integration.Execution.Introspection.InterfaceTypenameTest do + use Absinthe.Case, async: true + + @query """ 
+ query { contact { entity { __typename name } } } + """ + + test "scenario #1" do + assert {:ok, + %{data: %{"contact" => %{"entity" => %{"__typename" => "Person", "name" => "Bruce"}}}}} == + Absinthe.run(@query, Absinthe.Fixtures.ContactSchema, []) + end +end diff --git a/test/absinthe/integration/execution/introspection/mutation_type.exs b/test/absinthe/integration/execution/introspection/mutation_type.exs deleted file mode 100644 index 25988eea6a..0000000000 --- a/test/absinthe/integration/execution/introspection/mutation_type.exs +++ /dev/null @@ -1,4 +0,0 @@ -{:ok, - %{ - data: %{"__schema" => %{"mutationType" => %{"name" => "RootMutationType", "kind" => "OBJECT"}}} - }} diff --git a/test/absinthe/integration/execution/introspection/mutation_type.graphql b/test/absinthe/integration/execution/introspection/mutation_type.graphql deleted file mode 100644 index 222f55d0ca..0000000000 --- a/test/absinthe/integration/execution/introspection/mutation_type.graphql +++ /dev/null @@ -1,2 +0,0 @@ -# Schema: ContactSchema -query { __schema { mutationType { name kind } } } diff --git a/test/absinthe/integration/execution/introspection/mutation_type_test.exs b/test/absinthe/integration/execution/introspection/mutation_type_test.exs new file mode 100644 index 0000000000..9dbf089d10 --- /dev/null +++ b/test/absinthe/integration/execution/introspection/mutation_type_test.exs @@ -0,0 +1,18 @@ +defmodule Elixir.Absinthe.Integration.Execution.Introspection.MutationTypeTest do + use Absinthe.Case, async: true + + @query """ + query { __schema { mutationType { name kind } } } + """ + + test "scenario #1" do + assert {:ok, + %{ + data: %{ + "__schema" => %{ + "mutationType" => %{"kind" => "OBJECT", "name" => "RootMutationType"} + } + } + }} == Absinthe.run(@query, Absinthe.Fixtures.ContactSchema, []) + end +end diff --git a/test/absinthe/integration/execution/introspection/object_typename.exs b/test/absinthe/integration/execution/introspection/object_typename.exs deleted file mode 
100644 index f124ea2cc2..0000000000 --- a/test/absinthe/integration/execution/introspection/object_typename.exs +++ /dev/null @@ -1 +0,0 @@ -{:ok, %{data: %{"person" => %{"name" => "Bruce", "__typename" => "Person"}}}} diff --git a/test/absinthe/integration/execution/introspection/object_typename.graphql b/test/absinthe/integration/execution/introspection/object_typename.graphql deleted file mode 100644 index aefd01f2a8..0000000000 --- a/test/absinthe/integration/execution/introspection/object_typename.graphql +++ /dev/null @@ -1,7 +0,0 @@ -# Schema: ContactSchema -query { - person { - __typename - name - } -} diff --git a/test/absinthe/integration/execution/introspection/object_typename_alias.exs b/test/absinthe/integration/execution/introspection/object_typename_alias.exs deleted file mode 100644 index f8b42945fc..0000000000 --- a/test/absinthe/integration/execution/introspection/object_typename_alias.exs +++ /dev/null @@ -1 +0,0 @@ -{:ok, %{data: %{"person" => %{"name" => "Bruce", "kind" => "Person"}}}} diff --git a/test/absinthe/integration/execution/introspection/object_typename_alias.graphql b/test/absinthe/integration/execution/introspection/object_typename_alias.graphql deleted file mode 100644 index 4cc23bb2cd..0000000000 --- a/test/absinthe/integration/execution/introspection/object_typename_alias.graphql +++ /dev/null @@ -1,7 +0,0 @@ -# Schema: ContactSchema -query { - person { - kind: __typename - name - } -} diff --git a/test/absinthe/integration/execution/introspection/object_typename_alias_test.exs b/test/absinthe/integration/execution/introspection/object_typename_alias_test.exs new file mode 100644 index 0000000000..be207d5ec3 --- /dev/null +++ b/test/absinthe/integration/execution/introspection/object_typename_alias_test.exs @@ -0,0 +1,17 @@ +defmodule Elixir.Absinthe.Integration.Execution.Introspection.ObjectTypenameAliasTest do + use Absinthe.Case, async: true + + @query """ + query { + person { + kind: __typename + name + } + } + """ + + test 
"scenario #1" do + assert {:ok, %{data: %{"person" => %{"kind" => "Person", "name" => "Bruce"}}}} == + Absinthe.run(@query, Absinthe.Fixtures.ContactSchema, []) + end +end diff --git a/test/absinthe/integration/execution/introspection/object_typename_test.exs b/test/absinthe/integration/execution/introspection/object_typename_test.exs new file mode 100644 index 0000000000..43c26bf005 --- /dev/null +++ b/test/absinthe/integration/execution/introspection/object_typename_test.exs @@ -0,0 +1,17 @@ +defmodule Elixir.Absinthe.Integration.Execution.Introspection.ObjectTypenameTest do + use Absinthe.Case, async: true + + @query """ + query { + person { + __typename + name + } + } + """ + + test "scenario #1" do + assert {:ok, %{data: %{"person" => %{"__typename" => "Person", "name" => "Bruce"}}}} == + Absinthe.run(@query, Absinthe.Fixtures.ContactSchema, []) + end +end diff --git a/test/absinthe/integration/execution/introspection/object_with_list.exs b/test/absinthe/integration/execution/introspection/object_with_list.exs deleted file mode 100644 index 1abda63939..0000000000 --- a/test/absinthe/integration/execution/introspection/object_with_list.exs +++ /dev/null @@ -1,26 +0,0 @@ -{:ok, - %{ - data: %{ - "__type" => %{ - "fields" => [ - %{ - "name" => "address", - "type" => %{"kind" => "SCALAR", "name" => "String", "ofType" => nil} - }, - %{"name" => "age", "type" => %{"kind" => "SCALAR", "name" => "Int", "ofType" => nil}}, - %{ - "name" => "name", - "type" => %{"kind" => "SCALAR", "name" => "String", "ofType" => nil} - }, - %{ - "name" => "others", - "type" => %{ - "kind" => "LIST", - "name" => nil, - "ofType" => %{"kind" => "OBJECT", "name" => "Person"} - } - } - ] - } - } - }} diff --git a/test/absinthe/integration/execution/introspection/object_with_list.graphql b/test/absinthe/integration/execution/introspection/object_with_list.graphql deleted file mode 100644 index 8d017a48cd..0000000000 --- 
a/test/absinthe/integration/execution/introspection/object_with_list.graphql +++ /dev/null @@ -1,16 +0,0 @@ -# Schema: ContactSchema -query { - __type(name: "Person") { - fields(include_deprecated: true) { - name - type { - kind - name - ofType { - kind - name - } - } - } - } -} diff --git a/test/absinthe/integration/execution/introspection/object_with_list_test.exs b/test/absinthe/integration/execution/introspection/object_with_list_test.exs new file mode 100644 index 0000000000..e298274f45 --- /dev/null +++ b/test/absinthe/integration/execution/introspection/object_with_list_test.exs @@ -0,0 +1,53 @@ +defmodule Elixir.Absinthe.Integration.Execution.Introspection.ObjectWithListTest do + use Absinthe.Case, async: true + + @query """ + query { + __type(name: "Person") { + fields(include_deprecated: true) { + name + type { + kind + name + ofType { + kind + name + } + } + } + } + } + """ + + test "scenario #1" do + assert {:ok, + %{ + data: %{ + "__type" => %{ + "fields" => [ + %{ + "name" => "address", + "type" => %{"kind" => "SCALAR", "name" => "String", "ofType" => nil} + }, + %{ + "name" => "age", + "type" => %{"kind" => "SCALAR", "name" => "Int", "ofType" => nil} + }, + %{ + "name" => "name", + "type" => %{"kind" => "SCALAR", "name" => "String", "ofType" => nil} + }, + %{ + "name" => "others", + "type" => %{ + "kind" => "LIST", + "name" => nil, + "ofType" => %{"kind" => "OBJECT", "name" => "Person"} + } + } + ] + } + } + }} == Absinthe.run(@query, Absinthe.Fixtures.ContactSchema, []) + end +end diff --git a/test/absinthe/integration/execution/introspection/query_type.exs b/test/absinthe/integration/execution/introspection/query_type.exs deleted file mode 100644 index 22d1f5d965..0000000000 --- a/test/absinthe/integration/execution/introspection/query_type.exs +++ /dev/null @@ -1,2 +0,0 @@ -{:ok, - %{data: %{"__schema" => %{"queryType" => %{"name" => "RootQueryType", "kind" => "OBJECT"}}}}} diff --git 
a/test/absinthe/integration/execution/introspection/query_type.graphql b/test/absinthe/integration/execution/introspection/query_type.graphql deleted file mode 100644 index 29eee3eb98..0000000000 --- a/test/absinthe/integration/execution/introspection/query_type.graphql +++ /dev/null @@ -1,2 +0,0 @@ -# Schema: ContactSchema -query { __schema { queryType { name kind } } } diff --git a/test/absinthe/integration/execution/introspection/query_type_test.exs b/test/absinthe/integration/execution/introspection/query_type_test.exs new file mode 100644 index 0000000000..f9d9f5a7fd --- /dev/null +++ b/test/absinthe/integration/execution/introspection/query_type_test.exs @@ -0,0 +1,16 @@ +defmodule Elixir.Absinthe.Integration.Execution.Introspection.QueryTypeTest do + use Absinthe.Case, async: true + + @query """ + query { __schema { queryType { name kind } } } + """ + + test "scenario #1" do + assert {:ok, + %{ + data: %{ + "__schema" => %{"queryType" => %{"kind" => "OBJECT", "name" => "RootQueryType"}} + } + }} == Absinthe.run(@query, Absinthe.Fixtures.ContactSchema, []) + end +end diff --git a/test/absinthe/integration/execution/introspection/schema_types.exs b/test/absinthe/integration/execution/introspection/schema_types.exs deleted file mode 100644 index 385441c02f..0000000000 --- a/test/absinthe/integration/execution/introspection/schema_types.exs +++ /dev/null @@ -1,3 +0,0 @@ -# Handled in a custom assertion match -# See `Absinthe.IntegrationTest.assert_integration/2` -:custom_assertion diff --git a/test/absinthe/integration/execution/introspection/schema_types.graphql b/test/absinthe/integration/execution/introspection/schema_types.graphql deleted file mode 100644 index 57c32c380d..0000000000 --- a/test/absinthe/integration/execution/introspection/schema_types.graphql +++ /dev/null @@ -1,2 +0,0 @@ -# Schema: ContactSchema -query { __schema { types { name } } } diff --git a/test/absinthe/integration/execution/introspection/schema_types_test.exs 
b/test/absinthe/integration/execution/introspection/schema_types_test.exs new file mode 100644 index 0000000000..e0065f5d40 --- /dev/null +++ b/test/absinthe/integration/execution/introspection/schema_types_test.exs @@ -0,0 +1,38 @@ +defmodule Elixir.Absinthe.Integration.Execution.Introspection.SchemaTypesTest do + use Absinthe.Case, async: true + + @query """ + query { __schema { types { name } } } + """ + + @expected [ + "__Directive", + "__DirectiveLocation", + "__EnumValue", + "__Field", + "__InputValue", + "__Schema", + "__Type", + "__TypeKind", + "Boolean", + "Business", + "Contact", + "Int", + "RootMutationType", + "NamedEntity", + "Person", + "ProfileInput", + "RootQueryType", + "SearchResult", + "String", + "RootSubscriptionType" + ] + + test "scenario #1" do + result = Absinthe.run(@query, Absinthe.Fixtures.ContactSchema, []) + assert {:ok, %{data: %{"__schema" => %{"types" => types}}}} = result + names = types |> Enum.map(& &1["name"]) + + assert @expected == names + end +end diff --git a/test/absinthe/integration/execution/introspection/subscription_type.exs b/test/absinthe/integration/execution/introspection/subscription_type.exs deleted file mode 100644 index 51f1be0807..0000000000 --- a/test/absinthe/integration/execution/introspection/subscription_type.exs +++ /dev/null @@ -1,8 +0,0 @@ -{:ok, - %{ - data: %{ - "__schema" => %{ - "subscriptionType" => %{"name" => "RootSubscriptionType", "kind" => "OBJECT"} - } - } - }} diff --git a/test/absinthe/integration/execution/introspection/subscription_type.graphql b/test/absinthe/integration/execution/introspection/subscription_type.graphql deleted file mode 100644 index 81845708d9..0000000000 --- a/test/absinthe/integration/execution/introspection/subscription_type.graphql +++ /dev/null @@ -1,2 +0,0 @@ -# Schema: ContactSchema -query { __schema { subscriptionType { name kind } } } diff --git a/test/absinthe/integration/execution/introspection/subscription_type_test.exs 
b/test/absinthe/integration/execution/introspection/subscription_type_test.exs new file mode 100644 index 0000000000..c97c3f2371 --- /dev/null +++ b/test/absinthe/integration/execution/introspection/subscription_type_test.exs @@ -0,0 +1,18 @@ +defmodule Elixir.Absinthe.Integration.Execution.Introspection.SubscriptionTypeTest do + use Absinthe.Case, async: true + + @query """ + query { __schema { subscriptionType { name kind } } } + """ + + test "scenario #1" do + assert {:ok, + %{ + data: %{ + "__schema" => %{ + "subscriptionType" => %{"kind" => "OBJECT", "name" => "RootSubscriptionType"} + } + } + }} == Absinthe.run(@query, Absinthe.Fixtures.ContactSchema, []) + end +end diff --git a/test/absinthe/integration/execution/introspection/type_interface.exs b/test/absinthe/integration/execution/introspection/type_interface.exs deleted file mode 100644 index cfe1de6c0f..0000000000 --- a/test/absinthe/integration/execution/introspection/type_interface.exs +++ /dev/null @@ -1,11 +0,0 @@ -{:ok, - %{ - data: %{ - "__type" => %{ - "description" => "A named entity", - "kind" => "INTERFACE", - "name" => "NamedEntity", - "possibleTypes" => [%{"name" => "Person"}, %{"name" => "Business"}] - } - } - }} diff --git a/test/absinthe/integration/execution/introspection/type_interface.graphql b/test/absinthe/integration/execution/introspection/type_interface.graphql deleted file mode 100644 index e52d587b05..0000000000 --- a/test/absinthe/integration/execution/introspection/type_interface.graphql +++ /dev/null @@ -1,11 +0,0 @@ -# Schema: ContactSchema -query { - __type(name: "NamedEntity") { - kind - name - description - possibleTypes { - name - } - } -} diff --git a/test/absinthe/integration/execution/introspection/type_interface_test.exs b/test/absinthe/integration/execution/introspection/type_interface_test.exs new file mode 100644 index 0000000000..8a59bdea91 --- /dev/null +++ b/test/absinthe/integration/execution/introspection/type_interface_test.exs @@ -0,0 +1,30 @@ +defmodule 
Elixir.Absinthe.Integration.Execution.Introspection.TypeInterfaceTest do + use Absinthe.Case, async: true + + @query """ + query { + __type(name: "NamedEntity") { + kind + name + description + possibleTypes { + name + } + } + } + """ + + test "scenario #1" do + assert {:ok, + %{ + data: %{ + "__type" => %{ + "description" => "A named entity", + "kind" => "INTERFACE", + "name" => "NamedEntity", + "possibleTypes" => [%{"name" => "Business"}, %{"name" => "Person"}] + } + } + }} == Absinthe.run(@query, Absinthe.Fixtures.ContactSchema, []) + end +end diff --git a/test/absinthe/integration/execution/introspection/type_kind_test.exs b/test/absinthe/integration/execution/introspection/type_kind_test.exs new file mode 100644 index 0000000000..19dce99bf3 --- /dev/null +++ b/test/absinthe/integration/execution/introspection/type_kind_test.exs @@ -0,0 +1,54 @@ +defmodule Elixir.Absinthe.Integration.Execution.Introspection.TypeKindTest do + use Absinthe.Case, async: true + + @query """ + query { + __type(name: "__TypeKind") { + name + enumValues { + name + } + } + } + """ + + # https://spec.graphql.org/draft/#sel-HAJbLA6GABABKwzN + # + # enum __TypeKind { + # SCALAR + # OBJECT + # INTERFACE + # UNION + # ENUM + # INPUT_OBJECT + # LIST + # NON_NULL + # } + + @expected [ + "SCALAR", + "OBJECT", + "INTERFACE", + "UNION", + "ENUM", + "INPUT_OBJECT", + "LIST", + "NON_NULL" + ] + + test "Contains expected values" do + {:ok, + %{ + data: %{ + "__type" => %{ + "name" => "__TypeKind", + "enumValues" => enum_values + } + } + }} = Absinthe.run(@query, Absinthe.Fixtures.ContactSchema, []) + + type_kind_values = enum_values |> Enum.map(& &1["name"]) + + assert Enum.sort(type_kind_values) == Enum.sort(@expected) + end +end diff --git a/test/absinthe/integration/execution/introspection/union_typename.exs b/test/absinthe/integration/execution/introspection/union_typename.exs deleted file mode 100644 index 2549d33067..0000000000 --- 
a/test/absinthe/integration/execution/introspection/union_typename.exs +++ /dev/null @@ -1 +0,0 @@ -{:ok, %{data: %{"firstSearchResult" => %{"__typename" => "Person"}}}} diff --git a/test/absinthe/integration/execution/introspection/union_typename.graphql b/test/absinthe/integration/execution/introspection/union_typename.graphql deleted file mode 100644 index b9bcbb2bd7..0000000000 --- a/test/absinthe/integration/execution/introspection/union_typename.graphql +++ /dev/null @@ -1,2 +0,0 @@ -# Schema: ContactSchema -query { firstSearchResult { __typename } } diff --git a/test/absinthe/integration/execution/introspection/union_typename_test.exs b/test/absinthe/integration/execution/introspection/union_typename_test.exs new file mode 100644 index 0000000000..b929bab632 --- /dev/null +++ b/test/absinthe/integration/execution/introspection/union_typename_test.exs @@ -0,0 +1,12 @@ +defmodule Elixir.Absinthe.Integration.Execution.Introspection.UnionTypenameTest do + use Absinthe.Case, async: true + + @query """ + query { firstSearchResult { __typename } } + """ + + test "scenario #1" do + assert {:ok, %{data: %{"firstSearchResult" => %{"__typename" => "Person"}}}} == + Absinthe.run(@query, Absinthe.Fixtures.ContactSchema, []) + end +end diff --git a/test/absinthe/integration/execution/introspection/union_wrapped_typename.exs b/test/absinthe/integration/execution/introspection/union_wrapped_typename.exs deleted file mode 100644 index b6b82e09b6..0000000000 --- a/test/absinthe/integration/execution/introspection/union_wrapped_typename.exs +++ /dev/null @@ -1 +0,0 @@ -{:ok, %{data: %{"searchResults" => [%{"__typename" => "Person"}, %{"__typename" => "Business"}]}}} diff --git a/test/absinthe/integration/execution/introspection/union_wrapped_typename.graphql b/test/absinthe/integration/execution/introspection/union_wrapped_typename.graphql deleted file mode 100644 index 436250c3a0..0000000000 --- 
a/test/absinthe/integration/execution/introspection/union_wrapped_typename.graphql +++ /dev/null @@ -1,2 +0,0 @@ -# Schema: ContactSchema -query { searchResults { __typename } } diff --git a/test/absinthe/integration/execution/introspection/union_wrapped_typename_test.exs b/test/absinthe/integration/execution/introspection/union_wrapped_typename_test.exs new file mode 100644 index 0000000000..bd53d2998f --- /dev/null +++ b/test/absinthe/integration/execution/introspection/union_wrapped_typename_test.exs @@ -0,0 +1,16 @@ +defmodule Elixir.Absinthe.Integration.Execution.Introspection.UnionWrappedTypenameTest do + use Absinthe.Case, async: true + + @query """ + query { searchResults { __typename } } + """ + + test "scenario #1" do + assert {:ok, + %{ + data: %{ + "searchResults" => [%{"__typename" => "Person"}, %{"__typename" => "Business"}] + } + }} == Absinthe.run(@query, Absinthe.Fixtures.ContactSchema, []) + end +end diff --git a/test/absinthe/integration/execution/nested_objects.exs b/test/absinthe/integration/execution/nested_objects.exs deleted file mode 100644 index b15c5d065b..0000000000 --- a/test/absinthe/integration/execution/nested_objects.exs +++ /dev/null @@ -1 +0,0 @@ -{:ok, %{data: %{"thing" => %{"name" => "Foo", "otherThing" => %{"name" => "Bar"}}}}} diff --git a/test/absinthe/integration/execution/nested_objects.graphql b/test/absinthe/integration/execution/nested_objects.graphql deleted file mode 100644 index 07f6470b8d..0000000000 --- a/test/absinthe/integration/execution/nested_objects.graphql +++ /dev/null @@ -1,8 +0,0 @@ -query { - thing(id: "foo") { - name - otherThing { - name - } - } -} diff --git a/test/absinthe/integration/execution/nested_objects_test.exs b/test/absinthe/integration/execution/nested_objects_test.exs new file mode 100644 index 0000000000..d0bf5fc71d --- /dev/null +++ b/test/absinthe/integration/execution/nested_objects_test.exs @@ -0,0 +1,19 @@ +defmodule Elixir.Absinthe.Integration.Execution.NestedObjectsTest do + use 
Absinthe.Case, async: true + + @query """ + query { + thing(id: "foo") { + name + otherThing { + name + } + } + } + """ + + test "scenario #1" do + assert {:ok, %{data: %{"thing" => %{"name" => "Foo", "otherThing" => %{"name" => "Bar"}}}}} == + Absinthe.run(@query, Absinthe.Fixtures.Things.MacroSchema, []) + end +end diff --git a/test/absinthe/integration/execution/operation_by_name.exs b/test/absinthe/integration/execution/operation_by_name.exs deleted file mode 100644 index 4e3b021ee0..0000000000 --- a/test/absinthe/integration/execution/operation_by_name.exs +++ /dev/null @@ -1,24 +0,0 @@ -[ - { - [operation_name: "ThingFoo"], - {:ok, %{data: %{"thing" => %{"name" => "Foo"}}}} - }, - { - [], - {:ok, - %{ - errors: [ - %{message: "Must provide a valid operation name if query contains multiple operations."} - ] - }} - }, - { - [operation_name: "invalid"], - {:ok, - %{ - errors: [ - %{message: "Must provide a valid operation name if query contains multiple operations."} - ] - }} - } -] diff --git a/test/absinthe/integration/execution/operation_by_name.graphql b/test/absinthe/integration/execution/operation_by_name.graphql deleted file mode 100644 index d397088bfc..0000000000 --- a/test/absinthe/integration/execution/operation_by_name.graphql +++ /dev/null @@ -1,10 +0,0 @@ -query ThingFoo { - thing(id: "foo") { - name - } -} -query ThingBar { - thing(id: "bar") { - name - } -} diff --git a/test/absinthe/integration/execution/operation_by_name_test.exs b/test/absinthe/integration/execution/operation_by_name_test.exs new file mode 100644 index 0000000000..a67a17df38 --- /dev/null +++ b/test/absinthe/integration/execution/operation_by_name_test.exs @@ -0,0 +1,118 @@ +defmodule Elixir.Absinthe.Integration.Execution.OperationByNameTest do + use Absinthe.Case, async: true + + @query """ + query ThingFoo($id: String!) 
{ + thing(id: $id) { + name + } + } + query ThingBar { + thing(id: "bar") { + name + } + } + """ + + test "scenario #1" do + assert {:ok, %{data: %{"thing" => %{"name" => "Foo"}}}} == + Absinthe.run(@query, Absinthe.Fixtures.Things.MacroSchema, + operation_name: "ThingFoo", + variables: %{"id" => "foo"} + ) + end + + test "scenario #2" do + assert {:ok, + %{ + errors: [ + %{ + message: """ + Must provide a valid operation name if query contains multiple operations. + + No operation name was given. + """ + } + ] + }} == Absinthe.run(@query, Absinthe.Fixtures.Things.MacroSchema, []) + end + + test "scenario #3" do + assert {:ok, + %{ + errors: [ + %{ + message: """ + Must provide a valid operation name if query contains multiple operations. + + The provided operation name was: "invalid" + """ + } + ] + }} == + Absinthe.run(@query, Absinthe.Fixtures.Things.MacroSchema, operation_name: "invalid") + end + + test "scenario #4" do + assert {:ok, %{data: %{"thing" => %{"name" => "Bar"}}}} == + Absinthe.run(@query, Absinthe.Fixtures.Things.MacroSchema, operation_name: "ThingBar") + end + + @query """ + mutation First($id: String!, $thing: InputThing!) { + first: updateThing(id: $id thing: $thing) { + id + } + } + mutation Second { + second: failingThing(type: WITH_CODE) { + id + } + } + query Third { + third: thing(id: "bar") { + name + } + } + """ + + test "scenario #5" do + assert {:ok, + %{ + data: %{"second" => nil}, + errors: [ + %{ + code: 42, + locations: [%{column: 3, line: 7}], + message: "Custom Error", + path: ["second"] + } + ] + }} == + Absinthe.run(@query, Absinthe.Fixtures.Things.MacroSchema, operation_name: "Second") + end + + @query """ + mutation First($id: String!, $thing: InputThing!) 
{ + updateThing(id: $id thing: $thing) { + id + } + } + """ + + test "return error when single operation in document does not match given operation name" do + assert {:ok, + %{ + errors: [ + %{ + message: """ + The provided operation name did not match the operation in the query. + + The provided operation name was: "Second" + """ + } + ] + }} == + Absinthe.run(@query, Absinthe.Fixtures.Things.MacroSchema, operation_name: "Second") + end +end diff --git a/test/absinthe/integration/execution/resolution/errors_include_path_indices.exs b/test/absinthe/integration/execution/resolution/errors_include_path_indices.exs deleted file mode 100644 index 9bd59ea7c6..0000000000 --- a/test/absinthe/integration/execution/resolution/errors_include_path_indices.exs +++ /dev/null @@ -1,5 +0,0 @@ -{:ok, - %{ - data: %{"things" => [%{"id" => "bar", "fail" => "bar"}, %{"id" => "foo", "fail" => nil}]}, - errors: [%{message: "fail", path: ["things", 1, "fail"]}] - }} diff --git a/test/absinthe/integration/execution/resolution/errors_include_path_indices.graphql b/test/absinthe/integration/execution/resolution/errors_include_path_indices.graphql deleted file mode 100644 index 1251e3bdb3..0000000000 --- a/test/absinthe/integration/execution/resolution/errors_include_path_indices.graphql +++ /dev/null @@ -1,6 +0,0 @@ -query { - things { - id - fail(id: "foo") - } -} diff --git a/test/absinthe/integration/execution/resolution/errors_include_path_indices_test.exs b/test/absinthe/integration/execution/resolution/errors_include_path_indices_test.exs new file mode 100644 index 0000000000..bff30282c7 --- /dev/null +++ b/test/absinthe/integration/execution/resolution/errors_include_path_indices_test.exs @@ -0,0 +1,28 @@ +defmodule Elixir.Absinthe.Integration.Execution.Resolution.ErrorsIncludePathIndicesTest do + use Absinthe.Case, async: true + + @query """ + query { + things { + id + fail(id: "foo") + } + } + """ + + test "scenario #1" do + assert {:ok, + %{ + data: %{ + "things" => [%{"fail" => 
"bar", "id" => "bar"}, %{"fail" => nil, "id" => "foo"}] + }, + errors: [ + %{ + message: "fail", + path: ["things", 1, "fail"], + locations: [%{column: 5, line: 4}] + } + ] + }} == Absinthe.run(@query, Absinthe.Fixtures.Things.MacroSchema, []) + end +end diff --git a/test/absinthe/integration/execution/resolution/exceptions/bad_match.exs b/test/absinthe/integration/execution/resolution/exceptions/bad_match.exs deleted file mode 100644 index 6e4e4376f4..0000000000 --- a/test/absinthe/integration/execution/resolution/exceptions/bad_match.exs +++ /dev/null @@ -1 +0,0 @@ -{:raise, Absinthe.ExecutionError} diff --git a/test/absinthe/integration/execution/resolution/exceptions/bad_match.graphql b/test/absinthe/integration/execution/resolution/exceptions/bad_match.graphql deleted file mode 100644 index ee99fb0f9b..0000000000 --- a/test/absinthe/integration/execution/resolution/exceptions/bad_match.graphql +++ /dev/null @@ -1,5 +0,0 @@ -query { - badResolution { - name - } -} diff --git a/test/absinthe/integration/execution/resolution/exceptions/bad_match_test.exs b/test/absinthe/integration/execution/resolution/exceptions/bad_match_test.exs new file mode 100644 index 0000000000..4d8261e2ef --- /dev/null +++ b/test/absinthe/integration/execution/resolution/exceptions/bad_match_test.exs @@ -0,0 +1,17 @@ +defmodule Elixir.Absinthe.Integration.Execution.Resolution.Exceptions.BadMatchTest do + use Absinthe.Case, async: true + + @query """ + query { + badResolution { + name + } + } + """ + + test "scenario #1" do + assert_raise(Absinthe.ExecutionError, fn -> + Absinthe.run(@query, Absinthe.Fixtures.Things.MacroSchema, []) + end) + end +end diff --git a/test/absinthe/integration/execution/resolution/exceptions/missing_error_message.exs b/test/absinthe/integration/execution/resolution/exceptions/missing_error_message.exs deleted file mode 100644 index 6e4e4376f4..0000000000 --- a/test/absinthe/integration/execution/resolution/exceptions/missing_error_message.exs +++ /dev/null @@ 
-1 +0,0 @@ -{:raise, Absinthe.ExecutionError} diff --git a/test/absinthe/integration/execution/resolution/exceptions/missing_error_message.graphql b/test/absinthe/integration/execution/resolution/exceptions/missing_error_message.graphql deleted file mode 100644 index b06ad03a44..0000000000 --- a/test/absinthe/integration/execution/resolution/exceptions/missing_error_message.graphql +++ /dev/null @@ -1 +0,0 @@ -mutation { failingThing(type: WITHOUT_MESSAGE) { name } } diff --git a/test/absinthe/integration/execution/resolution/exceptions/missing_error_message_test.exs b/test/absinthe/integration/execution/resolution/exceptions/missing_error_message_test.exs new file mode 100644 index 0000000000..d84944b65c --- /dev/null +++ b/test/absinthe/integration/execution/resolution/exceptions/missing_error_message_test.exs @@ -0,0 +1,13 @@ +defmodule Elixir.Absinthe.Integration.Execution.Resolution.Exceptions.MissingErrorMessageTest do + use Absinthe.Case, async: true + + @query """ + mutation { failingThing(type: WITHOUT_MESSAGE) { name } } + """ + + test "scenario #1" do + assert_raise(Absinthe.ExecutionError, fn -> + Absinthe.run(@query, Absinthe.Fixtures.Things.MacroSchema, []) + end) + end +end diff --git a/test/absinthe/integration/execution/resolution/exceptions/missing_error_message_when_returning_multiple.exs b/test/absinthe/integration/execution/resolution/exceptions/missing_error_message_when_returning_multiple.exs deleted file mode 100644 index 6e4e4376f4..0000000000 --- a/test/absinthe/integration/execution/resolution/exceptions/missing_error_message_when_returning_multiple.exs +++ /dev/null @@ -1 +0,0 @@ -{:raise, Absinthe.ExecutionError} diff --git a/test/absinthe/integration/execution/resolution/exceptions/missing_error_message_when_returning_multiple.graphql b/test/absinthe/integration/execution/resolution/exceptions/missing_error_message_when_returning_multiple.graphql deleted file mode 100644 index 9c49aae211..0000000000 --- 
a/test/absinthe/integration/execution/resolution/exceptions/missing_error_message_when_returning_multiple.graphql +++ /dev/null @@ -1 +0,0 @@ -mutation { failingThing(type: MULTIPLE_WITHOUT_MESSAGE) { name } } diff --git a/test/absinthe/integration/execution/resolution/exceptions/missing_error_message_when_returning_multiple_test.exs b/test/absinthe/integration/execution/resolution/exceptions/missing_error_message_when_returning_multiple_test.exs new file mode 100644 index 0000000000..c9623c79eb --- /dev/null +++ b/test/absinthe/integration/execution/resolution/exceptions/missing_error_message_when_returning_multiple_test.exs @@ -0,0 +1,13 @@ +defmodule Elixir.Absinthe.Integration.Execution.Resolution.Exceptions.MissingErrorMessageWhenReturningMultipleTest do + use Absinthe.Case, async: true + + @query """ + mutation { failingThing(type: MULTIPLE_WITHOUT_MESSAGE) { name } } + """ + + test "scenario #1" do + assert_raise(Absinthe.ExecutionError, fn -> + Absinthe.run(@query, Absinthe.Fixtures.Things.MacroSchema, []) + end) + end +end diff --git a/test/absinthe/integration/execution/resolution/extra_error_fields.exs b/test/absinthe/integration/execution/resolution/extra_error_fields.exs deleted file mode 100644 index 8d9cc64609..0000000000 --- a/test/absinthe/integration/execution/resolution/extra_error_fields.exs +++ /dev/null @@ -1,13 +0,0 @@ -{ - :ok, - %{ - data: %{"failingThing" => nil}, - errors: [ - %{ - code: 42, - message: "Custom Error", - path: ["failingThing"] - } - ] - } -} diff --git a/test/absinthe/integration/execution/resolution/extra_error_fields.graphql b/test/absinthe/integration/execution/resolution/extra_error_fields.graphql deleted file mode 100644 index 6ad429bb84..0000000000 --- a/test/absinthe/integration/execution/resolution/extra_error_fields.graphql +++ /dev/null @@ -1 +0,0 @@ -mutation { failingThing(type: WITH_CODE) { name } } diff --git a/test/absinthe/integration/execution/resolution/extra_error_fields_test.exs 
b/test/absinthe/integration/execution/resolution/extra_error_fields_test.exs new file mode 100644 index 0000000000..11da0d00a5 --- /dev/null +++ b/test/absinthe/integration/execution/resolution/extra_error_fields_test.exs @@ -0,0 +1,22 @@ +defmodule Elixir.Absinthe.Integration.Execution.Resolution.ExtraErrorFieldsTest do + use Absinthe.Case, async: true + + @query """ + mutation { failingThing(type: WITH_CODE) { name } } + """ + + test "scenario #1" do + assert {:ok, + %{ + data: %{"failingThing" => nil}, + errors: [ + %{ + code: 42, + message: "Custom Error", + path: ["failingThing"], + locations: [%{column: 12, line: 1}] + } + ] + }} == Absinthe.run(@query, Absinthe.Fixtures.Things.MacroSchema, []) + end +end diff --git a/test/absinthe/integration/execution/resolution/multiple_errors.exs b/test/absinthe/integration/execution/resolution/multiple_errors.exs deleted file mode 100644 index 298db98a83..0000000000 --- a/test/absinthe/integration/execution/resolution/multiple_errors.exs +++ /dev/null @@ -1,10 +0,0 @@ -{ - :ok, - %{ - data: %{"failingThing" => nil}, - errors: [ - %{message: "one", path: ["failingThing"]}, - %{message: "two", path: ["failingThing"]} - ] - } -} diff --git a/test/absinthe/integration/execution/resolution/multiple_errors.graphql b/test/absinthe/integration/execution/resolution/multiple_errors.graphql deleted file mode 100644 index f3cc4de9d5..0000000000 --- a/test/absinthe/integration/execution/resolution/multiple_errors.graphql +++ /dev/null @@ -1 +0,0 @@ -mutation { failingThing(type: MULTIPLE) { name } } diff --git a/test/absinthe/integration/execution/resolution/multiple_errors_test.exs b/test/absinthe/integration/execution/resolution/multiple_errors_test.exs new file mode 100644 index 0000000000..fe1a1c66b6 --- /dev/null +++ b/test/absinthe/integration/execution/resolution/multiple_errors_test.exs @@ -0,0 +1,18 @@ +defmodule Elixir.Absinthe.Integration.Execution.Resolution.MultipleErrorsTest do + use Absinthe.Case, async: true + + 
@query """ + mutation { failingThing(type: MULTIPLE) { name } } + """ + + test "scenario #1" do + assert {:ok, + %{ + data: %{"failingThing" => nil}, + errors: [ + %{message: "one", path: ["failingThing"], locations: [%{column: 12, line: 1}]}, + %{message: "two", path: ["failingThing"], locations: [%{column: 12, line: 1}]} + ] + }} == Absinthe.run(@query, Absinthe.Fixtures.Things.MacroSchema, []) + end +end diff --git a/test/absinthe/integration/execution/resolution/multiple_errors_with_extra_fields.exs b/test/absinthe/integration/execution/resolution/multiple_errors_with_extra_fields.exs deleted file mode 100644 index 48e68f42c8..0000000000 --- a/test/absinthe/integration/execution/resolution/multiple_errors_with_extra_fields.exs +++ /dev/null @@ -1,8 +0,0 @@ -{:ok, - %{ - data: %{"failingThing" => nil}, - errors: [ - %{code: 1, message: "Custom Error 1", path: ["failingThing"]}, - %{code: 2, message: "Custom Error 2", path: ["failingThing"]} - ] - }} diff --git a/test/absinthe/integration/execution/resolution/multiple_errors_with_extra_fields.graphql b/test/absinthe/integration/execution/resolution/multiple_errors_with_extra_fields.graphql deleted file mode 100644 index 36de1e1a94..0000000000 --- a/test/absinthe/integration/execution/resolution/multiple_errors_with_extra_fields.graphql +++ /dev/null @@ -1 +0,0 @@ -mutation { failingThing(type: MULTIPLE_WITH_CODE) { name } } diff --git a/test/absinthe/integration/execution/resolution/multiple_errors_with_extra_fields_test.exs b/test/absinthe/integration/execution/resolution/multiple_errors_with_extra_fields_test.exs new file mode 100644 index 0000000000..bb897a356d --- /dev/null +++ b/test/absinthe/integration/execution/resolution/multiple_errors_with_extra_fields_test.exs @@ -0,0 +1,28 @@ +defmodule Elixir.Absinthe.Integration.Execution.Resolution.MultipleErrorsWithExtraFieldsTest do + use Absinthe.Case, async: true + + @query """ + mutation { failingThing(type: MULTIPLE_WITH_CODE) { name } } + """ + + test 
"scenario #1" do + assert {:ok, + %{ + data: %{"failingThing" => nil}, + errors: [ + %{ + code: 1, + message: "Custom Error 1", + path: ["failingThing"], + locations: [%{column: 12, line: 1}] + }, + %{ + code: 2, + message: "Custom Error 2", + path: ["failingThing"], + locations: [%{column: 12, line: 1}] + } + ] + }} == Absinthe.run(@query, Absinthe.Fixtures.Things.MacroSchema, []) + end +end diff --git a/test/absinthe/integration/execution/root_value.exs b/test/absinthe/integration/execution/root_value.exs deleted file mode 100644 index fdd27ef30b..0000000000 --- a/test/absinthe/integration/execution/root_value.exs +++ /dev/null @@ -1,4 +0,0 @@ -{ - [root_value: %{version: "0.0.1"}], - {:ok, %{data: %{"version" => "0.0.1"}}} -} diff --git a/test/absinthe/integration/execution/root_value.graphql b/test/absinthe/integration/execution/root_value.graphql deleted file mode 100644 index 062b1363a5..0000000000 --- a/test/absinthe/integration/execution/root_value.graphql +++ /dev/null @@ -1 +0,0 @@ -query { version } diff --git a/test/absinthe/integration/execution/root_value_test.exs b/test/absinthe/integration/execution/root_value_test.exs new file mode 100644 index 0000000000..dc5ccaa2df --- /dev/null +++ b/test/absinthe/integration/execution/root_value_test.exs @@ -0,0 +1,14 @@ +defmodule Elixir.Absinthe.Integration.Execution.RootValueTest do + use Absinthe.Case, async: true + + @query """ + query { version } + """ + + test "scenario #1" do + assert {:ok, %{data: %{"version" => "0.0.1"}}} == + Absinthe.run(@query, Absinthe.Fixtures.Things.MacroSchema, + root_value: %{version: "0.0.1"} + ) + end +end diff --git a/test/absinthe/integration/execution/serialization_test.exs b/test/absinthe/integration/execution/serialization_test.exs new file mode 100644 index 0000000000..a736d6df9b --- /dev/null +++ b/test/absinthe/integration/execution/serialization_test.exs @@ -0,0 +1,61 @@ +defmodule Absinthe.Integration.Execution.SerializationTest do + use Absinthe.Case, async: true 
+ + defmodule Schema do + use Absinthe.Schema + + query do + field :bad_integer, :integer do + resolve fn _, _, _ -> {:ok, 1.0} end + end + + field :bad_float, :float do + resolve fn _, _, _ -> {:ok, "1"} end + end + + field :bad_boolean, :boolean do + resolve fn _, _, _ -> {:ok, "true"} end + end + + field :bad_string, :string do + resolve fn _, _, _ -> {:ok, %{}} end + end + end + end + + @query """ + query { badInteger } + """ + test "returning not an integer for an integer raises" do + assert_raise(Absinthe.SerializationError, fn -> + Absinthe.run(@query, Schema) + end) + end + + @query """ + query { badFloat } + """ + test "returning not a float for a float raises" do + assert_raise(Absinthe.SerializationError, fn -> + Absinthe.run(@query, Schema) + end) + end + + @query """ + query { badBoolean } + """ + test "returning not a boolean for a boolean raises" do + assert_raise(Absinthe.SerializationError, fn -> + Absinthe.run(@query, Schema) + end) + end + + @query """ + query { badString } + """ + test "returning a type that can't `to_string` for a string raises" do + assert_raise(Absinthe.SerializationError, fn -> + Absinthe.run(@query, Schema) + end) + end +end diff --git a/test/absinthe/integration/execution/simple_query.exs b/test/absinthe/integration/execution/simple_query.exs deleted file mode 100644 index fbc57eb991..0000000000 --- a/test/absinthe/integration/execution/simple_query.exs +++ /dev/null @@ -1 +0,0 @@ -{:ok, %{data: %{"thing" => %{"name" => "Foo"}}}} diff --git a/test/absinthe/integration/execution/simple_query.graphql b/test/absinthe/integration/execution/simple_query.graphql deleted file mode 100644 index 7223f09d1a..0000000000 --- a/test/absinthe/integration/execution/simple_query.graphql +++ /dev/null @@ -1 +0,0 @@ -query { thing(id: "foo") { name } } diff --git a/test/absinthe/integration/execution/simple_query_returning_list.exs b/test/absinthe/integration/execution/simple_query_returning_list.exs deleted file mode 100644 index 
46aba75431..0000000000 --- a/test/absinthe/integration/execution/simple_query_returning_list.exs +++ /dev/null @@ -1,2 +0,0 @@ -{:ok, - %{data: %{"things" => [%{"name" => "Bar", "id" => "bar"}, %{"name" => "Foo", "id" => "foo"}]}}} diff --git a/test/absinthe/integration/execution/simple_query_returning_list.graphql b/test/absinthe/integration/execution/simple_query_returning_list.graphql deleted file mode 100644 index 62d3bc8d9f..0000000000 --- a/test/absinthe/integration/execution/simple_query_returning_list.graphql +++ /dev/null @@ -1,6 +0,0 @@ -query { - things { - id - name - } -} diff --git a/test/absinthe/integration/execution/simple_query_returning_list_test.exs b/test/absinthe/integration/execution/simple_query_returning_list_test.exs new file mode 100644 index 0000000000..ffd7fd395e --- /dev/null +++ b/test/absinthe/integration/execution/simple_query_returning_list_test.exs @@ -0,0 +1,21 @@ +defmodule Elixir.Absinthe.Integration.Execution.SimpleQueryReturningListTest do + use Absinthe.Case, async: true + + @query """ + query { + things { + id + name + } + } + """ + + test "scenario #1" do + assert {:ok, + %{ + data: %{ + "things" => [%{"id" => "bar", "name" => "Bar"}, %{"id" => "foo", "name" => "Foo"}] + } + }} == Absinthe.run(@query, Absinthe.Fixtures.Things.MacroSchema, []) + end +end diff --git a/test/absinthe/integration/execution/simple_query_test.exs b/test/absinthe/integration/execution/simple_query_test.exs new file mode 100644 index 0000000000..67f801f772 --- /dev/null +++ b/test/absinthe/integration/execution/simple_query_test.exs @@ -0,0 +1,12 @@ +defmodule Elixir.Absinthe.Integration.Execution.SimpleQueryTest do + use Absinthe.Case, async: true + + @query """ + query { thing(id: "foo") { name } } + """ + + test "scenario #1" do + assert {:ok, %{data: %{"thing" => %{"name" => "Foo"}}}} == + Absinthe.run(@query, Absinthe.Fixtures.Things.MacroSchema, []) + end +end diff --git a/test/absinthe/integration/execution/telemetry_test.exs 
b/test/absinthe/integration/execution/telemetry_test.exs new file mode 100644 index 0000000000..27b643ec9d --- /dev/null +++ b/test/absinthe/integration/execution/telemetry_test.exs @@ -0,0 +1,86 @@ +defmodule Elixir.Absinthe.Integration.Execution.TelemetryTest do + use Absinthe.Case, async: true + + setup context do + :telemetry.attach_many( + context.test, + [ + [:absinthe, :resolve, :field, :start], + [:absinthe, :resolve, :field, :stop], + [:absinthe, :execute, :operation, :start], + [:absinthe, :execute, :operation, :stop] + ], + &__MODULE__.handle_event/4, + %{} + ) + + on_exit(fn -> + :telemetry.detach(context.test) + end) + + :ok + end + + def handle_event(event, measurements, metadata, config) do + send(self(), {event, measurements, metadata, config}) + end + + defmodule TestSchema do + use Absinthe.Schema + + object :object_thing do + field :name, :string + end + + query do + field :object_thing, :object_thing do + resolve fn _, _, _ -> {:ok, %{name: "Foo"}} end + end + + field :async_thing, :string do + arg :echo, :string + resolve &TestSchema.resolve_async/3 + end + end + + def resolve_async(_, %{echo: echo}, _) do + async(fn -> {:ok, echo} end) + end + end + + test "Execute expected telemetry events" do + query = """ + query CustomOperationName ($echo: String!) 
{ + asyncThing(echo: $echo) + objectThing { name } + } + """ + + {:ok, %{data: data}} = Absinthe.run(query, TestSchema, variables: %{"echo" => "ASYNC"}) + assert %{"asyncThing" => "ASYNC", "objectThing" => %{"name" => "Foo"}} == data + + # Operation events + assert_receive {[:absinthe, :execute, :operation, :start], measurements, %{id: id}, _config} + assert System.convert_time_unit(measurements[:system_time], :native, :millisecond) + + assert_receive {[:absinthe, :execute, :operation, :stop], measurements, %{id: ^id} = meta, + _config} + + assert is_number(measurements[:duration]) + assert %Absinthe.Blueprint{} = meta[:blueprint] + assert meta[:options][:schema] == TestSchema + + # Field events + assert_receive {[:absinthe, :resolve, :field, :start], measurements, %{id: id}, _} + assert System.convert_time_unit(measurements[:system_time], :native, :millisecond) + + assert_receive {[:absinthe, :resolve, :field, :stop], measurements, %{id: ^id} = meta, _} + assert is_number(measurements[:duration]) + assert %Absinthe.Resolution{} = meta[:resolution] + assert is_list(meta[:middleware]) + + assert_receive {[:absinthe, :resolve, :field, :stop], _, _, _} + # Don't execute for resolvers that don't call a resolver function (ie: default `Map.get`) + refute_receive {[:absinthe, :resolve, :field, :stop], _, _, _} + end +end diff --git a/test/absinthe/integration/execution/variables/basic.exs b/test/absinthe/integration/execution/variables/basic.exs deleted file mode 100644 index 46838a778e..0000000000 --- a/test/absinthe/integration/execution/variables/basic.exs +++ /dev/null @@ -1,4 +0,0 @@ -{ - [variables: %{"thingId" => "bar"}], - {:ok, %{data: %{"thing" => %{"name" => "Bar"}}}} -} diff --git a/test/absinthe/integration/execution/variables/basic.graphql b/test/absinthe/integration/execution/variables/basic.graphql deleted file mode 100644 index 3ec31c275d..0000000000 --- a/test/absinthe/integration/execution/variables/basic.graphql +++ /dev/null @@ -1,5 +0,0 @@ -query 
($thingId: String!) { - thing(id: $thingId) { - name - } -} diff --git a/test/absinthe/integration/execution/variables/basic_test.exs b/test/absinthe/integration/execution/variables/basic_test.exs new file mode 100644 index 0000000000..07c4b0f85e --- /dev/null +++ b/test/absinthe/integration/execution/variables/basic_test.exs @@ -0,0 +1,22 @@ +defmodule Elixir.Absinthe.Integration.Execution.Variables.BasicTest do + use Absinthe.Case, async: true + + @query """ + query ($thingId: String!) { + thing(id: $thingId) { + name + } + } + """ + + test "scenario #1" do + for schema <- schema_implementations(Absinthe.Fixtures.Things) do + assert {:ok, %{data: %{"thing" => %{"name" => "Bar"}}}} == + Absinthe.run( + @query, + schema, + variables: %{"thingId" => "bar"} + ) + end + end +end diff --git a/test/absinthe/integration/execution/variables/default_value.exs b/test/absinthe/integration/execution/variables/default_value.exs deleted file mode 100644 index 1f981d3173..0000000000 --- a/test/absinthe/integration/execution/variables/default_value.exs +++ /dev/null @@ -1 +0,0 @@ -{:ok, %{data: %{"times" => 24}}} diff --git a/test/absinthe/integration/execution/variables/default_value.graphql b/test/absinthe/integration/execution/variables/default_value.graphql deleted file mode 100644 index 24d8573f20..0000000000 --- a/test/absinthe/integration/execution/variables/default_value.graphql +++ /dev/null @@ -1,4 +0,0 @@ -# Schema: TimesSchema -query ($mult: Int = 6) { - times(base: 4, multiplier: $mult) -} diff --git a/test/absinthe/integration/execution/variables/default_value_test.exs b/test/absinthe/integration/execution/variables/default_value_test.exs new file mode 100644 index 0000000000..c851e67dec --- /dev/null +++ b/test/absinthe/integration/execution/variables/default_value_test.exs @@ -0,0 +1,33 @@ +defmodule Elixir.Absinthe.Integration.Execution.Variables.DefaultValueTest do + use Absinthe.Case, async: true + + @times_query """ + query ($mult: Int = 6) { + times(base: 4, 
multiplier: $mult) + } + """ + + @default_value_query """ + query { + microsecond + } + """ + + test "query field arg default_value and resolve execution" do + assert {:ok, %{data: %{"times" => 24}}} == + Absinthe.run(@times_query, Absinthe.Fixtures.TimesSchema, []) + end + + test "query field default is evaluated only once" do + {:ok, %{data: %{"microsecond" => first_current_microsecond}}} = + Absinthe.run(@default_value_query, Absinthe.Fixtures.DefaultValueSchema, []) + + Process.sleep(5) + + {:ok, %{data: %{"microsecond" => second_current_microsecond}}} = + Absinthe.run(@default_value_query, Absinthe.Fixtures.DefaultValueSchema, []) + + # If the code to grab the default_value was executed twice, this would be different + assert first_current_microsecond == second_current_microsecond + end +end diff --git a/test/absinthe/integration/parsing/basic_error.exs b/test/absinthe/integration/parsing/basic_error.exs deleted file mode 100644 index 178cef5965..0000000000 --- a/test/absinthe/integration/parsing/basic_error.exs +++ /dev/null @@ -1 +0,0 @@ -{:ok, %{errors: [%{message: "syntax error before: '}'"}]}} diff --git a/test/absinthe/integration/parsing/basic_error.graphql b/test/absinthe/integration/parsing/basic_error.graphql deleted file mode 100644 index c72db86464..0000000000 --- a/test/absinthe/integration/parsing/basic_error.graphql +++ /dev/null @@ -1,3 +0,0 @@ -{ - thing(id: "foo") {}{ name } -} diff --git a/test/absinthe/integration/parsing/basic_error_test.exs b/test/absinthe/integration/parsing/basic_error_test.exs new file mode 100644 index 0000000000..5f2b26fc8e --- /dev/null +++ b/test/absinthe/integration/parsing/basic_error_test.exs @@ -0,0 +1,18 @@ +defmodule Elixir.Absinthe.Integration.Parsing.BasicErrorTest do + use Absinthe.Case, async: true + + @query """ + { + thing(id: "foo") {}{ name } + } + """ + + test "scenario #1" do + assert {:ok, + %{ + errors: [ + %{message: "syntax error before: '}'", locations: [%{column: 21, line: 2}]} + ] + }} == 
Absinthe.run(@query, Absinthe.Fixtures.Things.MacroSchema, []) + end +end diff --git a/test/absinthe/integration/validation/cycles.exs b/test/absinthe/integration/validation/cycles.exs deleted file mode 100644 index f6b1959ed0..0000000000 --- a/test/absinthe/integration/validation/cycles.exs +++ /dev/null @@ -1,7 +0,0 @@ -{:ok, - %{ - errors: [ - %{message: "Cannot spread fragment \"Foo\" within itself via \"Bar\", \"Foo\"."}, - %{message: "Cannot spread fragment \"Bar\" within itself via \"Foo\", \"Bar\"."} - ] - }} diff --git a/test/absinthe/integration/validation/cycles.graphql b/test/absinthe/integration/validation/cycles.graphql deleted file mode 100644 index 261a1f0aa1..0000000000 --- a/test/absinthe/integration/validation/cycles.graphql +++ /dev/null @@ -1,11 +0,0 @@ -query Foo { - name -} -fragment Foo on Blag { - name - ...Bar -} -fragment Bar on Blah { - age - ...Foo -} diff --git a/test/absinthe/integration/validation/cycles_test.exs b/test/absinthe/integration/validation/cycles_test.exs new file mode 100644 index 0000000000..f97deccf8f --- /dev/null +++ b/test/absinthe/integration/validation/cycles_test.exs @@ -0,0 +1,55 @@ +defmodule Elixir.Absinthe.Integration.Validation.CyclesTest do + use Absinthe.Case, async: true + + @query """ + query Foo { + name + } + fragment Foo on Blag { + name + ...Bar + } + fragment Bar on Blah { + age + ...Foo + } + """ + + test "scenario #1" do + assert {:ok, + %{ + errors: [ + %{ + message: "Cannot spread fragment \"Foo\" within itself via \"Bar\", \"Foo\".", + locations: [%{column: 1, line: 4}] + }, + %{ + message: "Cannot spread fragment \"Bar\" within itself via \"Foo\", \"Bar\".", + locations: [%{column: 1, line: 8}] + } + ] + }} == Absinthe.run(@query, Absinthe.Fixtures.Things.MacroSchema, []) + end + + @query """ + query Foo { + ...Bar + } + fragment Bar on RootQueryType { + version + ...Foo + } + """ + + test "does not choke on unknown fragments" do + assert {:ok, + %{ + errors: [ + %{ + message: "Unknown 
fragment \"Foo\"", + locations: [%{column: 3, line: 6}] + } + ] + }} == Absinthe.run(@query, Absinthe.Fixtures.Things.MacroSchema, []) + end +end diff --git a/test/absinthe/integration/validation/error_result_when_bad_list_argument.exs b/test/absinthe/integration/validation/error_result_when_bad_list_argument.exs deleted file mode 100644 index 3333476043..0000000000 --- a/test/absinthe/integration/validation/error_result_when_bad_list_argument.exs +++ /dev/null @@ -1 +0,0 @@ -{:ok, %{errors: [%{message: "Argument \"id\" has invalid value [\"foo\"]."}]}} diff --git a/test/absinthe/integration/validation/error_result_when_bad_list_argument.graphql b/test/absinthe/integration/validation/error_result_when_bad_list_argument.graphql deleted file mode 100644 index 972e0a51c7..0000000000 --- a/test/absinthe/integration/validation/error_result_when_bad_list_argument.graphql +++ /dev/null @@ -1,5 +0,0 @@ -query { - thing(id: ["foo"]) { - name - } -} diff --git a/test/absinthe/integration/validation/error_result_when_bad_list_argument_test.exs b/test/absinthe/integration/validation/error_result_when_bad_list_argument_test.exs new file mode 100644 index 0000000000..ff47d2383b --- /dev/null +++ b/test/absinthe/integration/validation/error_result_when_bad_list_argument_test.exs @@ -0,0 +1,23 @@ +defmodule Elixir.Absinthe.Integration.Validation.ErrorResultWhenBadListArgumentTest do + use Absinthe.Case, async: true + + @query """ + query { + thing(id: ["foo"]) { + name + } + } + """ + + test "scenario #1" do + assert {:ok, + %{ + errors: [ + %{ + message: "Argument \"id\" has invalid value [\"foo\"].", + locations: [%{column: 9, line: 2}] + } + ] + }} == Absinthe.run(@query, Absinthe.Fixtures.Things.MacroSchema, []) + end +end diff --git a/test/absinthe/integration/validation/extra_arguments.exs b/test/absinthe/integration/validation/extra_arguments.exs deleted file mode 100644 index dcceee57d8..0000000000 --- a/test/absinthe/integration/validation/extra_arguments.exs +++ 
/dev/null @@ -1,2 +0,0 @@ -{:ok, - %{errors: [%{message: ~s(Unknown argument "extra" on field "thing" of type "RootQueryType".)}]}} diff --git a/test/absinthe/integration/validation/extra_arguments.graphql b/test/absinthe/integration/validation/extra_arguments.graphql deleted file mode 100644 index b9775d2a8d..0000000000 --- a/test/absinthe/integration/validation/extra_arguments.graphql +++ /dev/null @@ -1,5 +0,0 @@ -query { - thing(id: "foo", extra: "dunno") { - name - } -} diff --git a/test/absinthe/integration/validation/extra_arguments_test.exs b/test/absinthe/integration/validation/extra_arguments_test.exs new file mode 100644 index 0000000000..7bedcbdf54 --- /dev/null +++ b/test/absinthe/integration/validation/extra_arguments_test.exs @@ -0,0 +1,24 @@ +defmodule Elixir.Absinthe.Integration.Validation.ExtraArgumentsTest do + use Absinthe.Case, async: true + + @query """ + query { + thing(id: "foo", extra: "dunno") { + name + } + } + """ + + test "scenario #1" do + assert {:ok, + %{ + errors: [ + %{ + message: + "Unknown argument \"extra\" on field \"thing\" of type \"RootQueryType\".", + locations: [%{column: 20, line: 2}] + } + ] + }} == Absinthe.run(@query, Absinthe.Fixtures.Things.MacroSchema, []) + end +end diff --git a/test/absinthe/integration/validation/introspection_fields_ignored_in_input_objects.exs b/test/absinthe/integration/validation/introspection_fields_ignored_in_input_objects.exs deleted file mode 100644 index b0508bcb7d..0000000000 --- a/test/absinthe/integration/validation/introspection_fields_ignored_in_input_objects.exs +++ /dev/null @@ -1,12 +0,0 @@ -{ - [variables: %{"input" => %{"value" => 100, "__typename" => "foo"}}], - {:ok, - %{ - errors: [ - %{ - message: - "Argument \"thing\" has invalid value $input.\nIn field \"__typename\": Unknown field." 
- } - ] - }} -} diff --git a/test/absinthe/integration/validation/introspection_fields_ignored_in_input_objects.graphql b/test/absinthe/integration/validation/introspection_fields_ignored_in_input_objects.graphql deleted file mode 100644 index cb42a2f73b..0000000000 --- a/test/absinthe/integration/validation/introspection_fields_ignored_in_input_objects.graphql +++ /dev/null @@ -1,6 +0,0 @@ -mutation ($input: InputThing) { - thing: updateThing(id: "foo", thing: $input) { - name - value - } -} diff --git a/test/absinthe/integration/validation/introspection_fields_ignored_in_input_objects_test.exs b/test/absinthe/integration/validation/introspection_fields_ignored_in_input_objects_test.exs new file mode 100644 index 0000000000..15164d8652 --- /dev/null +++ b/test/absinthe/integration/validation/introspection_fields_ignored_in_input_objects_test.exs @@ -0,0 +1,30 @@ +defmodule Elixir.Absinthe.Integration.Validation.IntrospectionFieldsIgnoredInInputObjectsTest do + use Absinthe.Case, async: true + + @query """ + mutation ($input: InputThing!) 
{ + thing: updateThing(id: "foo", thing: $input) { + name + value + } + } + """ + + test "scenario #1" do + assert {:ok, + %{ + errors: [ + %{ + message: + "Argument \"thing\" has invalid value $input.\nIn field \"__typename\": Unknown field.", + locations: [%{column: 33, line: 2}] + } + ] + }} == + Absinthe.run( + @query, + Absinthe.Fixtures.Things.MacroSchema, + variables: %{"input" => %{"__typename" => "foo", "value" => 100}} + ) + end +end diff --git a/test/absinthe/integration/validation/invalid_argument.exs b/test/absinthe/integration/validation/invalid_argument.exs deleted file mode 100644 index 887c9fc803..0000000000 --- a/test/absinthe/integration/validation/invalid_argument.exs +++ /dev/null @@ -1 +0,0 @@ -{:ok, %{errors: [%{message: ~s(Argument "val" has invalid value "AAA".)}]}} diff --git a/test/absinthe/integration/validation/invalid_argument.graphql b/test/absinthe/integration/validation/invalid_argument.graphql deleted file mode 100644 index dce98f300b..0000000000 --- a/test/absinthe/integration/validation/invalid_argument.graphql +++ /dev/null @@ -1 +0,0 @@ -query { number(val: "AAA") } diff --git a/test/absinthe/integration/validation/invalid_argument_test.exs b/test/absinthe/integration/validation/invalid_argument_test.exs new file mode 100644 index 0000000000..5c8ee9230c --- /dev/null +++ b/test/absinthe/integration/validation/invalid_argument_test.exs @@ -0,0 +1,19 @@ +defmodule Elixir.Absinthe.Integration.Validation.InvalidArgumentTest do + use Absinthe.Case, async: true + + @query """ + query { number(val: "AAA") } + """ + + test "scenario #1" do + assert {:ok, + %{ + errors: [ + %{ + message: "Argument \"val\" has invalid value \"AAA\".", + locations: [%{column: 16, line: 1}] + } + ] + }} == Absinthe.run(@query, Absinthe.Fixtures.Things.MacroSchema, []) + end +end diff --git a/test/absinthe/integration/validation/invalid_nested_type.exs b/test/absinthe/integration/validation/invalid_nested_type.exs deleted file mode 100644 index 
0241ac72a1..0000000000 --- a/test/absinthe/integration/validation/invalid_nested_type.exs +++ /dev/null @@ -1,9 +0,0 @@ -{:ok, - %{ - errors: [ - %{ - message: - ~s(Argument "thing" has invalid value {value: "BAD"}.\nIn field "value": Expected type "Int", found "BAD".) - } - ] - }} diff --git a/test/absinthe/integration/validation/invalid_nested_type.graphql b/test/absinthe/integration/validation/invalid_nested_type.graphql deleted file mode 100644 index 1637a8d441..0000000000 --- a/test/absinthe/integration/validation/invalid_nested_type.graphql +++ /dev/null @@ -1,6 +0,0 @@ -mutation UpdateThingValueBadly { - thing: updateThing(id: "foo", thing: {value: "BAD"}) { - name - value - } -} diff --git a/test/absinthe/integration/validation/invalid_nested_type_test.exs b/test/absinthe/integration/validation/invalid_nested_type_test.exs new file mode 100644 index 0000000000..b74993c237 --- /dev/null +++ b/test/absinthe/integration/validation/invalid_nested_type_test.exs @@ -0,0 +1,25 @@ +defmodule Elixir.Absinthe.Integration.Validation.InvalidNestedTypeTest do + use Absinthe.Case, async: true + + @query """ + mutation UpdateThingValueBadly { + thing: updateThing(id: "foo", thing: {value: "BAD"}) { + name + value + } + } + """ + + test "scenario #1" do + assert {:ok, + %{ + errors: [ + %{ + message: + "Argument \"thing\" has invalid value {value: \"BAD\"}.\nIn field \"value\": Expected type \"Int\", found \"BAD\".", + locations: [%{column: 33, line: 2}] + } + ] + }} == Absinthe.run(@query, Absinthe.Fixtures.Things.MacroSchema, []) + end +end diff --git a/test/absinthe/integration/validation/missing_operation.exs b/test/absinthe/integration/validation/missing_operation.exs deleted file mode 100644 index 1b5765b815..0000000000 --- a/test/absinthe/integration/validation/missing_operation.exs +++ /dev/null @@ -1 +0,0 @@ -{:ok, %{errors: [%{message: "Operation \"mutation\" not supported"}]}} diff --git a/test/absinthe/integration/validation/missing_operation.graphql 
b/test/absinthe/integration/validation/missing_operation.graphql deleted file mode 100644 index d669678f35..0000000000 --- a/test/absinthe/integration/validation/missing_operation.graphql +++ /dev/null @@ -1,2 +0,0 @@ -# Schema: OnlyQuerySchema -mutation { foo } diff --git a/test/absinthe/integration/validation/missing_operation_test.exs b/test/absinthe/integration/validation/missing_operation_test.exs new file mode 100644 index 0000000000..ece2fb7067 --- /dev/null +++ b/test/absinthe/integration/validation/missing_operation_test.exs @@ -0,0 +1,19 @@ +defmodule Elixir.Absinthe.Integration.Validation.MissingOperationTest do + use Absinthe.Case, async: true + + @query """ + mutation { foo } + """ + + test "scenario #1" do + assert {:ok, + %{ + errors: [ + %{ + message: "Operation \"mutation\" not supported", + locations: [%{column: 1, line: 1}] + } + ] + }} == Absinthe.run(@query, Absinthe.Fixtures.OnlyQuerySchema, []) + end +end diff --git a/test/absinthe/integration/validation/missing_selection_set.exs b/test/absinthe/integration/validation/missing_selection_set.exs deleted file mode 100644 index 37791f4e0a..0000000000 --- a/test/absinthe/integration/validation/missing_selection_set.exs +++ /dev/null @@ -1,9 +0,0 @@ -{:ok, - %{ - errors: [ - %{ - message: - "Field \"things\" of type \"[Thing]\" must have a selection of subfields. Did you mean \"things { ... }\"?" 
- } - ] - }} diff --git a/test/absinthe/integration/validation/missing_selection_set.graphql b/test/absinthe/integration/validation/missing_selection_set.graphql deleted file mode 100644 index b3dc024ba0..0000000000 --- a/test/absinthe/integration/validation/missing_selection_set.graphql +++ /dev/null @@ -1,3 +0,0 @@ -query { - things -} diff --git a/test/absinthe/integration/validation/missing_selection_set_test.exs b/test/absinthe/integration/validation/missing_selection_set_test.exs new file mode 100644 index 0000000000..9943d65007 --- /dev/null +++ b/test/absinthe/integration/validation/missing_selection_set_test.exs @@ -0,0 +1,22 @@ +defmodule Elixir.Absinthe.Integration.Validation.MissingSelectionSetTest do + use Absinthe.Case, async: true + + @query """ + query { + things + } + """ + + test "scenario #1" do + assert {:ok, + %{ + errors: [ + %{ + message: + "Field \"things\" of type \"[Thing]\" must have a selection of subfields. Did you mean \"things { ... }\"?", + locations: [%{column: 3, line: 2}] + } + ] + }} == Absinthe.run(@query, Absinthe.Fixtures.Things.MacroSchema, []) + end +end diff --git a/test/absinthe/integration/validation/object_spreads_in_object_scope.exs b/test/absinthe/integration/validation/object_spreads_in_object_scope.exs deleted file mode 100644 index 3a3f5afd11..0000000000 --- a/test/absinthe/integration/validation/object_spreads_in_object_scope.exs +++ /dev/null @@ -1,10 +0,0 @@ -# https://facebook.github.io/graphql/#sec-Object-Spreads-In-Object-Scope -{:ok, - %{ - errors: [ - %{ - message: - "Fragment spread has no type overlap with parent.\nParent possible types: [\"Person\"]\nSpread possible types: [\"Business\"]\n" - } - ] - }} diff --git a/test/absinthe/integration/validation/object_spreads_in_object_scope.graphql b/test/absinthe/integration/validation/object_spreads_in_object_scope.graphql deleted file mode 100644 index a2894367c3..0000000000 --- a/test/absinthe/integration/validation/object_spreads_in_object_scope.graphql +++ 
/dev/null @@ -1,10 +0,0 @@ -# Schema: ContactSchema -query Q { - person { - name - ...NamedBusiness - } -} -fragment NamedBusiness on Business { - employee_count -} diff --git a/test/absinthe/integration/validation/object_spreads_in_object_scope_test.exs b/test/absinthe/integration/validation/object_spreads_in_object_scope_test.exs new file mode 100644 index 0000000000..df31810687 --- /dev/null +++ b/test/absinthe/integration/validation/object_spreads_in_object_scope_test.exs @@ -0,0 +1,28 @@ +defmodule Elixir.Absinthe.Integration.Validation.ObjectSpreadsInObjectScopeTest do + use Absinthe.Case, async: true + + @query """ + query Q { + person { + name + ...NamedBusiness + } + } + fragment NamedBusiness on Business { + employee_count + } + """ + + test "scenario #1" do + assert {:ok, + %{ + errors: [ + %{ + message: + "Fragment spread has no type overlap with parent.\nParent possible types: [\"Person\"]\nSpread possible types: [\"Business\"]\n", + locations: [%{column: 5, line: 4}] + } + ] + }} == Absinthe.run(@query, Absinthe.Fixtures.ContactSchema, []) + end +end diff --git a/test/absinthe/integration/validation/required_arguments.exs b/test/absinthe/integration/validation/required_arguments.exs deleted file mode 100644 index 0af3956f70..0000000000 --- a/test/absinthe/integration/validation/required_arguments.exs +++ /dev/null @@ -1 +0,0 @@ -{:ok, %{errors: [%{message: ~s(In argument "id": Expected type "String!", found null.)}]}} diff --git a/test/absinthe/integration/validation/required_arguments.graphql b/test/absinthe/integration/validation/required_arguments.graphql deleted file mode 100644 index acf0557d7f..0000000000 --- a/test/absinthe/integration/validation/required_arguments.graphql +++ /dev/null @@ -1 +0,0 @@ -query { thing { name } } diff --git a/test/absinthe/integration/validation/required_arguments_test.exs b/test/absinthe/integration/validation/required_arguments_test.exs new file mode 100644 index 0000000000..027ba81887 --- /dev/null +++ 
b/test/absinthe/integration/validation/required_arguments_test.exs @@ -0,0 +1,19 @@ +defmodule Elixir.Absinthe.Integration.Validation.RequiredArgumentsTest do + use Absinthe.Case, async: true + + @query """ + query { thing { name } } + """ + + test "scenario #1" do + assert {:ok, + %{ + errors: [ + %{ + message: "In argument \"id\": Expected type \"String!\", found null.", + locations: [%{column: 9, line: 1}] + } + ] + }} == Absinthe.run(@query, Absinthe.Fixtures.Things.MacroSchema, []) + end +end diff --git a/test/absinthe/integration/validation/unknown_arg_for_list_member_field.exs b/test/absinthe/integration/validation/unknown_arg_for_list_member_field.exs deleted file mode 100644 index bc8d23c7fd..0000000000 --- a/test/absinthe/integration/validation/unknown_arg_for_list_member_field.exs +++ /dev/null @@ -1 +0,0 @@ -{:ok, %{errors: [%{message: "Unknown argument \"x\" on field \"id\" of type \"Thing\"."}]}} diff --git a/test/absinthe/integration/validation/unknown_arg_for_list_member_field.graphql b/test/absinthe/integration/validation/unknown_arg_for_list_member_field.graphql deleted file mode 100644 index 7f6116c62c..0000000000 --- a/test/absinthe/integration/validation/unknown_arg_for_list_member_field.graphql +++ /dev/null @@ -1,6 +0,0 @@ -query { - things { - id(x: 1) - name - } -} diff --git a/test/absinthe/integration/validation/unknown_arg_for_list_member_field_test.exs b/test/absinthe/integration/validation/unknown_arg_for_list_member_field_test.exs new file mode 100644 index 0000000000..09b4b8da68 --- /dev/null +++ b/test/absinthe/integration/validation/unknown_arg_for_list_member_field_test.exs @@ -0,0 +1,24 @@ +defmodule Elixir.Absinthe.Integration.Validation.UnknownArgForListMemberFieldTest do + use Absinthe.Case, async: true + + @query """ + query { + things { + id(x: 1) + name + } + } + """ + + test "scenario #1" do + assert {:ok, + %{ + errors: [ + %{ + message: "Unknown argument \"x\" on field \"id\" of type \"Thing\".", + locations: [%{column: 
8, line: 3}] + } + ] + }} == Absinthe.run(@query, Absinthe.Fixtures.Things.MacroSchema, []) + end +end diff --git a/test/absinthe/integration/validation/unknown_field.exs b/test/absinthe/integration/validation/unknown_field.exs deleted file mode 100644 index bc85a84b70..0000000000 --- a/test/absinthe/integration/validation/unknown_field.exs +++ /dev/null @@ -1 +0,0 @@ -{:ok, %{errors: [%{message: ~s(Cannot query field "bad" on type "Thing".)}]}} diff --git a/test/absinthe/integration/validation/unknown_field.graphql b/test/absinthe/integration/validation/unknown_field.graphql deleted file mode 100644 index ad710bf046..0000000000 --- a/test/absinthe/integration/validation/unknown_field.graphql +++ /dev/null @@ -1,6 +0,0 @@ -{ - thing(id: "foo") { - name - bad - } -} diff --git a/test/absinthe/integration/validation/unknown_field_test.exs b/test/absinthe/integration/validation/unknown_field_test.exs new file mode 100644 index 0000000000..44c7858a46 --- /dev/null +++ b/test/absinthe/integration/validation/unknown_field_test.exs @@ -0,0 +1,24 @@ +defmodule Elixir.Absinthe.Integration.Validation.UnknownFieldTest do + use Absinthe.Case, async: true + + @query """ + { + thing(id: "foo") { + name + bad + } + } + """ + + test "scenario #1" do + assert {:ok, + %{ + errors: [ + %{ + message: "Cannot query field \"bad\" on type \"Thing\".", + locations: [%{column: 5, line: 4}] + } + ] + }} == Absinthe.run(@query, Absinthe.Fixtures.Things.MacroSchema, []) + end +end diff --git a/test/absinthe/integration/validation/variables/unused/with_operation_name.exs b/test/absinthe/integration/validation/variables/unused/with_operation_name.exs deleted file mode 100644 index 316dbc7920..0000000000 --- a/test/absinthe/integration/validation/variables/unused/with_operation_name.exs +++ /dev/null @@ -1,8 +0,0 @@ -{ - :ok, - %{ - errors: [ - %{message: ~s(Variable "test" is never used in operation "AnOperationName".)} - ] - } -} diff --git 
a/test/absinthe/integration/validation/variables/unused/with_operation_name.graphql b/test/absinthe/integration/validation/variables/unused/with_operation_name.graphql deleted file mode 100644 index cd2c4cd79e..0000000000 --- a/test/absinthe/integration/validation/variables/unused/with_operation_name.graphql +++ /dev/null @@ -1,5 +0,0 @@ -query AnOperationName($test: String) { - thing(id: "foo") { - name - } -} diff --git a/test/absinthe/integration/validation/variables/unused/with_operation_name_test.exs b/test/absinthe/integration/validation/variables/unused/with_operation_name_test.exs new file mode 100644 index 0000000000..ae3923d6b4 --- /dev/null +++ b/test/absinthe/integration/validation/variables/unused/with_operation_name_test.exs @@ -0,0 +1,23 @@ +defmodule Elixir.Absinthe.Integration.Validation.Variables.Unused.WithOperationNameTest do + use Absinthe.Case, async: true + + @query """ + query AnOperationName($test: String) { + thing(id: "foo") { + name + } + } + """ + + test "scenario #1" do + assert {:ok, + %{ + errors: [ + %{ + message: "Variable \"test\" is never used in operation \"AnOperationName\".", + locations: [%{column: 23, line: 1}, %{column: 1, line: 1}] + } + ] + }} == Absinthe.run(@query, Absinthe.Fixtures.Things.MacroSchema, []) + end +end diff --git a/test/absinthe/integration/validation/variables/unused/without_operation_name.exs b/test/absinthe/integration/validation/variables/unused/without_operation_name.exs deleted file mode 100644 index ce90959103..0000000000 --- a/test/absinthe/integration/validation/variables/unused/without_operation_name.exs +++ /dev/null @@ -1 +0,0 @@ -{:ok, %{errors: [%{message: "Variable \"test\" is never used."}]}} diff --git a/test/absinthe/integration/validation/variables/unused/without_operation_name.graphql b/test/absinthe/integration/validation/variables/unused/without_operation_name.graphql deleted file mode 100644 index 49c300cfc3..0000000000 --- 
a/test/absinthe/integration/validation/variables/unused/without_operation_name.graphql +++ /dev/null @@ -1,5 +0,0 @@ -query ($test: String) { - thing(id: "foo") { - name - } -} diff --git a/test/absinthe/integration/validation/variables/unused/without_operation_name_test.exs b/test/absinthe/integration/validation/variables/unused/without_operation_name_test.exs new file mode 100644 index 0000000000..ee828007c5 --- /dev/null +++ b/test/absinthe/integration/validation/variables/unused/without_operation_name_test.exs @@ -0,0 +1,20 @@ +defmodule Elixir.Absinthe.Integration.Validation.Variables.Unused.WithoutOperationNameTest do + use Absinthe.Case, async: true + + @query """ + query ($test: String) { + thing(id: "foo") { + name + } + } + """ + + test "scenario #1" do + assert {:ok, + %{ + errors: [ + %{message: "Variable \"test\" is never used.", locations: [%{column: 8, line: 1}]} + ] + }} == Absinthe.run(@query, Absinthe.Fixtures.Things.MacroSchema, []) + end +end diff --git a/test/absinthe/integration_test.exs b/test/absinthe/integration_test.exs deleted file mode 100644 index c2b062447c..0000000000 --- a/test/absinthe/integration_test.exs +++ /dev/null @@ -1,58 +0,0 @@ -defmodule Absinthe.IntegrationTest do - @moduledoc """ - See the `Absinthe.IntegrationCase` documentation for information on - how to write integration tests. 
- """ - - use Absinthe.IntegrationCase, - root: "test/absinthe/integration", - default_schema: Absinthe.Fixtures.ThingsSchema, - async: true - - def assert_scenario( - %{name: "execution/fragments/introspection"} = definition, - {options, _expectation} - ) do - result = run(definition.graphql, definition.schema, options) - - assert {:ok, - %{ - data: %{ - "__type" => %{ - "name" => "ProfileInput", - "kind" => "INPUT_OBJECT", - "fields" => nil, - "inputFields" => input_fields - } - } - }} = result - - correct = [%{"name" => "code"}, %{"name" => "name"}, %{"name" => "age"}] - sort = & &1["name"] - assert Enum.sort_by(input_fields, sort) == Enum.sort_by(correct, sort) - end - - def assert_scenario( - %{name: "execution/introspection/schema_types"} = definition, - {options, _expectation} - ) do - result = run(definition.graphql, definition.schema, options) - assert {:ok, %{data: %{"__schema" => %{"types" => types}}}} = result - names = types |> Enum.map(& &1["name"]) |> Enum.sort() - - expected = - ~w(Int String Boolean Contact Person Business ProfileInput SearchResult NamedEntity RootMutationType RootQueryType RootSubscriptionType __Schema __Directive __DirectiveLocation __EnumValue __Field __InputValue __Type) - |> Enum.sort() - - assert expected == names - end - - def assert_scenario( - %{name: "execution/introspection/full"} = definition, - {options, _expectation} - ) do - result = run(definition.graphql, definition.schema, options) - {:ok, %{data: %{"__schema" => schema}}} = result - assert !is_nil(schema) - end -end diff --git a/test/absinthe/introspection_test.exs b/test/absinthe/introspection_test.exs index 5eeeb5eb26..5600489221 100644 --- a/test/absinthe/introspection_test.exs +++ b/test/absinthe/introspection_test.exs @@ -3,6 +3,58 @@ defmodule Absinthe.IntrospectionTest do alias Absinthe.Schema + describe "introspection of directives" do + test "builtin" do + result = + """ + query IntrospectionQuery { + __schema { + directives { + name + description + 
locations + isRepeatable + onOperation + onFragment + onField + } + } + } + """ + |> run(Absinthe.Fixtures.ColorSchema) + + assert {:ok, + %{ + data: %{ + "__schema" => %{ + "directives" => [ + %{ + "description" => + "Directs the executor to include this field or fragment only when the `if` argument is true.", + "isRepeatable" => false, + "locations" => ["FIELD", "FRAGMENT_SPREAD", "INLINE_FRAGMENT"], + "name" => "include", + "onField" => true, + "onFragment" => true, + "onOperation" => false + }, + %{ + "description" => + "Directs the executor to skip this field or fragment when the `if` argument is true.", + "isRepeatable" => false, + "locations" => ["FIELD", "FRAGMENT_SPREAD", "INLINE_FRAGMENT"], + "name" => "skip", + "onField" => true, + "onFragment" => true, + "onOperation" => false + } + ] + } + } + }} = result + end + end + describe "introspection of an enum type" do test "can use __type and value information with deprecations" do result = @@ -60,7 +112,7 @@ defmodule Absinthe.IntrospectionTest do "isDeprecated" => false, "deprecationReason" => nil } - ] == values |> Enum.sort_by(& &1["name"]) + ] == values end test "can use __type and value information without deprecations" do @@ -107,9 +159,6 @@ defmodule Absinthe.IntrospectionTest do queryType { fields { name - type { - name - } args { name defaultValue @@ -123,9 +172,7 @@ defmodule Absinthe.IntrospectionTest do assert {:ok, %{data: %{"__schema" => %{"queryType" => %{"fields" => fields}}}}} = result - assert [ - %{"name" => "info", "args" => [%{"name" => "channel", "defaultValue" => "RED"}]} - ] = fields + assert %{"name" => "info", "args" => [%{"name" => "channel", "defaultValue" => "RED"}]} in fields end end @@ -196,6 +243,124 @@ defmodule Absinthe.IntrospectionTest do assert !match?({:ok, %{data: %{"__type" => %{"fields" => _}}}}, result) end + + defmodule ComplexDefaultSchema do + use Absinthe.Schema + + query do + field :complex_default, :string do + arg :input, :complex_input, + default_value: %{ + 
fancy_value: "qwerty", + fancy_nested: %{fancy_bool: false}, + fancy_enum: :foo, + fancy_list: [:foo, :bar] + } + end + end + + enum :an_enum do + value :foo + value :bar + end + + input_object :complex_input do + field :fancy_value, :string + field :fancy_enum, non_null(:an_enum) + field :fancy_list, list_of(:an_enum) + field :fancy_nested, :nested_complex_input + end + + input_object :nested_complex_input do + field :fancy_bool, :boolean + end + end + + test "can introspect complex default_value" do + result = + """ + { + __schema { + queryType { + fields { + args { + defaultValue + } + } + } + } + } + """ + |> run(ComplexDefaultSchema) + + assert_result( + {:ok, + %{ + data: %{ + "__schema" => %{ + "queryType" => %{ + "fields" => [ + %{ + "args" => [ + %{ + "defaultValue" => + "{fancyEnum: FOO, fancyList: [FOO, BAR], fancyNested: {fancyBool: false}, fancyValue: \"qwerty\"}" + } + ] + } + ] + } + } + } + }}, + result + ) + end + + defmodule ImportFieldsIntoInputSchema do + use Absinthe.Schema + + query do + field :test, :test_object do + arg :test, :test_input + end + end + + object :test_object do + import_fields(:import_object) + end + + input_object :test_input do + import_fields(:import_object) + end + + object :import_object do + field :id, :id + end + end + + test "import_fields won't import __typename" do + {:ok, %{data: data}} = + """ + { + __schema { + types { + name + inputFields { + name + } + } + } + } + """ + |> Absinthe.run(ImportFieldsIntoInputSchema) + + type = + get_in(data, ["__schema", "types"]) + |> Enum.find(&(&1["name"] == "TestInput")) + + assert get_in(type, ["inputFields"]) == [%{"name" => "id"}] + end end describe "introspection of an object type" do @@ -452,4 +617,61 @@ defmodule Absinthe.IntrospectionTest do ) end end + + test "Doesn't fail for unknown introspection fields" do + result = + """ + { + __foobar { + baz + } + } + """ + |> run(Absinthe.Fixtures.ContactSchema) + + assert_result( + {:ok, + %{ + errors: [ + %{ + locations: 
[%{column: 3, line: 2}], + message: "Cannot query field \"__foobar\" on type \"RootQueryType\"." + } + ] + }}, + result + ) + end + + test "properly render partial default value input objects" do + {:ok, result} = + """ + { + __schema { + queryType { + fields { + name + args { + name + defaultValue + } + } + } + } + } + """ + |> run(Absinthe.Fixtures.ArgumentsSchema) + + fields = get_in(result, [:data, "__schema", "queryType", "fields"]) + + assert %{ + "args" => [ + %{"defaultValue" => "{exclude: [2, 3], include: [1]}", "name" => "filterAll"}, + %{"defaultValue" => "{}", "name" => "filterEmpty"}, + %{"defaultValue" => "{exclude: [1, 2, 3]}", "name" => "filterExclude"}, + %{"defaultValue" => "{include: [1, 2, 3]}", "name" => "filterInclude"} + ], + "name" => "filterNumbers" + } in fields + end end diff --git a/test/absinthe/language/directive_definition_test.exs b/test/absinthe/language/directive_definition_test.exs index ac547f11c6..fc383439ce 100644 --- a/test/absinthe/language/directive_definition_test.exs +++ b/test/absinthe/language/directive_definition_test.exs @@ -5,8 +5,19 @@ defmodule Absinthe.Language.DirectiveDefinitionTest do describe "blueprint conversion" do test "works, given a Blueprint Schema 'directive' definition without arguments" do - assert %Blueprint.Schema.DirectiveDefinition{name: "thingy", locations: ["FIELD", "OBJECT"]} = - from_input("directive @thingy on FIELD | OBJECT") + assert %Blueprint.Schema.DirectiveDefinition{ + name: "thingy", + locations: [:field, :object], + repeatable: false + } = from_input("directive @thingy on FIELD | OBJECT") + end + + test "works, given a Blueprint Schema 'repeatable' 'directive' definition without arguments" do + assert %Blueprint.Schema.DirectiveDefinition{ + name: "thingy", + locations: [:field, :object], + repeatable: true + } = from_input("directive @thingy repeatable on FIELD | OBJECT") end test "works, given a Blueprint Schema 'directive' definition without arguments and with directives" do @@ 
-18,7 +29,7 @@ defmodule Absinthe.Language.DirectiveDefinitionTest do assert %Blueprint.Schema.DirectiveDefinition{ name: "authorized", - locations: ["FIELD"], + locations: [:field], directives: [%{name: "description"}] } = rep end diff --git a/test/absinthe/language/document_test.exs b/test/absinthe/language/document_test.exs index 6a720717ca..fdb7d41fa2 100644 --- a/test/absinthe/language/document_test.exs +++ b/test/absinthe/language/document_test.exs @@ -49,7 +49,7 @@ defmodule Absinthe.Language.DocumentTest do rep = ir("{ foo } mutation Bar { bar } subscription Baz { baz }") assert length(rep.directives) == 0 assert length(rep.operations) == 3 - assert length(rep.types) == 0 + assert length(rep.schema_definitions) == 0 assert length(rep.fragments) == 0 end @@ -71,7 +71,7 @@ defmodule Absinthe.Language.DocumentTest do assert length(rep.directives) == 0 assert length(rep.operations) == 0 - assert length(rep.types) == 4 + assert length(rep.schema_definitions) == 4 assert length(rep.fragments) == 0 end @@ -99,7 +99,7 @@ defmodule Absinthe.Language.DocumentTest do assert length(rep.directives) == 0 assert length(rep.operations) == 1 - assert length(rep.types) == 0 + assert length(rep.schema_definitions) == 0 assert length(rep.fragments) == 2 end @@ -107,7 +107,7 @@ defmodule Absinthe.Language.DocumentTest do rep = ir("directive @cs(if: Boolean!) 
on FIELD") assert length(rep.directives) == 1 assert length(rep.operations) == 0 - assert length(rep.types) == 0 + assert length(rep.schema_definitions) == 0 assert length(rep.fragments) == 0 end end @@ -165,7 +165,7 @@ defmodule Absinthe.Language.DocumentTest do test "creates the correct number of types" do rep = ir(@idl) - assert length(rep.types) == 10 + assert length(rep.schema_definitions) == 10 end end diff --git a/test/absinthe/language/enum_type_definition_test.exs b/test/absinthe/language/enum_type_definition_test.exs index 22a4cb2759..0fc76fa49c 100644 --- a/test/absinthe/language/enum_type_definition_test.exs +++ b/test/absinthe/language/enum_type_definition_test.exs @@ -10,9 +10,9 @@ defmodule Absinthe.Language.EnumTypeDefinitionTest do assert %Blueprint.Schema.EnumTypeDefinition{ name: "Episode", values: [ - %Blueprint.Schema.EnumValueDefinition{value: "NEWHOPE"}, - %Blueprint.Schema.EnumValueDefinition{value: "EMPIRE"}, - %Blueprint.Schema.EnumValueDefinition{value: "JEDI"} + %Blueprint.Schema.EnumValueDefinition{name: "NEWHOPE", value: :newhope}, + %Blueprint.Schema.EnumValueDefinition{name: "EMPIRE", value: :empire}, + %Blueprint.Schema.EnumValueDefinition{name: "JEDI", value: :jedi} ] } = rep end @@ -28,9 +28,9 @@ defmodule Absinthe.Language.EnumTypeDefinitionTest do name: "Episode", directives: [%Blueprint.Directive{name: "description"}], values: [ - %Blueprint.Schema.EnumValueDefinition{value: "NEWHOPE"}, - %Blueprint.Schema.EnumValueDefinition{value: "EMPIRE"}, - %Blueprint.Schema.EnumValueDefinition{value: "JEDI"} + %Blueprint.Schema.EnumValueDefinition{name: "NEWHOPE", value: :newhope}, + %Blueprint.Schema.EnumValueDefinition{name: "EMPIRE", value: :empire}, + %Blueprint.Schema.EnumValueDefinition{name: "JEDI", value: :jedi} ] } = rep end diff --git a/test/absinthe/language/field_definition_test.exs b/test/absinthe/language/field_definition_test.exs index 5d13dd7caa..24d9d6225f 100644 --- a/test/absinthe/language/field_definition_test.exs +++ 
b/test/absinthe/language/field_definition_test.exs @@ -39,6 +39,7 @@ defmodule Absinthe.Language.FieldDefinitionTest do field_def = fields |> Enum.at(2) |> Blueprint.Draft.convert(doc) assert %Blueprint.Schema.FieldDefinition{ + identifier: :quuxes, name: "quuxes", type: %Blueprint.TypeReference.List{ of_type: %Blueprint.TypeReference.Name{name: "Quux"} @@ -46,13 +47,20 @@ defmodule Absinthe.Language.FieldDefinitionTest do arguments: [ %Blueprint.Schema.InputValueDefinition{ name: "limit", + identifier: :limit, type: %Blueprint.TypeReference.Name{name: "Int"}, - default_value: %Blueprint.Input.Integer{ - value: 4, - source_location: %Blueprint.Document.SourceLocation{column: nil, line: 4} - } + default_value: 4, + default_value_blueprint: %Absinthe.Blueprint.Input.Integer{ + errors: [], + flags: %{}, + schema_node: nil, + source_location: %Absinthe.Blueprint.SourceLocation{column: 23, line: 4}, + value: 4 + }, + source_location: %Absinthe.Blueprint.SourceLocation{column: 10, line: 4} } - ] + ], + source_location: %Absinthe.Blueprint.SourceLocation{column: 3, line: 4} } == field_def end end diff --git a/test/absinthe/language/field_test.exs b/test/absinthe/language/field_test.exs index 4675799d4d..e7ee78f6ac 100644 --- a/test/absinthe/language/field_test.exs +++ b/test/absinthe/language/field_test.exs @@ -39,7 +39,7 @@ defmodule Absinthe.Language.FieldTest do } } ], - source_location: %Blueprint.Document.SourceLocation{line: 2} + source_location: %Blueprint.SourceLocation{line: 2} } = from_input(@query) end @@ -55,7 +55,7 @@ defmodule Absinthe.Language.FieldTest do input_value: %Input.RawValue{content: %Input.Variable{name: "showFoo"}} } ], - source_location: %Blueprint.Document.SourceLocation{line: 2} + source_location: %Blueprint.SourceLocation{line: 2} } ], arguments: [ @@ -73,7 +73,7 @@ defmodule Absinthe.Language.FieldTest do } } ], - source_location: %Blueprint.Document.SourceLocation{line: 2} + source_location: %Blueprint.SourceLocation{line: 2} } = 
from_input(@query_with_directive) end end diff --git a/test/absinthe/language/interface_type_definition.exs b/test/absinthe/language/interface_type_definition_test.exs similarity index 100% rename from test/absinthe/language/interface_type_definition.exs rename to test/absinthe/language/interface_type_definition_test.exs diff --git a/test/absinthe/language/object_type_definition_test.exs b/test/absinthe/language/object_type_definition_test.exs index 620c906250..bd3b882305 100644 --- a/test/absinthe/language/object_type_definition_test.exs +++ b/test/absinthe/language/object_type_definition_test.exs @@ -9,7 +9,7 @@ defmodule Absinthe.Language.ObjectTypeDefinitionTest do from_input("type Person { name: String! }") end - test "works, given a Blueprint Schema 'type' definition and a directive" do + test "works, given a Blueprint Schema 'type' definition and a built in directive" do rep = """ type Person @@ -26,6 +26,23 @@ defmodule Absinthe.Language.ObjectTypeDefinitionTest do } = rep end + test "works, given a Blueprint Schema 'type' definition and a Type System directive" do + rep = + """ + type Person + @typeSystemDirective(foo: "Bar") + { + name: String! 
+ } + """ + |> from_input + + assert %Blueprint.Schema.ObjectTypeDefinition{ + name: "Person", + directives: [%{name: "typeSystemDirective"}] + } = rep + end + test "works, given a Blueprint Schema 'type' definition that implements an interface" do rep = """ @@ -37,7 +54,8 @@ defmodule Absinthe.Language.ObjectTypeDefinitionTest do assert %Blueprint.Schema.ObjectTypeDefinition{ name: "Person", - interfaces: [%Blueprint.TypeReference.Name{name: "Entity"}] + interfaces: [:entity], + interface_blueprints: [%Blueprint.TypeReference.Name{name: "Entity"}] } = rep end @@ -54,7 +72,8 @@ defmodule Absinthe.Language.ObjectTypeDefinitionTest do assert %Blueprint.Schema.ObjectTypeDefinition{ name: "Person", - interfaces: [%Blueprint.TypeReference.Name{name: "Entity"}], + interfaces: [:entity], + interface_blueprints: [%Blueprint.TypeReference.Name{name: "Entity"}], directives: [%{name: "description"}] } = rep end diff --git a/test/absinthe/language/operation_definition_test.exs b/test/absinthe/language/operation_definition_test.exs index 201fcf7323..0b189df503 100644 --- a/test/absinthe/language/operation_definition_test.exs +++ b/test/absinthe/language/operation_definition_test.exs @@ -21,7 +21,7 @@ defmodule Absinthe.Language.OperationDefinitionTest do default_value: %Blueprint.Input.Boolean{value: true} } ], - source_location: %Blueprint.Document.SourceLocation{line: 1} + source_location: %Blueprint.SourceLocation{line: 1} } = from_input(@query) end @@ -46,7 +46,7 @@ defmodule Absinthe.Language.OperationDefinitionTest do default_value: %Blueprint.Input.Boolean{value: true} } ], - source_location: %Blueprint.Document.SourceLocation{line: 1}, + source_location: %Blueprint.SourceLocation{line: 1}, selections: [ %Blueprint.Document.Field{name: "foo"}, %Blueprint.Document.Fragment.Spread{name: "QueryBits"} diff --git a/test/absinthe/language/render_test.exs b/test/absinthe/language/render_test.exs new file mode 100644 index 0000000000..add322c007 --- /dev/null +++ 
b/test/absinthe/language/render_test.exs @@ -0,0 +1,126 @@ +defmodule Absinthe.Language.RenderTest do + use ExUnit.Case, async: true + + describe "renders graphql" do + test "for unnamed query" do + assert_rendered(""" + { + version + } + """) + end + + test "for fragment typing" do + assert_rendered(""" + query FragmentTyping { + profiles(handles: ["zuck", "cocacola"]) { + handle + ...userFragment + ...pageFragment + } + } + + fragment userFragment on User @defer { + friends { + count + } + } + + fragment pageFragment on Page { + likers { + count + } + } + """) + end + + test "for inline fragment with type query" do + assert_rendered(""" + query inlineFragmentTyping { + profiles(handles: ["zuck", "cocacola"]) { + handle + ... on User @onInlineFragment { + friends { + count + } + } + ... on Page { + likers { + count + } + } + } + } + """) + end + + test "for inline fragments without type query" do + assert_rendered(""" + query inlineFragmentNoType($expandedInfo: Boolean) { + user(handle: "zuck") { + id + name + ... @include(if: $expandedInfo) { + firstName + lastName + birthday + } + } + } + """) + end + + test "for block strings" do + assert_rendered(""" + mutation { + sendEmail(message: \"\"\" + Hello, + World! + + Yours, + GraphQL. + \"\"\") + } + """) + end + + test "for null values" do + assert_rendered(""" + query { + field(arg: null) + field + } + """) + end + + test "for input objects" do + assert_rendered(""" + query { + nearestThing(location: { lon: 12.43, lat: -53.211 }) + } + """) + end + + test "for variables" do + assert_rendered(""" + query ($id: ID, $mult: Int = 6, $list: [Int!]! = [1, 2], $customScalar: CustomScalar!) 
{ + times(base: 4, multiplier: $mult) + } + """) + end + + test "for introspection query" do + assert_rendered( + Path.join(__DIR__, "../../../priv/graphql/introspection.graphql") + |> File.read!() + ) + end + end + + defp assert_rendered(graphql) do + {:ok, blueprint} = Absinthe.Phase.Parse.run(graphql, []) + rendered_graphql = inspect(blueprint.input, pretty: true) + + assert graphql == rendered_graphql + end +end diff --git a/test/absinthe/language/variable_definition_test.exs b/test/absinthe/language/variable_definition_test.exs index 173a834d69..fe20b75f0e 100644 --- a/test/absinthe/language/variable_definition_test.exs +++ b/test/absinthe/language/variable_definition_test.exs @@ -4,7 +4,7 @@ defmodule Absinthe.Language.VariableDefinitionTest do alias Absinthe.{Blueprint, Language} @query """ - query Foo($showFoo: Boolean = true) { + query Foo($showFoo: Boolean = true @bar(a: 1)) { foo @include(if: $showFoo) } """ @@ -13,9 +13,10 @@ defmodule Absinthe.Language.VariableDefinitionTest do test "builds a VariableDefinition.t" do assert %Blueprint.Document.VariableDefinition{ name: "showFoo", + directives: [%Blueprint.Directive{name: "bar"}], type: %Blueprint.TypeReference.Name{name: "Boolean"}, default_value: %Blueprint.Input.Boolean{value: true}, - source_location: %Blueprint.Document.SourceLocation{line: 1} + source_location: %Blueprint.SourceLocation{line: 1} } = from_input(@query) end end diff --git a/test/absinthe/lexer_test.exs b/test/absinthe/lexer_test.exs new file mode 100644 index 0000000000..a56f4c0015 --- /dev/null +++ b/test/absinthe/lexer_test.exs @@ -0,0 +1,97 @@ +defmodule Absinthe.LexerTest do + use Absinthe.Case, async: true + + @query """ + { foo } + """ + test "basic document" do + assert {:ok, [{:"{", {1, 1}}, {:name, {1, 3}, 'foo'}, {:"}", {1, 7}}]} = + Absinthe.Lexer.tokenize(@query) + end + + @query """ + { nullName } + """ + test "document with a name that starts with a keyword" do + assert {:ok, [{:"{", {1, 1}}, {:name, {1, 3}, 
'nullName'}, {:"}", {1, 12}}]} = + Absinthe.Lexer.tokenize(@query) + end + + @query ~S""" + { + foo + } + """ + test "basic document, multiple lines" do + assert {:ok, [{:"{", {1, 1}}, {:name, {2, 3}, 'foo'}, {:"}", {3, 1}}]} = + Absinthe.Lexer.tokenize(@query) + end + + @query ~S""" + { + { foo(bar: "\\\\FOO") } + } + """ + test "multiple escaped slashes" do + assert Absinthe.Lexer.tokenize(@query) == + {:ok, + [ + {:"{", {1, 1}}, + {:"{", {2, 3}}, + {:name, {2, 5}, 'foo'}, + {:"(", {2, 8}}, + {:name, {2, 9}, 'bar'}, + {:":", {2, 12}}, + {:string_value, {2, 14}, ~S("\\FOO") |> String.to_charlist()}, + {:")", {2, 23}}, + {:"}", {2, 25}}, + {:"}", {3, 1}} + ]} + end + + @query """ + { + foo(bar: \""" + stuff + \""") + } + """ + test "basic document, multiple lines with block string" do + assert {:ok, + [ + {:"{", {1, 1}}, + {:name, {2, 3}, 'foo'}, + {:"(", {2, 6}}, + {:name, {2, 7}, 'bar'}, + {:":", {2, 10}}, + {:block_string_value, {2, 12}, '"""\n stuff\n """'}, + {:")", {4, 6}}, + {:"}", {5, 1}} + ]} = Absinthe.Lexer.tokenize(@query) + end + + @query """ + # A comment with a ๐Ÿ˜• emoji. + \""" + A block quote with a ๐Ÿ‘ emoji. 
+ \""" + { + foo(bar: "A string with a ๐ŸŽ‰ emoji.") anotherOnSameLine + } + """ + test "document with emojis" do + assert {:ok, + [ + {:block_string_value, {2, 1}, '"""\nA block quote with a ๐Ÿ‘ emoji.\n"""'}, + {:"{", {5, 1}}, + {:name, {6, 3}, 'foo'}, + {:"(", {6, 6}}, + {:name, {6, 7}, 'bar'}, + {:":", {6, 10}}, + {:string_value, {6, 12}, '"A string with a ๐ŸŽ‰ emoji."'}, + {:")", {6, 38}}, + {:name, {6, 40}, 'anotherOnSameLine'}, + {:"}", {7, 1}} + ]} == Absinthe.Lexer.tokenize(@query) + end +end diff --git a/test/absinthe/middleware/async_test.exs b/test/absinthe/middleware/async_test.exs index 5bfe7935c3..7c4e88eefd 100644 --- a/test/absinthe/middleware/async_test.exs +++ b/test/absinthe/middleware/async_test.exs @@ -26,6 +26,28 @@ defmodule Absinthe.Middleware.AsyncTest do {:ok, nil} end) end + + field :async_bare_thing_with_opts, :string do + resolve fn _, _, _ -> + task = + Task.async(fn -> + {:ok, "bare task"} + end) + + {:middleware, Elixir.Absinthe.Middleware.Async, {task, []}} + end + end + + field :async_bare_thing, :string do + resolve fn _, _, _ -> + task = + Task.async(fn -> + {:ok, "bare task"} + end) + + {:middleware, Elixir.Absinthe.Middleware.Async, task} + end + end end def cool_async(fun) do @@ -37,7 +59,23 @@ defmodule Absinthe.Middleware.AsyncTest do end end - test "can resolve a field using the normal async helper" do + test "can resolve a field using the bare api with opts" do + doc = """ + {asyncBareThingWithOpts} + """ + + assert {:ok, %{data: %{"asyncBareThingWithOpts" => "bare task"}}} == Absinthe.run(doc, Schema) + end + + test "can resolve a field using the bare api" do + doc = """ + {asyncBareThing} + """ + + assert {:ok, %{data: %{"asyncBareThing" => "bare task"}}} == Absinthe.run(doc, Schema) + end + + test "can resolve a field using the normal test helper" do doc = """ {asyncThing} """ diff --git a/test/absinthe/middleware/batch_test.exs b/test/absinthe/middleware/batch_test.exs index c521e34fbf..d099055649 100644 --- 
a/test/absinthe/middleware/batch_test.exs +++ b/test/absinthe/middleware/batch_test.exs @@ -82,7 +82,8 @@ defmodule Absinthe.Middleware.BatchTest do assert expected_data == data end - test "can resolve batched fields cross-query that have different data requirements" do + test "can resolve batched fields cross-query that have different data requirements and should emit telemetry events", + %{test: test} do doc = """ { users { @@ -105,7 +106,26 @@ defmodule Absinthe.Middleware.BatchTest do "organization" => %{"id" => 1} } + :ok = + :telemetry.attach_many( + "#{test}", + [ + [:absinthe, :middleware, :batch, :start], + [:absinthe, :middleware, :batch, :stop] + ], + fn name, measurements, metadata, _ -> + send(self(), {:telemetry_event, name, measurements, metadata}) + end, + nil + ) + assert {:ok, %{data: data}} = Absinthe.run(doc, Schema) assert expected_data == data + + assert_receive {:telemetry_event, [:absinthe, :middleware, :batch, :start], %{system_time: _}, + %{id: _, batch_fun: _, batch_opts: _, batch_data: _}} + + assert_receive {:telemetry_event, [:absinthe, :middleware, :batch, :stop], %{duration: _}, + %{id: _, batch_fun: _, batch_opts: _, batch_data: _, result: _}} end end diff --git a/test/absinthe/middleware/dataloader_test.exs b/test/absinthe/middleware/dataloader_test.exs index 03dbd10a40..201cd1bfcf 100644 --- a/test/absinthe/middleware/dataloader_test.exs +++ b/test/absinthe/middleware/dataloader_test.exs @@ -2,102 +2,170 @@ defmodule Absinthe.Middleware.DataloaderTest do use Absinthe.Case, async: true defmodule Schema do - use Absinthe.Schema - - import Absinthe.Resolution.Helpers - - @organizations 1..3 - |> Map.new( - &{&1, - %{ - id: &1, - name: "Organization: ##{&1}" - }} - ) - @users 1..3 - |> Enum.map( - &%{ - id: &1, - name: "User: ##{&1}", - organization_id: &1 - } - ) - - def organizations(), do: @organizations - - defp batch_load({:organization_id, %{pid: test_pid}}, sources) do - send(test_pid, :loading) - - Map.new(sources, fn src -> - 
{src, Map.fetch!(@organizations, src.organization_id)} - end) - end + defmacro __using__(_opts) do + quote do + use Absinthe.Schema + + import Absinthe.Resolution.Helpers + + @organizations 1..3 + |> Map.new( + &{&1, + %{ + id: &1, + name: "Organization: ##{&1}" + }} + ) + @users 1..3 + |> Enum.map( + &%{ + id: &1, + name: "User: ##{&1}", + organization_id: &1 + } + ) + + @users_with_organization 1..3 + |> Enum.map( + &%{ + id: &1, + name: "User: ##{&1}", + organization_id: &1, + organization: %{ + id: &1, + name: "Organization: ##{&1}" + } + } + ) + + def organizations(), do: @organizations + + defp batch_load({:organization, %{pid: test_pid}}, sources) do + send(test_pid, :loading) + + Map.new(sources, fn src -> + {src, Map.fetch!(@organizations, src.organization_id)} + end) + end + + def batch_dataloader(opts \\ []) do + source = Dataloader.KV.new(&batch_load/2) + Dataloader.add_source(Dataloader.new(opts), :test, source) + end - def dataloader() do - source = Dataloader.KV.new(&batch_load/2) - Dataloader.add_source(Dataloader.new(), :test, source) + def plugins do + [Absinthe.Middleware.Dataloader] ++ Absinthe.Plugin.defaults() + end + + object :organization do + field :id, :integer + field :name, :string + end + + object :user do + field :name, :string + + field :foo_organization, :organization do + resolve dataloader( + :test, + fn _, _, %{context: %{test_pid: pid}} -> + {:organization, %{pid: pid}} + end + ) + end + + field :bar_organization, :organization do + resolve dataloader(:test, :organization, args: %{pid: self()}, use_parent: true) + end + end + + query do + field :users, list_of(:user) do + resolve fn _, _, _ -> {:ok, @users} end + end + + field :users_with_organization, list_of(:user) do + resolve fn _, _, _ -> {:ok, @users_with_organization} end + end + + field :organization, :organization do + arg :id, non_null(:integer) + + resolve fn _, %{id: id}, %{context: %{loader: loader, test_pid: test_pid}} -> + loader + |> Dataloader.load(:test, 
{:organization, %{pid: test_pid}}, %{ + organization_id: id + }) + |> Dataloader.put( + :test, + {:organization, %{pid: self()}}, + %{organization_id: 123}, + %{} + ) + |> on_load(fn loader -> + {:ok, + Dataloader.get(loader, :test, {:organization, %{pid: test_pid}}, %{ + organization_id: id + })} + end) + end + end + end + end end + end + + defmodule DefaultSchema do + use Schema def context(ctx) do ctx - |> Map.put_new(:loader, dataloader()) + |> Map.put_new(:loader, batch_dataloader()) |> Map.merge(%{ test_pid: self() }) end + end - def plugins do - [Absinthe.Middleware.Dataloader] ++ Absinthe.Plugin.defaults() - end + defmodule TuplesSchema do + use Schema - object :organization do - field :id, :integer - field :name, :string + def context(ctx) do + ctx + |> Map.put_new(:loader, batch_dataloader(get_policy: :tuples)) + |> Map.merge(%{ + test_pid: self() + }) end + end - object :user do - field :name, :string - - field :foo_organization, :organization do - resolve dataloader(:test, fn _, _, %{context: %{test_pid: pid}} -> - {:organization_id, %{pid: pid}} - end) - end + test "can resolve a field using the normal dataloader helper" do + doc = """ + { + users { + organization: barOrganization { + name + } + } + } + """ - field :bar_organization, :organization do - resolve dataloader(:test, :organization_id, args: %{pid: self()}) - end - end + expected_data = %{ + "users" => [ + %{"organization" => %{"name" => "Organization: #1"}}, + %{"organization" => %{"name" => "Organization: #2"}}, + %{"organization" => %{"name" => "Organization: #3"}} + ] + } - query do - field :users, list_of(:user) do - resolve fn _, _, _ -> {:ok, @users} end - end + assert {:ok, %{data: data}} = Absinthe.run(doc, DefaultSchema) + assert expected_data == data - field :organization, :organization do - arg :id, non_null(:integer) - - resolve fn _, %{id: id}, %{context: %{loader: loader, test_pid: test_pid}} -> - loader - |> Dataloader.load(:test, {:organization_id, %{pid: test_pid}}, 
%{organization_id: id}) - |> Dataloader.put( - :test, - {:organization_id, %{pid: self()}}, - %{organization_id: 123}, - %{} - ) - |> on_load(fn loader -> - {:ok, - Dataloader.get(loader, :test, {:organization_id, %{pid: test_pid}}, %{ - organization_id: id - })} - end) - end - end - end + assert_receive(:loading) + refute_receive(:loading) end - test "can resolve a field using the normal dataloader helper" do + test "can resolve a field when dataloader uses 'tuples' get_policy" do doc = """ { users { @@ -116,7 +184,7 @@ defmodule Absinthe.Middleware.DataloaderTest do ] } - assert {:ok, %{data: data}} = Absinthe.run(doc, Schema) + assert {:ok, %{data: data}} = Absinthe.run(doc, TuplesSchema) assert expected_data == data assert_receive(:loading) @@ -146,7 +214,7 @@ defmodule Absinthe.Middleware.DataloaderTest do "organization" => %{"id" => 1} } - assert {:ok, %{data: data}} = Absinthe.run(doc, Schema) + assert {:ok, %{data: data}} = Absinthe.run(doc, DefaultSchema) assert expected_data == data assert_receive(:loading) refute_receive(:loading) @@ -163,21 +231,72 @@ defmodule Absinthe.Middleware.DataloaderTest do expected_data = %{"organization" => %{"id" => 1}} - org = Schema.organizations()[1] + org = DefaultSchema.organizations()[1] # Get the dataloader, and warm the cache for the organization key we're going # to try to access via graphql. 
dataloader = - Schema.dataloader() - |> Dataloader.put(:test, {:organization_id, %{pid: self()}}, %{organization_id: 1}, org) + DefaultSchema.batch_dataloader() + |> Dataloader.put(:test, {:organization, %{pid: self()}}, %{organization_id: 1}, org) context = %{ loader: dataloader } - assert {:ok, %{data: data}} = Absinthe.run(doc, Schema, context: context) + assert {:ok, %{data: data}} = Absinthe.run(doc, DefaultSchema, context: context) assert expected_data == data refute_receive(:loading) end + + test "use parent's pre-existing value when use_parent is true" do + doc = """ + { + usersWithOrganization { + organization: barOrganization { + name + } + } + } + """ + + expected_data = %{ + "usersWithOrganization" => [ + %{"organization" => %{"name" => "Organization: #1"}}, + %{"organization" => %{"name" => "Organization: #2"}}, + %{"organization" => %{"name" => "Organization: #3"}} + ] + } + + assert {:ok, %{data: data}} = Absinthe.run(doc, DefaultSchema) + assert expected_data == data + + refute_receive(:loading) + end + + test "ignore parent's pre-existing value when use_parent is false (default)" do + doc = """ + { + usersWithOrganization { + organization: fooOrganization { + name + } + } + } + """ + + expected_data = %{ + "usersWithOrganization" => [ + %{"organization" => %{"name" => "Organization: #1"}}, + %{"organization" => %{"name" => "Organization: #2"}}, + %{"organization" => %{"name" => "Organization: #3"}} + ] + } + + assert {:ok, %{data: data}} = Absinthe.run(doc, DefaultSchema) + assert expected_data == data + + assert_receive(:loading) + refute_receive(:loading) + end end diff --git a/test/absinthe/phase/document/arguments/normalize_test.exs b/test/absinthe/phase/document/arguments/normalize_test.exs index ac397fc099..b538702d66 100644 --- a/test/absinthe/phase/document/arguments/normalize_test.exs +++ b/test/absinthe/phase/document/arguments/normalize_test.exs @@ -78,14 +78,14 @@ defmodule Absinthe.Phase.Document.Arguments.NormalizeTest do assert 
%Blueprint.Input.Integer{ value: 36, - source_location: %Blueprint.Document.SourceLocation{column: nil, line: 6} + source_location: %Blueprint.SourceLocation{column: 29, line: 6} } == age_argument.input_value.normalized name_argument = field.arguments |> Enum.find(&(&1.name == "name")) assert %Blueprint.Input.String{ value: "Bruce", - source_location: %Blueprint.Document.SourceLocation{column: nil, line: 7} + source_location: %Blueprint.SourceLocation{column: 19, line: 7} } == name_argument.input_value.normalized end end @@ -101,7 +101,7 @@ defmodule Absinthe.Phase.Document.Arguments.NormalizeTest do assert %Blueprint.Input.String{ value: "Bruce", - source_location: %Blueprint.Document.SourceLocation{column: nil, line: 7} + source_location: %Blueprint.SourceLocation{column: 19, line: 7} } == name_argument.input_value.normalized end end diff --git a/test/absinthe/phase/document/complexity_test.exs b/test/absinthe/phase/document/complexity_test.exs index bdd909a36f..9a9ab66fdb 100644 --- a/test/absinthe/phase/document/complexity_test.exs +++ b/test/absinthe/phase/document/complexity_test.exs @@ -57,6 +57,10 @@ defmodule Absinthe.Phase.Document.ComplexityTest do field :nested, :foo do complexity 1 end + + field :nested_heavy, :foo do + complexity 100 + end end defp penalize_guests(penalty) do @@ -77,6 +81,7 @@ defmodule Absinthe.Phase.Document.ComplexityTest do unionComplexity { ... 
on Foo { bar + heavy } } } @@ -84,7 +89,7 @@ defmodule Absinthe.Phase.Document.ComplexityTest do assert {:ok, result, _} = run_phase(doc, operation_name: "UnionComplexity", variables: %{}) op = result.operations |> Enum.find(&(&1.name == "UnionComplexity")) - assert op.complexity == 2 + assert op.complexity == 102 errors = result.execution.validation_errors |> Enum.map(& &1.message) assert errors == [] end @@ -285,6 +290,41 @@ defmodule Absinthe.Phase.Document.ComplexityTest do ] end + test "errors when inline fragment is too complex" do + doc = """ + query ComplexityInlineFrag { + unionComplexity { + ... on Quux { + ...QuuxFields + } + } + } + fragment QuuxFields on Quux { + nested_heavy { + bar + } + } + """ + + assert {:error, result, _} = + run_phase( + doc, + operation_name: "ComplexityInlineFrag", + variables: %{}, + max_complexity: 1, + schema: Absinthe.Fixtures.ContactSchema + ) + + errors = result.execution.validation_errors |> Enum.map(& &1.message) + + assert errors == [ + "Spread QuuxFields is too complex: complexity is 100 and maximum is 1", + "Inline Fragment is too complex: complexity is 100 and maximum is 1", + "Field unionComplexity is too complex: complexity is 101 and maximum is 1", + "Operation ComplexityInlineFrag is too complex: complexity is 101 and maximum is 1" + ] + end + test "skips analysis when disabled" do doc = """ query ComplexitySkip { @@ -310,27 +350,10 @@ defmodule Absinthe.Phase.Document.ComplexityTest do end test "handles GraphQL introspection" do - doc = """ - query IntrospectionQuery { - __schema { - types { - ...FullType - } - } - } - - fragment FullType on __Type { - fields { - args { - ...InputValue - } - } - } - - fragment InputValue on __InputValue { - type { name } - } - """ + doc = + [:code.priv_dir(:absinthe), "graphql", "introspection.graphql"] + |> Path.join() + |> File.read!() assert {:ok, _, _} = run_phase( @@ -340,5 +363,38 @@ defmodule Absinthe.Phase.Document.ComplexityTest do analyze_complexity: true ) end + + 
test "__typename doesn't increase complexity" do + doc_with = """ + query TypenameComplexity { + fooComplexity(limit: 3) { + bar + __typename + } + } + """ + + doc_without = """ + query TypenameComplexity { + fooComplexity(limit: 3) { + bar + } + } + """ + + assert {:ok, result_with, _} = + run_phase(doc_with, operation_name: "TypenameComplexity", variables: %{}) + + op_with = result_with.operations |> Enum.find(&(&1.name == "TypenameComplexity")) + complexity_with = op_with.complexity + + assert {:ok, result_without, _} = + run_phase(doc_without, operation_name: "TypenameComplexity", variables: %{}) + + op_without = result_without.operations |> Enum.find(&(&1.name == "TypenameComplexity")) + complexity_without = op_without.complexity + + assert complexity_with == complexity_without + end end end diff --git a/test/absinthe/phase/document/schema_test.exs b/test/absinthe/phase/document/schema_test.exs index aa6ecbd0c5..26faef8754 100644 --- a/test/absinthe/phase/document/schema_test.exs +++ b/test/absinthe/phase/document/schema_test.exs @@ -33,7 +33,7 @@ defmodule Absinthe.Phase.Document.SchemaTest do end object :category do - field(:name) + field(:name, :string) end object :review do @@ -99,59 +99,59 @@ defmodule Absinthe.Phase.Document.SchemaTest do ~w(Q BooksOnly) |> Enum.each(fn name -> node = op(result, name) - assert %Type.Object{__reference__: %{identifier: :query}} = node.schema_node + assert %Type.Object{identifier: :query} = node.schema_node end) end test "sets the non-named query operation schema node" do {:ok, result} = input(@nameless_query) node = op(result, nil) - assert %Type.Object{__reference__: %{identifier: :query}} = node.schema_node + assert %Type.Object{identifier: :query} = node.schema_node end test "sets the mutation schema node" do {:ok, result} = input(@query) node = op(result, "ModifyBook") - assert %Type.Object{__reference__: %{identifier: :mutation}} = node.schema_node + assert %Type.Object{identifier: :mutation} = node.schema_node end 
test "sets the subscription schema node" do {:ok, result} = input(@query) node = op(result, "NewBooks") - assert %Type.Object{__reference__: %{identifier: :subscription}} = node.schema_node + assert %Type.Object{identifier: :subscription} = node.schema_node end test "sets the named fragment schema node" do {:ok, result} = input(@query) node = frag(result, "BookName") - assert %Type.Object{__reference__: %{identifier: :book}} = node.schema_node + assert %Type.Object{identifier: :book} = node.schema_node end test "sets the schema node for a named fragment field" do {:ok, result} = input(@query) fragment = frag(result, "BookName") node = field(fragment, "name") - assert %Type.Field{__reference__: %{identifier: :name}} = node.schema_node + assert %Type.Field{identifier: :name} = node.schema_node end test "sets the inline fragment schema node" do {:ok, result} = input(@query) node = first_inline_frag(result) - assert %Type.Object{__reference__: %{identifier: :book}} = node.schema_node + assert %Type.Object{identifier: :book} = node.schema_node end test "sets the schema node for an inline fragment" do {:ok, result} = input(@query) fragment = first_inline_frag(result) node = field(fragment, "id") - assert %Type.Field{__reference__: %{identifier: :id}} = node.schema_node + assert %Type.Field{identifier: :id} = node.schema_node end test "sets an operation field schema node" do {:ok, result} = input(@query) operation = op(result, "BooksOnly") node = field(operation, "books") - assert %Type.Field{__reference__: %{identifier: :books}} = node.schema_node + assert %Type.Field{identifier: :books} = node.schema_node end test "sets an field schema node inside another field" do @@ -159,13 +159,13 @@ defmodule Absinthe.Phase.Document.SchemaTest do operation = op(result, "Q") books = field(operation, "books") node = field(books, "name") - assert %Type.Field{__reference__: %{identifier: :name}} = node.schema_node + assert %Type.Field{identifier: :name} = node.schema_node end test "sets 
an operation field schema node supporting an adapter" do {:ok, result} = input(@query) node = named(result, Blueprint.Document.Field, "changeName") - assert %Type.Field{__reference__: %{identifier: :change_name}} = node.schema_node + assert %Type.Field{identifier: :change_name} = node.schema_node end test "sets directive schema nodes" do @@ -179,7 +179,7 @@ defmodule Absinthe.Phase.Document.SchemaTest do operation = op(result, "ModifyBook") f = field(operation, "changeName") node = named(f, Blueprint.Input.Argument, "id") - assert %Type.Argument{__reference__: %{identifier: :id}} = node.schema_node + assert %Type.Argument{identifier: :id} = node.schema_node end test "sets field argument schema nodes supporting input objects" do @@ -187,11 +187,11 @@ defmodule Absinthe.Phase.Document.SchemaTest do operation = op(result, "ModifyBook") f = field(operation, "addReview") top_node = named(f, Blueprint.Input.Argument, "info") - assert %Type.Argument{__reference__: %{identifier: :info}} = top_node.schema_node + assert %Type.Argument{identifier: :info} = top_node.schema_node node = top_node.input_value.normalized.fields |> List.first() - assert %Type.Field{__reference__: %{identifier: :stars}} = node.schema_node + assert %Type.Field{identifier: :stars} = node.schema_node - assert %Type.NonNull{of_type: %Type.Scalar{__reference__: %{identifier: :integer}}} = + assert %Type.NonNull{of_type: %Type.Scalar{identifier: :integer}} = node.input_value.schema_node end @@ -199,7 +199,7 @@ defmodule Absinthe.Phase.Document.SchemaTest do {:ok, result} = input(@query) directive = named(result, Blueprint.Directive, "include") node = named(directive, Blueprint.Input.Argument, "if") - assert %Type.Argument{__reference__: %{identifier: :if}} = node.schema_node + assert %Type.Argument{identifier: :if} = node.schema_node end end diff --git a/test/absinthe/phase/document/validation/arguments_of_correct_type_test.exs b/test/absinthe/phase/document/validation/arguments_of_correct_type_test.exs 
index f897681fdd..bf23e5394a 100644 --- a/test/absinthe/phase/document/validation/arguments_of_correct_type_test.exs +++ b/test/absinthe/phase/document/validation/arguments_of_correct_type_test.exs @@ -802,6 +802,37 @@ defmodule Absinthe.Phase.Document.Validation.ArgumentsOfCorrectTypeTest do ) end + test "Partial object, list with correct value" do + assert_passes_validation( + """ + { + complicatedArgs { + complexArgField(complexArgList: [{ requiredField: true }]) + } + } + """, + [] + ) + end + + test "Partial object, list with bad value" do + assert_fails_validation( + """ + { + complicatedArgs { + complexArgField(complexArgList: [2]) + } + } + """, + [], + [ + bad_argument("complexArgList", "[ComplexInput]", "[2]", 3, [ + @phase.value_error_message(0, "ComplexInput", "2") + ]) + ] + ) + end + test "Full object" do assert_passes_validation( """ @@ -888,7 +919,7 @@ defmodule Absinthe.Phase.Document.Validation.ArgumentsOfCorrectTypeTest do ) end - test "Partial object, unknown field arg" do + test "Partial object, unknown field arg without suggestion" do assert_fails_validation( """ { @@ -912,6 +943,65 @@ defmodule Absinthe.Phase.Document.Validation.ArgumentsOfCorrectTypeTest do ) ) end + + test "Partial object, unknown field arg with suggestion" do + assert_fails_validation( + """ + { + complicatedArgs { + complexArgField(complexArg: { + requiredField: true, + strinField: "value" + }) + } + } + """, + [], + bad_argument( + "complexArg", + "ComplexInput", + ~s({requiredField: true, strinField: "value"}), + 3, + [ + @phase.unknown_field_error_message("strinField", [ + "string_list_field", + "int_field", + "string_field" + ]) + ] + ) + ) + end + end + + describe "Invalid Custom Scalar value" do + test "Invalid scalar input on mutation, no suggestion" do + assert_fails_validation( + """ + mutation($scalarInput: CustomScalar) { + createDog(customScalarInput: $scalarInput) + } + """, + [ + variables: %{ + "scalarInput" => [ + %{ + "foo" => "BAR" + } + ] + } + ], + [ + 
bad_argument( + "customScalarInput", + "CustomScalar", + ~s($scalarInput), + 2, + [@phase.unknown_field_error_message("foo")] + ) + ] + ) + end end describe "Directive arguments" do diff --git a/test/absinthe/phase/document/validation/fields_on_correct_type_test.exs b/test/absinthe/phase/document/validation/fields_on_correct_type_test.exs index ec0b4c090d..64aaa62880 100644 --- a/test/absinthe/phase/document/validation/fields_on_correct_type_test.exs +++ b/test/absinthe/phase/document/validation/fields_on_correct_type_test.exs @@ -203,7 +203,7 @@ defmodule Absinthe.Phase.Document.Validation.FieldsOnCorrectTypeTest do } """, [], - undefined_field("nickname", "Pet", ["Dog", "Cat"], ["name"], 2) + undefined_field("nickname", "Pet", ["Cat", "Dog"], ["name"], 2) ) end @@ -238,7 +238,7 @@ defmodule Absinthe.Phase.Document.Validation.FieldsOnCorrectTypeTest do } """, [], - undefined_field("name", "CatOrDog", ["Being", "Pet", "Canine", "Dog", "Cat"], [], 2) + undefined_field("name", "CatOrDog", ["Being", "Canine", "Cat", "Dog", "Pet"], [], 2) ) end diff --git a/test/absinthe/phase/validation/known_directives_test.exs b/test/absinthe/phase/document/validation/known_directives_test.exs similarity index 94% rename from test/absinthe/phase/validation/known_directives_test.exs rename to test/absinthe/phase/document/validation/known_directives_test.exs index 6fd42fbc92..f20ce5782e 100644 --- a/test/absinthe/phase/validation/known_directives_test.exs +++ b/test/absinthe/phase/document/validation/known_directives_test.exs @@ -1,5 +1,5 @@ -defmodule Absinthe.Phase.Validation.KnownDirectivesTest do - @phase Absinthe.Phase.Validation.KnownDirectives +defmodule Absinthe.Phase.Document.Validation.KnownDirectivesTest do + @phase Absinthe.Phase.Document.Validation.KnownDirectives use Absinthe.ValidationPhaseCase, phase: @phase, @@ -10,7 +10,7 @@ defmodule Absinthe.Phase.Validation.KnownDirectivesTest do def unknown_directive(name, line) do bad_value( Blueprint.Directive, - "Unknown 
directive.", + "Unknown directive `#{name}'.", line, name: name ) @@ -19,7 +19,7 @@ defmodule Absinthe.Phase.Validation.KnownDirectivesTest do def misplaced_directive(name, placement, line) do bad_value( Blueprint.Directive, - "May not be used on #{placement}.", + "Directive `#{name}' may not be used on #{placement}.", line, name: name ) @@ -137,6 +137,7 @@ defmodule Absinthe.Phase.Validation.KnownDirectivesTest do ) end + @tag :pending describe "within schema language" do test "with well placed directives" do assert_passes_validation( @@ -169,6 +170,7 @@ defmodule Absinthe.Phase.Validation.KnownDirectivesTest do ) end + @tag :pending test "with misplaced directives" do assert_fails_validation( """ diff --git a/test/absinthe/phase/document/validation/no_unused_fragments_test.exs b/test/absinthe/phase/document/validation/no_unused_fragments_test.exs index ef92bfbb42..be29397d51 100644 --- a/test/absinthe/phase/document/validation/no_unused_fragments_test.exs +++ b/test/absinthe/phase/document/validation/no_unused_fragments_test.exs @@ -5,8 +5,6 @@ defmodule Absinthe.Phase.Document.Validation.NoUnusedFragmentsTest do phase: @phase, async: true - @moduletag :pending - alias Absinthe.Blueprint defp unused_fragment(name, line) do diff --git a/test/absinthe/phase/document/validation/repeatable_directives_test.exs b/test/absinthe/phase/document/validation/repeatable_directives_test.exs new file mode 100644 index 0000000000..b089717fa2 --- /dev/null +++ b/test/absinthe/phase/document/validation/repeatable_directives_test.exs @@ -0,0 +1,45 @@ +defmodule Absinthe.Phase.Document.Validation.RepeatableDirectivesTest do + @phase Absinthe.Phase.Document.Validation.RepeatableDirectives + + use Absinthe.ValidationPhaseCase, + phase: @phase, + async: true + + alias Absinthe.Blueprint + + defp duplicate(name, line) do + bad_value( + Blueprint.Directive, + "Directive `#{name}' cannot be applied repeatedly.", + line, + name: name + ) + end + + test "with disallowed repeated directives" 
do + assert_fails_validation( + """ + query Foo { + skippedField @skip(if: true) @skip(if: true) + } + """, + [], + duplicate("skip", 2) + ) + end + + test "with allowed repeated directives" do + assert_passes_validation( + """ + query Foo { + skippedField @onField @onField + } + + mutation Bar @onMutation { + someField + } + """, + [] + ) + end +end diff --git a/test/absinthe/phase/document/validation/selected_current_operation_test.exs b/test/absinthe/phase/document/validation/selected_current_operation_test.exs index c92c9a244b..11e31585f9 100644 --- a/test/absinthe/phase/document/validation/selected_current_operation_test.exs +++ b/test/absinthe/phase/document/validation/selected_current_operation_test.exs @@ -7,10 +7,10 @@ defmodule Absinthe.Phase.Document.Validation.SelectedCurrentOperationTest do alias Absinthe.Blueprint - defp no_current_operation do + defp no_current_operation(operation_name, count) do bad_value( Blueprint, - @phase.error_message, + @phase.error_message(operation_name, count), nil ) end @@ -30,6 +30,18 @@ defmodule Absinthe.Phase.Document.Validation.SelectedCurrentOperationTest do ) end + test "fails when single operation in document does not match given operation name" do + assert_fails_validation( + """ + query Bar { + name + } + """, + [operation_name: "Nothere"], + no_current_operation("Nothere", 1) + ) + end + test "fails when the operation is not provided" do assert_fails_validation( """ @@ -41,7 +53,7 @@ defmodule Absinthe.Phase.Document.Validation.SelectedCurrentOperationTest do } """, [operation_name: "Nothere"], - no_current_operation() + no_current_operation("Nothere", 2) ) end end @@ -80,7 +92,7 @@ defmodule Absinthe.Phase.Document.Validation.SelectedCurrentOperationTest do } """, [], - no_current_operation() + no_current_operation(nil, 2) ) end end diff --git a/test/absinthe/phase/document/validation/variables_are_input_types_test.exs b/test/absinthe/phase/document/validation/variables_are_input_types_test.exs index 
1c8a5469aa..67490a86cc 100644 --- a/test/absinthe/phase/document/validation/variables_are_input_types_test.exs +++ b/test/absinthe/phase/document/validation/variables_are_input_types_test.exs @@ -43,5 +43,16 @@ defmodule Absinthe.Phase.Document.Validation.VariablesAreInputTypesTest do ] ) end + + test "unknown types don't blow up this validation" do + assert_passes_validation( + """ + query Foo($a: Number!) { + field(a: $a) + } + """, + [] + ) + end end end diff --git a/test/absinthe/phase/document/validation/variables_of_correct_type_test.exs b/test/absinthe/phase/document/validation/variables_of_correct_type_test.exs new file mode 100644 index 0000000000..f7cb666837 --- /dev/null +++ b/test/absinthe/phase/document/validation/variables_of_correct_type_test.exs @@ -0,0 +1,154 @@ +defmodule Absinthe.Phase.Document.Validation.VariablesOfCorrectTypeTest do + @phase Absinthe.Phase.Document.Arguments.VariableTypesMatch + + use Absinthe.ValidationPhaseCase, async: true, phase: @phase + + defp error_message(op, variable_name, var_type, arg_type) do + var = %Absinthe.Blueprint.Input.Variable{name: variable_name} + @phase.error_message(op, var, var_type, arg_type) + end + + test "types of variables match types of arguments" do + {:ok, %{errors: errors}} = + Absinthe.run( + """ + query test($intArg: Int!) { + complicatedArgs { + stringArgField(stringArg: $intArg) + } + } + """, + Absinthe.Fixtures.PetsSchema, + variables: %{"intArg" => 5} + ) + + expected_error_msg = error_message("test", "intArg", "Int!", "String") + assert expected_error_msg in (errors |> Enum.map(& &1.message)) + end + + test "variable type check handles non existent type" do + {:ok, %{errors: errors}} = + Absinthe.run( + """ + query test($intArg: DoesNotExist!) 
{ + complicatedArgs { + stringArgField(stringArg: $intArg) + } + } + """, + Absinthe.Fixtures.PetsSchema, + variables: %{"intArg" => 5} + ) + + expected_error_msg = error_message("test", "intArg", "DoesNotExist!", "String") + + assert expected_error_msg in (errors |> Enum.map(& &1.message)) + end + + test "types of variables match types of arguments even when the value is null" do + {:ok, %{errors: errors}} = + Absinthe.run( + """ + query test($intArg: Int) { + complicatedArgs { + stringArgField(stringArg: $intArg) + } + } + """, + Absinthe.Fixtures.PetsSchema, + variables: %{"intArg" => nil} + ) + + expected_error_msg = error_message("test", "intArg", "Int", "String") + assert expected_error_msg in (errors |> Enum.map(& &1.message)) + end + + test "types of variables match types of arguments in named fragments" do + {:ok, %{errors: errors}} = + Absinthe.run( + """ + query test($intArg: Int) { + complicatedArgs { + ...Fragment + } + } + + fragment Fragment on ComplicatedArgs { + stringArgField(stringArg: $intArg) + } + """, + Absinthe.Fixtures.PetsSchema, + variables: %{"intArg" => 5} + ) + + expected_error_msg = error_message("test", "intArg", "Int", "String") + assert expected_error_msg in (errors |> Enum.map(& &1.message)) + end + + test "non null types of variables match non null types of arguments" do + {:ok, %{errors: errors}} = + Absinthe.run( + """ + query test($intArg: Int) { + complicatedArgs { + nonNullIntArgField(nonNullIntArg: $intArg) + } + } + """, + Absinthe.Fixtures.PetsSchema, + variables: %{"intArg" => 5} + ) + + expected_error_msg = error_message("test", "intArg", "Int", "Int!") + assert expected_error_msg in (errors |> Enum.map(& &1.message)) + end + + test "list types of variables match list types of arguments" do + result = + Absinthe.run( + """ + query test($stringListArg: [String!]) { + complicatedArgs { + stringListArgField(stringListArg: $stringListArg) + } + } + """, + Absinthe.Fixtures.PetsSchema, + variables: %{"stringListArg" => 
["a"]} + ) + + assert {:ok, %{data: %{"complicatedArgs" => nil}}} = result + end + + test "variable can be nullable for non-nullable argument with default" do + result = + Absinthe.run( + """ + query booleanArgQueryWithDefault($booleanArg: Boolean) { + complicatedArgs { + optionalNonNullBooleanArgField(optionalBooleanArg: $booleanArg) + } + } + """, + Absinthe.Fixtures.PetsSchema + ) + + assert {:ok, %{data: %{"complicatedArgs" => nil}}} = result + end + + test "variable with default can be nullable for non-nullable argument" do + result = + Absinthe.run( + """ + query booleanArgQueryWithDefault($booleanArg: Boolean = true) { + complicatedArgs { + nonNullBooleanArgField(nonNullBooleanArg: $booleanArg) + } + } + """, + Absinthe.Fixtures.PetsSchema + ) + + assert {:ok, %{data: %{"complicatedArgs" => nil}}} = result + end +end diff --git a/test/absinthe/phase/document/variables_test.exs b/test/absinthe/phase/document/variables_test.exs index 71b4e6d5bb..1970c1b061 100644 --- a/test/absinthe/phase/document/variables_test.exs +++ b/test/absinthe/phase/document/variables_test.exs @@ -26,7 +26,7 @@ defmodule Absinthe.Phase.Document.VariablesTest do assert op.provided_values == %{ "age" => %Blueprint.Input.Integer{ value: 36, - source_location: %Blueprint.Document.SourceLocation{column: nil, line: 6} + source_location: %Blueprint.SourceLocation{column: 29, line: 6} }, "name" => %Blueprint.Input.String{value: "Bruce"} } @@ -67,17 +67,17 @@ defmodule Absinthe.Phase.Document.VariablesTest do expected = %{ errors: [ %{ - locations: [%{column: 0, line: 1}], + locations: [%{column: 11, line: 1}], message: "Variable \"input\" cannot be non-input type \"Thing\"." }, %{ - locations: [%{column: 0, line: 1}, %{column: 0, line: 1}], + locations: [%{column: 11, line: 1}, %{column: 1, line: 1}], message: "Variable \"input\" is never used in operation \"Foo\"." 
} ] } - assert {:ok, expected} == Absinthe.run(doc, Absinthe.Fixtures.ThingsSchema) + assert {:ok, expected} == Absinthe.run(doc, Absinthe.Fixtures.Things.MacroSchema) end def input(query, values) do diff --git a/test/absinthe/phase/execution/non_null_test.exs b/test/absinthe/phase/execution/non_null_test.exs index 2d464728d6..0895988574 100644 --- a/test/absinthe/phase/execution/non_null_test.exs +++ b/test/absinthe/phase/execution/non_null_test.exs @@ -1,5 +1,5 @@ defmodule Absinthe.Phase.Document.Execution.NonNullTest do - use ExUnit.Case, async: true + use Absinthe.Case, async: true defmodule Schema do use Absinthe.Schema @@ -64,6 +64,12 @@ defmodule Absinthe.Phase.Document.Execution.NonNullTest do end end + field :non_null_list_of_non_null, non_null(list_of(non_null(:thing))) do + resolve fn _, _ -> + {:ok, [%{}]} + end + end + @desc """ A field declared to be non null. @@ -98,7 +104,7 @@ defmodule Absinthe.Phase.Document.Execution.NonNullTest do errors = [ %{ - locations: [%{column: 0, line: 2}], + locations: [%{column: 25, line: 2}], message: "Cannot return null for non-nullable field", path: ["nullable", "nullable", "nonNull"] } @@ -107,23 +113,38 @@ defmodule Absinthe.Phase.Document.Execution.NonNullTest do assert {:ok, %{data: data, errors: errors}} == Absinthe.run(doc, Schema) end - test "error propogation to root field returns nil on data" do + test "returning nil from a non null child of non nulls pushes nil all the way up to data" do doc = """ { - nullable { nullable { nonNullErrorField }} + nonNull { nonNull { nonNull(makeNull: true) { __typename }}} } """ - data = %{"nullable" => %{"nullable" => nil}} + data = nil errors = [ %{ - locations: [%{column: 0, line: 2}], + locations: [%{column: 23, line: 2}], message: "Cannot return null for non-nullable field", - path: ["nullable", "nullable", "nonNullErrorField"] - }, + path: ["nonNull", "nonNull", "nonNull"] + } + ] + + assert {:ok, %{data: data, errors: errors}} == Absinthe.run(doc, Schema) + end + + 
test "error propagation to root field returns nil on data" do + doc = """ + { + nullable { nullable { nonNullErrorField }} + } + """ + + data = %{"nullable" => %{"nullable" => nil}} + + errors = [ %{ - locations: [%{column: 0, line: 2}], + locations: [%{column: 25, line: 2}], message: "boom", path: ["nullable", "nullable", "nonNullErrorField"] } @@ -143,12 +164,7 @@ defmodule Absinthe.Phase.Document.Execution.NonNullTest do errors = [ %{ - locations: [%{column: 0, line: 2}], - message: "Cannot return null for non-nullable field", - path: ["nonNull", "nonNull", "nonNullErrorField"] - }, - %{ - locations: [%{column: 0, line: 2}], + locations: [%{column: 23, line: 2}], message: "boom", path: ["nonNull", "nonNull", "nonNullErrorField"] } @@ -168,12 +184,7 @@ defmodule Absinthe.Phase.Document.Execution.NonNullTest do path = ["nullable", "nonNull", "nonNull", "nonNull", "nonNull", "nonNullErrorField"] errors = [ - %{ - locations: [%{column: 0, line: 2}], - message: "Cannot return null for non-nullable field", - path: path - }, - %{locations: [%{column: 0, line: 2}], message: "boom", path: path} + %{locations: [%{column: 54, line: 2}], message: "boom", path: path} ] assert {:ok, %{data: data, errors: errors}} == Absinthe.run(doc, Schema) @@ -191,7 +202,7 @@ defmodule Absinthe.Phase.Document.Execution.NonNullTest do errors = [ %{ - locations: [%{column: 0, line: 2}], + locations: [%{column: 28, line: 2}], message: "Cannot return null for non-nullable field", path: ["nullableListOfNullable", 0, "nonNull"] } @@ -211,7 +222,7 @@ defmodule Absinthe.Phase.Document.Execution.NonNullTest do errors = [ %{ - locations: [%{column: 0, line: 2}], + locations: [%{column: 27, line: 2}], message: "Cannot return null for non-nullable field", path: ["nullableListOfNonNull", 0, "nonNull"] } @@ -219,5 +230,25 @@ defmodule Absinthe.Phase.Document.Execution.NonNullTest do assert {:ok, %{data: data, errors: errors}} == Absinthe.run(doc, Schema) end + + test "list of non null things works when 
child has a null violation and the root field is non null" do + doc = """ + { + nonNullListOfNonNull { nonNull(makeNull: true) { __typename } } + } + """ + + data = nil + + errors = [ + %{ + locations: [%{column: 26, line: 2}], + message: "Cannot return null for non-nullable field", + path: ["nonNullListOfNonNull", 0, "nonNull"] + } + ] + + assert {:ok, %{data: data, errors: errors}} == Absinthe.run(doc, Schema) + end end end diff --git a/test/absinthe/phase/parse/block_strings_test.exs b/test/absinthe/phase/parse/block_strings_test.exs index 9f2d52a610..6bc381df98 100644 --- a/test/absinthe/phase/parse/block_strings_test.exs +++ b/test/absinthe/phase/parse/block_strings_test.exs @@ -1,6 +1,8 @@ defmodule Absinthe.Phase.Parse.BlockStringsTest do use Absinthe.Case, async: true + @moduletag :parser + test "parses a query with a block string literal and no newlines" do assert {:ok, result} = run(~S<{ post(title: "single", body: """text""") { name } }>) assert "text" == extract_body(result) @@ -27,6 +29,28 @@ defmodule Absinthe.Phase.Parse.BlockStringsTest do assert "slashes \\\\ \\/" == extract_body(result) end + test "parses attributes when there are escapes" do + assert {:ok, result} = run(~s<{ post(title: "title", body: "body\\\\") { name } }>) + assert "body\\" == extract_body(result) + + assert {:ok, result} = run(~s<{ post(title: "title\\\\", body: "body") { name } }>) + assert "body" == extract_body(result) + end + + test "parse attributes where there are escapes on multiple lines" do + assert {:ok, result} = run(~s<{ post( + title: "title", + body: "body\\\\" + ) { name } }>) + assert "body\\" == extract_body(result) + + assert {:ok, result} = run(~s<{ post( + title: "title\\\\", + body: "body" + ) { name } }>) + assert "body" == extract_body(result) + end + @input [ "", " Hello,", @@ -155,15 +179,6 @@ defmodule Absinthe.Phase.Parse.BlockStringsTest do assert ~S == extract_body(result) end - test "returns an error for a bad byte" do - assert {:error, err} = - 
run( - ~s<{ post(title: "single", body: """trying to escape a \u0000 byte""") { name } }> - ) - - assert "syntax error" <> _ = extract_error_message(err) - end - test "parses a query with a block string literal as a variable default" do assert {:ok, result} = run( @@ -181,15 +196,6 @@ defmodule Absinthe.Phase.Parse.BlockStringsTest do ]) end - defp extract_error_message(err) do - get_in(err, [ - Access.key(:execution, %{}), - Access.key(:validation_errors, []), - Access.at(0), - Access.key(:message, nil) - ]) - end - defp extract_body(value) do get_in(value, [ Access.key(:definitions), diff --git a/test/absinthe/phase/parse/descriptions_test.exs b/test/absinthe/phase/parse/descriptions_test.exs new file mode 100644 index 0000000000..7d85af6a74 --- /dev/null +++ b/test/absinthe/phase/parse/descriptions_test.exs @@ -0,0 +1,89 @@ +defmodule Absinthe.Phase.Parse.DescriptionsTest do + use Absinthe.Case, async: true + + @moduletag :parser + @moduletag :sdl + + @sdl """ + \""" + A simple GraphQL schema which is well described. + \""" + type Query { + \""" + Translates a string from a given language into a different language. + \""" + translate( + "The original language that `text` is provided in." + fromLanguage: Language + + "The translated language to be returned." + toLanguage: Language + + "The text to be translated." + text: String + ): String + } + + \""" + The set of languages supported by `translate`. + \""" + enum Language { + "English" + EN + + "French" + FR + + "Chinese" + CH + } + """ + + test "parses descriptions" do + assert {:ok, + %{ + definitions: [ + %Absinthe.Language.ObjectTypeDefinition{ + description: "A simple GraphQL schema which is well described.", + fields: [ + %Absinthe.Language.FieldDefinition{ + arguments: [ + %Absinthe.Language.InputValueDefinition{ + description: "The original language that `text` is provided in." + }, + %Absinthe.Language.InputValueDefinition{ + description: "The translated language to be returned." 
+ }, + %Absinthe.Language.InputValueDefinition{ + description: "The text to be translated." + } + ], + description: + "Translates a string from a given language into a different language." + } + ] + }, + %Absinthe.Language.EnumTypeDefinition{ + description: "The set of languages supported by `translate`.", + values: [ + %Absinthe.Language.EnumValueDefinition{ + description: "English" + }, + %Absinthe.Language.EnumValueDefinition{ + description: "French" + }, + %Absinthe.Language.EnumValueDefinition{ + description: "Chinese" + } + ] + } + ] + }} = run(@sdl) + end + + def run(input) do + with {:ok, %{input: input}} <- Absinthe.Phase.Parse.run(input) do + {:ok, input} + end + end +end diff --git a/test/absinthe/phase/parse/language_test.exs b/test/absinthe/phase/parse/language_test.exs new file mode 100644 index 0000000000..39d13ccf3a --- /dev/null +++ b/test/absinthe/phase/parse/language_test.exs @@ -0,0 +1,32 @@ +defmodule Absinthe.Phase.Parse.LanguageTest do + use Absinthe.Case, async: true + + @moduletag :parser + + test "parses kitchen-sink.graphql" do + filename = Path.join(__DIR__, "../../../support/fixtures/language/kitchen-sink.graphql") + input = File.read!(filename) + assert {:ok, _} = run(input) + end + + test "parses schema-kitchen-sink.graphql" do + filename = + Path.join(__DIR__, "../../../support/fixtures/language/schema-kitchen-sink.graphql") + + input = File.read!(filename) + assert {:ok, _} = run(input) + end + + test "parses schema-with-emojis.graphql" do + filename = Path.join(__DIR__, "../../../support/fixtures/language/schema-with-emojis.graphql") + + input = File.read!(filename) + assert {:ok, _} = run(input) + end + + def run(input) do + with {:ok, %{input: input}} <- Absinthe.Phase.Parse.run(input) do + {:ok, input} + end + end +end diff --git a/test/absinthe/phase/parse_test.exs b/test/absinthe/phase/parse_test.exs index c4497fe49b..8bac242309 100644 --- a/test/absinthe/phase/parse_test.exs +++ b/test/absinthe/phase/parse_test.exs @@ -1,6 
+1,8 @@ defmodule Absinthe.Phase.ParseTest do use Absinthe.Case, async: true + @moduletag :parser + test "parses a simple query" do assert {:ok, _} = run("{ user(id: 2) { name } }") end @@ -20,8 +22,71 @@ defmodule Absinthe.Phase.ParseTest do assert [ %Absinthe.Phase.Error{ extra: %{}, - locations: [%{column: 0, line: 2}], - message: "illegal: -w", + locations: [%{column: 12, line: 2}], + message: "Parsing failed at `-won't-lex`", + phase: Absinthe.Phase.Parse + } + ] == bp.execution.validation_errors + end + + @graphql "aa;bbbbbbbbโ€”cc" + test "should provide sample of parsing failure respecting unicode boundary" do + assert {:error, bp} = Absinthe.Phase.Parse.run(@graphql, jump_phases: false) + + assert [ + %Absinthe.Phase.Error{ + extra: %{}, + locations: [%{column: 3, line: 1}], + message: "Parsing failed at `;bbbbbbbbโ€”`", + phase: Absinthe.Phase.Parse + } + ] == bp.execution.validation_errors + end + + @graphql "test bad string" <> <<223>> <> "error" + test "coerces non-string binaries to strings" do + assert {:error, bp} = Absinthe.Phase.Parse.run(@graphql) + + [parse_error] = bp.execution.validation_errors + assert String.valid?(parse_error.message) + + assert %Absinthe.Phase.Error{ + extra: %{}, + locations: [%{column: 16, line: 1}], + message: "Parsing failed at `<<223, 101, 114, 114, 111, 114>>`", + phase: Absinthe.Phase.Parse + } == parse_error + end + + @graphql ";" + test "should provide sample of parsing failure on very short query strings" do + assert {:error, bp} = Absinthe.Phase.Parse.run(@graphql, jump_phases: false) + + assert [ + %Absinthe.Phase.Error{ + extra: %{}, + locations: [%{column: 1, line: 1}], + message: "Parsing failed at `;`", + phase: Absinthe.Phase.Parse + } + ] == bp.execution.validation_errors + end + + @graphql """ + query { + user { + name + } + + """ + test "handle parse error when column not available" do + assert {:error, bp} = Absinthe.Phase.Parse.run(@graphql, jump_phases: false) + + assert [ + %Absinthe.Phase.Error{ + 
extra: %{}, + locations: [%{column: 0, line: 4}], + message: "syntax error before: ", phase: Absinthe.Phase.Parse } ] == bp.execution.validation_errors diff --git a/test/absinthe/phase/schema/inline_functions_test.exs b/test/absinthe/phase/schema/inline_functions_test.exs new file mode 100644 index 0000000000..fee2459eba --- /dev/null +++ b/test/absinthe/phase/schema/inline_functions_test.exs @@ -0,0 +1,83 @@ +defmodule Absinthe.Phase.Schema.InlineFunctionsTest do + use Absinthe.Case, async: true + + defmodule Schema do + use Absinthe.Schema.Notation + + object :inlined do + field :direct, :string, resolve: &__MODULE__.foo/3 + field :indirect, :string, resolve: indirection() + field :via_callback, :string + + field :complexity_literal, :string do + complexity 1 + end + end + + object :not_inlined do + field :local_capture, :string, resolve: &foo/3 + + field :anon_function, :string, resolve: fn _, _, _ -> {:ok, "yo"} end + end + + def foo(_, _, _), do: {:ok, "hey"} + + defp indirection() do + &__MODULE__.foo/3 + end + + def middleware(_, %{identifier: :via_callback}, %{identifier: :inlined}) do + [{{Absinthe.Resolution, :call}, &__MODULE__.foo/3}] + end + + def middleware(middleware, _a, _b) do + middleware + end + end + + setup_all do + {:ok, %{bp: result()}} + end + + describe "resolvers and middleware" do + test "are inlined when they are a literal external function", %{bp: bp} do + assert {{Absinthe.Resolution, :call}, &Schema.foo/3} in get_field(bp, :inlined, :direct).middleware + + assert {{Absinthe.Resolution, :call}, &Schema.foo/3} in get_field(bp, :inlined, :indirect).middleware + + assert {{Absinthe.Resolution, :call}, &Schema.foo/3} in get_field( + bp, + :inlined, + :via_callback + ).middleware + end + + test "aren't inlined if they're a local capture", %{bp: bp} do + assert [{{Absinthe.Middleware, :shim}, _}] = + get_field(bp, :not_inlined, :local_capture).middleware + end + end + + describe "complexity" do + test "is inlined when it's a literal", %{bp: 
bp} do + assert 1 == get_field(bp, :inlined, :complexity_literal).complexity + end + end + + defp get_field(%{schema_definitions: [schema]}, object, field) do + object = Enum.find(schema.type_artifacts, fn t -> t.identifier == object end) + Map.fetch!(object.fields, field) + end + + def result() do + assert {:ok, bp, _} = Absinthe.Pipeline.run(Schema.__absinthe_blueprint__(), pipeline()) + bp + end + + def pipeline() do + Schema + |> Absinthe.Pipeline.for_schema() + |> Absinthe.Pipeline.from(Absinthe.Phase.Schema.Build) + |> Absinthe.Pipeline.upto(Absinthe.Phase.Schema.InlineFunctions) + end +end diff --git a/test/absinthe/phase/schema/reformat_description_test.exs b/test/absinthe/phase/schema/reformat_description_test.exs new file mode 100644 index 0000000000..9567df3da5 --- /dev/null +++ b/test/absinthe/phase/schema/reformat_description_test.exs @@ -0,0 +1,102 @@ +defmodule Absinthe.Phase.Schema.ReformatDescriptionTest do + use Absinthe.Case, async: true + + defmodule Schema do + use Absinthe.Schema + + query do + # Must exist + end + + object :via_macro do + description " Description via macro " + + field :foo, :string do + description " Description via macro " + end + end + + object :via_attribute, description: " Description via attribute " do + field :foo, :string, description: " Description via attribute " + end + + @desc " Description via module attribute " + object :via_module_attribute do + @desc " Description via module attribute " + field :foo, :string + end + + def desc(), do: " Description via function " + + object :via_function, description: desc() do + field :foo, :string, description: desc() + end + + import_sdl """ + " Description via SDL " + type ViaSdl { + " Description via SDL " + foo: String + } + + " Description on Enum " + enum OnEnum { + " Description on Enum " + FOO + } + + " Description on Scalar " + scalar OnScalar + """ + end + + describe "Description trimming" do + test "via macro" do + type = Schema.__absinthe_type__(:via_macro) + + 
assert %{description: "Description via macro"} = type + assert %{description: "Description via macro"} = type.fields.foo + end + + test "via attribute" do + type = Schema.__absinthe_type__(:via_attribute) + + assert %{description: "Description via attribute"} = type + assert %{description: "Description via attribute"} = type.fields.foo + end + + test "via module attribute" do + type = Schema.__absinthe_type__(:via_module_attribute) + + assert %{description: "Description via module attribute"} = type + assert %{description: "Description via module attribute"} = type.fields.foo + end + + test "via function" do + type = Schema.__absinthe_type__(:via_function) + + assert %{description: "Description via function"} = type + assert %{description: "Description via function"} = type.fields.foo + end + + test "via SDL" do + type = Schema.__absinthe_type__(:via_sdl) + + assert %{description: "Description via SDL"} = type + assert %{description: "Description via SDL"} = type.fields.foo + end + + test "on Enum" do + type = Schema.__absinthe_type__(:on_enum) + + assert %{description: "Description on Enum"} = type + assert %{description: "Description on Enum"} = type.values.foo + end + + test "on Scalar" do + type = Schema.__absinthe_type__(:on_scalar) + + assert %{description: "Description on Scalar"} = type + end + end +end diff --git a/test/absinthe/phase/validation/known_type_names_test.exs b/test/absinthe/phase/validation/known_type_names_test.exs index 3a16f915eb..9d6f5db94f 100644 --- a/test/absinthe/phase/validation/known_type_names_test.exs +++ b/test/absinthe/phase/validation/known_type_names_test.exs @@ -7,24 +7,26 @@ defmodule Absinthe.Phase.Validation.KnownTypeNamesTest do alias Absinthe.Blueprint - def unknown_type(:variable_definition, name, line) do + def unknown_type(type, name, line, custom_error_message \\ nil) + + def unknown_type(:variable_definition, name, line, custom_error_message) do bad_value( Blueprint.Document.VariableDefinition, - error_message(name), 
+ custom_error_message || error_message(name), line, &(Blueprint.TypeReference.unwrap(&1.type).name == name) ) end - def unknown_type(:named_type_condition, name, line) do + def unknown_type(:named_type_condition, name, line, _) do unknown_type_condition(Blueprint.Document.Fragment.Named, name, line) end - def unknown_type(:spread_type_condition, name, line) do + def unknown_type(:spread_type_condition, name, line, _) do unknown_type_condition(Blueprint.Document.Fragment.Spread, name, line) end - def unknown_type(:inline_type_condition, name, line) do + def unknown_type(:inline_type_condition, name, line, _) do unknown_type_condition(Blueprint.Document.Fragment.Inline, name, line) end @@ -61,7 +63,7 @@ defmodule Absinthe.Phase.Validation.KnownTypeNamesTest do test "unknown type names are invalid" do assert_fails_validation( """ - query Foo($var: JumbledUpLetters) { + query Foo($var: JumbledUpLetters, $foo: Boolen!, $bar: [Bar!]) { user(id: 4) { name pets { ... on Badger { name }, ...PetFields } @@ -74,6 +76,13 @@ defmodule Absinthe.Phase.Validation.KnownTypeNamesTest do [], [ unknown_type(:variable_definition, "JumbledUpLetters", 1), + unknown_type( + :variable_definition, + "Boolen", + 1, + ~s(Unknown type "Boolen". Did you mean "Alien" or "Boolean"?) 
+ ), + unknown_type(:variable_definition, "Bar", 1), unknown_type(:inline_type_condition, "Badger", 4), unknown_type(:named_type_condition, "Peettt", 7) ] diff --git a/test/absinthe/pipeline_test.exs b/test/absinthe/pipeline_test.exs index 4a13ebf478..c4ac6e78d4 100644 --- a/test/absinthe/pipeline_test.exs +++ b/test/absinthe/pipeline_test.exs @@ -21,23 +21,93 @@ defmodule Absinthe.PipelineTest do Pipeline.for_document(Schema) |> Pipeline.upto(Phase.Blueprint) - assert {:ok, %Blueprint{}, [Phase.Blueprint, Phase.Parse]} = Pipeline.run(@query, pipeline) + assert {:ok, %Blueprint{}, [Phase.Blueprint, Phase.Parse, Phase.Telemetry, Phase.Init]} = + Pipeline.run(@query, pipeline) end end - describe ".run an idl" do + describe ".run handles pipelines that exclude Execution and Subscription phases" do + defmodule ValidationOnlySchema do + use Absinthe.Schema + + query do + field :foo, :Stuff do + resolve fn _, _, _ -> nil end + end + end + + object :Stuff do + field :bar, :string + end + end + + @goodQuery """ + { foo { bar } } + """ + + @badQuery """ + { noFoo { bar } } + """ + test "well-formed query" do + {:ok, %{execution: %{validation_errors: validation_errors}}, _} = + validation_only_query(@goodQuery) + + assert length(validation_errors) == 0 + end + + test "ill-formed query" do + {:ok, %{execution: %{validation_errors: validation_errors}}, _} = + validation_only_query(@badQuery) + + refute length(validation_errors) == 0 + end + + defp validation_only_query(query) do + pipeline = + Pipeline.for_document(ValidationOnlySchema) + |> Pipeline.without(Phase.Subscription.SubscribeSelf) + |> Pipeline.without(Phase.Document.Execution.Resolution) + + Pipeline.run(query, pipeline) + end + end + + describe "default pipeline accepts possible inputs" do @query """ - type Person { - name: String! 
- } + { foo { bar } } """ - test "can create a blueprint without a prototype schema" do - assert {:ok, %Blueprint{}, _} = Pipeline.run(@query, Pipeline.for_schema(nil)) + test "query string" do + pipeline = + Pipeline.for_document(Schema) + |> Pipeline.upto(Phase.Blueprint) + + pipeline_input = @query + + assert {:ok, %Blueprint{operations: [%{selections: [%{name: "foo"}]}]}, _phases} = + Pipeline.run(pipeline_input, pipeline) + end + + test "language source" do + pipeline = + Pipeline.for_document(Schema) + |> Pipeline.upto(Phase.Blueprint) + + pipeline_input = %Absinthe.Language.Source{body: @query} + + assert {:ok, %Blueprint{operations: [%{selections: [%{name: "foo"}]}]}, _phases} = + Pipeline.run(pipeline_input, pipeline) end - test "can create a blueprint with a prototype schema" do - assert {:ok, %Blueprint{}, _} = Pipeline.run(@query, Pipeline.for_schema(Schema)) + test "blueprint" do + pipeline = + Pipeline.for_document(Schema) + |> Pipeline.upto(Phase.Blueprint) + + pipeline_input = %Blueprint{input: @query} + + assert {:ok, %Blueprint{operations: [%{selections: [%{name: "foo"}]}]}, _phases} = + Pipeline.run(pipeline_input, pipeline) end end diff --git a/test/absinthe/resolution/middleware_test.exs b/test/absinthe/resolution/middleware_test.exs index 5420c68fb9..1dcb56907f 100644 --- a/test/absinthe/resolution/middleware_test.exs +++ b/test/absinthe/resolution/middleware_test.exs @@ -116,7 +116,7 @@ defmodule Absinthe.MiddlewareTest do assert [ %{ - locations: [%{column: 0, line: 1}], + locations: [%{column: 2, line: 1}], message: "unauthorized", path: ["authenticated"] } @@ -132,7 +132,7 @@ defmodule Absinthe.MiddlewareTest do assert [ %{ - locations: [%{column: 0, line: 1}], + locations: [%{column: 16, line: 1}], message: "unauthorized", path: ["public", "email"] } @@ -159,7 +159,7 @@ defmodule Absinthe.MiddlewareTest do assert [ %{ - locations: [%{column: 0, line: 1}], + locations: [%{column: 25, line: 1}], message: "unauthorized", path: 
["returnsPrivateObject", "key"] } diff --git a/test/absinthe/resolution/projector_test.exs b/test/absinthe/resolution/projector_test.exs index 4b50e4222e..6c7acd956e 100644 --- a/test/absinthe/resolution/projector_test.exs +++ b/test/absinthe/resolution/projector_test.exs @@ -1,5 +1,5 @@ defmodule Absinthe.Resolution.ProjectorTest do - use ExUnit.Case, async: true + use Absinthe.Case, async: true # describe "merging" do # test "asdf" diff --git a/test/absinthe/resolution_test.exs b/test/absinthe/resolution_test.exs index aa56bc5ccf..3e487b4afb 100644 --- a/test/absinthe/resolution_test.exs +++ b/test/absinthe/resolution_test.exs @@ -1,5 +1,5 @@ defmodule Absinthe.ResolutionTest do - use ExUnit.Case, async: true + use Absinthe.Case, async: true defmodule Schema do use Absinthe.Schema @@ -26,6 +26,10 @@ defmodule Absinthe.ResolutionTest do {:ok, nil} end end + + field :invalid_resolver, :string do + resolve("bogus") + end end end @@ -61,4 +65,16 @@ defmodule Absinthe.ResolutionTest do assert ["id", "name"] == fields end + + test "invalid resolver" do + doc = """ + { invalidResolver } + """ + + assert_raise Absinthe.ExecutionError, + ~r/Field resolve property must be a 2 arity anonymous function, 3 arity\nanonymous function, or a `{Module, :function}` tuple.\n\nInstead got: \"bogus\"\n\nResolving field:\n\n invalidResolver/, + fn -> + {:ok, _} = Absinthe.run(doc, Schema) + end + end end diff --git a/test/absinthe/schema/experimental_test.exs b/test/absinthe/schema/experimental_test.exs new file mode 100644 index 0000000000..a8b3aa229c --- /dev/null +++ b/test/absinthe/schema/experimental_test.exs @@ -0,0 +1,75 @@ +defmodule Absinthe.Schema.ExperimentalTest do + use Absinthe.Case, async: true + + @moduletag :experimental + + defmodule Schema do + use Absinthe.Schema + + query do + field :user, non_null(:user) do + resolve fn _, _ -> + {:ok, %{first_name: "Bruce", last_name: "Williams"}} + end + end + + field :hello, :string do + arg :name, :string + + resolve fn 
%{name: name}, _ -> + {:ok, "hello #{name}"} + end + end + end + + @desc "user" + object :user do + @desc "their full name" + field :full_name, :string do + resolve fn user, _, _ -> + {:ok, "#{user.first_name} #{user.last_name}"} + end + end + end + end + + describe "__absinthe_blueprint__/0" do + test "returns the blueprint" do + assert 2 == + length( + Schema.__absinthe_blueprint__().schema_definitions + |> List.first() + |> Map.fetch!(:type_definitions) + ) + end + end + + describe "type lookup" do + test "it works on objects" do + assert %Absinthe.Type.Object{} = type = Absinthe.Schema.lookup_type(Schema, :user) + assert %{fields: %{full_name: field}} = type + assert field.identifier == :full_name + assert field.middleware != [] + end + end + + test "simple" do + query = """ + { user { fullName }} + """ + + assert %Absinthe.Type.Object{} = type = Absinthe.Schema.lookup_type(Schema, :query) + assert %{fields: %{user: _field}} = type + + assert {:ok, %{data: %{"user" => %{"fullName" => "Bruce Williams"}}}} == + Absinthe.run(query, Schema) + end + + test "simple input" do + query = """ + { hello(name: "bob") } + """ + + assert {:ok, %{data: %{"hello" => "hello bob"}}} == Absinthe.run(query, Schema) + end +end diff --git a/test/absinthe/schema/hydrate_builtins_test.exs b/test/absinthe/schema/hydrate_builtins_test.exs new file mode 100644 index 0000000000..599cb0b09d --- /dev/null +++ b/test/absinthe/schema/hydrate_builtins_test.exs @@ -0,0 +1,30 @@ +defmodule HydrateBuiltinsTest do + use ExUnit.Case, async: true + + defmodule Schema do + use Absinthe.Schema + + query do + field :value, :float do + # intentionally a float + resolve fn _, _, _ -> {:ok, 1} end + end + end + + def hydrate(%Absinthe.Blueprint.Schema.ScalarTypeDefinition{identifier: :float}, _) do + {:serialize, &__MODULE__.serialize_float/1} + end + + def hydrate(_, _) do + [] + end + + def serialize_float(number) when is_number(number) do + number * 1.0 + end + end + + test "we can override the builtin 
scalars" do + assert {:ok, %{data: %{"value" => 1.0}}} == Absinthe.run("{ value }", Schema) + end +end diff --git a/test/absinthe/schema/hydrate_dynamic_values_test.exs b/test/absinthe/schema/hydrate_dynamic_values_test.exs new file mode 100644 index 0000000000..1af5a42160 --- /dev/null +++ b/test/absinthe/schema/hydrate_dynamic_values_test.exs @@ -0,0 +1,38 @@ +defmodule HydrateDynamicValuesTest do + use ExUnit.Case, async: true + + defmodule Schema do + use Absinthe.Schema + + enum :color do + value :red + value :blue + value :green + end + + query do + field :all, list_of(:color) do + resolve fn _, _, _ -> {:ok, [1, 2, 3]} end + end + end + + def hydrate( + %Absinthe.Blueprint.Schema.EnumValueDefinition{identifier: identifier}, + [%Absinthe.Blueprint.Schema.EnumTypeDefinition{identifier: :color}] + ) do + {:as, color_map(identifier)} + end + + def hydrate(_, _) do + [] + end + + defp color_map(:red), do: 1 + defp color_map(:blue), do: 2 + defp color_map(:green), do: 3 + end + + test "can hydrate enum values dynamically" do + assert {:ok, %{data: %{"all" => ["RED", "BLUE", "GREEN"]}}} == Absinthe.run("{ all }", Schema) + end +end diff --git a/test/absinthe/schema/manipulation_test.exs b/test/absinthe/schema/manipulation_test.exs new file mode 100644 index 0000000000..674086a2f5 --- /dev/null +++ b/test/absinthe/schema/manipulation_test.exs @@ -0,0 +1,354 @@ +defmodule Absinthe.Schema.ManipulationTest do + use Absinthe.Case, async: true + + alias Absinthe.Phase.Schema.Validation.TypeNamesAreReserved + + defmodule ExtTypes do + use Absinthe.Schema.Notation + + object :some_dyn_obj do + field :some_dyn_integer, :integer do + meta :some_string_meta, "some_dyn_integer meta" + end + + field :some_dyn_string, :string do + meta :some_string_meta, "some_dyn_string meta" + resolve fn _, _ -> {:ok, "some_string_val"} end + end + end + end + + defmodule CustomIntrospectionTypes do + use Absinthe.Schema.Notation + + object :custom_introspection_helper do + description "Simple 
Helper Object used to define blueprint fields" + + field :simple_string, :string do + description "customer introspection field" + + resolve fn _, %{schema: schema} -> + {:ok, "This is a new introspection type on #{inspect(schema)}"} + end + end + + field :some_string_meta, :string do + description "Expose some_string_meta" + + resolve fn _, + %{ + source: source + } -> + private = source[:__private__] || [] + meta_items = private[:meta] || [] + + {:ok, meta_items[:some_string_meta]} + end + end + end + end + + defmodule MyAppWeb.CustomSchemaPhase do + alias Absinthe.{Phase, Pipeline, Blueprint} + + # Add this module to the pipeline of phases + # to run on the schema + def pipeline(pipeline) do + Pipeline.insert_after(pipeline, Phase.Schema.TypeImports, __MODULE__) + end + + # Here's the blueprint of the schema, let's do whatever we want with it. + def run(blueprint = %Blueprint{}, _) do + custom_introspection_types = Blueprint.types_by_name(CustomIntrospectionTypes) + custom_introspection_fields = custom_introspection_types["CustomIntrospectionHelper"] + + simple_string_field = + Blueprint.find_field(custom_introspection_fields, "simple_string") + |> TypeNamesAreReserved.make_reserved() + + some_string_meta_field = + Blueprint.find_field(custom_introspection_fields, "some_string_meta") + |> TypeNamesAreReserved.make_reserved() + + blueprint = + blueprint + |> Blueprint.extend_fields(ExtTypes) + |> Blueprint.add_field("__Type", simple_string_field) + |> Blueprint.add_field("__Field", simple_string_field) + |> Blueprint.add_field("__Field", some_string_meta_field) + + {:ok, blueprint} + end + end + + defmodule MyAppWeb.CustomSchemaEnumTypes do + alias Absinthe.Blueprint.Schema + alias Absinthe.Schema.Notation + alias Absinthe.{Blueprint, Pipeline, Phase} + + def pipeline(pipeline) do + Pipeline.insert_after(pipeline, Phase.Schema.TypeImports, __MODULE__) + end + + def run(blueprint = %Blueprint{}, _) do + %{schema_definitions: [schema]} = blueprint + + new_enum = 
build_dynamic_enum() + + schema = + Map.update!(schema, :type_definitions, fn type_definitions -> + [new_enum | type_definitions] + end) + + {:ok, %{blueprint | schema_definitions: [schema]}} + end + + def build_dynamic_enum() do + %Schema.EnumTypeDefinition{ + name: "Categories", + identifier: :categories, + module: __MODULE__, + __reference__: Notation.build_reference(__ENV__), + values: [ + %Schema.EnumValueDefinition{ + identifier: :foo, + value: :foo, + name: "FOO", + module: __MODULE__, + __reference__: Notation.build_reference(__ENV__) + }, + %Schema.EnumValueDefinition{ + identifier: :bar, + value: :foo, + name: "BAR", + module: __MODULE__, + __reference__: Notation.build_reference(__ENV__) + } + ] + } + end + end + + defmodule MyAppWeb.Schema do + use Absinthe.Schema + + @pipeline_modifier MyAppWeb.CustomSchemaPhase + @pipeline_modifier MyAppWeb.CustomSchemaEnumTypes + + object :some_obj do + field :some_integer, :integer do + meta :some_string_meta, "some_integer meta" + end + + field :some_string, :string do + meta :some_string_meta, "some_string meta" + resolve fn _, _ -> {:ok, "some_string_val"} end + end + end + + object :some_dyn_obj do + field :non_dyn_integer, :integer do + meta :some_string_meta, "non_dyn_integer meta" + end + + field :non_dyn_string, :string, meta: [some_string_meta: "non_dyn_string meta"] do + resolve fn _, _ -> {:ok, "some_string_val"} end + end + end + + query do + field :some_field, :some_obj do + meta :some_field_meta, "some field meta" + resolve fn _, _ -> {:ok, %{some_integer: 1}} end + end + end + end + + test "Schema works" do + q = """ + query { + some_field { + some_integer + some_string + } + } + """ + + expected = %{ + data: %{"some_field" => %{"some_integer" => 1, "some_string" => "some_string_val"}} + } + + actual = Absinthe.run!(q, MyAppWeb.Schema) + + assert expected == actual + end + + test "enum types work" do + q = """ + query { + __type(name: "Categories") { + enumValues { + name + } + } + } + """ + + 
expected = %{data: %{"__type" => %{"enumValues" => [%{"name" => "BAR"}, %{"name" => "FOO"}]}}} + + actual = Absinthe.run!(q, MyAppWeb.Schema) + + assert expected == actual + end + + test "Introspection works" do + q = """ + query { + __type(name: "SomeObj") { + fields { + name + type { + name + } + } + } + } + """ + + expected = %{ + data: %{ + "__type" => %{ + "fields" => [ + %{"name" => "someInteger", "type" => %{"name" => "Int"}}, + %{"name" => "someString", "type" => %{"name" => "String"}} + ] + } + } + } + + actual = Absinthe.run!(q, MyAppWeb.Schema) + + assert expected == actual + end + + test "Custom introspection works" do + q = """ + query { + __type(name: "SomeObj") { + __simple_string + fields { + name + type { + name + } + } + } + } + """ + + expected = %{ + data: %{ + "__type" => %{ + "__simple_string" => + "This is a new introspection type on Absinthe.Schema.ManipulationTest.MyAppWeb.Schema", + "fields" => [ + %{"name" => "someInteger", "type" => %{"name" => "Int"}}, + %{"name" => "someString", "type" => %{"name" => "String"}} + ] + } + } + } + + actual = Absinthe.run!(q, MyAppWeb.Schema) + + assert expected == actual + end + + test "Exposing meta data via introspection works" do + q = """ + query { + __type(name: "SomeObj") { + fields { + name + type { + name + } + __some_string_meta + } + } + } + """ + + expected = %{ + data: %{ + "__type" => %{ + "fields" => [ + %{ + "name" => "someInteger", + "type" => %{"name" => "Int"}, + "__some_string_meta" => "some_integer meta" + }, + %{ + "name" => "someString", + "type" => %{"name" => "String"}, + "__some_string_meta" => "some_string meta" + } + ] + } + } + } + + actual = Absinthe.run!(q, MyAppWeb.Schema) + + assert expected == actual + end + + test "Extending Objects works" do + q = """ + query { + __type(name: "SomeDynObj") { + fields { + name + type { + name + } + __some_string_meta + } + } + } + """ + + expected = %{ + data: %{ + "__type" => %{ + "fields" => [ + %{ + "name" => "nonDynInteger", + "type" 
=> %{"name" => "Int"}, + "__some_string_meta" => "non_dyn_integer meta" + }, + %{ + "name" => "nonDynString", + "type" => %{"name" => "String"}, + "__some_string_meta" => "non_dyn_string meta" + }, + %{ + "name" => "someDynInteger", + "type" => %{"name" => "Int"}, + "__some_string_meta" => "some_dyn_integer meta" + }, + %{ + "name" => "someDynString", + "type" => %{"name" => "String"}, + "__some_string_meta" => "some_dyn_string meta" + } + ] + } + } + } + + actual = Absinthe.run!(q, MyAppWeb.Schema) + + assert expected == actual + end +end diff --git a/test/absinthe/schema/notation/experimental/argument_test.exs b/test/absinthe/schema/notation/experimental/argument_test.exs new file mode 100644 index 0000000000..a4e2a03ac7 --- /dev/null +++ b/test/absinthe/schema/notation/experimental/argument_test.exs @@ -0,0 +1,54 @@ +defmodule Absinthe.Schema.Notation.Experimental.ArgumentTest do + use Absinthe.Case, async: true + import ExperimentalNotationHelpers + + @moduletag :experimental + + defmodule Definition do + use Absinthe.Schema.Notation + + @desc "Object" + object :obj do + @desc "Field" + field :field, :string do + arg :plain, :string + + arg :with_attrs, type: :boolean, name: "HasAttrs" + + @desc "Desc One" + arg :with_desc, :string + + @desc "Desc Three" + arg :with_desc_attr, type: :string, description: "overridden" + + arg :with_desc_attr_literal, type: :string, description: "Desc Four" + end + end + end + + describe "arg" do + test "with a bare type" do + assert %{name: "plain", description: nil, type: :string, identifier: :plain} = + lookup_argument(Definition, :obj, :field, :plain) + end + + test "with attrs" do + assert %{name: "HasAttrs", type: :boolean, identifier: :with_attrs} = + lookup_argument(Definition, :obj, :field, :with_attrs) + end + + test "with @desc" do + assert %{description: "Desc One"} = lookup_argument(Definition, :obj, :field, :with_desc) + end + + test "with @desc and a description attr" do + assert %{description: "Desc Three"} = + 
lookup_argument(Definition, :obj, :field, :with_desc_attr) + end + + test "with a description attribute as a literal" do + assert %{description: "Desc Four"} = + lookup_argument(Definition, :obj, :field, :with_desc_attr_literal) + end + end +end diff --git a/test/absinthe/schema/notation/experimental/field_test.exs b/test/absinthe/schema/notation/experimental/field_test.exs new file mode 100644 index 0000000000..df786121b9 --- /dev/null +++ b/test/absinthe/schema/notation/experimental/field_test.exs @@ -0,0 +1,80 @@ +defmodule Absinthe.Schema.Notation.Experimental.FieldTest do + use Absinthe.Case, async: true + import ExperimentalNotationHelpers + + @moduletag :experimental + + defmodule Definition do + use Absinthe.Schema.Notation + + @desc "Object description" + object :obj do + field :plain, :string + + field :with_block, :string do + end + + field :with_attrs, type: :boolean, name: "HasAttrs" + + field :with_attrs_and_body, type: :boolean, name: "HasAttrsAndBody" do + end + + @desc "Desc One" + field :with_desc, :string + + @desc "Desc Two" + field :with_desc_and_block, :string do + end + + @desc "Desc Three" + field :with_desc_attr, type: :string, description: "overridden" + + field :with_desc_attr_literal, type: :string, description: "Desc Four" + + @desc "Desc Five" + field :with_desc_attr_mod, type: :string, description: @desc_five + end + end + + describe "field" do + test "without a body and with a bare type" do + assert %{name: "plain", description: nil, type: :string, identifier: :plain} = + lookup_field(Definition, :obj, :plain) + end + + test "with a body and with a bare type" do + assert %{name: "with_block", type: :string, identifier: :with_block} = + lookup_field(Definition, :obj, :with_block) + end + + test "with attrs and without a body" do + assert %{name: "HasAttrs", type: :boolean, identifier: :with_attrs} = + lookup_field(Definition, :obj, :with_attrs) + end + + test "with attrs and with a body" do + assert %{name: "HasAttrsAndBody", type: 
:boolean, identifier: :with_attrs_and_body} = + lookup_field(Definition, :obj, :with_attrs_and_body) + end + + test "with @desc and without a block" do + assert %{description: "Desc One"} = lookup_field(Definition, :obj, :with_desc) + end + + test "with @desc and with a block" do + assert %{description: "Desc Two"} = lookup_field(Definition, :obj, :with_desc_and_block) + end + + test "with @desc and a description attr" do + assert %{description: "Desc Three"} = lookup_field(Definition, :obj, :with_desc_attr) + end + + test "with a description attribute as a literal" do + assert %{description: "Desc Four"} = lookup_field(Definition, :obj, :with_desc_attr_literal) + end + + test "with a description attribute from a module attribute" do + assert %{description: "Desc Five"} = lookup_field(Definition, :obj, :with_desc_attr_mod) + end + end +end diff --git a/test/absinthe/schema/notation/experimental/import_fields_test.exs b/test/absinthe/schema/notation/experimental/import_fields_test.exs new file mode 100644 index 0000000000..8c95c1f0f3 --- /dev/null +++ b/test/absinthe/schema/notation/experimental/import_fields_test.exs @@ -0,0 +1,124 @@ +defmodule Absinthe.Schema.Notation.Experimental.ImportFieldsTest do + use Absinthe.Case, async: true + import ExperimentalNotationHelpers + + @moduletag :experimental + + defmodule Source do + use Absinthe.Schema.Notation + + object :source do + field :one, :string do + end + + field :two, :string do + end + + field :three, :string do + end + end + end + + defmodule WithoutOptions do + use Absinthe.Schema.Notation + + object :internal_source do + field :one, :string do + end + + field :two, :string do + end + + field :three, :string do + end + end + + object :internal_target do + import_fields :internal_source + end + + object :external_target do + import_fields {Source, :source} + end + end + + defmodule UsingOnlyOption do + use Absinthe.Schema.Notation + + object :internal_source do + field :one, :string do + end + + field :two, 
:string do + end + + field :three, :string do + end + end + + object :internal_target do + import_fields :internal_source, only: [:one, :two] + end + + object :external_target do + import_fields {Source, :source}, only: [:one, :two] + end + end + + defmodule UsingExceptOption do + use Absinthe.Schema.Notation + + object :internal_source do + field :one, :string do + end + + field :two, :string do + end + + field :three, :string do + end + end + + object :internal_target do + import_fields :internal_source, except: [:one, :two] + end + + object :external_target do + import_fields {Source, :source}, except: [:one, :two] + end + end + + describe "import_fields" do + test "without options from an internal source" do + assert [{:internal_source, []}] == imports(WithoutOptions, :internal_target) + end + + test "without options from an external source" do + assert [{{Source, :source}, []}] == imports(WithoutOptions, :external_target) + end + + test "with :only from an internal source" do + assert [{:internal_source, only: [:one, :two]}] == + imports(UsingOnlyOption, :internal_target) + end + + test "with :only from external source" do + assert [{{Source, :source}, only: [:one, :two]}] == + imports(UsingOnlyOption, :external_target) + end + + test "with :except from an internal source" do + assert [{:internal_source, [except: [:one, :two]]}] == + imports(UsingExceptOption, :internal_target) + end + + test "with :except from external source" do + assert [{{Source, :source}, [except: [:one, :two]]}] == + imports(UsingExceptOption, :external_target) + end + end + + defp imports(module, type) do + lookup_type(module, type).imports + end +end diff --git a/test/absinthe/schema/notation/experimental/import_sdl_test.exs b/test/absinthe/schema/notation/experimental/import_sdl_test.exs new file mode 100644 index 0000000000..7d96268291 --- /dev/null +++ b/test/absinthe/schema/notation/experimental/import_sdl_test.exs @@ -0,0 +1,644 @@ +defmodule 
Absinthe.Schema.Notation.Experimental.ImportSdlTest do + use Absinthe.Case, async: true + import ExperimentalNotationHelpers + + @moduletag :experimental + @moduletag :sdl + + defmodule WithFeatureDirective do + use Absinthe.Schema.Prototype + + directive :feature do + arg :name, non_null(:string) + on [:interface] + end + end + + defmodule Definition do + use Absinthe.Schema + + @prototype_schema WithFeatureDirective + + # Embedded SDL + import_sdl """ + directive @foo(name: String!) repeatable on SCALAR | OBJECT + directive @bar(name: String!) on SCALAR | OBJECT + + type Query { + "A list of posts" + posts(filterBy: PostFilter, reverse: Boolean): [Post] + admin: User! + droppedField: String + defaultsOfVariousFlavors( + name: String = "Foo" + count: Int = 3 + average: Float = 3.14 + category: Category = NEWS + categories: [Category] = [NEWS] + valid: Boolean = false + complex: ComplexInput = {nested: "String"} + ): String + metaEcho: String + scalarEcho(input: CoolScalar): CoolScalar + namedThings: [Named] + titledThings: [Titled] + playerField: PlayerInterface + } + + scalar CoolScalar + + input ComplexInput { + nested: String + } + + type Comment { + author: User! + subject: Post! + order: Int + deprecatedField: String @deprecated + deprecatedFieldWithReason: String @deprecated(reason: "Reason") + } + + enum Category { + NEWS + OPINION + } + + enum PostState { + SUBMITTED + ACCEPTED + REJECTED + } + + interface Named { + name: String! + } + + type Human implements Named { + name: String! + age: Int! + } + + type City implements Named { + name: String! + population: Int! + } + + interface Titled @feature(name: "bar") { + title: String! + } + + type Book implements Titled { + title: String! + pages: Int! + } + + type Movie implements Titled { + title: String! + duration: Int!
+ } + + interface PlayerInterface { + metadata: PlayerMetadataInterface + } + + interface PlayerMetadataInterface { + displayName: String + } + + type HumanPlayer implements PlayerInterface { + metadata: HumanMetadata + } + + type HumanMetadata implements PlayerMetadataInterface { + displayName: String + } + + scalar B + + union SearchResult = Post | User + union Content = Post | Comment + """ + + # Read SDL from file manually at compile-time + import_sdl File.read!("test/support/fixtures/import_sdl_binary_fn.graphql") + + # Read from file at compile time (with support for automatic recompilation) + import_sdl path: "test/support/fixtures/import_sdl_path_option.graphql" + import_sdl path: Path.join("test/support", "fixtures/import_sdl_path_option_fn.graphql") + + def get_posts(_, _, _) do + posts = [ + %{title: "Foo", body: "A body.", author: %{name: "Bruce"}}, + %{title: "Bar", body: "A body.", author: %{name: "Ben"}} + ] + + {:ok, posts} + end + + def upcase_title(post, _, _) do + {:ok, Map.get(post, :title) |> String.upcase()} + end + + def meta_echo(_source, _args, resolution) do + {:ok, get_in(resolution.definition.schema_node.__private__, [:meta, :echo])} + end + + def scalar_echo(_source, %{input: scalar}, _resolution) do + {:ok, scalar} + end + + def named_things(_source, _args, _resolution) do + {:ok, [%{name: "Sue", age: 38}, %{name: "Portland", population: 647_000}]} + end + + def titled_things(_source, _args, _resolution) do + {:ok, [%{title: "The Matrix", duration: 150}, %{title: "Origin of Species", pages: 502}]} + end + + def hydrate(%{identifier: :admin}, [%{identifier: :query} | _]) do + {:description, "The admin"} + end + + def hydrate(%{identifier: :filter_by}, [%{identifier: :posts} | _]) do + {:description, "A filter argument"} + end + + def hydrate(%{identifier: :posts}, [%{identifier: :query} | _]) do + {:resolve, &__MODULE__.get_posts/3} + end + + def hydrate(%{identifier: :meta_echo}, [%{identifier: :query} | _]) do + [ + {:meta, echo: 
"Hello"}, + {:resolve, &__MODULE__.meta_echo/3} + ] + end + + def hydrate(%{name: "CoolScalar"}, _) do + [ + {:parse, &__MODULE__.parse_cool_scalar/1}, + {:serialize, &__MODULE__.serialize_cool_scalar/1} + ] + end + + def hydrate(%{identifier: :scalar_echo}, [%{identifier: :query} | _]) do + [{:middleware, {Absinthe.Resolution, &__MODULE__.scalar_echo/3}}] + end + + def hydrate(%{identifier: :titled}, _) do + [{:resolve_type, &__MODULE__.titled_resolve_type/2}] + end + + def hydrate(%{identifier: :player_interface}, _) do + [{:resolve_type, &__MODULE__.player_interface/2}] + end + + def hydrate(%{identifier: :player_metadata_interface}, _) do + [{:resolve_type, &__MODULE__.player_metadata_interface/2}] + end + + def hydrate(%{identifier: :content}, _) do + [{:resolve_type, &__MODULE__.content_resolve_type/2}] + end + + def hydrate(%{identifier: :human}, _) do + [{:is_type_of, &__MODULE__.human_is_type_of/1}] + end + + def hydrate(%{identifier: :city}, _) do + [{:is_type_of, &__MODULE__.city_is_type_of/1}] + end + + def hydrate(%{identifier: :named_things}, [%{identifier: :query} | _]) do + [{:resolve, &__MODULE__.named_things/3}] + end + + def hydrate(%{identifier: :titled_things}, [%{identifier: :query} | _]) do + [{:resolve, &__MODULE__.titled_things/3}] + end + + def hydrate(%Absinthe.Blueprint{}, _) do + %{ + query: %{ + posts: %{ + reverse: {:description, "Just reverse the list, if you want"} + } + }, + post: %{ + upcased_title: [ + {:description, "The title, but upcased"}, + {:resolve, &__MODULE__.upcase_title/3} + ] + }, + search_result: [ + resolve_type: &__MODULE__.search_result_resolve_type/2 + ] + } + end + + def hydrate(_node, _ancestors) do + [] + end + + def city_is_type_of(%{population: _}), do: true + def city_is_type_of(_), do: false + + def human_is_type_of(%{age: _}), do: true + def human_is_type_of(_), do: false + + def titled_resolve_type(%{duration: _}, _), do: :movie + def titled_resolve_type(%{pages: _}, _), do: :book + + def 
content_resolve_type(_, _), do: :comment + + def search_result_resolve_type(_, _), do: :post + + def parse_cool_scalar(value), do: {:ok, value} + def serialize_cool_scalar(%{value: value}), do: value + + def player_interface(_, _), do: :human_player + def player_metadata_interface(_, _), do: :human_metadata + end + + describe "custom prototype schema" do + test "is set" do + assert Definition.__absinthe_prototype_schema__() == WithFeatureDirective + end + end + + describe "locations" do + test "have evaluated file values" do + Absinthe.Blueprint.prewalk(Definition.__absinthe_blueprint__(), nil, fn + %{__reference__: %{location: %{file: file}}} = node, _ -> + assert is_binary(file) + {node, nil} + + node, _ -> + {node, nil} + end) + end + end + + describe "directives" do + test "can be defined" do + assert %{name: "foo", identifier: :foo, locations: [:object, :scalar], repeatable: true} = + lookup_compiled_directive(Definition, :foo) + + assert %{name: "bar", identifier: :bar, locations: [:object, :scalar]} = + lookup_compiled_directive(Definition, :bar) + end + end + + describe "deprecations" do + test "can be defined without a reason" do + object = lookup_compiled_type(Definition, :comment) + assert %{deprecation: %{}} = object.fields.deprecated_field + end + + test "can be defined with a reason" do + object = lookup_compiled_type(Definition, :comment) + assert %{deprecation: %{reason: "Reason"}} = object.fields.deprecated_field_with_reason + end + end + + describe "query root type" do + test "is defined" do + assert %{name: "Query", identifier: :query} = lookup_type(Definition, :query) + end + + test "defines fields" do + assert %{name: "posts"} = lookup_field(Definition, :query, :posts) + end + end + + describe "non-root type" do + test "is defined" do + assert %{name: "Post", identifier: :post} = lookup_type(Definition, :post) + end + + test "defines fields" do + assert %{name: "title"} = lookup_field(Definition, :post, :title) + assert %{name: "body"} = 
lookup_field(Definition, :post, :body) + end + end + + describe "descriptions" do + test "work on objects" do + assert %{description: "A submitted post"} = lookup_type(Definition, :post) + end + + test "work on fields" do + assert %{description: "A list of posts"} = lookup_field(Definition, :query, :posts) + end + + test "work on fields, defined deeply" do + assert %{description: "The title, but upcased"} = + lookup_compiled_field(Definition, :post, :upcased_title) + end + + test "work on arguments, defined deeply" do + assert %{description: "Just reverse the list, if you want"} = + lookup_compiled_argument(Definition, :query, :posts, :reverse) + end + + test "can be multiline" do + assert %{description: "The post author\n(is a user)"} = + lookup_field(Definition, :post, :author) + end + + test "can be added by hydrating a field" do + assert %{description: "The admin"} = lookup_compiled_field(Definition, :query, :admin) + end + + test "can be added by hydrating an argument" do + field = lookup_compiled_field(Definition, :query, :posts) + assert %{description: "A filter argument"} = field.args.filter_by + end + end + + describe "union types" do + test "have correct type references" do + assert content_union = Absinthe.Schema.lookup_type(Definition, :content) + assert content_union.types == [:comment, :post] + end + + test "have resolve_type via a dedicated clause" do + assert content_union = Absinthe.Schema.lookup_type(Definition, :content) + assert content_union.resolve_type + end + + test "have resolve_type via the blueprint hydrator" do + assert search_union = Absinthe.Schema.lookup_type(Definition, :search_result) + assert search_union.resolve_type + end + end + + describe "resolve" do + test "work on fields, defined deeply" do + assert %{middleware: mw} = lookup_compiled_field(Definition, :post, :upcased_title) + assert length(mw) > 0 + end + end + + describe "multiple invocations" do + test "can add definitions" do + assert %{name: "User", identifier: :user} = 
lookup_type(Definition, :user) + end + end + + @query """ + { admin { name } } + """ + + describe "execution with root_value" do + test "works" do + assert {:ok, %{data: %{"admin" => %{"name" => "Bruce"}}}} = + Absinthe.run(@query, Definition, root_value: %{admin: %{name: "Bruce"}}) + end + end + + @query """ + { posts { title } } + """ + + describe "execution with hydration-defined resolvers" do + test "works" do + assert {:ok, %{data: %{"posts" => [%{"title" => "Foo"}, %{"title" => "Bar"}]}}} = + Absinthe.run(@query, Definition) + end + end + + @query """ + { posts { upcasedTitle } } + """ + describe "execution with deep hydration-defined resolvers" do + test "works" do + assert {:ok, + %{data: %{"posts" => [%{"upcasedTitle" => "FOO"}, %{"upcasedTitle" => "BAR"}]}}} = + Absinthe.run(@query, Definition) + end + end + + describe "hydration" do + @query """ + { metaEcho } + """ + test "allowed for meta data" do + assert {:ok, %{data: %{"metaEcho" => "Hello"}}} = Absinthe.run(@query, Definition) + end + + @query """ + { scalarEcho(input: "Hey there") } + """ + test "enables scalar creation" do + assert {:ok, %{data: %{"scalarEcho" => "Hey there"}}} = Absinthe.run(@query, Definition) + end + + @query """ + { + namedThings { + __typename + name + ... on Human { age } + ... on City { population } + } + } + """ + test "interface via is_type_of" do + assert {:ok, + %{ + data: %{ + "namedThings" => [ + %{"__typename" => "Human", "name" => "Sue", "age" => 38}, + %{"__typename" => "City", "name" => "Portland", "population" => 647_000} + ] + } + }} = Absinthe.run(@query, Definition) + end + + @query """ + { + titledThings { + __typename + title + ... on Book { pages } + ... 
on Movie { duration } + } + } + """ + test "interface via resolve_type" do + assert {:ok, + %{ + data: %{ + "titledThings" => [ + %{"__typename" => "Movie", "title" => "The Matrix", "duration" => 150}, + %{"__typename" => "Book", "title" => "Origin of Species", "pages" => 502} + ] + } + }} = Absinthe.run(@query, Definition) + end + end + + @query """ + { posts(filterBy: {name: "foo"}) { upcasedTitle } } + """ + describe "execution with multi word args" do + test "works" do + assert {:ok, + %{data: %{"posts" => [%{"upcasedTitle" => "FOO"}, %{"upcasedTitle" => "BAR"}]}}} = + Absinthe.run(@query, Definition) + end + end + + describe "Absinthe.Schema.referenced_types/1" do + test "works" do + assert Absinthe.Schema.referenced_types(Definition) + end + end + + defmodule FakerSchema do + use Absinthe.Schema + + query do + field :hello, :string + end + + import_sdl path: "test/support/fixtures/fake_definition.graphql" + end + + describe "graphql-faker schema" do + test "defines the correct types" do + type_names = + FakerSchema.__absinthe_types__() + |> Map.values() + + for type <- + ~w(fake__Locale fake__Types fake__imageCategory fake__loremSize fake__color fake__options examples__JSON) do + assert type in type_names + end + end + + test "defines the correct directives" do + directive_names = + FakerSchema.__absinthe_directives__() + |> Map.values() + + for directive <- ~w(examples) do + assert directive in directive_names + end + end + + test "default values" do + type = Absinthe.Schema.lookup_type(FakerSchema, :fake__options) + assert %{red255: _, blue255: _, green255: _} = type.fields.base_color.default_value + + type = Absinthe.Schema.lookup_type(FakerSchema, :fake__color) + assert type.fields.red255.default_value == 0 + assert type.fields.green255.default_value == 0 + assert type.fields.blue255.default_value == 0 + end + end + + test "Keyword extend not yet supported" do + schema = """ + defmodule KeywordExtend do + use Absinthe.Schema + + import_sdl " + type Movie 
{ + title: String! + } + + extend type Movie { + year: Int + } + " + end + """ + + error = ~r/Keyword `extend` is not yet supported/ + + assert_raise(Absinthe.Schema.Notation.Error, error, fn -> + Code.eval_string(schema) + end) + end + + test "Validate known directive arguments in SDL schema" do + schema = """ + defmodule SchemaWithDirectivesWithNestedArgs do + use Absinthe.Schema + + defmodule Directives do + use Absinthe.Schema.Prototype + + directive :some_directive do + on [:field_definition] + end + end + + @prototype_schema Directives + + " + type Widget { + name: String @some_directive(a: { b: {} }) + } + + type Query { + widgets: [Widget!] + } + " + |> import_sdl + end + """ + + error = ~r/Unknown argument "a" on directive "@some_directive"./ + + assert_raise(Absinthe.Schema.Error, error, fn -> + Code.eval_string(schema) + end) + end + + def handle_event(event, measurements, metadata, config) do + send(self(), {event, measurements, metadata, config}) + end + + describe "telemetry" do + setup context do + :telemetry.attach_many( + context.test, + [ + [:absinthe, :resolve, :field, :start], + [:absinthe, :resolve, :field, :stop], + [:absinthe, :execute, :operation, :start], + [:absinthe, :execute, :operation, :stop] + ], + &__MODULE__.handle_event/4, + %{} + ) + + on_exit(fn -> + :telemetry.detach(context.test) + end) + + :ok + end + + test "executes on SDL defined schemas" do + assert {:ok, + %{data: %{"posts" => [%{"upcasedTitle" => "FOO"}, %{"upcasedTitle" => "BAR"}]}}} = + Absinthe.run(@query, Definition) + + assert_receive {[:absinthe, :execute, :operation, :start], _, %{id: id}, _config} + + assert_receive {[:absinthe, :execute, :operation, :stop], _measurements, %{id: ^id}, + _config} + + assert_receive {[:absinthe, :resolve, :field, :start], _measurements, + %{resolution: %{definition: %{name: "posts"}}}, _config} + + assert_receive {[:absinthe, :resolve, :field, :stop], _measurements, + %{resolution: %{definition: %{name: "posts"}}}, _config} + end + 
end +end diff --git a/test/absinthe/schema/notation/experimental/import_types_test.exs b/test/absinthe/schema/notation/experimental/import_types_test.exs new file mode 100644 index 0000000000..6229d9ccc5 --- /dev/null +++ b/test/absinthe/schema/notation/experimental/import_types_test.exs @@ -0,0 +1,76 @@ +defmodule Absinthe.Schema.Notation.Experimental.ImportTypesTest do + use Absinthe.Case, async: true + + @moduletag :experimental + + defmodule Source do + use Absinthe.Schema.Notation + + object :one do + end + + object :two do + end + + object :three do + end + end + + defmodule WithoutOptions do + use Absinthe.Schema + + query do + end + + import_types Source + end + + defmodule UsingOnlyOption do + use Absinthe.Schema + + query do + end + + import_types(Source, only: [:one, :two]) + end + + defmodule UsingExceptOption do + use Absinthe.Schema + + query do + end + + import_types(Source, except: [:one, :two]) + end + + describe "import_types" do + test "without options" do + assert [{Source, []}] == imports(WithoutOptions) + + assert WithoutOptions.__absinthe_type__(:one) + assert WithoutOptions.__absinthe_type__(:two) + assert WithoutOptions.__absinthe_type__(:three) + end + + test "with :only" do + assert [{Source, only: [:one, :two]}] == imports(UsingOnlyOption) + + assert UsingOnlyOption.__absinthe_type__(:one) + assert UsingOnlyOption.__absinthe_type__(:two) + refute UsingOnlyOption.__absinthe_type__(:three) + end + + test "with :except" do + assert [{Source, except: [:one, :two]}] == imports(UsingExceptOption) + + refute UsingExceptOption.__absinthe_type__(:one) + refute UsingExceptOption.__absinthe_type__(:two) + assert UsingExceptOption.__absinthe_type__(:three) + end + end + + defp imports(module) do + %{schema_definitions: [schema]} = module.__absinthe_blueprint__ + schema.imports + end +end diff --git a/test/absinthe/schema/notation/experimental/object_test.exs b/test/absinthe/schema/notation/experimental/object_test.exs new file mode 100644 index 
0000000000..0ec546a68b --- /dev/null +++ b/test/absinthe/schema/notation/experimental/object_test.exs @@ -0,0 +1,66 @@ +defmodule Absinthe.Schema.Notation.Experimental.ObjectTest do + use Absinthe.Case, async: true + import ExperimentalNotationHelpers + + @moduletag :experimental + + defmodule Definition do + use Absinthe.Schema.Notation + + object :no_attrs do + end + + object :with_attr, name: "Named" do + end + + @desc "Desc One" + object :with_desc do + end + + @desc "Desc Two" + object :with_desc_attr, description: "overridden" do + end + + @modattr "Desc Three" + @desc @modattr + object :with_desc_assign do + end + + object :with_desc_attr_literal, description: "Desc Four" do + end + + @desc "Desc Five" + object :with_desc_attr_mod, description: @desc_five do + end + end + + describe "object" do + test "without attributes" do + assert %{name: "NoAttrs", identifier: :no_attrs} = lookup_type(Definition, :no_attrs) + end + + test "with a name attribute" do + assert %{name: "Named", identifier: :with_attr} = lookup_type(Definition, :with_attr) + end + + test "with a @desc and no description attr" do + assert %{description: "Desc One"} = lookup_type(Definition, :with_desc) + end + + test "with a @desc using an assignment" do + assert %{description: "Desc Three"} = lookup_type(Definition, :with_desc_assign) + end + + test "with a @desc and a description attr" do + assert %{description: "Desc Two"} = lookup_type(Definition, :with_desc_attr) + end + + test "with a description attribute as a literal" do + assert %{description: "Desc Four"} = lookup_type(Definition, :with_desc_attr_literal) + end + + test "from a module attribute" do + assert %{description: "Desc Five"} = lookup_type(Definition, :with_desc_attr_mod) + end + end +end diff --git a/test/absinthe/schema/notation/experimental/resolve_test.exs b/test/absinthe/schema/notation/experimental/resolve_test.exs new file mode 100644 index 0000000000..59e40cb187 --- /dev/null +++ 
b/test/absinthe/schema/notation/experimental/resolve_test.exs @@ -0,0 +1,90 @@ +defmodule Absinthe.Schema.Notation.Experimental.ResolveTest do + use Absinthe.Case, async: true + import ExperimentalNotationHelpers + + @moduletag :experimental + + defmodule Definition do + use Absinthe.Schema.Notation + + object :obj do + field :anon_literal, :boolean do + resolve fn _, _, _ -> + {:ok, true} + end + end + + field :local_private, :boolean do + resolve &local_private/3 + end + + field :local_public, :boolean do + resolve &local_public/3 + end + + field :remote, :boolean do + resolve &Absinthe.Schema.Notation.Experimental.ResolveTest.remote_resolve/3 + end + + field :remote_ref, :boolean do + resolve {Absinthe.Schema.Notation.Experimental.ResolveTest, :remote_resolve} + end + + field :invocation_result, :boolean do + resolve mapping(:foo) + end + end + + defp local_private(_, _, _) do + {:ok, true} + end + + def local_public(_, _, _) do + {:ok, true} + end + + def mapping(_) do + fn _, _, _ -> + {:ok, true} + end + end + end + + def remote_resolve(_, _, _) do + {:ok, true} + end + + def assert_resolver(field_identifier) do + assert %{middleware: [{:ref, module, identifier}]} = + lookup_field(Definition, :obj, field_identifier) + + assert [{{Absinthe.Resolution, :call}, _}] = + module.__absinthe_function__(identifier, :middleware) + end + + describe "resolve" do + test "when given an anonymous function literal" do + assert_resolver(:anon_literal) + end + + test "when given a local private function capture" do + assert_resolver(:local_private) + end + + test "when given a local public function capture" do + assert_resolver(:local_public) + end + + test "when given a remote public function capture" do + assert_resolver(:remote) + end + + test "when given a remote ref" do + assert_resolver(:remote_ref) + end + + test "when given the result of a function invocation" do + assert_resolver(:invocation_result) + end + end +end diff --git 
a/test/absinthe/schema/notation/import_test.exs b/test/absinthe/schema/notation/import_test.exs new file mode 100644 index 0000000000..463e6e5235 --- /dev/null +++ b/test/absinthe/schema/notation/import_test.exs @@ -0,0 +1,281 @@ +defmodule Absinthe.Schema.Notation.ImportTest do + use Absinthe.Case, async: true + + defp field_list(module, name) do + module.__absinthe_type__(name).fields + |> Enum.filter(&(!introspection?(&1))) + |> Keyword.keys() + |> Enum.sort() + end + + defp introspection?({_, field}) do + Absinthe.Type.introspection?(field) + end + + alias Absinthe.Phase + + describe "import fields" do + test "fields can be imported" do + defmodule Foo do + use Absinthe.Schema + + query do + # Query type must exist + end + + object :foo do + field :name, :string + end + + object :bar do + import_fields :foo + field :email, :string + end + end + + assert [:email, :name] = field_list(Foo, :bar) + end + + test "works for input objects" do + defmodule InputFoo do + use Absinthe.Schema + + query do + # Query type must exist + end + + input_object :foo do + field :name, :string + end + + input_object :bar do + import_fields :foo + field :email, :string + end + end + + fields = InputFoo.__absinthe_type__(:bar).fields + + assert [:email, :name] = fields |> Map.keys() |> Enum.sort() + end + + test "works for interfaces" do + defmodule InterfaceFoo do + use Absinthe.Schema + + query do + # Query type must exist + end + + object :cool_fields do + field :name, :string + end + + interface :foo do + import_fields :cool_fields + resolve_type fn _, _ -> :real_foo end + end + + object :real_foo do + interface :foo + import_fields :cool_fields + end + end + + assert [:name] = field_list(InterfaceFoo, :foo) + + assert [:name] = field_list(InterfaceFoo, :real_foo) + end + + test "can work transitively" do + defmodule Bar do + use Absinthe.Schema + + query do + # Query type must exist + end + + object :foo do + field :name, :string + end + + object :bar do + import_fields :foo + 
field :email, :string + end + + object :baz do + import_fields :bar + field :age, :integer + end + end + + assert [:age, :email, :name] == field_list(Bar, :baz) + end + + test "raises errors nicely" do + defmodule ErrorSchema do + use Absinthe.Schema + + @pipeline_modifier Absinthe.Schema.Notation.ImportTest + + object :bar do + import_fields :asdf + field :email, :string + end + end + + assert {:error, + [ + %Absinthe.Phase.Error{ + extra: %{}, + locations: [_], + message: + "In Bar, :asdf is not defined in your schema.\n\nTypes must exist if referenced.\n", + path: [], + phase: Absinthe.Phase.Schema.Validation.TypeReferencesExist + } + ]} = validate(ErrorSchema) + end + + test "handles circular errors" do + defmodule Circles do + use Absinthe.Schema + + @pipeline_modifier Absinthe.Schema.Notation.ImportTest + + object :foo do + import_fields :bar + field :name, :string + end + + object :bar do + import_fields :foo + field :email, :string + end + end + + assert {:error, + [ + %Absinthe.Phase.Error{ + extra: :bar, + locations: [ + %{ + line: _ + } + ], + message: + "Field Import Cycle Error\n\nField Import in object `bar' `import_fields([foo: []]) forms a cycle via: ([:bar, :foo, :bar])", + path: [], + phase: Absinthe.Phase.Schema.Validation.NoCircularFieldImports + }, + %Absinthe.Phase.Error{ + extra: :foo, + locations: [ + %{ + line: _ + } + ], + message: + "Field Import Cycle Error\n\nField Import in object `foo' `import_fields([bar: []]) forms a cycle via: ([:foo, :bar, :foo])", + path: [], + phase: Absinthe.Phase.Schema.Validation.NoCircularFieldImports + } + ]} = validate(Circles) + end + + test "can import types from more than one thing" do + defmodule Multiples do + use Absinthe.Schema + + object :foo do + field :name, :string + end + + object :bar do + field :email, :string + end + + query do + import_fields :foo + import_fields :bar + field :age, :integer + end + end + + assert [:age, :email, :name] == field_list(Multiples, :query) + end + + test "can 
import fields from imported types" do + defmodule Source1 do + use Absinthe.Schema + + query do + # Query type must exist + end + + object :foo do + field :name, :string + end + end + + defmodule Source2 do + use Absinthe.Schema + + query do + # Query type must exist + end + + object :bar do + field :email, :string + end + end + + defmodule Dest do + use Absinthe.Schema + + query do + # Query type must exist + end + + import_types Source1 + import_types Source2 + + object :baz do + import_fields :foo + import_fields :bar + end + end + + assert [:email, :name] = field_list(Dest, :baz) + end + end + + describe "unknown imported modules" do + test "returns error" do + assert_schema_error("unknown_import_schema", [ + %Absinthe.Phase.Error{ + message: "Could not load module `Elixir.Test.Unknown`. It returned reason: `nofile`.", + phase: Absinthe.Phase.Schema.TypeImports + } + ]) + end + end + + defp validate(schema) do + pipeline = + schema + |> Absinthe.Pipeline.for_schema() + |> Absinthe.Pipeline.upto(Phase.Schema.FieldImports) + |> Kernel.++([Phase.Schema.Validation.Result]) + + case Absinthe.Pipeline.run(schema.__absinthe_blueprint__, pipeline) do + {ok_or_error, val, _} -> + {ok_or_error, val} + end + end + + def pipeline(_pipeline) do + [] + end +end diff --git a/test/absinthe/schema/notation_test.exs b/test/absinthe/schema/notation_test.exs index ab605709de..6f7a0602c7 100644 --- a/test/absinthe/schema/notation_test.exs +++ b/test/absinthe/schema/notation_test.exs @@ -1,227 +1,6 @@ defmodule Absinthe.Schema.NotationTest do use Absinthe.Case, async: true - describe "import fields" do - test "fields can be imported" do - defmodule Foo do - use Absinthe.Schema - - query do - # Query type must exist - end - - object :foo do - field :name, :string - end - - object :bar do - import_fields :foo - field :email, :string - end - end - - assert [:email, :name] = Foo.__absinthe_type__(:bar).fields |> Map.keys() |> Enum.sort() - end - - test "works for input objects" do - 
defmodule InputFoo do - use Absinthe.Schema - - query do - # Query type must exist - end - - input_object :foo do - field :name, :string - end - - input_object :bar do - import_fields :foo - field :email, :string - end - end - - fields = InputFoo.__absinthe_type__(:bar).fields - - assert [:email, :name] = fields |> Map.keys() |> Enum.sort() - end - - test "works for interfaces" do - defmodule InterfaceFoo do - use Absinthe.Schema - - query do - # Query type must exist - end - - object :cool_fields do - field :name, :string - end - - interface :foo do - import_fields :cool_fields - resolve_type fn _, _ -> :real_foo end - end - - object :real_foo do - interface :foo - import_fields :cool_fields - end - end - - interface_fields = InterfaceFoo.__absinthe_type__(:foo).fields - assert [:name] = interface_fields |> Map.keys() |> Enum.sort() - - object_fields = InterfaceFoo.__absinthe_type__(:real_foo).fields - assert [:name] = object_fields |> Map.keys() |> Enum.sort() - end - - test "can work transitively" do - defmodule Bar do - use Absinthe.Schema - - query do - # Query type must exist - end - - object :foo do - field :name, :string - end - - object :bar do - import_fields :foo - field :email, :string - end - - object :baz do - import_fields :bar - field :age, :integer - end - end - - assert [:age, :email, :name] == - Bar.__absinthe_type__(:baz).fields |> Map.keys() |> Enum.sort() - end - - test "raises errors nicely" do - defmodule ErrorSchema do - use Absinthe.Schema.Notation - - object :bar do - import_fields :asdf - field :email, :string - end - end - - assert [error] = ErrorSchema.__absinthe_errors__() - - assert %{ - data: %{ - artifact: - "Field Import Error\n\nObject :bar imports fields from :asdf but\n:asdf does not exist in the schema!", - value: :asdf - }, - location: %{file: _, line: _}, - rule: Absinthe.Schema.Rule.FieldImportsExist - } = error - end - - test "handles circular errors" do - defmodule Circles do - use Absinthe.Schema.Notation - - object :foo 
do - import_fields :bar - field :name, :string - end - - object :bar do - import_fields :foo - field :email, :string - end - end - - assert [error] = Circles.__absinthe_errors__() - - assert %{ - data: %{ - artifact: - "Field Import Cycle Error\n\nField Import in object `foo' `import_fields(:bar) forms a cycle via: (`foo' => `bar' => `foo')", - value: :bar - }, - location: %{file: _, line: _}, - rule: Absinthe.Schema.Rule.NoCircularFieldImports - } = error - end - - test "can import types from more than one thing" do - defmodule Multiples do - use Absinthe.Schema - - object :foo do - field :name, :string - end - - object :bar do - field :email, :string - end - - query do - import_fields :foo - import_fields :bar - field :age, :integer - end - end - - assert [:age, :email, :name] == - Multiples.__absinthe_type__(:query).fields |> Map.keys() |> Enum.sort() - end - - test "can import fields from imported types" do - defmodule Source1 do - use Absinthe.Schema - - query do - # Query type must exist - end - - object :foo do - field :name, :string - end - end - - defmodule Source2 do - use Absinthe.Schema - - query do - # Query type must exist - end - - object :bar do - field :email, :string - end - end - - defmodule Dest do - use Absinthe.Schema - - query do - # Query type must exist - end - - import_types Source1 - import_types Source2 - - object :baz do - import_fields :foo - import_fields :bar - end - end - - assert [:email, :name] = Dest.__absinthe_type__(:baz).fields |> Map.keys() |> Enum.sort() - end - end - describe "arg" do test "can be under field as an attribute" do assert_no_notation_error("ArgFieldValid", """ @@ -237,6 +16,8 @@ defmodule Absinthe.Schema.NotationTest do assert_no_notation_error("ArgDirectiveValid", """ directive :test do arg :if, :boolean + + on :field end """) end @@ -247,7 +28,7 @@ defmodule Absinthe.Schema.NotationTest do """ arg :name, :string """, - "Invalid schema notation: `arg` must only be used within `directive`, `field`" + "Invalid 
schema notation: `arg` must only be used within `directive`, `field`. Was used in `schema`." ) end end @@ -256,6 +37,7 @@ defmodule Absinthe.Schema.NotationTest do test "can be toplevel" do assert_no_notation_error("DirectiveValid", """ directive :foo do + on :field end """) end @@ -269,7 +51,7 @@ defmodule Absinthe.Schema.NotationTest do end end """, - "Invalid schema notation: `directive` must only be used toplevel" + "Invalid schema notation: `directive` must only be used toplevel. Was used in `directive`." ) end end @@ -291,7 +73,7 @@ defmodule Absinthe.Schema.NotationTest do end end """, - "Invalid schema notation: `enum` must only be used toplevel" + "Invalid schema notation: `enum` must only be used toplevel. Was used in `enum`." ) end end @@ -327,7 +109,7 @@ defmodule Absinthe.Schema.NotationTest do """ field :foo, :string """, - "Invalid schema notation: `field` must only be used within `input_object`, `interface`, `object`" + "Invalid schema notation: `field` must only be used within `input_object`, `interface`, `object`. Was used in `schema`." ) end end @@ -349,16 +131,17 @@ defmodule Absinthe.Schema.NotationTest do end end """, - "Invalid schema notation: `input_object` must only be used toplevel" + "Invalid schema notation: `input_object` must only be used toplevel. Was used in `input_object`." ) end end - describe "instruction" do + describe "expand" do test "can be under directive as an attribute" do assert_no_notation_error("InstructionValid", """ directive :bar do - instruction fn -> :ok end + expand fn _, _ -> :ok end + on :field end """) end @@ -367,9 +150,9 @@ defmodule Absinthe.Schema.NotationTest do assert_notation_error( "InstructionToplevelInvalid", """ - instruction fn -> :ok end + expand fn _, _ -> :ok end """, - "Invalid schema notation: `instruction` must only be used within `directive`" + "Invalid schema notation: `expand` must only be used within `directive`. Was used in `schema`." 
) end @@ -378,10 +161,10 @@ defmodule Absinthe.Schema.NotationTest do "InstructionObjectInvalid", """ object :foo do - instruction fn -> :ok end + expand fn _, _ -> :ok end end """, - "Invalid schema notation: `instruction` must only be used within `directive`" + "Invalid schema notation: `expand` must only be used within `directive`. Was used in `object`." ) end end @@ -421,14 +204,14 @@ defmodule Absinthe.Schema.NotationTest do interface :foo end """, - "Invalid schema notation: `interface` (as an attribute) must only be used within `object`" + "Invalid schema notation: `interface_attribute` must only be used within `object`, `interface`. Was used in `input_object`." ) end end describe "interfaces" do test "can be under object as an attribute" do - assert_no_notation_error("InterfacesValid", """ + assert_no_notation_error("ObjectInterfacesValid", """ interface :bar do field :name, :string resolve_type fn _, _ -> :foo end @@ -440,6 +223,19 @@ defmodule Absinthe.Schema.NotationTest do """) end + test "can be under interface as an attribute" do + assert_no_notation_error("InterfaceInterfacesValid", """ + interface :bar do + field :name, :string + resolve_type fn _, _ -> :foo end + end + interface :foo do + field :name, :string + interfaces [:bar] + end + """) + end + test "cannot be toplevel" do assert_notation_error( "InterfacesInvalid", @@ -449,7 +245,7 @@ defmodule Absinthe.Schema.NotationTest do end interfaces [:bar] """, - "Invalid schema notation: `interfaces` must only be used within `object`" + "Invalid schema notation: `interfaces` must only be used within `object`, `interface`. Was used in `schema`." ) end end @@ -469,7 +265,7 @@ defmodule Absinthe.Schema.NotationTest do """ is_type_of fn _, _ -> true end """, - "Invalid schema notation: `is_type_of` must only be used within `object`" + "Invalid schema notation: `is_type_of` must only be used within `object`. Was used in `schema`." 
) end @@ -481,7 +277,7 @@ defmodule Absinthe.Schema.NotationTest do is_type_of fn _, _ -> :bar end end """, - "Invalid schema notation: `is_type_of` must only be used within `object`" + "Invalid schema notation: `is_type_of` must only be used within `object`. Was used in `interface`." ) end end @@ -503,7 +299,7 @@ defmodule Absinthe.Schema.NotationTest do end end """, - "Invalid schema notation: `object` must only be used toplevel" + "Invalid schema notation: `object` must only be used toplevel. Was used in `object`." ) end @@ -541,7 +337,7 @@ defmodule Absinthe.Schema.NotationTest do test "can be under directive as an attribute" do assert_no_notation_error("OnValid", """ directive :foo do - on [Foo, Bar] + on [:fragment_spread, :mutation] end """) end @@ -550,9 +346,9 @@ defmodule Absinthe.Schema.NotationTest do assert_notation_error( "OnInvalid", """ - on [Foo, Bar] + on [:fragment_spread, :mutation] """, - "Invalid schema notation: `on` must only be used within `directive`" + "Invalid schema notation: `on` must only be used within `directive`. Was used in `schema`." ) end end @@ -572,7 +368,7 @@ defmodule Absinthe.Schema.NotationTest do """ parse &(&1) """, - "Invalid schema notation: `parse` must only be used within `scalar`" + "Invalid schema notation: `parse` must only be used within `scalar`. Was used in `schema`." ) end end @@ -594,7 +390,7 @@ defmodule Absinthe.Schema.NotationTest do """ resolve fn _, _ -> {:ok, 1} end """, - "Invalid schema notation: `resolve` must only be used within `field`" + "Invalid schema notation: `resolve` must only be used within `field`. Was used in `schema`." ) end @@ -606,7 +402,7 @@ defmodule Absinthe.Schema.NotationTest do resolve fn _, _ -> {:ok, 1} end end """, - "Invalid schema notation: `resolve` must only be used within `field`" + "Invalid schema notation: `resolve` must only be used within `field`. Was used in `object`." 
) end end @@ -634,7 +430,7 @@ defmodule Absinthe.Schema.NotationTest do """ resolve_type fn _, _ -> :bar end """, - "Invalid schema notation: `resolve_type` must only be used within `interface`, `union`" + "Invalid schema notation: `resolve_type` must only be used within `interface`, `union`. Was used in `schema`." ) end @@ -646,7 +442,7 @@ defmodule Absinthe.Schema.NotationTest do resolve_type fn _, _ -> :bar end end """, - "Invalid schema notation: `resolve_type` must only be used within `interface`, `union`" + "Invalid schema notation: `resolve_type` must only be used within `interface`, `union`. Was used in `object`." ) end end @@ -668,7 +464,7 @@ defmodule Absinthe.Schema.NotationTest do end end """, - "Invalid schema notation: `scalar` must only be used toplevel" + "Invalid schema notation: `scalar` must only be used toplevel. Was used in `scalar`." ) end end @@ -688,7 +484,7 @@ defmodule Absinthe.Schema.NotationTest do """ serialize &(&1) """, - "Invalid schema notation: `serialize` must only be used within `scalar`" + "Invalid schema notation: `serialize` must only be used within `scalar`. Was used in `schema`." ) end end @@ -710,7 +506,7 @@ defmodule Absinthe.Schema.NotationTest do assert_notation_error( "TypesInvalid", "types [:foo]", - "Invalid schema notation: `types` must only be used within `union`" + "Invalid schema notation: `types` must only be used within `union`. Was used in `schema`." ) end end @@ -730,7 +526,7 @@ defmodule Absinthe.Schema.NotationTest do assert_notation_error( "ValueInvalid", "value :b", - "Invalid schema notation: `value` must only be used within `enum`" + "Invalid schema notation: `value` must only be used within `enum`. Was used in `schema`." ) end end @@ -750,11 +546,23 @@ defmodule Absinthe.Schema.NotationTest do assert_notation_error( "DescriptionInvalid", ~s(description "test"), - "Invalid schema notation: `description` must not be used toplevel" + "Invalid schema notation: `description` must not be used toplevel. 
Was used in `schema`." ) end end + test "No nested non_null" do + assert_notation_error( + "NestedNonNull", + """ + object :really_null do + field :foo, non_null(non_null(:string)) + end + """, + "Invalid schema notation: `non_null` must not be nested" + ) + end + @doc """ Assert a notation error occurs. diff --git a/test/absinthe/schema/rule/default_enum_value_present_test.exs b/test/absinthe/schema/rule/default_enum_value_present_test.exs index 226e0cc5de..aae35f4054 100644 --- a/test/absinthe/schema/rule/default_enum_value_present_test.exs +++ b/test/absinthe/schema/rule/default_enum_value_present_test.exs @@ -2,7 +2,7 @@ defmodule Absinthe.Schema.Rule.DefaultEnumValuePresentTest do use Absinthe.Case, async: true describe "rule" do - test "is enforced when the defaultValue is not in the enum" do + test "is enforced when the default_value is not in the enum" do schema = """ defmodule BadColorSchema do use Absinthe.Schema @@ -14,14 +14,14 @@ defmodule Absinthe.Schema.Rule.DefaultEnumValuePresentTest do query do field :info, - type: :channel_info, - args: [ - channel: [type: non_null(:channel), default_value: :OTHER], - ], - resolve: fn - %{channel: channel}, _ -> - {:ok, %{name: @names[channel]}} - end + type: :channel_info, + args: [ + channel: [type: non_null(:channel), default_value: :OTHER], + ], + resolve: fn + %{channel: channel}, _ -> + {:ok, %{name: @names[channel]}} + end end @@ -42,5 +42,74 @@ defmodule Absinthe.Schema.Rule.DefaultEnumValuePresentTest do Code.eval_string(schema) end) end + + test "is enforced when the default_value is a list of enums and some items are not in the enum" do + schema = """ + defmodule MovieSchema do + use Absinthe.Schema + + query do + + field :movies, + type: non_null(list_of(non_null(:movie_genre))), + args: [ + genres: [type: non_null(list_of(non_null(:movie_genre))), default_value: [:action, :OTHER]], + ], + resolve: fn + %{genres: _}, _ -> {:ok, []} + end + + end + + enum :movie_genre do + value :action, as: :action + 
value :comedy, as: :comedy + value :sf, as: :sf + end + + object :movie do + field :name, :string + end + end + """ + + error = ~r/The default_value for an enum must be present in the enum values/ + + assert_raise(Absinthe.Schema.Error, error, fn -> + Code.eval_string(schema) + end) + end + + test "passes when the default_value is a list and that list is a valid enum value" do + schema = """ + defmodule CorrectCatSchema do + use Absinthe.Schema + + query do + + field :cats, + type: non_null(list_of(non_null(:cat))), + args: [ + order_by: [type: non_null(:cat_order_by), default_value: [{:asc, :name}]], + ], + resolve: fn + %{order_by: _}, _ -> {:ok, []} + end + + end + + enum :cat_order_by do + value :name_asc, as: [{:asc, :name}] + value :name_desc_inserted_at_asc, as: [{:desc, :name}, {:asc, :inserted_at}] + end + + object :cat do + field :name, :string + end + end + """ + + assert Code.eval_string(schema) + end end end diff --git a/test/absinthe/schema/rule/directive_must_be_valid_test.exs b/test/absinthe/schema/rule/directive_must_be_valid_test.exs new file mode 100644 index 0000000000..a4925218eb --- /dev/null +++ b/test/absinthe/schema/rule/directive_must_be_valid_test.exs @@ -0,0 +1,14 @@ +defmodule Absinthe.Schema.Rule.DirectivesMustBeValidTest do + use Absinthe.Case, async: true + + alias Absinthe.Phase.Schema.Validation.DirectivesMustBeValid + + describe "rule" do + test "is enforced" do + assert_schema_error("bad_directives_schema", [ + %{phase: DirectivesMustBeValid, extra: %{}}, + %{phase: DirectivesMustBeValid, extra: %{location: :unknown}} + ]) + end + end +end diff --git a/test/absinthe/schema/rule/input_output_types_correctly_placed_test.exs b/test/absinthe/schema/rule/input_output_types_correctly_placed_test.exs index 8e8b5ce9d1..18e7dd6999 100644 --- a/test/absinthe/schema/rule/input_output_types_correctly_placed_test.exs +++ b/test/absinthe/schema/rule/input_output_types_correctly_placed_test.exs @@ -1,31 +1,37 @@ -defmodule 
Absinthe.Schema.Rule.InputOuputTypesCorrectlyPlacedTest do +defmodule Absinthe.Schema.Rule.InputOutputTypesCorrectlyPlacedTest do use Absinthe.Case, async: true - describe "rule" do + describe "macro schema" do test "is enforced with output types on arguments" do assert_schema_error("invalid_output_types", [ %{ - data: %{ + extra: %{ field: :blah, - parent: Absinthe.Type.Object, - struct: Absinthe.Type.InputObject, + parent: Absinthe.Blueprint.Schema.ObjectTypeDefinition, + struct: Absinthe.Blueprint.Schema.InputObjectTypeDefinition, type: :input }, - location: %{ - file: - "/Users/ben/src/absinthe/test/support/fixtures/dynamic/invalid_output_types.exs", - line: 10 - }, - rule: Absinthe.Schema.Rule.InputOuputTypesCorrectlyPlaced + locations: [ + %{ + file: "test/support/fixtures/dynamic/invalid_output_types.exs", + line: 11 + } + ], + phase: Absinthe.Phase.Schema.Validation.InputOutputTypesCorrectlyPlaced }, %{ - data: %{argument: :invalid_arg, struct: Absinthe.Type.Object, type: :user}, - location: %{ - file: - "/Users/ben/src/absinthe/test/support/fixtures/dynamic/invalid_output_types.exs", - line: 4 + extra: %{ + argument: :invalid_arg, + struct: Absinthe.Blueprint.Schema.ObjectTypeDefinition, + type: :user }, - rule: Absinthe.Schema.Rule.InputOuputTypesCorrectlyPlaced + locations: [ + %{ + file: "test/support/fixtures/dynamic/invalid_output_types.exs", + line: 16 + } + ], + phase: Absinthe.Phase.Schema.Validation.InputOutputTypesCorrectlyPlaced } ]) end @@ -33,17 +39,89 @@ defmodule Absinthe.Schema.Rule.InputOuputTypesCorrectlyPlacedTest do test "is enforced with input types on arguments" do assert_schema_error("invalid_input_types", [ %{ - data: %{ + extra: %{ + field: :blah, + parent: Absinthe.Blueprint.Schema.InputObjectTypeDefinition, + struct: Absinthe.Blueprint.Schema.ObjectTypeDefinition, + type: :user + }, + locations: [ + %{ + file: "test/support/fixtures/dynamic/invalid_input_types.exs", + line: 8 + } + ], + phase: 
Absinthe.Phase.Schema.Validation.InputOutputTypesCorrectlyPlaced + } + ]) + end + end + + describe "sdl schema" do + test "is enforced with output types on arguments" do + assert_schema_error("invalid_output_types_sdl", [ + %{ + extra: %{ field: :blah, - parent: Absinthe.Type.InputObject, - struct: Absinthe.Type.Object, + parent: Absinthe.Blueprint.Schema.ObjectTypeDefinition, + struct: Absinthe.Blueprint.Schema.InputObjectTypeDefinition, + type: :input + }, + locations: [ + %{ + file: "test/support/fixtures/dynamic/invalid_output_types_sdl.exs", + line: 4 + } + ], + phase: Absinthe.Phase.Schema.Validation.InputOutputTypesCorrectlyPlaced + }, + %{ + extra: %{ + argument: :invalid_arg, + struct: Absinthe.Blueprint.Schema.ObjectTypeDefinition, + type: :user + }, + locations: [ + %{ + file: "test/support/fixtures/dynamic/invalid_output_types_sdl.exs", + line: 4 + } + ], + phase: Absinthe.Phase.Schema.Validation.InputOutputTypesCorrectlyPlaced + } + ]) + end + + test "is enforced with input types on arguments" do + assert_schema_error("invalid_input_types_sdl", [ + %{ + extra: %{ + argument: :blah, + struct: Absinthe.Blueprint.Schema.ObjectTypeDefinition, type: :user }, - location: %{ - file: "/Users/ben/src/absinthe/test/support/fixtures/dynamic/invalid_input_types.exs", - line: 7 + locations: [ + %{ + file: "test/support/fixtures/dynamic/invalid_input_types_sdl.exs", + line: 4 + } + ], + phase: Absinthe.Phase.Schema.Validation.InputOutputTypesCorrectlyPlaced + }, + %{ + extra: %{ + field: :blah, + parent: Absinthe.Blueprint.Schema.InputObjectTypeDefinition, + struct: Absinthe.Blueprint.Schema.ObjectTypeDefinition, + type: :user }, - rule: Absinthe.Schema.Rule.InputOuputTypesCorrectlyPlaced + locations: [ + %{ + file: "test/support/fixtures/dynamic/invalid_input_types_sdl.exs", + line: 4 + } + ], + phase: Absinthe.Phase.Schema.Validation.InputOutputTypesCorrectlyPlaced } ]) end diff --git a/test/absinthe/schema/rule/names_must_be_valid_test.exs 
b/test/absinthe/schema/rule/names_must_be_valid_test.exs new file mode 100644 index 0000000000..30c42f95b0 --- /dev/null +++ b/test/absinthe/schema/rule/names_must_be_valid_test.exs @@ -0,0 +1,25 @@ +defmodule Absinthe.Schema.Rule.NamesMustBeValidTest do + use Absinthe.Case, async: true + + alias Absinthe.Phase.Schema.Validation.NamesMustBeValid + + describe "rule" do + test "is enforced" do + assert_schema_error("bad_names_schema", [ + %{phase: NamesMustBeValid, extra: %{artifact: "field name", value: "bad field name"}}, + %{phase: NamesMustBeValid, extra: %{artifact: "argument name", value: "bad arg name"}}, + %{ + phase: NamesMustBeValid, + extra: %{artifact: "directive name", value: "bad directive name"} + }, + %{phase: NamesMustBeValid, extra: %{artifact: "scalar name", value: "bad?scalar#name"}}, + %{phase: NamesMustBeValid, extra: %{artifact: "object name", value: "bad object name"}}, + %{ + phase: NamesMustBeValid, + extra: %{artifact: "input object name", value: "bad input name"} + }, + %{phase: NamesMustBeValid, extra: %{artifact: "enum value name", value: "1"}} + ]) + end + end +end diff --git a/test/absinthe/schema/rule/no_interface_cycles_test.exs b/test/absinthe/schema/rule/no_interface_cycles_test.exs new file mode 100644 index 0000000000..420565c816 --- /dev/null +++ b/test/absinthe/schema/rule/no_interface_cycles_test.exs @@ -0,0 +1,36 @@ +defmodule Absinthe.Schema.Rule.NoInterfacecyclesTest do + use Absinthe.Case, async: true + + describe "rule" do + test "is enforced" do + assert_schema_error("interface_cycle_schema", [ + %{ + extra: :named, + locations: [ + %{ + file: "test/support/fixtures/dynamic/interface_cycle_schema.exs", + line: 24 + } + ], + message: + "Interface Cycle Error\n\nInterface `named' forms a cycle via: ([:named, :node, :named])", + path: [], + phase: Absinthe.Phase.Schema.Validation.NoInterfaceCyles + }, + %{ + extra: :node, + locations: [ + %{ + file: "test/support/fixtures/dynamic/interface_cycle_schema.exs", + line: 24 + } 
+ ], + message: + "Interface Cycle Error\n\nInterface `node' forms a cycle via: ([:node, :named, :node])", + path: [], + phase: Absinthe.Phase.Schema.Validation.NoInterfaceCyles + } + ]) + end + end +end diff --git a/test/absinthe/schema/rule/object_interfaces_must_be_valid_test.exs b/test/absinthe/schema/rule/object_interfaces_must_be_valid_test.exs new file mode 100644 index 0000000000..bfd1ac4463 --- /dev/null +++ b/test/absinthe/schema/rule/object_interfaces_must_be_valid_test.exs @@ -0,0 +1,75 @@ +defmodule Absinthe.Schema.Rule.ObjectInterfacesMustBeValidTest do + use Absinthe.Case, async: true + + @interface_transitive_interfaces ~S( + defmodule InterfaceWithTransitiveInterfaces do + use Absinthe.Schema + + query do + end + + import_sdl """ + interface Node { + id: ID! + } + + interface Resource implements Node { + id: ID! + url: String + } + + # should also implement Node + interface Image implements Resource { + id: ID! + url: String + thumbnail: String + } + """ + end + ) + + test "errors on interface not implementing all transitive interfaces" do + error = + ~r/Type \"image\" must implement interface type \"node\" because it is implemented by \"resource\"./ + + assert_raise(Absinthe.Schema.Error, error, fn -> + Code.eval_string(@interface_transitive_interfaces) + end) + end + + @object_transitive_interfaces ~S( + defmodule ObjectWithTransitiveInterfaces do + use Absinthe.Schema + + query do + end + + import_sdl """ + interface Node { + id: ID! + } + + interface Resource implements Node { + id: ID! + url: String + } + + # should also implement Node + type Image implements Resource { + id: ID! 
+ url: String + thumbnail: String + } + """ + end + ) + + test "errors on object not implementing all transitive interfaces" do + error = + ~r/Type \"image\" must implement interface type \"node\" because it is implemented by \"resource\"./ + + assert_raise(Absinthe.Schema.Error, error, fn -> + Code.eval_string(@object_transitive_interfaces) + end) + end +end diff --git a/test/absinthe/schema/rule/object_must_implement_interfaces_test.exs b/test/absinthe/schema/rule/object_must_implement_interfaces_test.exs index 80cc38a6a2..fc07ce8914 100644 --- a/test/absinthe/schema/rule/object_must_implement_interfaces_test.exs +++ b/test/absinthe/schema/rule/object_must_implement_interfaces_test.exs @@ -6,7 +6,13 @@ defmodule Absinthe.Schema.Rule.ObjectMustImplementInterfacesTest do object :user do interface :named + interface :favorite_foods + interface :parented field :name, :string + field :id, :id + field :parent, :named + field :another_parent, :user + field :color, non_null(list_of(non_null(:string))) end end @@ -14,26 +20,234 @@ defmodule Absinthe.Schema.Rule.ObjectMustImplementInterfacesTest do use Absinthe.Schema import_types Types + interface :parented do + field :parent, :named + field :another_parent, :named + + resolve_type fn + %{type: :dog}, _ -> :dog + %{type: :user}, _ -> :user + %{type: :cat}, _ -> :cat + _, _ -> nil + end + end + interface :named do + interface :parented field :name, :string + field :parent, :named + field :another_parent, :named + + resolve_type fn + %{type: :dog}, _ -> :dog + %{type: :user}, _ -> :user + %{type: :cat}, _ -> :cat + _, _ -> nil + end + end + + interface :favorite_foods do + field :color, list_of(:string) resolve_type fn - %{type: :dog} -> :dog - %{type: :user} -> :dog - _ -> nil + %{type: :dog}, _ -> :dog + %{type: :user}, _ -> :user + %{type: :cat}, _ -> :cat + _, _ -> nil end end object :dog do field :name, :string interface :named + interface :parented + interface :favorite_foods + field :parent, :named + field 
:another_parent, :user + field :color, list_of(non_null(:string)) + end + + # An object field type is a valid sub-type if it is a Non-Null variant of a + # valid sub-type of the interface field type. + object :cat do + interface :named + interface :favorite_foods + interface :parented + field :name, non_null(:string) + field :parent, :named + field :another_parent, :user + field :color, non_null(list_of(:string)) + end + + query do + field :user, :user do + resolve fn _, _ -> + {:ok, + %{ + type: :user, + id: "abc-123", + name: "User Name", + parent: %{type: :user, id: "def-456", name: "Parent User"}, + another_parent: %{type: :user, id: "ghi-789", name: "Another Parent"} + }} + end + end + end + end + + test "interfaces are propagated across type imports" do + assert %{ + named: [:cat, :dog, :user], + favorite_foods: [:cat, :dog, :user], + parented: [:cat, :dog, :user] + } == + Schema.__absinthe_interface_implementors__() + end + + defmodule InterfaceImplementsInterfaces do + use Absinthe.Schema + + import_sdl """ + interface Node { + id: ID! + } + + interface Resource implements Node { + id: ID! + url: String + } + + type Image implements Resource & Node { + id: ID! + url: String + thumbnail: String + } + + """ + + def hydrate(%Absinthe.Blueprint.Schema.InterfaceTypeDefinition{}, _) do + {:resolve_type, &__MODULE__.resolve_type/1} end + def hydrate(_node, _ancestors), do: [] + + def resolve_type(_), do: false + + query do + end + end + + test "interfaces are set from sdl" do + assert %{ + node: [:image], + resource: [:image] + } == + InterfaceImplementsInterfaces.__absinthe_interface_implementors__() + end + + defmodule InterfaceFieldsReferenceInterfaces do + use Absinthe.Schema + + import_sdl """ + interface Pet { + food: PetFood! + } + + interface PetFood { + brand: String! + } + + type Dog implements Pet { + food: DogFood! + } + + type DogFood implements PetFood { + brand: String! + } + + type Cat implements Pet { + food: CatFood! 
+ } + + type CatFood implements PetFood { + brand: String! + } + """ + query do end + + def hydrate(%{identifier: :pet}, _) do + [{:resolve_type, &__MODULE__.pet/2}] + end + + def hydrate(%{identifier: :pet_food}, _) do + [{:resolve_type, &__MODULE__.pet_food/2}] + end + + def hydrate(_, _), do: [] + + def pet(_, _), do: nil + def pet_food(_, _), do: nil + end + + test "interface fields can reference other interfaces" do + assert %{ + pet: [:cat, :dog], + pet_food: [:cat_food, :dog_food] + } == + InterfaceFieldsReferenceInterfaces.__absinthe_interface_implementors__() end - test "interfaces are propogated across type imports" do - assert %{named: [:dog, :user]} == Schema.__absinthe_interface_implementors__() + test "is enforced" do + assert_schema_error("invalid_interface_types", [ + %{ + extra: %{ + fields: [:name], + object: :foo, + interface: :named + }, + locations: [ + %{ + file: "test/support/fixtures/dynamic/invalid_interface_types.exs", + line: 13 + } + ], + phase: Absinthe.Phase.Schema.Validation.ObjectMustImplementInterfaces + } + ]) + end + + test "Interfaces can contain fields of their own type" do + doc = """ + { + user { + ... on User { + id + parent { + ... on Named { + name + } + ... on User { + id + } + } + anotherParent { + id + } + } + ... 
on Named { + name + } + } + } + """ + + {:ok, %{data: data}} = Absinthe.run(doc, Schema) + + assert get_in(data, ["user", "id"]) == "abc-123" + assert get_in(data, ["user", "parent", "id"]) == "def-456" + assert get_in(data, ["user", "parent", "name"]) == "Parent User" + assert get_in(data, ["user", "anotherParent", "id"]) == "ghi-789" end end diff --git a/test/absinthe/schema/rule/query_type_must_be_object_test.exs b/test/absinthe/schema/rule/query_type_must_be_object_test.exs index 2653681555..8886d1ecc5 100644 --- a/test/absinthe/schema/rule/query_type_must_be_object_test.exs +++ b/test/absinthe/schema/rule/query_type_must_be_object_test.exs @@ -1,12 +1,19 @@ defmodule Absinthe.Schema.Rule.QueryTypeMustBeObjectTest do use Absinthe.Case, async: true - alias Absinthe.Schema.Rule - describe "rule" do test "is enforced" do assert_schema_error("empty_schema", [ - %{rule: Rule.QueryTypeMustBeObject, data: %{}} + %{ + phase: Absinthe.Phase.Schema.Validation.QueryTypeMustBeObject, + extra: %{}, + locations: [ + %{ + file: "test/support/fixtures/dynamic/empty_schema.exs", + line: 0 + } + ] + } ]) end end diff --git a/test/absinthe/schema/rule/type_names_are_reserved_test.exs b/test/absinthe/schema/rule/type_names_are_reserved_test.exs index 8a67fd790a..a8974337a3 100644 --- a/test/absinthe/schema/rule/type_names_are_reserved_test.exs +++ b/test/absinthe/schema/rule/type_names_are_reserved_test.exs @@ -1,19 +1,19 @@ defmodule Absinthe.Schema.Rule.TypeNamesAreReservedTest do use Absinthe.Case, async: true - alias Absinthe.Schema.Rule + alias Absinthe.Phase.Schema.Validation.TypeNamesAreReserved describe "rule" do test "is enforced" do assert_schema_error("prefix_schema", [ - %{rule: Rule.TypeNamesAreReserved, data: %{artifact: "type name", value: "__MyThing"}}, - %{rule: Rule.TypeNamesAreReserved, data: %{artifact: "field name", value: "__mything"}}, - %{rule: Rule.TypeNamesAreReserved, data: %{artifact: "argument name", value: "__myarg"}}, + %{phase: TypeNamesAreReserved, 
extra: %{artifact: "type name", value: "__MyThing"}}, + %{phase: TypeNamesAreReserved, extra: %{artifact: "field name", value: "__mything"}}, + %{phase: TypeNamesAreReserved, extra: %{artifact: "argument name", value: "__myarg"}}, %{ - rule: Rule.TypeNamesAreReserved, - data: %{artifact: "directive name", value: "__mydirective"} + phase: TypeNamesAreReserved, + extra: %{artifact: "directive name", value: "__mydirective"} }, - %{rule: Rule.TypeNamesAreReserved, data: %{artifact: "argument name", value: "__if"}} + %{phase: TypeNamesAreReserved, extra: %{artifact: "argument name", value: "__if"}} ]) end end diff --git a/test/absinthe/schema/rule/unique_field_names_test.exs b/test/absinthe/schema/rule/unique_field_names_test.exs new file mode 100644 index 0000000000..8e4415ab77 --- /dev/null +++ b/test/absinthe/schema/rule/unique_field_names_test.exs @@ -0,0 +1,75 @@ +defmodule Absinthe.Schema.Rule.UniqueFieldNamesTest do + use Absinthe.Case, async: true + + @duplicate_object_fields ~S( + defmodule DuplicateObjectFields do + use Absinthe.Schema + + query do + end + + import_sdl """ + type Dog { + name: String! + name: String + } + """ + end + ) + + @duplicate_interface_fields ~S( + defmodule DuplicateInterfaceFields do + use Absinthe.Schema + + query do + end + + import_sdl """ + interface Animal { + tail: Boolean + tail: Boolean + } + """ + end + ) + + @duplicate_input_fields ~S( + defmodule DuplicateInputFields do + use Absinthe.Schema + + query do + end + + import_sdl """ + input AnimalInput { + species: String! + species: String! 
+ } + """ + end + ) + + test "errors on non unique object field names" do + error = ~r/The field \"name\" is not unique in type \"Dog\"./ + + assert_raise(Absinthe.Schema.Error, error, fn -> + Code.eval_string(@duplicate_object_fields) + end) + end + + test "errors on non unique interface field names" do + error = ~r/The field \"tail\" is not unique in type \"Animal\"./ + + assert_raise(Absinthe.Schema.Error, error, fn -> + Code.eval_string(@duplicate_interface_fields) + end) + end + + test "errors on non unique input field names" do + error = ~r/The field \"species\" is not unique in type \"AnimalInput\"./ + + assert_raise(Absinthe.Schema.Error, error, fn -> + Code.eval_string(@duplicate_input_fields) + end) + end +end diff --git a/test/absinthe/schema/sdl_render_test.exs b/test/absinthe/schema/sdl_render_test.exs new file mode 100644 index 0000000000..f9d27f3df8 --- /dev/null +++ b/test/absinthe/schema/sdl_render_test.exs @@ -0,0 +1,288 @@ +defmodule Absinthe.Schema.SdlRenderTest do + use ExUnit.Case, async: true + + defmodule SdlTestSchema do + use Absinthe.Schema + + alias Absinthe.Blueprint.Schema + + @sdl """ + "Schema description" + schema { + query: Query + } + + directive @foo(name: String!) repeatable on OBJECT | SCALAR + + interface Animal { + legCount: Int! + } + + \""" + A submitted post + Multiline description + \""" + type Post { + old: String @deprecated(reason: \""" + It's old + Really old + \""") + + sweet: SweetScalar + + "Something" + title: String! + } + + input ComplexInput { + foo: String + } + + scalar SweetScalar + + type Query { + echo( + category: Category! + + "The number of times" + times: Int = 10 + ): [Category!]! 
+ posts: Post + search(limit: Int, sort: SorterInput!): [SearchResult] + defaultBooleanArg(boolean: Boolean = false): String + defaultInputArg(input: ComplexInput = {foo: "bar"}): String + defaultListArg(things: [String] = ["ThisThing"]): [String] + defaultEnumArg(category: Category = NEWS): Category + defaultNullStringArg(name: String = null): String + animal: Animal + } + + type Dog implements Pet & Animal { + legCount: Int! + name: String! + } + + "Simple description" + enum Category { + "Just the facts" + NEWS + + \""" + What some rando thinks + + Take with a grain of salt + \""" + OPINION + + CLASSIFIED + } + + interface Pet implements Animal { + name: String! + legCount: Int! + } + + "One or the other" + union SearchResult = Post | User + + "Sort this thing" + input SorterInput { + "By this field" + field: String! + } + + type User { + name: String! + } + """ + import_sdl @sdl + def sdl, do: @sdl + + def hydrate(%Schema.InterfaceTypeDefinition{identifier: :animal}, _) do + {:resolve_type, &__MODULE__.resolve_type/1} + end + + def hydrate(%{identifier: :pet}, _) do + {:resolve_type, &__MODULE__.resolve_type/1} + end + + def hydrate(_node, _ancestors), do: [] + + def resolve_type(_), do: false + end + + test "Render SDL from blueprint defined with SDL" do + assert Absinthe.Schema.to_sdl(SdlTestSchema) == + SdlTestSchema.sdl() + end + + describe "Render SDL" do + test "for a type" do + assert_rendered(""" + type Person implements Entity { + name: String! + baz: Int + } + """) + end + + test "for an interface" do + assert_rendered(""" + interface Entity implements Node { + name: String! + } + """) + end + + test "for an input" do + assert_rendered(""" + "Description for Profile" + input Profile { + "Description for name" + name: String! 
+ } + """ + end + + test "for a union with types" do + assert_rendered(""" + union Foo = Bar | Baz + """) + end + + test "for a union without types" do + assert_rendered(""" + union Foo + """) + end + + test "for a scalar" do + assert_rendered(""" + scalar MyGreatScalar + """) + end + + test "for a directive" do + assert_rendered(""" + directive @foo(name: String!) on OBJECT | SCALAR + """) + end + + test "for a schema declaration" do + assert_rendered(""" + schema { + query: Query + } + """) + end + end + + defp assert_rendered(sdl) do + rendered_sdl = + with {:ok, %{input: doc}} <- Absinthe.Phase.Parse.run(sdl), + %Absinthe.Language.Document{definitions: [node]} <- doc, + blueprint = Absinthe.Blueprint.Draft.convert(node, doc) do + Inspect.inspect(blueprint, %Inspect.Opts{pretty: true}) + end + + assert sdl == rendered_sdl + end + + defmodule MacroTestSchema do + use Absinthe.Schema + + query do + description "Escaped\t\"descrição/description\"" + + field :echo, :string do + arg :times, :integer, default_value: 10, description: "The number of times" + arg :time_interval, :integer + end + + field :search, :search_result + end + + directive :foo do + arg :baz, :string + + on :field + end + + enum :order_status do + value :delivered + value :processing + value :picking + end + + enum :status, values: [:one, :two, :three] + + object :order do + field :id, :id + field :name, :string + field :status, :order_status + field :other_status, :status + import_fields :imported_fields + end + + object :category do + field :name, :string + end + + union :search_result do + types [:order, :category] + end + + object :imported_fields do + field :imported, non_null(:boolean) + end + end + + test "Render SDL from blueprint defined with macros" do + assert Absinthe.Schema.to_sdl(MacroTestSchema) == + """ + "Represents a schema" + schema { + query: RootQueryType + } + + directive @foo(baz: String) on FIELD + + "Escaped\\t\\\"descrição\\/description\\\"" + type RootQueryType { + 
echo( + "The number of times" + times: Int + + timeInterval: Int + ): String + search: SearchResult + } + + type Category { + name: String + } + + union SearchResult = Order | Category + + enum Status { + ONE + TWO + THREE + } + + enum OrderStatus { + DELIVERED + PROCESSING + PICKING + } + + type Order { + imported: Boolean! + id: ID + name: String + status: OrderStatus + otherStatus: Status + } + """ + end +end diff --git a/test/absinthe/schema/type_system_directive_test.exs b/test/absinthe/schema/type_system_directive_test.exs new file mode 100644 index 0000000000..6251d6a25a --- /dev/null +++ b/test/absinthe/schema/type_system_directive_test.exs @@ -0,0 +1,227 @@ +defmodule Absinthe.Schema.TypeSystemDirectiveTest do + use ExUnit.Case, async: true + + defmodule WithTypeSystemDirective do + use Absinthe.Schema.Prototype + + input_object :complex do + field :str, :string + end + + directive :feature do + arg :name, non_null(:string) + arg :number, :integer + arg :complex, :complex + + repeatable true + + on [ + :schema, + :scalar, + :object, + :field_definition, + :argument_definition, + :interface, + :union, + :enum, + :enum_value, + :input_object, + :input_field_definition + ] + end + end + + defmodule TypeSystemDirectivesSdlSchema do + use Absinthe.Schema + + @prototype_schema WithTypeSystemDirective + + @sdl """ + schema @feature(name: ":schema") { + query: Query + } + + interface Animal @feature(name: ":interface") { + legCount: Int! @feature(name: \""" + Multiline here? 
+ Second line + \""") + } + + input SearchFilter @feature(name: ":input_object") { + query: String = "default" @feature(name: ":input_field_definition") + } + + type Post @feature(name: ":object", number: 3, complex: {str: "foo"}) { + name: String @deprecated(reason: "Bye") + } + + scalar SweetScalar @feature(name: ":scalar") + + type Query { + post: Post @feature(name: ":field_definition") + sweet: SweetScalar + pet: Dog + which: Category + search(filter: SearchFilter @feature(name: ":argument_definition")): SearchResult + } + + type Dog implements Animal { + legCount: Int! + name: String! + } + + enum Category @feature(name: ":enum") { + THIS + THAT @feature(name: ":enum_value") + THE_OTHER @deprecated(reason: "It's old") + } + + union SearchResult @feature(name: ":union") = Dog | Post + """ + import_sdl @sdl + def sdl, do: @sdl + + def hydrate(%{identifier: :animal}, _) do + {:resolve_type, &__MODULE__.resolve_type/1} + end + + def hydrate(_node, _ancestors), do: [] + + def resolve_type(_), do: false + end + + defmodule TypeSystemDirectivesMacroSchema do + use Absinthe.Schema + + @prototype_schema WithTypeSystemDirective + + query do + field :post, :post do + directive :feature, name: ":field_definition" + end + + field :sweet, :sweet_scalar + field :which, :category + field :pet, :dog + + field :search, :search_result do + arg :filter, :search_filter, directives: [{:feature, name: ":argument_definition"}] + directive :feature, name: ":argument_definition" + end + end + + object :post do + directive :feature, name: ":object", number: 3 + + field :name, :string do + deprecate "Bye" + end + end + + scalar :sweet_scalar do + directive :feature, name: ":scalar" + parse &Function.identity/1 + serialize &Function.identity/1 + end + + enum :category do + directive :feature, name: ":enum" + value :this + value :that, directives: [feature: [name: ":enum_value"]] + value :the_other, directives: [deprecated: [reason: "It's old"]] + end + + interface :animal do + directive 
:feature, name: ":interface" + + field :leg_count, non_null(:integer) do + directive :feature, + name: """ + Multiline here? + Second line + """ + end + end + + object :dog do + is_type_of fn _ -> true end + interface :animal + field :leg_count, non_null(:integer) + field :name, non_null(:string) + end + + input_object :search_filter do + directive :feature, name: ":input_object" + + field :query, :string, default_value: "default" do + directive :feature, name: ":input_field_definition" + end + end + + union :search_result do + directive :feature, name: ":union" + types [:dog, :post] + + resolve_type fn %{type: type}, _ -> type end + end + end + + describe "with SDL schema" do + test "Render SDL with Type System Directives applied" do + assert Absinthe.Schema.to_sdl(TypeSystemDirectivesSdlSchema) == + TypeSystemDirectivesSdlSchema.sdl() + end + end + + @macro_schema_sdl """ + "Represents a schema" + schema { + query: RootQueryType + } + + interface Animal @feature(name: ":interface") { + legCount: Int! @feature(name: \"\"\" + Multiline here? + Second line + \"\"\") + } + + input SearchFilter @feature(name: ":input_object") { + query: String @feature(name: ":input_field_definition") + } + + type Post @feature(name: ":object", number: 3) { + name: String @deprecated(reason: "Bye") + } + + scalar SweetScalar @feature(name: ":scalar") + + type RootQueryType { + post: Post @feature(name: ":field_definition") + sweet: SweetScalar + which: Category + pet: Dog + search(filter: SearchFilter @feature(name: ":argument_definition")): SearchResult @feature(name: ":argument_definition") + } + + type Dog implements Animal { + legCount: Int! + name: String! 
+ } + + enum Category @feature(name: ":enum") { + THIS + THAT @feature(name: ":enum_value") + THE_OTHER @deprecated(reason: "It's old") + } + + union SearchResult @feature(name: ":union") = Dog | Post + """ + describe "with macro schema" do + test "Render SDL with Type System Directives applied" do + assert Absinthe.Schema.to_sdl(TypeSystemDirectivesMacroSchema) == + @macro_schema_sdl + end + end +end diff --git a/test/absinthe/schema_test.exs b/test/absinthe/schema_test.exs index 81b1bf3c5a..f9f0927d93 100644 --- a/test/absinthe/schema_test.exs +++ b/test/absinthe/schema_test.exs @@ -1,25 +1,39 @@ defmodule Absinthe.SchemaTest do - use Absinthe.Case, async: true + # can't async due to capture io + use Absinthe.Case + import ExUnit.CaptureIO alias Absinthe.Schema alias Absinthe.Type describe "built-in types" do - def load_valid_schema do - load_schema("valid_schema") + defmodule ValidSchema do + use Absinthe.Schema + + query do + # Query type must exist + end + + object :person do + description "A person" + field :name, :string + end end test "are loaded" do - load_valid_schema() - assert map_size(Absinthe.Type.BuiltIns.__absinthe_types__()) > 0 + builtin_types = + ValidSchema + |> Absinthe.Schema.types() + |> Enum.filter(&Absinthe.Type.built_in?(&1)) + + assert length(builtin_types) > 0 - Absinthe.Type.BuiltIns.__absinthe_types__() - |> Enum.each(fn {ident, name} -> - assert Absinthe.Fixtures.ValidSchema.__absinthe_type__(ident) == - Absinthe.Fixtures.ValidSchema.__absinthe_type__(name) + Enum.each(builtin_types, fn type -> + assert ValidSchema.__absinthe_type__(type.identifier) == + ValidSchema.__absinthe_type__(type.name) end) - int = Absinthe.Fixtures.ValidSchema.__absinthe_type__(:integer) + int = ValidSchema.__absinthe_type__(:integer) assert 1 == Type.Scalar.serialize(int, 1) assert {:ok, 1} == Type.Scalar.parse(int, 1, %{}) end @@ -27,25 +41,23 @@ defmodule Absinthe.SchemaTest do describe "using the same identifier" do test "raises an exception" do - 
assert_schema_error("schema_with_duplicate_identifiers", [ - %{ - rule: Absinthe.Schema.Rule.TypeNamesAreUnique, - data: %{artifact: "Absinthe type identifier", value: :person} - } - ]) + capture_io(:stderr, fn -> + assert_schema_error("schema_with_duplicate_identifiers", [ + %{ + phase: Absinthe.Phase.Schema.Validation.TypeNamesAreUnique, + extra: %{artifact: "Absinthe type identifier", value: :person} + } + ]) + end) end end describe "using the same name" do - def load_duplicate_name_schema do - load_schema("schema_with_duplicate_names") - end - test "raises an exception" do assert_schema_error("schema_with_duplicate_names", [ %{ - rule: Absinthe.Schema.Rule.TypeNamesAreUnique, - data: %{artifact: "Type name", value: "Person"} + phase: Absinthe.Phase.Schema.Validation.TypeNamesAreUnique, + extra: %{artifact: "Type name", value: "Person"} } ]) end @@ -117,6 +129,8 @@ defmodule Absinthe.SchemaTest do directive :directive do arg :baz, :dir_enum + + on :field end enum :dir_enum do @@ -159,7 +173,7 @@ defmodule Absinthe.SchemaTest do end test "adds the types from a grandparent" do - assert %{foo: "Foo", bar: "Bar", baz: "Baz"} = ThirdSchema.__absinthe_types__() + assert %{foo: "Foo", bar: "Bar", baz: "Baz"} = ThirdSchema.__absinthe_types__(:all) assert "Foo" == ThirdSchema.__absinthe_type__(:foo).name end end @@ -192,10 +206,10 @@ defmodule Absinthe.SchemaTest do end end - describe "used_types" do + describe "referenced_types" do test "does not contain introspection types" do assert !Enum.any?( - Schema.used_types(ThirdSchema), + Schema.referenced_types(ThirdSchema), &Type.introspection?/1 ) end @@ -203,7 +217,7 @@ defmodule Absinthe.SchemaTest do test "contains enums" do types = ThirdSchema - |> Absinthe.Schema.used_types() + |> Absinthe.Schema.referenced_types() |> Enum.map(& &1.identifier) assert :some_enum in types @@ -213,7 +227,7 @@ defmodule Absinthe.SchemaTest do test "contains interfaces" do types = ThirdSchema - |> Absinthe.Schema.used_types() + |> 
Absinthe.Schema.referenced_types() |> Enum.map(& &1.identifier) assert :named in types @@ -222,7 +236,7 @@ defmodule Absinthe.SchemaTest do test "contains types only connected via interfaces" do types = ThirdSchema - |> Absinthe.Schema.used_types() + |> Absinthe.Schema.referenced_types() |> Enum.map(& &1.identifier) assert :person in types @@ -231,7 +245,7 @@ defmodule Absinthe.SchemaTest do test "contains types only connected via union" do types = ThirdSchema - |> Absinthe.Schema.used_types() + |> Absinthe.Schema.referenced_types() |> Enum.map(& &1.identifier) assert :dog in types @@ -283,6 +297,14 @@ defmodule Absinthe.SchemaTest do end end + describe "to_sdl/1" do + test "return schema sdl" do + assert Schema.to_sdl(SourceSchema) == """ + \"Represents a schema\"\nschema {\n query: RootQueryType\n}\n\ntype Foo {\n name: String\n}\n\n\"can describe query\"\ntype RootQueryType {\n foo: Foo\n} + """ + end + end + defmodule FragmentSpreadSchema do use Absinthe.Schema @@ -365,10 +387,12 @@ defmodule Absinthe.SchemaTest do end describe "can add metadata to an object" do - @tag :wip test "sets object metadata" do foo = Schema.lookup_type(MetadataSchema, :foo) - assert [eager: true, cache: false, sql_table: "foos", foo: "bar"] == foo.__private__[:meta] + + assert Enum.sort(eager: true, cache: false, sql_table: "foos", foo: "bar") == + Enum.sort(foo.__private__[:meta]) + assert Type.meta(foo, :sql_table) == "foos" assert Type.meta(foo, :cache) == false assert Type.meta(foo, :eager) == true diff --git a/test/absinthe/strict_schema_test.exs b/test/absinthe/strict_schema_test.exs new file mode 100644 index 0000000000..03eda113c4 --- /dev/null +++ b/test/absinthe/strict_schema_test.exs @@ -0,0 +1,375 @@ +defmodule Absinthe.StrictSchemaTest do + use Absinthe.Case, async: true + + describe "directive strict adapter" do + test "can use camelcase external name" do + document = """ + query ($input: FooBarInput!) 
{ + fooBarQuery @fooBarDirective(bazQux: $input) { + naiveDatetime + } + } + """ + + variables = %{"input" => %{"naiveDatetime" => "2017-01-27T20:31:55"}} + + assert_data( + %{"fooBarQuery" => %{"naiveDatetime" => "2017-01-27T20:31:55"}}, + run(document, Absinthe.Fixtures.StrictSchema, + adapter: Absinthe.Adapter.StrictLanguageConventions, + variables: variables + ) + ) + end + + test "returns an error when underscore external name used" do + document = """ + query ($input: FooBarInput!) { + fooBarQuery @foo_bar_directive(bazQux: $input) { + naiveDatetime + } + } + """ + + variables = %{"input" => %{"naiveDatetime" => "2017-01-27T20:31:55"}} + + assert_error_message( + "Unknown directive `foo_bar_directive'.", + run(document, Absinthe.Fixtures.StrictSchema, + adapter: Absinthe.Adapter.StrictLanguageConventions, + variables: variables + ) + ) + end + + test "returns an error when underscore external name used in argument" do + document = """ + query ($input: FooBarInput!) { + fooBarQuery @fooBarDirective(baz_qux: $input) { + naiveDatetime + } + } + """ + + variables = %{"input" => %{"naiveDatetime" => "2017-01-27T20:31:55"}} + + assert_error_message( + "Unknown argument \"baz_qux\" on directive \"@fooBarDirective\".", + run(document, Absinthe.Fixtures.StrictSchema, + adapter: Absinthe.Adapter.StrictLanguageConventions, + variables: variables + ) + ) + end + end + + describe "directive non-strict adapter" do + test "can use camelcase external name" do + document = """ + query ($input: FooBarInput!) { + fooBarQuery @fooBarDirective(bazQux: $input) { + naiveDatetime + } + } + """ + + variables = %{"input" => %{"naiveDatetime" => "2017-01-27T20:31:55"}} + + assert_data( + %{"fooBarQuery" => %{"naiveDatetime" => "2017-01-27T20:31:55"}}, + run(document, Absinthe.Fixtures.StrictSchema, + adapter: Absinthe.Adapter.LanguageConventions, + variables: variables + ) + ) + end + + test "can use underscore external name" do + document = """ + query ($input: FooBarInput!) 
{ + fooBarQuery @foo_bar_directive(bazQux: $input) { + naiveDatetime + } + } + """ + + variables = %{"input" => %{"naiveDatetime" => "2017-01-27T20:31:55"}} + + assert_data( + %{"fooBarQuery" => %{"naiveDatetime" => "2017-01-27T20:31:55"}}, + run(document, Absinthe.Fixtures.StrictSchema, + adapter: Absinthe.Adapter.LanguageConventions, + variables: variables + ) + ) + end + + test "can use underscore external name in argument" do + document = """ + query ($input: FooBarInput!) { + fooBarQuery @fooBarDirective(baz_qux: $input) { + naiveDatetime + } + } + """ + + variables = %{"input" => %{"naiveDatetime" => "2017-01-27T20:31:55"}} + + assert_data( + %{"fooBarQuery" => %{"naiveDatetime" => "2017-01-27T20:31:55"}}, + run(document, Absinthe.Fixtures.StrictSchema, + adapter: Absinthe.Adapter.LanguageConventions, + variables: variables + ) + ) + end + end + + describe "query strict adapter" do + test "can use camelcase external name" do + document = """ + query ($input: FooBarInput!) { + fooBarQuery(bazQux: $input) @fooBarDirective(bazQux: {naiveDatetime: "2017-01-27T20:31:56"}) { + naiveDatetime + } + } + """ + + variables = %{"input" => %{"naiveDatetime" => "2017-01-27T20:31:55"}} + + assert_data( + %{"fooBarQuery" => %{"naiveDatetime" => "2017-01-27T20:31:55"}}, + run(document, Absinthe.Fixtures.StrictSchema, + adapter: Absinthe.Adapter.StrictLanguageConventions, + variables: variables + ) + ) + end + + test "returns an error when underscore external name used" do + document = """ + query ($input: FooBarInput!) { + foo_bar_query(bazQux: $input) @fooBarDirective(bazQux: {naiveDatetime: "2017-01-27T20:31:56"}) { + naiveDatetime + } + } + """ + + variables = %{"input" => %{"naiveDatetime" => "2017-01-27T20:31:55"}} + + assert_error_message( + "Cannot query field \"foo_bar_query\" on type \"RootQueryType\". 
Did you mean to use an inline fragment on \"RootQueryType\"?", + run(document, Absinthe.Fixtures.StrictSchema, + adapter: Absinthe.Adapter.StrictLanguageConventions, + variables: variables + ) + ) + end + + test "returns an error when underscore external name used in argument" do + document = """ + query ($input: FooBarInput!) { + fooBarQuery(baz_qux: $input) @fooBarDirective(bazQux: {naiveDatetime: "2017-01-27T20:31:56"}) { + naiveDatetime + } + } + """ + + variables = %{"input" => %{"naiveDatetime" => "2017-01-27T20:31:55"}} + + assert_error_message( + "Unknown argument \"baz_qux\" on field \"fooBarQuery\" of type \"RootQueryType\".", + run(document, Absinthe.Fixtures.StrictSchema, + adapter: Absinthe.Adapter.StrictLanguageConventions, + variables: variables + ) + ) + end + end + + describe "query non-strict adapter" do + test "can use camelcase external name" do + document = """ + query ($input: FooBarInput!) { + fooBarQuery(bazQux: $input) @fooBarDirective(bazQux: {naiveDatetime: "2017-01-27T20:31:56"}) { + naiveDatetime + } + } + """ + + variables = %{"input" => %{"naiveDatetime" => "2017-01-27T20:31:55"}} + + assert_data( + %{"fooBarQuery" => %{"naiveDatetime" => "2017-01-27T20:31:55"}}, + run(document, Absinthe.Fixtures.StrictSchema, + adapter: Absinthe.Adapter.LanguageConventions, + variables: variables + ) + ) + end + + test "can use underscore external name" do + document = """ + query ($input: FooBarInput!) { + foo_bar_query(bazQux: $input) @fooBarDirective(bazQux: {naiveDatetime: "2017-01-27T20:31:56"}) { + naive_datetime + } + } + """ + + variables = %{"input" => %{"naive_datetime" => "2017-01-27T20:31:55"}} + + assert_data( + %{"foo_bar_query" => %{"naive_datetime" => "2017-01-27T20:31:55"}}, + run(document, Absinthe.Fixtures.StrictSchema, + adapter: Absinthe.Adapter.LanguageConventions, + variables: variables + ) + ) + end + + test "can use underscore external name in argument" do + document = """ + query ($input: FooBarInput!) 
{ + fooBarQuery(baz_qux: $input) @fooBarDirective(bazQux: {naiveDatetime: "2017-01-27T20:31:56"}) { + naiveDatetime + } + } + """ + + variables = %{"input" => %{"naiveDatetime" => "2017-01-27T20:31:55"}} + + assert_data( + %{"fooBarQuery" => %{"naiveDatetime" => "2017-01-27T20:31:55"}}, + run(document, Absinthe.Fixtures.StrictSchema, + adapter: Absinthe.Adapter.LanguageConventions, + variables: variables + ) + ) + end + end + + describe "mutation strict adapter" do + test "can use camelcase external name" do + document = """ + mutation ($input: FooBarInput!) { + fooBarMutation(bazQux: $input) @fooBarDirective(bazQux: {naiveDatetime: "2017-01-27T20:31:56"}) { + naiveDatetime + } + } + """ + + variables = %{"input" => %{"naiveDatetime" => "2017-01-27T20:31:55"}} + + assert_data( + %{"fooBarMutation" => %{"naiveDatetime" => "2017-01-27T20:31:55"}}, + run(document, Absinthe.Fixtures.StrictSchema, + adapter: Absinthe.Adapter.StrictLanguageConventions, + variables: variables + ) + ) + end + + test "returns an error when underscore external name used" do + document = """ + mutation ($input: FooBarInput!) { + foo_bar_mutation(bazQux: $input) @fooBarDirective(bazQux: {naiveDatetime: "2017-01-27T20:31:56"}) { + naiveDatetime + } + } + """ + + variables = %{"input" => %{"naiveDatetime" => "2017-01-27T20:31:55"}} + + assert_error_message( + "Cannot query field \"foo_bar_mutation\" on type \"RootMutationType\". Did you mean to use an inline fragment on \"RootMutationType\"?", + run(document, Absinthe.Fixtures.StrictSchema, + adapter: Absinthe.Adapter.StrictLanguageConventions, + variables: variables + ) + ) + end + + test "returns an error when underscore external name used in argument" do + document = """ + mutation ($input: FooBarInput!) 
{ + fooBarMutation(baz_qux: $input) @fooBarDirective(bazQux: {naiveDatetime: "2017-01-27T20:31:56"}) { + naiveDatetime + } + } + """ + + variables = %{"input" => %{"naiveDatetime" => "2017-01-27T20:31:55"}} + + assert_error_message( + "Unknown argument \"baz_qux\" on field \"fooBarMutation\" of type \"RootMutationType\".", + run(document, Absinthe.Fixtures.StrictSchema, + adapter: Absinthe.Adapter.StrictLanguageConventions, + variables: variables + ) + ) + end + end + + describe "mutation non-strict adapter" do + test "can use camelcase external name" do + document = """ + mutation ($input: FooBarInput!) { + fooBarMutation(bazQux: $input) @fooBarDirective(bazQux: {naiveDatetime: "2017-01-27T20:31:56"}) { + naiveDatetime + } + } + """ + + variables = %{"input" => %{"naiveDatetime" => "2017-01-27T20:31:55"}} + + assert_data( + %{"fooBarMutation" => %{"naiveDatetime" => "2017-01-27T20:31:55"}}, + run(document, Absinthe.Fixtures.StrictSchema, + adapter: Absinthe.Adapter.LanguageConventions, + variables: variables + ) + ) + end + + test "can use underscore external name" do + document = """ + mutation ($input: FooBarInput!) { + foo_bar_mutation(bazQux: $input) @fooBarDirective(bazQux: {naiveDatetime: "2017-01-27T20:31:56"}) { + naive_datetime + } + } + """ + + variables = %{"input" => %{"naive_datetime" => "2017-01-27T20:31:55"}} + + assert_data( + %{"foo_bar_mutation" => %{"naive_datetime" => "2017-01-27T20:31:55"}}, + run(document, Absinthe.Fixtures.StrictSchema, + adapter: Absinthe.Adapter.LanguageConventions, + variables: variables + ) + ) + end + + test "can use underscore external name in argument" do + document = """ + mutation ($input: FooBarInput!) 
{ + fooBarMutation(baz_qux: $input) @fooBarDirective(bazQux: {naiveDatetime: "2017-01-27T20:31:56"}) { + naiveDatetime + } + } + """ + + variables = %{"input" => %{"naiveDatetime" => "2017-01-27T20:31:55"}} + + assert_data( + %{"fooBarMutation" => %{"naiveDatetime" => "2017-01-27T20:31:55"}}, + run(document, Absinthe.Fixtures.StrictSchema, + adapter: Absinthe.Adapter.LanguageConventions, + variables: variables + ) + ) + end + end +end diff --git a/test/absinthe/subscription/pipeline_serializer_test.exs b/test/absinthe/subscription/pipeline_serializer_test.exs new file mode 100644 index 0000000000..eeafa9be85 --- /dev/null +++ b/test/absinthe/subscription/pipeline_serializer_test.exs @@ -0,0 +1,95 @@ +defmodule Absinthe.Subscription.PipelineSerializerTest do + use ExUnit.Case, async: true + + alias Absinthe.Pipeline + alias Absinthe.Subscription.PipelineSerializer + + defmodule Schema do + use Absinthe.Schema + + query do + # Query type must exist + end + end + + describe "pack/1" do + test "packs full-fledged pipeline successfully" do + pipeline = Pipeline.for_document(Schema, some: :option) + + assert {:packed, [_ | _], %{{:options, 0} => options}} = PipelineSerializer.pack(pipeline) + assert options[:some] == :option + end + + test "packs with correct mapping of unique options sets" do + pipeline = [ + {Phase1, [option1: :value1]}, + Phase2, + {Phase3, [option2: :value2]}, + {Phase4, [option1: :value1]} + ] + + assert {:packed, + [ + {Phase1, {:options, 0}}, + Phase2, + {Phase3, {:options, 1}}, + {Phase4, {:options, 0}} + ], + %{{:options, 0} => [option1: :value1], {:options, 1} => [option2: :value2]}} = + PipelineSerializer.pack(pipeline) + end + end + + describe "unpack/1" do + test "unpacks full-fledged pipeline successfully" do + packed_pipeline = + Schema + |> Pipeline.for_document(some: :option) + |> PipelineSerializer.pack() + + assert [_ | _] = PipelineSerializer.unpack(packed_pipeline) + end + + test "leaves unpacked pipeline intact" do + pipeline = 
Pipeline.for_document(Schema, some: :option) + + assert PipelineSerializer.unpack(pipeline) == pipeline + end + + test "unpacks with correct options in right spots" do + pipeline = [ + {Phase1, [option1: :value1]}, + Phase2, + {Phase3, [option2: :value2]}, + {Phase4, [option1: :value1]} + ] + + unpacked = + pipeline + |> PipelineSerializer.pack() + |> PipelineSerializer.unpack() + + assert unpacked == pipeline + end + end + + test "flattens nested pipeline in full pack/unpack cycle" do + pipeline = [ + {Phase1, [option1: :value1]}, + Phase2, + [{Phase3, [option2: :value2]}, {Phase4, [option1: :value1]}] + ] + + unpacked = + pipeline + |> PipelineSerializer.pack() + |> PipelineSerializer.unpack() + + assert unpacked == [ + {Phase1, [option1: :value1]}, + Phase2, + {Phase3, [option2: :value2]}, + {Phase4, [option1: :value1]} + ] + end +end diff --git a/test/absinthe/type/custom_test.exs b/test/absinthe/type/custom_test.exs index 85dd136805..060bea0533 100644 --- a/test/absinthe/type/custom_test.exs +++ b/test/absinthe/type/custom_test.exs @@ -56,9 +56,12 @@ defmodule Absinthe.Type.CustomTest do parse(:datetime, %Input.String{value: "2017-01-27T20:31:55+00:00"}) end - test "cannot be parsed when a non-zero UTC offset is included" do - assert :error == parse(:datetime, %Input.String{value: "2017-01-27T20:31:55-02:30"}) - assert :error == parse(:datetime, %Input.String{value: "2017-01-27T20:31:55+04:00"}) + test "can be parsed when a non-zero UTC offset is included" do + assert {:ok, @datetime} == + parse(:datetime, %Input.String{value: "2017-01-27T18:01:55-02:30"}) + + assert {:ok, @datetime} == + parse(:datetime, %Input.String{value: "2017-01-28T00:31:55+04:00"}) end test "cannot be parsed without UTC timezone marker" do @@ -166,17 +169,17 @@ defmodule Absinthe.Type.CustomTest do test "can be parsed from a numeric string" do assert {:ok, decimal} = parse(:decimal, %Input.String{value: "-3.49"}) - assert Decimal.cmp(@decimal, decimal) == :eq + assert 
Decimal.compare(@decimal, decimal) == :eq end test "can be parsed from a float" do assert {:ok, decimal} = parse(:decimal, %Input.Float{value: -3.49}) - assert Decimal.cmp(@decimal, decimal) == :eq + assert Decimal.compare(@decimal, decimal) == :eq end test "can be parsed from an integer" do assert {:ok, decimal} = parse(:decimal, %Input.Integer{value: 3}) - assert Decimal.cmp(@decimal_int, decimal) == :eq + assert Decimal.compare(@decimal_int, decimal) == :eq end test "cannot be parsed from alphanumeric string" do diff --git a/test/absinthe/type/deprecation_test.exs b/test/absinthe/type/deprecation_test.exs index 9ebedffd6b..539a455346 100644 --- a/test/absinthe/type/deprecation_test.exs +++ b/test/absinthe/type/deprecation_test.exs @@ -10,7 +10,7 @@ defmodule Absinthe.Type.DeprecationTest do # Query type must exist end - input_object :profile do + object :profile do description "A profile" field :name, :string diff --git a/test/absinthe/type/directive_test.exs b/test/absinthe/type/directive_test.exs index 798b3aba8b..14d26779e3 100644 --- a/test/absinthe/type/directive_test.exs +++ b/test/absinthe/type/directive_test.exs @@ -2,6 +2,7 @@ defmodule Absinthe.Type.DirectiveTest do use Absinthe.Case, async: true alias Absinthe.Schema + alias Absinthe.Fixtures.Directive defmodule TestSchema do use Absinthe.Schema @@ -27,7 +28,7 @@ defmodule Absinthe.Type.DirectiveTest do describe "the `@skip` directive" do @query_field """ - query Test($skipPerson: Boolean) { + query Test($skipPerson: Boolean!) { person @skip(if: $skipPerson) { name } @@ -51,16 +52,10 @@ defmodule Absinthe.Type.DirectiveTest do Absinthe.Fixtures.ContactSchema, variables: %{"skipPerson" => true} ) - - assert_result( - {:ok, - %{errors: [%{message: ~s(In argument "if": Expected type "Boolean!", found null.)}]}}, - run(@query_field, Absinthe.Fixtures.ContactSchema) - ) end @query_fragment """ - query Test($skipAge: Boolean) { + query Test($skipAge: Boolean!) 
{ person { name ...Aging @skip(if: $skipAge) @@ -80,18 +75,12 @@ defmodule Absinthe.Type.DirectiveTest do {:ok, %{data: %{"person" => %{"name" => "Bruce"}}}}, run(@query_fragment, Absinthe.Fixtures.ContactSchema, variables: %{"skipAge" => true}) ) - - assert_result( - {:ok, - %{errors: [%{message: ~s(In argument "if": Expected type "Boolean!", found null.)}]}}, - run(@query_fragment, Absinthe.Fixtures.ContactSchema) - ) end end describe "the `@include` directive" do @query_field """ - query Test($includePerson: Boolean) { + query Test($includePerson: Boolean!) { person @include(if: $includePerson) { name } @@ -111,23 +100,10 @@ defmodule Absinthe.Type.DirectiveTest do {:ok, %{data: %{}}}, run(@query_field, Absinthe.Fixtures.ContactSchema, variables: %{"includePerson" => false}) ) - - assert_result( - {:ok, - %{ - errors: [ - %{ - locations: [%{column: 0, line: 2}], - message: ~s(In argument "if": Expected type "Boolean!", found null.) - } - ] - }}, - run(@query_field, Absinthe.Fixtures.ContactSchema) - ) end @query_fragment """ - query Test($includeAge: Boolean) { + query Test($includeAge: Boolean!) 
{ person { name ...Aging @include(if: $includeAge) @@ -224,4 +200,61 @@ defmodule Absinthe.Type.DirectiveTest do Absinthe.run(@query, Absinthe.Fixtures.ContactSchema) end end + + describe "directive keyword description evaluation" do + Absinthe.Fixtures.FunctionEvaluationHelpers.function_evaluation_test_params() + |> Enum.each(fn %{ + test_label: test_label, + expected_value: expected_value + } -> + test "for #{test_label} (evaluates description to '#{expected_value}')" do + type = Directive.TestSchemaDescriptionKeyword.__absinthe_directive__(unquote(test_label)) + assert type.description == unquote(expected_value) + end + end) + end + + describe "directive description attribute evaluation" do + Absinthe.Fixtures.FunctionEvaluationHelpers.function_evaluation_test_params() + |> Absinthe.Fixtures.FunctionEvaluationHelpers.filter_test_params_for_description_attribute() + |> Enum.each(fn %{ + test_label: test_label, + expected_value: expected_value + } -> + test "for #{test_label} (evaluates description to '#{expected_value}')" do + type = + Directive.TestSchemaDescriptionAttribute.__absinthe_directive__(unquote(test_label)) + + assert type.description == unquote(expected_value) + end + end) + end + + describe "directive description macro evaluation" do + Absinthe.Fixtures.FunctionEvaluationHelpers.function_evaluation_test_params() + |> Enum.each(fn %{ + test_label: test_label, + expected_value: expected_value + } -> + test "for #{test_label} (evaluates description to '#{expected_value}')" do + type = Directive.TestSchemaDescriptionMacro.__absinthe_directive__(unquote(test_label)) + assert type.description == unquote(expected_value) + end + end) + end + + describe "directive arg keyword description evaluation" do + Absinthe.Fixtures.FunctionEvaluationHelpers.function_evaluation_test_params() + |> Enum.each(fn %{ + test_label: test_label, + expected_value: expected_value + } -> + test "for #{test_label} (evaluates description to '#{expected_value}')" do + type = + 
Directive.TestSchemaArgDescriptionKeyword.__absinthe_directive__(unquote(test_label)) + + assert type.args[:arg_example].description == unquote(expected_value) + end + end) + end end diff --git a/test/absinthe/type/enum_test.exs b/test/absinthe/type/enum_test.exs index ed26a1d4e9..6b1109446b 100644 --- a/test/absinthe/type/enum_test.exs +++ b/test/absinthe/type/enum_test.exs @@ -2,6 +2,7 @@ defmodule Absinthe.Type.EnumTest do use Absinthe.Case, async: true alias Absinthe.Type + alias Absinthe.Fixtures.Enums defmodule TestSchema do use Absinthe.Schema @@ -42,6 +43,28 @@ defmodule Absinthe.Type.EnumTest do enum :color_channel3, values: [:red, :green, :blue, :alpha], description: "The selected color channel" + + enum :negative_value do + value :positive_one, as: 1 + value :zero, as: 0 + value :negative_one, as: -1 + end + + enum :dynamic_color do + value :red, as: color(:red) + value :green, as: color(:green) + value :blue, as: color(:blue) + end + + enum :dynamic_color_list do + values color_list() + end + + def color_list, do: [:purple, :orange, :yellow] + + def color(:red), do: {255, 0, 0} + def color(:green), do: {0, 255, 0} + def color(:blue), do: {0, 0, 255} end describe "enums" do @@ -64,5 +87,76 @@ defmodule Absinthe.Type.EnumTest do assert %Type.Enum{} = type assert %Type.Enum.Value{name: "RED", value: :red, description: nil} = type.values[:red] end + + test "value can be defined dynamically!" 
do + type = TestSchema.__absinthe_type__(:dynamic_color) + + assert %Type.Enum.Value{name: "RED", value: {255, 0, 0}} = type.values[:red] + assert %Type.Enum.Value{name: "GREEN", value: {0, 255, 0}} = type.values[:green] + assert %Type.Enum.Value{name: "BLUE", value: {0, 0, 255}} = type.values[:blue] + end + + test "values can be defined dynamically too" do + type = TestSchema.__absinthe_type__(:dynamic_color_list) + + assert %Type.Enum.Value{name: "YELLOW"} = type.values[:yellow] + assert %Type.Enum.Value{name: "PURPLE"} = type.values[:purple] + assert %Type.Enum.Value{name: "ORANGE"} = type.values[:orange] + end + end + + describe "enum value description evaluation" do + Absinthe.Fixtures.FunctionEvaluationHelpers.function_evaluation_test_params() + |> Enum.each(fn %{ + test_label: test_label, + expected_value: expected_value + } -> + test "for #{test_label} (evaluates description to '#{expected_value}')" do + type = + Enums.TestSchemaValueDescriptionKeyword.__absinthe_type__(:description_keyword_argument) + + assert type.values[unquote(test_label)].description == unquote(expected_value) + end + end) + end + + describe "enum description keyword evaluation" do + Absinthe.Fixtures.FunctionEvaluationHelpers.function_evaluation_test_params() + |> Enum.each(fn %{ + test_label: test_label, + expected_value: expected_value + } -> + test "for #{test_label} (evaluates description to '#{expected_value}')" do + type = Enums.TestSchemaDescriptionKeyword.__absinthe_type__(unquote(test_label)) + assert type.description == unquote(expected_value) + end + end) + end + + describe "enum description attribute evaluation" do + Absinthe.Fixtures.FunctionEvaluationHelpers.function_evaluation_test_params() + |> Absinthe.Fixtures.FunctionEvaluationHelpers.filter_test_params_for_description_attribute() + |> Enum.each(fn %{ + test_label: test_label, + expected_value: expected_value + } -> + test "for #{test_label} (evaluates description to '#{expected_value}')" do + type = 
Enums.TestSchemaDescriptionAttribute.__absinthe_type__(unquote(test_label)) + assert type.description == unquote(expected_value) + end + end) + end + + describe "enum description macro evaluation" do + Absinthe.Fixtures.FunctionEvaluationHelpers.function_evaluation_test_params() + |> Enum.each(fn %{ + test_label: test_label, + expected_value: expected_value + } -> + test "for #{test_label} (evaluates description to '#{expected_value}')" do + type = Enums.TestSchemaDescriptionMacro.__absinthe_type__(unquote(test_label)) + assert type.description == unquote(expected_value) + end + end) end end diff --git a/test/absinthe/type/import_types_test.exs b/test/absinthe/type/import_types_test.exs index 43da58c1d6..491dd59f52 100644 --- a/test/absinthe/type/import_types_test.exs +++ b/test/absinthe/type/import_types_test.exs @@ -27,5 +27,67 @@ defmodule Absinthe.Type.ImportTypesTest do test "works with an alias, {} and scoped reference" do assert Absinthe.Schema.lookup_type(ImportTypes.Schema, :avatar) end + + test "works with __MODULE__ and {}" do + assert Absinthe.Schema.lookup_type(ImportTypes.Schema, :flag) + assert Absinthe.Schema.lookup_type(ImportTypes.Schema, :value_type_enum) + + assert Absinthe.Schema.lookup_type(ImportTypes.SelfContainedSchema, :decline_reasons) + assert Absinthe.Schema.lookup_type(ImportTypes.SelfContainedSchema, :credit_card) + assert Absinthe.Schema.lookup_type(ImportTypes.SelfContainedSchema, :credit_card_type) + assert Absinthe.Schema.lookup_type(ImportTypes.SelfContainedSchema, :category) + assert Absinthe.Schema.lookup_type(ImportTypes.SelfContainedSchema, :role_enum) + end + end + + describe "import_types with description function evaluation (in input_object field description)" do + # The module attribute iteration of this test is related to the test below. 
+ # See "__absinthe_blueprint__ is callable at runtime even if there is a module attribute" for more + # information + Absinthe.Fixtures.FunctionEvaluationHelpers.function_evaluation_test_params() + |> Enum.each(fn %{ + test_label: test_label, + expected_value: expected_value + } -> + test "for #{test_label} (evaluates description to '#{expected_value}')" do + type = ImportTypes.SchemaWithFunctionEvaluation.__absinthe_type__(:example_input_object) + + assert type.fields[unquote(test_label)].description == unquote(expected_value) + end + end) + + # From inside `defp expand_ast` in `Absinthe.Schema.Notation`: + # + # > We don't want to expand `@bla` into `Module.get_attribute(module, @bla)` because this + # > function call will fail if the module is already compiled. Remember that the ast gets put + # > into a generated `__absinthe_blueprint__` function which is called at "__after_compile__" + # > time. This will be after a module has been compiled if there are multiple modules in the + # > schema (in the case of an `import_types`). + # + # This test checks that __absinthe_blueprint__ runs and doesn't raise an error saying + # "Module.get_attribute" cannot be called because the module is already compiled". This error + # happens because the `@module_attribute` gets expanded by `expand_ast` into + # `Module.get_attribute(Absinthe.Fixtures.ImportTypes.SchemaWithModuleAttribute, + # :module_attribute, )`. 
+ # + # We ensure __absinthe_blueprint__ is runnable at runtime because in projects where the schema + # is split into multiple modules, one of the modules may already have completely finished + # compiling, dumping the Module attribute data (they are baked in to the code at compile time) + # which means that the `Module.get_attribute` call will raise the error mentioned above + # + # Above, test "works with module attribute used in imported module" also checks this same + # functionality + # + test "__absinthe_blueprint__ is callable at runtime even if there is a module attribute" do + # Sanity check. Shouldn't ever really fail (unless something is very wrong), but ensures that + # the assertion makes sense + {:module, ImportTypes.SchemaWithFunctionEvaluation} = + Code.ensure_compiled(ImportTypes.SchemaWithFunctionEvaluation) + + assert match?( + %Absinthe.Blueprint{}, + ImportTypes.SchemaWithFunctionEvaluation.__absinthe_blueprint__() + ) + end end end diff --git a/test/absinthe/type/input_object_test.exs b/test/absinthe/type/input_object_test.exs index 7fef788911..fb96cbfbac 100644 --- a/test/absinthe/type/input_object_test.exs +++ b/test/absinthe/type/input_object_test.exs @@ -1,6 +1,10 @@ defmodule Absinthe.Type.InputObjectTest do use Absinthe.Case, async: true + alias Absinthe.Fixtures.InputObject + + # Note: The arg description evaluation tests are in test/absinthe/type/query_test.exs + defmodule Schema do use Absinthe.Schema @@ -20,7 +24,7 @@ defmodule Absinthe.Type.InputObjectTest do assert %Absinthe.Type.InputObject{name: "Profile", description: "A profile"} = Schema.__absinthe_type__(:profile) - assert %{profile: "Profile"} = Schema.__absinthe_types__() + assert %{profile: "Profile"} = Schema.__absinthe_types__(:all) end test "can define fields" do @@ -28,4 +32,109 @@ defmodule Absinthe.Type.InputObjectTest do assert %Absinthe.Type.Field{name: "name", type: :string} = obj.fields.name end end + + describe "input object keyword description evaluation" do + 
Absinthe.Fixtures.FunctionEvaluationHelpers.function_evaluation_test_params() + |> Enum.each(fn %{ + test_label: test_label, + expected_value: expected_value + } -> + test "for #{test_label} (evaluates description to '#{expected_value}')" do + type = InputObject.TestSchemaDescriptionKeyword.__absinthe_type__(unquote(test_label)) + assert type.description == unquote(expected_value) + end + end) + end + + describe "input_object description attribute evaluation" do + Absinthe.Fixtures.FunctionEvaluationHelpers.function_evaluation_test_params() + |> Absinthe.Fixtures.FunctionEvaluationHelpers.filter_test_params_for_description_attribute() + |> Enum.each(fn %{ + test_label: test_label, + expected_value: expected_value + } -> + test "for #{test_label} (evaluates description to '#{expected_value}')" do + type = InputObject.TestSchemaDescriptionAttribute.__absinthe_type__(unquote(test_label)) + assert type.description == unquote(expected_value) + end + end) + end + + describe "input_object description macro evaluation" do + Absinthe.Fixtures.FunctionEvaluationHelpers.function_evaluation_test_params() + |> Enum.each(fn %{ + test_label: test_label, + expected_value: expected_value + } -> + test "for #{test_label} (evaluates description to '#{expected_value}')" do + type = InputObject.TestSchemaDescriptionMacro.__absinthe_type__(unquote(test_label)) + assert type.description == unquote(expected_value) + end + end) + end + + describe "input object field keyword description evaluation" do + Absinthe.Fixtures.FunctionEvaluationHelpers.function_evaluation_test_params() + |> Enum.each(fn %{ + test_label: test_label, + expected_value: expected_value + } -> + test "for #{test_label} (evaluates description to '#{expected_value}')" do + type = + InputObject.TestSchemaFieldsAndArgsDescription.__absinthe_type__( + :description_keyword_argument + ) + + assert type.fields[unquote(test_label)].description == unquote(expected_value) + end + end) + end + + describe "input object field 
attribute description evaluation" do + Absinthe.Fixtures.FunctionEvaluationHelpers.function_evaluation_test_params() + |> Absinthe.Fixtures.FunctionEvaluationHelpers.filter_test_params_for_description_attribute() + |> Enum.each(fn %{ + test_label: test_label, + expected_value: expected_value + } -> + test "for #{test_label} (evaluates description to '#{expected_value}')" do + type = + InputObject.TestSchemaFieldsAndArgsDescription.__absinthe_type__(:description_attribute) + + assert type.fields[unquote(test_label)].description == unquote(expected_value) + end + end) + end + + describe "input object field macro description evaluation" do + Absinthe.Fixtures.FunctionEvaluationHelpers.function_evaluation_test_params() + |> Enum.each(fn %{ + test_label: test_label, + expected_value: expected_value + } -> + test "for #{test_label} (evaluates description to '#{expected_value}')" do + type = + InputObject.TestSchemaFieldsAndArgsDescription.__absinthe_type__( + :field_description_macro + ) + + assert type.fields[unquote(test_label)].description == unquote(expected_value) + end + end) + end + + describe "input object field default_value evaluation" do + Absinthe.Fixtures.FunctionEvaluationHelpers.function_evaluation_test_params() + |> Enum.each(fn %{ + test_label: test_label, + expected_value: expected_value + } -> + test "for #{test_label} (evaluates default_value to '#{expected_value}')" do + type = + InputObject.TestSchemaFieldsAndArgsDescription.__absinthe_type__(:field_default_value) + + assert type.fields[unquote(test_label)].default_value == unquote(expected_value) + end + end) + end end diff --git a/test/absinthe/type/interface_test.exs b/test/absinthe/type/interface_test.exs index c78800437d..59cd4f9ffa 100644 --- a/test/absinthe/type/interface_test.exs +++ b/test/absinthe/type/interface_test.exs @@ -1,8 +1,6 @@ defmodule Absinthe.Type.InterfaceTest do use Absinthe.Case, async: true - alias Absinthe.Schema.Rule - defmodule Schema do use Absinthe.Schema @@ -63,7 
+61,7 @@ defmodule Absinthe.Type.InterfaceTest do test "can be defined" do obj = Schema.__absinthe_type__(:named) assert %Absinthe.Type.Interface{name: "Named", description: "An interface"} = obj - assert obj.resolve_type + assert Absinthe.Type.function(obj, :resolve_type) end test "captures the relationships in the schema" do @@ -130,12 +128,23 @@ defmodule Absinthe.Type.InterfaceTest do end describe "when it doesn't define those fields" do + alias Absinthe.Phase.Schema.Validation + test "reports schema errors" do assert_schema_error("bad_interface_schema", [ - %{rule: Rule.ObjectMustImplementInterfaces, data: %{object: "Foo", interface: "Aged"}}, - %{rule: Rule.ObjectMustImplementInterfaces, data: %{object: "Foo", interface: "Named"}}, - %{rule: Rule.ObjectInterfacesMustBeValid, data: %{object: "Quux", interface: "Foo"}}, - %{rule: Rule.InterfacesMustResolveTypes, data: "Named"} + %{ + phase: Validation.ObjectMustImplementInterfaces, + extra: %{object: :foo, interface: :aged, fields: [:age]} + }, + %{ + phase: Validation.ObjectMustImplementInterfaces, + extra: %{object: :foo, interface: :named, fields: [:name]} + }, + %{ + phase: Validation.ObjectInterfacesMustBeValid, + extra: %{object: :quux, interface: :foo, implemented_by: nil} + }, + %{phase: Validation.InterfacesMustResolveTypes, extra: :named} ]) end end @@ -262,4 +271,49 @@ defmodule Absinthe.Type.InterfaceTest do test "works even when resolve_type returns nil" do assert_data(%{"namedThing" => %{}}, run(@graphql, Schema)) end + + defmodule NestedInterfacesSchema do + use Absinthe.Schema + + interface :root do + field :root, :string + end + + interface :intermediate do + field :root, :string + field :intermediate, :string + + interface :root + end + + # Name starts with Z to order it to the back of the list of types + object :z_child do + field :root, :string + field :intermediate, :string + field :child, :string + + interface :root + interface :intermediate + + is_type_of fn _entry -> true end + end + + 
query do + field :root, :root do + resolve fn _, _, _ -> {:ok, %{}} end + end + end + end + + @graphql """ + query GetRoot { + root { + __typename + } + } + """ + + test "resolved type of nested interfaces" do + assert_data(%{"root" => %{"__typename" => "ZChild"}}, run(@graphql, NestedInterfacesSchema)) + end end diff --git a/test/absinthe/type/mutation_test.exs b/test/absinthe/type/mutation_test.exs new file mode 100644 index 0000000000..50e55918af --- /dev/null +++ b/test/absinthe/type/mutation_test.exs @@ -0,0 +1,72 @@ +defmodule Absinthe.Type.MutationTest do + use Absinthe.Case, async: true + + defmodule TestSchema do + use Absinthe.Schema + @module_attribute "goodbye" + + defmodule NestedModule do + def nested_function(arg1) do + arg1 + end + end + + def test_function(arg1) do + arg1 + end + + query do + end + + mutation do + field :normal_string, :string do + arg :arg_example, :string, description: "string" + end + + field :local_function_call, :string do + arg :arg_example, :string, description: test_function("red") + end + + field :function_call_using_absolute_path_to_current_module, :string do + arg :arg_example, :string, + description: Absinthe.Type.MutationTest.TestSchema.test_function("red") + end + + field :standard_library_function, :string do + arg :arg_example, :string, description: String.replace("red", "e", "a") + end + + field :function_in_nested_module, :string do + arg :arg_example, :string, description: NestedModule.nested_function("hello") + end + + field :external_module_function_call, :string do + arg :arg_example, :string, + description: Absinthe.Fixtures.FunctionEvaluationHelpers.external_function("hello") + end + + field :module_attribute_string_concat, :string do + arg :arg_example, :string, description: "hello " <> @module_attribute + end + + field :interpolation_of_module_attribute, :string do + arg :arg_example, :string, description: "hello #{@module_attribute}" + end + end + end + + describe "mutation field arg keyword description 
evaluation" do + Absinthe.Fixtures.FunctionEvaluationHelpers.function_evaluation_test_params() + |> Enum.each(fn %{ + test_label: test_label, + expected_value: expected_value + } -> + test "for #{test_label} (evaluates description to '#{expected_value}')" do + type = TestSchema.__absinthe_type__("RootMutationType") + + assert type.fields[unquote(test_label)].args.arg_example.description == + unquote(expected_value) + end + end) + end +end diff --git a/test/absinthe/type/object_test.exs b/test/absinthe/type/object_test.exs index 484fd43f0d..ba92ee510a 100644 --- a/test/absinthe/type/object_test.exs +++ b/test/absinthe/type/object_test.exs @@ -1,6 +1,8 @@ defmodule Absinthe.Type.ObjectTest do use Absinthe.Case, async: true + alias Absinthe.Fixtures.Object + defmodule Schema do use Absinthe.Schema @@ -26,7 +28,7 @@ defmodule Absinthe.Type.ObjectTest do assert %Absinthe.Type.Object{name: "Person", description: "A person"} = Schema.__absinthe_type__(:person) - assert %{person: "Person"} = Schema.__absinthe_types__() + assert %{person: "Person"} = Schema.__absinthe_types__(:all) end test "can define fields" do @@ -40,4 +42,91 @@ defmodule Absinthe.Type.ObjectTest do assert %Absinthe.Type.Argument{name: "height", type: :integer} = field.args.height end end + + describe "object keyword description evaluation" do + Absinthe.Fixtures.FunctionEvaluationHelpers.function_evaluation_test_params() + |> Enum.each(fn %{ + test_label: test_label, + expected_value: expected_value + } -> + test "for #{test_label} (evaluates description to '#{expected_value}')" do + type = Object.TestSchemaDescriptionKeyword.__absinthe_type__(unquote(test_label)) + assert type.description == unquote(expected_value) + end + end) + end + + describe "input_object description attribute evaluation" do + Absinthe.Fixtures.FunctionEvaluationHelpers.function_evaluation_test_params() + |> Absinthe.Fixtures.FunctionEvaluationHelpers.filter_test_params_for_description_attribute() + |> Enum.each(fn %{ + 
test_label: test_label, + expected_value: expected_value + } -> + test "for #{test_label} (evaluates description to '#{expected_value}')" do + type = Object.TestSchemaDescriptionAttribute.__absinthe_type__(unquote(test_label)) + assert type.description == unquote(expected_value) + end + end) + end + + describe "input_object description macro evaluation" do + Absinthe.Fixtures.FunctionEvaluationHelpers.function_evaluation_test_params() + |> Enum.each(fn %{ + test_label: test_label, + expected_value: expected_value + } -> + test "for #{test_label} (evaluates description to '#{expected_value}')" do + type = Object.TestSchemaDescriptionMacro.__absinthe_type__(unquote(test_label)) + assert type.description == unquote(expected_value) + end + end) + end + + describe "object field keyword description evaluation" do + Absinthe.Fixtures.FunctionEvaluationHelpers.function_evaluation_test_params() + |> Enum.each(fn %{ + test_label: test_label, + expected_value: expected_value + } -> + test "for #{test_label} (evaluates description to '#{expected_value}')" do + type = + Object.TestSchemaFieldsAndArgsDescription.__absinthe_type__( + :description_keyword_argument + ) + + assert type.fields[unquote(test_label)].description == unquote(expected_value) + end + end) + end + + describe "object field attribute description evaluation" do + Absinthe.Fixtures.FunctionEvaluationHelpers.function_evaluation_test_params() + |> Absinthe.Fixtures.FunctionEvaluationHelpers.filter_test_params_for_description_attribute() + |> Enum.each(fn %{ + test_label: test_label, + expected_value: expected_value + } -> + test "for #{test_label} (evaluates description to '#{expected_value}')" do + type = Object.TestSchemaFieldsAndArgsDescription.__absinthe_type__(:description_attribute) + + assert type.fields[unquote(test_label)].description == unquote(expected_value) + end + end) + end + + describe "object field macro description evaluation" do + 
Absinthe.Fixtures.FunctionEvaluationHelpers.function_evaluation_test_params() + |> Enum.each(fn %{ + test_label: test_label, + expected_value: expected_value + } -> + test "for #{test_label} (evaluates description to '#{expected_value}')" do + type = + Object.TestSchemaFieldsAndArgsDescription.__absinthe_type__(:field_description_macro) + + assert type.fields[unquote(test_label)].description == unquote(expected_value) + end + end) + end end diff --git a/test/absinthe/type/query_test.exs b/test/absinthe/type/query_test.exs new file mode 100644 index 0000000000..5267791bae --- /dev/null +++ b/test/absinthe/type/query_test.exs @@ -0,0 +1,52 @@ +defmodule Absinthe.Type.QueryTest do + use Absinthe.Case, async: true + + alias Absinthe.Fixtures.Query + + describe "query field arg keyword description evaluation" do + Absinthe.Fixtures.FunctionEvaluationHelpers.function_evaluation_test_params() + |> Enum.each(fn %{ + test_label: test_label, + expected_value: expected_value + } -> + test "for #{test_label} (evaluates description to '#{expected_value}')" do + type = Query.TestSchemaFieldArgDescription.__absinthe_type__("RootQueryType") + + assert type.fields[unquote(test_label)].args.arg_example.description == + unquote(expected_value) + end + end) + end + + describe "query field arg default_value evaluation" do + Absinthe.Fixtures.FunctionEvaluationHelpers.function_evaluation_test_params() + |> Enum.each(fn %{ + test_label: test_label, + expected_value: expected_value + } -> + test "for #{test_label} (evaluates default_value to '#{expected_value}')" do + type = Query.TestSchemaFieldArgDefaultValue.__absinthe_type__("RootQueryType") + field = type.fields[unquote(test_label)] + + assert field.args.arg_example.default_value == unquote(expected_value) + end + end) + end + + describe "query field arg default_value evaluation with import_fields" do + Absinthe.Fixtures.FunctionEvaluationHelpers.function_evaluation_test_params() + |> Enum.each(fn %{ + test_label: test_label, + 
expected_value: expected_value + } -> + test "for #{test_label} (evaluates default_value to '#{expected_value}')" do + type = + Query.TestSchemaFieldArgDefaultValueWithImportFields.__absinthe_type__("RootQueryType") + + field = type.fields[unquote(test_label)] + + assert field.args.arg_example.default_value == unquote(expected_value) + end + end) + end +end diff --git a/test/absinthe/type/union_test.exs b/test/absinthe/type/union_test.exs index 679900a8e2..074bef85d9 100644 --- a/test/absinthe/type/union_test.exs +++ b/test/absinthe/type/union_test.exs @@ -2,6 +2,7 @@ defmodule Absinthe.Type.UnionTest do use Absinthe.Case, async: true alias Absinthe.Type + alias Absinthe.Fixtures.Union defmodule TestSchema do use Absinthe.Schema @@ -59,10 +60,10 @@ defmodule Absinthe.Type.UnionTest do assert %Absinthe.Type.Union{ name: "SearchResult", description: "A search result", - types: [:person, :business] + types: [:business, :person] } = obj - assert obj.resolve_type + assert Absinthe.Type.function(obj, :resolve_type) end test "can resolve the type of an object using resolve_type" do @@ -82,4 +83,45 @@ defmodule Absinthe.Type.UnionTest do Type.Union.resolve_type(obj, %{name: "asdf"}, %{schema: TestSchema}) end end + + describe "union keyword description evaluation" do + Absinthe.Fixtures.FunctionEvaluationHelpers.function_evaluation_test_params() + |> Enum.each(fn %{ + test_label: test_label, + expected_value: expected_value + } -> + test "for #{test_label} (evaluates description to '#{expected_value}')" do + type = Union.TestSchemaDescriptionKeyword.__absinthe_type__(unquote(test_label)) + assert type.description == unquote(expected_value) + end + end) + end + + describe "union description attribute evaluation" do + Absinthe.Fixtures.FunctionEvaluationHelpers.function_evaluation_test_params() + |> Absinthe.Fixtures.FunctionEvaluationHelpers.filter_test_params_for_description_attribute() + |> Enum.each(fn %{ + test_label: test_label, + expected_value: expected_value + } -> 
+ test "for #{test_label} (evaluates description to '#{expected_value}')" do + type = Union.TestSchemaDescriptionAttribute.__absinthe_type__(unquote(test_label)) + + assert type.description == unquote(expected_value) + end + end) + end + + describe "union description macro evaluation" do + Absinthe.Fixtures.FunctionEvaluationHelpers.function_evaluation_test_params() + |> Enum.each(fn %{ + test_label: test_label, + expected_value: expected_value + } -> + test "for #{test_label} (evaluates description to '#{expected_value}')" do + type = Union.TestSchemaDescriptionMacro.__absinthe_type__(unquote(test_label)) + assert type.description == unquote(expected_value) + end + end) + end end diff --git a/test/absinthe/union_fragment_test.exs b/test/absinthe/union_fragment_test.exs index 84347ded6d..d7884232f5 100644 --- a/test/absinthe/union_fragment_test.exs +++ b/test/absinthe/union_fragment_test.exs @@ -1,14 +1,43 @@ defmodule Absinthe.UnionFragmentTest do use Absinthe.Case, async: true + @root %{ + menu_items: [ + %{type: :menu_item, id: 1, added_by: %{username: "Ben", id: 1}, name: "Coffee"} + ], + categories: [ + %{type: :category, id: 1, added_by: %{username: "Ben", id: 1}, name: "Drinks"} + ] + } + defmodule Schema do use Absinthe.Schema + object :menu_item do + field :id, :id + field :name, :string + field :added_by, :user + end + + object :category do + field :id, :id + field :name, :string + field :added_by, :user + end + + union :search_result do + types [:menu_item, :category] + + resolve_type fn %{type: type}, _ -> type end + end + object :user do field :name, :string do resolve fn user, _, _ -> {:ok, user.username} end end + field :id, :id + field :todos, list_of(:todo) interface :named end @@ -45,6 +74,12 @@ defmodule Absinthe.UnionFragmentTest do end query do + field :search, list_of(:search_result) do + resolve fn root, _, _ -> + {:ok, root.menu_items ++ root.categories} + end + end + field :viewer, :viewer do resolve fn _, _ -> {:ok, @@ -62,6 +97,31 @@ 
defmodule Absinthe.UnionFragmentTest do end end + test "different sub types with the same internal field names don't cause conflict" do + doc = """ + { + search { + ... on MenuItem { + addedBy { name } + } + ... on Category { + addedBy { name id } + } + } + } + + """ + + expected = %{ + "search" => [ + %{"addedBy" => %{"name" => "Ben"}}, + %{"addedBy" => %{"name" => "Ben", "id" => "1"}} + ] + } + + assert {:ok, %{data: expected}} == Absinthe.run(doc, Schema, root_value: @root) + end + test "it queries a heterogeneous list properly" do doc = """ { @@ -91,7 +151,7 @@ defmodule Absinthe.UnionFragmentTest do } } - assert {:ok, %{data: expected}} == Absinthe.run(doc, Schema) + assert {:ok, %{data: expected}} == Absinthe.run(doc, Schema, root_value: @root) end test "it queries an interface with the concrete type's field resolvers" do @@ -110,7 +170,7 @@ defmodule Absinthe.UnionFragmentTest do """ expected = %{"viewer" => %{"me" => %{"__typename" => "User", "name" => "baz"}}} - assert {:ok, %{data: expected}} == Absinthe.run(doc, Schema) + assert {:ok, %{data: expected}} == Absinthe.run(doc, Schema, root_value: @root) end test "it queries an interface implemented by a union type" do @@ -138,7 +198,7 @@ defmodule Absinthe.UnionFragmentTest do } } - assert {:ok, %{data: expected}} == Absinthe.run(doc, Schema) + assert {:ok, %{data: expected}} == Absinthe.run(doc, Schema, root_value: @root) end test "it queries an interface on an unrelated interface" do @@ -163,6 +223,6 @@ defmodule Absinthe.UnionFragmentTest do } } - assert {:ok, %{data: expected}} == Absinthe.run(doc, Schema) + assert {:ok, %{data: expected}} == Absinthe.run(doc, Schema, root_value: @root) end end diff --git a/test/absinthe/utils_test.exs b/test/absinthe/utils_test.exs index 1e5318dcc4..34fb9ec374 100644 --- a/test/absinthe/utils_test.exs +++ b/test/absinthe/utils_test.exs @@ -25,4 +25,44 @@ defmodule Absinthe.UtilsTest do assert "__FooBar" == Utils.camelize(@preunderscored_snake) end end + + defmodule 
Schema do + use Absinthe.Schema.Notation + + object :blah do + field :foo, :string + end + + object :bar do + field :blah, :string + end + end + + test "prewalking and postwalking result in the same number" do + blueprint = Schema.__absinthe_blueprint__() + + count = fn node, acc -> + send(self(), :tick) + {node, acc + 1} + end + + {_, prewalk_count} = Absinthe.Blueprint.prewalk(blueprint, 0, count) + prewalk_exec_count = count_ticks() + + {_, postwalk_count} = Absinthe.Blueprint.postwalk(blueprint, 0, count) + postwalk_exec_count = count_ticks() + + assert prewalk_count == prewalk_exec_count + assert postwalk_count == postwalk_exec_count + assert prewalk_count == postwalk_count + end + + defp count_ticks(count \\ 0) do + receive do + :tick -> count_ticks(count + 1) + after + 0 -> + count + end + end end diff --git a/test/mix/tasks/absinthe.schema.json_test.exs b/test/mix/tasks/absinthe.schema.json_test.exs new file mode 100644 index 0000000000..0e067e52a2 --- /dev/null +++ b/test/mix/tasks/absinthe.schema.json_test.exs @@ -0,0 +1,76 @@ +defmodule Mix.Tasks.Absinthe.Schema.JsonTest do + use Absinthe.Case, async: true + + alias Mix.Tasks.Absinthe.Schema.Json, as: Task + + defmodule TestSchema do + use Absinthe.Schema + + query do + field :item, :item + end + + object :item do + description "A Basic Type" + field :id, :id + field :name, :string + end + end + + defmodule TestEncoder do + def encode!(_map, opts) do + pretty_flag = Keyword.get(opts, :pretty, false) + pretty_string = if pretty_flag, do: "pretty", else: "ugly" + "test-encoder-#{pretty_string}" + end + end + + @test_schema "Mix.Tasks.Absinthe.Schema.JsonTest.TestSchema" + @test_encoder "Mix.Tasks.Absinthe.Schema.JsonTest.TestEncoder" + + describe "absinthe.schema.json" do + test "parses options" do + argv = ["output.json", "--schema", @test_schema, "--json-codec", @test_encoder, "--pretty"] + + opts = Task.parse_options(argv) + + assert opts.filename == "output.json" + assert opts.json_codec == TestEncoder 
+ assert opts.pretty == true + assert opts.schema == TestSchema + end + + test "provides default options" do + argv = ["--schema", @test_schema] + + opts = Task.parse_options(argv) + + assert opts.filename == "./schema.json" + assert opts.json_codec == Jason + assert opts.pretty == false + assert opts.schema == TestSchema + end + + test "fails if no schema arg is provided" do + argv = [] + catch_error(Task.parse_options(argv)) + end + + test "fails if codec hasn't been loaded" do + argv = ["--schema", @test_schema, "--json-codec", "UnloadedCodec"] + opts = Task.parse_options(argv) + catch_error(Task.generate_schema(opts)) + end + + test "can use a custom codec" do + argv = ["--schema", @test_schema, "--json-codec", @test_encoder, "--pretty"] + + opts = Task.parse_options(argv) + {:ok, pretty_content} = Task.generate_schema(opts) + {:ok, ugly_content} = Task.generate_schema(%{opts | pretty: false}) + + assert pretty_content == "test-encoder-pretty" + assert ugly_content == "test-encoder-ugly" + end + end +end diff --git a/test/mix/tasks/absinthe.schema.sdl_test.exs b/test/mix/tasks/absinthe.schema.sdl_test.exs new file mode 100644 index 0000000000..cc68cfdfc4 --- /dev/null +++ b/test/mix/tasks/absinthe.schema.sdl_test.exs @@ -0,0 +1,155 @@ +defmodule Mix.Tasks.Absinthe.Schema.SdlTest do + use Absinthe.Case, async: true + + alias Mix.Tasks.Absinthe.Schema.Sdl, as: Task + + defmodule TestSchema do + use Absinthe.Schema + + """ + schema { + query: Query + } + + type Query { + helloWorld(name: String!): String + interfaceField: Being + } + + interface Being { + name: String + } + + type Human implements Being { + name: String + } + + type Robot implements Being { + name: String + } + """ + |> import_sdl + + def hydrate(%Absinthe.Blueprint.Schema.InterfaceTypeDefinition{}, _) do + {:resolve_type, &__MODULE__.resolve_type/1} + end + + def hydrate(_node, _ancestors), do: [] + + def resolve_type(_), do: false + end + + @test_schema 
"Mix.Tasks.Absinthe.Schema.SdlTest.TestSchema" + + defmodule TestModField do + use Absinthe.Schema.Notation + + object :test_mod_helper do + description "Simple Helper Object used to define blueprint fields" + + field :mod_field, :string do + description "extra field added by schema modification" + end + end + end + + defmodule TestModifier do + alias Absinthe.{Phase, Pipeline, Blueprint} + + # Add this module to the pipeline of phases + # to run on the schema + def pipeline(pipeline) do + Pipeline.insert_after(pipeline, Phase.Schema.TypeImports, __MODULE__) + end + + # Here's the blueprint of the schema, let's do whatever we want with it. + def run(blueprint = %Blueprint{}, _) do + test_mod_types = Blueprint.types_by_name(TestModField) + test_mod_fields = test_mod_types["TestModHelper"] + + mod_field = Blueprint.find_field(test_mod_fields, "mod_field") + + blueprint = Blueprint.add_field(blueprint, "Mod", mod_field) + + {:ok, blueprint} + end + end + + defmodule TestSchemaWithMods do + use Absinthe.Schema + + @pipeline_modifier TestModifier + + query do + field :hello_world, :mod do + arg :name, non_null(:string) + end + + field :interface_field, :being + end + + object :mod do + end + + interface :being do + field :name, :string + resolve_type(fn obj, _ -> obj.type end) + end + + object :human do + interface :being + field :name, :string + end + + object :robot do + interface :being + field :name, :string + end + end + + @test_mod_schema "Mix.Tasks.Absinthe.Schema.SdlTest.TestSchemaWithMods" + + describe "absinthe.schema.sdl" do + test "parses options" do + argv = ["output.graphql", "--schema", @test_schema] + + opts = Task.parse_options(argv) + + assert opts.filename == "output.graphql" + assert opts.schema == TestSchema + end + + test "provides default options" do + argv = ["--schema", @test_schema] + + opts = Task.parse_options(argv) + + assert opts.filename == "./schema.graphql" + assert opts.schema == TestSchema + end + + test "fails if no schema arg is 
provided" do + argv = [] + catch_error(Task.parse_options(argv)) + end + + test "Generate schema" do + argv = ["--schema", @test_schema] + opts = Task.parse_options(argv) + + {:ok, schema} = Task.generate_schema(opts) + assert schema =~ "helloWorld(name: String!): String" + end + + test "Generate schema with modifier" do + argv = ["--schema", @test_mod_schema] + opts = Task.parse_options(argv) + + {:ok, schema} = Task.generate_schema(opts) + + assert schema =~ "type Mod {" + assert schema =~ "modField: String" + assert schema =~ "type Robot implements Being" + end + end +end diff --git a/test/support/case.ex b/test/support/case.ex index 36d02657c9..0d453a39fc 100644 --- a/test/support/case.ex +++ b/test/support/case.ex @@ -2,6 +2,7 @@ defmodule Absinthe.Case do defmacro __using__(opts) do quote do use ExUnit.Case, unquote(opts) + import Absinthe.Case.Helpers.SchemaImplementations import Absinthe.Case.Helpers.Run import Absinthe.Case.Assertions.Result import Absinthe.Case.Assertions.Schema diff --git a/test/support/case/assertions/result.ex b/test/support/case/assertions/result.ex index 6e32e63240..857f1e14d8 100644 --- a/test/support/case/assertions/result.ex +++ b/test/support/case/assertions/result.ex @@ -14,6 +14,9 @@ defmodule Absinthe.Case.Assertions.Result do assert_error_message(Enum.join(lines, "\n"), result) end + # Dialyzer often has issues with test code, and here it says that + # the assertion on line 20 can never match, which is silly. 
+ @dialyzer {:no_match, assert_error_message: 2} def assert_error_message(error_message, result) do assert {:ok, %{errors: errors}} = result diff --git a/test/support/case/assertions/schema.ex b/test/support/case/assertions/schema.ex index 39e466d206..6e0576d795 100644 --- a/test/support/case/assertions/schema.ex +++ b/test/support/case/assertions/schema.ex @@ -11,7 +11,7 @@ defmodule Absinthe.Case.Assertions.Schema do ## Examples ``` - iex> assert_schema_error("schema-name", [%{rule: Absinthe.Schema.Rule.TheRuleHere, data: :bar}]) + iex> assert_schema_error("schema-name", [%{phase: Absinthe.Schema.Rule.TheRuleHere, extra: :bar}]) ``` """ def assert_schema_error(schema_name, patterns) do @@ -20,17 +20,36 @@ defmodule Absinthe.Case.Assertions.Schema do load_schema(schema_name) end - patterns - |> Enum.filter(fn pattern -> - assert Enum.find(err.details, fn detail -> - pattern.rule == detail.rule && pattern.data == detail.data - end), - "Could not find error detail pattern #{inspect(pattern)} in #{inspect(err.details)}" - end) + patterns = + patterns + |> Enum.filter(fn pattern -> + assert Enum.find(err.phase_errors, fn error -> + keys = Map.keys(pattern) + Map.take(error, keys) |> handle_path == pattern |> handle_path + end), + "Could not find error detail pattern #{inspect(pattern)}\n\nin\n\n#{ + inspect(err.phase_errors) + }" + end) + + assert length(patterns) == length(err.phase_errors) + end - assert length(patterns) == length(err.details) + defp handle_path(%{locations: locations} = map) do + locations = + Enum.map(locations, fn + %{file: file} = location -> + %{location | file: file |> Path.split() |> List.last()} + + location -> + location + end) + + %{map | locations: locations} end + defp handle_path(map), do: map + def assert_notation_error(name) do assert_raise(Absinthe.Schema.Notation.Error, fn -> load_schema(name) diff --git a/test/support/case/helpers/schema_implementations.ex b/test/support/case/helpers/schema_implementations.ex new file mode 100644 
index 0000000000..d05980141e --- /dev/null +++ b/test/support/case/helpers/schema_implementations.ex @@ -0,0 +1,8 @@ +defmodule Absinthe.Case.Helpers.SchemaImplementations do + def schema_implementations(module) do + [ + Module.safe_concat(module, MacroSchema), + Module.safe_concat(module, SDLSchema) + ] + end +end diff --git a/test/support/experimental_notation_helpers.ex b/test/support/experimental_notation_helpers.ex new file mode 100644 index 0000000000..593821eb46 --- /dev/null +++ b/test/support/experimental_notation_helpers.ex @@ -0,0 +1,74 @@ +defmodule ExperimentalNotationHelpers do + alias Absinthe.Blueprint + + def lookup_type(mod, type_ident) do + Blueprint.Schema.lookup_type(mod.__absinthe_blueprint__(), type_ident) + end + + def lookup_directive(mod, directive_ident) do + Blueprint.Schema.lookup_directive(mod.__absinthe_blueprint__(), directive_ident) + end + + def lookup_compiled_type(mod, type_ident) do + Absinthe.Schema.lookup_type(mod, type_ident) + end + + def lookup_compiled_directive(mod, directive_ident) do + Absinthe.Schema.lookup_directive(mod, directive_ident) + end + + def lookup_field(mod, type_ident, field_ident) do + type = Blueprint.Schema.lookup_type(mod.__absinthe_blueprint__(), type_ident) + + Enum.find(type.fields, fn + %{identifier: ^field_ident} -> + true + + _ -> + false + end) + end + + def lookup_argument(mod, type_ident, field_ident, arg_ident) do + case lookup_field(mod, type_ident, field_ident) do + nil -> + nil + + field -> + Enum.find(field.arguments, fn + %{identifier: ^arg_ident} -> + true + + _ -> + false + end) + end + end + + def lookup_compiled_field(mod, type_ident, field_ident) do + case Absinthe.Schema.lookup_type(mod, type_ident) do + nil -> + nil + + type -> + type.fields[field_ident] + end + end + + def lookup_compiled_argument(mod, type_ident, field_ident, arg_ident) do + case lookup_compiled_field(mod, type_ident, field_ident) do + nil -> + nil + + field -> + field.args[arg_ident] + end + end + + def 
type_count(mod) do + mod.__absinthe_blueprint__().schema_definitions + |> List.first() + |> Map.fetch!(:types) + |> length + end +end diff --git a/test/support/fixture.ex b/test/support/fixture.ex new file mode 100644 index 0000000000..dc1ea5677a --- /dev/null +++ b/test/support/fixture.ex @@ -0,0 +1,13 @@ +defmodule Absinthe.Fixture do + defmacro __using__(_) do + if System.get_env("SCHEMA_PROVIDER") == "persistent_term" do + quote do + @schema_provider Absinthe.Schema.PersistentTerm + end + else + quote do + @schema_provider Absinthe.Schema.Compiled + end + end + end +end diff --git a/test/support/fixtures/arguments_schema.ex b/test/support/fixtures/arguments_schema.ex index d3e223ea52..0e2dfc408d 100644 --- a/test/support/fixtures/arguments_schema.ex +++ b/test/support/fixtures/arguments_schema.ex @@ -1,16 +1,28 @@ defmodule Absinthe.Fixtures.ArgumentsSchema do use Absinthe.Schema + use Absinthe.Fixture @res %{ - true => "YES", - false => "NO" + true: "YES", + false: "NO" } + scalar :any, open_ended: true do + parse fn value -> {:ok, value} end + serialize fn value -> value end + end + scalar :input_name do parse fn %{value: value} -> {:ok, %{first_name: value}} end serialize fn %{first_name: name} -> name end end + scalar :input_name_raising do + parse fn %{__struct__: struct} -> + raise "inputNameRaising scalar parse was called for #{struct}" + end + end + scalar :name do serialize &to_string/1 @@ -49,7 +61,20 @@ defmodule Absinthe.Fixtures.ArgumentsSchema do field :non_null_field, non_null(:string) end + input_object :filter do + field :include, list_of(:integer) + field :exclude, list_of(:integer) + end + query do + field :entities, list_of(:any) do + arg :representations, non_null(list_of(non_null(:any))) + + resolve fn %{representations: representations}, _ -> + {:ok, representations} + end + end + field :stuff, :integer do arg :stuff, non_null(:input_stuff) @@ -58,6 +83,13 @@ defmodule Absinthe.Fixtures.ArgumentsSchema do end end + field :filter_numbers, 
list_of(:integer) do + arg :filter_empty, :filter, default_value: %{} + arg :filter_include, :filter, default_value: %{include: [1, 2, 3]} + arg :filter_exclude, :filter, default_value: %{exclude: [1, 2, 3]} + arg :filter_all, :filter, default_value: %{include: [1], exclude: [2, 3]} + end + field :test_boolean_input_object, :boolean do arg :input, non_null(:boolean_input_object) @@ -139,5 +171,9 @@ defmodule Absinthe.Fixtures.ArgumentsSchema do args, _ -> {:error, "Got #{inspect(args)} instead"} end end + + field :raising_thing, :string do + arg :name, :input_name_raising + end end end diff --git a/test/support/fixtures/color_schema.ex b/test/support/fixtures/color_schema.ex index 7b50c8b607..2eaeb3daee 100644 --- a/test/support/fixtures/color_schema.ex +++ b/test/support/fixtures/color_schema.ex @@ -1,5 +1,6 @@ defmodule Absinthe.Fixtures.ColorSchema do use Absinthe.Schema + use Absinthe.Fixture @names %{ r: "RED", @@ -24,6 +25,30 @@ defmodule Absinthe.Fixtures.ColorSchema do resolve: fn %{channel: channel}, _ -> {:ok, %{name: @names[channel], value: @values[channel]}} end + + field :infos, + type: list_of(:channel_info), + args: [ + channels: [type: list_of(:channel), default_value: [:r, :g]] + ], + resolve: fn %{channels: channels}, _ -> + {:ok, + Enum.map(channels, fn channel -> + %{name: @names[channel], value: @values[channel]} + end)} + end + + field :more_infos, + type: list_of(:channel_info), + args: [ + channels: [type: list_of(:channel), default_value: :r] + ], + resolve: fn %{channels: channels}, _ -> + {:ok, + Enum.map(channels, fn channel -> + %{name: @names[channel], value: @values[channel]} + end)} + end end @desc "A color channel" diff --git a/test/support/fixtures/contact_schema.ex b/test/support/fixtures/contact_schema.ex index 5f43394923..79286637f6 100644 --- a/test/support/fixtures/contact_schema.ex +++ b/test/support/fixtures/contact_schema.ex @@ -1,5 +1,6 @@ defmodule Absinthe.Fixtures.ContactSchema do use Absinthe.Schema + use 
Absinthe.Fixture @bruce %{name: "Bruce", age: 35} @others [ diff --git a/test/support/fixtures/custom_types_schema.ex b/test/support/fixtures/custom_types_schema.ex index e1b8c10303..99fd24dcf9 100644 --- a/test/support/fixtures/custom_types_schema.ex +++ b/test/support/fixtures/custom_types_schema.ex @@ -1,5 +1,6 @@ defmodule Absinthe.Fixtures.CustomTypesSchema do use Absinthe.Schema + use Absinthe.Fixture import_types Absinthe.Type.Custom diff --git a/test/support/fixtures/default_value_schema.ex b/test/support/fixtures/default_value_schema.ex new file mode 100644 index 0000000000..7ae3fa7fed --- /dev/null +++ b/test/support/fixtures/default_value_schema.ex @@ -0,0 +1,10 @@ +defmodule Absinthe.Fixtures.DefaultValueSchema do + use Absinthe.Schema + use Absinthe.Fixture + + # Note: More examples in Absinthe.Fixtures.Query.TestSchemaFieldArgDefaultValueWithImportFields + + query do + field :microsecond, :integer, default_value: DateTime.utc_now().microsecond |> elem(0) + end +end diff --git a/test/support/fixtures/directive.ex b/test/support/fixtures/directive.ex new file mode 100644 index 0000000000..911b631738 --- /dev/null +++ b/test/support/fixtures/directive.ex @@ -0,0 +1,235 @@ +defmodule Absinthe.Fixtures.Directive do + defmodule TestSchemaDescriptionKeyword do + use Absinthe.Schema + @module_attribute "goodbye" + + defmodule NestedModule do + def nested_function(arg1) do + arg1 + end + end + + query do + end + + directive :normal_string, description: "string" do + on [:field] + end + + directive :local_function_call, description: test_function("red") do + on [:field] + end + + directive :function_call_using_absolute_path_to_current_module, + description: Absinthe.Fixtures.Directive.TestSchemaDescriptionKeyword.test_function("red") do + on [:field] + end + + directive :standard_library_function, description: String.replace("red", "e", "a") do + on [:field] + end + + directive :function_in_nested_module, description: NestedModule.nested_function("hello") do + 
on [:field] + end + + directive :external_module_function_call, + description: Absinthe.Fixtures.FunctionEvaluationHelpers.external_function("hello") do + on [:field] + end + + directive :module_attribute_string_concat, description: "hello " <> @module_attribute do + on [:field] + end + + directive :interpolation_of_module_attribute, description: "hello #{@module_attribute}" do + on [:field] + end + + def test_function(arg1) do + arg1 + end + end + + defmodule TestSchemaDescriptionAttribute do + use Absinthe.Schema + @module_attribute "goodbye" + + defmodule NestedModule do + def nested_function(arg1) do + arg1 + end + end + + query do + end + + def test_function(arg1) do + arg1 + end + + @desc "string" + directive :normal_string do + on [:field] + end + + # These tests do not work as test_function is not available at compile time, and the + # expression for the @desc attribute is evaluated at compile time. There is nothing we can + # really do about it + + # @desc test_function("red") + # directive :local_function_call do + # on [:field] + # end + + # @desc Absinthe.Fixtures.Directive.TestSchemaEnumAttribute.test_function("red") + # directive :function_call_using_absolute_path_to_current_module do + # on [:field] + # end + + @desc String.replace("red", "e", "a") + directive :standard_library_function do + on [:field] + end + + @desc NestedModule.nested_function("hello") + directive :function_in_nested_module do + on [:field] + end + + @desc Absinthe.Fixtures.FunctionEvaluationHelpers.external_function("hello") + directive :external_module_function_call do + on [:field] + end + + @desc "hello " <> @module_attribute + directive :module_attribute_string_concat do + on [:field] + end + + @desc "hello #{@module_attribute}" + directive :interpolation_of_module_attribute do + on [:field] + end + end + + defmodule TestSchemaDescriptionMacro do + use Absinthe.Schema + @module_attribute "goodbye" + + defmodule NestedModule do + def nested_function(arg1) do + arg1 + end + 
end + + query do + end + + def test_function(arg1) do + arg1 + end + + directive :normal_string do + on [:field] + description "string" + end + + directive :local_function_call do + on [:field] + description test_function("red") + end + + directive :function_call_using_absolute_path_to_current_module do + on [:field] + description Absinthe.Fixtures.Directive.TestSchemaDescriptionMacro.test_function("red") + end + + directive :standard_library_function do + on [:field] + description String.replace("red", "e", "a") + end + + directive :function_in_nested_module do + on [:field] + description NestedModule.nested_function("hello") + end + + directive :external_module_function_call do + on [:field] + description Absinthe.Fixtures.FunctionEvaluationHelpers.external_function("hello") + end + + directive :module_attribute_string_concat do + on [:field] + description "hello " <> @module_attribute + end + + directive :interpolation_of_module_attribute do + on [:field] + description "hello #{@module_attribute}" + end + end + + defmodule TestSchemaArgDescriptionKeyword do + use Absinthe.Schema + @module_attribute "goodbye" + + defmodule NestedModule do + def nested_function(arg1) do + arg1 + end + end + + query do + end + + directive :normal_string do + arg :arg_example, :string, description: "string" + on [:field] + end + + directive :local_function_call do + arg :arg_example, :string, description: test_function("red") + on [:field] + end + + directive :function_call_using_absolute_path_to_current_module do + arg :arg_example, :string, + description: Absinthe.Fixtures.Directive.TestSchemaDescriptionKeyword.test_function("red") + + on [:field] + end + + directive :standard_library_function do + arg :arg_example, :string, description: String.replace("red", "e", "a") + on [:field] + end + + directive :function_in_nested_module do + arg :arg_example, :string, description: NestedModule.nested_function("hello") + on [:field] + end + + directive :external_module_function_call do + 
arg :arg_example, :string, + description: Absinthe.Fixtures.FunctionEvaluationHelpers.external_function("hello") + + on [:field] + end + + directive :module_attribute_string_concat do + arg :arg_example, :string, description: "hello " <> @module_attribute + on [:field] + end + + directive :interpolation_of_module_attribute do + arg :arg_example, :string, description: "hello #{@module_attribute}" + on [:field] + end + + def test_function(arg1) do + arg1 + end + end +end diff --git a/test/support/fixtures/dynamic/bad_directives_schema.exs b/test/support/fixtures/dynamic/bad_directives_schema.exs new file mode 100644 index 0000000000..bc8974536a --- /dev/null +++ b/test/support/fixtures/dynamic/bad_directives_schema.exs @@ -0,0 +1,13 @@ +defmodule Absinthe.TestSupport.Schema.BadDirectivesSchema do + use Absinthe.Schema + + directive :mydirective do + end + + directive :mydirective2 do + on :unknown + end + + query do + end +end diff --git a/test/support/fixtures/dynamic/bad_names_schema.exs b/test/support/fixtures/dynamic/bad_names_schema.exs new file mode 100644 index 0000000000..8a93fb9b73 --- /dev/null +++ b/test/support/fixtures/dynamic/bad_names_schema.exs @@ -0,0 +1,32 @@ +defmodule Absinthe.TestSupport.Schema.BadNamesSchema do + use Absinthe.Schema + + object :car, name: "bad object name" do + # ... 
+ end + + input_object :contact_input, name: "bad input name" do + field :email, non_null(:string) + end + + directive :mydirective, name: "bad directive name" do + on :field + end + + scalar :time, description: "ISOz time", name: "bad?scalar#name" do + parse fn x -> x end + serialize fn x -> x end + end + + query do + field :foo, :string, name: "bad field name" + + field :bar, :car do + arg :foo, :string, name: "bad arg name" + end + end + + enum :rating do + value :"1", as: 1 + end +end diff --git a/test/support/fixtures/dynamic/import_types.ex b/test/support/fixtures/dynamic/import_types.ex deleted file mode 100644 index c3e8721b12..0000000000 --- a/test/support/fixtures/dynamic/import_types.ex +++ /dev/null @@ -1,98 +0,0 @@ -defmodule Absinthe.Fixtures.ImportTypes do - defmodule AccountTypes do - use Absinthe.Schema.Notation - - object :customer do - field :id, non_null(:id) - field :name, :string - field :mailing_address, :mailing_address - field :contact_methods, list_of(:contact_method) - end - - object :employee do - field :id, non_null(:id) - field :name, :string - field :avatar, :avatar - field :weekly_schedules, list_of(:weekly_schedule) - end - end - - defmodule OrderTypes do - use Absinthe.Schema.Notation - - object :order do - field :id, non_null(:id) - field :customer, non_null(:customer) - field :receipt, non_null(:receipt) - end - end - - defmodule ReceiptTypes do - use Absinthe.Schema.Notation - - object :receipt do - field :id, non_null(:id) - field :code, non_null(:string) - end - end - - defmodule ScheduleTypes do - use Absinthe.Schema.Notation - - object :weekly_schedule do - field :id, non_null(:id) - field :employee, non_null(:employee) - end - end - - defmodule ProfileTypes do - use Absinthe.Schema.Notation - - object :mailing_address do - field :street, non_null(list_of(:string)) - field :city, non_null(:string) - field :state, non_null(:string) - field :postal_code, non_null(:string) - end - end - - defmodule AuthTypes do - use 
Absinthe.Schema.Notation - - object :contact_method do - field :kind, non_null(:contact_kind) - field :value, non_null(:string) - end - - enum :contact_kind do - values([:email, :phone]) - end - end - - defmodule Shared.AvatarTypes do - use Absinthe.Schema.Notation - - object :avatar do - field :height, non_null(:integer) - field :width, non_null(:integer) - field :url, non_null(:string) - end - end - - defmodule Schema do - use Absinthe.Schema - - import_types Absinthe.Fixtures.ImportTypes.{AccountTypes, OrderTypes} - import_types Absinthe.Fixtures.ImportTypes.ReceiptTypes - - alias Absinthe.Fixtures.ImportTypes - import_types ImportTypes.ScheduleTypes - import_types ImportTypes.{ProfileTypes, AuthTypes, Shared.AvatarTypes} - - query do - field :orders, list_of(:order) - field :employees, list_of(:employee) - field :customers, list_of(:customer) - end - end -end diff --git a/test/support/fixtures/dynamic/interface_cycle_schema.exs b/test/support/fixtures/dynamic/interface_cycle_schema.exs new file mode 100644 index 0000000000..ba36c8e401 --- /dev/null +++ b/test/support/fixtures/dynamic/interface_cycle_schema.exs @@ -0,0 +1,25 @@ +defmodule Absinthe.TestSupport.Schema.InterfaceCycleSchema do + use Absinthe.Schema + + @sdl """ + schema { + query: Query + } + + type Query { + node: Node + } + + interface Node implements Named & Node { + id: ID! + name: String + } + + interface Named implements Node & Named { + id: ID! 
+ name: String + } + """ + + import_sdl @sdl +end diff --git a/test/support/fixtures/dynamic/invalid_input_types_sdl.exs b/test/support/fixtures/dynamic/invalid_input_types_sdl.exs new file mode 100644 index 0000000000..0f87f2dc77 --- /dev/null +++ b/test/support/fixtures/dynamic/invalid_input_types_sdl.exs @@ -0,0 +1,15 @@ +defmodule Absinthe.Fixtures.InvalidOutputTypesSdlSchema do + use Absinthe.Schema + + import_sdl """ + type User + + input Foo { + blah: User + } + + type Query { + foo(arg: Foo): User + } + """ +end diff --git a/test/support/fixtures/dynamic/invalid_interface_types.exs b/test/support/fixtures/dynamic/invalid_interface_types.exs new file mode 100644 index 0000000000..d1402b561f --- /dev/null +++ b/test/support/fixtures/dynamic/invalid_interface_types.exs @@ -0,0 +1,28 @@ +defmodule Absinthe.Fixtures.InvalidInterfaceTypes do + use Absinthe.Schema + + object :user do + field :name, non_null(:string) + interface :named + + is_type_of fn _ -> + true + end + end + + object :foo do + field :name, :string + interface :named + + is_type_of fn _ -> + true + end + end + + interface :named do + field :name, non_null(:string) + end + + query do + end +end diff --git a/test/support/fixtures/dynamic/invalid_output_types_sdl.exs b/test/support/fixtures/dynamic/invalid_output_types_sdl.exs new file mode 100644 index 0000000000..c313726e4b --- /dev/null +++ b/test/support/fixtures/dynamic/invalid_output_types_sdl.exs @@ -0,0 +1,17 @@ +defmodule Absinthe.Fixtures.InvalidInputTypesSdlSchema do + use Absinthe.Schema + + import_sdl """ + type User + + input Input + + type BadObject { + blah: Input + } + + type Query { + foo(invalidArg: User): User + } + """ +end diff --git a/test/support/fixtures/dynamic/prefix_schema.exs b/test/support/fixtures/dynamic/prefix_schema.exs index 21dc32ae49..96fe79f1e1 100644 --- a/test/support/fixtures/dynamic/prefix_schema.exs +++ b/test/support/fixtures/dynamic/prefix_schema.exs @@ -28,12 +28,12 @@ defmodule 
Absinthe.Fixtures.PrefixSchema do on Language.Field on Language.InlineFragment - instruction fn - %{if: true} -> - :skip + expand fn + %{if: true}, node -> + Blueprint.put_flag(node, :skip, __MODULE__) - _ -> - :include + _, node -> + Blueprint.put_flag(node, :include, __MODULE__) end end end diff --git a/test/support/fixtures/dynamic/unknown_import_schema.exs b/test/support/fixtures/dynamic/unknown_import_schema.exs new file mode 100644 index 0000000000..30635c8be5 --- /dev/null +++ b/test/support/fixtures/dynamic/unknown_import_schema.exs @@ -0,0 +1,8 @@ +defmodule Absinthe.TestSupport.Schema.UnknownImportSchema do + use Absinthe.Schema + + import_types Test.Unknown + + query do + end +end diff --git a/test/support/fixtures/dynamic/valid_schema.exs b/test/support/fixtures/dynamic/valid_schema.exs deleted file mode 100644 index 13413b4c0c..0000000000 --- a/test/support/fixtures/dynamic/valid_schema.exs +++ /dev/null @@ -1,12 +0,0 @@ -defmodule Absinthe.Fixtures.ValidSchema do - use Absinthe.Schema - - query do - # Query type must exist - end - - object :person do - description "A person" - field :name, :string - end -end diff --git a/test/support/fixtures/enums.ex b/test/support/fixtures/enums.ex new file mode 100644 index 0000000000..d4d3083624 --- /dev/null +++ b/test/support/fixtures/enums.ex @@ -0,0 +1,185 @@ +defmodule Absinthe.Fixtures.Enums do + defmodule TestSchemaValueDescriptionKeyword do + use Absinthe.Schema + @module_attribute "goodbye" + + defmodule NestedModule do + def nested_function(arg1) do + arg1 + end + end + + query do + end + + def test_function(arg1) do + arg1 + end + + enum :description_keyword_argument do + value :normal_string, description: "string" + value :local_function_call, description: test_function("red") + + value :function_call_using_absolute_path_to_current_module, + description: + Absinthe.Fixtures.Enums.TestSchemaValueDescriptionKeyword.test_function("red") + + value :standard_library_function, description: 
String.replace("red", "e", "a") + value :function_in_nested_module, description: NestedModule.nested_function("hello") + + value :external_module_function_call, + description: Absinthe.Fixtures.FunctionEvaluationHelpers.external_function("hello") + + value :module_attribute_string_concat, description: "hello " <> @module_attribute + value :interpolation_of_module_attribute, description: "hello #{@module_attribute}" + end + end + + defmodule TestSchemaDescriptionKeyword do + use Absinthe.Schema + @module_attribute "goodbye" + + defmodule NestedModule do + def nested_function(arg1) do + arg1 + end + end + + query do + end + + enum :normal_string, description: "string" do + end + + enum :local_function_call, description: test_function("red") do + end + + enum :function_call_using_absolute_path_to_current_module, + description: Absinthe.Fixtures.Enums.TestSchemaDescriptionKeyword.test_function("red") do + end + + enum :standard_library_function, description: String.replace("red", "e", "a") do + end + + enum :function_in_nested_module, description: NestedModule.nested_function("hello") do + end + + enum :external_module_function_call, + description: Absinthe.Fixtures.FunctionEvaluationHelpers.external_function("hello") do + end + + enum :module_attribute_string_concat, description: "hello " <> @module_attribute do + end + + enum :interpolation_of_module_attribute, description: "hello #{@module_attribute}" do + end + + def test_function(arg1) do + arg1 + end + end + + defmodule TestSchemaDescriptionAttribute do + use Absinthe.Schema + @module_attribute "goodbye" + + defmodule NestedModule do + def nested_function(arg1) do + arg1 + end + end + + query do + end + + def test_function(arg1) do + arg1 + end + + @desc "string" + enum :normal_string do + end + + # These tests do not work as test_function is not available at compile time, and the + # expression for the @desc attribute is evaluated at compile time. 
There is nothing we can + # really do about it + + # @desc test_function("red") + # enum :local_function_call do + # end + + # @desc Absinthe.Fixtures.Enums.TestSchemaEnumAttribute.test_function("red") + # enum :function_call_using_absolute_path_to_current_module do + # end + + @desc String.replace("red", "e", "a") + enum :standard_library_function do + end + + @desc NestedModule.nested_function("hello") + enum :function_in_nested_module do + end + + @desc Absinthe.Fixtures.FunctionEvaluationHelpers.external_function("hello") + enum :external_module_function_call do + end + + @desc "hello " <> @module_attribute + enum :module_attribute_string_concat do + end + + @desc "hello #{@module_attribute}" + enum :interpolation_of_module_attribute do + end + end + + defmodule TestSchemaDescriptionMacro do + use Absinthe.Schema + @module_attribute "goodbye" + + defmodule NestedModule do + def nested_function(arg1) do + arg1 + end + end + + query do + end + + def test_function(arg1) do + arg1 + end + + enum :normal_string do + description "string" + end + + enum :local_function_call do + description test_function("red") + end + + enum :function_call_using_absolute_path_to_current_module do + description Absinthe.Fixtures.Enums.TestSchemaDescriptionMacro.test_function("red") + end + + enum :standard_library_function do + description String.replace("red", "e", "a") + end + + enum :function_in_nested_module do + description NestedModule.nested_function("hello") + end + + enum :external_module_function_call do + description Absinthe.Fixtures.FunctionEvaluationHelpers.external_function("hello") + end + + enum :module_attribute_string_concat do + description "hello " <> @module_attribute + end + + enum :interpolation_of_module_attribute do + description "hello #{@module_attribute}" + end + end +end diff --git a/test/support/fixtures/fake_definition.graphql b/test/support/fixtures/fake_definition.graphql new file mode 100644 index 0000000000..fc50419c76 --- /dev/null +++ 
b/test/support/fixtures/fake_definition.graphql @@ -0,0 +1,203 @@ +enum fake__Locale { + az + cz + de + de_AT + de_CH + en + en_AU + en_BORK + en_CA + en_GB + en_IE + en_IND + en_US + en_au_ocker + es + es_MX + fa + fr + fr_CA + ge + id_ID + it + ja + ko + nb_NO + nep + nl + pl + pt_BR + ru + sk + sv + tr + uk + vi + zh_CN + zh_TW +} + +enum fake__Types { + zipCode + city + streetName + # Configure address with option `useFullAddress` + streetAddress + secondaryAddress + county + country + countryCode + state + stateAbbr + latitude + longitude + + colorName + productCategory + productName + # Sum of money. Configure with options `minMoney`/`maxMoney` and 'decimalPlaces'. + money + productMaterial + product + + companyName + companyCatchPhrase + companyBS + + dbColumn + dbType + dbCollation + dbEngine + + # Configure date format with option `dateFormat` + pastDate + # Configure date format with option `dateFormat` + futureDate + # Configure date format with option `dateFormat` + recentDate + + financeAccountName + financeTransactionType + currencyCode + currencyName + currencySymbol + bitcoinAddress + internationalBankAccountNumber + bankIdentifierCode + + hackerAbbr + hackerPhrase + + # An image url. Configure image with options: `imageCategory`, + # `imageWidth`, `imageHeight` and `randomizeImageUrl` + imageUrl + # An URL for an avatar + avatarUrl + # Configure email provider with option: `emailProvider` + email + url + domainName + ipv4Address + ipv6Address + userAgent + # Configure color with option: `baseColor` + colorHex + macAddress + # Configure password with option `passwordLength` + password + + # Lorem Ipsum text. 
Configure size with option `loremSize` + lorem + + firstName + lastName + fullName + jobTitle + + phoneNumber + + number + uuid + word + words + locale + + filename + mimeType + fileExtension + semver +} + +enum fake__imageCategory { + abstract + animals + business + cats + city + food + nightlife + fashion + people + nature + sports + technics + transport +} + +enum fake__loremSize { + word + words + sentence + sentences + paragraph + paragraphs +} + +input fake__color { + red255: Int = 0 + green255: Int = 0 + blue255: Int = 0 +} + +input fake__options { + # Only for type `streetAddress` + useFullAddress: Boolean + # Only for type `money` + minMoney: Float + # Only for type `money` + maxMoney: Float + # Only for type `money` + decimalPlaces: Int + # Only for type `imageUrl` + imageWidth: Int + # Only for type `imageUrl` + imageHeight: Int + # Only for type `imageUrl` + imageCategory: fake__imageCategory + # Only for type `imageUrl` + randomizeImageUrl: Boolean + # Only for type `email` + emailProvider: String + # Only for type `password` + passwordLength: Int + # Only for type `lorem` + loremSize: fake__loremSize + # Only for types `*Date`. Example value: `YYYY MM DD`. + # [Full Specification](http://momentjs.com/docs/#/displaying/format/) + dateFormat: String + # Only for type `colorHex`. [Details here](https://stackoverflow.com/a/43235/4989887) + baseColor: fake__color = { red255: 0, green255: 0, blue255: 0 } + # Only for type `number` + minNumber: Float + # Only for type `number` + maxNumber: Float + # Only for type `number` + precisionNumber: Float +} + +directive @fake(type:fake__Types!, options: fake__options = {}, locale:fake__Locale) on FIELD_DEFINITION | SCALAR + + +scalar examples__JSON +directive @examples(values: [examples__JSON]!) 
on FIELD_DEFINITION | SCALAR diff --git a/test/support/fixtures/function_evaluation_helpers.ex b/test/support/fixtures/function_evaluation_helpers.ex new file mode 100644 index 0000000000..efcdcb0865 --- /dev/null +++ b/test/support/fixtures/function_evaluation_helpers.ex @@ -0,0 +1,30 @@ +defmodule Absinthe.Fixtures.FunctionEvaluationHelpers do + def function_evaluation_test_params do + [ + %{test_label: :normal_string, expected_value: "string"}, + %{test_label: :local_function_call, expected_value: "red"}, + %{test_label: :function_call_using_absolute_path_to_current_module, expected_value: "red"}, + %{test_label: :standard_library_function, expected_value: "rad"}, + %{test_label: :function_in_nested_module, expected_value: "hello"}, + %{test_label: :external_module_function_call, expected_value: "the value is hello"}, + %{test_label: :module_attribute_string_concat, expected_value: "hello goodbye"}, + %{test_label: :interpolation_of_module_attribute, expected_value: "hello goodbye"} + ] + end + + # These tests do not work as test_function is not available at compile time, and the + # expression for the @desc attribute is evaluated at compile time. 
There is nothing we can + # really do about it + def filter_test_params_for_description_attribute(test_params) do + Enum.filter(test_params, fn %{test_label: test_label} -> + test_label not in [ + :local_function_call, + :function_call_using_absolute_path_to_current_module + ] + end) + end + + def external_function(arg) do + "the value is #{arg}" + end +end diff --git a/test/support/fixtures/id_test_schema.ex b/test/support/fixtures/id_test_schema.ex index 3afa5dff71..5f7df90a91 100644 --- a/test/support/fixtures/id_test_schema.ex +++ b/test/support/fixtures/id_test_schema.ex @@ -1,5 +1,6 @@ defmodule Absinthe.Fixtures.IdTestSchema do use Absinthe.Schema + use Absinthe.Fixture # Example data @items %{ diff --git a/test/support/fixtures/import_sdl_binary_fn.graphql b/test/support/fixtures/import_sdl_binary_fn.graphql new file mode 100644 index 0000000000..2b32f72771 --- /dev/null +++ b/test/support/fixtures/import_sdl_binary_fn.graphql @@ -0,0 +1,3 @@ +type User { + name: String! +} \ No newline at end of file diff --git a/test/support/fixtures/import_sdl_path_option.graphql b/test/support/fixtures/import_sdl_path_option.graphql new file mode 100644 index 0000000000..3e43ff0661 --- /dev/null +++ b/test/support/fixtures/import_sdl_path_option.graphql @@ -0,0 +1,3 @@ +input PostFilter { + name: String +} \ No newline at end of file diff --git a/test/support/fixtures/import_sdl_path_option_fn.graphql b/test/support/fixtures/import_sdl_path_option_fn.graphql new file mode 100644 index 0000000000..3b3ef33a78 --- /dev/null +++ b/test/support/fixtures/import_sdl_path_option_fn.graphql @@ -0,0 +1,12 @@ +"A submitted post" +type Post { + title: String! + upcasedTitle: String! + body: String! + """ + The post author + (is a user) + """ + author: User! + comments: [Comment]! 
+} \ No newline at end of file diff --git a/test/support/fixtures/import_types.ex b/test/support/fixtures/import_types.ex new file mode 100644 index 0000000000..5210ba5a3b --- /dev/null +++ b/test/support/fixtures/import_types.ex @@ -0,0 +1,212 @@ +defmodule Absinthe.Fixtures.ImportTypes do + defmodule AccountTypes do + use Absinthe.Schema.Notation + + object :customer do + field :id, non_null(:id) + field :name, :string + field :mailing_address, :mailing_address + field :contact_methods, list_of(:contact_method) + end + + object :employee do + field :id, non_null(:id) + field :name, :string + field :avatar, :avatar + field :weekly_schedules, list_of(:weekly_schedule) + end + end + + defmodule OrderTypes do + use Absinthe.Schema.Notation + + object :order do + field :id, non_null(:id) + field :customer, non_null(:customer) + field :receipt, non_null(:receipt) + end + end + + defmodule ReceiptTypes do + use Absinthe.Schema.Notation + + object :receipt do + field :id, non_null(:id) + field :code, non_null(:string) + end + end + + defmodule ScheduleTypes do + use Absinthe.Schema.Notation + + object :weekly_schedule do + field :id, non_null(:id) + field :employee, non_null(:employee) + end + end + + defmodule ProfileTypes do + use Absinthe.Schema.Notation + + object :mailing_address do + field :street, non_null(list_of(:string)) + field :city, non_null(:string) + field :state, non_null(:string) + field :postal_code, non_null(:string) + end + end + + defmodule AuthTypes do + use Absinthe.Schema.Notation + + object :contact_method do + field :kind, non_null(:contact_kind) + field :value, non_null(:string) + end + + enum :contact_kind, values: [:email, :phone] + end + + defmodule Shared.AvatarTypes do + use Absinthe.Schema.Notation + + object :avatar do + field :height, non_null(:integer) + field :width, non_null(:integer) + field :url, non_null(:string) + end + end + + defmodule Schema.Types.Flag do + use Absinthe.Schema.Notation + + object :flag do + field :name, 
non_null(:string) + field :key, non_null(:string) + field :enabled, non_null(:boolean) + end + end + + defmodule Schema.Types.Enum.ValueType do + use Absinthe.Schema.Notation + + enum :value_type_enum, values: [:number, :boolean, :string] + end + + defmodule Schema do + use Absinthe.Schema + use Absinthe.Fixture + + import_types Absinthe.Fixtures.ImportTypes.{AccountTypes, OrderTypes} + import_types Absinthe.Fixtures.ImportTypes.ReceiptTypes + + alias Absinthe.Fixtures.ImportTypes + import_types ImportTypes.ScheduleTypes + import_types ImportTypes.{ProfileTypes, AuthTypes, Shared.AvatarTypes} + + import_types __MODULE__.Types.{Flag, Enum.ValueType} + + query do + field :orders, list_of(:order) + field :employees, list_of(:employee) + field :customers, list_of(:customer) + end + end + + defmodule SelfContainedSchema do + use Absinthe.Schema + use Absinthe.Fixture + + defmodule PaymentTypes do + use Absinthe.Schema.Notation + + object :credit_card do + field :number, non_null(:string) + field :type, non_null(:credit_card_type) + field :expiration_month, non_null(:integer) + field :expiration_year, non_null(:integer) + field :cvv, non_null(:string) + end + end + + defmodule CardTypes do + use Absinthe.Schema.Notation + + enum :credit_card_type, values: [:visa, :mastercard, :amex] + end + + defmodule Errors.DeclineReasons do + use Absinthe.Schema.Notation + + enum :decline_reasons, values: [:insufficient_funds, :invalid_card] + end + + defmodule Types.Category do + use Absinthe.Schema.Notation + + object :category do + field :name, non_null(:string) + field :slug, non_null(:string) + field :description, :string + end + end + + defmodule Types.Enums.Role do + use Absinthe.Schema.Notation + + enum :role_enum, values: [:admin, :client] + end + + import_types __MODULE__.Errors.DeclineReasons + import_types __MODULE__.{PaymentTypes, CardTypes} + import_types __MODULE__.Types.{Category, Enums.Role} + + query do + field :credit_cards, list_of(:credit_card) + end + end + + 
defmodule SchemaWithFunctionEvaluationImports do + use Absinthe.Schema.Notation + @module_attribute "goodbye" + + defmodule NestedModule do + def nested_function(arg1) do + arg1 + end + end + + def test_function(arg1) do + arg1 + end + + input_object :example_input_object do + field :normal_string, :string, description: "string" + field :local_function_call, :string, description: test_function("red") + + field :function_call_using_absolute_path_to_current_module, :string, + description: + Absinthe.Fixtures.ImportTypes.SchemaWithFunctionEvaluationImports.test_function("red") + + field :standard_library_function, :string, description: String.replace("red", "e", "a") + + field :function_in_nested_module, :string, + description: NestedModule.nested_function("hello") + + field :external_module_function_call, :string, + description: Absinthe.Fixtures.FunctionEvaluationHelpers.external_function("hello") + + field :module_attribute_string_concat, :string, description: "hello " <> @module_attribute + field :interpolation_of_module_attribute, :string, description: "hello #{@module_attribute}" + end + end + + defmodule SchemaWithFunctionEvaluation do + use Absinthe.Schema + + import_types(SchemaWithFunctionEvaluationImports) + + query do + end + end +end diff --git a/test/support/fixtures/input_object.ex b/test/support/fixtures/input_object.ex new file mode 100644 index 0000000000..9dd5e84693 --- /dev/null +++ b/test/support/fixtures/input_object.ex @@ -0,0 +1,277 @@ +defmodule Absinthe.Fixtures.InputObject do + defmodule TestSchemaDescriptionKeyword do + use Absinthe.Schema + @module_attribute "goodbye" + + defmodule NestedModule do + def nested_function(arg1) do + arg1 + end + end + + query do + end + + input_object :normal_string, description: "string" do + end + + input_object :local_function_call, description: test_function("red") do + end + + input_object :function_call_using_absolute_path_to_current_module, + description: 
Absinthe.Fixtures.InputObject.TestSchemaDescriptionKeyword.test_function("red") do + end + + input_object :standard_library_function, description: String.replace("red", "e", "a") do + end + + input_object :function_in_nested_module, description: NestedModule.nested_function("hello") do + end + + input_object :external_module_function_call, + description: Absinthe.Fixtures.FunctionEvaluationHelpers.external_function("hello") do + end + + input_object :module_attribute_string_concat, description: "hello " <> @module_attribute do + end + + input_object :interpolation_of_module_attribute, description: "hello #{@module_attribute}" do + end + + def test_function(arg1) do + arg1 + end + end + + defmodule TestSchemaDescriptionAttribute do + use Absinthe.Schema + @module_attribute "goodbye" + + defmodule NestedModule do + def nested_function(arg1) do + arg1 + end + end + + query do + end + + def test_function(arg1) do + arg1 + end + + @desc "string" + input_object :normal_string do + end + + # These tests do not work as test_function is not available at compile time, and the + # expression for the @desc attribute is evaluated at compile time. 
There is nothing we can + # really do about it + + # @desc test_function("red") + # input_object :local_function_call do + # end + + # @desc Absinthe.Fixtures.InputObject.TestSchemaAttribute.test_function("red") + # input_object :function_call_using_absolute_path_to_current_module do + # end + + @desc String.replace("red", "e", "a") + input_object :standard_library_function do + end + + @desc NestedModule.nested_function("hello") + input_object :function_in_nested_module do + end + + @desc Absinthe.Fixtures.FunctionEvaluationHelpers.external_function("hello") + input_object :external_module_function_call do + end + + @desc "hello " <> @module_attribute + input_object :module_attribute_string_concat do + end + + @desc "hello #{@module_attribute}" + input_object :interpolation_of_module_attribute do + end + end + + defmodule TestSchemaDescriptionMacro do + use Absinthe.Schema + @module_attribute "goodbye" + + defmodule NestedModule do + def nested_function(arg1) do + arg1 + end + end + + query do + end + + def test_function(arg1) do + arg1 + end + + input_object :normal_string do + description "string" + end + + input_object :local_function_call do + description test_function("red") + end + + input_object :function_call_using_absolute_path_to_current_module do + description Absinthe.Fixtures.InputObject.TestSchemaDescriptionMacro.test_function("red") + end + + input_object :standard_library_function do + description String.replace("red", "e", "a") + end + + input_object :function_in_nested_module do + description NestedModule.nested_function("hello") + end + + input_object :external_module_function_call do + description Absinthe.Fixtures.FunctionEvaluationHelpers.external_function("hello") + end + + input_object :module_attribute_string_concat do + description "hello " <> @module_attribute + end + + input_object :interpolation_of_module_attribute do + description "hello #{@module_attribute}" + end + end + + defmodule TestSchemaFieldsAndArgsDescription do + use 
Absinthe.Schema + @module_attribute "goodbye" + + defmodule NestedModule do + def nested_function(arg1) do + arg1 + end + end + + query do + end + + def test_function(arg1) do + arg1 + end + + input_object :description_keyword_argument do + field :normal_string, :string, description: "string" + field :local_function_call, :string, description: test_function("red") + + field :function_call_using_absolute_path_to_current_module, :string, + description: + Absinthe.Fixtures.InputObject.TestSchemaFieldsAndArgsDescription.test_function("red") + + field :standard_library_function, :string, description: String.replace("red", "e", "a") + + field :function_in_nested_module, :string, + description: NestedModule.nested_function("hello") + + field :external_module_function_call, :string, + description: Absinthe.Fixtures.FunctionEvaluationHelpers.external_function("hello") + + field :module_attribute_string_concat, :string, description: "hello " <> @module_attribute + field :interpolation_of_module_attribute, :string, description: "hello #{@module_attribute}" + end + + input_object :description_attribute do + @desc "string" + field :normal_string, :string + + # These tests do not work as test_function is not available at compile time, and the + # expression for the @desc attribute is evaluated at compile time. 
There is nothing we can + # really do about it + + # @desc test_function("red") + # field :local_function_call, :string + + # @desc Absinthe.Fixtures.InputObject.TestSchemaFieldsAndArgsDescription.test_function( + # "red" + # ) + # field :function_call_using_absolute_path_to_current_module, :string + + @desc String.replace("red", "e", "a") + field :standard_library_function, :string + + @desc NestedModule.nested_function("hello") + field :function_in_nested_module, :string + + @desc Absinthe.Fixtures.FunctionEvaluationHelpers.external_function("hello") + field :external_module_function_call, :string + + @desc "hello " <> @module_attribute + field :module_attribute_string_concat, :string + + @desc "hello #{@module_attribute}" + field :interpolation_of_module_attribute, :string + end + + input_object :field_description_macro do + field :normal_string, :string do + description "string" + end + + field :local_function_call, :string do + description test_function("red") + end + + field :function_call_using_absolute_path_to_current_module, :string do + description Absinthe.Fixtures.InputObject.TestSchemaFieldsAndArgsDescription.test_function( + "red" + ) + end + + field :standard_library_function, :string do + description String.replace("red", "e", "a") + end + + field :function_in_nested_module, :string do + description NestedModule.nested_function("hello") + end + + field :external_module_function_call, :string do + description Absinthe.Fixtures.FunctionEvaluationHelpers.external_function("hello") + end + + field :module_attribute_string_concat, :string do + description "hello " <> @module_attribute + end + + field :interpolation_of_module_attribute, :string do + description "hello #{@module_attribute}" + end + end + + input_object :field_default_value do + field :normal_string, :string, default_value: "string" + field :local_function_call, :string, default_value: test_function("red") + + field :function_call_using_absolute_path_to_current_module, :string, + 
default_value: + Absinthe.Fixtures.InputObject.TestSchemaFieldsAndArgsDescription.test_function("red") + + field :standard_library_function, :string, default_value: String.replace("red", "e", "a") + + field :function_in_nested_module, :string, + default_value: NestedModule.nested_function("hello") + + field :external_module_function_call, :string, + default_value: Absinthe.Fixtures.FunctionEvaluationHelpers.external_function("hello") + + field :module_attribute_string_concat, :string, default_value: "hello " <> @module_attribute + + field :interpolation_of_module_attribute, :string, + default_value: "hello #{@module_attribute}" + end + end +end diff --git a/test/support/fixtures/language/kitchen-sink.graphql b/test/support/fixtures/language/kitchen-sink.graphql new file mode 100644 index 0000000000..6fcf394bf3 --- /dev/null +++ b/test/support/fixtures/language/kitchen-sink.graphql @@ -0,0 +1,59 @@ +# Copyright (c) 2015-present, Facebook, Inc. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +query queryName($foo: ComplexType, $site: Site = MOBILE) { + whoever123is: node(id: [123, 456]) { + id , + ... on User @defer { + field2 { + id , + alias: field1(first:10, after:$foo,) @include(if: $foo) { + id, + ...frag + } + } + } + ... @skip(unless: $foo) { + id + } + ... 
{ + id + } + } +} + +mutation likeStory { + like(story: 123) @defer { + story { + id + } + } +} + +subscription StoryLikeSubscription($input: StoryLikeSubscribeInput) { + storyLikeSubscribe(input: $input) { + story { + likers { + count + } + likeSentence { + text + } + } + } +} + +fragment frag on Friend { + foo(size: $size, bar: $b, obj: {key: "value", block: """ + + block string uses \""" + + """}) +} + +{ + unnamed(truthy: true, falsey: false, nullish: null), + query +} diff --git a/test/support/fixtures/language/schema-kitchen-sink.graphql b/test/support/fixtures/language/schema-kitchen-sink.graphql new file mode 100644 index 0000000000..1c7b5c3b30 --- /dev/null +++ b/test/support/fixtures/language/schema-kitchen-sink.graphql @@ -0,0 +1,131 @@ +# Copyright (c) 2015-present, Facebook, Inc. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +schema { + query: QueryType + mutation: MutationType +} + +""" +This is a description +of the `Foo` type. +""" +type Foo implements Bar & Baz { + one: Type + """ + This is a description of the `two` field. + """ + two( + """ + This is a description of the `argument` argument. + """ + argument: InputType! 
+ ): Type + three(argument: InputType, other: String): Int + four(argument: String = "string"): String + five(argument: [String] = ["string", "string"]): String + six(argument: InputType = {key: "value"}): Type + seven(argument: Int = null): Type +} + +type AnnotatedObject @onObject(arg: "value") { + annotatedField(arg: Type = "default" @onArg): Type @onField +} + +type UndefinedType + +extend type Foo { + seven(argument: [String]): Type +} + +extend type Foo @onType + +interface Bar { + one: Type + four(argument: String = "string"): String +} + +interface AnnotatedInterface @onInterface { + annotatedField(arg: Type @onArg): Type @onField +} + +interface UndefinedInterface + +extend interface Bar { + two(argument: InputType!): Type +} + +extend interface Bar @onInterface + +union Feed = Story | Article | Advert + +union AnnotatedUnion @onUnion = A | B + +union AnnotatedUnionTwo @onUnion = | A | B + +union UndefinedUnion + +extend union Feed = Photo | Video + +extend union Feed @onUnion + +scalar CustomScalar + +scalar AnnotatedScalar @onScalar + +extend scalar CustomScalar @onScalar + +enum Site { + DESKTOP + MOBILE +} + +enum AnnotatedEnum @onEnum { + ANNOTATED_VALUE @onEnumValue + OTHER_VALUE +} + +enum UndefinedEnum + +extend enum Site { + VR +} + +extend enum Site @onEnum + +input InputType { + key: String! + answer: Int = 42 +} + +input AnnotatedInput @onInputObject { + annotatedField: Type @onField +} + +input UndefinedInput + +extend input InputType { + other: Float = 1.23e4 +} + +extend input InputType @onInputObject + +directive @skip(if: Boolean!) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT + +directive @include(if: Boolean!) + on FIELD + | FRAGMENT_SPREAD + | INLINE_FRAGMENT + +directive @include2(if: Boolean!) 
on + | FIELD + | FRAGMENT_SPREAD + | INLINE_FRAGMENT + +extend schema @onSchema + +extend schema @onSchema { + subscription: SubscriptionType +} diff --git a/test/support/fixtures/language/schema-with-emojis.graphql b/test/support/fixtures/language/schema-with-emojis.graphql new file mode 100644 index 0000000000..c747e88860 --- /dev/null +++ b/test/support/fixtures/language/schema-with-emojis.graphql @@ -0,0 +1,11 @@ +# Comment with a ๐Ÿ˜• emoji. +schema { + """ + Description with a ๐ŸŽ‰ emoji. + """ + query: QueryType +} + +type QueryType { + hello: String +} \ No newline at end of file diff --git a/test/support/fixtures/null_lists_schema.ex b/test/support/fixtures/null_lists_schema.ex index 8fca134e12..8ae9e10583 100644 --- a/test/support/fixtures/null_lists_schema.ex +++ b/test/support/fixtures/null_lists_schema.ex @@ -1,5 +1,6 @@ defmodule Absinthe.Fixtures.NullListsSchema do use Absinthe.Schema + use Absinthe.Fixture query do field :nullable_list, :list_details do diff --git a/test/support/fixtures/object.ex b/test/support/fixtures/object.ex new file mode 100644 index 0000000000..bcba7e18f8 --- /dev/null +++ b/test/support/fixtures/object.ex @@ -0,0 +1,255 @@ +defmodule Absinthe.Fixtures.Object do + defmodule TestSchemaDescriptionKeyword do + use Absinthe.Schema + @module_attribute "goodbye" + + defmodule NestedModule do + def nested_function(arg1) do + arg1 + end + end + + query do + end + + object :normal_string, description: "string" do + end + + object :local_function_call, description: test_function("red") do + end + + object :function_call_using_absolute_path_to_current_module, + description: Absinthe.Fixtures.Object.TestSchemaDescriptionKeyword.test_function("red") do + end + + object :standard_library_function, description: String.replace("red", "e", "a") do + end + + object :function_in_nested_module, description: NestedModule.nested_function("hello") do + end + + object :external_module_function_call, + description: 
Absinthe.Fixtures.FunctionEvaluationHelpers.external_function("hello") do + end + + object :module_attribute_string_concat, description: "hello " <> @module_attribute do + end + + object :interpolation_of_module_attribute, description: "hello #{@module_attribute}" do + end + + def test_function(arg1) do + arg1 + end + end + + defmodule TestSchemaDescriptionAttribute do + use Absinthe.Schema + @module_attribute "goodbye" + + defmodule NestedModule do + def nested_function(arg1) do + arg1 + end + end + + query do + end + + def test_function(arg1) do + arg1 + end + + @desc "string" + object :normal_string do + end + + # These tests do not work as test_function is not available at compile time, and the + # expression for the @desc attribute is evaluated at compile time. There is nothing we can + # really do about it + + # @desc test_function("red") + # object :local_function_call do + # end + + # @desc Absinthe.Fixtures.Object.TestSchemaAttribute.test_function("red") + # object :function_call_using_absolute_path_to_current_module do + # end + + @desc String.replace("red", "e", "a") + object :standard_library_function do + end + + @desc NestedModule.nested_function("hello") + object :function_in_nested_module do + end + + @desc Absinthe.Fixtures.FunctionEvaluationHelpers.external_function("hello") + object :external_module_function_call do + end + + @desc "hello " <> @module_attribute + object :module_attribute_string_concat do + end + + @desc "hello #{@module_attribute}" + object :interpolation_of_module_attribute do + end + end + + defmodule TestSchemaDescriptionMacro do + use Absinthe.Schema + @module_attribute "goodbye" + + defmodule NestedModule do + def nested_function(arg1) do + arg1 + end + end + + query do + end + + def test_function(arg1) do + arg1 + end + + object :normal_string do + description "string" + end + + object :local_function_call do + description test_function("red") + end + + object :function_call_using_absolute_path_to_current_module do + 
description Absinthe.Fixtures.Object.TestSchemaDescriptionMacro.test_function("red") + end + + object :standard_library_function do + description String.replace("red", "e", "a") + end + + object :function_in_nested_module do + description NestedModule.nested_function("hello") + end + + object :external_module_function_call do + description Absinthe.Fixtures.FunctionEvaluationHelpers.external_function("hello") + end + + object :module_attribute_string_concat do + description "hello " <> @module_attribute + end + + object :interpolation_of_module_attribute do + description "hello #{@module_attribute}" + end + end + + defmodule TestSchemaFieldsAndArgsDescription do + use Absinthe.Schema + @module_attribute "goodbye" + + defmodule NestedModule do + def nested_function(arg1) do + arg1 + end + end + + query do + end + + def test_function(arg1) do + arg1 + end + + object :description_keyword_argument do + field :normal_string, :string, description: "string" + field :local_function_call, :string, description: test_function("red") + + field :function_call_using_absolute_path_to_current_module, :string, + description: + Absinthe.Fixtures.Object.TestSchemaFieldsAndArgsDescription.test_function("red") + + field :standard_library_function, :string, description: String.replace("red", "e", "a") + + field :function_in_nested_module, :string, + description: NestedModule.nested_function("hello") + + field :external_module_function_call, :string, + description: Absinthe.Fixtures.FunctionEvaluationHelpers.external_function("hello") + + field :module_attribute_string_concat, :string, description: "hello " <> @module_attribute + field :interpolation_of_module_attribute, :string, description: "hello #{@module_attribute}" + end + + object :description_attribute do + @desc "string" + field :normal_string, :string + + # These tests do not work as test_function is not available at compile time, and the + # expression for the @desc attribute is evaluated at compile time. 
There is nothing we can + # really do about it + + # @desc test_function("red") + # field :local_function_call, :string + + # @desc Absinthe.Fixtures.Object.TestSchemaFieldsAndArgsDescription.test_function( + # "red" + # ) + # field :function_call_using_absolute_path_to_current_module, :string + + @desc String.replace("red", "e", "a") + field :standard_library_function, :string + + @desc NestedModule.nested_function("hello") + field :function_in_nested_module, :string + + @desc Absinthe.Fixtures.FunctionEvaluationHelpers.external_function("hello") + field :external_module_function_call, :string + + @desc "hello " <> @module_attribute + field :module_attribute_string_concat, :string + + @desc "hello #{@module_attribute}" + field :interpolation_of_module_attribute, :string + end + + object :field_description_macro do + field :normal_string, :string do + description "string" + end + + field :local_function_call, :string do + description test_function("red") + end + + field :function_call_using_absolute_path_to_current_module, :string do + description Absinthe.Fixtures.Object.TestSchemaFieldsAndArgsDescription.test_function( + "red" + ) + end + + field :standard_library_function, :string do + description String.replace("red", "e", "a") + end + + field :function_in_nested_module, :string do + description NestedModule.nested_function("hello") + end + + field :external_module_function_call, :string do + description Absinthe.Fixtures.FunctionEvaluationHelpers.external_function("hello") + end + + field :module_attribute_string_concat, :string do + description "hello " <> @module_attribute + end + + field :interpolation_of_module_attribute, :string do + description "hello #{@module_attribute}" + end + end + end +end diff --git a/test/support/fixtures/object_times_schema.ex b/test/support/fixtures/object_times_schema.ex index 8e27604c5e..9ca6e68e23 100644 --- a/test/support/fixtures/object_times_schema.ex +++ b/test/support/fixtures/object_times_schema.ex @@ -1,5 +1,6 @@ 
defmodule Absinthe.Fixtures.ObjectTimesSchema do use Absinthe.Schema + use Absinthe.Fixture query do field :obj_times, :integer do diff --git a/test/support/fixtures/only_query_schema.ex b/test/support/fixtures/only_query_schema.ex index ceb4be51b1..fcaceccbc9 100644 --- a/test/support/fixtures/only_query_schema.ex +++ b/test/support/fixtures/only_query_schema.ex @@ -1,5 +1,6 @@ defmodule Absinthe.Fixtures.OnlyQuerySchema do use Absinthe.Schema + use Absinthe.Fixture query do field :hello, :string do diff --git a/test/support/fixtures/pets_schema.ex b/test/support/fixtures/pets_schema.ex index c4975f6f82..b8b1b5cf20 100644 --- a/test/support/fixtures/pets_schema.ex +++ b/test/support/fixtures/pets_schema.ex @@ -1,5 +1,6 @@ defmodule Absinthe.Fixtures.PetsSchema do use Absinthe.Schema + use Absinthe.Fixture interface :being do field :name, :string do @@ -122,6 +123,11 @@ defmodule Absinthe.Fixtures.PetsSchema do field :string_list_field, list_of(:string) end + scalar :custom_scalar do + parse & &1 + serialize & &1 + end + object :complicated_args do field :int_arg_field, :string do arg :int_arg, :integer @@ -157,6 +163,7 @@ defmodule Absinthe.Fixtures.PetsSchema do field :complex_arg_field, :string do arg :complex_arg, :complex_input + arg :complex_arg_list, list_of(:complex_input) end field :multiple_reqs, :string do @@ -175,6 +182,14 @@ defmodule Absinthe.Fixtures.PetsSchema do arg :opt1, :integer, default_value: 0 arg :opt2, :integer, default_value: 0 end + + field :optional_non_null_boolean_arg_field, :boolean do + arg :optional_boolean_arg, non_null(:boolean), default_value: true + end + + field :non_null_boolean_arg_field, :boolean do + arg :non_null_boolean_arg, non_null(:boolean) + end end query do @@ -192,6 +207,12 @@ defmodule Absinthe.Fixtures.PetsSchema do field :complicated_args, :complicated_args end + mutation do + field :create_dog, :dog do + arg :custom_scalar_input, non_null(:custom_scalar) + end + end + directive :on_query do on [:query] end @@ 
-205,6 +226,7 @@ defmodule Absinthe.Fixtures.PetsSchema do end directive :on_field do + repeatable true on [:field] end diff --git a/test/support/fixtures/query.ex b/test/support/fixtures/query.ex new file mode 100644 index 0000000000..8bfd274050 --- /dev/null +++ b/test/support/fixtures/query.ex @@ -0,0 +1,162 @@ +defmodule Absinthe.Fixtures.Query do + defmodule TestSchemaFieldArgDescription do + use Absinthe.Schema + @module_attribute "goodbye" + + defmodule NestedModule do + def nested_function(arg1) do + arg1 + end + end + + def test_function(arg1) do + arg1 + end + + query do + field :normal_string, :string do + arg :arg_example, :string, description: "string" + end + + field :local_function_call, :string do + arg :arg_example, :string, description: test_function("red") + end + + field :function_call_using_absolute_path_to_current_module, :string do + arg :arg_example, :string, + description: Absinthe.Fixtures.Query.TestSchemaFieldArgDescription.test_function("red") + end + + field :standard_library_function, :string do + arg :arg_example, :string, description: String.replace("red", "e", "a") + end + + field :function_in_nested_module, :string do + arg :arg_example, :string, description: NestedModule.nested_function("hello") + end + + field :external_module_function_call, :string do + arg :arg_example, :string, + description: Absinthe.Fixtures.FunctionEvaluationHelpers.external_function("hello") + end + + field :module_attribute_string_concat, :string do + arg :arg_example, :string, description: "hello " <> @module_attribute + end + + field :interpolation_of_module_attribute, :string do + arg :arg_example, :string, description: "hello #{@module_attribute}" + end + end + end + + defmodule TestSchemaFieldArgDefaultValue do + use Absinthe.Schema + @module_attribute "goodbye" + + defmodule NestedModule do + def nested_function(arg1) do + arg1 + end + end + + def test_function(arg1) do + arg1 + end + + query do + field :normal_string, :string do + arg :arg_example, 
:string, default_value: "string" + end + + field :local_function_call, :string do + arg :arg_example, :string, default_value: test_function("red") + end + + field :function_call_using_absolute_path_to_current_module, :string do + arg :arg_example, :string, + default_value: + Absinthe.Fixtures.Query.TestSchemaFieldArgDefaultValue.test_function("red") + end + + field :standard_library_function, :string do + arg :arg_example, :string, default_value: String.replace("red", "e", "a") + end + + field :function_in_nested_module, :string do + arg :arg_example, :string, default_value: NestedModule.nested_function("hello") + end + + field :external_module_function_call, :string do + arg :arg_example, :string, + default_value: Absinthe.Fixtures.FunctionEvaluationHelpers.external_function("hello") + end + + field :module_attribute_string_concat, :string do + arg :arg_example, :string, default_value: "hello " <> @module_attribute + end + + field :interpolation_of_module_attribute, :string do + arg :arg_example, :string, default_value: "hello #{@module_attribute}" + end + end + end + + defmodule TestSchemaFieldArgDefaultValueWithImportFields do + use Absinthe.Schema + @module_attribute "goodbye" + + defmodule NestedModule do + def nested_function(arg1) do + arg1 + end + end + + def test_function(arg1) do + arg1 + end + + query do + import_fields :field_arg_default_value + end + + object :field_arg_default_value do + field :normal_string, :string do + arg :arg_example, :string, default_value: "string" + end + + field :local_function_call, :string do + arg :arg_example, :string, default_value: test_function("red") + end + + field :function_call_using_absolute_path_to_current_module, :string do + arg :arg_example, :string, + default_value: + Absinthe.Fixtures.Query.TestSchemaFieldArgDefaultValueWithImportFields.test_function( + "red" + ) + end + + field :standard_library_function, :string do + arg :arg_example, :string, default_value: String.replace("red", "e", "a") + end + + field 
:function_in_nested_module, :string do + arg :arg_example, :string, default_value: NestedModule.nested_function("hello") + end + + field :external_module_function_call, :string do + arg :arg_example, :string, + default_value: Absinthe.Fixtures.FunctionEvaluationHelpers.external_function("hello") + end + + field :module_attribute_string_concat, :string do + arg :arg_example, :string, default_value: "hello " <> @module_attribute + end + + field :interpolation_of_module_attribute, :string do + arg :arg_example, :string, default_value: "hello #{@module_attribute}" + end + end + end +end diff --git a/test/support/fixtures/scalar.ex b/test/support/fixtures/scalar.ex new file mode 100644 index 0000000000..b2ba80bb06 --- /dev/null +++ b/test/support/fixtures/scalar.ex @@ -0,0 +1,202 @@ +defmodule Absinthe.Fixtures.Scalar do + defmodule Utils do + def parse(value), do: value + def serialize(value), do: value + end + + defmodule TestSchemaDescriptionKeyword do + use Absinthe.Schema + @module_attribute "goodbye" + + defmodule NestedModule do + def nested_function(arg1) do + arg1 + end + end + + query do + end + + scalar :normal_string, description: "string" do + parse &Utils.parse/1 + serialize &Utils.serialize/1 + end + + scalar :local_function_call, description: test_function("red") do + parse &Utils.parse/1 + serialize &Utils.serialize/1 + end + + scalar :function_call_using_absolute_path_to_current_module, + description: Absinthe.Fixtures.Scalar.TestSchemaDescriptionKeyword.test_function("red") do + parse &Utils.parse/1 + serialize &Utils.serialize/1 + end + + scalar :standard_library_function, description: String.replace("red", "e", "a") do + parse &Utils.parse/1 + serialize &Utils.serialize/1 + end + + scalar :function_in_nested_module, description: NestedModule.nested_function("hello") do + parse &Utils.parse/1 + serialize &Utils.serialize/1 + end + + scalar :external_module_function_call, + description: 
Absinthe.Fixtures.FunctionEvaluationHelpers.external_function("hello") do + parse &Utils.parse/1 + serialize &Utils.serialize/1 + end + + scalar :module_attribute_string_concat, description: "hello " <> @module_attribute do + parse &Utils.parse/1 + serialize &Utils.serialize/1 + end + + scalar :interpolation_of_module_attribute, description: "hello #{@module_attribute}" do + parse &Utils.parse/1 + serialize &Utils.serialize/1 + end + + def test_function(arg1) do + arg1 + end + end + + defmodule TestSchemaDescriptionAttribute do + use Absinthe.Schema + @module_attribute "goodbye" + + defmodule NestedModule do + def nested_function(arg1) do + arg1 + end + end + + query do + end + + def test_function(arg1) do + arg1 + end + + @desc "string" + scalar :normal_string do + parse &Utils.parse/1 + serialize &Utils.serialize/1 + end + + # These tests do not work as test_function is not available at compile time, and the + # expression for the @desc attribute is evaluated at compile time. There is nothing we can + # really do about it + + # @desc test_function("red") + # scalar :local_function_call do + # parse &Utils.parse/1 + # serialize &Utils.serialize/1 + # end + + # @desc Absinthe.Fixtures.Scalar.TestSchemaEnumAttribute.test_function("red") + # scalar :function_call_using_absolute_path_to_current_module do + # parse &Utils.parse/1 + # serialize &Utils.serialize/1 + # end + + @desc String.replace("red", "e", "a") + scalar :standard_library_function do + parse &Utils.parse/1 + serialize &Utils.serialize/1 + end + + @desc NestedModule.nested_function("hello") + scalar :function_in_nested_module do + parse &Utils.parse/1 + serialize &Utils.serialize/1 + end + + @desc Absinthe.Fixtures.FunctionEvaluationHelpers.external_function("hello") + scalar :external_module_function_call do + parse &Utils.parse/1 + serialize &Utils.serialize/1 + end + + @desc "hello " <> @module_attribute + scalar :module_attribute_string_concat do + parse &Utils.parse/1 + serialize &Utils.serialize/1 
+ end + + @desc "hello #{@module_attribute}" + scalar :interpolation_of_module_attribute do + parse &Utils.parse/1 + serialize &Utils.serialize/1 + end + end + + defmodule TestSchemaDescriptionMacro do + use Absinthe.Schema + @module_attribute "goodbye" + + defmodule NestedModule do + def nested_function(arg1) do + arg1 + end + end + + query do + end + + def test_function(arg1) do + arg1 + end + + scalar :normal_string do + parse &Utils.parse/1 + serialize &Utils.serialize/1 + description "string" + end + + scalar :local_function_call do + parse &Utils.parse/1 + serialize &Utils.serialize/1 + description test_function("red") + end + + scalar :function_call_using_absolute_path_to_current_module do + parse &Utils.parse/1 + serialize &Utils.serialize/1 + description Absinthe.Fixtures.Scalar.TestSchemaDescriptionMacro.test_function("red") + end + + scalar :standard_library_function do + parse &Utils.parse/1 + serialize &Utils.serialize/1 + description String.replace("red", "e", "a") + end + + scalar :function_in_nested_module do + parse &Utils.parse/1 + serialize &Utils.serialize/1 + description NestedModule.nested_function("hello") + end + + scalar :external_module_function_call do + parse &Utils.parse/1 + serialize &Utils.serialize/1 + description Absinthe.Fixtures.FunctionEvaluationHelpers.external_function("hello") + end + + scalar :module_attribute_string_concat do + parse &Utils.parse/1 + serialize &Utils.serialize/1 + description "hello " <> @module_attribute + end + + scalar :interpolation_of_module_attribute do + parse &Utils.parse/1 + serialize &Utils.serialize/1 + description "hello #{@module_attribute}" + end + end +end diff --git a/test/support/fixtures/strict_schema.ex b/test/support/fixtures/strict_schema.ex new file mode 100644 index 0000000000..305c587a91 --- /dev/null +++ b/test/support/fixtures/strict_schema.ex @@ -0,0 +1,57 @@ +defmodule Absinthe.Fixtures.StrictSchema do + use Absinthe.Schema + use Absinthe.Fixture + + import_types 
Absinthe.Type.Custom + + directive :foo_bar_directive do + arg :baz_qux, non_null(:foo_bar_input) + on [:field] + + expand fn %{baz_qux: %{naive_datetime: naive_datetime}}, node = %{flags: flags} -> + %{node | flags: Map.put(flags, :baz_qux, naive_datetime)} + end + end + + input_object :foo_bar_input do + field :naive_datetime, non_null(:naive_datetime) + end + + object :foo_bar_object do + field :naive_datetime, non_null(:naive_datetime) + end + + query do + field :foo_bar_query, :foo_bar_object do + arg :baz_qux, :foo_bar_input + + resolve fn + %{baz_qux: %{naive_datetime: naive_datetime}}, _ -> + {:ok, %{naive_datetime: naive_datetime}} + + _, %{definition: %{flags: %{baz_qux: naive_datetime}}} -> + {:ok, %{naive_datetime: naive_datetime}} + + _, _ -> + {:ok, nil} + end + end + end + + mutation do + field :foo_bar_mutation, :foo_bar_object do + arg :baz_qux, :foo_bar_input + + resolve fn + %{baz_qux: %{naive_datetime: naive_datetime}}, _ -> + {:ok, %{naive_datetime: naive_datetime}} + + _, %{definition: %{flags: %{baz_qux: naive_datetime}}} -> + {:ok, %{naive_datetime: naive_datetime}} + + _, _ -> + {:ok, nil} + end + end + end +end diff --git a/test/support/fixtures/things_schema.ex b/test/support/fixtures/things/macro_schema.ex similarity index 98% rename from test/support/fixtures/things_schema.ex rename to test/support/fixtures/things/macro_schema.ex index bcbbe4ef21..ef74641e60 100644 --- a/test/support/fixtures/things_schema.ex +++ b/test/support/fixtures/things/macro_schema.ex @@ -1,5 +1,6 @@ -defmodule Absinthe.Fixtures.ThingsSchema do +defmodule Absinthe.Fixtures.Things.MacroSchema do use Absinthe.Schema + use Absinthe.Fixture @db %{ "foo" => %{id: "foo", name: "Foo", value: 4}, diff --git a/test/support/fixtures/things/sdl_schema.ex b/test/support/fixtures/things/sdl_schema.ex new file mode 100644 index 0000000000..ae6174eb7c --- /dev/null +++ b/test/support/fixtures/things/sdl_schema.ex @@ -0,0 +1,187 @@ +defmodule Absinthe.Fixtures.Things.SDLSchema 
do + use Absinthe.Schema + use Absinthe.Fixture + + import_sdl """ + enum SigilsWork { + FOO + BAR + } + + enum SigilsWorkInside { + FOO + BAR + } + + enum FailureType { + MULTIPLE + WITH_CODE + WITHOUT_MESSAGE + MULTIPLE_WITH_CODE + MULTIPLE_WITHOUT_MESSAGE + } + + type RootMutationType { + updateThing(id: String!, thing: InputThing!): Thing + + failingThing(type: FailureType): Thing + } + + type RootQueryType { + version: String + + badResolution: Thing + + number(val: Int!): String + + thingByContext: Thing + + things: [Thing] + + thing( + "id of the thing" + id: String!, + + "This is a deprecated arg" + deprecatedArg: String @deprecated, + + "This is a non-null deprecated arg" + deprecatedNonNullArg: String! @deprecated, + + "This is a deprecated arg with a reason" + deprecatedArgWithReason: String @deprecated(reason: "reason"), + + "This is a non-null deprecated arg with a reason" + deprecatedNonNullArgWithReason: String! @deprecated(reason: "reason") + ): Thing + + deprecatedThing( + "id of the thing" + id: String! + ): Thing @deprecated + + deprecatedThingWithReason( + "id of the thing" + id: String! + ): Thing @deprecated(reason: "use `thing' instead") + } + + "A thing as input" + input InputThing { + value: Int + deprecatedField: String @deprecated, + deprecatedFieldWithReason: String @deprecated(reason: "reason") + deprecatedNonNullField: String! @deprecated + } + + "A thing" + type Thing { + fail( + "the id we want this field to fail on" + id: ID + ): ID + + "The ID of the thing" + id: String! 
+ + "The name of the thing" + name: String + + "The value of the thing" + value: Int + + otherThing: Thing + } + + schema { + mutation: RootMutationType + query: RootQueryType + } + + """ + + @db %{ + "foo" => %{id: "foo", name: "Foo", value: 4}, + "bar" => %{id: "bar", name: "Bar", value: 5} + } + + def hydrate(%Absinthe.Blueprint{}, _) do + %{ + mutation: %{ + failing_thing: [ + resolve: &__MODULE__.resolve_failing_thing/3 + ] + }, + query: %{ + bad_resolution: [ + resolve: &__MODULE__.resolve_bad/3 + ], + number: [ + resolve: &__MODULE__.resolve_number/3 + ], + thing_by_context: [ + resolve: &__MODULE__.resolve_things_by_context/3 + ], + things: [ + resolve: &__MODULE__.resolve_things/3 + ], + thing: [ + resolve: &__MODULE__.resolve_thing/3 + ], + deprecated_thing: [ + resolve: &__MODULE__.resolve_thing/3 + ], + deprecated_thing_with_reason: [ + resolve: &__MODULE__.resolve_thing/3 + ] + } + } + end + + def hydrate(_node, _ancestors) do + [] + end + + def resolve_failing_thing(_, %{type: :multiple}, _) do + {:error, ["one", "two"]} + end + + def resolve_failing_thing(_, %{type: :with_code}, _) do + {:error, message: "Custom Error", code: 42} + end + + def resolve_failing_thing(_, %{type: :without_message}, _) do + {:error, code: 42} + end + + def resolve_failing_thing(_, %{type: :multiple_with_code}, _) do + {:error, [%{message: "Custom Error 1", code: 1}, %{message: "Custom Error 2", code: 2}]} + end + + def resolve_failing_thing(_, %{type: :multiple_without_message}, _) do + {:error, [%{message: "Custom Error 1", code: 1}, %{code: 2}]} + end + + def resolve_bad(_, _, _) do + :not_expected + end + + def resolve_number(_, %{val: v}, _), do: {:ok, v |> to_string} + def resolve_number(_, args, _), do: {:error, "got #{inspect(args)}"} + + def resolve_things_by_context(_, _, %{context: %{thing: id}}) do + {:ok, @db |> Map.get(id)} + end + + def resolve_things_by_context(_, _, _) do + {:error, "No :id context provided"} + end + + def resolve_things(_, _, _) do + 
{:ok, @db |> Map.values() |> Enum.sort_by(& &1.id)} + end + + def resolve_thing(_, %{id: id}, _) do + {:ok, @db |> Map.get(id)} + end +end diff --git a/test/support/fixtures/times_schema.ex b/test/support/fixtures/times_schema.ex index 80e6ed8f54..df0722cc2e 100644 --- a/test/support/fixtures/times_schema.ex +++ b/test/support/fixtures/times_schema.ex @@ -1,5 +1,6 @@ defmodule Absinthe.Fixtures.TimesSchema do use Absinthe.Schema + use Absinthe.Fixture query do field :times, :integer do diff --git a/test/support/fixtures/union.ex b/test/support/fixtures/union.ex new file mode 100644 index 0000000000..51cd4ad936 --- /dev/null +++ b/test/support/fixtures/union.ex @@ -0,0 +1,151 @@ +defmodule Absinthe.Fixtures.Union do + defmodule TestSchemaDescriptionKeyword do + use Absinthe.Schema + @module_attribute "goodbye" + + defmodule NestedModule do + def nested_function(arg1) do + arg1 + end + end + + query do + end + + union :normal_string, description: "string" do + end + + union :local_function_call, description: test_function("red") do + end + + union :function_call_using_absolute_path_to_current_module, + description: Absinthe.Fixtures.Union.TestSchemaDescriptionKeyword.test_function("red") do + end + + union :standard_library_function, description: String.replace("red", "e", "a") do + end + + union :function_in_nested_module, description: NestedModule.nested_function("hello") do + end + + union :external_module_function_call, + description: Absinthe.Fixtures.FunctionEvaluationHelpers.external_function("hello") do + end + + union :module_attribute_string_concat, description: "hello " <> @module_attribute do + end + + union :interpolation_of_module_attribute, description: "hello #{@module_attribute}" do + end + + def test_function(arg1) do + arg1 + end + end + + defmodule TestSchemaDescriptionAttribute do + use Absinthe.Schema + @module_attribute "goodbye" + + defmodule NestedModule do + def nested_function(arg1) do + arg1 + end + end + + query do + end + + def 
test_function(arg1) do + arg1 + end + + @desc "string" + union :normal_string do + end + + # These tests do not work as test_function is not available at compile time, and the + # expression for the @desc attribute is evaluated at compile time. There is nothing we can + # really do about it + + # @desc test_function("red") + # union :local_function_call do + + # end + + # @desc Absinthe.Fixtures.Union.TestSchemaEnumAttribute.test_function("red") + # union :function_call_using_absolute_path_to_current_module do + + # end + + @desc String.replace("red", "e", "a") + union :standard_library_function do + end + + @desc NestedModule.nested_function("hello") + union :function_in_nested_module do + end + + @desc Absinthe.Fixtures.FunctionEvaluationHelpers.external_function("hello") + union :external_module_function_call do + end + + @desc "hello " <> @module_attribute + union :module_attribute_string_concat do + end + + @desc "hello #{@module_attribute}" + union :interpolation_of_module_attribute do + end + end + + defmodule TestSchemaDescriptionMacro do + use Absinthe.Schema + @module_attribute "goodbye" + + defmodule NestedModule do + def nested_function(arg1) do + arg1 + end + end + + query do + end + + def test_function(arg1) do + arg1 + end + + union :normal_string do + description "string" + end + + union :local_function_call do + description test_function("red") + end + + union :function_call_using_absolute_path_to_current_module do + description Absinthe.Fixtures.Union.TestSchemaDescriptionMacro.test_function("red") + end + + union :standard_library_function do + description String.replace("red", "e", "a") + end + + union :function_in_nested_module do + description NestedModule.nested_function("hello") + end + + union :external_module_function_call do + description Absinthe.Fixtures.FunctionEvaluationHelpers.external_function("hello") + end + + union :module_attribute_string_concat do + description "hello " <> @module_attribute + end + + union 
:interpolation_of_module_attribute do + description "hello #{@module_attribute}" + end + end +end diff --git a/test/support/integration_case.ex b/test/support/integration_case.ex deleted file mode 100644 index 8e8a246457..0000000000 --- a/test/support/integration_case.ex +++ /dev/null @@ -1,185 +0,0 @@ -defmodule Absinthe.IntegrationCase do - @moduledoc """ - Integration tests consist of: - - - A `.graphql` file containing a GraphQL document to execute - - A `.exs` file alongside with the same basename, containing the scenario(s) to execute - - The files are located under the directory passed as the `:root` - option to `use Absinthe.IntegrationCase`. - - ## Setting the Schema - - The schema for a GraphQL document can be set by adding a comment at - the beginning of the `.graphql` file, eg: - - ``` - # Schema: ColorsSchema - ``` - - The schema name provided must be under `Absinthe.Fixtures`. (For - example, the schema set in the example above would be - `Absinthe.Fixtures.ColorsSchema`.) - - If no schema is set, the integration test will use the - `:default_schema` option passed to `use Absinthe.IntegrationCase`. - - ## Defining Scenarios - - You can place one or more scenarios in the `.exs` file. - - A normal scenario that checks the result of Absinthe's GraphQL - execution is a tuple of options for `Absinthe.run` (see - `Absinthe.run_opts`) and the expected result. - - You can omit the options if you aren't setting any. 
For instance, - here's a simple result expectation: - - ``` - {:ok, %{data: %{"defaultThing" => %{"name" => "Foo"}}}} - ``` - - This could also have been written as: - - ``` - {[], {:ok, %{data: %{"defaultThing" => %{"name" => "Foo"}}}}} - ``` - - Here's another scenario example, this time making use of the options - to set a variable: - - ``` - {[variables: %{"thingId" => "foo"}], {:ok, %{data: %{"thing" => %{"name" => "Foo"}}}}} - ``` - - If you have more than one scenario, just wrap them in a list: - - ``` - [ - {:ok, %{data: %{"defaultThing" => %{"name" => "Foo"}}}}, - {[variables: %{"thingId" => "foo"}], {:ok, %{data: %{"thing" => %{"name" => "Foo"}}}}} - ] - ``` - - Under normal circumstances, `assert_result/2` will be used to - compare the result of a scenario against the expectation. (Notably, - `assert_result` ignores error `:locations`, so they do not need to - be included in results.) - - ### Checking Exceptions - - If a tuple containing `:raise` and a module name is provided as the - expected result for a scenario, `assert_raise/2` will be used - instead of the normal `Absinthe.Case.assert_result/2`; this can be - used to check scenarios with invalid resolvers, etc: - - ``` - {:raise, Absinthe.ExecutionError} - ``` - - Once again, with options for `Absinthe.run`, this would look like: - - ``` - {[variables: %{"someVar" => "value}], {:raise, Absinthe.ExecutionError}} - ``` - - ### Complex Scenario Assertions - - You can totally override the assertion logic and do your own - execution, just using the GraphQL reading and schema setting logic, - by defining a `run_scenario/2` function in your test module. It - should narrowly match the test definition (so that the rest of your - tests fall through to the normal `run_scenario/2` logic). 
- - ``` - def run_scenario(%{name: "path/to/integration/name"} = definition, {options, expectation} = scenario) do - result = run(definition.graphql, definition.schema, options) - # Do something to check the expectation against the result, etc - end - ``` - - (For more information on the values available in `definition` above, - see `Absinthe.IntegrationCase.Definition`.) - - In the event that you don't care about the result value, set the - expectation to `:custom_assertion` (this is just a convention). For - example, here's a scenario using a variable that uses a custom - `run_scenario` match to provide its own custom assertion logic: - - ``` - {[variables: %{"name" => "something"}], :custom_assertion} - ``` - """ - - defp term_from_file!(filename) do - elem(Code.eval_file(filename), 0) - end - - defp definitions(root, default_schema) do - for graphql_file <- Path.wildcard(Path.join(root, "**/*.graphql")) do - dirname = Path.dirname(graphql_file) - basename = Path.basename(graphql_file, ".graphql") - - integration_name = - String.replace_leading(dirname, root, "") - |> Path.join(basename) - |> String.slice(1..-1) - - graphql = File.read!(graphql_file) - - raw_scenarios = - Path.join(dirname, basename <> ".exs") - |> term_from_file! 
- - __MODULE__.Definition.create( - integration_name, - graphql, - default_schema, - raw_scenarios - ) - end - end - - def scenario_tests(definition) do - count = length(definition.scenarios) - - for {scenario, index} <- Enum.with_index(definition.scenarios) do - quote do - test unquote(definition.name) <> ", scenario #{unquote(index) + 1} of #{unquote(count)}" do - assert_scenario(unquote(Macro.escape(definition)), unquote(Macro.escape(scenario))) - end - end - end - end - - defmacro __using__(opts) do - root = Keyword.fetch!(opts, :root) - default_schema = Macro.expand(Keyword.fetch!(opts, :default_schema), __ENV__) - definitions = definitions(root, default_schema) - - [ - quote do - use Absinthe.Case, unquote(opts) - @before_compile unquote(__MODULE__) - end, - for definition <- definitions do - scenario_tests(definition) - end - ] - end - - defmacro __before_compile__(_env) do - quote do - def assert_scenario(definition, {options, {:raise, exception}}) when is_list(options) do - assert_raise(exception, fn -> run(definition.graphql, definition.schema, options) end) - end - - def assert_scenario(definition, {options, result}) when is_list(options) do - assert_result( - result, - run(definition.graphql, definition.schema, options) - ) - end - end - end -end diff --git a/test/support/validation_phase_case.ex b/test/support/validation_phase_case.ex index 0cccbdcc38..b865078db7 100644 --- a/test/support/validation_phase_case.ex +++ b/test/support/validation_phase_case.ex @@ -2,19 +2,19 @@ defmodule Absinthe.ValidationPhaseCase do import ExUnit.Assertions alias Absinthe.{Blueprint, Schema, Phase, Pipeline, Language} - @type error_checker_t :: ([{Blueprint.t(), Blueprint.Error.t()}] -> boolean) + @type error_checker_t :: ([{Blueprint.t(), Phase.Error.t()}] -> boolean) def get_error_location(line) do case List.wrap(line) do + [] -> + "(at any line number)" + [single] -> "(from line ##{single})" multiple when is_list(multiple) -> numbers = multiple |> Enum.join(", #") 
"(from lines ##{numbers})" - - nil -> - "(at any line number)" end end @@ -124,11 +124,10 @@ defmodule Absinthe.ValidationPhaseCase do end end - @spec assert_valid(Schema.t(), [Phase.t()], Language.Source.t(), map) :: no_return def assert_valid(schema, validations, document, options) do result = case run(schema, validations, document, options) do - {:ok, result} -> + {:ok, result, _} -> result # :jump, etc @@ -148,13 +147,6 @@ defmodule Absinthe.ValidationPhaseCase do Enum.join(formatted_errors, "\n ") <> "\n ---" end - @spec assert_invalid( - Schema.t(), - [Phase.t()], - Language.Source.t(), - map, - [error_checker_t] | error_checker_t - ) :: no_return def assert_invalid(schema, validations, document, options, error_checkers) do result = case run(schema, validations, document, options) do @@ -190,7 +182,7 @@ defmodule Absinthe.ValidationPhaseCase do |> Pipeline.upto(Phase.Document.Validation.Result) |> Pipeline.reject(fn phase -> Regex.match?(~r/Validation/, Atom.to_string(phase)) and - not (phase in [Phase.Document.Validation.Result | validations]) + phase not in [Phase.Document.Validation.Result | validations] end) end diff --git a/test/test_helper.exs b/test/test_helper.exs index e4ad352512..3be8f1fd31 100644 --- a/test/test_helper.exs +++ b/test/test_helper.exs @@ -1,2 +1,23 @@ ExUnit.configure(exclude: [pending: true], timeout: 30_000) ExUnit.start() + +fixtures_schemas = [ + Absinthe.Fixtures.ArgumentsSchema, + Absinthe.Fixtures.ColorSchema, + Absinthe.Fixtures.ContactSchema, + Absinthe.Fixtures.CustomTypesSchema, + Absinthe.Fixtures.IdTestSchema, + Absinthe.Fixtures.NullListsSchema, + Absinthe.Fixtures.ObjectTimesSchema, + Absinthe.Fixtures.OnlyQuerySchema, + Absinthe.Fixtures.PetsSchema, + Absinthe.Fixtures.StrictSchema, + Absinthe.Fixtures.TimesSchema, + Absinthe.Fixtures.Things.SDLSchema, + Absinthe.Fixtures.Things.MacroSchema +] + +for schema <- fixtures_schemas, + schema.__absinthe_schema_provider__ == Absinthe.Schema.PersistentTerm do + 
Absinthe.Schema.Manager.start_link(schema) +end