refactors and documents error messages
ityonemo committed Apr 3, 2023
1 parent 90429e2 commit cdf9bf6
Showing 20 changed files with 283 additions and 253 deletions.
6 changes: 3 additions & 3 deletions README.md
@@ -67,16 +67,16 @@ defmodule SchemaModule do
""")
end
```

```
iex> SchemaModule.validate_input("some string")
{:error, schema_pointer: "#", error_value: "some string", json_pointer: "#/parameter"}}
{:error, absolute_keyword_location: "#", error_value: "some string", instance_location: "#/parameter"}
iex> SchemaModule.validate_input(%{"parameter" => "2"})
{:error, schema_pointer: "#/properties/parameter", error_value: "2", json_pointer: "#/parameter"}}
{:error, absolute_keyword_location: "#/properties/parameter", error_value: "2", instance_location: "#/parameter"}
iex> SchemaModule.validate_input(%{"parameter" => 2})
:ok
```

## Licensing notes
24 changes: 22 additions & 2 deletions lib/exonerate.ex
@@ -34,13 +34,33 @@ defmodule Exonerate do
end
```
The above module generates a function `MyModule.function_name/1` that takes an erlang JSON term
The above module generates a function `MyModule.function_name/1` that takes a BEAM JSON term
(`string | number | array | map | bool | nil`) and validates it based on the JSONSchema. If
the term validates, it produces `:ok`. If the term fails to validate, it produces
`{:error, keyword}`, where the key `:json_pointer` and points to the error location in the passed
`{:error, keyword}`, where the key `:instance_location` points to the error location in the passed
parameter, `:absolute_keyword_location` points to the schema keyword whose validation failed, and `:error_value` is the
failing inner term.
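As a hedged usage sketch (`MyModule` and `function_name` are the placeholder names from the snippet above; the pointer values are illustrative, not guaranteed):
```elixir
case MyModule.function_name(%{"parameter" => "2"}) do
  :ok ->
    :accepted

  {:error, error_keywords} ->
    # pick out the standard locations; see the keyword list below
    Keyword.take(error_keywords, [:instance_location, :absolute_keyword_location, :error_value])
end
```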
## Error keywords
The following error keywords conform to the JSONSchema spec
(https://json-schema.org/draft/2020-12/json-schema-core.html#name-format):
- `:absolute_keyword_location`: a JSON pointer to the keyword in the schema that failed.
- `:instance_location`: a JSON pointer to the location in the instance that failed.
- `:errors`: a list of errors generated when a combining filter fails to match.
The following error keywords are not standard and are specific to Exonerate:
- `:error_value`: the innermost term that failed to validate.
- `:matches`: a list of JSON pointers to the keywords that matched a combining filter.
- `:reason`: a string describing the error, used when the failing filter can fail for
non-obvious reasons. For example, `oneOf` fails with the reason "no matches" when none of
the child schemas match, but with the reason "multiple matches" when more than one of the
child schemas matches.
- `:required`: a list of object keys that were required but missing.
- `:ref_trace`: a list of `$ref` keywords that were followed to get to the failing keyword.
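For concreteness, a hedged sketch of how these keywords might combine for a failing `oneOf` (the pointers and input value are hypothetical; the exact shape depends on the schema):
```elixir
{:error,
 error_value: %{"hypothetical" => "input"},
 instance_location: "#",
 absolute_keyword_location: "#/oneOf",
 reason: "no matches",
 errors: [
   # one nested error keyword list per failing child schema
 ]}
```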
## Options
The following options are available:
6 changes: 3 additions & 3 deletions lib/exonerate/combining/any_of.ex
@@ -64,7 +64,7 @@ defmodule Exonerate.Combining.AnyOf do
ok

Exonerate.Tools.error_match(error) ->
{:error, Keyword.update(opts, :failures, [error], &[error | &1])}
{:error, Keyword.update(opts, :errors, [error], &[error | &1])}
end

fun, {:ok, seen} ->
@@ -102,7 +102,7 @@ defmodule Exonerate.Combining.AnyOf do
ok

Exonerate.Tools.error_match(error) ->
{:error, Keyword.update(opts, :failures, [error], &[error | &1])}
{:error, Keyword.update(opts, :errors, [error], &[error | &1])}
end

fun, {:ok, first_unseen_index} ->
@@ -137,7 +137,7 @@ defmodule Exonerate.Combining.AnyOf do
{:halt, :ok}

Exonerate.Tools.error_match(error) ->
{:cont, {:error, Keyword.update(opts, :failures, [error], &[error | &1])}}
{:cont, {:error, Keyword.update(opts, :errors, [error], &[error | &1])}}
end
end
)
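A minimal standalone sketch of the error-accumulation pattern these hunks adopt (the `:failures` key becomes `:errors`): `Keyword.update/4` creates the list on the first failure and prepends on each later one.
```elixir
error1 = [reason: "hypothetical first failure"]
error2 = [reason: "hypothetical second failure"]

opts = Keyword.update([], :errors, [error1], &[error1 | &1])
opts = Keyword.update(opts, :errors, [error2], &[error2 | &1])
# opts => [errors: [error2, error1]]
```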
4 changes: 2 additions & 2 deletions lib/exonerate/combining/one_of.ex
@@ -61,7 +61,7 @@ defmodule Exonerate.Combining.OneOf do

Exonerate.Tools.error_match(error) ->
{:cont,
{{:error, Keyword.update(opts, :failures, [error], &[error | &1])}, index + 1}}
{{:error, Keyword.update(opts, :errors, [error], &[error | &1])}, index + 1}}
end

fun, {ok = {:ok, _seen}, last, index} ->
@@ -107,7 +107,7 @@ defmodule Exonerate.Combining.OneOf do

Exonerate.Tools.error_match(error) ->
{:cont,
{{:error, Keyword.update(opts, :failures, [error], &[error | &1])}, index + 1}}
{{:error, Keyword.update(opts, :errors, [error], &[error | &1])}, index + 1}}
end

fun, {:ok, last, index} ->
173 changes: 87 additions & 86 deletions lib/exonerate/degeneracy.ex
@@ -43,119 +43,120 @@ defmodule Exonerate.Degeneracy do

opts = Keyword.delete(opts, :entrypoint)

canonicalized =
  source
  |> JsonPointer.resolve_json!(entrypoint)
  |> canonicalize_recursive(opts)
  |> case do
    ## very trivial
    context when context === %{} ->
      true

    ## redundant filters
    %{"$ref" => ref} when draft in @ref_override_drafts ->
      # ref overrides all other filters in draft <= 7
      %{"$ref" => ref, "type" => @all_types}

    context = %{"maximum" => max, "exclusiveMaximum" => emax}
    when is_number(emax) and max >= emax ->
      canonicalize_purged(context, "maximum", opts)

    context = %{"minimum" => min, "exclusiveMinimum" => emin}
    when is_number(emin) and min <= emin ->
      canonicalize_purged(context, "minimum", opts)

    context = %{"maxContains" => _} when not is_map_key(context, "contains") ->
      canonicalize_purged(context, "maxContains", opts)

    context = %{"minContains" => _} when not is_map_key(context, "contains") ->
      canonicalize_purged(context, "minContains", opts)

    context = %{"if" => _}
    when not is_map_key(context, "then") and not is_map_key(context, "else") ->
      canonicalize_purged(context, "if", opts)

    context = %{"exclusiveMinimum" => true} when not is_map_key(context, "minimum") ->
      canonicalize_purged(context, "exclusiveMinimum", opts)

    context = %{"exclusiveMaximum" => true} when not is_map_key(context, "maximum") ->
      canonicalize_purged(context, "exclusiveMaximum", opts)

    ## degenerate-OK filters
    context = %{"exclusiveMinimum" => false} ->
      canonicalize_purged(context, "exclusiveMinimum", opts)

    context = %{"exclusiveMaximum" => false} ->
      canonicalize_purged(context, "exclusiveMaximum", opts)

    context = %{"propertyNames" => true} ->
      canonicalize_purged(context, "propertyNames", opts)

    context = %{"uniqueItems" => false} ->
      canonicalize_purged(context, "uniqueItems", opts)

    context = %{"minLength" => 0} ->
      canonicalize_purged(context, "minLength", opts)

    context = %{"minItems" => 0} ->
      canonicalize_purged(context, "minItems", opts)

    context = %{"minProperties" => 0} ->
      canonicalize_purged(context, "minProperties", opts)

    context = %{"minContains" => 0, "contains" => _}
    when not is_map_key(context, "maxContains") ->
      canonicalize_purged(context, ["minContains", "contains"], opts)

    context = %{"pattern" => regex_all} when regex_all in @regex_all ->
      # this is not comprehensive, but it's good enough for a first pass.
      canonicalize_purged(context, "pattern", opts)

    context = %{"additionalItems" => _} when not is_map_key(context, "items") ->
      canonicalize_purged(context, "additionalItems", opts)

    context = %{"additionalItems" => _, "items" => items}
    when is_map(items) or is_boolean(items) ->
      canonicalize_purged(context, "additionalItems", opts)

    context = %{"unevaluatedItems" => _, "items" => items}
    when is_map(items) or is_boolean(items) ->
      canonicalize_purged(context, "unevaluatedItems", opts)

    ### empty filter lists
    context = %{"required" => []} ->
      canonicalize_purged(context, "required", opts)

    context = %{"allOf" => []} ->
      canonicalize_purged(context, "allOf", opts)

    # combine minLength and maxLength
    context = %{"minLength" => min, "maxLength" => max} ->
      # note the min-max-length string doesn't look like a normal JsonSchema filter.
      context
      |> Map.put("min-max-length", [min, max])
      |> canonicalize_purged(["minLength", "maxLength"], opts)

    ## type normalization
    context = %{"type" => type} when is_binary(type) ->
      context
      |> Map.put("type", [type])
      |> canonicalize(opts)

    context when not is_map_key(context, "type") ->
      canonicalize_no_type(context, opts)

    ## const and enum normalization
    context = %{"const" => const, "enum" => enum} ->
      if const in enum do
        canonicalize_purged(context, "enum", opts)
      else
        context
      end

    context ->
      context
  end
  |> canonicalize_finalize

JsonPointer.update_json!(source, entrypoint, fn _ -> canonicalized end)
end
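As a hedged illustration of two of the clause families above, assuming `canonicalize_purged/3` (not shown in this hunk) simply drops the named keys and recurses:
```elixir
# redundant filter: with a numeric exclusiveMaximum at or below maximum,
# "maximum" can never be the binding constraint, so it is purged:
#   %{"maximum" => 10, "exclusiveMaximum" => 10}  ~>  %{"exclusiveMaximum" => 10}

# degenerate-OK filter: a "minLength" of 0 always passes, so it is purged
# (the type clause then also normalizes "type" to a list):
#   %{"minLength" => 0, "type" => "string"}  ~>  %{"type" => ["string"]}
```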
2 changes: 1 addition & 1 deletion lib/exonerate/filter/property_names.ex
@@ -26,7 +26,7 @@ defmodule Exonerate.Filter.PropertyNames do
:ok

{:error, errors} ->
{:error, Keyword.update!(errors, :json_pointer, &Path.join(&1, key))}
{:error, Keyword.update!(errors, :instance_location, &Path.join(&1, key))}
end
end
end
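A small sketch of the path rewrite above: the inner validator reports an `:instance_location`, and the failing property key is joined onto it before the error is returned.
```elixir
errors = [instance_location: "#", error_value: 42]
Keyword.update!(errors, :instance_location, &Path.join(&1, "some_key"))
# => [instance_location: "#/some_key", error_value: 42]
```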
40 changes: 24 additions & 16 deletions lib/exonerate/tools.ex
@@ -5,32 +5,36 @@ defmodule Exonerate.Tools do
alias Exonerate.Type

# GENERAL-USE MACROS
defmacro mismatch(error_value, schema_pointer, json_pointer, opts \\ [])
defmacro mismatch(error_value, absolute_keyword_location, instance_location, opts \\ [])

defmacro mismatch(error_value, {schema_pointer, extras}, json_pointer, opts) do
primary = Keyword.take(binding(), ~w(error_value json_pointer)a)
schema_pointer = JsonPointer.to_path(schema_pointer)
defmacro mismatch(error_value, {absolute_keyword_location, extras}, instance_location, opts) do
primary = Keyword.take(binding(), ~w(error_value instance_location)a)
absolute_keyword_location = JsonPointer.to_path(absolute_keyword_location)

schema_pointer = [
schema_pointer:
absolute_keyword_location = [
absolute_keyword_location:
quote do
Path.join(unquote(schema_pointer), unquote(extras))
Path.join(unquote(absolute_keyword_location), unquote(extras))
end
]

extras = Keyword.take(opts, ~w(reason failures matches required)a)
extras = Keyword.take(opts, ~w(reason errors matches required)a)

quote bind_quoted: [error_params: primary ++ schema_pointer ++ extras] do
quote bind_quoted: [error_params: primary ++ absolute_keyword_location ++ extras] do
{:error, error_params}
end
end

defmacro mismatch(error_value, schema_pointer, json_pointer, opts) do
primary = Keyword.take(binding(), ~w(error_value json_pointer)a)
schema_pointer = [schema_pointer: JsonPointer.to_path(schema_pointer)]
extras = Keyword.take(opts, ~w(reason failures matches required)a)
defmacro mismatch(error_value, absolute_keyword_location, instance_location, opts) do
primary = Keyword.take(binding(), ~w(error_value instance_location)a)

quote bind_quoted: [error_params: primary ++ schema_pointer ++ extras] do
absolute_keyword_location = [
absolute_keyword_location: JsonPointer.to_path(absolute_keyword_location)
]

extras = Keyword.take(opts, ~w(reason errors matches required)a)

quote bind_quoted: [error_params: primary ++ absolute_keyword_location ++ extras] do
{:error, error_params}
end
end
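A loose expansion sketch for the second clause (the rendering of `JsonPointer.to_path/1` and the keyword order are assumptions, not taken from this diff):
```elixir
# hypothetical invocation inside a generated validator
Exonerate.Tools.mismatch(value, ["type"], "#/parameter", reason: "type mismatch")
# ~> {:error,
#      error_value: value,
#      instance_location: "#/parameter",
#      absolute_keyword_location: "#/type",
#      reason: "type mismatch"}
```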
@@ -203,8 +207,12 @@ defmodule Exonerate.Tools do

def decode!(string, opts) do
case Keyword.get(opts, :decoder, Jason) do
Jason -> Jason.decode!(string)
YamlElixir -> YamlElixir.read_from_string!(string)
Jason ->
Jason.decode!(string)

YamlElixir ->
YamlElixir.read_from_string!(string)

{module, function} ->
apply(module, function, [string])
end
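A hedged usage sketch of the decoder dispatch above (`MyDecoder.parse/1` is hypothetical; `Jason` is the default):
```elixir
Exonerate.Tools.decode!(~S({"a": 1}), [])
# => %{"a" => 1}, via Jason.decode!/1

Exonerate.Tools.decode!("a: 1", decoder: YamlElixir)
# => %{"a" => 1}, via YamlElixir.read_from_string!/1

Exonerate.Tools.decode!("{}", decoder: {MyDecoder, :parse})
# => whatever MyDecoder.parse/1 returns
```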