From bd996b26726b125bf007ff46d42ef7193072f4c6 Mon Sep 17 00:00:00 2001 From: rustra Date: Fri, 19 Jun 2020 17:07:46 +0200 Subject: [PATCH 1/2] Improve specs --- lib/json/ld/compaction.ex | 975 +++++++++++++++---------- lib/json/ld/context.ex | 438 ++++++----- lib/json/ld/context/term_definition.ex | 12 +- lib/json/ld/decoder.ex | 298 ++++---- lib/json/ld/encoder.ex | 355 +++++---- lib/json/ld/exceptions.ex | 40 +- lib/json/ld/expansion.ex | 708 ++++++++++-------- lib/json/ld/flattening.ex | 334 +++++---- lib/json/ld/iri_expansion.ex | 77 +- lib/json/ld/node_identifier_map.ex | 22 +- lib/json/ld/options.ex | 19 +- lib/json/ld/utils.ex | 37 +- lib/json_ld.ex | 49 +- 13 files changed, 2018 insertions(+), 1346 deletions(-) diff --git a/lib/json/ld/compaction.ex b/lib/json/ld/compaction.ex index cf23154..0184754 100644 --- a/lib/json/ld/compaction.ex +++ b/lib/json/ld/compaction.ex @@ -2,51 +2,66 @@ defmodule JSON.LD.Compaction do @moduledoc nil import JSON.LD.Utils - alias JSON.LD.Context + alias JSON.LD.{Context, Options} - def compact(input, context, options \\ %JSON.LD.Options{}) do - with options = JSON.LD.Options.new(options), - active_context = JSON.LD.context(context, options), + @spec compact(map | [map], map | nil, Options.t() | Enum.t()) :: map + def compact(input, context, options \\ %Options{}) do + with options = Options.new(options), + active_context = JSON.LD.context(context, options), inverse_context = Context.inverse(active_context), - expanded = JSON.LD.expand(input, options) - do + expanded = JSON.LD.expand(input, options) do result = case do_compact(expanded, active_context, inverse_context, nil, options.compact_arrays) do [] -> %{} + result when is_list(result) -> - # TODO: Spec fixme? We're setting vocab to true, as other implementations do it, but this is not mentioned in the spec + # TODO: Spec fixme? 
We're setting vocab to true, as other implementations + # do it, but this is not mentioned in the spec %{compact_iri("@graph", active_context, inverse_context, nil, true) => result} + result -> result end + if Context.empty?(active_context), do: result, else: Map.put(result, "@context", context["@context"] || context) end end - defp do_compact(element, active_context, inverse_context, active_property, - compact_arrays \\ true) - - - # 1) If element is a scalar, it is already in its most compact form, so simply return element. + @spec do_compact(any, Context.t(), map, String.t() | nil, boolean) :: any + defp do_compact( + element, + active_context, + inverse_context, + active_property, + compact_arrays \\ true + ) + + # 1) If element is a scalar, it is already in its most compact form, so simply + # return element. defp do_compact(element, _, _, _, _) - when is_binary(element) or is_number(element) or is_boolean(element), - do: element + when is_binary(element) or is_number(element) or is_boolean(element), + do: element # 2) If element is an array defp do_compact(element, active_context, inverse_context, active_property, compact_arrays) - when is_list(element) do - result = Enum.reduce(element, [], fn (item, result) -> - case do_compact(item, active_context, inverse_context, active_property, compact_arrays) do - nil -> result - compacted_item -> [compacted_item | result] - end - end) |> Enum.reverse + when is_list(element) do + result = + Enum.reduce(element, [], fn item, result -> + case do_compact(item, active_context, inverse_context, active_property, compact_arrays) do + nil -> result + compacted_item -> [compacted_item | result] + end + end) + |> Enum.reverse() + if compact_arrays and length(result) == 1 and - is_nil((term_def = active_context.term_defs[active_property]) && term_def.container_mapping) do + is_nil( + (term_def = active_context.term_defs[active_property]) && term_def.container_mapping + ) do List.first(result) else result @@ -55,426 +70,597 @@ 
defmodule JSON.LD.Compaction do # 3) Otherwise element is a JSON object. defp do_compact(element, active_context, inverse_context, active_property, compact_arrays) - when is_map(element) do + when is_map(element) do # 4) - if (Map.has_key?(element, "@value") or Map.has_key?(element, "@id")) do + if Map.has_key?(element, "@value") or Map.has_key?(element, "@id") do result = compact_value(element, active_context, inverse_context, active_property) + if scalar?(result) do result else - do_compact_non_scalar(element, active_context, inverse_context, active_property, compact_arrays) + do_compact_non_scalar( + element, + active_context, + inverse_context, + active_property, + compact_arrays + ) end else - do_compact_non_scalar(element, active_context, inverse_context, active_property, compact_arrays) + do_compact_non_scalar( + element, + active_context, + inverse_context, + active_property, + compact_arrays + ) end end - defp do_compact_non_scalar(element, active_context, inverse_context, active_property, compact_arrays) do + @spec do_compact_non_scalar(any, Context.t(), map, String.t() | nil, boolean) :: any + defp do_compact_non_scalar( + element, + active_context, + inverse_context, + active_property, + compact_arrays + ) do # 5) inside_reverse = active_property == "@reverse" # 6) + 7) element - |> Enum.sort_by(fn {expanded_property , _} -> expanded_property end) - |> Enum.reduce(%{}, fn ({expanded_property, expanded_value}, result) -> - cond do - # 7.1) - expanded_property in ~w[@id @type] -> - # 7.1.1) - compacted_value = - if is_binary(expanded_value) do - compact_iri(expanded_value, active_context, inverse_context, nil, - expanded_property == "@type") + |> Enum.sort_by(fn {expanded_property, _} -> expanded_property end) + |> Enum.reduce(%{}, fn {expanded_property, expanded_value}, result -> + cond do + # 7.1) + expanded_property in ~w[@id @type] -> + # 7.1.1) + compacted_value = + if is_binary(expanded_value) do + compact_iri( + expanded_value, + active_context, + 
inverse_context, + nil, + expanded_property == "@type" + ) + # 7.1.2) - else - # 7.1.2.1) - # TODO: RDF.rb calls also Array#compact - if(is_list(expanded_value), - do: expanded_value, - else: [expanded_value]) - # 7.1.2.2) - |> Enum.reduce([], fn (expanded_type, compacted_value) -> - compacted_value ++ - [compact_iri(expanded_type, active_context, inverse_context, nil, true)] - end) - # 7.1.2.3) - |> case(do: ( - [compacted_value] -> compacted_value - compacted_value -> compacted_value)) + else + # 7.1.2.1) + # TODO: RDF.rb calls also Array#compact + if(is_list(expanded_value), + do: expanded_value, + else: [expanded_value] + ) + # 7.1.2.2) + |> Enum.reduce([], fn expanded_type, compacted_value -> + compacted_value ++ + [compact_iri(expanded_type, active_context, inverse_context, nil, true)] + end) + # 7.1.2.3) + |> case do + [compacted_value] -> compacted_value + compacted_value -> compacted_value end - # 7.1.3) - alias = compact_iri(expanded_property, active_context, inverse_context, nil, true) - # 7.1.4) - Map.put(result, alias, compacted_value) + end - # 7.2) - expanded_property == "@reverse" -> - # 7.2.1) - compacted_value = do_compact(expanded_value, active_context, inverse_context, "@reverse") - # 7.2.2) - {compacted_value, result} = - Enum.reduce compacted_value, {%{}, result}, - fn ({property, value}, {compacted_value, result}) -> - term_def = active_context.term_defs[property] - # 7.2.2.1) - if term_def && term_def.reverse_property do - # 7.2.2.1.1) - value = - if (!compact_arrays or term_def.container_mapping == "@set") and - !is_list(value) do - [value] - else - value - end - # 7.2.2.1.2) + 7.2.2.1.3) - {compacted_value, merge_compacted_value(result, property, value)} + # 7.1.3) + alias = compact_iri(expanded_property, active_context, inverse_context, nil, true) + # 7.1.4) + Map.put(result, alias, compacted_value) + + # 7.2) + expanded_property == "@reverse" -> + # 7.2.1) + compacted_value = + do_compact(expanded_value, active_context, inverse_context, 
"@reverse") + + # 7.2.2) + {compacted_value, result} = + Enum.reduce(compacted_value, {%{}, result}, fn {property, value}, + {compacted_value, result} -> + term_def = active_context.term_defs[property] + # 7.2.2.1) + if term_def && term_def.reverse_property do + # 7.2.2.1.1) + value = + if (!compact_arrays or term_def.container_mapping == "@set") and !is_list(value) do + [value] else - {Map.put(compacted_value, property, value), result} + value end - end - # 7.2.3) - unless Enum.empty?(compacted_value) do - # 7.2.3.1) - alias = compact_iri("@reverse", active_context, inverse_context, nil, true) - # 7.2.3.2) - Map.put(result, alias, compacted_value) - else - result - end - - # 7.3) - expanded_property == "@index" && - active_context.term_defs[active_property] && - active_context.term_defs[active_property].container_mapping == "@index" -> - result - - # 7.4) - expanded_property in ~w[@index @value @language] -> - # 7.4.1) - alias = compact_iri(expanded_property, active_context, inverse_context, nil, true) - # 7.4.2) - Map.put(result, alias, expanded_value) - true -> - # 7.5) - result = - if expanded_value == [] do - # 7.5.1) - item_active_property = - compact_iri(expanded_property, active_context, inverse_context, - expanded_value, true, inside_reverse) - # 7.5.2) - Map.update(result, item_active_property, [], fn - value when not is_list(value) -> [value] - value -> value - end) + # 7.2.2.1.2) + 7.2.2.1.3) + {compacted_value, merge_compacted_value(result, property, value)} else - result + {Map.put(compacted_value, property, value), result} end + end) - # 7.6) - Enum.reduce(expanded_value, result, fn (expanded_item, result) -> - # 7.6.1) - item_active_property = - compact_iri(expanded_property, active_context, inverse_context, - expanded_item, true, inside_reverse) + # 7.2.3) + unless Enum.empty?(compacted_value) do + # 7.2.3.1) + alias = compact_iri("@reverse", active_context, inverse_context, nil, true) + # 7.2.3.2) + Map.put(result, alias, compacted_value) + else + 
result + end - # 7.6.2) - term_def = active_context.term_defs[item_active_property] - container = (term_def && term_def.container_mapping) || nil + # 7.3) + expanded_property == "@index" && + active_context.term_defs[active_property] && + active_context.term_defs[active_property].container_mapping == "@index" -> + result - # 7.6.3) - value = (is_map(expanded_item) && expanded_item["@list"]) || expanded_item - compacted_item = - do_compact(value, active_context, inverse_context, - item_active_property, compact_arrays) + # 7.4) + expanded_property in ~w[@index @value @language] -> + # 7.4.1) + alias = compact_iri(expanded_property, active_context, inverse_context, nil, true) + # 7.4.2) + Map.put(result, alias, expanded_value) - # 7.6.4) - compacted_item = - if list?(expanded_item) do - # 7.6.4.1) - compacted_item = - unless is_list(compacted_item), - do: [compacted_item], else: compacted_item - # 7.6.4.2) - unless container == "@list" do - # 7.6.4.2.1) - compacted_item = %{ - # TODO: Spec fixme? We're setting vocab to true, as other implementations do it, but this is not mentioned in the spec - compact_iri("@list", active_context, inverse_context, nil, true) => - compacted_item} - # 7.6.4.2.2) - if Map.has_key?(expanded_item, "@index") do - Map.put(compacted_item, - # TODO: Spec fixme? 
We're setting vocab to true, as other implementations do it, but this is not mentioned in the spec - compact_iri("@index", active_context, inverse_context, nil, true), - expanded_item["@index"]) - else + true -> + # 7.5) + result = + if expanded_value == [] do + # 7.5.1) + item_active_property = + compact_iri( + expanded_property, + active_context, + inverse_context, + expanded_value, + true, + inside_reverse + ) + + # 7.5.2) + Map.update(result, item_active_property, [], fn + value when not is_list(value) -> [value] + value -> value + end) + else + result + end + + # 7.6) + Enum.reduce(expanded_value, result, fn expanded_item, result -> + # 7.6.1) + item_active_property = + compact_iri( + expanded_property, + active_context, + inverse_context, + expanded_item, + true, + inside_reverse + ) + + # 7.6.2) + term_def = active_context.term_defs[item_active_property] + container = (term_def && term_def.container_mapping) || nil + + # 7.6.3) + value = (is_map(expanded_item) && expanded_item["@list"]) || expanded_item + + compacted_item = + do_compact( + value, + active_context, + inverse_context, + item_active_property, + compact_arrays + ) + + # 7.6.4) + compacted_item = + if list?(expanded_item) do + # 7.6.4.1) + compacted_item = + unless is_list(compacted_item), do: [compacted_item], else: compacted_item + + # 7.6.4.2) + unless container == "@list" do + # 7.6.4.2.1) + compacted_item = %{ + # TODO: Spec fixme? We're setting vocab to true, as other + # implementations do it, but this is not mentioned in the spec + compact_iri("@list", active_context, inverse_context, nil, true) => compacted_item - end - # 7.6.4.3) + } + + # 7.6.4.2.2) + if Map.has_key?(expanded_item, "@index") do + Map.put( + compacted_item, + # TODO: Spec fixme? 
We're setting vocab to true, as other + # implementations do it, but this is not mentioned in the spec + compact_iri("@index", active_context, inverse_context, nil, true), + expanded_item["@index"] + ) else - if Map.has_key?(result, item_active_property) do - raise JSON.LD.CompactionToListOfListsError, - message: "The compacted document contains a list of lists as multiple lists have been compacted to the same term." - else - compacted_item - end + compacted_item end + + # 7.6.4.3) else - compacted_item + if Map.has_key?(result, item_active_property) do + raise JSON.LD.CompactionToListOfListsError, + message: + "The compacted document contains a list of lists as multiple lists have been compacted to the same term." + else + compacted_item + end end + else + compacted_item + end - # 7.6.5) - if container in ~w[@language @index] do - map_object = result[item_active_property] || %{} - compacted_item = - if container == "@language" and - is_map(compacted_item) and Map.has_key?(compacted_item, "@value"), - do: compacted_item["@value"], - else: compacted_item - map_key = expanded_item[container] - map_object = merge_compacted_value(map_object, map_key, compacted_item) - Map.put(result, item_active_property, map_object) + # 7.6.5) + if container in ~w[@language @index] do + map_object = result[item_active_property] || %{} + + compacted_item = + if container == "@language" and + is_map(compacted_item) and Map.has_key?(compacted_item, "@value"), + do: compacted_item["@value"], + else: compacted_item + + map_key = expanded_item[container] + map_object = merge_compacted_value(map_object, map_key, compacted_item) + Map.put(result, item_active_property, map_object) # 7.6.6) - else - compacted_item = - if !is_list(compacted_item) and (!compact_arrays or + else + compacted_item = + if !is_list(compacted_item) and + (!compact_arrays or container in ~w[@set @list] or expanded_property in ~w[@list @graph]), - do: [compacted_item], - else: compacted_item - 
merge_compacted_value(result, item_active_property, compacted_item) - end - end) - end + do: [compacted_item], + else: compacted_item + + merge_compacted_value(result, item_active_property, compacted_item) + end + end) + end end) end + @spec merge_compacted_value(map, String.t(), any) :: map defp merge_compacted_value(map, key, value) do - Map.update map, key, value, fn + Map.update(map, key, value, fn old_value when is_list(old_value) and is_list(value) -> old_value ++ value + old_value when is_list(old_value) -> old_value ++ [value] + old_value when is_list(value) -> [old_value | value] + old_value -> [old_value, value] - end + end) end - @doc """ IRI Compaction Details at """ - def compact_iri(iri, active_context, inverse_context, - value \\ nil, vocab \\ false, reverse \\ false) + @spec compact_iri(any, Context.t(), map, any | nil, boolean, boolean) :: any | nil + def compact_iri( + iri, + active_context, + inverse_context, + value \\ nil, + vocab \\ false, + reverse \\ false + ) # 1) If iri is null, return null. def compact_iri(nil, _, _, _, _, _), do: nil def compact_iri(iri, active_context, inverse_context, value, vocab, reverse) do # 2) If vocab is true and iri is a key in inverse context: - term = if vocab && Map.has_key?(inverse_context, iri) do - # 2.1) Initialize default language to active context's default language, if it has one, otherwise to @none. - # TODO: Spec fixme: This step is effectively useless; see Spec fixme on step 2.6.3 - # default_language = active_context.default_language || "@none" - # 2.3) Initialize type/language to @language, and type/language value to @null. These two variables will keep track of the preferred type mapping or language mapping for a term, based on what is compatible with value. - type_language = "@language" - type_language_value = "@null" - # 2.2) Initialize containers to an empty array. 
This array will be used to keep track of an ordered list of preferred container mappings for a term, based on what is compatible with value. - # 2.4) If value is a JSON object that contains the key @index, then append the value @index to containers. - containers = if index?(value), do: ["@index"], else: [] - {containers, type_language, type_language_value} = - cond do - # 2.5) If reverse is true, set type/language to @type, type/language value to @reverse, and append @set to containers. - reverse -> - containers = containers ++ ["@set"] - type_language = "@type" - type_language_value = "@reverse" - {containers, type_language, type_language_value} - # 2.6) Otherwise, if value is a list object, then set type/language and type/language value to the most specific values that work for all items in the list as follows: - list?(value) -> - # 2.6.1) If @index is a not key in value, then append @list to containers. - containers = - if not index?(value), - do: containers ++ ["@list"], else: containers - # 2.6.2) Initialize list to the array associated with the key @list in value. - list = value["@list"] - # 2.6.3) Initialize common type and common language to null. If list is empty, set common language to default language. - # TODO: Spec fixme: Setting common language to default language is effectively useless, since the only place it is used is the follow loop in 2.6.4, which is immediately left when the list is empty - {common_type, common_language} = {nil, nil} - {type_language, type_language_value} = - if Enum.empty?(list) do - {type_language, type_language_value} - else - # 2.6.4) For each item in list: - {common_type, common_language} = Enum.reduce_while list, {common_type, common_language}, - fn (item, {common_type, common_language}) -> - # 2.6.4.1) Initialize item language to @none and item type to @none. 
- {item_type, item_language} = {"@none", "@none"} - # 2.6.4.2) If item contains the key @value: - {item_type, item_language} = - if Map.has_key?(item, "@value") do - cond do - # 2.6.4.2.1) If item contains the key @language, then set item language to its associated value. - Map.has_key?(item, "@language") -> - {item_type, item["@language"]} - # 2.6.4.2.2) Otherwise, if item contains the key @type, set item type to its associated value. - Map.has_key?(item, "@type") -> - {item["@type"], item_language} - # 2.6.4.2.3) Otherwise, set item language to @null. - true -> - {item_type, "@null"} + term = + if vocab && Map.has_key?(inverse_context, iri) do + # 2.1) Initialize default language to active context's default language, if it has + # one, otherwise to @none. + # TODO: Spec fixme: This step is effectively useless; see Spec fixme on step 2.6.3 + # default_language = active_context.default_language || "@none" + # 2.3) Initialize type/language to @language, and type/language value to @null. + # These two variables will keep track of the preferred type mapping or language + # mapping for a term, based on what is compatible with value. + type_language = "@language" + type_language_value = "@null" + + # 2.2) Initialize containers to an empty array. This array will be used to keep + # track of an ordered list of preferred container mappings for a term, based on + # what is compatible with value. + # 2.4) If value is a JSON object that contains the key @index, then append the + # value @index to containers. + containers = if index?(value), do: ["@index"], else: [] + + {containers, type_language, type_language_value} = + cond do + # 2.5) If reverse is true, set type/language to @type, type/language value to + # @reverse, and append @set to containers. 
+ reverse -> + containers = containers ++ ["@set"] + type_language = "@type" + type_language_value = "@reverse" + {containers, type_language, type_language_value} + + # 2.6) Otherwise, if value is a list object, then set type/language and + # type/language value to the most specific values that work for all items + # in the list as follows: + list?(value) -> + # 2.6.1) If @index is a not key in value, then append @list to containers. + containers = if not index?(value), do: containers ++ ["@list"], else: containers + # 2.6.2) Initialize list to the array associated with the key @list in value. + list = value["@list"] + + # 2.6.3) Initialize common type and common language to null. If list is + # empty, set common language to default language. + # TODO: Spec fixme: Setting common language to default language is + # effectively useless, since the only place it is used is the follow loop + # in 2.6.4, which is immediately left when the list is empty + {common_type, common_language} = {nil, nil} + + {type_language, type_language_value} = + if Enum.empty?(list) do + {type_language, type_language_value} + else + # 2.6.4) For each item in list: + {common_type, common_language} = + Enum.reduce_while( + list, + {common_type, common_language}, + fn item, {common_type, common_language} -> + # 2.6.4.1) Initialize item language to @none and item type to @none. + {item_type, item_language} = {"@none", "@none"} + # 2.6.4.2) If item contains the key @value: + {item_type, item_language} = + if Map.has_key?(item, "@value") do + cond do + # 2.6.4.2.1) If item contains the key @language, then set + # item language to its associated value. + Map.has_key?(item, "@language") -> + {item_type, item["@language"]} + + # 2.6.4.2.2) Otherwise, if item contains the key @type, set + # item type to its associated value. + Map.has_key?(item, "@type") -> + {item["@type"], item_language} + + # 2.6.4.2.3) Otherwise, set item language to @null. 
+ true -> + {item_type, "@null"} + end + + # 2.6.4.3) Otherwise, set item type to @id. + else + {"@id", item_language} + end + + common_language = + cond do + # 2.6.4.4) If common language is null, set it to item language. + is_nil(common_language) -> + item_language + + # 2.6.4.5) Otherwise, if item language does not equal common + # language and item contains the key @value, then set common + # language to @none because list items have conflicting languages. + item_language != common_language and Map.has_key?(item, "@value") -> + "@none" + + true -> + common_language + end + + common_type = + cond do + # 2.6.4.6) If common type is null, set it to item type. + is_nil(common_type) -> + item_type + + # 2.6.4.7) Otherwise, if item type does not equal common type, + # then set common type to @none because list items have + # conflicting types. + item_type != common_type -> + "@none" + + true -> + common_type + end + + # 2.6.4.8) If common language is @none and common type is @none, + # then stop processing items in the list because it has been + # detected that there is no common language or type amongst the + # items. + if common_language == "@none" and common_type == "@none" do + {:halt, {common_type, common_language}} + else + {:cont, {common_type, common_language}} end - # 2.6.4.3) Otherwise, set item type to @id. - else - {"@id", item_language} end - common_language = - cond do - # 2.6.4.4) If common language is null, set it to item language. - is_nil(common_language) -> - item_language - # 2.6.4.5) Otherwise, if item language does not equal common language and item contains the key @value, then set common language to @none because list items have conflicting languages. - item_language != common_language and Map.has_key?(item, "@value") -> - "@none" - true -> - common_language - end - common_type = - cond do - # 2.6.4.6) If common type is null, set it to item type. 
- is_nil(common_type) -> - item_type - # 2.6.4.7) Otherwise, if item type does not equal common type, then set common type to @none because list items have conflicting types. - item_type != common_type -> - "@none" - true -> - common_type - end - # 2.6.4.8) If common language is @none and common type is @none, then stop processing items in the list because it has been detected that there is no common language or type amongst the items. - if common_language == "@none" and common_type == "@none" do - {:halt, {common_type, common_language}} - else - {:cont, {common_type, common_language}} - end + ) + + # 2.6.5) If common language is null, set it to @none. + common_language = if is_nil(common_language), do: "@none", else: common_language + # 2.6.6) If common type is null, set it to @none. + common_type = if is_nil(common_type), do: "@none", else: common_type + + # 2.6.7) If common type is not @none then set type/language to @type and + # type/language value to common type. + if common_type != "@none" do + type_language = "@type" + type_language_value = common_type + {type_language, type_language_value} + # 2.6.8) Otherwise, set type/language value to common language. + else + type_language_value = common_language + {type_language, type_language_value} end - # 2.6.5) If common language is null, set it to @none. - common_language = if is_nil(common_language), do: "@none", else: common_language - # 2.6.6) If common type is null, set it to @none. - common_type = if is_nil(common_type), do: "@none", else: common_type - # 2.6.7) If common type is not @none then set type/language to @type and type/language value to common type. - if common_type != "@none" do - type_language = "@type" - type_language_value = common_type - {type_language, type_language_value} - # 2.6.8) Otherwise, set type/language value to common language. 
- else - type_language_value = common_language - {type_language, type_language_value} end - end - {containers, type_language, type_language_value} - # 2.7) Otherwise - true -> - # 2.7.1) If value is a value object: - {containers, type_language, type_language_value} = - if is_map(value) and Map.has_key?(value, "@value") do - # 2.7.1.1) If value contains the key @language and does not contain the key @index, then set type/language value to its associated value and append @language to containers. - if Map.has_key?(value, "@language") and not Map.has_key?(value, "@index") do - type_language_value = value["@language"] - containers = containers ++ ["@language"] - {containers, type_language, type_language_value} - else - # 2.7.1.2) Otherwise, if value contains the key @type, then set type/language value to its associated value and set type/language to @type. - if Map.has_key?(value, "@type") do - type_language_value = value["@type"] - type_language = "@type" + + {containers, type_language, type_language_value} + + # 2.7) Otherwise + true -> + # 2.7.1) If value is a value object: + {containers, type_language, type_language_value} = + if is_map(value) and Map.has_key?(value, "@value") do + # 2.7.1.1) If value contains the key @language and does not contain the + # key @index, then set type/language value to its associated value and + # append @language to containers. + if Map.has_key?(value, "@language") and not Map.has_key?(value, "@index") do + type_language_value = value["@language"] + containers = containers ++ ["@language"] {containers, type_language, type_language_value} else - {containers, type_language, type_language_value} + # 2.7.1.2) Otherwise, if value contains the key @type, then set + # type/language value to its associated value and set type/language to + # @type. 
+ if Map.has_key?(value, "@type") do + type_language_value = value["@type"] + type_language = "@type" + {containers, type_language, type_language_value} + else + {containers, type_language, type_language_value} + end end + + # 2.7.2) Otherwise, set type/language to @type and set type/language + # value to @id. + else + type_language = "@type" + type_language_value = "@id" + {containers, type_language, type_language_value} end - # 2.7.2) Otherwise, set type/language to @type and set type/language value to @id. - else - type_language = "@type" - type_language_value = "@id" - {containers, type_language, type_language_value} - end - # 2.7.3) Append @set to containers. - containers = containers ++ ["@set"] - {containers, type_language, type_language_value} - end - # 2.8) Append @none to containers. This represents the non-existence of a container mapping, and it will be the last container mapping value to be checked as it is the most generic. - containers = containers ++ ["@none"] - # 2.9) If type/language value is null, set it to @null. This is the key under which null values are stored in the inverse context entry. - type_language_value = if is_nil(type_language_value), do: "@null", else: type_language_value - # 2.10) Initialize preferred values to an empty array. This array will indicate, in order, the preferred values for a term's type mapping or language mapping. - preferred_values = [] - # 2.11) If type/language value is @reverse, append @reverse to preferred values. 
- preferred_values = - if type_language_value == "@reverse", - do: preferred_values ++ ["@reverse"], - else: preferred_values - # 2.12) If type/language value is @id or @reverse and value has an @id member: - preferred_values = - if type_language_value in ~w[@id @reverse] and is_map(value) and Map.has_key?(value, "@id") do - # 2.12.1) If the result of using the IRI compaction algorithm, passing active context, inverse context, the value associated with the @id key in value for iri, true for vocab, and true for document relative has a term definition in the active context with an IRI mapping that equals the value associated with the @id key in value, then append @vocab, @id, and @none, in that order, to preferred values. - # TODO: Spec fixme? document_relative is not a specified parameter of compact_iri - compact_id = compact_iri(value["@id"], active_context, inverse_context, nil, true) - if (term_def = active_context.term_defs[compact_id]) && term_def.iri_mapping == value["@id"] do - preferred_values ++ ~w[@vocab @id @none] - # 2.12.2) Otherwise, append @id, @vocab, and @none, in that order, to preferred values. + + # 2.7.3) Append @set to containers. + containers = containers ++ ["@set"] + {containers, type_language, type_language_value} + end + + # 2.8) Append @none to containers. This represents the non-existence of a container + # mapping, and it will be the last container mapping value to be checked as it is + # the most generic. + containers = containers ++ ["@none"] + + # 2.9) If type/language value is null, set it to @null. This is the key under + # which null values are stored in the inverse context entry. + type_language_value = + if is_nil(type_language_value), do: "@null", else: type_language_value + + # 2.10) Initialize preferred values to an empty array. This array will indicate, + # in order, the preferred values for a term's type mapping or language mapping. 
+ preferred_values = [] + # 2.11) If type/language value is @reverse, append @reverse to preferred values. + preferred_values = + if type_language_value == "@reverse", + do: preferred_values ++ ["@reverse"], + else: preferred_values + + # 2.12) If type/language value is @id or @reverse and value has an @id member: + preferred_values = + if type_language_value in ~w[@id @reverse] and is_map(value) and + Map.has_key?(value, "@id") do + # 2.12.1) If the result of using the IRI compaction algorithm, passing active + # context, inverse context, the value associated with the @id key in value for + # iri, true for vocab, and true for document relative has a term definition in + # the active context with an IRI mapping that equals the value associated with + # the @id key in value, then append @vocab, @id, and @none, in that order, to + # preferred values. + # TODO: Spec fixme? document_relative is not a specified parameter of compact_iri + compact_id = compact_iri(value["@id"], active_context, inverse_context, nil, true) + + if (term_def = active_context.term_defs[compact_id]) && + term_def.iri_mapping == value["@id"] do + preferred_values ++ ~w[@vocab @id @none] + + # 2.12.2) Otherwise, append @id, @vocab, and @none, in that order, to + # preferred values. + else + preferred_values ++ ~w[@id @vocab @none] + end + + # 2.13) Otherwise, append type/language value and @none, in that order, to + # preferred values. else - preferred_values ++ ~w[@id @vocab @none] + preferred_values ++ [type_language_value, "@none"] end - # 2.13) Otherwise, append type/language value and @none, in that order, to preferred values. - else - preferred_values ++ [type_language_value, "@none"] - end - # 2.14) Initialize term to the result of the Term Selection algorithm, passing inverse context, iri, containers, type/language, and preferred values. 
- select_term(inverse_context, iri, containers, type_language, preferred_values) - end + + # 2.14) Initialize term to the result of the Term Selection algorithm, passing + # inverse context, iri, containers, type/language, and preferred values. + select_term(inverse_context, iri, containers, type_language, preferred_values) + end + cond do # 2.15) If term is not null, return term. not is_nil(term) -> term - # 3) At this point, there is no simple term that iri can be compacted to. If vocab is true and active context has a vocabulary mapping: - # 3.1) If iri begins with the vocabulary mapping's value but is longer, then initialize suffix to the substring of iri that does not match. If suffix does not have a term definition in active context, then return suffix. + + # 3) At this point, there is no simple term that iri can be compacted to. If vocab + # is true and active context has a vocabulary mapping: + # 3.1) If iri begins with the vocabulary mapping's value but is longer, then + # initialize suffix to the substring of iri that does not match. If suffix does not + # have a term definition in active context, then return suffix. vocab && active_context.vocab && String.starts_with?(iri, active_context.vocab) -> suffix = String.replace_prefix(iri, active_context.vocab, "") + if suffix != "" && is_nil(active_context.term_defs[suffix]) do String.replace_prefix(iri, active_context.vocab, "") else create_compact_iri(iri, active_context, value, vocab) end + true -> create_compact_iri(iri, active_context, value, vocab) end end defp create_compact_iri(iri, active_context, value, vocab) do - # 4) The iri could not be compacted using the active context's vocabulary mapping. Try to create a compact IRI, starting by initializing compact IRI to null. This variable will be used to tore the created compact IRI, if any. - compact_iri = + # 4) The iri could not be compacted using the active context's vocabulary mapping. 
+ # Try to create a compact IRI, starting by initializing compact IRI to null. This
+ # variable will be used to store the created compact IRI, if any.
 # 5) For each key term and value term definition in the active context:
- Enum.reduce(active_context.term_defs, nil, fn ({term, term_def}, compact_iri) ->
+ compact_iri =
+ Enum.reduce(active_context.term_defs, nil, fn {term, term_def}, compact_iri ->
 cond do
- # 5.1) If the term contains a colon (:), then continue to the next term because terms with colons can't be used as prefixes.
+ # 5.1) If the term contains a colon (:), then continue to the next term because
+ # terms with colons can't be used as prefixes.
 String.contains?(term, ":") -> compact_iri
- # 5.2) If the term definition is null, its IRI mapping equals iri, or its IRI mapping is not a substring at the beginning of iri, the term cannot be used as a prefix because it is not a partial match with iri. Continue with the next term.
+
+ # 5.2) If the term definition is null, its IRI mapping equals iri, or its IRI
+ # mapping is not a substring at the beginning of iri, the term cannot be used
+ # as a prefix because it is not a partial match with iri. Continue with the next
+ # term.
 is_nil(term_def) || term_def.iri_mapping == iri ||
 not String.starts_with?(iri, term_def.iri_mapping) ->
 compact_iri
+
 true ->
- # 5.3) Initialize candidate by concatenating term, a colon (:), and the substring of iri that follows after the value of the term definition's IRI mapping.
- candidate = term <> ":" <> (String.split_at(iri, String.length(term_def.iri_mapping)) |> elem(1))
- # 5.4) If either compact IRI is null or candidate is shorter or the same length but lexicographically less than compact IRI and candidate does not have a term definition in active context or if the term definition has an IRI mapping that equals iri and value is null, set compact IRI to candidate.
+ # 5.3) Initialize candidate by concatenating term, a colon (:), and the + # substring of iri that follows after the value of the term definition's IRI + # mapping. + candidate = + term <> + ":" <> (String.split_at(iri, String.length(term_def.iri_mapping)) |> elem(1)) + + # 5.4) If either compact IRI is null or candidate is shorter or the same length + # but lexicographically less than compact IRI and candidate does not have a + # term definition in active context or if the term definition has an IRI mapping + # that equals iri and value is null, set compact IRI to candidate. # TODO: Spec fixme: The specified expression is pretty ambiguous without brackets ... - # TODO: Spec fixme: "if the term definition has an IRI mapping that equals iri" is already catched in 5.2, so will never happen here ... + # TODO: Spec fixme: "if the term definition has an IRI mapping that equals iri" + # is already catched in 5.2, so will never happen here ... if (is_nil(compact_iri) or shortest_or_least?(candidate, compact_iri)) and (is_nil(active_context.term_defs[candidate]) or (active_context.term_defs[candidate].iri_mapping == iri and is_nil(value))) do @@ -484,58 +670,70 @@ defmodule JSON.LD.Compaction do end end end) + cond do # 6) If compact IRI is not null, return compact IRI. not is_nil(compact_iri) -> compact_iri + # 7) If vocab is false then transform iri to a relative IRI using the document's base IRI. not vocab -> remove_base(iri, Context.base(active_context)) + # 8) Finally, return iri as is. true -> iri end end + @spec shortest_or_least?(String.t(), String.t()) :: boolean defp shortest_or_least?(a, b) do (a_len = String.length(a)) < (b_len = String.length(b)) or (a_len == b_len and a < b) end + @spec remove_base(String.t(), String.t() | nil) :: String.t() defp remove_base(iri, nil), do: iri defp remove_base(iri, base) do base_len = String.length(base) + if String.starts_with?(iri, base) and String.at(iri, base_len) in ~w(? 
#) do String.split_at(iri, base_len) |> elem(1) else case URI.parse(base) do %URI{path: nil} -> iri - base -> - do_remove_base(iri, %URI{base | path: parent_path(base.path)}, 0) + base -> do_remove_base(iri, %URI{base | path: parent_path(base.path)}, 0) end end end + @spec do_remove_base(String.t(), URI.t(), non_neg_integer) :: String.t() defp do_remove_base(iri, base, index) do base_str = URI.to_string(base) + cond do String.starts_with?(iri, base_str) -> case String.duplicate("../", index) <> - (String.split_at(iri, String.length(base_str)) |> elem(1)) do - "" -> "./" + (String.split_at(iri, String.length(base_str)) |> elem(1)) do + "" -> "./" rel -> rel end - base.path == "/" -> iri + + base.path == "/" -> + iri + true -> do_remove_base(iri, %URI{base | path: parent_path(base.path)}, index + 1) end end - defp parent_path("/"), do: "/" + @spec parent_path(String.t()) :: String.t() + defp parent_path("/"), do: "/" + defp parent_path(path) do case Path.dirname(String.trim_trailing(path, "/")) do - "/" -> "/" + "/" -> "/" parent -> parent <> "/" end end @@ -545,55 +743,79 @@ defmodule JSON.LD.Compaction do Details at """ + @spec compact_value(any, Context.t(), map, String.t()) :: any def compact_value(value, active_context, inverse_context, active_property) do term_def = active_context.term_defs[active_property] # 1) Initialize number members to the number of members value contains. number_members = Enum.count(value) - # 2) If value has an @index member and the container mapping associated to active property is set to @index, decrease number members by 1. + + # 2) If value has an @index member and the container mapping associated to active + # property is set to @index, decrease number members by 1. 
number_members = if term_def != nil and Map.has_key?(value, "@index") and - term_def.container_mapping == "@index", - do: number_members - 1, else: number_members + term_def.container_mapping == "@index", + do: number_members - 1, + else: number_members + # 3) If number members is greater than 2, return value as it cannot be compacted. unless number_members > 2 do - {type_mapping, language_mapping} = if term_def, + {type_mapping, language_mapping} = + if term_def, do: {term_def.type_mapping, term_def.language_mapping}, else: {nil, nil} + cond do # 4) If value has an @id member id = Map.get(value, "@id") -> cond do - # 4.1) If number members is 1 and the type mapping of active property is set to @id, return the result of using the IRI compaction algorithm, passing active context, inverse context, and the value of the @id member for iri. + # 4.1) If number members is 1 and the type mapping of active property + # is set to @id, return the result of using the IRI compaction algorithm, + # passing active context, inverse context, and the value of the @id member + # for iri. number_members == 1 and type_mapping == "@id" -> compact_iri(id, active_context, inverse_context) - # 4.2) Otherwise, if number members is 1 and the type mapping of active property is set to @vocab, return the result of using the IRI compaction algorithm, passing active context, inverse context, the value of the @id member for iri, and true for vocab. + + # 4.2) Otherwise, if number members is 1 and the type mapping of active + # property is set to @vocab, return the result of using the IRI compaction + # algorithm, passing active context, inverse context, the value of the @id + # member for iri, and true for vocab. number_members == 1 and type_mapping == "@vocab" -> compact_iri(id, active_context, inverse_context, nil, true) + # 4.3) Otherwise, return value as is. 
true -> value
 end
- # 5) Otherwise, if value has an @type member whose value matches the type mapping of active property, return the value associated with the @value member of value.
+
+ # 5) Otherwise, if value has an @type member whose value matches the type mapping
+ # of active property, return the value associated with the @value member of value.
 (type = Map.get(value, "@type")) && type == type_mapping ->
 value["@value"]
- # 6) Otherwise, if value has an @language member whose value matches the language mapping of active property, return the value associated with the @value member of value.
+
+ # 6) Otherwise, if value has an @language member whose value matches the language
+ # mapping of active property, return the value associated with the @value member
+ # of value.
+ # TODO: Spec fixme: doesn't specify to check default language as well
 (language = Map.get(value, "@language")) &&
- # TODO: Spec fixme: doesn't specify to check default language as well
- language in [language_mapping, active_context.default_language] ->
+ language in [language_mapping, active_context.default_language] ->
 value["@value"]
+
 true ->
- # 7) Otherwise, if number members equals 1 and either the value of the @value member is not a string, or the active context has no default language, or the language mapping of active property is set to null,, return the value associated with the @value member.
+ # 7) Otherwise, if number members equals 1 and either the value of the @value
+ # member is not a string, or the active context has no default language, or
+ # the language mapping of active property is set to null, return the value
+ # associated with the @value member.
value_value = value["@value"] + # TODO: Spec fixme: doesn't specify to check default language as well if number_members == 1 and - (not is_binary(value_value) or - !active_context.default_language or - # TODO: Spec fixme: doesn't specify to check default language as well - Context.language(active_context, active_property) == nil) do - value_value + (not is_binary(value_value) or + !active_context.default_language or + Context.language(active_context, active_property) == nil) do + value_value # 8) Otherwise, return value as is. - else - value - end + else + value + end end else value @@ -605,14 +827,15 @@ defmodule JSON.LD.Compaction do Details at """ + @spec select_term(map, String.t(), [String.t()], String.t(), [String.t()]) :: String.t() def select_term(inverse_context, iri, containers, type_language, preferred_values) do container_map = inverse_context[iri] - Enum.find_value containers, fn container -> + + Enum.find_value(containers, fn container -> if type_language_map = container_map[container] do value_map = type_language_map[type_language] - Enum.find_value preferred_values, fn item -> value_map[item] end + Enum.find_value(preferred_values, fn item -> value_map[item] end) end - end + end) end - end diff --git a/lib/json/ld/context.ex b/lib/json/ld/context.ex index c9de8c0..139ed58 100644 --- a/lib/json/ld/context.ex +++ b/lib/json/ld/context.ex @@ -1,58 +1,86 @@ defmodule JSON.LD.Context do + import JSON.LD.{IRIExpansion, Utils} + + alias JSON.LD.Context.TermDefinition + alias JSON.LD.Options + + alias RDF.IRI + + @type local :: map | String.t() | nil + @type remote :: [map] + @type value :: map | String.t() | nil + + @type t :: %__MODULE__{ + term_defs: map, + default_language: String.t() | nil, + vocab: nil, + base_iri: String.t() | boolean | nil, + api_base_iri: String.t() | nil + } + defstruct term_defs: %{}, default_language: nil, vocab: nil, base_iri: false, api_base_iri: nil - import JSON.LD.IRIExpansion - import JSON.LD.Utils - - alias 
JSON.LD.Context.TermDefinition - alias RDF.IRI - - - def base(%JSON.LD.Context{base_iri: false, api_base_iri: api_base_iri}), + @spec base(t) :: String.t() | nil + def base(%__MODULE__{base_iri: false, api_base_iri: api_base_iri}), do: api_base_iri - def base(%JSON.LD.Context{base_iri: base_iri}), - do: base_iri + def base(%__MODULE__{base_iri: base_iri}), + do: base_iri - def new(options \\ %JSON.LD.Options{}), - do: %JSON.LD.Context{api_base_iri: JSON.LD.Options.new(options).base} + @spec new(Options.t()) :: t + def new(options \\ %Options{}), + do: %__MODULE__{api_base_iri: Options.new(options).base} + @spec create(map, Options.t()) :: t def create(%{"@context" => json_ld_context}, options), - do: new(options) |> update(json_ld_context, [], options) + do: options |> new() |> update(json_ld_context, [], options) + @spec update(t, [local] | local, remote, Options.t()) :: t + def update(active, local, remote \\ [], options \\ %Options{}) - def update(active, local, remote \\ [], options \\ %JSON.LD.Options{}) - - def update(%JSON.LD.Context{} = active, local, remote, options) when is_list(local) do - Enum.reduce local, active, fn (local, result) -> + def update(%__MODULE__{} = active, local, remote, options) when is_list(local) do + Enum.reduce(local, active, fn local, result -> do_update(result, local, remote, options) - end + end) end # 2) If local context is not an array, set it to an array containing only local context. - def update(%JSON.LD.Context{} = active, local, remote, options) do - update(active, [local], remote, options) - end - - - # 3.1) If context is null, set result to a newly-initialized active context and continue with the next context. The base IRI of the active context is set to the IRI of the currently being processed document (which might be different from the currently being processed context), if available; otherwise to null. If set, the base option of a JSON-LD API Implementation overrides the base IRI. 
- defp do_update(%JSON.LD.Context{}, nil, _remote, options) do - new(options) - end + def update(%__MODULE__{} = active, local, remote, options), + do: update(active, [local], remote, options) + + # 3.1) If context is null, set result to a newly-initialized active context and continue + # with the next context. The base IRI of the active context is set to the IRI of the + # currently being processed document (which might be different from the currently being + # processed context), if available; otherwise to null. If set, the base option of a + # JSON-LD API Implementation overrides the base IRI. + @spec do_update(t, local, remote, Options.t()) :: t + defp do_update(%__MODULE__{}, nil, _remote, options), + do: new(options) # 3.2) If context is a string, [it's interpreted as a remote context] - defp do_update(%JSON.LD.Context{} = active, local, remote, options) when is_binary(local) do + defp do_update(%__MODULE__{} = active, local, remote, options) when is_binary(local) do # TODO: fetch remote context and call recursively with remote updated end # 3.4) - 3.8) - defp do_update(%JSON.LD.Context{} = active, local, remote, _) when is_map(local) do - with {base, local} <- Map.pop(local, "@base", false), - {vocab, local} <- Map.pop(local, "@vocab", false), + defp do_update(%__MODULE__{} = active, local, remote, _) when is_map(local), + do: do_update_local(active, local, remote) + + # 3.3) If context is not a JSON object, an invalid local context error has been detected + # and processing is aborted. 
+ defp do_update(_, local, _, _) do + raise JSON.LD.InvalidLocalContextError, + message: "#{inspect(local)} is not a valid @context value" + end + + @spec do_update_local(t, map, remote) :: t + defp do_update_local(%__MODULE__{} = active, local, remote) when is_map(local) do + with {base, local} <- Map.pop(local, "@base", false), + {vocab, local} <- Map.pop(local, "@vocab", false), {language, local} <- Map.pop(local, "@language", false) do active |> set_base(base, remote) @@ -62,95 +90,117 @@ defmodule JSON.LD.Context do end end - # 3.3) If context is not a JSON object, an invalid local context error has been detected and processing is aborted. - defp do_update(_, local, _, _), - do: raise JSON.LD.InvalidLocalContextError, - message: "#{inspect local} is not a valid @context value" - - + @spec set_base(t, boolean, remote) :: t defp set_base(active, false, _), do: active + defp set_base(active, _, remote) when is_list(remote) and length(remote) > 0, do: active + defp set_base(active, base, _) do cond do - # TODO: this slightly differs from the spec, due to our false special value for base_iri; add more tests + # TODO: this slightly differs from the spec, due to our false special value for + # base_iri; add more tests is_nil(base) or IRI.absolute?(base) -> - %JSON.LD.Context{active | base_iri: base} + %__MODULE__{active | base_iri: base} + active.base_iri -> - %JSON.LD.Context{active | base_iri: absolute_iri(base, active.base_iri)} + %__MODULE__{active | base_iri: absolute_iri(base, active.base_iri)} + true -> raise JSON.LD.InvalidBaseIRIError, - message: "#{inspect base} is a relative IRI, but no active base IRI defined" + message: "#{inspect(base)} is a relative IRI, but no active base IRI defined" end end + @spec set_vocab(t, boolean | nil) :: t defp set_vocab(active, false), do: active + defp set_vocab(active, vocab) do if is_nil(vocab) or IRI.absolute?(vocab) or blank_node_id?(vocab) do - %JSON.LD.Context{active | vocab: vocab} + %__MODULE__{active | vocab: 
vocab} else raise JSON.LD.InvalidVocabMappingError, - message: "#{inspect vocab} is not a valid vocabulary mapping" + message: "#{inspect(vocab)} is not a valid vocabulary mapping" end end + @spec set_language(t, boolean | nil) :: t defp set_language(active, false), do: active + defp set_language(active, nil), - do: %JSON.LD.Context{active | default_language: nil} + do: %__MODULE__{active | default_language: nil} + defp set_language(active, language) when is_binary(language), - do: %JSON.LD.Context{active | default_language: String.downcase(language)} - defp set_language(_, language), - do: raise JSON.LD.InvalidDefaultLanguageError, - message: "#{inspect language} is not a valid language" + do: %__MODULE__{active | default_language: String.downcase(language)} + + defp set_language(_, language) do + raise JSON.LD.InvalidDefaultLanguageError, + message: "#{inspect(language)} is not a valid language" + end + @spec language(t, String.t()) :: String.t() | nil def language(active, term) do case Map.get(active.term_defs, term, %TermDefinition{}).language_mapping do - false -> active.default_language + false -> active.default_language language -> language end end + @spec create_term_definitions(t, map, map) :: t defp create_term_definitions(active, local, defined \\ %{}) do {active, _} = - Enum.reduce local, {active, defined}, fn ({term, value}, {active, defined}) -> + Enum.reduce(local, {active, defined}, fn {term, value}, {active, defined} -> create_term_definition(active, local, term, value, defined) - end + end) + active end @doc """ - Expands the given input according to the steps in the JSON-LD Create Term Definition Algorithm. + Expands the given input according to the steps in the JSON-LD Create Term Definition + Algorithm. 
see """ + @spec create_term_definition(t, map, String.t(), value, map) :: {t, map} def create_term_definition(active, local, term, value, defined) - def create_term_definition(active, _, "@base", _, defined), do: {active, defined} - def create_term_definition(active, _, "@vocab", _, defined), do: {active, defined} + def create_term_definition(active, _, "@base", _, defined), do: {active, defined} + def create_term_definition(active, _, "@vocab", _, defined), do: {active, defined} def create_term_definition(active, _, "@language", _, defined), do: {active, defined} def create_term_definition(active, local, term, value, defined) do # 3) - if term in JSON.LD.keywords, - do: raise JSON.LD.KeywordRedefinitionError, - message: "#{inspect term} is a keyword and can not be defined in context" + if term in JSON.LD.keywords() do + raise JSON.LD.KeywordRedefinitionError, + message: "#{inspect(term)} is a keyword and can not be defined in context" + end + # 1) case defined[term] do - true -> {active, defined} - false -> raise JSON.LD.CyclicIRIMappingError #, message: "#{inspect term} .." - nil -> do_create_term_definition(active, local, term, value, - Map.put(defined, term, false)) # 2) + true -> + {active, defined} + + # , message: "#{inspect term} .." 
+ false -> + raise JSON.LD.CyclicIRIMappingError + + nil -> + # 2) + do_create_term_definition(active, local, term, value, Map.put(defined, term, false)) end end + @spec do_create_term_definition(t, map, String.t(), value, map) :: {t, map} defp do_create_term_definition(active, _local, term, nil, defined) do + # (if Map.has_key?(active.term_defs, term), + # do: put_in(active, [:term_defs, term], nil), + # else: raise "NotImplemented"), { -# (if Map.has_key?(active.term_defs, term), -# do: put_in(active, [:term_defs, term], nil), -# else: raise "NotImplemented"), - %JSON.LD.Context{active | term_defs: Map.put(active.term_defs, term, nil)}, - Map.put(defined, term, true)} + %__MODULE__{active | term_defs: Map.put(active.term_defs, term, nil)}, + Map.put(defined, term, true) + } end defp do_create_term_definition(active, local, term, %{"@id" => nil}, defined), @@ -160,82 +210,113 @@ defmodule JSON.LD.Context do do: do_create_term_definition(active, local, term, %{"@id" => value}, defined) defp do_create_term_definition(active, local, term, %{} = value, defined) do - definition = %TermDefinition{} # 9) + # 9) + definition = %TermDefinition{} + {definition, active, defined} = do_create_type_definition(definition, active, local, value, defined) + {done, definition, active, defined} = - do_create_reverse_definition(definition, active, local, value, defined) + do_create_reverse_definition(definition, active, local, value, defined) + {definition, active, defined} = unless done do {definition, active, defined} = do_create_id_definition(definition, active, local, term, value, defined) + definition = do_create_container_definition(definition, value) definition = do_create_language_definition(definition, value) + {definition, active, defined} else {definition, active, defined} end - # 18 / 11.6) Set the term definition of term in active context to definition and set the value associated with defined's key term to true. 
- {%JSON.LD.Context{active | term_defs: Map.put(active.term_defs, term, definition)}, - Map.put(defined, term, true)} - end - defp do_create_term_definition(_, _, _, value, _), - do: raise JSON.LD.InvalidTermDefinitionError, - message: "#{inspect value} is not a valid term definition" + # 18 / 11.6) Set the term definition of term in active context to definition and set + # the value associated with defined's key term to true. + { + %__MODULE__{active | term_defs: Map.put(active.term_defs, term, definition)}, + Map.put(defined, term, true) + } + end + defp do_create_term_definition(_, _, _, value, _) do + raise JSON.LD.InvalidTermDefinitionError, + message: "#{inspect(value)} is not a valid term definition" + end # 10.1) # TODO: RDF.rb implementation says: "SPEC FIXME: @type may be nil" - defp do_create_type_definition(_, _, _, %{"@type" => type}, _) when not is_binary(type), - do: raise JSON.LD.InvalidTypeMappingError, - message: "#{inspect type} is not a valid type mapping" + @spec do_create_type_definition(TermDefinition.t(), map, map, value, map) :: + {TermDefinition.t(), t, map} + defp do_create_type_definition(_, _, _, %{"@type" => type}, _) when not is_binary(type) do + raise JSON.LD.InvalidTypeMappingError, message: "#{inspect(type)} is not a valid type mapping" + end # 10.2) and 10.3) defp do_create_type_definition(definition, active, local, %{"@type" => type}, defined) do - {expanded_type, active, defined} = - expand_iri(type, active, false, true, local, defined) + {expanded_type, active, defined} = expand_iri(type, active, false, true, local, defined) + if IRI.absolute?(expanded_type) or expanded_type in ~w[@id @vocab] do {%TermDefinition{definition | type_mapping: expanded_type}, active, defined} else raise JSON.LD.InvalidTypeMappingError, - message: "#{inspect type} is not a valid type mapping" + message: "#{inspect(type)} is not a valid type mapping" end end defp do_create_type_definition(definition, active, _, _, defined), do: {definition, active, 
defined} + @spec do_create_reverse_definition(TermDefinition.t(), t, map, value, map) :: + {boolean, TermDefinition.t(), t, map} # 11) If value contains the key @reverse - defp do_create_reverse_definition(definition, active, local, - %{"@reverse" => reverse} = value, defined) do + defp do_create_reverse_definition( + definition, + active, + local, + %{"@reverse" => reverse} = value, + defined + ) do cond do - Map.has_key?(value, "@id") -> # 11.1) + # 11.1 ) + Map.has_key?(value, "@id") -> raise JSON.LD.InvalidReversePropertyError, - message: "#{inspect reverse} is not a valid reverse property" - not is_binary(reverse) -> # 11.2) + message: "#{inspect(reverse)} is not a valid reverse property" + + not is_binary(reverse) -> + # 11.2) raise JSON.LD.InvalidIRIMappingError, - message: "Expected String for @reverse value. got #{inspect reverse}" - true -> # 11.3) + message: "Expected String for @reverse value. got #{inspect(reverse)}" + + true -> + # 11.3) {expanded_reverse, active, defined} = expand_iri(reverse, active, false, true, local, defined) + definition = if IRI.absolute?(expanded_reverse) or blank_node_id?(expanded_reverse) do %TermDefinition{definition | iri_mapping: expanded_reverse} else raise JSON.LD.InvalidIRIMappingError, - message: "Non-absolute @reverse IRI: #{inspect reverse}" + message: "Non-absolute @reverse IRI: #{inspect(reverse)}" end + + # 11.4) definition = - case Map.get(value, "@container", {false}) do # 11.4) + case Map.get(value, "@container", {false}) do {false} -> definition + container when is_nil(container) or container in ~w[@set @index] -> %TermDefinition{definition | container_mapping: container} + _ -> raise JSON.LD.InvalidReversePropertyError, - message: "#{inspect reverse} is not a valid reverse property; reverse properties only support set- and index-containers" + message: + "#{inspect(reverse)} is not a valid reverse property; reverse properties only support set- and index-containers" end + # 11.5) & 11.6) {true, 
%TermDefinition{definition | reverse_property: true}, active, defined} end @@ -244,29 +325,31 @@ defmodule JSON.LD.Context do defp do_create_reverse_definition(definition, active, _, _, defined), do: {false, definition, active, defined} - # 13) - defp do_create_id_definition(definition, active, local, term, - %{"@id" => id}, defined) when id != term do + @spec do_create_id_definition(TermDefinition.t(), t, map, String.t(), map, map) :: + {TermDefinition.t(), t, map} + defp do_create_id_definition(definition, active, local, term, %{"@id" => id}, defined) + when id != term do if is_binary(id) do # 13.2) - {expanded_id, active, defined} = - expand_iri(id, active, false, true, local, defined) + {expanded_id, active, defined} = expand_iri(id, active, false, true, local, defined) + cond do expanded_id == "@context" -> - raise JSON.LD.InvalidKeywordAliasError, - message: "cannot alias @context" - JSON.LD.keyword?(expanded_id) or - IRI.absolute?(expanded_id) or - blank_node_id?(expanded_id) -> + raise JSON.LD.InvalidKeywordAliasError, message: "cannot alias @context" + + JSON.LD.keyword?(expanded_id) or IRI.absolute?(expanded_id) or blank_node_id?(expanded_id) -> {%TermDefinition{definition | iri_mapping: expanded_id}, active, defined} + true -> raise JSON.LD.InvalidIRIMappingError, - message: "#{inspect id} is not a valid IRI mapping; resulting IRI mapping should be a keyword, absolute IRI or blank node" + message: + "#{inspect(id)} is not a valid IRI mapping; resulting IRI mapping should be a keyword, absolute IRI or blank node" end - else # 13.1) + else + # 13.1) raise JSON.LD.InvalidIRIMappingError, - message: "expected value of @id to be a string, but got #{inspect id}" + message: "expected value of @id to be a string, but got #{inspect(id)}" end end @@ -278,122 +361,143 @@ defmodule JSON.LD.Context do case compact_iri_parts(term) do [prefix, suffix] -> prefix_mapping = local[prefix] + {active, defined} = if prefix_mapping do do_create_term_definition(active, local, 
prefix, prefix_mapping, defined) else {active, defined} end + if prefix_def = active.term_defs[prefix] do - {%TermDefinition{definition | iri_mapping: prefix_def.iri_mapping <> suffix}, active, defined} + {%TermDefinition{definition | iri_mapping: prefix_def.iri_mapping <> suffix}, active, + defined} else {%TermDefinition{definition | iri_mapping: term}, active, defined} end - nil -> {%TermDefinition{definition | iri_mapping: term}, active, defined} + + nil -> + {%TermDefinition{definition | iri_mapping: term}, active, defined} end - # 15) + + # 15) else if active.vocab do {%TermDefinition{definition | iri_mapping: active.vocab <> term}, active, defined} else raise JSON.LD.InvalidIRIMappingError, - message: "#{inspect term} is not a valid IRI mapping; relative term definition without vocab mapping" + message: + "#{inspect(term)} is not a valid IRI mapping; relative term definition without vocab mapping" end end end - # 16.1) + @spec do_create_container_definition(TermDefinition.t(), map) :: TermDefinition.t() defp do_create_container_definition(_, %{"@container" => container}) - when container not in ~w[@list @set @index @language], - do: raise JSON.LD.InvalidContainerMappingError, - message: "#{inspect container} is not a valid container mapping; @container must be either @list, @set, @index, or @language" + when container not in ~w[@list @set @index @language] do + raise JSON.LD.InvalidContainerMappingError, + message: + "#{inspect(container)} is not a valid container mapping; @container must be either @list, @set, @index, or @language" + end + # 16.2) defp do_create_container_definition(definition, %{"@container" => container}), do: %TermDefinition{definition | container_mapping: container} + defp do_create_container_definition(definition, _), do: definition - # 17) + @spec do_create_language_definition(TermDefinition.t(), map) :: TermDefinition.t() defp do_create_language_definition(definition, %{"@language" => language} = value) do unless Map.has_key?(value, 
"@type") do case language do language when is_binary(language) -> %TermDefinition{definition | language_mapping: String.downcase(language)} + language when is_nil(language) -> %TermDefinition{definition | language_mapping: nil} + _ -> raise JSON.LD.InvalidLanguageMappingError, - message: "#{inspect language} is not a valid language mapping; @language must be a string or null" + message: + "#{inspect(language)} is not a valid language mapping; @language must be a string or null" end end end - defp do_create_language_definition(definition, _), do: definition + defp do_create_language_definition(definition, _), do: definition @doc """ Inverse Context Creation algorithm Details at """ - def inverse(%JSON.LD.Context{} = context) do - # 2) Initialize default language to @none. If the active context has a default language, set default language to it. + @spec inverse(t) :: map + def inverse(%__MODULE__{} = context) do + # 2) Initialize default language to @none. If the active context has a default + # language, set default language to it. default_language = context.default_language || "@none" - # 3) For each key term and value term definition in the active context, ordered by shortest term first (breaking ties by choosing the lexicographically least term) + + # 3) For each key term and value term definition in the active context, ordered by + # shortest term first (breaking ties by choosing the lexicographically least term) context.term_defs |> Enum.sort_by(fn {term, _} -> String.length(term) end) - |> Enum.reduce(%{}, fn ({term, term_def}, result) -> - # 3.1) If the term definition is null, term cannot be selected during compaction, so continue to the next term. - if term_def do - # 3.2) Initialize container to @none. If there is a container mapping in term definition, set container to its associated value. - container = term_def.container_mapping || "@none" - # 3.3) Initialize iri to the value of the IRI mapping for the term definition. 
- iri = term_def.iri_mapping - - type_map = get_in(result, [iri, container, "@type"]) || %{} - language_map = get_in(result, [iri, container, "@language"]) || %{} - - {type_map, language_map} = - case term_def do - # 3.8) If the term definition indicates that the term represents a reverse property - %TermDefinition{reverse_property: true} -> - {Map.put_new(type_map, "@reverse", term), language_map} - # 3.9) Otherwise, if term definition has a type mapping - %TermDefinition{type_mapping: type_mapping} - when type_mapping != false -> - {Map.put_new(type_map, type_mapping, term), language_map} - # 3.10) Otherwise, if term definition has a language mapping (might be null) - %TermDefinition{language_mapping: language_mapping} - when language_mapping != false -> - language = language_mapping || "@null" - {type_map, Map.put_new(language_map, language, term)} - # 3.11) Otherwise - _ -> - language_map = Map.put_new(language_map, default_language, term) - language_map = Map.put_new(language_map, "@none", term) - type_map = Map.put_new(type_map, "@none", term) - {type_map, language_map} - end - - result - |> Map.put_new(iri, %{}) - |> Map.update(iri, %{}, fn container_map -> - Map.put container_map, container, %{ - "@type" => type_map, - "@language" => language_map, - } - end) - else - result - end - end) + |> Enum.reduce(%{}, fn {term, term_def}, result -> + # 3.1) If the term definition is null, term cannot be selected during compaction, + # so continue to the next term. + if term_def do + # 3.2) Initialize container to @none. If there is a container mapping in term + # definition, set container to its associated value. + container = term_def.container_mapping || "@none" + + # 3.3) Initialize iri to the value of the IRI mapping for the term definition. 
+ iri = term_def.iri_mapping + + type_map = get_in(result, [iri, container, "@type"]) || %{} + language_map = get_in(result, [iri, container, "@language"]) || %{} + + {type_map, language_map} = + case term_def do + # 3.8) If the term definition indicates that the term represents a reverse + # property + %TermDefinition{reverse_property: true} -> + {Map.put_new(type_map, "@reverse", term), language_map} + + # 3.9) Otherwise, if term definition has a type mapping + %TermDefinition{type_mapping: type_mapping} when type_mapping != false -> + {Map.put_new(type_map, type_mapping, term), language_map} + + # 3.10) Otherwise, if term definition has a language mapping (might be null) + %TermDefinition{language_mapping: language_mapping} when language_mapping != false -> + language = language_mapping || "@null" + {type_map, Map.put_new(language_map, language, term)} + + # 3.11) Otherwise + _ -> + language_map = Map.put_new(language_map, default_language, term) + language_map = Map.put_new(language_map, "@none", term) + type_map = Map.put_new(type_map, "@none", term) + {type_map, language_map} + end + + result + |> Map.put_new(iri, %{}) + |> Map.update(iri, %{}, fn container_map -> + Map.put(container_map, container, %{"@type" => type_map, "@language" => language_map}) + end) + else + result + end + end) end - def empty?(%JSON.LD.Context{term_defs: term_defs, vocab: nil, base_iri: false, default_language: nil}) - when map_size(term_defs) == 0, - do: true + @spec empty?(t) :: boolean + def empty?(%__MODULE__{term_defs: term_defs, vocab: nil, base_iri: false, default_language: nil}) + when map_size(term_defs) == 0, + do: true + def empty?(_), do: false - end diff --git a/lib/json/ld/context/term_definition.ex b/lib/json/ld/context/term_definition.ex index 08db03e..c283d20 100644 --- a/lib/json/ld/context/term_definition.ex +++ b/lib/json/ld/context/term_definition.ex @@ -1,7 +1,15 @@ defmodule JSON.LD.Context.TermDefinition do + @type t :: %__MODULE__{ + iri_mapping: String.t() 
| nil, + reverse_property: boolean, + type_mapping: boolean, + language_mapping: boolean, + container_mapping: nil + } + defstruct iri_mapping: nil, reverse_property: false, - type_mapping: false, language_mapping: false, + type_mapping: false, + language_mapping: false, container_mapping: nil - end diff --git a/lib/json/ld/decoder.ex b/lib/json/ld/decoder.ex index 3bc98f5..4e19eb9 100644 --- a/lib/json/ld/decoder.ex +++ b/lib/json/ld/decoder.ex @@ -5,136 +5,169 @@ defmodule JSON.LD.Decoder do use RDF.Serialization.Decoder import JSON.LD.{NodeIdentifierMap, Utils} - alias JSON.LD.NodeIdentifierMap - alias RDF.{Dataset, Graph, NS} + alias JSON.LD.{NodeIdentifierMap, Options} + alias RDF.{BlankNode, Dataset, Graph, IRI, Literal, NS, Statement} @impl RDF.Serialization.Decoder + @spec decode(String.t(), keyword) :: {:ok, Dataset.t() | Graph.t()} | {:error, any} def decode(content, opts \\ []) do with {:ok, json_ld_object} <- parse_json(content), - dataset = to_rdf(json_ld_object, opts) do + dataset <- to_rdf(json_ld_object, opts) do {:ok, dataset} end end - def to_rdf(element, options \\ %JSON.LD.Options{}) do - with options = JSON.LD.Options.new(options) do - {:ok, node_id_map} = NodeIdentifierMap.start_link - try do - element - |> JSON.LD.expand(options) - |> JSON.LD.node_map(node_id_map) - |> Enum.sort_by(fn {graph_name, _} -> graph_name end) - |> Enum.reduce(Dataset.new, fn ({graph_name, graph}, dataset) -> - unless relative_iri?(graph_name) do - rdf_graph = - graph - |> Enum.sort_by(fn {subject, _} -> subject end) - |> Enum.reduce(Graph.new, fn ({subject, node}, rdf_graph) -> - unless relative_iri?(subject) do - node - |> Enum.sort_by(fn {property, _} -> property end) - |> Enum.reduce(rdf_graph, fn ({property, values}, rdf_graph) -> - cond do - property == "@type" -> - Graph.add rdf_graph, - node_to_rdf(subject), RDF.NS.RDF.type, - Enum.map(values, &node_to_rdf/1) - JSON.LD.keyword?(property) -> - rdf_graph - not options.produce_generalized_rdf and - 
blank_node_id?(property) -> - rdf_graph - relative_iri?(property) -> - rdf_graph - true -> - Enum.reduce values, rdf_graph, fn - (%{"@list" => list}, rdf_graph) -> - with {list_triples, first} <- - list_to_rdf(list, node_id_map) do - rdf_graph - |> Graph.add({node_to_rdf(subject), node_to_rdf(property), first}) - |> Graph.add(list_triples) - end - (item, rdf_graph) -> - case object_to_rdf(item) do - nil -> rdf_graph - object -> - Graph.add rdf_graph, - {node_to_rdf(subject), node_to_rdf(property), object} - end - end - end - end) - else - rdf_graph - end - end) - if Enum.empty?(rdf_graph) do - dataset - else - Dataset.add(dataset, rdf_graph, - if(graph_name == "@default", do: nil, else: graph_name)) - end - else - dataset - end - end) - after - NodeIdentifierMap.stop(node_id_map) - end + @spec to_rdf(map, Options.t() | Enum.t()) :: Dataset.t() | Graph.t() + def to_rdf(element, options \\ %Options{}) do + {:ok, node_id_map} = NodeIdentifierMap.start_link() + + options = Options.new(options) + + try do + element + |> JSON.LD.expand(options) + |> JSON.LD.node_map(node_id_map) + |> Enum.sort_by(fn {graph_name, _} -> graph_name end) + |> Enum.reduce(Dataset.new(), fn {graph_name, graph}, dataset -> + unless relative_iri?(graph_name) do + rdf_graph = + graph + |> Enum.sort_by(fn {subject, _} -> subject end) + |> Enum.reduce(Graph.new(), fn {subject, node}, rdf_graph -> + unless relative_iri?(subject) do + node + |> Enum.sort_by(fn {property, _} -> property end) + |> Enum.reduce(rdf_graph, fn {property, values}, rdf_graph -> + cond do + property == "@type" -> + Graph.add( + rdf_graph, + node_to_rdf(subject), + RDF.NS.RDF.type(), + Enum.map(values, &node_to_rdf/1) + ) + + JSON.LD.keyword?(property) -> + rdf_graph + + not options.produce_generalized_rdf and blank_node_id?(property) -> + rdf_graph + + relative_iri?(property) -> + rdf_graph + + true -> + Enum.reduce(values, rdf_graph, fn + %{"@list" => list}, rdf_graph -> + with {list_triples, first} <- list_to_rdf(list, 
node_id_map) do + rdf_graph + |> Graph.add({node_to_rdf(subject), node_to_rdf(property), first}) + |> Graph.add(list_triples) + end + + item, rdf_graph -> + case object_to_rdf(item) do + nil -> + rdf_graph + + object -> + Graph.add( + rdf_graph, + {node_to_rdf(subject), node_to_rdf(property), object} + ) + end + end) + end + end) + else + rdf_graph + end + end) + + if Enum.empty?(rdf_graph) do + dataset + else + graph_name = if graph_name == "@default", do: nil, else: graph_name + Dataset.add(dataset, rdf_graph, graph_name) + end + else + dataset + end + end) + after + NodeIdentifierMap.stop(node_id_map) end end + @spec parse_json(String.t(), [Jason.decode_opt()]) :: + {:ok, map} | {:error, Jason.DecodeError.t()} def parse_json(content, _opts \\ []) do Jason.decode(content) end + @spec parse_json!(String.t(), [Jason.decode_opt()]) :: map def parse_json!(content, _opts \\ []) do Jason.decode!(content) end - def node_to_rdf(nil), do: nil + @spec node_to_rdf(String.t()) :: IRI.t() | BlankNode.t() def node_to_rdf(node) do if blank_node_id?(node) do node |> String.trim_leading("_:") - |> RDF.bnode + |> RDF.bnode() else RDF.uri(node) end end + @spec object_to_rdf(map) :: IRI.t() | BlankNode.t() | Literal.t() | nil defp object_to_rdf(%{"@id" => id}) do - unless relative_iri?(id) do - node_to_rdf(id) - end + unless relative_iri?(id), do: node_to_rdf(id) end defp object_to_rdf(%{"@value" => value} = item) do datatype = item["@type"] + {value, datatype} = cond do is_boolean(value) -> - value = value |> RDF.XSD.Boolean.new() |> RDF.XSD.Boolean.canonical() |> RDF.XSD.Boolean.lexical() - datatype = if is_nil(datatype), do: NS.XSD.boolean, else: datatype + value = + value + |> RDF.XSD.Boolean.new() + |> RDF.XSD.Boolean.canonical() + |> RDF.XSD.Boolean.lexical() + + datatype = if is_nil(datatype), do: NS.XSD.boolean(), else: datatype {value, datatype} - is_float(value) or (is_number(value) and datatype == to_string(NS.XSD.double)) -> - value = value |> RDF.XSD.Double.new() |> 
RDF.XSD.Double.canonical() |> RDF.XSD.Double.lexical() - datatype = if is_nil(datatype), do: NS.XSD.double, else: datatype + + is_float(value) or (is_number(value) and datatype == to_string(NS.XSD.double())) -> + value = + value + |> RDF.XSD.Double.new() + |> RDF.XSD.Double.canonical() + |> RDF.XSD.Double.lexical() + + datatype = if is_nil(datatype), do: NS.XSD.double(), else: datatype {value, datatype} - is_integer(value) or (is_number(value) and datatype == to_string(NS.XSD.integer)) -> - value = value |> RDF.XSD.Integer.new() |> RDF.XSD.Integer.canonical() |> RDF.XSD.Integer.lexical() - datatype = if is_nil(datatype), do: NS.XSD.integer, else: datatype + + is_integer(value) or (is_number(value) and datatype == to_string(NS.XSD.integer())) -> + value = + value + |> RDF.XSD.Integer.new() + |> RDF.XSD.Integer.canonical() + |> RDF.XSD.Integer.lexical() + + datatype = if is_nil(datatype), do: NS.XSD.integer(), else: datatype {value, datatype} + is_nil(datatype) -> datatype = - if Map.has_key?(item, "@language") do - RDF.langString - else - NS.XSD.string - end + if Map.has_key?(item, "@language"), do: RDF.langString(), else: NS.XSD.string() + {value, datatype} + true -> {value, datatype} end @@ -146,57 +179,58 @@ defmodule JSON.LD.Decoder do end end + @spec list_to_rdf([map], pid) :: {[Statement.t()], IRI.t() | BlankNode.t()} defp list_to_rdf(list, node_id_map) do {list_triples, first, last} = - list - |> Enum.reduce({[], nil, nil}, fn (item, {list_triples, first, last}) -> - case object_to_rdf(item) do - nil -> {list_triples, first, last} - object -> - with bnode = node_to_rdf(generate_blank_node_id(node_id_map)) do - if last do - { - list_triples ++ - [{last, RDF.NS.RDF.rest, bnode}, - {bnode, RDF.NS.RDF.first, object}], - first, - bnode - } - else - { - list_triples ++ [{bnode, RDF.NS.RDF.first, object}], - bnode, - bnode - } - end + Enum.reduce(list, {[], nil, nil}, fn item, {list_triples, first, last} -> + case object_to_rdf(item) do + nil -> + {list_triples, 
first, last} + + object -> + with bnode = node_to_rdf(generate_blank_node_id(node_id_map)) do + if last do + { + list_triples ++ + [{last, RDF.NS.RDF.rest(), bnode}, {bnode, RDF.NS.RDF.first(), object}], + first, + bnode + } + else + { + list_triples ++ [{bnode, RDF.NS.RDF.first(), object}], + bnode, + bnode + } end - end - end) + end + end + end) + if last do - {list_triples ++ [{last, RDF.NS.RDF.rest, RDF.NS.RDF.nil}], first} + {list_triples ++ [{last, RDF.NS.RDF.rest(), RDF.NS.RDF.nil()}], first} else - {[], RDF.NS.RDF.nil} + {[], RDF.NS.RDF.nil()} end end -# This is a much nicer and faster version, but the blank node numbering is reversed. -# Although this isn't relevant, I prefer to be more spec conform (for now). -# defp list_to_rdf(list, node_id_map) do -# list -# |> Enum.reverse -# |> Enum.reduce({[], RDF.NS.RDF.nil}, fn (item, {list_triples, last}) -> -# case object_to_rdf(item) do -# nil -> {list_triples, last} -# object -> -# with bnode = node_to_rdf(generate_blank_node_id(node_id_map)) do -# { -# [{bnode, RDF.NS.RDF.first, object}, -# {bnode, RDF.NS.RDF.rest, last } | list_triples], -# bnode -# } -# end -# end -# end) -# end - + # This is a much nicer and faster version, but the blank node numbering is reversed. + # Although this isn't relevant, I prefer to be more spec conform (for now). 
+ # defp list_to_rdf(list, node_id_map) do + # list + # |> Enum.reverse + # |> Enum.reduce({[], RDF.NS.RDF.nil}, fn (item, {list_triples, last}) -> + # case object_to_rdf(item) do + # nil -> {list_triples, last} + # object -> + # with bnode = node_to_rdf(generate_blank_node_id(node_id_map)) do + # { + # [{bnode, RDF.NS.RDF.first, object}, + # {bnode, RDF.NS.RDF.rest, last } | list_triples], + # bnode + # } + # end + # end + # end) + # end end diff --git a/lib/json/ld/encoder.ex b/lib/json/ld/encoder.ex index 2d7111c..d06ca08 100644 --- a/lib/json/ld/encoder.ex +++ b/lib/json/ld/encoder.ex @@ -4,157 +4,181 @@ defmodule JSON.LD.Encoder do use RDF.Serialization.Encoder - alias RDF.{IRI, BlankNode, Literal, XSD, NS} - - @rdf_type to_string(RDF.NS.RDF.type) - @rdf_nil to_string(RDF.NS.RDF.nil) - @rdf_first to_string(RDF.NS.RDF.first) - @rdf_rest to_string(RDF.NS.RDF.rest) - @rdf_list to_string(RDF.uri(RDF.NS.RDF.List)) + alias JSON.LD.Options + + alias RDF.{ + BlankNode, + Dataset, + Description, + Graph, + IRI, + LangString, + Literal, + NS, + Statement, + XSD + } + + @type input :: Dataset.t() | Description.t() | Graph.t() + + @rdf_type to_string(RDF.NS.RDF.type()) + @rdf_nil to_string(RDF.NS.RDF.nil()) + @rdf_first to_string(RDF.NS.RDF.first()) + @rdf_rest to_string(RDF.NS.RDF.rest()) + @rdf_list to_string(RDF.uri(RDF.NS.RDF.List)) @impl RDF.Serialization.Encoder + @spec encode(input, Options.t() | Enum.t()) :: {:ok, String.t()} | {:error, any} def encode(data, opts \\ []) do with {:ok, json_ld_object} <- from_rdf(data, opts) do encode_json(json_ld_object, opts) end end + @spec encode!(input, Options.t() | Enum.t()) :: String.t() def encode!(data, opts \\ []) do data |> from_rdf!(opts) |> encode_json!(opts) end - def from_rdf(dataset, options \\ %JSON.LD.Options{}) do + @spec from_rdf(input, Options.t() | Enum.t()) :: {:ok, [map]} | {:error, any} + def from_rdf(dataset, options \\ %Options{}) do try do {:ok, from_rdf!(dataset, options)} rescue - exception -> {:error, 
Exception.message(exception)} + exception -> + {:error, Exception.message(exception)} end end - def from_rdf!(rdf_data, options \\ %JSON.LD.Options{}) - - def from_rdf!(%RDF.Dataset{} = dataset, options) do - with options = JSON.LD.Options.new(options) do - graph_map = - Enum.reduce RDF.Dataset.graphs(dataset), %{}, - fn graph, graph_map -> - # 3.1) - name = to_string(graph.name || "@default") - - # 3.3) - graph_map = - if graph.name && !get_in(graph_map, ["@default", name]) do - Map.update graph_map, "@default", %{name => %{"@id" => name}}, - fn default_graph -> - Map.put(default_graph, name, %{"@id" => name}) - end - else - graph_map - end + @spec from_rdf!(input, Options.t() | Enum.t()) :: [map] + def from_rdf!(rdf_data, options \\ %Options{}) + + def from_rdf!(%Dataset{} = dataset, options) do + options = Options.new(options) - # 3.2 + 3.4) - Map.put(graph_map, name, - node_map_from_graph(graph, Map.get(graph_map, name, %{}), - options.use_native_types, options.use_rdf_type)) + graph_map = + Enum.reduce(Dataset.graphs(dataset), %{}, fn graph, graph_map -> + # 3.1) + name = to_string(graph.name || "@default") + + # 3.3) + graph_map = + if graph.name && !get_in(graph_map, ["@default", name]) do + Map.update(graph_map, "@default", %{name => %{"@id" => name}}, fn default_graph -> + Map.put(default_graph, name, %{"@id" => name}) + end) + else + graph_map end - # 4) - graph_map = - Enum.reduce graph_map, %{}, fn ({name, graph_object}, graph_map) -> - Map.put(graph_map, name, convert_list(graph_object)) + # 3.2 + 3.4) + node_map = + node_map_from_graph( + graph, + Map.get(graph_map, name, %{}), + options.use_native_types, + options.use_rdf_type + ) + + Map.put(graph_map, name, node_map) + end) + + # 4) + graph_map = + Enum.reduce(graph_map, %{}, fn {name, graph_object}, graph_map -> + Map.put(graph_map, name, convert_list(graph_object)) + end) + + # 5+6) + Map.get(graph_map, "@default", %{}) + |> Enum.sort_by(fn {subject, _} -> subject end) + |> Enum.reduce([], fn 
{subject, node}, result -> + # 6.1) + node = + if Map.has_key?(graph_map, subject) do + Map.put( + node, + "@graph", + graph_map[subject] + |> Enum.sort_by(fn {s, _} -> s end) + |> Enum.reduce([], fn {_s, n}, graph_nodes -> + n = Map.delete(n, "usages") + + if map_size(n) == 1 and Map.has_key?(n, "@id") do + graph_nodes + else + [n | graph_nodes] + end + end) + |> Enum.reverse() + ) + else + node end - # 5+6) - Map.get(graph_map, "@default", %{}) - |> Enum.sort_by(fn {subject, _} -> subject end) - |> Enum.reduce([], fn ({subject, node}, result) -> - # 6.1) - node = - if Map.has_key?(graph_map, subject) do - Map.put node, "@graph", - graph_map[subject] - |> Enum.sort_by(fn {s, _} -> s end) - |> Enum.reduce([], fn ({_s, n}, graph_nodes) -> - n = Map.delete(n, "usages") - if map_size(n) == 1 and Map.has_key?(n, "@id") do - graph_nodes - else - [n | graph_nodes] - end - end) - |> Enum.reverse - else - node - end - - # 6.2) - node = Map.delete(node, "usages") - if map_size(node) == 1 and Map.has_key?(node, "@id") do - result - else - [node | result] - end - end) - |> Enum.reverse - end + # 6.2) + node = Map.delete(node, "usages") + + if map_size(node) == 1 and Map.has_key?(node, "@id") do + result + else + [node | result] + end + end) + |> Enum.reverse() end def from_rdf!(rdf_data, options), - do: rdf_data |> RDF.Dataset.new() |> from_rdf!(options) + do: rdf_data |> Dataset.new() |> from_rdf!(options) # 3.5) + @spec node_map_from_graph(Graph.t(), map, boolean, boolean) :: map defp node_map_from_graph(graph, current, use_native_types, use_rdf_type) do - Enum.reduce(graph, current, fn ({subject, predicate, object}, node_map) -> - {subject, predicate, node_object} = - {to_string(subject), to_string(predicate), nil} + Enum.reduce(graph, current, fn {subject, predicate, object}, node_map -> + {subject, predicate, node_object} = {to_string(subject), to_string(predicate), nil} node = Map.get(node_map, subject, %{"@id" => subject}) + {node_object, node_map} = - if 
is_node_object = (match?(%IRI{}, object) || match?(%BlankNode{}, object)) do + if is_node_object = match?(%IRI{}, object) || match?(%BlankNode{}, object) do node_object = to_string(object) node_map = Map.put_new(node_map, node_object, %{"@id" => node_object}) {node_object, node_map} else {node_object, node_map} end + {node, node_map} = if is_node_object and !use_rdf_type and predicate == @rdf_type do - node = Map.update(node, "@type", [node_object], fn types -> - if node_object in types do - types - else - types ++ [node_object] - end - end) + node = + Map.update(node, "@type", [node_object], fn types -> + if node_object in types, do: types, else: types ++ [node_object] + end) + {node, node_map} else value = rdf_to_object(object, use_native_types) + node = Map.update(node, predicate, [value], fn objects -> - if value in objects do - objects - else - objects ++ [value] - end + if value in objects, do: objects, else: objects ++ [value] end) + node_map = if is_node_object do - usage = %{ - "node" => node, - "property" => predicate, - "value" => value, - } + usage = %{"node" => node, "property" => predicate, "value" => value} + Map.update(node_map, node_object, %{"usages" => [usage]}, fn object_node -> - Map.update(object_node, "usages", [usage], fn usages -> - usages ++ [usage] - end) + Map.update(object_node, "usages", [usage], fn usages -> usages ++ [usage] end) end) else node_map end + {node, node_map} end + Map.put(node_map, subject, node) end) |> update_node_usages @@ -162,39 +186,45 @@ defmodule JSON.LD.Encoder do # This function is necessary because we have no references and must update the # node member of the usage maps with later enhanced usages + @spec update_node_usages(map) :: map defp update_node_usages(node_map) do - Enum.reduce node_map, node_map, fn - ({subject, %{"usages" => _usages} = _node}, node_map) -> - update_in node_map, [subject, "usages"], fn usages -> - Enum.map usages, fn usage -> - Map.update! 
usage, "node", fn %{"@id" => subject} -> + Enum.reduce(node_map, node_map, fn + {subject, %{"usages" => _usages} = _node}, node_map -> + update_in(node_map, [subject, "usages"], fn usages -> + Enum.map(usages, fn usage -> + Map.update!(usage, "node", fn %{"@id" => subject} -> node_map[subject] - end - end - end - (_, node_map) -> node_map - end + end) + end) + end) + + _, node_map -> + node_map + end) end # This function is necessary because we have no references and use this # instead to update the head by path + @spec update_head(map, [String.t()], map, map) :: map defp update_head(graph_object, path, old, new) do - update_in graph_object, path, fn objects -> - Enum.map objects, fn - ^old -> new + update_in(graph_object, path, fn objects -> + Enum.map(objects, fn + ^old -> new current -> current - end - end + end) + end) end # 4) + @spec convert_list(map) :: map defp convert_list(%{@rdf_nil => nil_node} = graph_object) do - Enum.reduce nil_node["usages"], graph_object, + Enum.reduce( + nil_node["usages"], + graph_object, # 4.3.1) - fn (usage, graph_object) -> + fn usage, graph_object -> # 4.3.2) + 4.3.3) - {list, list_nodes, [subject, property] = head_path, head} = - extract_list(usage) + {list, list_nodes, [subject, property] = head_path, head} = extract_list(usage) # 4.3.4) {skip, list, list_nodes, head_path, head} = @@ -214,62 +244,87 @@ defmodule JSON.LD.Encoder do else {false, list, list_nodes, head_path, head} end + if skip do graph_object else graph_object = - update_head graph_object, head_path, head, + update_head( + graph_object, + head_path, + head, head # 4.3.5) |> Map.delete("@id") # 4.3.6) isn't necessary, since we built the list in reverse order # 4.3.7) |> Map.put("@list", list) + ) # 4.3.8) - Enum.reduce(list_nodes, graph_object, fn (node_id, graph_object) -> + Enum.reduce(list_nodes, graph_object, fn node_id, graph_object -> Map.delete(graph_object, node_id) end) end end + ) end defp convert_list(graph_object), do: graph_object - # 4.3.3) + 
@spec extract_list(map, [map], [String.t()]) :: {[map], [String.t()], [String.t()], map} defp extract_list(usage, list \\ [], list_nodes \\ []) defp extract_list( - %{"node" => %{ - # Spec FIXME: no mention of @id - "@id" => id = ("_:" <> _), # contrary to spec we assume/require this to be even on the initial call to be a blank node - "usages" => [usage], - @rdf_first => [first], - @rdf_rest => [_rest], - } = node, - "property" => @rdf_rest}, list, list_nodes) when map_size(node) == 4 do + %{ + "node" => + %{ + # Spec FIXME: no mention of @id + # contrary to spec we assume/require this to be even on the initial call to be a blank node + "@id" => id = "_:" <> _, + "usages" => [usage], + @rdf_first => [first], + @rdf_rest => [_rest] + } = node, + "property" => @rdf_rest + }, + list, + list_nodes + ) + when map_size(node) == 4 do extract_list(usage, [first | list], [id | list_nodes]) end defp extract_list( - %{"node" => %{ - # Spec FIXME: no mention of @id - "@id" => id = ("_:" <> _), # contrary to spec we assume/require this to be even on the initial call to be a blank node - "@type" => [@rdf_list], - "usages" => [usage], - @rdf_first => [first], - @rdf_rest => [_rest], - } = node, - "property" => @rdf_rest}, list, list_nodes) when map_size(node) == 5 do + %{ + "node" => + %{ + # Spec FIXME: no mention of @id + # contrary to spec we assume/require this to be even on the initial call to be a blank node + "@id" => id = "_:" <> _, + "@type" => [@rdf_list], + "usages" => [usage], + @rdf_first => [first], + @rdf_rest => [_rest] + } = node, + "property" => @rdf_rest + }, + list, + list_nodes + ) + when map_size(node) == 5 do extract_list(usage, [first | list], [id | list_nodes]) end - defp extract_list(%{"node" => %{"@id" => subject}, "property" => property, "value" => head}, - list, list_nodes), - do: {list, list_nodes, [subject, property], head} - + defp extract_list( + %{"node" => %{"@id" => subject}, "property" => property, "value" => head}, + list, + list_nodes + ), + 
do: {list, list_nodes, [subject, property], head} + @spec rdf_to_object(Statement.object(), boolean) :: map defp rdf_to_object(%IRI{} = iri, _use_native_types) do %{"@id" => to_string(iri)} end @@ -283,47 +338,59 @@ defmodule JSON.LD.Encoder do value = Literal.value(literal) converted_value = literal type = nil + {converted_value, type, result} = if use_native_types do cond do datatype == XSD.String -> {value, type, result} + datatype == XSD.Boolean -> if RDF.XSD.Boolean.valid?(literal) do {value, type, result} else - {converted_value, NS.XSD.boolean, result} + {converted_value, NS.XSD.boolean(), result} end + datatype in [XSD.Integer, XSD.Double] -> if Literal.valid?(literal) do {value, type, result} else {converted_value, type, result} end + true -> {converted_value, Literal.datatype_id(literal), result} end else cond do - datatype == RDF.LangString -> + datatype == LangString -> {converted_value, type, Map.put(result, "@language", Literal.language(literal))} + datatype == XSD.String -> {converted_value, type, result} + true -> {Literal.lexical(literal), Literal.datatype_id(literal), result} end end - result = type && Map.put(result, "@type", to_string(type)) || result - Map.put(result, "@value", - match?(%Literal{}, converted_value) && Literal.lexical(converted_value) || converted_value) - end + result = (type && Map.put(result, "@type", to_string(type))) || result + Map.put( + result, + "@value", + (match?(%Literal{}, converted_value) && Literal.lexical(converted_value)) || converted_value + ) + end + @spec encode_json(any, [Jason.encode_opt()]) :: + {:ok, String.t()} | {:error, Jason.EncodeError.t() | Exception.t()} defp encode_json(value, opts) do Jason.encode(value, opts) end + @spec encode_json!(any, [Jason.encode_opt()]) :: String.t() defp encode_json!(value, opts) do Jason.encode!(value, opts) end diff --git a/lib/json/ld/exceptions.ex b/lib/json/ld/exceptions.ex index f2430c1..21bb8d4 100644 --- a/lib/json/ld/exceptions.ex +++ b/lib/json/ld/exceptions.ex 
@@ -7,7 +7,8 @@ end defmodule JSON.LD.ListOfListsError do @moduledoc """ - A list of lists was detected. List of lists are not supported in this version of JSON-LD due to the algorithmic complexity. + A list of lists was detected. List of lists are not supported in this version of + JSON-LD due to the algorithmic complexity. """ defexception code: "list of lists", message: nil end @@ -42,7 +43,8 @@ end defmodule JSON.LD.MultipleContextLinkHeadersError do @moduledoc """ - Multiple HTTP Link Headers [RFC5988] using the http://www.w3.org/ns/json-ld#context link relation have been detected. + Multiple HTTP Link Headers [RFC5988] using the http://www.w3.org/ns/json-ld#context + link relation have been detected. """ defexception code: "multiple context link headers", message: nil end @@ -77,7 +79,8 @@ end defmodule JSON.LD.InvalidVocabMappingError do @moduledoc """ - An invalid vocabulary mapping has been detected, i.e., it is neither an absolute IRI nor null. + An invalid vocabulary mapping has been detected, i.e., it is neither an absolute IRI + nor null. """ defexception code: "invalid vocab mapping", message: nil end @@ -133,35 +136,40 @@ end defmodule JSON.LD.InvalidTypeMappingError do @moduledoc """ - An @type member in a term definition was encountered whose value could not be expanded to an absolute IRI. + An @type member in a term definition was encountered whose value could not be + expanded to an absolute IRI. """ defexception code: "invalid type mapping", message: nil end defmodule JSON.LD.InvalidLanguageMappingError do @moduledoc """ - An @language member in a term definition was encountered whose value was neither a string nor null and thus invalid. + An @language member in a term definition was encountered whose value was neither + a string nor null and thus invalid. 
""" defexception code: "invalid language mapping", message: nil end defmodule JSON.LD.CollidingKeywordsError do @moduledoc """ - Two properties which expand to the same keyword have been detected. This might occur if a keyword and an alias thereof are used at the same time. + Two properties which expand to the same keyword have been detected. This might occur + if a keyword and an alias thereof are used at the same time. """ defexception code: "colliding keywords", message: nil end defmodule JSON.LD.InvalidContainerMappingError do @moduledoc """ - An @container member was encountered whose value was not one of the following strings: @list, @set, or @index. + An @container member was encountered whose value was not one of the following strings: + @list, @set, or @index. """ defexception code: "invalid container mapping", message: nil end defmodule JSON.LD.InvalidTypeValueError do @moduledoc """ - An invalid value for an @type member has been detected, i.e., the value was neither a string nor an array of strings. + An invalid value for an @type member has been detected, i.e., the value was neither + a string nor an array of strings. """ defexception code: "invalid type value", message: nil end @@ -175,7 +183,8 @@ end defmodule JSON.LD.InvalidValueObjectValueError do @moduledoc """ - An invalid value for the @value member of a value object has been detected, i.e., it is neither a scalar nor null. + An invalid value for the @value member of a value object has been detected, + i.e., it is neither a scalar nor null. """ defexception code: "invalid value object value", message: nil end @@ -187,7 +196,6 @@ defmodule JSON.LD.InvalidLanguageTaggedStringError do defexception code: "invalid language-tagged string", message: nil end - defmodule JSON.LD.InvalidLanguageTaggedValueError do @moduledoc """ A number, true, or false with an associated language tag was detected. 
@@ -211,14 +219,16 @@ end defmodule JSON.LD.InvalidLanguageMapValueError do @moduledoc """ - An invalid value in a language map has been detected. It has to be a string or an array of strings. + An invalid value in a language map has been detected. It has to be a string or + an array of strings. """ defexception code: "invalid language map value", message: nil end defmodule JSON.LD.CompactionToListOfListsError do @moduledoc """ - The compacted document contains a list of lists as multiple lists have been compacted to the same term. + The compacted document contains a list of lists as multiple lists have been compacted + to the same term. """ defexception code: "compaction to list of lists", message: nil end @@ -232,14 +242,16 @@ end defmodule JSON.LD.InvalidReverseValueError do @moduledoc """ - An invalid value for an @reverse member has been detected, i.e., the value was not a JSON object. + An invalid value for an @reverse member has been detected, i.e., the value was not + a JSON object. """ defexception code: "invalid @reverse value", message: nil end defmodule JSON.LD.InvalidReversePropertyValueError do @moduledoc """ - An invalid value for a reverse property has been detected. The value of an inverse property must be a node object. + An invalid value for a reverse property has been detected. The value of an inverse + property must be a node object. 
""" defexception code: "invalid reverse property value", message: nil end diff --git a/lib/json/ld/expansion.ex b/lib/json/ld/expansion.ex index 7caf256..6d9a5dd 100644 --- a/lib/json/ld/expansion.ex +++ b/lib/json/ld/expansion.ex @@ -3,33 +3,30 @@ defmodule JSON.LD.Expansion do import JSON.LD.{IRIExpansion, Utils} + alias JSON.LD.{Context, Options} + alias JSON.LD.Context.TermDefinition - def expand(input, options \\ %JSON.LD.Options{}) do - with options = JSON.LD.Options.new(options), - active_context = JSON.LD.Context.new(options) - do - active_context = - case options.expand_context do - %{"@context" => context} -> - JSON.LD.Context.update(active_context, context) - %{} = context -> - JSON.LD.Context.update(active_context, context) - nil -> - active_context - end + @spec expand(map, Options.t() | Enum.t()) :: [map] + def expand(input, options \\ %Options{}) do + options = Options.new(options) + active_context = Context.new(options) - case do_expand(active_context, nil, input, options) do - result = %{"@graph" => graph} when map_size(result) == 1 -> - graph - nil -> - [] - result when not is_list(result) -> - [result] - result -> result + active_context = + case options.expand_context do + %{"@context" => context} -> Context.update(active_context, context) + %{} = context -> Context.update(active_context, context) + nil -> active_context end + + case do_expand(active_context, nil, input, options) do + result = %{"@graph" => graph} when map_size(result) == 1 -> graph + nil -> [] + result when not is_list(result) -> [result] + result -> result end end + @spec do_expand(Context.t(), String.t() | nil, any | nil, Options.t()) :: map | [map] | nil defp do_expand(active_context, active_property, element, options) # 1) If element is null, return null. @@ -37,7 +34,7 @@ defmodule JSON.LD.Expansion do # 2) If element is a scalar, ... 
defp do_expand(active_context, active_property, element, _options) - when is_binary(element) or is_number(element) or is_boolean(element) do + when is_binary(element) or is_number(element) or is_boolean(element) do if active_property in [nil, "@graph"] do nil else @@ -47,343 +44,438 @@ defmodule JSON.LD.Expansion do # 3) If element is an array, ... defp do_expand(active_context, active_property, element, options) - when is_list(element) do + when is_list(element) do term_def = active_context.term_defs[active_property] container_mapping = term_def && term_def.container_mapping - element - |> Enum.reduce([], fn (item, result) -> - expanded_item = do_expand(active_context, active_property, item, options) - if (active_property == "@list" or container_mapping == "@list") and - (is_list(expanded_item) or Map.has_key?(expanded_item, "@list")), - do: raise JSON.LD.ListOfListsError, - message: "List of lists in #{inspect element}" - case expanded_item do - nil -> result - list when is_list(list) -> - result ++ list - expanded_item -> - result ++ [expanded_item] - end + + Enum.reduce(element, [], fn item, result -> + expanded_item = do_expand(active_context, active_property, item, options) + + if (active_property == "@list" or container_mapping == "@list") and + (is_list(expanded_item) or Map.has_key?(expanded_item, "@list")) do + raise JSON.LD.ListOfListsError, message: "List of lists in #{inspect(element)}" + end + + case expanded_item do + nil -> result + list when is_list(list) -> result ++ list + expanded_item -> result ++ [expanded_item] + end end) end # 4) - 13) defp do_expand(active_context, active_property, element, options) - when is_map(element) do + when is_map(element) do # 5) active_context = if Map.has_key?(element, "@context") do - JSON.LD.Context.update(active_context, Map.get(element, "@context"), [], options) + Context.update(active_context, Map.get(element, "@context"), [], options) else active_context end + # 6) and 7) - result = element - |> 
Enum.sort_by(fn {key, _} -> key end) - |> Enum.reduce(%{}, fn ({key, value}, result) -> - if key != "@context" do # 7.1) - expanded_property = expand_iri(key, active_context, false, true) - if expanded_property && # 7.2) - (String.contains?(expanded_property, ":") || JSON.LD.keyword?(expanded_property)) do # 7.3) - if JSON.LD.keyword?(expanded_property) do # 7.4) - if active_property == "@reverse", # 7.4.1) - do: raise JSON.LD.InvalidReversePropertyMapError, - message: "An invalid reverse property map has been detected. No keywords apart from @context are allowed in reverse property maps." - if Map.has_key?(result, expanded_property), # 7.4.2) - do: raise JSON.LD.CollidingKeywordsError, - message: "Two properties which expand to the same keyword have been detected. This might occur if a keyword and an alias thereof are used at the same time." - - expanded_value = case expanded_property do - "@id" -> # 7.4.3) - if is_binary(value) do - expand_iri(value, active_context, true) - else - raise JSON.LD.InvalidIdValueError, - message: "#{inspect value} is not a valid @id value" + result = + element + |> Enum.sort_by(fn {key, _} -> key end) + |> Enum.reduce(%{}, fn {key, value}, result -> + # 7.1) + if key != "@context" do + expanded_property = expand_iri(key, active_context, false, true) + # 7.2) + # 7.3) + if is_binary(expanded_property) && + (String.contains?(expanded_property, ":") || JSON.LD.keyword?(expanded_property)) do + # 7.4) + # expanded_property is not a keyword + if JSON.LD.keyword?(expanded_property) do + # 7.4.1) + if active_property == "@reverse" do + raise JSON.LD.InvalidReversePropertyMapError, + message: + "An invalid reverse property map has been detected. No keywords apart from @context are allowed in reverse property maps." + end + + # 7.4.2) + if Map.has_key?(result, expanded_property) do + raise JSON.LD.CollidingKeywordsError, + message: + "Two properties which expand to the same keyword have been detected. 
This might occur if a keyword and an alias thereof are used at the same time." + end + + expanded_value = + case expanded_property do + # 7.4.3) + "@id" -> + if is_binary(value) do + expand_iri(value, active_context, true) + else + raise JSON.LD.InvalidIdValueError, + message: "#{inspect(value)} is not a valid @id value" + end + + # 7.4.4) + "@type" -> + cond do + is_binary(value) -> + expand_iri(value, active_context, true, true) + + is_list(value) and Enum.all?(value, &is_binary/1) -> + Enum.map(value, fn item -> + expand_iri(item, active_context, true, true) + end) + + true -> + raise JSON.LD.InvalidTypeValueError, + message: "#{inspect(value)} is not a valid @type value" + end + + # 7.4.5) + "@graph" -> + do_expand(active_context, "@graph", value, options) + + # 7.4.6) + "@value" -> + if scalar?(value) or is_nil(value) do + if is_nil(value) do + {:skip, Map.put(result, "@value", nil)} + else + value + end + else + raise JSON.LD.InvalidValueObjectValueError, + message: + "#{inspect(value)} is not a valid value for the @value member of a value object; neither a scalar nor null" + end + + # 7.4.7) + "@language" -> + if is_binary(value) do + String.downcase(value) + else + raise JSON.LD.InvalidLanguageTaggedStringError, + message: "#{inspect(value)} is not a valid language-tag" + end + + # 7.4.8) + "@index" -> + if is_binary(value) do + value + else + raise JSON.LD.InvalidIndexValueError, + message: "#{inspect(value)} is not a valid @index value" + end + + # 7.4.9) + "@list" -> + # 7.4.9.1) + if active_property in [nil, "@graph"] do + {:skip, result} + else + value = do_expand(active_context, active_property, value, options) + + # Spec FIXME: need to be sure that result is a list + # [from RDF.rb implementation] + value = if is_list(value), do: value, else: [value] + + # If expanded value is a list object, a list of lists error + # has been detected and processing is aborted. 
+ # Spec FIXME: Also look at each object if result is a list + # [from RDF.rb implementation] + if Enum.any?(value, fn v -> Map.has_key?(v, "@list") end) do + raise JSON.LD.ListOfListsError, + message: "List of lists in #{inspect(value)}" + end + + value + end + + # 7.4.10) + "@set" -> + do_expand(active_context, active_property, value, options) + + # 7.4.11) + "@reverse" -> + unless is_map(value) do + raise JSON.LD.InvalidReverseValueError, + message: "#{inspect(value)} is not a valid @reverse value" + end + + # 7.4.11.1) + expanded_value = do_expand(active_context, "@reverse", value, options) + + # 7.4.11.2) If expanded value contains an @reverse member, + # i.e., properties that are reversed twice, execute for each of its + # property and item the following steps: + new_result = + if Map.has_key?(expanded_value, "@reverse") do + Enum.reduce( + expanded_value["@reverse"], + result, + fn {property, item}, new_result -> + items = if is_list(item), do: item, else: [item] + + Map.update(new_result, property, items, fn members -> + members ++ items + end) + end + ) + else + result + end + + # 7.4.11.3) + new_result = + if Map.keys(expanded_value) != ["@reverse"] do + reverse_map = + Enum.reduce(expanded_value, Map.get(new_result, "@reverse", %{}), fn + {property, items}, reverse_map when property != "@reverse" -> + Enum.each(items, fn item -> + if Map.has_key?(item, "@value") or Map.has_key?(item, "@list") do + raise JSON.LD.InvalidReversePropertyValueError, + message: + "invalid value for a reverse property in #{inspect(item)}" + end + end) + + Map.update(reverse_map, property, items, fn members -> + members ++ items + end) + + _, reverse_map -> + reverse_map + end) + + Map.put(new_result, "@reverse", reverse_map) + else + new_result + end + + {:skip, new_result} + + _ -> + nil end - "@type" -> # 7.4.4) + + # 7.4.12) + case expanded_value do + nil -> result + {:skip, new_result} -> new_result + expanded_value -> Map.put(result, expanded_property, expanded_value) + 
end + else + term_def = active_context.term_defs[key] + + expanded_value = cond do - is_binary(value) -> - expand_iri(value, active_context, true, true) - is_list(value) and Enum.all?(value, &is_binary/1) -> - Enum.map value, fn item -> - expand_iri(item, active_context, true, true) end - true -> - raise JSON.LD.InvalidTypeValueError, - message: "#{inspect value} is not a valid @type value" - end - "@graph" -> # 7.4.5) - do_expand(active_context, "@graph", value, options) - "@value" -> # 7.4.6) - if scalar?(value) or is_nil(value) do - if is_nil(value) do - {:skip, Map.put(result, "@value", nil)} - else + # 7.5) Otherwise, if key's container mapping in active context is + # @language and value is a JSON object then value is expanded from + # a language map as follows: + is_map(value) && term_def && term_def.container_mapping == "@language" -> value - end - else - raise JSON.LD.InvalidValueObjectValueError, - message: "#{inspect value} is not a valid value for the @value member of a value object; neither a scalar nor null" - end - "@language" -> # 7.4.7) - if is_binary(value), - do: String.downcase(value), - else: raise JSON.LD.InvalidLanguageTaggedStringError, - message: "#{inspect value} is not a valid language-tag" - "@index" -> # 7.4.8) - if is_binary(value), - do: value, - else: raise JSON.LD.InvalidIndexValueError, - message: "#{inspect value} is not a valid @index value" - "@list" -> # 7.4.9) - if active_property in [nil, "@graph"] do # 7.4.9.1) - {:skip, result} - else - value = do_expand(active_context, active_property, value, options) - - # Spec FIXME: need to be sure that result is a list [from RDF.rb implementation] - value = if is_list(value), - do: value, - else: [value] - - # If expanded value is a list object, a list of lists error has been detected and processing is aborted. 
- # Spec FIXME: Also look at each object if result is a list [from RDF.rb implementation] - if Enum.any?(value, fn v -> Map.has_key?(v, "@list") end), - do: raise JSON.LD.ListOfListsError, - message: "List of lists in #{inspect value}" - value + |> Enum.sort_by(fn {language, _} -> language end) + |> Enum.reduce([], fn {language, language_value}, language_map_result -> + language_map_result ++ + (if(is_list(language_value), do: language_value, else: [language_value]) + |> Enum.map(fn + item when is_binary(item) -> + %{"@value" => item, "@language" => String.downcase(language)} + + item -> + raise JSON.LD.InvalidLanguageMapValueError, + message: "#{inspect(item)} is not a valid language map value" + end)) + end) + + # 7.6) + is_map(value) && term_def && term_def.container_mapping == "@index" -> + value + |> Enum.sort_by(fn {index, _} -> index end) + |> Enum.reduce([], fn {index, index_value}, index_map_result -> + index_map_result ++ + ( + index_value = + if is_list(index_value), do: index_value, else: [index_value] + + index_value = do_expand(active_context, key, index_value, options) + + Enum.map(index_value, fn item -> Map.put_new(item, "@index", index) end) + ) + end) + + # 7.7) + true -> + do_expand(active_context, key, value, options) end - "@set" -> # 7.4.10) - do_expand(active_context, active_property, value, options) - "@reverse" -> # 7.4.11) - unless is_map(value), - do: raise JSON.LD.InvalidReverseValueError, - message: "#{inspect value} is not a valid @reverse value" - expanded_value = do_expand(active_context, "@reverse", value, options) # 7.4.11.1) - new_result = - if Map.has_key?(expanded_value, "@reverse") do # 7.4.11.2) If expanded value contains an @reverse member, i.e., properties that are reversed twice, execute for each of its property and item the following steps: - Enum.reduce expanded_value["@reverse"], result, - fn ({property, item}, new_result) -> - items = if is_list(item), - do: item, - else: [item] - Map.update(new_result, property, 
items, fn members -> - members ++ items - end) - end + + # 7.8) + if is_nil(expanded_value) do + result + else + # 7.9) + expanded_value = + if term_def && term_def.container_mapping == "@list" && + !(is_map(expanded_value) && Map.has_key?(expanded_value, "@list")) do + %{ + "@list" => + if(is_list(expanded_value), do: expanded_value, else: [expanded_value]) + } else - result + expanded_value end - new_result = - if Map.keys(expanded_value) != ["@reverse"] do # 7.4.11.3) - reverse_map = - Enum.reduce expanded_value, Map.get(new_result, "@reverse", %{}), fn - ({property, items}, reverse_map) when property != "@reverse" -> - Enum.each(items, fn item -> - if Map.has_key?(item, "@value") or Map.has_key?(item, "@list"), - do: raise JSON.LD.InvalidReversePropertyValueError, - message: "invalid value for a reverse property in #{inspect item}" - end) - Map.update(reverse_map, property, items, fn members -> - members ++ items - end) - (_, reverse_map) -> reverse_map + + # 7.10) Otherwise, if the term definition associated to key indicates that + # it is a reverse property + # Spec FIXME: this is not an otherwise [from RDF.rb implementation] + # 7.11) + if term_def && term_def.reverse_property do + reverse_map = Map.get(result, "@reverse", %{}) + + reverse_map = + if(is_list(expanded_value), do: expanded_value, else: [expanded_value]) + |> Enum.reduce(reverse_map, fn item, reverse_map -> + if Map.has_key?(item, "@value") or Map.has_key?(item, "@list") do + raise JSON.LD.InvalidReversePropertyValueError, + message: "invalid value for a reverse property in #{inspect(item)}" end - Map.put(new_result, "@reverse", reverse_map) - else - new_result - end - {:skip, new_result} - _ -> - nil - end - # 7.4.12) - case expanded_value do - nil -> - result - {:skip, new_result} -> - new_result - expanded_value -> - Map.put(result, expanded_property, expanded_value) - end - else # expanded_property is not a keyword - term_def = active_context.term_defs[key] - expanded_value = cond do - # 
7.5) Otherwise, if key's container mapping in active context is @language and value is a JSON object then value is expanded from a language map as follows: - is_map(value) && term_def && term_def.container_mapping == "@language" -> - value - |> Enum.sort_by(fn {language, _} -> language end) - |> Enum.reduce([], fn ({language, language_value}, language_map_result) -> - language_map_result ++ ( - if(is_list(language_value), - do: language_value, - else: [language_value]) - |> Enum.map(fn - item when is_binary(item) -> - %{ - "@value" => item, - "@language" => String.downcase(language) - } - item -> - raise JSON.LD.InvalidLanguageMapValueError, - message: "#{inspect item} is not a valid language map value" + Map.update(reverse_map, expanded_property, [item], fn members -> + members ++ [item] + end) + end) - end) - ) - end) - # 7.6) - is_map(value) && term_def && term_def.container_mapping == "@index" -> - value - |> Enum.sort_by(fn {index, _} -> index end) - |> Enum.reduce([], fn ({index, index_value}, index_map_result) -> - index_map_result ++ ( - index_value = if(is_list(index_value), - do: index_value, - else: [index_value]) - index_value = do_expand(active_context, key, index_value, options) - Enum.map(index_value, fn item -> - Map.put_new(item, "@index", index) - end) - ) - end) - # 7.7) - true -> - do_expand(active_context, key, value, options) - end - # 7.8) - if is_nil(expanded_value) do - result - else - # 7.9) - expanded_value = - if (term_def && term_def.container_mapping == "@list") && - !(is_map(expanded_value) && Map.has_key?(expanded_value, "@list")) do - %{"@list" => - (if is_list(expanded_value), - do: expanded_value, - else: [expanded_value])} + Map.put(result, "@reverse", reverse_map) else - expanded_value + expanded_value = + if is_list(expanded_value), do: expanded_value, else: [expanded_value] + + Map.update(result, expanded_property, expanded_value, fn values -> + expanded_value ++ values + end) end - # 7.10) Otherwise, if the term definition 
associated to key indicates that it is a reverse property - # Spec FIXME: this is not an otherwise [from RDF.rb implementation] - if term_def && term_def.reverse_property do - reverse_map = Map.get(result, "@reverse", %{}) - reverse_map = - if(is_list(expanded_value), - do: expanded_value, - else: [expanded_value]) - |> Enum.reduce(reverse_map, fn (item, reverse_map) -> - if Map.has_key?(item, "@value") or Map.has_key?(item, "@list"), - do: raise JSON.LD.InvalidReversePropertyValueError, - message: "invalid value for a reverse property in #{inspect item}" - Map.update reverse_map, expanded_property, [item], fn members -> - members ++ [item] - end - end) - Map.put(result, "@reverse", reverse_map) - else # 7.11) - expanded_value = if is_list(expanded_value), - do: expanded_value, - else: [expanded_value] - Map.update result, expanded_property, expanded_value, - fn values -> expanded_value ++ values end end end + else + result end else result end - else - result - end - end) + end) - result = case result do - # 8) - %{"@value" => value} -> - with keys = Map.keys(result) do # 8.1) - if Enum.any?(keys, &(&1 not in ~w[@value @language @type @index])) || - ("@language" in keys and "@type" in keys) do - raise JSON.LD.InvalidValueObjectError, - message: "value object with disallowed members" + result = + case result do + # 8) + %{"@value" => value} -> + # 8.1) + with keys = Map.keys(result) do + if Enum.any?(keys, &(&1 not in ~w[@value @language @type @index])) || + ("@language" in keys and "@type" in keys) do + raise JSON.LD.InvalidValueObjectError, + message: "value object with disallowed members" + end + end + + cond do + # 8.2) + value == nil -> + nil + + # 8.3) + !is_binary(value) and Map.has_key?(result, "@language") -> + raise JSON.LD.InvalidLanguageTaggedValueError, + message: "@value '#{inspect(value)}' is tagged with a language" + + # 8.4) + (type = result["@type"]) && !RDF.uri?(type) -> + raise JSON.LD.InvalidTypedValueError, + message: "@value 
'#{inspect(value)}' has invalid type #{inspect(type)}" + + true -> + result end - end - cond do - value == nil -> nil # 8.2) - !is_binary(value) and Map.has_key?(result, "@language") -> # 8.3) - raise JSON.LD.InvalidLanguageTaggedValueError, - message: "@value '#{inspect value}' is tagged with a language" - (type = result["@type"]) && !RDF.uri?(type) -> # 8.4) - raise JSON.LD.InvalidTypedValueError, - message: "@value '#{inspect value}' has invalid type #{inspect type}" - true -> result - end - # 9) - %{"@type" => type} when not is_list(type) -> - Map.put(result, "@type", [type]) - # 10) - %{"@set" => set} -> - validate_set_or_list_object(result) - set - %{"@list" => _} -> - validate_set_or_list_object(result) - result - _ -> result - end + + # 9) + %{"@type" => type} when not is_list(type) -> + Map.put(result, "@type", [type]) + + # 10) + %{"@set" => set} -> + validate_set_or_list_object(result) + set + + %{"@list" => _} -> + validate_set_or_list_object(result) + result + + _ -> + result + end # 11) If result contains only the key @language, set result to null. 
- result = if is_map(result) and map_size(result) == 1 and Map.has_key?(result, "@language"), - do: nil, else: result + result = + if is_map(result) and map_size(result) == 1 and Map.has_key?(result, "@language"), + do: nil, + else: result # 12) If active property is null or @graph, drop free-floating values as follows: - # Spec FIXME: Due to case 10) we might land with a list here; other implementations deal with that, by just returning in step 10) - result = if is_map(result) and active_property in [nil, "@graph"] and ( - Enum.empty?(result) or - Map.has_key?(result, "@value") or Map.has_key?(result, "@list") or - (map_size(result) == 1 and Map.has_key?(result, "@id"))), - do: nil, else: result + # Spec FIXME: Due to case 10) we might land with a list here; other implementations + # deal with that, by just returning in step 10) + result = + if is_map(result) and active_property in [nil, "@graph"] and + (Enum.empty?(result) or + Map.has_key?(result, "@value") or Map.has_key?(result, "@list") or + (map_size(result) == 1 and Map.has_key?(result, "@id"))), + do: nil, + else: result result end + @spec validate_set_or_list_object(map) :: true defp validate_set_or_list_object(object) when map_size(object) == 1, do: true - defp validate_set_or_list_object(object = %{"@index" => _}) - when map_size(object) == 2, do: true + + defp validate_set_or_list_object(%{"@index" => _} = object) when map_size(object) == 2, do: true + defp validate_set_or_list_object(object) do raise JSON.LD.InvalidSetOrListObjectError, - message: "set or list object with disallowed members: #{inspect object}" + message: "set or list object with disallowed members: #{inspect(object)}" end - @doc """ Details at """ + @spec expand_value(Context.t(), String.t(), any) :: map def expand_value(active_context, active_property, value) do - with term_def = Map.get(active_context.term_defs, active_property, - %JSON.LD.Context.TermDefinition{}) do - cond do - term_def.type_mapping == "@id" -> - %{"@id" => 
expand_iri(value, active_context, true, false)} - term_def.type_mapping == "@vocab" -> - %{"@id" => expand_iri(value, active_context, true, true)} - type_mapping = term_def.type_mapping -> - %{"@value" => value, "@type" => type_mapping} - is_binary(value) -> - language_mapping = term_def.language_mapping - cond do - language_mapping -> - %{"@value" => value, "@language" => language_mapping} - language_mapping == false && active_context.default_language -> - %{"@value" => value, "@language" => active_context.default_language} - true -> + term_def = Map.get(active_context.term_defs, active_property, %TermDefinition{}) + + cond do + term_def.type_mapping == "@id" -> + %{"@id" => expand_iri(value, active_context, true, false)} + + term_def.type_mapping == "@vocab" -> + %{"@id" => expand_iri(value, active_context, true, true)} + + type_mapping = term_def.type_mapping -> + %{"@value" => value, "@type" => type_mapping} + + is_binary(value) -> + language_mapping = term_def.language_mapping + + cond do + language_mapping -> + %{"@value" => value, "@language" => language_mapping} + + language_mapping == false && active_context.default_language -> + %{"@value" => value, "@language" => active_context.default_language} + + true -> %{"@value" => value} - end - true -> - %{"@value" => value} - end + end + + true -> + %{"@value" => value} end end - end diff --git a/lib/json/ld/flattening.ex b/lib/json/ld/flattening.ex index fcb2ae7..a04b513 100644 --- a/lib/json/ld/flattening.ex +++ b/lib/json/ld/flattening.ex @@ -2,64 +2,71 @@ defmodule JSON.LD.Flattening do @moduledoc nil import JSON.LD.{NodeIdentifierMap, Utils} - alias JSON.LD.NodeIdentifierMap - - - def flatten(input, context \\ nil, options \\ %JSON.LD.Options{}) do - with options = JSON.LD.Options.new(options), - expanded = JSON.LD.expand(input, options), - node_map = node_map(expanded) - do - default_graph = - Enum.reduce node_map, node_map["@default"], fn - ({"@default", _}, default_graph) -> default_graph - ({graph_name, 
graph}, default_graph) -> - entry = - if Map.has_key?(default_graph, graph_name) do - default_graph[graph_name] - else - %{"@id" => graph_name} - end - graph_entry = - graph - |> Stream.reject(fn {_, node} -> - Map.has_key?(node, "@id") and map_size(node) == 1 end) - |> Enum.sort_by(fn {id, _} -> id end) - # TODO: Spec fixme: Spec doesn't handle the case, when a "@graph" member already exists - |> Enum.reduce(Map.get(entry, "@graph", []), fn ({_, node}, graph_entry) -> - [node | graph_entry] - end) - |> Enum.reverse - - Map.put(default_graph, graph_name, - Map.put(entry, "@graph", graph_entry)) - end + alias JSON.LD.{NodeIdentifierMap, Options} - flattened = - default_graph - |> Enum.sort_by(fn {id, _} -> id end) - |> Enum.reduce([], fn ({_, node}, flattened) -> - if not (Enum.count(node) == 1 and Map.has_key?(node, "@id")) do - [node | flattened] - else - flattened - end - end) - |> Enum.reverse - - if context && !Enum.empty?(flattened) do # TODO: Spec fixme: !Enum.empty?(flattened) is not in the spec, but in other implementations (Ruby, Java, Go, ...) 
- JSON.LD.compact(flattened, context, options) - else - flattened - end + @spec flatten(map | [map], map | nil, Options.t() | Enum.t()) :: [map] + def flatten(input, context \\ nil, options \\ %Options{}) do + options = Options.new(options) + expanded = JSON.LD.expand(input, options) + node_map = node_map(expanded) + + default_graph = + Enum.reduce(node_map, node_map["@default"], fn + {"@default", _}, default_graph -> + default_graph + + {graph_name, graph}, default_graph -> + entry = + if Map.has_key?(default_graph, graph_name) do + default_graph[graph_name] + else + %{"@id" => graph_name} + end + + graph_entry = + graph + |> Stream.reject(fn {_, node} -> + Map.has_key?(node, "@id") and map_size(node) == 1 + end) + |> Enum.sort_by(fn {id, _} -> id end) + # TODO: Spec fixme: Spec doesn't handle the case, when a "@graph" member + # already exists + |> Enum.reduce(Map.get(entry, "@graph", []), fn {_, node}, graph_entry -> + [node | graph_entry] + end) + |> Enum.reverse() + + Map.put(default_graph, graph_name, Map.put(entry, "@graph", graph_entry)) + end) + + flattened = + default_graph + |> Enum.sort_by(fn {id, _} -> id end) + |> Enum.reduce([], fn {_, node}, flattened -> + if not (Enum.count(node) == 1 and Map.has_key?(node, "@id")) do + [node | flattened] + else + flattened + end + end) + |> Enum.reverse() + + # TODO: Spec fixme: !Enum.empty?(flattened) is not in the spec, but in other + # implementations (Ruby, Java, Go, ...) 
+ if context && !Enum.empty?(flattened) do + JSON.LD.compact(flattened, context, options) + else + flattened end end + @spec node_map([map], pid | nil) :: map def node_map(input, node_id_map \\ nil) def node_map(input, nil) do - {:ok, node_id_map} = NodeIdentifierMap.start_link + {:ok, node_id_map} = NodeIdentifierMap.start_link() + try do node_map(input, node_id_map) after @@ -76,30 +83,68 @@ defmodule JSON.LD.Flattening do Details at """ - def generate_node_map(element, node_map, node_id_map, active_graph \\ "@default", - active_subject \\ nil, active_property \\ nil, list \\ nil) + @spec generate_node_map( + [map] | map, + map, + pid, + String.t(), + String.t() | nil, + String.t() | nil, + pid | nil + ) :: map + def generate_node_map( + element, + node_map, + node_id_map, + active_graph \\ "@default", + active_subject \\ nil, + active_property \\ nil, + list \\ nil + ) # 1) - def generate_node_map(element, node_map, node_id_map, active_graph, active_subject, - active_property, list) when is_list(element) do - Enum.reduce element, node_map, fn (item, node_map) -> - generate_node_map(item, node_map, node_id_map, active_graph, active_subject, - active_property, list) - end + def generate_node_map( + element, + node_map, + node_id_map, + active_graph, + active_subject, + active_property, + list + ) + when is_list(element) do + Enum.reduce(element, node_map, fn item, node_map -> + generate_node_map( + item, + node_map, + node_id_map, + active_graph, + active_subject, + active_property, + list + ) + end) end - # 2) - def generate_node_map(element, node_map, node_id_map, active_graph, active_subject, - active_property, list) when is_map(element) do + def generate_node_map( + element, + node_map, + node_id_map, + active_graph, + active_subject, + active_property, + list + ) + when is_map(element) do node_map = Map.put_new(node_map, active_graph, %{}) node = node_map[active_graph][active_subject] # 3) element = if old_types = Map.get(element, "@type") do - new_types = 
Enum.reduce(List.wrap(old_types), [], - fn (item, types) -> + new_types = + Enum.reduce(List.wrap(old_types), [], fn item, types -> if blank_node_id?(item) do identifier = generate_blank_node_id(node_id_map, item) types ++ [identifier] @@ -107,24 +152,24 @@ defmodule JSON.LD.Flattening do types ++ [item] end end) - Map.put(element, "@type", - if(is_list(old_types), do: new_types, else: List.first(new_types))) + + Map.put( + element, + "@type", + if(is_list(old_types), do: new_types, else: List.first(new_types)) + ) else element end cond do - # 4) Map.has_key?(element, "@value") -> if is_nil(list) do if node do update_in(node_map, [active_graph, active_subject, active_property], fn nil -> [element] - items -> - unless element in items, - do: items ++ [element], - else: items + items -> unless element in items, do: items ++ [element], else: items end) else node_map @@ -137,19 +182,28 @@ defmodule JSON.LD.Flattening do # 5) Map.has_key?(element, "@list") -> {:ok, result_list} = new_list() + {node_map, result} = try do { - generate_node_map(element["@list"], node_map, node_id_map, - active_graph, active_subject, active_property, result_list), + generate_node_map( + element["@list"], + node_map, + node_id_map, + active_graph, + active_subject, + active_property, + result_list + ), get_list(result_list) } - after - terminate_list(result_list) - end + after + terminate_list(result_list) + end + if node do update_in(node_map, [active_graph, active_subject, active_property], fn - nil -> [result] + nil -> [result] items -> items ++ [result] end) else @@ -160,14 +214,12 @@ defmodule JSON.LD.Flattening do true -> # 6.1) {id, element} = Map.pop(element, "@id") + id = if id do - if blank_node_id?(id) do - generate_blank_node_id(node_id_map, id) - else - id - end - # 6.2) + if blank_node_id?(id), do: generate_blank_node_id(node_id_map, id), else: id + + # 6.2) else generate_blank_node_id(node_id_map) end @@ -182,7 +234,9 @@ defmodule JSON.LD.Flattening do node_map end - # 6.4) 
TODO: Spec fixme: "this line is asked for by the spec, but it breaks various tests" (according to Java and Go implementation, which perform this step before 6.7) instead) + # 6.4) TODO: Spec fixme: "this line is asked for by the spec, but it breaks + # various tests" (according to Java and Go implementation, which perform this + # step before 6.7) instead) node = node_map[active_graph][id] # 6.5) @@ -190,28 +244,32 @@ defmodule JSON.LD.Flattening do if is_map(active_subject) do unless Map.has_key?(node, active_property) do update_in(node_map, [active_graph, id, active_property], fn - nil -> [active_subject] + nil -> + [active_subject] + items -> - unless active_subject in items, - do: items ++ [active_subject], - else: items + unless active_subject in items, do: items ++ [active_subject], else: items end) else node_map end - # 6.6) + + # 6.6) else unless is_nil(active_property) do reference = %{"@id" => id} + if is_nil(list) do update_in(node_map, [active_graph, active_subject, active_property], fn - nil -> [reference] + nil -> + [reference] + items -> - unless reference in items, - do: items ++ [reference], - else: items + unless reference in items, do: items ++ [reference], else: items end) - # 6.6.3) TODO: Spec fixme: specs says to add ELEMENT to @list member, should be REFERENCE + + # 6.6.3) TODO: Spec fixme: specs says to add ELEMENT to @list member, + # should be REFERENCE else append_to_list(list, reference) node_map @@ -225,15 +283,13 @@ defmodule JSON.LD.Flattening do {node_map, element} = if Map.has_key?(element, "@type") do node_map = - Enum.reduce element["@type"], node_map, fn (type, node_map) -> + Enum.reduce(element["@type"], node_map, fn type, node_map -> update_in(node_map, [active_graph, id, "@type"], fn nil -> [type] - items -> - unless type in items, - do: items ++ [type], - else: items + items -> unless type in items, do: items ++ [type], else: items end) - end + end) + element = Map.delete(element, "@type") {node_map, element} else @@ -244,6 
+300,7 @@ defmodule JSON.LD.Flattening do {node_map, element} = if Map.has_key?(element, "@index") do {element_index, element} = Map.pop(element, "@index") + node_map = if node_index = get_in(node_map, [active_graph, id, "@index"]) do if not deep_compare(node_index, element_index) do @@ -251,10 +308,11 @@ defmodule JSON.LD.Flattening do message: "Multiple conflicting indexes have been found for the same node." end else - update_in node_map, [active_graph, id], fn node -> + update_in(node_map, [active_graph, id], fn node -> Map.put(node, "@index", element_index) - end + end) end + {node_map, element} else {node_map, element} @@ -265,12 +323,21 @@ defmodule JSON.LD.Flattening do if Map.has_key?(element, "@reverse") do referenced_node = %{"@id" => id} {reverse_map, element} = Map.pop(element, "@reverse") - node_map = Enum.reduce reverse_map, node_map, fn ({property, values}, node_map) -> - Enum.reduce values, node_map, fn (value, node_map) -> - generate_node_map(value, node_map, node_id_map, active_graph, - referenced_node, property) - end - end + + node_map = + Enum.reduce(reverse_map, node_map, fn {property, values}, node_map -> + Enum.reduce(values, node_map, fn value, node_map -> + generate_node_map( + value, + node_map, + node_id_map, + active_graph, + referenced_node, + property + ) + end) + end) + {node_map, element} else {node_map, element} @@ -288,55 +355,60 @@ defmodule JSON.LD.Flattening do # 6.11) element |> Enum.sort_by(fn {property, _} -> property end) - |> Enum.reduce(node_map, fn ({property, value}, node_map) -> - property = - if blank_node_id?(property) do - generate_blank_node_id(node_id_map, property) - else - property - end - node_map = - unless Map.has_key?(node_map[active_graph][id], property) do - update_in node_map, [active_graph, id], fn node -> - Map.put(node, property, []) - end - else - node_map - end - generate_node_map(value, node_map, node_id_map, active_graph, id, property) - end) + |> Enum.reduce(node_map, fn {property, value}, 
node_map -> + property = + if blank_node_id?(property) do + generate_blank_node_id(node_id_map, property) + else + property + end + + node_map = + unless Map.has_key?(node_map[active_graph][id], property) do + update_in(node_map, [active_graph, id], fn node -> Map.put(node, property, []) end) + else + node_map + end + + generate_node_map(value, node_map, node_id_map, active_graph, id, property) + end) end end + @spec deep_compare(map | [map], map | [map]) :: boolean defp deep_compare(v1, v2) when is_map(v1) and is_map(v2) do Enum.count(v1) == Enum.count(v2) && Enum.all?(v1, fn {k, v} -> Map.has_key?(v2, k) && deep_compare(v, v2[k]) end) end + defp deep_compare(v1, v2) when is_list(v1) and is_list(v2) do Enum.count(v1) == Enum.count(v2) && MapSet.new(v1) == MapSet.new(v2) end + defp deep_compare(v, v), do: true defp deep_compare(_, _), do: false - + @spec new_list :: Agent.on_start() defp new_list do - Agent.start_link fn -> %{"@list" => []} end + Agent.start_link(fn -> %{"@list" => []} end) end + @spec terminate_list(pid) :: :ok defp terminate_list(pid) do - Agent.stop pid + :ok = Agent.stop(pid) end + @spec get_list(pid) :: map defp get_list(pid) do - Agent.get pid, fn list_node -> list_node end + Agent.get(pid, fn list_node -> list_node end) end + @spec append_to_list(pid, map) :: :ok defp append_to_list(pid, element) do - Agent.update pid, fn list_node -> + Agent.update(pid, fn list_node -> Map.update(list_node, "@list", [element], fn list -> list ++ [element] end) - end + end) end - end diff --git a/lib/json/ld/iri_expansion.ex b/lib/json/ld/iri_expansion.ex index eaaabae..9d58e0d 100644 --- a/lib/json/ld/iri_expansion.ex +++ b/lib/json/ld/iri_expansion.ex @@ -1,18 +1,28 @@ defmodule JSON.LD.IRIExpansion do - import JSON.LD.Utils - @keywords JSON.LD.keywords # to allow this to be used in function guard clauses, we redefine this here + alias JSON.LD.Context + + # to allow this to be used in function guard clauses, we redefine this here + @keywords 
JSON.LD.keywords() @doc """ see http://json-ld.org/spec/latest/json-ld-api/#iri-expansion """ - def expand_iri(value, active_context, doc_relative \\ false, vocab \\ false, - local_context \\ nil, defined \\ nil) + @spec expand_iri(String.t(), Context.t(), boolean, boolean, map | nil, map | nil) :: + {String.t(), Context.t(), map} | String.t() + def expand_iri( + value, + active_context, + doc_relative \\ false, + vocab \\ false, + local_context \\ nil, + defined \\ nil + ) # 1) If value is a keyword or null, return value as is. def expand_iri(value, active_context, _, _, local_context, defined) - when is_nil(value) or value in @keywords do + when is_nil(value) or value in @keywords do if local_context || defined do {value, active_context, defined} else @@ -25,19 +35,28 @@ defmodule JSON.LD.IRIExpansion do {active_context, defined} = if local_context && local_context[value] && defined[value] != true do local_def = local_context[value] + JSON.LD.Context.create_term_definition( - active_context, local_context, value, local_def, defined) + active_context, + local_context, + value, + local_def, + defined + ) else {active_context, defined} end {result, active_context, defined} = cond do - # 3) If vocab is true and the active context has a term definition for value, return the associated IRI mapping. + # 3) If vocab is true and the active context has a term definition for value, + # return the associated IRI mapping. 
vocab && Map.has_key?(active_context.term_defs, value) -> result = (term_def = active_context.term_defs[value]) && term_def.iri_mapping {result, active_context, defined} - # 4) If value contains a colon (:), it is either an absolute IRI, a compact IRI, or a blank node identifier + + # 4) If value contains a colon (:), it is either an absolute IRI, a compact IRI, + # or a blank node identifier String.contains?(value, ":") -> case compact_iri_parts(value) do [prefix, suffix] -> @@ -45,33 +64,56 @@ defmodule JSON.LD.IRIExpansion do {active_context, defined} = if local_context && local_context[prefix] && defined[prefix] != true do local_def = local_context[prefix] + JSON.LD.Context.create_term_definition( - active_context, local_context, prefix, local_def, defined) + active_context, + local_context, + prefix, + local_def, + defined + ) else {active_context, defined} end + # 4.4) result = if prefix_def = active_context.term_defs[prefix] do prefix_def.iri_mapping <> suffix else - value # 4.5) + # 4.5) + value end + {result, active_context, defined} + nil -> - {value, active_context, defined} # 4.2) + # 4.2) + {value, active_context, defined} end - # 5) If vocab is true, and active context has a vocabulary mapping, return the result of concatenating the vocabulary mapping with value. + + # 5) If vocab is true, and active context has a vocabulary mapping, return the + # result of concatenating the vocabulary mapping with value. vocab && active_context.vocab -> vocabulary_mapping = active_context.vocab {vocabulary_mapping <> value, active_context, defined} - # 6) Otherwise, if document relative is true, set value to the result of resolving value against the base IRI. Only the basic algorithm in section 5.2 of [RFC3986] is used; neither Syntax-Based Normalization nor Scheme-Based Normalization are performed. Characters additionally allowed in IRI references are treated in the same way that unreserved characters are treated in URI references, per section 6.5 of [RFC3987]. 
+ + # 6) Otherwise, if document relative is true, set value to the result of resolving + # value against the base IRI. Only the basic algorithm in section 5.2 of [RFC3986] + # is used; neither Syntax-Based Normalization nor Scheme-Based Normalization are + # performed. Characters additionally allowed in IRI references are treated in the + # same way that unreserved characters are treated in URI references, per section + # 6.5 of [RFC3987]. doc_relative -> {absolute_iri(value, JSON.LD.Context.base(active_context)), active_context, defined} -# TODO: RDF.rb's implementation differs from the spec here, by checking if base_iri is actually present in the previous clause and adding the following additional clause. Another Spec error? -# if local_context && RDF::URI(value).relative? -# # If local context is not null and value is not an absolute IRI, an invalid IRI mapping error has been detected and processing is aborted. -# raise JSON.LD.InvalidIRIMappingError, message: "not an absolute IRI: #{value}" + + # TODO: RDF.rb's implementation differs from the spec here, by checking if + # base_iri is actually present in the previous clause and adding the following + # additional clause. Another Spec error? + # if local_context && RDF::URI(value).relative? + # # If local context is not null and value is not an absolute IRI, an invalid + # # IRI mapping error has been detected and processing is aborted. + # raise JSON.LD.InvalidIRIMappingError, message: "not an absolute IRI: #{value}" # 7) Return value as is. 
true -> {value, active_context, defined} @@ -83,5 +125,4 @@ defmodule JSON.LD.IRIExpansion do result end end - end diff --git a/lib/json/ld/node_identifier_map.ex b/lib/json/ld/node_identifier_map.ex index 711df86..c113ac9 100644 --- a/lib/json/ld/node_identifier_map.ex +++ b/lib/json/ld/node_identifier_map.ex @@ -5,12 +5,14 @@ defmodule JSON.LD.NodeIdentifierMap do # Client API + @spec start_link(keyword) :: GenServer.on_start() def start_link(opts \\ []) do GenServer.start_link(__MODULE__, :ok, opts) end + @spec stop(GenServer.server(), atom, timeout) :: :ok def stop(pid, reason \\ :normal, timeout \\ :infinity) do - GenServer.stop(pid, reason, timeout) + :ok = GenServer.stop(pid, reason, timeout) end @doc """ @@ -18,32 +20,28 @@ defmodule JSON.LD.NodeIdentifierMap do Details at """ + @spec generate_blank_node_id(GenServer.server(), String.t() | nil) :: String.t() def generate_blank_node_id(pid, identifier \\ nil) do GenServer.call(pid, {:generate_id, identifier}) end - # Server Callbacks + @spec init(:ok) :: {:ok, map} def init(:ok) do {:ok, %{map: %{}, counter: 0}} end + @spec handle_call({:generate_id, String.t() | nil}, GenServer.from(), map) :: + {:reply, String.t(), map} def handle_call({:generate_id, identifier}, _, %{map: map, counter: counter} = state) do if identifier && map[identifier] do {:reply, map[identifier], state} else blank_node_id = "_:b#{counter}" - {:reply, blank_node_id, %{ - counter: counter + 1, - map: - if identifier do - Map.put(map, identifier, blank_node_id) - else - map - end - }} + map = if identifier, do: Map.put(map, identifier, blank_node_id), else: map + + {:reply, blank_node_id, %{counter: counter + 1, map: map}} end end - end diff --git a/lib/json/ld/options.ex b/lib/json/ld/options.ex index 5c0f8f1..997eb7f 100644 --- a/lib/json/ld/options.ex +++ b/lib/json/ld/options.ex @@ -5,6 +5,17 @@ defmodule JSON.LD.Options do as specified at """ + @type t :: %__MODULE__{ + base: String.t() | nil, + compact_arrays: boolean, + 
document_loader: nil, + expand_context: map | nil, + produce_generalized_rdf: boolean, + use_rdf_type: boolean, + use_native_types: boolean, + processing_mode: String.t() + } + defstruct base: nil, compact_arrays: true, document_loader: nil, @@ -14,8 +25,10 @@ defmodule JSON.LD.Options do use_native_types: false, processing_mode: "json-ld-1.0" - def new(), do: %JSON.LD.Options{} - def new(%JSON.LD.Options{} = options), do: options - def new(options), do: struct(JSON.LD.Options, options) + @spec new :: t + def new(), do: %__MODULE__{} + @spec new(t | Enum.t()) :: t + def new(%__MODULE__{} = options), do: options + def new(options), do: struct(__MODULE__, options) end diff --git a/lib/json/ld/utils.ex b/lib/json/ld/utils.ex index f9e82a1..9569f5b 100644 --- a/lib/json/ld/utils.ex +++ b/lib/json/ld/utils.ex @@ -1,5 +1,4 @@ defmodule JSON.LD.Utils do - alias RDF.IRI @doc """ @@ -12,28 +11,30 @@ defmodule JSON.LD.Utils do Characters additionally allowed in IRI references are treated in the same way that unreserved characters are treated in URI references, per [section 6.5 of RFC3987](http://tools.ietf.org/html/rfc3987#section-6.5) """ + @spec absolute_iri(String.t(), String.t() | nil) :: IRI.coercible() | nil def absolute_iri(value, base_iri) def absolute_iri(value, nil), do: value - def absolute_iri(value, base_iri), - do: value |> RDF.IRI.absolute(base_iri) |> to_string + def absolute_iri(value, base_iri), + do: value |> IRI.absolute(base_iri) |> to_string + @spec relative_iri?(String.t()) :: boolean def relative_iri?(value), do: not (JSON.LD.keyword?(value) or IRI.absolute?(value) or blank_node_id?(value)) + @spec compact_iri_parts(String.t(), boolean) :: [String.t()] | nil def compact_iri_parts(compact_iri, exclude_bnode \\ true) do with [prefix, suffix] <- String.split(compact_iri, ":", parts: 2) do - if not(String.starts_with?(suffix, "//")) and - not(exclude_bnode and prefix == "_"), - do: [prefix, suffix] + if not String.starts_with?(suffix, "//") and + not 
(exclude_bnode and prefix == "_"), + do: [prefix, suffix] else _ -> nil end end - @doc """ Checks if the given value is a blank node identifier. @@ -44,19 +45,23 @@ defmodule JSON.LD.Utils do see """ + @spec blank_node_id?(String.t()) :: boolean def blank_node_id?("_:" <> _), do: true - def blank_node_id?(_), do: false + def blank_node_id?(_), do: false - - def scalar?(value) when is_binary(value) or is_number(value) or - is_boolean(value), do: true + @spec scalar?(any) :: boolean + def scalar?(value) when is_binary(value) or is_number(value) or is_boolean(value), do: true def scalar?(_), do: false - def list?(%{"@list" => _}), do: true - def list?(_), do: false + @spec list?(map | nil) :: boolean + def list?(%{"@list" => _}), do: true + def list?(_), do: false + + @spec index?(map | nil) :: boolean def index?(%{"@index" => _}), do: true - def index?(_), do: false - def value?(%{"@value" => _}), do: true - def value?(_), do: false + def index?(_), do: false + @spec value?(map | nil) :: boolean + def value?(%{"@value" => _}), do: true + def value?(_), do: false end diff --git a/lib/json_ld.ex b/lib/json_ld.ex index 6eda65a..6b1e496 100644 --- a/lib/json_ld.ex +++ b/lib/json_ld.ex @@ -1,15 +1,14 @@ defmodule JSON.LD do - use RDF.Serialization.Format import RDF.Sigils - @id ~I - @name :jsonld - @extension "jsonld" - @media_type "application/ld+json" + alias JSON.LD.{Compaction, Context, Expansion, Flattening, Options} - def options, do: JSON.LD.Options.new + @id ~I + @name :jsonld + @extension "jsonld" + @media_type "application/ld+json" @keywords ~w[ @base @@ -29,20 +28,24 @@ defmodule JSON.LD do : ] + @spec options :: Options.t() + def options, do: Options.new() + @doc """ The set of all JSON-LD keywords. see """ + @spec keywords :: [String.t()] def keywords, do: @keywords @doc """ Returns if the given value is a JSON-LD keyword. 
""" + @spec keyword?(String.t()) :: boolean def keyword?(value) when is_binary(value) and value in @keywords, do: true def keyword?(_value), do: false - @doc """ Expands the given input according to the steps in the JSON-LD Expansion Algorithm. @@ -54,9 +57,9 @@ defmodule JSON.LD do Details at """ - defdelegate expand(input, options \\ %JSON.LD.Options{}), - to: JSON.LD.Expansion - + @spec expand(map, Options.t() | Enum.t()) :: [map] + defdelegate expand(input, options \\ %Options{}), + to: Expansion @doc """ Compacts the given input according to the steps in the JSON-LD Compaction Algorithm. @@ -71,9 +74,9 @@ defmodule JSON.LD do Details at """ - defdelegate compact(input, context, options \\ %JSON.LD.Options{}), - to: JSON.LD.Compaction - + @spec compact(map | [map], map | nil, Options.t() | Enum.t()) :: map + defdelegate compact(input, context, options \\ %Options{}), + to: Compaction @doc """ Flattens the given input according to the steps in the JSON-LD Flattening Algorithm. @@ -87,9 +90,9 @@ defmodule JSON.LD do Details at """ - defdelegate flatten(input, context \\ nil, options \\ %JSON.LD.Options{}), - to: JSON.LD.Flattening - + @spec flatten(map | [map], map | nil, Options.t() | Enum.t()) :: [map] + defdelegate flatten(input, context \\ nil, options \\ %Options{}), + to: Flattening @doc """ Generator function for `JSON.LD.Context`s. @@ -97,19 +100,19 @@ defmodule JSON.LD do You can either pass a map with a `"@context"` key having the JSON-LD context object its value, or the JSON-LD context object directly. 
""" - def context(args, opts \\ %JSON.LD.Options{}) + @spec context(map, Options.t()) :: Context.t() + def context(args, opts \\ %Options{}) def context(%{"@context" => _} = object, options), - do: JSON.LD.Context.create(object, options) + do: Context.create(object, options) def context(context, options), - do: JSON.LD.Context.create(%{"@context" => context}, options) - + do: Context.create(%{"@context" => context}, options) @doc """ Generator function for JSON-LD node maps. """ - def node_map(input, node_id_map \\ nil), - do: JSON.LD.Flattening.node_map(input, node_id_map) - + @spec node_map([map], pid | nil) :: map + defdelegate node_map(input, node_id_map \\ nil), + to: Flattening end From a986eb302e583a930c0bf34a3760ab0d3b9e21c9 Mon Sep 17 00:00:00 2001 From: rustra Date: Fri, 19 Jun 2020 17:55:53 +0200 Subject: [PATCH 2/2] Fix some Credo warnings --- lib/json/ld/compaction.ex | 52 +++++++++++++++---------------- lib/json/ld/decoder.ex | 60 ++++++++++++++++++------------------ lib/json/ld/encoder.ex | 10 +++--- lib/json/ld/expansion.ex | 12 ++++---- lib/json/ld/iri_expansion.ex | 6 ++-- lib/json/ld/options.ex | 2 +- lib/json/ld/utils.ex | 13 ++++---- 7 files changed, 77 insertions(+), 78 deletions(-) diff --git a/lib/json/ld/compaction.ex b/lib/json/ld/compaction.ex index 0184754..a7c33f5 100644 --- a/lib/json/ld/compaction.ex +++ b/lib/json/ld/compaction.ex @@ -7,28 +7,28 @@ defmodule JSON.LD.Compaction do @spec compact(map | [map], map | nil, Options.t() | Enum.t()) :: map def compact(input, context, options \\ %Options{}) do - with options = Options.new(options), - active_context = JSON.LD.context(context, options), - inverse_context = Context.inverse(active_context), - expanded = JSON.LD.expand(input, options) do - result = - case do_compact(expanded, active_context, inverse_context, nil, options.compact_arrays) do - [] -> - %{} - - result when is_list(result) -> - # TODO: Spec fixme? 
We're setting vocab to true, as other implementations - # do it, but this is not mentioned in the spec - %{compact_iri("@graph", active_context, inverse_context, nil, true) => result} - - result -> - result - end + options = Options.new(options) + active_context = JSON.LD.context(context, options) + inverse_context = Context.inverse(active_context) + expanded = JSON.LD.expand(input, options) - if Context.empty?(active_context), - do: result, - else: Map.put(result, "@context", context["@context"] || context) - end + result = + case do_compact(expanded, active_context, inverse_context, nil, options.compact_arrays) do + [] -> + %{} + + result when is_list(result) -> + # TODO: Spec fixme? We're setting vocab to true, as other implementations + # do it, but this is not mentioned in the spec + %{compact_iri("@graph", active_context, inverse_context, nil, true) => result} + + result -> + result + end + + if Context.empty?(active_context), + do: result, + else: Map.put(result, "@context", context["@context"] || context) end @spec do_compact(any, Context.t(), map, String.t() | nil, boolean) :: any @@ -58,10 +58,10 @@ defmodule JSON.LD.Compaction do end) |> Enum.reverse() + term_def = active_context.term_defs[active_property] + if compact_arrays and length(result) == 1 and - is_nil( - (term_def = active_context.term_defs[active_property]) && term_def.container_mapping - ) do + is_nil(term_def && term_def.container_mapping) do List.first(result) else result @@ -579,9 +579,9 @@ defmodule JSON.LD.Compaction do # preferred values. # TODO: Spec fixme? 
document_relative is not a specified parameter of compact_iri compact_id = compact_iri(value["@id"], active_context, inverse_context, nil, true) + term_def = active_context.term_defs[compact_id] - if (term_def = active_context.term_defs[compact_id]) && - term_def.iri_mapping == value["@id"] do + if term_def && term_def.iri_mapping == value["@id"] do preferred_values ++ ~w[@vocab @id @none] # 2.12.2) Otherwise, append @id, @vocab, and @none, in that order, to diff --git a/lib/json/ld/decoder.ex b/lib/json/ld/decoder.ex index 4e19eb9..39dd7e2 100644 --- a/lib/json/ld/decoder.ex +++ b/lib/json/ld/decoder.ex @@ -7,7 +7,7 @@ defmodule JSON.LD.Decoder do import JSON.LD.{NodeIdentifierMap, Utils} alias JSON.LD.{NodeIdentifierMap, Options} - alias RDF.{BlankNode, Dataset, Graph, IRI, Literal, NS, Statement} + alias RDF.{BlankNode, Dataset, Graph, IRI, Literal, NS, Statement, XSD} @impl RDF.Serialization.Decoder @spec decode(String.t(), keyword) :: {:ok, Dataset.t() | Graph.t()} | {:error, any} @@ -44,7 +44,7 @@ defmodule JSON.LD.Decoder do Graph.add( rdf_graph, node_to_rdf(subject), - RDF.NS.RDF.type(), + NS.RDF.type(), Enum.map(values, &node_to_rdf/1) ) @@ -135,9 +135,9 @@ defmodule JSON.LD.Decoder do is_boolean(value) -> value = value - |> RDF.XSD.Boolean.new() - |> RDF.XSD.Boolean.canonical() - |> RDF.XSD.Boolean.lexical() + |> XSD.Boolean.new() + |> XSD.Boolean.canonical() + |> XSD.Boolean.lexical() datatype = if is_nil(datatype), do: NS.XSD.boolean(), else: datatype {value, datatype} @@ -145,9 +145,9 @@ defmodule JSON.LD.Decoder do is_float(value) or (is_number(value) and datatype == to_string(NS.XSD.double())) -> value = value - |> RDF.XSD.Double.new() - |> RDF.XSD.Double.canonical() - |> RDF.XSD.Double.lexical() + |> XSD.Double.new() + |> XSD.Double.canonical() + |> XSD.Double.lexical() datatype = if is_nil(datatype), do: NS.XSD.double(), else: datatype {value, datatype} @@ -155,9 +155,9 @@ defmodule JSON.LD.Decoder do is_integer(value) or (is_number(value) and 
datatype == to_string(NS.XSD.integer())) -> value = value - |> RDF.XSD.Integer.new() - |> RDF.XSD.Integer.canonical() - |> RDF.XSD.Integer.lexical() + |> XSD.Integer.new() + |> XSD.Integer.canonical() + |> XSD.Integer.lexical() datatype = if is_nil(datatype), do: NS.XSD.integer(), else: datatype {value, datatype} @@ -173,9 +173,9 @@ defmodule JSON.LD.Decoder do end if language = item["@language"] do - RDF.Literal.new(value, language: language, canonicalize: true) + Literal.new(value, language: language, canonicalize: true) else - RDF.Literal.new(value, datatype: datatype, canonicalize: true) + Literal.new(value, datatype: datatype, canonicalize: true) end end @@ -188,29 +188,29 @@ defmodule JSON.LD.Decoder do {list_triples, first, last} object -> - with bnode = node_to_rdf(generate_blank_node_id(node_id_map)) do - if last do - { - list_triples ++ - [{last, RDF.NS.RDF.rest(), bnode}, {bnode, RDF.NS.RDF.first(), object}], - first, - bnode - } - else - { - list_triples ++ [{bnode, RDF.NS.RDF.first(), object}], - bnode, - bnode - } - end + bnode = node_to_rdf(generate_blank_node_id(node_id_map)) + + if last do + { + list_triples ++ + [{last, NS.RDF.rest(), bnode}, {bnode, NS.RDF.first(), object}], + first, + bnode + } + else + { + list_triples ++ [{bnode, NS.RDF.first(), object}], + bnode, + bnode + } end end end) if last do - {list_triples ++ [{last, RDF.NS.RDF.rest(), RDF.NS.RDF.nil()}], first} + {list_triples ++ [{last, NS.RDF.rest(), NS.RDF.nil()}], first} else - {[], RDF.NS.RDF.nil()} + {[], NS.RDF.nil()} end end diff --git a/lib/json/ld/encoder.ex b/lib/json/ld/encoder.ex index d06ca08..6afd9b5 100644 --- a/lib/json/ld/encoder.ex +++ b/lib/json/ld/encoder.ex @@ -44,12 +44,10 @@ defmodule JSON.LD.Encoder do @spec from_rdf(input, Options.t() | Enum.t()) :: {:ok, [map]} | {:error, any} def from_rdf(dataset, options \\ %Options{}) do - try do - {:ok, from_rdf!(dataset, options)} - rescue - exception -> - {:error, Exception.message(exception)} - end + {:ok, 
from_rdf!(dataset, options)} + rescue + exception -> + {:error, Exception.message(exception)} end @spec from_rdf!(input, Options.t() | Enum.t()) :: [map] diff --git a/lib/json/ld/expansion.ex b/lib/json/ld/expansion.ex index 6d9a5dd..d770f14 100644 --- a/lib/json/ld/expansion.ex +++ b/lib/json/ld/expansion.ex @@ -369,12 +369,12 @@ defmodule JSON.LD.Expansion do # 8) %{"@value" => value} -> # 8.1) - with keys = Map.keys(result) do - if Enum.any?(keys, &(&1 not in ~w[@value @language @type @index])) || - ("@language" in keys and "@type" in keys) do - raise JSON.LD.InvalidValueObjectError, - message: "value object with disallowed members" - end + keys = Map.keys(result) + + if Enum.any?(keys, &(&1 not in ~w[@value @language @type @index])) || + ("@language" in keys and "@type" in keys) do + raise JSON.LD.InvalidValueObjectError, + message: "value object with disallowed members" end cond do diff --git a/lib/json/ld/iri_expansion.ex b/lib/json/ld/iri_expansion.ex index 9d58e0d..1539d2f 100644 --- a/lib/json/ld/iri_expansion.ex +++ b/lib/json/ld/iri_expansion.ex @@ -36,7 +36,7 @@ defmodule JSON.LD.IRIExpansion do if local_context && local_context[value] && defined[value] != true do local_def = local_context[value] - JSON.LD.Context.create_term_definition( + Context.create_term_definition( active_context, local_context, value, @@ -65,7 +65,7 @@ defmodule JSON.LD.IRIExpansion do if local_context && local_context[prefix] && defined[prefix] != true do local_def = local_context[prefix] - JSON.LD.Context.create_term_definition( + Context.create_term_definition( active_context, local_context, prefix, @@ -105,7 +105,7 @@ defmodule JSON.LD.IRIExpansion do # same way that unreserved characters are treated in URI references, per section # 6.5 of [RFC3987]. 
doc_relative -> - {absolute_iri(value, JSON.LD.Context.base(active_context)), active_context, defined} + {absolute_iri(value, Context.base(active_context)), active_context, defined} # TODO: RDF.rb's implementation differs from the spec here, by checking if # base_iri is actually present in the previous clause and adding the following diff --git a/lib/json/ld/options.ex b/lib/json/ld/options.ex index 997eb7f..0f8b534 100644 --- a/lib/json/ld/options.ex +++ b/lib/json/ld/options.ex @@ -26,7 +26,7 @@ defmodule JSON.LD.Options do processing_mode: "json-ld-1.0" @spec new :: t - def new(), do: %__MODULE__{} + def new, do: %__MODULE__{} @spec new(t | Enum.t()) :: t def new(%__MODULE__{} = options), do: options diff --git a/lib/json/ld/utils.ex b/lib/json/ld/utils.ex index 9569f5b..115f480 100644 --- a/lib/json/ld/utils.ex +++ b/lib/json/ld/utils.ex @@ -26,12 +26,13 @@ defmodule JSON.LD.Utils do @spec compact_iri_parts(String.t(), boolean) :: [String.t()] | nil def compact_iri_parts(compact_iri, exclude_bnode \\ true) do - with [prefix, suffix] <- String.split(compact_iri, ":", parts: 2) do - if not String.starts_with?(suffix, "//") and - not (exclude_bnode and prefix == "_"), - do: [prefix, suffix] - else - _ -> nil + case String.split(compact_iri, ":", parts: 2) do + [prefix, suffix] -> + if not String.starts_with?(suffix, "//") and not (exclude_bnode and prefix == "_"), + do: [prefix, suffix] + + _ -> + nil end end