Merge branch 'master'
rustra committed Jun 20, 2020
2 parents bee796b + a986eb3 commit 9ad0e7c
Showing 15 changed files with 917 additions and 772 deletions.
337 changes: 207 additions & 130 deletions lib/json/ld/compaction.ex

Large diffs are not rendered by default.

242 changes: 135 additions & 107 deletions lib/json/ld/context.ex

Large diffs are not rendered by default.

8 changes: 8 additions & 0 deletions lib/json/ld/context/term_definition.ex
@@ -1,4 +1,12 @@
defmodule JSON.LD.Context.TermDefinition do
  @type t :: %__MODULE__{
          iri_mapping: String.t() | nil,
          reverse_property: boolean,
          type_mapping: boolean,
          language_mapping: boolean,
          container_mapping: nil
        }

  defstruct iri_mapping: nil,
            reverse_property: false,
            type_mapping: false,
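For context, a minimal sketch of how this struct is built with its defaults and selectively overridden; the field value below is illustrative, and only fields whose defaults are visible above are referenced:

term_def = %JSON.LD.Context.TermDefinition{iri_mapping: "http://xmlns.com/foaf/0.1/name"}
term_def.reverse_property  #=> false (default from the defstruct above)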
241 changes: 118 additions & 123 deletions lib/json/ld/decoder.ex
@@ -5,113 +5,113 @@ defmodule JSON.LD.Decoder do
  use RDF.Serialization.Decoder

  import JSON.LD.{NodeIdentifierMap, Utils}
  alias JSON.LD.NodeIdentifierMap
  alias RDF.{Dataset, Graph, NS}

  alias JSON.LD.{NodeIdentifierMap, Options}
  alias RDF.{BlankNode, Dataset, Graph, IRI, Literal, NS, Statement, XSD}

  @impl RDF.Serialization.Decoder
  @spec decode(String.t(), keyword) :: {:ok, Dataset.t() | Graph.t()} | {:error, any}
  def decode(content, opts \\ []) do
    with {:ok, json_ld_object} <- parse_json(content),
         dataset = to_rdf(json_ld_object, opts) do
         dataset <- to_rdf(json_ld_object, opts) do
      {:ok, dataset}
    end
  end

  def to_rdf(element, options \\ %JSON.LD.Options{}) do
    with options = JSON.LD.Options.new(options) do
      {:ok, node_id_map} = NodeIdentifierMap.start_link()

      try do
        element
        |> JSON.LD.expand(options)
        |> JSON.LD.node_map(node_id_map)
        |> Enum.sort_by(fn {graph_name, _} -> graph_name end)
        |> Enum.reduce(Dataset.new(), fn {graph_name, graph}, dataset ->
          unless relative_iri?(graph_name) do
            rdf_graph =
              graph
              |> Enum.sort_by(fn {subject, _} -> subject end)
              |> Enum.reduce(Graph.new(), fn {subject, node}, rdf_graph ->
                unless relative_iri?(subject) do
                  node
                  |> Enum.sort_by(fn {property, _} -> property end)
                  |> Enum.reduce(rdf_graph, fn {property, values}, rdf_graph ->
                    cond do
                      property == "@type" ->
                        Graph.add(
                          rdf_graph,
                          node_to_rdf(subject),
                          RDF.NS.RDF.type(),
                          Enum.map(values, &node_to_rdf/1)
                        )

                      JSON.LD.keyword?(property) ->
                        rdf_graph

                      not options.produce_generalized_rdf and
                          blank_node_id?(property) ->
                        rdf_graph

                      relative_iri?(property) ->
                        rdf_graph

                      true ->
                        Enum.reduce(values, rdf_graph, fn
                          %{"@list" => list}, rdf_graph ->
                            with {list_triples, first} <-
                                   list_to_rdf(list, node_id_map) do
                              rdf_graph
                              |> Graph.add({node_to_rdf(subject), node_to_rdf(property), first})
                              |> Graph.add(list_triples)
                            end

                          item, rdf_graph ->
                            case object_to_rdf(item) do
                              nil ->
                                rdf_graph

                              object ->
                                Graph.add(
                                  rdf_graph,
                                  {node_to_rdf(subject), node_to_rdf(property), object}
                                )
                            end
                        end)
                    end
                  end)
                else
                  rdf_graph
                end
              end)

            if Enum.empty?(rdf_graph) do
              dataset
            else
              Dataset.add(
                dataset,
                rdf_graph,
                if(graph_name == "@default", do: nil, else: graph_name)
              )
            end
          else
            dataset
          end
        end)
      after
        NodeIdentifierMap.stop(node_id_map)
      end
    end
  end

  @spec to_rdf(map, Options.t() | Enum.t()) :: Dataset.t() | Graph.t()
  def to_rdf(element, options \\ %Options{}) do
    {:ok, node_id_map} = NodeIdentifierMap.start_link()

    options = Options.new(options)

    try do
      element
      |> JSON.LD.expand(options)
      |> JSON.LD.node_map(node_id_map)
      |> Enum.sort_by(fn {graph_name, _} -> graph_name end)
      |> Enum.reduce(Dataset.new(), fn {graph_name, graph}, dataset ->
        unless relative_iri?(graph_name) do
          rdf_graph =
            graph
            |> Enum.sort_by(fn {subject, _} -> subject end)
            |> Enum.reduce(Graph.new(), fn {subject, node}, rdf_graph ->
              unless relative_iri?(subject) do
                node
                |> Enum.sort_by(fn {property, _} -> property end)
                |> Enum.reduce(rdf_graph, fn {property, values}, rdf_graph ->
                  cond do
                    property == "@type" ->
                      Graph.add(
                        rdf_graph,
                        node_to_rdf(subject),
                        NS.RDF.type(),
                        Enum.map(values, &node_to_rdf/1)
                      )

                    JSON.LD.keyword?(property) ->
                      rdf_graph

                    not options.produce_generalized_rdf and blank_node_id?(property) ->
                      rdf_graph

                    relative_iri?(property) ->
                      rdf_graph

                    true ->
                      Enum.reduce(values, rdf_graph, fn
                        %{"@list" => list}, rdf_graph ->
                          with {list_triples, first} <- list_to_rdf(list, node_id_map) do
                            rdf_graph
                            |> Graph.add({node_to_rdf(subject), node_to_rdf(property), first})
                            |> Graph.add(list_triples)
                          end

                        item, rdf_graph ->
                          case object_to_rdf(item) do
                            nil ->
                              rdf_graph

                            object ->
                              Graph.add(
                                rdf_graph,
                                {node_to_rdf(subject), node_to_rdf(property), object}
                              )
                          end
                      end)
                  end
                end)
              else
                rdf_graph
              end
            end)

          if Enum.empty?(rdf_graph) do
            dataset
          else
            graph_name = if graph_name == "@default", do: nil, else: graph_name
            Dataset.add(dataset, rdf_graph, graph_name)
          end
        else
          dataset
        end
      end)
    after
      NodeIdentifierMap.stop(node_id_map)
    end
  end

  @spec parse_json(String.t(), [Jason.decode_opt()]) ::
          {:ok, map} | {:error, Jason.DecodeError.t()}
  def parse_json(content, _opts \\ []) do
    Jason.decode(content)
  end

  @spec parse_json!(String.t(), [Jason.decode_opt()]) :: map
  def parse_json!(content, _opts \\ []) do
    Jason.decode!(content)
  end

  def node_to_rdf(nil), do: nil

  @spec node_to_rdf(String.t()) :: IRI.t() | BlankNode.t()
  def node_to_rdf(node) do
    if blank_node_id?(node) do
      node
@@ -122,10 +122,9 @@ defmodule JSON.LD.Decoder do
    end
  end

  @spec object_to_rdf(map) :: IRI.t() | BlankNode.t() | Literal.t() | nil
  defp object_to_rdf(%{"@id" => id}) do
    unless relative_iri?(id) do
      node_to_rdf(id)
    end
    unless relative_iri?(id), do: node_to_rdf(id)
  end

  defp object_to_rdf(%{"@value" => value} = item) do
@@ -136,40 +135,36 @@
      is_boolean(value) ->
        value =
          value
          |> RDF.XSD.Boolean.new()
          |> RDF.XSD.Boolean.canonical()
          |> RDF.XSD.Boolean.lexical()
          |> XSD.Boolean.new()
          |> XSD.Boolean.canonical()
          |> XSD.Boolean.lexical()

        datatype = if is_nil(datatype), do: NS.XSD.boolean(), else: datatype
        {value, datatype}

      is_float(value) or (is_number(value) and datatype == to_string(NS.XSD.double())) ->
        value =
          value
          |> RDF.XSD.Double.new()
          |> RDF.XSD.Double.canonical()
          |> RDF.XSD.Double.lexical()
          |> XSD.Double.new()
          |> XSD.Double.canonical()
          |> XSD.Double.lexical()

        datatype = if is_nil(datatype), do: NS.XSD.double(), else: datatype
        {value, datatype}

      is_integer(value) or (is_number(value) and datatype == to_string(NS.XSD.integer())) ->
        value =
          value
          |> RDF.XSD.Integer.new()
          |> RDF.XSD.Integer.canonical()
          |> RDF.XSD.Integer.lexical()
          |> XSD.Integer.new()
          |> XSD.Integer.canonical()
          |> XSD.Integer.lexical()

        datatype = if is_nil(datatype), do: NS.XSD.integer(), else: datatype
        {value, datatype}

      is_nil(datatype) ->
        datatype =
          if Map.has_key?(item, "@language") do
            RDF.langString()
          else
            NS.XSD.string()
          end
          if Map.has_key?(item, "@language"), do: RDF.langString(), else: NS.XSD.string()

        {value, datatype}

@@ -178,44 +173,44 @@
    end

    if language = item["@language"] do
      RDF.Literal.new(value, language: language, canonicalize: true)
      Literal.new(value, language: language, canonicalize: true)
    else
      RDF.Literal.new(value, datatype: datatype, canonicalize: true)
      Literal.new(value, datatype: datatype, canonicalize: true)
    end
  end

  @spec list_to_rdf([map], pid) :: {[Statement.t()], IRI.t() | BlankNode.t()}
  defp list_to_rdf(list, node_id_map) do
    {list_triples, first, last} =
      list
      |> Enum.reduce({[], nil, nil}, fn item, {list_triples, first, last} ->
      Enum.reduce(list, {[], nil, nil}, fn item, {list_triples, first, last} ->
        case object_to_rdf(item) do
          nil ->
            {list_triples, first, last}

          object ->
            with bnode = node_to_rdf(generate_blank_node_id(node_id_map)) do
              if last do
                {
                  list_triples ++
                    [{last, RDF.NS.RDF.rest(), bnode}, {bnode, RDF.NS.RDF.first(), object}],
                  first,
                  bnode
                }
              else
                {
                  list_triples ++ [{bnode, RDF.NS.RDF.first(), object}],
                  bnode,
                  bnode
                }
              end
            bnode = node_to_rdf(generate_blank_node_id(node_id_map))

            if last do
              {
                list_triples ++
                  [{last, NS.RDF.rest(), bnode}, {bnode, NS.RDF.first(), object}],
                first,
                bnode
              }
            else
              {
                list_triples ++ [{bnode, NS.RDF.first(), object}],
                bnode,
                bnode
              }
            end
        end
      end)

    if last do
      {list_triples ++ [{last, RDF.NS.RDF.rest(), RDF.NS.RDF.nil()}], first}
      {list_triples ++ [{last, NS.RDF.rest(), NS.RDF.nil()}], first}
    else
      {[], RDF.NS.RDF.nil()}
      {[], NS.RDF.nil()}
    end
  end
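
For context, a minimal usage sketch of the decoder entry points specced above; the input document and variable names are illustrative and not part of this commit:

# Hypothetical expanded-form JSON-LD input; any JSON-LD string works the same way.
json = ~s({"@id": "http://example.com/me", "http://xmlns.com/foaf/0.1/name": "Alice"})

# decode/2 parses the string and, per the @spec above, returns {:ok, dataset} or {:error, reason}.
{:ok, dataset} = JSON.LD.Decoder.decode(json)

# to_rdf/2 takes an already parsed map and returns the RDF.Dataset directly,
# using default JSON.LD.Options when none are given.
dataset = json |> Jason.decode!() |> JSON.LD.Decoder.to_rdf()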

3 changes: 2 additions & 1 deletion lib/json/ld/document_loader.ex
@@ -6,6 +6,7 @@ defmodule JSON.LD.DocumentLoader do
  """

  alias JSON.LD.DocumentLoader.RemoteDocument
  alias JSON.LD.Options

  @callback load(String.t(), JSON.LD.Options.t()) :: {:ok, RemoteDocument.t()} | {:error, any}
  @callback load(String.t(), Options.t()) :: {:ok, RemoteDocument.t()} | {:error, any}
end
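
The callback above is the extension point for remote-context loading. A minimal sketch of a custom loader; the module name and the fixed context map are illustrative and not part of the library:

defmodule MyApp.StaticDocumentLoader do
  @behaviour JSON.LD.DocumentLoader

  alias JSON.LD.DocumentLoader.RemoteDocument

  # Resolves requested context URLs from an in-memory map instead of going over HTTP.
  @impl true
  def load(url, _options) do
    case Map.fetch(known_contexts(), url) do
      {:ok, doc} -> {:ok, %RemoteDocument{document: doc, document_url: url}}
      :error -> {:error, "unknown context: #{url}"}
    end
  end

  defp known_contexts do
    %{
      "http://example.com/context.jsonld" => %{
        "@context" => %{"name" => "http://xmlns.com/foaf/0.1/name"}
      }
    }
  end
end

How such a loader gets selected is a concern of JSON.LD.Options, which this commit only references; the sketch only shows the callback contract.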
5 changes: 5 additions & 0 deletions lib/json/ld/document_loader/default.ex
@@ -1,15 +1,20 @@
defmodule JSON.LD.DocumentLoader.Default do
  @behaviour JSON.LD.DocumentLoader

  alias HTTPoison.{AsyncResponse, Response}

  alias JSON.LD.DocumentLoader.RemoteDocument
  alias JSON.LD.Options

  @spec load(String.t(), Options.t()) :: {:ok, RemoteDocument.t()} | {:error, any}
  def load(url, _options) do
    with {:ok, res} <- http_get(url),
         {:ok, data} <- Jason.decode(res.body) do
      {:ok, %RemoteDocument{document: data, document_url: res.request_url}}
    end
  end

  @spec http_get(String.t()) :: {:ok, Response.t() | AsyncResponse.t()} | {:error, any}
  defp http_get(url) do
    HTTPoison.get(url, [accept: "application/ld+json"], follow_redirect: true)
  rescue
