From c1a5cf1158f9ce16597a3da46c367d061f957a94 Mon Sep 17 00:00:00 2001
From: Oleg
Date: Thu, 16 Mar 2023 09:21:07 +0100
Subject: [PATCH] Add possibility to generate spiders from YML definitions

1. Create an EEx template for a basic YML spider
2. Create a few helper functions to extract requests and items from a basic
   YML spider
3. Create an HTTP API to manage YML spiders
---
 .gitignore                    |   3 +-
 Dockerfile                    |   2 +-
 lib/crawly/api.ex             | 148 +++++++++++++++++++++++++++-
 lib/crawly/application.ex     |   6 ++
 lib/crawly/spiders_storage.ex | 101 +++++++++++++++++++
 lib/crawly/utils.ex           | 129 ++++++++++++++++++++++--
 mix.exs                       |   5 +-
 mix.lock                      |  10 ++
 priv/index.html.eex           |  16 ++-
 priv/list.html.eex            |  14 ++-
 priv/new.html.eex             |  24 +++++
 priv/yml_spider_template.eex  |  35 +++++++
 test/api_test.exs             | 178 +++++++++++++++++++++++++++++++++-
 test/spiders_storage_test.exs |  43 ++++++++
 test/utils_test.exs           | 168 +++++++++++++++++++++++++++++++-
 15 files changed, 861 insertions(+), 21 deletions(-)
 create mode 100644 lib/crawly/spiders_storage.ex
 create mode 100644 priv/new.html.eex
 create mode 100644 priv/yml_spider_template.eex
 create mode 100644 test/spiders_storage_test.exs

diff --git a/.gitignore b/.gitignore
index b9a86305..cf9541ae 100644
--- a/.gitignore
+++ b/.gitignore
@@ -11,4 +11,5 @@ erl_crash.dump
 .DS_Store
 .idea/
 crawly.iml
-.write_to_filetests
\ No newline at end of file
+.write_to_filetests
+dets_spiders_storage
diff --git a/Dockerfile b/Dockerfile
index 40785e52..36492f0b 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -58,4 +58,4 @@ RUN mkdir /app/spiders
 
 EXPOSE 4001
 
-ENTRYPOINT [ "/app/bin/crawly", "start_iex" ]
\ No newline at end of file
+ENTRYPOINT [ "/app/bin/crawly", "start" ]
diff --git a/lib/crawly/api.ex b/lib/crawly/api.ex
index fa6f7c57..b5c7baf8 100644
--- a/lib/crawly/api.ex
+++ b/lib/crawly/api.ex
@@ -3,8 +3,48 @@ defmodule Crawly.API.Router do
   Crawly HTTP API. Allows to schedule/stop/get_stats of all running spiders.
   """
+
+  require Logger
+
   use Plug.Router
 
+  @spider_validation_schema %{
+    "type" => "object",
+    "additionalProperties" => false,
+    "required" => ["name", "links_to_follow", "fields", "start_urls"],
+    "properties" => %{
+      "name" => %{"type" => "string"},
+      "base_url" => %{"type" => "string", "format" => "uri"},
+      "start_urls" => %{
+        "type" => "array",
+        "items" => %{"type" => "string", "format" => "uri"}
+      },
+      "links_to_follow" => %{
+        "type" => "array",
+        "items" => %{
+          "type" => "object",
+          "additionalProperties" => false,
+          "properties" => %{
+            "selector" => %{"type" => "string"},
+            "attribute" => %{"type" => "string"}
+          }
+        }
+      },
+      "fields" => %{
+        "type" => "array",
+        "items" => %{
+          "type" => "object",
+          "additionalProperties" => false,
+          "properties" => %{
+            "name" => %{"type" => "string"},
+            "selector" => %{"type" => "string"}
+          }
+        }
+      }
+    }
+  }
+
+  plug(Plug.Parsers, parsers: [:urlencoded, :multipart])
+
   plug(:match)
   plug(:dispatch)
@@ -41,11 +81,19 @@
           {num, scheduled}
         end
 
+      editable? =
+        case Crawly.SpidersStorage.get(spider_name) do
+          {:error, :not_found} -> false
+          {:ok, _value} -> true
+          _ -> false
+        end
+
       %{
         name: spider_name,
        scheduled: scheduled,
         scraped: scraped,
-        state: state
+        state: state,
+        editable?: editable?
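+        # editable? is true only for spiders kept in Crawly.SpidersStorage,
+        # i.e. spiders that were created from YML definitions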
       }
     end
   )
@@ -54,6 +102,87 @@
     send_resp(conn, 200, response)
   end
 
+  get "/new" do
+    spider_name = Map.get(conn.query_params, "spider_name", "")
+
+    spider_data =
+      case spider_name do
+        "" ->
+          {:ok, ""}
+
+        name ->
+          Crawly.SpidersStorage.get(name)
+      end
+
+    case spider_data do
+      {:error, :not_found} ->
+        send_resp(conn, 404, "Page not found")
+
+      {:ok, value} ->
+        response =
+          render_template("new.html.eex",
+            data: %{
+              "errors" => "",
+              "spider" => value,
+              "spider_name" => spider_name
+            }
+          )
+
+        send_resp(conn, 200, response)
+    end
+  end
+
+  post "/new" do
+    name_from_query_params = Map.get(conn.query_params, "spider_name", "")
+    spider_yml = Map.get(conn.body_params, "spider")
+
+    # Validate the incoming data with the JSON schema
+    validation_result =
+      case validate_new_spider_request(spider_yml) do
+        {:error, errors} ->
+          {:error, "#{inspect(errors)}"}
+
+        %{"name" => spider_name} = yml ->
+          # Check if the spider is already registered, but still allow
+          # editing existing spiders
+          case {is_spider_registered(spider_name),
+                spider_name == name_from_query_params} do
+            {true, false} ->
+              {:error,
+               "Spider with this name already exists. Try editing it instead of overriding"}
+
+            _ ->
+              {:ok, yml}
+          end
+      end
+
+    case validation_result do
+      {:ok, %{"name" => spider_name} = _parsed_yml} ->
+        :ok = Crawly.SpidersStorage.put(spider_name, spider_yml)
+
+        # Now we can finally load the spider
+        Crawly.Utils.load_yml_spider(spider_yml)
+
+        # Now we can redirect to the homepage
+        conn
+        |> put_resp_header("location", "/")
+        |> send_resp(conn.status || 302, "Redirect")
+
+      {:error, errors} ->
+        # Show the errors together with the submitted spider
+        data = %{"errors" => errors, "spider" => spider_yml}
+        response = render_template("new.html.eex", data: data)
+        send_resp(conn, 400, response)
+    end
+  end
+
+  delete "/spider/:spider_name" do
+    Crawly.SpidersStorage.delete(spider_name)
+
+    conn
+    |> put_resp_header("location", "/")
+    |> send_resp(conn.status || 302, "Redirect")
+  end
+
   get "/spiders" do
     msg =
       case Crawly.Engine.running_spiders() do
@@ -192,7 +321,7 @@
     loaded_spiders =
       case Crawly.load_spiders() do
         {:ok, spiders} -> spiders
-        {:error, _} -> []
+        {:error, :no_spiders_dir} -> []
       end
 
     send_resp(
@@ -206,6 +335,21 @@
     send_resp(conn, 404, "Oops! Page not found!")
   end
 
+  defp validate_new_spider_request(maybe_yml) do
+    with {:ok, yml} <- YamlElixir.read_from_string(maybe_yml),
+         :ok <- ExJsonSchema.Validator.validate(@spider_validation_schema, yml) do
+      yml
+    else
+      {:error, _err} = err -> err
+    end
+  end
+
+  defp is_spider_registered(name) do
+    module_name_str = "Elixir." <> name
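+    # A YML spider named "BooksSpider" (a hypothetical example) becomes the
+    # module Elixir.BooksSpider, which is what load_yml_spider/1 registers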
+    module_name = String.to_atom(module_name_str)
+    Enum.member?(Crawly.Utils.list_spiders(), module_name)
+  end
+
   defp render_template(template_name, assigns) do
     base_dir = :code.priv_dir(:crawly)
     template = Path.join(base_dir, template_name)
diff --git a/lib/crawly/application.ex b/lib/crawly/application.ex
index 66e4cdce..a2d32506 100644
--- a/lib/crawly/application.ex
+++ b/lib/crawly/application.ex
@@ -9,6 +9,12 @@ defmodule Crawly.Application do
     # Try to load spiders from the SPIDERS_DIR (for crawly standalone setup)
     Crawly.load_spiders()
 
+    # Open the DETS storage that keeps dynamically created spiders
+    Crawly.SpidersStorage.init()
+
+    # Load spiders stored in the SpidersStorage
+    Crawly.Utils.load_yml_spiders()
+
     import Supervisor.Spec, warn: false
 
     # List all child processes to be supervised
diff --git a/lib/crawly/spiders_storage.ex b/lib/crawly/spiders_storage.ex
new file mode 100644
index 00000000..983ff101
--- /dev/null
+++ b/lib/crawly/spiders_storage.ex
@@ -0,0 +1,101 @@
+defmodule Crawly.SpidersStorage do
+  @moduledoc """
+  Module for storing spider information using the `:dets` storage mechanism.
+
+  This module provides functionality for storing and retrieving
+  spider information in a term storage.
+
+  The `:dets` module is used to store the information in a disk-based table.
+
+  Functions:
+  - `init/0`: Initializes the storage to store spider information.
+  - `put/2`: Inserts the given spider name and YAML configuration into the storage.
+  - `get/1`: Retrieves the YAML configuration for the given spider name.
+  - `list/0`: Returns a list of all spider names stored in the storage.
+  - `delete/1`: Deletes the YAML configuration for the given spider name.
+  - `clear/0`: Deletes all spider information from the storage.
+  """
+  @dets_table :dets_spiders_storage
+
+  require Logger
+
+  @typep spider_name() :: binary() | module()
+  @typep spider_yml() :: binary()
+
+  @doc """
+  Initialize the storage used to keep spider information
+  """
+  @spec init :: {:error, any} | {:ok, any}
+  def init() do
+    Logger.info("Opening/checking dynamic spiders storage")
+    :dets.open_file(@dets_table, type: :set)
+  end
+
+  @doc """
+  Insert a given object into the term storage
+
+      iex(1)> Crawly.SpidersStorage.put(Test, "12345")
+      :ok
+  """
+  @spec put(spider_name(), spider_yml()) :: :ok | {:error, term()}
+  def put(spider_name, spider_yml) do
+    :dets.insert(@dets_table, {spider_name, spider_yml})
+  end
+
+  @doc """
+  Return the value for the given key from the term storage.
+
+      iex(1)> Crawly.SpidersStorage.get(Test)
+      {:ok, "12345"}
+
+      iex(1)> Crawly.SpidersStorage.get(T)
+      {:error, :not_found}
+  """
+  @spec get(spider_name()) ::
+          {:ok, spider_yml()} | {:error, :not_found} | {:error, term()}
+  def get(spider_name) do
+    case :dets.lookup(@dets_table, spider_name) do
+      {:error, _error} = err -> err
+      [] -> {:error, :not_found}
+      [{^spider_name, spider_yml}] -> {:ok, spider_yml}
+    end
+  end
+
+  @doc """
+  Return a list of all spider names kept in the storage.
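+  `:dets` set tables are unordered, so the names come back in no particular
+  order.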
+
+      iex(17)> Crawly.SpidersStorage.list()
+      [Test4, Test3, Test2, Test1, Test]
+  """
+  @spec list() :: [spider_name()] | {:error, term()}
+  def list() do
+    first = :dets.first(@dets_table)
+    list(first, [])
+  end
+
+  @doc """
+  Delete a given object
+
+      iex(17)> Crawly.SpidersStorage.delete(Test1)
+      :ok
+  """
+  @spec delete(spider_name()) :: :ok | {:error, term()}
+  def delete(spider_name) do
+    :dets.delete(@dets_table, spider_name)
+  end
+
+  @doc """
+  Delete all objects from the storage
+
+      iex(17)> Crawly.SpidersStorage.clear()
+      :ok
+  """
+  @spec clear() :: :ok | {:error, term()}
+  def clear(), do: :dets.delete_all_objects(@dets_table)
+
+  defp list(:"$end_of_table", acc), do: acc
+
+  defp list(current_element, acc) do
+    next = :dets.next(@dets_table, current_element)
+    list(next, [current_element | acc])
+  end
+end
diff --git a/lib/crawly/utils.ex b/lib/crawly/utils.ex
index 20c7c62f..b55afd6f 100644
--- a/lib/crawly/utils.ex
+++ b/lib/crawly/utils.ex
@@ -2,6 +2,9 @@ defmodule Crawly.Utils do
   @moduledoc ~S"""
   Utility functions for Crawly
   """
+
+  @spider_storage_key :crawly_spiders
+
   require Logger
 
   @doc """
@@ -153,9 +156,7 @@ defmodule Crawly.Utils do
   """
   @spec list_spiders() :: [module()]
   def list_spiders() do
-    modules =
-      get_modules_from_applications() ++
-        :persistent_term.get(:crawly_spiders, [])
+    modules = get_modules_from_applications() ++ registered_spiders()
 
     Enum.reduce(
       modules,
@@ -206,9 +207,6 @@ defmodule Crawly.Utils do
       dir ->
         {:ok, files} = File.ls(dir)
 
-        # Remove all previous spiders data from the persistent_term storage
-        :persistent_term.put(:crawly_spiders, [])
-
         Enum.each(
           files,
           fn file ->
@@ -216,13 +214,126 @@ defmodule Crawly.Utils do
             [{module, _binary}] = Code.compile_file(path)
 
             # Use persistent term to store information about loaded spiders
-            spiders = :persistent_term.get(:crawly_spiders, [])
-            :persistent_term.put(:crawly_spiders, [module | spiders])
+            register_spider(module)
           end
         )
+
+        {:ok, registered_spiders()}
     end
+  end
+
+  def load_yml_spiders() do
+    Enum.each(
+      Crawly.SpidersStorage.list(),
+      fn spider ->
+        {:ok, spider_yml} = Crawly.SpidersStorage.get(spider)
+        Crawly.Utils.load_yml_spider(spider_yml)
+      end
+    )
+  end
+
+  @doc """
+  Register a given spider (so it's visible in the spiders list)
+  """
+  @spec register_spider(module()) :: :ok
+  def register_spider(name) do
+    known_spiders = :persistent_term.get(@spider_storage_key, [])
+    :persistent_term.put(@spider_storage_key, Enum.uniq([name | known_spiders]))
+  end
+
+  @doc """
+  Return a list of registered spiders
+  """
+  @spec registered_spiders() :: [module()]
+  def registered_spiders(), do: :persistent_term.get(@spider_storage_key, [])
+
+  @doc """
+  Remove all previously registered dynamic spiders
+  """
+  @spec clear_registered_spiders() :: :ok
+  def clear_registered_spiders() do
+    :persistent_term.put(@spider_storage_key, [])
+  end
+
+  @doc """
+  A helper function that is used by YML spiders
+
+  Extract requests from a given document using a given set of selectors,
+  and build absolute URLs from the extracted links.
+
+  Selectors are provided as a JSON-encoded list of maps that contain
+  selector and attribute keys. E.g.
+
+      selectors = [%{"selector" => "a", "attribute" => "href"}]
+
+  The base URL is required to build absolute URLs from the extracted links
+  """
+  @spec extract_requests(document, selectors, base_url) :: requests
+        when document: [Floki.html_node()],
+             selectors: binary(),
+             base_url: binary(),
+             requests: [Crawly.Request.t()]
+  def extract_requests(document, selectors, base_url) do
+    selectors = Poison.decode!(selectors)
+
+    Enum.reduce(
+      selectors,
+      [],
+      fn %{"selector" => selector, "attribute" => attr}, acc ->
+        links = document |> Floki.find(selector) |> Floki.attribute(attr)
+        urls = Crawly.Utils.build_absolute_urls(links, base_url)
+        requests = Crawly.Utils.requests_from_urls(urls)
+        requests ++ acc
+      end
+    )
+  end
+
+  @doc """
+  A helper function that is used by YML spiders
+
+  Extract items (actually one item) from a given document using a
+  given set of selectors.
+
+  Selectors are provided as a JSON-encoded list of maps that contain
+  name and selector binary keys. For example:
+
+      field_selectors = [%{"selector" => "h1", "name" => "title"}]
+  """
+  @spec extract_items(document, field_selectors) :: items
+        when document: [Floki.html_node()],
+             field_selectors: binary(),
+             items: [map()]
+  def extract_items(document, field_selectors) do
+    fields = Poison.decode!(field_selectors)
+
+    item =
+      Enum.reduce(
+        fields,
+        %{},
+        fn %{"name" => name, "selector" => selector}, acc ->
+          field_value = document |> Floki.find(selector) |> Floki.text()
+          Map.put(acc, name, field_value)
+        end
+      )
+
+    [item]
+  end
+
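+  @doc """
+  Compile and register a spider module from a YML definition.
+
+  A minimal sketch of a definition this function accepts (the field names
+  follow the validation schema in `Crawly.API.Router`; the spider name and
+  URLs below are made-up examples):
+
+      name: BooksSpider
+      base_url: "https://example.com"
+      start_urls:
+        - "https://example.com/index.html"
+      links_to_follow:
+        - selector: "a"
+          attribute: "href"
+      fields:
+        - name: title
+          selector: "h1"
+  """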
+  @spec load_yml_spider(binary()) :: {term(), Code.binding()}
+  def load_yml_spider(yml_binary) do
+    {:ok, yml_map} = YamlElixir.read_from_string(yml_binary)
+
+    path =
+      Path.join(
+        :code.priv_dir(:crawly),
+        "./yml_spider_template.eex"
+      )
+
+    # The template renders a full spider module; evaluating the rendered
+    # source defines that module at runtime
+    template = EEx.eval_file(path, spider: yml_map)
+
+    name = String.to_atom("Elixir." <> Map.get(yml_map, "name"))
+    register_spider(name)
 
-    {:ok, :persistent_term.get(:crawly_spiders, [])}
+    Code.eval_string(template)
   end
 
   ##############################################################################
diff --git a/mix.exs b/mix.exs
index 25a5b883..a039d386 100644
--- a/mix.exs
+++ b/mix.exs
@@ -56,9 +56,12 @@ defmodule Crawly.Mixfile do
       {:earmark, "~> 1.2", only: :dev},
       {:meck, "~> 0.9", only: :test},
       {:excoveralls, "~> 0.14.6", only: :test},
+      {:yaml_elixir, "~> 2.9"},
+      {:floki, "~> 0.33.0"},
+      {:ex_json_schema, "~> 0.9.2"},
 
       # Add floki only for crawly standalone release
-      {:floki, "~> 0.33.0", only: :standalone_crawly},
+      # {:floki, "~> 0.33.0", only: [:test, :standalone_crawly]},
       {:logger_file_backend, "~> 0.0.11", only: [:test, :dev]}
     ]
   end
diff --git a/mix.lock b/mix.lock
index 9503e2c2..b7810fff 100644
--- a/mix.lock
+++ b/mix.lock
@@ -1,14 +1,17 @@
 %{
   "bunt": {:hex, :bunt, "0.2.0", "951c6e801e8b1d2cbe58ebbd3e616a869061ddadcc4863d0a2182541acae9a38", [:mix], [], "hexpm", "7af5c7e09fe1d40f76c8e4f9dd2be7cebd83909f31fee7cd0e9eadc567da8353"},
+  "castore": {:hex, :castore, "1.0.1", "240b9edb4e9e94f8f56ab39d8d2d0a57f49e46c56aced8f873892df8ff64ff5a", [:mix], [], "hexpm", "b4951de93c224d44fac71614beabd88b71932d0b1dea80d2f80fb9044e01bbb3"},
   "certifi": {:hex, :certifi, "2.9.0", "6f2a475689dd47f19fb74334859d460a2dc4e3252a3324bd2111b8f0429e7e21", [:rebar3], [], "hexpm", "266da46bdb06d6c6d35fde799bcb28d36d985d424ad7c08b5bb48f5b5cdd4641"},
   "cowboy": {:hex, :cowboy, "2.9.0", "865dd8b6607e14cf03282e10e934023a1bd8be6f6bacf921a7e2a96d800cd452", [:make, :rebar3], [{:cowlib, "2.11.0", [hex: :cowlib, repo: "hexpm", optional: false]}, {:ranch, "1.8.0", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm", "2c729f934b4e1aa149aff882f57c6372c15399a20d54f65c8d67bef583021bde"},
   "cowboy_telemetry": {:hex, :cowboy_telemetry, "0.3.1", "ebd1a1d7aff97f27c66654e78ece187abdc646992714164380d8a041eda16754", [:rebar3], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "3a6efd3366130eab84ca372cbd4a7d3c3a97bdfcfb4911233b035d117063f0af"},
   "cowlib": {:hex, :cowlib, "2.11.0", "0b9ff9c346629256c42ebe1eeb769a83c6cb771a6ee5960bd110ab0b9b872063", [:make, :rebar3], [], "hexpm", "2b3e9da0b21c4565751a6d4901c20d1b4cc25cbb7fd50d91d2ab6dd287bc86a9"},
   "credo": {:hex, :credo, "1.5.6", "e04cc0fdc236fefbb578e0c04bd01a471081616e741d386909e527ac146016c6", [:mix], [{:bunt, "~> 0.2.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2.8", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "4b52a3e558bd64e30de62a648518a5ea2b6e3e5d2b164ef5296244753fc7eb17"},
+  "decimal": {:hex, :decimal, "2.0.0", "a78296e617b0f5dd4c6caf57c714431347912ffb1d0842e998e9792b5642d697", [:mix], [], "hexpm", "34666e9c55dea81013e77d9d87370fe6cb6291d1ef32f46a1600230b1d44f577"},
   "earmark": {:hex, :earmark, "1.4.15", "2c7f924bf495ec1f65bd144b355d0949a05a254d0ec561740308a54946a67888", [:mix], [{:earmark_parser, ">= 1.4.13", [hex: :earmark_parser, repo: "hexpm", optional: false]}], "hexpm", "3b1209b85bc9f3586f370f7c363f6533788fb4e51db23aa79565875e7f9999ee"},
   "earmark_parser": {:hex, :earmark_parser, "1.4.16", "607709303e1d4e3e02f1444df0c821529af1c03b8578dfc81bb9cf64553d02b9", [:mix], [], "hexpm", "69fcf696168f5a274dd012e3e305027010658b2d1630cef68421d6baaeaccead"},
"dce506597acb7e6b0daeaff52ff6a9043f5919a4c3315abb4143f0b00378c097", [:mix], [], "hexpm", "f7eba2ea6c3555cea09706492716b0d87397b88946e6380898c2889d68585752"}, "ex_doc": {:hex, :ex_doc, "0.25.3", "3edf6a0d70a39d2eafde030b8895501b1c93692effcbd21347296c18e47618ce", [:mix], [{:earmark_parser, "~> 1.4.0", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_elixir, "~> 0.14", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1", [hex: :makeup_erlang, repo: "hexpm", optional: false]}], "hexpm", "9ebebc2169ec732a38e9e779fd0418c9189b3ca93f4a676c961be6c1527913f5"}, + "ex_json_schema": {:hex, :ex_json_schema, "0.9.2", "c9a42e04e70cd70eb11a8903a22e8ec344df16edef4cb8e6ec84ed0caffc9f0f", [:mix], [{:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}], "hexpm", "4854329cb352b6c01c4c4b8dbfb3be14dc5bea19ea13e0eafade4ff22ba55224"}, "excoveralls": {:hex, :excoveralls, "0.14.6", "610e921e25b180a8538229ef547957f7e04bd3d3e9a55c7c5b7d24354abbba70", [:mix], [{:hackney, "~> 1.16", [hex: :hackney, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "0eceddaa9785cfcefbf3cd37812705f9d8ad34a758e513bb975b081dce4eb11e"}, "file_system": {:hex, :file_system, "0.2.10", "fb082005a9cd1711c05b5248710f8826b02d7d1784e7c3451f9c1231d4fc162d", [:mix], [], "hexpm", "41195edbfb562a593726eda3b3e8b103a309b733ad25f3d642ba49696bf715dc"}, "floki": {:hex, :floki, "0.33.1", "f20f1eb471e726342b45ccb68edb9486729e7df94da403936ea94a794f072781", [:mix], [{:html_entities, "~> 0.5.0", [hex: :html_entities, repo: "hexpm", optional: false]}], "hexpm", "461035fd125f13fdf30f243c85a0b1e50afbec876cbf1ceefe6fddd2e6d712c6"}, @@ -28,12 +31,19 @@ "mimerl": {:hex, :mimerl, "1.2.0", "67e2d3f571088d5cfd3e550c383094b47159f3eee8ffa08e64106cdf5e981be3", [:rebar3], [], "hexpm", "f278585650aa581986264638ebf698f8bb19df297f66ad91b18910dfc6e19323"}, "nimble_parsec": {:hex, :nimble_parsec, "1.1.0", "3a6fca1550363552e54c216debb6a9e95bd8d32348938e13de5eda962c0d7f89", [:mix], [], "hexpm", "08eb32d66b706e913ff748f11694b17981c0b04a33ef470e33e11b3d3ac8f54b"}, "parse_trans": {:hex, :parse_trans, "3.3.1", "16328ab840cc09919bd10dab29e431da3af9e9e7e7e6f0089dd5a2d2820011d8", [:rebar3], [], "hexpm", "07cd9577885f56362d414e8c4c4e6bdf10d43a8767abb92d24cbe8b24c54888b"}, + "phoenix": {:hex, :phoenix, "1.6.16", "e5bdd18c7a06da5852a25c7befb72246de4ddc289182285f8685a40b7b5f5451", [:mix], [{:castore, ">= 0.0.0", [hex: :castore, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix_pubsub, "~> 2.0", [hex: :phoenix_pubsub, repo: "hexpm", optional: false]}, {:phoenix_view, "~> 1.0 or ~> 2.0", [hex: :phoenix_view, repo: "hexpm", optional: false]}, {:plug, "~> 1.10", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 2.2", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:plug_crypto, "~> 1.2", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "e15989ff34f670a96b95ef6d1d25bad0d9c50df5df40b671d8f4a669e050ac39"}, + "phoenix_pubsub": {:hex, :phoenix_pubsub, "2.1.1", "ba04e489ef03763bf28a17eb2eaddc2c20c6d217e2150a61e3298b0f4c2012b5", [:mix], [], "hexpm", "81367c6d1eea5878ad726be80808eb5a787a23dee699f96e72b1109c57cdd8d9"}, + "phoenix_template": {:hex, :phoenix_template, "1.0.1", "85f79e3ad1b0180abb43f9725973e3b8c2c3354a87245f91431eec60553ed3ef", [:mix], [{:phoenix_html, "~> 2.14.2 or ~> 3.0", [hex: 
+  "phoenix_template": {:hex, :phoenix_template, "1.0.1", "85f79e3ad1b0180abb43f9725973e3b8c2c3354a87245f91431eec60553ed3ef", [:mix], [{:phoenix_html, "~> 2.14.2 or ~> 3.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}], "hexpm", "157dc078f6226334c91cb32c1865bf3911686f8bcd6bcff86736f6253e6993ee"},
+  "phoenix_view": {:hex, :phoenix_view, "2.0.2", "6bd4d2fd595ef80d33b439ede6a19326b78f0f1d8d62b9a318e3d9c1af351098", [:mix], [{:phoenix_html, "~> 2.14.2 or ~> 3.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}], "hexpm", "a929e7230ea5c7ee0e149ffcf44ce7cf7f4b6d2bfe1752dd7c084cdff152d36f"},
   "plug": {:hex, :plug, "1.12.1", "645678c800601d8d9f27ad1aebba1fdb9ce5b2623ddb961a074da0b96c35187d", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.1.1 or ~> 1.2", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "d57e799a777bc20494b784966dc5fbda91eb4a09f571f76545b72a634ce0d30b"},
   "plug_cowboy": {:hex, :plug_cowboy, "2.5.1", "7cc96ff645158a94cf3ec9744464414f02287f832d6847079adfe0b58761cbd0", [:mix], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, {:cowboy_telemetry, "~> 0.3", [hex: :cowboy_telemetry, repo: "hexpm", optional: false]}, {:plug, "~> 1.7", [hex: :plug, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "107d0a5865fa92bcb48e631cc0729ae9ccfa0a9f9a1bd8f01acb513abf1c2d64"},
   "plug_crypto": {:hex, :plug_crypto, "1.2.2", "05654514ac717ff3a1843204b424477d9e60c143406aa94daf2274fdd280794d", [:mix], [], "hexpm", "87631c7ad914a5a445f0a3809f99b079113ae4ed4b867348dd9eec288cecb6db"},
   "poison": {:hex, :poison, "3.1.0", "d9eb636610e096f86f25d9a46f35a9facac35609a7591b3be3326e99a0484665", [:mix], [], "hexpm", "fec8660eb7733ee4117b85f55799fd3833eb769a6df71ccf8903e8dc5447cfce"},
   "ranch": {:hex, :ranch, "1.8.0", "8c7a100a139fd57f17327b6413e4167ac559fbc04ca7448e9be9057311597a1d", [:make, :rebar3], [], "hexpm", "49fbcfd3682fab1f5d109351b61257676da1a2fdbe295904176d5e521a2ddfe5"},
+  "redirect": {:hex, :redirect, "0.4.0", "98b46053504ee517bc3ad2fd04c064b64b48d339e1e18266355b30c4f8bb52b0", [:mix], [{:phoenix, "~> 1.4", [hex: :phoenix, repo: "hexpm", optional: false]}, {:plug, "~> 1.8.3 or ~> 1.9", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "dfa29a8ecbad066ed0b73b34611cf24c78101719737f37bdf750f39197d67b97"},
   "ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.6", "cf344f5692c82d2cd7554f5ec8fd961548d4fd09e7d22f5b62482e5aeaebd4b0", [:make, :mix, :rebar3], [], "hexpm", "bdb0d2471f453c88ff3908e7686f86f9be327d065cc1ec16fa4540197ea04680"},
   "telemetry": {:hex, :telemetry, "0.4.3", "a06428a514bdbc63293cd9a6263aad00ddeb66f608163bdec7c8995784080818", [:rebar3], [], "hexpm", "eb72b8365ffda5bed68a620d1da88525e326cb82a75ee61354fc24b844768041"},
   "unicode_util_compat": {:hex, :unicode_util_compat, "0.7.0", "bc84380c9ab48177092f43ac89e4dfa2c6d62b40b8bd132b1059ecc7232f9a78", [:rebar3], [], "hexpm", "25eee6d67df61960cf6a794239566599b09e17e668d3700247bc498638152521"},
+  "yamerl": {:hex, :yamerl, "0.10.0", "4ff81fee2f1f6a46f1700c0d880b24d193ddb74bd14ef42cb0bcf46e81ef2f8e", [:rebar3], [], "hexpm", "346adb2963f1051dc837a2364e4acf6eb7d80097c0f53cbdc3046ec8ec4b4e6e"},
+  "yaml_elixir": {:hex, :yaml_elixir, "2.9.0", "9a256da867b37b8d2c1ffd5d9de373a4fda77a32a45b452f1708508ba7bbcb53", [:mix], [{:yamerl, "~> 0.10", [hex: :yamerl, repo: "hexpm", optional: false]}], "hexpm", "0cb0e7d4c56f5e99a6253ed1a670ed0e39c13fc45a6da054033928607ac08dfc"},
 }
diff --git a/priv/index.html.eex b/priv/index.html.eex
index a9f0b7e8..aeba6a6e 100644
--- a/priv/index.html.eex
+++ b/priv/index.html.eex
@@ -2,6 +2,19 @@