-
Notifications
You must be signed in to change notification settings - Fork 3
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.
Already on GitHub? Sign in to your account
🧠 Ingest OIO trigger and suppress relevant predictions #351
Changes from all commits
e5df291
d8a1ccf
18d5863
de45a32
c7cffab
81a7776
09c9bfb
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,43 @@ | ||
defmodule Concentrate.Filter.Suppress.Supervisor do
  @moduledoc """
  Supervisor for the extra servers needed for suppressing predictions based on signs config.

  * HTTP producer to fetch the signs config
  * Consumer / map of suppressed stops
  """
  # One day, in milliseconds: how long the producer may go without a
  # successful fetch before warning about stale content.
  @one_day 86_400_000

  require Logger

  @doc """
  Starts the suppression supervision tree.

  When `config[:url]` is present (truthy), starts a producer fetching the
  signs config from that URL plus a `StopPredictionStatus` consumer
  subscribed to it. Returns `:ignore` when no URL is configured so the
  parent supervisor simply skips this subtree.
  """
  def start_link(config) do
    if url = config[:url] do
      producer_child =
        {Concentrate.producer_for_url(url),
         {url,
          parser: Concentrate.Parser.SignsConfig,
          fetch_after: 1_000,
          content_warning_timeout: @one_day,
          name: :stop_prediction_status_producer}}

      consumer_child =
        {Concentrate.Filter.Suppress.StopPredictionStatus,
         subscribe_to: [:stop_prediction_status_producer]}

      # :rest_for_one — a producer crash also restarts the consumer that
      # depends on it, but not vice versa.
      Supervisor.start_link([producer_child, consumer_child], strategy: :rest_for_one)
    else
      :ignore
    end
  end

  @doc false
  def child_spec(opts) do
    %{
      id: __MODULE__,
      start: {__MODULE__, :start_link, [opts]},
      type: :supervisor
    }
  end
end
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,148 @@ | ||
defmodule Concentrate.Producer.S3 do
  @moduledoc """
  GenStage Producer for s3.

  Polls an S3 object (parsed from a `s3://bucket/path`-style URL) on a
  `:fetch_after` millisecond interval. Sends `If-None-Match` /
  `If-Modified-Since` on each request so an unchanged object (304)
  produces no events. Fetched bodies are run through the configured
  parser and the resulting events are broadcast to all subscribers.
  """

  use GenStage
  require Logger

  # Options passed through to GenStage.start_link/3 rather than init/1.
  @start_link_opts [:name]

  defmodule State do
    @moduledoc false
    defstruct [
      :bucket,
      :etag,
      :ex_aws,
      :fetch_after,
      :last_fetch,
      :last_modified,
      :next_fetch_ref,
      :object,
      :parser_opts,
      :parser,
      :url
    ]
  end

  alias __MODULE__.State

  def start_link({url, opts}) when is_binary(url) and is_list(opts) do
    start_link_opts = Keyword.take(opts, @start_link_opts)
    opts = Keyword.drop(opts, @start_link_opts)
    GenStage.start_link(__MODULE__, {url, opts}, start_link_opts)
  end

  @impl GenStage
  def init({url, opts}) do
    # :parser may be a module (parse/2 with no opts), a {module, opts}
    # tuple, or a bare 2-arity function.
    {parser, parser_opts} =
      case Keyword.fetch!(opts, :parser) do
        module when is_atom(module) ->
          {&module.parse/2, []}

        {module, opts} when is_atom(module) and is_list(opts) ->
          {&module.parse/2, opts}

        fun when is_function(fun, 2) ->
          {fun, []}
      end

    {bucket, object} = parse_s3_url(url)

    # NOTE(review): :fetch_after is effectively required — the arithmetic
    # below crashes init if it is missing. Consider Keyword.fetch!/2 to
    # make that assertive.
    fetch_after = Keyword.get(opts, :fetch_after)
    # :ex_aws is injectable for testing; defaults to the real ExAws client.
    ex_aws = Keyword.get(opts, :ex_aws, ExAws)

    {
      :producer,
      %State{
        bucket: bucket,
        ex_aws: ex_aws,
        fetch_after: fetch_after,
        # Backdate last_fetch so the first scheduled fetch fires immediately.
        last_fetch: monotonic_now() - fetch_after - 1,
        object: object,
        parser_opts: parser_opts,
        parser: parser,
        url: url
      },
      dispatcher: GenStage.BroadcastDispatcher
    }
  end

  # Splits "s3://bucket/path/to/object" into {"bucket", "/path/to/object"}.
  defp parse_s3_url(url) do
    %URI{host: bucket, path: object} = URI.parse(url)

    {bucket, object}
  end

  @impl GenStage
  def handle_demand(_, state) do
    # Demand itself is ignored (timer-driven producer); it only ensures a
    # fetch is scheduled.
    state = schedule_fetch(state)

    {:noreply, [], state}
  end

  @impl GenStage
  def handle_info(:fetch, state) do
    state = %{state | next_fetch_ref: nil, last_fetch: monotonic_now()}
    state = schedule_fetch(state)

    case state.ex_aws.request(
           ExAws.S3.get_object(state.bucket, state.object,
             if_none_match: state.etag,
             if_modified_since: state.last_modified
           )
         ) do
      {:ok, %{status_code: 200, body: body, headers: headers}} ->
        # Remember the caching headers so the next poll can be conditional.
        state = %{
          state
          | last_modified: get_header(headers, "last-modified"),
            etag: get_header(headers, "etag")
        }

        {:noreply, parse_response(body, state), state}

      {:ok, %{status_code: 304}} ->
        # Not modified since the last fetch: nothing to emit.
        {:noreply, [], state}

      {_, error} ->
        # Fixed: removed a stray "}" after the URL interpolation, and
        # Logger.warn/1 is deprecated in favor of Logger.warning/2.
        Logger.warning(
          "#{__MODULE__} error fetching s3 url=#{state.url} error=#{inspect(error, limit: :infinity)}"
        )

        {:noreply, [], state}
    end
  end

  defp schedule_fetch(%{next_fetch_ref: nil} = state) do
    # Schedule relative to the last fetch so the interval stays steady
    # even when fetches themselves take time; never negative.
    next_fetch_after = max(state.last_fetch + state.fetch_after - monotonic_now(), 0)
    next_fetch_ref = Process.send_after(self(), :fetch, next_fetch_after)

    %{state | next_fetch_ref: next_fetch_ref}
  end

  # coveralls-ignore-start
  defp schedule_fetch(%{next_fetch_ref: _} = state) do
    # already scheduled! this isn't always hit during testing (but it is
    # sometimes) so we skip the coverage check.
    state
  end

  # coveralls-ignore-stop

  defp monotonic_now do
    System.monotonic_time(:millisecond)
  end

  # Case-insensitive header lookup; `header` must already be lowercase.
  # NOTE(review): returns `false` (not `nil`) when headers are non-empty
  # and no key matches — callers appear to treat both as absent; confirm.
  defp get_header(headers, header) do
    Enum.find_value(headers, fn {key, value} ->
      String.downcase(key) == header and value
    end)
  end

  # Runs the body through the configured parser. An empty parse result is
  # replaced with [:empty] so subscribers still receive an event when the
  # new state should be empty (otherwise downstream state would never
  # clear).
  defp parse_response(body, state) do
    case state.parser.(body, state.parser_opts) do
      [] -> [:empty]
      events -> events
    end
  end
end
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
nit: I'm ambivalent personally, but there seems to be a preference here towards avoiding commented code. Maybe this should just be whacked, and then added when it's actually in use.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
this was actually already merged in and is being updated in this PR