1 change: 1 addition & 0 deletions config/config.exs
@@ -40,6 +40,7 @@ config :algora, Oban,
queues: [
event_consumers: 1,
comment_consumers: 1,
+ search_consumers: 1,
github_og_image: 5,
notify_bounty: 1,
notify_tip_intent: 1,
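The new `search_consumers` queue (concurrency 1) presumably backs the `Github.Poller.SearchConsumer` jobs enqueued by the search poller added later in this PR. A minimal sketch of how such a worker would bind to the queue; the module name comes from the poller below, and the `perform/1` body is hypothetical:

# Sketch only, not part of the diff.
defmodule Algora.Github.Poller.SearchConsumer do
  use Oban.Worker, queue: :search_consumers

  @impl Oban.Worker
  def perform(%Oban.Job{args: %{"comment" => _comment, "command" => _command, "ticket_ref" => _ticket_ref}}) do
    # Decode the base64-encoded terms (inverse of Util.term_to_base64) and act on
    # the command; the real implementation lives elsewhere in this PR.
    :ok
  end
end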
1 change: 1 addition & 0 deletions config/dev.exs
@@ -9,6 +9,7 @@ config :algora, :github,
private_key: System.get_env("GITHUB_PRIVATE_KEY"),
pat: System.get_env("GITHUB_PAT"),
pat_enabled: System.get_env("GITHUB_PAT_ENABLED", "false") == "true",
+ bot_handle: System.get_env("GITHUB_BOT_HANDLE"),
oauth_state_ttl: String.to_integer(System.get_env("GITHUB_OAUTH_STATE_TTL", "600")),
oauth_state_salt: System.get_env("GITHUB_OAUTH_STATE_SALT", "github-oauth-state")

5 changes: 3 additions & 2 deletions config/runtime.exs
@@ -28,8 +28,9 @@ if config_env() == :prod do
app_id: System.fetch_env!("GITHUB_APP_ID"),
webhook_secret: System.fetch_env!("GITHUB_WEBHOOK_SECRET"),
private_key: System.fetch_env!("GITHUB_PRIVATE_KEY"),
- pat: System.fetch_env!("GITHUB_PAT"),
- pat_enabled: System.get_env("GITHUB_PAT_ENABLED", "true") == "true",
+ pat: System.get_env("GITHUB_PAT"),
+ pat_enabled: System.get_env("GITHUB_PAT_ENABLED", "false") == "true",
+ bot_handle: System.get_env("GITHUB_BOT_HANDLE"),
oauth_state_ttl: String.to_integer(System.get_env("GITHUB_OAUTH_STATE_TTL", "600")),
oauth_state_salt: System.fetch_env!("GITHUB_OAUTH_STATE_SALT")

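In prod, `GITHUB_PAT` is now read with `get_env` instead of `fetch_env!` and `pat_enabled` defaults to false, so the PAT is optional at boot and PAT-backed calls need a guard. A minimal sketch of that guard, mirroring the `Github.pat_enabled()` check in NotifyBounty below; the repo coordinates and comment body are placeholders:

# Sketch only: skip PAT-backed calls unless GITHUB_PAT_ENABLED=true.
if Algora.Github.pat_enabled() do
  Algora.Github.create_issue_comment(
    Algora.Github.pat(),
    "acme-incorporated",
    "webapp",
    101,
    "Example comment body"
  )
end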
2 changes: 1 addition & 1 deletion lib/algora/application.ex
@@ -21,9 +21,9 @@ defmodule Algora.Application do
{Finch, name: Algora.Finch},
Algora.Github.TokenPool,
Algora.Github.Poller.RootSupervisor,
- Algora.Stargazer,
# Start to serve requests, typically the last entry
AlgoraWeb.Endpoint,
+ Algora.Stargazer,
TwMerge.Cache
]

45 changes: 27 additions & 18 deletions lib/algora/bounties/jobs/notify_bounty.ex
@@ -21,34 +21,39 @@ defmodule Algora.Bounties.Jobs.NotifyBounty do
"command_source" => command_source
}
}) do
+ ticket_ref = %{
+   owner: ticket_ref["owner"],
+   repo: ticket_ref["repo"],
+   number: ticket_ref["number"]
+ }
+
body = """
💎 **#{owner_login}** is offering a **#{amount}** bounty for this issue

- 👉 Got a pull request resolving this? Claim the bounty by commenting `/claim ##{ticket_ref["number"]}` in your PR and joining #{AlgoraWeb.Endpoint.host()}
+ 👉 Got a pull request resolving this? Claim the bounty by commenting `/claim ##{ticket_ref.number}` in your PR and joining #{AlgoraWeb.Endpoint.host()}
"""

if Github.pat_enabled() do
- with {:ok, comment} <-
- Github.create_issue_comment(
+ with {:ok, ticket} <-
+ Workspace.ensure_ticket(
Github.pat(),
- ticket_ref["owner"],
- ticket_ref["repo"],
- ticket_ref["number"],
- body
- ),
- {:ok, ticket} <-
- Workspace.ensure_ticket(Github.pat(), ticket_ref["owner"], ticket_ref["repo"], ticket_ref["number"]) do
- # TODO: update existing command response if it exists
- Workspace.create_command_response(%{
- comment: comment,
- command_source: command_source,
+ ticket_ref.owner,
+ ticket_ref.repo,
+ ticket_ref.number
+ ) do
+ Workspace.ensure_command_response(%{
+ token: Github.pat(),
+ ticket_ref: ticket_ref,
command_id: command_id,
- ticket_id: ticket.id
+ command_type: :bounty,
+ command_source: command_source,
+ ticket: ticket,
+ body: body
})
end
else
Logger.info("""
- Github.create_issue_comment(Github.pat(), "#{ticket_ref["owner"]}", "#{ticket_ref["repo"]}", #{ticket_ref["number"]},
+ Github.create_issue_comment(Github.pat(), "#{ticket_ref.owner}", "#{ticket_ref.repo}", #{ticket_ref.number},
\"\"\"
#{body}
\"\"\")
@@ -73,9 +78,13 @@ defmodule Algora.Bounties.Jobs.NotifyBounty do
}

with {:ok, token} <- Github.get_installation_token(installation_id),
- {:ok, ticket} <- Workspace.ensure_ticket(token, ticket_ref.owner, ticket_ref.repo, ticket_ref.number),
+ {:ok, ticket} <-
+ Workspace.ensure_ticket(token, ticket_ref.owner, ticket_ref.repo, ticket_ref.number),
bounties when bounties != [] <- Bounties.list_bounties(ticket_id: ticket.id),
- {:ok, _} <- Github.add_labels(token, ticket_ref.owner, ticket_ref.repo, ticket_ref.number, ["💎 Bounty"]) do
+ {:ok, _} <-
+ Github.add_labels(token, ticket_ref.owner, ticket_ref.repo, ticket_ref.number, [
+ "💎 Bounty"
+ ]) do
attempts = Bounties.list_attempts_for_ticket(ticket.id)
claims = Bounties.list_claims([ticket.id])

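Oban serializes job args to JSON, which is why `perform/1` receives `ticket_ref` with string keys and now normalizes it into an atom-keyed map up front. A hedged sketch of how such a job might be enqueued; the real call site is not part of this diff and all values are placeholders:

# Illustrative enqueue only; the argument shape is inferred from the perform/1 pattern above.
%{
  owner_login: "acme-incorporated",
  amount: "$500",
  ticket_ref: %{owner: "acme-incorporated", repo: "webapp", number: 101},
  command_id: "cmd_123",
  command_source: :comment
}
|> Algora.Bounties.Jobs.NotifyBounty.new()
|> Oban.insert()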
7 changes: 5 additions & 2 deletions lib/algora/integrations/github/client.ex
@@ -205,7 +205,8 @@ defmodule Algora.Github.Client do

@impl true
def list_installation_repos(access_token) do
- with {:ok, %{"repositories" => repos}} <- fetch(access_token, "/installation/repositories", "GET") do
+ with {:ok, %{"repositories" => repos}} <-
+ fetch(access_token, "/installation/repositories", "GET") do
{:ok, repos}
end
end
@@ -242,6 +243,8 @@

@impl true
def add_labels(access_token, owner, repo, number, labels) do
- fetch(access_token, "/repos/#{owner}/#{repo}/issues/#{number}/labels", "POST", %{labels: labels})
+ fetch(access_token, "/repos/#{owner}/#{repo}/issues/#{number}/labels", "POST", %{
+ labels: labels
+ })
end
end
1 change: 1 addition & 0 deletions lib/algora/integrations/github/github.ex
@@ -14,6 +14,7 @@ defmodule Algora.Github do
def private_key, do: [:github, :private_key] |> Algora.config() |> String.replace("\\n", "\n")
def pat, do: Algora.config([:github, :pat])
def pat_enabled, do: Algora.config([:github, :pat_enabled])
+ def bot_handle, do: Algora.config([:github, :bot_handle])

def install_url_base, do: "https://github.com/apps/#{app_handle()}/installations"
def install_url_new, do: "#{install_url_base()}/new"
246 changes: 246 additions & 0 deletions lib/algora/integrations/github/poller/search.ex
@@ -0,0 +1,246 @@
defmodule Algora.Github.Poller.Search do
@moduledoc false
use GenServer

import Ecto.Query, warn: false

alias Algora.Admin
alias Algora.Github
alias Algora.Github.Command
alias Algora.Parser
alias Algora.Repo
alias Algora.Search
alias Algora.Util

require Logger

@per_page 10
@poll_interval :timer.seconds(3)

# Client API
def start_link(opts) do
GenServer.start_link(__MODULE__, opts)
end

def pause(pid) do
GenServer.cast(pid, :pause)
end

def resume(pid) do
GenServer.cast(pid, :resume)
end

# Server callbacks
@impl true
def init(opts) do
provider = Keyword.fetch!(opts, :provider)

{:ok,
%{
provider: provider,
cursor: nil,
paused: not Algora.config([:auto_start_pollers])
}, {:continue, :setup}}
end

@impl true
def handle_continue(:setup, state) do
{:ok, cursor} = get_or_create_cursor()
schedule_poll()

{:noreply, %{state | cursor: cursor}}
end

@impl true
def handle_info(:poll, %{paused: true} = state) do
{:noreply, state}
end

@impl true
def handle_info(:poll, state) do
{:ok, new_state} = poll(state)
schedule_poll()
{:noreply, new_state}
end

@impl true
def handle_cast(:pause, state) do
{:noreply, %{state | paused: true}}
end

@impl true
def handle_cast(:resume, state) do
schedule_poll()
{:noreply, %{state | paused: false}}
end

@impl true
def handle_call(:get_provider, _from, state) do
{:reply, state.provider, state}
end

@impl true
def handle_call(:is_paused, _from, state) do
{:reply, state.paused, state}
end

defp schedule_poll do
Process.send_after(self(), :poll, @poll_interval)
end

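# One poll pass: fetch a page of recently-updated issues whose comments mention
# "bounty", enqueue a SearchConsumer job for each new command comment, and only
# advance the cursor once the whole batch has been processed.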
def poll(state) do
with {:ok, tickets} <- fetch_tickets(state),
if(length(tickets) > 0, do: Logger.debug("Processing #{length(tickets)} tickets")),
{:ok, updated_cursor} <- process_batch(tickets, state) do
{:ok, %{state | cursor: updated_cursor}}
else
{:error, reason} ->
Logger.error("Failed to fetch tickets: #{inspect(reason)}")
{:ok, state}
end
end

defp process_batch([], state), do: {:ok, state.cursor}

defp process_batch(tickets, state) do
Repo.transact(fn ->
with :ok <- process_tickets(tickets, state) do
update_last_polled(state.cursor, List.last(tickets))
end
end)
end

defp process_tickets(tickets, state) do
Enum.reduce_while(tickets, :ok, fn ticket, _acc ->
case process_ticket(ticket, state) do
{:ok, _} -> {:cont, :ok}
error -> {:halt, error}
end
end)
end

def fetch_tickets(state) do
case search("bounty", since: DateTime.to_iso8601(state.cursor.timestamp)) do
{:ok, %{"data" => %{"search" => %{"nodes" => tickets}}}} ->
{:ok, tickets}

_ ->
{:error, :no_tickets_found}
end
end

defp get_or_create_cursor do
case Search.get_search_cursor("github") do
nil ->
Search.create_search_cursor(%{provider: "github", timestamp: DateTime.utc_now()})

search_cursor ->
{:ok, search_cursor}
end
end

defp update_last_polled(search_cursor, %{"updatedAt" => updated_at}) do
with {:ok, updated_at, _} <- DateTime.from_iso8601(updated_at),
{:ok, cursor} <-
Search.update_search_cursor(search_cursor, %{
timestamp: updated_at,
last_polled_at: DateTime.utc_now()
}) do
{:ok, cursor}
else
{:error, reason} -> Logger.error("Failed to update search cursor: #{inspect(reason)}")
end
end

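# Parses the issue URL into a ticket_ref, drops comments authored by the bot
# itself or already covered by the cursor, and enqueues a SearchConsumer job
# for the first command parsed out of each remaining comment.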
defp process_ticket(%{"updatedAt" => updated_at, "url" => url} = ticket, state) do
with {:ok, updated_at, _} <- DateTime.from_iso8601(updated_at),
{:ok, [ticket_ref: ticket_ref], _, _, _, _} <- Parser.full_ticket_ref(url) do
Logger.info("Latency: #{DateTime.diff(DateTime.utc_now(), updated_at, :second)}s")

ticket["comments"]["nodes"]
|> Enum.reject(fn comment ->
already_processed? =
case DateTime.from_iso8601(comment["updatedAt"]) do
{:ok, comment_updated_at, _} ->
DateTime.before?(comment_updated_at, state.cursor.timestamp)

{:error, _} ->
true
end

bot? = comment["author"]["login"] == Github.bot_handle()

bot? or already_processed?
end)
|> Enum.flat_map(fn comment ->
case Command.parse(comment["body"]) do
{:ok, [command | _]} -> [{comment, command}]
_ -> []
end
end)
|> Enum.reduce_while(:ok, fn {comment, command}, _acc ->
res =
%{
comment: comment,
command: Util.term_to_base64(command),
ticket_ref: Util.term_to_base64(ticket_ref)
}
|> Github.Poller.SearchConsumer.new()
|> Oban.insert()

case res do
{:ok, _job} -> {:cont, :ok}
error -> {:halt, error}
end
end)
else
{:error, reason} ->
Logger.error("Failed to parse commands from ticket: #{inspect(ticket)}. Reason: #{inspect(reason)}")

:ok
end
end

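# GitHub GraphQL search for issues updated since the cursor timestamp (oldest
# first), returning up to `per_page` issues plus up to three comments each for
# command parsing. Note the query is currently scoped to a single repository.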
def search(q, opts \\ []) do
per_page = opts[:per_page] || 10
since = opts[:since]

search_query =
if opts[:since] do
"#{q} in:comment is:issue repo:acme-incorporated/webapp sort:updated-asc updated:>#{opts[:since]}"
else
"#{q} in:comment is:issue repo:acme-incorporated/webapp sort:updated-asc"
end

query = """
query issues($search_query: String!) {
search(first: #{per_page}, type: ISSUE, query: $search_query) {
issueCount
pageInfo {
hasNextPage
}
nodes {
__typename
... on Issue {
url
updatedAt
comments(last: 3, orderBy: {field: UPDATED_AT, direction: DESC}) {
nodes {
updatedAt
databaseId
author {
login
}
body
}
}
}
}
}
}
"""

body = %{query: query, variables: %{search_query: search_query}}
Github.Client.fetch(Admin.token!(), "/graphql", "POST", body)
end
end
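The poller is started with a `:provider` option and honours the `:auto_start_pollers` config flag. A minimal sketch of how it might be attached to the `Algora.Github.Poller.RootSupervisor` listed in application.ex above; the supervisor's real child list is not shown in this section:

# Sketch only: one plausible way to supervise the search poller.
children = [
  {Algora.Github.Poller.Search, provider: "github"}
]

Supervisor.start_link(children, strategy: :one_for_one)

Once running, `pause/1` and `resume/1` can be called with the poller's pid to stop and restart polling.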