Skip to content
This repository has been archived by the owner on Apr 9, 2020. It is now read-only.

Commit

Permalink
Add language to the model, get it from API in periodic process. Fix #17
Browse files Browse the repository at this point in the history
  • Loading branch information
Nagasaki45 committed Aug 8, 2016
1 parent b257a9b commit fdfb8ab
Show file tree
Hide file tree
Showing 3 changed files with 40 additions and 5 deletions.
30 changes: 27 additions & 3 deletions lib/periodic/periodic.ex
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@ defmodule Krihelinator.Periodic do
Stream.concat(scrape_trending(), existing_repos_to_scrape())
|> Stream.uniq(fn repo -> repo.name end)
|> Stream.map(&Pipeline.StatsScraper.scrape/1)
|> Stream.map(&add_api_only_updates/1)
|> Enum.each(&handle_scraped/1)
Logger.info "Periodic process finished successfully!"
reschedule_work()
Expand Down Expand Up @@ -83,6 +84,27 @@ defmodule Krihelinator.Periodic do
|> Stream.map(&Map.from_struct/1)
end

@doc """
Enrich a scraped repo with fields that the pulse page does not expose
but the GitHub API does.

Hits the API endpoint for `repo.name` and merges the resulting map of
updates (or an `:api_error` marker) into the repo map.
"""
def add_api_only_updates(repo) do
  api_response = GithubAPI.limited_get("repos/#{repo.name}")
  api_updates = handle_api_call(api_response)
  Map.merge(repo, api_updates)
end

# Fields only available via the GitHub API, not the pulse page.
@api_only_fields ~w(language description)a

@doc """
Pick the API-only fields out of a GitHub API response.

On a successful (HTTP 200) response, returns a map with an atom key per
field in `@api_only_fields` (missing body keys become `nil`). Any other
result yields `%{error: :api_error}` for downstream error handling.
"""
def handle_api_call({:ok, %{body: body, status_code: 200}}) do
  @api_only_fields
  |> Enum.map(fn field -> {field, Map.get(body, Atom.to_string(field))} end)
  |> Enum.into(%{})
end

def handle_api_call(_failure), do: %{error: :api_error}

@doc """
Decide what to do with the scraped data. Specific errors might trigger save,
other deletes, or ignores.
Expand All @@ -91,12 +113,14 @@ defmodule Krihelinator.Periodic do
Pipeline.DataHandler.save_to_db(repo)
end

def handle_scraped(%{error: :timeout}=repo) do
Logger.info "Scraping #{repo.name} timed out. No stats updated"
# Errors that should NOT delete the repo from the DB: a scrape timeout or
# a failed GitHub API call just means "skip this round's update".
@ignorable_errors ~w(timeout api_error)a

# Ignorable failure: log it and leave the previously stored stats as-is.
# (Other error values fall through to the deleting clause below.)
def handle_scraped(%{error: error}=repo) when error in @ignorable_errors do
Logger.info "Failed to process #{repo.name} due to #{error}. No update done"
end

def handle_scraped(%{error: error}=repo) do
Logger.info "Failed to scrape #{repo.name} due to #{error}. Deleting!"
Logger.info "Failed to process #{repo.name} due to #{error}. Deleting!"
(from r in GithubRepo, where: r.name == ^repo.name)
|> Repo.delete_all
end
Expand Down
9 changes: 9 additions & 0 deletions priv/repo/migrations/20160808171600_add_language_field.exs
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
# Migration backing the `language` field added to the GithubRepo schema:
# adds a nullable string column to the `repos` table. `alter` inside
# `change/0` is automatically reversible (rollback drops the column).
defmodule Krihelinator.Repo.Migrations.AddLanguageField do
use Ecto.Migration

def change do
alter table(:repos) do
add :language, :string
end
end
end
6 changes: 4 additions & 2 deletions web/models/github_repo.ex
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ defmodule Krihelinator.GithubRepo do
schema "repos" do
field :name, :string
field :description, :string
field :language, :string
field :merged_pull_requests, :integer
field :proposed_pull_requests, :integer
field :closed_issues, :integer
Expand All @@ -17,8 +18,9 @@ defmodule Krihelinator.GithubRepo do
timestamps()
end

@allowed ~w(name description merged_pull_requests proposed_pull_requests
closed_issues new_issues commits authors trending user_requested)a
@allowed ~w(name description language merged_pull_requests
proposed_pull_requests closed_issues new_issues commits authors
trending user_requested)a
@required ~w(name merged_pull_requests proposed_pull_requests
closed_issues new_issues commits authors)a

Expand Down

0 comments on commit fdfb8ab

Please sign in to comment.