Switch most of the log events to debug #58

Merged: 1 commit, Feb 19, 2020
lib/crawly/data_storage/data_storage.ex (1 addition, 1 deletion)

@@ -43,7 +43,7 @@ defmodule Crawly.DataStorage do
   end

   def start_link([]) do
-    Logger.info("Starting data storage")
+    Logger.debug("Starting data storage")

     GenServer.start_link(__MODULE__, [], name: __MODULE__)
   end
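Note that once these startup events sit at :debug, they are filtered out under the common :info console level. A minimal sketch (assuming a standard Mix config layout, not part of this PR) of how to surface them again:

# config/config.exs -- lower the Logger level so the :debug
# startup events from this change show up in the console again.
config :logger, level: :debug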
lib/crawly/manager.ex (1 addition, 1 deletion)

@@ -32,7 +32,7 @@ defmodule Crawly.Manager do
   use GenServer

   def start_link(spider_name) do
-    Logger.info("Starting the manager for #{spider_name}")
+    Logger.debug("Starting the manager for #{spider_name}")
     GenServer.start_link(__MODULE__, spider_name)
   end
lib/crawly/pipelines/csv_encoder.ex (1 addition, 1 deletion)

@@ -24,7 +24,7 @@ defmodule Crawly.Pipelines.CSVEncoder do
       :undefined ->
         # only for when both tuple and global config is not provided

-        Logger.info(
+        Logger.error(
           "Dropping item: #{inspect(item)}. Reason: No fields declared for CSVEncoder"
         )
lib/crawly/pipelines/duplicates_filter.ex (1 addition, 1 deletion)

@@ -60,7 +60,7 @@ defmodule Crawly.Pipelines.DuplicatesFilter do
         {item, new_state}

       true ->
-        Logger.info("[error] Duplicates filter, removed item: #{inspect(item)}")
+        Logger.debug("Duplicates filter dropped item: #{inspect(item)}")
         {false, state}
     end
   end
lib/crawly/pipelines/json_encoder.ex (1 addition, 1 deletion)

@@ -28,7 +28,7 @@ defmodule Crawly.Pipelines.JSONEncoder do
         {new_item, state}

       {:error, reason} ->
-        Logger.info(
+        Logger.error(
           "Could not encode the following item: #{inspect(item)} into json,
           reason: #{inspect(reason)}"
         )
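The two encoder pipelines are raised to :error rather than lowered to :debug, since a dropped item usually signals a misconfiguration rather than routine noise. The return convention visible in these hunks ({item, state} to pass an item along, {false, state} to drop it) also applies to custom pipelines; a hypothetical sketch, with module and field names invented for illustration:

# Hypothetical pipeline following the convention seen in the diffs above:
# return {item, state} to keep an item, {false, state} to drop it,
# and log the drop at a level matching its severity.
defmodule MyApp.Pipelines.RequireTitle do
  require Logger

  def run(item, state) do
    case Map.get(item, :title) do
      nil ->
        Logger.error("Dropping item: #{inspect(item)}. Reason: missing title")
        {false, state}

      _title ->
        {item, state}
    end
  end
end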
lib/crawly/worker.ex (5 additions, 5 deletions)

@@ -42,8 +42,8 @@ defmodule Crawly.Worker do

     case :epipe.run(functions, {request, spider_name}) do
       {:error, _step, reason, _step_state} ->
-        # TODO: Add retry logic
-        Logger.error(
+
+        Logger.debug(
           fn ->
             "Crawly worker could not process the request to #{
               inspect(request.url)
@@ -115,7 +115,7 @@
       error, reason ->
         stacktrace = :erlang.get_stacktrace()

-        Logger.error(
+        Logger.debug(
          "Could not parse item, error: #{inspect(error)}, reason: #{
            inspect(reason)
          }, stacktrace: #{inspect(stacktrace)}
@@ -164,7 +164,7 @@

     case retries <= max_retries do
       true ->
-        Logger.info("Request to #{request.url}, is scheduled for retry")
+        Logger.debug("Request to #{request.url}, is scheduled for retry")

         middlewares = request.middlewares -- ignored_middlewares

@@ -176,7 +176,7 @@

       :ok = Crawly.RequestsStorage.store(spider, request)
       false ->
-        Logger.info("Dropping request to #{request.url}, (max retries)")
+        Logger.error("Dropping request to #{request.url}, (max retries)")
         :ok
     end
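Worth noting: the first worker hunk passes a zero-arity function to Logger.debug. Elixir's Logger macros accept either chardata or a function; with a function, the message is only built if the event passes the level check, so the interpolation cost is skipped when :debug is filtered out. A small sketch (the request variable stands in for the one in the worker):

# Eager form: the string is interpolated even when :debug is disabled.
Logger.debug("Could not process the request to #{inspect(request.url)}")

# Lazy form: the fn runs only if the :debug event will actually be logged.
Logger.debug(fn ->
  "Could not process the request to #{inspect(request.url)}"
end)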