
Switch most of the log events to debug
Otherwise the log output is extremely verbose.
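
Context for the change: whether a Logger.debug call emits anything at all is decided by the application's configured Logger level, so demoting routine events silences them for anyone running at the common :info level. A minimal sketch of that configuration (standard Elixir Logger, not part of this commit):

# config/config.exs
import Config

# :info suppresses Logger.debug output entirely;
# switch to :debug when troubleshooting a spider run.
config :logger, level: :info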
oltarasenko committed Feb 17, 2020
1 parent 2d530e0 commit 8885050
Showing 6 changed files with 10 additions and 10 deletions.
2 changes: 1 addition & 1 deletion lib/crawly/data_storage/data_storage.ex
@@ -43,7 +43,7 @@ defmodule Crawly.DataStorage do
   end
 
   def start_link([]) do
-    Logger.info("Starting data storage")
+    Logger.debug("Starting data storage")
 
     GenServer.start_link(__MODULE__, [], name: __MODULE__)
   end
2 changes: 1 addition & 1 deletion lib/crawly/manager.ex
@@ -32,7 +32,7 @@ defmodule Crawly.Manager do
   use GenServer
 
   def start_link(spider_name) do
-    Logger.info("Starting the manager for #{spider_name}")
+    Logger.debug("Starting the manager for #{spider_name}")
     GenServer.start_link(__MODULE__, spider_name)
   end
 
2 changes: 1 addition & 1 deletion lib/crawly/pipelines/csv_encoder.ex
@@ -24,7 +24,7 @@ defmodule Crawly.Pipelines.CSVEncoder do
       :undefined ->
         # only for when both tuple and global config is not provided
 
-        Logger.info(
+        Logger.error(
          "Dropping item: #{inspect(item)}. Reason: No fields declared for CSVEncoder"
         )
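
The :undefined branch above only fires when no fields are configured at all. A sketch of declaring them, assuming the tuple-based pipeline configuration that the code comment mentions (the field names here are illustrative):

# config/config.exs
config :crawly,
  pipelines: [
    # :fields is what the CSVEncoder reads; without it every item
    # is dropped with the error message shown in the diff above.
    {Crawly.Pipelines.CSVEncoder, fields: [:title, :url]}
  ]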
2 changes: 1 addition & 1 deletion lib/crawly/pipelines/duplicates_filter.ex
@@ -60,7 +60,7 @@ defmodule Crawly.Pipelines.DuplicatesFilter do
         {item, new_state}
 
       true ->
-        Logger.info("[error] Duplicates filter, removed item: #{inspect(item)}")
+        Logger.debug("Duplicates filter dropped item: #{inspect(item)}")
         {false, state}
     end
   end
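
Alongside the level change, the misleading "[error]" prefix is gone from what was always an expected event. For reference, a hedged sketch of enabling this pipeline; the item_id option is an assumption based on Crawly's tuple-style pipeline config and does not appear in this diff:

config :crawly,
  pipelines: [
    # item_id names the field that identifies a duplicate; repeated
    # values now log at debug level instead of a fake [error] at info.
    {Crawly.Pipelines.DuplicatesFilter, item_id: :title}
  ]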
2 changes: 1 addition & 1 deletion lib/crawly/pipelines/json_encoder.ex
@@ -28,7 +28,7 @@ defmodule Crawly.Pipelines.JSONEncoder do
         {new_item, state}
 
       {:error, reason} ->
-        Logger.info(
+        Logger.error(
           "Could not encode the following item: #{inspect(item)} into json,
           reason: #{inspect(reason)}"
         )
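
Here the level moves up rather than down: a failed encode means lost data, so it should survive an :info configuration. A standalone sketch of the surrounding pattern, using Jason purely as an illustrative encoder, since the library Crawly actually calls is not visible in this diff:

# inside a module that has `require Logger`
case Jason.encode(item) do
  {:ok, new_item} ->
    # success path, matching the context line above
    {new_item, state}

  {:error, reason} ->
    # bad data is a real problem, hence Logger.error
    Logger.error("Could not encode item: #{inspect(reason)}")
    {false, state}
end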
10 changes: 5 additions & 5 deletions lib/crawly/worker.ex
@@ -42,8 +42,8 @@ defmodule Crawly.Worker do
 
     case :epipe.run(functions, {request, spider_name}) do
       {:error, _step, reason, _step_state} ->
-        # TODO: Add retry logic
-        Logger.error(
+
+        Logger.debug(
           fn ->
             "Crawly worker could not process the request to #{
               inspect(request.url)
@@ -115,7 +115,7 @@ defmodule Crawly.Worker do
       error, reason ->
         stacktrace = :erlang.get_stacktrace()
 
-        Logger.error(
+        Logger.debug(
           "Could not parse item, error: #{inspect(error)}, reason: #{
             inspect(reason)
           }, stacktrace: #{inspect(stacktrace)}
@@ -164,7 +164,7 @@ defmodule Crawly.Worker do
 
     case retries <= max_retries do
       true ->
-        Logger.info("Request to #{request.url}, is scheduled for retry")
+        Logger.debug("Request to #{request.url}, is scheduled for retry")
 
         middlewares = request.middlewares -- ignored_middlewares
 
@@ -176,7 +176,7 @@ defmodule Crawly.Worker do
 
        :ok = Crawly.RequestsStorage.store(spider, request)
      false ->
-        Logger.info("Dropping request to #{request.url}, (max retries)")
+        Logger.error("Dropping request to #{request.url}, (max retries)")
        :ok
     end
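
One detail in the first worker hunk is worth noting: Logger.debug there receives a zero-arity function instead of a string. That is standard Logger behavior rather than something introduced by this commit: the function runs only if the :debug level is enabled, so the string interpolation costs nothing in production. A minimal sketch:

require Logger

url = "https://example.com"

# The message is built only when :debug is enabled; at :info and
# above the anonymous function is never invoked.
Logger.debug(fn ->
  "Crawly worker could not process the request to #{inspect(url)}"
end)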
