Increase the error level of some messages (#282)
* changed some log levels

* fix: logging level and message for auto-load
dogweather committed Dec 13, 2023
1 parent 443b379 commit f863b5b
Showing 6 changed files with 9 additions and 9 deletions.
2 changes: 1 addition & 1 deletion lib/crawly.ex
@@ -141,7 +141,7 @@ defmodule Crawly do
       Crawly.Models.YMLSpider.load()
     rescue
       error ->
-        Logger.debug("Could not load spiders: #{inspect(error)}")
+        Logger.info("No spiders found to auto-load: #{inspect(error)}")
     end
   end
 end
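The auto-load failure is now reported at :info with a clearer message, treating an absent spider definition as a normal condition rather than a bug. A minimal sketch (assuming a standard Mix config) of how to silence it along with other :info chatter:

# config/config.exs
import Config

# Raising the Logger level past :info suppresses the auto-load notice above.
config :logger, level: :warning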
4 changes: 2 additions & 2 deletions lib/crawly/engine.ex
@@ -48,7 +48,7 @@ defmodule Crawly.Engine do
   def start_spider(spider_name, opts \\ [])

   def start_spider(spider_name, crawl_id) when is_binary(crawl_id) do
-    Logger.warn(
+    Logger.warning(
       "Deprecation Warning: Setting the crawl_id as second positional argument is deprecated. Please use the :crawl_id option instead. Refer to docs for more info (https://hexdocs.pm/crawly/Crawly.Engine.html#start_spider/2) "
     )

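Logger.warn/1 is itself deprecated in recent Elixir releases; Logger.warning/1,2 is the replacement and also accepts metadata. A quick sketch of the newer call (the :spider metadata key and MySpider are illustrative, not part of Crawly):

require Logger

# Logger.warn("...")  # deprecated spelling
Logger.warning("spider finished with errors", spider: MySpider)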
@@ -67,7 +67,7 @@ defmodule Crawly.Engine do
         configure_spider_logs(spider_name, opts[:crawl_id])

       {true, false} ->
-        Logger.warn(
+        Logger.error(
           ":logger_file_backend https://github.com/onkel-dirtus/logger_file_backend#loggerfilebackend must be installed as a peer dependency if log_to_file config is set to true"
         )

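The missing peer dependency is now an outright error, since log_to_file cannot work without it. A mix.exs sketch of adding the backend (version requirements here are assumptions; check Hex for current releases):

# mix.exs
defp deps do
  [
    {:crawly, "~> 0.16"},
    {:logger_file_backend, "~> 0.0.13"}
  ]
end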
2 changes: 1 addition & 1 deletion lib/crawly/pipelines/duplicates_filter.ex
@@ -38,7 +38,7 @@ defmodule Crawly.Pipelines.DuplicatesFilter do

     case item_id do
       nil ->
-        Logger.info(
+        Logger.error(
           "Duplicates filter pipeline is inactive, item_id option is required
            to make it operational."
         )
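For reference, the item_id option this error asks for is passed in the pipeline tuple. A config sketch (the :title key is an assumed item field):

config :crawly,
  pipelines: [
    {Crawly.Pipelines.DuplicatesFilter, item_id: :title}
  ]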
2 changes: 1 addition & 1 deletion lib/crawly/pipelines/validate.ex
@@ -47,7 +47,7 @@ defmodule Crawly.Pipelines.Validate do
         {item, state}

       _ ->
-        Logger.info(
+        Logger.warning(
           "Dropping item: #{inspect(item)}. Reason: missing required fields"
         )

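Dropped items are a data-quality signal rather than a failure, hence :warning. The required fields are declared via the pipeline's fields option; a sketch (field names are assumptions):

config :crawly,
  pipelines: [
    {Crawly.Pipelines.Validate, fields: [:title, :url]}
  ]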
2 changes: 1 addition & 1 deletion lib/crawly/requests_storage/requests_storage_worker.ex
@@ -95,7 +95,7 @@ defmodule Crawly.RequestsStorage.Worker do
     GenServer.call(pid, command)
   catch
     error, reason ->
-      Logger.debug(Exception.format(error, reason, __STACKTRACE__))
+      Logger.error(Exception.format(error, reason, __STACKTRACE__))
   end

   defp pipe_request(request, state) do
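The catch error, reason clause traps :error, :exit, and :throw alike, and Exception.format/3 renders the kind, reason, and stack trace as one readable string. A standalone sketch of the same pattern:

require Logger

try do
  # Calling an unregistered process exits with :noproc; caught as kind :exit.
  GenServer.call(:no_such_process, :ping)
catch
  error, reason ->
    Logger.error(Exception.format(error, reason, __STACKTRACE__))
end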
6 changes: 3 additions & 3 deletions lib/crawly/worker.ex
@@ -51,7 +51,7 @@ defmodule Crawly.Worker do
       :ok
     else
       {:error, reason} ->
-        Logger.debug(
+        Logger.warning(
           "Crawly worker could not process the request to #{inspect(request.url)} reason: #{inspect(reason)}"
         )
     end
@@ -122,11 +122,11 @@
       {:ok, {parsed_item, response, spider_name}}
     catch
       error, reason ->
-        Logger.debug(
+        Logger.warning(
           "Could not parse item, error: #{inspect(error)}, reason: #{inspect(reason)}"
         )

-        Logger.debug(Exception.format(:error, error, __STACKTRACE__))
+        Logger.warning(Exception.format(:error, error, __STACKTRACE__))

         {:error, reason}
       end
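Net effect of the commit: recoverable failures move from :debug to :warning and misconfiguration to :error, so these messages now clear a typical production log-level cutoff. Logger.compare_levels/2 makes the ordering concrete:

iex> Logger.compare_levels(:debug, :info)
:lt
iex> Logger.compare_levels(:warning, :info)
:gt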
