diff --git a/lib/crawly.ex b/lib/crawly.ex
index 7a0178d9..c1662ada 100644
--- a/lib/crawly.ex
+++ b/lib/crawly.ex
@@ -141,7 +141,7 @@ defmodule Crawly do
       Crawly.Models.YMLSpider.load()
     rescue
       error ->
-        Logger.debug("Could not load spiders: #{inspect(error)}")
+        Logger.info("No spiders found to auto-load: #{inspect(error)}")
     end
   end
 end
diff --git a/lib/crawly/engine.ex b/lib/crawly/engine.ex
index 432edb02..8ab1afb5 100644
--- a/lib/crawly/engine.ex
+++ b/lib/crawly/engine.ex
@@ -48,7 +48,7 @@ defmodule Crawly.Engine do
   def start_spider(spider_name, opts \\ [])
 
   def start_spider(spider_name, crawl_id) when is_binary(crawl_id) do
-    Logger.warn(
+    Logger.warning(
       "Deprecation Warning: Setting the crawl_id as second positional argument is deprecated. Please use the :crawl_id option instead. Refer to docs for more info (https://hexdocs.pm/crawly/Crawly.Engine.html#start_spider/2) "
     )
 
@@ -67,7 +67,7 @@ defmodule Crawly.Engine do
         configure_spider_logs(spider_name, opts[:crawl_id])
 
       {true, false} ->
-        Logger.warn(
+        Logger.error(
          ":logger_file_backend https://github.com/onkel-dirtus/logger_file_backend#loggerfilebackend must be installed as a peer dependency if log_to_file config is set to true"
        )
 
diff --git a/lib/crawly/pipelines/duplicates_filter.ex b/lib/crawly/pipelines/duplicates_filter.ex
index 6aecdb9e..2abcc05b 100644
--- a/lib/crawly/pipelines/duplicates_filter.ex
+++ b/lib/crawly/pipelines/duplicates_filter.ex
@@ -38,7 +38,7 @@ defmodule Crawly.Pipelines.DuplicatesFilter do
 
     case item_id do
       nil ->
-        Logger.info(
+        Logger.error(
           "Duplicates filter pipeline is inactive, item_id option is required to make it operational."
         )
 
diff --git a/lib/crawly/pipelines/validate.ex b/lib/crawly/pipelines/validate.ex
index 591d3045..4e657348 100644
--- a/lib/crawly/pipelines/validate.ex
+++ b/lib/crawly/pipelines/validate.ex
@@ -47,7 +47,7 @@ defmodule Crawly.Pipelines.Validate do
         {item, state}
 
       _ ->
-        Logger.info(
+        Logger.warning(
           "Dropping item: #{inspect(item)}. Reason: missing required fields"
         )
 
diff --git a/lib/crawly/requests_storage/requests_storage_worker.ex b/lib/crawly/requests_storage/requests_storage_worker.ex
index 8a793d5a..ff0cad6f 100644
--- a/lib/crawly/requests_storage/requests_storage_worker.ex
+++ b/lib/crawly/requests_storage/requests_storage_worker.ex
@@ -95,7 +95,7 @@ defmodule Crawly.RequestsStorage.Worker do
       GenServer.call(pid, command)
     catch
       error, reason ->
-        Logger.debug(Exception.format(error, reason, __STACKTRACE__))
+        Logger.error(Exception.format(error, reason, __STACKTRACE__))
     end
 
   defp pipe_request(request, state) do
diff --git a/lib/crawly/worker.ex b/lib/crawly/worker.ex
index 02149db4..10a07f1d 100644
--- a/lib/crawly/worker.ex
+++ b/lib/crawly/worker.ex
@@ -51,7 +51,7 @@ defmodule Crawly.Worker do
         :ok
     else
       {:error, reason} ->
-        Logger.debug(
+        Logger.warning(
           "Crawly worker could not process the request to #{inspect(request.url)} reason: #{inspect(reason)}"
         )
     end
@@ -122,11 +122,11 @@ defmodule Crawly.Worker do
       {:ok, {parsed_item, response, spider_name}}
     catch
       error, reason ->
-        Logger.debug(
+        Logger.warning(
           "Could not parse item, error: #{inspect(error)}, reason: #{inspect(reason)}"
         )
 
-        Logger.debug(Exception.format(:error, error, __STACKTRACE__))
+        Logger.warning(Exception.format(:error, error, __STACKTRACE__))
 
         {:error, reason}
     end
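
Context for the log-level changes above (a minimal sketch, not part of the patch): Elixir 1.15 deprecates Logger.warn/1 in favor of Logger.warning/1, and Logger discards any message below its configured level, so promoting these calls from :debug/:info to :warning/:error makes pipeline misconfigurations and request failures visible under stricter level settings. The snippet below illustrates this; the `reason` value is hypothetical.

    require Logger

    # Hypothetical failure detail, used only for this illustration.
    reason = :timeout

    Logger.debug("Retrying request: #{inspect(reason)}")    # lowest severity; easiest to filter out
    Logger.info("Spider loaded")                            # routine status
    Logger.warning("Dropping item: missing fields")         # recoverable problem (Logger.warn is deprecated)
    Logger.error("Duplicates filter inactive, check config") # misconfiguration that needs attention

    # Raising the level silences everything below it: after this call,
    # the :debug and :info messages above become no-ops.
    Logger.configure(level: :warning)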