chore: move base class import to init
yolile committed Jan 21, 2022
1 parent 14ec3ff commit 4f3015e
Showing 107 changed files with 128 additions and 122 deletions.
7 changes: 7 additions & 0 deletions kingfisher_scrapy/base_spiders/__init__.py
@@ -0,0 +1,7 @@
from .base_spider import BaseSpider # noqa: F401
from .compressed_file_spider import CompressedFileSpider # noqa: F401
from .simple_spider import SimpleSpider # noqa: F401
from .big_file_spider import BigFileSpider # noqa: F401
from .index_spider import IndexSpider # noqa: F401
from .links_spider import LinksSpider # noqa: F401
from .periodic_spider import PeriodicSpider # noqa: F401
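The new __init__.py re-exports every base class, so spiders can import from the package rather than from each defining module. A minimal before/after illustration, taken from the import changes in the diffs below (the choice of SimpleSpider is arbitrary):

# Before this commit: import the base class from the module that defines it.
from kingfisher_scrapy.base_spiders.simple_spider import SimpleSpider

# After this commit: import it from the package, via base_spiders/__init__.py.
from kingfisher_scrapy.base_spiders import SimpleSpider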
2 changes: 1 addition & 1 deletion kingfisher_scrapy/base_spiders/base_spider.py
@@ -225,7 +225,7 @@ def build_request(self, url, formatter, **kwargs):
If the last component of a URL's path is unique, use it as the file name. For example:
>>> from kingfisher_scrapy.base_spiders.base_spider import BaseSpider
>>> from kingfisher_scrapy.base_spiders import BaseSpider
>>> from kingfisher_scrapy.util import components
>>> url = 'https://example.com/package.json'
>>> formatter = components(-1)
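A hedged sketch of how a spider might call build_request() with this formatter. The build_request(self, url, formatter, **kwargs) signature, the components helper and the example URL appear in the hunk above; the subclass, its name and the yield from start_requests are illustrative assumptions, not part of the commit.

from kingfisher_scrapy.base_spiders import BaseSpider
from kingfisher_scrapy.util import components


class MySpider(BaseSpider):  # illustrative subclass, not part of the commit
    name = 'my_spider'

    def start_requests(self):
        # components(-1) derives the file name from the last component of the URL path.
        yield self.build_request('https://example.com/package.json', formatter=components(-1))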
4 changes: 2 additions & 2 deletions kingfisher_scrapy/base_spiders/big_file_spider.py
@@ -1,4 +1,4 @@
from kingfisher_scrapy.base_spiders.simple_spider import SimpleSpider
from kingfisher_scrapy.base_spiders import SimpleSpider
from kingfisher_scrapy.util import handle_http_error


@@ -12,7 +12,7 @@ class BigFileSpider(SimpleSpider):
.. code-block:: python
from kingfisher_scrapy.base_spiders_big_file_spider import BigFileSpider
from kingfisher_scrapy.base_spiders import BigFileSpider
from kingfisher_scrapy.util import components
class MySpider(BigFileSpider):
4 changes: 2 additions & 2 deletions kingfisher_scrapy/base_spiders/compressed_file_spider.py
@@ -4,7 +4,7 @@

from rarfile import RarFile

from kingfisher_scrapy.base_spiders.base_spider import BaseSpider
from kingfisher_scrapy.base_spiders import BaseSpider
from kingfisher_scrapy.exceptions import UnknownArchiveFormatError
from kingfisher_scrapy.items import File
from kingfisher_scrapy.util import get_file_name_and_extension, handle_http_error
@@ -22,7 +22,7 @@ class CompressedFileSpider(BaseSpider):
.. code-block:: python
from kingfisher_scrapy.base_spiders.compressed_file_spider import CompressedFileSpider
from kingfisher_scrapy.base_spiders import CompressedFileSpider
from kingfisher_scrapy.util import components
class MySpider(CompressedFileSpider):
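The docstring example above is cut off by the collapsed hunk. A hedged completion, using only names visible in this commit (CompressedFileSpider, components, build_request); the data_type value and the archive URL are assumptions, and a real spider may need attributes this sketch omits.

from kingfisher_scrapy.base_spiders import CompressedFileSpider
from kingfisher_scrapy.util import components


class MySpider(CompressedFileSpider):
    name = 'my_spider'
    data_type = 'release_package'  # assumed value

    def start_requests(self):
        # Name the downloaded archive after the last component of the URL path.
        yield self.build_request('https://example.com/packages.zip', formatter=components(-1))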
7 changes: 3 additions & 4 deletions kingfisher_scrapy/base_spiders/index_spider.py
@@ -3,7 +3,7 @@
from jsonpointer import resolve_pointer

from kingfisher_scrapy import util
from kingfisher_scrapy.base_spiders.simple_spider import SimpleSpider
from kingfisher_scrapy.base_spiders import SimpleSpider
from kingfisher_scrapy.exceptions import IncoherentConfigurationError
from kingfisher_scrapy.items import FileError
from kingfisher_scrapy.util import handle_http_error, parameters
@@ -25,8 +25,7 @@ class IndexSpider(SimpleSpider):
query string parameters. The spider then yields a request for each offset/page.
#. If the ``page`` query string parameter is zero-indexed, set ``start_page = 0``.
#. Set ``formatter`` to set the file name like in
:meth:`~kingfisher_scrapy.base_spiders.base_spider.BaseSpider.build_request`.
#. Set ``formatter`` to set the file name like in :meth:`~kingfisher_scrapy.base_spiders.BaseSpider.build_request`.
If ``total_pages_pointer`` or ``use_page = True``, it defaults to ``parameters(<param_page>)``. Otherwise, if
``count_pointer`` is set and ``use_page = False``, it defaults to ``parameters(<param_offset>)``.
#. Write a ``start_requests`` method to yield the initial URL. The request's ``callback`` parameter should be set
@@ -43,7 +42,7 @@ class IndexSpider(SimpleSpider):
to check for an error response, or to extract the page count from an HTML page - override the ``parse_list_loader``
method. If this method returns a ``FileError``, then ``parse_list`` yields it and returns.
Otherwise, results are yielded from all responses by :meth:`~kingfisher_scrapy.base_spider.SimpleSpider.parse`. To
Otherwise, results are yielded from all responses by :meth:`~kingfisher_scrapy.SimpleSpider.parse`. To
change this method, set a ``parse_list_callback`` class attribute to a method's name as a string.
The names of the query string parameters 'page', 'limit' and 'offset' are customizable. Define the ``param_page``,
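A hedged sketch following the numbered steps in the docstring above. total_pages_pointer, formatter, parameters and the parse_list method are named in the hunks; the spider name, URL, pointer value and data_type value are assumptions, setting callback=self.parse_list is inferred from the truncated step about the request's callback, and staticmethod is plain Python for assigning a callable as a class attribute without binding it.

import scrapy

from kingfisher_scrapy.base_spiders import IndexSpider
from kingfisher_scrapy.util import parameters


class MySpider(IndexSpider):
    name = 'my_spider'
    data_type = 'release_package'  # assumed value
    total_pages_pointer = '/pages'  # assumed JSON Pointer to the page count
    formatter = staticmethod(parameters('page'))  # file name from the ?page= query string parameter

    def start_requests(self):
        # The first response is handled by parse_list, which yields one request per page.
        yield scrapy.Request('https://example.com/api/packages?page=1', callback=self.parse_list)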
6 changes: 3 additions & 3 deletions kingfisher_scrapy/base_spiders/links_spider.py
@@ -1,6 +1,6 @@
from jsonpointer import resolve_pointer

from kingfisher_scrapy.base_spiders.simple_spider import SimpleSpider
from kingfisher_scrapy.base_spiders import SimpleSpider
from kingfisher_scrapy.exceptions import MissingNextLinkError
from kingfisher_scrapy.util import handle_http_error

@@ -13,7 +13,7 @@ class LinksSpider(SimpleSpider):
#. Inherit from ``LinksSpider``
#. Set a ``data_type`` class attribute to the data type of the API responses
#. Set a ``formatter`` class attribute to set the file name like in
:meth:`~kingfisher_scrapy.base_spiders.base_spider.BaseSpider.build_request`
:meth:`~kingfisher_scrapy.base_spiders.BaseSpider.build_request`
#. Write a ``start_requests`` method to request the first page of API results
#. Optionally, set a ``next_pointer`` class attribute to the JSON Pointer for the next link (default "/links/next")
@@ -23,7 +23,7 @@ class LinksSpider(SimpleSpider):
import scrapy
from kingfisher_scrapy.base_spiders.links_spider import LinksSpider
from kingfisher_scrapy.base_spiders import LinksSpider
class MySpider(LinksSpider):
name = 'my_spider'
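A hedged completion of the truncated example, following the numbered steps above. LinksSpider, data_type, formatter and the /links/next default appear in the hunks; the data_type value, the formatter parameter, the URL and the file_name request meta are assumptions.

import scrapy

from kingfisher_scrapy.base_spiders import LinksSpider
from kingfisher_scrapy.util import parameters


class MySpider(LinksSpider):
    name = 'my_spider'
    data_type = 'release_package'  # assumed value
    formatter = staticmethod(parameters('page'))  # assumed query string parameter

    def start_requests(self):
        # Request the first page of API results; LinksSpider then follows the /links/next pointer.
        yield scrapy.Request('https://example.com/api/packages.json', meta={'file_name': 'page-1.json'})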
4 changes: 2 additions & 2 deletions kingfisher_scrapy/base_spiders/periodic_spider.py
@@ -1,5 +1,5 @@
from kingfisher_scrapy import util
from kingfisher_scrapy.base_spiders.simple_spider import SimpleSpider
from kingfisher_scrapy.base_spiders import SimpleSpider


class PeriodicSpider(SimpleSpider):
@@ -17,7 +17,7 @@ class PeriodicSpider(SimpleSpider):
pattern = 'http://comprasestatales.gub.uy/ocds/rss/{0.year:d}/{0.month:02d}'
#. Set a ``formatter`` class attribute to set the file name like in
:meth:`~kingfisher_scrapy.base_spiders.base_spider.BaseSpider.build_request`
:meth:`~kingfisher_scrapy.base_spiders.BaseSpider.build_request`
#. Set a ``default_from_date`` class attribute to a year ("YYYY") or year-month ("YYYY-MM")
#. If the source stopped publishing, set a ``default_until_date`` class attribute to a year or year-month
#. Optionally, set a ``start_requests_callback`` class attribute to a method's name as a string - otherwise, it
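A hedged sketch combining the attributes the docstring names: pattern, formatter and default_from_date. The data_type value, the formatter choice and the date are assumptions, and a real spider may need attributes described in the collapsed portion of the docstring.

from kingfisher_scrapy.base_spiders import PeriodicSpider
from kingfisher_scrapy.util import components


class MySpider(PeriodicSpider):
    name = 'my_spider'
    data_type = 'release_package'  # assumed value
    # One URL per period, built by formatting the pattern with a date, as in the docstring above.
    pattern = 'http://comprasestatales.gub.uy/ocds/rss/{0.year:d}/{0.month:02d}'
    formatter = staticmethod(components(-1))  # assumed; pick a formatter that yields a unique file name per URL
    default_from_date = '2017-11'  # assumed year-month value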
4 changes: 2 additions & 2 deletions kingfisher_scrapy/base_spiders/simple_spider.py
@@ -1,4 +1,4 @@
from kingfisher_scrapy.base_spiders.base_spider import BaseSpider
from kingfisher_scrapy.base_spiders import BaseSpider
from kingfisher_scrapy.util import handle_http_error


@@ -14,7 +14,7 @@ class SimpleSpider(BaseSpider):
import scrapy
from kingfisher_scrapy.base_spiders.simple_spider import SimpleSpider
from kingfisher_scrapy.base_spiders import SimpleSpider
class MySpider(SimpleSpider):
name = 'my_spider'
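A hedged completion of the truncated example. SimpleSpider and the MySpider/name lines are visible above; the data_type value, the URL and the file_name request meta are assumptions.

import scrapy

from kingfisher_scrapy.base_spiders import SimpleSpider


class MySpider(SimpleSpider):
    name = 'my_spider'
    data_type = 'release_package'  # assumed value

    def start_requests(self):
        # SimpleSpider's parse() handles the response; no parse method is needed here.
        yield scrapy.Request('https://example.com/api/packages.json', meta={'file_name': 'all.json'})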
2 changes: 1 addition & 1 deletion kingfisher_scrapy/commands/checkall.py
@@ -8,7 +8,7 @@
from scrapy.utils.misc import walk_modules
from scrapy.utils.spider import iter_spider_classes

from kingfisher_scrapy.base_spiders.periodic_spider import PeriodicSpider
from kingfisher_scrapy.base_spiders import PeriodicSpider

logger = logging.getLogger(__name__)

2 changes: 1 addition & 1 deletion kingfisher_scrapy/spiders/afghanistan_packages_base.py
@@ -2,7 +2,7 @@

import scrapy

from kingfisher_scrapy.base_spiders.simple_spider import SimpleSpider
from kingfisher_scrapy.base_spiders import SimpleSpider
from kingfisher_scrapy.util import components, handle_http_error


2 changes: 1 addition & 1 deletion kingfisher_scrapy/spiders/afghanistan_records.py
@@ -1,6 +1,6 @@
import scrapy

from kingfisher_scrapy.base_spiders.simple_spider import SimpleSpider
from kingfisher_scrapy.base_spiders import SimpleSpider
from kingfisher_scrapy.util import components, handle_http_error


2 changes: 1 addition & 1 deletion kingfisher_scrapy/spiders/afghanistan_releases.py
@@ -2,7 +2,7 @@

import scrapy

from kingfisher_scrapy.base_spiders.simple_spider import SimpleSpider
from kingfisher_scrapy.base_spiders import SimpleSpider
from kingfisher_scrapy.util import components, handle_http_error


2 changes: 1 addition & 1 deletion kingfisher_scrapy/spiders/argentina_buenos_aires.py
@@ -1,6 +1,6 @@
import scrapy

from kingfisher_scrapy.base_spiders.simple_spider import SimpleSpider
from kingfisher_scrapy.base_spiders import SimpleSpider
from kingfisher_scrapy.util import components, handle_http_error


2 changes: 1 addition & 1 deletion kingfisher_scrapy/spiders/argentina_vialidad.py
@@ -1,6 +1,6 @@
import scrapy

from kingfisher_scrapy.base_spiders.simple_spider import SimpleSpider
from kingfisher_scrapy.base_spiders import SimpleSpider


class ArgentinaVialidad(SimpleSpider):
2 changes: 1 addition & 1 deletion kingfisher_scrapy/spiders/armenia.py
@@ -1,6 +1,6 @@
import scrapy

from kingfisher_scrapy.base_spiders.links_spider import LinksSpider
from kingfisher_scrapy.base_spiders import LinksSpider
from kingfisher_scrapy.util import get_parameter_value, parameters, replace_parameters

MILLISECONDS_PER_DAY = 86400000
2 changes: 1 addition & 1 deletion kingfisher_scrapy/spiders/australia.py
@@ -1,6 +1,6 @@
import scrapy

from kingfisher_scrapy.base_spiders.links_spider import LinksSpider
from kingfisher_scrapy.base_spiders import LinksSpider
from kingfisher_scrapy.util import parameters


2 changes: 1 addition & 1 deletion kingfisher_scrapy/spiders/australia_new_south_wales.py
@@ -1,4 +1,4 @@
from kingfisher_scrapy.base_spiders.simple_spider import SimpleSpider
from kingfisher_scrapy.base_spiders import SimpleSpider
from kingfisher_scrapy.util import handle_http_error, parameters


2 changes: 1 addition & 1 deletion kingfisher_scrapy/spiders/bolivia_agetic.py
@@ -1,6 +1,6 @@
import scrapy

from kingfisher_scrapy.base_spiders.simple_spider import SimpleSpider
from kingfisher_scrapy.base_spiders import SimpleSpider
from kingfisher_scrapy.util import components, handle_http_error


2 changes: 1 addition & 1 deletion kingfisher_scrapy/spiders/canada_buyandsell.py
@@ -1,4 +1,4 @@
from kingfisher_scrapy.base_spiders.simple_spider import SimpleSpider
from kingfisher_scrapy.base_spiders import SimpleSpider
from kingfisher_scrapy.util import components


2 changes: 1 addition & 1 deletion kingfisher_scrapy/spiders/canada_montreal.py
@@ -1,6 +1,6 @@
import scrapy

from kingfisher_scrapy.base_spiders.index_spider import IndexSpider
from kingfisher_scrapy.base_spiders import IndexSpider
from kingfisher_scrapy.util import browser_user_agent


2 changes: 1 addition & 1 deletion kingfisher_scrapy/spiders/canada_quebec.py
@@ -1,6 +1,6 @@
import scrapy

from kingfisher_scrapy.base_spiders.simple_spider import SimpleSpider
from kingfisher_scrapy.base_spiders import SimpleSpider
from kingfisher_scrapy.util import components, handle_http_error


4 changes: 2 additions & 2 deletions kingfisher_scrapy/spiders/chile_compra_api_base.py
@@ -1,7 +1,7 @@
from datetime import date

from kingfisher_scrapy.base_spiders.index_spider import IndexSpider
from kingfisher_scrapy.base_spiders.periodic_spider import PeriodicSpider
from kingfisher_scrapy.base_spiders import IndexSpider
from kingfisher_scrapy.base_spiders import PeriodicSpider
from kingfisher_scrapy.exceptions import SpiderArgumentError
from kingfisher_scrapy.items import FileError
from kingfisher_scrapy.util import components, handle_http_error
4 changes: 2 additions & 2 deletions kingfisher_scrapy/spiders/chile_compra_bulk.py
@@ -1,7 +1,7 @@
import json

from kingfisher_scrapy.base_spiders.compressed_file_spider import CompressedFileSpider
from kingfisher_scrapy.base_spiders.periodic_spider import PeriodicSpider
from kingfisher_scrapy.base_spiders import CompressedFileSpider
from kingfisher_scrapy.base_spiders import PeriodicSpider
from kingfisher_scrapy.items import FileError
from kingfisher_scrapy.util import components

2 changes: 1 addition & 1 deletion kingfisher_scrapy/spiders/colombia_ani_records.py
@@ -1,6 +1,6 @@
import scrapy

from kingfisher_scrapy.base_spiders.simple_spider import SimpleSpider
from kingfisher_scrapy.base_spiders import SimpleSpider


class ColombiaANIRecords(SimpleSpider):
2 changes: 1 addition & 1 deletion kingfisher_scrapy/spiders/colombia_api.py
@@ -1,6 +1,6 @@
import scrapy

from kingfisher_scrapy.base_spiders.links_spider import LinksSpider
from kingfisher_scrapy.base_spiders import LinksSpider
from kingfisher_scrapy.util import parameters


2 changes: 1 addition & 1 deletion kingfisher_scrapy/spiders/colombia_bulk.py
@@ -1,6 +1,6 @@
import scrapy

from kingfisher_scrapy.base_spiders.compressed_file_spider import CompressedFileSpider
from kingfisher_scrapy.base_spiders import CompressedFileSpider
from kingfisher_scrapy.exceptions import SpiderArgumentError
from kingfisher_scrapy.util import components, handle_http_error

@@ -1,6 +1,6 @@
import scrapy

from kingfisher_scrapy.base_spiders.simple_spider import SimpleSpider
from kingfisher_scrapy.base_spiders import SimpleSpider
from kingfisher_scrapy.util import components, handle_http_error


@@ -1,6 +1,6 @@
import scrapy

from kingfisher_scrapy.base_spiders.compressed_file_spider import CompressedFileSpider
from kingfisher_scrapy.base_spiders import CompressedFileSpider
from kingfisher_scrapy.util import components, handle_http_error


2 changes: 1 addition & 1 deletion kingfisher_scrapy/spiders/croatia.py
@@ -1,6 +1,6 @@
import scrapy

from kingfisher_scrapy.base_spiders.compressed_file_spider import CompressedFileSpider
from kingfisher_scrapy.base_spiders import CompressedFileSpider
from kingfisher_scrapy.util import browser_user_agent, handle_http_error


2 changes: 1 addition & 1 deletion kingfisher_scrapy/spiders/digiwhist_base.py
@@ -3,7 +3,7 @@

import scrapy

from kingfisher_scrapy.base_spiders.base_spider import BaseSpider
from kingfisher_scrapy.base_spiders import BaseSpider
from kingfisher_scrapy.util import handle_http_error


2 changes: 1 addition & 1 deletion kingfisher_scrapy/spiders/dominican_republic_api.py
@@ -1,6 +1,6 @@
import scrapy

from kingfisher_scrapy.base_spiders.links_spider import LinksSpider
from kingfisher_scrapy.base_spiders import LinksSpider
from kingfisher_scrapy.util import parameters


2 changes: 1 addition & 1 deletion kingfisher_scrapy/spiders/dominican_republic_bulk.py
@@ -2,7 +2,7 @@

import scrapy

from kingfisher_scrapy.base_spiders.compressed_file_spider import CompressedFileSpider
from kingfisher_scrapy.base_spiders import CompressedFileSpider
from kingfisher_scrapy.util import components, handle_http_error


2 changes: 1 addition & 1 deletion kingfisher_scrapy/spiders/ecuador_emergency.py
@@ -1,6 +1,6 @@
import scrapy

from kingfisher_scrapy.base_spiders.simple_spider import SimpleSpider
from kingfisher_scrapy.base_spiders import SimpleSpider
from kingfisher_scrapy.util import components, handle_http_error


4 changes: 2 additions & 2 deletions kingfisher_scrapy/spiders/ecuador_sercop.py
@@ -1,5 +1,5 @@
from kingfisher_scrapy.base_spiders.compressed_file_spider import CompressedFileSpider
from kingfisher_scrapy.base_spiders.periodic_spider import PeriodicSpider
from kingfisher_scrapy.base_spiders import CompressedFileSpider
from kingfisher_scrapy.base_spiders import PeriodicSpider
from kingfisher_scrapy.util import components


2 changes: 1 addition & 1 deletion kingfisher_scrapy/spiders/europe_dynamic_base.py
@@ -3,7 +3,7 @@

import scrapy

from kingfisher_scrapy.base_spiders.compressed_file_spider import CompressedFileSpider
from kingfisher_scrapy.base_spiders import CompressedFileSpider
from kingfisher_scrapy.util import components, handle_http_error, join


2 changes: 1 addition & 1 deletion kingfisher_scrapy/spiders/france.py
@@ -1,6 +1,6 @@
import scrapy

from kingfisher_scrapy.base_spiders.big_file_spider import BigFileSpider
from kingfisher_scrapy.base_spiders import BigFileSpider
from kingfisher_scrapy.util import components, handle_http_error


2 changes: 1 addition & 1 deletion kingfisher_scrapy/spiders/georgia_opendata.py
@@ -1,6 +1,6 @@
import scrapy

from kingfisher_scrapy.base_spiders.compressed_file_spider import CompressedFileSpider
from kingfisher_scrapy.base_spiders import CompressedFileSpider


class GeorgiaOpendata(CompressedFileSpider):
2 changes: 1 addition & 1 deletion kingfisher_scrapy/spiders/georgia_records.py
@@ -1,6 +1,6 @@
import scrapy

from kingfisher_scrapy.base_spiders.links_spider import LinksSpider
from kingfisher_scrapy.base_spiders import LinksSpider
from kingfisher_scrapy.util import parameters


2 changes: 1 addition & 1 deletion kingfisher_scrapy/spiders/georgia_releases.py
@@ -1,6 +1,6 @@
import scrapy

from kingfisher_scrapy.base_spiders.links_spider import LinksSpider
from kingfisher_scrapy.base_spiders import LinksSpider
from kingfisher_scrapy.util import parameters


2 changes: 1 addition & 1 deletion kingfisher_scrapy/spiders/honduras_cost.py
@@ -1,6 +1,6 @@
import scrapy

from kingfisher_scrapy.base_spiders.simple_spider import SimpleSpider
from kingfisher_scrapy.base_spiders import SimpleSpider


class HondurasCoST(SimpleSpider):
