Migrated to Black (#16)
* black configuration

* py39 not supported

* black formatted
exxamalte committed Nov 22, 2020
1 parent 98534a4 commit 1dd959d
Showing 22 changed files with 912 additions and 731 deletions.
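Black's changes across these files are mechanical: string literals are normalised to double quotes, hand-wrapped signatures and calls are re-wrapped to Black's default 88-character line length with one argument per line and trailing commas, and backslash line continuations are replaced by parenthesised expressions. A reformat like this is typically produced by running Black once over the source tree, for example:

    black aio_georss_client tests

(Illustrative invocation only; the "tests" path is an assumption and the exact command used for this commit is not recorded here.)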
6 changes: 6 additions & 0 deletions .pre-commit-config.yaml
@@ -0,0 +1,6 @@
repos:
  - repo: https://github.com/psf/black
    rev: 20.8b1
    hooks:
      - id: black
        language_version: python3
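
With this hook installed, Black runs automatically against staged Python files on every commit. The standard pre-commit workflow is (shown for context; these commands are not part of this diff):

    pre-commit install            # register the git hook in the local clone
    pre-commit run --all-files    # one-off run across the entire repository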
110 changes: 55 additions & 55 deletions aio_georss_client/consts.py
@@ -1,63 +1,63 @@
"""Constants for feeds and feed entries."""

ATTR_ATTRIBUTION = 'attribution'
CUSTOM_ATTRIBUTE = 'custom_attribute'
ATTR_ATTRIBUTION = "attribution"
CUSTOM_ATTRIBUTE = "custom_attribute"

DEFAULT_REQUEST_TIMEOUT = 10

UPDATE_OK = 'OK'
UPDATE_OK_NO_DATA = 'OK_NO_DATA'
UPDATE_ERROR = 'ERROR'
UPDATE_OK = "OK"
UPDATE_OK_NO_DATA = "OK_NO_DATA"
UPDATE_ERROR = "ERROR"

XML_ATTR_HREF = '@href'
XML_ATTR_TERM = '@term'
XML_ATTR_HREF = "@href"
XML_ATTR_TERM = "@term"

XML_CDATA = '#text'
XML_CDATA = "#text"

XML_TAG_AUTHOR = 'author'
XML_TAG_CATEGORY = 'category'
XML_TAG_CHANNEL = 'channel'
XML_TAG_CONTENT = 'content'
XML_TAG_CONTRIBUTOR = 'contributor'
XML_TAG_COPYRIGHT = 'copyright'
XML_TAG_DC_DATE = 'dc:date'
XML_TAG_DESCRIPTION = 'description'
XML_TAG_DOCS = 'docs'
XML_TAG_ENTRY = 'entry'
XML_TAG_FEED = 'feed'
XML_TAG_GDACS_BBOX = 'gdacs:bbox'
XML_TAG_GENERATOR = 'generator'
XML_TAG_GEO_LAT = 'geo:lat'
XML_TAG_GEO_LONG = 'geo:long'
XML_TAG_GEO_POINT = 'geo:Point'
XML_TAG_GEORSS_POINT = 'georss:point'
XML_TAG_GEORSS_POLYGON = 'georss:polygon'
XML_TAG_GEORSS_WHERE = 'georss:where'
XML_TAG_GML_EXTERIOR = 'gml:exterior'
XML_TAG_GML_LINEAR_RING = 'gml:LinearRing'
XML_TAG_GML_POINT = 'gml:Point'
XML_TAG_GML_POLYGON = 'gml:Polygon'
XML_TAG_GML_POS = 'gml:pos'
XML_TAG_GML_POS_LIST = 'gml:posList'
XML_TAG_GUID = 'guid'
XML_TAG_HEIGHT = 'height'
XML_TAG_ID = 'id'
XML_TAG_IMAGE = 'image'
XML_TAG_ITEM = 'item'
XML_TAG_LANGUAGE = 'language'
XML_TAG_LAST_BUILD_DATE = 'lastBuildDate'
XML_TAG_LINK = 'link'
XML_TAG_MANAGING_EDITOR = 'managingEditor'
XML_TAG_NAME = 'name'
XML_TAG_PUB_DATE = 'pubDate'
XML_TAG_PUBLISHED = 'published'
XML_TAG_RIGHTS = 'rights'
XML_TAG_RSS = 'rss'
XML_TAG_SOURCE = 'source'
XML_TAG_SUBTITLE = 'subtitle'
XML_TAG_SUMMARY = 'summary'
XML_TAG_TITLE = 'title'
XML_TAG_TTL = 'ttl'
XML_TAG_UPDATED = 'updated'
XML_TAG_URL = 'url'
XML_TAG_WIDTH = 'width'
XML_TAG_AUTHOR = "author"
XML_TAG_CATEGORY = "category"
XML_TAG_CHANNEL = "channel"
XML_TAG_CONTENT = "content"
XML_TAG_CONTRIBUTOR = "contributor"
XML_TAG_COPYRIGHT = "copyright"
XML_TAG_DC_DATE = "dc:date"
XML_TAG_DESCRIPTION = "description"
XML_TAG_DOCS = "docs"
XML_TAG_ENTRY = "entry"
XML_TAG_FEED = "feed"
XML_TAG_GDACS_BBOX = "gdacs:bbox"
XML_TAG_GENERATOR = "generator"
XML_TAG_GEO_LAT = "geo:lat"
XML_TAG_GEO_LONG = "geo:long"
XML_TAG_GEO_POINT = "geo:Point"
XML_TAG_GEORSS_POINT = "georss:point"
XML_TAG_GEORSS_POLYGON = "georss:polygon"
XML_TAG_GEORSS_WHERE = "georss:where"
XML_TAG_GML_EXTERIOR = "gml:exterior"
XML_TAG_GML_LINEAR_RING = "gml:LinearRing"
XML_TAG_GML_POINT = "gml:Point"
XML_TAG_GML_POLYGON = "gml:Polygon"
XML_TAG_GML_POS = "gml:pos"
XML_TAG_GML_POS_LIST = "gml:posList"
XML_TAG_GUID = "guid"
XML_TAG_HEIGHT = "height"
XML_TAG_ID = "id"
XML_TAG_IMAGE = "image"
XML_TAG_ITEM = "item"
XML_TAG_LANGUAGE = "language"
XML_TAG_LAST_BUILD_DATE = "lastBuildDate"
XML_TAG_LINK = "link"
XML_TAG_MANAGING_EDITOR = "managingEditor"
XML_TAG_NAME = "name"
XML_TAG_PUB_DATE = "pubDate"
XML_TAG_PUBLISHED = "published"
XML_TAG_RIGHTS = "rights"
XML_TAG_RSS = "rss"
XML_TAG_SOURCE = "source"
XML_TAG_SUBTITLE = "subtitle"
XML_TAG_SUMMARY = "summary"
XML_TAG_TITLE = "title"
XML_TAG_TTL = "ttl"
XML_TAG_UPDATED = "updated"
XML_TAG_URL = "url"
XML_TAG_WIDTH = "width"
119 changes: 72 additions & 47 deletions aio_georss_client/feed.py
@@ -9,8 +9,13 @@
import aiohttp
from aiohttp import ClientSession, client_exceptions

from .consts import (ATTR_ATTRIBUTION, DEFAULT_REQUEST_TIMEOUT, UPDATE_ERROR,
UPDATE_OK, UPDATE_OK_NO_DATA)
from .consts import (
ATTR_ATTRIBUTION,
DEFAULT_REQUEST_TIMEOUT,
UPDATE_ERROR,
UPDATE_OK,
UPDATE_OK_NO_DATA,
)
from .feed_entry import FeedEntry
from .xml_parser import Feed, XmlParser
from .xml_parser.feed_item import FeedItem
@@ -23,12 +28,14 @@
class GeoRssFeed(Generic[T_FEED_ENTRY], ABC):
"""GeoRSS feed base class."""

def __init__(self,
websession: ClientSession,
home_coordinates: Tuple[float, float],
url: str,
filter_radius: float = None,
filter_categories: List[str] = None):
def __init__(
self,
websession: ClientSession,
home_coordinates: Tuple[float, float],
url: str,
filter_radius: float = None,
filter_categories: List[str] = None,
):
"""Initialise this service."""
self._websession = websession
self._home_coordinates = home_coordinates
@@ -39,15 +46,21 @@ def __init__(self,

def __repr__(self):
"""Return string representation of this feed."""
return '<{}(home={}, url={}, radius={}, categories={})>'.format(
self.__class__.__name__, self._home_coordinates, self._url,
self._filter_radius, self._filter_categories)
return "<{}(home={}, url={}, radius={}, categories={})>".format(
self.__class__.__name__,
self._home_coordinates,
self._url,
self._filter_radius,
self._filter_categories,
)

@abstractmethod
def _new_entry(self,
home_coordinates: Tuple[float, float],
rss_entry: FeedItem,
global_data: Dict) -> T_FEED_ENTRY:
def _new_entry(
self,
home_coordinates: Tuple[float, float],
rss_entry: FeedItem,
global_data: Dict,
) -> T_FEED_ENTRY:
"""Generate a new entry."""
pass

@@ -68,11 +81,11 @@ async def update(self) -> Tuple[str, Optional[List[T_FEED_ENTRY]]]:
global_data = self._extract_from_feed(rss_data)
# Extract data from feed entries.
for rss_entry in rss_data.entries:
entries.append(self._new_entry(self._home_coordinates,
rss_entry, global_data))
entries.append(
self._new_entry(self._home_coordinates, rss_entry, global_data)
)
filtered_entries = self._filter_entries(entries)
self._last_timestamp = self._extract_last_timestamp(
filtered_entries)
self._last_timestamp = self._extract_last_timestamp(filtered_entries)
return UPDATE_OK, filtered_entries
else:
# Should not happen.
@@ -85,17 +98,14 @@ async def update(self) -> Tuple[str, Optional[List[T_FEED_ENTRY]]]:
self._last_timestamp = None
return UPDATE_ERROR, None

async def _fetch(self,
method: str = "GET",
headers=None,
params=None) -> Tuple[str, Optional[Feed]]:
async def _fetch(
self, method: str = "GET", headers=None, params=None
) -> Tuple[str, Optional[Feed]]:
"""Fetch GeoRSS data from external source."""
try:
timeout = aiohttp.ClientTimeout(
total=self._client_session_timeout())
timeout = aiohttp.ClientTimeout(total=self._client_session_timeout())
async with self._websession.request(
method, self._url, headers=headers, params=params,
timeout=timeout
method, self._url, headers=headers, params=params, timeout=timeout
) as response:
try:
response.raise_for_status()
@@ -106,17 +116,21 @@ async def _fetch(self,
self.feed_data = feed_data
return UPDATE_OK, feed_data
except client_exceptions.ClientError as client_error:
_LOGGER.warning("Fetching data from %s failed with %s",
self._url, client_error)
_LOGGER.warning(
"Fetching data from %s failed with %s", self._url, client_error
)
return UPDATE_ERROR, None
except client_exceptions.ClientError as client_error:
_LOGGER.warning("Requesting data from %s failed with "
"client error: %s",
self._url, client_error)
_LOGGER.warning(
"Requesting data from %s failed with " "client error: %s",
self._url,
client_error,
)
return UPDATE_ERROR, None
except asyncio.TimeoutError:
_LOGGER.warning("Requesting data from %s failed with "
"timeout error", self._url)
_LOGGER.warning(
"Requesting data from %s failed with " "timeout error", self._url
)
return UPDATE_ERROR, None

async def _read_response(self, response):
@@ -125,7 +139,7 @@ async def _read_response(self, response):
raw_response = await response.read()
_LOGGER.debug("Response encoding %s", response.get_encoding())
if raw_response.startswith(codecs.BOM_UTF8):
return await response.text('utf-8-sig')
return await response.text("utf-8-sig")
return await response.text()
return None

@@ -135,22 +149,31 @@ def _filter_entries(self, entries: List[T_FEED_ENTRY]):
_LOGGER.debug("Entries before filtering %s", filtered_entries)
# Always remove entries without geometry
filtered_entries = list(
filter(lambda entry:
entry.geometries is not None and len(entry.geometries) >= 1,
filtered_entries))
filter(
lambda entry: entry.geometries is not None
and len(entry.geometries) >= 1,
filtered_entries,
)
)
# Filter by distance.
if self._filter_radius:
filtered_entries = list(
filter(lambda entry:
entry.distance_to_home <= self._filter_radius,
filtered_entries))
filter(
lambda entry: entry.distance_to_home <= self._filter_radius,
filtered_entries,
)
)
# Filter by category.
if self._filter_categories:
filtered_entries = list(
filter(lambda entry:
len({entry.category}.intersection(
self._filter_categories)) > 0,
filtered_entries))
filter(
lambda entry: len(
{entry.category}.intersection(self._filter_categories)
)
> 0,
filtered_entries,
)
)
_LOGGER.debug("Entries after filtering %s", filtered_entries)
return filtered_entries

@@ -163,12 +186,14 @@ def _extract_from_feed(self, feed: Feed) -> Dict:
return global_data

def _extract_last_timestamp(
self, feed_entries: List[T_FEED_ENTRY]) -> Optional[datetime]:
self, feed_entries: List[T_FEED_ENTRY]
) -> Optional[datetime]:
"""Determine latest (newest) entry from the filtered feed."""
if feed_entries:
dates = sorted(
[entry.published for entry in feed_entries if entry.published],
reverse=True)
reverse=True,
)
if dates:
last_timestamp = dates[0]
_LOGGER.debug("Last timestamp: %s", last_timestamp)
30 changes: 17 additions & 13 deletions aio_georss_client/feed_entry.py
@@ -18,16 +18,14 @@
class FeedEntry(ABC):
"""Feed entry base class."""

def __init__(self,
home_coordinates: Tuple[float, float],
rss_entry: FeedItem):
def __init__(self, home_coordinates: Tuple[float, float], rss_entry: FeedItem):
"""Initialise this feed entry."""
self._home_coordinates = home_coordinates
self._rss_entry = rss_entry

def __repr__(self):
"""Return string representation of this entry."""
return '<{}(id={})>'.format(self.__class__.__name__, self.external_id)
return "<{}(id={})>".format(self.__class__.__name__, self.external_id)

@property
def features(self) -> List[Type[Geometry]]:
@@ -39,8 +37,9 @@ def geometries(self) -> Optional[List[Geometry]]:
"""Return all geometries of this entry."""
if self._rss_entry:
# Return all geometries that are of type defined in features.
return list(filter(lambda x: type(x) in self.features,
self._rss_entry.geometries))
return list(
filter(lambda x: type(x) in self.features, self._rss_entry.geometries)
)
return None

@property
@@ -95,8 +94,11 @@ def _search_in_title(self, regexp):
@property
def category(self) -> Optional[str]:
"""Return the category of this entry."""
if self._rss_entry and self._rss_entry.category \
and isinstance(self._rss_entry.category, list):
if (
self._rss_entry
and self._rss_entry.category
and isinstance(self._rss_entry.category, list)
):
# To keep this simple, just return the first category.
return self._rss_entry.category[0]
return None
@@ -115,9 +117,12 @@ def distance_to_home(self) -> float:
distance = float("inf")
if self.geometries and len(self.geometries) >= 1:
for geometry in self.geometries:
distance = min(distance,
GeoRssDistanceHelper.distance_to_geometry(
self._home_coordinates, geometry))
distance = min(
distance,
GeoRssDistanceHelper.distance_to_geometry(
self._home_coordinates, geometry
),
)
return distance

@property
@@ -152,5 +157,4 @@ def _search_in_description(self, regexp):
@staticmethod
def _string2boolean(value: str) -> bool:
"""Convert value to boolean."""
return isinstance(value, str) \
and value.strip().lower() in {'true', 'yes', '1'}
return isinstance(value, str) and value.strip().lower() in {"true", "yes", "1"}
