refactor to reduce code redundancy; more typehints
granitosaurus committed Mar 29, 2018
1 parent e2d7741 commit fbc24b0
Showing 1 changed file with 20 additions and 17 deletions.
ggmt/matchticker.py: 20 additions & 17 deletions
@@ -5,12 +5,12 @@
 from urllib.parse import urljoin
 
 import requests
-from parsel import Selector
+from parsel import Selector, SelectorList
 
 from ggmt import Match
 
 
-def parse_time(text):
+def time_to_seconds(text: str) -> int:
     """
     converts text time to seconds
     :returns: seconds integer
@@ -26,7 +26,7 @@ def parse_time(text):
     return seconds
 
 
-def clean_stream_url(url):
+def clean_stream_url(url: str) -> str:
     """
     Converts various stream embed urls to normal channel urls.
     :param url: dirty embed url
@@ -75,7 +75,7 @@ def __init__(self, game):
     def download_matches(self, crawl_stream: bool = True) -> List[Match]:
         """
         Downloads live and upcoming matches.
-        :return: list of eticker.Match objects
+        :return: list of Match objects
         """
         resp = self.session.get(self.game_url)
         if resp.status_code != 200:
@@ -89,7 +89,7 @@ def download_matches(self, crawl_stream: bool = True) -> List[Match]:
     def download_history(self, crawl_stream: bool = True) -> List[Match]:
         """
         Downloads recent matches.
-        :return: list of eticker.Match objects
+        :return: list of Match objects
        """
         resp = self.session.get('{}/gosubet'.format(self.game_url))
         if resp.status_code != 200:
@@ -107,7 +107,7 @@ def _find_match(self, sel: Selector) -> Match:
         item['id'] = (re.findall('matches/(\d+)', item['url']) or [None])[0]
         item['game'] = next((g for g in self.games if g in item['url'].lower()))
         item['time'] = xpath("td[@class='status']/span/text()")
-        item['time_secs'] = parse_time(item['time'])
+        item['time_secs'] = time_to_seconds(item['time'])
         item['timestamp'] = int((datetime.now() + timedelta(item['time_secs'])).timestamp())
         item['t1'] = xpath(".//span[contains(@class,'opp1')]/span/text()")
         item['t1_country'] = xpath(".//span[contains(@class,'opp1')]/span[contains(@class,'flag')]/@title")
@@ -137,23 +137,26 @@ def update_match_streams(self, matches: List[Match]) -> List[Match]:
             updated.append(item)
         return updated
 
-    def find_matches(self, sel: Selector) -> Generator[Match, None, None]:
+    def _find_matches(self, sel: SelectorList):
         """
-        Generator to find live and upcoming matches in parsel.Selector object
-        :returns: yields eticker.Match objects
+        Base match finder method
+        :param sel: html Selector of match region
+        :return: Generator Matches
        """
-        matches = sel.xpath("//table[@id='gb-matches']//tr")
-        for match in matches:
+        for match in sel:
             item = self._find_match(match)
             yield item
 
+    def find_matches(self, sel: Selector) -> Generator[Match, None, None]:
+        """
+        Generator to find live and upcoming matches in parsel.Selector object
+        :returns: Generator for Match objects
+        """
+        yield from self._find_matches(sel.xpath("//table[@id='gb-matches']//tr"))
+
     def find_history(self, sel: Selector) -> Generator[Match, None, None]:
         """
         Generator to find recent matches in parsel.Selector object
-        :returns: yields eticker.Match objects
+        :returns: Generator for Match objects
         """
-        matches = sel.xpath("//h2[contains(text(),'Recent')]/..//tr")
-        for match in matches:
-            item = self._find_match(match)
-            yield item
-
+        yield from self._find_matches(sel.xpath("//h2[contains(text(),'Recent')]/..//tr"))
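The core of the refactor above is that find_matches() and find_history() no longer duplicate the same parsing loop: each now only selects its own table rows and hands them to the shared _find_matches() generator via "yield from". Below is a minimal, self-contained sketch of that delegation pattern; DemoTicker and its string rows are illustrative stand-ins, not part of ggmt, which operates on parsel SelectorList rows and yields Match objects.

from typing import Generator, Iterable


class DemoTicker:
    """Toy stand-in for the shared-generator refactor applied in this commit."""

    def _find_items(self, rows: Iterable[str]) -> Generator[str, None, None]:
        # Shared base finder: parse each pre-selected row and yield a result.
        for row in rows:
            yield row.split(":", 1)[1].strip()

    def find_matches(self, rows: Iterable[str]) -> Generator[str, None, None]:
        # Public finder: select its own rows, then delegate to the base finder.
        yield from self._find_items(r for r in rows if r.startswith("upcoming:"))

    def find_history(self, rows: Iterable[str]) -> Generator[str, None, None]:
        # Second public finder reusing the same base logic instead of duplicating it.
        yield from self._find_items(r for r in rows if r.startswith("recent:"))


rows = ["upcoming: NaVi vs OG", "recent: Liquid vs Secret"]
print(list(DemoTicker().find_matches(rows)))   # ['NaVi vs OG']
print(list(DemoTicker().find_history(rows)))   # ['Liquid vs Secret']

In the real module only the row-selecting XPath differs between the two public generators, which is exactly the redundancy the commit message says it removes.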
