Improve proper search by only getting cached propers and disabling search in providers
fernandog committed Jul 18, 2016
1 parent cf70721 commit bbb90e8
Showing 2 changed files with 8 additions and 52 deletions.
34 changes: 3 additions & 31 deletions sickbeard/properFinder.py
@@ -116,43 +116,15 @@ def _get_proper_results(self): # pylint: disable=too-many-locals, too-many-branches
         for cur_provider in providers:
             threading.currentThread().name = '{thread} :: [{provider}]'.format(thread=original_thread_name, provider=cur_provider.name)

-            logger.log('Searching for any new PROPER releases from {provider}'.format
+            logger.log('Searching cache for any PROPER releases from {provider}'.format
                        (provider=cur_provider.name))

             try:
                 cur_propers = cur_provider.find_propers(recently_aired)
-            except AuthException as e:
-                logger.log('Authentication error: {error}'.format
-                           (error=ex(e)), logger.DEBUG)
-                continue
-            except (SocketTimeout) as e:
-                logger.log('Socket time out while searching for propers in {provider}, skipping: {error}'.format
-                           (provider=cur_provider.name, error=ex(e)), logger.DEBUG)
-                continue
-            except (requests_exceptions.HTTPError, requests_exceptions.TooManyRedirects) as e:
-                logger.log('HTTP error while searching for propers in {provider}, skipping: {error}'.format
-                           (provider=cur_provider.name, error=ex(e)), logger.DEBUG)
-                continue
-            except requests_exceptions.ConnectionError as e:
-                logger.log('Connection error while searching for propers in {provider}, skipping: {error}'.format
-                           (provider=cur_provider.name, error=ex(e)), logger.DEBUG)
-                continue
-            except requests_exceptions.Timeout as e:
-                logger.log('Connection timed out while searching for propers in {provider}, skipping: {error}'.format
-                           (provider=cur_provider.name, error=ex(e)), logger.DEBUG)
-                continue
-            except requests_exceptions.ContentDecodingError as e:
-                logger.log('Content-Encoding was gzip, but content was not compressed while searching for propers in {provider}, skipping: {error}'.format
-                           (provider=cur_provider.name, error=ex(e)), logger.DEBUG)
-                continue
             except Exception as e:
-                if 'ECONNRESET' in e or (hasattr(e, 'errno') and e.errno == errno.ECONNRESET):
-                    logger.log('Connection reset by peer while searching for propers in {provider}, skipping: {error}'.format
-                               (provider=cur_provider.name, error=ex(e)), logger.DEBUG)
-                else:
-                    logger.log('Unknown exception while searching for propers in {provider}, skipping: {error}'.format
+                logger.log('Unknown exception while fetching cached propers in {provider}, skipping: {error}'.format
                            (provider=cur_provider.name, error=ex(e)), logger.DEBUG)
-                    logger.log(traceback.format_exc(), logger.DEBUG)
+                logger.log(traceback.format_exc(), logger.DEBUG)
                 continue

             # if they haven't been added by a different provider then add the proper to the list
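
Note on this hunk: every deleted handler (AuthException, SocketTimeout, the requests exception family, the ECONNRESET check) guarded network I/O, and after this commit find_propers() only reads the provider's local cache, so a single generic handler is enough. A minimal, self-contained sketch of that cache-only pattern; the SQLite schema and the helper names fetch_cached_propers and collect_propers are illustrations for this note, not SickRage's actual API:

import logging
import sqlite3
import traceback

logger = logging.getLogger(__name__)

def fetch_cached_propers(db_path, provider_id):
    # Purely local I/O: no sockets or HTTP, hence no network exceptions.
    # provider_id is trusted internal data in this sketch, so interpolating
    # the table name is acceptable here.
    query = ("SELECT name, url, time FROM [{0}] "
             "WHERE name LIKE '%.PROPER.%' OR name LIKE '%.REPACK.%'").format(provider_id)
    with sqlite3.connect(db_path) as conn:
        conn.row_factory = sqlite3.Row
        return conn.execute(query).fetchall()

def collect_propers(db_path, provider_ids):
    propers = []
    for provider_id in provider_ids:
        try:
            propers.extend(fetch_cached_propers(db_path, provider_id))
        except Exception:
            # A cache read can still fail (missing table, corrupt DB),
            # so one generic handler with a traceback remains useful.
            logger.debug('Unknown exception while fetching cached propers in %s, '
                         'skipping:\n%s', provider_id, traceback.format_exc())
    return propers
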
26 changes: 5 additions & 21 deletions sickrage/providers/GenericProvider.py
@@ -124,27 +124,11 @@ def download_result(self, result):
         return False

     def find_propers(self, proper_candidates):
-        results = []
-
-        for proper_candidate in proper_candidates:
-            show_obj = Show.find(sickbeard.showList, int(proper_candidate[b'showid'])) if proper_candidate[b'showid'] else None
-
-            if show_obj:
-                episode_obj = show_obj.getEpisode(proper_candidate[b'season'], proper_candidate[b'episode'])
-
-                for term in self.proper_strings:
-                    search_strings = self._get_episode_search_strings(episode_obj, add_string=term)
-
-                    for item in self.search(search_strings[0], ep_obj=episode_obj):
-                        title, url = self._get_title_and_url(item)
-                        seeders, leechers = self._get_result_info(item)
-                        size = self._get_size(item)
-                        pubdate = self._get_pubdate(item)
-                        torrent_hash = self._get_hash(item)
-
-                        results.append(Proper(title, url, datetime.today(), show_obj, seeders, leechers, size, pubdate, torrent_hash))
-
-        return results
+        """Find propers."""
+
+        results = self.cache.listPropers(proper_candidates)
+        return [Proper(x[b'name'], x[b'url'], datetime.fromtimestamp(x[b'time']), self.show, x[b'seeders'],
+                       x[b'leechers'], x[b'size'], x[b'pubdate'], x[b'hash']) for x in results]

     def find_search_results(self, show, episodes, search_mode, forced_search=False,
                             download_current_quality=False, manual_search=False,
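
Note on this hunk: find_propers() now maps cached rows straight to Proper results instead of running provider searches. Two details are worth noting: the rows are keyed with byte strings (b'name', b'time', ...), and the result date comes from the row's stored Unix timestamp via datetime.fromtimestamp() rather than datetime.today(), so a proper is dated by when it was cached, not by when the search ran. A minimal sketch of that mapping, using a stand-in Proper class and hand-written rows (the real class and cache rows live in SickRage and are only assumed here):

from datetime import datetime

class Proper(object):
    # Stand-in for SickRage's result class; the argument order mirrors
    # the calls made by the new find_propers().
    def __init__(self, name, url, date, show, seeders, leechers, size, pubdate, torrent_hash):
        self.name, self.url, self.date, self.show = name, url, date, show
        self.seeders, self.leechers, self.size = seeders, leechers, size
        self.pubdate, self.hash = pubdate, torrent_hash

def propers_from_cache(rows, show):
    # The cached 'time' column is a Unix timestamp, so each Proper is
    # dated by when the release entered the cache.
    return [Proper(row[b'name'], row[b'url'], datetime.fromtimestamp(row[b'time']),
                   show, row[b'seeders'], row[b'leechers'], row[b'size'],
                   row[b'pubdate'], row[b'hash'])
            for row in rows]

# Hypothetical row for illustration only.
rows = [{b'name': 'Show.S01E01.PROPER.720p.HDTV.x264', b'url': 'http://example.com/1',
         b'time': 1468800000, b'seeders': 12, b'leechers': 3,
         b'size': 1073741824, b'pubdate': None, b'hash': None}]
print(propers_from_cache(rows, show=None)[0].date)
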
