diff --git a/sickbeard/properFinder.py b/sickbeard/properFinder.py index 45ade88f97..5a079cd7af 100644 --- a/sickbeard/properFinder.py +++ b/sickbeard/properFinder.py @@ -116,43 +116,15 @@ def _get_proper_results(self): # pylint: disable=too-many-locals, too-many-bran for cur_provider in providers: threading.currentThread().name = '{thread} :: [{provider}]'.format(thread=original_thread_name, provider=cur_provider.name) - logger.log('Searching for any new PROPER releases from {provider}'.format + logger.log('Searching cache for any PROPER releases from {provider}'.format (provider=cur_provider.name)) try: cur_propers = cur_provider.find_propers(recently_aired) - except AuthException as e: - logger.log('Authentication error: {error}'.format - (error=ex(e)), logger.DEBUG) - continue - except (SocketTimeout) as e: - logger.log('Socket time out while searching for propers in {provider}, skipping: {error}'.format - (provider=cur_provider.name, error=ex(e)), logger.DEBUG) - continue - except (requests_exceptions.HTTPError, requests_exceptions.TooManyRedirects) as e: - logger.log('HTTP error while searching for propers in {provider}, skipping: {error}'.format - (provider=cur_provider.name, error=ex(e)), logger.DEBUG) - continue - except requests_exceptions.ConnectionError as e: - logger.log('Connection error while searching for propers in {provider}, skipping: {error}'.format - (provider=cur_provider.name, error=ex(e)), logger.DEBUG) - continue - except requests_exceptions.Timeout as e: - logger.log('Connection timed out while searching for propers in {provider}, skipping: {error}'.format - (provider=cur_provider.name, error=ex(e)), logger.DEBUG) - continue - except requests_exceptions.ContentDecodingError as e: - logger.log('Content-Encoding was gzip, but content was not compressed while searching for propers in {provider}, skipping: {error}'.format - (provider=cur_provider.name, error=ex(e)), logger.DEBUG) - continue except Exception as e: - if 'ECONNRESET' in e or 
(hasattr(e, 'errno') and e.errno == errno.ECONNRESET): - logger.log('Connection reset by peer while searching for propers in {provider}, skipping: {error}'.format - (provider=cur_provider.name, error=ex(e)), logger.DEBUG) - else: - logger.log('Unknown exception while searching for propers in {provider}, skipping: {error}'.format + logger.log('Unknown exception while fetching cached propers in {provider}, skipping: {error}'.format (provider=cur_provider.name, error=ex(e)), logger.DEBUG) - logger.log(traceback.format_exc(), logger.DEBUG) + logger.log(traceback.format_exc(), logger.DEBUG) continue # if they haven't been added by a different provider than add the proper to the list diff --git a/sickrage/providers/GenericProvider.py b/sickrage/providers/GenericProvider.py index 464967c43b..4a62a89bc8 100644 --- a/sickrage/providers/GenericProvider.py +++ b/sickrage/providers/GenericProvider.py @@ -124,27 +124,11 @@ def download_result(self, result): return False def find_propers(self, proper_candidates): - results = [] - - for proper_candidate in proper_candidates: - show_obj = Show.find(sickbeard.showList, int(proper_candidate[b'showid'])) if proper_candidate[b'showid'] else None - - if show_obj: - episode_obj = show_obj.getEpisode(proper_candidate[b'season'], proper_candidate[b'episode']) - - for term in self.proper_strings: - search_strings = self._get_episode_search_strings(episode_obj, add_string=term) - - for item in self.search(search_strings[0], ep_obj=episode_obj): - title, url = self._get_title_and_url(item) - seeders, leechers = self._get_result_info(item) - size = self._get_size(item) - pubdate = self._get_pubdate(item) - torrent_hash = self._get_hash(item) - - results.append(Proper(title, url, datetime.today(), show_obj, seeders, leechers, size, pubdate, torrent_hash)) - - return results + """ Find propers""" + + results = self.cache.listPropers(proper_candidates) + return [Proper(x[b'name'], x[b'url'], datetime.fromtimestamp(x[b'time']), self.show, 
x[b'seeders'], + x[b'leechers'], x[b'size'], x[b'pubdate'], x[b'hash']) for x in results] def find_search_results(self, show, episodes, search_mode, forced_search=False, download_current_quality=False, manual_search=False,