From aebf360ac90c116a64e0492e1fa4a68a672122d4 Mon Sep 17 00:00:00 2001
From: Dario
Date: Sun, 5 Jun 2016 16:27:13 +0200
Subject: [PATCH 01/85] Fix and clean up limetorrents provider

---
 sickbeard/providers/limetorrents.py | 85 ++++++++++++++++------------
 1 file changed, 47 insertions(+), 38 deletions(-)

diff --git a/sickbeard/providers/limetorrents.py b/sickbeard/providers/limetorrents.py
index faa3a61225..c13c3174e2 100644
--- a/sickbeard/providers/limetorrents.py
+++ b/sickbeard/providers/limetorrents.py
@@ -1,20 +1,20 @@
 # coding=utf-8
 # Author: Gonçalo M. (aka duramato/supergonkas)
 #
-# This file is part of SickRage.
+# This file is part of Medusa.
 #
-# SickRage is free software: you can redistribute it and/or modify
+# Medusa is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
 # the Free Software Foundation, either version 3 of the License, or
 # (at your option) any later version.
 #
-# SickRage is distributed in the hope that it will be useful,
+# Medusa is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU General Public License
-# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
+# along with Medusa. If not, see <http://www.gnu.org/licenses/>.

 from __future__ import unicode_literals

@@ -36,9 +36,7 @@

 class LimeTorrentsProvider(TorrentProvider):  # pylint: disable=too-many-instance-attributes
-    """
-    Search provider LimeTorrents
-    """
+    """Search provider LimeTorrents."""

     def __init__(self):

@@ -70,7 +68,7 @@ def __init__(self):

     def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-many-branches,too-many-locals
         """
-        Search the provider for results
+        Search the provider for results.

         :param search_strings: Search to perform
         :param age: Not used for this provider
@@ -79,28 +77,31 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
         :return: A list of items found
         """
         results = []
+
         for mode in search_strings:
             logger.log('Search Mode: {0}'.format(mode), logger.DEBUG)
+
             for search_string in search_strings[mode]:

                 if mode == 'RSS':
-                    for page in range(1, 4):
-                        search_url = self.urls['rss'].format(page=page)
-                        data = self.get_url(search_url, returns='text')
-                        items = self.parse(data, mode)
-                        results += items
+                    search_url = self.urls['rss'].format(page=1)
                 else:
+                    logger.log("Search string: {0}".format(search_string), logger.DEBUG)
                     search_url = self.urls['search'].format(query=search_string)
-                    data = self.get_url(search_url, returns='text')
-                    items = self.parse(data, mode)
-                    results += items

+                data = self.get_url(search_url, returns='text')
                 if not data:
                     logger.log('No data returned from provider', logger.DEBUG)
                     continue
+
+                items = self.parse(data, mode)
+                if items:
+                    results += items
+
         return results

     def parse(self, data, mode):
         """
-        Parse search results for items
+        Parse search results for items.

         :param data: The raw response from a search
         :param mode: The current mode used to search, e.g. RSS
@@ -108,40 +109,47 @@ def parse(self, data, mode):
         :return: A list of items found
         """
         items = []
+
         with BS4Parser(data, 'html5lib') as html:
             torrent_table = html('table', class_='table2')
-            if mode != 'RSS' and len(torrent_table) < 2:
+
+            if mode != 'RSS' and torrent_table and len(torrent_table) < 2:
                 logger.log(u'Data returned from provider does not contain any torrents', logger.DEBUG)
                 return

             torrent_table = torrent_table[0 if mode == 'RSS' else 1]
             torrent_rows = torrent_table('tr')
-            first = True
-            for result in torrent_rows:
-                # Skip the first, since it isn't a valid result
-                if first:
-                    first = False
-                    continue
+
+            # Skip the first row, since it isn't a valid result
+            for result in torrent_rows[1:]:
                 cells = result('td')

                 try:
                     verified = result('img', title='Verified torrent')
                     if self.confirmed and not verified:
                         continue
+
                     titleinfo = result('a')
                     info = titleinfo[1]['href']
                     torrent_id = id_regex.search(info).group(1)
+
                     url = result.find('a', rel='nofollow')
                     if not url:
                         continue
+
                     torrent_hash = hash_regex.search(url['href']).group(2)
-                    if not torrent_id or not torrent_hash:
+
+                    if not any([torrent_id, torrent_hash]):
                         continue
+
                     with suppress(requests.exceptions.Timeout):
                         # Suppress the timeout since we are not interested in actually getting the results
-                        hashdata = self.session.get(self.urls['update'], timeout=0.1,
-                                                    params={'torrent_id': torrent_id,
-                                                            'infohash': torrent_hash})
+                        self.session.get(self.urls['update'], timeout=0.1,
+                                         params={'torrent_id': torrent_id,
+                                                 'infohash': torrent_hash})

+                    title = titleinfo[1].get_text(strip=True)
+                    if not title:
+                        continue
+
                     # Remove comma as thousands separator from larger number like 2,000 seeders = 2000
                     seeders = try_int(cells[3].get_text(strip=True).replace(',', ''))
                     leechers = try_int(cells[4].get_text(strip=True).replace(',', ''))
@@ -151,8 +159,8 @@ def parse(self, data, mode):

                     if seeders < min(self.minseed, 1):
                         if mode != 'RSS':
-                            logger.log('Discarding torrent because it doesn\'t meet the minimum '
-                                       'seeders or leechers: {0}. Seeders: {1})'.format
+                            logger.log("Discarding torrent because it doesn't meet the"
+                                       ' minimum seeders: {0}. Seeders: {1}'.format
                                        (title, seeders), logger.DEBUG)
                         continue

@@ -162,21 +170,22 @@ def parse(self, data, mode):
                         'size': size,
                         'seeders': seeders,
                         'leechers': leechers,
+                        'pubdate': None,
                         'hash': torrent_hash or ''
                     }
+                    if mode != 'RSS':
+                        logger.log('Found result: {0} with {1} seeders and {2} leechers'.format
+                                   (title, seeders, leechers), logger.DEBUG)

-                    # if mode != 'RSS':
-                    logger.log('Found result: {0} with {1} seeders and {2} leechers'.format
-                               (title, seeders, leechers), logger.DEBUG)
                     items.append(item)
-
-                except StandardError:
-                    logger.log(u"Failed parsing provider. Traceback: {!r}".format(traceback.format_exc()), logger.ERROR)
+                except (AttributeError, TypeError, KeyError, ValueError, IndexError):
+                    logger.log('Failed parsing provider. Traceback: {0!r}'.format
+                               (traceback.format_exc()), logger.ERROR)
                     continue

-            # For each search mode sort all the items by seeders if available
-
+        # For each search mode sort all the items by seeders if available
         items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True)
+
         return items

From a2371bf1164280812cd5e01f5d83256c1f5daa44 Mon Sep 17 00:00:00 2001
From: Dario
Date: Sun, 5 Jun 2016 18:02:56 +0200
Subject: [PATCH 02/85] More cleanup

---
 sickbeard/providers/limetorrents.py | 21 +++++++++------------
 1 file changed, 9 insertions(+), 12 deletions(-)

diff --git a/sickbeard/providers/limetorrents.py b/sickbeard/providers/limetorrents.py
index c13c3174e2..2eae5c3cf1 100644
--- a/sickbeard/providers/limetorrents.py
+++ b/sickbeard/providers/limetorrents.py
@@ -22,9 +22,10 @@
 import requests
 import traceback

-from requests.compat import urljoin
 from contextlib2 import suppress

+from requests.compat import urljoin
+
 from sickbeard import logger, tvcache
 from sickbeard.bs4_parser import BS4Parser

@@ -128,16 +129,16 @@ def parse(self, data, mode):
                     if self.confirmed and not verified:
                         continue

-                    titleinfo = result('a')
-                    info = titleinfo[1]['href']
-                    torrent_id = id_regex.search(info).group(1)
-
                     url = result.find('a', rel='nofollow')
-                    if not url:
+                    title_info = result('a')
+                    info = title_info[1]['href']
+                    if not all([url, title_info, info]):
                         continue

+                    title = title_info[1].get_text(strip=True)
+                    torrent_id = id_regex.search(info).group(1)
                     torrent_hash = hash_regex.search(url['href']).group(2)
-                    if not any([torrent_id, torrent_hash]):
+                    if not all([title, torrent_id, torrent_hash]):
                         continue

                     with suppress(requests.exceptions.Timeout):
                         # Suppress the timeout since we are not interested in actually getting the results
                         self.session.get(self.urls['update'], timeout=0.1,
                                          params={'torrent_id': torrent_id,
                                                  'infohash': torrent_hash})

-                    title = titleinfo[1].get_text(strip=True)
-                    if not title:
-                        continue
-
                     # Remove comma as thousands separator from larger number like 2,000 seeders = 2000
                     seeders = try_int(cells[3].get_text(strip=True).replace(',', ''))
                     leechers = try_int(cells[4].get_text(strip=True).replace(',', ''))
@@ -171,7 +168,7 @@ def parse(self, data, mode):
                         'seeders': seeders,
                         'leechers': leechers,
                         'pubdate': None,
-                        'hash': torrent_hash or ''
+                        'hash': torrent_hash
                     }
                     if mode != 'RSS':
                         logger.log('Found result: {0} with {1} seeders and {2} leechers'.format
                                    (title, seeders, leechers), logger.DEBUG)

From 8d6b7d1421b4c383e5e732a156ec26294648e16e Mon Sep 17 00:00:00 2001
From: Labrys
Date: Sat, 4 Jun 2016 10:17:07 -0400
Subject: [PATCH 03/85] Update pullapprove

---
 .pullapprove.yml | 32 +++-----------------------------
 1 file changed, 3 insertions(+), 29 deletions(-)

diff --git a/.pullapprove.yml b/.pullapprove.yml
index f4e7046f97..4265923062 100644
--- a/.pullapprove.yml
+++ b/.pullapprove.yml
@@ -4,40 +4,14 @@ reject_regex: (^Rejected|^Fix it)
 reset_on_push: false

 reviewers:
     -
-        name: backend-devs
+        name: devs
         required: 1
         members:
             - duramato
             - fernandog
             - labrys
             - medariox
+            - ratoaq2
             - p0psicles
-            - adaur
-#        # CONDITIONS REQUIRE PRO ACCOUNT
-#        conditions:
-#            branches:
-#                - master
-#                - beta
-#                - develop
-#            files:
-#                - "*.py"
-
-    -
-        name: gui-devs
-        required: 1
-        members:
-            - Labrys
             - OmgImAlexis
-            - p0psicles
-#        # CONDITIONS REQUIRE PRO ACCOUNT
-#        conditions:
-#            branches:
-#                - master
-#                - beta
-#                - develop
-#            files:
-#                - "gui/"
-
-    -
-        name: support
-        required: 0
-        members:
-            - neoatomic
+            - adaur

From e97c6c4ac894dee2860a0c03f50d1353c98f3692 Mon Sep 17 00:00:00 2001
From: Dario
Date: Sun, 5 Jun 2016 18:04:45 +0200
Subject: [PATCH 04/85] Fix and clean up torrentproject provider

---
 sickbeard/providers/torrentproject.py | 91 +++++++++++++++------------
 1 file changed, 52 insertions(+), 39 deletions(-)

diff --git a/sickbeard/providers/torrentproject.py b/sickbeard/providers/torrentproject.py
index f0c16279a2..64ece6ab04 100644
--- a/sickbeard/providers/torrentproject.py
+++ b/sickbeard/providers/torrentproject.py
@@ -1,24 +1,25 @@
 # coding=utf-8
 # Author: Gonçalo M. (aka duramato/supergonkas)
 #
+# This file is part of Medusa.
 #
-# This file is part of SickRage.
-#
-# SickRage is free software: you can redistribute it and/or modify
+# Medusa is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
 # the Free Software Foundation, either version 3 of the License, or
 # (at your option) any later version.
 #
-# SickRage is distributed in the hope that it will be useful,
+# Medusa is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU General Public License
-# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
+# along with Medusa. If not, see <http://www.gnu.org/licenses/>.
+
+from __future__ import unicode_literals

-from requests.compat import urljoin
 import validators
+import traceback

 from sickbeard import logger, tvcache
 from sickbeard.common import USER_AGENT
@@ -32,7 +33,7 @@ class TorrentProjectProvider(TorrentProvider):  # pylint: disable=too-many-insta
     def __init__(self):

         # Provider Init
-        TorrentProvider.__init__(self, "TorrentProject")
+        TorrentProvider.__init__(self, 'TorrentProject')

         # Credentials
         self.public = True
@@ -58,65 +59,77 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
         search_params = {
             'out': 'json',
             'filter': 2101,
+            'showmagnets': 'on',
             'num': 150
         }

         for mode in search_strings:  # Mode = RSS, Season, Episode
             items = []
-            logger.log(u"Search Mode: {}".format(mode), logger.DEBUG)
+            logger.log('Search Mode: {0}'.format(mode), logger.DEBUG)

             for search_string in search_strings[mode]:
                 if mode != 'RSS':
-                    logger.log(u"Search string: {}".format(search_string.decode("utf-8")),
+                    logger.log('Search string: {0}'.format(search_string.decode('utf-8')),
                                logger.DEBUG)

                 search_params['s'] = search_string

                 if self.custom_url:
                     if not validators.url(self.custom_url):
-                        logger.log("Invalid custom url set, please check your settings", logger.WARNING)
+                        logger.log('Invalid custom url set, please check your settings', logger.WARNING)
                         return results
                     search_url = self.custom_url
                 else:
                     search_url = self.url

                 torrents = self.get_url(search_url, params=search_params, returns='json')
-                if not (torrents and "total_found" in torrents and int(torrents["total_found"]) > 0):
-                    logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
+                if not (torrents and int(torrents.pop('total_found', 0)) > 0):
+                    logger.log('Data returned from provider does not contain any torrents', logger.DEBUG)
                     continue

-                del torrents["total_found"]
-
-                results = []
-                for i in torrents:
-                    title = torrents[i].get("title")
-                    seeders = try_int(torrents[i].get("seeds"), 1)
-                    leechers = try_int(torrents[i].get("leechs"), 0)
-
-                    # Filter unseeded torrent
-                    if seeders < min(self.minseed, 1):
+                for result in torrents:
+                    try:
+                        title = torrents[result].get('title')
+                        download_url = torrents[result].get('magnet')
+                        if not all([title, download_url]):
+                            continue
+
+                        download_url += self._custom_trackers
+                        seeders = try_int(torrents[result].get('seeds'), 1)
+                        leechers = try_int(torrents[result].get('leechs'), 0)
+
+                        # Filter unseeded torrent
+                        if seeders < min(self.minseed, 1):
+                            if mode != 'RSS':
+                                logger.log("Discarding torrent because it doesn't meet the"
+                                           ' minimum seeders: {0}. Seeders: {1}'.format
+                                           (title, seeders), logger.DEBUG)
+                            continue
+
+                        torrent_hash = torrents[result].get('torrent_hash')
+                        torrent_size = torrents[result].get('torrent_size')
+                        size = convert_size(torrent_size) or -1
+
+                        item = {
+                            'title': title,
+                            'link': download_url,
+                            'size': size,
+                            'seeders': seeders,
+                            'leechers': leechers,
+                            'pubdate': None,
+                            'hash': torrent_hash
+                        }
                         if mode != 'RSS':
-                            logger.log(u"Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format(title, seeders), logger.DEBUG)
-                            continue
+                            logger.log('Found result: {0} with {1} seeders and {2} leechers'.format
+                                       (title, seeders, leechers), logger.DEBUG)

-                    t_hash = torrents[i].get("torrent_hash")
-                    torrent_size = torrents[i].get("torrent_size")
-                    size = convert_size(torrent_size) or -1
-                    download_url = torrents[i].get("magnet") + self._custom_trackers
-                    pubdate = ''  # TBA
-
-                    if not all([title, download_url]):
+                        items.append(item)
+                    except (AttributeError, TypeError, KeyError, ValueError, IndexError):
+                        logger.log('Failed parsing provider. Traceback: {0!r}'.format
+                                   (traceback.format_exc()), logger.ERROR)
                         continue

-                    item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': t_hash}
-
-                    if mode != 'RSS':
-                        logger.log(u"Found result: {0} with {1} seeders and {2} leechers".format
-                                   (title, seeders, leechers), logger.DEBUG)
-
-                    items.append(item)
-
             # For each search mode sort all the items by seeders if available
             items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True)
             results += items

From 4333dd9824bc0ae5ac2d3df820dbdc1c0b3944cb Mon Sep 17 00:00:00 2001
From: P0psicles
Date: Sun, 5 Jun 2016 20:15:42 +0200
Subject: [PATCH 05/85] Fixed issue with calling attribute on the show indexer
 param, instead of the show_obj object.

* Renamed some camelCase showObj while I was at it.
---
 sickbeard/server/web/home/handler.py | 38 ++++++++++++++--------------
 1 file changed, 19 insertions(+), 19 deletions(-)

diff --git a/sickbeard/server/web/home/handler.py b/sickbeard/server/web/home/handler.py
index fdfc074cac..a22ab3d019 100644
--- a/sickbeard/server/web/home/handler.py
+++ b/sickbeard/server/web/home/handler.py
@@ -1388,7 +1388,7 @@ def editShow(self, show=None, location=None, anyQualities=[], bestQualities=[],
                 except CantRefreshShowException as msg:
                     errors.append('Unable to refresh this show:{error}'.format(error=msg))
                 # grab updated info from TVDB
-                # showObj.loadEpisodesFromIndexer()
+                # show_obj.loadEpisodesFromIndexer()
                 # rescan the episodes in the new folder
             except NoNFOException:
                 errors.append('The folder at {location} doesn\'t contain a tvshow.nfo - '
@@ -1434,7 +1434,7 @@ def editShow(self, show=None, location=None, anyQualities=[], bestQualities=[],

         return self.redirect('/home/displayShow?show={show}'.format(show=show))

-    def erase_cache(self, showObj):
+    def erase_cache(self, show_obj):

         try:
             main_db_con = db.DBConnection('cache.db')
@@ -1452,36 +1452,36 @@ def erase_cache(self, showObj):
                     main_db_con.action(
                         b'DELETE FROM \'{provider}\' '
                         b'WHERE indexerid = ?'.format(provider=cur_provider.get_id()),
-                        [showObj.indexerid]
+                        [show_obj.indexerid]
                     )
                 except Exception:
                     logger.log(u'Unable to delete cached results for provider {provider} for show: {show}'.format
-                               (provider=cur_provider, show=showObj.name), logger.DEBUG)
+                               (provider=cur_provider, show=show_obj.name), logger.DEBUG)

         except Exception:
             logger.log(u'Unable to delete cached results for show: {show}'.format
-                       (show=showObj.name), logger.DEBUG)
+                       (show=show_obj.name), logger.DEBUG)

     def togglePause(self, show=None):
-        error, show = Show.pause(show)
+        error, show_obj = Show.pause(show)

         if error is not None:
            return self._genericMessage('Error', error)

         ui.notifications.message('{show} has been {state}'.format
-                                 (show=show.name, state='paused' if show.paused else 'resumed'))
+                                 (show=show_obj.name, state='paused' if show_obj.paused else 'resumed'))

-        return self.redirect('/home/displayShow?show={show}'.format(show=show.indexerid))
+        return self.redirect('/home/displayShow?show={show}'.format(show=show_obj.indexerid))

     def deleteShow(self, show=None, full=0):
         if show:
-            error, show = Show.delete(show, full)
+            error, show_obj = Show.delete(show, full)

             if error is not None:
                 return self._genericMessage('Error', error)

             ui.notifications.message('{show} has been {state} {details}'.format(
-                show=show.name,
+                show=show_obj.name,
                 state='trashed' if sickbeard.TRASH_REMOVE_SHOW else 'deleted',
                 details='(with all related media)' if full else '(media untouched)',
             ))
@@ -1489,16 +1489,16 @@ def deleteShow(self, show=None, full=0):
             time.sleep(cpu_presets[sickbeard.CPU_PRESET])

             # Remove show from 'RECENT SHOWS' in 'Shows' menu
-            sickbeard.SHOWS_RECENT = [x for x in sickbeard.SHOWS_RECENT if x['indexerid'] != show.indexerid]
+            sickbeard.SHOWS_RECENT = [x for x in sickbeard.SHOWS_RECENT if x['indexerid'] != show_obj.indexerid]

         # Don't redirect to the default page, so the user can confirm that the show was deleted
         return self.redirect('/home/')

     def refreshShow(self, show=None):
-        error, show = Show.refresh(show)
+        error, show_obj = Show.refresh(show)

         # This is a show validation error
-        if error is not None and show is None:
+        if error is not None and show_obj is None:
             return self._genericMessage('Error', error)

         # This is a refresh error
@@ -1507,7 +1507,7 @@
         time.sleep(cpu_presets[sickbeard.CPU_PRESET])

-        return self.redirect('/home/displayShow?show={show}'.format(show=show.indexerid))
+        return self.redirect('/home/displayShow?show={show}'.format(show=show_obj.indexerid))

     def updateShow(self, show=None, force=0):
@@ -1528,7 +1528,7 @@ def updateShow(self, show=None, force=0):
         # just give it some time
         time.sleep(cpu_presets[sickbeard.CPU_PRESET])

-        return self.redirect('/home/displayShow?show={show}'.format(show=show.indexerid))
+        return self.redirect('/home/displayShow?show={show}'.format(show=show_obj.indexerid))

     def subtitleShow(self, show=None, force=0):
@@ -1545,7 +1545,7 @@ def subtitleShow(self, show=None, force=0):

         time.sleep(cpu_presets[sickbeard.CPU_PRESET])

-        return self.redirect('/home/displayShow?show={show}'.format(show=show.indexerid))
+        return self.redirect('/home/displayShow?show={show}'.format(show=show_obj.indexerid))

     def updateKODI(self, show=None):
         show_name = None
@@ -1567,7 +1567,7 @@ def updateKODI(self, show=None):
             ui.notifications.error('Unable to contact one or more KODI host(s): {host}'.format(host=host))

         if show_obj:
-            return self.redirect('/home/displayShow?show={show}'.format(show=show.indexerid))
+            return self.redirect('/home/displayShow?show={show}'.format(show=show_obj.indexerid))
         else:
             return self.redirect('/home/')
@@ -1592,7 +1592,7 @@ def updateEMBY(self, show=None):
             ui.notifications.error('Unable to contact Emby host: {host}'.format(host=sickbeard.EMBY_HOST))

         if show_obj:
-            return self.redirect('/home/displayShow?show={show}'.format(show=show.indexerid))
+            return self.redirect('/home/displayShow?show={show}'.format(show=show_obj.indexerid))
         else:
             return self.redirect('/home/')
@@ -1796,7 +1796,7 @@ def testRename(self, show=None):
         t = PageTemplate(rh=self, filename='testRename.mako')
         submenu = [{
             'title': 'Edit',
-            'path': 'home/editShow?show={show}'.format(show=show.indexerid),
+            'path': 'home/editShow?show={show}'.format(show=show_obj.indexerid),
             'icon': 'ui-icon ui-icon-pencil'
         }]

From f6f96c45f944763ce9524806bf0178102d2eb5e5 Mon Sep 17 00:00:00 2001
From: medariox
Date: Sun, 5 Jun 2016 19:31:49 +0200
Subject: [PATCH 06/85] Update subliminal to df321d8 (develop) and itasa
 provider

---
 lib/subliminal/__init__.py        |   2 +-
 lib/subliminal/providers/itasa.py | 202 +++++++++++++++++++++----------
 lib/subliminal/score.py           |  28 ++---
 3 files changed, 156 insertions(+), 76 deletions(-)

diff --git a/lib/subliminal/__init__.py b/lib/subliminal/__init__.py
index 0b94f73b10..73b137e987 100644
--- a/lib/subliminal/__init__.py
+++ b/lib/subliminal/__init__.py
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
 __title__ = 'subliminal'
-__version__ = '2.0.rc1'
+__version__ = '2.1.0.dev'
 __short_version__ = '.'.join(__version__.split('.')[:2])
 __author__ = 'Antoine Bertin'
 __license__ = 'MIT'

diff --git a/lib/subliminal/providers/itasa.py b/lib/subliminal/providers/itasa.py
index 0e828801b8..3c01203086 100644
--- a/lib/subliminal/providers/itasa.py
+++ b/lib/subliminal/providers/itasa.py
@@ -2,12 +2,13 @@
 import copy
 import io
 import logging
+import re

 from babelfish import Language
 from guessit import guessit
 try:
     from lxml import etree
-except ImportError:
+except ImportError:  # pragma: no cover
     try:
         import xml.etree.cElementTree as etree
     except ImportError:
@@ -17,7 +18,7 @@

 from . import Provider
 from .. import __version__
-from .. cache import SHOW_EXPIRATION_TIME, region
+from .. cache import EPISODE_EXPIRATION_TIME, SHOW_EXPIRATION_TIME, region
 from .. exceptions import AuthenticationError, ConfigurationError, TooManyRequests
 from .. subtitle import (Subtitle, fix_line_ending, guess_matches, sanitize)
 from .. video import Episode
@@ -28,18 +29,19 @@ class ItaSASubtitle(Subtitle):
     provider_name = 'itasa'

-    def __init__(self, sub_id, series, season, episode, format, full_data, hash=None):
+    def __init__(self, sub_id, series, season, episode, video_format, year, tvdb_id, full_data):
         super(ItaSASubtitle, self).__init__(Language('ita'))
         self.sub_id = sub_id
         self.series = series
         self.season = season
         self.episode = episode
-        self.format = format
+        self.format = video_format
+        self.year = year
+        self.tvdb_id = tvdb_id
         self.full_data = full_data
-        self.hash = hash

     @property
-    def id(self):
+    def id(self):  # pragma: no cover
         return self.sub_id

     def get_matches(self, video, hearing_impaired=False):
@@ -57,13 +59,10 @@ def get_matches(self, video, hearing_impaired=False):
         # format
         if video.format and video.format.lower() in self.format.lower():
             matches.add('format')
-        if not video.format and not self.format:
-            matches.add('format')
-        # hash
-        if 'itasa' in video.hashes and self.hash == video.hashes['itasa']:
-            print('Hash %s' % video.hashes['itasa'])
-            if 'series' in matches and 'season' in matches and 'episode' in matches:
-                matches.add('hash')
+        if video.year and self.year == video.year:
+            matches.add('year')
+        if video.series_tvdb_id and self.tvdb_id == video.series_tvdb_id:
+            matches.add('tvdb_id')

         # other properties
         matches |= guess_matches(video, guessit(self.full_data), partial=True)
@@ -76,8 +75,6 @@ class ItaSAProvider(Provider):

     video_types = (Episode,)

-    required_hash = 'itasa'
-
     server_url = 'https://api.italiansubs.net/api/rest/'
     apikey = 'd86ad6ec041b334fac1e512174ee04d5'
@@ -90,10 +87,12 @@ def __init__(self, username=None, password=None):
         self.password = password
         self.logged_in = False
         self.login_itasa = False
+        self.session = None
+        self.auth_code = None

     def initialize(self):
         self.session = Session()
-        self.session.headers = {'User-Agent': 'Subliminal/%s' % __version__}
+        self.session.headers['User-Agent'] = 'Subliminal/%s' % __version__

         # login
         if self.username is not None and self.password is not None:
@@ -110,7 +109,6 @@ def initialize(self):
             if root.find('status').text == 'fail':
                 raise AuthenticationError(root.find('error/message').text)

-            # logger.debug('Logged in: \n' + etree.tostring(root))
             self.auth_code = root.find('data/user/authcode').text

             data = {
@@ -148,7 +146,7 @@ def _get_show_ids(self):
         # populate the show ids
         show_ids = {}
         for show in root.findall('data/shows/show'):
-            if show.find('name').text is None:
+            if show.find('name').text is None:  # pragma: no cover
                 continue
             show_ids[sanitize(show.find('name').text).lower()] = int(show.find('id').text)
         logger.debug('Found %d show ids', len(show_ids))
@@ -186,14 +184,14 @@ def _search_show_id(self, series):
                 return show_id

         # Not in the first page of result try next (if any)
-        next = root.find('data/next')
-        while next.text is not None:
+        next_page = root.find('data/next')
+        while next_page.text is not None:  # pragma: no cover

-            r = self.session.get(next.text, timeout=10)
+            r = self.session.get(next_page.text, timeout=10)
             r.raise_for_status()
             root = etree.fromstring(r.content)

-            logger.info('Loading suggestion page %s', root.find('data/page').text)
+            logger.info('Loading suggestion page %r', root.find('data/page').text)

             # Looking for show in following pages
             for show in root.findall('data/shows/show'):
@@ -203,7 +201,7 @@ def _search_show_id(self, series):
                     return show_id

-            next = root.find('data/next')
+            next_page = root.find('data/next')

         # No matches found
         logger.warning('Show id not found: suggestions does not match')
@@ -216,6 +214,7 @@ def get_show_id(self, series, country_code=None):
         First search in the result of :meth:`_get_show_ids` and fallback on a search with :meth:`_search_show_id`

         :param str series: series of the episode.
+        :param str country_code: the country in which the show is aired.
         :return: the show id, if found.
         :rtype: int or None
@@ -241,6 +240,7 @@ def get_show_id(self, series, country_code=None):

         return show_id

+    @region.cache_on_arguments(expiration_time=EPISODE_EXPIRATION_TIME)
     def _download_zip(self, sub_id):
         # download the subtitle
         logger.info('Downloading subtitle %r', sub_id)
@@ -256,10 +256,62 @@ def _download_zip(self, sub_id):

         return r.content

-    def query(self, series, season, episode, format, country=None, hash=None):
+    def _get_season_subtitles(self, show_id, season, sub_format):
+        params = {
+            'apikey': self.apikey,
+            'show_id': show_id,
+            'q': 'Stagione %d' % season,
+            'version': sub_format
+        }
+        r = self.session.get(self.server_url + 'subtitles/search', params=params, timeout=30)
+        r.raise_for_status()
+        root = etree.fromstring(r.content)
+
+        if int(root.find('data/count').text) == 0:
+            logger.warning('Subtitles for season not found')
+            return []
+
+        subs = []
+        # Looking for subtitles in first page
+        for subtitle in root.findall('data/subtitles/subtitle'):
+            if 'stagione %d' % season in subtitle.find('name').text.lower():
+                logger.debug('Found season zip id %d - %r - %r',
+                             int(subtitle.find('id').text),
+                             subtitle.find('name').text,
+                             subtitle.find('version').text)
+
+                content = self._download_zip(int(subtitle.find('id').text))
+                if not is_zipfile(io.BytesIO(content)):  # pragma: no cover
+                    if 'limite di download' in content:
+                        raise TooManyRequests()
+                    else:
+                        raise ConfigurationError('Not a zip file: %r' % content)
+
+                with ZipFile(io.BytesIO(content)) as zf:
+                    episode_re = re.compile('s(\d{1,2})e(\d{1,2})')
+                    for index, name in enumerate(zf.namelist()):
+                        match = episode_re.search(name)
+                        if not match:  # pragma: no cover
+                            logger.debug('Cannot decode subtitle %r', name)
+                        else:
+                            sub = ItaSASubtitle(
+                                int(subtitle.find('id').text),
+                                subtitle.find('show_name').text,
+                                int(match.group(1)),
+                                int(match.group(2)),
+                                None,
+                                None,
+                                None,
+                                name)
+                            sub.content = fix_line_ending(zf.read(name))
+                            subs.append(sub)
+
+        return subs
+
+    def query(self, series, season, episode, video_format, resolution, country=None):
         # To make queries you need to be logged in
-        if not self.logged_in:
+        if not self.logged_in:  # pragma: no cover
             raise ConfigurationError('Cannot query if not logged in')

         # get the show id
@@ -269,16 +321,33 @@ def query(self, series, season, episode, format, country=None, hash=None):
             return []

         # get the page of the season of the show
-        logger.info('Getting the subtitle of show id %d, season %d episode %d, format %s', show_id,
-                    season, episode, format)
+        logger.info('Getting the subtitle of show id %d, season %d episode %d, format %r', show_id,
+                    season, episode, video_format)
         subtitles = []

-        # Default format is HDTV
-        sub_format = ''
-        if format is None or format.lower() == 'hdtv':
-            sub_format = 'normale'
+        # Default format is SDTV
+        if not video_format or video_format.lower() == 'hdtv':
+            if resolution in ('1080i', '1080p', '720p'):
+                sub_format = resolution
+            else:
+                sub_format = 'normale'
         else:
-            sub_format = format.lower()
+            sub_format = video_format.lower()
+
+        # Look for year
+        params = {
+            'apikey': self.apikey
+        }
+        r = self.session.get(self.server_url + 'shows/' + str(show_id), params=params, timeout=30)
+        r.raise_for_status()
+        root = etree.fromstring(r.content)
+
+        year = root.find('data/show/started').text
+        if year:
+            year = int(year.split('-', 1)[0])
+        tvdb_id = root.find('data/show/id_tvdb').text
+        if tvdb_id:
+            tvdb_id = int(tvdb_id)

         params = {
             'apikey': self.apikey,
@@ -286,20 +355,29 @@ def query(self, series, season, episode, format, country=None, hash=None):
             'q': '%dx%02d' % (season, episode),
             'version': sub_format
         }
-        logger.debug(params)
         r = self.session.get(self.server_url + 'subtitles/search', params=params, timeout=30)
         r.raise_for_status()
         root = etree.fromstring(r.content)

         if int(root.find('data/count').text) == 0:
             logger.warning('Subtitles not found')
-            return []
-
-        # Looking for subtitlles in first page
+            # If no subtitle are found for single episode try to download all season zip
+            subs = self._get_season_subtitles(show_id, season, sub_format)
+            if subs:
+                for subtitle in subs:
+                    subtitle.format = video_format
+                    subtitle.year = year
+                    subtitle.tvdb_id = tvdb_id
+
+                return subs
+            else:
+                return []
+
+        # Looking for subtitles in first page
         for subtitle in root.findall('data/subtitles/subtitle'):
             if '%dx%02d' % (season, episode) in subtitle.find('name').text.lower():
-                logger.debug('Found subtitle id %d - %s - %s',
+                logger.debug('Found subtitle id %d - %r - %r',
                              int(subtitle.find('id').text),
                              subtitle.find('name').text,
                             subtitle.find('version').text)
@@ -309,27 +387,28 @@ def query(self, series, season, episode, format, country=None, hash=None):
                           subtitle.find('show_name').text,
                           season,
                           episode,
-                          format,
-                          subtitle.find('name').text,
-                          hash)
+                          video_format,
+                          year,
+                          tvdb_id,
+                          subtitle.find('name').text)

                 subtitles.append(sub)

         # Not in the first page of result try next (if any)
-        next = root.find('data/next')
-        while next.text is not None:
+        next_page = root.find('data/next')
+        while next_page.text is not None:  # pragma: no cover

-            r = self.session.get(next.text, timeout=30)
+            r = self.session.get(next_page.text, timeout=30)
             r.raise_for_status()
             root = etree.fromstring(r.content)

-            logger.info('Loading subtitles page %s', root.data.page.text)
+            logger.info('Loading subtitles page %r', root.data.page.text)

             # Looking for show in following pages
             for subtitle in root.findall('data/subtitles/subtitle'):
                 if '%dx%02d' % (season, episode) in subtitle.find('name').text.lower():
-                    logger.debug('Found subtitle id %d - %s - %s',
+                    logger.debug('Found subtitle id %d - %r - %r',
                                  int(subtitle.find('id').text),
                                  subtitle.find('name').text,
                                  subtitle.find('version').text)
@@ -339,39 +418,40 @@ def query(self, series, season, episode, format, country=None, hash=None):
                               subtitle.find('show_name').text,
                               season,
                               episode,
-                              format,
-                              subtitle.find('name').text,
-                              hash)
+                              video_format,
+                              year,
+                              tvdb_id,
+                              subtitle.find('name').text)

                     subtitles.append(sub)

-            next = root.find('data/next')
+            next_page = root.find('data/next')

-        # Dowload the subs found, can be more than one in zip
+        # Download the subs found, can be more than one in zip
         additional_subs = []
         for sub in subtitles:
             # open the zip
             content = self._download_zip(sub.sub_id)
-            if not is_zipfile(io.BytesIO(content)):
+            if not is_zipfile(io.BytesIO(content)):  # pragma: no cover
                 if 'limite di download' in content:
                     raise TooManyRequests()
                 else:
                     raise ConfigurationError('Not a zip file: %r' % content)

            with ZipFile(io.BytesIO(content)) as zf:
-                if len(zf.namelist()) > 1:
+                if len(zf.namelist()) > 1:  # pragma: no cover

-                    for name in enumerate(zf.namelist()):
+                    for index, name in enumerate(zf.namelist()):

-                        if name[0] == 0:
-                            # First elemnent
-                            sub.content = fix_line_ending(zf.read(name[1]))
-                            sub.full_data = name[1]
+                        if index == 0:
+                            # First element
+                            sub.content = fix_line_ending(zf.read(name))
+                            sub.full_data = name
                         else:
                             add_sub = copy.deepcopy(sub)
-                            add_sub.content = fix_line_ending(zf.read(name[1]))
-                            add_sub.full_data = name[1]
+                            add_sub.content = fix_line_ending(zf.read(name))
+                            add_sub.full_data = name
                             additional_subs.append(add_sub)
                 else:
                     sub.content = fix_line_ending(zf.read(zf.namelist()[0]))
@@ -380,7 +460,7 @@ def query(self, series, season, episode, format, country=None, hash=None):
         return subtitles + additional_subs

     def list_subtitles(self, video, languages):
-        return self.query(video.series, video.season, video.episode, video.format, hash=video.hashes.get('itasa'))
+        return self.query(video.series, video.season, video.episode, video.format, video.resolution)

-    def download_subtitle(self, subtitle):
+    def download_subtitle(self, subtitle):  # pragma: no cover
         pass

diff --git a/lib/subliminal/score.py b/lib/subliminal/score.py
index b4d30e249c..6646441714 100755
--- a/lib/subliminal/score.py
+++ b/lib/subliminal/score.py
@@ -36,12 +36,12 @@

 #: Scores for episodes
-episode_scores = {'hash': 215, 'series': 108, 'year': 54, 'season': 18, 'episode': 18, 'release_group': 9,
-                  'format': 4, 'audio_codec': 2, 'resolution': 1, 'hearing_impaired': 1, 'video_codec': 1}
+episode_scores = {'hash': 359, 'series': 180, 'year': 90, 'season': 30, 'episode': 30, 'release_group': 15,
+                  'format': 7, 'audio_codec': 3, 'resolution': 2, 'video_codec': 2, 'hearing_impaired': 1}

 #: Scores for movies
-movie_scores = {'hash': 71, 'title': 36, 'year': 18, 'release_group': 9,
-                'format': 4, 'audio_codec': 2, 'resolution': 1, 'hearing_impaired': 1, 'video_codec': 1}
+movie_scores = {'hash': 119, 'title': 60, 'year': 30, 'release_group': 15,
+                'format': 7, 'audio_codec': 3, 'resolution': 2, 'video_codec': 2, 'hearing_impaired': 1}

 def get_scores(video):
@@ -160,13 +160,13 @@ def solve_episode_equations():
         Eq(audio_codec, video_codec + 1),

         # resolution counts as much as video_codec
-        Eq(resolution, video_codec),
+        Eq(resolution, video_codec),

-        # hearing impaired is as much as resolution
-        Eq(hearing_impaired, resolution),
+        # video_codec is the least valuable match but counts more than the sum of all scoring increasing matches
+        Eq(video_codec, hearing_impaired + 1),

-        # video_codec is the least valuable match
-        Eq(video_codec, 1),
+        # hearing impaired is only used for score increasing, so put it to 1
+        Eq(hearing_impaired, 1),
     ]

     return solve(equations, [hash, series, year, season, episode, release_group, format, audio_codec, resolution,
@@ -200,13 +200,13 @@ def solve_movie_equations():
         Eq(audio_codec, video_codec + 1),

         # resolution counts as much as video_codec
-        Eq(resolution, video_codec),
+        Eq(resolution, video_codec),

-        # hearing impaired is as much as resolution
-        Eq(hearing_impaired, resolution),
+        # video_codec is the least valuable match but counts more than the sum of all scoring increasing matches
+        Eq(video_codec, hearing_impaired + 1),

-        # video_codec is the least valuable match
-        Eq(video_codec, 1),
+        # hearing impaired is only used for score increasing, so put it to 1
+        Eq(hearing_impaired, 1),
     ]

     return solve(equations, [hash, title, year, release_group, format, audio_codec, resolution, hearing_impaired,

From ea413978571389ff11adf46087ed7100e9bb8f0d Mon Sep 17 00:00:00 2001
From: Fernando
Date: Tue, 7 Jun 2016 12:58:28 -0300
Subject: [PATCH 07/85] Add PP missing parameter 'delete_on' to api

---
 sickbeard/server/api/core.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/sickbeard/server/api/core.py b/sickbeard/server/api/core.py
index ee9a15738c..b82b072f15 100644
--- a/sickbeard/server/api/core.py
+++ b/sickbeard/server/api/core.py
@@ -1280,6 +1280,7 @@ class CMD_PostProcess(ApiCall):
         "return_data": {"desc": "Returns the result of the post-process"},
         "process_method": {"desc": "How should valid post-processed files be handled"},
         "is_priority": {"desc": "Replace the file even if it exists in a higher quality"},
+        "delete_on": {"desc": "Delete files and folders"},
         "failed": {"desc": "Mark download as failed"},
         "type": {"desc": "The type of post-process being requested"},
     }
@@ -1294,6 +1295,7 @@ def __init__(self, args, kwargs):
         self.process_method, args = self.check_params(args, kwargs, "process_method", False, False, "string",
                                                       ["copy", "symlink", "hardlink", "move"])
         self.is_priority, args = self.check_params(args, kwargs, "is_priority", False, False, "bool", [])
+        self.delete_on, args = self.check_params(args, kwargs, "delete_on", False, False, "bool", [])
         self.failed, args = self.check_params(args, kwargs, "failed", False, False, "bool", [])
         self.type, args = self.check_params(args, kwargs, "type", "auto", None, "string", ["auto", "manual"])
         # super, missing, help
@@ -1311,7 +1313,8 @@ def run(self):
             self.type = "manual"

         data = processTV.processDir(self.path, process_method=self.process_method, force=self.force_replace,
-                                    is_priority=self.is_priority, failed=self.failed, proc_type=self.type)
+                                    is_priority=self.is_priority, delete_on=self.delete_on, failed=self.failed,
+                                    proc_type=self.type)

         if not self.return_data:
             data = ""

From 0f7073fce94eec4db6a573ae087a2f4e0006edff Mon Sep 17 00:00:00 2001
From: Labrys
Date: Tue, 7 Jun 2016 20:09:15 -0400
Subject: [PATCH 08/85] Minor rewording

---
 sickbeard/server/api/core.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/sickbeard/server/api/core.py b/sickbeard/server/api/core.py
index b82b072f15..f4da89be53 100644
--- a/sickbeard/server/api/core.py
+++ b/sickbeard/server/api/core.py
@@ -1280,7 +1280,7 @@ class CMD_PostProcess(ApiCall):
         "return_data": {"desc": "Returns the result of the post-process"},
         "process_method": {"desc": "How should valid post-processed files be handled"},
         "is_priority": {"desc": "Replace the file even if it exists in a higher quality"},
-        "delete_on": {"desc": "Delete files and folders"},
+        "delete_files": {"desc": "Delete files and folders like auto processing"},
         "failed": {"desc": "Mark download as failed"},
         "type": {"desc": "The type of post-process being requested"},
     }
@@ -1295,7 +1295,7 @@ def __init__(self, args, kwargs):
         self.process_method, args = self.check_params(args, kwargs, "process_method", False, False, "string",
                                                       ["copy", "symlink", "hardlink", "move"])
         self.is_priority, args = self.check_params(args, kwargs, "is_priority", False, False, "bool", [])
-        self.delete_on, args = self.check_params(args, kwargs, "delete_on", False, False, "bool", [])
+        self.delete_files, args = self.check_params(args, kwargs, "delete_files", False, False, "bool", [])
         self.failed, args = self.check_params(args, kwargs, "failed", False, False, "bool", [])
         self.type, args = self.check_params(args, kwargs, "type", "auto", None, "string", ["auto", "manual"])
         # super, missing, help
@@ -1313,7 +1313,7 @@ def run(self):
             self.type = "manual"

         data = processTV.processDir(self.path, process_method=self.process_method, force=self.force_replace,
-                                    is_priority=self.is_priority, delete_on=self.delete_on, failed=self.failed,
+                                    is_priority=self.is_priority, delete_on=self.delete_files, failed=self.failed,
                                     proc_type=self.type)

         if not self.return_data:

From 1c9e97a6b5a103eab3f419acacce69e2788c7474 Mon Sep 17 00:00:00 2001
From: p0ps
Date: Thu, 9 Jun 2016 19:28:17 +0200
Subject: [PATCH 09/85] Fixed failedDownloads page, when using limit = all
 (#678)

Limit was passed as string
---
 sickbeard/server/web/manage/handler.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/sickbeard/server/web/manage/handler.py b/sickbeard/server/web/manage/handler.py
index 9d4cbce014..1470e1f1fd 100644
--- a/sickbeard/server/web/manage/handler.py
+++ b/sickbeard/server/web/manage/handler.py
@@ -696,7 +696,7 @@ def manageTorrents(self):
     def failedDownloads(self, limit=100, toRemove=None):
         failed_db_con = db.DBConnection('failed.db')

-        if limit:
+        if int(limit):
             sql_results = failed_db_con.select(
                 b'SELECT * '
                 b'FROM failed '

From 05ac4f44e087586c01c73d0b1dac0e9294cd38e5 Mon Sep 17 00:00:00 2001
From: P0psicles
Date: Sun, 12 Jun 2016 17:32:09 +0200
Subject: [PATCH 10/85] Duplicate imports

---
 sickbeard/server/web/__init__.py | 6 ------
 1 file changed, 6 deletions(-)

diff --git a/sickbeard/server/web/__init__.py b/sickbeard/server/web/__init__.py
index ece587b3af..31c0914462 100644
--- a/sickbeard/server/web/__init__.py
+++ b/sickbeard/server/web/__init__.py
@@ -36,12 +36,6 @@
     HomeChangeLog,
     HomePostProcess,
     HomeAddShows,
-    Home,
-    HomeIRC,
-    HomeNews,
-    HomeChangeLog,
-    HomePostProcess,
-    HomeAddShows,
 )
 from sickbeard.server.web.manage import (
     Manage,

From a8a79e58b8b75010c872a8669ef7dcc78424fef5 Mon Sep 17 00:00:00 2001
From: supergonkas
Date: Tue, 14 Jun 2016 15:57:10 +0100
Subject: [PATCH 11/85] Add xspeeds icon (#685)

---
 gui/slick/images/providers/xspeeds.png    | Bin 0 -> 573 bytes
 gui/slick/images/providers/xspeeds_eu.png | Bin 0 -> 573 bytes
 2 files changed, 0 insertions(+), 0 deletions(-)
 create mode 100644 gui/slick/images/providers/xspeeds.png
 create mode 100644 gui/slick/images/providers/xspeeds_eu.png

diff --git a/gui/slick/images/providers/xspeeds.png b/gui/slick/images/providers/xspeeds.png
new file mode 100644
index 0000000000000000000000000000000000000000..13ca7fdbf8e4e3ba2061ec105d170262b47ba773
GIT binary patch
literal 573
zcmV-D0>b@?P)fPgWp6C5K=W~ufe*?b}1M2lUN~IDCg#z;VJc)l7i$y#>KB8K!@;R5w;r{*}
zcXxNlX0sxI43yv(sZ`g{UteFW!{H!G1Ux-Gfr3Dt
zP6rhIexKW1uh;ne{KRlLWDN!bjK^b4CKD0B?RC3d2(ef!aCLP>0IvG>_C~{_xp3a&
z@sLCSwawaWHst56ghC-)US47}8sVQ{V6)kT-EJ2FOnrWSh7i-~6n?)Sfj|JkU=Wo`
z1&v07jC`HtatW)|Dgt=Q`~4mtA0OgXdc7V)Tdfv#!~?F?Y6Rd}8I8sx;Pv&DO%D$b
z+}iv5JN_5D-HunT)9FM2SAThVVYAt6hS%$*0Sclk(*Ak(7Z(>$DwR;FR7Zfz<-+;-
zIhzdz15Qs*advhFyfo4}PO>U2|>&w3AJJ00000
LNkvXXu0mjfw1NQG

literal 0
HcmV?d00001

diff --git a/gui/slick/images/providers/xspeeds_eu.png b/gui/slick/images/providers/xspeeds_eu.png
new file mode 100644
index 0000000000000000000000000000000000000000..13ca7fdbf8e4e3ba2061ec105d170262b47ba773
GIT binary patch
literal 573
zcmV-D0>b@?P)fPgWp6C5K=W~ufe*?b}1M2lUN~IDCg#z;VJc)l7i$y#>KB8K!@;R5w;r{*}
zcXxNlX0sxI43yv(sZ`g{UteFW!{H!G1Ux-Gfr3Dt
zP6rhIexKW1uh;ne{KRlLWDN!bjK^b4CKD0B?RC3d2(ef!aCLP>0IvG>_C~{_xp3a&
z@sLCSwawaWHst56ghC-)US47}8sVQ{V6)kT-EJ2FOnrWSh7i-~6n?)Sfj|JkU=Wo`
z1&v07jC`HtatW)|Dgt=Q`~4mtA0OgXdc7V)Tdfv#!~?F?Y6Rd}8I8sx;Pv&DO%D$b
z+}iv5JN_5D-HunT)9FM2SAThVVYAt6hS%$*0Sclk(*Ak(7Z(>$DwR;FR7Zfz<-+;-
zIhzdz15Qs*advhFyfo4}PO>U2|>&w3AJJ00000
LNkvXXu0mjfw1NQG

literal 0
HcmV?d00001

From 5c74c48f0aaff4d6baccc853fc3844ee435b0977 Mon Sep 17 00:00:00 2001
From: Dario
Date: Tue, 14 Jun 2016 16:59:23 +0200
Subject: [PATCH 12/85] Update subliminal to 2.0.3 (master), update Itasa
 (#682)

---
 lib/subliminal/__init__.py             |  2 +-
 lib/subliminal/cli.py                  |  6 ++--
 lib/subliminal/providers/itasa.py      | 43 +++++++++++++++++---------
 lib/subliminal/providers/legendastv.py |  2 +-
 4 files changed, 35 insertions(+), 18 deletions(-)

diff --git a/lib/subliminal/__init__.py b/lib/subliminal/__init__.py
index 73b137e987..187e618b31 100644
--- a/lib/subliminal/__init__.py
+++ b/lib/subliminal/__init__.py
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
 __title__ = 'subliminal'
-__version__ = '2.1.0.dev'
+__version__ = '2.0.3'
 __short_version__ = '.'.join(__version__.split('.')[:2])
 __author__ = 'Antoine Bertin'
 __license__ = 'MIT'

diff --git a/lib/subliminal/cli.py b/lib/subliminal/cli.py
index f6c5425a84..e2a78cf153 100644
--- a/lib/subliminal/cli.py
+++ b/lib/subliminal/cli.py
@@ -6,6 +6,7 @@
 from __future__ import division
 from collections import defaultdict
 from datetime import timedelta
+import glob
 import json
 import logging
 import os
@@ -15,7 +16,7 @@
 from babelfish import Error as BabelfishError, Language
 import click
 from dogpile.cache.backends.file import AbstractFileLock
-from dogpile.core import ReadWriteMutex
+from dogpile.util.readwrite_lock import ReadWriteMutex
 from six.moves import configparser

 from subliminal import (AsyncProviderPool, Episode, Movie, Video, __version__, check_video, compute_score, get_scores,
@@ -266,7 +267,8 @@ def subliminal(ctx, addic7ed, itasa, legendastv, opensubtitles, subscenter, cach
 def cache(ctx, clear_subliminal):
     """Cache management."""
     if clear_subliminal:
-        os.remove(os.path.join(ctx.parent.params['cache_dir'], cache_file))
+        for file in glob.glob(os.path.join(ctx.parent.params['cache_dir'], cache_file) + '*'):
+            os.remove(file)
         click.echo('Subliminal\'s cache cleared.')
     else:
         click.echo('Nothing done.')

diff --git a/lib/subliminal/providers/itasa.py b/lib/subliminal/providers/itasa.py
index 3c01203086..f4478113ff 100644
--- a/lib/subliminal/providers/itasa.py
+++ b/lib/subliminal/providers/itasa.py
@@ -268,8 +268,15 @@ def _get_season_subtitles(self, show_id, season, sub_format):
         root = etree.fromstring(r.content)

         if int(root.find('data/count').text) == 0:
-            logger.warning('Subtitles for season not found')
-            return []
+            logger.warning('Subtitles for season not found, try with rip suffix')
+
+            params['version'] = sub_format + 'rip'
+            r = self.session.get(self.server_url + 'subtitles/search', params=params, timeout=30)
+            r.raise_for_status()
+            root = etree.fromstring(r.content)
+            if int(root.find('data/count').text) == 0:
+                logger.warning('Subtitles for season not found')
+                return []

         subs = []
         # Looking for subtitles in first page
@@ -360,18 +367,26 @@ def query(self, series, season, episode, video_format, resolution, country=None)
         root = etree.fromstring(r.content)

         if int(root.find('data/count').text) == 0:
-            logger.warning('Subtitles not found')
-            # If no subtitle are found for single episode try to download all season zip
-            subs = self._get_season_subtitles(show_id, season, sub_format)
-            if subs:
-                for subtitle in subs:
-                    subtitle.format = video_format
-                    subtitle.year = year
-                    subtitle.tvdb_id = tvdb_id
-
-                return subs
-            else:
-                return []
+            logger.warning('Subtitles not found, try with rip suffix')
+
+            params['version'] = sub_format + 'rip'
+            r = self.session.get(self.server_url + 'subtitles/search', params=params, timeout=30)
+            r.raise_for_status()
+            root = etree.fromstring(r.content)
+            if int(root.find('data/count').text) == 0:
+                logger.warning('Subtitles not found, go season mode')
+
+                # If no subtitle are found for single episode try to download all season zip
+                subs = self._get_season_subtitles(show_id, season, sub_format)
+                if subs:
+                    for subtitle in subs:
+                        subtitle.format = video_format
+                        subtitle.year = year
+                        subtitle.tvdb_id = tvdb_id
+
+                    return subs
+                else:
+                    return []

diff --git a/lib/subliminal/providers/legendastv.py b/lib/subliminal/providers/legendastv.py
index 7c3cc74d13..cdd16aca25 100644
--- a/lib/subliminal/providers/legendastv.py
+++ b/lib/subliminal/providers/legendastv.py
@@ -303,7 +303,7 @@ def get_archives(self, title_id, language_code):
                 archives.append(archive)

             # stop on last page
-            if soup.find('a', attrs={'class': 'load_more'}, text='carregar mais') is None:
+            if soup.find('a', attrs={'class': 'load_more'}, string='carregar mais') is None:
                 break

             # increment page count

From 5878701a31da668f4ac370cf5b45b5931a098280 Mon Sep 17 00:00:00 2001
From: Labrys of Knossos
Date: Wed, 15 Jun 2016 04:33:20 -0400
Subject: [PATCH 13/85] Unicode mass edit (#688)

* Fix unicode error when changing root directories with mass edit

* Fix typo
---
 sickbeard/server/web/home/add_shows.py | 4 ++--
 sickbeard/server/web/home/handler.py   | 1 -
 2 files changed, 2 insertions(+), 3 deletions(-)

diff --git a/sickbeard/server/web/home/add_shows.py b/sickbeard/server/web/home/add_shows.py
index 6bbe2ea9ec..d651b7a823 100644
--- a/sickbeard/server/web/home/add_shows.py
+++ b/sickbeard/server/web/home/add_shows.py
@@ -149,7 +149,7 @@ def massAddTable(self, rootDir=None):
             dir_results = main_db_con.select(
                 b'SELECT indexer_id '
                 b'FROM tv_shows '
-                b'WHERE location = ? LIMIT 1',
+                b'WHERE location = ? LIMIT 1',
                 [cur_path]
             )
@@ -534,7 +534,7 @@ def finishAddShow():
         series_pieces = whichSeries.split('|')
         if (whichSeries and rootDir) or (whichSeries and fullShowPath and len(series_pieces) > 1):
             if len(series_pieces) < 6:
-                logger.log(u'Unable to add show due to show selection. Not anough arguments: %s' % (repr(series_pieces)),
+                logger.log(u'Unable to add show due to show selection. Not enough arguments: %s' % (repr(series_pieces)),
                            logger.ERROR)
                 ui.notifications.error('Unknown error. Unable to add show due to problem with show selection.')
                 return self.redirect('/addShows/existingShows/')

diff --git a/sickbeard/server/web/home/handler.py b/sickbeard/server/web/home/handler.py
index a22ab3d019..1812c19628 100644
--- a/sickbeard/server/web/home/handler.py
+++ b/sickbeard/server/web/home/handler.py
@@ -1369,7 +1369,6 @@ def editShow(self, show=None, location=None, anyQualities=[], bestQualities=[],
             show_obj.rls_ignore_words = rls_ignore_words.strip()
             show_obj.rls_require_words = rls_require_words.strip()

-        location = location.decode('UTF-8')
         # if we change location clear the db of episodes, change it, write to db, and rescan
         old_location = ek(os.path.normpath, show_obj._location)
         new_location = ek(os.path.normpath, location)

From 179af4bed7725ae2c558db42bc019398bfebc3ec Mon Sep 17 00:00:00 2001
From: Labrys of Knossos
Date: Fri, 17 Jun 2016 11:22:18 -0400
Subject: [PATCH 14/85] Fix notifications (#694)

* Fix wording

* Fix #693 - UnicodeError in notifications
---
 sickbeard/search_queue.py         | 4 ++--
 sickbeard/server/web/core/base.py | 6 +++---
 2 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/sickbeard/search_queue.py b/sickbeard/search_queue.py
index 533bd0c9b5..e4e6f7c810 100644
--- a/sickbeard/search_queue.py
+++ b/sickbeard/search_queue.py
@@ -331,10 +331,10 @@ def run(self):
             self.results = search_result
             self.success = True
             if self.manual_search_type == 'season':
-                ui.notifications.message("We have found season pack results for {0}".format(self.show.name),
+                ui.notifications.message("We have found season packs for {0}".format(self.show.name),
                                          "These should become visible in the manual select page.")
             else:
-                ui.notifications.message("We have found single results for {0}".format(self.segment[0].prettyName()),
+                ui.notifications.message("We have found results for {0}".format(self.segment[0].prettyName()),
                                          "These should become visible in the manual select page.")
         else:
             ui.notifications.message('No results were found')

diff --git a/sickbeard/server/web/core/base.py b/sickbeard/server/web/core/base.py
index e997bb2cf4..20c4d7a477 100644
--- a/sickbeard/server/web/core/base.py
+++ b/sickbeard/server/web/core/base.py
@@ -472,9 +472,9 @@ def get_messages(self):
         cur_notification_num = 1
         for cur_notification in ui.notifications.get_notifications(self.request.remote_ip):
             messages['notification-{number}'.format(number=cur_notification_num)] = {
-                'title': cur_notification.title,
-                'message': cur_notification.message,
-                'type': cur_notification.type,
+                'title': '{0}'.format(cur_notification.title),
+                'message': '{0}'.format(cur_notification.message),
+                'type': '{0}'.format(cur_notification.type),
             }
             cur_notification_num += 1

From 9882225e07201c181176eeff450e9167246487e9 Mon Sep 17 00:00:00 2001
From: P0psicles
Date: Tue, 24 May 2016 19:30:54 +0200
Subject: [PATCH 15/85] Just some code cleaning

* changed % to format
* cut long lines
---
 sickbeard/tvcache.py | 154 ++++++++++++++++++++++---------------------
 1 file changed, 80 insertions(+), 74 deletions(-)

diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py
index 06b543f12a..1456bdb622 100644
--- a/sickbeard/tvcache.py
+++ b/sickbeard/tvcache.py
@@ -32,68 +32,70 @@

 class CacheDBConnection(db.DBConnection):
-    def __init__(self, providerName):
+    def __init__(self, provider_id):
         db.DBConnection.__init__(self, 'cache.db')

         # Create the table if it's not already there
         try:
-            if not self.hasTable(providerName):
-                logger.log(u"Creating cache table for provider {}".format(providerName), logger.DEBUG)
+            if not self.hasTable(provider_id):
+                logger.log(u'Creating cache table for provider {0}'.format(provider_id), logger.DEBUG)
                 self.action(
-                    "CREATE TABLE [" + providerName + "] (name TEXT, season NUMERIC, episodes TEXT, indexerid NUMERIC, url TEXT, time NUMERIC, quality NUMERIC, release_group TEXT)")
+                    'CREATE TABLE [{0}] (name TEXT, season NUMERIC, episodes TEXT, indexerid NUMERIC,'
+                    'url TEXT, time NUMERIC, quality NUMERIC, release_group TEXT)'.format(provider_id))
             else:
-                sql_results = self.select("SELECT url, COUNT(url) AS count FROM [" + providerName + "] GROUP BY url HAVING count > 1")
+                sql_results = self.select('SELECT url, COUNT(url) AS count FROM [{0}] '
+                                          'GROUP BY url HAVING count > 1'.format(provider_id))

                 for cur_dupe in sql_results:
-                    self.action("DELETE FROM [" + providerName + "] WHERE url = ?", [cur_dupe["url"]])
+                    self.action('DELETE FROM [{0}] WHERE url = ?'.format(provider_id), [cur_dupe['url']])

             # remove wrong old index
-            self.action("DROP INDEX IF EXISTS idx_url")
+            self.action('DROP INDEX IF EXISTS idx_url')

             # add unique index to prevent further dupes from happening if one does not exist
-            logger.log(u"Creating UNIQUE URL index for {}".format(providerName), logger.DEBUG)
-            self.action("CREATE UNIQUE INDEX IF NOT EXISTS idx_url_" + providerName + " ON [" + providerName + "] (url)")
+            logger.log(u'Creating UNIQUE URL index for {0}'.format(provider_id), logger.DEBUG)
+            self.action('CREATE UNIQUE INDEX IF NOT EXISTS idx_url_{0} ON [{1}] (url)'.format(provider_id, provider_id))

             # add release_group column to table if missing
-            if not self.hasColumn(providerName, 'release_group'):
-                self.addColumn(providerName, 'release_group', "TEXT", "")
+            if not self.hasColumn(provider_id, 'release_group'):
+                self.addColumn(provider_id, 'release_group', 'TEXT', '')

             # add version column to table if missing
-            if not self.hasColumn(providerName, 'version'):
-                self.addColumn(providerName, 'version', "NUMERIC", "-1")
+            if not self.hasColumn(provider_id, 'version'):
+                self.addColumn(provider_id, 'version', 'NUMERIC', '-1')

             # add seeders column to table if missing
-            if not self.hasColumn(providerName, 'seeders'):
-                self.addColumn(providerName, 'seeders', "NUMERIC", "-1")
+            if not self.hasColumn(provider_id, 'seeders'):
+                self.addColumn(provider_id, 'seeders', 'NUMERIC', '-1')

             # add leechers column to table if missing
-            if not self.hasColumn(providerName, 'leechers'):
-                self.addColumn(providerName, 'leechers', "NUMERIC", "-1")
+            if not self.hasColumn(provider_id, 'leechers'):
+                self.addColumn(provider_id, 'leechers', 'NUMERIC', '-1')

             # add size column to table if missing
-            if not self.hasColumn(providerName, 'size'):
-                self.addColumn(providerName, 'size', "NUMERIC", "-1")
+            if not self.hasColumn(provider_id, 'size'):
+                self.addColumn(provider_id, 'size', 'NUMERIC', '-1')

             # add pubdate column to table if missing
-            if not self.hasColumn(providerName, 'pubdate'):
-                self.addColumn(providerName, 'pubdate', "NUMERIC", "")
+            if not self.hasColumn(provider_id, 'pubdate'):
+                self.addColumn(provider_id, 'pubdate', 'NUMERIC', '')

             # add hash column to table if missing
-            if not self.hasColumn(providerName, 'hash'):
-                self.addColumn(providerName, 'hash', "NUMERIC", "")
+            if not self.hasColumn(provider_id, 'hash'):
+                self.addColumn(provider_id, 'hash', 'NUMERIC', '')

         except Exception as e:
-            if str(e) != "table [" + providerName + "] already exists":
+            if str(e) != 'table [{0}] already exists'.format(provider_id):
                 raise

         # Create the table if it's not already there
         try:
             if not self.hasTable('lastUpdate'):
self.action("CREATE TABLE lastUpdate (provider TEXT, time NUMERIC)") + self.action('CREATE TABLE lastUpdate (provider TEXT, time NUMERIC)') except Exception as e: - logger.log(u"Error while searching " + self.provider.name + ", skipping: " + repr(e), logger.DEBUG) + logger.log(u'Error while searching {0}, skipping: {1!r}'.format(self.provider.name, e), logger.DEBUG) logger.log(traceback.format_exc(), logger.DEBUG) - if str(e) != "table lastUpdate already exists": + if str(e) != 'table lastUpdate already exists': raise @@ -132,7 +134,7 @@ def trim_cache(self, days=None): retention_period = now - (days * 86400) logger.log(u'Removing cache entries older than {x} days from {provider}'.format (x=days, provider=self.providerID)) - cache_db_con = self._getDB() + cache_db_con = self._get_db() cache_db_con.action( b'DELETE FROM [{provider}] ' b'WHERE time < ? '.format(provider=self.providerID), @@ -196,26 +198,27 @@ def updateCache(self): cache_db_con.mass_action(cl) except AuthException as e: - logger.log(u"Authentication error: " + ex(e), logger.ERROR) + logger.log(u'Authentication error: {0!r}'.format(e), logger.ERROR) except Exception as e: - logger.log(u"Error while searching " + self.provider.name + ", skipping: " + repr(e), logger.DEBUG) + logger.log(u'Error while searching {0}, skipping: {1!r}'.format(self.provider.name, e), logger.DEBUG) def update_cache_manual_search(self, manual_data=None): try: cl = [] for item in manual_data: - logger.log(u"Adding to cache item found in manual search: {}".format(item.name), logger.DEBUG) + logger.log(u'Adding to cache item found in manual search: {0}'.format(item.name), logger.DEBUG) ci = self._addCacheEntry(item.name, item.url, item.seeders, item.leechers, item.size, item.pubdate, item.hash) if ci is not None: cl.append(ci) except Exception as e: - logger.log(u"Error while adding to cache item found in manual seach for provider " + self.provider.name + ", skipping: " + repr(e), logger.WARNING) + logger.log(u'Error while adding to cache item found in manual seach for provider {0}, skipping: {1!r}'.format + (self.provider.name, e), logger.WARNING) results = [] cache_db_con = self._getDB() if cl: - logger.log("Mass updating cache table with manual results for provider: {}".format(self.provider.name), logger.DEBUG) + logger.log(u'Mass updating cache table with manual results for provider: {0}'.format(self.provider.name), logger.DEBUG) results = cache_db_con.mass_action(cl) return any(results) @@ -227,7 +230,7 @@ def getRSSFeed(self, url, params=None): @staticmethod def _translateTitle(title): - return u'' + title.replace(' ', '.') + return u'{0}'.format(title.replace(' ', '.')) @staticmethod def _translateLinkURL(url): @@ -250,18 +253,17 @@ def _parseItem(self, item): return self._addCacheEntry(title, url, seeders, leechers, size, pubdate, hash) else: - logger.log( - u"The data returned from the " + self.provider.name + " feed is incomplete, this result is unusable", - logger.DEBUG) + logger.log(u'The data returned from the {0} feed is incomplete, this result is unusable'.format + (self.provider.name), logger.DEBUG) return False def _getLastUpdate(self): cache_db_con = self._getDB() - sql_results = cache_db_con.select("SELECT time FROM lastUpdate WHERE provider = ?", [self.providerID]) + sql_results = cache_db_con.select('SELECT time FROM lastUpdate WHERE provider = ?', [self.providerID]) if sql_results: - lastTime = int(sql_results[0]["time"]) + lastTime = int(sql_results[0]['time']) if lastTime > int(time.mktime(datetime.datetime.today().timetuple())): 
lastTime = 0 else: @@ -271,10 +273,10 @@ def _getLastUpdate(self): def _getLastSearch(self): cache_db_con = self._getDB() - sql_results = cache_db_con.select("SELECT time FROM lastSearch WHERE provider = ?", [self.providerID]) + sql_results = cache_db_con.select('SELECT time FROM lastSearch WHERE provider = ?', [self.providerID]) if sql_results: - lastTime = int(sql_results[0]["time"]) + lastTime = int(sql_results[0]['time']) if lastTime > int(time.mktime(datetime.datetime.today().timetuple())): lastTime = 0 else: @@ -288,7 +290,7 @@ def setLastUpdate(self, toDate=None): cache_db_con = self._getDB() cache_db_con.upsert( - "lastUpdate", + 'lastUpdate', {'time': int(time.mktime(toDate.timetuple()))}, {'provider': self.providerID} ) @@ -299,7 +301,7 @@ def setLastSearch(self, toDate=None): cache_db_con = self._getDB() cache_db_con.upsert( - "lastSearch", + 'lastSearch', {'time': int(time.mktime(toDate.timetuple()))}, {'provider': self.providerID} ) @@ -310,7 +312,8 @@ def setLastSearch(self, toDate=None): def shouldUpdate(self): # if we've updated recently then skip the update if datetime.datetime.today() - self.lastUpdate < datetime.timedelta(minutes=self.minTime): - logger.log(u"Last update was too soon, using old cache: " + str(self.lastUpdate) + ". Updated less then " + str(self.minTime) + " minutes ago", logger.DEBUG) + logger.log(u'Last update was too soon, using old cache: {0}. Updated less than {1} minutes ago.'.format + (self.lastUpdate, self.minTime), logger.DEBUG) return False return True @@ -327,7 +330,7 @@ def _addCacheEntry(self, name, url, seeders, leechers, size, pubdate, hash): try: parse_result = NameParser().parse(name) except (InvalidNameException, InvalidShowException) as error: - logger.log(u"{}".format(error), logger.DEBUG) + logger.log(u'{0}'.format(error), logger.DEBUG) return None if not parse_result or not parse_result.series_name: @@ -339,7 +342,7 @@ def _addCacheEntry(self, name, url, seeders, leechers, size, pubdate, hash): if season is not None and episodes is not None: # store episodes as a seperated string - episodeText = "|" + "|".join({str(episode) for episode in episodes if episode}) + "|" + episodeText = '|{0}|'.format('|'.join({str(episode) for episode in episodes if episode})) # get the current timestamp curTimestamp = int(time.mktime(datetime.datetime.today().timetuple())) @@ -355,11 +358,13 @@ def _addCacheEntry(self, name, url, seeders, leechers, size, pubdate, hash): # get version version = parse_result.version - logger.log(u"Added RSS item: [" + name + "] to cache: [" + self.providerID + "]", logger.DEBUG) + logger.log(u'Added RSS item: [{0}] to cache: [{1}]'.format(name, self.providerID), logger.DEBUG) return [ - "INSERT OR REPLACE INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality, release_group, version, seeders, leechers, size, pubdate, hash) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?)", - [name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality, release_group, version, seeders, leechers, size, pubdate, hash]] + 'INSERT OR REPLACE INTO [{0}] (name, season, episodes, indexerid, url, time, quality, release_group, ' + 'version, seeders, leechers, size, pubdate, hash) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?)'.format(self.providerID), + [name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality, + release_group, version, seeders, leechers, size, pubdate, hash]] def searchCache(self, episode, forced_search=False, downCurQuality=False): neededEps =
self.findNeededEpisodes(episode, forced_search, downCurQuality) @@ -367,10 +372,10 @@ def searchCache(self, episode, forced_search=False, downCurQuality=False): def listPropers(self, date=None): cache_db_con = self._getDB() - sql = "SELECT * FROM [" + self.providerID + "] WHERE name LIKE '%.PROPER.%' OR name LIKE '%.REPACK.%'" + sql = "SELECT * FROM [{0}] WHERE name LIKE '%.PROPER.%' OR name LIKE '%.REPACK.%'".format(self.providerID) if date is not None: - sql += " AND time >= " + str(int(time.mktime(date.timetuple()))) + sql += ' AND time >= {0}'.format(int(time.mktime(date.timetuple()))) propers_results = cache_db_con.select(sql) return [x for x in propers_results if x['indexerid']] @@ -381,17 +386,18 @@ def findNeededEpisodes(self, episode, forced_search=False, downCurQuality=False) cache_db_con = self._getDB() if not episode: - sql_results = cache_db_con.select("SELECT * FROM [" + self.providerID + "]") + sql_results = cache_db_con.select('SELECT * FROM [{0}]'.format(self.providerID)) elif not isinstance(episode, list): sql_results = cache_db_con.select( - "SELECT * FROM [" + self.providerID + "] WHERE indexerid = ? AND season = ? AND episodes LIKE ?", - [episode.show.indexerid, episode.season, "%|" + str(episode.episode) + "|%"]) + 'SELECT * FROM [{0}] WHERE indexerid = ? AND season = ? AND episodes LIKE ?'.format(self.providerID), + [episode.show.indexerid, episode.season, '%|{0}|%'.format(episode.episode)]) else: for epObj in episode: cl.append([ - "SELECT * FROM [" + self.providerID + "] WHERE indexerid = ? AND season = ? AND episodes LIKE ? AND quality IN (" + ",".join( - [str(x) for x in epObj.wantedQuality]) + ")", - [epObj.show.indexerid, epObj.season, "%|" + str(epObj.episode) + "|%"]]) + 'SELECT * FROM [{0}] WHERE indexerid = ? AND season = ? AND episodes LIKE ? 
AND quality IN ({1})'.format + (self.providerID, ','.join( + [str(x) for x in epObj.wantedQuality])), + [epObj.show.indexerid, epObj.season, '%|{0}|%'.format(epObj.episode)]]) sql_results = cache_db_con.mass_action(cl, fetchall=True) sql_results = list(itertools.chain(*sql_results)) @@ -399,55 +405,55 @@ def findNeededEpisodes(self, episode, forced_search=False, downCurQuality=False) # for each cache entry for curResult in sql_results: # ignored/required words, and non-tv junk - if not show_name_helpers.filterBadReleases(curResult["name"]): + if not show_name_helpers.filterBadReleases(curResult['name']): continue # get the show object, or if it's not one of our shows then ignore it - showObj = Show.find(sickbeard.showList, int(curResult["indexerid"])) + showObj = Show.find(sickbeard.showList, int(curResult['indexerid'])) if not showObj: continue # skip if provider is anime only and show is not anime if self.provider.anime_only and not showObj.is_anime: - logger.log(u"" + str(showObj.name) + " is not an anime, skiping", logger.DEBUG) + logger.log(u'{0} is not an anime, skipping'.format(showObj.name), logger.DEBUG) continue # get season and ep data (ignoring multi-eps for now) - curSeason = int(curResult["season"]) + curSeason = int(curResult['season']) if curSeason == -1: continue - curEp = curResult["episodes"].split("|")[1] + curEp = curResult['episodes'].split('|')[1] if not curEp: continue curEp = int(curEp) - curQuality = int(curResult["quality"]) - curReleaseGroup = curResult["release_group"] - curVersion = curResult["version"] + curQuality = int(curResult['quality']) + curReleaseGroup = curResult['release_group'] + curVersion = curResult['version'] # if the show says we want that episode then add it to the list if not showObj.wantEpisode(curSeason, curEp, curQuality, forced_search, downCurQuality): - logger.log(u"Ignoring " + curResult["name"], logger.DEBUG) + logger.log(u'Ignoring {0}'.format(curResult['name']), logger.DEBUG) continue epObj = showObj.getEpisode(curSeason, curEp) # build a result object - title = curResult["name"] - url = curResult["url"] - logger.log(u"Found result " + title + " at " + url) + title = curResult['name'] + url = curResult['url'] + logger.log(u'Found result {0} at {1}'.format(title, url)) result = self.provider.get_result([epObj]) result.show = showObj result.url = url - result.seeders = curResult["seeders"] - result.leechers = curResult["leechers"] - result.size = curResult["size"] - result.pubdate = curResult["pubdate"] - result.hash = curResult["hash"] + result.seeders = curResult['seeders'] + result.leechers = curResult['leechers'] + result.size = curResult['size'] + result.pubdate = curResult['pubdate'] + result.hash = curResult['hash'] result.name = title result.quality = curQuality result.release_group = curReleaseGroup From e06050399af3d5a4e75497bf281b5f1230162a6d Mon Sep 17 00:00:00 2001 From: P0psicles Date: Tue, 24 May 2016 22:03:15 +0200 Subject: [PATCH 16/85] Some cleaning, and implemented a get_last_cached_items() method for some testing.
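The method itself does not appear in the hunks below; the idea (assumed here, sketched for illustration only, not the final implementation) is to return the newest rows of the provider's cache table:

    def get_last_cached_items(self, limit=5):
        # Assumed sketch: fetch the most recently cached rows for this
        # provider, newest first, capped at `limit`.
        cache_db_con = self._get_db()
        return cache_db_con.select(
            'SELECT * FROM [{0}] ORDER BY time DESC LIMIT ?'.format(self.provider_id),
            [limit])

updateCache() can then compare fresh RSS entries against these rows and stop parsing early once it hits already-known items.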
--- sickbeard/tvcache.py | 53 ++++++++++++++++++++++---------------------- 1 file changed, 26 insertions(+), 27 deletions(-) diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py index 1456bdb622..ca812cc836 100644 --- a/sickbeard/tvcache.py +++ b/sickbeard/tvcache.py @@ -102,17 +102,17 @@ def __init__(self, provider_id): class TVCache(object): def __init__(self, provider, **kwargs): self.provider = provider - self.providerID = self.provider.get_id() - self.providerDB = None + self.provider_id = self.provider.get_id() + self.provider_db = None self.minTime = kwargs.pop(u'min_time', 10) self.search_params = kwargs.pop(u'search_params', dict(RSS=[''])) - def _getDB(self): + def _get_db(self): # init provider database if not done already - if not self.providerDB: - self.providerDB = CacheDBConnection(self.providerID) + if not self.provider_db: + self.provider_db = CacheDBConnection(self.provider_id) - return self.providerDB + return self.provider_db def _clearCache(self): """ @@ -193,7 +193,7 @@ def updateCache(self): if ci is not None: cl.append(ci) - cache_db_con = self._getDB() + cache_db_con = self._get_db() if cl: cache_db_con.mass_action(cl) @@ -216,7 +216,7 @@ def update_cache_manual_search(self, manual_data=None): (self.provider.name, e), logger.WARNING) results = [] - cache_db_con = self._getDB() + cache_db_con = self._get_db() if cl: logger.log(u'Mass updating cache table with manual results for provider: {0}'.format(self.provider.name), logger.DEBUG) results = cache_db_con.mass_action(cl) @@ -241,7 +241,6 @@ def _parseItem(self, item): seeders, leechers = self._get_result_info(item) size = self._get_size(item) pubdate = self._get_pubdate(item) - hash = self._get_hash(item) self._checkItemAuth(title, url) @@ -249,8 +248,8 @@ def _parseItem(self, item): title = self._translateTitle(title) url = self._translateLinkURL(url) - # logger.log(u"Attempting to add item to cache: " + title, logger.DEBUG) - return self._addCacheEntry(title, url, seeders, leechers, size, pubdate, hash) + # Placed the self._get_hash(item) inline, because hash is a builtin; shadowing it could cause issues.
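+ # (e.g. after "hash = self._get_hash(item)", a later call to hash(x) in this scope would resolve to the local name rather than the builtin function)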
+ return self._addCacheEntry(title, url, seeders, leechers, size, pubdate, self._get_hash(item)) else: logger.log(u'The data returned from the {0} feed is incomplete, this result is unusable'.format @@ -259,8 +258,8 @@ def _parseItem(self, item): return False def _getLastUpdate(self): - cache_db_con = self._getDB() - sql_results = cache_db_con.select('SELECT time FROM lastUpdate WHERE provider = ?', [self.providerID]) + cache_db_con = self._get_db() + sql_results = cache_db_con.select('SELECT time FROM lastUpdate WHERE provider = ?', [self.provider_id]) if sql_results: lastTime = int(sql_results[0]['time']) @@ -272,8 +271,8 @@ def _getLastUpdate(self): return datetime.datetime.fromtimestamp(lastTime) def _getLastSearch(self): - cache_db_con = self._getDB() - sql_results = cache_db_con.select('SELECT time FROM lastSearch WHERE provider = ?', [self.providerID]) + cache_db_con = self._get_db() + sql_results = cache_db_con.select('SELECT time FROM lastSearch WHERE provider = ?', [self.provider_id]) if sql_results: lastTime = int(sql_results[0]['time']) @@ -288,22 +287,22 @@ def setLastUpdate(self, toDate=None): if not toDate: toDate = datetime.datetime.today() - cache_db_con = self._getDB() + cache_db_con = self._get_db() cache_db_con.upsert( 'lastUpdate', {'time': int(time.mktime(toDate.timetuple()))}, - {'provider': self.providerID} + {'provider': self.provider_id} ) def setLastSearch(self, toDate=None): if not toDate: toDate = datetime.datetime.today() - cache_db_con = self._getDB() + cache_db_con = self._get_db() cache_db_con.upsert( 'lastSearch', {'time': int(time.mktime(toDate.timetuple()))}, - {'provider': self.providerID} + {'provider': self.provider_id} ) lastUpdate = property(_getLastUpdate) @@ -358,11 +357,11 @@ def _addCacheEntry(self, name, url, seeders, leechers, size, pubdate, hash): # get version version = parse_result.version - logger.log(u'Added RSS item: [{0}] to cache: [{1}]'.format(name, self.providerID), logger.DEBUG) + logger.log(u'Added RSS item: [{0}] to cache: [{1}]'.format(name, self.provider_id), logger.DEBUG) return [ 'INSERT OR REPLACE INTO [{0}] (name, season, episodes, indexerid, url, time, quality, release_group, ' - 'version, seeders, leechers, size, pubdate, hash) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?)'.format(self.providerID), + 'version, seeders, leechers, size, pubdate, hash) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?)'.format(self.provider_id), [name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality, release_group, version, seeders, leechers, size, pubdate, hash]] @@ -371,8 +370,8 @@ def searchCache(self, episode, forced_search=False, downCurQuality=False): return neededEps[episode] if episode in neededEps else [] def listPropers(self, date=None): - cache_db_con = self._getDB() - sql = "SELECT * FROM [{0}] WHERE name LIKE '%.PROPER.%' OR name LIKE '%.REPACK.%'".format(self.providerID) + cache_db_con = self._get_db() + sql = "SELECT * FROM [{0}] WHERE name LIKE '%.PROPER.%' OR name LIKE '%.REPACK.%'".format(self.provider_id) if date is not None: sql += ' AND time >= {0}'.format(int(time.mktime(date.timetuple()))) @@ -384,18 +383,18 @@ def findNeededEpisodes(self, episode, forced_search=False, downCurQuality=False) neededEps = {} cl = [] - cache_db_con = self._getDB() + cache_db_con = self._get_db() if not episode: - sql_results = cache_db_con.select('SELECT * FROM [{0}]'.format(self.providerID)) + sql_results = cache_db_con.select('SELECT * FROM [{0}]'.format(self.provider_id)) elif not isinstance(episode, list): sql_results = 
cache_db_con.select( - 'SELECT * FROM [{0}] WHERE indexerid = ? AND season = ? AND episodes LIKE ?'.format(self.providerID), + 'SELECT * FROM [{0}] WHERE indexerid = ? AND season = ? AND episodes LIKE ?'.format(self.provider_id), [episode.show.indexerid, episode.season, '%|{0}|%'.format(episode.episode)]) else: for epObj in episode: cl.append([ 'SELECT * FROM [{0}] WHERE indexerid = ? AND season = ? AND episodes LIKE ? AND quality IN ({1})'.format - (self.providerID, ','.join( + (self.provider_id, ','.join( [str(x) for x in epObj.wantedQuality])), [epObj.show.indexerid, epObj.season, '%|{0}|%'.format(epObj.episode)]]) From a76e364273383798471540ab93a45db45bdaa5cc Mon Sep 17 00:00:00 2001 From: P0psicles Date: Wed, 25 May 2016 15:30:52 +0200 Subject: [PATCH 17/85] First version of the parse reduction. Used a new table, shared by all providers, to keep track of the 5 newest releases. All results are then matched against this table, making it universal for all providers and reducing the amount of parsing. * Made sure that cache items older than 7 days are also deleted for this table. * In the process of cleaning the cache after 7 days, noticed the other clean-up never works. Maybe it's intentional, but then it should be disabled. --- sickbeard/databases/cache_db.py | 10 ++++++ sickbeard/tvcache.py | 56 ++++++++++++++++++++++++++++++--- 2 files changed, 62 insertions(+), 4 deletions(-) diff --git a/sickbeard/databases/cache_db.py b/sickbeard/databases/cache_db.py index 7e5549bb83..7a9525ae03 100644 --- a/sickbeard/databases/cache_db.py +++ b/sickbeard/databases/cache_db.py @@ -124,3 +124,13 @@ def execute(self): self.connection.action("CREATE TABLE scene_names (indexer_id INTEGER, name TEXT);") self.connection.action("INSERT INTO scene_names SELECT * FROM tmp_scene_names;") self.connection.action("DROP TABLE tmp_scene_names;") + + +class AddProviderRssCache(ConvertSceneNamesToIndexerScheme): # pylint:disable=too-many-ancestors + """A provider cache table that's used to keep track of the last parsed search results""" + def test(self): + return self.hasTable("provider_rss_cache") + + def execute(self): + self.connection.action( + "CREATE TABLE provider_rss_cache (rss_cache_id INTEGER PRIMARY KEY, name TEXT, url TEXT, time NUMERIC DEFAULT 0, provider_id TEXT NOT NULL);") diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py index ca812cc836..70b3388168 100644 --- a/sickbeard/tvcache.py +++ b/sickbeard/tvcache.py @@ -114,6 +114,12 @@ def _get_db(self): return self.provider_db + def _clearProviderRssCache(self): + cache_db_con = self._get_db() + today = int(time.mktime(datetime.datetime.today().timetuple())) + # Keep item in cache for 7 days + cache_db_con.action('DELETE FROM provider_rss_cache WHERE provider_id = ? AND time < ? 
', [self.provider_id, today - 7 * 86400]) # 86400 POSIX day (exact value) + def _clearCache(self): """ Performs requalar cache cleaning as required @@ -122,6 +128,7 @@ def _clearCache(self): if sickbeard.CACHE_TRIMMING: # trim items older than MAX_CACHE_AGE days self.trim_cache(days=sickbeard.MAX_CACHE_AGE) + self._clearProviderRssCache() def trim_cache(self, days=None): """ @@ -187,16 +194,35 @@ def updateCache(self): # set updated self.setLastUpdate() + # get last 5 provider_rss_cache results + recent_results = self.get_last_cached_items(5) + found_recent_results = 0 + stop_at = 1 + cl = [] - for item in data['entries'] or []: - ci = self._parseItem(item) - if ci is not None: - cl.append(ci) + index = 0 + for index, item in enumerate(data['entries'] or []): + if recent_results and item.get('link').strip() in [cache_item['url'].strip() for cache_item in recent_results]: + found_recent_results += 1 + + if found_recent_results >= stop_at: + logger.log(u'Hit the old cached items, not parsing any more for: {0}'.format + (self.provider_id), logger.ERROR) + break + try: + ci = self._parseItem(item) + if ci is not None: + cl.append(ci) + except UnicodeDecodeError, e: + continue cache_db_con = self._get_db() if cl: cache_db_con.mass_action(cl) + # finished processing, let's save the newest x (index) items and store these in cache with a max of 5 + self._update_provider_rss_cache(data['entries'][0:min(index, 5)]) + except AuthException as e: logger.log(u'Authentication error: {0!r}'.format(e), logger.ERROR) except Exception as e: @@ -223,6 +249,28 @@ def update_cache_manual_search(self, manual_data=None): return any(results) + def _update_provider_rss_cache(self, items): + """Updates the table provider_rss_cache with a limited amount of the latest search result url's""" + + cache_db_con = self._get_db() + new_items = [] + sql_results = [] + + for item in items: + # get the current timestamp + cur_time = int(time.mktime(datetime.datetime.today().timetuple())) + + logger.log(u"Added provider_rss_cache item: {0}".format(item.get('link'), self.provider_id), logger.DEBUG) + + new_items.append(["INSERT OR REPLACE INTO provider_rss_cache (name, url, time, provider_id) VALUES (?,?,?,?)", + [item.get('title'), item.get('link'), cur_time, self.provider_id]]) + + if new_items: + logger.log(u'Mass updating provider_rss_cache table with results for provider: {0}'.format(self.provider.name), logger.DEBUG) + sql_results = cache_db_con.mass_action(new_items) + + return any(sql_results) + def getRSSFeed(self, url, params=None): if self.provider.login(): return getFeed(url, params=params, request_hook=self.provider.get_url) From 1effe0403774e5cce0ed2d5d1a495ae8a88d6dd2 Mon Sep 17 00:00:00 2001 From: P0psicles Date: Wed, 25 May 2016 15:33:21 +0200 Subject: [PATCH 18/85] Missed the camelCase --- sickbeard/tvcache.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py index 70b3388168..5dd5f35bf9 100644 --- a/sickbeard/tvcache.py +++ b/sickbeard/tvcache.py @@ -114,7 +114,7 @@ def _get_db(self): return self.provider_db - def _clearProviderRssCache(self): + def _clear_provider_rss_cache(self): cache_db_con = self._get_db() today = int(time.mktime(datetime.datetime.today().timetuple())) # Keep item in cache for 7 days @@ -128,7 +128,7 @@ def _clearCache(self): if sickbeard.CACHE_TRIMMING: # trim items older than MAX_CACHE_AGE days self.trim_cache(days=sickbeard.MAX_CACHE_AGE) - self._clearProviderRssCache() + self._clear_provider_rss_cache() def 
trim_cache(self, days=None): """ From 43184ccf9ac73021b28038fee68b8b34b4ebac62 Mon Sep 17 00:00:00 2001 From: P0psicles Date: Thu, 26 May 2016 23:13:16 +0200 Subject: [PATCH 19/85] Removed the sqlite solution, and replaced with a dict[provider][{}]. Still need to put in the logic to only remember last 5 items per provider key. --- sickbeard/__init__.py | 5 +++- sickbeard/databases/cache_db.py | 10 -------- sickbeard/tvcache.py | 35 +++------------------------ sickrage/providers/GenericProvider.py | 11 +++++++++ 4 files changed, 18 insertions(+), 43 deletions(-) diff --git a/sickbeard/__init__.py b/sickbeard/__init__.py index 809da9f5eb..65f1326d99 100644 --- a/sickbeard/__init__.py +++ b/sickbeard/__init__.py @@ -606,6 +606,8 @@ PRIVACY_LEVEL = 'normal' +provider_recent_results = {} + def get_backlog_cycle_time(): cycletime = DAILYSEARCH_FREQUENCY * 2 + 7 @@ -659,7 +661,8 @@ def initialize(consoleLogging=True): # pylint: disable=too-many-locals, too-man AUTOPOSTPROCESSOR_FREQUENCY, SHOWUPDATE_HOUR, \ ANIME_DEFAULT, NAMING_ANIME, ANIMESUPPORT, USE_ANIDB, ANIDB_USERNAME, ANIDB_PASSWORD, ANIDB_USE_MYLIST, \ ANIME_SPLIT_HOME, SCENE_DEFAULT, DOWNLOAD_URL, BACKLOG_DAYS, GIT_USERNAME, GIT_PASSWORD, \ - DEVELOPER, gh, DISPLAY_ALL_SEASONS, SSL_VERIFY, NEWS_LAST_READ, NEWS_LATEST, SOCKET_TIMEOUT, RECENTLY_DELETED + DEVELOPER, gh, DISPLAY_ALL_SEASONS, SSL_VERIFY, NEWS_LAST_READ, NEWS_LATEST, SOCKET_TIMEOUT, RECENTLY_DELETED, \ + provider_recent_results if __INITIALIZED__: return False diff --git a/sickbeard/databases/cache_db.py b/sickbeard/databases/cache_db.py index 7a9525ae03..7e5549bb83 100644 --- a/sickbeard/databases/cache_db.py +++ b/sickbeard/databases/cache_db.py @@ -124,13 +124,3 @@ def execute(self): self.connection.action("CREATE TABLE scene_names (indexer_id INTEGER, name TEXT);") self.connection.action("INSERT INTO scene_names SELECT * FROM tmp_scene_names;") self.connection.action("DROP TABLE tmp_scene_names;") - - -class AddProviderRssCache(ConvertSceneNamesToIndexerScheme): # pylint:disable=too-many-ancestors - """A provider cache table thats used to keep track of the last parsed search results""" - def test(self): - return self.hasTable("provider_rss_cache") - - def execute(self): - self.connection.action( - "CREATE TABLE provider_rss_cache (rss_cache_id INTEGER PRIMARY KEY, name TEXT, url TEXT, time NUMERIC DEFAULT 0, provider_id TEXT NOT NULL);") diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py index 5dd5f35bf9..622b699f58 100644 --- a/sickbeard/tvcache.py +++ b/sickbeard/tvcache.py @@ -114,12 +114,6 @@ def _get_db(self): return self.provider_db - def _clear_provider_rss_cache(self): - cache_db_con = self._get_db() - today = int(time.mktime(datetime.datetime.today().timetuple())) - # Keep item in cache for 7 days - cache_db_con.action('DELETE FROM provider_rss_cache WHERE provider_id = ? AND time < ? 
', [self.provider_id, today - 7 * 86400]) # 86400 POSIX day (exact value) - def _clearCache(self): """ Performs requalar cache cleaning as required @@ -128,7 +122,6 @@ def _clearCache(self): if sickbeard.CACHE_TRIMMING: # trim items older than MAX_CACHE_AGE days self.trim_cache(days=sickbeard.MAX_CACHE_AGE) - self._clear_provider_rss_cache() def trim_cache(self, days=None): """ @@ -195,14 +188,14 @@ def updateCache(self): self.setLastUpdate() # get last 5 provider_rss_cache results - recent_results = self.get_last_cached_items(5) + recent_results = self.provider.recent_results found_recent_results = 0 stop_at = 1 cl = [] index = 0 for index, item in enumerate(data['entries'] or []): - if recent_results and item.get('link').strip() in [cache_item['url'].strip() for cache_item in recent_results]: + if recent_results and item.get('link').strip() in [cached_item['link'].strip() for cached_item in recent_results]: found_recent_results += 1 if found_recent_results >= stop_at: @@ -221,7 +214,7 @@ def updateCache(self): cache_db_con.mass_action(cl) # finished processing, let's save the newest x (index) items and store these in cache with a max of 5 - self._update_provider_rss_cache(data['entries'][0:min(index, 5)]) + self.provider.recent_results = data['entries'][0:min(index, 5)] except AuthException as e: logger.log(u'Authentication error: {0!r}'.format(e), logger.ERROR) @@ -249,28 +242,6 @@ def update_cache_manual_search(self, manual_data=None): return any(results) - def _update_provider_rss_cache(self, items): - """Updates the table provider_rss_cache with a limited amount of the latest search result url's""" - - cache_db_con = self._get_db() - new_items = [] - sql_results = [] - - for item in items: - # get the current timestamp - cur_time = int(time.mktime(datetime.datetime.today().timetuple())) - - logger.log(u"Added provider_rss_cache item: {0}".format(item.get('link'), self.provider_id), logger.DEBUG) - - new_items.append(["INSERT OR REPLACE INTO provider_rss_cache (name, url, time, provider_id) VALUES (?,?,?,?)", - [item.get('title'), item.get('link'), cur_time, self.provider_id]]) - - if new_items: - logger.log(u'Mass updating provider_rss_cache table with results for provider: {0}'.format(self.provider.name), logger.DEBUG) - sql_results = cache_db_con.mass_action(new_items) - - return any(sql_results) - def getRSSFeed(self, url, params=None): if self.provider.login(): return getFeed(url, params=params, request_hook=self.provider.get_url) diff --git a/sickrage/providers/GenericProvider.py b/sickrage/providers/GenericProvider.py index 0c8566acdd..aa6a81d22b 100644 --- a/sickrage/providers/GenericProvider.py +++ b/sickrage/providers/GenericProvider.py @@ -71,6 +71,7 @@ def __init__(self, name): self.supports_backlog = True self.url = '' self.urls = {} + self.max_recent_items = 5 shuffle(self.bt_cache_urls) @@ -518,3 +519,13 @@ def _make_url(self, result): def _verify_download(self, file_name=None): # pylint: disable=unused-argument,no-self-use return True + + @property + def recent_results(self): + return sickbeard.provider_recent_results.get(self.get_id()) or [] + + @recent_results.setter + def recent_results(self, items): + if not sickbeard.provider_recent_results.get(self.get_id()): + sickbeard.provider_recent_results.update({self.get_id(): []}) + sickbeard.provider_recent_results[self.get_id()] += items From b3c5afcef20f3c17348e6ebbbbf7fdf210a7d514 Mon Sep 17 00:00:00 2001 From: P0psicles Date: Fri, 27 May 2016 10:09:22 +0200 Subject: [PATCH 20/85] Added options for only saving latest 
5 (configurable per provider) results per provider. --- sickrage/providers/GenericProvider.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/sickrage/providers/GenericProvider.py b/sickrage/providers/GenericProvider.py index aa6a81d22b..3edb4f659c 100644 --- a/sickrage/providers/GenericProvider.py +++ b/sickrage/providers/GenericProvider.py @@ -522,10 +522,16 @@ def _verify_download(self, file_name=None): # pylint: disable=unused-argument,n @property def recent_results(self): - return sickbeard.provider_recent_results.get(self.get_id()) or [] + if sickbeard.provider_recent_results.get(self.get_id()): + return sickbeard.provider_recent_results.get(self.get_id())[::-1] + else: + return [] @recent_results.setter def recent_results(self, items): if not sickbeard.provider_recent_results.get(self.get_id()): sickbeard.provider_recent_results.update({self.get_id(): []}) sickbeard.provider_recent_results[self.get_id()] += items + if items: + del sickbeard.provider_recent_results[self.get_id()][:len(sickbeard.provider_recent_results[self.get_id()]) - self.max_recent_items] + pass From 6fdd86211739ec3e3191fca32d1840074707890b Mon Sep 17 00:00:00 2001 From: P0psicles Date: Fri, 27 May 2016 10:25:58 +0200 Subject: [PATCH 21/85] Comments and variable cleanup --- sickbeard/tvcache.py | 11 ++++++----- sickrage/providers/GenericProvider.py | 1 - 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py index 622b699f58..7fbb588418 100644 --- a/sickbeard/tvcache.py +++ b/sickbeard/tvcache.py @@ -187,10 +187,10 @@ def updateCache(self): # set updated self.setLastUpdate() - # get last 5 provider_rss_cache results + # get last 5 rss cache results recent_results = self.provider.recent_results - found_recent_results = 0 - stop_at = 1 + found_recent_results = 0 # A counter that keeps track of the number of items that have been found in cache + stop_at = 3 # Configuration as an error margin, to stop at. The lower the number, the faster it will stop parsing items cl = [] index = 0 @@ -213,8 +213,9 @@ def updateCache(self): if cl: cache_db_con.mass_action(cl) - # finished processing, let's save the newest x (index) items and store these in cache with a max of 5 - self.provider.recent_results = data['entries'][0:min(index, 5)] + # finished processing, let's save the newest x (index) items and store these in cache with a max of 5 + # (overridable per provider, through the max_recent_items attribute). + self.provider.recent_results = data['entries'][0:min(index, self.provider.max_recent_items)] except AuthException as e: logger.log(u'Authentication error: {0!r}'.format(e), logger.ERROR) diff --git a/sickrage/providers/GenericProvider.py b/sickrage/providers/GenericProvider.py index 3edb4f659c..bebe33237c 100644 --- a/sickrage/providers/GenericProvider.py +++ b/sickrage/providers/GenericProvider.py @@ -534,4 +534,3 @@ def recent_results(self, items): sickbeard.provider_recent_results[self.get_id()] += items if items: del sickbeard.provider_recent_results[self.get_id()][:len(sickbeard.provider_recent_results[self.get_id()]) - self.max_recent_items] - pass From 3576271d5936e54bd3ff9ada34ea40a4c878fe0f Mon Sep 17 00:00:00 2001 From: P0psicles Date: Fri, 27 May 2016 16:27:10 +0200 Subject: [PATCH 22/85] Removed the inverse sorting * Also fixed a bug where already existing URLs were added to the list when the stop_at parameter is set to > 1. This is because the 2 results it neglects are still attempted to be added to the list. Now it isn't, keeping the list unique.
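The uniqueness guarantee boils down to a membership test on the already-cached links before prepending; a simplified sketch of the idea (same names as in the diff below):

    seen = {cache_item['link'] for cache_item in recent_results[self.get_id()]}
    add_to_list = [item for item in items if item['link'] not in seen]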
--- sickbeard/tvcache.py | 2 +- sickrage/providers/GenericProvider.py | 15 ++++++++++----- 2 files changed, 11 insertions(+), 6 deletions(-) diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py index 7fbb588418..6c12ce3d23 100644 --- a/sickbeard/tvcache.py +++ b/sickbeard/tvcache.py @@ -195,7 +195,7 @@ def updateCache(self): cl = [] index = 0 for index, item in enumerate(data['entries'] or []): - if recent_results and item.get('link').strip() in [cached_item['link'].strip() for cached_item in recent_results]: + if recent_results and item['link'] in {cache_item['link'] for cache_item in recent_results}: found_recent_results += 1 diff --git a/sickrage/providers/GenericProvider.py b/sickrage/providers/GenericProvider.py index bebe33237c..45b09e9042 100644 --- a/sickrage/providers/GenericProvider.py +++ b/sickrage/providers/GenericProvider.py @@ -523,14 +523,19 @@ def _verify_download(self, file_name=None): # pylint: disable=unused-argument,n @property def recent_results(self): if sickbeard.provider_recent_results.get(self.get_id()): - return sickbeard.provider_recent_results.get(self.get_id())[::-1] + return sickbeard.provider_recent_results.get(self.get_id()) else: return [] @recent_results.setter def recent_results(self, items): - if not sickbeard.provider_recent_results.get(self.get_id()): - sickbeard.provider_recent_results.update({self.get_id(): []}) - sickbeard.provider_recent_results[self.get_id()] += items + recent_results = sickbeard.provider_recent_results + if not recent_results.get(self.get_id()): + recent_results.update({self.get_id(): []}) if items: - del sickbeard.provider_recent_results[self.get_id()][:len(sickbeard.provider_recent_results[self.get_id()]) - self.max_recent_items] + add_to_list = [] + for item in items: + if item['link'] not in {cache_item['link'] for cache_item in recent_results[self.get_id()]}: + add_to_list += [item] + recent_results[self.get_id()] = add_to_list + recent_results[self.get_id()] + recent_results[self.get_id()][:self.max_recent_items] From 243b562cbbcc616dcddfdbb36b9339aac0c5f293 Mon Sep 17 00:00:00 2001 From: P0psicles Date: Fri, 27 May 2016 19:39:04 +0200 Subject: [PATCH 23/85] Fixed some dumb coding mistakes. --- sickbeard/tvcache.py | 7 +++---- sickrage/providers/GenericProvider.py | 12 ++++++------ 2 files changed, 9 insertions(+), 10 deletions(-) diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py index 6c12ce3d23..aaeee5dfc2 100644 --- a/sickbeard/tvcache.py +++ b/sickbeard/tvcache.py @@ -190,15 +190,14 @@ def updateCache(self): # get last 5 rss cache results recent_results = self.provider.recent_results found_recent_results = 0 # A counter that keeps track of the number of items that have been found in cache - stop_at = 3 # Configuration as an error margin, to stop at. 
The lower the number, the faster it will stop parsing items cl = [] index = 0 for index, item in enumerate(data['entries'] or []): - if recent_results and item['link'] in {cache_item['link'] for cache_item in recent_results}: + if item['link'] in {cache_item['link'] for cache_item in recent_results}: found_recent_results += 1 - if found_recent_results >= stop_at: + if found_recent_results >= self.provider.stop_at: logger.log(u'Hit the old cached items, not parsing any more for: {0}'.format (self.provider_id), logger.ERROR) break try: ci = self._parseItem(item) if ci is not None: cl.append(ci) - except UnicodeDecodeError, e: + except UnicodeDecodeError: continue cache_db_con = self._get_db() diff --git a/sickrage/providers/GenericProvider.py b/sickrage/providers/GenericProvider.py index 45b09e9042..b54ad790fd 100644 --- a/sickrage/providers/GenericProvider.py +++ b/sickrage/providers/GenericProvider.py @@ -71,7 +71,10 @@ def __init__(self, name): self.supports_backlog = True self.url = '' self.urls = {} + + # Parameters for reducing the daily search results parsing + self.max_recent_items = 5 + self.stop_at = 3 shuffle(self.bt_cache_urls) @@ -522,10 +525,7 @@ def _verify_download(self, file_name=None): # pylint: disable=unused-argument,n @property def recent_results(self): - if sickbeard.provider_recent_results.get(self.get_id()): - return sickbeard.provider_recent_results.get(self.get_id()) - else: - return [] + return sickbeard.provider_recent_results.get(self.get_id(), []) @recent_results.setter def recent_results(self, items): @@ -537,5 +537,5 @@ def recent_results(self, items): for item in items: if item['link'] not in {cache_item['link'] for cache_item in recent_results[self.get_id()]}: add_to_list += [item] - recent_results[self.get_id()] = add_to_list + recent_results[self.get_id()] - recent_results[self.get_id()][:self.max_recent_items] + results = add_to_list + recent_results[self.get_id()] + recent_results[self.get_id()] = results[:self.max_recent_items] From 5a02f80bd6b69047d5a00a6e71c09c15ebb51094 Mon Sep 17 00:00:00 2001 From: P0psicles Date: Fri, 27 May 2016 19:45:12 +0200 Subject: [PATCH 24/85] Added warning message for UnicodeError in parsing. --- sickbeard/tvcache.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py index aaeee5dfc2..5bd7d96fb2 100644 --- a/sickbeard/tvcache.py +++ b/sickbeard/tvcache.py @@ -205,8 +205,9 @@ def updateCache(self): ci = self._parseItem(item) if ci is not None: cl.append(ci) - except UnicodeDecodeError: - continue + except UnicodeDecodeError as e: + logger.log(u'Unicode decoding error, missed parsing item from provider {0}: {1!r}'.format + (self.provider.name, e), logger.WARNING) cache_db_con = self._get_db() if cl: From 79131d1cae7938190077f19cdbb565680eb18df3 Mon Sep 17 00:00:00 2001 From: P0psicles Date: Fri, 27 May 2016 20:08:56 +0200 Subject: [PATCH 25/85] Missed the _getDB snake_case rename. 
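Any caller still using the old camelCase name would fail at runtime, along these lines (illustrative only):

    self.cache._getDB()  # AttributeError: 'TVCache' object has no attribute '_getDB'

so the remaining call sites below are switched to _get_db().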
--- sickbeard/providers/binsearch.py | 2 +- sickbeard/providers/womble.py | 2 +- sickrage/providers/GenericProvider.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/sickbeard/providers/binsearch.py b/sickbeard/providers/binsearch.py index fc48e7e191..30d8cd9cd6 100644 --- a/sickbeard/providers/binsearch.py +++ b/sickbeard/providers/binsearch.py @@ -114,7 +114,7 @@ def updateCache(self): cl.append(ci) if cl: - cache_db_con = self._getDB() + cache_db_con = self._get_db() cache_db_con.mass_action(cl) def _checkAuth(self, data): diff --git a/sickbeard/providers/womble.py b/sickbeard/providers/womble.py index f5333c6ae4..7158e3da80 100644 --- a/sickbeard/providers/womble.py +++ b/sickbeard/providers/womble.py @@ -66,7 +66,7 @@ def updateCache(self): cl.append(ci) if cl: - cache_db_con = self._getDB() + cache_db_con = self._get_db() cache_db_con.mass_action(cl) def _checkAuth(self, data): diff --git a/sickrage/providers/GenericProvider.py b/sickrage/providers/GenericProvider.py index b54ad790fd..5c62009501 100644 --- a/sickrage/providers/GenericProvider.py +++ b/sickrage/providers/GenericProvider.py @@ -327,7 +327,7 @@ def find_search_results(self, show, episodes, search_mode, forced_search=False, if cl: # pylint: disable=protected-access # Access to a protected member of a client class - db = self.cache._getDB() + db = self.cache._get_db() db.mass_action(cl) return results From 4acef226ce776c23596183d825d70e6dfd1e9ff8 Mon Sep 17 00:00:00 2001 From: P0psicles Date: Sun, 29 May 2016 21:39:04 +0200 Subject: [PATCH 26/85] Moved global to GenericProvider, changed var name. --- sickbeard/__init__.py | 5 +---- sickrage/providers/GenericProvider.py | 7 +++++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/sickbeard/__init__.py b/sickbeard/__init__.py index 65f1326d99..809da9f5eb 100644 --- a/sickbeard/__init__.py +++ b/sickbeard/__init__.py @@ -606,6 +606,8 @@ PRIVACY_LEVEL = 'normal' -provider_recent_results = {} - def get_backlog_cycle_time(): cycletime = DAILYSEARCH_FREQUENCY * 2 + 7 @@ -659,7 +661,8 @@ def initialize(consoleLogging=True): # pylint: disable=too-many-locals, too-man AUTOPOSTPROCESSOR_FREQUENCY, SHOWUPDATE_HOUR, \ ANIME_DEFAULT, NAMING_ANIME, ANIMESUPPORT, USE_ANIDB, ANIDB_USERNAME, ANIDB_PASSWORD, ANIDB_USE_MYLIST, \ ANIME_SPLIT_HOME, SCENE_DEFAULT, DOWNLOAD_URL, BACKLOG_DAYS, GIT_USERNAME, GIT_PASSWORD, \ - DEVELOPER, gh, DISPLAY_ALL_SEASONS, SSL_VERIFY, NEWS_LAST_READ, NEWS_LATEST, SOCKET_TIMEOUT, RECENTLY_DELETED, \ - provider_recent_results + DEVELOPER, gh, DISPLAY_ALL_SEASONS, SSL_VERIFY, NEWS_LAST_READ, NEWS_LATEST, SOCKET_TIMEOUT, RECENTLY_DELETED if __INITIALIZED__: return False diff --git a/sickrage/providers/GenericProvider.py b/sickrage/providers/GenericProvider.py index 5c62009501..a595e97682 100644 --- a/sickrage/providers/GenericProvider.py +++ b/sickrage/providers/GenericProvider.py @@ -39,6 +39,10 @@ from sickrage.helper.exceptions import ex +# Keep a list, per provider, of recent search results +recent_results = {} + + class GenericProvider(object): # pylint: disable=too-many-instance-attributes NZB = 'nzb' TORRENT = 'torrent' @@ -525,11 +529,10 @@ def _verify_download(self, file_name=None): # pylint: disable=unused-argument,n @property def recent_results(self): - return sickbeard.provider_recent_results.get(self.get_id(), []) + return recent_results.get(self.get_id(), []) @recent_results.setter def recent_results(self, items): - recent_results = sickbeard.provider_recent_results if not 
recent_results.get(self.get_id()): recent_results.update({self.get_id(): []}) if items: add_to_list = [] for item in items: if item['link'] not in {cache_item['link'] for cache_item in recent_results[self.get_id()]}: add_to_list += [item] results = add_to_list + recent_results[self.get_id()] recent_results[self.get_id()] = results[:self.max_recent_items] From d53f1da4d2eaa8a6c8cb8fe74062fed86bd4c0a5 Mon Sep 17 00:00:00 2001 From: P0psicles Date: Mon, 30 May 2016 09:39:14 +0200 Subject: [PATCH 27/85] This should in no way be an error of course --- sickbeard/tvcache.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py index 5bd7d96fb2..3dae8f2bf7 100644 --- a/sickbeard/tvcache.py +++ b/sickbeard/tvcache.py @@ -199,7 +199,7 @@ def updateCache(self): if found_recent_results >= self.provider.stop_at: logger.log(u'Hit the old cached items, not parsing any more for: {0}'.format - (self.provider_id), logger.ERROR) + (self.provider_id), logger.DEBUG) break try: ci = self._parseItem(item) From a7388c56ef316948ea5f1f419ff865418fee2ca3 Mon Sep 17 00:00:00 2001 From: P0psicles Date: Fri, 3 Jun 2016 16:11:19 +0200 Subject: [PATCH 28/85] Moved seeders sorting out of providers code and into the sickbeard/search.py searchProviders() code. * Removed the lambda sort from all providers * Corrected bug introduced in TVCache * Removed bogus condition from GenericProvider. That condition can never be true. --- sickbeard/providers/abnormal.py | 2 -- sickbeard/providers/alpharatio.py | 2 -- sickbeard/providers/bitcannon.py | 2 -- sickbeard/providers/bitsnoop.py | 2 -- sickbeard/providers/bluetigers.py | 3 --- sickbeard/providers/cpasbien.py | 2 -- sickbeard/providers/danishbits.py | 2 -- sickbeard/providers/elitetorrent.py | 3 --- sickbeard/providers/extratorrent.py | 2 -- sickbeard/providers/freshontv.py | 2 -- sickbeard/providers/gftracker.py | 2 -- sickbeard/providers/hd4free.py | 5 +---- sickbeard/providers/hdspace.py | 2 -- sickbeard/providers/hdtorrents.py | 3 --- sickbeard/providers/hounddawgs.py | 3 --- sickbeard/providers/ilovetorrents.py | 3 --- sickbeard/providers/iptorrents.py | 3 --- sickbeard/providers/kat.py | 3 --- sickbeard/providers/limetorrents.py | 3 --- sickbeard/providers/morethantv.py | 2 -- sickbeard/providers/norbits.py | 2 -- sickbeard/providers/nyaatorrents.py | 2 -- sickbeard/providers/pretome.py | 3 --- sickbeard/providers/rarbg.py | 2 -- sickbeard/providers/scc.py | 3 --- sickbeard/providers/sceneelite.py | 6 ++---- sickbeard/providers/scenetime.py | 3 --- sickbeard/providers/speedcd.py | 2 -- sickbeard/providers/t411.py | 3 --- sickbeard/providers/thepiratebay.py | 2 -- sickbeard/providers/tntvillage.py | 3 --- sickbeard/providers/tokyotoshokan.py | 2 -- sickbeard/providers/torrentbytes.py | 2 -- sickbeard/providers/torrentday.py | 2 -- sickbeard/providers/torrentleech.py | 2 -- sickbeard/providers/torrentproject.py | 2 -- sickbeard/providers/torrentz.py | 2 -- sickbeard/providers/transmitthenet.py | 2 -- sickbeard/providers/tvchaosuk.py | 2 -- sickbeard/providers/xthor.py | 2 -- sickbeard/search.py | 4 ++++ sickbeard/tvcache.py | 4 ++-- sickrage/providers/GenericProvider.py | 8 ++------ 43 files changed, 11 insertions(+), 105 deletions(-) diff --git a/sickbeard/providers/abnormal.py b/sickbeard/providers/abnormal.py index c22ea6255a..d7e791ecb0 100644 --- a/sickbeard/providers/abnormal.py +++ b/sickbeard/providers/abnormal.py @@ -158,8 +158,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man except StandardError: continue - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results diff --git a/sickbeard/providers/alpharatio.py 
b/sickbeard/providers/alpharatio.py index e30c2f537a..d85e9faba8 100644 --- a/sickbeard/providers/alpharatio.py +++ b/sickbeard/providers/alpharatio.py @@ -171,8 +171,6 @@ def process_column_header(td): except StandardError: continue - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results diff --git a/sickbeard/providers/bitcannon.py b/sickbeard/providers/bitcannon.py index 4d9b4a4f23..66aca08fa2 100644 --- a/sickbeard/providers/bitcannon.py +++ b/sickbeard/providers/bitcannon.py @@ -110,8 +110,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man except (AttributeError, TypeError, KeyError, ValueError): continue - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results diff --git a/sickbeard/providers/bitsnoop.py b/sickbeard/providers/bitsnoop.py index 7197a72401..c9d98a17f8 100644 --- a/sickbeard/providers/bitsnoop.py +++ b/sickbeard/providers/bitsnoop.py @@ -118,8 +118,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man except (AttributeError, TypeError, KeyError, ValueError): logger.log(u"Failed parsing provider. Traceback: %r" % traceback.format_exc(), logger.ERROR) - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results diff --git a/sickbeard/providers/bluetigers.py b/sickbeard/providers/bluetigers.py index 867a8f1184..487668f1a9 100644 --- a/sickbeard/providers/bluetigers.py +++ b/sickbeard/providers/bluetigers.py @@ -136,9 +136,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man except Exception: logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR) - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) - results += items return results diff --git a/sickbeard/providers/cpasbien.py b/sickbeard/providers/cpasbien.py index ec582029d9..7802a4c420 100644 --- a/sickbeard/providers/cpasbien.py +++ b/sickbeard/providers/cpasbien.py @@ -91,8 +91,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man except StandardError: continue - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results diff --git a/sickbeard/providers/danishbits.py b/sickbeard/providers/danishbits.py index bcde3185d0..56b414e540 100644 --- a/sickbeard/providers/danishbits.py +++ b/sickbeard/providers/danishbits.py @@ -171,8 +171,6 @@ def process_column_header(td): except StandardError: continue - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results diff --git a/sickbeard/providers/elitetorrent.py b/sickbeard/providers/elitetorrent.py index 9809c08ae2..c0720c8295 100644 --- a/sickbeard/providers/elitetorrent.py +++ b/sickbeard/providers/elitetorrent.py @@ -132,9 +132,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man except Exception: logger.log(u"Failed parsing provider. 
Traceback: %s" % traceback.format_exc(), logger.WARNING) - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) - results += items return results diff --git a/sickbeard/providers/extratorrent.py b/sickbeard/providers/extratorrent.py index 7b05cf9163..4764ffbb2b 100644 --- a/sickbeard/providers/extratorrent.py +++ b/sickbeard/providers/extratorrent.py @@ -110,8 +110,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man items.append(item) - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results diff --git a/sickbeard/providers/freshontv.py b/sickbeard/providers/freshontv.py index e30f3f3587..95ce723fdc 100644 --- a/sickbeard/providers/freshontv.py +++ b/sickbeard/providers/freshontv.py @@ -227,8 +227,6 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many except Exception: logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR) - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results diff --git a/sickbeard/providers/gftracker.py b/sickbeard/providers/gftracker.py index e82a3b6928..85c78b4eba 100644 --- a/sickbeard/providers/gftracker.py +++ b/sickbeard/providers/gftracker.py @@ -179,8 +179,6 @@ def process_column_header(td): except StandardError: continue - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results diff --git a/sickbeard/providers/hd4free.py b/sickbeard/providers/hd4free.py index c69890595e..27d94860ea 100644 --- a/sickbeard/providers/hd4free.py +++ b/sickbeard/providers/hd4free.py @@ -70,7 +70,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man search_params.pop('fl', '') if mode != 'RSS': - logger.log(u"Search string: " + search_string.strip(), logger.DEBUG) + logger.log(u"Search string: {0}".format(search_string), logger.DEBUG) search_params['search'] = search_string else: search_params.pop('search', '') @@ -122,9 +122,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man except StandardError: continue - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) - results += items return results diff --git a/sickbeard/providers/hdspace.py b/sickbeard/providers/hdspace.py index 3a5156393e..24f26183a0 100644 --- a/sickbeard/providers/hdspace.py +++ b/sickbeard/providers/hdspace.py @@ -162,8 +162,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man except (AttributeError, TypeError, KeyError, ValueError): continue - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results diff --git a/sickbeard/providers/hdtorrents.py b/sickbeard/providers/hdtorrents.py index b61d69b2bc..65334f8789 100644 --- a/sickbeard/providers/hdtorrents.py +++ b/sickbeard/providers/hdtorrents.py @@ -170,9 +170,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man items.append(item) - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: 
try_int(d.get('seeders', 0)), reverse=True) - results += items return results diff --git a/sickbeard/providers/hounddawgs.py b/sickbeard/providers/hounddawgs.py index bbdf2953e1..c0d0486e93 100644 --- a/sickbeard/providers/hounddawgs.py +++ b/sickbeard/providers/hounddawgs.py @@ -176,9 +176,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man except Exception: logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR) - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) - results += items return results diff --git a/sickbeard/providers/ilovetorrents.py b/sickbeard/providers/ilovetorrents.py index 9d1824ff14..0e6a027b93 100644 --- a/sickbeard/providers/ilovetorrents.py +++ b/sickbeard/providers/ilovetorrents.py @@ -158,9 +158,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man except Exception: logger.log(u"Failed parsing provider. Traceback: {0}".format(traceback.format_exc()), logger.WARNING) - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) - results += items return results diff --git a/sickbeard/providers/iptorrents.py b/sickbeard/providers/iptorrents.py index 0f029ab1f6..7b3b178863 100644 --- a/sickbeard/providers/iptorrents.py +++ b/sickbeard/providers/iptorrents.py @@ -157,9 +157,6 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many except Exception as e: logger.log(u"Failed parsing provider. Error: %r" % ex(e), logger.ERROR) - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) - results += items return results diff --git a/sickbeard/providers/kat.py b/sickbeard/providers/kat.py index bd74f40149..7d93428388 100644 --- a/sickbeard/providers/kat.py +++ b/sickbeard/providers/kat.py @@ -132,9 +132,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man except (AttributeError, TypeError, KeyError, ValueError): continue - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) - results += items return results diff --git a/sickbeard/providers/limetorrents.py b/sickbeard/providers/limetorrents.py index 2eae5c3cf1..8e8e1dd798 100644 --- a/sickbeard/providers/limetorrents.py +++ b/sickbeard/providers/limetorrents.py @@ -180,9 +180,6 @@ def parse(self, data, mode): (traceback.format_exc()), logger.ERROR) continue - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) - return items diff --git a/sickbeard/providers/morethantv.py b/sickbeard/providers/morethantv.py index 3806e71442..fd3647286d 100644 --- a/sickbeard/providers/morethantv.py +++ b/sickbeard/providers/morethantv.py @@ -181,8 +181,6 @@ def process_column_header(td): except StandardError: continue - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results diff --git a/sickbeard/providers/norbits.py b/sickbeard/providers/norbits.py index 6ca871c9cf..15983351d5 100644 --- a/sickbeard/providers/norbits.py +++ b/sickbeard/providers/norbits.py @@ -129,8 +129,6 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many title, 
seeders, leechers), logger.DEBUG) items.append(item) - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items diff --git a/sickbeard/providers/nyaatorrents.py b/sickbeard/providers/nyaatorrents.py index bf04548b4b..d461ec56ab 100644 --- a/sickbeard/providers/nyaatorrents.py +++ b/sickbeard/providers/nyaatorrents.py @@ -114,8 +114,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man except StandardError: continue - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results diff --git a/sickbeard/providers/pretome.py b/sickbeard/providers/pretome.py index abcdc3f12f..0f2c9cb82e 100644 --- a/sickbeard/providers/pretome.py +++ b/sickbeard/providers/pretome.py @@ -161,9 +161,6 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many except Exception: logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR) - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) - results += items return results diff --git a/sickbeard/providers/rarbg.py b/sickbeard/providers/rarbg.py index f0e0aea7ab..67fbbddc7d 100644 --- a/sickbeard/providers/rarbg.py +++ b/sickbeard/providers/rarbg.py @@ -176,8 +176,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man except StandardError: continue - # For each search mode sort all the items by seeders - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results diff --git a/sickbeard/providers/scc.py b/sickbeard/providers/scc.py index 96d813e513..652dd77de2 100644 --- a/sickbeard/providers/scc.py +++ b/sickbeard/providers/scc.py @@ -159,9 +159,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man items.append(item) - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) - results += items return results diff --git a/sickbeard/providers/sceneelite.py b/sickbeard/providers/sceneelite.py index c038588ccd..3c7f987e70 100644 --- a/sickbeard/providers/sceneelite.py +++ b/sickbeard/providers/sceneelite.py @@ -137,15 +137,13 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if mode != "RSS": logger.log("Found result: {0} with {1} seeders and {2} leechers".format - (title, seeders, leechers), logger.DEBUG) + (title, seeders, leechers), logger.DEBUG) items.append(item) except StandardError: continue - - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) + results += items return results diff --git a/sickbeard/providers/scenetime.py b/sickbeard/providers/scenetime.py index 73c34a3ea7..496c4f500b 100644 --- a/sickbeard/providers/scenetime.py +++ b/sickbeard/providers/scenetime.py @@ -142,9 +142,6 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many items.append(item) - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) - results += items return results diff --git a/sickbeard/providers/speedcd.py b/sickbeard/providers/speedcd.py index 3caf39bf51..490f520759 100644 --- 
a/sickbeard/providers/speedcd.py +++ b/sickbeard/providers/speedcd.py @@ -172,8 +172,6 @@ def process_column_header(td): except StandardError: continue - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results diff --git a/sickbeard/providers/t411.py b/sickbeard/providers/t411.py index 4207044fe7..fe5f9ddb1e 100644 --- a/sickbeard/providers/t411.py +++ b/sickbeard/providers/t411.py @@ -154,9 +154,6 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many except Exception: logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR) - # For each search mode sort all the items by seeders if available if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) - results += items return results diff --git a/sickbeard/providers/thepiratebay.py b/sickbeard/providers/thepiratebay.py index 9826e78154..c7a89111a6 100644 --- a/sickbeard/providers/thepiratebay.py +++ b/sickbeard/providers/thepiratebay.py @@ -163,8 +163,6 @@ def process_column_header(th): except StandardError: continue - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results diff --git a/sickbeard/providers/tntvillage.py b/sickbeard/providers/tntvillage.py index 049accc078..8ccb4de307 100644 --- a/sickbeard/providers/tntvillage.py +++ b/sickbeard/providers/tntvillage.py @@ -395,9 +395,6 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many except Exception: logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR) - # For each search mode sort all the items by seeders if available if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) - results += items return results diff --git a/sickbeard/providers/tokyotoshokan.py b/sickbeard/providers/tokyotoshokan.py index 19a0c54713..a935ddd9a7 100644 --- a/sickbeard/providers/tokyotoshokan.py +++ b/sickbeard/providers/tokyotoshokan.py @@ -112,8 +112,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man items.append(item) - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results diff --git a/sickbeard/providers/torrentbytes.py b/sickbeard/providers/torrentbytes.py index 399a0608ff..00d1a51fb3 100644 --- a/sickbeard/providers/torrentbytes.py +++ b/sickbeard/providers/torrentbytes.py @@ -154,8 +154,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man except (AttributeError, TypeError): continue - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results diff --git a/sickbeard/providers/torrentday.py b/sickbeard/providers/torrentday.py index 267ec87779..a40958def1 100644 --- a/sickbeard/providers/torrentday.py +++ b/sickbeard/providers/torrentday.py @@ -166,8 +166,6 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many items.append(item) - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results diff --git a/sickbeard/providers/torrentleech.py b/sickbeard/providers/torrentleech.py 
index 56fa9a4eb2..496cb68883 100644 --- a/sickbeard/providers/torrentleech.py +++ b/sickbeard/providers/torrentleech.py @@ -169,8 +169,6 @@ def process_column_header(td): except StandardError: continue - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results diff --git a/sickbeard/providers/torrentproject.py b/sickbeard/providers/torrentproject.py index 64ece6ab04..a7ef07e76b 100644 --- a/sickbeard/providers/torrentproject.py +++ b/sickbeard/providers/torrentproject.py @@ -130,8 +130,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man (traceback.format_exc()), logger.ERROR) continue - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results diff --git a/sickbeard/providers/torrentz.py b/sickbeard/providers/torrentz.py index af8bfd4893..c9cad22098 100644 --- a/sickbeard/providers/torrentz.py +++ b/sickbeard/providers/torrentz.py @@ -112,8 +112,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man except StandardError: logger.log(u"Failed parsing provider. Traceback: %r" % traceback.format_exc(), logger.ERROR) - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results diff --git a/sickbeard/providers/transmitthenet.py b/sickbeard/providers/transmitthenet.py index 92b3065da9..9113244236 100644 --- a/sickbeard/providers/transmitthenet.py +++ b/sickbeard/providers/transmitthenet.py @@ -174,8 +174,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man except Exception: logger.log(u"Failed parsing provider. 
Traceback: %s" % traceback.format_exc(), logger.ERROR) - # For each search mode sort all the items by seeders - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results diff --git a/sickbeard/providers/tvchaosuk.py b/sickbeard/providers/tvchaosuk.py index e03564b198..0e1006e916 100644 --- a/sickbeard/providers/tvchaosuk.py +++ b/sickbeard/providers/tvchaosuk.py @@ -168,8 +168,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man except StandardError: continue - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results diff --git a/sickbeard/providers/xthor.py b/sickbeard/providers/xthor.py index d4feb83ada..e3e7c4a2be 100644 --- a/sickbeard/providers/xthor.py +++ b/sickbeard/providers/xthor.py @@ -184,8 +184,6 @@ def process_column_header(td): except StandardError: continue - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results diff --git a/sickbeard/search.py b/sickbeard/search.py index 89fe022186..1457d8f805 100644 --- a/sickbeard/search.py +++ b/sickbeard/search.py @@ -584,6 +584,10 @@ def searchProviders(show, episodes, forced_search=False, downCurQuality=False, m else: foundResults[cur_provider.name][curEp] = searchResults[curEp] + # Sort the list by seeders if possible + if cur_provider.provider_type == 'torrent': + foundResults[cur_provider.name][curEp].sort(key=lambda d: int(d.seeders), reverse=True) + break elif not cur_provider.search_fallback or searchCount == 2: break diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py index 3dae8f2bf7..adcfddd940 100644 --- a/sickbeard/tvcache.py +++ b/sickbeard/tvcache.py @@ -344,7 +344,7 @@ def shouldClearCache(self): return False - def _addCacheEntry(self, name, url, seeders, leechers, size, pubdate, hash): + def _addCacheEntry(self, name, url, seeders, leechers, size, pubdate, torrent_hash, parse_result=None, indexer_id=0): try: parse_result = NameParser().parse(name) @@ -383,7 +383,7 @@ def _addCacheEntry(self, name, url, seeders, leechers, size, pubdate, hash): 'INSERT OR REPLACE INTO [{0}] (name, season, episodes, indexerid, url, time, quality, release_group, ' 'version, seeders, leechers, size, pubdate, hash) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?)'.format(self.provider_id), [name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality, - release_group, version, seeders, leechers, size, pubdate, hash]] + release_group, version, seeders, leechers, size, pubdate, torrent_hash]] def searchCache(self, episode, forced_search=False, downCurQuality=False): neededEps = self.findNeededEpisodes(episode, forced_search, downCurQuality) diff --git a/sickrage/providers/GenericProvider.py b/sickrage/providers/GenericProvider.py index a595e97682..96f62875e5 100644 --- a/sickrage/providers/GenericProvider.py +++ b/sickrage/providers/GenericProvider.py @@ -140,10 +140,6 @@ def find_search_results(self, show, episodes, search_mode, forced_search=False, continue - # NOTE: searched_scene_season is always None? 
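A minimal sketch of the sorting idiom this series consolidates: instead of each provider sorting its own items with try_int(d.get('seeders', 0)), the results for an episode are sorted once, after every provider has reported. The safe_int() helper below is illustrative (the hunk in search.py calls int(d.seeders) directly, which assumes seeders is always numeric):

    def safe_int(value, default=0):
        # Mirrors the try_int() helper the per-provider sorts relied on.
        try:
            return int(value)
        except (TypeError, ValueError):
            return default

    def sort_by_seeders(results):
        # Sort in place, most-seeded first, exactly once per episode.
        results.sort(key=lambda result: safe_int(getattr(result, 'seeders', 0)),
                     reverse=True)
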
- if (len(episodes) > 1 or manual_search_type == 'season') and search_mode == 'sponly' and searched_scene_season == episode.scene_season: - continue - search_strings = [] searched_scene_season = episode.scene_season @@ -183,7 +179,7 @@ def find_search_results(self, show, episodes, search_mode, forced_search=False, (seeders, leechers) = self._get_result_info(item) size = self._get_size(item) pubdate = self._get_pubdate(item) - hash = self._get_hash(item) + torrent_hash = self._get_hash(item) try: parse_result = NameParser(parse_method=('normal', 'anime')[show.is_anime]).parse(title) @@ -272,7 +268,7 @@ def find_search_results(self, show, episodes, search_mode, forced_search=False, logger.log(u'Adding item from search to cache: %s' % title, logger.DEBUG) # pylint: disable=protected-access # Access to a protected member of a client class - ci = self.cache._addCacheEntry(title, url, seeders, leechers, size, pubdate, hash, parse_result=parse_result) + ci = self.cache._addCacheEntry(title, url, seeders, leechers, size, pubdate, torrent_hash, parse_result=parse_result) if ci is not None: cl.append(ci) From 1cf6622529d1cd53125d789cc2a2a86694a83f08 Mon Sep 17 00:00:00 2001 From: medariox Date: Sat, 4 Jun 2016 20:14:12 +0200 Subject: [PATCH 29/85] Standardize first 10 providers --- sickbeard/providers/abnormal.py | 41 ++++++---- sickbeard/providers/alpharatio.py | 123 +++++++++++++++-------------- sickbeard/providers/anizb.py | 28 ++++--- sickbeard/providers/binsearch.py | 13 ++-- sickbeard/providers/bitcannon.py | 87 ++++++++++++--------- sickbeard/providers/bitsnoop.py | 53 ++++++++----- sickbeard/providers/bluetigers.py | 56 +++++++++----- sickbeard/providers/btdigg.py | 102 +++++++++++++----------- sickbeard/providers/btn.py | 124 +++++++++++++++--------------- sickbeard/providers/cpasbien.py | 52 ++++++++----- 10 files changed, 387 insertions(+), 292 deletions(-) diff --git a/sickbeard/providers/abnormal.py b/sickbeard/providers/abnormal.py index d7e791ecb0..d550cbad92 100644 --- a/sickbeard/providers/abnormal.py +++ b/sickbeard/providers/abnormal.py @@ -1,26 +1,26 @@ # coding=utf-8 # Author: adaur # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . 
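The hash parameter renamed to torrent_hash above is more than style: a parameter named hash shadows the built-in for the whole function body. A self-contained repro, with illustrative names:

    def add_cache_entry(name, hash):
        # 'hash' is now whatever string the caller passed, so the built-in
        # is unreachable here: hash(name) raises TypeError.
        return hash(name)

    def add_cache_entry_fixed(name, torrent_hash):
        # With a distinct name, the built-in hash() works as expected.
        return hash(name), torrent_hash

    add_cache_entry_fixed('Some.Show.S01E01', 'deadbeef')   # fine
    # add_cache_entry('Some.Show.S01E01', 'deadbeef')       # TypeError
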
from __future__ import unicode_literals import re +import traceback + from requests.compat import urljoin from requests.utils import dict_from_cookiejar @@ -86,7 +86,8 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Search Params search_params = { - 'cat[]': ['TV|SD|VOSTFR', 'TV|HD|VOSTFR', 'TV|SD|VF', 'TV|HD|VF', 'TV|PACK|FR', 'TV|PACK|VOSTFR', 'TV|EMISSIONS', 'ANIME'], + 'cat[]': ['TV|SD|VOSTFR', 'TV|HD|VOSTFR', 'TV|SD|VF', 'TV|HD|VF', + 'TV|PACK|FR', 'TV|PACK|VOSTFR', 'TV|EMISSIONS', 'ANIME'], # Both ASC and DESC are available for sort direction 'way': 'DESC' } @@ -96,12 +97,12 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] - logger.log('Search Mode: {}'.format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {}'.format(search_string.decode('utf-8')), + logger.log('Search string: {0}'.format(search_string.decode('utf-8')), logger.DEBUG) # Sorting: Available parameters: ReleaseName, Seeders, Leechers, Snatched, Size @@ -115,7 +116,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man torrent_table = html.find(class_='torrent_table') torrent_rows = torrent_table('tr') if torrent_table else [] - # Continue only if at least one Release is found + # Continue only if at least one release is found if len(torrent_rows) < 2: logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue @@ -131,7 +132,8 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man try: title = cells[labels.index('Release')].get_text(strip=True) - download_url = urljoin(self.url, cells[labels.index('DL')].find('a', class_='tooltip')['href']) + download = cells[labels.index('DL')].find('a', class_='tooltip')['href'] + download_url = urljoin(self.url, download) if not all([title, download_url]): continue @@ -141,7 +143,8 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log('Discarding torrent because it doesn\'t meet the minimum seeders: {0}. Seeders: {1})'.format + logger.log("Discarding torrent because it doesn't meet the" + 'minimum seeders: {0}. Seeders: {1})'.format (title, seeders), logger.DEBUG) continue @@ -149,13 +152,23 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man torrent_size = cells[size_index].get_text() size = convert_size(torrent_size, units=units) or -1 - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format (title, seeders, leechers), logger.DEBUG) items.append(item) - except StandardError: + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. 
Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) continue results += items diff --git a/sickbeard/providers/alpharatio.py b/sickbeard/providers/alpharatio.py index d85e9faba8..5306d4a247 100644 --- a/sickbeard/providers/alpharatio.py +++ b/sickbeard/providers/alpharatio.py @@ -1,25 +1,25 @@ # coding=utf-8 # Author: Dustyn Gibson # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . from __future__ import unicode_literals + import re +import traceback from requests.compat import urljoin from requests.utils import dict_from_cookiejar @@ -36,7 +36,7 @@ class AlphaRatioProvider(TorrentProvider): # pylint: disable=too-many-instance- def __init__(self): # Provider Init - TorrentProvider.__init__(self, "AlphaRatio") + TorrentProvider.__init__(self, 'AlphaRatio') # Credentials self.username = None @@ -47,14 +47,14 @@ def __init__(self): self.minleech = None # URLs - self.url = "http://alpharatio.cc" + self.url = 'http://alpharatio.cc' self.urls = { - "login": urljoin(self.url, "login.php"), - "search": urljoin(self.url, "torrents.php"), + 'login': urljoin(self.url, 'login.php'), + 'search': urljoin(self.url, 'torrents.php'), } # Proper Strings - self.proper_strings = ["PROPER", "REPACK"] + self.proper_strings = ['PROPER', 'REPACK'] # Cache self.cache = tvcache.TVCache(self) @@ -64,20 +64,20 @@ def login(self): return True login_params = { - "username": self.username, - "password": self.password, - "login": "submit", - "remember_me": "on", + 'username': self.username, + 'password': self.password, + 'login': 'submit', + 'remember_me': 'on', } - response = self.get_url(self.urls["login"], post_data=login_params, returns="text") + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') if not response: - logger.log("Unable to connect to provider", logger.WARNING) + logger.log('Unable to connect to provider', logger.WARNING) return False - if re.search("Invalid Username/password", response) \ - or re.search("Login :: AlphaRatio.cc", response): - logger.log("Invalid username or password. Check your settings", logger.WARNING) + if re.search('Invalid Username/password', response) \ + or re.search('Login :: AlphaRatio.cc', response): + logger.log('Invalid username or password. 
Check your settings', logger.WARNING) return False return True @@ -89,86 +89,97 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Search Params search_params = { - "searchstr": "", - "filter_cat[1]": 1, - "filter_cat[2]": 1, - "filter_cat[3]": 1, - "filter_cat[4]": 1, - "filter_cat[5]": 1 + 'searchstr': '', + 'filter_cat[1]': 1, + 'filter_cat[2]': 1, + 'filter_cat[3]': 1, + 'filter_cat[4]': 1, + 'filter_cat[5]': 1 } # Units - units = ["B", "KB", "MB", "GB", "TB", "PB"] + units = ['B', 'KB', 'MB', 'GB', 'TB', 'PB'] def process_column_header(td): - result = "" + result = '' if td.a and td.a.img: - result = td.a.img.get("title", td.a.get_text(strip=True)) + result = td.a.img.get('title', td.a.get_text(strip=True)) if not result: result = td.get_text(strip=True) return result for mode in search_strings: items = [] - logger.log("Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: - if mode != "RSS": - logger.log("Search string: {search}".format - (search=search_string.decode("utf-8")), logger.DEBUG) + if mode != 'RSS': + logger.log('Search string: {search}'.format + (search=search_string.decode('utf-8')), logger.DEBUG) - search_params["searchstr"] = search_string - search_url = self.urls["search"] - data = self.get_url(search_url, params=search_params, returns="text") + search_params['searchstr'] = search_string + search_url = self.urls['search'] + data = self.get_url(search_url, params=search_params, returns='text') if not data: - logger.log("No data returned from provider", logger.DEBUG) + logger.log('No data returned from provider', logger.DEBUG) continue - with BS4Parser(data, "html5lib") as html: - torrent_table = html.find("table", id="torrent_table") - torrent_rows = torrent_table("tr") if torrent_table else [] + with BS4Parser(data, 'html5lib') as html: + torrent_table = html.find('table', id='torrent_table') + torrent_rows = torrent_table('tr') if torrent_table else [] - # Continue only if at least one Release is found + # Continue only if at least one release is found if len(torrent_rows) < 2: - logger.log("Data returned from provider does not contain any torrents", logger.DEBUG) + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue - # "", "", "Name /Year", "Files", "Time", "Size", "Snatches", "Seeders", "Leechers" - labels = [process_column_header(label) for label in torrent_rows[0]("td")] + # '', '', 'Name /Year', 'Files', 'Time', 'Size', 'Snatches', 'Seeders', 'Leechers' + labels = [process_column_header(label) for label in torrent_rows[0]('td')] # Skip column headers for result in torrent_rows[1:]: - cells = result("td") + cells = result('td') if len(cells) < len(labels): continue try: - title = cells[labels.index("Name /Year")].find("a", dir="ltr").get_text(strip=True) - download_url = urljoin(self.url, cells[labels.index("Name /Year")].find("a", title="Download")["href"]) + title = cells[labels.index('Name /Year')].find('a', dir='ltr').get_text(strip=True) + download = cells[labels.index('Name /Year')].find('a', title='Download')['href'] + download_url = urljoin(self.url, download) if not all([title, download_url]): continue - seeders = try_int(cells[labels.index("Seeders")].get_text(strip=True)) - leechers = try_int(cells[labels.index("Leechers")].get_text(strip=True)) + seeders = try_int(cells[labels.index('Seeders')].get_text(strip=True)) + leechers = 
try_int(cells[labels.index('Leechers')].get_text(strip=True)) # Filter unseeded torrent if seeders < min(self.minseed, 1): - if mode != "RSS": + if mode != 'RSS': logger.log("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1})".format + ' minimum seeders: {0}. Seeders: {1})'.format (title, seeders), logger.DEBUG) continue - torrent_size = cells[labels.index("Size")].get_text(strip=True) + torrent_size = cells[labels.index('Size')].get_text(strip=True) size = convert_size(torrent_size, units=units) or -1 - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} - if mode != "RSS": - logger.log("Found result: {0} with {1} seeders and {2} leechers".format + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } + if mode != 'RSS': + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format (title, seeders, leechers), logger.DEBUG) items.append(item) - except StandardError: + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) continue results += items diff --git a/sickbeard/providers/anizb.py b/sickbeard/providers/anizb.py index 91bca698fa..5a4ec03068 100644 --- a/sickbeard/providers/anizb.py +++ b/sickbeard/providers/anizb.py @@ -16,21 +16,26 @@ # You should have received a copy of the GNU General Public License # along with Medusa. If not, see . +from __future__ import unicode_literals + import traceback from sickbeard import logger, tvcache + from sickrage.providers.nzb.NZBProvider import NZBProvider from sickrage.helper.common import try_int + from requests.compat import urljoin + from bs4 import BeautifulSoup class Anizb(NZBProvider): # pylint: disable=too-many-instance-attributes - """Nzb Provider using the open api of anizb.org for daily (rss) and backlog/forced searches""" + '''Nzb Provider using the open api of anizb.org for daily (rss) and backlog/forced searches''' def __init__(self): # Provider Init - NZBProvider.__init__(self, "Anizb") + NZBProvider.__init__(self, 'Anizb') self.public = True self.supports_absolute_numbering = True @@ -47,24 +52,22 @@ def __init__(self): self.cache = tvcache.TVCache(self) def _get_size(self, item): - """Override the default _get_size to prevent it from extracting using it the default tags""" + '''Override the default _get_size to prevent it from extracting using it the default tags''' return try_int(item.get('size')) def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals - """Start searching for anime using the provided search_strings. Used for backlog and daily""" - _ = age - _ = ep_obj + '''Start searching for anime using the provided search_strings. 
Used for backlog and daily''' results = [] if self.show and not self.show.is_anime: return results for mode in search_strings: - logger.log(u"Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log(u"Search string: {}".format(search_string.decode("utf-8")), + logger.log('Search string: {0}'.format(search_string.decode('utf-8')), logger.DEBUG) try: @@ -72,7 +75,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man data = self.get_url(search_url, returns='text') if not data: - logger.log(u"No data returned from provider", logger.DEBUG) + logger.log(u'No data returned from provider', logger.DEBUG) continue if not data.startswith(' # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . from __future__ import unicode_literals import re + from requests.compat import urljoin from sickbeard import logger, tvcache @@ -44,6 +43,7 @@ def __init__(self): class BinSearchCache(tvcache.TVCache): + def __init__(self, provider_obj, **kwargs): kwargs.pop('search_params', None) # does not use _getRSSData so strip param from kwargs... search_params = None # ...and pass None instead @@ -120,4 +120,5 @@ def updateCache(self): def _checkAuth(self, data): return data if data['feed'] and data['feed']['title'] != 'Invalid Link' else None + provider = BinSearchProvider() diff --git a/sickbeard/providers/bitcannon.py b/sickbeard/providers/bitcannon.py index 66aca08fa2..a7abf41ae2 100644 --- a/sickbeard/providers/bitcannon.py +++ b/sickbeard/providers/bitcannon.py @@ -1,27 +1,27 @@ # coding=utf-8 # Author: Dustyn Gibson # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . 
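The AlphaRatio parser above resolves table cells by header label instead of by fixed position, so a reordered column does not silently shift every field. A condensed sketch of the technique; the markup is invented for illustration, and the real process_column_header() additionally falls back to an img title when a header label is an icon:

    from bs4 import BeautifulSoup

    MARKUP = '''<table id="torrent_table">
      <tr><td>Name /Year</td><td>Size</td><td>Seeders</td><td>Leechers</td></tr>
      <tr><td>Some.Show.S01E01</td><td>1.2 GB</td><td>42</td><td>7</td></tr>
    </table>'''

    html = BeautifulSoup(MARKUP, 'html5lib')
    rows = html.find('table', id='torrent_table')('tr')

    # Build the label list once from the header row, then index by name.
    labels = [td.get_text(strip=True) for td in rows[0]('td')]
    for row in rows[1:]:
        cells = row('td')
        if len(cells) < len(labels):
            continue
        seeders = int(cells[labels.index('Seeders')].get_text(strip=True))
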
from __future__ import unicode_literals -from requests.compat import urljoin import validators +import traceback + +from requests.compat import urljoin from sickbeard import logger, tvcache @@ -33,82 +33,92 @@ class BitCannonProvider(TorrentProvider): def __init__(self): - TorrentProvider.__init__(self, "BitCannon") + TorrentProvider.__init__(self, 'BitCannon') self.minseed = None self.minleech = None self.custom_url = None self.api_key = None - self.cache = tvcache.TVCache(self, search_params={"RSS": ["tv", "anime"]}) + self.cache = tvcache.TVCache(self, search_params={'RSS': ['tv', 'anime']}) def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-branches, too-many-locals results = [] - url = "http://localhost:3000/" + url = 'http://localhost:3000/' if self.custom_url: if not validators.url(self.custom_url, require_tld=False): - logger.log("Invalid custom url set, please check your settings", logger.WARNING) + logger.log('Invalid custom url set, please check your settings', logger.WARNING) return results url = self.custom_url search_params = {} anime = ep_obj and ep_obj.show and ep_obj.show.anime - search_params["category"] = ("tv", "anime")[bool(anime)] + search_params['category'] = ('tv', 'anime')[bool(anime)] if self.api_key: - search_params["apiKey"] = self.api_key + search_params['apiKey'] = self.api_key for mode in search_strings: items = [] - logger.log("Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: - search_params["q"] = search_string - if mode != "RSS": - logger.log("Search string: {}".format(search_string), logger.DEBUG) + search_params['q'] = search_string + if mode != 'RSS': + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) - search_url = urljoin(url, "api/search") - parsed_json = self.get_url(search_url, params=search_params, returns="json") + search_url = urljoin(url, 'api/search') + parsed_json = self.get_url(search_url, params=search_params, returns='json') if not parsed_json: - logger.log("No data returned from provider", logger.DEBUG) + logger.log('No data returned from provider', logger.DEBUG) continue if not self._check_auth_from_data(parsed_json): return results - for result in parsed_json.pop("torrents", {}): + for result in parsed_json.pop('torrents', {}): try: - title = result.pop("title", "") + title = result.pop('title', '') - info_hash = result.pop("infoHash", "") - download_url = "magnet:?xt=urn:btih:" + info_hash + info_hash = result.pop('infoHash', '') + download_url = 'magnet:?xt=urn:btih:' + info_hash if not all([title, download_url, info_hash]): continue - swarm = result.pop("swarm", None) + swarm = result.pop('swarm', None) if swarm: - seeders = try_int(swarm.pop("seeders", 0)) - leechers = try_int(swarm.pop("leechers", 0)) + seeders = try_int(swarm.pop('seeders', 0)) + leechers = try_int(swarm.pop('leechers', 0)) else: seeders = leechers = 0 if seeders < min(self.minseed, 1): - if mode != "RSS": + if mode != 'RSS': logger.log("Discarding torrent because it doesn't meet the " - "minimum seeders: {0}. Seeders: {1})".format + 'minimum seeders: {0}. 
Seeders: {1})'.format (title, seeders), logger.DEBUG) continue - size = convert_size(result.pop("size", -1)) or -1 - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} - if mode != "RSS": - logger.log("Found result: {0} with {1} seeders and {2} leechers".format + size = convert_size(result.pop('size', -1)) or -1 + + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } + if mode != 'RSS': + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format (title, seeders, leechers), logger.DEBUG) items.append(item) - except (AttributeError, TypeError, KeyError, ValueError): - continue + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) results += items @@ -117,12 +127,13 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man @staticmethod def _check_auth_from_data(data): if not all([isinstance(data, dict), - data.pop("status", 200) != 401, - data.pop("message", "") != "Invalid API key"]): + data.pop('status', 200) != 401, + data.pop('message', '') != 'Invalid API key']): - logger.log("Invalid api key. Check your settings", logger.WARNING) + logger.log('Invalid api key. Check your settings', logger.WARNING) return False return True + provider = BitCannonProvider() diff --git a/sickbeard/providers/bitsnoop.py b/sickbeard/providers/bitsnoop.py index c9d98a17f8..4522a8d437 100644 --- a/sickbeard/providers/bitsnoop.py +++ b/sickbeard/providers/bitsnoop.py @@ -1,27 +1,28 @@ # coding=utf-8 # Author: Gonçalo M. (aka duramato/supergonkas) # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . 
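BitCannon returns only an info hash, from which the provider synthesizes a magnet link. A minimal sketch of that construction; the dn (display name) parameter is an optional extra the provider code does not add, and the hash below is a dummy 40-character value:

    try:
        from urllib import quote        # Python 2, matching this codebase
    except ImportError:
        from urllib.parse import quote  # Python 3

    def make_magnet(info_hash, title=None):
        # 'xt=urn:btih:' is the BitTorrent info-hash form used above.
        magnet = 'magnet:?xt=urn:btih:' + info_hash
        if title:
            magnet += '&dn=' + quote(title)
        return magnet

    print(make_magnet('0123456789abcdef0123456789abcdef01234567',
                      'Some.Show.S01E01'))
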
+ +from __future__ import unicode_literals import traceback +import sickbeard + from bs4 import BeautifulSoup -import sickbeard from sickbeard import logger, tvcache from sickrage.helper.common import convert_size, try_int @@ -32,7 +33,7 @@ class BitSnoopProvider(TorrentProvider): # pylint: disable=too-many-instance-at def __init__(self): - TorrentProvider.__init__(self, "BitSnoop") + TorrentProvider.__init__(self, 'BitSnoop') self.urls = { 'index': 'http://bitsnoop.com', @@ -52,13 +53,14 @@ def __init__(self): def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-branches,too-many-locals results = [] + for mode in search_strings: items = [] - logger.log(u"Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log(u"Search string: {}".format(search_string.decode("utf-8")), + logger.log('Search string: {0}'.format(search_string.decode('utf-8')), logger.DEBUG) try: @@ -66,11 +68,11 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man data = self.get_url(search_url, returns='text') if not data: - logger.log(u"No data returned from provider", logger.DEBUG) + logger.log('No data returned from provider', logger.DEBUG) continue if not data.startswith(' # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . + +from __future__ import unicode_literals import re -from requests.utils import dict_from_cookiejar import traceback +from requests.utils import dict_from_cookiejar + from sickbeard import logger, tvcache from sickbeard.bs4_parser import BS4Parser + from sickrage.helper.common import try_int from sickrage.providers.torrent.TorrentProvider import TorrentProvider @@ -32,7 +34,7 @@ class BlueTigersProvider(TorrentProvider): # pylint: disable=too-many-instance- def __init__(self): - TorrentProvider.__init__(self, "BLUETIGERS") + TorrentProvider.__init__(self, 'BLUETIGERS') self.username = None self.password = None @@ -48,7 +50,7 @@ def __init__(self): } self.search_params = { - "c16": 1, "c10": 1, "c130": 1, "c131": 1, "c17": 1, "c18": 1, "c19": 1, "c9": 1 + 'c16': 1, 'c10': 1, 'c130': 1, 'c131': 1, 'c17': 1, 'c18': 1, 'c19': 1, 'c9': 1 } self.url = self.urls['base_url'] @@ -70,27 +72,28 @@ def login(self): if re.search('account-logout.php', check_login): return True else: - logger.log(u"Unable to connect to provider", logger.WARNING) + logger.log('Unable to connect to provider', logger.WARNING) return False if re.search('account-login.php', response): - logger.log(u"Invalid username or password. Check your settings", logger.WARNING) + logger.log('Invalid username or password. 
Check your settings', logger.WARNING) return False return True def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals results = [] + if not self.login(): return results for mode in search_strings: items = [] - logger.log(u"Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log(u"Search string: {}".format(search_string.decode("utf-8")), + logger.log('Search string: {}'.format(search_string.decode('utf-8')), logger.DEBUG) self.search_params['search'] = search_string @@ -101,17 +104,17 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man try: with BS4Parser(data, 'html5lib') as html: - result_linkz = html('a', href=re.compile("torrents-details")) + result_linkz = html('a', href=re.compile('torrents-details')) if not result_linkz: - logger.log(u"Data returned from provider do not contains any torrent", logger.DEBUG) + logger.log('Data returned from provider do not contains any torrent', logger.DEBUG) continue if result_linkz: for link in result_linkz: title = link.text download_url = self.urls['base_url'] + link['href'] - download_url = download_url.replace("torrents-details", "download") + download_url = download_url.replace('torrents-details', 'download') # FIXME size = -1 seeders = 1 @@ -123,18 +126,29 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Filter unseeded torrent # if seeders < min(self.minseed, 1): # if mode != 'RSS': - # logger.log(u"Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format + # logger.log('Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})'.format # (title, seeders), logger.DEBUG) # continue - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } if mode != 'RSS': - logger.log(u"Found result: %s " % title, logger.DEBUG) + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) items.append(item) - except Exception: - logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR) + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) + continue results += items diff --git a/sickbeard/providers/btdigg.py b/sickbeard/providers/btdigg.py index 489f5b0705..c41fe5cd76 100644 --- a/sickbeard/providers/btdigg.py +++ b/sickbeard/providers/btdigg.py @@ -1,26 +1,25 @@ # coding=utf-8 # Author: Jodi Jones # Rewrite: Gonçalo M. (aka duramato/supergonkas) - # -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. 
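Every provider in this series now yields the same result shape, so the cache layer can persist the fields uniformly. The dict below collects the full key set with placeholder values; pubdate and hash stay explicit Nones where a site cannot supply them:

    item = {
        'title': 'Some.Show.S01E01.720p.HDTV.x264-GROUP',        # placeholder
        'link': 'http://example.invalid/download/1234.torrent',  # placeholder
        'size': 1248576512,   # bytes; -1 when the site omits the size
        'seeders': 42,
        'leechers': 7,
        'pubdate': None,      # most of these sites do not expose it
        'hash': None,         # only some providers can supply an info hash
    }
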
# -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . from __future__ import unicode_literals -import validators +import traceback from sickbeard import logger, tvcache @@ -31,92 +30,105 @@ class BTDiggProvider(TorrentProvider): def __init__(self): - + # Provider Init - TorrentProvider.__init__(self, "BTDigg") + TorrentProvider.__init__(self, 'BTDigg') self.public = True - - # Torrent Stats + + # Torrent Stats self.minseed = None self.minleech = None # URLs - self.url = "https://btdigg.org" - self.urls = {"api": "https://api.btdigg.org/api/private-341ada3245790954/s02"} + self.url = 'https://btdigg.org' + self.urls = {'api': 'https://api.btdigg.org/api/private-341ada3245790954/s02'} self.custom_url = None - + # Proper Strings - self.proper_strings = ["PROPER", "REPACK"] + self.proper_strings = ['PROPER', 'REPACK'] # Use this hacky way for RSS search since most results will use this codecs - cache_params = {"RSS": ["x264", "x264.HDTV", "720.HDTV.x264"]} + cache_params = {'RSS': ['x264', 'x264.HDTV', '720.HDTV.x264']} # Only poll BTDigg every 30 minutes max, since BTDigg takes some time to crawl self.cache = tvcache.TVCache(self, min_time=30, search_params=cache_params) def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals results = [] - search_params = {"p": 0} + search_params = {'p': 0} for mode in search_strings: items = [] - logger.log("Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: - search_params["q"] = search_string - if mode != "RSS": - search_params["order"] = 0 - logger.log("Search string: {}".format(search_string.decode("utf-8")), + search_params['q'] = search_string + if mode != 'RSS': + search_params['order'] = 0 + logger.log('Search string: {0}'.format(search_string.decode('utf-8')), logger.DEBUG) else: - search_params["order"] = 2 + search_params['order'] = 2 if self.custom_url: # if not validators.url(self.custom_url): - # logger.log("Invalid custom url set, please check your settings", logger.WARNING) + # logger.log('Invalid custom url set, please check your settings', logger.WARNING) # return results - search_url = self.custom_url + "api/private-341ada3245790954/s02" + search_url = self.custom_url + 'api/private-341ada3245790954/s02' else: - search_url = self.urls["api"] - jdata = self.get_url(search_url, params=search_params, returns="json") + search_url = self.urls['api'] + jdata = self.get_url(search_url, params=search_params, returns='json') if not jdata: - logger.log("Provider did not return data", logger.DEBUG) + logger.log('Provider did not return data', logger.DEBUG) continue for torrent in jdata: try: - title = torrent.pop("name", "") - download_url = torrent.pop("magnet") + self._custom_trackers if torrent["magnet"] else None + title = torrent.pop('name', '') + download_url = torrent.pop('magnet') + self._custom_trackers if torrent['magnet'] else None if not all([title, download_url]): continue - if float(torrent.pop("ff")): - logger.log("Ignoring result for {} since it's been reported as fake (level = {})".format - (title, torrent["ff"]), 
logger.DEBUG) + if float(torrent.pop('ff')): + logger.log("Ignoring result for {0} since it's been" + ' reported as fake (level = {1})'.format + (title, torrent['ff']), logger.DEBUG) continue - if not int(torrent.pop("files")): - logger.log("Ignoring result for {} because it has no files".format + if not int(torrent.pop('files')): + logger.log('Ignoring result for {0} because it has no files'.format (title), logger.DEBUG) continue - leechers = torrent.pop("leechers", 0) - seeders = torrent.pop("seeders", 1) + + leechers = torrent.pop('leechers', 0) + seeders = torrent.pop('seeders', 1) # Filter unseeded torrent if seeders < min(self.minseed, 1): - if mode != "RSS": + if mode != 'RSS': logger.log("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1})".format + ' minimum seeders: {0}. Seeders: {1})'.format (title, seeders), logger.DEBUG) - continue - torrent_size = torrent.pop("size") + continue + + torrent_size = torrent.pop('size') size = convert_size(torrent_size) or -1 - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} - if mode != "RSS": - logger.log("Found result: %s " % title, logger.DEBUG) + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } + if mode != 'RSS': + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) items.append(item) - - except StandardError: + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) continue results += items diff --git a/sickbeard/providers/btn.py b/sickbeard/providers/btn.py index e08f7beccf..f9e0c41ef4 100644 --- a/sickbeard/providers/btn.py +++ b/sickbeard/providers/btn.py @@ -1,22 +1,22 @@ # coding=utf-8 # Author: Daniel Heimans # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . + +from __future__ import unicode_literals from datetime import datetime import jsonrpclib @@ -38,7 +38,7 @@ class BTNProvider(TorrentProvider): def __init__(self): - TorrentProvider.__init__(self, "BTN") + TorrentProvider.__init__(self, 'BTN') self.supports_absolute_numbering = True @@ -46,26 +46,26 @@ def __init__(self): self.cache = BTNCache(self, min_time=15) # Only poll BTN every 15 minutes max - self.urls = {'base_url': u'http://api.btnapps.net', - 'website': u'http://broadcasthe.net/', } + self.urls = {'base_url': 'http://api.btnapps.net', + 'website': 'http://broadcasthe.net/', } self.url = self.urls['website'] def _check_auth(self): if not self.api_key: - logger.log(u"Invalid api key. 
Check your settings", logger.WARNING) + logger.log('Invalid api key. Check your settings', logger.WARNING) return True - def _checkAuthFromData(self, parsedJSON): + def _checkAuthFromData(self, parsed_json): - if parsedJSON is None: + if parsed_json is None: return self._check_auth() - if 'api-error' in parsedJSON: - logger.log(u"Incorrect authentication credentials: % s" % parsedJSON['api-error'], logger.DEBUG) + if 'api-error' in parsed_json: + logger.log('Incorrect authentication credentials: % s' % parsed_json['api-error'], logger.DEBUG) raise AuthException( - "Your authentication credentials for " + self.name + " are incorrect, check your config.") + 'Your authentication credentials for ' + self.name + ' are incorrect, check your config.') return True @@ -79,21 +79,21 @@ def search(self, search_params, age=0, ep_obj=None): # pylint:disable=too-many- # age in seconds if age: - params['age'] = "<=" + str(int(age)) + params['age'] = '<=' + str(int(age)) if search_params: params.update(search_params) - logger.log(u"Search string: %s" % search_params, logger.DEBUG) + logger.log('Search string: %s' % search_params, logger.DEBUG) - parsedJSON = self._api_call(apikey, params) - if not parsedJSON: - logger.log(u"No data returned from provider", logger.DEBUG) + parsed_json = self._api_call(apikey, params) + if not parsed_json: + logger.log('No data returned from provider', logger.DEBUG) return results - if self._checkAuthFromData(parsedJSON): + if self._checkAuthFromData(parsed_json): - if 'torrents' in parsedJSON: - found_torrents = parsedJSON['torrents'] + if 'torrents' in parsed_json: + found_torrents = parsed_json['torrents'] else: found_torrents = {} @@ -104,24 +104,24 @@ def search(self, search_params, age=0, ep_obj=None): # pylint:disable=too-many- max_pages = 150 results_per_page = 1000 - if 'results' in parsedJSON and int(parsedJSON['results']) >= results_per_page: - pages_needed = int(math.ceil(int(parsedJSON['results']) / results_per_page)) + if 'results' in parsed_json and int(parsed_json['results']) >= results_per_page: + pages_needed = int(math.ceil(int(parsed_json['results']) / results_per_page)) if pages_needed > max_pages: pages_needed = max_pages # +1 because range(1,4) = 1, 2, 3 for page in range(1, pages_needed + 1): - parsedJSON = self._api_call(apikey, params, results_per_page, page * results_per_page) + parsed_json = self._api_call(apikey, params, results_per_page, page * results_per_page) # Note that this these are individual requests and might time out individually. This would result in 'gaps' # in the results. There is no way to fix this though. 
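The paging arithmetic above fans a single query out over follow-up calls of up to 1,000 results each, capped at 150 pages. A compact sketch, assuming fetch(offset, limit) stands in for the JSON-RPC getTorrents call and that the first page (offset 0) was already requested:

    import math

    RESULTS_PER_PAGE = 1000
    MAX_PAGES = 150

    def fetch_remaining(fetch, total_results):
        torrents = {}
        # float() keeps the ceil meaningful under Python 2 integer division.
        pages_needed = int(math.ceil(total_results / float(RESULTS_PER_PAGE)))
        pages_needed = min(pages_needed, MAX_PAGES)
        # +1 because range(1, 4) yields 1, 2, 3
        for page in range(1, pages_needed + 1):
            torrents.update(fetch(offset=page * RESULTS_PER_PAGE,
                                  limit=RESULTS_PER_PAGE))
        return torrents
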
- if 'torrents' in parsedJSON: - found_torrents.update(parsedJSON['torrents']) + if 'torrents' in parsed_json: + found_torrents.update(parsed_json['torrents']) for _, torrent_info in found_torrents.iteritems(): (title, url) = self._get_title_and_url(torrent_info) if title and url: - logger.log(u"Found result: %s " % title, logger.DEBUG) + logger.log('Found result: %s ' % title, logger.DEBUG) results.append(torrent_info) # FIXME SORT RESULTS @@ -130,66 +130,67 @@ def search(self, search_params, age=0, ep_obj=None): # pylint:disable=too-many- def _api_call(self, apikey, params=None, results_per_page=1000, offset=0): server = jsonrpclib.Server(self.urls['base_url']) - parsedJSON = {} + parsed_json = {} try: - parsedJSON = server.getTorrents(apikey, params or {}, int(results_per_page), int(offset)) + parsed_json = server.getTorrents(apikey, params or {}, int(results_per_page), int(offset)) time.sleep(cpu_presets[sickbeard.CPU_PRESET]) except jsonrpclib.jsonrpc.ProtocolError, error: if error.message == 'Call Limit Exceeded': - logger.log(u"You have exceeded the limit of 150 calls per hour, per API key which is unique to your user account", logger.WARNING) + logger.log('You have exceeded the limit of 150 calls per hour,' + ' per API key which is unique to your user account', logger.WARNING) else: - logger.log(u"JSON-RPC protocol error while accessing provicer. Error: %s " % repr(error), logger.ERROR) - parsedJSON = {'api-error': ex(error)} - return parsedJSON + logger.log('JSON-RPC protocol error while accessing provicer. Error: %s ' % repr(error), logger.ERROR) + parsed_json = {'api-error': ex(error)} + return parsed_json except socket.timeout: - logger.log(u"Timeout while accessing provider", logger.WARNING) + logger.log('Timeout while accessing provider', logger.WARNING) except socket.error, error: # Note that sometimes timeouts are thrown as socket errors - logger.log(u"Socket error while accessing provider. Error: %s " % error[1], logger.WARNING) + logger.log('Socket error while accessing provider. Error: %s ' % error[1], logger.WARNING) except Exception, error: errorstring = str(error) if errorstring.startswith('<') and errorstring.endswith('>'): errorstring = errorstring[1:-1] - logger.log(u"Unknown error while accessing provider. Error: %s " % errorstring, logger.WARNING) + logger.log('Unknown error while accessing provider. Error: %s ' % errorstring, logger.WARNING) - return parsedJSON + return parsed_json - def _get_title_and_url(self, parsedJSON): + def _get_title_and_url(self, parsed_json): # The BTN API gives a lot of information in response, # however SickRage is built mostly around Scene or # release names, which is why we are using them here. - if 'ReleaseName' in parsedJSON and parsedJSON['ReleaseName']: - title = parsedJSON['ReleaseName'] + if 'ReleaseName' in parsed_json and parsed_json['ReleaseName']: + title = parsed_json['ReleaseName'] else: # If we don't have a release name we need to get creative - title = u'' - if 'Series' in parsedJSON: - title += parsedJSON['Series'] - if 'GroupName' in parsedJSON: - title += '.' + parsedJSON['GroupName'] if title else parsedJSON['GroupName'] - if 'Resolution' in parsedJSON: - title += '.' + parsedJSON['Resolution'] if title else parsedJSON['Resolution'] - if 'Source' in parsedJSON: - title += '.' + parsedJSON['Source'] if title else parsedJSON['Source'] - if 'Codec' in parsedJSON: - title += '.' 
+ parsedJSON['Codec'] if title else parsedJSON['Codec'] + title = '' + if 'Series' in parsed_json: + title += parsed_json['Series'] + if 'GroupName' in parsed_json: + title += '.' + parsed_json['GroupName'] if title else parsed_json['GroupName'] + if 'Resolution' in parsed_json: + title += '.' + parsed_json['Resolution'] if title else parsed_json['Resolution'] + if 'Source' in parsed_json: + title += '.' + parsed_json['Source'] if title else parsed_json['Source'] + if 'Codec' in parsed_json: + title += '.' + parsed_json['Codec'] if title else parsed_json['Codec'] if title: title = title.replace(' ', '.') url = None - if 'DownloadURL' in parsedJSON: - url = parsedJSON['DownloadURL'] + if 'DownloadURL' in parsed_json: + url = parsed_json['DownloadURL'] if url: # unescaped / is valid in JSON, but it can be escaped - url = url.replace("\\/", "/") + url = url.replace('\\/', '/') return title, url @@ -202,7 +203,7 @@ def _get_season_search_strings(self, ep_obj): # Search for the year of the air by date show current_params['name'] = str(ep_obj.airdate).split('-')[0] elif ep_obj.show.is_anime: - current_params['name'] = "%d" % ep_obj.scene_absolute_number + current_params['name'] = '%d' % ep_obj.scene_absolute_number else: current_params['name'] = 'Season ' + str(ep_obj.scene_season) @@ -236,10 +237,10 @@ def _get_episode_search_strings(self, ep_obj, add_string=''): # combined with the series identifier should result in just one episode search_params['name'] = date_str.replace('-', '.') elif ep_obj.show.anime: - search_params['name'] = "%i" % int(ep_obj.scene_absolute_number) + search_params['name'] = '%i' % int(ep_obj.scene_absolute_number) else: # Do a general name search for the episode, formatted like SXXEYY - search_params['name'] = u"{ep}".format(ep=episode_num(ep_obj.scene_season, ep_obj.scene_episode)) + search_params['name'] = '{ep}'.format(ep=episode_num(ep_obj.scene_season, ep_obj.scene_episode)) # search if ep_obj.show.indexer == 1: @@ -291,14 +292,15 @@ def _getRSSData(self): if seconds_since_last_update < seconds_minTime: seconds_since_last_update = seconds_minTime - # Set maximum to 24 hours (24 * 60 * 60 = 86400 seconds) of "RSS" data search, older things will need to be done through backlog + # Set maximum to 24 hours (24 * 60 * 60 = 86400 seconds) of 'RSS' data search, older things will need to be done through backlog if seconds_since_last_update > 86400: logger.log( - u"The last known successful update was more than 24 hours ago, only trying to fetch the last 24 hours!", + 'The last known successful update was more than 24 hours ago, only trying to fetch the last 24 hours!', logger.DEBUG) seconds_since_last_update = 86400 self.search_params = None # BTN cache does not use search params return {'entries': self.provider.search(search_params=self.search_params, age=seconds_since_last_update)} + provider = BTNProvider() diff --git a/sickbeard/providers/cpasbien.py b/sickbeard/providers/cpasbien.py index 7802a4c420..d72a4ff411 100644 --- a/sickbeard/providers/cpasbien.py +++ b/sickbeard/providers/cpasbien.py @@ -1,24 +1,25 @@ # coding=utf-8 # Author: Guillaume Serre # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. 
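When BTN returns no ReleaseName, the provider stitches a title together from the metadata fields. The chained conditional expressions above parse as title += ('.' + part) if title else part, which is easy to misread; an equivalent, more explicit sketch:

    def build_title(parsed_json):
        # Prefer the scene release name; otherwise join whichever of the
        # descriptive fields are present, dotted, with spaces replaced.
        if parsed_json.get('ReleaseName'):
            return parsed_json['ReleaseName']
        keys = ('Series', 'GroupName', 'Resolution', 'Source', 'Codec')
        parts = [parsed_json[key] for key in keys if parsed_json.get(key)]
        return '.'.join(parts).replace(' ', '.')

    # {'Series': 'Some Show', 'Resolution': '720p', 'Source': 'HDTV',
    #  'Codec': 'x264'}  ->  'Some.Show.720p.HDTV.x264'
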
# -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . + +from __future__ import unicode_literals import re +import traceback from sickbeard import logger, tvcache from sickbeard.bs4_parser import BS4Parser @@ -31,25 +32,26 @@ class CpasbienProvider(TorrentProvider): def __init__(self): - TorrentProvider.__init__(self, "Cpasbien") + TorrentProvider.__init__(self, 'Cpasbien') self.public = True self.minseed = None self.minleech = None - self.url = "http://www.cpasbien.cm" + self.url = 'http://www.cpasbien.cm' self.proper_strings = ['PROPER', 'REPACK'] self.cache = tvcache.TVCache(self) def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals results = [] + for mode in search_strings: items = [] - logger.log(u"Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log(u"Search string: {}".format(search_string.decode("utf-8")), + logger.log('Search string: {0}'.format(search_string.decode('utf-8')), logger.DEBUG) search_url = self.url + '/recherche/' + search_string.replace('.', '-').replace(' ', '-') + '.html,trie-seeds-d' else: @@ -63,32 +65,44 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man torrent_rows = html(class_=re.compile('ligne[01]')) for result in torrent_rows: try: - title = result.find(class_="titre").get_text(strip=True).replace("HDTV", "HDTV x264-CPasBien") + title = result.find(class_='titre').get_text(strip=True).replace('HDTV', 'HDTV x264-CPasBien') title = re.sub(r' Saison', ' Season', title, flags=re.IGNORECASE) - tmp = result.find("a")['href'].split('/')[-1].replace('.html', '.torrent').strip() + tmp = result.find('a')['href'].split('/')[-1].replace('.html', '.torrent').strip() download_url = (self.url + '/telechargement/%s' % tmp) if not all([title, download_url]): continue - seeders = try_int(result.find(class_="up").get_text(strip=True)) - leechers = try_int(result.find(class_="down").get_text(strip=True)) + seeders = try_int(result.find(class_='up').get_text(strip=True)) + leechers = try_int(result.find(class_='down').get_text(strip=True)) if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log(u"Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. 
Seeders: {1})'.format (title, seeders), logger.DEBUG) continue - torrent_size = result.find(class_="poid").get_text(strip=True) + torrent_size = result.find(class_='poid').get_text(strip=True) units = ['o', 'Ko', 'Mo', 'Go', 'To', 'Po'] size = convert_size(torrent_size, units=units) or -1 - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } if mode != 'RSS': - logger.log(u"Found result: %s with %s seeders and %s leechers" % (title, seeders, leechers), logger.DEBUG) + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) items.append(item) - except StandardError: + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) continue results += items From 9598985b134dda05b05da9f3601c52462c676104 Mon Sep 17 00:00:00 2001 From: medariox Date: Sat, 4 Jun 2016 20:29:01 +0200 Subject: [PATCH 30/85] Small anizb update --- sickbeard/providers/anizb.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/sickbeard/providers/anizb.py b/sickbeard/providers/anizb.py index 5a4ec03068..5df071bdd8 100644 --- a/sickbeard/providers/anizb.py +++ b/sickbeard/providers/anizb.py @@ -75,17 +75,17 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man data = self.get_url(search_url, returns='text') if not data: - logger.log(u'No data returned from provider', logger.DEBUG) + logger.log('No data returned from provider', logger.DEBUG) continue if not data.startswith(' Date: Sat, 4 Jun 2016 20:32:49 +0200 Subject: [PATCH 31/85] Small bluetigers update --- sickbeard/providers/bluetigers.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/sickbeard/providers/bluetigers.py b/sickbeard/providers/bluetigers.py index eb4c565f43..93af2e4a18 100644 --- a/sickbeard/providers/bluetigers.py +++ b/sickbeard/providers/bluetigers.py @@ -26,7 +26,6 @@ from sickbeard import logger, tvcache from sickbeard.bs4_parser import BS4Parser -from sickrage.helper.common import try_int from sickrage.providers.torrent.TorrentProvider import TorrentProvider @@ -89,11 +88,11 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] - logger.log('Search Mode: {}'.format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {}'.format(search_string.decode('utf-8')), + logger.log('Search string: {0}'.format(search_string.decode('utf-8')), logger.DEBUG) self.search_params['search'] = search_string From d74dae12159506901bfb92411a80c9375d8e49a9 Mon Sep 17 00:00:00 2001 From: medariox Date: Sun, 5 Jun 2016 13:30:45 +0200 Subject: [PATCH 32/85] Next 10 providers --- sickbeard/providers/danishbits.py | 52 +++++++++------ sickbeard/providers/elitetorrent.py | 84 ++++++++++++----------- sickbeard/providers/extratorrent.py | 76 ++++++++++++--------- sickbeard/providers/freshontv.py | 65 ++++++++++-------- sickbeard/providers/gftracker.py | 48 ++++++++----- sickbeard/providers/hd4free.py | 62 ++++++++++------- sickbeard/providers/hdbits.py | 57 ++++++++-------- sickbeard/providers/hdspace.py | 66 
++++++++++-------- sickbeard/providers/hdtorrents.py | 88 ++++++++++++++---------- sickbeard/providers/hounddawgs.py | 100 ++++++++++++++++------------ 10 files changed, 406 insertions(+), 292 deletions(-) diff --git a/sickbeard/providers/danishbits.py b/sickbeard/providers/danishbits.py index 56b414e540..e4d6415f2a 100644 --- a/sickbeard/providers/danishbits.py +++ b/sickbeard/providers/danishbits.py @@ -1,22 +1,24 @@ # coding=utf-8 # Author: Dustyn Gibson # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . + +from __future__ import unicode_literals + +import traceback from requests.utils import dict_from_cookiejar @@ -32,7 +34,7 @@ class DanishbitsProvider(TorrentProvider): # pylint: disable=too-many-instance- def __init__(self): # Provider Init - TorrentProvider.__init__(self, "Danishbits") + TorrentProvider.__init__(self, 'Danishbits') # Credentials self.username = None @@ -69,12 +71,12 @@ def login(self): response = self.get_url(self.urls['login'], post_data=login_params, returns='text') if not response: - logger.log(u"Unable to connect to provider", logger.WARNING) + logger.log('Unable to connect to provider', logger.WARNING) self.session.cookies.clear() return False if 'Login :: Danishbits.org' in response: - logger.log(u"Invalid username or password. Check your settings", logger.WARNING) + logger.log('Invalid username or password. 
Check your settings', logger.WARNING) self.session.cookies.clear() return False @@ -105,28 +107,28 @@ def process_column_header(td): for mode in search_strings: items = [] - logger.log(u"Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log(u"Search string: {}".format(search_string.decode("utf-8")), + logger.log('Search string: {0}'.format(search_string.decode('utf-8')), logger.DEBUG) search_params['search'] = search_string data = self.get_url(self.urls['search'], params=search_params, returns='text') if not data: - logger.log(u"No data returned from provider", logger.DEBUG) + logger.log('No data returned from provider', logger.DEBUG) continue with BS4Parser(data, 'html5lib') as html: torrent_table = html.find('table', id='torrent_table') torrent_rows = torrent_table('tr') if torrent_table else [] - # Continue only if at least one Release is found + # Continue only if at least one release is found if len(torrent_rows) < 2: - logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG) + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue # Literal: Navn, Størrelse, Kommentarer, Tilføjet, Snatches, Seeders, Leechers @@ -138,7 +140,7 @@ def process_column_header(td): try: title = result.find(class_='croptorrenttext').get_text(strip=True) - download_url = self.url + result.find(title="Direkte download link")['href'] + download_url = self.url + result.find(title='Direkte download link')['href'] if not all([title, download_url]): continue @@ -150,8 +152,8 @@ def process_column_header(td): # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log(u"Discarding torrent because it doesn't meet the" - u" minimum seeders: {0}. Seeders: {1})".format + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. Seeders: {1})'.format (title, seeders), logger.DEBUG) continue @@ -162,13 +164,23 @@ def process_column_header(td): torrent_size = cells[labels.index('Størrelse')].contents[0] size = convert_size(torrent_size, units=units) or -1 - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } if mode != 'RSS': - logger.log(u"Found result: {0} with {1} seeders and {2} leechers".format + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format (title, seeders, leechers), logger.DEBUG) items.append(item) - except StandardError: + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) continue results += items diff --git a/sickbeard/providers/elitetorrent.py b/sickbeard/providers/elitetorrent.py index c0720c8295..4fc3b1a085 100644 --- a/sickbeard/providers/elitetorrent.py +++ b/sickbeard/providers/elitetorrent.py @@ -1,22 +1,22 @@ # coding=utf-8 # Author: CristianBB # - -# -# This file is part of SickRage. +# This file is part of Medusa. 
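
The Danishbits hunk above is a good place to note the header-label pattern
these providers share: the parser builds a list of column labels from the
table's header row once, then looks data cells up by label (including the
Danish 'Størrelse' for size), so a reordered column no longer breaks the
scrape. A minimal sketch, assuming BeautifulSoup with html5lib as in the
providers themselves; the markup is illustrative:

    from bs4 import BeautifulSoup

    markup = ('<table id="torrent_table">'
              '<tr><td>Navn</td><td>Størrelse</td><td>Seeders</td></tr>'
              '<tr><td>Some.Show.S01E01</td><td>1.2 GB</td><td>42</td></tr>'
              '</table>')
    html = BeautifulSoup(markup, 'html5lib')

    rows = html.find('table', id='torrent_table')('tr')  # calling a Tag == find_all
    labels = [td.get_text(strip=True) for td in rows[0]('td')]
    for row in rows[1:]:
        cells = row('td')
        size = cells[labels.index('Størrelse')].get_text(strip=True)
        seeders = cells[labels.index('Seeders')].get_text(strip=True)
        print(size + ' / ' + seeders)  # 1.2 GB / 42
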
# -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . + +from __future__ import unicode_literals import re import traceback @@ -32,7 +32,7 @@ class elitetorrentProvider(TorrentProvider): def __init__(self): - TorrentProvider.__init__(self, "EliteTorrent") + TorrentProvider.__init__(self, 'EliteTorrent') self.onlyspasearch = None self.minseed = None @@ -50,7 +50,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man results = [] lang_info = '' if not ep_obj or not ep_obj.show else ep_obj.show.lang - """ + ''' Search query: http://www.elitetorrent.net/torrents.php?cat=4&modo=listado&orden=fecha&pag=1&buscar=fringe @@ -59,7 +59,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man orden = fecha => order buscar => Search show pag = 1 => page number - """ + ''' search_params = { 'cat': 4, @@ -67,21 +67,20 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'orden': 'fecha', 'pag': 1, 'buscar': '' - } for mode in search_strings: items = [] - logger.log(u"Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) # Only search if user conditions are true if self.onlyspasearch and lang_info != 'es' and mode != 'RSS': - logger.log(u"Show info is not spanish, skipping provider search", logger.DEBUG) + logger.log('Show info is not spanish, skipping provider search', logger.DEBUG) continue for search_string in search_strings[mode]: if mode != 'RSS': - logger.log(u"Search string: {}".format(search_string.decode("utf-8")), + logger.log('Search string: {0}'.format(search_string.decode('utf-8')), logger.DEBUG) search_string = re.sub(r'S0*(\d*)E(\d*)', r'\1x\2', search_string) @@ -91,27 +90,24 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if not data: continue - try: - with BS4Parser(data, 'html5lib') as html: - torrent_table = html.find('table', class_='fichas-listado') - torrent_rows = torrent_table('tr') if torrent_table else [] - - if len(torrent_rows) < 2: - logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG) - continue + with BS4Parser(data, 'html5lib') as html: + torrent_table = html.find('table', class_='fichas-listado') + torrent_rows = torrent_table('tr') if torrent_table else [] - for row in torrent_rows[1:]: - try: - download_url = self.urls['base_url'] + row.find('a')['href'] - title = self._processTitle(row.find('a', class_='nombre')['title']) - seeders = try_int(row.find('td', class_='semillas').get_text(strip=True)) - leechers = try_int(row.find('td', class_='clientes').get_text(strip=True)) + # Continue only if at least one release is found + if len(torrent_rows) < 2: + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) + continue - # Provider does not 
provide size - size = -1 + for row in torrent_rows[1:]: + try: + download_url = self.urls['base_url'] + row.find('a')['href'] + title = self._process_title(row.find('a', class_='nombre')['title']) + seeders = try_int(row.find('td', class_='semillas').get_text(strip=True)) + leechers = try_int(row.find('td', class_='clientes').get_text(strip=True)) - except (AttributeError, TypeError, KeyError, ValueError): - continue + # Provider does not provide size + size = -1 if not all([title, download_url]): continue @@ -119,25 +115,36 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log(u"Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. Seeders: {1})'.format (title, seeders), logger.DEBUG) continue - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } if mode != 'RSS': - logger.log(u"Found result: %s with %s seeders and %s leechers" % (title, seeders, leechers), logger.DEBUG) + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) items.append(item) - - except Exception: - logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.WARNING) + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) + continue results += items return results @staticmethod - def _processTitle(title): + def _process_title(title): # Quality, if no literal is defined it's HDTV if 'calidad' not in title: @@ -155,4 +162,5 @@ def _processTitle(title): return title.strip() + provider = elitetorrentProvider() diff --git a/sickbeard/providers/extratorrent.py b/sickbeard/providers/extratorrent.py index 4764ffbb2b..1cb1f4a0ce 100644 --- a/sickbeard/providers/extratorrent.py +++ b/sickbeard/providers/extratorrent.py @@ -2,26 +2,27 @@ # Author: Gonçalo M. (aka duramato/supergonkas) # Author: Dustyn Gibson # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . 
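
A quick illustration of the episode-number rewrite in the EliteTorrent hunk
above: the site indexes Spanish releases as '1x02' rather than 'S01E02', so
the substitution strips the S/E markers and the season's leading zeros before
querying. Sketch with illustrative search strings:

    import re

    for term in ('Fringe S01E02', 'Fringe S10E11'):
        print(re.sub(r'S0*(\d*)E(\d*)', r'\1x\2', term))
    # Fringe 1x02
    # Fringe 10x11
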
-import re +from __future__ import unicode_literals +import re +import traceback import sickbeard + from sickbeard import logger, tvcache from sickbeard.bs4_parser import BS4Parser from sickbeard.common import USER_AGENT @@ -34,7 +35,7 @@ class ExtraTorrentProvider(TorrentProvider): # pylint: disable=too-many-instanc def __init__(self): - TorrentProvider.__init__(self, "ExtraTorrent") + TorrentProvider.__init__(self, 'ExtraTorrent') self.urls = { 'index': 'http://extratorrent.cc', @@ -56,10 +57,10 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man results = [] for mode in search_strings: items = [] - logger.log(u"Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log(u"Search string: {}".format(search_string.decode("utf-8")), + logger.log('Search string: {0}'.format(search_string.decode('utf-8')), logger.DEBUG) self.search_params.update({'type': ('search', 'rss')[mode == 'RSS'], 'search': search_string}) @@ -67,15 +68,15 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man data = self.get_url(search_url, params=self.search_params, returns='text') if not data: - logger.log(u"No data returned from provider", logger.DEBUG) + logger.log('No data returned from provider', logger.DEBUG) continue if not data.startswith('$', '', item.find('title').get_text(strip=True)) seeders = try_int(item.find('seeders').get_text(strip=True)) @@ -89,26 +90,37 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man download_url = re.sub(r'(.*)/torrent/(.*).html', r'\1/download/\2.torrent', download_url) else: info_hash = item.find('info_hash').get_text(strip=True) - download_url = "magnet:?xt=urn:btih:" + info_hash + "&dn=" + title + self._custom_trackers - - except (AttributeError, TypeError, KeyError, ValueError): - continue - - if not all([title, download_url]): - continue - - # Filter unseeded torrent - if seeders < min(self.minseed, 1): + download_url = 'magnet:?xt=urn:btih:' + info_hash + '&dn=' + title + self._custom_trackers + + if not all([title, download_url]): + continue + + # Filter unseeded torrent + if seeders < min(self.minseed, 1): + if mode != 'RSS': + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. Seeders: {1})'.format + (title, seeders), logger.DEBUG) + continue + + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } if mode != 'RSS': - logger.log(u"Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format - (title, seeders), logger.DEBUG) - continue - - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} - if mode != 'RSS': - logger.log(u"Found result: %s with %s seeders and %s leechers" % (title, seeders, leechers), logger.DEBUG) + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) - items.append(item) + items.append(item) + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. 
Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) + continue results += items diff --git a/sickbeard/providers/freshontv.py b/sickbeard/providers/freshontv.py index 95ce723fdc..59b7c10ed1 100644 --- a/sickbeard/providers/freshontv.py +++ b/sickbeard/providers/freshontv.py @@ -1,28 +1,29 @@ # coding=utf-8 # Author: Idan Gutman # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . + +from __future__ import unicode_literals import re -from requests.utils import add_dict_to_cookiejar, dict_from_cookiejar import time import traceback +from requests.utils import add_dict_to_cookiejar, dict_from_cookiejar + from sickbeard import logger, tvcache from sickbeard.bs4_parser import BS4Parser @@ -34,7 +35,7 @@ class FreshOnTVProvider(TorrentProvider): # pylint: disable=too-many-instance-a def __init__(self): - TorrentProvider.__init__(self, "FreshOnTV") + TorrentProvider.__init__(self, 'FreshOnTV') self._uid = None self._hash = None @@ -59,7 +60,7 @@ def __init__(self): def _check_auth(self): if not self.username or not self.password: - logger.log(u"Invalid username or password. Check your settings", logger.WARNING) + logger.log('Invalid username or password. Check your settings', logger.WARNING) return True @@ -76,7 +77,7 @@ def login(self): response = self.get_url(self.urls['login'], post_data=login_params, returns='text') if not response: - logger.log(u"Unable to connect to provider", logger.WARNING) + logger.log('Unable to connect to provider', logger.WARNING) return False if re.search('/logout.php', response): @@ -90,15 +91,15 @@ def login(self): 'pass': self._hash} return True except Exception: - logger.log(u"Unable to login to provider (cookie)", logger.WARNING) + logger.log('Unable to login to provider (cookie)', logger.WARNING) return False else: if re.search('Username does not exist in the userbase or the account is not confirmed yet.', response): - logger.log(u"Invalid username or password. Check your settings", logger.WARNING) + logger.log('Invalid username or password. 
Check your settings', logger.WARNING) if re.search('DDoS protection by CloudFlare', response): - logger.log(u"Unable to login to provider due to CloudFlare DDoS javascript check", logger.WARNING) + logger.log('Unable to login to provider due to CloudFlare DDoS javascript check', logger.WARNING) return False @@ -111,11 +112,11 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many for mode in search_params: items = [] - logger.log(u"Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_params[mode]: if mode != 'RSS': - logger.log(u"Search string: {}".format(search_string.decode("utf-8")), + logger.log('Search string: {0}'.format(search_string.decode('utf-8')), logger.DEBUG) search_url = self.urls['search'] % (freeleech, search_string) @@ -125,7 +126,7 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many max_page_number = 0 if not init_html: - logger.log(u"No data returned from provider", logger.DEBUG) + logger.log('No data returned from provider', logger.DEBUG) continue try: @@ -153,7 +154,7 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many if max_page_number > 3 and mode == 'RSS': max_page_number = 3 except Exception: - logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR) + logger.log('Failed parsing provider. Traceback: %s' % traceback.format_exc(), logger.ERROR) continue data_response_list = [init_html] @@ -164,7 +165,7 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many time.sleep(1) page_search_url = search_url + '&page=' + str(i) - # '.log(u"Search string: " + page_search_url, logger.DEBUG) + # '.log('Search string: ' + page_search_url, logger.DEBUG) page_html = self.get_url(page_search_url, returns='text') if not page_html: @@ -178,11 +179,11 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many with BS4Parser(data_response, 'html5lib') as html: - torrent_rows = html("tr", {"class": re.compile('torrent_[0-9]*')}) + torrent_rows = html('tr', {'class': re.compile('torrent_[0-9]*')}) # Continue only if a Release is found if not torrent_rows: - logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG) + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue for individual_torrent in torrent_rows: @@ -194,7 +195,7 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many try: title = individual_torrent.find('a', {'class': 'torrent_name_link'})['title'] except Exception: - logger.log(u"Unable to parse torrent title. Traceback: %s " % traceback.format_exc(), logger.WARNING) + logger.log('Unable to parse torrent title. Traceback: %s ' % traceback.format_exc(), logger.WARNING) continue try: @@ -214,18 +215,30 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log(u"Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. 
Seeders: {1})'.format (title, seeders), logger.DEBUG) continue - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } if mode != 'RSS': - logger.log(u"Found result: %s with %s seeders and %s leechers" % (title, seeders, leechers), logger.DEBUG) + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) items.append(item) - except Exception: - logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR) + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) + continue results += items diff --git a/sickbeard/providers/gftracker.py b/sickbeard/providers/gftracker.py index 85c78b4eba..28e4943a16 100644 --- a/sickbeard/providers/gftracker.py +++ b/sickbeard/providers/gftracker.py @@ -1,24 +1,26 @@ # coding=utf-8 # Author: medariox # based on Dustyn Gibson's work - # -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . + +from __future__ import unicode_literals import re +import traceback from requests.utils import dict_from_cookiejar @@ -35,7 +37,7 @@ class GFTrackerProvider(TorrentProvider): # pylint: disable=too-many-instance-a def __init__(self): # Provider Init - TorrentProvider.__init__(self, "GFTracker") + TorrentProvider.__init__(self, 'GFTracker') # Credentials self.username = None @@ -61,7 +63,7 @@ def __init__(self): def _check_auth(self): if not self.username or not self.password: - raise AuthException("Your authentication credentials for " + self.name + " are missing, check your config.") + raise AuthException('Your authentication credentials for ' + self.name + ' are missing, check your config.') return True @@ -78,11 +80,11 @@ def login(self): self.get_url(self.url, returns='text') response = self.get_url(self.urls['login'], post_data=login_params, returns='text') if not response: - logger.log(u"Unable to connect to provider", logger.WARNING) + logger.log('Unable to connect to provider', logger.WARNING) return False if re.search('Username or password incorrect', response): - logger.log(u"Invalid username or password. Check your settings", logger.WARNING) + logger.log('Invalid username or password. 
Check your settings', logger.WARNING) return False return True @@ -118,19 +120,19 @@ def process_column_header(td): for mode in search_strings: items = [] - logger.log(u"Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log(u"Search string: {}".format(search_string.decode("utf-8")), + logger.log('Search string: {0}'.format(search_string.decode('utf-8')), logger.DEBUG) search_params['search'] = search_string data = self.get_url(self.urls['search'], params=search_params, returns='text') if not data: - logger.log(u"No data returned from provider", logger.DEBUG) + logger.log('No data returned from provider', logger.DEBUG) continue with BS4Parser(data, 'html5lib') as html: @@ -139,7 +141,7 @@ def process_column_header(td): # Continue only if at least one Release is found if len(torrent_rows) < 2: - logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG) + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue labels = [process_column_header(label) for label in torrent_rows[0]('td')] @@ -162,21 +164,31 @@ def process_column_header(td): # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log(u"Discarding torrent because it doesn't meet the" - u" minimum seeders: {0}. Seeders: {1})".format + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. Seeders: {1})'.format (title, seeders), logger.DEBUG) continue torrent_size = cells[labels.index('Size/Snatched')].get_text(strip=True).split('/', 1)[0] size = convert_size(torrent_size, units=units) or -1 - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } if mode != 'RSS': - logger.log(u"Found result: {0} with {1} seeders and {2} leechers".format + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format (title, seeders, leechers), logger.DEBUG) items.append(item) - except StandardError: + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) continue results += items diff --git a/sickbeard/providers/hd4free.py b/sickbeard/providers/hd4free.py index 27d94860ea..3cfa9e65c1 100644 --- a/sickbeard/providers/hd4free.py +++ b/sickbeard/providers/hd4free.py @@ -1,22 +1,24 @@ # coding=utf-8 # Author: Gonçalo M. (aka duramato/supergonkas) # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . 
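
One behavioural note on the unseeded-torrent filter that recurs in these
hunks (GFTracker above, HD4Free below): "seeders < min(self.minseed, 1)"
clamps the effective threshold to at most one seeder, so however high minseed
is configured the check only ever discards dead, zero-seeder results. A
sketch of the arithmetic; under the Python 2 runtime this code targets, an
unset (None) minseed disables the filter entirely, because integers never
compare below None:

    for minseed in (0, 1, 5, 20):
        threshold = min(minseed, 1)
        print('minseed=%d -> discard when seeders < %d' % (minseed, threshold))
    # minseed=0 keeps everything; every higher setting only drops 0-seeder rows
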
+# along with Medusa. If not, see . + +from __future__ import unicode_literals + +import traceback from requests.compat import urljoin from sickbeard import logger, tvcache @@ -29,7 +31,7 @@ class HD4FreeProvider(TorrentProvider): # pylint: disable=too-many-instance-att def __init__(self): - TorrentProvider.__init__(self, "HD4Free") + TorrentProvider.__init__(self, 'HD4Free') self.url = 'https://hd4free.xyz' self.urls = {'search': urljoin(self.url, '/searchapi.php')} @@ -62,7 +64,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] - logger.log(u"Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if self.freeleech: search_params['fl'] = 'true' @@ -70,56 +72,68 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man search_params.pop('fl', '') if mode != 'RSS': - logger.log(u"Search string: {0}".format(search_string), logger.DEBUG) + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) search_params['search'] = search_string else: search_params.pop('search', '') try: jdata = self.get_url(self.urls['search'], params=search_params, returns='json') except ValueError: - logger.log("No data returned from provider", logger.DEBUG) + logger.log('No data returned from provider', logger.DEBUG) continue - + if not jdata: - logger.log(u"No data returned from provider", logger.DEBUG) + logger.log('No data returned from provider', logger.DEBUG) continue - + error = jdata.get('error') if error: - logger.log(u"{}".format(error), logger.DEBUG) + logger.log('{0}'.format(error), logger.DEBUG) return results try: if jdata['0']['total_results'] == 0: - logger.log(u"Provider has no results for this search", logger.DEBUG) + logger.log('Provider has no results for this search', logger.DEBUG) continue except StandardError: continue for i in jdata: try: - title = jdata[i]["release_name"] - download_url = jdata[i]["download_url"] + title = jdata[i]['release_name'] + download_url = jdata[i]['download_url'] if not all([title, download_url]): continue - seeders = jdata[i]["seeders"] - leechers = jdata[i]["leechers"] + seeders = jdata[i]['seeders'] + leechers = jdata[i]['leechers'] if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log(u"Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. Seeders: {1})'.format (title, seeders), logger.DEBUG) continue - torrent_size = str(jdata[i]["size"]) + ' MB' + torrent_size = str(jdata[i]['size']) + ' MB' size = convert_size(torrent_size) or -1 - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } if mode != 'RSS': - logger.log(u"Found result: %s with %s seeders and %s leechers" % (title, seeders, leechers), logger.DEBUG) + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) items.append(item) - except StandardError: + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. 
Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) continue results += items diff --git a/sickbeard/providers/hdbits.py b/sickbeard/providers/hdbits.py index f14d5ad5e3..fa22dfcdb9 100644 --- a/sickbeard/providers/hdbits.py +++ b/sickbeard/providers/hdbits.py @@ -1,21 +1,21 @@ # coding=utf-8 # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . + +from __future__ import unicode_literals import datetime import json @@ -32,7 +32,7 @@ class HDBitsProvider(TorrentProvider): def __init__(self): - TorrentProvider.__init__(self, "HDBits") + TorrentProvider.__init__(self, 'HDBits') self.username = None self.passkey = None @@ -49,24 +49,24 @@ def __init__(self): def _check_auth(self): if not self.username or not self.passkey: - raise AuthException("Your authentication credentials for " + self.name + " are missing, check your config.") + raise AuthException('Your authentication credentials for ' + self.name + ' are missing, check your config.') return True - def _checkAuthFromData(self, parsedJSON): + def _check_auth_from_data(self, parsed_json): - if 'status' in parsedJSON and 'message' in parsedJSON: - if parsedJSON.get('status') == 5: - logger.log(u"Invalid username or password. Check your settings", logger.WARNING) + if 'status' in parsed_json and 'message' in parsed_json: + if parsed_json.get('status') == 5: + logger.log('Invalid username or password. 
Check your settings', logger.WARNING) return True def _get_season_search_strings(self, ep_obj): - season_search_string = [self._make_post_data_JSON(show=ep_obj.show, season=ep_obj)] + season_search_string = [self._make_post_data_json(show=ep_obj.show, season=ep_obj)] return season_search_string def _get_episode_search_strings(self, ep_obj, add_string=''): - episode_search_string = [self._make_post_data_JSON(show=ep_obj.show, episode=ep_obj)] + episode_search_string = [self._make_post_data_json(show=ep_obj.show, episode=ep_obj)] return episode_search_string def _get_title_and_url(self, item): @@ -80,19 +80,19 @@ def search(self, search_params, age=0, ep_obj=None): # FIXME results = [] - logger.log(u"Search string: %s" % search_params, logger.DEBUG) + logger.log('Search string: {0}'.format(search_params), logger.DEBUG) self._check_auth() - parsedJSON = self.get_url(self.urls['search'], post_data=search_params, returns='json') - if not parsedJSON: + parsed_json = self.get_url(self.urls['search'], post_data=search_params, returns='json') + if not parsed_json: return [] - if self._checkAuthFromData(parsedJSON): - if parsedJSON and 'data' in parsedJSON: - items = parsedJSON['data'] + if self._check_auth_from_data(parsed_json): + if parsed_json and 'data' in parsed_json: + items = parsed_json['data'] else: - logger.log(u"Resulting JSON from provider isn't correct, not parsing it", logger.ERROR) + logger.log("Resulting JSON from provider isn't correct, not parsing it", logger.ERROR) items = [] for item in items: @@ -106,7 +106,7 @@ def find_propers(self, search_date=None): search_terms = [' proper ', ' repack '] for term in search_terms: - for item in self.search(self._make_post_data_JSON(search_term=term)): + for item in self.search(self._make_post_data_json(search_term=term)): if item['utadded']: try: result_date = datetime.datetime.fromtimestamp(int(item['utadded'])) @@ -120,7 +120,7 @@ def find_propers(self, search_date=None): return results - def _make_post_data_JSON(self, show=None, episode=None, season=None, search_term=None): + def _make_post_data_json(self, show=None, episode=None, season=None, search_term=None): post_data = { 'username': self.username, @@ -143,7 +143,7 @@ def _make_post_data_JSON(self, show=None, episode=None, season=None, search_term elif show.anime: post_data['tvdb'] = { 'id': show.indexerid, - 'episode': "%i" % int(episode.scene_absolute_number) + 'episode': '%i' % int(episode.scene_absolute_number) } else: post_data['tvdb'] = { @@ -161,7 +161,7 @@ def _make_post_data_JSON(self, show=None, episode=None, season=None, search_term elif show.anime: post_data['tvdb'] = { 'id': show.indexerid, - 'season': "%d" % season.scene_absolute_number, + 'season': '%d' % season.scene_absolute_number, } else: post_data['tvdb'] = { @@ -181,13 +181,14 @@ def _getRSSData(self): results = [] try: - parsedJSON = self.provider.getURL(self.provider.urls['rss'], post_data=self.provider._make_post_data_JSON(), returns='json') + parsed_json = self.provider.getURL(self.provider.urls['rss'], post_data=self.provider._make_post_data_json(), returns='json') - if self.provider._checkAuthFromData(parsedJSON): - results = parsedJSON['data'] + if self.provider._check_auth_from_data(parsed_json): + results = parsed_json['data'] except Exception: pass return {'entries': results} + provider = HDBitsProvider() diff --git a/sickbeard/providers/hdspace.py b/sickbeard/providers/hdspace.py index 24f26183a0..9730291e00 100644 --- a/sickbeard/providers/hdspace.py +++ b/sickbeard/providers/hdspace.py @@ -2,30 +2,32 
@@ # Author: Idan Gutman # Modified by jkaberg, https://github.com/jkaberg for SceneAccess # Modified by 7ca for HDSpace - # -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . + +from __future__ import unicode_literals import re -from requests.utils import dict_from_cookiejar -from bs4 import BeautifulSoup +import traceback +from requests.utils import dict_from_cookiejar from requests.compat import quote_plus from sickbeard import logger, tvcache +from sickbeard.bs4_parser import BS4Parser from sickrage.helper.common import convert_size, try_int from sickrage.providers.torrent.TorrentProvider import TorrentProvider @@ -35,7 +37,7 @@ class HDSpaceProvider(TorrentProvider): # pylint: disable=too-many-instance-att def __init__(self): - TorrentProvider.__init__(self, "HDSpace") + TorrentProvider.__init__(self, 'HDSpace') self.username = None self.password = None @@ -44,10 +46,10 @@ def __init__(self): self.cache = tvcache.TVCache(self, min_time=10) # only poll HDSpace every 10 minutes max - self.urls = {'base_url': u'https://hd-space.org/', - 'login': u'https://hd-space.org/index.php?page=login', - 'search': u'https://hd-space.org/index.php?page=torrents&search=%s&active=1&options=0', - 'rss': u'https://hd-space.org/rss_torrents.php?feed=dl'} + self.urls = {'base_url': 'https://hd-space.org/', + 'login': 'https://hd-space.org/index.php?page=login', + 'search': 'https://hd-space.org/index.php?page=torrents&search=%s&active=1&options=0', + 'rss': 'https://hd-space.org/rss_torrents.php?feed=dl'} self.categories = [15, 21, 22, 24, 25, 40] # HDTV/DOC 1080/720, bluray, remux self.urls['search'] += '&category=' @@ -61,7 +63,7 @@ def __init__(self): def _check_auth(self): if not self.username or not self.password: - logger.log(u"Invalid username or password. Check your settings", logger.WARNING) + logger.log('Invalid username or password. Check your settings', logger.WARNING) return True @@ -77,11 +79,11 @@ def login(self): response = self.get_url(self.urls['login'], post_data=login_params, returns='text') if not response: - logger.log(u"Unable to connect to provider", logger.WARNING) + logger.log('Unable to connect to provider', logger.WARNING) return False if re.search('Password Incorrect', response): - logger.log(u"Invalid username or password. Check your settings", logger.WARNING) + logger.log('Invalid username or password. 
Check your settings', logger.WARNING) return False return True @@ -93,7 +95,8 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] - logger.log(u"Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_strings[mode]: if mode != 'RSS': search_url = self.urls['search'] % (quote_plus(search_string.replace('.', ' ')),) @@ -101,12 +104,12 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man search_url = self.urls['search'] % '' if mode != 'RSS': - logger.log(u"Search string: {}".format(search_string.decode("utf-8")), + logger.log('Search string: {0}'.format(search_string.decode('utf-8')), logger.DEBUG) data = self.get_url(search_url, returns='text') if not data or 'please try later' in data: - logger.log(u"No data returned from provider", logger.DEBUG) + logger.log('No data returned from provider', logger.DEBUG) continue # Search result page contains some invalid html that prevents html parser from returning all data. @@ -116,12 +119,12 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man data = data.split('
')[1] index = data.index(' # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . + +from __future__ import unicode_literals import re +import traceback + from requests.compat import quote_plus from requests.utils import dict_from_cookiejar @@ -33,7 +35,7 @@ class HDTorrentsProvider(TorrentProvider): # pylint: disable=too-many-instance- def __init__(self): - TorrentProvider.__init__(self, "HDTorrents") + TorrentProvider.__init__(self, 'HDTorrents') self.username = None self.password = None @@ -49,7 +51,7 @@ def __init__(self): self.url = self.urls['base_url'] - self.categories = "&category[]=59&category[]=60&category[]=30&category[]=38" + self.categories = '&category[]=59&category[]=60&category[]=30&category[]=38' self.proper_strings = ['PROPER', 'REPACK'] self.cache = tvcache.TVCache(self, min_time=30) # only poll HDTorrents every 30 minutes max @@ -57,7 +59,7 @@ def __init__(self): def _check_auth(self): if not self.username or not self.password: - logger.log(u"Invalid username or password. Check your settings", logger.WARNING) + logger.log('Invalid username or password. Check your settings', logger.WARNING) return True @@ -71,11 +73,11 @@ def login(self): response = self.get_url(self.urls['login'], post_data=login_params, returns='text') if not response: - logger.log(u"Unable to connect to provider", logger.WARNING) + logger.log('Unable to connect to provider', logger.WARNING) return False if re.search('You need cookies enabled to log in.', response): - logger.log(u"Invalid username or password. Check your settings", logger.WARNING) + logger.log('Invalid username or password. 
Check your settings', logger.WARNING) return False return True @@ -87,12 +89,12 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] - logger.log(u"Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': search_url = self.urls['search'] % (quote_plus(search_string), self.categories) - logger.log(u"Search string: {}".format(search_string.decode("utf-8")), + logger.log('Search string: {0}'.format(search_string.decode('utf-8')), logger.DEBUG) else: search_url = self.urls['rss'] % self.categories @@ -102,11 +104,11 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man data = self.get_url(search_url, returns='text') if not data or 'please try later' in data: - logger.log(u"No data returned from provider", logger.DEBUG) + logger.log('No data returned from provider', logger.DEBUG) continue if data.find('No torrents here') != -1: - logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG) + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue # Search result page contains some invalid html that prevents html parser from returning all data. @@ -115,14 +117,14 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man try: index = data.lower().index('. +# along with Medusa. If not, see . + +from __future__ import unicode_literals import re import traceback + from requests.utils import dict_from_cookiejar from sickbeard import logger, tvcache @@ -31,7 +34,7 @@ class HoundDawgsProvider(TorrentProvider): # pylint: disable=too-many-instance- def __init__(self): - TorrentProvider.__init__(self, "HoundDawgs") + TorrentProvider.__init__(self, 'HoundDawgs') self.username = None self.password = None @@ -49,19 +52,19 @@ def __init__(self): self.url = self.urls['base_url'] self.search_params = { - "filter_cat[85]": 1, - "filter_cat[58]": 1, - "filter_cat[57]": 1, - "filter_cat[74]": 1, - "filter_cat[92]": 1, - "filter_cat[93]": 1, - "order_by": "s3", - "order_way": "desc", - "type": '', - "userid": '', - "searchstr": '', - "searchimdb": '', - "searchtags": '' + 'filter_cat[85]': 1, + 'filter_cat[58]': 1, + 'filter_cat[57]': 1, + 'filter_cat[74]': 1, + 'filter_cat[92]': 1, + 'filter_cat[93]': 1, + 'order_by': 's3', + 'order_way': 'desc', + 'type': '', + 'userid': '', + 'searchstr': '', + 'searchimdb': '', + 'searchtags': '' } self.cache = tvcache.TVCache(self) @@ -80,13 +83,13 @@ def login(self): self.get_url(self.urls['base_url'], returns='text') response = self.get_url(self.urls['login'], post_data=login_params, returns='text') if not response: - logger.log(u"Unable to connect to provider", logger.WARNING) + logger.log('Unable to connect to provider', logger.WARNING) return False if re.search('Dit brugernavn eller kodeord er forkert.', response) \ or re.search('Login :: HoundDawgs', response) \ or re.search('Dine cookies er ikke aktiveret.', response): - logger.log(u"Invalid username or password. Check your settings", logger.WARNING) + logger.log('Invalid username or password. 
Check your settings', logger.WARNING) return False return True @@ -98,32 +101,32 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] - logger.log(u"Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log(u"Search string: {}".format(search_string.decode("utf-8")), + logger.log('Search string: {0}'.format(search_string.decode('utf-8')), logger.DEBUG) self.search_params['searchstr'] = search_string data = self.get_url(self.urls['search'], params=self.search_params, returns='text') if not data: - logger.log(u'URL did not return data', logger.DEBUG) + logger.log('URL did not return data', logger.DEBUG) continue - strTableStart = "
Date: Thu, 9 Jun 2016 19:27:41 +0200
Subject: [PATCH 33/85] Added unicode_literals to GenericProvider (#677)

* Added unicode_literals to GenericProvider

* Also adapted all providers to make use of the future import unicode_literals

* Removed the decode()/encode()

* Cleaned up some double quotes to single quotes

* Added proper exceptions for the provider result items

* Some logging cleanup using format()

* Now really remove the .decode()s

* Also removed the .encode()s

* Fixed issues introduced by a search/replace

* Fixed docstrings
---
 sickbeard/providers/abnormal.py       |   2 +-
 sickbeard/providers/alpharatio.py     |   2 +-
 sickbeard/providers/anizb.py          |  10 +-
 sickbeard/providers/bithdtv.py        |   4 +-
 sickbeard/providers/bitsnoop.py       |   3 +-
 sickbeard/providers/bluetigers.py     |   3 +-
 sickbeard/providers/btdigg.py         |   3 +-
 sickbeard/providers/cpasbien.py       |   2 +-
 sickbeard/providers/danishbits.py     |   8 +-
 sickbeard/providers/elitetorrent.py   |   6 +-
 sickbeard/providers/extratorrent.py   |   3 +-
 sickbeard/providers/freshontv.py      |   2 +-
 sickbeard/providers/gftracker.py      |   2 +-
 sickbeard/providers/hdspace.py        |   2 +-
 sickbeard/providers/hdtorrents.py     |   2 +-
 sickbeard/providers/hounddawgs.py     |   2 +-
 sickbeard/providers/ilovetorrents.py  |  52 ++++-----
 sickbeard/providers/iptorrents.py     |  65 ++++++-----
 sickbeard/providers/kat.py            |  78 +++++++------
 sickbeard/providers/morethantv.py     |  33 +++---
 sickbeard/providers/newpct.py         |  22 ++--
 sickbeard/providers/newznab.py        |  18 ++-
 sickbeard/providers/norbits.py        |  62 +++++-----
 sickbeard/providers/nyaatorrents.py   |  17 +--
 sickbeard/providers/omgwtfnzbs.py     |  23 ++--
 sickbeard/providers/pretome.py        |  66 ++++++-----
 sickbeard/providers/rarbg.py          | 104 ++++++++---------
 sickbeard/providers/rsstorrent.py     |  12 +-
 sickbeard/providers/scc.py            |  27 +++--
 sickbeard/providers/sceneelite.py     |  93 +++++++--------
 sickbeard/providers/scenetime.py      |  53 +++++----
 sickbeard/providers/speedcd.py        |  26 +++--
 sickbeard/providers/t411.py           |  33 +++---
 sickbeard/providers/thepiratebay.py   |  89 +++++++-------
 sickbeard/providers/tntvillage.py     | 159 +++++++++++++-------------
 sickbeard/providers/tokyotoshokan.py  |  45 ++++----
 sickbeard/providers/torrentbytes.py   |  79 +++++++------
 sickbeard/providers/torrentday.py     |  62 +++++-----
 sickbeard/providers/torrentleech.py   |  85 +++++++-------
 sickbeard/providers/torrentz.py       |  26 +++--
 sickbeard/providers/transmitthenet.py | 108 +++++++++--------
 sickbeard/providers/tvchaosuk.py      |  21 ++--
 sickbeard/providers/womble.py         |   2 +-
 sickbeard/providers/xthor.py          |  35 +++---
 sickbeard/providers/zooqle.py         |   5 +-
 sickrage/providers/GenericProvider.py |  47 ++++----
 46 files changed, 854 insertions(+), 749 deletions(-)

diff --git a/sickbeard/providers/abnormal.py b/sickbeard/providers/abnormal.py
index d550cbad92..35d7c55229 100644
--- a/sickbeard/providers/abnormal.py
+++ b/sickbeard/providers/abnormal.py
@@ -102,7 +102,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
 
             for search_string in search_strings[mode]:
                 if mode != 'RSS':
-                    logger.log('Search string: {0}'.format(search_string.decode('utf-8')),
+                    logger.log('Search string: {0}'.format(search_string),
                                logger.DEBUG)
 
                 # Sorting: Available parameters: ReleaseName, Seeders, Leechers, Snatched, Size
diff --git a/sickbeard/providers/alpharatio.py b/sickbeard/providers/alpharatio.py
index 5306d4a247..e75cd9530a 100644
--- a/sickbeard/providers/alpharatio.py
+++ b/sickbeard/providers/alpharatio.py
@@ -115,7 +115,7 @@ def process_column_header(td):
             for search_string in search_strings[mode]:
                 if mode != 'RSS':
                     logger.log('Search string: {search}'.format
-                               (search=search_string.decode('utf-8')), logger.DEBUG)
+                               (search=search_string), logger.DEBUG)
 
                 search_params['searchstr'] = search_string
                 search_url = self.urls['search']
diff --git a/sickbeard/providers/anizb.py b/sickbeard/providers/anizb.py
index 5df071bdd8..2ee9d7403f 100644
--- a/sickbeard/providers/anizb.py
+++ b/sickbeard/providers/anizb.py
@@ -31,7 +31,7 @@
 
 class Anizb(NZBProvider):  # pylint: disable=too-many-instance-attributes
-    '''Nzb Provider using the open api of anizb.org for daily (rss) and backlog/forced searches'''
+    """Nzb Provider using the open api of anizb.org for daily (rss) and backlog/forced searches"""
 
     def __init__(self):
 
         # Provider Init
@@ -52,11 +52,11 @@ def __init__(self):
         self.cache = tvcache.TVCache(self)
 
     def _get_size(self, item):
-        '''Override the default _get_size to prevent it from extracting using it the default tags'''
+        """Override the default _get_size to prevent it from extracting using it the default tags"""
         return try_int(item.get('size'))
 
     def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-many-locals
-        '''Start searching for anime using the provided search_strings. Used for backlog and daily'''
+        """Start searching for anime using the provided search_strings. Used for backlog and daily"""
         results = []
 
         if self.show and not self.show.is_anime:
@@ -67,8 +67,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
 
             for search_string in search_strings[mode]:
                 if mode != 'RSS':
-                    logger.log('Search string: {0}'.format(search_string.decode('utf-8')),
-                               logger.DEBUG)
+                    logger.log('Search string: {0}'.format(search_string), logger.DEBUG)
 
                 try:
                     search_url = (self.urls['rss'], self.urls['api'] + search_string)[mode != 'RSS']
@@ -110,5 +109,4 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
 
         return results
 
-
 provider = Anizb()
diff --git a/sickbeard/providers/bithdtv.py b/sickbeard/providers/bithdtv.py
index 267a71113e..7b90e13c4b 100644
--- a/sickbeard/providers/bithdtv.py
+++ b/sickbeard/providers/bithdtv.py
@@ -162,8 +162,8 @@ def login(self):
             return True
 
         login_params = {
-            'username': self.username.encode('utf-8'),
-            'password': self.password.encode('utf-8'),
+            'username': self.username,
+            'password': self.password,
         }
 
         response = self.get_url(self.urls['login'], post_data=login_params, returns='text')
diff --git a/sickbeard/providers/bitsnoop.py b/sickbeard/providers/bitsnoop.py
index 4522a8d437..c6af13b84f 100644
--- a/sickbeard/providers/bitsnoop.py
+++ b/sickbeard/providers/bitsnoop.py
@@ -60,8 +60,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
 
             for search_string in search_strings[mode]:
                 if mode != 'RSS':
-                    logger.log('Search string: {0}'.format(search_string.decode('utf-8')),
-                               logger.DEBUG)
+                    logger.log('Search string: {0}'.format(search_string), logger.DEBUG)
 
                 try:
                     search_url = (self.urls['rss'], self.urls['search'] + search_string + '/s/d/1/?fmt=rss')[mode != 'RSS']
diff --git a/sickbeard/providers/bluetigers.py b/sickbeard/providers/bluetigers.py
index 93af2e4a18..2e6ce79c4a 100644
--- a/sickbeard/providers/bluetigers.py
+++ b/sickbeard/providers/bluetigers.py
@@ -92,8 +92,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
 
             for search_string in search_strings[mode]:
                 if mode != 'RSS':
-                    logger.log('Search string: {0}'.format(search_string.decode('utf-8')),
-                               logger.DEBUG)
+                    logger.log('Search string: {0}'.format(search_string), logger.DEBUG)
 
                 self.search_params['search'] = search_string
diff --git a/sickbeard/providers/btdigg.py b/sickbeard/providers/btdigg.py
index c41fe5cd76..4b6a0abe92 100644
--- a/sickbeard/providers/btdigg.py
+++ b/sickbeard/providers/btdigg.py
@@ -64,8 +64,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                 search_params['q'] = search_string
                 if mode != 'RSS':
                     search_params['order'] = 0
-                    logger.log('Search string: {0}'.format(search_string.decode('utf-8')),
-                               logger.DEBUG)
+                    logger.log('Search string: {0}'.format(search_string), logger.DEBUG)
                 else:
                     search_params['order'] = 2
                 if self.custom_url:
diff --git a/sickbeard/providers/cpasbien.py b/sickbeard/providers/cpasbien.py
index d72a4ff411..039d2d141c 100644
--- a/sickbeard/providers/cpasbien.py
+++ b/sickbeard/providers/cpasbien.py
@@ -51,7 +51,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
 
             for search_string in search_strings[mode]:
                 if mode != 'RSS':
-                    logger.log('Search string: {0}'.format(search_string.decode('utf-8')),
+                    logger.log('Search string: {0}'.format(search_string),
                                logger.DEBUG)
                     search_url = self.url + '/recherche/' + search_string.replace('.', '-').replace(' ', '-') + '.html,trie-seeds-d'
                 else:
diff --git a/sickbeard/providers/danishbits.py b/sickbeard/providers/danishbits.py
index e4d6415f2a..948e82e077 100644
--- a/sickbeard/providers/danishbits.py
+++ b/sickbeard/providers/danishbits.py
@@ -62,8 +62,8 @@ def login(self):
             return True
 
         login_params = {
-            'username': self.username.encode('utf-8'),
-            'password': self.password.encode('utf-8'),
+            'username': self.username,
+            'password': self.password,
             'keeplogged': 1,
             'langlang': '',
             'login': 'Login',
@@ -103,7 +103,7 @@ def process_column_header(td):
                 result = td.img.get('title')
             if not result:
                 result = td.get_text(strip=True)
-            return result.encode('utf-8')
+            return result
 
         for mode in search_strings:
             items = []
@@ -112,7 +112,7 @@
             for search_string in search_strings[mode]:
                 if mode != 'RSS':
-                    logger.log('Search string: {0}'.format(search_string.decode('utf-8')),
+                    logger.log('Search string: {0}'.format(search_string),
                                logger.DEBUG)
 
                 search_params['search'] = search_string
diff --git a/sickbeard/providers/elitetorrent.py b/sickbeard/providers/elitetorrent.py
index 4fc3b1a085..396b87a77f 100644
--- a/sickbeard/providers/elitetorrent.py
+++ b/sickbeard/providers/elitetorrent.py
@@ -50,7 +50,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
         results = []
         lang_info = '' if not ep_obj or not ep_obj.show else ep_obj.show.lang
 
-        '''
+        """
        Search query:
        http://www.elitetorrent.net/torrents.php?cat=4&modo=listado&orden=fecha&pag=1&buscar=fringe

        cat = 4 => Shows
        orden = fecha => order
        buscar => Search show
        pag = 1 => page number
-        '''
+        """
 
         search_params = {
             'cat': 4,
@@ -80,7 +80,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
 
             for search_string in search_strings[mode]:
                 if mode != 'RSS':
-                    logger.log('Search string: {0}'.format(search_string.decode('utf-8')),
+                    logger.log('Search string: {0}'.format(search_string),
                                logger.DEBUG)
 
                 search_string = re.sub(r'S0*(\d*)E(\d*)', r'\1x\2', search_string)
diff --git a/sickbeard/providers/extratorrent.py b/sickbeard/providers/extratorrent.py
index 1cb1f4a0ce..ef71dd763c 100644
--- a/sickbeard/providers/extratorrent.py
+++ b/sickbeard/providers/extratorrent.py
@@ -60,8 +60,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
             logger.log('Search Mode: {0}'.format(mode), logger.DEBUG)
             for search_string in search_strings[mode]:
                 if mode != 'RSS':
-                    logger.log('Search string: {0}'.format(search_string.decode('utf-8')),
-                               logger.DEBUG)
+                    logger.log('Search string: {0}'.format(search_string), logger.DEBUG)
 
                 self.search_params.update({'type': ('search', 'rss')[mode == 'RSS'], 'search': search_string})
                 search_url = self.urls['rss'] if not self.custom_url else self.urls['rss'].replace(self.urls['index'], self.custom_url)
diff --git a/sickbeard/providers/freshontv.py b/sickbeard/providers/freshontv.py
index 59b7c10ed1..3ccc1f490c 100644
--- a/sickbeard/providers/freshontv.py
+++ b/sickbeard/providers/freshontv.py
@@ -116,7 +116,7 @@ def search(self, search_params, age=0, ep_obj=None):  # pylint: disable=too-many
 
             for search_string in search_params[mode]:
                 if mode != 'RSS':
-                    logger.log('Search string: {0}'.format(search_string.decode('utf-8')),
+                    logger.log('Search string: {0}'.format(search_string),
                                logger.DEBUG)
 
                 search_url = self.urls['search'] % (freeleech, search_string)
diff --git a/sickbeard/providers/gftracker.py b/sickbeard/providers/gftracker.py
index 28e4943a16..b9458c8f2e 100644
--- a/sickbeard/providers/gftracker.py
+++ b/sickbeard/providers/gftracker.py
@@ -125,7 +125,7 @@ def process_column_header(td):
 
             for search_string in search_strings[mode]:
                 if mode != 'RSS':
-                    logger.log('Search string: {0}'.format(search_string.decode('utf-8')),
+                    logger.log('Search string: {0}'.format(search_string),
                                logger.DEBUG)
 
                 search_params['search'] = search_string
diff --git a/sickbeard/providers/hdspace.py b/sickbeard/providers/hdspace.py
index 9730291e00..1b21e229f8 100644
--- a/sickbeard/providers/hdspace.py
+++ b/sickbeard/providers/hdspace.py
@@ -104,7 +104,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                     search_url = self.urls['search'] % ''
 
                 if mode != 'RSS':
-                    logger.log('Search string: {0}'.format(search_string.decode('utf-8')),
+                    logger.log('Search string: {0}'.format(search_string),
                                logger.DEBUG)
 
                 data = self.get_url(search_url, returns='text')
diff --git a/sickbeard/providers/hdtorrents.py b/sickbeard/providers/hdtorrents.py
index b966126e0c..1734361005 100644
--- a/sickbeard/providers/hdtorrents.py
+++ b/sickbeard/providers/hdtorrents.py
@@ -94,7 +94,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                 if mode != 'RSS':
                     search_url = self.urls['search'] % (quote_plus(search_string), self.categories)
-                    logger.log('Search string: {0}'.format(search_string.decode('utf-8')),
+                    logger.log('Search string: {0}'.format(search_string),
                                logger.DEBUG)
                 else:
                     search_url = self.urls['rss'] % self.categories
diff --git a/sickbeard/providers/hounddawgs.py b/sickbeard/providers/hounddawgs.py
index c2afe626ca..4f5eb9d704 100644
--- a/sickbeard/providers/hounddawgs.py
+++ b/sickbeard/providers/hounddawgs.py
@@ -105,7 +105,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
 
             for search_string in search_strings[mode]:
                 if mode != 'RSS':
-                    logger.log('Search string: {0}'.format(search_string.decode('utf-8')),
+                    logger.log('Search string: {0}'.format(search_string),
                                logger.DEBUG)
 
                 self.search_params['searchstr'] = search_string
diff --git a/sickbeard/providers/ilovetorrents.py b/sickbeard/providers/ilovetorrents.py
index 0e6a027b93..b49b9e88ef 100644
--- a/sickbeard/providers/ilovetorrents.py
+++ b/sickbeard/providers/ilovetorrents.py
@@ -18,6 +18,8 @@
 # You should have received a copy of the GNU General Public License
 # along with SickRage. If not, see <http://www.gnu.org/licenses/>.
 
+from __future__ import unicode_literals
+
 import re
 import traceback
 from requests.compat import urljoin
@@ -26,45 +28,43 @@
 
 from sickbeard import logger, tvcache
 from sickbeard.bs4_parser import BS4Parser
-from sickrage.helper.common import convert_size, try_int
+from sickrage.helper.common import convert_size
 from sickrage.providers.torrent.TorrentProvider import TorrentProvider
 
 
 class ILoveTorrentsProvider(TorrentProvider):  # pylint: disable=too-many-instance-attributes
 
     def __init__(self):
-
+
         # Provider Init
-        TorrentProvider.__init__(self, "ILoveTorrents")
-
+        TorrentProvider.__init__(self, 'ILoveTorrents')
+
         # URLs
         self.url = 'https://www.ilovetorrents.me/'
         self.urls = {
-            'login': urljoin(self.url, "takelogin.php"),
-            'detail': urljoin(self.url, "details.php?id=%s"),
-            'search': urljoin(self.url, "browse.php"),
-            'download': urljoin(self.url, "%s"),
+            'login': urljoin(self.url, 'takelogin.php'),
+            'detail': urljoin(self.url, 'details.php?id=%s'),
+            'search': urljoin(self.url, 'browse.php'),
+            'download': urljoin(self.url, '%s'),
         }
-
         # Credentials
         self.username = None
         self.password = None
-
-        # Torrent Stats
+
+        # Torrent Stats
         self.minseed = None
         self.minleech = None
-
+
         # Proper Strings
-        self.proper_strings = ["PROPER", "REPACK", "REAL"]
+        self.proper_strings = ['PROPER', 'REPACK', 'REAL']
 
         # Cache
         self.cache = tvcache.TVCache(self)
 
-
     def _check_auth(self):
         if not self.username or not self.password:
-            logger.log(u"Invalid username or password. Check your settings", logger.WARNING)
+            logger.log(u'Invalid username or password. Check your settings', logger.WARNING)
 
         return True
@@ -81,11 +81,11 @@ def login(self):
 
         response = self.get_url(self.urls['login'], post_data=login_params, returns='text')
         if not response:
-            logger.log(u"Unable to connect to provider", logger.WARNING)
+            logger.log(u'Unable to connect to provider', logger.WARNING)
             return False
 
         if re.search('Username or password incorrect', response):
-            logger.log(u"Invalid username or password. Check your settings", logger.WARNING)
+            logger.log(u'Invalid username or password. Check your settings', logger.WARNING)
             return False
 
         return True
@@ -95,15 +95,15 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
         if not self.login():
             return results
         search_params = {
-            "cat": 0
+            'cat': 0
         }
 
         for mode in search_strings:
             items = []
-            logger.log(u"Search Mode: {0}".format(mode), logger.DEBUG)
+            logger.log('Search Mode: {0}'.format(mode), logger.DEBUG)
             for search_string in search_strings[mode]:
                 if mode != 'RSS':
-                    logger.log(u"Search string: {0}".format
-                               (search_string.decode("utf-8")), logger.DEBUG)
+                    logger.log('Search string: {0}'.format
+                               (search_string), logger.DEBUG)
 
                 search_params['search'] = search_string
@@ -112,13 +112,13 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                     continue
 
                 try:
-                    with BS4Parser(data, "html.parser") as html:
+                    with BS4Parser(data, 'html.parser') as html:
                         torrent_table = html.find('table', class_='koptekst')
                         torrent_rows = torrent_table('tr') if torrent_table else []
 
                         # Continue only if one Release is found
                         if len(torrent_rows) < 2:
-                            logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
+                            logger.log(u'Data returned from provider does not contain any torrents', logger.DEBUG)
                             continue
 
                         for result in torrent_rows[1:]:
@@ -145,18 +145,18 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                                     logger.log(u"Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format
                                                (title, seeders), logger.DEBUG)
                                 continue
-                            #Use same failsafe as Bitsoup
+                            # Use same failsafe as Bitsoup
                             if seeders >= 32768 or leechers >= 32768:
                                 continue
 
                             item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None}
                             if mode != 'RSS':
-                                logger.log(u"Found result: {0} with {1} seeders and {2} leechers".format(title, seeders, leechers), logger.DEBUG)
+                                logger.log(u'Found result: {0} with {1} seeders and {2} leechers'.format(title, seeders, leechers), logger.DEBUG)
 
                             items.append(item)
                 except Exception:
-                    logger.log(u"Failed parsing provider. Traceback: {0}".format(traceback.format_exc()), logger.WARNING)
+                    logger.log(u'Failed parsing provider. Traceback: {0}'.format(traceback.format_exc()), logger.WARNING)
 
             results += items
diff --git a/sickbeard/providers/iptorrents.py b/sickbeard/providers/iptorrents.py
index 7b3b178863..6adf97079b 100644
--- a/sickbeard/providers/iptorrents.py
+++ b/sickbeard/providers/iptorrents.py
@@ -18,14 +18,18 @@
 # You should have received a copy of the GNU General Public License
 # along with SickRage. If not, see <http://www.gnu.org/licenses/>.
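The hunk below adds the future import that gives this series its name: with unicode_literals in effect, every plain string literal in the module is unicode on Python 2, just as it is on Python 3, which is what makes the removed .decode('utf-8') and .encode('utf-8') round-trips redundant. A minimal standalone sketch of the idea (the values here are made up for illustration; this is not code from the patch):

from __future__ import unicode_literals

# With the future import, this literal is already unicode on Python 2,
# so no .decode('utf-8') is needed before it is logged or formatted.
search_string = 'Some.Show.S01E01'
assert isinstance(search_string, type(u''))

# Explicit positional indices ({0} rather than {}) keep str.format()
# working on Python 2.6, which is why the series also rewrites '{}' as '{0}'.
print('Search string: {0}'.format(search_string))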
+from __future__ import unicode_literals
+
 import re
+import traceback
+
 from requests.utils import dict_from_cookiejar
 
 from sickbeard import logger, tvcache
 from sickbeard.bs4_parser import BS4Parser
 
-from sickrage.helper.exceptions import AuthException, ex
-from sickrage.helper.common import convert_size, try_int
+from sickrage.helper.exceptions import AuthException
+from sickrage.helper.common import convert_size
 from sickrage.providers.torrent.TorrentProvider import TorrentProvider
@@ -33,7 +37,7 @@ class IPTorrentsProvider(TorrentProvider):  # pylint: disable=too-many-instance-
 
     def __init__(self):
 
-        TorrentProvider.__init__(self, "IPTorrents")
+        TorrentProvider.__init__(self, 'IPTorrents')
 
         self.username = None
         self.password = None
@@ -54,7 +58,7 @@ def __init__(self):
     def _check_auth(self):
 
         if not self.username or not self.password:
-            raise AuthException("Your authentication credentials for " + self.name + " are missing, check your config.")
+            raise AuthException('Your authentication credentials for {0} are missing, check your config.'.format(self.name))
 
         return True
@@ -69,17 +73,17 @@ def login(self):
         self.get_url(self.urls['login'], returns='text')
         response = self.get_url(self.urls['login'], post_data=login_params, returns='text')
         if not response:
-            logger.log(u"Unable to connect to provider", logger.WARNING)
+            logger.log('Unable to connect to provider', logger.WARNING)
             return False
 
         # Invalid username and password combination
         if re.search('Invalid username and password combination', response):
-            logger.log(u"Invalid username or password. Check your settings", logger.WARNING)
+            logger.log('Invalid username or password. Check your settings', logger.WARNING)
             return False
 
         # You tried too often, please try again after 2 hours!
         if re.search('You tried too often', response):
-            logger.log(u"You tried too often, please try again after 2 hours! Disable IPTorrents for at least 2 hours", logger.WARNING)
+            logger.log('You tried too often, please try again after 2 hours! Disable IPTorrents for at least 2 hours', logger.WARNING)
             return False
 
         return True
@@ -93,11 +97,11 @@ def search(self, search_params, age=0, ep_obj=None):  # pylint: disable=too-many
 
         for mode in search_params:
             items = []
-            logger.log(u"Search Mode: {}".format(mode), logger.DEBUG)
+            logger.log('Search Mode: {0}'.format(mode), logger.DEBUG)
 
             for search_string in search_params[mode]:
                 if mode != 'RSS':
-                    logger.log(u"Search string: {}".format(search_string.decode("utf-8")),
+                    logger.log('Search string: {0}'.format(search_string),
                                logger.DEBUG)
 
                 # URL with 50 tv-show results, or max 150 if adjusted in IPTorrents profile
@@ -112,11 +116,11 @@ def search(self, search_params, age=0, ep_obj=None):  # pylint: disable=too-many
                     data = re.sub(r'(?im)<button.+?<[/]button>', '', data, 0)
                     with BS4Parser(data, 'html5lib') as html:
                         if not html:
-                            logger.log(u"No data returned from provider", logger.DEBUG)
+                            logger.log('No data returned from provider', logger.DEBUG)
                             continue
 
                         if html.find(text='No Torrents Found!'):
-                            logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
+                            logger.log('Data returned from provider does not contain any torrents', logger.DEBUG)
                             continue
 
                         torrent_table = html.find('table', attrs={'class': 'torrents'})
@@ -124,7 +128,7 @@ def search(self, search_params, age=0, ep_obj=None):  # pylint: disable=too-many
 
                         # Continue only if one Release is found
                         if len(torrents) < 2:
-                            logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
+                            logger.log('Data returned from provider does not contain any torrents', logger.DEBUG)
                             continue
 
                         for result in torrents[1:]:
@@ -135,28 +139,31 @@ def search(self, search_params, age=0, ep_obj=None):  # pylint: disable=too-many
                                 leechers = int(result.find('td', attrs={'class': 'ac t_leechers'}).text)
                                 torrent_size = result('td')[5].text
                                 size = convert_size(torrent_size) or -1
-                            except (AttributeError, TypeError, KeyError):
-                                continue
 
-                            if not all([title, download_url]):
-                                continue
+                                if not all([title, download_url]):
+                                    continue
 
-                            # Filter unseeded torrent
-                            if seeders < min(self.minseed, 1):
-                                if mode != 'RSS':
-                                    logger.log(u"Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format
-                                               (title, seeders), logger.DEBUG)
-                                continue
-
-                            item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None}
-                            if mode != 'RSS':
-                                logger.log(u"Found result: %s with %s seeders and %s leechers" % (title, seeders, leechers), logger.DEBUG)
+                                # Filter unseeded torrent
+                                if seeders < min(self.minseed, 1):
+                                    if mode != 'RSS':
+                                        logger.log("Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format
+                                                   (title, seeders), logger.DEBUG)
+                                    continue
 
-                            items.append(item)
+                                item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders,
+                                        'leechers': leechers, 'pubdate': None, 'hash': None}
+                                if mode != 'RSS':
+                                    logger.log('Found result: {0} with {1} seeders and {2} leechers'.format(title, seeders, leechers), logger.DEBUG)
 
-                    except Exception as e:
-                        logger.log(u"Failed parsing provider. Error: %r" % ex(e), logger.ERROR)
+                                items.append(item)
+                            except (AttributeError, TypeError, KeyError, ValueError, IndexError):
+                                logger.log('Failed parsing provider. Traceback: {0!r}'.format
+                                           (traceback.format_exc()), logger.ERROR)
+                                continue
+                except Exception:
+                    logger.log('Failed parsing provider. Traceback: {0!r}'.format
+                               (traceback.format_exc()), logger.ERROR)
 
             results += items
 
         return results
diff --git a/sickbeard/providers/kat.py b/sickbeard/providers/kat.py
index 7d93428388..10b6fc6cc3 100644
--- a/sickbeard/providers/kat.py
+++ b/sickbeard/providers/kat.py
@@ -18,6 +18,7 @@
 
 from __future__ import unicode_literals
 
+import traceback
 import validators
 from requests.compat import urljoin
 from sickbeard.bs4_parser import BS4Parser
@@ -33,7 +34,7 @@ class KatProvider(TorrentProvider):  # pylint: disable=too-many-instance-attribu
 
     def __init__(self):
 
-        TorrentProvider.__init__(self, "KickAssTorrents")
+        TorrentProvider.__init__(self, 'KickAssTorrents')
 
         self.public = True
 
@@ -41,95 +42,98 @@ def __init__(self):
         self.minseed = None
         self.minleech = None
 
-        self.url = "https://kat.cr"
-        self.urls = {"search": urljoin(self.url, "%s/")}
+        self.url = 'https://kat.cr'
+        self.urls = {'search': urljoin(self.url, '%s/')}
 
         self.custom_url = None
 
-        self.cache = tvcache.TVCache(self, search_params={"RSS": ["tv", "anime"]})
+        self.cache = tvcache.TVCache(self, search_params={'RSS': ['tv', 'anime']})
 
     def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-many-branches, too-many-locals, too-many-statements
         results = []
 
         anime = (self.show and self.show.anime) or (ep_obj and ep_obj.show and ep_obj.show.anime) or False
         search_params = {
-            "q": "",
-            "field": "seeders",
-            "sorder": "desc",
-            "rss": 1,
-            "category": ("tv", "anime")[anime]
+            'q': '',
+            'field': 'seeders',
+            'sorder': 'desc',
+            'rss': 1,
+            'category': ('tv', 'anime')[anime]
         }
 
         for mode in search_strings:
             items = []
-            logger.log("Search Mode: {}".format(mode), logger.DEBUG)
+            logger.log('Search Mode: {0}'.format(mode), logger.DEBUG)
             for search_string in search_strings[mode]:
-                search_params["q"] = search_string if mode != "RSS" else ""
-                search_params["field"] = "seeders" if mode != "RSS" else "time_add"
+                search_params['q'] = search_string if mode != 'RSS' else ''
+                search_params['field'] = 'seeders' if mode != 'RSS' else 'time_add'
 
-                if mode != "RSS":
-                    logger.log("Search string: {}".format(search_string.decode("utf-8")),
+                if mode != 'RSS':
+                    logger.log('Search string: {0}'.format(search_string),
                                logger.DEBUG)
 
-                search_url = self.urls["search"] % ("usearch" if mode != "RSS" else search_string)
+                search_url = self.urls['search'] % ('usearch' if mode != 'RSS' else search_string)
                 if self.custom_url:
                     if not validators.url(self.custom_url):
-                        logger.log("Invalid custom url: {}".format(self.custom_url), logger.WARNING)
+                        logger.log('Invalid custom url: {0}'.format(self.custom_url), logger.WARNING)
                         return results
                     search_url = urljoin(self.custom_url, search_url.split(self.url)[1])
 
-                data = self.get_url(search_url, params=search_params, returns="text")
+                data = self.get_url(search_url, params=search_params, returns='text')
                 if not data:
-                    logger.log("URL did not return data, maybe try a custom url, or a different one", logger.DEBUG)
+                    logger.log('URL did not return data, maybe try a custom url, or a different one', logger.DEBUG)
                     continue
 
-                if not data.startswith("<?xml"):
+                if not data.startswith('<?xml'):
diff --git a/sickbeard/providers/morethantv.py b/sickbeard/providers/morethantv.py
--- a/sickbeard/providers/morethantv.py
+++ b/sickbeard/providers/morethantv.py
 # along with SickRage. If not, see <http://www.gnu.org/licenses/>.
 
+from __future__ import unicode_literals
+
 import re
+import traceback
+
 from requests.compat import urljoin
 from requests.utils import dict_from_cookiejar
@@ -36,7 +39,7 @@ class MoreThanTVProvider(TorrentProvider):  # pylint: disable=too-many-instance-
     def __init__(self):
 
         # Provider Init
-        TorrentProvider.__init__(self, "MoreThanTV")
+        TorrentProvider.__init__(self, 'MoreThanTV')
 
         # Credentials
         self.username = None
@@ -65,7 +68,7 @@ def __init__(self):
     def _check_auth(self):
 
         if not self.username or not self.password:
-            raise AuthException("Your authentication credentials for " + self.name + " are missing, check your config.")
+            raise AuthException('Your authentication credentials for {0} are missing, check your config.'.format(self.name))
 
         return True
@@ -82,11 +85,11 @@ def login(self):
 
         response = self.get_url(self.urls['login'], post_data=login_params, returns='text')
         if not response:
-            logger.log(u"Unable to connect to provider", logger.WARNING)
+            logger.log('Unable to connect to provider', logger.WARNING)
             return False
 
         if re.search('Your username or password was incorrect.', response):
-            logger.log(u"Invalid username or password. Check your settings", logger.WARNING)
+            logger.log('Invalid username or password. Check your settings', logger.WARNING)
             return False
 
         return True
@@ -119,19 +122,19 @@ def process_column_header(td):
 
         for mode in search_strings:
             items = []
-            logger.log(u"Search Mode: {}".format(mode), logger.DEBUG)
+            logger.log('Search Mode: {0}'.format(mode), logger.DEBUG)
 
             for search_string in search_strings[mode]:
                 if mode != 'RSS':
-                    logger.log(u"Search string: {}".format(search_string.decode("utf-8")),
+                    logger.log('Search string: {0}'.format(search_string),
                                logger.DEBUG)
 
                 search_params['searchstr'] = search_string
 
                 data = self.get_url(self.urls['search'], params=search_params, returns='text')
                 if not data:
-                    logger.log(u"No data returned from provider", logger.DEBUG)
+                    logger.log('No data returned from provider', logger.DEBUG)
                     continue
 
                 with BS4Parser(data, 'html5lib') as html:
@@ -140,7 +143,7 @@ def process_column_header(td):
 
                     # Continue only if at least one Release is found
                     if len(torrent_rows) < 2:
-                        logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
+                        logger.log('Data returned from provider does not contain any torrents', logger.DEBUG)
                         continue
 
                     labels = [process_column_header(label) for label in torrent_rows[0]('td')]
@@ -164,23 +167,25 @@ def process_column_header(td):
                             # Filter unseeded torrent
                             if seeders < min(self.minseed, 1):
                                 if mode != 'RSS':
-                                    logger.log(u"Discarding torrent because it doesn't meet the"
-                                               u" minimum seeders: {0}. Seeders: {1})".format
+                                    logger.log("Discarding torrent because it doesn't meet the"
+                                               " minimum seeders: {0}. Seeders: {1})".format
                                                (title, seeders), logger.DEBUG)
                                 continue
 
                             torrent_size = cells[labels.index('Size')].get_text(strip=True)
                             size = convert_size(torrent_size, units=units) or -1
 
-                            item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None}
+                            item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders,
+                                    'leechers': leechers, 'pubdate': None, 'hash': None}
                             if mode != 'RSS':
-                                logger.log(u"Found result: {0} with {1} seeders and {2} leechers".format
+                                logger.log('Found result: {0} with {1} seeders and {2} leechers'.format
                                            (title, seeders, leechers), logger.DEBUG)
 
                             items.append(item)
-                        except StandardError:
+                        except (AttributeError, TypeError, KeyError, ValueError, IndexError):
+                            logger.log('Failed parsing provider. Traceback: {0!r}'.format
+                                       (traceback.format_exc()), logger.ERROR)
                             continue
-
             results += items
 
         return results
diff --git a/sickbeard/providers/newpct.py b/sickbeard/providers/newpct.py
index b278a81b89..5a980739d4 100644
--- a/sickbeard/providers/newpct.py
+++ b/sickbeard/providers/newpct.py
@@ -19,8 +19,10 @@
 # along with SickRage. If not, see <http://www.gnu.org/licenses/>.
 
 from __future__ import unicode_literals
+
 from requests.compat import urljoin
 import re
+import traceback
 
 from sickbeard import helpers
 from sickbeard import logger, tvcache
@@ -69,7 +71,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
 
         for mode in search_strings:
             items = []
-            logger.log('Search Mode: {}'.format(mode), logger.DEBUG)
+            logger.log('Search Mode: {0}'.format(mode), logger.DEBUG)
 
             # Only search if user conditions are true
             if self.onlyspasearch and lang_info != 'es' and mode != 'RSS':
@@ -80,7 +82,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
 
             for search_string in search_strings[mode]:
                 if mode != 'RSS':
-                    logger.log('Search string: {}'.format(search_string.decode('utf-8')),
+                    logger.log('Search string: {0}'.format(search_string),
                                logger.DEBUG)
 
                 search_params['q'] = search_string
@@ -119,10 +121,12 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                             size = convert_size(torrent_size) or -1
                             item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None}
                             if mode != 'RSS':
-                                logger.log('Found result: {}'.format(title), logger.DEBUG)
+                                logger.log('Found result: {0}'.format(title), logger.DEBUG)
 
                             items.append(item)
-                        except (AttributeError, TypeError):
+                        except (AttributeError, TypeError, KeyError, ValueError, IndexError):
+                            logger.log('Failed parsing provider. Traceback: {0!r}'.format
+                                       (traceback.format_exc()), logger.ERROR)
                             continue
 
             results += items
@@ -163,14 +167,14 @@ def download_result(self, result):
             if url_torrent.startswith('http'):
                 self.headers.update({'Referer': '/'.join(url_torrent.split('/')[:3]) + '/'})
 
-            logger.log('Downloading a result from {}'.format(url))
+            logger.log('Downloading a result from {0}'.format(url))
 
             if helpers.download_file(url_torrent, filename, session=self.session, headers=self.headers):
                 if self._verify_download(filename):
-                    logger.log('Saved result to {}'.format(filename), logger.INFO)
+                    logger.log('Saved result to {0}'.format(filename), logger.INFO)
                     return True
             else:
-                logger.log('Could not download {}'.format(url), logger.WARNING)
+                logger.log('Could not download {0}'.format(url), logger.WARNING)
                 helpers.remove_file_failed(filename)
 
         if urls:
@@ -199,8 +203,8 @@ def _processTitle(title):
         # Language
         title = re.sub(r'\[Spanish[^\[]*]', 'SPANISH AUDIO', title, flags=re.IGNORECASE)
         title = re.sub(r'\[Castellano[^\[]*]', 'SPANISH AUDIO', title, flags=re.IGNORECASE)
-        title = re.sub(ur'\[Español[^\[]*]', 'SPANISH AUDIO', title, flags=re.IGNORECASE)
-        title = re.sub(ur'\[AC3 5\.1 Español[^\[]*]', 'SPANISH AUDIO', title, flags=re.IGNORECASE)
+        title = re.sub(r'\[Español[^\[]*]', 'SPANISH AUDIO', title, flags=re.IGNORECASE)
+        title = re.sub(r'\[AC3 5\.1 Español[^\[]*]', 'SPANISH AUDIO', title, flags=re.IGNORECASE)
 
         title += '-NEWPCT'
 
diff --git a/sickbeard/providers/newznab.py b/sickbeard/providers/newznab.py
index a2b9da4314..a13766e0ef 100644
--- a/sickbeard/providers/newznab.py
+++ b/sickbeard/providers/newznab.py
@@ -125,7 +125,6 @@ def get_providers_list(data):
                 providers_dict[default.name].enable_daily = default.enable_daily
                 providers_dict[default.name].enable_backlog = default.enable_backlog
                 providers_dict[default.name].enable_manualsearch = default.enable_manualsearch
-                providers_dict[default.name].catIDs = default.catIDs
 
         return [provider for provider in providers_list if provider]
 
@@ -174,13 +173,13 @@ def get_newznab_categories(self, just_caps=False):
         data = self.get_url(urljoin(self.url, 'api'), params=url_params, returns='text')
         if not data:
-            error_string = 'Error getting caps xml for [{}]'.format(self.name)
+            error_string = 'Error getting caps xml for [{0}]'.format(self.name)
             logger.log(error_string, logger.WARNING)
             return False, return_categories, error_string
 
         with BS4Parser(data, 'html5lib') as html:
             if not html.find('categories'):
-                error_string = 'Error parsing caps xml for [{}]'.format(self.name)
+                error_string = 'Error parsing caps xml for [{0}]'.format(self.name)
                 logger.log(error_string, logger.DEBUG)
                 return False, return_categories, error_string
 
@@ -276,12 +275,11 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
         if not self._check_auth():
             return results
 
-        # gingadaddy has no caps.
-        if not self.caps and 'gingadaddy' not in self.url:
+        # For providers that have no caps, or for which t=caps is not working.
+        if not self.caps and all(provider not in self.url for provider in ['gingadaddy', 'usenet-crawler']):
             self.get_newznab_categories(just_caps=True)
-
-        if not self.caps and 'gingadaddy' not in self.url:
-            return results
+            if not self.caps:
+                return results
 
         for mode in search_strings:
             torznab = False
@@ -312,10 +310,10 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                 search_params.pop('ep', '')
 
             items = []
-            logger.log('Search Mode: {}'.format(mode), logger.DEBUG)
+            logger.log('Search Mode: {0}'.format(mode), logger.DEBUG)
             for search_string in search_strings[mode]:
                 if mode != 'RSS':
-                    logger.log('Search string: {}'.format(search_string.decode('utf-8')), logger.DEBUG)
+                    logger.log('Search string: {0}'.format(search_string), logger.DEBUG)
 
                     if search_params['t'] != 'tvsearch':
                         search_params['q'] = search_string
diff --git a/sickbeard/providers/norbits.py b/sickbeard/providers/norbits.py
index 15983351d5..245909a8b9 100644
--- a/sickbeard/providers/norbits.py
+++ b/sickbeard/providers/norbits.py
@@ -20,6 +20,7 @@
 
 from __future__ import unicode_literals
 
+import traceback
 import json
 
 from requests.compat import urlencode
@@ -74,12 +75,12 @@ def search(self, search_params, age=0, ep_obj=None):  # pylint: disable=too-many
 
         for mode in search_params:
             items = []
-            logger.log('Search Mode: {}'.format(mode), logger.DEBUG)
+            logger.log('Search Mode: {0}'.format(mode), logger.DEBUG)
 
             for search_string in search_params[mode]:
                 if mode != 'RSS':
-                    logger.log('Search string: {}'.format
-                               (search_string.decode('utf-8')), logger.DEBUG)
+                    logger.log('Search string: {0}'.format
+                               (search_string), logger.DEBUG)
 
                 post_data = {
                     'username': self.username,
@@ -103,33 +104,38 @@ def search(self, search_params, age=0, ep_obj=None):  # pylint: disable=too-many
                                    'not parsing it', logger.ERROR)
 
                 for item in json_items.get('torrents', []):
-                    title = item.pop('name', '')
-                    download_url = '{}{}'.format(
-                        self.urls['download'],
-                        urlencode({'id': item.pop('id', ''), 'passkey': self.passkey}))
-
-                    if not all([title, download_url]):
-                        continue
-
-                    seeders = try_int(item.pop('seeders', 0))
-                    leechers = try_int(item.pop('leechers', 0))
-
-                    if seeders < min(self.minseed, 1):
-                        logger.log('Discarding torrent because it does not meet '
-                                   'the minimum seeders: {0}. Seeders: {1})'.format
-                                   (title, seeders), logger.DEBUG)
+                    try:
+                        title = item.pop('name', '')
+                        download_url = '{0}{1}'.format(
+                            self.urls['download'],
+                            urlencode({'id': item.pop('id', ''), 'passkey': self.passkey}))
+
+                        if not all([title, download_url]):
+                            continue
+
+                        seeders = try_int(item.pop('seeders', 0))
+                        leechers = try_int(item.pop('leechers', 0))
+
+                        if seeders < min(self.minseed, 1):
+                            logger.log('Discarding torrent because it does not meet '
+                                       'the minimum seeders: {0}. Seeders: {1})'.format
+                                       (title, seeders), logger.DEBUG)
+                            continue
+
+                        info_hash = item.pop('info_hash', '')
+                        size = convert_size(item.pop('size', -1), -1)
+
+                        item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': info_hash}
+                        if mode != 'RSS':
+                            logger.log('Found result: {0} with {1} seeders and {2} leechers'.format(
+                                title, seeders, leechers), logger.DEBUG)
+
+                        items.append(item)
+                    except (AttributeError, TypeError, KeyError, ValueError, IndexError):
+                        logger.log('Failed parsing provider. Traceback: {0!r}'.format
+                                   (traceback.format_exc()), logger.ERROR)
                         continue
 
-                    info_hash = item.pop('info_hash', '')
-                    size = convert_size(item.pop('size', -1), -1)
-
-                    item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': info_hash}
-                    if mode != 'RSS':
-                        logger.log('Found result: {0} with {1} seeders and {2} leechers'.format(
-                            title, seeders, leechers), logger.DEBUG)
-
-                    items.append(item)
-
             results += items
 
         return results
diff --git a/sickbeard/providers/nyaatorrents.py b/sickbeard/providers/nyaatorrents.py
index d461ec56ab..cd365c9c33 100644
--- a/sickbeard/providers/nyaatorrents.py
+++ b/sickbeard/providers/nyaatorrents.py
@@ -21,6 +21,7 @@
 from __future__ import unicode_literals
 
 import re
+import traceback
 
 from sickbeard import logger, tvcache
@@ -55,11 +56,11 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
 
         for mode in search_strings:
             items = []
-            logger.log(u'Search Mode: {}'.format(mode), logger.DEBUG)
+            logger.log('Search Mode: {0}'.format(mode), logger.DEBUG)
             for search_string in search_strings[mode]:
                 if mode != 'RSS':
-                    logger.log(u'Search string: {}'.format
-                               (search_string.decode('utf-8')), logger.DEBUG)
+                    logger.log('Search string: {0}'.format
+                               (search_string), logger.DEBUG)
 
                 search_params = {
                     'page': 'rss',
@@ -84,7 +85,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                     item_info = self.regex.search(curItem['summary'])
                     if not item_info:
-                        logger.log('There was a problem parsing an item summary, skipping: {}'.format
+                        logger.log('There was a problem parsing an item summary, skipping: {0}'.format
                                    (title), logger.DEBUG)
                         continue
 
@@ -100,7 +101,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                         continue
 
                     if self.confirmed and not verified and mode != 'RSS':
-                        logger.log('Found result {} but that doesn\'t seem like a verified result so I\'m ignoring it'.format
+                        logger.log("Found result {0} but that doesn't seem like a verified result so I'm ignoring it".format
                                    (title), logger.DEBUG)
                         continue
 
@@ -111,8 +112,10 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                                    (title, seeders, leechers), logger.DEBUG)
 
                     items.append(result)
-                except StandardError:
-                    continue
+                except (AttributeError, TypeError, KeyError, ValueError, IndexError):
+                    logger.log('Failed parsing provider. Traceback: {0!r}'.format
+                               (traceback.format_exc()), logger.ERROR)
+                    continue
 
             results += items
diff --git a/sickbeard/providers/omgwtfnzbs.py b/sickbeard/providers/omgwtfnzbs.py
index 9f186bd60c..5778989066 100644
--- a/sickbeard/providers/omgwtfnzbs.py
+++ b/sickbeard/providers/omgwtfnzbs.py
@@ -18,7 +18,9 @@
 # along with Medusa. If not, see <http://www.gnu.org/licenses/>.
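The omgwtfnzbs hunk that follows shows the other recurring change in this series: each result item is parsed inside its own try/except that catches a specific tuple of exceptions rather than StandardError (which no longer exists on Python 3), so one malformed item is logged and skipped instead of aborting the whole page of results. A rough standalone sketch of that pattern, using hypothetical row data (parse_rows is an illustrative helper, not a function from the patch):

import traceback

def parse_rows(rows):
    items = []
    for row in rows:
        try:
            # Any of these lookups or conversions may raise on malformed data.
            items.append({'title': row['title'], 'seeders': int(row['seeders'])})
        except (AttributeError, TypeError, KeyError, ValueError, IndexError):
            # Log the traceback and keep going; later rows are still usable.
            print('Failed parsing provider. Traceback: {0!r}'.format(
                traceback.format_exc()))
            continue
    return items

print(parse_rows([{'title': 'Show.S01E01', 'seeders': '12'}, {'title': 'bad'}]))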
 from __future__ import unicode_literals
+
 import re
+import traceback
 
 import sickbeard
 from sickbeard import logger, tvcache
@@ -83,7 +85,7 @@ def _get_size(self, item):
         units = ['B', 'KB', 'MB', 'GB', 'TB', 'PB']
         summary = item.get('summary')
         if summary:
-            size_match = re.search(ur'Size[^\d]*([0-9.]*.[A-Z]*)', summary)
+            size_match = re.search(r'Size[^\d]*([0-9.]*.[A-Z]*)', summary)
             size = convert_size(size_match.group(1), units=units) or -1 if size_match else -1
         return try_int(size)
@@ -103,11 +105,11 @@ def search(self, search_strings, age=0, ep_obj=None):
 
         for mode in search_strings:
             items = []
-            logger.log('Search Mode: {}'.format(mode), logger.DEBUG)
+            logger.log('Search Mode: {0}'.format(mode), logger.DEBUG)
             for search_string in search_strings[mode]:
                 search_params['search'] = search_string
                 if mode != 'RSS':
-                    logger.log('Search string: {}'.format(search_string.decode('utf-8')),
+                    logger.log('Search string: {0}'.format(search_string),
                                logger.DEBUG)
 
                 data = self.get_url(self.urls['api'], params=search_params, returns='json')
@@ -119,11 +121,16 @@ def search(self, search_strings, age=0, ep_obj=None):
                     continue
 
                 for item in data:
-                    if not self._get_title_and_url(item):
-                        continue
-
-                    logger.log('Found result: {}'.format(item.get('title')), logger.DEBUG)
-                    items.append(item)
+                    try:
+                        if not self._get_title_and_url(item):
+                            continue
+
+                        logger.log('Found result: {0}'.format(item.get('title')), logger.DEBUG)
+                        items.append(item)
+                    except (AttributeError, TypeError, KeyError, ValueError, IndexError):
+                        logger.log('Failed parsing provider. Traceback: {0!r}'.format
+                                   (traceback.format_exc()), logger.ERROR)
+                        continue
 
             results += items
diff --git a/sickbeard/providers/pretome.py b/sickbeard/providers/pretome.py
index 0f2c9cb82e..591d385cb2 100644
--- a/sickbeard/providers/pretome.py
+++ b/sickbeard/providers/pretome.py
@@ -18,6 +18,8 @@
 # You should have received a copy of the GNU General Public License
 # along with SickRage. If not, see <http://www.gnu.org/licenses/>.
 
+from __future__ import unicode_literals
+
 import re
 import traceback
 from requests.compat import quote
@@ -26,7 +28,7 @@
 
 from sickbeard import logger, tvcache
 from sickbeard.bs4_parser import BS4Parser
-from sickrage.helper.common import convert_size, try_int
+from sickrage.helper.common import convert_size
 from sickrage.providers.torrent.TorrentProvider import TorrentProvider
 
 
@@ -34,7 +36,7 @@ class PretomeProvider(TorrentProvider):  # pylint: disable=too-many-instance-att
 
     def __init__(self):
 
-        TorrentProvider.__init__(self, "Pretome")
+        TorrentProvider.__init__(self, 'Pretome')
 
         self.username = None
         self.password = None
@@ -50,7 +52,7 @@ def __init__(self):
 
         self.url = self.urls['base_url']
 
-        self.categories = "&st=1&cat%5B%5D=7"
+        self.categories = '&st=1&cat%5B%5D=7'
 
         self.proper_strings = ['PROPER', 'REPACK']
 
@@ -59,7 +61,7 @@ def __init__(self):
     def _check_auth(self):
 
         if not self.username or not self.password or not self.pin:
-            logger.log(u"Invalid username or password or pin. Check your settings", logger.WARNING)
+            logger.log('Invalid username or password or pin. Check your settings', logger.WARNING)
 
         return True
@@ -73,11 +75,11 @@ def login(self):
 
         response = self.get_url(self.urls['login'], post_data=login_params, returns='text')
         if not response:
-            logger.log(u"Unable to connect to provider", logger.WARNING)
+            logger.log('Unable to connect to provider', logger.WARNING)
             return False
 
         if re.search('Username or password incorrect', response):
-            logger.log(u"Invalid username or password. Check your settings", logger.WARNING)
+            logger.log('Invalid username or password. Check your settings', logger.WARNING)
             return False
 
         return True
@@ -89,11 +91,11 @@ def search(self, search_params, age=0, ep_obj=None):  # pylint: disable=too-many
 
         for mode in search_params:
            items = []
-            logger.log(u"Search Mode: {}".format(mode), logger.DEBUG)
+            logger.log('Search Mode: {0}'.format(mode), logger.DEBUG)
 
             for search_string in search_params[mode]:
                 if mode != 'RSS':
-                    logger.log(u"Search string: {}".format(search_string.decode("utf-8")),
+                    logger.log('Search string: {0}'.format(search_string),
                                logger.DEBUG)
 
                 search_url = self.urls['search'] % (quote(search_string), self.categories)
@@ -105,26 +107,26 @@ def search(self, search_params, age=0, ep_obj=None):  # pylint: disable=too-many
                 try:
                     with BS4Parser(data, 'html5lib') as html:
                         # Continue only if one Release is found
-                        empty = html.find('h2', text="No .torrents fit this filter criteria")
+                        empty = html.find('h2', text='No .torrents fit this filter criteria')
                         if empty:
-                            logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
+                            logger.log('Data returned from provider does not contain any torrents', logger.DEBUG)
                             continue
 
                         torrent_table = html.find('table', attrs={'style': 'border: none; width: 100%;'})
                         if not torrent_table:
-                            logger.log(u"Could not find table of torrents", logger.ERROR)
+                            logger.log('Could not find table of torrents', logger.ERROR)
                             continue
 
                         torrent_rows = torrent_table('tr', attrs={'class': 'browse'})
 
                         for result in torrent_rows:
-                            cells = result('td')
-                            size = None
-                            link = cells[1].find('a', attrs={'style': 'font-size: 1.25em; font-weight: bold;'})
+                            try:
+                                cells = result('td')
+                                size = None
+                                link = cells[1].find('a', attrs={'style': 'font-size: 1.25em; font-weight: bold;'})
 
-                            torrent_id = link['href'].replace('details.php?id=', '')
+                                torrent_id = link['href'].replace('details.php?id=', '')
 
-                            try:
                                 if link.get('title', ''):
                                     title = link['title']
                                 else:
@@ -139,27 +141,31 @@ def search(self, search_params, age=0, ep_obj=None):  # pylint: disable=too-many
                                 torrent_size = cells[7].text
                                 size = convert_size(torrent_size) or -1
 
-                            except (AttributeError, TypeError):
-                                continue
+                                if not all([title, download_url]):
+                                    continue
 
-                            if not all([title, download_url]):
-                                continue
+                                # Filter unseeded torrent
+                                if seeders < min(self.minseed, 1):
+                                    if mode != 'RSS':
+                                        logger.log("Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format
+                                                   (title, seeders), logger.DEBUG)
+                                    continue
 
-                            # Filter unseeded torrent
-                            if seeders < min(self.minseed, 1):
+                                item = {'title': title, 'link': download_url, 'size': size, 'seeders':
+                                        seeders, 'leechers': leechers, 'pubdate': None, 'hash': None}
                                 if mode != 'RSS':
-                                    logger.log(u"Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format
-                                               (title, seeders), logger.DEBUG)
-                                continue
+                                    logger.log('Found result: %s with %s seeders and %s leechers' % (title, seeders, leechers), logger.DEBUG)
 
-                            item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None}
-                            if mode != 'RSS':
-                                logger.log(u"Found result: %s with %s seeders and %s leechers" % (title, seeders, leechers), logger.DEBUG)
+                                items.append(item)
 
-                            items.append(item)
+                            except (AttributeError, TypeError, KeyError, ValueError, IndexError):
+                                logger.log('Failed parsing provider. Traceback: {0!r}'.format
+                                           (traceback.format_exc()), logger.ERROR)
+                                continue
 
                 except Exception:
-                    logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR)
+                    logger.log('Failed parsing provider. Traceback: {0!r}'.format
+                               (traceback.format_exc()), logger.ERROR)
 
             results += items
diff --git a/sickbeard/providers/rarbg.py b/sickbeard/providers/rarbg.py
index 67fbbddc7d..e2915554d1 100644
--- a/sickbeard/providers/rarbg.py
+++ b/sickbeard/providers/rarbg.py
@@ -18,10 +18,10 @@
 
 from __future__ import unicode_literals
 
+import traceback
 import datetime
 import time
 
-import sickbeard
 from sickbeard import logger, tvcache
 from sickbeard.indexers.indexer_config import INDEXER_TVDB
@@ -33,7 +33,7 @@ class RarbgProvider(TorrentProvider):  # pylint: disable=too-many-instance-attri
 
     def __init__(self):
 
-        TorrentProvider.__init__(self, "Rarbg")
+        TorrentProvider.__init__(self, 'Rarbg')
 
         self.public = True
         self.minseed = None
@@ -44,10 +44,10 @@ def __init__(self):
         self.token_expires = None
 
         # Spec: https://torrentapi.org/apidocs_v2.txt
-        self.url = "https://rarbg.com"
-        self.urls = {"api": "http://torrentapi.org/pubapi_v2.php"}
+        self.url = 'https://rarbg.com'
+        self.urls = {'api': 'http://torrentapi.org/pubapi_v2.php'}
 
-        self.proper_strings = ["{{PROPER|REPACK}}"]
+        self.proper_strings = ['{{PROPER|REPACK}}']
 
         self.cache = tvcache.TVCache(self, min_time=10)  # only poll RARBG every 10 minutes max
@@ -56,17 +56,17 @@ def login(self):
             return True
 
         login_params = {
-            "get_token": "get_token",
-            "format": "json",
-            "app_id": "sickrage2"
+            'get_token': 'get_token',
+            'format': 'json',
+            'app_id': 'sickrage2'
         }
 
-        response = self.get_url(self.urls["api"], params=login_params, returns="json")
+        response = self.get_url(self.urls['api'], params=login_params, returns='json')
         if not response:
-            logger.log("Unable to connect to provider", logger.WARNING)
+            logger.log('Unable to connect to provider', logger.WARNING)
             return False
 
-        self.token = response.get("token")
+        self.token = response.get('token')
         self.token_expires = datetime.datetime.now() + datetime.timedelta(minutes=14) if self.token else None
         return self.token is not None
@@ -76,14 +76,14 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
             return results
 
         search_params = {
-            "app_id": "sickrage2",
-            "category": "tv",
-            "min_seeders": try_int(self.minseed),
-            "min_leechers": try_int(self.minleech),
-            "limit": 100,
-            "format": "json_extended",
-            "ranked": try_int(self.ranked),
-            "token": self.token,
+            'app_id': 'sickrage2',
+            'category': 'tv',
+            'min_seeders': try_int(self.minseed),
+            'min_leechers': try_int(self.minleech),
+            'limit': 100,
+            'format': 'json_extended',
+            'ranked': try_int(self.ranked),
+            'token': self.token,
         }
 
         if ep_obj is not None:
@@ -95,25 +95,25 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
 
         for mode in search_strings:
             items = []
-            logger.log("Search Mode: {}".format(mode), logger.DEBUG)
-            if mode == "RSS":
-                search_params["sort"] = "last"
-                search_params["mode"] = "list"
-                search_params.pop("search_string", None)
-                search_params.pop("search_tvdb", None)
+            logger.log('Search Mode: {0}'.format(mode), logger.DEBUG)
+            if mode == 'RSS':
+                search_params['sort'] = 'last'
+                search_params['mode'] = 'list'
+                search_params.pop('search_string', None)
+                search_params.pop('search_tvdb', None)
             else:
-                search_params["sort"] = self.sorting if self.sorting else "seeders"
-                search_params["mode"] = "search"
+                search_params['sort'] = self.sorting if self.sorting else 'seeders'
+                search_params['mode'] = 'search'
 
                 if ep_indexer == INDEXER_TVDB and ep_indexerid:
-                    search_params["search_tvdb"] = ep_indexerid
+                    search_params['search_tvdb'] = ep_indexerid
                 else:
-                    search_params.pop("search_tvdb", None)
+                    search_params.pop('search_tvdb', None)
 
             for search_string in search_strings[mode]:
-                if mode != "RSS":
-                    search_params["search_string"] = search_string
-                    logger.log("Search string: {}".format(search_string.decode("utf-8")),
+                if mode != 'RSS':
+                    search_params['search_string'] = search_string
+                    logger.log('Search string: {0}'.format(search_string),
                                logger.DEBUG)
 
                 # Check if token is still valid before search
@@ -124,57 +124,59 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                 # Changing to 5 because of server clock desync
                 time.sleep(5)
 
-                data = self.get_url(self.urls["api"], params=search_params, returns="json")
+                data = self.get_url(self.urls['api'], params=search_params, returns='json')
                 if not isinstance(data, dict):
-                    logger.log("No data returned from provider", logger.DEBUG)
+                    logger.log('No data returned from provider', logger.DEBUG)
                     continue
 
-                error = data.get("error")
-                error_code = data.get("error_code")
-                # Don't log when {"error":"No results found","error_code":20}
+                error = data.get('error')
+                error_code = data.get('error_code')
+                # Don't log when {'error':'No results found','error_code':20}
                 # List of errors: https://github.com/rarbg/torrentapi/issues/1#issuecomment-114763312
                 if error:
                     if error_code == 5:
                         # 5 = Too many requests per second
-                        logger.log("{0}. Error code: {1}".format(error, error_code), logger.INFO)
+                        logger.log('{0}. Error code: {1}'.format(error, error_code), logger.INFO)
                     elif error_code not in (14, 20):
                         # 14 = Cant find thetvdb in database. Are you sure this thetvdb exists?
                         # 20 = No results found
-                        logger.log("{0}. Error code: {1}".format(error, error_code), logger.WARNING)
+                        logger.log('{0}. Error code: {1}'.format(error, error_code), logger.WARNING)
                     continue
 
-                torrent_results = data.get("torrent_results")
+                torrent_results = data.get('torrent_results')
                 if not torrent_results:
-                    logger.log("Data returned from provider does not contain any torrents", logger.DEBUG)
+                    logger.log('Data returned from provider does not contain any torrents', logger.DEBUG)
                     continue
 
                 for item in torrent_results:
                     try:
-                        title = item.pop("title")
-                        download_url = item.pop("download")
+                        title = item.pop('title')
+                        download_url = item.pop('download')
                         if not all([title, download_url]):
                             continue
 
-                        seeders = item.pop("seeders")
-                        leechers = item.pop("leechers")
+                        seeders = item.pop('seeders')
+                        leechers = item.pop('leechers')
                         if seeders < min(self.minseed, 1):
-                            if mode != "RSS":
+                            if mode != 'RSS':
                                 logger.log("Discarding torrent because it doesn't meet the"
                                            " minimum seeders: {0}. Seeders: {1})".format
                                            (title, seeders), logger.DEBUG)
                             continue
 
-                        torrent_size = item.pop("size", -1)
+                        torrent_size = item.pop('size', -1)
                         size = convert_size(torrent_size) or -1
 
-                        if mode != "RSS":
-                            logger.log("Found result: {0} with {1} seeders and {2} leechers".format
+                        if mode != 'RSS':
+                            logger.log('Found result: {0} with {1} seeders and {2} leechers'.format
                                        (title, seeders, leechers), logger.DEBUG)
 
                         result = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None}
                         items.append(result)
-                    except StandardError:
-                        continue
+                    except (AttributeError, TypeError, KeyError, ValueError, IndexError):
+                        logger.log('Failed parsing provider. Traceback: {0!r}'.format
+                                   (traceback.format_exc()), logger.ERROR)
+                        continue
 
             results += items
diff --git a/sickbeard/providers/rsstorrent.py b/sickbeard/providers/rsstorrent.py
index 4a8f3ad8d9..bfc43c1f48 100644
--- a/sickbeard/providers/rsstorrent.py
+++ b/sickbeard/providers/rsstorrent.py
@@ -155,14 +155,14 @@ def validateRSS(self):  # pylint: disable=too-many-return-statements
             if self.cookies:
                 cookie_validator = re.compile(r'^(\w+=\w+)(;\w+=\w+)*$')
                 if not cookie_validator.match(self.cookies):
-                    return False, 'Cookie is not correctly formatted: {}'.format(self.cookies)
+                    return False, 'Cookie is not correctly formatted: {0}'.format(self.cookies)
                 add_dict_to_cookiejar(self.session.cookies, dict(x.rsplit('=', 1) for x in self.cookies.split(';')))
 
             # pylint: disable=protected-access
             # Access to a protected member of a client class
             data = self.cache._getRSSData()['entries']
             if not data:
-                return False, 'No items found in the RSS feed {}'.format(self.url)
+                return False, 'No items found in the RSS feed {0}'.format(self.url)
 
             title, url = self._get_title_and_url(data[0])
@@ -180,12 +180,12 @@ def validateRSS(self):  # pylint: disable=too-many-return-statements
                     bdecode(torrent_file)
                 except Exception as error:
                     self.dumpHTML(torrent_file)
-                    return False, 'Torrent link is not a valid torrent file: {}'.format(ex(error))
+                    return False, 'Torrent link is not a valid torrent file: {0}'.format(ex(error))
 
             return True, 'RSS feed Parsed correctly'
 
         except Exception as error:
-            return False, 'Error when trying to load RSS: {}'.format(ex(error))
+            return False, 'Error when trying to load RSS: {0}'.format(ex(error))
 
     @staticmethod
     def dumpHTML(data):
@@ -197,10 +197,10 @@ def dumpHTML(data):
             fileOut.close()
             helpers.chmodAsParent(dumpName)
         except IOError as error:
-            logger.log('Unable to save the file: {}'.format(ex(error)), logger.ERROR)
+            logger.log('Unable to save the file: {0}'.format(ex(error)), logger.ERROR)
             return False
 
-        logger.log('Saved custom_torrent html dump {} '.format(dumpName), logger.INFO)
+        logger.log('Saved custom_torrent html dump {0} '.format(dumpName), logger.INFO)
         return True
diff --git a/sickbeard/providers/scc.py b/sickbeard/providers/scc.py
index 652dd77de2..c58df97af1 100644
--- a/sickbeard/providers/scc.py
+++ b/sickbeard/providers/scc.py
@@ -18,7 +18,10 @@
 # You should have received a copy of the GNU General Public License
 # along with SickRage. If not, see <http://www.gnu.org/licenses/>.
 
+from __future__ import unicode_literals
+
 import re
+import traceback
 import time
 
 from requests.compat import urljoin, quote
@@ -37,7 +40,7 @@ class SCCProvider(TorrentProvider):  # pylint: disable=too-many-instance-attribu
 
     def __init__(self):
 
-        TorrentProvider.__init__(self, "SceneAccess")
+        TorrentProvider.__init__(self, 'SceneAccess')
 
         self.username = None
         self.password = None
@@ -74,12 +77,12 @@ def login(self):
 
         response = self.get_url(self.urls['login'], post_data=login_params, returns='text')
         if not response:
-            logger.log(u"Unable to connect to provider", logger.WARNING)
+            logger.log('Unable to connect to provider', logger.WARNING)
             return False
 
         if re.search(r'Username or password incorrect', response) \
                 or re.search(r'SceneAccess \| Login', response):
-            logger.log(u"Invalid username or password. Check your settings", logger.WARNING)
+            logger.log('Invalid username or password. Check your settings', logger.WARNING)
             return False
 
         return True
@@ -97,10 +100,10 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
         for mode in search_strings:
             items = []
             if mode != 'RSS':
-                logger.log(u"Search Mode: {}".format(mode), logger.DEBUG)
+                logger.log('Search Mode: {0}'.format(mode), logger.DEBUG)
             for search_string in search_strings[mode]:
                 if mode != 'RSS':
-                    logger.log(u"Search string: {}".format(search_string.decode("utf-8")),
+                    logger.log('Search string: {0}'.format(search_string),
                                logger.DEBUG)
 
                 search_url = self.urls['search'] % (quote(search_string), self.categories[mode])
@@ -109,7 +112,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                     data = self.get_url(search_url, returns='text')
                     time.sleep(cpu_presets[sickbeard.CPU_PRESET])
                 except Exception as e:
-                    logger.log(u"Unable to fetch data. Error: %s" % repr(e), logger.WARNING)
+                    logger.log('Unable to fetch data. Error: %s' % repr(e), logger.WARNING)
 
                 if not data:
                     continue
@@ -120,7 +123,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
 
                     # Continue only if at least one Release is found
                     if len(torrent_rows) < 2:
-                        logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
+                        logger.log('Data returned from provider does not contain any torrents', logger.DEBUG)
                         continue
 
                     for result in torrent_table('tr')[1:]:
@@ -134,14 +137,16 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                             data = self.get_url(urljoin(self.url, link['href']), returns='text')
                             if data:
                                 with BS4Parser(data) as details_html:
-                                    title = re.search('(?<=").+(?
diff --git a/sickbeard/providers/sceneelite.py b/sickbeard/providers/sceneelite.py
--- a/sickbeard/providers/sceneelite.py
+++ b/sickbeard/providers/sceneelite.py
 # along with Medusa. If not, see <http://www.gnu.org/licenses/>.
 
 from __future__ import unicode_literals
 
-import re
+import traceback
 
 from requests.compat import urljoin
 from requests.utils import dict_from_cookiejar
 
 from sickbeard import logger, tvcache
-from sickbeard.bs4_parser import BS4Parser
 
-from sickrage.helper.common import convert_size, try_int
+from sickrage.helper.common import try_int
 from sickrage.providers.torrent.TorrentProvider import TorrentProvider
@@ -36,7 +35,7 @@ class SceneEliteProvider(TorrentProvider):  # pylint: disable=too-many-instance-
 
     def __init__(self):
 
         # Provider Init
-        TorrentProvider.__init__(self, "SceneElite")
+        TorrentProvider.__init__(self, 'SceneElite')
 
         # Credentials
         self.username = None
@@ -48,16 +47,16 @@ def __init__(self):
         self.freeleech = None
 
         # URLs
-        self.url = "https://sceneelite.org/"
+        self.url = 'https://sceneelite.org/'
         self.urls = {
-            "login": urljoin(self.url, "/api/v1/auth"),
-            "search": urljoin(self.url, "/api/v1/torrents"),
-            "download": urljoin(self.url, "/api/v1/torrents/download/"),
+            'login': urljoin(self.url, '/api/v1/auth'),
+            'search': urljoin(self.url, '/api/v1/torrents'),
+            'download': urljoin(self.url, '/api/v1/torrents/download/'),
         }
 
         # Proper Strings
-        self.proper_strings = ["PROPER", "REPACK", "REAL"]
-        cache_params = {"RSS": [""]}
+        self.proper_strings = ['PROPER', 'REPACK', 'REAL']
+        cache_params = {'RSS': ['']}
 
         # Cache
         self.cache = tvcache.TVCache(self, min_time=0.1, search_params=cache_params)
@@ -66,13 +65,13 @@ def login(self):
             return True
 
         login_params = {
-            "username": self.username,
-            "password": self.password
+            'username': self.username,
+            'password': self.password
         }
 
-        response = self.get_url(self.urls["login"], params=login_params, returns="json")
+        response = self.get_url(self.urls['login'], params=login_params, returns='json')
         if not response:
-            logger.log("Unable to connect to provider", logger.WARNING)
+            logger.log('Unable to connect to provider', logger.WARNING)
             return False
 
         return True
@@ -83,66 +82,68 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
 
         # Search Params
         search_params = {
-            "extendedSearch": 'false',
-            "hideOld": 'false',
-            "index": '0',
-            "limit": '100',
-            "order": 'asc',
-            "page": 'search',
-            "sort": 'n',
-            "categories[0]": 3,
-            "categories[1]": 6,
-            "categories[2]": 7
+            'extendedSearch': 'false',
+            'hideOld': 'false',
+            'index': '0',
+            'limit': '100',
+            'order': 'asc',
+            'page': 'search',
+            'sort': 'n',
+            'categories[0]': 3,
+            'categories[1]': 6,
+            'categories[2]': 7
         }
 
         for mode in search_strings:
             items = []
-            logger.log("Search Mode: {0}".format(mode), logger.DEBUG)
+            logger.log('Search Mode: {0}'.format(mode), logger.DEBUG)
             for search_string in search_strings[mode]:
-                if mode != "RSS":
-                    logger.log("Search string: {0}".format
-                               (search_string.decode("utf-8")), logger.DEBUG)
-                    search_params["searchText"] = search_string
-                else:
-                    search_params["page"] = 'last_seriebrowse'
+                if mode != 'RSS':
+                    logger.log('Search string: {0}'.format
+                               (search_string), logger.DEBUG)
+                    search_params['searchText'] = search_string
+                else:
+                    search_params['page'] = 'last_seriebrowse'
                 results = []
-                search_url = self.urls["search"]
+                search_url = self.urls['search']
                 try:
-                    jdata = self.get_url(search_url, params=search_params, returns="json")
+                    jdata = self.get_url(search_url, params=search_params, returns='json')
                 except ValueError:
-                    logger.log("No data returned from provider", logger.DEBUG)
+                    logger.log('No data returned from provider', logger.DEBUG)
                     continue
                 for torrent in jdata:
                     try:
-                        title = torrent.pop("name", "")
-                        id = str(torrent.pop("id", ""))
+                        title = torrent.pop('name', '')
+                        id = str(torrent.pop('id', ''))
                         if not id:
                             continue
-                        seeders = try_int(torrent.pop("seeders", ""), 1)
-                        leechers = try_int(torrent.pop("leechers", ""), 0)
-                        freeleech = torrent.pop("frileech")
+                        seeders = try_int(torrent.pop('seeders', ''), 1)
+                        leechers = try_int(torrent.pop('leechers', ''), 0)
+                        freeleech = torrent.pop('frileech')
                         if self.freeleech and freeleech != 1:
                             continue
-                        size = try_int(torrent.pop("size", ""), 0)
-                        download_url = self.urls["download"] + id
+                        size = try_int(torrent.pop('size', ''), 0)
+                        download_url = self.urls['download'] + id
 
                         # Filter unseeded torrent
                         if seeders < min(self.minseed, 1):
                             if mode != 'RSS':
-                                logger.log(u"Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format(title, seeders), logger.DEBUG)
+                                logger.log("Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format(title, seeders), logger.DEBUG)
                             continue
 
                         item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None}
-                        if mode != "RSS":
-                            logger.log("Found result: {0} with {1} seeders and {2} leechers".format
+                        if mode != 'RSS':
+                            logger.log('Found result: {0} with {1} seeders and {2} leechers'.format
                                        (title, seeders, leechers), logger.DEBUG)
 
                         items.append(item)
-                    except StandardError:
-                        continue
+                    except (AttributeError, TypeError, KeyError, ValueError, IndexError):
+                        logger.log('Failed parsing provider. Traceback: {0!r}'.format
+                                   (traceback.format_exc()), logger.ERROR)
+                        continue
 
             results += items
diff --git a/sickbeard/providers/scenetime.py b/sickbeard/providers/scenetime.py
index 496c4f500b..d3d95ec870 100644
--- a/sickbeard/providers/scenetime.py
+++ b/sickbeard/providers/scenetime.py
@@ -20,6 +20,8 @@
 
 import re
+import traceback
+
 from requests.compat import quote
 from requests.utils import dict_from_cookiejar
@@ -34,7 +36,7 @@ class SceneTimeProvider(TorrentProvider):  # pylint: disable=too-many-instance-a
 
     def __init__(self):
 
-        TorrentProvider.__init__(self, "SceneTime")
+        TorrentProvider.__init__(self, 'SceneTime')
 
         self.username = None
         self.password = None
@@ -51,7 +53,7 @@ def __init__(self):
 
         self.url = self.urls['base_url']
 
-        self.categories = "&c2=1&c43=13&c9=1&c63=1&c77=1&c79=1&c100=1&c101=1"
+        self.categories = '&c2=1&c43=13&c9=1&c63=1&c77=1&c79=1&c100=1&c101=1'
 
     def login(self):
         if any(dict_from_cookiejar(self.session.cookies).values()):
@@ -62,11 +64,11 @@ def login(self):
 
         response = self.get_url(self.urls['login'], post_data=login_params, returns='text')
         if not response:
-            logger.log(u"Unable to connect to provider", logger.WARNING)
+            logger.log('Unable to connect to provider', logger.WARNING)
             return False
 
         if re.search('Username or password incorrect', response):
-            logger.log(u"Invalid username or password. Check your settings", logger.WARNING)
+            logger.log('Invalid username or password. Check your settings', logger.WARNING)
             return False
 
         return True
@@ -78,11 +80,11 @@ def search(self, search_params, age=0, ep_obj=None):  # pylint: disable=too-many
 
         for mode in search_params:
             items = []
-            logger.log(u"Search Mode: {}".format(mode), logger.DEBUG)
+            logger.log('Search Mode: {0}'.format(mode), logger.DEBUG)
 
             for search_string in search_params[mode]:
                 if mode != 'RSS':
-                    logger.log(u"Search string: {}".format(search_string.decode("utf-8")),
+                    logger.log('Search string: {0}'.format(search_string),
                                logger.DEBUG)
 
                 search_url = self.urls['search'] % (quote(search_string), self.categories)
@@ -92,14 +94,14 @@ def search(self, search_params, age=0, ep_obj=None):  # pylint: disable=too-many
                     continue
 
                 with BS4Parser(data, 'html5lib') as html:
-                    torrent_table = html.find('div', id="torrenttable")
+                    torrent_table = html.find('div', id='torrenttable')
                     torrent_rows = []
                     if torrent_table:
-                        torrent_rows = torrent_table.select("tr")
+                        torrent_rows = torrent_table.select('tr')
 
                     # Continue only if one Release is found
                     if len(torrent_rows) < 2:
-                        logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
+                        logger.log('Data returned from provider does not contain any torrents', logger.DEBUG)
                         continue
 
                     # Scenetime apparently uses different number of cells in #torrenttable based
@@ -112,10 +114,10 @@ def search(self, search_params, age=0, ep_obj=None):  # pylint: disable=too-many
                         cells = result('td')
 
                         link = cells[labels.index('Name')].find('a')
-                        torrent_id = link['href'].replace('details.php?id=', '').split("&")[0]
+                        torrent_id = link['href'].replace('details.php?id=', '').split('&')[0]
 
                         title = link.get_text(strip=True)
-                        download_url = self.urls['download'] % (torrent_id, "%s.torrent" % title.replace(" ", "."))
+                        download_url = self.urls['download'] % (torrent_id, '%s.torrent' % title.replace(' ', '.'))
 
                         seeders = try_int(cells[labels.index('Seeders')].get_text(strip=True))
                         leechers = try_int(cells[labels.index('Leechers')].get_text(strip=True))
@@ -123,24 +125,25 @@
                         size = convert_size(torrent_size) or 
-1 - except (AttributeError, TypeError, KeyError, ValueError): - continue + if not all([title, download_url]): + continue - if not all([title, download_url]): - continue + # Filter unseeded torrent + if seeders < min(self.minseed, 1): + if mode != 'RSS': + logger.log("Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format + (title, seeders), logger.DEBUG) + continue - # Filter unseeded torrent - if seeders < min(self.minseed, 1): + item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} if mode != 'RSS': - logger.log(u"Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format - (title, seeders), logger.DEBUG) - continue - - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} - if mode != 'RSS': - logger.log(u"Found result: %s with %s seeders and %s leechers" % (title, seeders, leechers), logger.DEBUG) + logger.log('Found result: %s with %s seeders and %s leechers' % (title, seeders, leechers), logger.DEBUG) - items.append(item) + items.append(item) + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) + continue results += items diff --git a/sickbeard/providers/speedcd.py b/sickbeard/providers/speedcd.py index 490f520759..2e78ad7755 100644 --- a/sickbeard/providers/speedcd.py +++ b/sickbeard/providers/speedcd.py @@ -18,7 +18,10 @@ # You should have received a copy of the GNU General Public License # along with SickRage. If not, see . +from __future__ import unicode_literals + import re +import traceback from requests.compat import urljoin from requests.utils import dict_from_cookiejar @@ -35,7 +38,7 @@ class SpeedCDProvider(TorrentProvider): # pylint: disable=too-many-instance-att def __init__(self): # Provider Init - TorrentProvider.__init__(self, "Speedcd") + TorrentProvider.__init__(self, 'Speedcd') # Credentials self.username = None @@ -70,11 +73,11 @@ def login(self): response = self.get_url(self.urls['login'], post_data=login_params, returns='text') if not response: - logger.log(u"Unable to connect to provider", logger.WARNING) + logger.log('Unable to connect to provider', logger.WARNING) return False if re.search('Incorrect username or Password. Please try again.', response): - logger.log(u"Invalid username or password. Check your settings", logger.WARNING) + logger.log('Invalid username or password. 
Check your settings', logger.WARNING) return False return True @@ -115,12 +118,12 @@ def process_column_header(td): for mode in search_strings: items = [] - logger.log(u"Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log(u"Search string: {}".format(search_string.decode("utf-8")), + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) search_params['search'] = search_string @@ -136,7 +139,7 @@ def process_column_header(td): # Continue only if at least one Release is found if len(torrent_rows) < 2: - logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG) + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue labels = [process_column_header(label) for label in torrent_rows[0]('th')] @@ -157,19 +160,22 @@ def process_column_header(td): # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log(u"Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format(title, seeders), logger.DEBUG) + logger.log("Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format(title, seeders), logger.DEBUG) continue torrent_size = cells[labels.index('Size')].get_text() torrent_size = torrent_size[:-2] + ' ' + torrent_size[-2:] size = convert_size(torrent_size, units=units) or -1 - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} + item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, + 'leechers': leechers, 'pubdate': None, 'hash': None} if mode != 'RSS': - logger.log(u"Found result: %s with %s seeders and %s leechers" % (title, seeders, leechers), logger.DEBUG) + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format(title, seeders, leechers), logger.DEBUG) items.append(item) - except StandardError: + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) continue results += items diff --git a/sickbeard/providers/t411.py b/sickbeard/providers/t411.py index fe5f9ddb1e..65b2224797 100644 --- a/sickbeard/providers/t411.py +++ b/sickbeard/providers/t411.py @@ -18,6 +18,8 @@ # You should have received a copy of the GNU General Public License # along with SickRage. If not, see . 
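
The speedcd hunk above shows the change repeated across this patch: the bare `except StandardError`, which no longer exists in Python 3, becomes an explicit tuple of the errors a malformed row can actually raise, and the failure is logged with its traceback instead of being silently swallowed. A minimal runnable sketch of that loop shape, using the stdlib logging module in place of sickbeard.logger and made-up row data:

    import logging
    import traceback

    logger = logging.getLogger(__name__)

    def parse_rows(rows):
        """Build result items, logging and skipping rows that fail to parse."""
        items = []
        for row in rows:
            try:
                items.append({'title': row['name'], 'seeders': int(row['seed'])})
            except (AttributeError, TypeError, KeyError, ValueError, IndexError):
                # StandardError is Python 2 only; name the concrete errors instead.
                logger.error('Failed parsing provider. Traceback: %r', traceback.format_exc())
                continue
        return items

    # parse_rows([{'name': 'Show.S01E01', 'seed': '12'}, {'name': 'no seed count'}])
    # keeps the first row and logs the KeyError raised by the second.
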
+from __future__ import unicode_literals + from requests.auth import AuthBase import time import traceback @@ -69,7 +71,7 @@ def login(self): response = self.get_url(self.urls['login_page'], post_data=login_params, returns='json') if not response: - logger.log(u"Unable to connect to provider", logger.WARNING) + logger.log('Unable to connect to provider', logger.WARNING) return False if response and 'token' in response: @@ -79,7 +81,7 @@ def login(self): self.session.auth = T411Auth(self.token) return True else: - logger.log(u"Token not found in authentication response", logger.WARNING) + logger.log('Token not found in authentication response', logger.WARNING) return False def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many-branches, too-many-locals, too-many-statements @@ -89,11 +91,11 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many for mode in search_params: items = [] - logger.log(u"Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_params[mode]: if mode != 'RSS': - logger.log(u"Search string: {}".format(search_string.decode("utf-8")), + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) search_urlS = ([self.urls['search'] % (search_string, u) for u in self.subcategories], [self.urls['rss']])[mode == 'RSS'] @@ -104,13 +106,13 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many try: if 'torrents' not in data and mode != 'RSS': - logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG) + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue torrents = data['torrents'] if mode != 'RSS' else data if not torrents: - logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG) + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue for torrent in torrents: @@ -120,7 +122,7 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many try: title = torrent['name'] torrent_id = torrent['id'] - download_url = (self.urls['download'] % torrent_id).encode('utf8') + download_url = (self.urls['download'] % torrent_id) if not all([title, download_url]): continue @@ -132,27 +134,28 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log(u"Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format(title, seeders), logger.DEBUG) + logger.log("Discarding torrent because it doesn't meet the minimum seeders: {0}. 
Seeders: {1})".format(title, seeders), logger.DEBUG) continue if self.confirmed and not verified and mode != 'RSS': - logger.log(u"Found result " + title + " but that doesn't seem like a verified result so I'm ignoring it", logger.DEBUG) + logger.log("Found result {0} but that doesn't seem like a verified result so I'm ignoring it".format(title), logger.DEBUG) continue size = convert_size(torrent_size) or -1 - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} + item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, + 'leechers': leechers, 'pubdate': None, 'hash': None} if mode != 'RSS': - logger.log(u"Found result: %s with %s seeders and %s leechers" % (title, seeders, leechers), logger.DEBUG) + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format(title, seeders, leechers), logger.DEBUG) items.append(item) - except Exception: - logger.log(u"Invalid torrent data, skipping result: %s" % torrent, logger.DEBUG) - logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.DEBUG) + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) continue except Exception: - logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR) + logger.log('Failed parsing provider. Traceback: %s' % traceback.format_exc(), logger.ERROR) results += items diff --git a/sickbeard/providers/thepiratebay.py b/sickbeard/providers/thepiratebay.py index c7a89111a6..0799d0ee01 100644 --- a/sickbeard/providers/thepiratebay.py +++ b/sickbeard/providers/thepiratebay.py @@ -21,6 +21,7 @@ from __future__ import unicode_literals import re +import traceback import validators from requests.compat import urljoin @@ -36,7 +37,7 @@ class ThePirateBayProvider(TorrentProvider): # pylint: disable=too-many-instanc def __init__(self): # Provider Init - TorrentProvider.__init__(self, "ThePirateBay") + TorrentProvider.__init__(self, 'ThePirateBay') # Credentials self.public = True @@ -47,10 +48,10 @@ def __init__(self): self.confirmed = True # URLs - self.url = "https://thepiratebay.se" + self.url = 'https://thepiratebay.se' self.urls = { - "rss": urljoin(self.url, "browse/200"), - "search": urljoin(self.url, "s/"), # Needs trailing / + 'rss': urljoin(self.url, 'browse/200'), + 'search': urljoin(self.url, 's/'), # Needs trailing / } self.custom_url = None @@ -66,18 +67,18 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man https://pirateproxy.pl/s/?q=Game of Thrones&type=search&orderby=7&page=0&category=200 """ search_params = { - "q": "", - "type": "search", - "orderby": 7, - "page": 0, - "category": 200 + 'q': '', + 'type': 'search', + 'orderby': 7, + 'page': 0, + 'category': 200 } # Units - units = ["B", "KIB", "MIB", "GIB", "TIB", "PIB"] + units = ['B', 'KIB', 'MIB', 'GIB', 'TIB', 'PIB'] def process_column_header(th): - result = "" + result = '' if th.a: result = th.a.get_text(strip=True) if not result: @@ -86,81 +87,83 @@ def process_column_header(th): for mode in search_strings: items = [] - logger.log("Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: - search_url = self.urls["search"] if mode != "RSS" else self.urls["rss"] + search_url = self.urls['search'] if mode != 'RSS' else self.urls['rss'] if 
self.custom_url: if not validators.url(self.custom_url): - logger.log("Invalid custom url: {}".format(self.custom_url), logger.WARNING) + logger.log('Invalid custom url: {0}'.format(self.custom_url), logger.WARNING) return results search_url = urljoin(self.custom_url, search_url.split(self.url)[1]) - if mode != "RSS": - search_params["q"] = search_string - logger.log("Search string: {search}".format - (search=search_string.decode("utf-8")), logger.DEBUG) + if mode != 'RSS': + search_params['q'] = search_string + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) - data = self.get_url(search_url, params=search_params, returns="text") + data = self.get_url(search_url, params=search_params, returns='text') else: - data = self.get_url(search_url, returns="text") + data = self.get_url(search_url, returns='text') if not data: - logger.log("URL did not return data, maybe try a custom url, or a different one", logger.DEBUG) + logger.log('URL did not return data, maybe try a custom url, or a different one', logger.DEBUG) continue - with BS4Parser(data, "html5lib") as html: - torrent_table = html.find("table", id="searchResult") - torrent_rows = torrent_table("tr") if torrent_table else [] + with BS4Parser(data, 'html5lib') as html: + torrent_table = html.find('table', id='searchResult') + torrent_rows = torrent_table('tr') if torrent_table else [] # Continue only if at least one Release is found if len(torrent_rows) < 2: - logger.log("Data returned from provider does not contain any torrents", logger.DEBUG) + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue - labels = [process_column_header(label) for label in torrent_rows[0]("th")] + labels = [process_column_header(label) for label in torrent_rows[0]('th')] # Skip column headers for result in torrent_rows[1:]: try: - cells = result("td") + cells = result('td') - title = result.find(class_="detName").get_text(strip=True) - download_url = result.find(title="Download this torrent using magnet")["href"] + self._custom_trackers - if "magnet:?" not in download_url: - logger.log("Invalid ThePirateBay proxy please try another one", logger.DEBUG) + title = result.find(class_='detName').get_text(strip=True) + download_url = result.find(title='Download this torrent using magnet')['href'] + self._custom_trackers + if 'magnet:?' not in download_url: + logger.log('Invalid ThePirateBay proxy please try another one', logger.DEBUG) continue if not all([title, download_url]): continue - seeders = try_int(cells[labels.index("SE")].get_text(strip=True)) - leechers = try_int(cells[labels.index("LE")].get_text(strip=True)) + seeders = try_int(cells[labels.index('SE')].get_text(strip=True)) + leechers = try_int(cells[labels.index('LE')].get_text(strip=True)) # Filter unseeded torrent if seeders < min(self.minseed, 1): - if mode != "RSS": - logger.log("Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format + if mode != 'RSS': + logger.log("Discarding torrent because it doesn't meet the minimum seeders: {0}. 
Seeders: {1}".format (title, seeders), logger.DEBUG) continue # Accept Torrent only from Good People for every Episode Search - if self.confirmed and not result.find(alt=re.compile(r"VIP|Trusted")): - if mode != "RSS": - logger.log("Found result {} but that doesn't seem like a trusted result so I'm ignoring it".format(title), logger.DEBUG) + if self.confirmed and not result.find(alt=re.compile(r'VIP|Trusted')): + if mode != 'RSS': + logger.log("Found result {0} but that doesn't seem like a trusted result so I'm ignoring it".format(title), logger.DEBUG) continue # Convert size after all possible skip scenarios - torrent_size = cells[labels.index("Name")].find(class_="detDesc").get_text(strip=True).split(", ")[1] - torrent_size = re.sub(r"Size ([\d.]+).+([KMGT]iB)", r"\1 \2", torrent_size) + torrent_size = cells[labels.index('Name')].find(class_='detDesc').get_text(strip=True).split(', ')[1] + torrent_size = re.sub(r'Size ([\d.]+).+([KMGT]iB)', r'\1 \2', torrent_size) size = convert_size(torrent_size, units=units) or -1 item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} - if mode != "RSS": - logger.log("Found result: {0} with {1} seeders and {2} leechers".format + if mode != 'RSS': + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format (title, seeders, leechers), logger.DEBUG) items.append(item) - except StandardError: + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) continue results += items diff --git a/sickbeard/providers/tntvillage.py b/sickbeard/providers/tntvillage.py index 8ccb4de307..776277f8ad 100644 --- a/sickbeard/providers/tntvillage.py +++ b/sickbeard/providers/tntvillage.py @@ -18,6 +18,8 @@ # You should have received a copy of the GNU General Public License # along with SickRage. If not, see . 
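
The ThePirateBay hunk above pulls the size out of the `detDesc` cell before `convert_size` sees it: the cell text is split on `', '` and the `Size 1.2 GiB` fragment is rewritten to `1.2 GiB` so the `[KMGT]iB` units table applies. A standalone sketch of that normalisation; the sample cell text is invented, and the units list mirrors the one defined in the provider:

    import re

    units = ['B', 'KIB', 'MIB', 'GIB', 'TIB', 'PIB']

    # Hypothetical detDesc text as the site renders it
    desc = 'Uploaded 06-05 2016, Size 1.2 GiB, ULed by someone'
    torrent_size = desc.split(', ')[1]  # 'Size 1.2 GiB'
    torrent_size = re.sub(r'Size ([\d.]+).+([KMGT]iB)', r'\1 \2', torrent_size)
    print(torrent_size)  # '1.2 GiB', ready for convert_size(torrent_size, units=units)
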
+from __future__ import unicode_literals + import re import traceback @@ -28,7 +30,7 @@ from sickbeard.common import Quality from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException -from sickrage.helper.common import convert_size, try_int +from sickrage.helper.common import convert_size from sickrage.helper.exceptions import AuthException from sickrage.providers.torrent.TorrentProvider import TorrentProvider @@ -63,7 +65,7 @@ class TNTVillageProvider(TorrentProvider): # pylint: disable=too-many-instance- def __init__(self): - TorrentProvider.__init__(self, "TNTVillage") + TorrentProvider.__init__(self, 'TNTVillage') self._uid = None self._hash = None @@ -109,14 +111,14 @@ def __init__(self): self.proper_strings = ['PROPER', 'REPACK'] - self.categories = "cat=29" + self.categories = 'cat=29' self.cache = tvcache.TVCache(self, min_time=30) # only poll TNTVillage every 30 minutes max def _check_auth(self): if not self.username or not self.password: - raise AuthException("Your authentication credentials for " + self.name + " are missing, check your config.") + raise AuthException('Your authentication credentials for ' + self.name + ' are missing, check your config.') return True @@ -133,11 +135,11 @@ def login(self): response = self.get_url(self.urls['login'], post_data=login_params, returns='text') if not response: - logger.log(u"Unable to connect to provider", logger.WARNING) + logger.log('Unable to connect to provider', logger.WARNING) return False if re.search('Sono stati riscontrati i seguenti errori', response) or re.search('Connettiti', response): - logger.log(u"Invalid username or password. Check your settings", logger.WARNING) + logger.log('Invalid username or password. Check your settings', logger.WARNING) return False return True @@ -180,27 +182,27 @@ def _episodeQuality(torrent_rows): # pylint: disable=too-many-return-statements if img_all: for img_type in img_all: try: - file_quality = file_quality + " " + img_type['src'].replace("style_images/mkportal-636/", "").replace(".gif", "").replace(".png", "") + file_quality = file_quality + ' ' + img_type['src'].replace('style_images/mkportal-636/', '').replace('.gif', '').replace('.png', '') except Exception: - logger.log(u"Failed parsing quality. Traceback: %s" % traceback.format_exc(), logger.ERROR) + logger.log('Failed parsing quality. 
Traceback: %s' % traceback.format_exc(), logger.ERROR) else: file_quality = (torrent_rows('td'))[1].get_text() - logger.log(u"Episode quality: %s" % file_quality, logger.DEBUG) + logger.log('Episode quality: %s' % file_quality, logger.DEBUG) def checkName(options, func): return func([re.search(option, file_quality, re.I) for option in options]) - dvdOptions = checkName(["dvd", "dvdrip", "dvdmux", "DVD9", "DVD5"], any) - bluRayOptions = checkName(["BD", "BDmux", "BDrip", "BRrip", "Bluray"], any) - sdOptions = checkName(["h264", "divx", "XviD", "tv", "TVrip", "SATRip", "DTTrip", "Mpeg2"], any) - hdOptions = checkName(["720p"], any) - fullHD = checkName(["1080p", "fullHD"], any) + dvdOptions = checkName(['dvd', 'dvdrip', 'dvdmux', 'DVD9', 'DVD5'], any) + bluRayOptions = checkName(['BD', 'BDmux', 'BDrip', 'BRrip', 'Bluray'], any) + sdOptions = checkName(['h264', 'divx', 'XviD', 'tv', 'TVrip', 'SATRip', 'DTTrip', 'Mpeg2'], any) + hdOptions = checkName(['720p'], any) + fullHD = checkName(['1080p', 'fullHD'], any) if img_all: file_quality = (torrent_rows('td'))[1].get_text() - webdl = checkName(["webdl", "webmux", "webrip", "dl-webmux", "web-dlmux", "webdl-mux", "web-dl", "webdlmux", "dlmux"], any) + webdl = checkName(['webdl', 'webmux', 'webrip', 'dl-webmux', 'web-dlmux', 'webdl-mux', 'web-dl', 'webdlmux', 'dlmux'], any) if sdOptions and not dvdOptions and not fullHD and not hdOptions: return Quality.SDTV @@ -234,13 +236,13 @@ def _is_italian(self, torrent_rows): else: continue - if re.search("ita", name.split(sub)[0], re.I): - logger.log(u"Found Italian release: " + name, logger.DEBUG) + if re.search('ita', name.split(sub)[0], re.I): + logger.log('Found Italian release: ' + name, logger.DEBUG) italian = True break - if not subFound and re.search("ita", name, re.I): - logger.log(u"Found Italian release: " + name, logger.DEBUG) + if not subFound and re.search('ita', name, re.I): + logger.log('Found Italian release: ' + name, logger.DEBUG) italian = True return italian @@ -253,8 +255,8 @@ def _is_english(torrent_rows): return False english = False - if re.search("eng", name, re.I): - logger.log(u"Found English release: " + name, logger.DEBUG) + if re.search('eng', name, re.I): + logger.log('Found English release: ' + name, logger.DEBUG) english = True return english @@ -265,11 +267,11 @@ def _is_season_pack(name): try: parse_result = NameParser(tryIndexers=True).parse(name) except (InvalidNameException, InvalidShowException) as error: - logger.log(u"{}".format(error), logger.DEBUG) + logger.log('{0}'.format(error), logger.DEBUG) return False main_db_con = db.DBConnection() - sql_selection = "select count(*) as count from tv_episodes where showid = ? and season = ?" + sql_selection = 'select count(*) as count from tv_episodes where showid = ? and season = ?' 
episodes = main_db_con.select(sql_selection, [parse_result.show.indexerid, parse_result.season_number]) if int(episodes[0]['count']) == len(parse_result.episode_numbers): return True @@ -279,11 +281,11 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many if not self.login(): return results - self.categories = "cat=" + str(self.cat) + self.categories = 'cat=' + str(self.cat) for mode in search_params: items = [] - logger.log(u"Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_params[mode]: if mode == 'RSS': @@ -308,12 +310,12 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many search_url = self.urls['search_page'].format(z, self.categories) if mode != 'RSS': - logger.log(u"Search string: {}".format - (search_string.decode("utf-8")), logger.DEBUG) + logger.log('Search string: {0}'.format + (search_string), logger.DEBUG) data = self.get_url(search_url, returns='text') if not data: - logger.log(u"No data returned from provider", logger.DEBUG) + logger.log('No data returned from provider', logger.DEBUG) continue try: @@ -323,7 +325,7 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many # Continue only if one Release is found if len(torrent_rows) < 3: - logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG) + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) last_page = 1 continue @@ -340,60 +342,63 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many leechers = int(leechers.strip('[]')) seeders = result('td')[3]('td')[2].text seeders = int(seeders.strip('[]')) - torrent_size = result('td')[3]('td')[3].text.strip('[]') + " GB" + torrent_size = result('td')[3]('td')[3].text.strip('[]') + ' GB' size = convert_size(torrent_size) or -1 - except (AttributeError, TypeError): - continue - - filename_qt = self._reverseQuality(self._episodeQuality(result)) - for text in self.hdtext: - title1 = title - title = title.replace(text, filename_qt) - if title != title1: - break - - if Quality.nameQuality(title) == Quality.UNKNOWN: - title += filename_qt - - if not self._is_italian(result) and not self.subtitle: - logger.log(u"Torrent is subtitled, skipping: %s " % title, logger.DEBUG) - continue - - if self.engrelease and not self._is_english(result): - logger.log(u"Torrent isnt english audio/subtitled , skipping: %s " % title, logger.DEBUG) - continue - - search_show = re.split(r'([Ss][\d{1,2}]+)', search_string)[0] - show_title = search_show - rindex = re.search(r'([Ss][\d{1,2}]+)', title) - if rindex: - show_title = title[:rindex.start()] - ep_params = title[rindex.start():] - if show_title.lower() != search_show.lower() and search_show.lower() in show_title.lower(): - new_title = search_show + ep_params - title = new_title - - if not all([title, download_url]): - continue - - if self._is_season_pack(title): - title = re.sub(r'([Ee][\d{1,2}\-?]+)', '', title) - # Filter unseeded torrent - if seeders < min(self.minseed, 1): + filename_qt = self._reverseQuality(self._episodeQuality(result)) + for text in self.hdtext: + title1 = title + title = title.replace(text, filename_qt) + if title != title1: + break + + if Quality.nameQuality(title) == Quality.UNKNOWN: + title += filename_qt + + if not self._is_italian(result) and not self.subtitle: + logger.log('Torrent is subtitled, skipping: %s ' % title, logger.DEBUG) + continue + + if 
self.engrelease and not self._is_english(result):
+                                logger.log("Torrent isn't English audio/subtitled, skipping: %s" % title, logger.DEBUG)
+                                continue
+
+                            search_show = re.split(r'([Ss][\d{1,2}]+)', search_string)[0]
+                            show_title = search_show
+                            rindex = re.search(r'([Ss][\d{1,2}]+)', title)
+                            if rindex:
+                                show_title = title[:rindex.start()]
+                                ep_params = title[rindex.start():]
+                                if show_title.lower() != search_show.lower() and search_show.lower() in show_title.lower():
+                                    new_title = search_show + ep_params
+                                    title = new_title
+
+                            if not all([title, download_url]):
+                                continue
+
+                            if self._is_season_pack(title):
+                                title = re.sub(r'([Ee][\d{1,2}\-?]+)', '', title)
+
+                            # Filter unseeded torrent
+                            if seeders < min(self.minseed, 1):
+                                if mode != 'RSS':
+                                    logger.log("Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format
+                                               (title, seeders), logger.DEBUG)
+                                continue
+
+                            item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None}
                             if mode != 'RSS':
-                                logger.log(u"Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format
-                                           (title, seeders), logger.DEBUG)
-                                continue
-
-                            item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None}
-                            if mode != 'RSS':
-                                logger.log(u"Found result: %s with %s seeders and %s leechers" % (title, seeders, leechers), logger.DEBUG)
+                                logger.log('Found result: {0} with {1} seeders and {2} leechers'.format(title, seeders, leechers), logger.DEBUG)
-                            items.append(item)
+                            items.append(item)
+                        except (AttributeError, TypeError, KeyError, ValueError, IndexError):
+                            logger.log('Failed parsing provider. Traceback: {0!r}'.format
+                                       (traceback.format_exc()), logger.ERROR)
+                            continue
                 except Exception:
-                    logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR)
+                    logger.log('Failed parsing provider. Traceback: {0!r}'.format
+                               (traceback.format_exc()), logger.ERROR)

             results += items

diff --git a/sickbeard/providers/tokyotoshokan.py b/sickbeard/providers/tokyotoshokan.py
index a935ddd9a7..9fff0bf289 100644
--- a/sickbeard/providers/tokyotoshokan.py
+++ b/sickbeard/providers/tokyotoshokan.py
@@ -18,7 +18,10 @@
 # You should have received a copy of the GNU General Public License
 # along with SickRage. If not, see .
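
tntvillage's `_is_season_pack` (the hunk above) decides whether to strip the episode token from a title by parsing the release name and comparing the episode numbers it yields against how many episodes the `tv_episodes` table stores for that season. A simplified, self-contained model of that check, with a plain dict standing in for the database query and hypothetical field names:

    def is_season_pack(parsed, episode_counts):
        """True when the release names every episode stored for its season."""
        stored = episode_counts.get((parsed['show_id'], parsed['season']), 0)
        return stored > 0 and stored == len(parsed['episodes'])

    # A three-episode season matched by a release naming all three episodes:
    print(is_season_pack({'show_id': 1, 'season': 2, 'episodes': [1, 2, 3]},
                         {(1, 2): 3}))  # True
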
+from __future__ import unicode_literals
+
 import re
+import traceback

 from sickbeard import logger, tvcache
 from sickbeard.bs4_parser import BS4Parser
@@ -31,7 +34,7 @@ class TokyoToshokanProvider(TorrentProvider):  # pylint: disable=too-many-instan

     def __init__(self):

-        TorrentProvider.__init__(self, "TokyoToshokan")
+        TorrentProvider.__init__(self, 'TokyoToshokan')

         self.public = True
         self.supports_absolute_numbering = True
@@ -54,15 +57,15 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man

         for mode in search_strings:
             items = []
-            logger.log(u"Search Mode: {}".format(mode), logger.DEBUG)
+            logger.log('Search Mode: {0}'.format(mode), logger.DEBUG)
             for search_string in search_strings[mode]:
                 if mode != 'RSS':
-                    logger.log(u"Search string: {}".format(search_string.decode("utf-8")),
+                    logger.log('Search string: {0}'.format(search_string),
                                logger.DEBUG)

                 search_params = {
-                    "terms": search_string,
-                    "type": 1,  # get anime types
+                    'terms': search_string,
+                    'type': 1,  # get anime types
                 }

                 data = self.get_url(self.urls['search'], params=search_params, returns='text')
@@ -75,7 +78,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man

                     # Continue only if one Release is found
                     if len(torrent_rows) < 2:
-                        logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
+                        logger.log('Data returned from provider does not contain any torrents', logger.DEBUG)
                         continue

                     a = 1 if len(torrent_rows[0]('td')) < 2 else 0
@@ -93,24 +96,26 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                             sl = re.match(r'S:(?P<seeders>\d+)L:(?P<leechers>\d+)C:(?:\d+)ID:(?:\d+)', stats.replace(' ', ''))
                             seeders = try_int(sl.group('seeders')) if sl else 0
                             leechers = try_int(sl.group('leechers')) if sl else 0
-                        except StandardError:
-                            continue

-                        if not all([title, download_url]):
-                            continue
+                            if not all([title, download_url]):
+                                continue

-                        # Filter unseeded torrent
-                        if seeders < min(self.minseed, 1):
-                            if mode != 'RSS':
-                                logger.log(u"Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format
-                                           (title, seeders), logger.DEBUG)
-                            continue
+                            # Filter unseeded torrent
+                            if seeders < min(self.minseed, 1):
+                                if mode != 'RSS':
+                                    logger.log("Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format
+                                               (title, seeders), logger.DEBUG)
+                                continue

-                        item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None}
-                        if mode != 'RSS':
-                            logger.log(u"Found result: %s with %s seeders and %s leechers" % (title, seeders, leechers), logger.DEBUG)
+                            item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None}
+                            if mode != 'RSS':
+                                logger.log('Found result: %s with %s seeders and %s leechers' % (title, seeders, leechers), logger.DEBUG)

-                        items.append(item)
+                            items.append(item)
+                        except (AttributeError, TypeError, KeyError, ValueError, IndexError):
+                            logger.log('Failed parsing provider. 
Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) + continue results += items diff --git a/sickbeard/providers/torrentbytes.py b/sickbeard/providers/torrentbytes.py index 00d1a51fb3..cdc2eae814 100644 --- a/sickbeard/providers/torrentbytes.py +++ b/sickbeard/providers/torrentbytes.py @@ -21,6 +21,8 @@ from __future__ import unicode_literals import re +import traceback + from requests.compat import urljoin from requests.utils import dict_from_cookiejar @@ -36,7 +38,7 @@ class TorrentBytesProvider(TorrentProvider): # pylint: disable=too-many-instanc def __init__(self): # Provider Init - TorrentProvider.__init__(self, "TorrentBytes") + TorrentProvider.__init__(self, 'TorrentBytes') # Credentials self.username = None @@ -48,14 +50,14 @@ def __init__(self): self.freeleech = False # URLs - self.url = "https://www.torrentbytes.net" + self.url = 'https://www.torrentbytes.net' self.urls = { - "login": urljoin(self.url, "takelogin.php"), - "search": urljoin(self.url, "browse.php") + 'login': urljoin(self.url, 'takelogin.php'), + 'search': urljoin(self.url, 'browse.php') } # Proper Strings - self.proper_strings = ["PROPER", "REPACK"] + self.proper_strings = ['PROPER', 'REPACK'] # Cache self.cache = tvcache.TVCache(self) @@ -64,17 +66,17 @@ def login(self): if any(dict_from_cookiejar(self.session.cookies).values()): return True - login_params = {"username": self.username, - "password": self.password, - "login": "Log in!"} + login_params = {'username': self.username, + 'password': self.password, + 'login': 'Log in!'} - response = self.get_url(self.urls["login"], post_data=login_params, returns="text") + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') if not response: - logger.log("Unable to connect to provider", logger.WARNING) + logger.log('Unable to connect to provider', logger.WARNING) return False - if re.search("Username or password incorrect", response): - logger.log("Invalid username or password. Check your settings", logger.WARNING) + if re.search('Username or password incorrect', response): + logger.log('Invalid username or password. 
Check your settings', logger.WARNING) return False return True @@ -85,73 +87,74 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results search_params = { - "c41": 1, "c33": 1, "c38": 1, "c32": 1, "c37": 1 + 'c41': 1, 'c33': 1, 'c38': 1, 'c32': 1, 'c37': 1 } for mode in search_strings: items = [] - logger.log("Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: - if mode != "RSS": - logger.log("Search string: {}".format(search_string.decode("utf-8")), - logger.DEBUG) + if mode != 'RSS': + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) - search_params["search"] = search_string - data = self.get_url(self.urls["search"], params=search_params, returns="text") + search_params['search'] = search_string + data = self.get_url(self.urls['search'], params=search_params, returns='text') if not data: - logger.log("No data returned from provider", logger.DEBUG) + logger.log('No data returned from provider', logger.DEBUG) continue - with BS4Parser(data, "html5lib") as html: - torrent_table = html.find("table", border="1") - torrent_rows = torrent_table("tr") if torrent_table else [] + with BS4Parser(data, 'html5lib') as html: + torrent_table = html.find('table', border='1') + torrent_rows = torrent_table('tr') if torrent_table else [] # Continue only if at least one Release is found if len(torrent_rows) < 2: - logger.log("Data returned from provider does not contain any torrents", logger.DEBUG) + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue # "Type", "Name", Files", "Comm.", "Added", "TTL", "Size", "Snatched", "Seeders", "Leechers" - labels = [label.get_text(strip=True) for label in torrent_rows[0]("td")] + labels = [label.get_text(strip=True) for label in torrent_rows[0]('td')] for result in torrent_rows[1:]: try: - cells = result("td") + cells = result('td') - download_url = urljoin(self.url, cells[labels.index("Name")].find("a", href=re.compile(r"download.php\?id="))["href"]) - title_element = cells[labels.index("Name")].find("a", href=re.compile(r"details.php\?id=")) - title = title_element.get("title", "") or title_element.get_text(strip=True) + download_url = urljoin(self.url, cells[labels.index('Name')].find('a', href=re.compile(r'download.php\?id='))['href']) + title_element = cells[labels.index('Name')].find('a', href=re.compile(r'details.php\?id=')) + title = title_element.get('title', '') or title_element.get_text(strip=True) if not all([title, download_url]): continue if self.freeleech: # Free leech torrents are marked with green [F L] in the title (i.e. [F L]) - freeleech = cells[labels.index("Name")].find("font", color="green") - if not freeleech or freeleech.get_text(strip=True) != "[F\xa0L]": + freeleech = cells[labels.index('Name')].find('font', color='green') + if not freeleech or freeleech.get_text(strip=True) != '[F\xa0L]': continue - seeders = try_int(cells[labels.index("Seeders")].get_text(strip=True)) - leechers = try_int(cells[labels.index("Leechers")].get_text(strip=True)) + seeders = try_int(cells[labels.index('Seeders')].get_text(strip=True)) + leechers = try_int(cells[labels.index('Leechers')].get_text(strip=True)) # Filter unseeded torrent if seeders < min(self.minseed, 1): - if mode != "RSS": + if mode != 'RSS': logger.log("Discarding torrent because it doesn't meet the minimum seeders: {0}. 
Seeders: {1})".format (title, seeders), logger.DEBUG) continue # Need size for failed downloads handling - torrent_size = cells[labels.index("Size")].get_text(strip=True) + torrent_size = cells[labels.index('Size')].get_text(strip=True) size = convert_size(torrent_size) or -1 item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} - if mode != "RSS": - logger.log("Found result: {0} with {1} seeders and {2} leechers".format + if mode != 'RSS': + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format (title, seeders, leechers), logger.DEBUG) items.append(item) - except (AttributeError, TypeError): + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) continue results += items diff --git a/sickbeard/providers/torrentday.py b/sickbeard/providers/torrentday.py index a40958def1..ff04501530 100644 --- a/sickbeard/providers/torrentday.py +++ b/sickbeard/providers/torrentday.py @@ -18,7 +18,10 @@ # You should have received a copy of the GNU General Public License # along with SickRage. If not, see . +from __future__ import unicode_literals + import re +import traceback from requests.compat import urljoin from requests.exceptions import RequestException from requests.utils import add_dict_to_cookiejar, dict_from_cookiejar @@ -34,7 +37,7 @@ class TorrentDayProvider(TorrentProvider): # pylint: disable=too-many-instance- def __init__(self): # Provider Init - TorrentProvider.__init__(self, "TorrentDay") + TorrentProvider.__init__(self, 'TorrentDay') # Credentials self.username = None @@ -79,11 +82,11 @@ def login(self): response = self.get_url(self.urls['login'], post_data=login_params, returns='text') if not response: - logger.log(u"Unable to connect to provider", logger.WARNING) + logger.log('Unable to connect to provider', logger.WARNING) return False if re.search('You tried too often', response): - logger.log(u"Too many login access attempts", logger.WARNING) + logger.log('Too many login access attempts', logger.WARNING) return False try: @@ -96,7 +99,7 @@ def login(self): except Exception: pass - logger.log(u"Unable to obtain cookie", logger.WARNING) + logger.log('Unable to obtain cookie', logger.WARNING) return False def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many-locals @@ -106,11 +109,11 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many for mode in search_params: items = [] - logger.log(u"Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_params[mode]: if mode != 'RSS': - logger.log(u"Search string: {}".format(search_string.decode("utf-8")), + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) search_string = '+'.join(search_string.split()) @@ -131,40 +134,45 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many try: jdata = response.json() except ValueError: # also catches JSONDecodeError if simplejson is installed - logger.log(u"Data returned from provider is not json", logger.ERROR) + logger.log('Data returned from provider is not json', logger.ERROR) continue torrents = jdata.get('Fs', [dict()])[0].get('Cn', {}).get('torrents', []) if not torrents: - logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG) + logger.log('Data returned from provider does not 
contain any torrents', logger.DEBUG) continue for torrent in torrents: - title = re.sub(r"\[.*\=.*\].*\[/.*\]", "", torrent['name']) if torrent['name'] else None - download_url = urljoin(self.urls['download'], '{}/{}'.format(torrent['id'], torrent['fname'])) if torrent['id'] and torrent['fname'] else None - - if not all([title, download_url]): - continue + try: + title = re.sub(r'\[.*\=.*\].*\[/.*\]', '', torrent['name']) if torrent['name'] else None + download_url = urljoin(self.urls['download'], '{}/{}'.format(torrent['id'], torrent['fname'])) if torrent['id'] and torrent['fname'] else None - seeders = int(torrent['seed']) if torrent['seed'] else 1 - leechers = int(torrent['leech']) if torrent['leech'] else 0 + if not all([title, download_url]): + continue - # Filter unseeded torrent - if seeders < min(self.minseed, 1): - if mode != 'RSS': - logger.log(u"Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format(title, seeders), logger.DEBUG) - continue + seeders = int(torrent['seed']) if torrent['seed'] else 1 + leechers = int(torrent['leech']) if torrent['leech'] else 0 - torrent_size = torrent['size'] - size = convert_size(torrent_size) or -1 + # Filter unseeded torrent + if seeders < min(self.minseed, 1): + if mode != 'RSS': + logger.log("Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format(title, seeders), logger.DEBUG) + continue - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} + torrent_size = torrent['size'] + size = convert_size(torrent_size) or -1 - if mode != 'RSS': - logger.log(u"Found result: {0} with {1} seeders and {2} leechers".format - (title, seeders, leechers), logger.DEBUG) + item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} - items.append(item) + if mode != 'RSS': + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) + + items.append(item) + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. 
Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) + continue results += items diff --git a/sickbeard/providers/torrentleech.py b/sickbeard/providers/torrentleech.py index 496cb68883..ba9c9c3aa9 100644 --- a/sickbeard/providers/torrentleech.py +++ b/sickbeard/providers/torrentleech.py @@ -21,6 +21,7 @@ from __future__ import unicode_literals import re +import traceback from requests.compat import urljoin from requests.utils import dict_from_cookiejar @@ -36,7 +37,7 @@ class TorrentLeechProvider(TorrentProvider): # pylint: disable=too-many-instanc def __init__(self): # Provider Init - TorrentProvider.__init__(self, "TorrentLeech") + TorrentProvider.__init__(self, 'TorrentLeech') # Credentials self.username = None @@ -47,14 +48,14 @@ def __init__(self): self.minleech = None # URLs - self.url = "https://torrentleech.org" + self.url = 'https://torrentleech.org' self.urls = { - "login": urljoin(self.url, "user/account/login/"), - "search": urljoin(self.url, "torrents/browse"), + 'login': urljoin(self.url, 'user/account/login/'), + 'search': urljoin(self.url, 'torrents/browse'), } # Proper Strings - self.proper_strings = ["PROPER", "REPACK"] + self.proper_strings = ['PROPER', 'REPACK'] # Cache self.cache = tvcache.TVCache(self) @@ -64,19 +65,19 @@ def login(self): return True login_params = { - "username": self.username.encode("utf-8"), - "password": self.password.encode("utf-8"), - "login": "submit", - "remember_me": "on", + 'username': self.username, + 'password': self.password, + 'login': 'submit', + 'remember_me': 'on', } - response = self.get_url(self.urls["login"], post_data=login_params, returns="text") + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') if not response: - logger.log("Unable to connect to provider", logger.WARNING) + logger.log('Unable to connect to provider', logger.WARNING) return False - if re.search("Invalid Username/password", response) or re.search("Login :: TorrentLeech.org", response): - logger.log("Invalid username or password. Check your settings", logger.WARNING) + if re.search('Invalid Username/password', response) or re.search('Login :: TorrentLeech.org', response): + logger.log('Invalid username or password. 
Check your settings', logger.WARNING) return False return True @@ -90,83 +91,85 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # 2,26,27,32,7,34,35 # Units - units = ["B", "KB", "MB", "GB", "TB", "PB"] + units = ['B', 'KB', 'MB', 'GB', 'TB', 'PB'] def process_column_header(td): - result = "" + result = '' if td.a: - result = td.a.get("title") + result = td.a.get('title') if not result: result = td.get_text(strip=True) return result for mode in search_strings: items = [] - logger.log("Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: - if mode != "RSS": - logger.log("Search string: {}".format(search_string.decode("utf-8")), + if mode != 'RSS': + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) - categories = ["2", "7", "35"] - categories += ["26", "32"] if mode == "Episode" else ["27"] + categories = ['2', '7', '35'] + categories += ['26', '32'] if mode == 'Episode' else ['27'] if self.show and self.show.is_anime: - categories += ["34"] + categories += ['34'] else: - categories = ["2", "26", "27", "32", "7", "34", "35"] + categories = ['2', '26', '27', '32', '7', '34', '35'] search_params = { - "categories": ",".join(categories), - "query": search_string + 'categories': ','.join(categories), + 'query': search_string } - data = self.get_url(self.urls["search"], params=search_params, returns="text") + data = self.get_url(self.urls['search'], params=search_params, returns='text') if not data: - logger.log("No data returned from provider", logger.DEBUG) + logger.log('No data returned from provider', logger.DEBUG) continue - with BS4Parser(data, "html5lib") as html: - torrent_table = html.find("table", id="torrenttable") - torrent_rows = torrent_table("tr") if torrent_table else [] + with BS4Parser(data, 'html5lib') as html: + torrent_table = html.find('table', id='torrenttable') + torrent_rows = torrent_table('tr') if torrent_table else [] # Continue only if at least one Release is found if len(torrent_rows) < 2: - logger.log("Data returned from provider does not contain any torrents", logger.DEBUG) + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue - labels = [process_column_header(label) for label in torrent_rows[0]("th")] + labels = [process_column_header(label) for label in torrent_rows[0]('th')] # Skip column headers for result in torrent_rows[1:]: try: - title = result.find("td", class_="name").find("a").get_text(strip=True) - download_url = urljoin(self.url, result.find("td", class_="quickdownload").find("a")["href"]) + title = result.find('td', class_='name').find('a').get_text(strip=True) + download_url = urljoin(self.url, result.find('td', class_='quickdownload').find('a')['href']) if not all([title, download_url]): continue - seeders = try_int(result.find("td", class_="seeders").get_text(strip=True)) - leechers = try_int(result.find("td", class_="leechers").get_text(strip=True)) + seeders = try_int(result.find('td', class_='seeders').get_text(strip=True)) + leechers = try_int(result.find('td', class_='leechers').get_text(strip=True)) # Filter unseeded torrent if seeders < min(self.minseed, 1): - if mode != "RSS": + if mode != 'RSS': logger.log("Discarding torrent because it doesn't meet the" " minimum seeders: {0}. 
Seeders: {1})".format (title, seeders), logger.DEBUG) continue - torrent_size = result("td")[labels.index("Size")].get_text() + torrent_size = result('td')[labels.index('Size')].get_text() size = convert_size(torrent_size, units=units) or -1 item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} - if mode != "RSS": - logger.log("Found result: {0} with {1} seeders and {2} leechers".format + if mode != 'RSS': + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format (title, seeders, leechers), logger.DEBUG) items.append(item) - except StandardError: + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) continue results += items diff --git a/sickbeard/providers/torrentz.py b/sickbeard/providers/torrentz.py index c9cad22098..eca4cbcebb 100644 --- a/sickbeard/providers/torrentz.py +++ b/sickbeard/providers/torrentz.py @@ -16,6 +16,8 @@ # You should have received a copy of the GNU General Public License # along with SickRage. If not, see . +from __future__ import unicode_literals + import re import traceback @@ -66,25 +68,25 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] - logger.log(u"Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: search_url = self.urls['verified'] if self.confirmed else self.urls['feed'] if mode != 'RSS': - logger.log(u"Search string: {}".format - (search_string.decode("utf-8")), logger.DEBUG) + logger.log('Search string: {0}'.format + (search_string), logger.DEBUG) data = self.get_url(search_url, params={'q': search_string}, returns='text') if not data: - logger.log(u"No data returned from provider", logger.DEBUG) + logger.log("No data returned from provider", logger.DEBUG) continue if not data.startswith(". +from __future__ import unicode_literals + import re import traceback from requests.utils import dict_from_cookiejar @@ -25,7 +27,7 @@ from sickbeard import logger, tvcache from sickbeard.bs4_parser import BS4Parser -from sickrage.helper.common import convert_size, try_int +from sickrage.helper.common import try_int from sickrage.helper.exceptions import AuthException from sickrage.providers.torrent.TorrentProvider import TorrentProvider @@ -35,7 +37,7 @@ class TransmitTheNetProvider(TorrentProvider): # pylint: disable=too-many-insta def __init__(self): # Provider Init - TorrentProvider.__init__(self, "TransmitTheNet") + TorrentProvider.__init__(self, 'TransmitTheNet') # Credentials self.username = None @@ -61,7 +63,7 @@ def __init__(self): def _check_auth(self): if not self.username or not self.password: - raise AuthException("Your authentication credentials for " + self.name + " are missing, check your config.") + raise AuthException('Your authentication credentials for {0} are missing, check your config.'.format(self.name)) return True @@ -78,11 +80,11 @@ def login(self): response = self.get_url(self.urls['login'], post_data=login_params, returns='text') if not response: - logger.log(u"Unable to connect to provider", logger.WARNING) + logger.log('Unable to connect to provider', logger.WARNING) return False if re.search('Username Incorrect', response) or re.search('Password Incorrect', response): - logger.log(u"Invalid username or password. 
Check your settings", logger.WARNING) + logger.log('Invalid username or password. Check your settings', logger.WARNING) return False return True @@ -97,14 +99,14 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for search_string in search_strings[mode]: if mode != 'RSS': - logger.log(u"Search string: {}".format(search_string.decode("utf-8")), + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) search_params = { 'searchtext': search_string, 'filter_freeleech': (0, 1)[self.freeleech is True], 'order_by': ('seeders', 'time')[mode == 'RSS'], - "order_way": "desc" + 'order_way': 'desc' } if not search_string: @@ -112,67 +114,73 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man data = self.get_url(self.urls['search'], params=search_params, returns='text') if not data: - logger.log(u"No data returned from provider", logger.DEBUG) + logger.log('No data returned from provider', logger.DEBUG) continue try: with BS4Parser(data, 'html5lib') as html: torrent_table = html.find('table', {'id': 'torrent_table'}) if not torrent_table: - logger.log(u"Data returned from %s does not contain any torrents" % self.name, logger.DEBUG) + logger.log('Data returned from %s does not contain any torrents' % self.name, logger.DEBUG) continue torrent_rows = torrent_table('tr', {'class': 'torrent'}) # Continue only if one Release is found if not torrent_rows: - logger.log(u"Data returned from %s does not contain any torrents" % self.name, logger.DEBUG) + logger.log('Data returned from %s does not contain any torrents' % self.name, logger.DEBUG) continue for torrent_row in torrent_rows: - freeleech = torrent_row.find('img', alt="Freeleech") is not None - if self.freeleech and not freeleech: - continue - - download_item = torrent_row.find('a', {'title': [ - 'Download Torrent', # Download link - 'Previously Grabbed Torrent File', # Already Downloaded - 'Currently Seeding Torrent', # Seeding - 'Currently Leeching Torrent', # Leeching - ]}) - - if not download_item: - continue - - download_url = urljoin(self.url, download_item['href']) - - temp_anchor = torrent_row.find('a', {"data-src": True}) - title = temp_anchor['data-src'].rsplit('.', 1)[0] - if not all([title, download_url]): - continue - - cells = torrent_row('td') - seeders = try_int(cells[8].text.strip()) - leechers = try_int(cells[9].text.strip()) - - # Filter unseeded torrent - if seeders < min(self.minseed, 1): + try: + freeleech = torrent_row.find('img', alt='Freeleech') is not None + if self.freeleech and not freeleech: + continue + + download_item = torrent_row.find('a', {'title': [ + 'Download Torrent', # Download link + 'Previously Grabbed Torrent File', # Already Downloaded + 'Currently Seeding Torrent', # Seeding + 'Currently Leeching Torrent', # Leeching + ]}) + + if not download_item: + continue + + download_url = urljoin(self.url, download_item['href']) + + temp_anchor = torrent_row.find('a', {'data-src': True}) + title = temp_anchor['data-src'].rsplit('.', 1)[0] + if not all([title, download_url]): + continue + + cells = torrent_row('td') + seeders = try_int(cells[8].text.strip()) + leechers = try_int(cells[9].text.strip()) + + # Filter unseeded torrent + if seeders < min(self.minseed, 1): + if mode != 'RSS': + logger.log("Discarding torrent because it doesn't meet the" + " minimum seeders: {0}. 
Seeders: {1})".format + (title, seeders), logger.DEBUG) + continue + + size = temp_anchor['data-filesize'] or -1 + + item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} if mode != 'RSS': - logger.log(u"Discarding torrent because it doesn't meet the" - u" minimum seeders: {0}. Seeders: {1})".format - (title, seeders), logger.DEBUG) - continue + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) - size = temp_anchor['data-filesize'] or -1 - - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} - if mode != 'RSS': - logger.log(u"Found result: {0} with {1} seeders and {2} leechers".format - (title, seeders, leechers), logger.DEBUG) - - items.append(item) + items.append(item) + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) + continue except Exception: - logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR) + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) results += items diff --git a/sickbeard/providers/tvchaosuk.py b/sickbeard/providers/tvchaosuk.py index 0e1006e916..b36100dda7 100644 --- a/sickbeard/providers/tvchaosuk.py +++ b/sickbeard/providers/tvchaosuk.py @@ -17,6 +17,7 @@ from __future__ import unicode_literals import re +import traceback from sickbeard import logger, tvcache from sickbeard.bs4_parser import BS4Parser @@ -95,15 +96,15 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] - logger.log('Search Mode: {}'.format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode == 'Season': - search_string = re.sub(ur'(.*)S0?', ur'\1Series ', search_string) + search_string = re.sub(r'(.*)S0?', r'\1Series ', search_string) if mode != 'RSS': - logger.log('Search string: {}'.format(search_string), logger.DEBUG) + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) search_params['keywords'] = search_string data = self.get_url(self.urls['search'], post_data=search_params, returns='text') @@ -144,17 +145,17 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Chop off tracker/channel prefix or we cant parse the result! if mode != 'RSS' and search_params['keywords']: - show_name_first_word = re.search(ur'^[^ .]+', search_params['keywords']).group() + show_name_first_word = re.search(r'^[^ .]+', search_params['keywords']).group() if not title.startswith(show_name_first_word): - title = re.sub(ur'.*(' + show_name_first_word + '.*)', ur'\1', title) + title = re.sub(r'.*(' + show_name_first_word + '.*)', r'\1', title) # Change title from Series to Season, or we can't parse if mode == 'Season': - title = re.sub(ur'(.*)(?i)Series', ur'\1Season', title) + title = re.sub(r'(.*)(?i)Series', r'\1Season', title) # Strip year from the end or we can't parse it! - title = re.sub(ur'(.*)[\. ]?\(\d{4}\)', ur'\1', title) - title = re.sub(ur'\s+', ur' ', title) + title = re.sub(r'(.*)[\. 
]?\(\d{4}\)', r'\1', title) + title = re.sub(r'\s+', r' ', title) torrent_size = torrent('td')[labels.index('Size')].get_text(strip=True) size = convert_size(torrent_size, units=units) or -1 @@ -165,7 +166,9 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man item = {'title': title + '.hdtv.x264', 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} items.append(item) - except StandardError: + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) continue results += items diff --git a/sickbeard/providers/womble.py b/sickbeard/providers/womble.py index 7158e3da80..ec288d456d 100644 --- a/sickbeard/providers/womble.py +++ b/sickbeard/providers/womble.py @@ -31,7 +31,7 @@ class WombleProvider(NZBProvider): def __init__(self): - NZBProvider.__init__(self, 'Womble\'s Index') + NZBProvider.__init__(self, "Womble's Index") self.public = True diff --git a/sickbeard/providers/xthor.py b/sickbeard/providers/xthor.py index e3e7c4a2be..3bc1849961 100644 --- a/sickbeard/providers/xthor.py +++ b/sickbeard/providers/xthor.py @@ -18,7 +18,10 @@ # You should have received a copy of the GNU General Public License # along with SickRage. If not, see . +from __future__ import unicode_literals + import re +import traceback from requests.utils import dict_from_cookiejar @@ -34,7 +37,7 @@ class XthorProvider(TorrentProvider): # pylint: disable=too-many-instance-attri def __init__(self): # Provider Init - TorrentProvider.__init__(self, "Xthor") + TorrentProvider.__init__(self, 'Xthor') # Credentials self.username = None @@ -70,11 +73,11 @@ def login(self): response = self.get_url(self.urls['login'], post_data=login_params, returns='text') if not response: - logger.log(u"Unable to connect to provider", logger.WARNING) + logger.log('Unable to connect to provider', logger.WARNING) return False if not re.search('donate.php', response): - logger.log(u"Invalid username or password. Check your settings", logger.WARNING) + logger.log('Invalid username or password. Check your settings', logger.WARNING) return False return True @@ -117,7 +120,7 @@ def process_column_header(td): for mode in search_strings: items = [] - logger.log(u"Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) # Sorting: 1: Name, 3: Comments, 5: Size, 6: Completed, 7: Seeders, 8: Leechers (4: Time ?) 
search_params['sort'] = (7, 4)[mode == 'RSS'] @@ -125,25 +128,25 @@ def process_column_header(td): for search_string in search_strings[mode]: if mode != 'RSS': - logger.log(u"Search string: {}".format - (search_string.decode("utf-8")), logger.DEBUG) + logger.log('Search string: {0}'.format + (search_string), logger.DEBUG) search_params['search'] = search_string data = self.get_url(self.urls['search'], params=search_params, returns='text') if not data: - logger.log(u"No data returned from provider", logger.DEBUG) + logger.log('No data returned from provider', logger.DEBUG) continue with BS4Parser(data, 'html5lib') as html: - torrent_table = html.find("table", class_="table2 table-bordered2") + torrent_table = html.find('table', class_='table2 table-bordered2') torrent_rows = [] if torrent_table: - torrent_rows = torrent_table("tr") + torrent_rows = torrent_table('tr') # Continue only if at least one Release is found if len(torrent_rows) < 2: - logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG) + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue # Catégorie, Nom du Torrent, (Download), (Bookmark), Com., Taille, Compl�t�, Seeders, Leechers @@ -157,7 +160,7 @@ def process_column_header(td): try: title = cells[labels.index('Nom du Torrent')].get_text(strip=True) - download_url = self.url + '/' + row.find("a", href=re.compile("download.php"))['href'] + download_url = self.url + '/' + row.find('a', href=re.compile('download.php'))['href'] if not all([title, download_url]): continue @@ -167,8 +170,8 @@ def process_column_header(td): # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log(u"Discarding torrent because it doesn't meet the" - u" minimum seeders: {0}. Seeders: {1})".format + logger.log("Discarding torrent because it doesn't meet the" + " minimum seeders: {0}. Seeders: {1})".format (title, seeders), logger.DEBUG) continue @@ -177,11 +180,13 @@ def process_column_header(td): item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} if mode != 'RSS': - logger.log(u"Found result: {0} with {1} seeders and {2} leechers".format + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format (title, seeders, leechers), logger.DEBUG) items.append(item) - except StandardError: + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) continue results += items diff --git a/sickbeard/providers/zooqle.py b/sickbeard/providers/zooqle.py index 775be50790..868ebf7f42 100644 --- a/sickbeard/providers/zooqle.py +++ b/sickbeard/providers/zooqle.py @@ -18,6 +18,7 @@ from __future__ import unicode_literals +import traceback from requests.compat import urljoin from sickbeard import logger, tvcache @@ -141,7 +142,9 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man (title, seeders, leechers), logger.DEBUG) items.append(item) - except StandardError: + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. 
Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) continue # For each search mode sort all the items by seeders if available diff --git a/sickrage/providers/GenericProvider.py b/sickrage/providers/GenericProvider.py index 96f62875e5..1c48a944a5 100644 --- a/sickrage/providers/GenericProvider.py +++ b/sickrage/providers/GenericProvider.py @@ -16,6 +16,7 @@ # # You should have received a copy of the GNU General Public License # along with SickRage. If not, see . +from __future__ import unicode_literals import re import sickbeard @@ -97,21 +98,21 @@ def download_result(self, result): 'Referer': '/'.join(url.split('/')[:3]) + '/' }) - logger.log(u'Downloading a result from %s at %s' % (self.name, url)) + logger.log('Downloading a result from %s at %s' % (self.name, url)) if url.endswith(GenericProvider.TORRENT) and filename.endswith(GenericProvider.NZB): filename = replace_extension(filename, GenericProvider.TORRENT) if download_file(url, filename, session=self.session, headers=self.headers, hooks={'response': self.get_url_hook}): if self._verify_download(filename): - logger.log(u'Saved result to %s' % filename, logger.INFO) + logger.log('Saved result to %s' % filename, logger.INFO) return True - logger.log(u'Could not download %s' % url, logger.WARNING) + logger.log('Could not download %s' % url, logger.WARNING) remove_file_failed(filename) if urls: - logger.log(u'Failed to download any results', logger.WARNING) + logger.log('Failed to download any results', logger.WARNING) return False @@ -198,13 +199,13 @@ def find_search_results(self, show, episodes, search_mode, forced_search=False, if search_mode == 'sponly': if parse_result.episode_numbers: logger.log( - u'This is supposed to be a season pack search but the result %s is not a valid season pack, skipping it' % title, + 'This is supposed to be a season pack search but the result %s is not a valid season pack, skipping it' % title, logger.DEBUG ) add_cache_entry = True elif not [ep for ep in episodes if parse_result.season_number == (ep.season, ep.scene_season)[ep.show.is_scene]]: logger.log( - u'This season result %s is for a season we are not searching for, skipping it' % title, + 'This season result %s is for a season we are not searching for, skipping it' % title, logger.DEBUG ) add_cache_entry = True @@ -219,7 +220,7 @@ def find_search_results(self, show, episodes, search_mode, forced_search=False, ]): logger.log( - u'The result %s doesn\'t seem to match an episode that we are currently trying to snatch, skipping it' % title, + 'The result %s doesn\'t seem to match an episode that we are currently trying to snatch, skipping it' % title, logger.DEBUG) add_cache_entry = True @@ -231,7 +232,7 @@ def find_search_results(self, show, episodes, search_mode, forced_search=False, if not parse_result.is_air_by_date: logger.log( - u'This is supposed to be a date search but the result %s didn\'t parse as one, skipping it' % title, + 'This is supposed to be a date search but the result %s didn\'t parse as one, skipping it' % title, logger.DEBUG) add_cache_entry = True else: @@ -253,7 +254,7 @@ def find_search_results(self, show, episodes, search_mode, forced_search=False, same_day_special = True elif len(sql_results) != 1: logger.log( - u'Tried to look up the date for the episode %s but the database didn\'t give proper results, skipping it' % title, + 'Tried to look up the date for the episode %s but the database didn\'t give proper results, skipping it' % title, logger.WARNING) add_cache_entry = True @@ -265,7 +266,7 @@ def 
find_search_results(self, show, episodes, search_mode, forced_search=False, actual_episodes = parse_result.episode_numbers if add_cache_entry: - logger.log(u'Adding item from search to cache: %s' % title, logger.DEBUG) + logger.log('Adding item from search to cache: %s' % title, logger.DEBUG) # pylint: disable=protected-access # Access to a protected member of a client class ci = self.cache._addCacheEntry(title, url, seeders, leechers, size, pubdate, torrent_hash, parse_result=parse_result) @@ -285,10 +286,10 @@ def find_search_results(self, show, episodes, search_mode, forced_search=False, break if not episode_wanted: - logger.log(u'Ignoring result %s.' % (title), logger.DEBUG) + logger.log('Ignoring result %s.' % (title), logger.DEBUG) continue - logger.log(u'Found result %s at %s' % (title, url), logger.DEBUG) + logger.log('Found result %s at %s' % (title, url), logger.DEBUG) episode_object = [] for current_episode in actual_episodes: @@ -310,13 +311,13 @@ def find_search_results(self, show, episodes, search_mode, forced_search=False, if not episode_object: episode_number = SEASON_RESULT - logger.log(u'Separating full season result to check for later', logger.DEBUG) + logger.log('Separating full season result to check for later', logger.DEBUG) elif len(episode_object) == 1: episode_number = episode_object[0].episode - logger.log(u'Single episode result.', logger.DEBUG) + logger.log('Single episode result.', logger.DEBUG) else: episode_number = MULTI_EP_RESULT - logger.log(u'Separating multi-episode result to check for later - result contains episodes: %s' % str( + logger.log('Separating multi-episode result to check for later - result contains episodes: %s' % str( parse_result.episode_numbers), logger.DEBUG) if episode_number not in results: @@ -349,11 +350,11 @@ def get_result(self, episodes): @staticmethod def get_url_hook(response, **kwargs): - logger.log(u'{} URL: {} [Status: {}]'.format + logger.log('{} URL: {} [Status: {}]'.format (response.request.method, response.request.url, response.status_code), logger.DEBUG) if response.request.method == 'POST': - logger.log(u'With post data: {}'.format(response.request.body), logger.DEBUG) + logger.log('With post data: {}'.format(response.request.body), logger.DEBUG) def get_url(self, url, post_data=None, params=None, timeout=30, **kwargs): # pylint: disable=too-many-arguments, kwargs['hooks'] = {'response': self.get_url_hook} @@ -421,7 +422,7 @@ def _get_episode_search_strings(self, episode, add_string=''): if add_string: episode_string += ' ' + add_string - search_string['Episode'].append(episode_string.encode('utf-8').strip()) + search_string['Episode'].append(episode_string.strip()) return [search_string] @@ -440,7 +441,7 @@ def _get_season_search_strings(self, episode): else: episode_string += 'S%02d' % int(episode.scene_season) - search_string['Season'].append(episode_string.encode('utf-8').strip()) + search_string['Season'].append(episode_string.strip()) return [search_string] @@ -475,7 +476,7 @@ def _get_title_and_url(self, item): # pylint: disable=no-self-use url = item.get('link', '') if title: - title = u'' + title.replace(' ', '.') + title = title.replace(' ', '.') else: title = '' @@ -491,7 +492,7 @@ def _make_url(self, result): return '', '' urls = [] - filename = u'' + filename = '' if result.url.startswith('magnet'): try: @@ -506,12 +507,12 @@ def _make_url(self, result): torrent_hash = b16encode(b32decode(torrent_hash)).upper() if not torrent_hash: - logger.log(u'Unable to extract torrent hash from magnet: %s' % 
ex(result.url), logger.ERROR) + logger.log('Unable to extract torrent hash from magnet: %s' % ex(result.url), logger.ERROR) return urls, filename urls = [x.format(torrent_hash=torrent_hash, torrent_name=torrent_name) for x in self.bt_cache_urls] except Exception: - logger.log(u'Unable to extract torrent hash or name from magnet: %s' % ex(result.url), logger.ERROR) + logger.log('Unable to extract torrent hash or name from magnet: %s' % ex(result.url), logger.ERROR) return urls, filename else: urls = [result.url] From a74e35d4e5d92e9b91f96ea138a5313d5b888826 Mon Sep 17 00:00:00 2001 From: medariox Date: Fri, 10 Jun 2016 14:05:21 +0200 Subject: [PATCH 34/85] Next 11 providers --- sickbeard/providers/alpharatio.py | 2 +- sickbeard/providers/ilovetorrents.py | 80 +++++++++++-------- sickbeard/providers/iptorrents.py | 110 +++++++++++++------------- sickbeard/providers/kat.py | 31 +++++--- sickbeard/providers/morethantv.py | 25 +++--- sickbeard/providers/newpct.py | 28 ++++--- sickbeard/providers/newznab.py | 36 ++++++--- sickbeard/providers/norbits.py | 25 +++--- sickbeard/providers/nyaatorrents.py | 58 ++++++++------ sickbeard/providers/omgwtfnzbs.py | 6 +- sickbeard/providers/pretome.py | 114 ++++++++++++++------------- 11 files changed, 294 insertions(+), 221 deletions(-) diff --git a/sickbeard/providers/alpharatio.py b/sickbeard/providers/alpharatio.py index e75cd9530a..a66ed392a9 100644 --- a/sickbeard/providers/alpharatio.py +++ b/sickbeard/providers/alpharatio.py @@ -156,7 +156,7 @@ def process_column_header(td): if seeders < min(self.minseed, 1): if mode != 'RSS': logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1})'.format + ' minimum seeders: {0}. Seeders: {1}'.format (title, seeders), logger.DEBUG) continue diff --git a/sickbeard/providers/ilovetorrents.py b/sickbeard/providers/ilovetorrents.py index b49b9e88ef..87e9f203b3 100644 --- a/sickbeard/providers/ilovetorrents.py +++ b/sickbeard/providers/ilovetorrents.py @@ -3,25 +3,26 @@ # # URL: https://sickrage.github.io # -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . 
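For reference, the magnet handling in the `_make_url` hunk above normalizes 32-character base32 info hashes to the 40-character hex form before filling the `bt_cache_urls` templates. A minimal standalone sketch of that conversion follows; the function name and error handling here are illustrative, not the project's:

```python
import re
from base64 import b16encode, b32decode

def extract_btih(magnet_url):
    """Return the info hash from a magnet link as 40-char upper-case hex."""
    match = re.search(r'urn:btih:([\w]{32,40})', magnet_url)
    if not match:
        raise ValueError('no btih hash found in magnet link')
    torrent_hash = match.group(1)
    # A 32-character hash is base32-encoded; re-encode it as hex
    if len(torrent_hash) == 32:
        torrent_hash = b16encode(b32decode(torrent_hash.upper())).decode('ascii')
    return torrent_hash.upper()

print(extract_btih('magnet:?xt=urn:btih:' + 'c' * 40))
```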
from __future__ import unicode_literals import re import traceback + from requests.compat import urljoin from requests.utils import dict_from_cookiejar @@ -43,9 +44,8 @@ def __init__(self): self.url = 'https://www.ilovetorrents.me/' self.urls = { 'login': urljoin(self.url, 'takelogin.php'), - 'detail': urljoin(self.url, 'details.php?id=%s'), 'search': urljoin(self.url, 'browse.php'), - 'download': urljoin(self.url, '%s'), + 'download': urljoin(self.url, '{link}'), } # Credentials @@ -94,16 +94,18 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man results = [] if not self.login(): return results + search_params = { 'cat': 0 } + for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format - (search_string), logger.DEBUG) + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) search_params['search'] = search_string @@ -111,52 +113,62 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if not data: continue - try: - with BS4Parser(data, 'html.parser') as html: - torrent_table = html.find('table', class_='koptekst') - torrent_rows = torrent_table('tr') if torrent_table else [] + with BS4Parser(data, 'html.parser') as html: + torrent_table = html.find('table', class_='koptekst') + torrent_rows = torrent_table('tr') if torrent_table else [] - # Continue only if one Release is found - if len(torrent_rows) < 2: - logger.log(u'Data returned from provider does not contain any torrents', logger.DEBUG) - continue + # Continue only if one Release is found + if len(torrent_rows) < 2: + logger.log(u'Data returned from provider does not contain any torrents', logger.DEBUG) + continue - for result in torrent_rows[1:]: + for result in torrent_rows[1:]: + try: cells = result('td') - link = cells[1].find('a') - download_url = self.urls['download'] % cells[2].find('a')['href'] - - try: - title = link.getText() - seeders = int(cells[10].getText().replace(',', '')) - leechers = int(cells[11].getText().replace(',', '')) - torrent_size = cells[8].getText() - size = convert_size(torrent_size) or -1 - except (AttributeError, TypeError): - continue + + download_url = self.urls['download'].format(link=cells[2].find('a')['href']) + title = link.getText() if not all([title, download_url]): continue + seeders = int(cells[10].getText().replace(',', '')) + leechers = int(cells[11].getText().replace(',', '')) + # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log(u"Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. 
Seeders: {1}'.format (title, seeders), logger.DEBUG) continue + # Use same failsafe as Bitsoup if seeders >= 32768 or leechers >= 32768: continue - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} + torrent_size = cells[8].getText() + size = convert_size(torrent_size) or -1 + + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } if mode != 'RSS': - logger.log(u'Found result: {0} with {1} seeders and {2} leechers'.format(title, seeders, leechers), logger.DEBUG) + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) items.append(item) - - except Exception: - logger.log(u'Failed parsing provider. Traceback: {0}'.format(traceback.format_exc()), logger.WARNING) + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) + continue results += items diff --git a/sickbeard/providers/iptorrents.py b/sickbeard/providers/iptorrents.py index 6adf97079b..cb487bf83d 100644 --- a/sickbeard/providers/iptorrents.py +++ b/sickbeard/providers/iptorrents.py @@ -1,22 +1,20 @@ # coding=utf-8 # Author: seedboy # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . 
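One subtlety in the `# Filter unseeded torrent` blocks that recur throughout these hunks: `min(self.minseed, 1)` caps the effective threshold at one seeder, so despite the log wording, the check only ever drops dead (zero-seeder) torrents at search time. A tiny illustration:

```python
# The recurring filter expression, isolated: note that min(minseed, 1)
# never exceeds 1, so a torrent with any seeders at all passes.
def discards(seeders, minseed):
    return seeders < min(minseed, 1)

assert discards(0, 5) is True    # zero seeders are always dropped
assert discards(3, 5) is False   # 3 < min(5, 1) == 1 is False, kept
assert discards(0, 0) is False   # with minseed 0 nothing is filtered
```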
from __future__ import unicode_literals @@ -112,58 +110,64 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many if not data: continue - try: - data = re.sub(r'(?im)', '', data, 0) - with BS4Parser(data, 'html5lib') as html: - if not html: - logger.log('No data returned from provider', logger.DEBUG) - continue - - if html.find(text='No Torrents Found!'): - logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) - continue - - torrent_table = html.find('table', attrs={'class': 'torrents'}) - torrents = torrent_table('tr') if torrent_table else [] + data = re.sub(r'(?im)', '', data, 0) + with BS4Parser(data, 'html5lib') as html: + if not html: + logger.log('No data returned from provider', logger.DEBUG) + continue + + if html.find(text='No Torrents Found!'): + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) + continue + + torrent_table = html.find('table', attrs={'class': 'torrents'}) + torrents = torrent_table('tr') if torrent_table else [] + + # Continue only if one release is found + if len(torrents) < 2: + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) + continue + + for result in torrents[1:]: + try: + title = result('td')[1].find('a').text + download_url = self.urls['base_url'] + result('td')[3].find('a')['href'] + if not all([title, download_url]): + continue - # Continue only if one Release is found - if len(torrents) < 2: - logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) - continue + seeders = int(result.find('td', attrs={'class': 'ac t_seeders'}).text) + leechers = int(result.find('td', attrs={'class': 'ac t_leechers'}).text) - for result in torrents[1:]: - try: - title = result('td')[1].find('a').text - download_url = self.urls['base_url'] + result('td')[3].find('a')['href'] - seeders = int(result.find('td', attrs={'class': 'ac t_seeders'}).text) - leechers = int(result.find('td', attrs={'class': 'ac t_leechers'}).text) - torrent_size = result('td')[5].text - size = convert_size(torrent_size) or -1 - - if not all([title, download_url]): - continue - - # Filter unseeded torrent - if seeders < min(self.minseed, 1): - if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format - (title, seeders), logger.DEBUG) - continue - - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, - 'leechers': leechers, 'pubdate': None, 'hash': None} + # Filter unseeded torrent + if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log('Found result: {0} with {1} seeders and {2} leechers'.format(title, seeders, leechers), logger.DEBUG) - - items.append(item) - except (AttributeError, TypeError, KeyError, ValueError, IndexError): - logger.log('Failed parsing provider. Traceback: {0!r}'.format - (traceback.format_exc()), logger.ERROR) + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. Seeders: {1}'.format + (title, seeders), logger.DEBUG) continue - except Exception: - logger.log('Failed parsing provider. 
Traceback: {0!r}'.format - (traceback.format_exc()), logger.ERROR) + torrent_size = result('td')[5].text + size = convert_size(torrent_size) or -1 + + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } + if mode != 'RSS': + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) + + items.append(item) + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) + continue + results += items return results diff --git a/sickbeard/providers/kat.py b/sickbeard/providers/kat.py index 10b6fc6cc3..118fcf55fd 100644 --- a/sickbeard/providers/kat.py +++ b/sickbeard/providers/kat.py @@ -1,25 +1,26 @@ # coding=utf-8 # Author: Dustyn Gibson # -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . from __future__ import unicode_literals import traceback import validators + from requests.compat import urljoin from sickbeard.bs4_parser import BS4Parser @@ -64,6 +65,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_strings[mode]: search_params['q'] = search_string if mode != 'RSS' else '' @@ -110,27 +112,36 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. 
Seeders: {1}'.format (title, seeders), logger.DEBUG) continue verified = bool(try_int(item.find('torrent:verified').get_text(strip=True))) if self.confirmed and not verified: if mode != 'RSS': - logger.log("Found result {0} but that doesn't seem like a verified result so I'm ignoring it".format(title), logger.DEBUG) + logger.log("Found result {0} but that doesn't seem like a verified" + " result so I'm ignoring it".format(title), logger.DEBUG) continue torrent_size = item.find('torrent:contentlength').get_text(strip=True) size = convert_size(torrent_size) or -1 info_hash = item.find('torrent:infohash').get_text(strip=True) - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, - 'leechers': leechers, 'pubdate': None, 'hash': info_hash} + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': info_hash + } if mode != 'RSS': - logger.log('Found result: %s with %s seeders and %s leechers' % (title, seeders, leechers), logger.DEBUG) + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) items.append(item) - except (AttributeError, TypeError, KeyError, ValueError, IndexError): logger.log('Failed parsing provider. Traceback: {0!r}'.format (traceback.format_exc()), logger.ERROR) diff --git a/sickbeard/providers/morethantv.py b/sickbeard/providers/morethantv.py index 1ed625b713..bdab9316c4 100644 --- a/sickbeard/providers/morethantv.py +++ b/sickbeard/providers/morethantv.py @@ -1,22 +1,20 @@ # coding=utf-8 # Author: Dustyn Gibson # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . from __future__ import unicode_literals @@ -68,7 +66,8 @@ def __init__(self): def _check_auth(self): if not self.username or not self.password: - raise AuthException('Your authentication credentials for {0} are missing, check your config.'.format(self.name)) + raise AuthException('Your authentication credentials for {0} are missing,' + ' check your config.'.format(self.name)) return True @@ -175,8 +174,15 @@ def process_column_header(td): torrent_size = cells[labels.index('Size')].get_text(strip=True) size = convert_size(torrent_size, units=units) or -1 - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, - 'leechers': leechers, 'pubdate': None, 'hash': None} + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format (title, seeders, leechers), logger.DEBUG) @@ -186,6 +192,7 @@ def process_column_header(td): logger.log('Failed parsing provider. 
Traceback: {0!r}'.format (traceback.format_exc()), logger.ERROR) continue + results += items return results diff --git a/sickbeard/providers/newpct.py b/sickbeard/providers/newpct.py index 5a980739d4..dba63d9ace 100644 --- a/sickbeard/providers/newpct.py +++ b/sickbeard/providers/newpct.py @@ -1,29 +1,29 @@ # coding=utf-8 # Author: CristianBB # Greetings to Mr. Pine-apple - # -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . from __future__ import unicode_literals -from requests.compat import urljoin import re import traceback +from requests.compat import urljoin + from sickbeard import helpers from sickbeard import logger, tvcache from sickbeard.bs4_parser import BS4Parser @@ -117,11 +117,20 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man seeders = 1 leechers = 0 torrent_size = cells[labels.index('Tamaño')].get_text(strip=True) - size = convert_size(torrent_size) or -1 - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} + + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } if mode != 'RSS': - logger.log('Found result: {0}'.format(title), logger.DEBUG) + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) items.append(item) except (AttributeError, TypeError, KeyError, ValueError, IndexError): @@ -210,4 +219,5 @@ def _processTitle(title): return title.strip() + provider = newpctProvider() diff --git a/sickbeard/providers/newznab.py b/sickbeard/providers/newznab.py index a13766e0ef..aed8770c14 100644 --- a/sickbeard/providers/newznab.py +++ b/sickbeard/providers/newznab.py @@ -1,31 +1,33 @@ # coding=utf-8 # Author: Nic Wolfe # Rewrite: Dustyn Gibson (miigotu) - # -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . 
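Several hunks above lean on `convert_size(...) or -1` to turn scraped size cells ('Size', 'Tamaño') into a byte count with a -1 fallback. A simplified stand-in showing the assumed behaviour of that helper (the real one lives in `sickrage.helper.common` and handles more formats):

```python
# Simplified sketch of convert_size as these providers use it: parse a
# human-readable size string into bytes, or return None on failure so
# callers can fall back with `or -1`. The units tuple mirrors the real
# helper's keyword argument but is an assumption here.
def convert_size(size_string, units=('B', 'KB', 'MB', 'GB', 'TB', 'PB')):
    try:
        value, unit = size_string.strip().split()
        return int(float(value) * 1024 ** units.index(unit.upper()))
    except (AttributeError, ValueError):
        return None

print(convert_size('1.5 GB'))        # 1610612736
print(convert_size('broken') or -1)  # -1
```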
from __future__ import unicode_literals -from requests.compat import urljoin + import os import re import time import validators - import sickbeard +import traceback + +from requests.compat import urljoin + from sickbeard import logger, tvcache from sickbeard.bs4_parser import BS4Parser from sickbeard.common import cpu_presets @@ -367,9 +369,23 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man size = convert_size(item_size) or -1 - result = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} + result = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } + if mode != 'RSS': + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) + items.append(result) - except StandardError: + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) continue # Since we arent using the search string, @@ -377,8 +393,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if 'tvdbid' in search_params: break - if torznab: - results.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results diff --git a/sickbeard/providers/norbits.py b/sickbeard/providers/norbits.py index 245909a8b9..7d12bcaaa3 100644 --- a/sickbeard/providers/norbits.py +++ b/sickbeard/providers/norbits.py @@ -1,22 +1,19 @@ # coding=utf-8 -"""A Norbits (https://norbits.net) provider""" - -# URL: https://sickrage.github.io # -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . 
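The newznab hunk above removes the torznab-only post-sort of results by seeder count. For context, this is what the removed sort did, with a local stand-in for `try_int` (assuming its usual int-with-default semantics):

```python
# Local stand-in for sickrage.helper.common.try_int: int() with a default.
def try_int(candidate, default_value=0):
    try:
        return int(candidate)
    except (TypeError, ValueError):
        return default_value

results = [
    {'title': 'Show.S01E01', 'seeders': '12'},
    {'title': 'Show.S01E02', 'seeders': None},
    {'title': 'Show.S01E03', 'seeders': 47},
]
# The sort the torznab branch used to apply before returning results
results.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True)
print([r['title'] for r in results])  # E03 (47), E01 (12), E02 (0)
```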
from __future__ import unicode_literals @@ -125,10 +122,18 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many info_hash = item.pop('info_hash', '') size = convert_size(item.pop('size', -1), -1) - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': info_hash} + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': info_hash + } if mode != 'RSS': - logger.log('Found result: {0} with {1} seeders and {2} leechers'.format( - title, seeders, leechers), logger.DEBUG) + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) items.append(item) except (AttributeError, TypeError, KeyError, ValueError, IndexError): diff --git a/sickbeard/providers/nyaatorrents.py b/sickbeard/providers/nyaatorrents.py index cd365c9c33..f7b8b82725 100644 --- a/sickbeard/providers/nyaatorrents.py +++ b/sickbeard/providers/nyaatorrents.py @@ -1,22 +1,20 @@ # coding=utf-8 # Author: Mr_Orange # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . from __future__ import unicode_literals @@ -54,24 +52,23 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if self.show and not self.show.is_anime: return results + search_params = { + 'page': 'rss', + 'cats': '1_0', # All anime + 'sort': 2, # Sort Descending By Seeders + 'order': 1 + } + for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format - (search_string), logger.DEBUG) - - search_params = { - 'page': 'rss', - 'cats': '1_0', # All anime - 'sort': 2, # Sort Descending By Seeders - 'order': 1 - } - if mode != 'RSS': + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) + search_params['term'] = search_string - results = [] data = self.cache.getRSSFeed(self.url, params=search_params)['entries'] if not data: logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) @@ -95,27 +92,36 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log('Discarding torrent because it doesn\'t meet the' - ' minimum seeders: {0}. Seeders: {1})'.format + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. 
Seeders: {1}'.format (title, seeders), logger.DEBUG) continue if self.confirmed and not verified and mode != 'RSS': - logger.log("Found result {0} but that doesn't seem like a verified result so I'm ignoring it".format - (title), logger.DEBUG) + logger.log("Found result {0} but that doesn't seem like a verified" + " result so I'm ignoring it".format(title), logger.DEBUG) continue size = convert_size(torrent_size) or -1 - result = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} + + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format (title, seeders, leechers), logger.DEBUG) - items.append(result) + items.append(item) except (AttributeError, TypeError, KeyError, ValueError, IndexError): - logger.log('Failed parsing provider. Traceback: {0!r}'.format - (traceback.format_exc()), logger.ERROR) - continue + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) + continue results += items diff --git a/sickbeard/providers/omgwtfnzbs.py b/sickbeard/providers/omgwtfnzbs.py index 5778989066..315efd8bb5 100644 --- a/sickbeard/providers/omgwtfnzbs.py +++ b/sickbeard/providers/omgwtfnzbs.py @@ -1,7 +1,6 @@ # coding=utf-8 # Author: Jordon Smith # -# # This file is part of Medusa. # # Medusa is free software: you can redistribute it and/or modify @@ -21,8 +20,8 @@ import re import traceback - import sickbeard + from sickbeard import logger, tvcache from sickrage.helper.common import convert_size, try_int @@ -30,6 +29,7 @@ class OmgwtfnzbsProvider(NZBProvider): + def __init__(self): NZBProvider.__init__(self, 'OMGWTFNZBs') @@ -106,6 +106,7 @@ def search(self, search_strings, age=0, ep_obj=None): for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_strings[mode]: search_params['search'] = search_string if mode != 'RSS': @@ -158,4 +159,5 @@ def _getRSSData(self): } return self.getRSSFeed(self.provider.urls['rss'], params=search_params) + provider = OmgwtfnzbsProvider() diff --git a/sickbeard/providers/pretome.py b/sickbeard/providers/pretome.py index 591d385cb2..b4ff4da0c2 100644 --- a/sickbeard/providers/pretome.py +++ b/sickbeard/providers/pretome.py @@ -1,27 +1,26 @@ # coding=utf-8 # Author: Nick Sologoub # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . 
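The nyaatorrents change hoists the static query parameters out of the search loop, leaving only 'term' to vary per search string. Under requests-style encoding the feed request works out to a URL like the one below; the base URL is illustrative, and the provider actually passes `params` to `getRSSFeed` rather than building the string itself:

```python
try:
    from urllib.parse import urlencode  # Python 3
except ImportError:
    from urllib import urlencode       # Python 2

# The hoisted parameters from the hunk above; only 'term' changes per search
search_params = {
    'page': 'rss',
    'cats': '1_0',  # All anime
    'sort': 2,      # Sort descending by seeders
    'order': 1,
}
search_params['term'] = 'Show S01'
print('https://www.nyaa.se/?' + urlencode(search_params))
```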
from __future__ import unicode_literals import re import traceback + from requests.compat import quote from requests.utils import dict_from_cookiejar @@ -104,68 +103,71 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many if not data: continue - try: - with BS4Parser(data, 'html5lib') as html: - # Continue only if one Release is found - empty = html.find('h2', text='No .torrents fit this filter criteria') - if empty: - logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) - continue - - torrent_table = html.find('table', attrs={'style': 'border: none; width: 100%;'}) - if not torrent_table: - logger.log('Could not find table of torrents', logger.ERROR) - continue + with BS4Parser(data, 'html5lib') as html: + # Continue only if one Release is found + empty = html.find('h2', text='No .torrents fit this filter criteria') + if empty: + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) + continue - torrent_rows = torrent_table('tr', attrs={'class': 'browse'}) + torrent_table = html.find('table', attrs={'style': 'border: none; width: 100%;'}) + if not torrent_table: + logger.log('Could not find table of torrents', logger.ERROR) + continue - for result in torrent_rows: - try: - cells = result('td') - size = None - link = cells[1].find('a', attrs={'style': 'font-size: 1.25em; font-weight: bold;'}) + torrent_rows = torrent_table('tr', attrs={'class': 'browse'}) - torrent_id = link['href'].replace('details.php?id=', '') + for result in torrent_rows: + try: + cells = result('td') + size = None + link = cells[1].find('a', attrs={'style': 'font-size: 1.25em; font-weight: bold;'}) - if link.get('title', ''): - title = link['title'] - else: - title = link.contents[0] + torrent_id = link['href'].replace('details.php?id=', '') - download_url = self.urls['download'] % (torrent_id, link.contents[0]) - seeders = int(cells[9].contents[0]) - leechers = int(cells[10].contents[0]) + if link.get('title', ''): + title = link['title'] + else: + title = link.contents[0] - # Need size for failed downloads handling - if size is None: - torrent_size = cells[7].text - size = convert_size(torrent_size) or -1 + download_url = self.urls['download'] % (torrent_id, link.contents[0]) + if not all([title, download_url]): + continue - if not all([title, download_url]): - continue + seeders = int(cells[9].contents[0]) + leechers = int(cells[10].contents[0]) - # Filter unseeded torrent - if seeders < min(self.minseed, 1): - if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format - (title, seeders), logger.DEBUG) - continue + # Need size for failed downloads handling + if size is None: + torrent_size = cells[7].text + size = convert_size(torrent_size) or -1 - item = {'title': title, 'link': download_url, 'size': size, 'seeders': - seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} + # Filter unseeded torrent + if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log('Found result: %s with %s seeders and %s leechers' % (title, seeders, leechers), logger.DEBUG) - - items.append(item) - - except (AttributeError, TypeError, KeyError, ValueError, IndexError): - logger.log('Failed parsing provider. Traceback: {0!r}'.format - (traceback.format_exc()), logger.ERROR) + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. 
Seeders: {1}'.format
+                                           (title, seeders), logger.DEBUG)
+                            continue

-                        item = {'title': title, 'link': download_url, 'size': size, 'seeders':
-                                seeders, 'leechers': leechers, 'pubdate': None, 'hash': None}
+                        item = {
+                            'title': title,
+                            'link': download_url,
+                            'size': size,
+                            'seeders': seeders,
+                            'leechers': leechers,
+                            'pubdate': None,
+                            'hash': None
+                        }
                         if mode != 'RSS':
-                            logger.log('Found result: %s with %s seeders and %s leechers' % (title, seeders, leechers), logger.DEBUG)
-
-                        items.append(item)
-
-                        except (AttributeError, TypeError, KeyError, ValueError, IndexError):
-                            logger.log('Failed parsing provider. Traceback: {0!r}'.format
-                                       (traceback.format_exc()), logger.ERROR)
+                            logger.log('Found result: {0} with {1} seeders and {2} leechers'.format
+                                       (title, seeders, leechers), logger.DEBUG)
+
+                        items.append(item)
+                    except (AttributeError, TypeError, KeyError, ValueError, IndexError):
+                        logger.log('Failed parsing provider. Traceback: {0!r}'.format
+                                   (traceback.format_exc()), logger.ERROR)
+                        continue

             results += items

From f9a4213c07c45a940f821247255a37d6c8ab5106 Mon Sep 17 00:00:00 2001
From: medariox
Date: Fri, 10 Jun 2016 14:54:31 +0200
Subject: [PATCH 35/85] Next 11 providers, removed sceneelite

---
 sickbeard/providers/newznab.py       |   4 +-
 sickbeard/providers/rarbg.py         |  34 ++++--
 sickbeard/providers/rsstorrent.py    |  26 ++--
 sickbeard/providers/scc.py           |  70 +++++------
 sickbeard/providers/sceneelite.py    | 153 ------------------------
 sickbeard/providers/scenetime.py     |  40 ++++---
 sickbeard/providers/shazbat.py       |  12 +-
 sickbeard/providers/speedcd.py       |  28 +++--
 sickbeard/providers/t411.py          | 110 +++++++++--------
 sickbeard/providers/thepiratebay.py  |  27 +++--
 sickbeard/providers/tntvillage.py    | 172 ++++++++++++++-------------
 sickbeard/providers/tokyotoshokan.py |  39 +++---
 sickbeard/providers/torrentbytes.py  |  35 ++++--
 13 files changed, 334 insertions(+), 416 deletions(-)
 delete mode 100644 sickbeard/providers/sceneelite.py

diff --git a/sickbeard/providers/newznab.py b/sickbeard/providers/newznab.py
index aed8770c14..ef0d8ca50c 100644
--- a/sickbeard/providers/newznab.py
+++ b/sickbeard/providers/newznab.py
@@ -369,7 +369,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                         size = convert_size(item_size) or -1

-                        result = {
+                        item = {
                             'title': title,
                             'link': download_url,
                             'size': size,
@@ -382,7 +382,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                             logger.log('Found result: {0} with {1} seeders and {2} leechers'.format
                                        (title, seeders, leechers), logger.DEBUG)

-                        items.append(result)
+                        items.append(item)
                     except (AttributeError, TypeError, KeyError, ValueError, IndexError):
                         logger.log('Failed parsing provider. Traceback: {0!r}'.format
                                    (traceback.format_exc()), logger.ERROR)
                         continue
diff --git a/sickbeard/providers/rarbg.py b/sickbeard/providers/rarbg.py
index e2915554d1..4b29b63eab 100644
--- a/sickbeard/providers/rarbg.py
+++ b/sickbeard/providers/rarbg.py
@@ -1,20 +1,20 @@
 # coding=utf-8
 # Author: Dustyn Gibson
 #
-# This file is part of SickRage.
+# This file is part of Medusa.
 #
-# SickRage is free software: you can redistribute it and/or modify
+# Medusa is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
 # the Free Software Foundation, either version 3 of the License, or
 # (at your option) any later version.
 #
-# SickRage is distributed in the hope that it will be useful,
+# Medusa is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU General Public License
-# along with SickRage. If not, see .
+# along with Medusa. If not, see .

 from __future__ import unicode_literals
@@ -96,6 +96,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
         for mode in search_strings:
             items = []
             logger.log('Search Mode: {0}'.format(mode), logger.DEBUG)
+
             if mode == 'RSS':
                 search_params['sort'] = 'last'
                 search_params['mode'] = 'list'
@@ -157,10 +158,11 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                         seeders = item.pop('seeders')
                         leechers = item.pop('leechers')
+
                         if seeders < min(self.minseed, 1):
                             if mode != 'RSS':
                                 logger.log("Discarding torrent because it doesn't meet the"
-                                           " minimum seeders: {0}. Seeders: {1})".format
+                                           " minimum seeders: {0}. Seeders: {1}".format
                                            (title, seeders), logger.DEBUG)
                             continue
@@ -171,12 +173,20 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
-                        if mode != 'RSS':
-                            logger.log('Found result: {0} with {1} seeders and {2} leechers'.format
-                                       (title, seeders, leechers), logger.DEBUG)
-
-                        result = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None}
-                        items.append(result)
+                        item = {
+                            'title': title,
+                            'link': download_url,
+                            'size': size,
+                            'seeders': seeders,
+                            'leechers': leechers,
+                            'pubdate': None,
+                            'hash': None
+                        }
+                        if mode != 'RSS':
+                            logger.log('Found result: {0} with {1} seeders and {2} leechers'.format
+                                       (title, seeders, leechers), logger.DEBUG)
+
+                        items.append(item)
                     except (AttributeError, TypeError, KeyError, ValueError, IndexError):
-                        logger.log('Failed parsing provider. Traceback: {0!r}'.format
-                                   (traceback.format_exc()), logger.ERROR)
-                        continue
+                        logger.log('Failed parsing provider. Traceback: {0!r}'.format
+                                   (traceback.format_exc()), logger.ERROR)
+                        continue

             results += items
diff --git a/sickbeard/providers/rsstorrent.py b/sickbeard/providers/rsstorrent.py
index bfc43c1f48..d236a17e54 100644
--- a/sickbeard/providers/rsstorrent.py
+++ b/sickbeard/providers/rsstorrent.py
@@ -1,32 +1,32 @@
 # coding=utf-8
 #
 # Author: Mr_Orange
 #
-
-
 #
-# This file is part of SickRage.
+# This file is part of Medusa.
 #
-# SickRage is free software: you can redistribute it and/or modify
+# Medusa is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
 # the Free Software Foundation, either version 3 of the License, or
 # (at your option) any later version.
 #
-# SickRage is distributed in the hope that it will be useful,
+# Medusa is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU General Public License
-# along with SickRage. If not, see .
+# along with Medusa. If not, see .
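Across the providers touched by these two patches, the parse loops now all emit the same seven-key result dict. A hypothetical constructor capturing that shape (the codebase builds the dicts inline; this helper is not part of it):

```python
# Hypothetical helper showing the result shape these patches standardize on.
def make_item(title, link, size=-1, seeders=0, leechers=0,
              pubdate=None, torrent_hash=None):
    return {
        'title': title,
        'link': link,
        'size': size,
        'seeders': seeders,
        'leechers': leechers,
        'pubdate': pubdate,
        'hash': torrent_hash,
    }

print(make_item('Show.S01E01.720p', 'magnet:?xt=urn:btih:...', seeders=5))
```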
from __future__ import unicode_literals import io import os import re +import sickbeard + from requests.utils import add_dict_to_cookiejar + from bencode import bdecode -import sickbeard from sickbeard import helpers, logger, tvcache from sickrage.helper.encoding import ek @@ -189,18 +189,18 @@ def validateRSS(self): # pylint: disable=too-many-return-statements @staticmethod def dumpHTML(data): - dumpName = ek(os.path.join, sickbeard.CACHE_DIR, 'custom_torrent.html') + dump_name = ek(os.path.join, sickbeard.CACHE_DIR, 'custom_torrent.html') try: - fileOut = io.open(dumpName, 'wb') - fileOut.write(data) - fileOut.close() - helpers.chmodAsParent(dumpName) + file_out = io.open(dump_name, 'wb') + file_out.write(data) + file_out.close() + helpers.chmodAsParent(dump_name) except IOError as error: logger.log('Unable to save the file: {0}'.format(ex(error)), logger.ERROR) return False - logger.log('Saved custom_torrent html dump {0} '.format(dumpName), logger.INFO) + logger.log('Saved custom_torrent html dump {0} '.format(dump_name), logger.INFO) return True diff --git a/sickbeard/providers/scc.py b/sickbeard/providers/scc.py index c58df97af1..588797f112 100644 --- a/sickbeard/providers/scc.py +++ b/sickbeard/providers/scc.py @@ -1,36 +1,32 @@ # coding=utf-8 # Author: Idan Gutman # Modified by jkaberg, https://github.com/jkaberg for SceneAccess - # -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . from __future__ import unicode_literals import re import traceback -import time from requests.compat import urljoin, quote from requests.utils import dict_from_cookiejar -import sickbeard from sickbeard import logger, tvcache from sickbeard.bs4_parser import BS4Parser -from sickbeard.common import cpu_presets from sickrage.helper.common import convert_size, try_int from sickrage.providers.torrent.TorrentProvider import TorrentProvider @@ -99,21 +95,15 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] - if mode != 'RSS': - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), - logger.DEBUG) + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) search_url = self.urls['search'] % (quote(search_string), self.categories[mode]) - try: - data = self.get_url(search_url, returns='text') - time.sleep(cpu_presets[sickbeard.CPU_PRESET]) - except Exception as e: - logger.log('Unable to fetch data. 
Error: %s' % repr(e), logger.WARNING) - + data = self.get_url(search_url, returns='text') if not data: continue @@ -127,7 +117,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man continue for result in torrent_table('tr')[1:]: - try: link = result.find('td', attrs={'class': 'ttr_name'}).find('a') url = result.find('td', attrs={'class': 'td_dl'}).find('a') @@ -139,31 +128,42 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man with BS4Parser(data) as details_html: title = re.search("(?<=').+(? -# -# URL: https://sickrage.github.io -# -# This file is part of SickRage. -# -# SickRage is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# SickRage is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . - -from __future__ import unicode_literals - -import traceback -from requests.compat import urljoin -from requests.utils import dict_from_cookiejar - -from sickbeard import logger, tvcache - -from sickrage.helper.common import try_int -from sickrage.providers.torrent.TorrentProvider import TorrentProvider - - -class SceneEliteProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes - - def __init__(self): - - # Provider Init - TorrentProvider.__init__(self, 'SceneElite') - - # Credentials - self.username = None - self.password = None - - # Torrent Stats - self.minseed = None - self.minleech = None - self.freeleech = None - - # URLs - self.url = 'https://sceneelite.org/' - self.urls = { - 'login': urljoin(self.url, '/api/v1/auth'), - 'search': urljoin(self.url, '/api/v1/torrents'), - 'download': urljoin(self.url, '/api/v1/torrents/download/'), - } - - # Proper Strings - self.proper_strings = ['PROPER', 'REPACK', 'REAL'] - cache_params = {'RSS': ['']} - # Cache - self.cache = tvcache.TVCache(self, min_time=0.1, search_params=cache_params) - - def login(self): - if any(dict_from_cookiejar(self.session.cookies).values()): - return True - - login_params = { - 'username': self.username, - 'password': self.password - } - - response = self.get_url(self.urls['login'], params=login_params, returns='json') - if not response: - logger.log('Unable to connect to provider', logger.WARNING) - return False - return True - - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches - results = [] - if not self.login(): - return results - - # Search Params - search_params = { - 'extendedSearch': 'false', - 'hideOld': 'false', - 'index': '0', - 'limit': '100', - 'order': 'asc', - 'page': 'search', - 'sort': 'n', - 'categories[0]': 3, - 'categories[1]': 6, - 'categories[2]': 7 - } - - for mode in search_strings: - items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) - - for search_string in search_strings[mode]: - if mode != 'RSS': - logger.log('Search string: {0}'.format - (search_string), logger.DEBUG) - search_params['searchText'] = search_string - else: - search_params['page'] = 'last_seriebrowse' - results = [] - search_url = self.urls['search'] - try: - jdata = self.get_url(search_url, 
params=search_params, returns='json') - except ValueError: - logger.log('No data returned from provider', logger.DEBUG) - continue - for torrent in jdata: - try: - title = torrent.pop('name', '') - id = str(torrent.pop('id', '')) - if not id: - continue - seeders = try_int(torrent.pop('seeders', ''), 1) - leechers = try_int(torrent.pop('leechers', ''), 0) - freeleech = torrent.pop('frileech') - if self.freeleech and freeleech != 1: - continue - size = try_int(torrent.pop('size', ''), 0) - download_url = self.urls['download'] + id - - # Filter unseeded torrent - if seeders < min(self.minseed, 1): - if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format(title, seeders), logger.DEBUG) - continue - - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} - - if mode != 'RSS': - logger.log('Found result: {0} with {1} seeders and {2} leechers'.format - (title, seeders, leechers), logger.DEBUG) - - items.append(item) - - except (AttributeError, TypeError, KeyError, ValueError, IndexError): - logger.log('Failed parsing provider. Traceback: {0!r}'.format - (traceback.format_exc()), logger.ERROR) - continue - - results += items - - return results - - -provider = SceneEliteProvider() diff --git a/sickbeard/providers/scenetime.py b/sickbeard/providers/scenetime.py index d3d95ec870..3dde79cb37 100644 --- a/sickbeard/providers/scenetime.py +++ b/sickbeard/providers/scenetime.py @@ -1,25 +1,24 @@ # coding=utf-8 # Author: Idan Gutman # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . -import re +from __future__ import unicode_literals +import re import traceback from requests.compat import quote @@ -118,26 +117,35 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many title = link.get_text(strip=True) download_url = self.urls['download'] % (torrent_id, '%s.torrent' % title.replace(' ', '.')) + if not all([title, download_url]): + continue seeders = try_int(cells[labels.index('Seeders')].get_text(strip=True)) leechers = try_int(cells[labels.index('Leechers')].get_text(strip=True)) - torrent_size = cells[labels.index('Size')].get_text() - - size = convert_size(torrent_size) or -1 - - if not all([title, download_url]): - continue # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. 
Seeders: {1}'.format (title, seeders), logger.DEBUG) continue - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} + torrent_size = cells[labels.index('Size')].get_text() + size = convert_size(torrent_size) or -1 + + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } if mode != 'RSS': - logger.log('Found result: %s with %s seeders and %s leechers' % (title, seeders, leechers), logger.DEBUG) + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) items.append(item) except (AttributeError, TypeError, KeyError, ValueError, IndexError): diff --git a/sickbeard/providers/shazbat.py b/sickbeard/providers/shazbat.py index 8310513aa6..1b12c9da3d 100644 --- a/sickbeard/providers/shazbat.py +++ b/sickbeard/providers/shazbat.py @@ -1,28 +1,27 @@ # coding=utf-8 # Author: Nic Wolfe # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . from __future__ import unicode_literals from requests.compat import urljoin from sickbeard import logger, tvcache + from sickrage.helper.exceptions import AuthException from sickrage.providers.torrent.TorrentProvider import TorrentProvider @@ -77,4 +76,5 @@ def _getRSSData(self): def _checkAuth(self, data): return self.provider._checkAuthFromData(data) # pylint: disable=protected-access + provider = ShazbatProvider() diff --git a/sickbeard/providers/speedcd.py b/sickbeard/providers/speedcd.py index 2e78ad7755..9539696276 100644 --- a/sickbeard/providers/speedcd.py +++ b/sickbeard/providers/speedcd.py @@ -1,22 +1,20 @@ # coding=utf-8 # Author: Dustyn Gibson # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . 
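
The scenetime hunk above resolves cells through labels built from the table header instead of hard-coded positions, and computes the size only after the seeder filter has passed. A hedged sketch of the label-based lookup, using a throwaway table rather than the provider's real markup:

    from bs4 import BeautifulSoup

    html = ('<table><tr><th>Name</th><th>Size</th><th>Seeders</th></tr>'
            '<tr><td>Show.S01E01</td><td>1.2 GB</td><td>42</td></tr></table>')
    table = BeautifulSoup(html, 'html.parser').find('table')
    rows = table.find_all('tr')
    labels = [th.get_text(strip=True) for th in rows[0]('th')]
    cells = rows[1]('td')
    seeders = int(cells[labels.index('Seeders')].get_text(strip=True))
    size_text = cells[labels.index('Size')].get_text(strip=True)
    print(seeders, size_text)  # 42 1.2 GB

Looking cells up by label keeps the parser working if the tracker reorders its columns, which hard-coded positional indexes break on silently.
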
from __future__ import unicode_literals @@ -160,17 +158,27 @@ def process_column_header(td): # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format(title, seeders), logger.DEBUG) + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. Seeders: {1}'.format + (title, seeders), logger.DEBUG) continue torrent_size = cells[labels.index('Size')].get_text() torrent_size = torrent_size[:-2] + ' ' + torrent_size[-2:] size = convert_size(torrent_size, units=units) or -1 - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, - 'leechers': leechers, 'pubdate': None, 'hash': None} + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } if mode != 'RSS': - logger.log('Found result: {0} with {1} seeders and {2} leechers'.format(title, seeders, leechers), logger.DEBUG) + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) items.append(item) except (AttributeError, TypeError, KeyError, ValueError, IndexError): diff --git a/sickbeard/providers/t411.py b/sickbeard/providers/t411.py index 65b2224797..3160e8e65c 100644 --- a/sickbeard/providers/t411.py +++ b/sickbeard/providers/t411.py @@ -1,31 +1,31 @@ # coding=utf-8 # Author: djoole # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . 
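
Speed.cd prints sizes with no space before the unit, so the hunk above re-inserts one before calling convert_size(torrent_size, units=units). A rough standalone approximation of that conversion, assuming the real helper in sickrage.helper.common behaves along these lines:

    def convert_size_sketch(size, units=('B', 'KB', 'MB', 'GB', 'TB', 'PB')):
        # Return the size in bytes, or None when the string does not parse.
        try:
            value, unit = size.split()
            return int(float(value) * 1024 ** units.index(unit.upper()))
        except (ValueError, IndexError):
            return None

    raw = '1.2GB'                             # as rendered by the site
    spaced = raw[:-2] + ' ' + raw[-2:]        # '1.2 GB'
    print(convert_size_sketch(spaced) or -1)  # bytes, with -1 as the sentinel
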
from __future__ import unicode_literals -from requests.auth import AuthBase import time import traceback +from requests.auth import AuthBase + from sickbeard import logger, tvcache from sickbeard.common import USER_AGENT + from sickrage.helper.common import try_int from sickrage.helper.common import convert_size from sickrage.providers.torrent.TorrentProvider import TorrentProvider @@ -98,64 +98,71 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many logger.log('Search string: {0}'.format(search_string), logger.DEBUG) - search_urlS = ([self.urls['search'] % (search_string, u) for u in self.subcategories], [self.urls['rss']])[mode == 'RSS'] - for search_url in search_urlS: + search_urls = ([self.urls['search'] % (search_string, u) for u in self.subcategories], [self.urls['rss']])[mode == 'RSS'] + for search_url in search_urls: data = self.get_url(search_url, returns='json') if not data: continue - try: - if 'torrents' not in data and mode != 'RSS': - logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) - continue + if 'torrents' not in data and mode != 'RSS': + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) + continue - torrents = data['torrents'] if mode != 'RSS' else data + torrents = data['torrents'] if mode != 'RSS' else data - if not torrents: - logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) + if not torrents: + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) + continue + + for torrent in torrents: + if mode == 'RSS' and 'category' in torrent and try_int(torrent['category'], 0) not in self.subcategories: continue - for torrent in torrents: - if mode == 'RSS' and 'category' in torrent and try_int(torrent['category'], 0) not in self.subcategories: + try: + title = torrent['name'] + torrent_id = torrent['id'] + download_url = (self.urls['download'] % torrent_id) + if not all([title, download_url]): continue - try: - title = torrent['name'] - torrent_id = torrent['id'] - download_url = (self.urls['download'] % torrent_id) - if not all([title, download_url]): - continue - - seeders = try_int(torrent['seeders']) - leechers = try_int(torrent['leechers']) - verified = bool(torrent['isVerified']) - torrent_size = torrent['size'] - - # Filter unseeded torrent - if seeders < min(self.minseed, 1): - if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format(title, seeders), logger.DEBUG) - continue - - if self.confirmed and not verified and mode != 'RSS': - logger.log("Found result {0} but that doesn't seem like a verified result so I'm ignoring it".format(title), logger.DEBUG) - continue - - size = convert_size(torrent_size) or -1 - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, - 'leechers': leechers, 'pubdate': None, 'hash': None} - if mode != 'RSS': - logger.log('Found result: {0} with {1} seeders and {2} leechers'.format(title, seeders, leechers), logger.DEBUG) + seeders = try_int(torrent['seeders']) + leechers = try_int(torrent['leechers']) + verified = bool(torrent['isVerified']) - items.append(item) + # Filter unseeded torrent + if seeders < min(self.minseed, 1): + if mode != 'RSS': + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. 
Seeders: {1}'.format + (title, seeders), logger.DEBUG) + continue - except (AttributeError, TypeError, KeyError, ValueError, IndexError): - logger.log('Failed parsing provider. Traceback: {0!r}'.format - (traceback.format_exc()), logger.ERROR) + if self.confirmed and not verified and mode != 'RSS': + logger.log("Found result {0} but that doesn't seem like a verified" + " result so I'm ignoring it".format(title), logger.DEBUG) continue - except Exception: - logger.log('Failed parsing provider. Traceback: %s' % traceback.format_exc(), logger.ERROR) + torrent_size = torrent['size'] + size = convert_size(torrent_size) or -1 + + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } + if mode != 'RSS': + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) + + items.append(item) + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) + continue results += items @@ -171,4 +178,5 @@ def __call__(self, r): r.headers['Authorization'] = self.token return r + provider = T411Provider() diff --git a/sickbeard/providers/thepiratebay.py b/sickbeard/providers/thepiratebay.py index 0799d0ee01..c9e0498cb5 100644 --- a/sickbeard/providers/thepiratebay.py +++ b/sickbeard/providers/thepiratebay.py @@ -1,28 +1,27 @@ # coding=utf-8 # Author: Dustyn Gibson # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . from __future__ import unicode_literals import re import traceback import validators + from requests.compat import urljoin from sickbeard import logger, tvcache @@ -140,14 +139,16 @@ def process_column_header(th): # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1}".format + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. 
Seeders: {1}'.format (title, seeders), logger.DEBUG) continue # Accept Torrent only from Good People for every Episode Search if self.confirmed and not result.find(alt=re.compile(r'VIP|Trusted')): if mode != 'RSS': - logger.log("Found result {0} but that doesn't seem like a trusted result so I'm ignoring it".format(title), logger.DEBUG) + logger.log("Found result {0} but that doesn't seem like a trusted" + " result so I'm ignoring it".format(title), logger.DEBUG) continue # Convert size after all possible skip scenarios @@ -155,7 +156,15 @@ def process_column_header(th): torrent_size = re.sub(r'Size ([\d.]+).+([KMGT]iB)', r'\1 \2', torrent_size) size = convert_size(torrent_size, units=units) or -1 - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format (title, seeders, leechers), logger.DEBUG) diff --git a/sickbeard/providers/tntvillage.py b/sickbeard/providers/tntvillage.py index 776277f8ad..562eb61719 100644 --- a/sickbeard/providers/tntvillage.py +++ b/sickbeard/providers/tntvillage.py @@ -1,22 +1,21 @@ # coding=utf-8 # Author: Giovanni Borri # Modified by gborri, https://github.com/gborri for TNTVillage - # -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . 
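
The Pirate Bay buries the size inside the description cell as 'Size 1.2 GiB' with a non-breaking space before the unit, and the regex kept by the hunk above rewrites that into a plain '<value> <unit>' pair. For example:

    import re

    raw = 'Size 1.2\xa0GiB'  # the site uses a non-breaking space here
    size_text = re.sub(r'Size ([\d.]+).+([KMGT]iB)', r'\1 \2', raw)
    print(size_text)  # '1.2 GiB', ready for convert_size(..., units=units)
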
from __future__ import unicode_literals @@ -118,7 +117,7 @@ def __init__(self): def _check_auth(self): if not self.username or not self.password: - raise AuthException('Your authentication credentials for ' + self.name + ' are missing, check your config.') + raise AuthException('Your authentication credentials for {0} are missing, check your config.'.format(self.name)) return True @@ -229,10 +228,10 @@ def _is_italian(self, torrent_rows): if not name or name == 'None': return False - subFound = italian = False + sub_found = italian = False for sub in self.sub_string: if re.search(sub, name, re.I): - subFound = True + sub_found = True else: continue @@ -241,7 +240,7 @@ def _is_italian(self, torrent_rows): italian = True break - if not subFound and re.search('ita', name, re.I): + if not sub_found and re.search('ita', name, re.I): logger.log('Found Italian release: ' + name, logger.DEBUG) italian = True @@ -318,87 +317,92 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many logger.log('No data returned from provider', logger.DEBUG) continue - try: - with BS4Parser(data, 'html5lib') as html: - torrent_table = html.find('table', attrs={'class': 'copyright'}) - torrent_rows = torrent_table('tr') if torrent_table else [] + with BS4Parser(data, 'html5lib') as html: + torrent_table = html.find('table', attrs={'class': 'copyright'}) + torrent_rows = torrent_table('tr') if torrent_table else [] + + # Continue only if one Release is found + if len(torrent_rows) < 3: + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) + last_page = 1 + continue + + if len(torrent_rows) < 42: + last_page = 1 + + for result in torrent_table('tr')[2:]: + try: + link = result.find('td').find('a') + title = link.string + download_url = self.urls['download'] % result('td')[8].find('a')['href'][-8:] + if not all([title, download_url]): + continue - # Continue only if one Release is found - if len(torrent_rows) < 3: - logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) - last_page = 1 - continue + leechers = result('td')[3]('td')[1].text + leechers = int(leechers.strip('[]')) + seeders = result('td')[3]('td')[2].text + seeders = int(seeders.strip('[]')) - if len(torrent_rows) < 42: - last_page = 1 - - for result in torrent_table('tr')[2:]: - - try: - link = result.find('td').find('a') - title = link.string - download_url = self.urls['download'] % result('td')[8].find('a')['href'][-8:] - leechers = result('td')[3]('td')[1].text - leechers = int(leechers.strip('[]')) - seeders = result('td')[3]('td')[2].text - seeders = int(seeders.strip('[]')) - torrent_size = result('td')[3]('td')[3].text.strip('[]') + ' GB' - size = convert_size(torrent_size) or -1 - - filename_qt = self._reverseQuality(self._episodeQuality(result)) - for text in self.hdtext: - title1 = title - title = title.replace(text, filename_qt) - if title != title1: - break - - if Quality.nameQuality(title) == Quality.UNKNOWN: - title += filename_qt - - if not self._is_italian(result) and not self.subtitle: - logger.log('Torrent is subtitled, skipping: %s ' % title, logger.DEBUG) - continue - - if self.engrelease and not self._is_english(result): - logger.log('Torrent isnt english audio/subtitled , skipping: %s ' % title, logger.DEBUG) - continue - - search_show = re.split(r'([Ss][\d{1,2}]+)', search_string)[0] - show_title = search_show - rindex = re.search(r'([Ss][\d{1,2}]+)', title) - if rindex: - show_title = title[:rindex.start()] - ep_params = 
title[rindex.start():] - if show_title.lower() != search_show.lower() and search_show.lower() in show_title.lower(): - new_title = search_show + ep_params - title = new_title - - if not all([title, download_url]): - continue - - if self._is_season_pack(title): - title = re.sub(r'([Ee][\d{1,2}\-?]+)', '', title) - - # Filter unseeded torrent - if seeders < min(self.minseed, 1): - if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format - (title, seeders), logger.DEBUG) - continue - - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} + # Filter unseeded torrent + if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log('Found result: {0} with {1} seeders and {2} leechers'.format(title, seeders, leechers), logger.DEBUG) + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. Seeders: {1}'.format + (title, seeders), logger.DEBUG) + continue + + filename_qt = self._reverseQuality(self._episodeQuality(result)) + for text in self.hdtext: + title1 = title + title = title.replace(text, filename_qt) + if title != title1: + break - items.append(item) - except (AttributeError, TypeError, KeyError, ValueError, IndexError): - logger.log('Failed parsing provider. Traceback: {0!r}'.format - (traceback.format_exc()), logger.ERROR) + if Quality.nameQuality(title) == Quality.UNKNOWN: + title += filename_qt + + if not self._is_italian(result) and not self.subtitle: + logger.log('Torrent is subtitled, skipping: %s ' % title, logger.DEBUG) + continue + + if self.engrelease and not self._is_english(result): + logger.log('Torrent isnt english audio/subtitled , skipping: %s ' % title, logger.DEBUG) continue - except Exception: - logger.log('Failed parsing provider. Traceback: {0!r}'.format - (traceback.format_exc()), logger.ERROR) + search_show = re.split(r'([Ss][\d{1,2}]+)', search_string)[0] + show_title = search_show + rindex = re.search(r'([Ss][\d{1,2}]+)', title) + if rindex: + show_title = title[:rindex.start()] + ep_params = title[rindex.start():] + if show_title.lower() != search_show.lower() and search_show.lower() in show_title.lower(): + new_title = search_show + ep_params + title = new_title + + if self._is_season_pack(title): + title = re.sub(r'([Ee][\d{1,2}\-?]+)', '', title) + + torrent_size = result('td')[3]('td')[3].text.strip('[]') + ' GB' + size = convert_size(torrent_size) or -1 + + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } + if mode != 'RSS': + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) + + items.append(item) + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) + continue results += items diff --git a/sickbeard/providers/tokyotoshokan.py b/sickbeard/providers/tokyotoshokan.py index 9fff0bf289..bed2eb9351 100644 --- a/sickbeard/providers/tokyotoshokan.py +++ b/sickbeard/providers/tokyotoshokan.py @@ -1,22 +1,20 @@ # coding=utf-8 # Author: Mr_Orange # - -# -# This file is part of SickRage. +# This file is part of Medusa. 
# -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . from __future__ import unicode_literals @@ -58,6 +56,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_strings[mode]: if mode != 'RSS': logger.log('Search string: {0}'.format(search_string), @@ -88,28 +87,37 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man desc_top = top.find('td', class_='desc-top') title = desc_top.get_text(strip=True) download_url = desc_top.find('a')['href'] - - desc_bottom = bot.find('td', class_='desc-bot').get_text(strip=True) - size = convert_size(desc_bottom.split('|')[1].strip('Size: ')) or -1 + if not all([title, download_url]): + continue stats = bot.find('td', class_='stats').get_text(strip=True) sl = re.match(r'S:(?P\d+)L:(?P\d+)C:(?:\d+)ID:(?:\d+)', stats.replace(' ', '')) seeders = try_int(sl.group('seeders')) if sl else 0 leechers = try_int(sl.group('leechers')) if sl else 0 - if not all([title, download_url]): - continue - # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. Seeders: {1}'.format (title, seeders), logger.DEBUG) continue - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} + desc_bottom = bot.find('td', class_='desc-bot').get_text(strip=True) + size = convert_size(desc_bottom.split('|')[1].strip('Size: ')) or -1 + + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } if mode != 'RSS': - logger.log('Found result: %s with %s seeders and %s leechers' % (title, seeders, leechers), logger.DEBUG) + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) items.append(item) except (AttributeError, TypeError, KeyError, ValueError, IndexError): @@ -121,4 +129,5 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results + provider = TokyoToshokanProvider() diff --git a/sickbeard/providers/torrentbytes.py b/sickbeard/providers/torrentbytes.py index cdc2eae814..7bd09655e8 100644 --- a/sickbeard/providers/torrentbytes.py +++ b/sickbeard/providers/torrentbytes.py @@ -1,22 +1,20 @@ -# coding=utf-8 +# coding=utf-8 # Author: Idan Gutman # - -# -# This file is part of SickRage. +# This file is part of Medusa. 
# -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . from __future__ import unicode_literals @@ -87,12 +85,17 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results search_params = { - 'c41': 1, 'c33': 1, 'c38': 1, 'c32': 1, 'c37': 1 + 'c41': 1, + 'c33': 1, + 'c38': 1, + 'c32': 1, + 'c37': 1 } for mode in search_strings: items = [] - logger.log('Search Mode: {}'.format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_strings[mode]: if mode != 'RSS': @@ -138,15 +141,23 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. Seeders: {1}'.format (title, seeders), logger.DEBUG) continue - # Need size for failed downloads handling torrent_size = cells[labels.index('Size')].get_text(strip=True) size = convert_size(torrent_size) or -1 - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format (title, seeders, leechers), logger.DEBUG) From 270e72ccb6234302d84cd335dfbf1da9feb6bf25 Mon Sep 17 00:00:00 2001 From: medariox Date: Fri, 10 Jun 2016 15:16:48 +0200 Subject: [PATCH 36/85] Last 9 providers --- sickbeard/providers/torrentday.py | 34 ++++--- sickbeard/providers/torrentleech.py | 23 +++-- sickbeard/providers/torrentz.py | 36 +++++--- sickbeard/providers/transmitthenet.py | 123 ++++++++++++++------------ sickbeard/providers/tvchaosuk.py | 26 ++++-- sickbeard/providers/womble.py | 11 ++- sickbeard/providers/xthor.py | 21 +++-- sickbeard/providers/zooqle.py | 8 +- 8 files changed, 164 insertions(+), 118 deletions(-) diff --git a/sickbeard/providers/torrentday.py b/sickbeard/providers/torrentday.py index ff04501530..773ae5c499 100644 --- a/sickbeard/providers/torrentday.py +++ b/sickbeard/providers/torrentday.py @@ -1,27 +1,26 @@ # coding=utf-8 # Author: Mr_Orange # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. 
# -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . from __future__ import unicode_literals import re import traceback + from requests.compat import urljoin from requests.exceptions import RequestException from requests.utils import add_dict_to_cookiejar, dict_from_cookiejar @@ -110,6 +109,7 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many for mode in search_params: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_params[mode]: if mode != 'RSS': @@ -146,7 +146,6 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many try: title = re.sub(r'\[.*\=.*\].*\[/.*\]', '', torrent['name']) if torrent['name'] else None download_url = urljoin(self.urls['download'], '{}/{}'.format(torrent['id'], torrent['fname'])) if torrent['id'] and torrent['fname'] else None - if not all([title, download_url]): continue @@ -156,23 +155,32 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format(title, seeders), logger.DEBUG) + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. Seeders: {1}'.format + (title, seeders), logger.DEBUG) continue torrent_size = torrent['size'] size = convert_size(torrent_size) or -1 - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} - + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format (title, seeders, leechers), logger.DEBUG) items.append(item) except (AttributeError, TypeError, KeyError, ValueError, IndexError): - logger.log('Failed parsing provider. Traceback: {0!r}'.format - (traceback.format_exc()), logger.ERROR) - continue + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) + continue results += items diff --git a/sickbeard/providers/torrentleech.py b/sickbeard/providers/torrentleech.py index ba9c9c3aa9..2d51fafd4b 100644 --- a/sickbeard/providers/torrentleech.py +++ b/sickbeard/providers/torrentleech.py @@ -1,27 +1,26 @@ # coding=utf-8 # Author: Dustyn Gibson # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . from __future__ import unicode_literals import re import traceback + from requests.compat import urljoin from requests.utils import dict_from_cookiejar @@ -154,14 +153,22 @@ def process_column_header(td): if seeders < min(self.minseed, 1): if mode != 'RSS': logger.log("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1})".format + " minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue torrent_size = result('td')[labels.index('Size')].get_text() size = convert_size(torrent_size, units=units) or -1 - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format (title, seeders, leechers), logger.DEBUG) diff --git a/sickbeard/providers/torrentz.py b/sickbeard/providers/torrentz.py index eca4cbcebb..2507674603 100644 --- a/sickbeard/providers/torrentz.py +++ b/sickbeard/providers/torrentz.py @@ -1,20 +1,20 @@ # coding=utf-8 # Author: Dustyn Gibson # -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . from __future__ import unicode_literals @@ -92,25 +92,37 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man title_raw = item.title.text # Add "-" after codec and add missing "." - title = re.sub(r'([xh][ .]?264|xvid)( )', r'\1-', title_raw).replace(' ','.') if title_raw else '' - t_hash = item.guid.text.rsplit('/', 1)[-1] - - if not all([title, t_hash]): + title = re.sub(r'([xh][ .]?264|xvid)( )', r'\1-', title_raw).replace(' ', '.') if title_raw else '' + torrent_hash = item.guid.text.rsplit('/', 1)[-1] + if not all([title, torrent_hash]): continue - download_url = "magnet:?xt=urn:btih:" + t_hash + "&dn=" + title + self._custom_trackers + download_url = "magnet:?xt=urn:btih:" + torrent_hash + "&dn=" + title + self._custom_trackers torrent_size, seeders, leechers = self._split_description(item.find('description').text) size = convert_size(torrent_size) or -1 # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. 
Seeders: {1}'.format (title, seeders), logger.DEBUG) continue - result = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': t_hash} - items.append(result) + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': torrent_hash + } + if mode != 'RSS': + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) + + items.append(item) except (AttributeError, TypeError, KeyError, ValueError, IndexError): logger.log('Failed parsing provider. Traceback: {0!r}'.format (traceback.format_exc()), logger.ERROR) diff --git a/sickbeard/providers/transmitthenet.py b/sickbeard/providers/transmitthenet.py index 03f8182432..6730ac8104 100644 --- a/sickbeard/providers/transmitthenet.py +++ b/sickbeard/providers/transmitthenet.py @@ -1,26 +1,25 @@ # coding=utf-8 # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . 
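
The torrentz hunk above assembles a magnet URI by plain concatenation from the info-hash, the display name, and the user's custom trackers. A standalone sketch with placeholder values; percent-quoting the name and tracker URLs is an added precaution in this sketch, not something the patch itself does:

    from urllib.parse import quote  # urllib.quote on this Python 2 codebase

    torrent_hash = '0123456789abcdef0123456789abcdef01234567'  # placeholder
    title = 'Show.S01E01.720p.HDTV.x264'
    trackers = ['udp://tracker.example.org:1337/announce']  # placeholder URL
    magnet = ('magnet:?xt=urn:btih:' + torrent_hash
              + '&dn=' + quote(title)
              + ''.join('&tr=' + quote(t, safe='') for t in trackers))
    print(magnet)
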
from __future__ import unicode_literals import re import traceback + from requests.utils import dict_from_cookiejar from requests.compat import urljoin @@ -96,6 +95,8 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_strings[mode]: if mode != 'RSS': @@ -117,70 +118,74 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man logger.log('No data returned from provider', logger.DEBUG) continue - try: - with BS4Parser(data, 'html5lib') as html: - torrent_table = html.find('table', {'id': 'torrent_table'}) - if not torrent_table: - logger.log('Data returned from %s does not contain any torrents' % self.name, logger.DEBUG) - continue - - torrent_rows = torrent_table('tr', {'class': 'torrent'}) - - # Continue only if one Release is found - if not torrent_rows: - logger.log('Data returned from %s does not contain any torrents' % self.name, logger.DEBUG) - continue + with BS4Parser(data, 'html5lib') as html: + torrent_table = html.find('table', {'id': 'torrent_table'}) + if not torrent_table: + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) + continue - for torrent_row in torrent_rows: - try: - freeleech = torrent_row.find('img', alt='Freeleech') is not None - if self.freeleech and not freeleech: - continue + torrent_rows = torrent_table('tr', {'class': 'torrent'}) - download_item = torrent_row.find('a', {'title': [ - 'Download Torrent', # Download link - 'Previously Grabbed Torrent File', # Already Downloaded - 'Currently Seeding Torrent', # Seeding - 'Currently Leeching Torrent', # Leeching - ]}) + # Continue only if one Release is found + if not torrent_rows: + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) + continue - if not download_item: - continue + for torrent_row in torrent_rows: + try: + freeleech = torrent_row.find('img', alt='Freeleech') is not None + if self.freeleech and not freeleech: + continue - download_url = urljoin(self.url, download_item['href']) + download_item = torrent_row.find('a', {'title': [ + 'Download Torrent', # Download link + 'Previously Grabbed Torrent File', # Already Downloaded + 'Currently Seeding Torrent', # Seeding + 'Currently Leeching Torrent', # Leeching + ]}) - temp_anchor = torrent_row.find('a', {'data-src': True}) - title = temp_anchor['data-src'].rsplit('.', 1)[0] - if not all([title, download_url]): - continue + if not download_item: + continue - cells = torrent_row('td') - seeders = try_int(cells[8].text.strip()) - leechers = try_int(cells[9].text.strip()) + download_url = urljoin(self.url, download_item['href']) - # Filter unseeded torrent - if seeders < min(self.minseed, 1): - if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. 
Seeders: {1})".format - (title, seeders), logger.DEBUG) - continue + temp_anchor = torrent_row.find('a', {'data-src': True}) + title = temp_anchor['data-src'].rsplit('.', 1)[0] + if not all([title, download_url]): + continue - size = temp_anchor['data-filesize'] or -1 + cells = torrent_row('td') + seeders = try_int(cells[8].text.strip()) + leechers = try_int(cells[9].text.strip()) - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} + # Filter unseeded torrent + if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log('Found result: {0} with {1} seeders and {2} leechers'.format - (title, seeders, leechers), logger.DEBUG) - - items.append(item) - except (AttributeError, TypeError, KeyError, ValueError, IndexError): - logger.log('Failed parsing provider. Traceback: {0!r}'.format - (traceback.format_exc()), logger.ERROR) + logger.log("Discarding torrent because it doesn't meet the" + " minimum seeders: {0}. Seeders: {1}".format + (title, seeders), logger.DEBUG) continue - except Exception: - logger.log('Failed parsing provider. Traceback: {0!r}'.format - (traceback.format_exc()), logger.ERROR) + + size = temp_anchor['data-filesize'] or -1 + + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } + if mode != 'RSS': + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) + + items.append(item) + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) + continue results += items diff --git a/sickbeard/providers/tvchaosuk.py b/sickbeard/providers/tvchaosuk.py index b36100dda7..c091334afb 100644 --- a/sickbeard/providers/tvchaosuk.py +++ b/sickbeard/providers/tvchaosuk.py @@ -1,26 +1,28 @@ # coding=utf-8 # -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . 
from __future__ import unicode_literals + import re import traceback from sickbeard import logger, tvcache from sickbeard.bs4_parser import BS4Parser + from sickrage.helper.common import convert_size, try_int from sickrage.helper.exceptions import AuthException from sickrage.providers.torrent.TorrentProvider import TorrentProvider @@ -52,7 +54,7 @@ def _check_auth(self): if self.username and self.password: return True - raise AuthException('Your authentication credentials for ' + self.name + ' are missing, check your config.') + raise AuthException('Your authentication credentials for {0} are missing, check your config.'.format(self.name)) def login(self): if len(self.session.cookies) >= 4: @@ -138,8 +140,8 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log('Discarding torrent because it doesn\'t meet the' - ' minimum seeders: {0}. Seeders: {1})'.format + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. Seeders: {1}'.format (title, seeders), logger.DEBUG) continue @@ -160,11 +162,19 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man torrent_size = torrent('td')[labels.index('Size')].get_text(strip=True) size = convert_size(torrent_size, units=units) or -1 + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format (title, seeders, leechers), logger.DEBUG) - item = {'title': title + '.hdtv.x264', 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} items.append(item) except (AttributeError, TypeError, KeyError, ValueError, IndexError): logger.log('Failed parsing provider. Traceback: {0!r}'.format diff --git a/sickbeard/providers/womble.py b/sickbeard/providers/womble.py index ec288d456d..af5d075f9b 100644 --- a/sickbeard/providers/womble.py +++ b/sickbeard/providers/womble.py @@ -1,22 +1,20 @@ # coding=utf-8 # Author: Nic Wolfe # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . from __future__ import unicode_literals @@ -72,4 +70,5 @@ def updateCache(self): def _checkAuth(self, data): return data if data['feed'] and data['feed']['title'] != 'Invalid Link' else None + provider = WombleProvider() diff --git a/sickbeard/providers/xthor.py b/sickbeard/providers/xthor.py index 3bc1849961..0ecde09ebc 100644 --- a/sickbeard/providers/xthor.py +++ b/sickbeard/providers/xthor.py @@ -1,22 +1,21 @@ # coding=utf-8 # Author: adaur # Rewrite: Dustyn Gibson (miigotu) - # -# This file is part of SickRage. 
+# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . from __future__ import unicode_literals @@ -171,14 +170,22 @@ def process_column_header(td): if seeders < min(self.minseed, 1): if mode != 'RSS': logger.log("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1})".format + ' minimum seeders: {0}. Seeders: {1}'.format (title, seeders), logger.DEBUG) continue torrent_size = cells[labels.index('Taille')].get_text() size = convert_size(torrent_size, units=units) or -1 - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format (title, seeders, leechers), logger.DEBUG) diff --git a/sickbeard/providers/zooqle.py b/sickbeard/providers/zooqle.py index 868ebf7f42..fd69dc1240 100644 --- a/sickbeard/providers/zooqle.py +++ b/sickbeard/providers/zooqle.py @@ -19,6 +19,7 @@ from __future__ import unicode_literals import traceback + from requests.compat import urljoin from sickbeard import logger, tvcache @@ -119,13 +120,12 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log('Discarding torrent because it doesn\'t meet the' - ' minimum seeders: {0}. Seeders: {1})'.format + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. 
Seeders: {1}'.format (title, seeders), logger.DEBUG) continue torrent_size = cells[4].get_text(strip=True) - size = convert_size(torrent_size, units=units) or -1 item = { @@ -147,8 +147,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man (traceback.format_exc()), logger.ERROR) continue - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results From 61d1076a499d0537b4bdca023c1f30ff3717b38b Mon Sep 17 00:00:00 2001 From: medariox Date: Fri, 10 Jun 2016 15:19:52 +0200 Subject: [PATCH 37/85] Remove sceneelite from init --- sickbeard/providers/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sickbeard/providers/__init__.py b/sickbeard/providers/__init__.py index f4eec2c5b9..62c1dfeff6 100644 --- a/sickbeard/providers/__init__.py +++ b/sickbeard/providers/__init__.py @@ -25,7 +25,7 @@ omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, hounddawgs, speedcd, nyaatorrents, bluetigers, xthor, abnormal, torrentbytes, cpasbien,\ freshontv, morethantv, t411, tokyotoshokan, shazbat, rarbg, alpharatio, tntvillage, binsearch, torrentproject, extratorrent, \ scenetime, btdigg, transmitthenet, tvchaosuk, bitcannon, pretome, gftracker, hdspace, newpct, elitetorrent, bitsnoop, danishbits, hd4free, limetorrents, \ - norbits, ilovetorrents, sceneelite, anizb, bithdtv, zooqle + norbits, ilovetorrents, anizb, bithdtv, zooqle __all__ = [ 'womble', 'btn', 'thepiratebay', 'kat', 'torrentleech', 'scc', 'hdtorrents', @@ -36,7 +36,7 @@ 'xthor', 'abnormal', 'scenetime', 'btdigg', 'transmitthenet', 'tvchaosuk', 'torrentproject', 'extratorrent', 'bitcannon', 'torrentz', 'pretome', 'gftracker', 'hdspace', 'newpct', 'elitetorrent', 'bitsnoop', 'danishbits', 'hd4free', 'limetorrents', - 'norbits', 'ilovetorrents', 'sceneelite', 'anizb', 'bithdtv', 'zooqle' + 'norbits', 'ilovetorrents', 'anizb', 'bithdtv', 'zooqle' ] From eb461f0097b6ec8d2c9c00a36729491a72034641 Mon Sep 17 00:00:00 2001 From: medariox Date: Fri, 10 Jun 2016 15:41:17 +0200 Subject: [PATCH 38/85] Renamed all search_params to search_strings --- sickbeard/providers/btn.py | 25 +++++++++++++------------ sickbeard/providers/freshontv.py | 6 +++--- sickbeard/providers/hdbits.py | 6 +++--- sickbeard/providers/iptorrents.py | 6 +++--- sickbeard/providers/norbits.py | 6 +++--- sickbeard/providers/pretome.py | 6 +++--- sickbeard/providers/scenetime.py | 6 +++--- sickbeard/providers/t411.py | 6 +++--- sickbeard/providers/tntvillage.py | 6 +++--- sickbeard/providers/torrentday.py | 6 +++--- 10 files changed, 40 insertions(+), 39 deletions(-) diff --git a/sickbeard/providers/btn.py b/sickbeard/providers/btn.py index f9e0c41ef4..dd46b64007 100644 --- a/sickbeard/providers/btn.py +++ b/sickbeard/providers/btn.py @@ -18,13 +18,14 @@ from __future__ import unicode_literals -from datetime import datetime import jsonrpclib import math import socket import time - import sickbeard + +from datetime import datetime + from sickbeard import classes, logger, scene_exceptions, tvcache from sickbeard.common import cpu_presets from sickbeard.helpers import sanitizeSceneName @@ -63,13 +64,13 @@ def _checkAuthFromData(self, parsed_json): return self._check_auth() if 'api-error' in parsed_json: - logger.log('Incorrect authentication credentials: % s' % parsed_json['api-error'], logger.DEBUG) - raise AuthException( - 'Your authentication credentials for ' + self.name + ' are incorrect, check your config.') + 
logger.log('Incorrect authentication credentials: %s' % parsed_json['api-error'], logger.DEBUG) + raise AuthException('Your authentication credentials for {0} are missing,' + ' check your config.'.format(self.name)) return True - def search(self, search_params, age=0, ep_obj=None): # pylint:disable=too-many-locals + def search(self, search_strings, age=0, ep_obj=None): # pylint:disable=too-many-locals self._check_auth() @@ -81,9 +82,9 @@ def search(self, search_params, age=0, ep_obj=None): # pylint:disable=too-many- if age: params['age'] = '<=' + str(int(age)) - if search_params: - params.update(search_params) - logger.log('Search string: %s' % search_params, logger.DEBUG) + if search_strings: + params.update(search_strings) + logger.log('Search string: %s' % search_strings, logger.DEBUG) parsed_json = self._api_call(apikey, params) if not parsed_json: @@ -288,9 +289,9 @@ def _getRSSData(self): seconds_since_last_update = math.ceil(time.time() - time.mktime(self._getLastUpdate().timetuple())) # default to 15 minutes - seconds_minTime = self.minTime * 60 - if seconds_since_last_update < seconds_minTime: - seconds_since_last_update = seconds_minTime + seconds_min_time = self.minTime * 60 + if seconds_since_last_update < seconds_min_time: + seconds_since_last_update = seconds_min_time # Set maximum to 24 hours (24 * 60 * 60 = 86400 seconds) of 'RSS' data search, older things will need to be done through backlog if seconds_since_last_update > 86400: diff --git a/sickbeard/providers/freshontv.py b/sickbeard/providers/freshontv.py index 3ccc1f490c..ad2e636fe5 100644 --- a/sickbeard/providers/freshontv.py +++ b/sickbeard/providers/freshontv.py @@ -103,17 +103,17 @@ def login(self): return False - def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches, too-many-statements + def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches, too-many-statements results = [] if not self.login(): return results freeleech = '3' if self.freeleech else '0' - for mode in search_params: + for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) - for search_string in search_params[mode]: + for search_string in search_strings[mode]: if mode != 'RSS': logger.log('Search string: {0}'.format(search_string), diff --git a/sickbeard/providers/hdbits.py b/sickbeard/providers/hdbits.py index fa22dfcdb9..04562fc481 100644 --- a/sickbeard/providers/hdbits.py +++ b/sickbeard/providers/hdbits.py @@ -75,16 +75,16 @@ def _get_title_and_url(self, item): return title, url - def search(self, search_params, age=0, ep_obj=None): + def search(self, search_strings, age=0, ep_obj=None): # FIXME results = [] - logger.log('Search string: {0}'.format(search_params), logger.DEBUG) + logger.log('Search string: {0}'.format(search_strings), logger.DEBUG) self._check_auth() - parsed_json = self.get_url(self.urls['search'], post_data=search_params, returns='json') + parsed_json = self.get_url(self.urls['search'], post_data=search_strings, returns='json') if not parsed_json: return [] diff --git a/sickbeard/providers/iptorrents.py b/sickbeard/providers/iptorrents.py index cb487bf83d..e95b781cb2 100644 --- a/sickbeard/providers/iptorrents.py +++ b/sickbeard/providers/iptorrents.py @@ -86,17 +86,17 @@ def login(self): return True - def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches, too-many-statements + def search(self, search_strings, age=0, ep_obj=None): 
# pylint: disable=too-many-locals, too-many-branches, too-many-statements results = [] if not self.login(): return results freeleech = '&free=on' if self.freeleech else '' - for mode in search_params: + for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) - for search_string in search_params[mode]: + for search_string in search_strings[mode]: if mode != 'RSS': logger.log('Search string: {0}'.format(search_string), diff --git a/sickbeard/providers/norbits.py b/sickbeard/providers/norbits.py index 7d12bcaaa3..8bcedcc710 100644 --- a/sickbeard/providers/norbits.py +++ b/sickbeard/providers/norbits.py @@ -65,16 +65,16 @@ def _checkAuthFromData(self, parsed_json): # pylint: disable=invalid-name return True - def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many-locals + def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals """ Do the actual searching and JSON parsing""" results = [] - for mode in search_params: + for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) - for search_string in search_params[mode]: + for search_string in search_strings[mode]: if mode != 'RSS': logger.log('Search string: {0}'.format (search_string), logger.DEBUG) diff --git a/sickbeard/providers/pretome.py b/sickbeard/providers/pretome.py index b4ff4da0c2..4b289f87cb 100644 --- a/sickbeard/providers/pretome.py +++ b/sickbeard/providers/pretome.py @@ -83,15 +83,15 @@ def login(self): return True - def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many-branches, too-many-statements, too-many-locals + def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-branches, too-many-statements, too-many-locals results = [] if not self.login(): return results - for mode in search_params: + for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) - for search_string in search_params[mode]: + for search_string in search_strings[mode]: if mode != 'RSS': logger.log('Search string: {0}'.format(search_string), diff --git a/sickbeard/providers/scenetime.py b/sickbeard/providers/scenetime.py index 3dde79cb37..7ab271c546 100644 --- a/sickbeard/providers/scenetime.py +++ b/sickbeard/providers/scenetime.py @@ -72,15 +72,15 @@ def login(self): return True - def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many-branches, too-many-locals + def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-branches, too-many-locals results = [] if not self.login(): return results - for mode in search_params: + for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) - for search_string in search_params[mode]: + for search_string in search_strings[mode]: if mode != 'RSS': logger.log('Search string: {0}'.format(search_string), diff --git a/sickbeard/providers/t411.py b/sickbeard/providers/t411.py index 3160e8e65c..c81601ee5d 100644 --- a/sickbeard/providers/t411.py +++ b/sickbeard/providers/t411.py @@ -84,15 +84,15 @@ def login(self): logger.log('Token not found in authentication response', logger.WARNING) return False - def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many-branches, too-many-locals, too-many-statements + def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-branches, too-many-locals, too-many-statements results = [] if not self.login(): return results - for mode in 
search_params: + for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) - for search_string in search_params[mode]: + for search_string in search_strings[mode]: if mode != 'RSS': logger.log('Search string: {0}'.format(search_string), diff --git a/sickbeard/providers/tntvillage.py b/sickbeard/providers/tntvillage.py index 562eb61719..ff27ae2159 100644 --- a/sickbeard/providers/tntvillage.py +++ b/sickbeard/providers/tntvillage.py @@ -275,17 +275,17 @@ def _is_season_pack(name): if int(episodes[0]['count']) == len(parse_result.episode_numbers): return True - def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches, too-many-statements + def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches, too-many-statements results = [] if not self.login(): return results self.categories = 'cat=' + str(self.cat) - for mode in search_params: + for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) - for search_string in search_params[mode]: + for search_string in search_strings[mode]: if mode == 'RSS': self.page = 2 diff --git a/sickbeard/providers/torrentday.py b/sickbeard/providers/torrentday.py index 773ae5c499..d72742fa56 100644 --- a/sickbeard/providers/torrentday.py +++ b/sickbeard/providers/torrentday.py @@ -101,16 +101,16 @@ def login(self): logger.log('Unable to obtain cookie', logger.WARNING) return False - def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many-locals + def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals results = [] if not self.login(): return results - for mode in search_params: + for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) - for search_string in search_params[mode]: + for search_string in search_strings[mode]: if mode != 'RSS': logger.log('Search string: {0}'.format(search_string), From 29048b369443256db02ff7aba685ca0943a887a7 Mon Sep 17 00:00:00 2001 From: medariox Date: Sat, 11 Jun 2016 14:16:53 +0200 Subject: [PATCH 39/85] Fix for GFTracker --- sickbeard/providers/gftracker.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/sickbeard/providers/gftracker.py b/sickbeard/providers/gftracker.py index b9458c8f2e..a4b9594c56 100644 --- a/sickbeard/providers/gftracker.py +++ b/sickbeard/providers/gftracker.py @@ -63,7 +63,8 @@ def __init__(self): def _check_auth(self): if not self.username or not self.password: - raise AuthException('Your authentication credentials for ' + self.name + ' are missing, check your config.') + raise AuthException('Your authentication credentials for {0} are missing,' + ' check your config.'.format(self.name)) return True @@ -152,7 +153,9 @@ def process_column_header(td): try: cells = result('td') - title = cells[labels.index('Name')].find('a').find_next('a')['title'] or cells[labels.index('Name')].find('a')['title'] + title_anchor = cells[labels.index('Name')].find('a').find_next('a') or \ + cells[labels.index('Name')].find('a') + title = title_anchor.get('title') if title_anchor else None download_url = self.url + cells[labels.index('DL')].find('a')['href'] if not all([title, download_url]): continue From ea7b973d7a01b5f787514b1d8f5fe512cf7d1942 Mon Sep 17 00:00:00 2001 From: medariox Date: Sat, 11 Jun 2016 14:57:55 +0200 Subject: [PATCH 40/85] Fix TNTVillage --- sickbeard/providers/tntvillage.py | 22 ++++++++++++++-------- 1 file 
changed, 14 insertions(+), 8 deletions(-) diff --git a/sickbeard/providers/tntvillage.py b/sickbeard/providers/tntvillage.py index ff27ae2159..f258432f00 100644 --- a/sickbeard/providers/tntvillage.py +++ b/sickbeard/providers/tntvillage.py @@ -117,7 +117,8 @@ def __init__(self): def _check_auth(self): if not self.username or not self.password: - raise AuthException('Your authentication credentials for {0} are missing, check your config.'.format(self.name)) + raise AuthException('Your authentication credentials for {0} are missing,' + ' check your config.'.format(self.name)) return True @@ -137,7 +138,8 @@ def login(self): logger.log('Unable to connect to provider', logger.WARNING) return False - if re.search('Sono stati riscontrati i seguenti errori', response) or re.search('Connettiti', response): + if re.search('Sono stati riscontrati i seguenti errori', response) or \ + re.search('Connettiti', response): logger.log('Invalid username or password. Check your settings', logger.WARNING) return False @@ -181,7 +183,8 @@ def _episodeQuality(torrent_rows): # pylint: disable=too-many-return-statements if img_all: for img_type in img_all: try: - file_quality = file_quality + ' ' + img_type['src'].replace('style_images/mkportal-636/', '').replace('.gif', '').replace('.png', '') + file_quality = file_quality + ' ' + img_type['src'].replace('style_images/mkportal-636/', '') + file_quality = file_quality.replace('.gif', '').replace('.png', '') except Exception: logger.log('Failed parsing quality. Traceback: %s' % traceback.format_exc(), logger.ERROR) @@ -201,7 +204,8 @@ def checkName(options, func): if img_all: file_quality = (torrent_rows('td'))[1].get_text() - webdl = checkName(['webdl', 'webmux', 'webrip', 'dl-webmux', 'web-dlmux', 'webdl-mux', 'web-dl', 'webdlmux', 'dlmux'], any) + webdl = checkName(['webdl', 'webmux', 'webrip', 'dl-webmux', 'web-dlmux', + 'webdl-mux', 'web-dl', 'webdlmux', 'dlmux'], any) if sdOptions and not dvdOptions and not fullHD and not hdOptions: return Quality.SDTV @@ -272,7 +276,7 @@ def _is_season_pack(name): main_db_con = db.DBConnection() sql_selection = 'select count(*) as count from tv_episodes where showid = ? and season = ?' 
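        # Note: with unicode_literals in effect, DBConnection rows appear to be
        # keyed by byte strings under Python 2, hence the b'count' lookup below
        # (the GenericProvider patch later in this series applies the same
        # b'season'/b'episode' change).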
episodes = main_db_con.select(sql_selection, [parse_result.show.indexerid, parse_result.season_number]) - if int(episodes[0]['count']) == len(parse_result.episode_numbers): + if int(episodes[0][b'count']) == len(parse_result.episode_numbers): return True def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches, too-many-statements @@ -333,8 +337,10 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for result in torrent_table('tr')[2:]: try: link = result.find('td').find('a') - title = link.string - download_url = self.urls['download'] % result('td')[8].find('a')['href'][-8:] + title = link.string if link else None + dl_link = result('td') + dl_url = dl_link[8].find('a')['href'][-8:] if len(dl_link) > 7 else None + download_url = self.urls['download'] % dl_url if dl_url else None if not all([title, download_url]): continue @@ -366,7 +372,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man continue if self.engrelease and not self._is_english(result): - logger.log('Torrent isnt english audio/subtitled , skipping: %s ' % title, logger.DEBUG) + logger.log('Torrent isnt english audio/subtitled, skipping: %s ' % title, logger.DEBUG) continue search_show = re.split(r'([Ss][\d{1,2}]+)', search_string)[0] From dce71b32617c354a2514bb9da71b95a234de0702 Mon Sep 17 00:00:00 2001 From: medariox Date: Sat, 11 Jun 2016 15:00:45 +0200 Subject: [PATCH 41/85] Fix HDTorrents --- sickbeard/providers/hdtorrents.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/sickbeard/providers/hdtorrents.py b/sickbeard/providers/hdtorrents.py index 1734361005..b835ea903e 100644 --- a/sickbeard/providers/hdtorrents.py +++ b/sickbeard/providers/hdtorrents.py @@ -146,17 +146,15 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if len(cells) < len(labels): continue - title = cells[labels.index('Filename')].a.get_text(strip=True) - seeders = try_int(cells[labels.index('S')].get_text(strip=True)) - leechers = try_int(cells[labels.index('L')].get_text(strip=True)) - torrent_size = cells[labels.index('Size')].get_text() - - size = convert_size(torrent_size) or -1 + title = cells[labels.index('Filename')].a + title = title.get_text(strip=True) if title else None download_url = self.url + '/' + cells[labels.index('Dl')].a['href'] - if not all([title, download_url]): continue + seeders = try_int(cells[labels.index('S')].get_text(strip=True)) + leechers = try_int(cells[labels.index('L')].get_text(strip=True)) + # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': @@ -165,6 +163,9 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man (title, seeders), logger.DEBUG) continue + torrent_size = cells[labels.index('Size')].get_text() + size = convert_size(torrent_size) or -1 + item = { 'title': title, 'link': download_url, From 00f671279697158badb9d2830888b8b282cbd757 Mon Sep 17 00:00:00 2001 From: medariox Date: Sat, 11 Jun 2016 15:23:01 +0200 Subject: [PATCH 42/85] Fix Extratorrent --- sickbeard/providers/extratorrent.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/sickbeard/providers/extratorrent.py b/sickbeard/providers/extratorrent.py index ef71dd763c..2c622938b8 100644 --- a/sickbeard/providers/extratorrent.py +++ b/sickbeard/providers/extratorrent.py @@ -78,10 +78,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for item in 
html('item'): try: title = re.sub(r'^$', '', item.find('title').get_text(strip=True)) - seeders = try_int(item.find('seeders').get_text(strip=True)) - leechers = try_int(item.find('leechers').get_text(strip=True)) - torrent_size = item.find('size').get_text() - size = convert_size(torrent_size) or -1 if sickbeard.TORRENT_METHOD == 'blackhole': enclosure = item.find('enclosure') # Backlog doesnt have enclosure @@ -94,7 +90,12 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if not all([title, download_url]): continue - # Filter unseeded torrent + seeders = item.find('seeders') + seeders = try_int(seeders.get_text(strip=True)) if seeders else 1 + leechers = item.find('leechers') + leechers = try_int(leechers.get_text(strip=True)) if leechers else 0 + + # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': logger.log("Discarding torrent because it doesn't meet the" @@ -102,6 +103,10 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man (title, seeders), logger.DEBUG) continue + torrent_size = item.find('size') + torrent_size = torrent_size.get_text() if torrent_size else None + size = convert_size(torrent_size) or -1 + item = { 'title': title, 'link': download_url, From 4ce7ceb1d9aa0420e2229cfd5236fb350fcd427b Mon Sep 17 00:00:00 2001 From: medariox Date: Sat, 11 Jun 2016 15:26:29 +0200 Subject: [PATCH 43/85] Fix HDSpace --- sickbeard/providers/hdspace.py | 93 +++++++++++++++++----------------- 1 file changed, 47 insertions(+), 46 deletions(-) diff --git a/sickbeard/providers/hdspace.py b/sickbeard/providers/hdspace.py index 1b21e229f8..805d409bd9 100644 --- a/sickbeard/providers/hdspace.py +++ b/sickbeard/providers/hdspace.py @@ -122,59 +122,60 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man logger.log('Could not find main torrent table', logger.ERROR) continue - html = BS4Parser(data[index:], 'html5lib') - if not html: - logger.log('No html data parsed from provider', logger.DEBUG) - continue - - torrents = html('tr') - if not torrents: - continue - - # Skip column headers - for result in torrents[1:]: - if len(result.contents) < 10: - # skip extraneous rows at the end + with BS4Parser(data[index:], 'html5lib') as html: + if not html: + logger.log('No html data parsed from provider', logger.DEBUG) continue - try: - dl_href = result.find('a', attrs={'href': re.compile(r'download.php.*')})['href'] - title = re.search('f=(.*).torrent', dl_href).group(1).replace('+', '.') - download_url = self.urls['base_url'] + dl_href - seeders = int(result.find('span', attrs={'class': 'seedy'}).find('a').text) - leechers = int(result.find('span', attrs={'class': 'leechy'}).find('a').text) - torrent_size = re.match(r'.*?([0-9]+,?\.?[0-9]* [KkMmGg]+[Bb]+).*', str(result), re.DOTALL).group(1) - size = convert_size(torrent_size) or -1 + torrents = html('tr') + if not torrents: + continue - if not all([title, download_url]): + # Skip column headers + for result in torrents[1:]: + if len(result.contents) < 10: + # skip extraneous rows at the end continue - # Filter unseeded torrent - if seeders < min(self.minseed, 1): + try: + dl_href = result.find('a', attrs={'href': re.compile(r'download.php.*')})['href'] + title = re.search('f=(.*).torrent', dl_href).group(1).replace('+', '.') + download_url = self.urls['base_url'] + dl_href + if not all([title, download_url]): + continue + + seeders = int(result.find('span', attrs={'class': 'seedy'}).find('a').text) + leechers = int(result.find('span', 
attrs={'class': 'leechy'}).find('a').text) + + # Filter unseeded torrent + if seeders < min(self.minseed, 1): + if mode != 'RSS': + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. Seeders: {1})'.format + (title, seeders), logger.DEBUG) + continue + + torrent_size = re.match(r'.*?([0-9]+,?\.?[0-9]* [KkMmGg]+[Bb]+).*', str(result), re.DOTALL).group(1) + size = convert_size(torrent_size) or -1 + + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1})'.format - (title, seeders), logger.DEBUG) - continue + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) - item = { - 'title': title, - 'link': download_url, - 'size': size, - 'seeders': seeders, - 'leechers': leechers, - 'pubdate': None, - 'hash': None - } - if mode != 'RSS': - logger.log('Found result: {0} with {1} seeders and {2} leechers'.format - (title, seeders, leechers), logger.DEBUG) - - items.append(item) - except (AttributeError, TypeError, KeyError, ValueError, IndexError): - logger.log('Failed parsing provider. Traceback: {0!r}'.format - (traceback.format_exc()), logger.ERROR) - continue + items.append(item) + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) + continue results += items From 030afbdc73be217240a7c53b6fef88bd2eac95c8 Mon Sep 17 00:00:00 2001 From: medariox Date: Sat, 11 Jun 2016 15:43:06 +0200 Subject: [PATCH 44/85] Use string in SQL with unicode_literals in GenericProvider --- sickrage/providers/GenericProvider.py | 28 +++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/sickrage/providers/GenericProvider.py b/sickrage/providers/GenericProvider.py index 1c48a944a5..71b57b8938 100644 --- a/sickrage/providers/GenericProvider.py +++ b/sickrage/providers/GenericProvider.py @@ -1,21 +1,21 @@ # coding=utf-8 -# This file is part of SickRage. +# This file is part of Medusa. # - # Git: https://github.com/PyMedusa/SickRage.git # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . 
+ from __future__ import unicode_literals import re @@ -216,7 +216,7 @@ def find_search_results(self, show, episodes, search_mode, forced_search=False, parse_result.season_number is not None, parse_result.episode_numbers, [ep for ep in episodes if (ep.season, ep.scene_season)[ep.show.is_scene] == - parse_result.season_number and (ep.episode, ep.scene_episode)[ep.show.is_scene] in parse_result.episode_numbers] + parse_result.season_number and (ep.episode, ep.scene_episode)[ep.show.is_scene] in parse_result.episode_numbers] ]): logger.log( @@ -244,13 +244,13 @@ def find_search_results(self, show, episodes, search_mode, forced_search=False, ) if len(sql_results) == 2: - if int(sql_results[0]['season']) == 0 and int(sql_results[1]['season']) != 0: - actual_season = int(sql_results[1]['season']) - actual_episodes = [int(sql_results[1]['episode'])] + if int(sql_results[0][b'season']) == 0 and int(sql_results[1][b'season']) != 0: + actual_season = int(sql_results[1][b'season']) + actual_episodes = [int(sql_results[1][b'episode'])] same_day_special = True - elif int(sql_results[1]['season']) == 0 and int(sql_results[0]['season']) != 0: - actual_season = int(sql_results[0]['season']) - actual_episodes = [int(sql_results[0]['episode'])] + elif int(sql_results[1][b'season']) == 0 and int(sql_results[0][b'season']) != 0: + actual_season = int(sql_results[0][b'season']) + actual_episodes = [int(sql_results[0][b'episode'])] same_day_special = True elif len(sql_results) != 1: logger.log( @@ -259,8 +259,8 @@ def find_search_results(self, show, episodes, search_mode, forced_search=False, add_cache_entry = True if not add_cache_entry and not same_day_special: - actual_season = int(sql_results[0]['season']) - actual_episodes = [int(sql_results[0]['episode'])] + actual_season = int(sql_results[0][b'season']) + actual_episodes = [int(sql_results[0][b'episode'])] else: actual_season = parse_result.season_number actual_episodes = parse_result.episode_numbers From 293444cf495c917760fefa0b2a2c85f2b1a80480 Mon Sep 17 00:00:00 2001 From: medariox Date: Sat, 11 Jun 2016 15:57:08 +0200 Subject: [PATCH 45/85] Fix BITHDTV --- sickbeard/providers/bithdtv.py | 23 ++++++++++++----------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git a/sickbeard/providers/bithdtv.py b/sickbeard/providers/bithdtv.py index 7b90e13c4b..ac3245528e 100644 --- a/sickbeard/providers/bithdtv.py +++ b/sickbeard/providers/bithdtv.py @@ -18,6 +18,8 @@ from __future__ import unicode_literals +import traceback + from requests.compat import urljoin from requests.utils import dict_from_cookiejar @@ -90,7 +92,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man search_params['cat'] = 12 response = self.get_url(self.urls['search'], params=search_params, returns='response') - if not response.text: + if not response or not response.text: logger.log('No data returned from provider', logger.DEBUG) continue @@ -124,13 +126,12 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log('Discarding torrent because it doesn\'t meet the' - ' minimum seeders: {0}. Seeders: {1})'.format + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. 
Seeders: {1}'.format (title, seeders), logger.DEBUG) continue torrent_size = '{size} {unit}'.format(size=cells[6].contents[0], unit=cells[6].contents[1].get_text()) - size = convert_size(torrent_size, units=units) or -1 item = { @@ -146,18 +147,18 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man logger.log('Found result: {0} with {1} seeders and {2} leechers'.format (title, seeders, leechers), logger.DEBUG) - items.append(item) - except StandardError: + items.append(item) + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) continue - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results def login(self): - """Login method used for logging in before doing search and torrent downloads""" + """Login method used for logging in before doing search and torrent downloads.""" if any(dict_from_cookiejar(self.session.cookies).values()): return True @@ -168,12 +169,12 @@ def login(self): response = self.get_url(self.urls['login'], post_data=login_params, returns='text') if not response: - logger.log(u'Unable to connect to provider', logger.WARNING) + logger.log('Unable to connect to provider', logger.WARNING) self.session.cookies.clear() return False if '
<h2>Login failed!</h2>
' in response: - logger.log(u'Invalid username or password. Check your settings', logger.WARNING) + logger.log('Invalid username or password. Check your settings', logger.WARNING) self.session.cookies.clear() return False From 439defeff07ded4114e9b11a90f822184e4f1338 Mon Sep 17 00:00:00 2001 From: medariox Date: Sat, 11 Jun 2016 16:26:00 +0200 Subject: [PATCH 46/85] Fix TVChaosUK --- sickbeard/providers/tvchaosuk.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/sickbeard/providers/tvchaosuk.py b/sickbeard/providers/tvchaosuk.py index c091334afb..4890b7cdff 100644 --- a/sickbeard/providers/tvchaosuk.py +++ b/sickbeard/providers/tvchaosuk.py @@ -54,7 +54,8 @@ def _check_auth(self): if self.username and self.password: return True - raise AuthException('Your authentication credentials for {0} are missing, check your config.'.format(self.name)) + raise AuthException('Your authentication credentials for {0} are missing,' + ' check your config.'.format(self.name)) def login(self): if len(self.session.cookies) >= 4: @@ -116,11 +117,11 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man with BS4Parser(data, 'html5lib') as html: torrent_table = html.find(id='sortabletable') - torrent_rows = torrent_table("tr") if torrent_table else [] + torrent_rows = torrent_table('tr') if torrent_table else [] # Continue only if at least one Release is found if len(torrent_rows) < 2: - logger.log("Data returned from provider does not contain any torrents", logger.DEBUG) + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue labels = [label.img['title'] if label.img else label.get_text(strip=True) for label in torrent_rows[0]('td')] @@ -129,8 +130,10 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if self.freeleech and not torrent.find('img', alt=re.compile('Free Torrent')): continue - title = torrent.find(class_='tooltip-content').div.get_text(strip=True) - download_url = torrent.find(title='Click to Download this Torrent!').parent['href'] + title = torrent.find(class_='tooltip-content') + title = title.div.get_text(strip=True) if title else None + download_url = torrent.find(title='Click to Download this Torrent!') + download_url = download_url.parent['href'] if download_url else None if not all([title, download_url]): continue From 715b7a83792018c8a4dfa6c0c7bd765a2df46c4f Mon Sep 17 00:00:00 2001 From: medariox Date: Sat, 11 Jun 2016 17:28:54 +0200 Subject: [PATCH 47/85] Improve BitSnoop --- sickbeard/providers/bitsnoop.py | 80 ++++++++++++++++----------------- 1 file changed, 38 insertions(+), 42 deletions(-) diff --git a/sickbeard/providers/bitsnoop.py b/sickbeard/providers/bitsnoop.py index c6af13b84f..bbddffe8a1 100644 --- a/sickbeard/providers/bitsnoop.py +++ b/sickbeard/providers/bitsnoop.py @@ -21,9 +21,8 @@ import traceback import sickbeard -from bs4 import BeautifulSoup - from sickbeard import logger, tvcache +from sickbeard.bs4_parser import BS4Parser from sickrage.helper.common import convert_size, try_int from sickrage.providers.torrent.TorrentProvider import TorrentProvider @@ -57,31 +56,31 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_strings[mode]: if mode != 'RSS': logger.log('Search string: {0}'.format(search_string), logger.DEBUG) - try: - search_url = (self.urls['rss'], self.urls['search'] + 
search_string + '/s/d/1/?fmt=rss')[mode != 'RSS'] + search_url = (self.urls['rss'], self.urls['search'] + search_string + '/s/d/1/?fmt=rss')[mode != 'RSS'] - data = self.get_url(search_url, returns='text') - if not data: - logger.log('No data returned from provider', logger.DEBUG) - continue + data = self.get_url(search_url, returns='text') + if not data: + logger.log('No data returned from provider', logger.DEBUG) + continue - if not data.startswith(' Date: Sat, 11 Jun 2016 16:27:36 +0200 Subject: [PATCH 48/85] Added flag to newznab, for torznab providers. If it's torznab then results are sorted by seeders in search.py. --- sickbeard/providers/newznab.py | 8 ++++---- sickbeard/search.py | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/sickbeard/providers/newznab.py b/sickbeard/providers/newznab.py index ef0d8ca50c..8365639725 100644 --- a/sickbeard/providers/newznab.py +++ b/sickbeard/providers/newznab.py @@ -284,7 +284,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results for mode in search_strings: - torznab = False + self.torznab = False search_params = { 't': 'tvsearch' if 'tvdbid' in str(self.cap_tv_search) else 'search', 'limit': 100, @@ -331,9 +331,9 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man break try: - torznab = 'xmlns:torznab' in html.rss.attrs + self.torznab = 'xmlns:torznab' in html.rss.attrs except AttributeError: - torznab = False + self.torznab = False for item in html('item'): try: @@ -364,7 +364,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man peers = try_int(attr['value']) if attr['name'] == 'peers' else None leechers = peers - seeders if peers else leechers - if not item_size or (torznab and (seeders is -1 or leechers is -1)): + if not item_size or (self.torznab and (seeders is -1 or leechers is -1)): continue size = convert_size(item_size) or -1 diff --git a/sickbeard/search.py b/sickbeard/search.py index 1457d8f805..dc6d891985 100644 --- a/sickbeard/search.py +++ b/sickbeard/search.py @@ -585,7 +585,7 @@ def searchProviders(show, episodes, forced_search=False, downCurQuality=False, m foundResults[cur_provider.name][curEp] = searchResults[curEp] # Sort the list by seeders if possible - if cur_provider.provider_type == 'torrent': + if cur_provider.provider_type == 'torrent' or getattr(cur_provider, 'torznab'): foundResults[cur_provider.name][curEp].sort(key=lambda d: int(d.seeders), reverse=True) break From 97af77d60290e4d1e16d2dc0f01275df65dd0601 Mon Sep 17 00:00:00 2001 From: medariox Date: Sat, 11 Jun 2016 18:13:52 +0200 Subject: [PATCH 49/85] Improve Anizb --- sickbeard/providers/anizb.py | 62 ++++++++++++++++++++---------------- 1 file changed, 34 insertions(+), 28 deletions(-) diff --git a/sickbeard/providers/anizb.py b/sickbeard/providers/anizb.py index 2ee9d7403f..2eb4a3f133 100644 --- a/sickbeard/providers/anizb.py +++ b/sickbeard/providers/anizb.py @@ -20,18 +20,18 @@ import traceback +from requests.compat import urljoin + from sickbeard import logger, tvcache +from sickbeard.bs4_parser import BS4Parser from sickrage.providers.nzb.NZBProvider import NZBProvider from sickrage.helper.common import try_int -from requests.compat import urljoin - -from bs4 import BeautifulSoup - class Anizb(NZBProvider): # pylint: disable=too-many-instance-attributes - """Nzb Provider using the open api of anizb.org for daily (rss) and backlog/forced searches""" + """Nzb Provider using the open api of anizb.org for daily (rss) and 
backlog/forced searches.""" + def __init__(self): # Provider Init @@ -63,13 +63,14 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results for mode in search_strings: + items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_strings[mode]: if mode != 'RSS': logger.log('Search string: {0}'.format(search_string), logger.DEBUG) - try: search_url = (self.urls['rss'], self.urls['api'] + search_string)[mode != 'RSS'] data = self.get_url(search_url, returns='text') @@ -81,32 +82,37 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man logger.log('Expected xml but got something else, is your mirror failing?', logger.INFO) continue - data = BeautifulSoup(data, 'html5lib') - entries = data('item') - if not entries: - logger.log('Returned xml contained no results', logger.INFO) - continue - - for item in entries: - try: - title = item.title.get_text(strip=True) - download_url = item.enclosure.get('url').strip() + with BS4Parser(data, 'html5lib') as html: + entries = html('item') + if not entries: + logger.log('Returned xml contained no results', logger.INFO) + continue - if not (title and download_url): + for item in entries: + try: + title = item.title.get_text(strip=True) + download_url = item.enclosure.get('url').strip() + if not (title and download_url): + continue + + # description = item.find('description') + size = try_int(item.enclosure.get('length', -1)) + + item = { + 'title': title, + 'link': download_url, + 'size': size + } + + items.append(item) + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) continue - # description = item.find('description') - size = try_int(item.enclosure.get('length', -1)) - except (AttributeError, TypeError, KeyError, ValueError): - continue - - result = {'title': title, 'link': download_url, 'size': size} - results.append(result) + results += items - except (AttributeError, TypeError, KeyError, ValueError, IndexError): - logger.log('Failed parsing provider. 
Traceback: {0!r}'.format - (traceback.format_exc()), logger.ERROR) + return results - return results provider = Anizb() From 4229c79d33c3eb20a8630757592738d52b4631da Mon Sep 17 00:00:00 2001 From: medariox Date: Sat, 11 Jun 2016 18:23:51 +0200 Subject: [PATCH 50/85] Improve Bluetigers --- sickbeard/providers/bluetigers.py | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/sickbeard/providers/bluetigers.py b/sickbeard/providers/bluetigers.py index 2e6ce79c4a..d7adffb050 100644 --- a/sickbeard/providers/bluetigers.py +++ b/sickbeard/providers/bluetigers.py @@ -100,16 +100,16 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if not data: continue - try: - with BS4Parser(data, 'html5lib') as html: - result_linkz = html('a', href=re.compile('torrents-details')) + with BS4Parser(data, 'html5lib') as html: + result_linkz = html('a', href=re.compile('torrents-details')) - if not result_linkz: - logger.log('Data returned from provider do not contains any torrent', logger.DEBUG) - continue + if not result_linkz: + logger.log('Data returned from provider do not contains any torrent', logger.DEBUG) + continue - if result_linkz: - for link in result_linkz: + if result_linkz: + for link in result_linkz: + try: title = link.text download_url = self.urls['base_url'] + link['href'] download_url = download_url.replace('torrents-details', 'download') @@ -143,12 +143,12 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man items.append(item) - except (AttributeError, TypeError, KeyError, ValueError, IndexError): - logger.log('Failed parsing provider. Traceback: {0!r}'.format - (traceback.format_exc()), logger.ERROR) - continue + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. 
Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) + continue - results += items + results += items return results From fd63285ddda58a89f2e83ddd4bdd45e630d75272 Mon Sep 17 00:00:00 2001 From: medariox Date: Sat, 11 Jun 2016 18:26:28 +0200 Subject: [PATCH 51/85] Cleanup BTdigg --- sickbeard/providers/btdigg.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/sickbeard/providers/btdigg.py b/sickbeard/providers/btdigg.py index 4b6a0abe92..bd737da185 100644 --- a/sickbeard/providers/btdigg.py +++ b/sickbeard/providers/btdigg.py @@ -57,11 +57,14 @@ def __init__(self): def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals results = [] search_params = {'p': 0} + for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_strings[mode]: search_params['q'] = search_string + if mode != 'RSS': search_params['order'] = 0 logger.log('Search string: {0}'.format(search_string), logger.DEBUG) @@ -74,6 +77,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man search_url = self.custom_url + 'api/private-341ada3245790954/s02' else: search_url = self.urls['api'] + jdata = self.get_url(search_url, params=search_params, returns='json') if not jdata: logger.log('Provider did not return data', logger.DEBUG) From 238cef657defce83cf976f4d93c4d0cc316be46a Mon Sep 17 00:00:00 2001 From: medariox Date: Sat, 11 Jun 2016 18:38:54 +0200 Subject: [PATCH 52/85] Improve Hounddawgs --- sickbeard/providers/cpasbien.py | 1 + sickbeard/providers/extratorrent.py | 1 + sickbeard/providers/hdtorrents.py | 1 + sickbeard/providers/hounddawgs.py | 90 ++++++++++++++--------------- 4 files changed, 47 insertions(+), 46 deletions(-) diff --git a/sickbeard/providers/cpasbien.py b/sickbeard/providers/cpasbien.py index 039d2d141c..3550c42c1f 100644 --- a/sickbeard/providers/cpasbien.py +++ b/sickbeard/providers/cpasbien.py @@ -48,6 +48,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_strings[mode]: if mode != 'RSS': diff --git a/sickbeard/providers/extratorrent.py b/sickbeard/providers/extratorrent.py index 2c622938b8..9675e1e6a2 100644 --- a/sickbeard/providers/extratorrent.py +++ b/sickbeard/providers/extratorrent.py @@ -58,6 +58,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_strings[mode]: if mode != 'RSS': logger.log('Search string: {0}'.format(search_string), logger.DEBUG) diff --git a/sickbeard/providers/hdtorrents.py b/sickbeard/providers/hdtorrents.py index b835ea903e..e784d5ba22 100644 --- a/sickbeard/providers/hdtorrents.py +++ b/sickbeard/providers/hdtorrents.py @@ -90,6 +90,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_strings[mode]: if mode != 'RSS': diff --git a/sickbeard/providers/hounddawgs.py b/sickbeard/providers/hounddawgs.py index 4f5eb9d704..b4f91c7e62 100644 --- a/sickbeard/providers/hounddawgs.py +++ b/sickbeard/providers/hounddawgs.py @@ -102,6 +102,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] 
logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_strings[mode]: if mode != 'RSS': @@ -121,48 +122,41 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if not trimmed_data: continue - try: - with BS4Parser(trimmed_data, 'html5lib') as html: - result_table = html.find('table', {'id': 'torrent_table'}) - - if not result_table: - logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) - continue + with BS4Parser(trimmed_data, 'html5lib') as html: + result_table = html.find('table', {'id': 'torrent_table'}) - result_tbody = result_table.find('tbody') - entries = result_tbody.contents - del entries[1::2] + if not result_table: + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) + continue - for result in entries[1:]: + result_tbody = result_table.find('tbody') + entries = result_tbody.contents + del entries[1::2] + for result in entries[1:]: + try: torrent = result('td') if len(torrent) <= 1: break all_as = (torrent[1])('a') + notinternal = result.find('img', src='/static//common/user_upload.png') + if self.ranked and notinternal: + logger.log('Found a user uploaded release, Ignoring it..', logger.DEBUG) + continue - try: - notinternal = result.find('img', src='/static//common/user_upload.png') - if self.ranked and notinternal: - logger.log('Found a user uploaded release, Ignoring it..', logger.DEBUG) - continue - freeleech = result.find('img', src='/static//common/browse/freeleech.png') - if self.freeleech and not freeleech: - continue - title = all_as[2].string - download_url = self.urls['base_url'] + all_as[0].attrs['href'] - torrent_size = result.find('td', class_='nobr').find_next_sibling('td').string - if torrent_size: - size = convert_size(torrent_size) or -1 - seeders = try_int((result('td')[6]).text.replace(',', '')) - leechers = try_int((result('td')[7]).text.replace(',', '')) - - except (AttributeError, TypeError): + freeleech = result.find('img', src='/static//common/browse/freeleech.png') + if self.freeleech and not freeleech: continue + title = all_as[2].string + download_url = self.urls['base_url'] + all_as[0].attrs['href'] if not all([title, download_url]): continue + seeders = try_int((result('td')[6]).text.replace(',', '')) + leechers = try_int((result('td')[7]).text.replace(',', '')) + # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': @@ -171,24 +165,28 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man (title, seeders), logger.DEBUG) continue - item = { - 'title': title, - 'link': download_url, - 'size': size, - 'seeders': seeders, - 'leechers': leechers, - 'pubdate': None, - 'hash': None - } - if mode != 'RSS': - logger.log('Found result: {0} with {1} seeders and {2} leechers'.format - (title, seeders, leechers), logger.DEBUG) - - items.append(item) - except (AttributeError, TypeError, KeyError, ValueError, IndexError): - logger.log('Failed parsing provider. 
Traceback: {0!r}'.format - (traceback.format_exc()), logger.ERROR) - continue + torrent_size = result.find('td', class_='nobr').find_next_sibling('td').string + if torrent_size: + size = convert_size(torrent_size) or -1 + + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } + if mode != 'RSS': + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) + + items.append(item) + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) + continue results += items From d10f39c2b8a112200b097598e5ebda722a719427 Mon Sep 17 00:00:00 2001 From: medariox Date: Sat, 11 Jun 2016 18:48:43 +0200 Subject: [PATCH 53/85] Improve FreshOn --- sickbeard/providers/freshontv.py | 71 ++++++++++++++----------------- sickbeard/providers/iptorrents.py | 7 ++- 2 files changed, 36 insertions(+), 42 deletions(-) diff --git a/sickbeard/providers/freshontv.py b/sickbeard/providers/freshontv.py index ad2e636fe5..62bf13f452 100644 --- a/sickbeard/providers/freshontv.py +++ b/sickbeard/providers/freshontv.py @@ -113,6 +113,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_strings[mode]: if mode != 'RSS': @@ -129,9 +130,8 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man logger.log('No data returned from provider', logger.DEBUG) continue - try: - with BS4Parser(init_html, 'html5lib') as init_soup: - + with BS4Parser(init_html, 'html5lib') as init_soup: + try: # Check to see if there is more than 1 page of results pager = init_soup.find('div', {'class': 'pager'}) if pager: @@ -153,9 +153,9 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # limit RSS search if max_page_number > 3 and mode == 'RSS': max_page_number = 3 - except Exception: - logger.log('Failed parsing provider. Traceback: %s' % traceback.format_exc(), logger.ERROR) - continue + except Exception: + logger.log('Failed parsing provider. 
Traceback: %s' % traceback.format_exc(), logger.ERROR) + continue data_response_list = [init_html] @@ -173,45 +173,33 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man data_response_list.append(page_html) - try: - - for data_response in data_response_list: - - with BS4Parser(data_response, 'html5lib') as html: + for data_response in data_response_list: - torrent_rows = html('tr', {'class': re.compile('torrent_[0-9]*')}) + with BS4Parser(data_response, 'html5lib') as html: + torrent_rows = html('tr', {'class': re.compile('torrent_[0-9]*')}) - # Continue only if a Release is found - if not torrent_rows: - logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) - continue + # Continue only if a Release is found + if not torrent_rows: + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) + continue - for individual_torrent in torrent_rows: + for individual_torrent in torrent_rows: + try: # skip if torrent has been nuked due to poor quality if individual_torrent.find('img', alt='Nuked') is not None: continue - try: - title = individual_torrent.find('a', {'class': 'torrent_name_link'})['title'] - except Exception: - logger.log('Unable to parse torrent title. Traceback: %s ' % traceback.format_exc(), logger.WARNING) - continue - - try: - details_url = individual_torrent.find('a', {'class': 'torrent_name_link'})['href'] - torrent_id = int((re.match('.*?([0-9]+)$', details_url).group(1)).strip()) - download_url = self.urls['download'] % (str(torrent_id)) - seeders = try_int(individual_torrent.find('td', {'class': 'table_seeders'}).find('span').text.strip(), 1) - leechers = try_int(individual_torrent.find('td', {'class': 'table_leechers'}).find('a').text.strip(), 0) - torrent_size = individual_torrent.find('td', {'class': 'table_size'}).get_text() - size = convert_size(torrent_size) or -1 - except Exception: - continue - + title = individual_torrent.find('a', {'class': 'torrent_name_link'})['title'] + details_url = individual_torrent.find('a', {'class': 'torrent_name_link'})['href'] + torrent_id = int((re.match('.*?([0-9]+)$', details_url).group(1)).strip()) + download_url = self.urls['download'] % (str(torrent_id)) if not all([title, download_url]): continue + seeders = try_int(individual_torrent.find('td', {'class': 'table_seeders'}).find('span').text.strip(), 1) + leechers = try_int(individual_torrent.find('td', {'class': 'table_leechers'}).find('a').text.strip(), 0) + # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': @@ -220,6 +208,9 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man (title, seeders), logger.DEBUG) continue + torrent_size = individual_torrent.find('td', {'class': 'table_size'}).get_text() + size = convert_size(torrent_size) or -1 + item = { 'title': title, 'link': download_url, @@ -235,14 +226,14 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man items.append(item) - except (AttributeError, TypeError, KeyError, ValueError, IndexError): - logger.log('Failed parsing provider. Traceback: {0!r}'.format - (traceback.format_exc()), logger.ERROR) - continue + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. 
Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) + continue - results += items + results += items - return results + return results provider = FreshOnTVProvider() diff --git a/sickbeard/providers/iptorrents.py b/sickbeard/providers/iptorrents.py index e95b781cb2..fcfbfba75d 100644 --- a/sickbeard/providers/iptorrents.py +++ b/sickbeard/providers/iptorrents.py @@ -56,7 +56,8 @@ def __init__(self): def _check_auth(self): if not self.username or not self.password: - raise AuthException('Your authentication credentials for {0} are missing, check your config.'.format(self.name)) + raise AuthException('Your authentication credentials for {0} are missing,' + ' check your config.'.format(self.name)) return True @@ -81,7 +82,8 @@ def login(self): # You tried too often, please try again after 2 hours! if re.search('You tried too often', response): - logger.log('You tried too often, please try again after 2 hours! Disable IPTorrents for at least 2 hours', logger.WARNING) + logger.log('You tried too often, please try again after 2 hours!' + ' Disable IPTorrents for at least 2 hours', logger.WARNING) return False return True @@ -96,6 +98,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_strings[mode]: if mode != 'RSS': From 5d96054682b9ce2e5008a1e5a3f9c93fb44e8ca3 Mon Sep 17 00:00:00 2001 From: medariox Date: Sat, 11 Jun 2016 19:00:53 +0200 Subject: [PATCH 54/85] More improvements and cleanups --- sickbeard/providers/hd4free.py | 3 ++- sickbeard/providers/newznab.py | 2 ++ sickbeard/providers/pretome.py | 1 + sickbeard/providers/scenetime.py | 1 + sickbeard/providers/t411.py | 1 + sickbeard/providers/thepiratebay.py | 1 + sickbeard/providers/tokyotoshokan.py | 1 + sickbeard/providers/transmitthenet.py | 3 ++- sickbeard/providers/zooqle.py | 2 +- 9 files changed, 12 insertions(+), 3 deletions(-) diff --git a/sickbeard/providers/hd4free.py b/sickbeard/providers/hd4free.py index 3cfa9e65c1..3a456f0f61 100644 --- a/sickbeard/providers/hd4free.py +++ b/sickbeard/providers/hd4free.py @@ -65,6 +65,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_strings[mode]: if self.freeleech: search_params['fl'] = 'true' @@ -95,7 +96,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if jdata['0']['total_results'] == 0: logger.log('Provider has no results for this search', logger.DEBUG) continue - except StandardError: + except KeyError: continue for i in jdata: diff --git a/sickbeard/providers/newznab.py b/sickbeard/providers/newznab.py index 8365639725..792f69e18e 100644 --- a/sickbeard/providers/newznab.py +++ b/sickbeard/providers/newznab.py @@ -313,7 +313,9 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_strings[mode]: + if mode != 'RSS': logger.log('Search string: {0}'.format(search_string), logger.DEBUG) diff --git a/sickbeard/providers/pretome.py b/sickbeard/providers/pretome.py index 4b289f87cb..6aa5da0664 100644 --- a/sickbeard/providers/pretome.py +++ b/sickbeard/providers/pretome.py @@ -91,6 +91,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: 
items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_strings[mode]: if mode != 'RSS': diff --git a/sickbeard/providers/scenetime.py b/sickbeard/providers/scenetime.py index 7ab271c546..35f99c3cfa 100644 --- a/sickbeard/providers/scenetime.py +++ b/sickbeard/providers/scenetime.py @@ -80,6 +80,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_strings[mode]: if mode != 'RSS': diff --git a/sickbeard/providers/t411.py b/sickbeard/providers/t411.py index c81601ee5d..767d57c107 100644 --- a/sickbeard/providers/t411.py +++ b/sickbeard/providers/t411.py @@ -92,6 +92,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_strings[mode]: if mode != 'RSS': diff --git a/sickbeard/providers/thepiratebay.py b/sickbeard/providers/thepiratebay.py index c9e0498cb5..575f08a8ea 100644 --- a/sickbeard/providers/thepiratebay.py +++ b/sickbeard/providers/thepiratebay.py @@ -89,6 +89,7 @@ def process_column_header(th): logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: + search_url = self.urls['search'] if mode != 'RSS' else self.urls['rss'] if self.custom_url: if not validators.url(self.custom_url): diff --git a/sickbeard/providers/tokyotoshokan.py b/sickbeard/providers/tokyotoshokan.py index bed2eb9351..234cab59c3 100644 --- a/sickbeard/providers/tokyotoshokan.py +++ b/sickbeard/providers/tokyotoshokan.py @@ -58,6 +58,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: + if mode != 'RSS': logger.log('Search string: {0}'.format(search_string), logger.DEBUG) diff --git a/sickbeard/providers/transmitthenet.py b/sickbeard/providers/transmitthenet.py index 6730ac8104..d329750538 100644 --- a/sickbeard/providers/transmitthenet.py +++ b/sickbeard/providers/transmitthenet.py @@ -62,7 +62,8 @@ def __init__(self): def _check_auth(self): if not self.username or not self.password: - raise AuthException('Your authentication credentials for {0} are missing, check your config.'.format(self.name)) + raise AuthException('Your authentication credentials for {0} are missing,' + ' check your config.'.format(self.name)) return True diff --git a/sickbeard/providers/zooqle.py b/sickbeard/providers/zooqle.py index fd69dc1240..ce8944ca7d 100644 --- a/sickbeard/providers/zooqle.py +++ b/sickbeard/providers/zooqle.py @@ -87,7 +87,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man search_params = {'q': '{0} category:TV'.format(search_string)} response = self.get_url(self.urls['search'], params=search_params, returns='response') - if not response.text: + if not response or not response.text: logger.log('No data returned from provider', logger.DEBUG) continue From 956c756bf18df441bbc3279f8afb72468403b8da Mon Sep 17 00:00:00 2001 From: medariox Date: Sat, 11 Jun 2016 20:05:10 +0200 Subject: [PATCH 55/85] Fix ThePirateBay --- sickbeard/providers/thepiratebay.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/sickbeard/providers/thepiratebay.py b/sickbeard/providers/thepiratebay.py index 575f08a8ea..af7a316b9a 100644 --- 
a/sickbeard/providers/thepiratebay.py
+++ b/sickbeard/providers/thepiratebay.py
@@ -57,7 +57,7 @@ def __init__(self):
         # Proper Strings

         # Cache
-        self.cache = tvcache.TVCache(self, min_time=30)  # only poll ThePirateBay every 30 minutes max
+        self.cache = tvcache.TVCache(self, min_time=1)  # only poll ThePirateBay every minute max

     def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-many-locals, too-many-branches, too-many-statements
         results = []
@@ -126,9 +126,11 @@ def process_column_header(th):
                 try:
                     cells = result('td')

-                    title = result.find(class_='detName').get_text(strip=True)
-                    download_url = result.find(title='Download this torrent using magnet')['href'] + self._custom_trackers
-                    if 'magnet:?' not in download_url:
+                    title = result.find(class_='detName')
+                    title = title.get_text(strip=True) if title else None
+                    download_url = result.find(title='Download this torrent using magnet')
+                    download_url = download_url['href'] + self._custom_trackers if download_url else None
+                    if download_url and 'magnet:?' not in download_url:
                         logger.log('Invalid ThePirateBay proxy please try another one', logger.DEBUG)
                         continue
                     if not all([title, download_url]):

From 83c40c88bc1a576c989f16b25906c95e317be3a7 Mon Sep 17 00:00:00 2001
From: medariox
Date: Sat, 11 Jun 2016 21:42:15 +0200
Subject: [PATCH 56/85] Add size to freshon, cleanup, fix for tvchaosuk

---
 sickbeard/providers/freshontv.py | 39 ++++++++++++++++++--------------
 sickbeard/providers/tvchaosuk.py |  2 +-
 2 files changed, 23 insertions(+), 18 deletions(-)

diff --git a/sickbeard/providers/freshontv.py b/sickbeard/providers/freshontv.py
index 62bf13f452..c247ed6919 100644
--- a/sickbeard/providers/freshontv.py
+++ b/sickbeard/providers/freshontv.py
@@ -81,9 +81,9 @@ def login(self):
             return False

         if re.search('/logout.php', response):
-            try:
-                if dict_from_cookiejar(self.session.cookies)['uid'] and dict_from_cookiejar(self.session.cookies)['pass']:
+                if dict_from_cookiejar(self.session.cookies)['uid'] and \
+                        dict_from_cookiejar(self.session.cookies)['pass']:
                     self._uid = dict_from_cookiejar(self.session.cookies)['uid']
                     self._hash = dict_from_cookiejar(self.session.cookies)['pass']
@@ -92,10 +92,12 @@ def login(self):
                     return True
             except Exception:
                 logger.log('Unable to login to provider (cookie)', logger.WARNING)
-            return False
+                return False
         else:
-            if re.search('Username does not exist in the userbase or the account is not confirmed yet.', response):
+            if re.search('Username does not exist in the userbase or the account is not confirmed yet.', response) or \
+                re.search('Username or password is incorrect. If you have an account here please use the'
+                          ' recovery system or try again.', response):
                 logger.log('Invalid username or password. Check your settings', logger.WARNING)

             if re.search('DDoS protection by CloudFlare', response):
@@ -117,8 +119,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
             for search_string in search_strings[mode]:

                 if mode != 'RSS':
-                    logger.log('Search string: {0}'.format(search_string),
-                               logger.DEBUG)
+                    logger.log('Search string: {0}'.format(search_string), logger.DEBUG)

                 search_url = self.urls['search'] % (freeleech, search_string)

@@ -154,7 +155,8 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                     if max_page_number > 3 and mode == 'RSS':
                         max_page_number = 3
                 except Exception:
-                    logger.log('Failed parsing provider. Traceback: %s' % traceback.format_exc(), logger.ERROR)
+                    logger.log('Failed parsing provider. Traceback: {0!r}'.format
+                               (traceback.format_exc()), logger.ERROR)
                     continue

             data_response_list = [init_html]
@@ -164,7 +166,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man

                 for i in range(1, max_page_number):
                     time.sleep(1)
-                    page_search_url = search_url + '&page=' + str(i)
+                    page_search_url = search_url + '&page=' + unicode(i)
                     # logger.log('Search string: ' + page_search_url, logger.DEBUG)

                     page_html = self.get_url(page_search_url, returns='text')
@@ -176,7 +178,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man

             for data_response in data_response_list:
                 with BS4Parser(data_response, 'html5lib') as html:
-                    torrent_rows = html('tr', {'class': re.compile('torrent_[0-9]*')})
+                    torrent_rows = html('tr', class_=re.compile('torrent_[0-9]*'))

                     # Continue only if a Release is found
                     if not torrent_rows:
@@ -190,25 +192,29 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                             if individual_torrent.find('img', alt='Nuked') is not None:
                                 continue

-                            title = individual_torrent.find('a', {'class': 'torrent_name_link'})['title']
-                            details_url = individual_torrent.find('a', {'class': 'torrent_name_link'})['href']
+                            title = individual_torrent.find('a', class_='torrent_name_link')['title']
+                            details_url = individual_torrent.find('a', class_='torrent_name_link')['href']
                             torrent_id = int((re.match('.*?([0-9]+)$', details_url).group(1)).strip())
-                            download_url = self.urls['download'] % (str(torrent_id))
+                            download_url = self.urls['download'] % (unicode(torrent_id))
                             if not all([title, download_url]):
                                 continue

-                            seeders = try_int(individual_torrent.find('td', {'class': 'table_seeders'}).find('span').text.strip(), 1)
-                            leechers = try_int(individual_torrent.find('td', {'class': 'table_leechers'}).find('a').text.strip(), 0)
+                            seeders = try_int(individual_torrent.find('td', class_='table_seeders').find('span').get_text(strip=True), 1)
+                            leechers = try_int(individual_torrent.find('td', class_='table_leechers').find('a').get_text(strip=True), 0)

                             # Filter unseeded torrent
                             if seeders < min(self.minseed, 1):
                                 if mode != 'RSS':
                                     logger.log("Discarding torrent because it doesn't meet the"
-                                               ' minimum seeders: {0}. Seeders: {1})'.format
+                                               ' minimum seeders: {0}. Seeders: {1}'.format
                                                (title, seeders), logger.DEBUG)
                                 continue

-                            torrent_size = individual_torrent.find('td', {'class': 'table_size'}).get_text()
+                            torrent_size = individual_torrent.find('td', class_='table_size').get_text(strip=True)
+                            torrent_length = len(torrent_size)
+                            torrent_weight = torrent_size[:torrent_length - 2]
+                            torrent_unit = torrent_size[torrent_length - 2:]
+                            torrent_size = '{0} {1}'.format(torrent_weight, torrent_unit)
                             size = convert_size(torrent_size) or -1

                             item = {
@@ -225,7 +231,6 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                                         (title, seeders, leechers), logger.DEBUG)

                             items.append(item)
-
                         except (AttributeError, TypeError, KeyError, ValueError, IndexError):
                             logger.log('Failed parsing provider. Traceback: {0!r}'.format
                                        (traceback.format_exc()), logger.ERROR)
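Both size-normalization approaches in this series turn a cell like '1.46GB' into '1.46 GB' before handing it to convert_size: the slicing version added above, and the re.split version that PATCH 58 below swaps in. A standalone sketch (the sample value is hypothetical):

import re

torrent_size = '1.46GB'  # hypothetical cell text

# PATCH 56 approach: slice the two-character unit off the end
torrent_weight, torrent_unit = torrent_size[:-2], torrent_size[-2:]
print('{0} {1}'.format(torrent_weight, torrent_unit))  # 1.46 GB

# PATCH 58 approach: split once on the first number, keeping it via the capture group
parts = re.split(r'(\d+.?\d+)', torrent_size, 1)  # ['', '1.46', 'GB']
print('{0} {1}'.format(parts[1], parts[2]))  # 1.46 GB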
diff --git a/sickbeard/providers/tvchaosuk.py b/sickbeard/providers/tvchaosuk.py
index 4890b7cdff..5b3751c856 100644
--- a/sickbeard/providers/tvchaosuk.py
+++ b/sickbeard/providers/tvchaosuk.py
@@ -166,7 +166,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                     size = convert_size(torrent_size, units=units) or -1

                     item = {
-                        'title': title,
+                        'title': title + '.hdtv.x264',
                         'link': download_url,
                         'size': size,
                         'seeders': seeders,

From a91faed8d7fadc94bef91ab452f28e5865106fc8 Mon Sep 17 00:00:00 2001
From: P0psicles
Date: Sat, 11 Jun 2016 20:46:32 +0200
Subject: [PATCH 57/85] Fix for omgwtfnzb, needed a default value, because
 getattr doesn't do that by default.

---
 sickbeard/search.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/sickbeard/search.py b/sickbeard/search.py
index dc6d891985..a5e9c1f105 100644
--- a/sickbeard/search.py
+++ b/sickbeard/search.py
@@ -585,7 +585,7 @@ def searchProviders(show, episodes, forced_search=False, downCurQuality=False, m
                     foundResults[cur_provider.name][curEp] = searchResults[curEp]

                     # Sort the list by seeders if possible
-                    if cur_provider.provider_type == 'torrent' or getattr(cur_provider, 'torznab'):
+                    if cur_provider.provider_type == 'torrent' or getattr(cur_provider, 'torznab', None):
                         foundResults[cur_provider.name][curEp].sort(key=lambda d: int(d.seeders), reverse=True)

                     break
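The one-line fix above hinges on getattr's optional third argument; a standalone sketch (FakeProvider is hypothetical):

class FakeProvider(object):
    provider_type = 'torrent'

cur_provider = FakeProvider()

# Two-argument getattr raises for providers that never set the attribute:
# getattr(cur_provider, 'torznab')  # AttributeError
# The three-argument form returns the default instead, so the check simply
# evaluates falsy for providers without a torznab flag:
print(getattr(cur_provider, 'torznab', None))  # None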
From c3aca3f1583fe45b760c269dc1f03ad8521a9fd1 Mon Sep 17 00:00:00 2001
From: medariox
Date: Sat, 11 Jun 2016 23:47:01 +0200
Subject: [PATCH 58/85] Improve size parsing code Freshon

---
 sickbeard/providers/freshontv.py | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/sickbeard/providers/freshontv.py b/sickbeard/providers/freshontv.py
index c247ed6919..1bd2390bc7 100644
--- a/sickbeard/providers/freshontv.py
+++ b/sickbeard/providers/freshontv.py
@@ -211,10 +211,8 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                                 continue

                             torrent_size = individual_torrent.find('td', class_='table_size').get_text(strip=True)
-                            torrent_length = len(torrent_size)
-                            torrent_weight = torrent_size[:torrent_length - 2]
-                            torrent_unit = torrent_size[torrent_length - 2:]
-                            torrent_size = '{0} {1}'.format(torrent_weight, torrent_unit)
+                            torrent_size = re.split('(\d+.?\d+)', unicode(torrent_size), 1)
+                            torrent_size = '{0} {1}'.format(torrent_size[1], torrent_size[2])
                             size = convert_size(torrent_size) or -1

                             item = {

From 12eade7387bf90816d59378915e86b2ac976a2c9 Mon Sep 17 00:00:00 2001
From: medariox
Date: Sun, 12 Jun 2016 14:03:26 +0200
Subject: [PATCH 59/85] Fixes for ExtraTorrent and HDTorrents

---
 sickbeard/providers/extratorrent.py | 5 ++++-
 sickbeard/providers/hdtorrents.py   | 3 ++-
 2 files changed, 6 insertions(+), 2 deletions(-)

diff --git a/sickbeard/providers/extratorrent.py b/sickbeard/providers/extratorrent.py
index 9675e1e6a2..21e4305884 100644
--- a/sickbeard/providers/extratorrent.py
+++ b/sickbeard/providers/extratorrent.py
@@ -85,7 +85,10 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                         download_url = enclosure['url'] if enclosure else item.find('link').next.strip()
                         download_url = re.sub(r'(.*)/torrent/(.*).html', r'\1/download/\2.torrent', download_url)
                     else:
-                        info_hash = item.find('info_hash').get_text(strip=True)
+                        info_hash = item.find('info_hash')
+                        if not info_hash:
+                            continue
+                        info_hash = info_hash.get_text(strip=True)
                         download_url = 'magnet:?xt=urn:btih:' + info_hash + '&dn=' + title + self._custom_trackers

                     if not all([title, download_url]):

diff --git a/sickbeard/providers/hdtorrents.py b/sickbeard/providers/hdtorrents.py
index e784d5ba22..f6c72834ee 100644
--- a/sickbeard/providers/hdtorrents.py
+++ b/sickbeard/providers/hdtorrents.py
@@ -149,7 +149,8 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                         title = cells[labels.index('Filename')].a
                         title = title.get_text(strip=True) if title else None
-                        download_url = self.url + '/' + cells[labels.index('Dl')].a['href']
+                        download_url = cells[labels.index('Dl')].a
+                        download_url = self.url + '/' + download_url['href'] if download_url else None

                         if not all([title, download_url]):
                             continue

From 4ca3d606b786695b6327f072591f7910abc4ef2d Mon Sep 17 00:00:00 2001
From: P0psicles
Date: Sun, 12 Jun 2016 16:49:59 +0200
Subject: [PATCH 60/85] Fixed bithdtv

* For when it's not getting back the 750px tables.

---
 sickbeard/providers/bithdtv.py | 9 +++++++--
 1 file changed, 7 insertions(+), 2 deletions(-)

diff --git a/sickbeard/providers/bithdtv.py b/sickbeard/providers/bithdtv.py
index ac3245528e..78a4444d47 100644
--- a/sickbeard/providers/bithdtv.py
+++ b/sickbeard/providers/bithdtv.py
@@ -98,8 +98,13 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man

             # Need the html.parser, as the html5parser has issues with this site.
             with BS4Parser(response.text, 'html.parser') as html:
-                torrent_table = html('table', width='750')[-1]  # Get the last table with a width of 750px.
-                torrent_rows = torrent_table('tr') if torrent_table else []
+                all_tables = html('table', width='750')  # Get the last table with a width of 750px.
+                if all_tables:
+                    result_table = all_tables[-1]
+                else:
+                    continue
+
+                torrent_rows = result_table('tr') if result_table else []

                 # Continue only if at least one Release is found
                 if len(torrent_rows) < 2:

From 267e3594cdd5ca6c7fb4c78f2b3349f46ab09e32 Mon Sep 17 00:00:00 2001
From: P0psicles
Date: Mon, 13 Jun 2016 09:28:26 +0200
Subject: [PATCH 61/85] Fix tokyotoshokan provider errors

---
 sickbeard/providers/tokyotoshokan.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/sickbeard/providers/tokyotoshokan.py b/sickbeard/providers/tokyotoshokan.py
index 234cab59c3..c8127ef2c2 100644
--- a/sickbeard/providers/tokyotoshokan.py
+++ b/sickbeard/providers/tokyotoshokan.py
@@ -86,8 +86,8 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
             for top, bot in zip(torrent_rows[a::2], torrent_rows[a + 1::2]):
                 try:
                     desc_top = top.find('td', class_='desc-top')
-                    title = desc_top.get_text(strip=True)
-                    download_url = desc_top.find('a')['href']
+                    title = desc_top.get_text(strip=True) if desc_top else None
+                    download_url = desc_top.find('a')['href'] if desc_top else None

                     if not all([title, download_url]):
                         continue

From 4097f9b86daab4f827f49700674ac9c3abe7a956 Mon Sep 17 00:00:00 2001
From: P0psicles
Date: Mon, 13 Jun 2016 15:33:26 +0200
Subject: [PATCH 62/85] Fixed properSearch.

* listPropers does a SQL query, but accessing the returned row requires b''
  (byte-string) keys, as sketched below.
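A minimal sketch of the gist of that fix (the row contents are made up; per the
commit message, the cache rows must be indexed with byte-string keys):

row = {b'name': 'Show.S01E05.PROPER.HDTV.x264', b'time': 1465776000}
# the commit swaps every x['name']-style lookup for x[b'name']:
print(row[b'name'])  # Show.S01E05.PROPER.HDTV.x264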
---
 sickrage/providers/GenericProvider.py | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/sickrage/providers/GenericProvider.py b/sickrage/providers/GenericProvider.py
index 71b57b8938..359f17694b 100644
--- a/sickrage/providers/GenericProvider.py
+++ b/sickrage/providers/GenericProvider.py
@@ -119,9 +119,12 @@ def download_result(self, result):
     def find_propers(self, search_date=None):
         results = self.cache.listPropers(search_date)

-        return [Proper(x['name'], x['url'], datetime.fromtimestamp(x['time']), self.show, x['seeders'], x['leechers'], x['size'], x['pubdate'], x['hash']) for x in results]
+        return [Proper(x[b'name'], x[b'url'], datetime.fromtimestamp(x[b'time']), self.show, x[b'seeders'],
+                       x[b'leechers'], x[b'size'], x[b'pubdate'], x[b'hash']) for x in results]

-    def find_search_results(self, show, episodes, search_mode, forced_search=False, download_current_quality=False, manual_search=False, manual_search_type='episode'):  # pylint: disable=too-many-branches,too-many-arguments,too-many-locals,too-many-statements
+    def find_search_results(self, show, episodes, search_mode, forced_search=False,
+                            download_current_quality=False, manual_search=False,
+                            manual_search_type='episode'):  # pylint: disable=too-many-branches,too-many-arguments,too-many-locals,too-many-statements
         self._check_auth()
         self.show = show

From bb656577d90d27dce36e86888a8082b6c395b9ba Mon Sep 17 00:00:00 2001
From: P0psicles
Date: Mon, 13 Jun 2016 15:34:57 +0200
Subject: [PATCH 63/85] Added newznab search by search_query fallback, when
 search by tvdbid does not give back results.

---
 sickbeard/providers/newznab.py | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

diff --git a/sickbeard/providers/newznab.py b/sickbeard/providers/newznab.py
index 792f69e18e..23a05559dd 100644
--- a/sickbeard/providers/newznab.py
+++ b/sickbeard/providers/newznab.py
@@ -69,6 +69,7 @@ def __init__(self, name, url, key='0', catIDs='5030,5040', search_mode='eponly',
         self.caps = False
         self.cap_tv_search = None
+        self.force_query = False
         # self.cap_search = None
         # self.cap_movie_search = None
         # self.cap_audio_search = None
@@ -286,7 +287,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
         for mode in search_strings:
             self.torznab = False
             search_params = {
-                't': 'tvsearch' if 'tvdbid' in str(self.cap_tv_search) else 'search',
+                't': 'tvsearch' if 'tvdbid' in str(self.cap_tv_search) and not self.force_query else 'search',
                 'limit': 100,
                 'offset': 0,
                 'cat': self.catIDs.strip(', ') or '5030,5040',
@@ -397,6 +398,11 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man

             results += items

+        # Reprocess, but now with force_query = True
+        if not results and not self.force_query:
+            self.force_query = True
+            return self.search(search_strings, ep_obj=ep_obj)
+
         return results

     def _get_size(self, item):

From 0fac929812282c452d1e0061cbe258505b983287 Mon Sep 17 00:00:00 2001
From: medariox
Date: Tue, 14 Jun 2016 19:28:30 +0200
Subject: [PATCH 64/85] Fix HDTorrents, use urljoin, partial rewrite

---
 sickbeard/providers/hdtorrents.py | 88 +++++++++++++------------------
 1 file changed, 38 insertions(+), 50 deletions(-)

diff --git a/sickbeard/providers/hdtorrents.py b/sickbeard/providers/hdtorrents.py
index f6c72834ee..2da56bb428 100644
--- a/sickbeard/providers/hdtorrents.py
+++ b/sickbeard/providers/hdtorrents.py
@@ -1,5 +1,5 @@
 # coding=utf-8
-# Author: Dustyn Gibson
+# Original author: Dustyn
Gibson # # This file is part of Medusa. # @@ -21,7 +21,7 @@ import re import traceback -from requests.compat import quote_plus +from requests.compat import urljoin from requests.utils import dict_from_cookiejar from sickbeard import logger, tvcache @@ -39,25 +39,22 @@ def __init__(self): self.username = None self.password = None + self.minseed = None self.minleech = None self.freeleech = None - self.urls = {'base_url': 'https://hd-torrents.org', - 'login': 'https://hd-torrents.org/login.php', - 'search': 'https://hd-torrents.org/torrents.php?search=%s&active=1&options=0%s', - 'rss': 'https://hd-torrents.org/torrents.php?search=&active=1&options=0%s', - 'home': 'https://hd-torrents.org/%s'} - - self.url = self.urls['base_url'] + self.url = 'https://hd-torrents.org/' + self.urls = { + 'login': urljoin(self.url, 'login.php'), + 'search': urljoin(self.url, 'torrents.php'), + } - self.categories = '&category[]=59&category[]=60&category[]=30&category[]=38' self.proper_strings = ['PROPER', 'REPACK'] self.cache = tvcache.TVCache(self, min_time=30) # only poll HDTorrents every 30 minutes max def _check_auth(self): - if not self.username or not self.password: logger.log('Invalid username or password. Check your settings', logger.WARNING) @@ -67,9 +64,11 @@ def login(self): if any(dict_from_cookiejar(self.session.cookies).values()): return True - login_params = {'uid': self.username, - 'pwd': self.password, - 'submit': 'Confirm'} + login_params = { + 'uid': self.username, + 'pwd': self.password, + 'submit': 'Confirm' + } response = self.get_url(self.urls['login'], post_data=login_params, returns='text') if not response: @@ -87,6 +86,18 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if not self.login(): return results + # Search Params + search_params = { + 'search': '', # BROWSE + 'active': 1, # TV/XVID + 'options': 0, # TV/X264 + 'category[]': 59, # TV/DVDRIP + 'category[]': 60, # TV/BLURAY + 'category[]': 30, # TV/DVDR + 'category[]': 38, # TV/SD + 'category[]': 65, + } + for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) @@ -94,47 +105,23 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for search_string in search_strings[mode]: if mode != 'RSS': - search_url = self.urls['search'] % (quote_plus(search_string), self.categories) - logger.log('Search string: {0}'.format(search_string), - logger.DEBUG) - else: - search_url = self.urls['rss'] % self.categories + search_params['search'] = search_string + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) if self.freeleech: - search_url = search_url.replace('active=1', 'active=5') + search_params['active'] = 5 - data = self.get_url(search_url, returns='text') - if not data or 'please try later' in data: + response = self.get_url(self.urls['search'], params=search_params, returns='response') + if not response or not response.text: logger.log('No data returned from provider', logger.DEBUG) continue - if data.find('No torrents here') != -1: - logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) - continue - - # Search result page contains some invalid html that prevents html parser from returning all data. - # We cut everything before the table that contains the data we are interested in thus eliminating - # the invalid html portions - try: - index = data.lower().index('
Date: Tue, 14 Jun 2016 19:49:02 +0200 Subject: [PATCH 65/85] Fix rare Zooqle error --- sickbeard/providers/zooqle.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/sickbeard/providers/zooqle.py b/sickbeard/providers/zooqle.py index ce8944ca7d..164e750d44 100644 --- a/sickbeard/providers/zooqle.py +++ b/sickbeard/providers/zooqle.py @@ -113,9 +113,13 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if not all([title, download_url]): continue - peers = cells[6].find('div')['title'].replace(',', '').split(' | ', 1) - seeders = try_int(peers[0].strip('Seeders: ')) - leechers = try_int(peers[1].strip('Leechers: ')) + seeders = 1 + leechers = 0 + peers = cells[6].find('div') + if peers and peers.get('title'): + peers = peers['title'].replace(',', '').split(' | ', 1) + seeders = try_int(peers[0].strip('Seeders: ')) + leechers = try_int(peers[1].strip('Leechers: ')) # Filter unseeded torrent if seeders < min(self.minseed, 1): From 649ace222f2dd81b934b6bd71c25e25e42692dca Mon Sep 17 00:00:00 2001 From: medariox Date: Tue, 14 Jun 2016 21:35:03 +0200 Subject: [PATCH 66/85] Improve HDTorrents, bring back ugly hack --- sickbeard/providers/hdtorrents.py | 36 ++++++++++++++++++++----------- 1 file changed, 24 insertions(+), 12 deletions(-) diff --git a/sickbeard/providers/hdtorrents.py b/sickbeard/providers/hdtorrents.py index 2da56bb428..ed68281739 100644 --- a/sickbeard/providers/hdtorrents.py +++ b/sickbeard/providers/hdtorrents.py @@ -28,6 +28,7 @@ from sickbeard.bs4_parser import BS4Parser from sickrage.helper.common import convert_size, try_int +from sickrage.helper.exceptions import AuthException from sickrage.providers.torrent.TorrentProvider import TorrentProvider @@ -50,13 +51,15 @@ def __init__(self): 'search': urljoin(self.url, 'torrents.php'), } - self.proper_strings = ['PROPER', 'REPACK'] + self.proper_strings = ['PROPER', 'REPACK', 'REAL'] - self.cache = tvcache.TVCache(self, min_time=30) # only poll HDTorrents every 30 minutes max + self.cache = tvcache.TVCache(self, min_time=30) def _check_auth(self): + if not self.username or not self.password: - logger.log('Invalid username or password. Check your settings', logger.WARNING) + raise AuthException('Your authentication credentials for {0} are missing,' + ' check your config.'.format(self.name)) return True @@ -88,14 +91,14 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Search Params search_params = { - 'search': '', # BROWSE - 'active': 1, # TV/XVID - 'options': 0, # TV/X264 - 'category[]': 59, # TV/DVDRIP - 'category[]': 60, # TV/BLURAY - 'category[]': 30, # TV/DVDR - 'category[]': 38, # TV/SD - 'category[]': 65, + 'search': '', + 'active': 1, + 'options': 0, + 'category[0]': 59, + 'category[1]': 60, + 'category[2]': 30, + 'category[3]': 38, + 'category[4]': 65, } for mode in search_strings: @@ -116,7 +119,16 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man logger.log('No data returned from provider', logger.DEBUG) continue - with BS4Parser(response.text, 'html5lib') as html: + # Search result page contains some invalid html that prevents html parser from returning all data. + # We cut everything before the table that contains the data we are interested in thus eliminating + # the invalid html portions + try: + index = response.text.index('
Date: Thu, 16 Jun 2016 23:53:39 +0200 Subject: [PATCH 67/85] Improve TNTVillage, fix daily search, much more --- sickbeard/providers/tntvillage.py | 388 +++++++++--------------------- 1 file changed, 108 insertions(+), 280 deletions(-) diff --git a/sickbeard/providers/tntvillage.py b/sickbeard/providers/tntvillage.py index f258432f00..96578afe81 100644 --- a/sickbeard/providers/tntvillage.py +++ b/sickbeard/providers/tntvillage.py @@ -1,5 +1,5 @@ # coding=utf-8 -# Author: Giovanni Borri +# Original author: Giovanni Borri # Modified by gborri, https://github.com/gborri for TNTVillage # # This file is part of Medusa. @@ -22,43 +22,18 @@ import re import traceback +from urlparse import parse_qs + from requests.utils import dict_from_cookiejar +from requests.compat import urljoin -from sickbeard import db, logger, tvcache +from sickbeard import logger, tvcache from sickbeard.bs4_parser import BS4Parser -from sickbeard.common import Quality -from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException -from sickrage.helper.common import convert_size +from sickrage.helper.common import convert_size, try_int from sickrage.helper.exceptions import AuthException from sickrage.providers.torrent.TorrentProvider import TorrentProvider -category_excluded = {'Sport': 22, - 'Teatro': 23, - 'Video Musicali': 21, - 'Film': 4, - 'Musica': 2, - 'Students Releases': 13, - 'E Books': 3, - 'Linux': 6, - 'Macintosh': 9, - 'Windows Software': 10, - 'Pc Game': 11, - 'Playstation 2': 12, - 'Wrestling': 24, - 'Varie': 25, - 'Xbox': 26, - 'Immagini sfondi': 27, - 'Altri Giochi': 28, - 'Fumetteria': 30, - 'Trash': 31, - 'PlayStation 1': 32, - 'PSP Portable': 33, - 'A Book': 34, - 'Podcast': 35, - 'Edicola': 36, - 'Mobile': 37} - class TNTVillageProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes @@ -68,52 +43,25 @@ def __init__(self): self._uid = None self._hash = None + self.username = None self.password = None - self.cat = None - self.engrelease = None - self.page = 10 - self.subtitle = None + self.minseed = None self.minleech = None - self.hdtext = [' - Versione 720p', - ' Versione 720p', - ' V 720p', - ' V 720', - ' V HEVC', - ' V HEVC', - ' V 1080', - ' Versione 1080p', - ' 720p HEVC', - ' Ver 720', - ' 720p HEVC', - ' 720p'] - - self.category_dict = {'Serie TV': 29, - 'Cartoni': 8, - 'Anime': 7, - 'Programmi e Film TV': 1, - 'Documentari': 14, - 'All': 0} - - self.urls = {'base_url': 'http://forum.tntvillage.scambioetico.org', - 'login': 'http://forum.tntvillage.scambioetico.org/index.php?act=Login&CODE=01', - 'detail': 'http://forum.tntvillage.scambioetico.org/index.php?showtopic=%s', - 'search': 'http://forum.tntvillage.scambioetico.org/?act=allreleases&%s', - 'search_page': 'http://forum.tntvillage.scambioetico.org/?act=allreleases&st={0}&{1}', - 'download': 'http://forum.tntvillage.scambioetico.org/index.php?act=Attach&type=post&id=%s'} - - self.url = self.urls['base_url'] - - self.sub_string = ['sub', 'softsub'] + self.url = 'http://forum.tntvillage.scambioetico.org/' + self.urls = { + 'login': urljoin(self.url, 'index.php?act=Login&CODE=01'), + 'download': urljoin(self.url, 'index.php?act=Attach&type=post&id={0}'), + } self.proper_strings = ['PROPER', 'REPACK'] - self.categories = 'cat=29' - self.cache = tvcache.TVCache(self, min_time=30) # only poll TNTVillage every 30 minutes max + self.subtitle = None + def _check_auth(self): if not self.username or not self.password: @@ -128,10 +76,12 @@ def login(self): if cookies_dict['pass_hash'] != '0' and 
cookies_dict['member_id'] != '0': return True - login_params = {'UserName': self.username, - 'PassWord': self.password, - 'CookieDate': 1, - 'submit': 'Connettiti al Forum'} + login_params = { + 'UserName': self.username, + 'PassWord': self.password, + 'CookieDate': 1, + 'submit': 'Connettiti al Forum' + } response = self.get_url(self.urls['login'], post_data=login_params, returns='text') if not response: @@ -145,209 +95,63 @@ def login(self): return True - @staticmethod - def _reverseQuality(quality): - - quality_string = '' - - if quality == Quality.SDTV: - quality_string = ' HDTV x264' - if quality == Quality.SDDVD: - quality_string = ' DVDRIP' - elif quality == Quality.HDTV: - quality_string = ' 720p HDTV x264' - elif quality == Quality.FULLHDTV: - quality_string = ' 1080p HDTV x264' - elif quality == Quality.RAWHDTV: - quality_string = ' 1080i HDTV mpeg2' - elif quality == Quality.HDWEBDL: - quality_string = ' 720p WEB-DL h264' - elif quality == Quality.FULLHDWEBDL: - quality_string = ' 1080p WEB-DL h264' - elif quality == Quality.HDBLURAY: - quality_string = ' 720p Bluray x264' - elif quality == Quality.FULLHDBLURAY: - quality_string = ' 1080p Bluray x264' - - return quality_string - - @staticmethod - def _episodeQuality(torrent_rows): # pylint: disable=too-many-return-statements, too-many-branches - """ - Return The quality from the scene episode HTML row. - """ - file_quality = '' - - img_all = (torrent_rows('td'))[1]('img') - - if img_all: - for img_type in img_all: - try: - file_quality = file_quality + ' ' + img_type['src'].replace('style_images/mkportal-636/', '') - file_quality = file_quality.replace('.gif', '').replace('.png', '') - except Exception: - logger.log('Failed parsing quality. Traceback: %s' % traceback.format_exc(), logger.ERROR) - - else: - file_quality = (torrent_rows('td'))[1].get_text() - logger.log('Episode quality: %s' % file_quality, logger.DEBUG) - - def checkName(options, func): - return func([re.search(option, file_quality, re.I) for option in options]) - - dvdOptions = checkName(['dvd', 'dvdrip', 'dvdmux', 'DVD9', 'DVD5'], any) - bluRayOptions = checkName(['BD', 'BDmux', 'BDrip', 'BRrip', 'Bluray'], any) - sdOptions = checkName(['h264', 'divx', 'XviD', 'tv', 'TVrip', 'SATRip', 'DTTrip', 'Mpeg2'], any) - hdOptions = checkName(['720p'], any) - fullHD = checkName(['1080p', 'fullHD'], any) - - if img_all: - file_quality = (torrent_rows('td'))[1].get_text() - - webdl = checkName(['webdl', 'webmux', 'webrip', 'dl-webmux', 'web-dlmux', - 'webdl-mux', 'web-dl', 'webdlmux', 'dlmux'], any) - - if sdOptions and not dvdOptions and not fullHD and not hdOptions: - return Quality.SDTV - elif dvdOptions: - return Quality.SDDVD - elif hdOptions and not bluRayOptions and not fullHD and not webdl: - return Quality.HDTV - elif not hdOptions and not bluRayOptions and fullHD and not webdl: - return Quality.FULLHDTV - elif hdOptions and not bluRayOptions and not fullHD and webdl: - return Quality.HDWEBDL - elif not hdOptions and not bluRayOptions and fullHD and webdl: - return Quality.FULLHDWEBDL - elif bluRayOptions and hdOptions and not fullHD: - return Quality.HDBLURAY - elif bluRayOptions and fullHD and not hdOptions: - return Quality.FULLHDBLURAY - else: - return Quality.UNKNOWN - - def _is_italian(self, torrent_rows): - - name = str(torrent_rows('td')[1].find('b').find('span')) - if not name or name == 'None': - return False - - sub_found = italian = False - for sub in self.sub_string: - if re.search(sub, name, re.I): - sub_found = True - else: - continue - - if 
re.search('ita', name.split(sub)[0], re.I): - logger.log('Found Italian release: ' + name, logger.DEBUG) - italian = True - break - - if not sub_found and re.search('ita', name, re.I): - logger.log('Found Italian release: ' + name, logger.DEBUG) - italian = True - - return italian - - @staticmethod - def _is_english(torrent_rows): - - name = str(torrent_rows('td')[1].find('b').find('span')) - if not name or name == 'None': - return False - - english = False - if re.search('eng', name, re.I): - logger.log('Found English release: ' + name, logger.DEBUG) - english = True - - return english - - @staticmethod - def _is_season_pack(name): - - try: - parse_result = NameParser(tryIndexers=True).parse(name) - except (InvalidNameException, InvalidShowException) as error: - logger.log('{0}'.format(error), logger.DEBUG) - return False - - main_db_con = db.DBConnection() - sql_selection = 'select count(*) as count from tv_episodes where showid = ? and season = ?' - episodes = main_db_con.select(sql_selection, [parse_result.show.indexerid, parse_result.season_number]) - if int(episodes[0][b'count']) == len(parse_result.episode_numbers): - return True - - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches, too-many-statements + def search(self, search_strings, age=0, ep_obj=None): results = [] if not self.login(): return results - self.categories = 'cat=' + str(self.cat) + search_params = { + 'act': 'allreleases', + 'filter': '', + } for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) - for search_string in search_strings[mode]: - - if mode == 'RSS': - self.page = 2 - - last_page = 0 - y = int(self.page) - - if search_string == '': - continue - search_string = str(search_string).replace('.', ' ') - - for x in range(0, y): - z = x * 20 - if last_page: - break + for search_string in search_strings[mode]: if mode != 'RSS': - search_url = (self.urls['search_page'] + '&filter={2}').format(z, self.categories, search_string) + search_params['filter'] = search_string + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) else: - search_url = self.urls['search_page'].format(z, self.categories) - - if mode != 'RSS': - logger.log('Search string: {0}'.format - (search_string), logger.DEBUG) + search_params['cat'] = 29 - data = self.get_url(search_url, returns='text') - if not data: + response = self.get_url(self.url, params=search_params, returns='response') + if not response or not response.text: logger.log('No data returned from provider', logger.DEBUG) continue - with BS4Parser(data, 'html5lib') as html: - torrent_table = html.find('table', attrs={'class': 'copyright'}) + with BS4Parser(response.text, 'html5lib') as html: + torrent_table = html.find('table', class_='copyright') torrent_rows = torrent_table('tr') if torrent_table else [] - # Continue only if one Release is found + # Continue only if one release is found if len(torrent_rows) < 3: logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) - last_page = 1 continue - if len(torrent_rows) < 42: - last_page = 1 - - for result in torrent_table('tr')[2:]: + for result in torrent_table('tr')[1:]: try: - link = result.find('td').find('a') - title = link.string if link else None - dl_link = result('td') - dl_url = dl_link[8].find('a')['href'][-8:] if len(dl_link) > 7 else None - download_url = self.urls['download'] % dl_url if dl_url else None + cells = result('td') + if not cells: + continue + + last_cell_anchor = 
cells[-1].find('a') + if not last_cell_anchor: + continue + params = parse_qs(last_cell_anchor.get('href', '')) + download_url = self.urls['download'].format(params['pid'][0]) if \ + params.get('pid') else None + title = _normalize_title(cells[0], cells[1], mode) if not all([title, download_url]): continue - leechers = result('td')[3]('td')[1].text - leechers = int(leechers.strip('[]')) - seeders = result('td')[3]('td')[2].text - seeders = int(seeders.strip('[]')) + info_cell = cells[3].find_all('td') + leechers = info_cell[0].find('span').get_text(strip=True) + leechers = try_int(leechers) + seeders = info_cell[1].find('span').get_text() + seeders = try_int(seeders, 1) # Filter unseeded torrent if seeders < min(self.minseed, 1): @@ -357,38 +161,12 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man (title, seeders), logger.DEBUG) continue - filename_qt = self._reverseQuality(self._episodeQuality(result)) - for text in self.hdtext: - title1 = title - title = title.replace(text, filename_qt) - if title != title1: - break - - if Quality.nameQuality(title) == Quality.UNKNOWN: - title += filename_qt - - if not self._is_italian(result) and not self.subtitle: - logger.log('Torrent is subtitled, skipping: %s ' % title, logger.DEBUG) + if _has_only_subs(title) and not self.subtitle: + logger.log('Torrent is only subtitled, skipping: {0}'.format + (title), logger.DEBUG) continue - if self.engrelease and not self._is_english(result): - logger.log('Torrent isnt english audio/subtitled, skipping: %s ' % title, logger.DEBUG) - continue - - search_show = re.split(r'([Ss][\d{1,2}]+)', search_string)[0] - show_title = search_show - rindex = re.search(r'([Ss][\d{1,2}]+)', title) - if rindex: - show_title = title[:rindex.start()] - ep_params = title[rindex.start():] - if show_title.lower() != search_show.lower() and search_show.lower() in show_title.lower(): - new_title = search_show + ep_params - title = new_title - - if self._is_season_pack(title): - title = re.sub(r'([Ee][\d{1,2}\-?]+)', '', title) - - torrent_size = result('td')[3]('td')[3].text.strip('[]') + ' GB' + torrent_size = info_cell[3].find('span').get_text() + ' GB' size = convert_size(torrent_size) or -1 item = { @@ -410,9 +188,59 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man (traceback.format_exc()), logger.ERROR) continue - results += items + results += items + + return results + + +def _normalize_title(title, info, mode): + + result_title = title.find('a').get_text() + result_info = info.find('span') + + if not result_info: + return None + + bad_words = ['[cura]', 'hot', 'season', 'stagione', 'series', 'premiere', 'finale', 'fine', + 'full', 'Completa', 'supereroi', 'commedia', 'drammatico', 'poliziesco', 'azione', + 'giallo', 'politico', 'sitcom', 'funzionante'] + + formatted_info = '' + for info_part in result_info: + if mode == 'RSS': + try: + info_part = info_part.get('src') + info_part = info_part.replace('style_images/mkportal-636/', '') + info_part = info_part.replace('.gif', '').replace('.png', '') + if info_part == 'dolby': + info_part = 'Ac3' + elif info_part == 'fullHd': + info_part = '1080p' + except AttributeError: + info_part = info_part.replace('·', '').replace(',', '') + info_part = info_part.replace('by', '-').strip() + formatted_info += ' ' + info_part + else: + formatted_info = info_part + + allowed_words = [word for word in formatted_info.split() if word.lower() not in bad_words] + final_title = '{0} '.format(result_title) + ' 
'.join(allowed_words).strip('-').strip() + + return final_title + - return results +def _has_only_subs(title): + + title = title.lower() + + if 'sub' in title: + title = title.split() + counter = 0 + for word in title: + if 'ita' in word: + counter = counter + 1 + if counter < 2: + return True provider = TNTVillageProvider() From 71cb481a79c00152d5d23c2a510f120eed976023 Mon Sep 17 00:00:00 2001 From: labrys Date: Fri, 17 Jun 2016 00:58:11 -0400 Subject: [PATCH 68/85] Fix BIT-HDTV --- sickbeard/providers/bithdtv.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sickbeard/providers/bithdtv.py b/sickbeard/providers/bithdtv.py index 78a4444d47..f59e453425 100644 --- a/sickbeard/providers/bithdtv.py +++ b/sickbeard/providers/bithdtv.py @@ -152,7 +152,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man logger.log('Found result: {0} with {1} seeders and {2} leechers'.format (title, seeders, leechers), logger.DEBUG) - items.append(item) + items.append(item) except (AttributeError, TypeError, KeyError, ValueError, IndexError): logger.log('Failed parsing provider. Traceback: {0!r}'.format (traceback.format_exc()), logger.ERROR) From ffd321228421d363561f2f6e5b2bb7ab6f662455 Mon Sep 17 00:00:00 2001 From: labrys Date: Fri, 17 Jun 2016 00:56:16 -0400 Subject: [PATCH 69/85] More standardization --- sickbeard/providers/abnormal.py | 52 ++++++---- sickbeard/providers/alpharatio.py | 27 ++++-- sickbeard/providers/anizb.py | 20 ++-- sickbeard/providers/binsearch.py | 18 +++- sickbeard/providers/bitcannon.py | 48 ++++++--- sickbeard/providers/bithdtv.py | 9 +- sickbeard/providers/bitsnoop.py | 48 ++++++--- sickbeard/providers/bluetigers.py | 140 ++++++++++++++++----------- sickbeard/providers/btdigg.py | 34 ++++--- sickbeard/providers/btn.py | 53 ++++++---- sickbeard/providers/cpasbien.py | 34 +++++-- sickbeard/providers/danishbits.py | 37 ++++--- sickbeard/providers/elitetorrent.py | 91 +++++++++-------- sickbeard/providers/extratorrent.py | 62 ++++++++---- sickbeard/providers/freshontv.py | 57 +++++++---- sickbeard/providers/gftracker.py | 40 +++++--- sickbeard/providers/hd4free.py | 56 ++++++++--- sickbeard/providers/hdbits.py | 18 +++- sickbeard/providers/hdspace.py | 60 ++++++++---- sickbeard/providers/hdtorrents.py | 43 +++++--- sickbeard/providers/hounddawgs.py | 87 ++++++++++------- sickbeard/providers/ilovetorrents.py | 58 ++++++----- sickbeard/providers/iptorrents.py | 63 ++++++------ sickbeard/providers/kat.py | 26 +++-- sickbeard/providers/limetorrents.py | 30 +++--- sickbeard/providers/morethantv.py | 43 +++++--- sickbeard/providers/newpct.py | 71 ++++++++------ sickbeard/providers/newznab.py | 9 +- sickbeard/providers/norbits.py | 40 +++++--- sickbeard/providers/nyaatorrents.py | 43 +++++--- 30 files changed, 915 insertions(+), 502 deletions(-) diff --git a/sickbeard/providers/abnormal.py b/sickbeard/providers/abnormal.py index 35d7c55229..aed5692913 100644 --- a/sickbeard/providers/abnormal.py +++ b/sickbeard/providers/abnormal.py @@ -32,7 +32,7 @@ class ABNormalProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes - + """ABNormal Torrent provider""" def __init__(self): # Provider Init @@ -42,10 +42,6 @@ def __init__(self): self.username = None self.password = None - # Torrent Stats - self.minseed = None - self.minleech = None - # URLs self.url = 'https://abnormal.ws' self.urls = { @@ -56,6 +52,12 @@ def __init__(self): # Proper Strings self.proper_strings = ['PROPER'] + # Miscellaneous Options + + # Torrent Stats + 
self.minseed = None + self.minleech = None + # Cache self.cache = tvcache.TVCache(self, min_time=30) @@ -80,16 +82,32 @@ def login(self): return True def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches + """ + ABNormal search and parsing + + :param search_string: A dict with mode (key) and the search value (value) + :param age: Not used + :param ep_obj: Not used + :returns: A list of search results (structure) + """ results = [] if not self.login(): return results # Search Params search_params = { - 'cat[]': ['TV|SD|VOSTFR', 'TV|HD|VOSTFR', 'TV|SD|VF', 'TV|HD|VF', - 'TV|PACK|FR', 'TV|PACK|VOSTFR', 'TV|EMISSIONS', 'ANIME'], - # Both ASC and DESC are available for sort direction - 'way': 'DESC' + 'cat[]': [ + 'TV|SD|VOSTFR', + 'TV|HD|VOSTFR', + 'TV|SD|VF', + 'TV|HD|VF', + 'TV|PACK|FR', + 'TV|PACK|VOSTFR', + 'TV|EMISSIONS', + 'ANIME', + ], + 'order': 'Time', # Sorting: Available parameters: ReleaseName, Seeders, Leechers, Snatched, Size + 'way': 'DESC', # Both ASC and DESC are available for sort direction } # Units @@ -97,19 +115,19 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), - logger.DEBUG) + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) + search_params['order'] = 'Seeders' - # Sorting: Available parameters: ReleaseName, Seeders, Leechers, Snatched, Size - search_params['order'] = ('Seeders', 'Time')[mode == 'RSS'] search_params['search'] = re.sub(r'[()]', '', search_string) data = self.get_url(self.urls['search'], params=search_params, returns='text') if not data: + logger.log('No data returned from provider', logger.DEBUG) continue with BS4Parser(data, 'html5lib') as html: @@ -143,8 +161,8 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" - 'minimum seeders: {0}. Seeders: {1})'.format + logger.log("Discarding torrent because it doesn't meet the " + "minimum seeders: {0}. 
Seeders: {1}".format (title, seeders), logger.DEBUG) continue @@ -159,7 +177,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/alpharatio.py b/sickbeard/providers/alpharatio.py index a66ed392a9..5be2d498fc 100644 --- a/sickbeard/providers/alpharatio.py +++ b/sickbeard/providers/alpharatio.py @@ -32,7 +32,7 @@ class AlphaRatioProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes - + """AlphaRatio Torrent provider""" def __init__(self): # Provider Init @@ -42,10 +42,6 @@ def __init__(self): self.username = None self.password = None - # Torrent Stats - self.minseed = None - self.minleech = None - # URLs self.url = 'http://alpharatio.cc' self.urls = { @@ -56,6 +52,12 @@ def __init__(self): # Proper Strings self.proper_strings = ['PROPER', 'REPACK'] + # Torrent Stats + self.minseed = None + self.minleech = None + + # Miscellaneous Options + # Cache self.cache = tvcache.TVCache(self) @@ -83,6 +85,14 @@ def login(self): return True def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches + """ + AlphaRatio search and parsing + + :param search_string: A dict with mode (key) and the search value (value) + :param age: Not used + :param ep_obj: Not used + :returns: A list of search results (structure) + """ results = [] if not self.login(): return results @@ -110,9 +120,10 @@ def process_column_header(td): for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: + if mode != 'RSS': logger.log('Search string: {search}'.format (search=search_string), logger.DEBUG) @@ -155,8 +166,8 @@ def process_column_header(td): # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1}'.format + logger.log("Discarding torrent because it doesn't meet the " + "minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue diff --git a/sickbeard/providers/anizb.py b/sickbeard/providers/anizb.py index 2eb4a3f133..a97efe0221 100644 --- a/sickbeard/providers/anizb.py +++ b/sickbeard/providers/anizb.py @@ -37,17 +37,24 @@ def __init__(self): # Provider Init NZBProvider.__init__(self, 'Anizb') + # Credentials self.public = True - self.supports_absolute_numbering = True - self.anime_only = True + # URLs self.url = 'https://anizb.org/' self.urls = { 'rss': self.url, 'api': urljoin(self.url, 'api/?q=') } + # Proper Strings + # Miscellaneous Options + self.supports_absolute_numbering = True + self.anime_only = True + + # Torrent Stats + # Cache self.cache = tvcache.TVCache(self) @@ -58,21 +65,20 @@ def _get_size(self, item): def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals """Start searching for anime using the provided search_strings. 
Used for backlog and daily""" results = [] - if self.show and not self.show.is_anime: return results for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), logger.DEBUG) + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) search_url = (self.urls['rss'], self.urls['api'] + search_string)[mode != 'RSS'] - data = self.get_url(search_url, returns='text') if not data: logger.log('No data returned from provider', logger.DEBUG) @@ -101,7 +107,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man item = { 'title': title, 'link': download_url, - 'size': size + 'size': size, } items.append(item) diff --git a/sickbeard/providers/binsearch.py b/sickbeard/providers/binsearch.py index 26135267b6..e63ff6cbd2 100644 --- a/sickbeard/providers/binsearch.py +++ b/sickbeard/providers/binsearch.py @@ -28,17 +28,27 @@ class BinSearchProvider(NZBProvider): - + """BinSearch Newznab provider""" def __init__(self): + # Provider Init NZBProvider.__init__(self, 'BinSearch') - self.url = 'https://www.binsearch.info' - self.urls = {'rss': urljoin(self.url, 'rss.php')} - + # Credentials self.public = True self.supports_backlog = False + # URLs + self.url = 'https://www.binsearch.info' + self.urls = { + 'rss': urljoin(self.url, 'rss.php') + } + + # Proper Strings + + # Miscellaneous Options + + # Cache self.cache = BinSearchCache(self, min_time=30) # only poll Binsearch every 30 minutes max diff --git a/sickbeard/providers/bitcannon.py b/sickbeard/providers/bitcannon.py index a7abf41ae2..de351f34cb 100644 --- a/sickbeard/providers/bitcannon.py +++ b/sickbeard/providers/bitcannon.py @@ -30,21 +30,39 @@ class BitCannonProvider(TorrentProvider): - + """BitCannon Torrent provider""" def __init__(self): + # Provider Init TorrentProvider.__init__(self, 'BitCannon') + # Credentials + self.api_key = None + + # URLs + self.custom_url = None + + # Proper Strings + + # Miscellaneous Options + + # Torrent Stats self.minseed = None self.minleech = None - self.custom_url = None - self.api_key = None + # Cache self.cache = tvcache.TVCache(self, search_params={'RSS': ['tv', 'anime']}) def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-branches, too-many-locals + """ + BitCannon search and parsing + + :param search_string: A dict with mode (key) and the search value (value) + :param age: Not used + :param ep_obj: Not used + :returns: A list of search results (structure) + """ results = [] - url = 'http://localhost:3000/' if self.custom_url: if not validators.url(self.custom_url, require_tld=False): @@ -52,21 +70,21 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results url = self.custom_url - search_params = {} - - anime = ep_obj and ep_obj.show and ep_obj.show.anime - search_params['category'] = ('tv', 'anime')[bool(anime)] - - if self.api_key: - search_params['apiKey'] = self.api_key + # Search Params + search_params = { + 'category': 'anime' if ep_obj and ep_obj.show and ep_obj.show.anime else 'tv', + 'apiKey': self.api_key + } for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_strings[mode]: search_params['q'] = search_string if mode != 'RSS': - 
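# Log the exact query once per non-RSS search to make provider debugging easier.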
logger.log('Search string: {0}'.format(search_string), logger.DEBUG) + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) search_url = urljoin(url, 'api/search') parsed_json = self.get_url(search_url, params=search_params, returns='json') @@ -93,10 +111,11 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man else: seeders = leechers = 0 + # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': logger.log("Discarding torrent because it doesn't meet the " - 'minimum seeders: {0}. Seeders: {1})'.format + "minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue @@ -119,6 +138,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man except (AttributeError, TypeError, KeyError, ValueError, IndexError): logger.log('Failed parsing provider. Traceback: {0!r}'.format (traceback.format_exc()), logger.ERROR) + continue results += items diff --git a/sickbeard/providers/bithdtv.py b/sickbeard/providers/bithdtv.py index f59e453425..8348413c76 100644 --- a/sickbeard/providers/bithdtv.py +++ b/sickbeard/providers/bithdtv.py @@ -87,6 +87,8 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if mode != 'RSS': search_params['search'] = search_string + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) if mode == 'Season': search_params['cat'] = 12 @@ -96,8 +98,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man logger.log('No data returned from provider', logger.DEBUG) continue - # Need the html.parser, as the html5parser has issues with this site. - with BS4Parser(response.text, 'html.parser') as html: + with BS4Parser(response.text, 'html.parser') as html: # Use html.parser, since html5parser has issues with this site. all_tables = html('table', width='750') # Get the last table with a width of 750px. if all_tables: result_table = all_tables[-1] @@ -131,8 +132,8 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1}'.format + logger.log("Discarding torrent because it doesn't meet the " + "minimum seeders: {0}. 
Seeders: {1}".format (title, seeders), logger.DEBUG) continue diff --git a/sickbeard/providers/bitsnoop.py b/sickbeard/providers/bitsnoop.py index bbddffe8a1..f404d993ec 100644 --- a/sickbeard/providers/bitsnoop.py +++ b/sickbeard/providers/bitsnoop.py @@ -19,8 +19,10 @@ from __future__ import unicode_literals import traceback -import sickbeard +from requests.compat import urljoin + +import sickbeard from sickbeard import logger, tvcache from sickbeard.bs4_parser import BS4Parser @@ -29,41 +31,57 @@ class BitSnoopProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes - + """BitSnoop Torrent provider""" def __init__(self): + # Provider Init TorrentProvider.__init__(self, 'BitSnoop') + # Credentials + self.public = True + + # URLs + self.url = 'http://bitsnoop.com' self.urls = { - 'index': 'http://bitsnoop.com', - 'search': 'http://bitsnoop.com/search/video/', - 'rss': 'http://bitsnoop.com/new_video.html?fmt=rss' + 'index': self.url, + 'search': urljoin(self.url, '/search/video/'), + 'rss': urljoin(self.url, '/new_video.html?fmt=rss'), } - self.url = self.urls['index'] + # Proper Strings + self.proper_strings = ['PROPER', 'REPACK'] - self.public = True + # Miscellaneous Options + + # Torrent Stats self.minseed = None self.minleech = None - self.proper_strings = ['PROPER', 'REPACK'] - + # Cache self.cache = tvcache.TVCache(self, search_params={'RSS': ['rss']}) def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-branches,too-many-locals + """ + BitSnoop search and parsing + + :param search_string: A dict with mode (key) and the search value (value) + :param age: Not used + :param ep_obj: Not used + :returns: A list of search results (structure) + """ results = [] for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), logger.DEBUG) + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) search_url = (self.urls['rss'], self.urls['search'] + search_string + '/s/d/1/?fmt=rss')[mode != 'RSS'] - data = self.get_url(search_url, returns='text') if not data: logger.log('No data returned from provider', logger.DEBUG) @@ -98,8 +116,8 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1}'.format + logger.log("Discarding torrent because it doesn't meet the " + "minimum seeders: {0}. 
Seeders: {1}".format (title, seeders), logger.DEBUG) continue @@ -114,7 +132,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': info_hash + 'hash': info_hash, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/bluetigers.py b/sickbeard/providers/bluetigers.py index d7adffb050..8a0808fa83 100644 --- a/sickbeard/providers/bluetigers.py +++ b/sickbeard/providers/bluetigers.py @@ -21,6 +21,7 @@ import re import traceback +from requests.compat import urljoin from requests.utils import dict_from_cookiejar from sickbeard import logger, tvcache @@ -30,29 +31,36 @@ class BlueTigersProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes - + """BlueTigers Torrent provider""" def __init__(self): + # Provider Init TorrentProvider.__init__(self, 'BLUETIGERS') + # Credentials self.username = None self.password = None self.token = None - self.cache = tvcache.TVCache(self, min_time=10) # Only poll BLUETIGERS every 10 minutes max - + # URLs + self.url = 'https://www.bluetigers.ca/' self.urls = { - 'base_url': 'https://www.bluetigers.ca/', - 'search': 'https://www.bluetigers.ca/torrents-search.php', - 'login': 'https://www.bluetigers.ca/account-login.php', - 'download': 'https://www.bluetigers.ca/torrents-details.php?id=%s&hit=1', + 'base_url': self.url, + 'search': urljoin(self.url, 'torrents-search.php'), + 'login': urljoin(self.url, 'account-login.php'), + 'download': urljoin(self.url, 'torrents-details.php?id=%s&hit=1'), } - self.search_params = { - 'c16': 1, 'c10': 1, 'c130': 1, 'c131': 1, 'c17': 1, 'c18': 1, 'c19': 1, 'c9': 1 - } + # Proper Strings - self.url = self.urls['base_url'] + # Miscellaneous Options + + # Torrent Stats + self.minseed = None + self.minleech = None + + # Cache + self.cache = tvcache.TVCache(self, min_time=10) # Only poll BLUETIGERS every 10 minutes max def login(self): if any(dict_from_cookiejar(self.session.cookies).values()): @@ -65,7 +73,6 @@ def login(self): } response = self.get_url(self.urls['login'], post_data=login_params, returns='text') - if not response: check_login = self.get_url(self.urls['base_url'], returns='text') if re.search('account-logout.php', check_login): @@ -81,74 +88,95 @@ def login(self): return True def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals + """ + BLUETIGERS search and parsing + + :param search_string: A dict with mode (key) and the search value (value) + :param age: Not used + :param ep_obj: Not used + :returns: A list of search results (structure) + """ results = [] - if not self.login(): return results + # Search Params + search_params = { + 'c9': 1, + 'c10': 1, + 'c16': 1, + 'c17': 1, + 'c18': 1, + 'c19': 1, + 'c130': 1, + 'c131': 1, + } + for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), logger.DEBUG) + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) - self.search_params['search'] = search_string - - data = self.get_url(self.urls['search'], params=self.search_params, returns='text') + search_params['search'] = search_string + data = self.get_url(self.urls['search'], params=search_params, returns='text') if not data: continue with BS4Parser(data, 'html5lib') as html: result_linkz = html('a', 
href=re.compile('torrents-details')) + # Continue only if at least one release is found if not result_linkz: - logger.log('Data returned from provider do not contains any torrent', logger.DEBUG) + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue - if result_linkz: - for link in result_linkz: - try: - title = link.text - download_url = self.urls['base_url'] + link['href'] - download_url = download_url.replace('torrents-details', 'download') - # FIXME - size = -1 - seeders = 1 - leechers = 0 - - if not title or not download_url: - continue - - # Filter unseeded torrent - # if seeders < min(self.minseed, 1): - # if mode != 'RSS': - # logger.log('Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})'.format - # (title, seeders), logger.DEBUG) - # continue - - item = { - 'title': title, - 'link': download_url, - 'size': size, - 'seeders': seeders, - 'leechers': leechers, - 'pubdate': None, - 'hash': None - } - if mode != 'RSS': - logger.log('Found result: {0} with {1} seeders and {2} leechers'.format - (title, seeders, leechers), logger.DEBUG) + for link in result_linkz: + try: + title = link.text + download_url = self.urls['base_url'] + link['href'] + download_url = download_url.replace('torrents-details', 'download') + if not all([title, download_url]): + continue - items.append(item) + # FIXME + seeders = 1 + leechers = 0 - except (AttributeError, TypeError, KeyError, ValueError, IndexError): - logger.log('Failed parsing provider. Traceback: {0!r}'.format - (traceback.format_exc()), logger.ERROR) + # Filter unseeded torrent + if seeders < min(self.minseed, 1): + if mode != 'RSS': + logger.log("Discarding torrent because it doesn't meet the " + "minimum seeders: {0}. Seeders: {1}".format + (title, seeders), logger.DEBUG) + continue + + # FIXME + size = -1 + + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None, + } + if mode != 'RSS': + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) + + items.append(item) + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. 
Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) continue - results += items + results += items return results diff --git a/sickbeard/providers/btdigg.py b/sickbeard/providers/btdigg.py index bd737da185..8e735c3710 100644 --- a/sickbeard/providers/btdigg.py +++ b/sickbeard/providers/btdigg.py @@ -28,7 +28,7 @@ class BTDiggProvider(TorrentProvider): - + """BTDigg Torrent provider""" def __init__(self): # Provider Init @@ -36,18 +36,22 @@ def __init__(self): self.public = True - # Torrent Stats - self.minseed = None - self.minleech = None - # URLs self.url = 'https://btdigg.org' - self.urls = {'api': 'https://api.btdigg.org/api/private-341ada3245790954/s02'} + self.urls = { + 'api': 'https://api.btdigg.org/api/private-341ada3245790954/s02', + } self.custom_url = None # Proper Strings self.proper_strings = ['PROPER', 'REPACK'] + # Miscellaneous Options + + # Torrent Stats + self.minseed = None + self.minleech = None + # Use this hacky way for RSS search since most results will use this codecs cache_params = {'RSS': ['x264', 'x264.HDTV', '720.HDTV.x264']} @@ -56,20 +60,24 @@ def __init__(self): def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals results = [] - search_params = {'p': 0} + + # Search Params + search_params = { + 'p': 0, + 'order': 2, + } for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: search_params['q'] = search_string if mode != 'RSS': search_params['order'] = 0 - logger.log('Search string: {0}'.format(search_string), logger.DEBUG) - else: - search_params['order'] = 2 + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) if self.custom_url: # if not validators.url(self.custom_url): # logger.log('Invalid custom url set, please check your settings', logger.WARNING) @@ -80,7 +88,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man jdata = self.get_url(search_url, params=search_params, returns='json') if not jdata: - logger.log('Provider did not return data', logger.DEBUG) + logger.log('No data returned from provider', logger.DEBUG) continue for torrent in jdata: @@ -122,7 +130,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/btn.py b/sickbeard/providers/btn.py index dd46b64007..bdd84a1668 100644 --- a/sickbeard/providers/btn.py +++ b/sickbeard/providers/btn.py @@ -36,21 +36,31 @@ class BTNProvider(TorrentProvider): - + """BTN Torrent provider""" def __init__(self): + # Provider Init TorrentProvider.__init__(self, 'BTN') - self.supports_absolute_numbering = True - + # Credentials self.api_key = None - self.cache = BTNCache(self, min_time=15) # Only poll BTN every 15 minutes max + # URLs + self.url = 'http://broadcasthe.net/' + self.urls = { + 'base_url': 'http://api.btnapps.net', + 'website': self.url, + } + + # Proper Strings + + # Miscellaneous Options + self.supports_absolute_numbering = True - self.urls = {'base_url': 'http://api.btnapps.net', - 'website': 'http://broadcasthe.net/', } + # Torrent Stats - self.url = self.urls['website'] + # Cache + self.cache = BTNCache(self, min_time=15) # Only poll BTN every 15 minutes max def _check_auth(self): if not 
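# Sketch of the API-key guard that BTN's _check_auth() performs here: raise an
# auth error when no key is configured. AuthException is stubbed so the
# snippet runs standalone, and the message text is illustrative.

class AuthException(Exception):
    """Stand-in for sickrage.helper.exceptions.AuthException."""

def check_auth(api_key, provider_name='BTN'):
    if not api_key:
        raise AuthException('Your authentication credentials for {0} are '
                            'missing, check your config.'.format(provider_name))
    return True

check_auth('some-api-key')  # passes; check_auth(None) would raise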
self.api_key:
@@ -71,32 +81,37 @@ def _checkAuthFromData(self, parsed_json):
         return True
 
     def search(self, search_strings, age=0, ep_obj=None):  # pylint:disable=too-many-locals
-
+        """
+        BTN search and parsing
+
+        :param search_strings: A dict with mode (key) and the search value (value)
+        :param age: Not used
+        :param ep_obj: Not used
+        :returns: A list of search results (structure)
+        """
+        results = []
         self._check_auth()
-        results = []
-        params = {}
-        apikey = self.api_key
+        # Search Params
+        search_params = {
+        }
         # age in seconds
         if age:
-            params['age'] = '<=' + str(int(age))
+            search_params['age'] = '<=' + str(int(age))
 
         if search_strings:
-            params.update(search_strings)
+            search_params.update(search_strings)
             logger.log('Search string: %s' % search_strings, logger.DEBUG)
 
-        parsed_json = self._api_call(apikey, params)
+        parsed_json = self._api_call(self.api_key, search_params)
         if not parsed_json:
             logger.log('No data returned from provider', logger.DEBUG)
             return results
 
         if self._checkAuthFromData(parsed_json):
-            if 'torrents' in parsed_json:
-                found_torrents = parsed_json['torrents']
-            else:
-                found_torrents = {}
+            found_torrents = parsed_json.get('torrents', {})
 
             # We got something, we know the API sends max 1000 results at a time.
             # See if there are more than 1000 results for our query, if not we
@@ -112,7 +127,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint:disable=too-many
 
             # +1 because range(1,4) = 1, 2, 3
             for page in range(1, pages_needed + 1):
-                parsed_json = self._api_call(apikey, params, results_per_page, page * results_per_page)
+                parsed_json = self._api_call(self.api_key, search_params, results_per_page, page * results_per_page)
                 # Note that this these are individual requests and might time out individually. This would result in 'gaps'
                 # in the results. There is no way to fix this though.
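# Worked example of the paging arithmetic referenced in the hunk above: the
# BTN API caps responses at 1000 results, so extra pages are fetched at an
# offset of page * results_per_page. The ceil division is a sketch; the
# patch's own pages_needed computation falls outside the displayed hunks.
results_per_page = 1000
found_torrents_count = 2500  # illustrative total reported by the API

pages_needed = -(-found_torrents_count // results_per_page)  # ceil -> 3
# +1 because range(1, 4) = 1, 2, 3 (matching the comment in the code)
for page in range(1, pages_needed + 1):
    print('fetch page {0} at offset {1}'.format(page, page * results_per_page))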
if 'torrents' in parsed_json: diff --git a/sickbeard/providers/cpasbien.py b/sickbeard/providers/cpasbien.py index 3550c42c1f..b56d8b194c 100644 --- a/sickbeard/providers/cpasbien.py +++ b/sickbeard/providers/cpasbien.py @@ -29,22 +29,36 @@ class CpasbienProvider(TorrentProvider): - + """Cpasbien Torrent provider""" def __init__(self): + # Provider Init TorrentProvider.__init__(self, 'Cpasbien') + # Credentials self.public = True - self.minseed = None - self.minleech = None + + # URLs self.url = 'http://www.cpasbien.cm' + # Proper Strings self.proper_strings = ['PROPER', 'REPACK'] + + # Miscellaneous Options + + # Torrent Stats + self.minseed = None + self.minleech = None + + # Cache self.cache = tvcache.TVCache(self) def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals results = [] + # Units + units = ['o', 'Ko', 'Mo', 'Go', 'To', 'Po'] + for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) @@ -52,8 +66,8 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), - logger.DEBUG) + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) search_url = self.url + '/recherche/' + search_string.replace('.', '-').replace(' ', '-') + '.html,trie-seeds-d' else: search_url = self.url + '/view_cat.php?categorie=series&trie=date-d' @@ -75,16 +89,16 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man seeders = try_int(result.find(class_='up').get_text(strip=True)) leechers = try_int(result.find(class_='down').get_text(strip=True)) + + # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1})'.format + logger.log("Discarding torrent because it doesn't meet the " + "minimum seeders: {0}. 
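# Illustrative re-implementation of the convert_size(..., units=units) call
# above: Cpasbien reports sizes with French suffixes ('o' for octet), so the
# default English unit table would not match. This helper is a simplified
# sketch, not the real sickrage.helper.common.convert_size.
def convert_size(size_string, units):
    value, unit = size_string.split()
    if unit not in units:
        return None
    return float(value.replace(',', '.')) * 1024 ** units.index(unit)

units = ['o', 'Ko', 'Mo', 'Go', 'To', 'Po']
print(convert_size('716 Mo', units) or -1)  # 750780416.0 bytes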
Seeders: {1}".format (title, seeders), logger.DEBUG) continue torrent_size = result.find(class_='poid').get_text(strip=True) - - units = ['o', 'Ko', 'Mo', 'Go', 'To', 'Po'] size = convert_size(torrent_size, units=units) or -1 item = { @@ -94,7 +108,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/danishbits.py b/sickbeard/providers/danishbits.py index 948e82e077..3c4f834d79 100644 --- a/sickbeard/providers/danishbits.py +++ b/sickbeard/providers/danishbits.py @@ -20,6 +20,7 @@ import traceback +from requests.compat import urljoin from requests.utils import dict_from_cookiejar from sickbeard import logger, tvcache @@ -46,14 +47,18 @@ def __init__(self): self.freeleech = True # URLs - self.url = 'https://danishbits.org/' + self.url = 'https://danishbits.org' self.urls = { - 'login': self.url + 'login.php', - 'search': self.url + 'torrents.php', + 'login': urljoin(self.url, 'login.php'), + 'search': urljoin(self.url, 'torrents.php'), } # Proper Strings + # Miscellaneous Options + + # Torrent Stats + # Cache self.cache = tvcache.TVCache(self, min_time=10) # Only poll Danishbits every 10 minutes max @@ -83,6 +88,14 @@ def login(self): return True def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches + """ + DanishBits search and parsing + + :param search_string: A dict with mode (key) and the search value (value) + :param age: Not used + :param ep_obj: Not used + :returns: A list of search results (structure) + """ results = [] if not self.login(): return results @@ -107,16 +120,15 @@ def process_column_header(td): for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), - logger.DEBUG) + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) search_params['search'] = search_string - data = self.get_url(self.urls['search'], params=search_params, returns='text') if not data: logger.log('No data returned from provider', logger.DEBUG) @@ -137,6 +149,9 @@ def process_column_header(td): # Skip column headers for result in torrent_rows[1:]: + cells = result('td') + if len(cells) < len(labels): + continue try: title = result.find(class_='croptorrenttext').get_text(strip=True) @@ -144,16 +159,14 @@ def process_column_header(td): if not all([title, download_url]): continue - cells = result('td') - seeders = try_int(cells[labels.index('Seeders')].get_text(strip=True)) leechers = try_int(cells[labels.index('Leechers')].get_text(strip=True)) # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1})'.format + logger.log("Discarding torrent because it doesn't meet the " + "minimum seeders: {0}. 
Seeders: {1}".format (title, seeders), logger.DEBUG) continue @@ -171,7 +184,7 @@ def process_column_header(td): 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/elitetorrent.py b/sickbeard/providers/elitetorrent.py index 396b87a77f..5d2a6bbef4 100644 --- a/sickbeard/providers/elitetorrent.py +++ b/sickbeard/providers/elitetorrent.py @@ -21,6 +21,8 @@ import re import traceback +from requests.compat import urljoin + from sickbeard import logger, tvcache from sickbeard.bs4_parser import BS4Parser @@ -29,49 +31,59 @@ class elitetorrentProvider(TorrentProvider): - + """EliteTorrent Torrent provider""" def __init__(self): + # Provider Init TorrentProvider.__init__(self, 'EliteTorrent') - self.onlyspasearch = None - self.minseed = None - self.minleech = None - self.cache = tvcache.TVCache(self) # Only poll EliteTorrent every 20 minutes max + # Credentials + # URLs + self.url = 'http://www.elitetorrent.net' self.urls = { - 'base_url': 'http://www.elitetorrent.net', - 'search': 'http://www.elitetorrent.net/torrents.php' + 'base_url': self.url, + 'search': urljoin(self.url, 'torrents.php') } - self.url = self.urls['base_url'] + # Proper Strings - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches - results = [] - lang_info = '' if not ep_obj or not ep_obj.show else ep_obj.show.lang + # Miscellaneous Options + self.onlyspasearch = None + + # Torrent Stats + self.minseed = None + self.minleech = None + + # Cache + self.cache = tvcache.TVCache(self) # Only poll EliteTorrent every 20 minutes max + def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches """ - Search query: - http://www.elitetorrent.net/torrents.php?cat=4&modo=listado&orden=fecha&pag=1&buscar=fringe - - cat = 4 => Shows - modo = listado => display results mode - orden = fecha => order - buscar => Search show - pag = 1 => page number + EliteTorrent search and parsing + + :param search_string: A dict with mode (key) and the search value (value) + :param age: Not used + :param ep_obj: Not used + :returns: A list of search results (structure) """ + results = [] + lang_info = '' if not ep_obj or not ep_obj.show else ep_obj.show.lang + # Search query: + # http://www.elitetorrent.net/torrents.php?cat=4&modo=listado&orden=fecha&pag=1&buscar=fringe + # Search Params search_params = { - 'cat': 4, - 'modo': 'listado', - 'orden': 'fecha', - 'pag': 1, - 'buscar': '' + 'cat': 4, # Shows + 'modo': 'listado', # display results mode + 'orden': 'fecha', # date order + 'pag': 1, # page number + 'buscar': '', # Search show } for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) # Only search if user conditions are true if self.onlyspasearch and lang_info != 'es' and mode != 'RSS': @@ -79,15 +91,16 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man continue for search_string in search_strings[mode]: + if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), - logger.DEBUG) + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) search_string = re.sub(r'S0*(\d*)E(\d*)', r'\1x\2', search_string) search_params['buscar'] = search_string.strip() if mode != 'RSS' else '' - data = self.get_url(self.urls['search'], 
params=search_params, returns='text') if not data: + logger.log('No data returned from provider', logger.DEBUG) continue with BS4Parser(data, 'html5lib') as html: @@ -99,27 +112,27 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue + # Skip column headers for row in torrent_rows[1:]: try: - download_url = self.urls['base_url'] + row.find('a')['href'] title = self._process_title(row.find('a', class_='nombre')['title']) - seeders = try_int(row.find('td', class_='semillas').get_text(strip=True)) - leechers = try_int(row.find('td', class_='clientes').get_text(strip=True)) - - # Provider does not provide size - size = -1 - + download_url = self.urls['base_url'] + row.find('a')['href'] if not all([title, download_url]): continue + seeders = try_int(row.find('td', class_='semillas').get_text(strip=True)) + leechers = try_int(row.find('td', class_='clientes').get_text(strip=True)) + # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1})'.format + logger.log("Discarding torrent because it doesn't meet the " + "minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue + size = -1 # Provider does not provide size + item = { 'title': title, 'link': download_url, @@ -127,7 +140,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format @@ -156,7 +169,7 @@ def _process_title(title): title = title.replace('(calidad regular)', 'DVDrip x264') title = title.replace('(calidad media)', 'DVDrip x264') - # Language, all results from this provider have spanish audio, we append it to title (avoid to download undesired torrents) + # Language, all results from this provider have spanish audio, we append it to title (to avoid downloading undesired torrents) title += ' SPANISH AUDIO' title += '-ELITETORRENT' diff --git a/sickbeard/providers/extratorrent.py b/sickbeard/providers/extratorrent.py index 21e4305884..08b73260f0 100644 --- a/sickbeard/providers/extratorrent.py +++ b/sickbeard/providers/extratorrent.py @@ -21,8 +21,10 @@ import re import traceback -import sickbeard +from requests.compat import urljoin + +import sickbeard from sickbeard import logger, tvcache from sickbeard.bs4_parser import BS4Parser from sickbeard.common import USER_AGENT @@ -32,41 +34,67 @@ class ExtraTorrentProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes - + """ExtraTorrent Torrent provider""" def __init__(self): + # Provider Init TorrentProvider.__init__(self, 'ExtraTorrent') + # Credentials + self.public = True + + # URLs + self.url = 'http://extratorrent.cc' self.urls = { - 'index': 'http://extratorrent.cc', - 'rss': 'http://extratorrent.cc/rss.xml', + 'index': self.url, + 'rss': urljoin(self.url, 'rss.xml'), } + self.custom_url = None - self.url = self.urls['index'] + # Proper Strings - self.public = True + # Miscellaneous Options + self.headers.update({'User-Agent': USER_AGENT}) + self.search_params = {'cid': 8} + + # Torrent Stats self.minseed = None self.minleech = None - self.custom_url = None + # Cache self.cache = tvcache.TVCache(self, min_time=30) # Only poll ExtraTorrent every 30 minutes max - 
self.headers.update({'User-Agent': USER_AGENT}) - self.search_params = {'cid': 8} def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches + """ + ExtraTorrent search and parsing + + :param search_string: A dict with mode (key) and the search value (value) + :param age: Not used + :param ep_obj: Not used + :returns: A list of search results (structure) + """ results = [] + + # Search Params + search_params = { + 'cid': 8, + 'type': 'rss', + } + for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: + if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), logger.DEBUG) + search_params['type'] = 'search' + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) - self.search_params.update({'type': ('search', 'rss')[mode == 'RSS'], 'search': search_string}) + search_params['search'] = search_string search_url = self.urls['rss'] if not self.custom_url else self.urls['rss'].replace(self.urls['index'], self.custom_url) - - data = self.get_url(search_url, params=self.search_params, returns='text') + data = self.get_url(search_url, params=search_params, returns='text') if not data: logger.log('No data returned from provider', logger.DEBUG) continue @@ -102,8 +130,8 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1})'.format + logger.log("Discarding torrent because it doesn't meet the " + "minimum seeders: {0}. 
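# Sketch of the custom-URL override in the hunk above: when a mirror is
# configured, the default host is swapped out of the RSS URL while the path
# is kept. The mirror address is a made-up example.
index = 'http://extratorrent.cc'
rss = index + '/rss.xml'
custom_url = 'http://extratorrent.example'  # hypothetical user-set mirror

search_url = rss if not custom_url else rss.replace(index, custom_url)
print(search_url)  # http://extratorrent.example/rss.xml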
Seeders: {1}".format (title, seeders), logger.DEBUG) continue @@ -118,7 +146,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/freshontv.py b/sickbeard/providers/freshontv.py index 1bd2390bc7..c09d9b268d 100644 --- a/sickbeard/providers/freshontv.py +++ b/sickbeard/providers/freshontv.py @@ -22,6 +22,7 @@ import time import traceback +from requests.compat import urljoin from requests.utils import add_dict_to_cookiejar, dict_from_cookiejar from sickbeard import logger, tvcache @@ -32,30 +33,40 @@ class FreshOnTVProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes - + """FreshOnTV Torrent provider""" def __init__(self): + # Provider Init TorrentProvider.__init__(self, 'FreshOnTV') - self._uid = None - self._hash = None + # Credentials self.username = None self.password = None - self.minseed = None - self.minleech = None - self.freeleech = False + self._uid = None + self._hash = None + self.cookies = None - self.cache = tvcache.TVCache(self) + # URLs + self.url = 'https://freshon.tv' + self.urls = { + 'base_url': self.url, + 'login': urljoin(self.url, 'login.php'), + 'detail': urljoin(self.url, 'details.php?id=%s'), + 'search': urljoin(self.url, 'browse.php?incldead=%s&words=0&cat=0&search=%s'), + 'download': urljoin(self.url, 'download.php?id=%s&type=torrent'), + } - self.urls = {'base_url': 'https://freshon.tv/', - 'login': 'https://freshon.tv/login.php?action=makelogin', - 'detail': 'https://freshon.tv/details.php?id=%s', - 'search': 'https://freshon.tv/browse.php?incldead=%s&words=0&cat=0&search=%s', - 'download': 'https://freshon.tv/download.php?id=%s&type=torrent'} + # Proper Strings - self.url = self.urls['base_url'] + # Miscellaneous Options + self.freeleech = False - self.cookies = None + # Torrent Stats + self.minseed = None + self.minleech = None + + # Cache + self.cache = tvcache.TVCache(self) def _check_auth(self): @@ -68,13 +79,16 @@ def login(self): if any(dict_from_cookiejar(self.session.cookies).values()): return True + login_params = { + 'username': self.username, + 'password': self.password, + 'login': 'submit', + 'action': 'makelogin', + } + if self._uid and self._hash: add_dict_to_cookiejar(self.session.cookies, self.cookies) else: - login_params = {'username': self.username, - 'password': self.password, - 'login': 'submit'} - response = self.get_url(self.urls['login'], post_data=login_params, returns='text') if not response: logger.log('Unable to connect to provider', logger.WARNING) @@ -114,12 +128,13 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), logger.DEBUG) + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) search_url = self.urls['search'] % (freeleech, search_string) @@ -222,7 +237,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git 
a/sickbeard/providers/gftracker.py b/sickbeard/providers/gftracker.py index a4b9594c56..18dd66e75a 100644 --- a/sickbeard/providers/gftracker.py +++ b/sickbeard/providers/gftracker.py @@ -22,6 +22,7 @@ import re import traceback +from requests.compat import urljoin from requests.utils import dict_from_cookiejar from sickbeard import logger, tvcache @@ -43,20 +44,22 @@ def __init__(self): self.username = None self.password = None - # Torrent Stats - self.minseed = None - self.minleech = None - # URLs - self.url = 'https://www.thegft.org/' + self.url = 'https://www.thegft.org' self.urls = { - 'login': self.url + 'loginsite.php', - 'search': self.url + 'browse.php', + 'login': urljoin(self.url, 'loginsite.php'), + 'search': urljoin(self.url, 'browse.php'), } # Proper Strings self.proper_strings = ['PROPER', 'REPACK', 'REAL'] + # Miscellaneous Options + + # Torrent Stats + self.minseed = None + self.minleech = None + # Cache self.cache = tvcache.TVCache(self) @@ -91,6 +94,14 @@ def login(self): return True def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches + """ + GFT search and parsing + + :param search_string: A dict with mode (key) and the search value (value) + :param age: Not used + :param ep_obj: Not used + :returns: A list of search results (structure) + """ results = [] if not self.login(): return results @@ -121,16 +132,15 @@ def process_column_header(td): for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), - logger.DEBUG) + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) search_params['search'] = search_string - data = self.get_url(self.urls['search'], params=search_params, returns='text') if not data: logger.log('No data returned from provider', logger.DEBUG) @@ -140,7 +150,7 @@ def process_column_header(td): torrent_table = html.find('div', id='torrentBrowse') torrent_rows = torrent_table('tr') if torrent_table else [] - # Continue only if at least one Release is found + # Continue only if at least one release is found if len(torrent_rows) < 2: logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue @@ -167,8 +177,8 @@ def process_column_header(td): # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1})'.format + logger.log("Discarding torrent because it doesn't meet the " + "minimum seeders: {0}. 
Seeders: {1}".format (title, seeders), logger.DEBUG) continue @@ -182,7 +192,7 @@ def process_column_header(td): 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/hd4free.py b/sickbeard/providers/hd4free.py index 3a456f0f61..306f924e38 100644 --- a/sickbeard/providers/hd4free.py +++ b/sickbeard/providers/hd4free.py @@ -21,6 +21,7 @@ import traceback from requests.compat import urljoin + from sickbeard import logger, tvcache from sickrage.helper.common import convert_size, try_int @@ -28,20 +29,32 @@ class HD4FreeProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes - + """HD4Free Torrent provider""" def __init__(self): + # Provider Init TorrentProvider.__init__(self, 'HD4Free') + # Credentials + self.username = None + self.api_key = None + + # URLs self.url = 'https://hd4free.xyz' - self.urls = {'search': urljoin(self.url, '/searchapi.php')} + self.urls = { + 'search': urljoin(self.url, '/searchapi.php'), + } + + # Proper Strings + # Miscellaneous Options self.freeleech = None - self.username = None - self.api_key = None + + # Torrent Stats self.minseed = None self.minleech = None + # Cache self.cache = tvcache.TVCache(self, min_time=10) # Only poll HD4Free every 10 minutes max def _check_auth(self): @@ -52,39 +65,48 @@ def _check_auth(self): return False def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches + """ + HD4Free search and parsing + + :param search_string: A dict with mode (key) and the search value (value) + :param age: Not used + :param ep_obj: Not used + :returns: A list of search results (structure) + """ results = [] if not self._check_auth: return results + # Search Params search_params = { 'tv': 'true', 'username': self.username, - 'apikey': self.api_key + 'apikey': self.api_key, + 'fl': 'true' if self.freeleech else None } for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: - if self.freeleech: - search_params['fl'] = 'true' - else: - search_params.pop('fl', '') if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), logger.DEBUG) + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) search_params['search'] = search_string else: - search_params.pop('search', '') + search_params['search'] = None + try: jdata = self.get_url(self.urls['search'], params=search_params, returns='json') except ValueError: logger.log('No data returned from provider', logger.DEBUG) continue + # Continue only if at least one release is found if not jdata: - logger.log('No data returned from provider', logger.DEBUG) + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue error = jdata.get('error') @@ -108,10 +130,12 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man seeders = jdata[i]['seeders'] leechers = jdata[i]['leechers'] + + # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1})'.format + logger.log("Discarding torrent because it doesn't meet the " + "minimum seeders: {0}. 
Seeders: {1}".format (title, seeders), logger.DEBUG) continue @@ -125,7 +149,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/hdbits.py b/sickbeard/providers/hdbits.py index 04562fc481..4c3ed2e61c 100644 --- a/sickbeard/providers/hdbits.py +++ b/sickbeard/providers/hdbits.py @@ -29,23 +29,33 @@ class HDBitsProvider(TorrentProvider): - + """HDBits Torrent provider""" def __init__(self): + # Provider Init TorrentProvider.__init__(self, 'HDBits') + # Credentials self.username = None self.passkey = None - self.cache = HDBitsCache(self, min_time=15) # only poll HDBits every 15 minutes max - + # URLs self.url = 'https://hdbits.org' self.urls = { 'search': urljoin(self.url, '/api/torrents'), 'rss': urljoin(self.url, '/api/torrents'), - 'download': urljoin(self.url, '/download.php') + 'download': urljoin(self.url, '/download.php'), } + # Proper Strings + + # Miscellaneous Options + + # Torrent Stats + + # Cache + self.cache = HDBitsCache(self, min_time=15) # only poll HDBits every 15 minutes max + def _check_auth(self): if not self.username or not self.passkey: diff --git a/sickbeard/providers/hdspace.py b/sickbeard/providers/hdspace.py index 805d409bd9..d58f71e753 100644 --- a/sickbeard/providers/hdspace.py +++ b/sickbeard/providers/hdspace.py @@ -34,23 +34,28 @@ class HDSpaceProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes - + """HDSpace Torrent provider""" def __init__(self): + # Provider Init TorrentProvider.__init__(self, 'HDSpace') + # Credentials self.username = None self.password = None - self.minseed = None - self.minleech = None - self.cache = tvcache.TVCache(self, min_time=10) # only poll HDSpace every 10 minutes max + # URLs + self.url = 'https://hd-space.org' + self.urls = { + 'base_url': self.url, + 'login': 'https://hd-space.org/index.php', + 'search': 'https://hd-space.org/index.php?page=torrents&search=%s&active=1&options=0', + 'rss': 'https://hd-space.org/rss_torrents.php?feed=dl', + } - self.urls = {'base_url': 'https://hd-space.org/', - 'login': 'https://hd-space.org/index.php?page=login', - 'search': 'https://hd-space.org/index.php?page=torrents&search=%s&active=1&options=0', - 'rss': 'https://hd-space.org/rss_torrents.php?feed=dl'} + # Proper Strings + # Miscellaneous Options self.categories = [15, 21, 22, 24, 25, 40] # HDTV/DOC 1080/720, bluray, remux self.urls['search'] += '&category=' for cat in self.categories: @@ -58,7 +63,12 @@ def __init__(self): self.urls['rss'] += '&cat[]=' + str(cat) self.urls['search'] = self.urls['search'][:-4] # remove extra %%3B - self.url = self.urls['base_url'] + # Torrent Stats + self.minseed = None + self.minleech = None + + # Cache + self.cache = tvcache.TVCache(self, min_time=10) # only poll HDSpace every 10 minutes max def _check_auth(self): @@ -74,8 +84,11 @@ def login(self): if 'pass' in dict_from_cookiejar(self.session.cookies): return True - login_params = {'uid': self.username, - 'pwd': self.password} + login_params = { + 'uid': self.username, + 'pwd': self.password, + 'page': 'login', + } response = self.get_url(self.urls['login'], post_data=login_params, returns='text') if not response: @@ -88,25 +101,32 @@ def login(self): return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-branches, too-many-locals, 
too-many-statements + def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches + """ + HDSpace search and parsing + + :param search_string: A dict with mode (key) and the search value (value) + :param age: Not used + :param ep_obj: Not used + :returns: A list of search results (structure) + """ results = [] if not self.login(): return results for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: + if mode != 'RSS': + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) search_url = self.urls['search'] % (quote_plus(search_string.replace('.', ' ')),) else: search_url = self.urls['search'] % '' - if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), - logger.DEBUG) - data = self.get_url(search_url, returns='text') if not data or 'please try later' in data: logger.log('No data returned from provider', logger.DEBUG) @@ -150,8 +170,8 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1})'.format + logger.log("Discarding torrent because it doesn't meet the " + "minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue @@ -165,7 +185,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/hdtorrents.py b/sickbeard/providers/hdtorrents.py index ed68281739..904037f2a7 100644 --- a/sickbeard/providers/hdtorrents.py +++ b/sickbeard/providers/hdtorrents.py @@ -33,26 +33,34 @@ class HDTorrentsProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes - + """HDTorrents Torrent provider""" def __init__(self): + # Provider Init TorrentProvider.__init__(self, 'HDTorrents') + # Credentials self.username = None self.password = None - self.minseed = None - self.minleech = None - self.freeleech = None - + # URLs self.url = 'https://hd-torrents.org/' self.urls = { 'login': urljoin(self.url, 'login.php'), 'search': urljoin(self.url, 'torrents.php'), } + # Proper Strings self.proper_strings = ['PROPER', 'REPACK', 'REAL'] + # Miscellaneous Options + self.freeleech = None + + # Torrent Stats + self.minseed = None + self.minleech = None + + # Cache self.cache = tvcache.TVCache(self, min_time=30) def _check_auth(self): @@ -84,7 +92,15 @@ def login(self): return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches, too-many-statements + def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches + """ + HDTorrents search and parsing + + :param search_string: A dict with mode (key) and the search value (value) + :param age: Not used + :param ep_obj: Not used + :returns: A list of search results (structure) + """ results = [] if not self.login(): return results @@ -99,20 +115,19 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'category[2]': 30, 'category[3]': 38, 'category[4]': 65, + 'active': 5 if self.freeleech else None, } for mode in 
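# Worked example of the search-URL quoting used above: dots become spaces,
# then quote_plus encodes the string for the query. The import path is an
# assumption; the hunk itself only shows the quote_plus() call.
from requests.compat import quote_plus

print(quote_plus('Show.Name.S01E01'.replace('.', ' ')))  # Show+Name+S01E01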
search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': search_params['search'] = search_string - logger.log('Search string: {0}'.format(search_string), logger.DEBUG) - - if self.freeleech: - search_params['active'] = 5 + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) response = self.get_url(self.urls['search'], params=search_params, returns='response') if not response or not response.text: @@ -160,8 +175,8 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1}'.format + logger.log("Discarding torrent because it doesn't meet the " + "minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue @@ -175,7 +190,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/hounddawgs.py b/sickbeard/providers/hounddawgs.py index b4f91c7e62..5ae3c86730 100644 --- a/sickbeard/providers/hounddawgs.py +++ b/sickbeard/providers/hounddawgs.py @@ -21,6 +21,7 @@ import re import traceback +from requests.compat import urljoin from requests.utils import dict_from_cookiejar from sickbeard import logger, tvcache @@ -31,42 +32,35 @@ class HoundDawgsProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes - + """HoundDawgs Torrent provider""" def __init__(self): + # Provider Init TorrentProvider.__init__(self, 'HoundDawgs') + # Credentials self.username = None self.password = None - self.minseed = None - self.minleech = None - self.freeleech = None - self.ranked = None + # URLs + self.url = 'https://hounddawgs.org' self.urls = { - 'base_url': 'https://hounddawgs.org/', - 'search': 'https://hounddawgs.org/torrents.php', - 'login': 'https://hounddawgs.org/login.php' + 'base_url': self.url, + 'search': urljoin(self.url, 'torrents.php'), + 'login': urljoin(self.url, 'login.php'), } - self.url = self.urls['base_url'] + # Proper Strings - self.search_params = { - 'filter_cat[85]': 1, - 'filter_cat[58]': 1, - 'filter_cat[57]': 1, - 'filter_cat[74]': 1, - 'filter_cat[92]': 1, - 'filter_cat[93]': 1, - 'order_by': 's3', - 'order_way': 'desc', - 'type': '', - 'userid': '', - 'searchstr': '', - 'searchimdb': '', - 'searchtags': '' - } + # Miscellaneous Options + self.freeleech = None + self.ranked = None + # Torrent Stats + self.minseed = None + self.minleech = None + + # Cache self.cache = tvcache.TVCache(self) def login(self): @@ -94,26 +88,49 @@ def login(self): return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals,too-many-branches,too-many-statements + def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches + """ + HoundDawgs search and parsing + + :param search_string: A dict with mode (key) and the search value (value) + :param age: Not used + :param ep_obj: Not used + :returns: A list of search results (structure) + """ results = [] if not self.login(): return results + # Search Params + search_params = { + 'filter_cat[85]': 1, + 'filter_cat[58]': 1, + 
'filter_cat[57]': 1, + 'filter_cat[74]': 1, + 'filter_cat[92]': 1, + 'filter_cat[93]': 1, + 'order_by': 's3', + 'order_way': 'desc', + 'type': '', + 'userid': '', + 'searchstr': '', + 'searchimdb': '', + 'searchtags': '' + } for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), - logger.DEBUG) - - self.search_params['searchstr'] = search_string + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) - data = self.get_url(self.urls['search'], params=self.search_params, returns='text') + search_params['searchstr'] = search_string + data = self.get_url(self.urls['search'], params=search_params, returns='text') if not data: - logger.log('URL did not return data', logger.DEBUG) + logger.log('No data returned from provider', logger.DEBUG) continue str_table_start = "
', '', data, 0) with BS4Parser(data, 'html5lib') as html: - if not html: - logger.log('No data returned from provider', logger.DEBUG) - continue - - if html.find(text='No Torrents Found!'): - logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) - continue - torrent_table = html.find('table', attrs={'class': 'torrents'}) torrents = torrent_table('tr') if torrent_table else [] - # Continue only if one release is found - if len(torrents) < 2: + # Continue only if at least one release is found + if len(torrents) < 2 or html.find(text='No Torrents Found!'): logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue + # Skip column headers for result in torrents[1:]: try: title = result('td')[1].find('a').text @@ -144,8 +151,8 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1}'.format + logger.log("Discarding torrent because it doesn't meet the " + "minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue @@ -159,7 +166,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/kat.py b/sickbeard/providers/kat.py index 118fcf55fd..c9d6d3d704 100644 --- a/sickbeard/providers/kat.py +++ b/sickbeard/providers/kat.py @@ -32,22 +32,32 @@ class KatProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes - + """KAT Torrent provider""" def __init__(self): + # Provider Init TorrentProvider.__init__(self, 'KickAssTorrents') + # Credentials self.public = True + # URLs + self.url = 'https://kat.cr' + self.urls = { + 'search': urljoin(self.url, '%s/'), + } + self.custom_url = None + + # Proper Strings + + # Miscellaneous Options self.confirmed = True + + # Torrent Stats self.minseed = None self.minleech = None - self.url = 'https://kat.cr' - self.urls = {'search': urljoin(self.url, '%s/')} - - self.custom_url = None - + # Cache self.cache = tvcache.TVCache(self, search_params={'RSS': ['tv', 'anime']}) def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-branches, too-many-locals, too-many-statements @@ -64,7 +74,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: @@ -135,7 +145,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': info_hash + 'hash': info_hash, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/limetorrents.py b/sickbeard/providers/limetorrents.py index 8e8e1dd798..1174464701 100644 --- a/sickbeard/providers/limetorrents.py +++ b/sickbeard/providers/limetorrents.py @@ -37,13 +37,15 @@ class LimeTorrentsProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes - """Search provider LimeTorrents.""" - + """LimeTorrents Torrent provider""" def __init__(self): # Provider Inits 
TorrentProvider.__init__(self, 'LimeTorrents')
 
+        # Credentials
+        self.public = True
+
         # URLs
         self.url = 'https://www.limetorrents.cc/'
         self.urls = {
@@ -53,34 +55,32 @@ def __init__(self):
             'rss': urljoin(self.url, '/browse-torrents/TV-shows/date/{page}/')
         }
 
-        # Credentials
-        self.public = True
+        # Proper Strings
+        self.proper_strings = ['PROPER', 'REPACK', 'REAL']
+
+        # Miscellaneous Options
         self.confirmed = False
 
         # Torrent Stats
         self.minseed = None
         self.minleech = None
 
-        # Proper Strings
-        self.proper_strings = ['PROPER', 'REPACK', 'REAL']
-
         # Cache
         self.cache = tvcache.TVCache(self, min_time=10)
 
     def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-many-branches,too-many-locals
         """
-        Search the provider for results.
-
-        :param search_strings: Search to perform
-        :param age: Not used for this provider
-        :param ep_obj: Not used for this provider
+        LimeTorrents search and parsing
-        :return: A list of items found
+        :param search_strings: A dict with mode (key) and the search value (value)
+        :param age: Not used
+        :param ep_obj: Not used
+        :returns: A list of search results (structure)
         """
         results = []
 
         for mode in search_strings:
-            logger.log('Search Mode: {0}'.format(mode), logger.DEBUG)
+            logger.log('Search mode: {0}'.format(mode), logger.DEBUG)
 
             for search_string in search_strings[mode]:
                 if mode == 'RSS':
@@ -168,7 +168,7 @@ def parse(self, data, mode):
                     'seeders': seeders,
                     'leechers': leechers,
                     'pubdate': None,
-                    'hash': torrent_hash
+                    'hash': torrent_hash,
                 }
                 if mode != 'RSS':
                     logger.log('Found result: {0} with {1} seeders and {2} leechers'.format
diff --git a/sickbeard/providers/morethantv.py b/sickbeard/providers/morethantv.py
index bdab9316c4..a6fdf39413 100644
--- a/sickbeard/providers/morethantv.py
+++ b/sickbeard/providers/morethantv.py
@@ -33,7 +33,7 @@
 
 class MoreThanTVProvider(TorrentProvider):  # pylint: disable=too-many-instance-attributes
-
+    """MoreThanTV Torrent provider"""
     def __init__(self):
 
         # Provider Init
@@ -45,11 +45,6 @@ def __init__(self):
         self._uid = None
         self._hash = None
 
-        # Torrent Stats
-        self.minseed = None
-        self.minleech = None
-        self.freeleech = None
-
         # URLs
         self.url = 'https://www.morethan.tv/'
         self.urls = {
@@ -60,6 +55,13 @@ def __init__(self):
         # Proper Strings
         self.proper_strings = ['PROPER', 'REPACK']
 
+        # Miscellaneous Options
+        self.freeleech = None
+
+        # Torrent Stats
+        self.minseed = None
+        self.minleech = None
+
         # Cache
         self.cache = tvcache.TVCache(self)
 
@@ -94,6 +96,14 @@ def login(self):
         return True
 
     def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-many-locals, too-many-branches
+        """
+        MoreThanTV search and parsing
+
+        :param search_strings: A dict with mode (key) and the search value (value)
+        :param age: Not used
+        :param ep_obj: Not used
+        :returns: A list of search results (structure)
+        """
         results = []
         if not self.login():
             return results
@@ -105,7 +115,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
             'order_way': 'desc',
             'action': 'basic',
             'searchsubmit': 1,
-            'searchstr': ''
+            'searchstr': '',
         }
 
         # Units
 
         def process_column_header(td):
 
         for mode in search_strings:
             items = []
-            logger.log('Search Mode: {0}'.format(mode), logger.DEBUG)
+            logger.log('Search mode: {0}'.format(mode), logger.DEBUG)
 
             for search_string in search_strings[mode]:
 
                 if mode != 'RSS':
-                    logger.log('Search string: {0}'.format(search_string),
-                               logger.DEBUG)
+                    logger.log('Search string: {search}'.format
+                               (search=search_string), logger.DEBUG)
 
                 search_params['searchstr'] =
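# These commits standardize URL building on requests.compat.urljoin instead of
# string concatenation. A quick look at its behavior with the slash
# combinations that appear in the patch:
from requests.compat import urljoin

base = 'https://www.morethan.tv/'
print(urljoin(base, 'torrents.php'))    # https://www.morethan.tv/torrents.php
print(urljoin(base, '/torrents.php'))   # https://www.morethan.tv/torrents.php
print(urljoin('https://hounddawgs.org', 'login.php'))  # https://hounddawgs.org/login.php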
search_string @@ -140,7 +150,7 @@ def process_column_header(td): torrent_table = html.find('table', class_='torrent_table') torrent_rows = torrent_table('tr') if torrent_table else [] - # Continue only if at least one Release is found + # Continue only if at least one release is found if len(torrent_rows) < 2: logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue @@ -149,6 +159,10 @@ def process_column_header(td): # Skip column headers for result in torrent_rows[1:]: + cells = result('td') + if len(cells) < len(labels): + continue + try: # skip if torrent has been nuked due to poor quality if result.find('img', alt='Nuked'): @@ -159,15 +173,14 @@ def process_column_header(td): if not all([title, download_url]): continue - cells = result('td') seeders = try_int(cells[labels.index('Seeders')].get_text(strip=True)) leechers = try_int(cells[labels.index('Leechers')].get_text(strip=True)) # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1})".format + logger.log("Discarding torrent because it doesn't meet the " + "minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue @@ -181,7 +194,7 @@ def process_column_header(td): 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/newpct.py b/sickbeard/providers/newpct.py index dba63d9ace..b41b37a38e 100644 --- a/sickbeard/providers/newpct.py +++ b/sickbeard/providers/newpct.py @@ -33,45 +33,58 @@ class newpctProvider(TorrentProvider): - + """Newpct Torrent provider""" def __init__(self): + # Provider Init TorrentProvider.__init__(self, 'Newpct') - self.onlyspasearch = None + # Credentials + # URLs self.url = 'http://www.newpct.com' - self.urls = {'search': urljoin(self.url, 'index.php')} + self.urls = { + 'search': urljoin(self.url, 'index.php'), + } + + # Proper Strings + # Miscellaneous Options + self.onlyspasearch = None + + # Torrent Stats + + # Cache self.cache = tvcache.TVCache(self, min_time=20) - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals + def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches """ - Search query: - http://www.newpct.com/index.php?l=doSearch&q=fringe&category_=All&idioma_=1&bus_de_=All - - q => Show name - category_ = Category 'Shows' (767) - idioma_ = Language Spanish (1) - bus_de_ = Date from (All, hoy) + Newpct search and parsing + :param search_string: A dict with mode (key) and the search value (value) + :param age: Not used + :param ep_obj: Not used + :returns: A list of search results (structure) """ + results = [] # Only search if user conditions are true lang_info = '' if not ep_obj or not ep_obj.show else ep_obj.show.lang + # http://www.newpct.com/index.php?l=doSearch&q=fringe&category_=All&idioma_=1&bus_de_=All + # Search Params search_params = { 'l': 'doSearch', - 'q': '', - 'category_': 'All', - 'idioma_': 1, - 'bus_de_': 'All' + 'q': '', # Show name + 'category_': 'All', # Category 'Shows' (767) + 'idioma_': 1, # Language Spanish (1) + 'bus_de_': 'All' # Date from (All, hoy) } for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) # Only search if user conditions are 
true if self.onlyspasearch and lang_info != 'es' and mode != 'RSS': @@ -81,41 +94,45 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man search_params['bus_de_'] = 'All' if mode != 'RSS' else 'hoy' for search_string in search_strings[mode]: + if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), - logger.DEBUG) + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) search_params['q'] = search_string - data = self.get_url(self.urls['search'], params=search_params, returns='text') if not data: + logger.log('No data returned from provider', logger.DEBUG) continue with BS4Parser(data, 'html5lib') as html: torrent_table = html.find('table', id='categoryTable') torrent_rows = torrent_table('tr') if torrent_table else [] - # Continue only if at least one Release is found + # Continue only if at least one release is found if len(torrent_rows) < 3: # Headers + 1 Torrent + Pagination logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue # 'Fecha', 'Título', 'Tamaño', '' - # Date, Title, Size + # Date, Title, Size labels = [label.get_text(strip=True) for label in torrent_rows[0]('th')] + + # Skip column headers for row in torrent_rows[1:-1]: - try: - cells = row('td') + cells = row('td') + if len(cells) < len(labels): + continue + try: torrent_row = row.find('a') title = self._processTitle(torrent_row.get('title', '')) download_url = torrent_row.get('href', '') if not all([title, download_url]): continue - # Provider does not provide seeders/leechers - seeders = 1 - leechers = 0 + seeders = 1 # Provider does not provide seeders + leechers = 0 # Provider does not provide leechers torrent_size = cells[labels.index('Tamaño')].get_text(strip=True) size = convert_size(torrent_size) or -1 @@ -126,7 +143,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/newznab.py b/sickbeard/providers/newznab.py index 23a05559dd..d4c8e2b482 100644 --- a/sickbeard/providers/newznab.py +++ b/sickbeard/providers/newznab.py @@ -278,7 +278,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if not self._check_auth(): return results - # For providers that don't have no caps, or for which the t=caps is not working. + # For providers that don't have caps, or for which the t=caps is not working. 
if not self.caps and all(provider not in self.url for provider in ['gingadaddy', 'usenet-crawler']): self.get_newznab_categories(just_caps=True) if not self.caps: @@ -313,12 +313,13 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man search_params.pop('ep', '') items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), logger.DEBUG) + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) if search_params['t'] != 'tvsearch': search_params['q'] = search_string @@ -379,7 +380,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/norbits.py b/sickbeard/providers/norbits.py index 8bcedcc710..7b7cb9307d 100644 --- a/sickbeard/providers/norbits.py +++ b/sickbeard/providers/norbits.py @@ -20,6 +20,7 @@ import traceback import json +from requests.compat import urljoin from requests.compat import urlencode from sickbeard import logger, tvcache @@ -34,19 +35,32 @@ class NorbitsProvider(TorrentProvider): # pylint: disable=too-many-instance-att def __init__(self): """ Initialize the class """ + + # Provider Init TorrentProvider.__init__(self, 'Norbits') + # Credentials self.username = None self.passkey = None + + # URLs + self.url = 'https://norbits.net' + self.urls = { + 'search': urljoin(self.url, 'api2.php?action=torrents'), + 'download': urljoin(self.url, 'download.php?'), + } + + # Proper Strings + + # Miscellaneous Options + + # Torrent Stats self.minseed = None self.minleech = None + # Cache self.cache = tvcache.TVCache(self, min_time=20) # only poll Norbits every 15 minutes max - self.url = 'https://norbits.net' - self.urls = {'search': self.url + '/api2.php?action=torrents', - 'download': self.url + '/download.php?'} - def _check_auth(self): if not self.username or not self.passkey: @@ -72,12 +86,13 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: + if mode != 'RSS': - logger.log('Search string: {0}'.format - (search_string), logger.DEBUG) + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) post_data = { 'username': self.username, @@ -100,6 +115,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man logger.log('Resulting JSON from provider is not correct, ' 'not parsing it', logger.ERROR) + # Skip column headers for item in json_items.get('torrents', []): try: title = item.pop('name', '') @@ -113,10 +129,12 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man seeders = try_int(item.pop('seeders', 0)) leechers = try_int(item.pop('leechers', 0)) + # Filter unseeded torrent if seeders < min(self.minseed, 1): - logger.log('Discarding torrent because it does not meet ' - 'the minimum seeders: {0}. Seeders: {1})'.format - (title, seeders), logger.DEBUG) + if mode != 'RSS': + logger.log("Discarding torrent because it doesn't meet the " + "minimum seeders: {0}. 
Seeders: {1}".format + (title, seeders), logger.DEBUG) continue info_hash = item.pop('info_hash', '') @@ -129,7 +147,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': info_hash + 'hash': info_hash, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/nyaatorrents.py b/sickbeard/providers/nyaatorrents.py index f7b8b82725..fe66a4e041 100644 --- a/sickbeard/providers/nyaatorrents.py +++ b/sickbeard/providers/nyaatorrents.py @@ -31,47 +31,65 @@ class NyaaProvider(TorrentProvider): # pylint: disable=too-many-instance-attrib def __init__(self): + # Provider Init TorrentProvider.__init__(self, 'NyaaTorrents') + # Credentials self.public = True - self.supports_absolute_numbering = True - self.anime_only = True + # URLs self.url = 'http://www.nyaa.se' - self.minseed = 0 - self.minleech = 0 - self.confirmed = False + # Miscellaneous Options + self.supports_absolute_numbering = True + self.anime_only = True + self.confirmed = False self.regex = re.compile(r'(\d+) seeder\(s\), (\d+) leecher\(s\), \d+ download\(s\) - (\d+.?\d* [KMGT]iB)(.*)', re.DOTALL) + # Torrent Stats + self.minseed = 0 + self.minleech = 0 + + # Cache self.cache = tvcache.TVCache(self, min_time=20) # only poll NyaaTorrents every 20 minutes max def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches + """ + NyaaTorrents search and parsing + + :param search_string: A dict with mode (key) and the search value (value) + :param age: Not used + :param ep_obj: Not used + :returns: A list of search results (structure) + """ results = [] if self.show and not self.show.is_anime: return results + # Search Params search_params = { 'page': 'rss', 'cats': '1_0', # All anime 'sort': 2, # Sort Descending By Seeders - 'order': 1 + 'order': 1, } for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: + if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), logger.DEBUG) + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) search_params['term'] = search_string - data = self.cache.getRSSFeed(self.url, params=search_params)['entries'] if not data: logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) + continue for curItem in data: try: @@ -90,10 +108,11 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man seeders = try_int(seeders) leechers = try_int(leechers) + # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1}'.format + logger.log("Discarding torrent because it doesn't meet the " + "minimum seeders: {0}. 
Seeders: {1}".format (title, seeders), logger.DEBUG) continue @@ -111,7 +130,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format From 5370ba1460429cf76dceed43d604492519d7ce63 Mon Sep 17 00:00:00 2001 From: labrys Date: Fri, 17 Jun 2016 07:38:30 -0400 Subject: [PATCH 70/85] More standardization --- sickbeard/providers/tntvillage.py | 57 +++++++++++++++++++------------ 1 file changed, 36 insertions(+), 21 deletions(-) diff --git a/sickbeard/providers/tntvillage.py b/sickbeard/providers/tntvillage.py index 96578afe81..3c4efb1fc9 100644 --- a/sickbeard/providers/tntvillage.py +++ b/sickbeard/providers/tntvillage.py @@ -36,32 +36,38 @@ class TNTVillageProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes - + """TNTVillage Torrent provider""" def __init__(self): + # Provider Init TorrentProvider.__init__(self, 'TNTVillage') - self._uid = None - self._hash = None - + # Credentials self.username = None self.password = None + self._uid = None + self._hash = None - self.minseed = None - self.minleech = None - + # URLs self.url = 'http://forum.tntvillage.scambioetico.org/' self.urls = { 'login': urljoin(self.url, 'index.php?act=Login&CODE=01'), 'download': urljoin(self.url, 'index.php?act=Attach&type=post&id={0}'), } + # Proper Strings self.proper_strings = ['PROPER', 'REPACK'] - self.cache = tvcache.TVCache(self, min_time=30) # only poll TNTVillage every 30 minutes max - + # Miscellaneous Options self.subtitle = None + # Torrent Stats + self.minseed = None + self.minleech = None + + # Cache + self.cache = tvcache.TVCache(self, min_time=30) # only poll TNTVillage every 30 minutes max + def _check_auth(self): if not self.username or not self.password: @@ -80,7 +86,7 @@ def login(self): 'UserName': self.username, 'PassWord': self.password, 'CookieDate': 1, - 'submit': 'Connettiti al Forum' + 'submit': 'Connettiti al Forum', } response = self.get_url(self.urls['login'], post_data=login_params, returns='text') @@ -95,7 +101,15 @@ def login(self): return True - def search(self, search_strings, age=0, ep_obj=None): + def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches + """ + TNTVillage search and parsing + + :param search_string: A dict with mode (key) and the search value (value) + :param age: Not used + :param ep_obj: Not used + :returns: A list of search results (structure) + """ results = [] if not self.login(): return results @@ -103,19 +117,19 @@ def search(self, search_strings, age=0, ep_obj=None): search_params = { 'act': 'allreleases', 'filter': '', + 'cat': 29, } for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: - if mode != 'RSS': - search_params['filter'] = search_string - logger.log('Search string: {0}'.format(search_string), logger.DEBUG) - else: - search_params['cat'] = 29 + if mode != 'RSS': + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) + search_params['filter'] = search_string response = self.get_url(self.url, params=search_params, returns='response') if not response or not response.text: @@ -126,11 +140,12 @@ def search(self, search_strings, age=0, ep_obj=None): torrent_table = html.find('table', 
class_='copyright') torrent_rows = torrent_table('tr') if torrent_table else [] - # Continue only if one release is found + # Continue only if at least one release is found if len(torrent_rows) < 3: logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue + # Skip column headers for result in torrent_table('tr')[1:]: try: cells = result('td') @@ -156,8 +171,8 @@ def search(self, search_strings, age=0, ep_obj=None): # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1}'.format + logger.log("Discarding torrent because it doesn't meet the " + "minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue @@ -176,7 +191,7 @@ def search(self, search_strings, age=0, ep_obj=None): 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format From a473e5964307cf37eb9c07cd2df421d30745dcf0 Mon Sep 17 00:00:00 2001 From: medariox Date: Fri, 17 Jun 2016 14:19:22 +0200 Subject: [PATCH 71/85] Bring back eng releases only option --- sickbeard/providers/tntvillage.py | 141 ++++++++++++++++-------------- 1 file changed, 74 insertions(+), 67 deletions(-) diff --git a/sickbeard/providers/tntvillage.py b/sickbeard/providers/tntvillage.py index 3c4efb1fc9..a3569a1399 100644 --- a/sickbeard/providers/tntvillage.py +++ b/sickbeard/providers/tntvillage.py @@ -59,6 +59,7 @@ def __init__(self): self.proper_strings = ['PROPER', 'REPACK'] # Miscellaneous Options + self.engrelease = None self.subtitle = None # Torrent Stats @@ -117,7 +118,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man search_params = { 'act': 'allreleases', 'filter': '', - 'cat': 29, } for mode in search_strings: @@ -126,86 +126,93 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for search_string in search_strings[mode]: + if self.engrelease: + search_params['filter'] = 'eng' + search_string += ' eng' + if mode != 'RSS': logger.log('Search string: {search}'.format (search=search_string), logger.DEBUG) search_params['filter'] = search_string + else: + search_params['cat'] = 29 - response = self.get_url(self.url, params=search_params, returns='response') - if not response or not response.text: - logger.log('No data returned from provider', logger.DEBUG) - continue + response = self.get_url(self.url, params=search_params, returns='response') + if not response or not response.text: + logger.log('No data returned from provider', logger.DEBUG) + continue - with BS4Parser(response.text, 'html5lib') as html: - torrent_table = html.find('table', class_='copyright') - torrent_rows = torrent_table('tr') if torrent_table else [] + with BS4Parser(response.text, 'html5lib') as html: + torrent_table = html.find('table', class_='copyright') + torrent_rows = torrent_table('tr') if torrent_table else [] - # Continue only if at least one release is found - if len(torrent_rows) < 3: - logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) - continue + # Continue only if at least one release is found + if len(torrent_rows) < 3: + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) + continue # Skip column headers - for result in torrent_table('tr')[1:]: - try: - cells = result('td') - if not cells: - continue - - last_cell_anchor = 
cells[-1].find('a') - if not last_cell_anchor: - continue - params = parse_qs(last_cell_anchor.get('href', '')) - download_url = self.urls['download'].format(params['pid'][0]) if \ - params.get('pid') else None - title = _normalize_title(cells[0], cells[1], mode) - if not all([title, download_url]): - continue - - info_cell = cells[3].find_all('td') - leechers = info_cell[0].find('span').get_text(strip=True) - leechers = try_int(leechers) - seeders = info_cell[1].find('span').get_text() - seeders = try_int(seeders, 1) - - # Filter unseeded torrent - if seeders < min(self.minseed, 1): - if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the " - "minimum seeders: {0}. Seeders: {1}".format - (title, seeders), logger.DEBUG) - continue - - if _has_only_subs(title) and not self.subtitle: - logger.log('Torrent is only subtitled, skipping: {0}'.format - (title), logger.DEBUG) - continue - - torrent_size = info_cell[3].find('span').get_text() + ' GB' - size = convert_size(torrent_size) or -1 - - item = { - 'title': title, - 'link': download_url, - 'size': size, - 'seeders': seeders, - 'leechers': leechers, - 'pubdate': None, - 'hash': None, - } + for result in torrent_table('tr')[1:]: + try: + cells = result('td') + if not cells: + continue + + last_cell_anchor = cells[-1].find('a') + if not last_cell_anchor: + continue + params = parse_qs(last_cell_anchor.get('href', '')) + download_url = self.urls['download'].format(params['pid'][0]) if \ + params.get('pid') else None + title = _normalize_title(cells[0], cells[1], mode) + if not all([title, download_url]): + continue + + info_cell = cells[3].find_all('td') + leechers = info_cell[0].find('span').get_text(strip=True) + leechers = try_int(leechers) + seeders = info_cell[1].find('span').get_text() + seeders = try_int(seeders, 1) + + # Filter unseeded torrent + if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log('Found result: {0} with {1} seeders and {2} leechers'.format - (title, seeders, leechers), logger.DEBUG) + logger.log("Discarding torrent because it doesn't meet the " + "minimum seeders: {0}. Seeders: {1}".format + (title, seeders), logger.DEBUG) + continue - items.append(item) - except (AttributeError, TypeError, KeyError, ValueError, IndexError): - logger.log('Failed parsing provider. Traceback: {0!r}'.format - (traceback.format_exc()), logger.ERROR) + if _has_only_subs(title) and not self.subtitle: + logger.log('Torrent is only subtitled, skipping: {0}'.format + (title), logger.DEBUG) continue - results += items + torrent_size = info_cell[3].find('span').get_text() + ' GB' + size = convert_size(torrent_size) or -1 + + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None, + } + if mode != 'RSS': + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) + + items.append(item) + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. 
Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) + continue - return results + items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) + results += items + + return results def _normalize_title(title, info, mode): From 817faf04e068ff86333363fc025848977a6f1792 Mon Sep 17 00:00:00 2001 From: medariox Date: Fri, 17 Jun 2016 14:24:08 +0200 Subject: [PATCH 72/85] small fixup --- sickbeard/providers/tntvillage.py | 1 - 1 file changed, 1 deletion(-) diff --git a/sickbeard/providers/tntvillage.py b/sickbeard/providers/tntvillage.py index a3569a1399..cff75925cb 100644 --- a/sickbeard/providers/tntvillage.py +++ b/sickbeard/providers/tntvillage.py @@ -209,7 +209,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man (traceback.format_exc()), logger.ERROR) continue - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results From a1d13d35000f56a5fbd0bea751535f33ca8f6091 Mon Sep 17 00:00:00 2001 From: medariox Date: Fri, 17 Jun 2016 14:57:42 +0200 Subject: [PATCH 73/85] Small tnt change --- sickbeard/providers/tntvillage.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sickbeard/providers/tntvillage.py b/sickbeard/providers/tntvillage.py index cff75925cb..1febbbe3ea 100644 --- a/sickbeard/providers/tntvillage.py +++ b/sickbeard/providers/tntvillage.py @@ -118,6 +118,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man search_params = { 'act': 'allreleases', 'filter': '', + 'cat': 29, } for mode in search_strings: @@ -134,8 +135,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man logger.log('Search string: {search}'.format (search=search_string), logger.DEBUG) search_params['filter'] = search_string - else: - search_params['cat'] = 29 + search_params['cat'] = None response = self.get_url(self.url, params=search_params, returns='response') if not response or not response.text: From fc90ee39f5d8db578a0f5f2214b93f48ee6cf164 Mon Sep 17 00:00:00 2001 From: medariox Date: Fri, 17 Jun 2016 16:02:28 +0200 Subject: [PATCH 74/85] Update daily search url --- sickbeard/providers/thepiratebay.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sickbeard/providers/thepiratebay.py b/sickbeard/providers/thepiratebay.py index af7a316b9a..cba63effdb 100644 --- a/sickbeard/providers/thepiratebay.py +++ b/sickbeard/providers/thepiratebay.py @@ -49,7 +49,7 @@ def __init__(self): # URLs self.url = 'https://thepiratebay.se' self.urls = { - 'rss': urljoin(self.url, 'browse/200'), + 'rss': urljoin(self.url, 'tv/latest'), 'search': urljoin(self.url, 's/'), # Needs trailing / } self.custom_url = None From b7619a96325b1000d203ce84563efb798a785425 Mon Sep 17 00:00:00 2001 From: Dario Date: Sat, 18 Jun 2016 11:50:22 +0200 Subject: [PATCH 75/85] Remove freeleech option for MTV --- sickbeard/providers/morethantv.py | 1 - 1 file changed, 1 deletion(-) diff --git a/sickbeard/providers/morethantv.py b/sickbeard/providers/morethantv.py index a6fdf39413..b4a08ba474 100644 --- a/sickbeard/providers/morethantv.py +++ b/sickbeard/providers/morethantv.py @@ -56,7 +56,6 @@ def __init__(self): self.proper_strings = ['PROPER', 'REPACK'] # Miscellaneous Options - self.freeleech = None # Torrent Stats self.minseed = None From 0691ac04ed3f288978508a71e37be0b7d4011a00 Mon Sep 17 00:00:00 2001 From: Dario Date: Sat, 18 Jun 2016 11:55:10 +0200 Subject: [PATCH 76/85] Remove TypeError from connection time out --- sickbeard/search.py | 2 +- 1 file 
changed, 1 insertion(+), 1 deletion(-) diff --git a/sickbeard/search.py b/sickbeard/search.py index a5e9c1f105..1211b8d62f 100644 --- a/sickbeard/search.py +++ b/sickbeard/search.py @@ -548,7 +548,7 @@ def searchProviders(show, episodes, forced_search=False, downCurQuality=False, m except AuthException as e: logger.log(u"Authentication error: " + ex(e), logger.ERROR) break - except (SocketTimeout, TypeError) as e: + except SocketTimeout as e: logger.log(u"Connection timed out (sockets) while searching %s. Error: %r" % (cur_provider.name, ex(e)), logger.DEBUG) break From 81a9d1b863d60b72a39faaa1a857b0d23c9a530f Mon Sep 17 00:00:00 2001 From: Labrys Date: Sat, 18 Jun 2016 09:14:05 -0400 Subject: [PATCH 77/85] FIx repeated keyword in dict --- sickbeard/providers/hdtorrents.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/sickbeard/providers/hdtorrents.py b/sickbeard/providers/hdtorrents.py index 904037f2a7..d7d4070628 100644 --- a/sickbeard/providers/hdtorrents.py +++ b/sickbeard/providers/hdtorrents.py @@ -108,14 +108,13 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Search Params search_params = { 'search': '', - 'active': 1, + 'active': 5 if self.freeleech else 1, 'options': 0, 'category[0]': 59, 'category[1]': 60, 'category[2]': 30, 'category[3]': 38, 'category[4]': 65, - 'active': 5 if self.freeleech else None, } for mode in search_strings: From e431f9cc4f25ea731e2a4ece0fb06ddb4d88767f Mon Sep 17 00:00:00 2001 From: Labrys Date: Fri, 17 Jun 2016 23:12:47 -0400 Subject: [PATCH 78/85] More standardization --- sickbeard/providers/nyaatorrents.py | 1 - sickbeard/providers/omgwtfnzbs.py | 22 +++++--- sickbeard/providers/pretome.py | 76 +++++++++++++++++---------- sickbeard/providers/rarbg.py | 43 ++++++++++----- sickbeard/providers/scc.py | 28 ++++++---- sickbeard/providers/scenetime.py | 71 ++++++++++++++++--------- sickbeard/providers/shazbat.py | 21 +++++--- sickbeard/providers/speedcd.py | 34 ++++++------ sickbeard/providers/t411.py | 59 ++++++++++++++------- sickbeard/providers/thepiratebay.py | 24 +++++---- sickbeard/providers/tntvillage.py | 8 +-- sickbeard/providers/tokyotoshokan.py | 48 ++++++++++++----- sickbeard/providers/torrentbytes.py | 41 +++++++++------ sickbeard/providers/torrentday.py | 27 +++++----- sickbeard/providers/torrentleech.py | 32 +++++++---- sickbeard/providers/torrentproject.py | 23 ++++---- sickbeard/providers/torrentz.py | 27 +++++----- sickbeard/providers/transmitthenet.py | 32 +++++++---- sickbeard/providers/tvchaosuk.py | 52 ++++++++++++------ sickbeard/providers/womble.py | 21 ++++++-- sickbeard/providers/xthor.py | 58 ++++++++++---------- sickbeard/providers/zooqle.py | 21 ++++---- 22 files changed, 488 insertions(+), 281 deletions(-) diff --git a/sickbeard/providers/nyaatorrents.py b/sickbeard/providers/nyaatorrents.py index fe66a4e041..e4c2c650e3 100644 --- a/sickbeard/providers/nyaatorrents.py +++ b/sickbeard/providers/nyaatorrents.py @@ -40,7 +40,6 @@ def __init__(self): # URLs self.url = 'http://www.nyaa.se' - # Miscellaneous Options self.supports_absolute_numbering = True self.anime_only = True diff --git a/sickbeard/providers/omgwtfnzbs.py b/sickbeard/providers/omgwtfnzbs.py index 315efd8bb5..ee34ace9c9 100644 --- a/sickbeard/providers/omgwtfnzbs.py +++ b/sickbeard/providers/omgwtfnzbs.py @@ -20,8 +20,8 @@ import re import traceback -import sickbeard +import sickbeard from sickbeard import logger, tvcache from sickrage.helper.common import convert_size, try_int @@ -31,21 +31,29 @@ class 
OmgwtfnzbsProvider(NZBProvider): def __init__(self): + + # Provider Init NZBProvider.__init__(self, 'OMGWTFNZBs') + # Credentials self.username = None self.api_key = None - self.cache = OmgwtfnzbsCache(self) - + # URLs self.url = 'https://omgwtfnzbs.org/' self.urls = { 'rss': 'https://rss.omgwtfnzbs.org/rss-download.php', - 'api': 'https://api.omgwtfnzbs.org/json/' + 'api': 'https://api.omgwtfnzbs.org/json/', } + # Proper Strings self.proper_strings = ['.PROPER.', '.REPACK.'] + # Miscellaneous Options + + # Cache + self.cache = OmgwtfnzbsCache(self) + def _check_auth(self): if not self.username or not self.api_key: @@ -105,13 +113,13 @@ def search(self, search_strings, age=0, ep_obj=None): for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: search_params['search'] = search_string if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), - logger.DEBUG) + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) data = self.get_url(self.urls['api'], params=search_params, returns='json') if not data: diff --git a/sickbeard/providers/pretome.py b/sickbeard/providers/pretome.py index 6aa5da0664..59af9edf38 100644 --- a/sickbeard/providers/pretome.py +++ b/sickbeard/providers/pretome.py @@ -21,6 +21,7 @@ import re import traceback +from requests.compat import urljoin from requests.compat import quote from requests.utils import dict_from_cookiejar @@ -32,29 +33,38 @@ class PretomeProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes - + """Pretome Torrent provider""" def __init__(self): + # Provider Init TorrentProvider.__init__(self, 'Pretome') + # Credentials self.username = None self.password = None self.pin = None - self.minseed = None - self.minleech = None - self.urls = {'base_url': 'https://pretome.info', - 'login': 'https://pretome.info/takelogin.php', - 'detail': 'https://pretome.info/details.php?id=%s', - 'search': 'https://pretome.info/browse.php?search=%s%s', - 'download': 'https://pretome.info/download.php/%s/%s.torrent'} - - self.url = self.urls['base_url'] + # URLs + self.url = 'https://pretome.info' + self.urls = { + 'base_url': self.url, + 'login': urljoin(self.url, 'takelogin.php'), + 'search': urljoin(self.url, 'browse.php?search=%s%s'), + 'download': urljoin(self.url, 'download.php/%s/%s.torrent'), + 'detail': urljoin(self.url, 'details.php?id=%s'), + } + + # Proper Strings + self.proper_strings = ['PROPER', 'REPACK'] + # Miscellaneous Options self.categories = '&st=1&cat%5B%5D=7' - self.proper_strings = ['PROPER', 'REPACK'] + # Torrent Stats + self.minseed = None + self.minleech = None + # Cache self.cache = tvcache.TVCache(self) def _check_auth(self): @@ -68,9 +78,11 @@ def login(self): if any(dict_from_cookiejar(self.session.cookies).values()): return True - login_params = {'username': self.username, - 'password': self.password, - 'login_pin': self.pin} + login_params = { + 'username': self.username, + 'password': self.password, + 'login_pin': self.pin, + } response = self.get_url(self.urls['login'], post_data=login_params, returns='text') if not response: @@ -83,25 +95,33 @@ def login(self): return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-branches, too-many-statements, too-many-locals + def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches + """ + Pretome search 
and parsing + + :param search_string: A dict with mode (key) and the search value (value) + :param age: Not used + :param ep_obj: Not used + :returns: A list of search results (structure) + """ results = [] if not self.login(): return results for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), - logger.DEBUG) + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) search_url = self.urls['search'] % (quote(search_string), self.categories) - data = self.get_url(search_url, returns='text') if not data: + logger.log('No data returned from provider', logger.DEBUG) continue with BS4Parser(data, 'html5lib') as html: @@ -119,8 +139,8 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man torrent_rows = torrent_table('tr', attrs={'class': 'browse'}) for result in torrent_rows: + cells = result('td') try: - cells = result('td') size = None link = cells[1].find('a', attrs={'style': 'font-size: 1.25em; font-weight: bold;'}) @@ -138,19 +158,19 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man seeders = int(cells[9].contents[0]) leechers = int(cells[10].contents[0]) - # Need size for failed downloads handling - if size is None: - torrent_size = cells[7].text - size = convert_size(torrent_size) or -1 - # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1}'.format + logger.log("Discarding torrent because it doesn't meet the " + "minimum seeders: {0}. 
Seeders: {1}".format (title, seeders), logger.DEBUG) continue + # Need size for failed downloads handling + if size is None: + torrent_size = cells[7].text + size = convert_size(torrent_size) or -1 + item = { 'title': title, 'link': download_url, @@ -158,7 +178,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/rarbg.py b/sickbeard/providers/rarbg.py index 4b29b63eab..fa376ba62c 100644 --- a/sickbeard/providers/rarbg.py +++ b/sickbeard/providers/rarbg.py @@ -33,22 +33,32 @@ class RarbgProvider(TorrentProvider): # pylint: disable=too-many-instance-attri def __init__(self): + # Provider Init TorrentProvider.__init__(self, 'Rarbg') + # Credentials self.public = True - self.minseed = None - self.ranked = None - self.sorting = None - self.minleech = None self.token = None self.token_expires = None - # Spec: https://torrentapi.org/apidocs_v2.txt - self.url = 'https://rarbg.com' - self.urls = {'api': 'http://torrentapi.org/pubapi_v2.php'} + # URLs + self.url = 'https://rarbg.com' # Spec: https://torrentapi.org/apidocs_v2.txt + self.urls = { + 'api': 'http://torrentapi.org/pubapi_v2.php', + } + # Proper Strings self.proper_strings = ['{{PROPER|REPACK}}'] + # Miscellaneous Options + self.ranked = None + self.sorting = None + + # Torrent Stats + self.minseed = None + self.minleech = None + + # Cache self.cache = tvcache.TVCache(self, min_time=10) # only poll RARBG every 10 minutes max def login(self): @@ -58,7 +68,7 @@ def login(self): login_params = { 'get_token': 'get_token', 'format': 'json', - 'app_id': 'sickrage2' + 'app_id': 'sickrage2', } response = self.get_url(self.urls['api'], params=login_params, returns='json') @@ -71,10 +81,19 @@ def login(self): return self.token is not None def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-branches, too-many-locals, too-many-statements + """ + RARBG search and parsing + + :param search_string: A dict with mode (key) and the search value (value) + :param age: Not used + :param ep_obj: Not used + :returns: A list of search results (structure) + """ results = [] if not self.login(): return results + # Search Params search_params = { 'app_id': 'sickrage2', 'category': 'tv', @@ -95,7 +114,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) if mode == 'RSS': search_params['sort'] = 'last' @@ -112,6 +131,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man search_params.pop('search_tvdb', None) for search_string in search_strings[mode]: + if mode != 'RSS': search_params['search_string'] = search_string logger.log('Search string: {0}'.format(search_string), @@ -159,6 +179,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man seeders = item.pop('seeders') leechers = item.pop('leechers') + # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': logger.log("Discarding torrent because it doesn't meet the" @@ -169,10 +190,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man torrent_size = item.pop('size', -1) size = convert_size(torrent_size) or -1 - if mode != 'RSS': - 
logger.log('Found result: {0} with {1} seeders and {2} leechers'.format - (title, seeders, leechers), logger.DEBUG) - item = { 'title': title, 'link': download_url, diff --git a/sickbeard/providers/scc.py b/sickbeard/providers/scc.py index 588797f112..4782570a5f 100644 --- a/sickbeard/providers/scc.py +++ b/sickbeard/providers/scc.py @@ -36,15 +36,15 @@ class SCCProvider(TorrentProvider): # pylint: disable=too-many-instance-attribu def __init__(self): + # Provider Init TorrentProvider.__init__(self, 'SceneAccess') + # Credentials self.username = None self.password = None - self.minseed = None - self.minleech = None - - self.cache = tvcache.TVCache(self) # only poll SCC every 20 minutes max + # URLs + self.url = self.urls['base_url'] self.urls = { 'base_url': 'https://sceneaccess.eu', 'login': 'https://sceneaccess.eu/login', @@ -53,14 +53,22 @@ def __init__(self): 'download': 'https://www.sceneaccess.eu/%s' } - self.url = self.urls['base_url'] + # Proper Strings + # Miscellaneous Options self.categories = { 'Season': 'c26=26&c44=44&c45=45', # Archive, non-scene HD, non-scene SD; need to include non-scene because WEB-DL packs get added to those categories 'Episode': 'c17=17&c27=27&c33=33&c34=34&c44=44&c45=45', # TV HD, TV SD, non-scene HD, non-scene SD, foreign XviD, foreign x264 'RSS': 'c17=17&c26=26&c27=27&c33=33&c34=34&c44=44&c45=45' # Season + Episode } + # Torrent Stats + self.minseed = None + self.minleech = None + + # Cache + self.cache = tvcache.TVCache(self) # only poll SCC every 20 minutes max + def login(self): if any(dict_from_cookiejar(self.session.cookies).values()): return True @@ -68,7 +76,7 @@ def login(self): login_params = { 'username': self.username, 'password': self.password, - 'submit': 'come on in' + 'submit': 'come on in', } response = self.get_url(self.urls['login'], post_data=login_params, returns='text') @@ -95,9 +103,10 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: + if mode != 'RSS': logger.log('Search string: {0}'.format(search_string), logger.DEBUG) @@ -105,6 +114,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man data = self.get_url(search_url, returns='text') if not data: + logger.log('No data returned from provider', logger.DEBUG) continue with BS4Parser(data, 'html5lib') as html: @@ -138,7 +148,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if seeders < min(self.minseed, 1): if mode != 'RSS': logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1}'.format + "minimum seeders: {0}. 
Seeders: {1}".format (title, seeders), logger.DEBUG) continue @@ -152,7 +162,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/scenetime.py b/sickbeard/providers/scenetime.py index 35f99c3cfa..458408e5dd 100644 --- a/sickbeard/providers/scenetime.py +++ b/sickbeard/providers/scenetime.py @@ -21,7 +21,7 @@ import re import traceback -from requests.compat import quote +from requests.compat import urljoin, quote from requests.utils import dict_from_cookiejar from sickbeard import logger, tvcache @@ -32,34 +32,46 @@ class SceneTimeProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes - + """SceneTime Torrent provider""" def __init__(self): + # Provider Init TorrentProvider.__init__(self, 'SceneTime') + # Credentials self.username = None self.password = None - self.minseed = None - self.minleech = None - - self.cache = tvcache.TVCache(self) # only poll SceneTime every 20 minutes max - self.urls = {'base_url': 'https://www.scenetime.com', - 'login': 'https://www.scenetime.com/takelogin.php', - 'detail': 'https://www.scenetime.com/details.php?id=%s', - 'search': 'https://www.scenetime.com/browse.php?search=%s%s', - 'download': 'https://www.scenetime.com/download.php/%s/%s'} + # URLs + self.url = 'https://www.scenetime.com' + self.urls = { + 'base_url': self.url, + 'login': urljoin(self.url, 'takelogin.php'), + 'detail': urljoin(self.url, 'details.php?id=%s'), + 'search': urljoin(self.url, 'browse.php?search=%s%s'), + 'download': urljoin(self.url, 'download.php/%s/%s'), + } - self.url = self.urls['base_url'] + # Proper Strings + # Miscellaneous Options self.categories = '&c2=1&c43=13&c9=1&c63=1&c77=1&c79=1&c100=1&c101=1' + # Torrent Stats + self.minseed = None + self.minleech = None + + # Cache + self.cache = tvcache.TVCache(self) # only poll SceneTime every 20 minutes max + def login(self): if any(dict_from_cookiejar(self.session.cookies).values()): return True - login_params = {'username': self.username, - 'password': self.password} + login_params = { + 'username': self.username, + 'password': self.password, + } response = self.get_url(self.urls['login'], post_data=login_params, returns='text') if not response: @@ -72,25 +84,34 @@ def login(self): return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-branches, too-many-locals + def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches + """ + SceneTime search and parsing + + :param search_string: A dict with mode (key) and the search value (value) + :param age: Not used + :param ep_obj: Not used + :returns: A list of search results (structure) + """ results = [] if not self.login(): return results for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), - logger.DEBUG) + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) search_url = self.urls['search'] % (quote(search_string), self.categories) data = self.get_url(search_url, returns='text') if not data: + logger.log('No data returned from provider', logger.DEBUG) continue with BS4Parser(data, 
'html5lib') as html: @@ -99,7 +120,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if torrent_table: torrent_rows = torrent_table.select('tr') - # Continue only if one Release is found + # Continue only if at least one release is found if len(torrent_rows) < 2: logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue @@ -109,13 +130,15 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # and using their index to find the correct download/seeders/leechers td. labels = [label.get_text(strip=True) for label in torrent_rows[0]('td')] + # Skip column headers for result in torrent_rows[1:]: - try: - cells = result('td') + cells = result('td') + if len(cells) < len(labels): + continue + try: link = cells[labels.index('Name')].find('a') torrent_id = link['href'].replace('details.php?id=', '').split('&')[0] - title = link.get_text(strip=True) download_url = self.urls['download'] % (torrent_id, '%s.torrent' % title.replace(' ', '.')) if not all([title, download_url]): @@ -128,7 +151,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if seeders < min(self.minseed, 1): if mode != 'RSS': logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1}'.format + "minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue @@ -142,7 +165,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/shazbat.py b/sickbeard/providers/shazbat.py index 1b12c9da3d..5303da6b19 100644 --- a/sickbeard/providers/shazbat.py +++ b/sickbeard/providers/shazbat.py @@ -27,18 +27,16 @@ class ShazbatProvider(TorrentProvider): - + """Shazbat Torrent provider""" def __init__(self): + # Provider Init TorrentProvider.__init__(self, 'Shazbat.tv') - self.supports_backlog = False - + # Credentials self.passkey = None - self.options = None - - self.cache = ShazbatCache(self, min_time=20) + # URLs self.url = 'http://www.shazbat.tv' self.urls = { 'login': urljoin(self.url, 'login'), @@ -47,6 +45,17 @@ def __init__(self): # 'rss_followed': urljoin(self.url, 'rss/followed') } + # Proper Strings + + # Miscellaneous Options + self.supports_backlog = False + self.options = None + + # Torrent Stats + + # Cache + self.cache = ShazbatCache(self, min_time=20) + def _check_auth(self): if not self.passkey: raise AuthException('Your authentication credentials are missing, check your config.') diff --git a/sickbeard/providers/speedcd.py b/sickbeard/providers/speedcd.py index 9539696276..19ca6efe65 100644 --- a/sickbeard/providers/speedcd.py +++ b/sickbeard/providers/speedcd.py @@ -32,7 +32,7 @@ class SpeedCDProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes - + """SpeedCD Torrent provider""" def __init__(self): # Provider Init @@ -42,11 +42,6 @@ def __init__(self): self.username = None self.password = None - # Torrent Stats - self.minseed = None - self.minleech = None - self.freeleech = False - # URLs self.url = 'https://speed.cd' self.urls = { @@ -57,6 +52,13 @@ def __init__(self): # Proper Strings self.proper_strings = ['PROPER', 'REPACK'] + # Miscellaneous Options + self.freeleech = False + + # Torrent Stats + self.minseed = None + self.minleech = None + # Cache self.cache = 
tvcache.TVCache(self) @@ -116,18 +118,18 @@ def process_column_header(td): for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), - logger.DEBUG) + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) search_params['search'] = search_string - data = self.get_url(self.urls['search'], params=search_params, returns='text') if not data: + logger.log('No data returned from provider', logger.DEBUG) continue with BS4Parser(data, 'html5lib') as html: @@ -135,7 +137,7 @@ def process_column_header(td): torrent_table = torrent_table.find('table') if torrent_table else None torrent_rows = torrent_table('tr') if torrent_table else [] - # Continue only if at least one Release is found + # Continue only if at least one release is found if len(torrent_rows) < 2: logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue @@ -144,9 +146,11 @@ def process_column_header(td): # Skip column headers for result in torrent_rows[1:]: - try: - cells = result('td') + cells = result('td') + if len(cells) < len(labels): + continue + try: title = cells[labels.index('Title')].find('a', class_='torrent').get_text() download_url = urljoin(self.url, cells[labels.index('Download')].find(title='Download').parent['href']) if not all([title, download_url]): @@ -159,7 +163,7 @@ def process_column_header(td): if seeders < min(self.minseed, 1): if mode != 'RSS': logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1}'.format + "minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue @@ -174,7 +178,7 @@ def process_column_header(td): 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/t411.py b/sickbeard/providers/t411.py index 767d57c107..3a759893f7 100644 --- a/sickbeard/providers/t411.py +++ b/sickbeard/providers/t411.py @@ -21,6 +21,7 @@ import time import traceback +from requests.compat import urljoin from requests.auth import AuthBase from sickbeard import logger, tvcache @@ -32,42 +33,51 @@ class T411Provider(TorrentProvider): # pylint: disable=too-many-instance-attributes - + """T411 Torrent provider""" def __init__(self): + # Provider Init TorrentProvider.__init__(self, "T411") + # Credentials self.username = None self.password = None self.token = None self.tokenLastUpdate = None - self.cache = tvcache.TVCache(self, min_time=10) # Only poll T411 every 10 minutes max - - self.urls = {'base_url': 'http://www.t411.ch/', - 'search': 'https://api.t411.ch/torrents/search/%s*?cid=%s&limit=100', - 'rss': 'https://api.t411.ch/torrents/top/today', - 'login_page': 'https://api.t411.ch/auth', - 'download': 'https://api.t411.ch/torrents/download/%s'} + # URLs + self.url = 'https://api.t411.ch' + self.urls = { + 'base_url': 'http://www.t411.ch/', + 'search': urljoin(self.url, 'torrents/search/%s*?cid=%s&limit=100'), + 'rss': urljoin(self.url, 'torrents/top/today'), + 'login_page': urljoin(self.url, 'auth'), + 'download': urljoin(self.url, 'torrents/download/%s'), + } - self.url = self.urls['base_url'] + # Proper Strings + # Miscellaneous Options self.headers.update({'User-Agent': USER_AGENT}) - self.subcategories = [433, 
637, 455, 639] + self.confirmed = False + # Torrent Stats self.minseed = 0 self.minleech = 0 - self.confirmed = False - def login(self): + # Cache + self.cache = tvcache.TVCache(self, min_time=10) # Only poll T411 every 10 minutes max + def login(self): if self.token is not None: if time.time() < (self.tokenLastUpdate + 30 * 60): return True - login_params = {'username': self.username, - 'password': self.password} + login_params = { + 'username': self.username, + 'password': self.password, + } response = self.get_url(self.urls['login_page'], post_data=login_params, returns='json') if not response: @@ -84,25 +94,34 @@ def login(self): logger.log('Token not found in authentication response', logger.WARNING) return False - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-branches, too-many-locals, too-many-statements + def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches + """ + T411 search and parsing + + :param search_string: A dict with mode (key) and the search value (value) + :param age: Not used + :param ep_obj: Not used + :returns: A list of search results (structure) + """ results = [] if not self.login(): return results for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), - logger.DEBUG) + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) search_urls = ([self.urls['search'] % (search_string, u) for u in self.subcategories], [self.urls['rss']])[mode == 'RSS'] for search_url in search_urls: data = self.get_url(search_url, returns='json') if not data: + logger.log('No data returned from provider', logger.DEBUG) continue if 'torrents' not in data and mode != 'RSS': @@ -134,7 +153,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if seeders < min(self.minseed, 1): if mode != 'RSS': logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1}'.format + "minimum seeders: {0}. 
Seeders: {1}".format (title, seeders), logger.DEBUG) continue @@ -153,7 +172,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/thepiratebay.py b/sickbeard/providers/thepiratebay.py index cba63effdb..eefdce7eef 100644 --- a/sickbeard/providers/thepiratebay.py +++ b/sickbeard/providers/thepiratebay.py @@ -32,7 +32,7 @@ class ThePirateBayProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes - + """ThePirateBay Torrent provider""" def __init__(self): # Provider Init @@ -41,11 +41,6 @@ def __init__(self): # Credentials self.public = True - # Torrent Stats - self.minseed = None - self.minleech = None - self.confirmed = True - # URLs self.url = 'https://thepiratebay.se' self.urls = { @@ -56,6 +51,13 @@ def __init__(self): # Proper Strings + # Miscellaneous Options + self.confirmed = True + + # Torrent Stats + self.minseed = None + self.minleech = None + # Cache self.cache = tvcache.TVCache(self, min_time=1) # only poll ThePirateBay every 30 minutes max @@ -86,7 +88,7 @@ def process_column_header(th): for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: @@ -107,14 +109,14 @@ def process_column_header(th): data = self.get_url(search_url, returns='text') if not data: - logger.log('URL did not return data, maybe try a custom url, or a different one', logger.DEBUG) + logger.log('No data returned from provider', logger.DEBUG) continue with BS4Parser(data, 'html5lib') as html: torrent_table = html.find('table', id='searchResult') torrent_rows = torrent_table('tr') if torrent_table else [] - # Continue only if at least one Release is found + # Continue only if at least one release is found if len(torrent_rows) < 2: logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue @@ -143,7 +145,7 @@ def process_column_header(th): if seeders < min(self.minseed, 1): if mode != 'RSS': logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1}'.format + "minimum seeders: {0}. 
Seeders: {1}".format (title, seeders), logger.DEBUG) continue @@ -166,7 +168,7 @@ def process_column_header(th): 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/tntvillage.py b/sickbeard/providers/tntvillage.py index 1febbbe3ea..3d3a8d32ac 100644 --- a/sickbeard/providers/tntvillage.py +++ b/sickbeard/providers/tntvillage.py @@ -117,7 +117,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man search_params = { 'act': 'allreleases', - 'filter': '', + 'filter': 'eng ' if self.engrelease else '', 'cat': 29, } @@ -127,14 +127,10 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for search_string in search_strings[mode]: - if self.engrelease: - search_params['filter'] = 'eng' - search_string += ' eng' - if mode != 'RSS': logger.log('Search string: {search}'.format (search=search_string), logger.DEBUG) - search_params['filter'] = search_string + search_params['filter'] += search_string search_params['cat'] = None response = self.get_url(self.url, params=search_params, returns='response') diff --git a/sickbeard/providers/tokyotoshokan.py b/sickbeard/providers/tokyotoshokan.py index c8127ef2c2..6e71464498 100644 --- a/sickbeard/providers/tokyotoshokan.py +++ b/sickbeard/providers/tokyotoshokan.py @@ -21,6 +21,8 @@ import re import traceback +from requests.compat import urljoin + from sickbeard import logger, tvcache from sickbeard.bs4_parser import BS4Parser @@ -29,39 +31,57 @@ class TokyoToshokanProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes - + """TokyoToshokan Torrent provider""" def __init__(self): + # Provider Init TorrentProvider.__init__(self, 'TokyoToshokan') + # Credentials self.public = True + + # URLs + self.url = 'http://tokyotosho.info/' + self.urls = { + 'search': urljoin(self.url, 'search.php'), + 'rss': urljoin(self.url, 'rss.php'), + } + + # Proper Strings + + # Miscellaneous Options self.supports_absolute_numbering = True self.anime_only = True + # Torrent Stats self.minseed = None self.minleech = None - self.url = 'http://tokyotosho.info/' - self.urls = { - 'search': self.url + 'search.php', - 'rss': self.url + 'rss.php' - } + # Cache self.cache = tvcache.TVCache(self, min_time=15) # only poll TokyoToshokan every 15 minutes max - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals + def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches + """ + TokyoToshokan search and parsing + + :param search_string: A dict with mode (key) and the search value (value) + :param age: Not used + :param ep_obj: Not used + :returns: A list of search results (structure) + """ results = [] if self.show and not self.show.is_anime: return results for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), - logger.DEBUG) + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) search_params = { 'terms': search_string, @@ -70,19 +90,21 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man data = self.get_url(self.urls['search'], params=search_params, returns='text') if not data: + 
logger.log('No data returned from provider', logger.DEBUG) continue with BS4Parser(data, 'html5lib') as soup: torrent_table = soup.find('table', class_='listing') torrent_rows = torrent_table('tr') if torrent_table else [] - # Continue only if one Release is found + # Continue only if at least one release is found if len(torrent_rows) < 2: logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue a = 1 if len(torrent_rows[0]('td')) < 2 else 0 + # Skip column headers for top, bot in zip(torrent_rows[a::2], torrent_rows[a + 1::2]): try: desc_top = top.find('td', class_='desc-top') @@ -100,7 +122,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if seeders < min(self.minseed, 1): if mode != 'RSS': logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1}'.format + "minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue @@ -114,7 +136,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/torrentbytes.py b/sickbeard/providers/torrentbytes.py index 7bd09655e8..a5fc036270 100644 --- a/sickbeard/providers/torrentbytes.py +++ b/sickbeard/providers/torrentbytes.py @@ -32,7 +32,7 @@ class TorrentBytesProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes - + """TorrentBytes Torrent provider""" def __init__(self): # Provider Init @@ -42,11 +42,6 @@ def __init__(self): self.username = None self.password = None - # Torrent Stats - self.minseed = None - self.minleech = None - self.freeleech = False - # URLs self.url = 'https://www.torrentbytes.net' self.urls = { @@ -57,6 +52,13 @@ def __init__(self): # Proper Strings self.proper_strings = ['PROPER', 'REPACK'] + # Miscellaneous Options + self.freeleech = False + + # Torrent Stats + self.minseed = None + self.minleech = None + # Cache self.cache = tvcache.TVCache(self) @@ -64,9 +66,11 @@ def login(self): if any(dict_from_cookiejar(self.session.cookies).values()): return True - login_params = {'username': self.username, - 'password': self.password, - 'login': 'Log in!'} + login_params = { + 'username': self.username, + 'password': self.password, + 'login': 'Log in!', + } response = self.get_url(self.urls['login'], post_data=login_params, returns='text') if not response: @@ -84,6 +88,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if not self.login(): return results + # Search Params search_params = { 'c41': 1, 'c33': 1, @@ -94,12 +99,13 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), logger.DEBUG) + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) search_params['search'] = search_string data = self.get_url(self.urls['search'], params=search_params, returns='text') @@ -111,7 +117,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man torrent_table = html.find('table', border='1') torrent_rows = torrent_table('tr') if torrent_table else [] - # Continue 
only if at least one Release is found
+            # Continue only if at least one release is found
             if len(torrent_rows) < 2:
                 logger.log('Data returned from provider does not contain any torrents', logger.DEBUG)
                 continue
@@ -119,10 +125,13 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
             # "Type", "Name", Files", "Comm.", "Added", "TTL", "Size", "Snatched", "Seeders", "Leechers"
             labels = [label.get_text(strip=True) for label in torrent_rows[0]('td')]
 
+            # Skip column headers
             for result in torrent_rows[1:]:
-                try:
-                    cells = result('td')
+                cells = result('td')
+                if len(cells) < len(labels):
+                    continue
 
+                try:
                     download_url = urljoin(self.url, cells[labels.index('Name')].find('a', href=re.compile(r'download.php\?id='))['href'])
                     title_element = cells[labels.index('Name')].find('a', href=re.compile(r'details.php\?id='))
                     title = title_element.get('title', '') or title_element.get_text(strip=True)
@@ -142,7 +151,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                     if seeders < min(self.minseed, 1):
                         if mode != 'RSS':
                             logger.log("Discarding torrent because it doesn't meet the"
-                                       ' minimum seeders: {0}. Seeders: {1}'.format
+                                       " minimum seeders: {0}. Seeders: {1}".format
                                        (title, seeders), logger.DEBUG)
                         continue
 
@@ -156,7 +165,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                         'seeders': seeders,
                         'leechers': leechers,
                         'pubdate': None,
-                        'hash': None
+                        'hash': None,
                     }
                     if mode != 'RSS':
                         logger.log('Found result: {0} with {1} seeders and {2} leechers'.format
diff --git a/sickbeard/providers/torrentday.py b/sickbeard/providers/torrentday.py
index d72742fa56..8aa01ff532 100644
--- a/sickbeard/providers/torrentday.py
+++ b/sickbeard/providers/torrentday.py
@@ -32,7 +32,7 @@
 
 class TorrentDayProvider(TorrentProvider):  # pylint: disable=too-many-instance-attributes
-
+    """TorrentDay Torrent provider."""
 
     def __init__(self):
 
         # Provider Init
@@ -44,11 +44,6 @@ def __init__(self):
         self._uid = None
         self._hash = None
 
-        # Torrent Stats
-        self.minseed = None
-        self.minleech = None
-        self.freeleech = False
-
         # URLs
         self.url = 'https://classic.torrentday.com'
         self.urls = {
@@ -57,10 +52,18 @@ def __init__(self):
             'download': urljoin(self.url, '/download.php/')
         }
 
+        # Proper Strings
+
+        # Miscellaneous Options
+        self.freeleech = False
         self.cookies = None
         self.categories = {'Season': {'c14': 1},
                            'Episode': {'c2': 1, 'c26': 1, 'c7': 1, 'c24': 1},
                            'RSS': {'c2': 1, 'c26': 1, 'c7': 1, 'c24': 1, 'c14': 1}}
 
+        # Torrent Stats
+        self.minseed = None
+        self.minleech = None
+
         # Cache
         self.cache = tvcache.TVCache(self, min_time=10)  # Only poll IPTorrents every 10 minutes max
 
@@ -76,7 +79,7 @@ def login(self):
             'username': self.username,
             'password': self.password,
             'submit.x': 0,
-            'submit.y': 0
+            'submit.y': 0,
         }
 
         response = self.get_url(self.urls['login'], post_data=login_params, returns='text')
@@ -108,13 +111,13 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
 
         for mode in search_strings:
             items = []
-            logger.log('Search Mode: {0}'.format(mode), logger.DEBUG)
+            logger.log('Search mode: {0}'.format(mode), logger.DEBUG)
 
             for search_string in search_strings[mode]:
 
                 if mode != 'RSS':
-                    logger.log('Search string: {0}'.format(search_string),
-                               logger.DEBUG)
+                    logger.log('Search string: {search}'.format
+                               (search=search_string), logger.DEBUG)
 
                 search_string = '+'.join(search_string.split())
 
@@ -156,7 +159,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                     if seeders < min(self.minseed, 1):
                         if mode != 'RSS':
                             logger.log("Discarding torrent because it doesn't meet the"
-                                       ' minimum seeders: {0}. Seeders: {1}'.format
+                                       " minimum seeders: {0}. Seeders: {1}".format
                                        (title, seeders), logger.DEBUG)
                         continue
 
@@ -170,7 +173,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                         'seeders': seeders,
                         'leechers': leechers,
                         'pubdate': None,
-                        'hash': None
+                        'hash': None,
                     }
                     if mode != 'RSS':
                         logger.log('Found result: {0} with {1} seeders and {2} leechers'.format
diff --git a/sickbeard/providers/torrentleech.py b/sickbeard/providers/torrentleech.py
index 2d51fafd4b..d761539208 100644
--- a/sickbeard/providers/torrentleech.py
+++ b/sickbeard/providers/torrentleech.py
@@ -32,7 +32,7 @@
 
 class TorrentLeechProvider(TorrentProvider):  # pylint: disable=too-many-instance-attributes
-
+    """TorrentLeech Torrent provider."""
 
     def __init__(self):
 
         # Provider Init
@@ -42,10 +42,6 @@ def __init__(self):
         self.username = None
         self.password = None
 
-        # Torrent Stats
-        self.minseed = None
-        self.minleech = None
-
         # URLs
         self.url = 'https://torrentleech.org'
         self.urls = {
@@ -56,6 +52,12 @@ def __init__(self):
         # Proper Strings
         self.proper_strings = ['PROPER', 'REPACK']
 
+        # Miscellaneous Options
+
+        # Torrent Stats
+        self.minseed = None
+        self.minleech = None
+
         # Cache
         self.cache = tvcache.TVCache(self)
 
@@ -81,7 +83,15 @@ def login(self):
 
         return True
 
-    def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-many-locals, too-many-branches, too-many-statements
+    def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-many-locals, too-many-branches
+        """
+        TorrentLeech search and parsing
+
+        :param search_strings: A dict with mode (key) and the search value (value)
+        :param age: Not used
+        :param ep_obj: Not used
+        :returns: A list of search results (structure)
+        """
         results = []
         if not self.login():
             return results
@@ -102,13 +112,13 @@ def process_column_header(td):
 
         for mode in search_strings:
             items = []
-            logger.log('Search Mode: {0}'.format(mode), logger.DEBUG)
+            logger.log('Search mode: {0}'.format(mode), logger.DEBUG)
 
             for search_string in search_strings[mode]:
 
                 if mode != 'RSS':
-                    logger.log('Search string: {0}'.format(search_string),
-                               logger.DEBUG)
+                    logger.log('Search string: {search}'.format
+                               (search=search_string), logger.DEBUG)
 
                 categories = ['2', '7', '35']
                 categories += ['26', '32'] if mode == 'Episode' else ['27']
@@ -131,7 +141,7 @@ def process_column_header(td):
                     torrent_table = html.find('table', id='torrenttable')
                     torrent_rows = torrent_table('tr') if torrent_table else []
 
-                    # Continue only if at least one Release is found
+                    # Continue only if at least one release is found
                     if len(torrent_rows) < 2:
                         logger.log('Data returned from provider does not contain any torrents', logger.DEBUG)
                         continue
@@ -167,7 +177,7 @@ def process_column_header(td):
                             'seeders': seeders,
                             'leechers': leechers,
                             'pubdate': None,
-                            'hash': None
+                            'hash': None,
                         }
                         if mode != 'RSS':
                             logger.log('Found result: {0} with {1} seeders and {2} leechers'.format
diff --git a/sickbeard/providers/torrentproject.py b/sickbeard/providers/torrentproject.py
index a7ef07e76b..0f4ef98ed2 100644
--- a/sickbeard/providers/torrentproject.py
+++ b/sickbeard/providers/torrentproject.py
@@ -38,18 +38,19 @@ def __init__(self):
         # Credentials
         self.public = True
 
-        # Torrent Stats
-        self.minseed = None
-        self.minleech = None
-
         # URLs
         self.url = 'https://torrentproject.se/'
-        self.custom_url = None
-        self.headers.update({'User-Agent': USER_AGENT})
 
         # Proper Strings
 
+        # Miscellaneous Options
+        self.headers.update({'User-Agent': USER_AGENT})
+
+        # Torrent Stats
+        self.minseed = None
+        self.minleech = None
+
         # Cache
         self.cache = tvcache.TVCache(self, search_params={'RSS': ['0day']})
 
@@ -65,13 +66,13 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
         for mode in search_strings:  # Mode = RSS, Season, Episode
             items = []
-            logger.log('Search Mode: {0}'.format(mode), logger.DEBUG)
+            logger.log('Search mode: {0}'.format(mode), logger.DEBUG)
 
             for search_string in search_strings[mode]:
 
                 if mode != 'RSS':
-                    logger.log('Search string: {0}'.format(search_string.decode('utf-8')),
-                               logger.DEBUG)
+                    logger.log('Search string: {search}'.format
+                               (search=search_string), logger.DEBUG)
 
                 search_params['s'] = search_string
 
@@ -103,7 +104,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                 if seeders < min(self.minseed, 1):
                     if mode != 'RSS':
                         logger.log("Discarding torrent because it doesn't meet the"
-                                   ' minimum seeders: {0}. Seeders: {1}'.format
+                                   " minimum seeders: {0}. Seeders: {1}".format
                                    (title, seeders), logger.DEBUG)
                     continue
 
@@ -118,7 +119,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                     'seeders': seeders,
                     'leechers': leechers,
                     'pubdate': None,
-                    'hash': torrent_hash
+                    'hash': torrent_hash,
                 }
                 if mode != 'RSS':
                     logger.log('Found result: {0} with {1} seeders and {2} leechers'.format
diff --git a/sickbeard/providers/torrentz.py b/sickbeard/providers/torrentz.py
index 2507674603..f4588d5a7b 100644
--- a/sickbeard/providers/torrentz.py
+++ b/sickbeard/providers/torrentz.py
@@ -30,7 +30,7 @@
 
 class TorrentzProvider(TorrentProvider):  # pylint: disable=too-many-instance-attributes
-
+    """Torrentz Torrent provider."""
 
     def __init__(self):
 
         # Provider Init
@@ -40,10 +40,6 @@ def __init__(self):
         self.public = True
         self.confirmed = True
 
-        # Torrent Stats
-        self.minseed = None
-        self.minleech = None
-
         # URLs
         self.url = 'https://torrentz.eu/'
         self.urls = {
@@ -51,9 +47,15 @@ def __init__(self):
             'feed': 'https://torrentz.eu/feed',
             'base': self.url,
         }
-        self.headers.update({'User-Agent': USER_AGENT})
 
         # Proper Strings
+        self.headers.update({'User-Agent': USER_AGENT})
+
+        # Miscellaneous Options
+
+        # Torrent Stats
+        self.minseed = None
+        self.minleech = None
 
         # Cache
         self.cache = tvcache.TVCache(self, min_time=15)  # only poll Torrentz every 15 minutes max
 
@@ -68,16 +70,17 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
 
         for mode in search_strings:
             items = []
-            logger.log('Search Mode: {0}'.format(mode), logger.DEBUG)
+            logger.log('Search mode: {0}'.format(mode), logger.DEBUG)
+
             for search_string in search_strings[mode]:
                 search_url = self.urls['verified'] if self.confirmed else self.urls['feed']
                 if mode != 'RSS':
-                    logger.log('Search string: {0}'.format
-                               (search_string), logger.DEBUG)
+                    logger.log('Search string: {search}'.format
+                               (search=search_string), logger.DEBUG)
 
                 data = self.get_url(search_url, params={'q': search_string}, returns='text')
                 if not data:
-                    logger.log("No data returned from provider", logger.DEBUG)
+                    logger.log('No data returned from provider', logger.DEBUG)
                     continue
 
                 if not data.startswith("
Date: Sat, 18 Jun 2016 09:06:54 -0400
Subject: [PATCH 79/85] Standardize method names and order

---
 sickbeard/__init__.py                     |   2 +-
 sickbeard/providers/abnormal.py           |  40 +--
 sickbeard/providers/alpharatio.py         |  46 +--
 sickbeard/providers/anizb.py              |   8 +-
 sickbeard/providers/bluetigers.py         |  49 ++-
 sickbeard/providers/btn.py                | 130 ++++----
 sickbeard/providers/danishbits.py         |  50 +---
sickbeard/providers/freshontv.py | 102 +++--- sickbeard/providers/gftracker.py | 60 ++-- sickbeard/providers/hd4free.py | 13 +- sickbeard/providers/hdbits.py | 58 ++-- sickbeard/providers/hdspace.py | 62 ++-- sickbeard/providers/hdtorrents.py | 58 ++-- sickbeard/providers/hounddawgs.py | 50 +-- sickbeard/providers/ilovetorrents.py | 56 ++-- sickbeard/providers/iptorrents.py | 74 ++--- sickbeard/providers/morethantv.py | 60 ++-- sickbeard/providers/newpct.py | 58 ++-- sickbeard/providers/newznab.py | 389 ++++++++++++----------- sickbeard/providers/norbits.py | 38 +-- sickbeard/providers/omgwtfnzbs.py | 90 +++--- sickbeard/providers/pretome.py | 56 ++-- sickbeard/providers/rarbg.py | 38 +-- sickbeard/providers/rsstorrent.py | 52 +-- sickbeard/providers/scc.py | 54 ++-- sickbeard/providers/scenetime.py | 40 +-- sickbeard/providers/shazbat.py | 4 +- sickbeard/providers/speedcd.py | 40 +-- sickbeard/providers/t411.py | 50 +-- sickbeard/providers/tntvillage.py | 152 ++++----- sickbeard/providers/torrentbytes.py | 42 +-- sickbeard/providers/torrentday.py | 74 ++--- sickbeard/providers/torrentleech.py | 44 +-- sickbeard/providers/torrentz.py | 10 +- sickbeard/providers/transmitthenet.py | 60 ++-- sickbeard/providers/tvchaosuk.py | 60 ++-- sickbeard/providers/xthor.py | 42 +-- sickbeard/server/web/config/providers.py | 12 +- 38 files changed, 1161 insertions(+), 1162 deletions(-) diff --git a/sickbeard/__init__.py b/sickbeard/__init__.py index 809da9f5eb..95a6e1e01f 100644 --- a/sickbeard/__init__.py +++ b/sickbeard/__init__.py @@ -2189,7 +2189,7 @@ def save_config(): # pylint: disable=too-many-statements, too-many-branches new_config['Newznab']['newznab_data'] = NEWZNAB_DATA new_config['TorrentRss'] = {} - new_config['TorrentRss']['torrentrss_data'] = '!!!'.join([x.configStr() for x in torrentRssProviderList]) + new_config['TorrentRss']['torrentrss_data'] = '!!!'.join([x.config_string() for x in torrentRssProviderList]) new_config['GUI'] = {} new_config['GUI']['gui_name'] = GUI_NAME diff --git a/sickbeard/providers/abnormal.py b/sickbeard/providers/abnormal.py index aed5692913..19de730f04 100644 --- a/sickbeard/providers/abnormal.py +++ b/sickbeard/providers/abnormal.py @@ -61,26 +61,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self, min_time=30) - def login(self): - if any(dict_from_cookiejar(self.session.cookies).values()): - return True - - login_params = { - 'username': self.username, - 'password': self.password, - } - - response = self.get_url(self.urls['login'], post_data=login_params, returns='text') - if not response: - logger.log('Unable to connect to provider', logger.WARNING) - return False - - if not re.search('torrents.php', response): - logger.log('Invalid username or password. Check your settings', logger.WARNING) - return False - - return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches """ ABNormal search and parsing @@ -193,5 +173,25 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results + def login(self): + if any(dict_from_cookiejar(self.session.cookies).values()): + return True + + login_params = { + 'username': self.username, + 'password': self.password, + } + + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') + if not response: + logger.log('Unable to connect to provider', logger.WARNING) + return False + + if not re.search('torrents.php', response): + logger.log('Invalid username or password. 
Check your settings', logger.WARNING) + return False + + return True + provider = ABNormalProvider() diff --git a/sickbeard/providers/alpharatio.py b/sickbeard/providers/alpharatio.py index 5be2d498fc..0f5934c380 100644 --- a/sickbeard/providers/alpharatio.py +++ b/sickbeard/providers/alpharatio.py @@ -61,29 +61,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self) - def login(self): - if any(dict_from_cookiejar(self.session.cookies).values()): - return True - - login_params = { - 'username': self.username, - 'password': self.password, - 'login': 'submit', - 'remember_me': 'on', - } - - response = self.get_url(self.urls['login'], post_data=login_params, returns='text') - if not response: - logger.log('Unable to connect to provider', logger.WARNING) - return False - - if re.search('Invalid Username/password', response) \ - or re.search('Login :: AlphaRatio.cc', response): - logger.log('Invalid username or password. Check your settings', logger.WARNING) - return False - - return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches """ AlphaRatio search and parsing @@ -197,5 +174,28 @@ def process_column_header(td): return results + def login(self): + if any(dict_from_cookiejar(self.session.cookies).values()): + return True + + login_params = { + 'username': self.username, + 'password': self.password, + 'login': 'submit', + 'remember_me': 'on', + } + + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') + if not response: + logger.log('Unable to connect to provider', logger.WARNING) + return False + + if re.search('Invalid Username/password', response) \ + or re.search('Login :: AlphaRatio.cc', response): + logger.log('Invalid username or password. Check your settings', logger.WARNING) + return False + + return True + provider = AlphaRatioProvider() diff --git a/sickbeard/providers/anizb.py b/sickbeard/providers/anizb.py index a97efe0221..c39d3f6426 100644 --- a/sickbeard/providers/anizb.py +++ b/sickbeard/providers/anizb.py @@ -58,10 +58,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self) - def _get_size(self, item): - """Override the default _get_size to prevent it from extracting using it the default tags""" - return try_int(item.get('size')) - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals """Start searching for anime using the provided search_strings. 
Used for backlog and daily"""
         results = []
@@ -120,5 +116,9 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
 
         return results
 
+    def _get_size(self, item):
+        """Override the default _get_size to prevent it from extracting the size using the default tags."""
+        return try_int(item.get('size'))
+
 
 provider = Anizb()
diff --git a/sickbeard/providers/bluetigers.py b/sickbeard/providers/bluetigers.py
index 8a0808fa83..5c12dcb83d 100644
--- a/sickbeard/providers/bluetigers.py
+++ b/sickbeard/providers/bluetigers.py
@@ -62,31 +62,6 @@ def __init__(self):
         # Cache
         self.cache = tvcache.TVCache(self, min_time=10)  # Only poll BLUETIGERS every 10 minutes max
 
-    def login(self):
-        if any(dict_from_cookiejar(self.session.cookies).values()):
-            return True
-
-        login_params = {
-            'username': self.username,
-            'password': self.password,
-            'take_login': '1'
-        }
-
-        response = self.get_url(self.urls['login'], post_data=login_params, returns='text')
-        if not response:
-            check_login = self.get_url(self.urls['base_url'], returns='text')
-            if re.search('account-logout.php', check_login):
-                return True
-            else:
-                logger.log('Unable to connect to provider', logger.WARNING)
-                return False
-
-        if re.search('account-login.php', response):
-            logger.log('Invalid username or password. Check your settings', logger.WARNING)
-            return False
-
-        return True
-
     def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-many-locals
         """
         BLUETIGERS search and parsing
@@ -180,5 +155,29 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
 
         return results
 
+    def login(self):
+        if any(dict_from_cookiejar(self.session.cookies).values()):
+            return True
+
+        login_params = {
+            'username': self.username,
+            'password': self.password,
+            'take_login': '1'
+        }
+
+        response = self.get_url(self.urls['login'], post_data=login_params, returns='text')
+        if not response:
+            check_login = self.get_url(self.urls['base_url'], returns='text')
+            if re.search('account-logout.php', check_login):
+                return True
+            else:
+                logger.log('Unable to connect to provider', logger.WARNING)
+                return False
+
+        if re.search('account-login.php', response):
+            logger.log('Invalid username or password. Check your settings', logger.WARNING)
+            return False
+
+        return True
 
 provider = BlueTigersProvider()
diff --git a/sickbeard/providers/btn.py b/sickbeard/providers/btn.py
index bdd84a1668..dce5604685 100644
--- a/sickbeard/providers/btn.py
+++ b/sickbeard/providers/btn.py
@@ -62,24 +62,6 @@ def __init__(self):
         # Cache
         self.cache = BTNCache(self, min_time=15)  # Only poll BTN every 15 minutes max
 
-    def _check_auth(self):
-        if not self.api_key:
-            logger.log('Invalid api key.
Check your settings', logger.WARNING) - - return True - - def _checkAuthFromData(self, parsed_json): - - if parsed_json is None: - return self._check_auth() - - if 'api-error' in parsed_json: - logger.log('Incorrect authentication credentials: %s' % parsed_json['api-error'], logger.DEBUG) - raise AuthException('Your authentication credentials for {0} are missing,' - ' check your config.'.format(self.name)) - - return True - def search(self, search_strings, age=0, ep_obj=None): # pylint:disable=too-many-locals """ BTN search and parsing @@ -109,7 +91,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint:disable=too-many logger.log('No data returned from provider', logger.DEBUG) return results - if self._checkAuthFromData(parsed_json): + if self._check_auth_from_data(parsed_json): found_torrents = parsed_json.get('torrents', {}) @@ -143,38 +125,23 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint:disable=too-many # FIXME SORT RESULTS return results - def _api_call(self, apikey, params=None, results_per_page=1000, offset=0): - - server = jsonrpclib.Server(self.urls['base_url']) - parsed_json = {} - - try: - parsed_json = server.getTorrents(apikey, params or {}, int(results_per_page), int(offset)) - time.sleep(cpu_presets[sickbeard.CPU_PRESET]) + def _check_auth(self): + if not self.api_key: + logger.log('Invalid api key. Check your settings', logger.WARNING) - except jsonrpclib.jsonrpc.ProtocolError, error: - if error.message == 'Call Limit Exceeded': - logger.log('You have exceeded the limit of 150 calls per hour,' - ' per API key which is unique to your user account', logger.WARNING) - else: - logger.log('JSON-RPC protocol error while accessing provicer. Error: %s ' % repr(error), logger.ERROR) - parsed_json = {'api-error': ex(error)} - return parsed_json + return True - except socket.timeout: - logger.log('Timeout while accessing provider', logger.WARNING) + def _check_auth_from_data(self, parsed_json): - except socket.error, error: - # Note that sometimes timeouts are thrown as socket errors - logger.log('Socket error while accessing provider. Error: %s ' % error[1], logger.WARNING) + if parsed_json is None: + return self._check_auth() - except Exception, error: - errorstring = str(error) - if errorstring.startswith('<') and errorstring.endswith('>'): - errorstring = errorstring[1:-1] - logger.log('Unknown error while accessing provider. 
Error: %s ' % errorstring, logger.WARNING)
+            if 'api-error' in parsed_json:
+                logger.log('Incorrect authentication credentials: %s' % parsed_json['api-error'], logger.DEBUG)
+                raise AuthException('Your authentication credentials for {0} are missing,'
+                                    ' check your config.'.format(self.name))
 
-        return parsed_json
+        return True
 
     def _get_title_and_url(self, parsed_json):
 
@@ -210,6 +177,26 @@ def _get_title_and_url(self, parsed_json):
 
         return title, url
 
+    def find_propers(self, search_date=None):
+        results = []
+
+        search_terms = ['%.proper.%', '%.repack.%']
+
+        for term in search_terms:
+            for item in self.search({'release': term}, age=4 * 24 * 60 * 60):
+                if item['Time']:
+                    try:
+                        result_date = datetime.fromtimestamp(float(item['Time']))
+                    except TypeError:
+                        result_date = None
+
+                    if result_date:
+                        if not search_date or result_date > search_date:
+                            title, url = self._get_title_and_url(item)
+                            results.append(classes.Proper(title, url, result_date, self.show))
+
+        return results
+
     def _get_season_search_strings(self, ep_obj):
         search_params = []
         current_params = {'category': 'Season'}
@@ -272,30 +259,43 @@ def _get_episode_search_strings(self, ep_obj, add_string=''):
 
         return to_return
 
-    def _doGeneralSearch(self, search_string):
-        # 'search' looks as broad is it can find. Can contain episode overview and title for example,
-        # use with caution!
-        return self.search({'search': search_string})
-
-    def find_propers(self, search_date=None):
-        results = []
+    def _api_call(self, apikey, params=None, results_per_page=1000, offset=0):
 
-        search_terms = ['%.proper.%', '%.repack.%']
+        server = jsonrpclib.Server(self.urls['base_url'])
+        parsed_json = {}
 
-        for term in search_terms:
-            for item in self.search({'release': term}, age=4 * 24 * 60 * 60):
-                if item['Time']:
-                    try:
-                        result_date = datetime.fromtimestamp(float(item['Time']))
-                    except TypeError:
-                        result_date = None
+        try:
+            parsed_json = server.getTorrents(apikey, params or {}, int(results_per_page), int(offset))
+            time.sleep(cpu_presets[sickbeard.CPU_PRESET])
 
-                    if result_date:
-                        if not search_date or result_date > search_date:
-                            title, url = self._get_title_and_url(item)
-                            results.append(classes.Proper(title, url, result_date, self.show))
+        except jsonrpclib.jsonrpc.ProtocolError, error:
+            if error.message == 'Call Limit Exceeded':
+                logger.log('You have exceeded the limit of 150 calls per hour,'
+                           ' per API key which is unique to your user account', logger.WARNING)
+            else:
+                logger.log('JSON-RPC protocol error while accessing provider. Error: %s ' % repr(error), logger.ERROR)
+            parsed_json = {'api-error': ex(error)}
+            return parsed_json
 
-        return results
+        except socket.timeout:
+            logger.log('Timeout while accessing provider', logger.WARNING)
+
+        except socket.error, error:
+            # Note that sometimes timeouts are thrown as socket errors
+            logger.log('Socket error while accessing provider. Error: %s ' % error[1], logger.WARNING)
+
+        except Exception, error:
+            errorstring = str(error)
+            if errorstring.startswith('<') and errorstring.endswith('>'):
+                errorstring = errorstring[1:-1]
+            logger.log('Unknown error while accessing provider. Error: %s ' % errorstring, logger.WARNING)
+
+        return parsed_json
+
+    def _do_general_search(self, search_string):
+        # 'search' looks as broad as it can find. Can contain episode overview and title for example,
+        # use with caution!
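+        # For example (hypothetical query): self._do_general_search('Monte Carlo')
+        # may also match torrents whose episode overview merely mentions the
+        # phrase, not just releases with it in the title, so callers should
+        # post-filter the returned items.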
+ return self.search({'search': search_string}) class BTNCache(tvcache.TVCache): diff --git a/sickbeard/providers/danishbits.py b/sickbeard/providers/danishbits.py index 3c4f834d79..de2f4a62ba 100644 --- a/sickbeard/providers/danishbits.py +++ b/sickbeard/providers/danishbits.py @@ -62,31 +62,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self, min_time=10) # Only poll Danishbits every 10 minutes max - def login(self): - if any(dict_from_cookiejar(self.session.cookies).values()): - return True - - login_params = { - 'username': self.username, - 'password': self.password, - 'keeplogged': 1, - 'langlang': '', - 'login': 'Login', - } - - response = self.get_url(self.urls['login'], post_data=login_params, returns='text') - if not response: - logger.log('Unable to connect to provider', logger.WARNING) - self.session.cookies.clear() - return False - - if 'Login :: Danishbits.org' in response: - logger.log('Invalid username or password. Check your settings', logger.WARNING) - self.session.cookies.clear() - return False - - return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches """ DanishBits search and parsing @@ -200,5 +175,30 @@ def process_column_header(td): return results + def login(self): + if any(dict_from_cookiejar(self.session.cookies).values()): + return True + + login_params = { + 'username': self.username, + 'password': self.password, + 'keeplogged': 1, + 'langlang': '', + 'login': 'Login', + } + + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') + if not response: + logger.log('Unable to connect to provider', logger.WARNING) + self.session.cookies.clear() + return False + + if 'Login :: Danishbits.org' in response: + logger.log('Invalid username or password. Check your settings', logger.WARNING) + self.session.cookies.clear() + return False + + return True + provider = DanishbitsProvider() diff --git a/sickbeard/providers/freshontv.py b/sickbeard/providers/freshontv.py index c09d9b268d..cddf247320 100644 --- a/sickbeard/providers/freshontv.py +++ b/sickbeard/providers/freshontv.py @@ -68,57 +68,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self) - def _check_auth(self): - - if not self.username or not self.password: - logger.log('Invalid username or password. Check your settings', logger.WARNING) - - return True - - def login(self): - if any(dict_from_cookiejar(self.session.cookies).values()): - return True - - login_params = { - 'username': self.username, - 'password': self.password, - 'login': 'submit', - 'action': 'makelogin', - } - - if self._uid and self._hash: - add_dict_to_cookiejar(self.session.cookies, self.cookies) - else: - response = self.get_url(self.urls['login'], post_data=login_params, returns='text') - if not response: - logger.log('Unable to connect to provider', logger.WARNING) - return False - - if re.search('/logout.php', response): - try: - if dict_from_cookiejar(self.session.cookies)['uid'] and \ - dict_from_cookiejar(self.session.cookies)['pass']: - self._uid = dict_from_cookiejar(self.session.cookies)['uid'] - self._hash = dict_from_cookiejar(self.session.cookies)['pass'] - - self.cookies = {'uid': self._uid, - 'pass': self._hash} - return True - except Exception: - logger.log('Unable to login to provider (cookie)', logger.WARNING) - - return False - else: - if re.search('Username does not exist in the userbase or the account is not confirmed yet.', response) or \ - re.search('Username or password is incorrect. 
If you have an account here please use the' - ' recovery system or try again.', response): - logger.log('Invalid username or password. Check your settings', logger.WARNING) - - if re.search('DDoS protection by CloudFlare', response): - logger.log('Unable to login to provider due to CloudFlare DDoS javascript check', logger.WARNING) - - return False - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches, too-many-statements results = [] if not self.login(): @@ -253,5 +202,56 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results + def login(self): + if any(dict_from_cookiejar(self.session.cookies).values()): + return True + + login_params = { + 'username': self.username, + 'password': self.password, + 'login': 'submit', + 'action': 'makelogin', + } + + if self._uid and self._hash: + add_dict_to_cookiejar(self.session.cookies, self.cookies) + else: + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') + if not response: + logger.log('Unable to connect to provider', logger.WARNING) + return False + + if re.search('/logout.php', response): + try: + if dict_from_cookiejar(self.session.cookies)['uid'] and \ + dict_from_cookiejar(self.session.cookies)['pass']: + self._uid = dict_from_cookiejar(self.session.cookies)['uid'] + self._hash = dict_from_cookiejar(self.session.cookies)['pass'] + + self.cookies = {'uid': self._uid, + 'pass': self._hash} + return True + except Exception: + logger.log('Unable to login to provider (cookie)', logger.WARNING) + + return False + else: + if re.search('Username does not exist in the userbase or the account is not confirmed yet.', response) or \ + re.search('Username or password is incorrect. If you have an account here please use the' + ' recovery system or try again.', response): + logger.log('Invalid username or password. Check your settings', logger.WARNING) + + if re.search('DDoS protection by CloudFlare', response): + logger.log('Unable to login to provider due to CloudFlare DDoS javascript check', logger.WARNING) + + return False + + def _check_auth(self): + + if not self.username or not self.password: + logger.log('Invalid username or password. Check your settings', logger.WARNING) + + return True + provider = FreshOnTVProvider() diff --git a/sickbeard/providers/gftracker.py b/sickbeard/providers/gftracker.py index 18dd66e75a..a5d0110aa5 100644 --- a/sickbeard/providers/gftracker.py +++ b/sickbeard/providers/gftracker.py @@ -63,36 +63,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self) - def _check_auth(self): - - if not self.username or not self.password: - raise AuthException('Your authentication credentials for {0} are missing,' - ' check your config.'.format(self.name)) - - return True - - def login(self): - if any(dict_from_cookiejar(self.session.cookies).values()): - return True - - login_params = { - 'username': self.username, - 'password': self.password, - } - - # Initialize session with a GET to have cookies - self.get_url(self.url, returns='text') - response = self.get_url(self.urls['login'], post_data=login_params, returns='text') - if not response: - logger.log('Unable to connect to provider', logger.WARNING) - return False - - if re.search('Username or password incorrect', response): - logger.log('Invalid username or password. 
Check your settings', logger.WARNING) - return False - - return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches """ GFT search and parsing @@ -208,5 +178,35 @@ def process_column_header(td): return results + def login(self): + if any(dict_from_cookiejar(self.session.cookies).values()): + return True + + login_params = { + 'username': self.username, + 'password': self.password, + } + + # Initialize session with a GET to have cookies + self.get_url(self.url, returns='text') + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') + if not response: + logger.log('Unable to connect to provider', logger.WARNING) + return False + + if re.search('Username or password incorrect', response): + logger.log('Invalid username or password. Check your settings', logger.WARNING) + return False + + return True + + def _check_auth(self): + + if not self.username or not self.password: + raise AuthException('Your authentication credentials for {0} are missing,' + ' check your config.'.format(self.name)) + + return True + provider = GFTrackerProvider() diff --git a/sickbeard/providers/hd4free.py b/sickbeard/providers/hd4free.py index 306f924e38..02e0ba2a28 100644 --- a/sickbeard/providers/hd4free.py +++ b/sickbeard/providers/hd4free.py @@ -57,13 +57,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self, min_time=10) # Only poll HD4Free every 10 minutes max - def _check_auth(self): - if self.username and self.api_key: - return True - - logger.log('Your authentication credentials for %s are missing, check your config.' % self.name, logger.WARNING) - return False - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches """ HD4Free search and parsing @@ -165,5 +158,11 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results + def _check_auth(self): + if self.username and self.api_key: + return True + + logger.log('Your authentication credentials for %s are missing, check your config.' % self.name, logger.WARNING) + return False provider = HD4FreeProvider() diff --git a/sickbeard/providers/hdbits.py b/sickbeard/providers/hdbits.py index 4c3ed2e61c..1107992e93 100644 --- a/sickbeard/providers/hdbits.py +++ b/sickbeard/providers/hdbits.py @@ -56,35 +56,6 @@ def __init__(self): # Cache self.cache = HDBitsCache(self, min_time=15) # only poll HDBits every 15 minutes max - def _check_auth(self): - - if not self.username or not self.passkey: - raise AuthException('Your authentication credentials for ' + self.name + ' are missing, check your config.') - - return True - - def _check_auth_from_data(self, parsed_json): - - if 'status' in parsed_json and 'message' in parsed_json: - if parsed_json.get('status') == 5: - logger.log('Invalid username or password. Check your settings', logger.WARNING) - - return True - - def _get_season_search_strings(self, ep_obj): - season_search_string = [self._make_post_data_json(show=ep_obj.show, season=ep_obj)] - return season_search_string - - def _get_episode_search_strings(self, ep_obj, add_string=''): - episode_search_string = [self._make_post_data_json(show=ep_obj.show, episode=ep_obj)] - return episode_search_string - - def _get_title_and_url(self, item): - title = item.get('name', '').replace(' ', '.') - url = self.urls['download'] + '?' 
+ urlencode({'id': item['id'], 'passkey': self.passkey}) - - return title, url - def search(self, search_strings, age=0, ep_obj=None): # FIXME @@ -110,6 +81,27 @@ def search(self, search_strings, age=0, ep_obj=None): # FIXME SORTING return results + def _check_auth(self): + + if not self.username or not self.passkey: + raise AuthException('Your authentication credentials for ' + self.name + ' are missing, check your config.') + + return True + + def _check_auth_from_data(self, parsed_json): + + if 'status' in parsed_json and 'message' in parsed_json: + if parsed_json.get('status') == 5: + logger.log('Invalid username or password. Check your settings', logger.WARNING) + + return True + + def _get_title_and_url(self, item): + title = item.get('name', '').replace(' ', '.') + url = self.urls['download'] + '?' + urlencode({'id': item['id'], 'passkey': self.passkey}) + + return title, url + def find_propers(self, search_date=None): results = [] @@ -130,6 +122,14 @@ def find_propers(self, search_date=None): return results + def _get_season_search_strings(self, ep_obj): + season_search_string = [self._make_post_data_json(show=ep_obj.show, season=ep_obj)] + return season_search_string + + def _get_episode_search_strings(self, ep_obj, add_string=''): + episode_search_string = [self._make_post_data_json(show=ep_obj.show, episode=ep_obj)] + return episode_search_string + def _make_post_data_json(self, show=None, episode=None, season=None, search_term=None): post_data = { diff --git a/sickbeard/providers/hdspace.py b/sickbeard/providers/hdspace.py index d58f71e753..da7b6757b1 100644 --- a/sickbeard/providers/hdspace.py +++ b/sickbeard/providers/hdspace.py @@ -70,37 +70,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self, min_time=10) # only poll HDSpace every 10 minutes max - def _check_auth(self): - - if not self.username or not self.password: - logger.log('Invalid username or password. Check your settings', logger.WARNING) - - return True - - def login(self): - if any(dict_from_cookiejar(self.session.cookies).values()): - return True - - if 'pass' in dict_from_cookiejar(self.session.cookies): - return True - - login_params = { - 'uid': self.username, - 'pwd': self.password, - 'page': 'login', - } - - response = self.get_url(self.urls['login'], post_data=login_params, returns='text') - if not response: - logger.log('Unable to connect to provider', logger.WARNING) - return False - - if re.search('Password Incorrect', response): - logger.log('Invalid username or password. Check your settings', logger.WARNING) - return False - - return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches """ HDSpace search and parsing @@ -201,5 +170,36 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results + def login(self): + if any(dict_from_cookiejar(self.session.cookies).values()): + return True + + if 'pass' in dict_from_cookiejar(self.session.cookies): + return True + + login_params = { + 'uid': self.username, + 'pwd': self.password, + 'page': 'login', + } + + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') + if not response: + logger.log('Unable to connect to provider', logger.WARNING) + return False + + if re.search('Password Incorrect', response): + logger.log('Invalid username or password. 
Check your settings', logger.WARNING) + return False + + return True + + def _check_auth(self): + + if not self.username or not self.password: + logger.log('Invalid username or password. Check your settings', logger.WARNING) + + return True + provider = HDSpaceProvider() diff --git a/sickbeard/providers/hdtorrents.py b/sickbeard/providers/hdtorrents.py index d7d4070628..cefd5b7d10 100644 --- a/sickbeard/providers/hdtorrents.py +++ b/sickbeard/providers/hdtorrents.py @@ -63,35 +63,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self, min_time=30) - def _check_auth(self): - - if not self.username or not self.password: - raise AuthException('Your authentication credentials for {0} are missing,' - ' check your config.'.format(self.name)) - - return True - - def login(self): - if any(dict_from_cookiejar(self.session.cookies).values()): - return True - - login_params = { - 'uid': self.username, - 'pwd': self.password, - 'submit': 'Confirm' - } - - response = self.get_url(self.urls['login'], post_data=login_params, returns='text') - if not response: - logger.log('Unable to connect to provider', logger.WARNING) - return False - - if re.search('You need cookies enabled to log in.', response): - logger.log('Invalid username or password. Check your settings', logger.WARNING) - return False - - return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches """ HDTorrents search and parsing @@ -205,5 +176,34 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results + def login(self): + if any(dict_from_cookiejar(self.session.cookies).values()): + return True + + login_params = { + 'uid': self.username, + 'pwd': self.password, + 'submit': 'Confirm' + } + + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') + if not response: + logger.log('Unable to connect to provider', logger.WARNING) + return False + + if re.search('You need cookies enabled to log in.', response): + logger.log('Invalid username or password. Check your settings', logger.WARNING) + return False + + return True + + def _check_auth(self): + + if not self.username or not self.password: + raise AuthException('Your authentication credentials for {0} are missing,' + ' check your config.'.format(self.name)) + + return True + provider = HDTorrentsProvider() diff --git a/sickbeard/providers/hounddawgs.py b/sickbeard/providers/hounddawgs.py index 5ae3c86730..81a6717734 100644 --- a/sickbeard/providers/hounddawgs.py +++ b/sickbeard/providers/hounddawgs.py @@ -63,31 +63,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self) - def login(self): - if any(dict_from_cookiejar(self.session.cookies).values()): - return True - - login_params = { - 'username': self.username, - 'password': self.password, - 'keeplogged': 'on', - 'login': 'Login' - } - - self.get_url(self.urls['base_url'], returns='text') - response = self.get_url(self.urls['login'], post_data=login_params, returns='text') - if not response: - logger.log('Unable to connect to provider', logger.WARNING) - return False - - if re.search('Dit brugernavn eller kodeord er forkert.', response) \ - or re.search('Login :: HoundDawgs', response) \ - or re.search('Dine cookies er ikke aktiveret.', response): - logger.log('Invalid username or password. 
Check your settings', logger.WARNING) - return False - - return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches """ HoundDawgs search and parsing @@ -209,5 +184,30 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results + def login(self): + if any(dict_from_cookiejar(self.session.cookies).values()): + return True + + login_params = { + 'username': self.username, + 'password': self.password, + 'keeplogged': 'on', + 'login': 'Login' + } + + self.get_url(self.urls['base_url'], returns='text') + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') + if not response: + logger.log('Unable to connect to provider', logger.WARNING) + return False + + if re.search('Dit brugernavn eller kodeord er forkert.', response) \ + or re.search('Login :: HoundDawgs', response) \ + or re.search('Dine cookies er ikke aktiveret.', response): + logger.log('Invalid username or password. Check your settings', logger.WARNING) + return False + + return True + provider = HoundDawgsProvider() diff --git a/sickbeard/providers/ilovetorrents.py b/sickbeard/providers/ilovetorrents.py index 9c681919f2..59f14d5a52 100644 --- a/sickbeard/providers/ilovetorrents.py +++ b/sickbeard/providers/ilovetorrents.py @@ -64,34 +64,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self) - def _check_auth(self): - if not self.username or not self.password: - logger.log(u'Invalid username or password. Check your settings', logger.WARNING) - - return True - - def login(self): - if any(dict_from_cookiejar(self.session.cookies).values()): - return True - - login_params = { - 'username': self.username, - 'password': self.password, - 'logout': 'false', - 'submit': 'Welcome to ILT' - } - - response = self.get_url(self.urls['login'], post_data=login_params, returns='text') - if not response: - logger.log('Unable to connect to provider', logger.WARNING) - return False - - if re.search('Username or password incorrect', response): - logger.log('Invalid username or password. Check your settings', logger.WARNING) - return False - - return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches """ ILoveTorrents search and parsing @@ -188,5 +160,33 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results + def login(self): + if any(dict_from_cookiejar(self.session.cookies).values()): + return True + + login_params = { + 'username': self.username, + 'password': self.password, + 'logout': 'false', + 'submit': 'Welcome to ILT' + } + + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') + if not response: + logger.log('Unable to connect to provider', logger.WARNING) + return False + + if re.search('Username or password incorrect', response): + logger.log('Invalid username or password. Check your settings', logger.WARNING) + return False + + return True + + def _check_auth(self): + if not self.username or not self.password: + logger.log(u'Invalid username or password. 
Check your settings', logger.WARNING) + + return True + provider = ILoveTorrentsProvider() diff --git a/sickbeard/providers/iptorrents.py b/sickbeard/providers/iptorrents.py index d7945fbe0b..788339e9ac 100644 --- a/sickbeard/providers/iptorrents.py +++ b/sickbeard/providers/iptorrents.py @@ -64,43 +64,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self, min_time=10) # Only poll IPTorrents every 10 minutes max - def _check_auth(self): - - if not self.username or not self.password: - raise AuthException('Your authentication credentials for {0} are missing,' - ' check your config.'.format(self.name)) - - return True - - def login(self): - if any(dict_from_cookiejar(self.session.cookies).values()): - return True - - login_params = { - 'username': self.username, - 'password': self.password, - 'login': 'submit', - } - - self.get_url(self.urls['login'], returns='text') - response = self.get_url(self.urls['login'], post_data=login_params, returns='text') - if not response: - logger.log('Unable to connect to provider', logger.WARNING) - return False - - # Invalid username and password combination - if re.search('Invalid username and password combination', response): - logger.log('Invalid username or password. Check your settings', logger.WARNING) - return False - - # You tried too often, please try again after 2 hours! - if re.search('You tried too often', response): - logger.log('You tried too often, please try again after 2 hours!' - ' Disable IPTorrents for at least 2 hours', logger.WARNING) - return False - - return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches, too-many-statements results = [] if not self.login(): @@ -182,5 +145,42 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results + def login(self): + if any(dict_from_cookiejar(self.session.cookies).values()): + return True + + login_params = { + 'username': self.username, + 'password': self.password, + 'login': 'submit', + } + + self.get_url(self.urls['login'], returns='text') + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') + if not response: + logger.log('Unable to connect to provider', logger.WARNING) + return False + + # Invalid username and password combination + if re.search('Invalid username and password combination', response): + logger.log('Invalid username or password. Check your settings', logger.WARNING) + return False + + # You tried too often, please try again after 2 hours! + if re.search('You tried too often', response): + logger.log('You tried too often, please try again after 2 hours!' 
+ ' Disable IPTorrents for at least 2 hours', logger.WARNING) + return False + + return True + + def _check_auth(self): + + if not self.username or not self.password: + raise AuthException('Your authentication credentials for {0} are missing,' + ' check your config.'.format(self.name)) + + return True + provider = IPTorrentsProvider() diff --git a/sickbeard/providers/morethantv.py b/sickbeard/providers/morethantv.py index b4a08ba474..21c75ccabb 100644 --- a/sickbeard/providers/morethantv.py +++ b/sickbeard/providers/morethantv.py @@ -64,36 +64,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self) - def _check_auth(self): - - if not self.username or not self.password: - raise AuthException('Your authentication credentials for {0} are missing,' - ' check your config.'.format(self.name)) - - return True - - def login(self): - if any(dict_from_cookiejar(self.session.cookies).values()): - return True - - login_params = { - 'username': self.username, - 'password': self.password, - 'keeplogged': '1', - 'login': 'Log in', - } - - response = self.get_url(self.urls['login'], post_data=login_params, returns='text') - if not response: - logger.log('Unable to connect to provider', logger.WARNING) - return False - - if re.search('Your username or password was incorrect.', response): - logger.log('Invalid username or password. Check your settings', logger.WARNING) - return False - - return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches """ MoreThanTV search and parsing @@ -209,5 +179,35 @@ def process_column_header(td): return results + def login(self): + if any(dict_from_cookiejar(self.session.cookies).values()): + return True + + login_params = { + 'username': self.username, + 'password': self.password, + 'keeplogged': '1', + 'login': 'Log in', + } + + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') + if not response: + logger.log('Unable to connect to provider', logger.WARNING) + return False + + if re.search('Your username or password was incorrect.', response): + logger.log('Invalid username or password. 
Check your settings', logger.WARNING) + return False + + return True + + def _check_auth(self): + + if not self.username or not self.password: + raise AuthException('Your authentication credentials for {0} are missing,' + ' check your config.'.format(self.name)) + + return True + provider = MoreThanTVProvider() diff --git a/sickbeard/providers/newpct.py b/sickbeard/providers/newpct.py index b41b37a38e..fd8a099ef0 100644 --- a/sickbeard/providers/newpct.py +++ b/sickbeard/providers/newpct.py @@ -126,7 +126,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man try: torrent_row = row.find('a') - title = self._processTitle(torrent_row.get('title', '')) + title = self._process_title(torrent_row.get('title', '')) download_url = torrent_row.get('href', '') if not all([title, download_url]): continue @@ -159,6 +159,34 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results + @staticmethod + def _process_title(title): + # Remove 'Mas informacion sobre ' literal from title + title = title[22:] + + # Quality - Use re module to avoid case sensitive problems with replace + title = re.sub(r'\[HDTV 1080p[^\[]*]', '1080p HDTV x264', title, flags=re.IGNORECASE) + title = re.sub(r'\[HDTV 720p[^\[]*]', '720p HDTV x264', title, flags=re.IGNORECASE) + title = re.sub(r'\[ALTA DEFINICION 720p[^\[]*]', '720p HDTV x264', title, flags=re.IGNORECASE) + title = re.sub(r'\[HDTV]', 'HDTV x264', title, flags=re.IGNORECASE) + title = re.sub(r'\[DVD[^\[]*]', 'DVDrip x264', title, flags=re.IGNORECASE) + title = re.sub(r'\[BluRay 1080p[^\[]*]', '1080p BlueRay x264', title, flags=re.IGNORECASE) + title = re.sub(r'\[BluRay MicroHD[^\[]*]', '1080p BlueRay x264', title, flags=re.IGNORECASE) + title = re.sub(r'\[MicroHD 1080p[^\[]*]', '1080p BlueRay x264', title, flags=re.IGNORECASE) + title = re.sub(r'\[BLuRay[^\[]*]', '720p BlueRay x264', title, flags=re.IGNORECASE) + title = re.sub(r'\[BRrip[^\[]*]', '720p BlueRay x264', title, flags=re.IGNORECASE) + title = re.sub(r'\[BDrip[^\[]*]', '720p BlueRay x264', title, flags=re.IGNORECASE) + + # Language + title = re.sub(r'\[Spanish[^\[]*]', 'SPANISH AUDIO', title, flags=re.IGNORECASE) + title = re.sub(r'\[Castellano[^\[]*]', 'SPANISH AUDIO', title, flags=re.IGNORECASE) + title = re.sub(r'\[Español[^\[]*]', 'SPANISH AUDIO', title, flags=re.IGNORECASE) + title = re.sub(r'\[AC3 5\.1 Español[^\[]*]', 'SPANISH AUDIO', title, flags=re.IGNORECASE) + + title += '-NEWPCT' + + return title.strip() + def get_url(self, url, post_data=None, params=None, timeout=30, **kwargs): # pylint: disable=too-many-arguments """ returns='content' when trying access to torrent info (For calling torrent client). 
Previously we must parse @@ -208,33 +236,5 @@ def download_result(self, result): return False - @staticmethod - def _processTitle(title): - # Remove 'Mas informacion sobre ' literal from title - title = title[22:] - - # Quality - Use re module to avoid case sensitive problems with replace - title = re.sub(r'\[HDTV 1080p[^\[]*]', '1080p HDTV x264', title, flags=re.IGNORECASE) - title = re.sub(r'\[HDTV 720p[^\[]*]', '720p HDTV x264', title, flags=re.IGNORECASE) - title = re.sub(r'\[ALTA DEFINICION 720p[^\[]*]', '720p HDTV x264', title, flags=re.IGNORECASE) - title = re.sub(r'\[HDTV]', 'HDTV x264', title, flags=re.IGNORECASE) - title = re.sub(r'\[DVD[^\[]*]', 'DVDrip x264', title, flags=re.IGNORECASE) - title = re.sub(r'\[BluRay 1080p[^\[]*]', '1080p BlueRay x264', title, flags=re.IGNORECASE) - title = re.sub(r'\[BluRay MicroHD[^\[]*]', '1080p BlueRay x264', title, flags=re.IGNORECASE) - title = re.sub(r'\[MicroHD 1080p[^\[]*]', '1080p BlueRay x264', title, flags=re.IGNORECASE) - title = re.sub(r'\[BLuRay[^\[]*]', '720p BlueRay x264', title, flags=re.IGNORECASE) - title = re.sub(r'\[BRrip[^\[]*]', '720p BlueRay x264', title, flags=re.IGNORECASE) - title = re.sub(r'\[BDrip[^\[]*]', '720p BlueRay x264', title, flags=re.IGNORECASE) - - # Language - title = re.sub(r'\[Spanish[^\[]*]', 'SPANISH AUDIO', title, flags=re.IGNORECASE) - title = re.sub(r'\[Castellano[^\[]*]', 'SPANISH AUDIO', title, flags=re.IGNORECASE) - title = re.sub(r'\[Español[^\[]*]', 'SPANISH AUDIO', title, flags=re.IGNORECASE) - title = re.sub(r'\[AC3 5\.1 Español[^\[]*]', 'SPANISH AUDIO', title, flags=re.IGNORECASE) - - title += '-NEWPCT' - - return title.strip() - provider = newpctProvider() diff --git a/sickbeard/providers/newznab.py b/sickbeard/providers/newznab.py index d4c8e2b482..223e4eb170 100644 --- a/sickbeard/providers/newznab.py +++ b/sickbeard/providers/newznab.py @@ -76,199 +76,6 @@ def __init__(self, name, url, key='0', catIDs='5030,5040', search_mode='eponly', self.cache = tvcache.TVCache(self, min_time=30) # only poll newznab providers every 30 minutes max - def configStr(self): - """ - Generates a '|' delimited string of instance attributes, for saving to config.ini - """ - return '|'.join([ - self.name, self.url, self.key, self.catIDs, str(int(self.enabled)), - self.search_mode, str(int(self.search_fallback)), - str(int(self.enable_daily)), str(int(self.enable_backlog)), str(int(self.enable_manualsearch)) - ]) - - @staticmethod - def get_providers_list(data): - default_list = [ - provider for provider in - (NewznabProvider._make_provider(x) for x in NewznabProvider._get_default_providers().split('!!!')) - if provider] - - providers_list = [ - provider for provider in - (NewznabProvider._make_provider(x) for x in data.split('!!!')) - if provider] - - seen_values = set() - providers_set = [] - - for provider in providers_list: - value = provider.name - - if value not in seen_values: - providers_set.append(provider) - seen_values.add(value) - - providers_list = providers_set - providers_dict = dict(zip([provider.name for provider in providers_list], providers_list)) - - for default in default_list: - if not default: - continue - - if default.name not in providers_dict: - default.default = True - providers_list.append(default) - else: - providers_dict[default.name].default = True - providers_dict[default.name].name = default.name - providers_dict[default.name].url = default.url - providers_dict[default.name].needs_auth = default.needs_auth - providers_dict[default.name].search_mode = default.search_mode - 
providers_dict[default.name].search_fallback = default.search_fallback - providers_dict[default.name].enable_daily = default.enable_daily - providers_dict[default.name].enable_backlog = default.enable_backlog - providers_dict[default.name].enable_manualsearch = default.enable_manualsearch - - return [provider for provider in providers_list if provider] - - def image_name(self): - """ - Checks if we have an image for this provider already. - Returns found image or the default newznab image - """ - if ek(os.path.isfile, - ek(os.path.join, sickbeard.PROG_DIR, 'gui', sickbeard.GUI_NAME, 'images', 'providers', - self.get_id() + '.png')): - return self.get_id() + '.png' - return 'newznab.png' - - def set_caps(self, data): - if not data: - return - - def _parse_cap(tag): - elm = data.find(tag) - return elm.get('supportedparams', 'True') if elm and elm.get('available') else '' - - self.cap_tv_search = _parse_cap('tv-search') - # self.cap_search = _parse_cap('search') - # self.cap_movie_search = _parse_cap('movie-search') - # self.cap_audio_search = _parse_cap('audio-search') - - # self.caps = any([self.cap_tv_search, self.cap_search, self.cap_movie_search, self.cap_audio_search]) - self.caps = any([self.cap_tv_search]) - - def get_newznab_categories(self, just_caps=False): - """ - Uses the newznab provider url and apikey to get the capabilities. - Makes use of the default newznab caps param. e.a. http://yournewznab/api?t=caps&apikey=skdfiw7823sdkdsfjsfk - Returns a tuple with (succes or not, array with dicts [{'id': '5070', 'name': 'Anime'}, - {'id': '5080', 'name': 'Documentary'}, {'id': '5020', 'name': 'Foreign'}...etc}], error message) - """ - return_categories = [] - - if not self._check_auth(): - return False, return_categories, 'Provider requires auth and your key is not set' - - url_params = {'t': 'caps'} - if self.needs_auth and self.key: - url_params['apikey'] = self.key - - data = self.get_url(urljoin(self.url, 'api'), params=url_params, returns='text') - if not data: - error_string = 'Error getting caps xml for [{0}]'.format(self.name) - logger.log(error_string, logger.WARNING) - return False, return_categories, error_string - - with BS4Parser(data, 'html5lib') as html: - if not html.find('categories'): - error_string = 'Error parsing caps xml for [{0}]'.format(self.name) - logger.log(error_string, logger.DEBUG) - return False, return_categories, error_string - - self.set_caps(html.find('searching')) - if just_caps: - return - - for category in html('category'): - if 'TV' in category.get('name', '') and category.get('id', ''): - return_categories.append({'id': category['id'], 'name': category['name']}) - for subcat in category('subcat'): - if subcat.get('name', '') and subcat.get('id', ''): - return_categories.append({'id': subcat['id'], 'name': subcat['name']}) - - return True, return_categories, '' - - @staticmethod - def _get_default_providers(): - # name|url|key|catIDs|enabled|search_mode|search_fallback|enable_daily|enable_backlog|enable_manualsearch - return 'NZB.Cat|https://nzb.cat/||5030,5040,5010|0|eponly|0|0|0|0!!!' + \ - 'NZBGeek|https://api.nzbgeek.info/||5030,5040|0|eponly|0|0|0|0!!!' + \ - 'NZBs.org|https://nzbs.org/||5030,5040|0|eponly|0|0|0|0!!!' + \ - 'Usenet-Crawler|https://www.usenet-crawler.com/||5030,5040|0|eponly|0|0|0|0!!!' 
+ \ - 'DOGnzb|https://api.dognzb.cr/||5030,5040,5060,5070|0|eponly|0|0|0|0' - - def _check_auth(self): - """ - Checks that user has set their api key if it is needed - Returns: True/False - """ - if self.needs_auth and not self.key: - logger.log('Invalid api key. Check your settings', logger.WARNING) - return False - - return True - - def _checkAuthFromData(self, data): - """ - Checks that the returned data is valid - Returns: _check_auth if valid otherwise False if there is an error - """ - if data('categories') + data('item'): - return self._check_auth() - - try: - err_desc = data.error.attrs['description'] - if not err_desc: - raise - except (AttributeError, TypeError): - return self._check_auth() - - logger.log(ss(err_desc)) - - return False - - @staticmethod - def _make_provider(config): - if not config: - return None - - try: - values = config.split('|') - # Pad values with None for each missing value - values.extend([None for x in range(len(values), 10)]) - - (name, url, key, category_ids, enabled, - search_mode, search_fallback, - enable_daily, enable_backlog, enable_manualsearch - ) = values - - except ValueError: - logger.log('Skipping Newznab provider string: {config!r}, incorrect format'.format - (config=config), logger.ERROR) - return None - - new_provider = NewznabProvider( - name, url, key=key, catIDs=category_ids, - search_mode=search_mode or 'eponly', - search_fallback=search_fallback or 0, - enable_daily=enable_daily or 0, - enable_backlog=enable_backlog or 0, - enable_manualsearch=enable_manualsearch or 0) - new_provider.enabled = enabled == '1' - - return new_provider - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-arguments, too-many-locals, too-many-branches, too-many-statements """ Searches indexer using the params in search_strings, either for latest releases, or a string/id search @@ -331,7 +138,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man break with BS4Parser(data, 'html5lib') as html: - if not self._checkAuthFromData(html): + if not self._check_auth_from_data(html): break try: @@ -406,9 +213,203 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results + def _check_auth(self): + """ + Checks that user has set their api key if it is needed + Returns: True/False + """ + if self.needs_auth and not self.key: + logger.log('Invalid api key. 
Check your settings', logger.WARNING) + return False + + return True + + def _check_auth_from_data(self, data): + """ + Checks that the returned data is valid + Returns: _check_auth if valid otherwise False if there is an error + """ + if data('categories') + data('item'): + return self._check_auth() + + try: + err_desc = data.error.attrs['description'] + if not err_desc: + raise + except (AttributeError, TypeError): + return self._check_auth() + + logger.log(ss(err_desc)) + + return False + def _get_size(self, item): """ Gets size info from a result item Returns int size or -1 """ return try_int(item.get('size', -1), -1) + + def config_string(self): + """ + Generates a '|' delimited string of instance attributes, for saving to config.ini + """ + return '|'.join([ + self.name, self.url, self.key, self.catIDs, str(int(self.enabled)), + self.search_mode, str(int(self.search_fallback)), + str(int(self.enable_daily)), str(int(self.enable_backlog)), str(int(self.enable_manualsearch)) + ]) + + @staticmethod + def get_providers_list(data): + default_list = [ + provider for provider in + (NewznabProvider._make_provider(x) for x in NewznabProvider._get_default_providers().split('!!!')) + if provider] + + providers_list = [ + provider for provider in + (NewznabProvider._make_provider(x) for x in data.split('!!!')) + if provider] + + seen_values = set() + providers_set = [] + + for provider in providers_list: + value = provider.name + + if value not in seen_values: + providers_set.append(provider) + seen_values.add(value) + + providers_list = providers_set + providers_dict = dict(zip([provider.name for provider in providers_list], providers_list)) + + for default in default_list: + if not default: + continue + + if default.name not in providers_dict: + default.default = True + providers_list.append(default) + else: + providers_dict[default.name].default = True + providers_dict[default.name].name = default.name + providers_dict[default.name].url = default.url + providers_dict[default.name].needs_auth = default.needs_auth + providers_dict[default.name].search_mode = default.search_mode + providers_dict[default.name].search_fallback = default.search_fallback + providers_dict[default.name].enable_daily = default.enable_daily + providers_dict[default.name].enable_backlog = default.enable_backlog + providers_dict[default.name].enable_manualsearch = default.enable_manualsearch + + return [provider for provider in providers_list if provider] + + def image_name(self): + """ + Checks if we have an image for this provider already. 
+        Returns found image or the default newznab image
+        """
+        if ek(os.path.isfile,
+              ek(os.path.join, sickbeard.PROG_DIR, 'gui', sickbeard.GUI_NAME, 'images', 'providers',
+                 self.get_id() + '.png')):
+            return self.get_id() + '.png'
+        return 'newznab.png'
+
+    @staticmethod
+    def _make_provider(config):
+        if not config:
+            return None
+
+        try:
+            values = config.split('|')
+            # Pad values with None for each missing value
+            values.extend([None for x in range(len(values), 10)])
+
+            (name, url, key, category_ids, enabled,
+             search_mode, search_fallback,
+             enable_daily, enable_backlog, enable_manualsearch
+             ) = values
+
+        except ValueError:
+            logger.log('Skipping Newznab provider string: {config!r}, incorrect format'.format
+                       (config=config), logger.ERROR)
+            return None
+
+        new_provider = NewznabProvider(
+            name, url, key=key, catIDs=category_ids,
+            search_mode=search_mode or 'eponly',
+            search_fallback=search_fallback or 0,
+            enable_daily=enable_daily or 0,
+            enable_backlog=enable_backlog or 0,
+            enable_manualsearch=enable_manualsearch or 0)
+        new_provider.enabled = enabled == '1'
+
+        return new_provider
+
+    def set_caps(self, data):
+        if not data:
+            return
+
+        def _parse_cap(tag):
+            elm = data.find(tag)
+            return elm.get('supportedparams', 'True') if elm and elm.get('available') else ''
+
+        self.cap_tv_search = _parse_cap('tv-search')
+        # self.cap_search = _parse_cap('search')
+        # self.cap_movie_search = _parse_cap('movie-search')
+        # self.cap_audio_search = _parse_cap('audio-search')
+
+        # self.caps = any([self.cap_tv_search, self.cap_search, self.cap_movie_search, self.cap_audio_search])
+        self.caps = any([self.cap_tv_search])
+
+    def get_newznab_categories(self, just_caps=False):
+        """
+        Uses the newznab provider url and apikey to get the capabilities.
+        Makes use of the default newznab caps param, e.g. http://yournewznab/api?t=caps&apikey=skdfiw7823sdkdsfjsfk
+        Returns a tuple with (success or not, array with dicts [{'id': '5070', 'name': 'Anime'},
+        {'id': '5080', 'name': 'Documentary'}, {'id': '5020', 'name': 'Foreign'}, ...etc], error message)
+        """
+        return_categories = []
+
+        if not self._check_auth():
+            return False, return_categories, 'Provider requires auth and your key is not set'
+
+        url_params = {'t': 'caps'}
+        if self.needs_auth and self.key:
+            url_params['apikey'] = self.key
+
+        data = self.get_url(urljoin(self.url, 'api'), params=url_params, returns='text')
+        if not data:
+            error_string = 'Error getting caps xml for [{0}]'.format(self.name)
+            logger.log(error_string, logger.WARNING)
+            return False, return_categories, error_string
+
+        with BS4Parser(data, 'html5lib') as html:
+            if not html.find('categories'):
+                error_string = 'Error parsing caps xml for [{0}]'.format(self.name)
+                logger.log(error_string, logger.DEBUG)
+                return False, return_categories, error_string
+
+            self.set_caps(html.find('searching'))
+            if just_caps:
+                return
+
+            for category in html('category'):
+                if 'TV' in category.get('name', '') and category.get('id', ''):
+                    return_categories.append({'id': category['id'], 'name': category['name']})
+                    for subcat in category('subcat'):
+                        if subcat.get('name', '') and subcat.get('id', ''):
+                            return_categories.append({'id': subcat['id'], 'name': subcat['name']})
+
+        return True, return_categories, ''
+
+    @staticmethod
+    def _get_default_providers():
+        # name|url|key|catIDs|enabled|search_mode|search_fallback|enable_daily|enable_backlog|enable_manualsearch
+        return 'NZB.Cat|https://nzb.cat/||5030,5040,5010|0|eponly|0|0|0|0!!!' 
+ \ + 'NZBGeek|https://api.nzbgeek.info/||5030,5040|0|eponly|0|0|0|0!!!' + \ + 'NZBs.org|https://nzbs.org/||5030,5040|0|eponly|0|0|0|0!!!' + \ + 'Usenet-Crawler|https://www.usenet-crawler.com/||5030,5040|0|eponly|0|0|0|0!!!' + \ + 'DOGnzb|https://api.dognzb.cr/||5030,5040,5060,5070|0|eponly|0|0|0|0' + diff --git a/sickbeard/providers/norbits.py b/sickbeard/providers/norbits.py index 7b7cb9307d..6a0a0f28b3 100644 --- a/sickbeard/providers/norbits.py +++ b/sickbeard/providers/norbits.py @@ -61,24 +61,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self, min_time=20) # only poll Norbits every 15 minutes max - def _check_auth(self): - - if not self.username or not self.passkey: - raise AuthException(('Your authentication credentials for %s are ' - 'missing, check your config.') % self.name) - - return True - - def _checkAuthFromData(self, parsed_json): # pylint: disable=invalid-name - """ Check that we are authenticated. """ - - if 'status' in parsed_json and 'message' in parsed_json: - if parsed_json.get('status') == 3: - logger.log('Invalid username or password. ' - 'Check your settings', logger.WARNING) - - return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals """ Do the actual searching and JSON parsing""" @@ -109,7 +91,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if not parsed_json: return results - if self._checkAuthFromData(parsed_json): + if self._check_auth_from_data(parsed_json): json_items = parsed_json.get('data', '') if not json_items: logger.log('Resulting JSON from provider is not correct, ' @@ -163,5 +145,23 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results + def _check_auth(self): + + if not self.username or not self.passkey: + raise AuthException(('Your authentication credentials for %s are ' + 'missing, check your config.') % self.name) + + return True + + def _check_auth_from_data(self, parsed_json): # pylint: disable=invalid-name + """ Check that we are authenticated. """ + + if 'status' in parsed_json and 'message' in parsed_json: + if parsed_json.get('status') == 3: + logger.log('Invalid username or password. ' + 'Check your settings', logger.WARNING) + + return True + provider = NorbitsProvider() # pylint: disable=invalid-name diff --git a/sickbeard/providers/omgwtfnzbs.py b/sickbeard/providers/omgwtfnzbs.py index ee34ace9c9..b56c959048 100644 --- a/sickbeard/providers/omgwtfnzbs.py +++ b/sickbeard/providers/omgwtfnzbs.py @@ -54,50 +54,6 @@ def __init__(self): # Cache self.cache = OmgwtfnzbsCache(self) - def _check_auth(self): - - if not self.username or not self.api_key: - logger.log('Invalid api key. Check your settings', logger.WARNING) - return False - - return True - - def _checkAuthFromData(self, parsed_data, is_XML=True): - - if not parsed_data: - return self._check_auth() - - if is_XML: - # provider doesn't return xml on error - return True - - if 'notice' in parsed_data: - description_text = parsed_data.get('notice') - if 'information is incorrect' in description_text: - logger.log('Invalid api key. 
Check your settings', logger.WARNING) - elif '0 results matched your terms' not in description_text: - logger.log('Unknown error: {0}'.format(description_text), logger.DEBUG) - return False - - return True - - def _get_title_and_url(self, item): - return item['release'], item['getnzb'] - - def _get_size(self, item): - size = item.get('sizebytes', -1) - - # Try to get the size from the summary tag - if size == -1: - # Units - units = ['B', 'KB', 'MB', 'GB', 'TB', 'PB'] - summary = item.get('summary') - if summary: - size_match = re.search(r'Size[^\d]*([0-9.]*.[A-Z]*)', summary) - size = convert_size(size_match.group(1), units=units) or -1 if size_match else -1 - - return try_int(size) - def search(self, search_strings, age=0, ep_obj=None): results = [] if not self._check_auth(): @@ -126,7 +82,7 @@ def search(self, search_strings, age=0, ep_obj=None): logger.log('No data returned from provider', logger.DEBUG) continue - if not self._checkAuthFromData(data, is_XML=False): + if not self._check_auth_from_data(data, is_XML=False): continue for item in data: @@ -145,6 +101,50 @@ def search(self, search_strings, age=0, ep_obj=None): return results + def _check_auth(self): + + if not self.username or not self.api_key: + logger.log('Invalid api key. Check your settings', logger.WARNING) + return False + + return True + + def _check_auth_from_data(self, parsed_data, is_XML=True): + + if not parsed_data: + return self._check_auth() + + if is_XML: + # provider doesn't return xml on error + return True + + if 'notice' in parsed_data: + description_text = parsed_data.get('notice') + if 'information is incorrect' in description_text: + logger.log('Invalid api key. Check your settings', logger.WARNING) + elif '0 results matched your terms' not in description_text: + logger.log('Unknown error: {0}'.format(description_text), logger.DEBUG) + return False + + return True + + def _get_title_and_url(self, item): + return item['release'], item['getnzb'] + + def _get_size(self, item): + size = item.get('sizebytes', -1) + + # Try to get the size from the summary tag + if size == -1: + # Units + units = ['B', 'KB', 'MB', 'GB', 'TB', 'PB'] + summary = item.get('summary') + if summary: + size_match = re.search(r'Size[^\d]*([0-9.]*.[A-Z]*)', summary) + size = convert_size(size_match.group(1), units=units) or -1 if size_match else -1 + + return try_int(size) + class OmgwtfnzbsCache(tvcache.TVCache): def _get_title_and_url(self, item): diff --git a/sickbeard/providers/pretome.py b/sickbeard/providers/pretome.py index 59af9edf38..dbcda2881d 100644 --- a/sickbeard/providers/pretome.py +++ b/sickbeard/providers/pretome.py @@ -67,34 +67,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self) - def _check_auth(self): - - if not self.username or not self.password or not self.pin: - logger.log('Invalid username or password or pin. Check your settings', logger.WARNING) - - return True - - def login(self): - if any(dict_from_cookiejar(self.session.cookies).values()): - return True - - login_params = { - 'username': self.username, - 'password': self.password, - 'login_pin': self.pin, - } - - response = self.get_url(self.urls['login'], post_data=login_params, returns='text') - if not response: - logger.log('Unable to connect to provider', logger.WARNING) - return False - - if re.search('Username or password incorrect', response): - logger.log('Invalid username or password. 
Check your settings', logger.WARNING) - return False - - return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches """ Pretome search and parsing @@ -194,5 +166,33 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results + def login(self): + if any(dict_from_cookiejar(self.session.cookies).values()): + return True + + login_params = { + 'username': self.username, + 'password': self.password, + 'login_pin': self.pin, + } + + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') + if not response: + logger.log('Unable to connect to provider', logger.WARNING) + return False + + if re.search('Username or password incorrect', response): + logger.log('Invalid username or password. Check your settings', logger.WARNING) + return False + + return True + + def _check_auth(self): + + if not self.username or not self.password or not self.pin: + logger.log('Invalid username or password or pin. Check your settings', logger.WARNING) + + return True + provider = PretomeProvider() diff --git a/sickbeard/providers/rarbg.py b/sickbeard/providers/rarbg.py index fa376ba62c..618f408065 100644 --- a/sickbeard/providers/rarbg.py +++ b/sickbeard/providers/rarbg.py @@ -61,25 +61,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self, min_time=10) # only poll RARBG every 10 minutes max - def login(self): - if self.token and self.token_expires and datetime.datetime.now() < self.token_expires: - return True - - login_params = { - 'get_token': 'get_token', - 'format': 'json', - 'app_id': 'sickrage2', - } - - response = self.get_url(self.urls['api'], params=login_params, returns='json') - if not response: - logger.log('Unable to connect to provider', logger.WARNING) - return False - - self.token = response.get('token') - self.token_expires = datetime.datetime.now() + datetime.timedelta(minutes=14) if self.token else None - return self.token is not None - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-branches, too-many-locals, too-many-statements """ RARBG search and parsing @@ -213,5 +194,24 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results + def login(self): + if self.token and self.token_expires and datetime.datetime.now() < self.token_expires: + return True + + login_params = { + 'get_token': 'get_token', + 'format': 'json', + 'app_id': 'sickrage2', + } + + response = self.get_url(self.urls['api'], params=login_params, returns='json') + if not response: + logger.log('Unable to connect to provider', logger.WARNING) + return False + + self.token = response.get('token') + self.token_expires = datetime.datetime.now() + datetime.timedelta(minutes=14) if self.token else None + return self.token is not None + provider = RarbgProvider() diff --git a/sickbeard/providers/rsstorrent.py b/sickbeard/providers/rsstorrent.py index d236a17e54..bb8236b04f 100644 --- a/sickbeard/providers/rsstorrent.py +++ b/sickbeard/providers/rsstorrent.py @@ -55,7 +55,29 @@ def __init__(self, name, url, cookies='', # pylint: disable=too-many-arguments self.cookies = cookies self.titleTAG = titleTAG - def configStr(self): # pylint: disable=too-many-arguments + def _get_title_and_url(self, item): + + title = item.get(self.titleTAG, '').replace(' ', '.') + + attempt_list = [ + lambda: item.get('torrent_magneturi'), + lambda: item.enclosures[0].href, + lambda: item.get('link') + ] + + url = None + for cur_attempt in 
attempt_list: + try: + url = cur_attempt() + except Exception: + continue + + if title and url: + break + + return title, url + + def config_string(self): # pylint: disable=too-many-arguments return '{}|{}|{}|{}|{}|{}|{}|{}|{}'.format( self.name or '', self.url or '', @@ -89,28 +111,6 @@ def image_name(self): return self.get_id() + '.png' return 'torrentrss.png' - def _get_title_and_url(self, item): - - title = item.get(self.titleTAG, '').replace(' ', '.') - - attempt_list = [ - lambda: item.get('torrent_magneturi'), - lambda: item.enclosures[0].href, - lambda: item.get('link') - ] - - url = None - for cur_attempt in attempt_list: - try: - url = cur_attempt() - except Exception: - continue - - if title and url: - break - - return title, url - @staticmethod def _make_provider(config): if not config: @@ -149,7 +149,7 @@ def _make_provider(config): return new_provider - def validateRSS(self): # pylint: disable=too-many-return-statements + def validate_rss(self): # pylint: disable=too-many-return-statements try: if self.cookies: @@ -179,7 +179,7 @@ def validateRSS(self): # pylint: disable=too-many-return-statements try: bdecode(torrent_file) except Exception as error: - self.dumpHTML(torrent_file) + self.dump_html(torrent_file) return False, 'Torrent link is not a valid torrent file: {0}'.format(ex(error)) return True, 'RSS feed Parsed correctly' @@ -188,7 +188,7 @@ def validateRSS(self): # pylint: disable=too-many-return-statements return False, 'Error when trying to load RSS: {0}'.format(ex(error)) @staticmethod - def dumpHTML(data): + def dump_html(data): dump_name = ek(os.path.join, sickbeard.CACHE_DIR, 'custom_torrent.html') try: diff --git a/sickbeard/providers/scc.py b/sickbeard/providers/scc.py index 4782570a5f..2d04053fa5 100644 --- a/sickbeard/providers/scc.py +++ b/sickbeard/providers/scc.py @@ -69,33 +69,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self) # only poll SCC every 20 minutes max - def login(self): - if any(dict_from_cookiejar(self.session.cookies).values()): - return True - - login_params = { - 'username': self.username, - 'password': self.password, - 'submit': 'come on in', - } - - response = self.get_url(self.urls['login'], post_data=login_params, returns='text') - if not response: - logger.log('Unable to connect to provider', logger.WARNING) - return False - - if re.search(r'Username or password incorrect', response) \ - or re.search(r'SceneAccess \| Login', response): - logger.log('Invalid username or password. Check your settings', logger.WARNING) - return False - - return True - - @staticmethod - def _isSection(section, text): - title = r'.+? \| %s' % section - return re.search(title, text, re.IGNORECASE) - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals,too-many-branches, too-many-statements results = [] if not self.login(): @@ -178,5 +151,32 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results + def login(self): + if any(dict_from_cookiejar(self.session.cookies).values()): + return True + + login_params = { + 'username': self.username, + 'password': self.password, + 'submit': 'come on in', + } + + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') + if not response: + logger.log('Unable to connect to provider', logger.WARNING) + return False + + if re.search(r'Username or password incorrect', response) \ + or re.search(r'SceneAccess \| Login', response): + logger.log('Invalid username or password. 
Check your settings', logger.WARNING) + return False + + return True + + @staticmethod + def _is_section(section, text): + title = r'.+? \| %s' % section + return re.search(title, text, re.IGNORECASE) + provider = SCCProvider() diff --git a/sickbeard/providers/scenetime.py b/sickbeard/providers/scenetime.py index 458408e5dd..1299c771d9 100644 --- a/sickbeard/providers/scenetime.py +++ b/sickbeard/providers/scenetime.py @@ -64,26 +64,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self) # only poll SceneTime every 20 minutes max - def login(self): - if any(dict_from_cookiejar(self.session.cookies).values()): - return True - - login_params = { - 'username': self.username, - 'password': self.password, - } - - response = self.get_url(self.urls['login'], post_data=login_params, returns='text') - if not response: - logger.log('Unable to connect to provider', logger.WARNING) - return False - - if re.search('Username or password incorrect', response): - logger.log('Invalid username or password. Check your settings', logger.WARNING) - return False - - return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches """ SceneTime search and parsing @@ -181,5 +161,25 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results + def login(self): + if any(dict_from_cookiejar(self.session.cookies).values()): + return True + + login_params = { + 'username': self.username, + 'password': self.password, + } + + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') + if not response: + logger.log('Unable to connect to provider', logger.WARNING) + return False + + if re.search('Username or password incorrect', response): + logger.log('Invalid username or password. Check your settings', logger.WARNING) + return False + + return True + provider = SceneTimeProvider() diff --git a/sickbeard/providers/shazbat.py b/sickbeard/providers/shazbat.py index 5303da6b19..8e0b1835ef 100644 --- a/sickbeard/providers/shazbat.py +++ b/sickbeard/providers/shazbat.py @@ -62,7 +62,7 @@ def _check_auth(self): return True - def _checkAuthFromData(self, data): + def _check_auth_from_data(self, data): if not self.passkey: self._check_auth() elif data.get('bozo') == 1 and not (data['entries'] and data['feed']): @@ -83,7 +83,7 @@ def _getRSSData(self): return self.getRSSFeed(self.provider.urls['rss_recent'], params=params) def _checkAuth(self, data): - return self.provider._checkAuthFromData(data) # pylint: disable=protected-access + return self.provider._check_auth_from_data(data) # pylint: disable=protected-access provider = ShazbatProvider() diff --git a/sickbeard/providers/speedcd.py b/sickbeard/providers/speedcd.py index 19ca6efe65..aa9e0458b9 100644 --- a/sickbeard/providers/speedcd.py +++ b/sickbeard/providers/speedcd.py @@ -62,26 +62,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self) - def login(self): - if any(dict_from_cookiejar(self.session.cookies).values()): - return True - - login_params = { - 'username': self.username, - 'password': self.password, - } - - response = self.get_url(self.urls['login'], post_data=login_params, returns='text') - if not response: - logger.log('Unable to connect to provider', logger.WARNING) - return False - - if re.search('Incorrect username or Password. Please try again.', response): - logger.log('Invalid username or password. 
Check your settings', logger.WARNING) - return False - - return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches results = [] if not self.login(): @@ -194,5 +174,25 @@ def process_column_header(td): return results + def login(self): + if any(dict_from_cookiejar(self.session.cookies).values()): + return True + + login_params = { + 'username': self.username, + 'password': self.password, + } + + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') + if not response: + logger.log('Unable to connect to provider', logger.WARNING) + return False + + if re.search('Incorrect username or Password. Please try again.', response): + logger.log('Invalid username or password. Check your settings', logger.WARNING) + return False + + return True + provider = SpeedCDProvider() diff --git a/sickbeard/providers/t411.py b/sickbeard/providers/t411.py index 3a759893f7..4ce84bb94c 100644 --- a/sickbeard/providers/t411.py +++ b/sickbeard/providers/t411.py @@ -69,31 +69,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self, min_time=10) # Only poll T411 every 10 minutes max - def login(self): - if self.token is not None: - if time.time() < (self.tokenLastUpdate + 30 * 60): - return True - - login_params = { - 'username': self.username, - 'password': self.password, - } - - response = self.get_url(self.urls['login_page'], post_data=login_params, returns='json') - if not response: - logger.log('Unable to connect to provider', logger.WARNING) - return False - - if response and 'token' in response: - self.token = response['token'] - self.tokenLastUpdate = time.time() - # self.uid = response['uid'].encode('ascii', 'ignore') - self.session.auth = T411Auth(self.token) - return True - else: - logger.log('Token not found in authentication response', logger.WARNING) - return False - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches """ T411 search and parsing @@ -188,6 +163,31 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results + def login(self): + if self.token is not None: + if time.time() < (self.tokenLastUpdate + 30 * 60): + return True + + login_params = { + 'username': self.username, + 'password': self.password, + } + + response = self.get_url(self.urls['login_page'], post_data=login_params, returns='json') + if not response: + logger.log('Unable to connect to provider', logger.WARNING) + return False + + if response and 'token' in response: + self.token = response['token'] + self.tokenLastUpdate = time.time() + # self.uid = response['uid'].encode('ascii', 'ignore') + self.session.auth = T411Auth(self.token) + return True + else: + logger.log('Token not found in authentication response', logger.WARNING) + return False + class T411Auth(AuthBase): # pylint: disable=too-few-public-methods """Attaches HTTP Authentication to the given Request object.""" diff --git a/sickbeard/providers/tntvillage.py b/sickbeard/providers/tntvillage.py index 3d3a8d32ac..773d9cbaaa 100644 --- a/sickbeard/providers/tntvillage.py +++ b/sickbeard/providers/tntvillage.py @@ -69,39 +69,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self, min_time=30) # only poll TNTVillage every 30 minutes max - def _check_auth(self): - - if not self.username or not self.password: - raise AuthException('Your authentication credentials for {0} are missing,' - ' check your config.'.format(self.name)) - - return True - - def login(self): - if 
len(self.session.cookies) > 1: - cookies_dict = dict_from_cookiejar(self.session.cookies) - if cookies_dict['pass_hash'] != '0' and cookies_dict['member_id'] != '0': - return True - - login_params = { - 'UserName': self.username, - 'PassWord': self.password, - 'CookieDate': 1, - 'submit': 'Connettiti al Forum', - } - - response = self.get_url(self.urls['login'], post_data=login_params, returns='text') - if not response: - logger.log('Unable to connect to provider', logger.WARNING) - return False - - if re.search('Sono stati riscontrati i seguenti errori', response) or \ - re.search('Connettiti', response): - logger.log('Invalid username or password. Check your settings', logger.WARNING) - return False - - return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches """ TNTVillage search and parsing @@ -160,7 +127,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man params = parse_qs(last_cell_anchor.get('href', '')) download_url = self.urls['download'].format(params['pid'][0]) if \ params.get('pid') else None - title = _normalize_title(cells[0], cells[1], mode) + title = self._process_title(cells[0], cells[1], mode) if not all([title, download_url]): continue @@ -178,7 +145,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man (title, seeders), logger.DEBUG) continue - if _has_only_subs(title) and not self.subtitle: + if self._has_only_subs(title) and not self.subtitle: logger.log('Torrent is only subtitled, skipping: {0}'.format (title), logger.DEBUG) continue @@ -209,55 +176,88 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results + def login(self): + if len(self.session.cookies) > 1: + cookies_dict = dict_from_cookiejar(self.session.cookies) + if cookies_dict['pass_hash'] != '0' and cookies_dict['member_id'] != '0': + return True -def _normalize_title(title, info, mode): - - result_title = title.find('a').get_text() - result_info = info.find('span') - - if not result_info: - return None - - bad_words = ['[cura]', 'hot', 'season', 'stagione', 'series', 'premiere', 'finale', 'fine', - 'full', 'Completa', 'supereroi', 'commedia', 'drammatico', 'poliziesco', 'azione', - 'giallo', 'politico', 'sitcom', 'funzionante'] + login_params = { + 'UserName': self.username, + 'PassWord': self.password, + 'CookieDate': 1, + 'submit': 'Connettiti al Forum', + } - formatted_info = '' - for info_part in result_info: - if mode == 'RSS': - try: - info_part = info_part.get('src') - info_part = info_part.replace('style_images/mkportal-636/', '') - info_part = info_part.replace('.gif', '').replace('.png', '') - if info_part == 'dolby': - info_part = 'Ac3' - elif info_part == 'fullHd': - info_part = '1080p' - except AttributeError: - info_part = info_part.replace('·', '').replace(',', '') - info_part = info_part.replace('by', '-').strip() - formatted_info += ' ' + info_part - else: - formatted_info = info_part + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') + if not response: + logger.log('Unable to connect to provider', logger.WARNING) + return False - allowed_words = [word for word in formatted_info.split() if word.lower() not in bad_words] - final_title = '{0} '.format(result_title) + ' '.join(allowed_words).strip('-').strip() + if re.search('Sono stati riscontrati i seguenti errori', response) or \ + re.search('Connettiti', response): + logger.log('Invalid username or password. 
Check your settings', logger.WARNING) + return False - return final_title + return True + def _check_auth(self): -def _has_only_subs(title): + if not self.username or not self.password: + raise AuthException('Your authentication credentials for {0} are missing,' + ' check your config.'.format(self.name)) - title = title.lower() + return True - if 'sub' in title: - title = title.split() - counter = 0 - for word in title: - if 'ita' in word: - counter = counter + 1 - if counter < 2: - return True + @staticmethod + def _process_title(title, info, mode): + + result_title = title.find('a').get_text() + result_info = info.find('span') + + if not result_info: + return None + + bad_words = ['[cura]', 'hot', 'season', 'stagione', 'series', 'premiere', 'finale', 'fine', + 'full', 'Completa', 'supereroi', 'commedia', 'drammatico', 'poliziesco', 'azione', + 'giallo', 'politico', 'sitcom', 'funzionante'] + + formatted_info = '' + for info_part in result_info: + if mode == 'RSS': + try: + info_part = info_part.get('src') + info_part = info_part.replace('style_images/mkportal-636/', '') + info_part = info_part.replace('.gif', '').replace('.png', '') + if info_part == 'dolby': + info_part = 'Ac3' + elif info_part == 'fullHd': + info_part = '1080p' + except AttributeError: + info_part = info_part.replace('·', '').replace(',', '') + info_part = info_part.replace('by', '-').strip() + formatted_info += ' ' + info_part + else: + formatted_info = info_part + + allowed_words = [word for word in formatted_info.split() if word.lower() not in bad_words] + final_title = '{0} '.format(result_title) + ' '.join(allowed_words).strip('-').strip() + + return final_title + + @staticmethod + def _has_only_subs(title): + + title = title.lower() + + if 'sub' in title: + title = title.split() + counter = 0 + for word in title: + if 'ita' in word: + counter = counter + 1 + if counter < 2: + return True provider = TNTVillageProvider() diff --git a/sickbeard/providers/torrentbytes.py b/sickbeard/providers/torrentbytes.py index a5fc036270..5b580f3fe3 100644 --- a/sickbeard/providers/torrentbytes.py +++ b/sickbeard/providers/torrentbytes.py @@ -62,27 +62,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self) - def login(self): - if any(dict_from_cookiejar(self.session.cookies).values()): - return True - - login_params = { - 'username': self.username, - 'password': self.password, - 'login': 'Log in!', - } - - response = self.get_url(self.urls['login'], post_data=login_params, returns='text') - if not response: - logger.log('Unable to connect to provider', logger.WARNING) - return False - - if re.search('Username or password incorrect', response): - logger.log('Invalid username or password. Check your settings', logger.WARNING) - return False - - return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches, too-many-statements results = [] if not self.login(): @@ -181,5 +160,26 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results + def login(self): + if any(dict_from_cookiejar(self.session.cookies).values()): + return True + + login_params = { + 'username': self.username, + 'password': self.password, + 'login': 'Log in!', + } + + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') + if not response: + logger.log('Unable to connect to provider', logger.WARNING) + return False + + if re.search('Username or password incorrect', response): + logger.log('Invalid username or password. 
Check your settings', logger.WARNING) + return False + + return True + provider = TorrentBytesProvider() diff --git a/sickbeard/providers/torrentday.py b/sickbeard/providers/torrentday.py index 8aa01ff532..829ca1660f 100644 --- a/sickbeard/providers/torrentday.py +++ b/sickbeard/providers/torrentday.py @@ -67,43 +67,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self, min_time=10) # Only poll IPTorrents every 10 minutes max - def login(self): - if any(dict_from_cookiejar(self.session.cookies).values()): - return True - - if self._uid and self._hash: - add_dict_to_cookiejar(self.session.cookies, self.cookies) - else: - - login_params = { - 'username': self.username, - 'password': self.password, - 'submit.x': 0, - 'submit.y': 0, - } - - response = self.get_url(self.urls['login'], post_data=login_params, returns='text') - if not response: - logger.log('Unable to connect to provider', logger.WARNING) - return False - - if re.search('You tried too often', response): - logger.log('Too many login access attempts', logger.WARNING) - return False - - try: - if dict_from_cookiejar(self.session.cookies)['uid'] and dict_from_cookiejar(self.session.cookies)['pass']: - self._uid = dict_from_cookiejar(self.session.cookies)['uid'] - self._hash = dict_from_cookiejar(self.session.cookies)['pass'] - self.cookies = {'uid': self._uid, - 'pass': self._hash} - return True - except Exception: - pass - - logger.log('Unable to obtain cookie', logger.WARNING) - return False - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals results = [] if not self.login(): @@ -189,5 +152,42 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results + def login(self): + if any(dict_from_cookiejar(self.session.cookies).values()): + return True + + if self._uid and self._hash: + add_dict_to_cookiejar(self.session.cookies, self.cookies) + else: + + login_params = { + 'username': self.username, + 'password': self.password, + 'submit.x': 0, + 'submit.y': 0, + } + + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') + if not response: + logger.log('Unable to connect to provider', logger.WARNING) + return False + + if re.search('You tried too often', response): + logger.log('Too many login access attempts', logger.WARNING) + return False + + try: + if dict_from_cookiejar(self.session.cookies)['uid'] and dict_from_cookiejar(self.session.cookies)['pass']: + self._uid = dict_from_cookiejar(self.session.cookies)['uid'] + self._hash = dict_from_cookiejar(self.session.cookies)['pass'] + self.cookies = {'uid': self._uid, + 'pass': self._hash} + return True + except Exception: + pass + + logger.log('Unable to obtain cookie', logger.WARNING) + return False + provider = TorrentDayProvider() diff --git a/sickbeard/providers/torrentleech.py b/sickbeard/providers/torrentleech.py index d761539208..b5ba0be432 100644 --- a/sickbeard/providers/torrentleech.py +++ b/sickbeard/providers/torrentleech.py @@ -61,28 +61,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self) - def login(self): - if any(dict_from_cookiejar(self.session.cookies).values()): - return True - - login_params = { - 'username': self.username, - 'password': self.password, - 'login': 'submit', - 'remember_me': 'on', - } - - response = self.get_url(self.urls['login'], post_data=login_params, returns='text') - if not response: - logger.log('Unable to connect to provider', logger.WARNING) - return False - - if re.search('Invalid Username/password', 
response) or re.search('Login :: TorrentLeech.org', response): - logger.log('Invalid username or password. Check your settings', logger.WARNING) - return False - - return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches """ TorrentLeech search and parsing @@ -193,5 +171,27 @@ def process_column_header(td): return results + def login(self): + if any(dict_from_cookiejar(self.session.cookies).values()): + return True + + login_params = { + 'username': self.username, + 'password': self.password, + 'login': 'submit', + 'remember_me': 'on', + } + + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') + if not response: + logger.log('Unable to connect to provider', logger.WARNING) + return False + + if re.search('Invalid Username/password', response) or re.search('Login :: TorrentLeech.org', response): + logger.log('Invalid username or password. Check your settings', logger.WARNING) + return False + + return True + provider = TorrentLeechProvider() diff --git a/sickbeard/providers/torrentz.py b/sickbeard/providers/torrentz.py index f4588d5a7b..267d430496 100644 --- a/sickbeard/providers/torrentz.py +++ b/sickbeard/providers/torrentz.py @@ -60,11 +60,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self, min_time=15) # only poll Torrentz every 15 minutes max - @staticmethod - def _split_description(description): - match = re.findall(r'[0-9]+', description) - return int(match[0]) * 1024 ** 2, int(match[1]), int(match[2]) - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals results = [] @@ -135,5 +130,10 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results + @staticmethod + def _split_description(description): + match = re.findall(r'[0-9]+', description) + return int(match[0]) * 1024 ** 2, int(match[1]), int(match[2]) + provider = TorrentzProvider() diff --git a/sickbeard/providers/transmitthenet.py b/sickbeard/providers/transmitthenet.py index 77c6a40a09..c36fed6bd4 100644 --- a/sickbeard/providers/transmitthenet.py +++ b/sickbeard/providers/transmitthenet.py @@ -61,36 +61,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self) - def _check_auth(self): - - if not self.username or not self.password: - raise AuthException('Your authentication credentials for {0} are missing,' - ' check your config.'.format(self.name)) - - return True - - def login(self): - if any(dict_from_cookiejar(self.session.cookies).values()): - return True - - login_params = { - 'username': self.username, - 'password': self.password, - 'keeplogged': 'on', - 'login': 'Login' - } - - response = self.get_url(self.urls['login'], post_data=login_params, returns='text') - if not response: - logger.log('Unable to connect to provider', logger.WARNING) - return False - - if re.search('Username Incorrect', response) or re.search('Password Incorrect', response): - logger.log('Invalid username or password. 
Check your settings', logger.WARNING) - return False - - return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches """ TransmitTheNet search and parsing @@ -202,5 +172,35 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results + def login(self): + if any(dict_from_cookiejar(self.session.cookies).values()): + return True + + login_params = { + 'username': self.username, + 'password': self.password, + 'keeplogged': 'on', + 'login': 'Login' + } + + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') + if not response: + logger.log('Unable to connect to provider', logger.WARNING) + return False + + if re.search('Username Incorrect', response) or re.search('Password Incorrect', response): + logger.log('Invalid username or password. Check your settings', logger.WARNING) + return False + + return True + + def _check_auth(self): + + if not self.username or not self.password: + raise AuthException('Your authentication credentials for {0} are missing,' + ' check your config.'.format(self.name)) + + return True + provider = TransmitTheNetProvider() diff --git a/sickbeard/providers/tvchaosuk.py b/sickbeard/providers/tvchaosuk.py index 06b800d63f..ab39816ce4 100644 --- a/sickbeard/providers/tvchaosuk.py +++ b/sickbeard/providers/tvchaosuk.py @@ -61,36 +61,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self) - def _check_auth(self): - if self.username and self.password: - return True - - raise AuthException('Your authentication credentials for {0} are missing,' - ' check your config.'.format(self.name)) - - def login(self): - if len(self.session.cookies) >= 4: - return True - - login_params = { - 'username': self.username, - 'password': self.password, - 'logout': 'no', - 'submit': 'LOGIN', - 'returnto': '/browse.php', - } - - response = self.get_url(self.urls['login'], post_data=login_params, returns='text') - if not response: - logger.log('Unable to connect to provider', logger.WARNING) - return False - - if re.search('Error: Username or password incorrect!', response): - logger.log('Invalid username or password. Check your settings', logger.WARNING) - return False - - return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches """ TVChaosUK search and parsing @@ -208,5 +178,35 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results + def login(self): + if len(self.session.cookies) >= 4: + return True + + login_params = { + 'username': self.username, + 'password': self.password, + 'logout': 'no', + 'submit': 'LOGIN', + 'returnto': '/browse.php', + } + + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') + if not response: + logger.log('Unable to connect to provider', logger.WARNING) + return False + + if re.search('Error: Username or password incorrect!', response): + logger.log('Invalid username or password. 
Check your settings', logger.WARNING) + return False + + return True + + def _check_auth(self): + if self.username and self.password: + return True + + raise AuthException('Your authentication credentials for {0} are missing,' + ' check your config.'.format(self.name)) + provider = TVChaosUKProvider() diff --git a/sickbeard/providers/xthor.py b/sickbeard/providers/xthor.py index c0cd7f0655..01dd6c41b6 100644 --- a/sickbeard/providers/xthor.py +++ b/sickbeard/providers/xthor.py @@ -62,27 +62,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self, min_time=30) - def login(self): - if any(dict_from_cookiejar(self.session.cookies).values()): - return True - - login_params = { - 'username': self.username, - 'password': self.password, - 'submitme': 'X' - } - - response = self.get_url(self.urls['login'], post_data=login_params, returns='text') - if not response: - logger.log('Unable to connect to provider', logger.WARNING) - return False - - if not re.search('donate.php', response): - logger.log('Invalid username or password. Check your settings', logger.WARNING) - return False - - return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches """ Xthor search and parsing @@ -204,5 +183,26 @@ def process_column_header(td): return results + def login(self): + if any(dict_from_cookiejar(self.session.cookies).values()): + return True + + login_params = { + 'username': self.username, + 'password': self.password, + 'submitme': 'X' + } + + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') + if not response: + logger.log('Unable to connect to provider', logger.WARNING) + return False + + if not re.search('donate.php', response): + logger.log('Invalid username or password. 
Check your settings', logger.WARNING)
+            return False
+
+        return True
+
 provider = XthorProvider()
diff --git a/sickbeard/server/web/config/providers.py b/sickbeard/server/web/config/providers.py
index 521b3fb5bc..b9627bb6e8 100644
--- a/sickbeard/server/web/config/providers.py
+++ b/sickbeard/server/web/config/providers.py
@@ -81,12 +81,12 @@ def saveNewznabProvider(name, url, key=''):
             else:
                 provider_dict[name].needs_auth = True
 
-            return '|'.join([provider_dict[name].get_id(), provider_dict[name].configStr()])
+            return '|'.join([provider_dict[name].get_id(), provider_dict[name].config_string()])
         else:
             new_provider = newznab.NewznabProvider(name, url, key=key)
             sickbeard.newznabProviderList.append(new_provider)
 
-            return '|'.join([new_provider.get_id(), new_provider.configStr()])
+            return '|'.join([new_provider.get_id(), new_provider.config_string()])
 
     @staticmethod
     def getNewznabCategories(name, url, key):
@@ -152,7 +152,7 @@ def canAddTorrentRssProvider(name, url, cookies, titleTAG):
         if temp_provider.get_id() in provider_dict:
             return json.dumps({'error': 'Exists as {name}'.format(name=provider_dict[temp_provider.get_id()].name)})
         else:
-            (succ, err_msg) = temp_provider.validateRSS()
+            (succ, err_msg) = temp_provider.validate_rss()
             if succ:
                 return json.dumps({'success': temp_provider.get_id()})
             else:
@@ -175,12 +175,12 @@ def saveTorrentRssProvider(name, url, cookies, titleTAG):
             provider_dict[name].cookies = cookies
             provider_dict[name].titleTAG = titleTAG
 
-            return '|'.join([provider_dict[name].get_id(), provider_dict[name].configStr()])
+            return '|'.join([provider_dict[name].get_id(), provider_dict[name].config_string()])
         else:
             new_provider = rsstorrent.TorrentRssProvider(name, url, cookies, titleTAG)
             sickbeard.torrentRssProviderList.append(new_provider)
 
-            return '|'.join([new_provider.get_id(), new_provider.configStr()])
+            return '|'.join([new_provider.get_id(), new_provider.config_string()])
 
     @staticmethod
     def deleteTorrentRssProvider(id):
@@ -543,7 +543,7 @@ def saveProviders(self, newznab_string='', torrentrss_string='', provider_order=
             except (AttributeError, KeyError):
                 curNzbProvider.enable_backlog = 0 # these exceptions are actually catching unselected checkboxes
 
-        sickbeard.NEWZNAB_DATA = '!!!'.join([x.configStr() for x in sickbeard.newznabProviderList])
+        sickbeard.NEWZNAB_DATA = '!!!'.join([x.config_string() for x in sickbeard.newznabProviderList])
 
         sickbeard.PROVIDER_ORDER = provider_list
         sickbeard.save_config()

From 6dacb54a80db856ca2d4bcb2a452df93303f55d7 Mon Sep 17 00:00:00 2001
From: Labrys
Date: Sat, 18 Jun 2016 09:28:13 -0400
Subject: [PATCH 80/85] Fix missed URL join

---
 sickbeard/providers/scc.py | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/sickbeard/providers/scc.py b/sickbeard/providers/scc.py
index 2d04053fa5..5e54cef129 100644
--- a/sickbeard/providers/scc.py
+++ b/sickbeard/providers/scc.py
@@ -44,13 +44,13 @@ def __init__(self):
         self.password = None
 
         # URLs
-        self.url = self.urls['base_url']
+        self.url = 'https://sceneaccess.eu'
         self.urls = {
-            'base_url': 'https://sceneaccess.eu',
-            'login': 'https://sceneaccess.eu/login',
-            'detail': 'https://www.sceneaccess.eu/details?id=%s',
-            'search': 'https://sceneaccess.eu/all?search=%s&method=1&%s',
-            'download': 'https://www.sceneaccess.eu/%s'
+            'base_url': self.url,
+            'login': urljoin(self.url, 'login'),
+            'detail': urljoin(self.url, 'details?id=%s'),
+            'search': urljoin(self.url, 'all?search=%s&method=1&%s'),
+            'download': urljoin(self.url, '%s')
         }
 
         # Proper Strings
From 
d7d9a2b70d47da66aa7fa1d0e92a8900a7e963dd Mon Sep 17 00:00:00 2001 From: Labrys Date: Sat, 18 Jun 2016 09:50:06 -0400 Subject: [PATCH 81/85] Standardize string formatting --- sickbeard/providers/btdigg.py | 4 ++-- sickbeard/providers/btn.py | 18 +++++++++--------- sickbeard/providers/freshontv.py | 2 +- sickbeard/providers/hd4free.py | 3 ++- sickbeard/providers/kat.py | 2 +- sickbeard/providers/limetorrents.py | 2 +- sickbeard/providers/rarbg.py | 2 +- sickbeard/providers/scc.py | 2 +- sickbeard/providers/scenetime.py | 2 +- sickbeard/providers/speedcd.py | 2 +- sickbeard/providers/t411.py | 2 +- sickbeard/providers/thepiratebay.py | 2 +- sickbeard/providers/tokyotoshokan.py | 2 +- sickbeard/providers/torrentbytes.py | 2 +- sickbeard/providers/torrentday.py | 4 ++-- sickbeard/providers/torrentleech.py | 2 +- sickbeard/providers/torrentproject.py | 2 +- sickbeard/providers/torrentz.py | 2 +- sickbeard/providers/transmitthenet.py | 2 +- sickbeard/providers/tvchaosuk.py | 2 +- sickbeard/providers/xthor.py | 2 +- sickbeard/providers/zooqle.py | 4 ++-- 22 files changed, 34 insertions(+), 33 deletions(-) diff --git a/sickbeard/providers/btdigg.py b/sickbeard/providers/btdigg.py index 8e735c3710..d6ca2cb61a 100644 --- a/sickbeard/providers/btdigg.py +++ b/sickbeard/providers/btdigg.py @@ -115,8 +115,8 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1})'.format + logger.log("Discarding torrent because it doesn't meet the " + "minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue diff --git a/sickbeard/providers/btn.py b/sickbeard/providers/btn.py index dce5604685..937b9f8cf6 100644 --- a/sickbeard/providers/btn.py +++ b/sickbeard/providers/btn.py @@ -84,7 +84,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint:disable=too-many if search_strings: search_params.update(search_strings) - logger.log('Search string: %s' % search_strings, logger.DEBUG) + logger.log('Search string: {0}'.format(search_strings), logger.DEBUG) parsed_json = self._api_call(self.apikey, search_params) if not parsed_json: @@ -119,7 +119,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint:disable=too-many (title, url) = self._get_title_and_url(torrent_info) if title and url: - logger.log('Found result: %s ' % title, logger.DEBUG) + logger.log('Found result: {0} '.format(title), logger.DEBUG) results.append(torrent_info) # FIXME SORT RESULTS @@ -137,7 +137,7 @@ def _check_auth_from_data(self, parsed_json): return self._check_auth() if 'api-error' in parsed_json: - logger.log('Incorrect authentication credentials: %s' % parsed_json['api-error'], logger.DEBUG) + logger.log('Incorrect authentication credentials: {0}'.format(parsed_json['api-error']), logger.DEBUG) raise AuthException('Your authentication credentials for {0} are missing,' ' check your config.'.format(self.name)) @@ -268,27 +268,27 @@ def _api_call(self, apikey, params=None, results_per_page=1000, offset=0): parsed_json = server.getTorrents(apikey, params or {}, int(results_per_page), int(offset)) time.sleep(cpu_presets[sickbeard.CPU_PRESET]) - except jsonrpclib.jsonrpc.ProtocolError, error: + except jsonrpclib.jsonrpc.ProtocolError as error: if error.message == 'Call Limit Exceeded': logger.log('You have exceeded the limit of 150 calls per hour,' ' per API key which is unique to your user account', 
logger.WARNING)
             else:
-                logger.log('JSON-RPC protocol error while accessing provicer. Error: %s ' % repr(error), logger.ERROR)
+                logger.log('JSON-RPC protocol error while accessing provider. Error: {msg!r} '.format(msg=error), logger.ERROR)
             parsed_json = {'api-error': ex(error)}
             return parsed_json
 
         except socket.timeout:
             logger.log('Timeout while accessing provider', logger.WARNING)
 
-        except socket.error, error:
+        except socket.error as error:
             # Note that sometimes timeouts are thrown as socket errors
-            logger.log('Socket error while accessing provider. Error: %s ' % error[1], logger.WARNING)
+            logger.log('Socket error while accessing provider. Error: {msg} '.format(msg=error[1]), logger.WARNING)
 
-        except Exception, error:
+        except Exception as error:
             errorstring = str(error)
             if errorstring.startswith('<') and errorstring.endswith('>'):
                 errorstring = errorstring[1:-1]
-            logger.log('Unknown error while accessing provider. Error: %s ' % errorstring, logger.WARNING)
+            logger.log('Unknown error while accessing provider. Error: {msg} '.format(msg=errorstring), logger.WARNING)
 
         return parsed_json
 
diff --git a/sickbeard/providers/freshontv.py b/sickbeard/providers/freshontv.py
index cddf247320..bd0e6834d6 100644
--- a/sickbeard/providers/freshontv.py
+++ b/sickbeard/providers/freshontv.py
@@ -169,7 +169,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man
                     # Filter unseeded torrent
                     if seeders < min(self.minseed, 1):
                         if mode != 'RSS':
-                            logger.log("Discarding torrent because it doesn't meet the"
+                            logger.log("Discarding torrent because it doesn't meet the "
                                        ' minimum seeders: {0}. Seeders: {1}'.format
                                        (title, seeders), logger.DEBUG)
                             continue
diff --git a/sickbeard/providers/hd4free.py b/sickbeard/providers/hd4free.py
index 02e0ba2a28..f367a59a1b 100644
--- a/sickbeard/providers/hd4free.py
+++ b/sickbeard/providers/hd4free.py
@@ -162,7 +162,8 @@ def _check_auth(self):
         if self.username and self.api_key:
             return True
 
-        logger.log('Your authentication credentials for %s are missing, check your config.' % self.name, logger.WARNING)
+        logger.log('Your authentication credentials for {provider} are missing, check your config.'.format
+                   (provider=self.name), logger.WARNING)
         return False
 
 provider = HD4FreeProvider()
diff --git a/sickbeard/providers/kat.py b/sickbeard/providers/kat.py
index c9d6d3d704..b44a012375 100644
--- a/sickbeard/providers/kat.py
+++ b/sickbeard/providers/kat.py
@@ -122,7 +122,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man
                     # Filter unseeded torrent
                     if seeders < min(self.minseed, 1):
                         if mode != 'RSS':
-                            logger.log("Discarding torrent because it doesn't meet the"
+                            logger.log("Discarding torrent because it doesn't meet the "
                                        ' minimum seeders: {0}. Seeders: {1}'.format
                                        (title, seeders), logger.DEBUG)
                             continue
diff --git a/sickbeard/providers/limetorrents.py b/sickbeard/providers/limetorrents.py
index 1174464701..5af0c7f0e4 100644
--- a/sickbeard/providers/limetorrents.py
+++ b/sickbeard/providers/limetorrents.py
@@ -156,7 +156,7 @@ def parse(self, data, mode):
 
                     if seeders < min(self.minseed, 1):
                         if mode != 'RSS':
-                            logger.log("Discarding torrent because it doesn't meet the"
+                            logger.log("Discarding torrent because it doesn't meet the "
                                        ' minimum seeders: {0}. 
Seeders: {1}'.format (title, seeders), logger.DEBUG) continue diff --git a/sickbeard/providers/rarbg.py b/sickbeard/providers/rarbg.py index 618f408065..c4e8f5f85a 100644 --- a/sickbeard/providers/rarbg.py +++ b/sickbeard/providers/rarbg.py @@ -163,7 +163,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" + logger.log("Discarding torrent because it doesn't meet the " " minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue diff --git a/sickbeard/providers/scc.py b/sickbeard/providers/scc.py index 5e54cef129..6d66f0df10 100644 --- a/sickbeard/providers/scc.py +++ b/sickbeard/providers/scc.py @@ -120,7 +120,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" + logger.log("Discarding torrent because it doesn't meet the " "minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue diff --git a/sickbeard/providers/scenetime.py b/sickbeard/providers/scenetime.py index 1299c771d9..0f0ae55abb 100644 --- a/sickbeard/providers/scenetime.py +++ b/sickbeard/providers/scenetime.py @@ -130,7 +130,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" + logger.log("Discarding torrent because it doesn't meet the " "minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue diff --git a/sickbeard/providers/speedcd.py b/sickbeard/providers/speedcd.py index aa9e0458b9..1e23c08796 100644 --- a/sickbeard/providers/speedcd.py +++ b/sickbeard/providers/speedcd.py @@ -142,7 +142,7 @@ def process_column_header(td): # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" + logger.log("Discarding torrent because it doesn't meet the " "minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue diff --git a/sickbeard/providers/t411.py b/sickbeard/providers/t411.py index 4ce84bb94c..712aecb5bf 100644 --- a/sickbeard/providers/t411.py +++ b/sickbeard/providers/t411.py @@ -127,7 +127,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" + logger.log("Discarding torrent because it doesn't meet the " "minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue diff --git a/sickbeard/providers/thepiratebay.py b/sickbeard/providers/thepiratebay.py index eefdce7eef..3e02992e5c 100644 --- a/sickbeard/providers/thepiratebay.py +++ b/sickbeard/providers/thepiratebay.py @@ -144,7 +144,7 @@ def process_column_header(th): # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" + logger.log("Discarding torrent because it doesn't meet the " "minimum seeders: {0}. 
diff --git a/sickbeard/providers/tokyotoshokan.py b/sickbeard/providers/tokyotoshokan.py
index 6e71464498..09809ef52c 100644
--- a/sickbeard/providers/tokyotoshokan.py
+++ b/sickbeard/providers/tokyotoshokan.py
@@ -121,7 +121,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                 # Filter unseeded torrent
                 if seeders < min(self.minseed, 1):
                     if mode != 'RSS':
-                        logger.log("Discarding torrent because it doesn't meet the"
+                        logger.log("Discarding torrent because it doesn't meet the "
                                    "minimum seeders: {0}. Seeders: {1}".format
                                    (title, seeders), logger.DEBUG)
                         continue
diff --git a/sickbeard/providers/torrentbytes.py b/sickbeard/providers/torrentbytes.py
index 5b580f3fe3..37b3438424 100644
--- a/sickbeard/providers/torrentbytes.py
+++ b/sickbeard/providers/torrentbytes.py
@@ -129,7 +129,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                 # Filter unseeded torrent
                 if seeders < min(self.minseed, 1):
                     if mode != 'RSS':
-                        logger.log("Discarding torrent because it doesn't meet the"
+                        logger.log("Discarding torrent because it doesn't meet the "
                                    "minimum seeders: {0}. Seeders: {1}".format
                                    (title, seeders), logger.DEBUG)
                         continue
diff --git a/sickbeard/providers/torrentday.py b/sickbeard/providers/torrentday.py
index 829ca1660f..450705b96e 100644
--- a/sickbeard/providers/torrentday.py
+++ b/sickbeard/providers/torrentday.py
@@ -121,7 +121,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                 # Filter unseeded torrent
                 if seeders < min(self.minseed, 1):
                     if mode != 'RSS':
-                        logger.log("Discarding torrent because it doesn't meet the"
+                        logger.log("Discarding torrent because it doesn't meet the "
                                    "minimum seeders: {0}. Seeders: {1}".format
                                    (title, seeders), logger.DEBUG)
                         continue
@@ -136,7 +136,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                         'seeders': seeders,
                         'leechers': leechers,
                         'pubdate': None,
-                         'hash': None,
+                        'hash': None,
                     }
                     if mode != 'RSS':
                         logger.log('Found result: {0} with {1} seeders and {2} leechers'.format
diff --git a/sickbeard/providers/torrentleech.py b/sickbeard/providers/torrentleech.py
index b5ba0be432..ccf47c7801 100644
--- a/sickbeard/providers/torrentleech.py
+++ b/sickbeard/providers/torrentleech.py
@@ -140,7 +140,7 @@ def process_column_header(td):
                 # Filter unseeded torrent
                 if seeders < min(self.minseed, 1):
                     if mode != 'RSS':
-                        logger.log("Discarding torrent because it doesn't meet the"
+                        logger.log("Discarding torrent because it doesn't meet the "
                                    " minimum seeders: {0}. Seeders: {1}".format
                                    (title, seeders), logger.DEBUG)
                         continue
diff --git a/sickbeard/providers/torrentproject.py b/sickbeard/providers/torrentproject.py
index 0f4ef98ed2..cde2f11b8e 100644
--- a/sickbeard/providers/torrentproject.py
+++ b/sickbeard/providers/torrentproject.py
@@ -103,7 +103,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                 # Filter unseeded torrent
                 if seeders < min(self.minseed, 1):
                     if mode != 'RSS':
-                        logger.log("Discarding torrent because it doesn't meet the"
+                        logger.log("Discarding torrent because it doesn't meet the "
                                    "minimum seeders: {0}. Seeders: {1}".format
                                    (title, seeders), logger.DEBUG)
                         continue
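
For context on the condition these hunks all sit inside: the effective threshold is min(minseed, 1), so any torrent with at least one seeder passes no matter how high the configured minimum. A small sketch of that behaviour; the function name and values are illustrative, not from the providers:

    def discard(seeders, minseed):
        # The threshold is the smaller of minseed and 1.
        return seeders < min(minseed, 1)

    assert discard(0, 3) is True    # zero seeders: dropped
    assert discard(1, 3) is False   # one seeder passes, min(3, 1) == 1
    assert discard(5, 0) is False   # min(0, 1) == 0, nothing falls below it
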
diff --git a/sickbeard/providers/torrentz.py b/sickbeard/providers/torrentz.py
index 267d430496..c296e64a20 100644
--- a/sickbeard/providers/torrentz.py
+++ b/sickbeard/providers/torrentz.py
@@ -102,7 +102,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                 # Filter unseeded torrent
                 if seeders < min(self.minseed, 1):
                     if mode != 'RSS':
-                        logger.log("Discarding torrent because it doesn't meet the"
+                        logger.log("Discarding torrent because it doesn't meet the "
                                    "minimum seeders: {0}. Seeders: {1}".format
                                    (title, seeders), logger.DEBUG)
                         continue
diff --git a/sickbeard/providers/transmitthenet.py b/sickbeard/providers/transmitthenet.py
index c36fed6bd4..3c676c8253 100644
--- a/sickbeard/providers/transmitthenet.py
+++ b/sickbeard/providers/transmitthenet.py
@@ -142,7 +142,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                 # Filter unseeded torrent
                 if seeders < min(self.minseed, 1):
                     if mode != 'RSS':
-                        logger.log("Discarding torrent because it doesn't meet the"
+                        logger.log("Discarding torrent because it doesn't meet the "
                                    " minimum seeders: {0}. Seeders: {1}".format
                                    (title, seeders), logger.DEBUG)
                         continue
diff --git a/sickbeard/providers/tvchaosuk.py b/sickbeard/providers/tvchaosuk.py
index ab39816ce4..79f7f4570f 100644
--- a/sickbeard/providers/tvchaosuk.py
+++ b/sickbeard/providers/tvchaosuk.py
@@ -133,7 +133,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                 # Filter unseeded torrent
                 if seeders < min(self.minseed, 1):
                     if mode != 'RSS':
-                        logger.log("Discarding torrent because it doesn't meet the"
+                        logger.log("Discarding torrent because it doesn't meet the "
                                    "minimum seeders: {0}. Seeders: {1}".format
                                    (title, seeders), logger.DEBUG)
                         continue
diff --git a/sickbeard/providers/xthor.py b/sickbeard/providers/xthor.py
index 01dd6c41b6..76b7de7f9f 100644
--- a/sickbeard/providers/xthor.py
+++ b/sickbeard/providers/xthor.py
@@ -152,7 +152,7 @@ def process_column_header(td):
                 # Filter unseeded torrent
                 if seeders < min(self.minseed, 1):
                     if mode != 'RSS':
-                        logger.log("Discarding torrent because it doesn't meet the"
+                        logger.log("Discarding torrent because it doesn't meet the "
                                    "minimum seeders: {0}. Seeders: {1}".format
                                    (title, seeders), logger.DEBUG)
                         continue
diff --git a/sickbeard/providers/zooqle.py b/sickbeard/providers/zooqle.py
index 5a2a4f2ea3..9c71949197 100644
--- a/sickbeard/providers/zooqle.py
+++ b/sickbeard/providers/zooqle.py
@@ -112,7 +112,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                         title = cells[1].find('a').get_text()
                         magnet = cells[2].find('a')['href']
                         download_url = '{magnet}{trackers}'.format(magnet=magnet,
-                                                                    trackers=self._custom_trackers)
+                                                                   trackers=self._custom_trackers)
                         if not all([title, download_url]):
                             continue
@@ -127,7 +127,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                 # Filter unseeded torrent
                 if seeders < min(self.minseed, 1):
                     if mode != 'RSS':
-                        logger.log("Discarding torrent because it doesn't meet the"
+                        logger.log("Discarding torrent because it doesn't meet the "
                                    "minimum seeders: {0}. Seeders: {1}".format
                                    (title, seeders), logger.DEBUG)
                         continue
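
The zooqle alignment hunk above also touches the download URL assembly, which simply appends the configured tracker parameters to the magnet link. A sketch of that assembly; the hash and tracker string are made up:

    magnet = 'magnet:?xt=urn:btih:0000000000000000000000000000000000000000'
    custom_trackers = '&tr=udp://tracker.example.org:1337/announce'
    download_url = '{magnet}{trackers}'.format(magnet=magnet,
                                               trackers=custom_trackers)
    print(download_url)
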
From 3259a5efb161fd31e7aa5b464e13346ec0f048e6 Mon Sep 17 00:00:00 2001
From: medariox
Date: Sat, 18 Jun 2016 16:20:11 +0200
Subject: [PATCH 82/85] Last small changes

---
 sickbeard/providers/btn.py            | 8 ++++----
 sickbeard/providers/freshontv.py      | 2 +-
 sickbeard/providers/kat.py            | 2 +-
 sickbeard/providers/limetorrents.py   | 2 +-
 sickbeard/providers/rarbg.py          | 2 +-
 sickbeard/providers/torrentday.py     | 2 +-
 sickbeard/providers/torrentleech.py   | 2 +-
 sickbeard/providers/torrentproject.py | 2 +-
 sickbeard/providers/transmitthenet.py | 2 +-
 9 files changed, 12 insertions(+), 12 deletions(-)

diff --git a/sickbeard/providers/btn.py b/sickbeard/providers/btn.py
index 937b9f8cf6..61b958745c 100644
--- a/sickbeard/providers/btn.py
+++ b/sickbeard/providers/btn.py
@@ -119,7 +119,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint:disable=too-many
 
             (title, url) = self._get_title_and_url(torrent_info)
             if title and url:
-                logger.log('Found result: {0} '.format(title), logger.DEBUG)
+                logger.log('Found result: {0}'.format(title), logger.DEBUG)
                 results.append(torrent_info)
 
         # FIXME SORT RESULTS
@@ -273,7 +273,7 @@ def _api_call(self, apikey, params=None, results_per_page=1000, offset=0):
                     logger.log('You have exceeded the limit of 150 calls per hour,'
                                ' per API key which is unique to your user account', logger.WARNING)
                 else:
-                    logger.log('JSON-RPC protocol error while accessing provider. Error: {msg!r} '.format(msg=error), logger.ERROR)
+                    logger.log('JSON-RPC protocol error while accessing provider. Error: {msg!r}'.format(msg=error), logger.ERROR)
                 parsed_json = {'api-error': ex(error)}
                 return parsed_json
 
@@ -282,13 +282,13 @@ def _api_call(self, apikey, params=None, results_per_page=1000, offset=0):
         except socket.timeout:
             logger.log('Timeout while accessing provider', logger.WARNING)
         except socket.error as error:
             # Note that sometimes timeouts are thrown as socket errors
-            logger.log('Socket error while accessing provider. Error: {msg} '.format(error[1]), logger.WARNING)
+            logger.log('Socket error while accessing provider. Error: {msg}'.format(msg=error[1]), logger.WARNING)
 
         except Exception as error:
             errorstring = str(error)
             if errorstring.startswith('<') and errorstring.endswith('>'):
                 errorstring = errorstring[1:-1]
-            logger.log('Unknown error while accessing provider. Error: {msg} '.format(msg=errorstring), logger.WARNING)
+            logger.log('Unknown error while accessing provider. Error: {msg}'.format(msg=errorstring), logger.WARNING)
 
         return parsed_json
diff --git a/sickbeard/providers/freshontv.py b/sickbeard/providers/freshontv.py
index bd0e6834d6..6dd3335295 100644
--- a/sickbeard/providers/freshontv.py
+++ b/sickbeard/providers/freshontv.py
@@ -170,7 +170,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                     if seeders < min(self.minseed, 1):
                         if mode != 'RSS':
                             logger.log("Discarding torrent because it doesn't meet the "
-                                       ' minimum seeders: {0}. Seeders: {1}'.format
+                                       "minimum seeders: {0}. Seeders: {1}".format
                                        (title, seeders), logger.DEBUG)
                             continue
diff --git a/sickbeard/providers/kat.py b/sickbeard/providers/kat.py
index b44a012375..13cbcd0a32 100644
--- a/sickbeard/providers/kat.py
+++ b/sickbeard/providers/kat.py
@@ -123,7 +123,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                     if seeders < min(self.minseed, 1):
                         if mode != 'RSS':
                             logger.log("Discarding torrent because it doesn't meet the "
-                                       ' minimum seeders: {0}. Seeders: {1}'.format
+                                       "minimum seeders: {0}. Seeders: {1}".format
                                        (title, seeders), logger.DEBUG)
                             continue
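
Besides trimming trailing spaces, the btn.py hunks above fix a latent crash: str.format binds named placeholders by keyword, so the old ".format(error[1])" call raised KeyError the moment the socket-error path ran. An illustration with made-up values:

    template = 'Socket error while accessing provider. Error: {msg}'
    try:
        template.format('timed out')          # the pre-patch call shape
    except KeyError as error:
        print('format() found no value for', error)   # KeyError: 'msg'
    print(template.format(msg='timed out'))   # the patched call shape
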
diff --git a/sickbeard/providers/limetorrents.py b/sickbeard/providers/limetorrents.py
index 5af0c7f0e4..97f0b83ace 100644
--- a/sickbeard/providers/limetorrents.py
+++ b/sickbeard/providers/limetorrents.py
@@ -157,7 +157,7 @@ def parse(self, data, mode):
             if seeders < min(self.minseed, 1):
                 if mode != 'RSS':
                     logger.log("Discarding torrent because it doesn't meet the "
-                               ' minimum seeders: {0}. Seeders: {1}'.format
+                               "minimum seeders: {0}. Seeders: {1}".format
                                (title, seeders), logger.DEBUG)
                     continue
diff --git a/sickbeard/providers/rarbg.py b/sickbeard/providers/rarbg.py
index c4e8f5f85a..7261c49f98 100644
--- a/sickbeard/providers/rarbg.py
+++ b/sickbeard/providers/rarbg.py
@@ -164,7 +164,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                     if seeders < min(self.minseed, 1):
                         if mode != 'RSS':
                             logger.log("Discarding torrent because it doesn't meet the "
-                                       " minimum seeders: {0}. Seeders: {1}".format
+                                       "minimum seeders: {0}. Seeders: {1}".format
                                        (title, seeders), logger.DEBUG)
                             continue
diff --git a/sickbeard/providers/torrentday.py b/sickbeard/providers/torrentday.py
index 450705b96e..45848cf5ef 100644
--- a/sickbeard/providers/torrentday.py
+++ b/sickbeard/providers/torrentday.py
@@ -122,7 +122,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                     if seeders < min(self.minseed, 1):
                         if mode != 'RSS':
                             logger.log("Discarding torrent because it doesn't meet the "
-                                        "minimum seeders: {0}. Seeders: {1}".format
+                                       "minimum seeders: {0}. Seeders: {1}".format
                                        (title, seeders), logger.DEBUG)
                             continue
diff --git a/sickbeard/providers/torrentleech.py b/sickbeard/providers/torrentleech.py
index ccf47c7801..7341eda9dd 100644
--- a/sickbeard/providers/torrentleech.py
+++ b/sickbeard/providers/torrentleech.py
@@ -141,7 +141,7 @@ def process_column_header(td):
                     if seeders < min(self.minseed, 1):
                         if mode != 'RSS':
                             logger.log("Discarding torrent because it doesn't meet the "
-                                       " minimum seeders: {0}. Seeders: {1}".format
+                                       "minimum seeders: {0}. Seeders: {1}".format
                                        (title, seeders), logger.DEBUG)
                             continue
diff --git a/sickbeard/providers/torrentproject.py b/sickbeard/providers/torrentproject.py
index cde2f11b8e..67e2f9bb1e 100644
--- a/sickbeard/providers/torrentproject.py
+++ b/sickbeard/providers/torrentproject.py
@@ -104,7 +104,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                     if seeders < min(self.minseed, 1):
                         if mode != 'RSS':
                             logger.log("Discarding torrent because it doesn't meet the "
-                                        "minimum seeders: {0}. Seeders: {1}".format
+                                       "minimum seeders: {0}. Seeders: {1}".format
                                        (title, seeders), logger.DEBUG)
                             continue
diff --git a/sickbeard/providers/transmitthenet.py b/sickbeard/providers/transmitthenet.py
index 3c676c8253..9dc6c5fa5a 100644
--- a/sickbeard/providers/transmitthenet.py
+++ b/sickbeard/providers/transmitthenet.py
@@ -143,7 +143,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                     if seeders < min(self.minseed, 1):
                         if mode != 'RSS':
                             logger.log("Discarding torrent because it doesn't meet the "
-                                       " minimum seeders: {0}. Seeders: {1}".format
+                                       "minimum seeders: {0}. Seeders: {1}".format
                                        (title, seeders), logger.DEBUG)
                             continue
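
The patch-82 hunks above finish what patch 81 started: once the separating space moved to the end of the first literal, a leftover leading space on the second literal would produce a double space in the logged message. Illustrative strings only:

    before = ("Discarding torrent because it doesn't meet the "
              " minimum seeders: {0}. Seeders: {1}")
    after = ("Discarding torrent because it doesn't meet the "
             "minimum seeders: {0}. Seeders: {1}")
    assert '  ' in before       # 'the  minimum': two spaces at the join
    assert '  ' not in after
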
From 97547ac3e9dfa8d20b26b94b3b02d6e5c8d467d4 Mon Sep 17 00:00:00 2001
From: medariox
Date: Sat, 18 Jun 2016 16:22:27 +0200
Subject: [PATCH 83/85] Change TPB url, update cache to 20 min

---
 sickbeard/providers/thepiratebay.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/sickbeard/providers/thepiratebay.py b/sickbeard/providers/thepiratebay.py
index 3e02992e5c..9c4556eeab 100644
--- a/sickbeard/providers/thepiratebay.py
+++ b/sickbeard/providers/thepiratebay.py
@@ -42,7 +42,7 @@ def __init__(self):
         self.public = True
 
         # URLs
-        self.url = 'https://thepiratebay.se'
+        self.url = 'https://thepiratebay.org'
         self.urls = {
             'rss': urljoin(self.url, 'tv/latest'),
             'search': urljoin(self.url, 's/'),  # Needs trailing /
@@ -59,7 +59,7 @@ def __init__(self):
         self.minleech = None
 
         # Cache
-        self.cache = tvcache.TVCache(self, min_time=1)  # only poll ThePirateBay every 30 minutes max
+        self.cache = tvcache.TVCache(self, min_time=20)  # only poll ThePirateBay every 20 minutes max
 
     def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-many-locals, too-many-branches, too-many-statements
         results = []

From fbffd218ad1c9cbdce3043dcd6b192cb01031b97 Mon Sep 17 00:00:00 2001
From: Labrys of Knossos
Date: Sat, 18 Jun 2016 10:28:14 -0400
Subject: [PATCH 84/85] More providers (#698)

* Remove redundant character escapes
* Use augmented assignment
* Fix indentation
* Use six.iteritems for py3 compatibility
---
 sickbeard/providers/alpharatio.py     | 4 ++--
 sickbeard/providers/btn.py            | 9 +++++----
 sickbeard/providers/iptorrents.py     | 2 +-
 sickbeard/providers/tntvillage.py     | 2 +-
 sickbeard/providers/torrentday.py     | 2 +-
 sickbeard/providers/torrentproject.py | 2 +-
 6 files changed, 11 insertions(+), 10 deletions(-)

diff --git a/sickbeard/providers/alpharatio.py b/sickbeard/providers/alpharatio.py
index 0f5934c380..4ea71a35ec 100644
--- a/sickbeard/providers/alpharatio.py
+++ b/sickbeard/providers/alpharatio.py
@@ -190,8 +190,8 @@ def login(self):
             logger.log('Unable to connect to provider', logger.WARNING)
             return False
 
-        if re.search('Invalid Username/password', response) \
-                or re.search('Login :: AlphaRatio.cc', response):
+        if any([re.search('Invalid Username/password', response),
+                re.search('Login :: AlphaRatio.cc', response)]):
             logger.log('Invalid username or password. Check your settings', logger.WARNING)
             return False
 
diff --git a/sickbeard/providers/btn.py b/sickbeard/providers/btn.py
index 61b958745c..73d3caa412 100644
--- a/sickbeard/providers/btn.py
+++ b/sickbeard/providers/btn.py
@@ -22,8 +22,9 @@
 import math
 import socket
 import time
-import sickbeard
+from six import iteritems
 
+import sickbeard
 from datetime import datetime
 
 from sickbeard import classes, logger, scene_exceptions, tvcache
@@ -110,12 +111,12 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint:disable=too-many
             # +1 because range(1,4) = 1, 2, 3
             for page in range(1, pages_needed + 1):
                 parsed_json = self._api_call(self.apikey, search_params, results_per_page, page * results_per_page)
-                # Note that this these are individual requests and might time out individually. This would result in 'gaps'
-                # in the results. There is no way to fix this though.
+                # Note that these are individual requests and might time out individually.
+                # This would result in 'gaps' in the results. There is no way to fix this though.
                 if 'torrents' in parsed_json:
                     found_torrents.update(parsed_json['torrents'])
 
-            for _, torrent_info in found_torrents.iteritems():
+            for _, torrent_info in iteritems(found_torrents):
                 (title, url) = self._get_title_and_url(torrent_info)
 
                 if title and url:
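
The btn.py import hunk above prepares the iteritems swap: six.iteritems dispatches to dict.iteritems() on py2 and dict.items() on py3, so the loop works under both interpreters. A runnable sketch; the torrent payload is made up, not taken from the BTN API:

    from six import iteritems

    found_torrents = {'12345': {'GroupName': 'Example.S01E01.720p'}}
    for torrent_id, torrent_info in iteritems(found_torrents):
        print(torrent_id, torrent_info['GroupName'])
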
diff --git a/sickbeard/providers/iptorrents.py b/sickbeard/providers/iptorrents.py
index 788339e9ac..e578078cf9 100644
--- a/sickbeard/providers/iptorrents.py
+++ b/sickbeard/providers/iptorrents.py
@@ -90,7 +90,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                     logger.log('No data returned from provider', logger.DEBUG)
                     continue
 
-                data = re.sub(r'(?im)', '', data, 0)
+                data = re.sub(r'(?im)', '', data, 0)
                 with BS4Parser(data, 'html5lib') as html:
                     torrent_table = html.find('table', attrs={'class': 'torrents'})
                     torrents = torrent_table('tr') if torrent_table else []
diff --git a/sickbeard/providers/tntvillage.py b/sickbeard/providers/tntvillage.py
index 773d9cbaaa..6b5264be72 100644
--- a/sickbeard/providers/tntvillage.py
+++ b/sickbeard/providers/tntvillage.py
@@ -255,7 +255,7 @@ def _has_only_subs(title):
         counter = 0
         for word in title:
             if 'ita' in word:
-                counter = counter + 1
+                counter += 1
 
         if counter < 2:
             return True
diff --git a/sickbeard/providers/torrentday.py b/sickbeard/providers/torrentday.py
index 45848cf5ef..c9e5825b60 100644
--- a/sickbeard/providers/torrentday.py
+++ b/sickbeard/providers/torrentday.py
@@ -110,7 +110,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
             for torrent in torrents:
 
                 try:
-                    title = re.sub(r'\[.*\=.*\].*\[/.*\]', '', torrent['name']) if torrent['name'] else None
+                    title = re.sub(r'\[.*=.*\].*\[/.*\]', '', torrent['name']) if torrent['name'] else None
                     download_url = urljoin(self.urls['download'], '{}/{}'.format(torrent['id'], torrent['fname'])) if torrent['id'] and torrent['fname'] else None
                     if not all([title, download_url]):
                         continue
diff --git a/sickbeard/providers/torrentproject.py b/sickbeard/providers/torrentproject.py
index 67e2f9bb1e..051a7b5d01 100644
--- a/sickbeard/providers/torrentproject.py
+++ b/sickbeard/providers/torrentproject.py
@@ -119,7 +119,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                     'seeders': seeders,
                     'leechers': leechers,
                     'pubdate': None,
-                     'hash': None,
+                    'hash': None,
                 }
                 if mode != 'RSS':
                     logger.log('Found result: {0} with {1} seeders and {2} leechers'.format
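
Among the patch-84 cleanups above, the torrentday change drops a redundant escape: '=' has no special meaning in a regular expression, so the escaped and unescaped spellings compile to the same pattern. A sketch with a made-up torrent name:

    import re

    name = '[url=http://example.org]Show.S01E01[/url]'
    old = re.sub(r'\[.*\=.*\].*\[/.*\]', '', name)  # redundant escape
    new = re.sub(r'\[.*=.*\].*\[/.*\]', '', name)   # patched pattern
    assert old == new == ''
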
From 40c958115dc947238a7bc3d186bc53b9a2cceb1e Mon Sep 17 00:00:00 2001
From: medariox
Date: Sat, 18 Jun 2016 16:47:03 +0200
Subject: [PATCH 85/85] Store hash for torrentproject

---
 sickbeard/providers/torrentproject.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/sickbeard/providers/torrentproject.py b/sickbeard/providers/torrentproject.py
index 051a7b5d01..8426e64c02 100644
--- a/sickbeard/providers/torrentproject.py
+++ b/sickbeard/providers/torrentproject.py
@@ -108,9 +108,9 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                             (title, seeders), logger.DEBUG)
                         continue
 
-                torrent_hash = torrents[result].get('torrent_hash')
                 torrent_size = torrents[result].get('torrent_size')
                 size = convert_size(torrent_size) or -1
+                torrent_hash = torrents[result].get('torrent_hash')
 
                 item = {
                     'title': title,
@@ -119,7 +119,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                     'seeders': seeders,
                     'leechers': leechers,
                     'pubdate': None,
-                    'hash': None,
+                    'hash': torrent_hash,
                 }
                 if mode != 'RSS':
                     logger.log('Found result: {0} with {1} seeders and {2} leechers'.format
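
In miniature, what patch 85 changes for downstream consumers: the result item now carries the info hash pulled from the API payload instead of a hard-coded None. All values below are made up for illustration:

    torrents = {'0': {'torrent_hash': 'c12fe1c06bba254a9dc9f519b335aa7c',
                      'torrent_size': '1565515817'}}
    result = '0'
    torrent_hash = torrents[result].get('torrent_hash')

    item = {
        'title': 'Example.S01E01.720p',
        'link': 'http://example.org/torrent/0',
        'size': int(torrents[result].get('torrent_size')),
        'seeders': 10,
        'leechers': 2,
        'pubdate': None,
        'hash': torrent_hash,  # was: 'hash': None
    }
    print(item['hash'])
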