From 242a3a45661053e4324c9d51f4eaefdfe1a8762e Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sun, 22 Jul 2018 10:39:21 +0300 Subject: [PATCH 01/51] Add flake8-quotes --- pytest.ini | 3 +++ setup.py | 1 + 2 files changed, 4 insertions(+) diff --git a/pytest.ini b/pytest.ini index 4f5847f9a4..6aef014f2e 100644 --- a/pytest.ini +++ b/pytest.ini @@ -1,6 +1,9 @@ [pytest] import-order-style = cryptography flake8-max-line-length = 160 +inline-quotes = single +multiline-quotes = double +docstring-quotes = double flake8-ignore = D107 medusa/__init__.py D104 F401 diff --git a/setup.py b/setup.py index bea45ac8b9..656e7f1500 100644 --- a/setup.py +++ b/setup.py @@ -42,6 +42,7 @@ def run_tests(self): 'flake8', 'flake8-docstrings', 'flake8-import-order', + 'flake8-quotes', 'pep8-naming', 'pycodestyle==2.3.1', 'pytest', From 1e6a7cee9280f11d07c576b68aa85648729ea92e Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sun, 22 Jul 2018 12:15:44 +0300 Subject: [PATCH 02/51] Convert `pytest.ini` to `setup.cfg`, fix options From [pytest-flake8](https://github.com/tholo/pytest-flake8/blob/1.0.1/README.rst): > If optional flake8 plugins are installed, those will be used automatically. No provisions have been made for configuring these via pytest. For the record, the `import-order-style` option also didn't work. `cryptography` is just the default value. --- pytest.ini => setup.cfg | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) rename pytest.ini => setup.cfg (98%) diff --git a/pytest.ini b/setup.cfg similarity index 98% rename from pytest.ini rename to setup.cfg index 6aef014f2e..98cae5876a 100644 --- a/pytest.ini +++ b/setup.cfg @@ -1,10 +1,14 @@ -[pytest] +[flake8] +; flake8-import-order import-order-style = cryptography -flake8-max-line-length = 160 +; flake8-quotes inline-quotes = single multiline-quotes = double docstring-quotes = double -flake8-ignore = +; flake8 +max-line-length = 160 +ignore = + ; Error codes reference: https://git.io/fNlTP D107 medusa/__init__.py D104 F401 medusa/bs4_parser.py D100 D101 D102 D105 From 47ce67d77a0686b87e0e58c8c2ff4b10ebbdcec7 Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sun, 22 Jul 2018 12:16:12 +0300 Subject: [PATCH 03/51] Ignore Q002 (Handled by flake8-docstrings) --- setup.cfg | 2 ++ 1 file changed, 2 insertions(+) diff --git a/setup.cfg b/setup.cfg index 98cae5876a..c8c579ecf3 100644 --- a/setup.cfg +++ b/setup.cfg @@ -10,6 +10,8 @@ max-line-length = 160 ignore = ; Error codes reference: https://git.io/fNlTP D107 + ; Q002: Handled by flake8-docstrings + Q002 medusa/__init__.py D104 F401 medusa/bs4_parser.py D100 D101 D102 D105 medusa/cache.py D401 E305 From b0a561cd4946f246ca3d0a7f81cdbea0ec765902 Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sun, 22 Jul 2018 12:21:54 +0300 Subject: [PATCH 04/51] medusa/db.py --- medusa/db.py | 80 ++++++++++++++++++++++++++-------------------------- 1 file changed, 40 insertions(+), 40 deletions(-) diff --git a/medusa/db.py b/medusa/db.py index 5b12f8e1ef..53a0d213e7 100644 --- a/medusa/db.py +++ b/medusa/db.py @@ -69,7 +69,7 @@ def __init__(self, filename=None, suffix=None, row_type=None): logger.log(u'Please check your database owner/permissions: {}'.format( self.path, logger.WARNING)) except Exception as e: - logger.log(u"DB error: " + ex(e), logger.ERROR) + logger.log(u'DB error: ' + ex(e), logger.ERROR) raise @property @@ -92,7 +92,7 @@ def _set_row_factory(self): once lock is aquired we can configure the connection for this particular instance of DBConnection """ - if self.row_type == "dict": + if self.row_type == 
'dict': self.connection.row_factory = DBConnection._dict_factory else: self.connection.row_factory = sqlite3.Row @@ -155,7 +155,7 @@ def check_db_major_version(self): try: if self.hasTable('db_version'): - result = self.select("SELECT db_version FROM db_version") + result = self.select('SELECT db_version FROM db_version') except sqlite3.OperationalError: return None @@ -174,7 +174,7 @@ def check_db_minor_version(self): try: if self.hasColumn('db_version', 'db_minor_version'): - result = self.select("SELECT db_minor_version FROM db_version") + result = self.select('SELECT db_minor_version FROM db_version') except sqlite3.OperationalError: return None @@ -217,27 +217,27 @@ def mass_action(self, querylist=None, logTransaction=False, fetchall=False): sql_results.append(self._execute(qu[0], fetchall=fetchall)) elif len(qu) > 1: if logTransaction: - logger.log(qu[0] + " with args " + str(qu[1]), logger.DEBUG) + logger.log(qu[0] + ' with args ' + str(qu[1]), logger.DEBUG) sql_results.append(self._execute(qu[0], qu[1], fetchall=fetchall)) self.connection.commit() - logger.log(u"Transaction with " + str(len(sql_results)) + u" queries executed", logger.DEBUG) + logger.log(u'Transaction with ' + str(len(sql_results)) + u' queries executed', logger.DEBUG) # finished break except sqlite3.OperationalError as e: sql_results = [] self._try_rollback() - if "unable to open database file" in e.args[0] or "database is locked" in e.args[0]: - logger.log(u"DB error: " + ex(e), logger.WARNING) + if 'unable to open database file' in e.args[0] or 'database is locked' in e.args[0]: + logger.log(u'DB error: ' + ex(e), logger.WARNING) attempt += 1 time.sleep(1) else: - logger.log(u"DB error: " + ex(e), logger.ERROR) + logger.log(u'DB error: ' + ex(e), logger.ERROR) raise except sqlite3.DatabaseError as e: sql_results = [] self._try_rollback() - logger.log(u"Fatal error executing query: " + ex(e), logger.ERROR) + logger.log(u'Fatal error executing query: ' + ex(e), logger.ERROR) raise # time.sleep(0.02) @@ -252,9 +252,9 @@ def _try_rollback(self): except sqlite3.OperationalError as error: # See https://github.com/pymedusa/Medusa/issues/3190 if 'no transaction is active' in error.args[0]: - logger.log("Rollback not needed, skipping", logger.DEBUG) + logger.log('Rollback not needed, skipping', logger.DEBUG) else: - logger.log("Failed to perform rollback: {error!r}".format(error=error), logger.ERROR) + logger.log('Failed to perform rollback: {error!r}'.format(error=error), logger.ERROR) def action(self, query, args=None, fetchall=False, fetchone=False): """ @@ -277,9 +277,9 @@ def action(self, query, args=None, fetchall=False, fetchone=False): while attempt < 5: try: if args is None: - logger.log(self.filename + ": " + query, logger.DB) + logger.log(self.filename + ': ' + query, logger.DB) else: - logger.log(self.filename + ": " + query + " with args " + str(args), logger.DB) + logger.log(self.filename + ': ' + query + ' with args ' + str(args), logger.DB) sql_results = self._execute(query, args, fetchall=fetchall, fetchone=fetchone) self.connection.commit() @@ -287,15 +287,15 @@ def action(self, query, args=None, fetchall=False, fetchone=False): # get out of the connection attempt loop since we were successful break except sqlite3.OperationalError as e: - if "unable to open database file" in e.args[0] or "database is locked" in e.args[0]: - logger.log(u"DB error: " + ex(e), logger.WARNING) + if 'unable to open database file' in e.args[0] or 'database is locked' in e.args[0]: + logger.log(u'DB error: ' + ex(e), 
logger.WARNING) attempt += 1 time.sleep(1) else: - logger.log(u"DB error: " + ex(e), logger.ERROR) + logger.log(u'DB error: ' + ex(e), logger.ERROR) raise except sqlite3.DatabaseError as e: - logger.log(u"Fatal error executing query: " + ex(e), logger.ERROR) + logger.log(u'Fatal error executing query: ' + ex(e), logger.ERROR) raise # time.sleep(0.02) @@ -346,16 +346,16 @@ def upsert(self, tableName, valueDict, keyDict): changesBefore = self.connection.total_changes def gen_params(my_dict): - return [x + " = ?" for x in my_dict] + return [x + ' = ?' for x in my_dict] - query = "UPDATE [" + tableName + "] SET " + ", ".join(gen_params(valueDict)) + " WHERE " + " AND ".join( + query = 'UPDATE [' + tableName + '] SET ' + ', '.join(gen_params(valueDict)) + ' WHERE ' + ' AND '.join( gen_params(keyDict)) self.action(query, list(itervalues(valueDict)) + list(itervalues(keyDict))) if self.connection.total_changes == changesBefore: - query = "INSERT INTO [" + tableName + "] (" + ", ".join(list(valueDict) + list(keyDict)) + ")" + \ - " VALUES (" + ", ".join(["?"] * len(list(valueDict) + list(keyDict))) + ")" + query = 'INSERT INTO [' + tableName + '] (' + ', '.join(list(valueDict) + list(keyDict)) + ')' + \ + ' VALUES (' + ', '.join(['?'] * len(list(valueDict) + list(keyDict))) + ')' self.action(query, list(itervalues(valueDict)) + list(itervalues(keyDict))) def tableInfo(self, tableName): @@ -365,7 +365,7 @@ def tableInfo(self, tableName): :param tableName: name of table :return: array of name/type info """ - sql_results = self.select("PRAGMA table_info(`%s`)" % tableName) + sql_results = self.select('PRAGMA table_info(`%s`)' % tableName) columns = {} for column in sql_results: columns[column[b'name']] = {'type': column[b'type']} @@ -383,7 +383,7 @@ def _unicode_text_factory(x): # Just revert to the old code for now, until we can fix unicode return text_type(x, 'utf-8') except Exception: - return text_type(x, app.SYS_ENCODING, errors="ignore") + return text_type(x, app.SYS_ENCODING, errors='ignore') @staticmethod def _dict_factory(cursor, row): @@ -399,7 +399,7 @@ def hasTable(self, tableName): :param tableName: table name to check :return: True if table exists, False if it does not """ - return len(self.select("SELECT 1 FROM sqlite_master WHERE name = ?;", (tableName, ))) > 0 + return len(self.select('SELECT 1 FROM sqlite_master WHERE name = ?;', (tableName, ))) > 0 def hasColumn(self, tableName, column): """ @@ -411,7 +411,7 @@ def hasColumn(self, tableName, column): """ return column in self.tableInfo(tableName) - def addColumn(self, table, column, column_type="NUMERIC", default=0): + def addColumn(self, table, column, column_type='NUMERIC', default=0): """ Adds a column to a table, default column type is NUMERIC TODO: Make this return true/false on success/failure @@ -421,8 +421,8 @@ def addColumn(self, table, column, column_type="NUMERIC", default=0): :param column_type: Column type to add :param default: Default value for column """ - self.action("ALTER TABLE [%s] ADD %s %s" % (table, column, column_type)) - self.action("UPDATE [%s] SET %s = ?" % (table, column), (default,)) + self.action('ALTER TABLE [%s] ADD %s %s' % (table, column, column_type)) + self.action('UPDATE [%s] SET %s = ?' % (table, column), (default,)) def sanityCheckDatabase(connection, sanity_check): @@ -448,28 +448,28 @@ def upgradeDatabase(connection, schema): :param connection: Existing DB Connection to use :param schema: New schema to upgrade to """ - logger.log(u"Checking database structure..." 
+ connection.filename, logger.DEBUG) + logger.log(u'Checking database structure...' + connection.filename, logger.DEBUG) _processUpgrade(connection, schema) def prettyName(class_name): - return ' '.join([x.group() for x in re.finditer("([A-Z])([a-z0-9]+)", class_name)]) + return ' '.join([x.group() for x in re.finditer('([A-Z])([a-z0-9]+)', class_name)]) def _processUpgrade(connection, upgradeClass): instance = upgradeClass(connection) - logger.log(u"Checking " + prettyName(upgradeClass.__name__) + " database upgrade", logger.DEBUG) + logger.log(u'Checking ' + prettyName(upgradeClass.__name__) + ' database upgrade', logger.DEBUG) if not instance.test(): - logger.log(u"Database upgrade required: " + prettyName(upgradeClass.__name__), logger.DEBUG) + logger.log(u'Database upgrade required: ' + prettyName(upgradeClass.__name__), logger.DEBUG) try: instance.execute() except Exception as e: - logger.log("Error in " + str(upgradeClass.__name__) + ": " + ex(e), logger.ERROR) + logger.log('Error in ' + str(upgradeClass.__name__) + ': ' + ex(e), logger.ERROR) raise - logger.log(upgradeClass.__name__ + " upgrade completed", logger.DEBUG) + logger.log(upgradeClass.__name__ + ' upgrade completed', logger.DEBUG) else: - logger.log(upgradeClass.__name__ + " upgrade not required", logger.DEBUG) + logger.log(upgradeClass.__name__ + ' upgrade not required', logger.DEBUG) for upgradeSubClass in upgradeClass.__subclasses__(): _processUpgrade(connection, upgradeSubClass) @@ -481,19 +481,19 @@ def __init__(self, connection): self.connection = connection def hasTable(self, tableName): - return len(self.connection.select("SELECT 1 FROM sqlite_master WHERE name = ?;", (tableName, ))) > 0 + return len(self.connection.select('SELECT 1 FROM sqlite_master WHERE name = ?;', (tableName, ))) > 0 def hasColumn(self, tableName, column): return column in self.connection.tableInfo(tableName) - def addColumn(self, table, column, column_type="NUMERIC", default=0): - self.connection.action("ALTER TABLE [%s] ADD %s %s" % (table, column, column_type)) - self.connection.action("UPDATE [%s] SET %s = ?" % (table, column), (default,)) + def addColumn(self, table, column, column_type='NUMERIC', default=0): + self.connection.action('ALTER TABLE [%s] ADD %s %s' % (table, column, column_type)) + self.connection.action('UPDATE [%s] SET %s = ?' 
% (table, column), (default,)) def checkDBVersion(self): return self.connection.checkDBVersion() def incDBVersion(self): new_version = self.checkDBVersion() + 1 - self.connection.action("UPDATE db_version SET db_version = ?", [new_version]) + self.connection.action('UPDATE db_version SET db_version = ?', [new_version]) return new_version From caa39817a1d9b07fdb940b9c4b97fb6b902f0919 Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sun, 22 Jul 2018 12:23:22 +0300 Subject: [PATCH 05/51] medusa/config.py --- medusa/config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/medusa/config.py b/medusa/config.py index f0b405c296..3e76cea2d2 100644 --- a/medusa/config.py +++ b/medusa/config.py @@ -339,7 +339,7 @@ def change_GIT_PATH(): """ app.version_check_scheduler = None app.version_check_scheduler = scheduler.Scheduler( - CheckVersion(), cycleTime=datetime.timedelta(hours=app.UPDATE_FREQUENCY), threadName="CHECKVERSION", silent=False) + CheckVersion(), cycleTime=datetime.timedelta(hours=app.UPDATE_FREQUENCY), threadName='CHECKVERSION', silent=False) app.version_check_scheduler.enable = True app.version_check_scheduler.start() app.version_check_scheduler.forceRun() From 3da99b52501a3ae5e83d69e6a00fa50e919593cc Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sun, 22 Jul 2018 12:24:10 +0300 Subject: [PATCH 06/51] medusa/event_queue.py --- medusa/event_queue.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/medusa/event_queue.py b/medusa/event_queue.py index 647a790bb7..b5194d6635 100644 --- a/medusa/event_queue.py +++ b/medusa/event_queue.py @@ -34,7 +34,7 @@ def __init__(self, callback): # http://stackoverflow.com/a/20598791 self.daemon = False self.callback = callback - self.name = "EVENT-QUEUE" + self.name = 'EVENT-QUEUE' self.stop = threading.Event() def put(self, event_type): @@ -61,11 +61,11 @@ def run(self): # exiting thread self.stop.clear() except Exception as error: - log.error(u"Exception generated in thread %s: %s", + log.error(u'Exception generated in thread %s: %s', self.name, ex(error)) log.debug(repr(traceback.format_exc())) # System Events class SystemEvent(Event): - RESTART = "RESTART" - SHUTDOWN = "SHUTDOWN" + RESTART = 'RESTART' + SHUTDOWN = 'SHUTDOWN' From 409a1784bada4b18063cc6c8e3cf829ca3746b2e Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sun, 22 Jul 2018 12:24:51 +0300 Subject: [PATCH 07/51] medusa/generic_queue.py --- medusa/generic_queue.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/medusa/generic_queue.py b/medusa/generic_queue.py index 7e73da0e92..9ef214a9f8 100644 --- a/medusa/generic_queue.py +++ b/medusa/generic_queue.py @@ -21,19 +21,19 @@ class GenericQueue(object): def __init__(self): self.currentItem = None self.queue = [] - self.queue_name = "QUEUE" + self.queue_name = 'QUEUE' self.min_priority = 0 self.lock = threading.Lock() self.amActive = False def pause(self): """Pauses this queue.""" - log.info(u"Pausing queue") + log.info(u'Pausing queue') self.min_priority = 999999999999 def unpause(self): """Unpauses this queue.""" - log.info(u"Unpausing queue") + log.info(u'Unpausing queue') self.min_priority = 0 def add_item(self, item): @@ -102,7 +102,7 @@ def sorter(x, y): class QueueItem(threading.Thread): def __init__(self, name, action_id=0): super(QueueItem, self).__init__() - self.name = name.replace(" ", "-").upper() + self.name = name.replace(' ', '-').upper() self.inProgress = False self.priority = QueuePriorities.NORMAL self.action_id = action_id From 
efcac75ba1005df516ad7463a8f045b17dd14288 Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sun, 22 Jul 2018 12:27:20 +0300 Subject: [PATCH 08/51] medusa/history.py --- medusa/history.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/medusa/history.py b/medusa/history.py index fe9ddf78f3..9698f90415 100644 --- a/medusa/history.py +++ b/medusa/history.py @@ -42,10 +42,10 @@ def _log_history_item(action, ep_obj, resource, provider, version=-1, proper_tag main_db_con = db.DBConnection() main_db_con.action( - "INSERT INTO history " - "(action, date, indexer_id, showid, season, episode, quality, " - "resource, provider, version, proper_tags, manually_searched, info_hash, size) " - "VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?)", + 'INSERT INTO history ' + '(action, date, indexer_id, showid, season, episode, quality, ' + 'resource, provider, version, proper_tags, manually_searched, info_hash, size) ' + 'VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?)', [action, logDate, ep_obj.series.indexer, ep_obj.series.series_id, ep_obj.season, ep_obj.episode, ep_obj.quality, resource, provider, version, proper_tags, manually_searched, info_hash, size]) @@ -68,7 +68,7 @@ def log_snatch(search_result): if providerClass is not None: provider = providerClass.name else: - provider = "unknown" + provider = 'unknown' action = SNATCHED ep_obj.quality = search_result.quality From ad2ad58527be3db6c193dedac7776b517ecda378 Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sun, 22 Jul 2018 12:29:16 +0300 Subject: [PATCH 09/51] medusa/name_cache.py --- medusa/name_cache.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/medusa/name_cache.py b/medusa/name_cache.py index 8e1ee093ea..72f4a28cb3 100644 --- a/medusa/name_cache.py +++ b/medusa/name_cache.py @@ -60,9 +60,9 @@ def clear_cache(indexer_id=0, series_id=0): series_ids = (0, series_id) cache_db_con = db.DBConnection('cache.db') cache_db_con.action( - "DELETE FROM scene_names " - "WHERE (indexer_id = 0 AND indexer = ?) OR" - " (indexer_id = ? AND indexer = ?) ", + 'DELETE FROM scene_names ' + 'WHERE (indexer_id = 0 AND indexer = ?) OR' + ' (indexer_id = ? AND indexer = ?) 
', [series_id, indexer_id, series_id] ) @@ -82,7 +82,7 @@ def saveNameCacheToDb(): for name, series in iteritems(name_cache): indexer_id, series_id = series - cache_db_con.action("INSERT OR REPLACE INTO scene_names (indexer_id, name, indexer) VALUES (?, ?, ?)", [series_id, name, indexer_id]) + cache_db_con.action('INSERT OR REPLACE INTO scene_names (indexer_id, name, indexer) VALUES (?, ?, ?)', [series_id, name, indexer_id]) def build_name_cache(series_obj=None): From dadbfbe4cb1aaab0db4d30e96c17fe1a2af538a6 Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sun, 22 Jul 2018 12:30:48 +0300 Subject: [PATCH 10/51] medusa/naming.py --- medusa/naming.py | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/medusa/naming.py b/medusa/naming.py index 5dbb16af75..0df64be754 100644 --- a/medusa/naming.py +++ b/medusa/naming.py @@ -52,8 +52,8 @@ class TVShow(object): # pylint: disable=too-few-public-methods def __init__(self): - self.name = "Show Name" - self.genre = "Comedy" + self.name = 'Show Name' + self.genre = 'Comedy' self.indexerid = 1 self.air_by_date = 0 self.sports = 0 @@ -143,11 +143,11 @@ def check_valid_naming(pattern=None, multi=None, anime_type=None): if anime_type is None: anime_type = app.NAMING_ANIME - logger.log(u"Checking whether the pattern " + pattern + " is valid for a single episode", logger.DEBUG) + logger.log(u'Checking whether the pattern ' + pattern + ' is valid for a single episode', logger.DEBUG) valid = validate_name(pattern, None, anime_type) if multi is not None: - logger.log(u"Checking whether the pattern " + pattern + " is valid for a multi episode", logger.DEBUG) + logger.log(u'Checking whether the pattern ' + pattern + ' is valid for a multi episode', logger.DEBUG) valid = valid and validate_name(pattern, multi, anime_type) return valid @@ -162,7 +162,7 @@ def check_valid_abd_naming(pattern=None): if pattern is None: pattern = app.NAMING_PATTERN - logger.log(u"Checking whether the pattern " + pattern + " is valid for an air-by-date episode", logger.DEBUG) + logger.log(u'Checking whether the pattern ' + pattern + ' is valid for an air-by-date episode', logger.DEBUG) valid = validate_name(pattern, abd=True) return valid @@ -177,7 +177,7 @@ def check_valid_sports_naming(pattern=None): if pattern is None: pattern = app.NAMING_PATTERN - logger.log(u"Checking whether the pattern " + pattern + " is valid for an sports episode", logger.DEBUG) + logger.log(u'Checking whether the pattern ' + pattern + ' is valid for an sports episode', logger.DEBUG) valid = validate_name(pattern, sports=True) return valid @@ -204,18 +204,18 @@ def validate_name(pattern, multi=None, anime_type=None, # pylint: disable=too-m new_name = os.path.join(new_path, new_name) if not new_name: - logger.log(u"Unable to create a name out of " + pattern, logger.DEBUG) + logger.log(u'Unable to create a name out of ' + pattern, logger.DEBUG) return False - logger.log(u"Trying to parse " + new_name, logger.DEBUG) + logger.log(u'Trying to parse ' + new_name, logger.DEBUG) try: parse_result = NameParser(series=ep.series, naming_pattern=True).parse(new_name) except (InvalidNameException, InvalidShowException) as error: - logger.log(u"{}".format(error), logger.DEBUG) + logger.log(u'{}'.format(error), logger.DEBUG) return False - logger.log(u"The name " + new_name + " parsed into " + str(parse_result), logger.DEBUG) + logger.log(u'The name ' + new_name + ' parsed into ' + str(parse_result), logger.DEBUG) if abd or sports: if parse_result.air_date != ep.airdate: @@ -239,7 
+239,7 @@ def validate_name(pattern, multi=None, anime_type=None, # pylint: disable=too-m def generate_sample_ep(multi=None, abd=False, sports=False, anime_type=None): # make a fake episode object - ep = TVEpisode(2, 3, 3, "Ep Name") + ep = TVEpisode(2, 3, 3, 'Ep Name') # pylint: disable=protected-access ep.status = DOWNLOADED @@ -260,14 +260,14 @@ def generate_sample_ep(multi=None, abd=False, sports=False, anime_type=None): ep.release_name = 'Show.Name.S02E03.HDTV.x264-RLSGROUP' if multi is not None: - ep.name = "Ep Name (1)" + ep.name = 'Ep Name (1)' if anime_type != 3: ep.series.anime = 1 ep.release_name = 'Show.Name.003-004.HDTV.x264-RLSGROUP' - secondEp = TVEpisode(2, 4, 4, "Ep Name (2)") + secondEp = TVEpisode(2, 4, 4, 'Ep Name (2)') secondEp.status = DOWNLOADED secondEp.quality = Quality.HDTV secondEp.release_name = ep.release_name @@ -276,12 +276,12 @@ def generate_sample_ep(multi=None, abd=False, sports=False, anime_type=None): else: ep.release_name = 'Show.Name.S02E03E04E05.HDTV.x264-RLSGROUP' - secondEp = TVEpisode(2, 4, 4, "Ep Name (2)") + secondEp = TVEpisode(2, 4, 4, 'Ep Name (2)') secondEp.status = DOWNLOADED secondEp.quality = Quality.HDTV secondEp.release_name = ep.release_name - thirdEp = TVEpisode(2, 5, 5, "Ep Name (3)") + thirdEp = TVEpisode(2, 5, 5, 'Ep Name (3)') thirdEp.status = DOWNLOADED thirdEp.quality = Quality.HDTV thirdEp.release_name = ep.release_name From 4b05d696319ff0a8aa67240fb95213f7d6551af8 Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sun, 22 Jul 2018 12:32:12 +0300 Subject: [PATCH 11/51] medusa/nzb_splitter.py --- medusa/nzb_splitter.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/medusa/nzb_splitter.py b/medusa/nzb_splitter.py index d05749afe3..82973aa8f4 100644 --- a/medusa/nzb_splitter.py +++ b/medusa/nzb_splitter.py @@ -58,7 +58,7 @@ def get_season_nzbs(name, url_data, season): # Match the xmlns in an nzb # Example: nzbElement.getchildren()[1].tag == '{http://www.newzbin.com/DTD/2003/nzb}file' # regex match returns 'http://www.newzbin.com/DTD/2003/nzb' - 'nzb_xmlns': r"{(http://[\w_\./]+nzb)}file", + 'nzb_xmlns': r'{(http://[\w_\./]+nzb)}file', 'scene_name': '([\w\._\ ]+)[\. ]S%02d[\. 
]([\w\._\-\ ]+)[\- ]([\w_\-\ ]+?)', # pylint: disable=anomalous-backslash-in-string 'episode': '\.S%02d(?:[E0-9]+)\.[\w\._]+\-\w+', # pylint: disable=anomalous-backslash-in-string } @@ -91,9 +91,9 @@ def get_season_nzbs(name, url_data, season): continue else: xmlns = xmlns_match.group(1) - match = re.search(regex, cur_file.get("subject"), re.I) + match = re.search(regex, cur_file.get('subject'), re.I) if not match: - # regex couldn't match cur_file.get("subject") + # regex couldn't match cur_file.get('subject') continue cur_ep = match.group(1) if cur_ep not in ep_files: @@ -112,9 +112,9 @@ def create_nzb_string(file_elements, xmlns): :param xmlns: the xml namespace to be used :return: string containing all extra info extracted from the file_elements """ - root_element = ETree.Element("nzb") + root_element = ETree.Element('nzb') if xmlns: - root_element.set("xmlns", xmlns) + root_element.set('xmlns', xmlns) for cur_file in file_elements: root_element.append(strip_xmlns(cur_file, xmlns)) @@ -130,7 +130,7 @@ def save_nzb(nzb_name, nzb_string): :param nzb_string: Content to write in file """ try: - with open(nzb_name + ".nzb", 'w') as nzb_fh: + with open(nzb_name + '.nzb', 'w') as nzb_fh: nzb_fh.write(nzb_string) except EnvironmentError as error: @@ -145,7 +145,7 @@ def strip_xmlns(element, xmlns): :param xmlns: xml namespace to be removed :return: processed element """ - element.tag = element.tag.replace("{" + xmlns + "}", "") + element.tag = element.tag.replace('{' + xmlns + '}', '') for cur_child in element.getchildren(): strip_xmlns(cur_child, xmlns) From 02b6c9d9e3e131daf5a3c273d09550d724f4381c Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sun, 22 Jul 2018 12:34:30 +0300 Subject: [PATCH 12/51] medusa/post_processor.py --- medusa/post_processor.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/medusa/post_processor.py b/medusa/post_processor.py index 2d7d16003a..b243744a91 100644 --- a/medusa/post_processor.py +++ b/medusa/post_processor.py @@ -656,7 +656,7 @@ def _find_info(self): if int(numseasons_result[0][0]) == 1: self.log(u"Episode doesn't have a season number, but this show appears " - u"to have only 1 season, setting season number to 1...", logger.DEBUG) + u'to have only 1 season, setting season number to 1...', logger.DEBUG) season = 1 return series_obj, season, episodes, quality, version @@ -1029,7 +1029,7 @@ def process(self): (series_obj, season, episodes, quality, version) = self._find_info() if not series_obj: raise EpisodePostProcessingFailedException(u"This show isn't in your list, you need to add it " - u"before post-processing an episode") + u'before post-processing an episode') elif season is None or not episodes: raise EpisodePostProcessingFailedException(u'Not enough information to determine what episode this is') @@ -1098,7 +1098,7 @@ def process(self): if int(ep_obj.season) > 0: main_db_con = db.DBConnection() max_season = main_db_con.select( - "SELECT MAX(season) FROM tv_episodes WHERE showid = ? and indexer = ?", + 'SELECT MAX(season) FROM tv_episodes WHERE showid = ? and indexer = ?', [series_obj.series_id, series_obj.indexer]) # If the file season (ep_obj.season) is bigger than @@ -1115,7 +1115,7 @@ def process(self): if existing_file_status != PostProcessor.DOESNT_EXIST: self.flag_kodi_clean_library() self.log(u"This download is marked a priority download so I'm going to replace " - u"an existing file if I find one") + u'an existing file if I find one') # try to find out if we have enough space to perform the copy or move action. 
if not helpers.is_file_locked(self.file_path, False): From edca077ef2c3b8786cd91863decd57b496db06be Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sun, 22 Jul 2018 12:40:06 +0300 Subject: [PATCH 13/51] medusa/process_tv.py --- medusa/process_tv.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/medusa/process_tv.py b/medusa/process_tv.py index b098e88366..c5c381a22e 100644 --- a/medusa/process_tv.py +++ b/medusa/process_tv.py @@ -63,10 +63,10 @@ def directory(self, path): self.log('Trying to use folder: {0}'.format(directory), logger.DEBUG) else: - self.log("Unable to figure out what folder to process." + self.log('Unable to figure out what folder to process.' " If your download client and Medusa aren't on the same" - " machine, make sure to fill out the Post Processing Dir" - " field in the config.", logger.WARNING) + ' machine, make sure to fill out the Post Processing Dir' + ' field in the config.', logger.WARNING) setattr(self, '_directory', directory) @property @@ -521,7 +521,7 @@ def already_postprocessed(self, video_file): if not tv_episodes_result or tv_episodes_result[0][b'manually_searched'] == 0: self.log("You're trying to post-process an automatically searched file that has" - " already been processed, skipping: {0}".format(video_file), logger.DEBUG) + ' already been processed, skipping: {0}'.format(video_file), logger.DEBUG) return True def process_media(self, path, video_files, force=False, is_priority=None, ignore_subs=False): @@ -576,7 +576,7 @@ def _process_postponed(self, processor, path, video, ignore_subs): # We want to ignore embedded subtitles and video has at least one if accept_unknown(embedded_subs): self.log("Found embedded unknown subtitles and we don't want to ignore them. " - "Continuing the post-processing of this file: {0}".format(video)) + 'Continuing the post-processing of this file: {0}'.format(video)) elif accept_any(embedded_subs): self.log('Found wanted embedded subtitles. ' 'Continuing the post-processing of this file: {0}'.format(video)) @@ -640,7 +640,7 @@ def subtitles_enabled(*args): parse_result = NameParser().parse(name) if parse_result.series.indexerid: main_db_con = db.DBConnection() - sql_results = main_db_con.select("SELECT subtitles FROM tv_shows WHERE indexer = ? AND indexer_id = ? LIMIT 1", + sql_results = main_db_con.select('SELECT subtitles FROM tv_shows WHERE indexer = ? AND indexer_id = ? LIMIT 1', [parse_result.series.indexer, parse_result.series.indexerid]) return bool(sql_results[0][b'subtitles']) if sql_results else False @@ -677,6 +677,6 @@ def move_torrent(info_hash, release_names): return True else: logger.log("Couldn't move torrent for release{s} '{release}' with hash: {hash} to: '{path}'. 
" - "Please check logs.".format(release=release_names, hash=info_hash, s=s, + 'Please check logs.'.format(release=release_names, hash=info_hash, s=s, path=app.TORRENT_SEED_LOCATION), logger.WARNING) return False From 9e121e2bb9b2036127245cb979eaa579ae660e60 Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sun, 22 Jul 2018 12:42:58 +0300 Subject: [PATCH 14/51] medusa/scene_numbering.py --- medusa/scene_numbering.py | 106 +++++++++++++++++++------------------- 1 file changed, 53 insertions(+), 53 deletions(-) diff --git a/medusa/scene_numbering.py b/medusa/scene_numbering.py index 56494b7a58..de1cf2874c 100644 --- a/medusa/scene_numbering.py +++ b/medusa/scene_numbering.py @@ -73,11 +73,11 @@ def find_scene_numbering(series_obj, season, episode): main_db_con = db.DBConnection() rows = main_db_con.select( - "SELECT scene_season, scene_episode FROM scene_numbering WHERE indexer = ? and indexer_id = ? and season = ? and episode = ? and (scene_season or scene_episode) != 0", + 'SELECT scene_season, scene_episode FROM scene_numbering WHERE indexer = ? and indexer_id = ? and season = ? and episode = ? and (scene_season or scene_episode) != 0', [series_obj.indexer, series_obj.series_id, season, episode]) if rows: - return int(rows[0][b"scene_season"]), int(rows[0][b"scene_episode"]) + return int(rows[0][b'scene_season']), int(rows[0][b'scene_episode']) def get_scene_absolute_numbering(series_obj, absolute_number, fallback_to_xem=True): @@ -118,11 +118,11 @@ def find_scene_absolute_numbering(series_obj, absolute_number): main_db_con = db.DBConnection() rows = main_db_con.select( - "SELECT scene_absolute_number FROM scene_numbering WHERE indexer = ? and indexer_id = ? and absolute_number = ? and scene_absolute_number != 0", + 'SELECT scene_absolute_number FROM scene_numbering WHERE indexer = ? and indexer_id = ? and absolute_number = ? and scene_absolute_number != 0', [series_obj.indexer, series_obj.series_id, absolute_number]) if rows: - return int(rows[0][b"scene_absolute_number"]) + return int(rows[0][b'scene_absolute_number']) def get_indexer_numbering(series_obj, sceneSeason, sceneEpisode, fallback_to_xem=True): @@ -135,13 +135,13 @@ def get_indexer_numbering(series_obj, sceneSeason, sceneEpisode, fallback_to_xem main_db_con = db.DBConnection() rows = main_db_con.select( - "SELECT season, episode FROM scene_numbering " - "WHERE indexer = ? and indexer_id = ? and scene_season = ? and scene_episode = ?", + 'SELECT season, episode FROM scene_numbering ' + 'WHERE indexer = ? and indexer_id = ? and scene_season = ? and scene_episode = ?', [series_obj.indexer, series_obj.series_id, sceneSeason, sceneEpisode] ) if rows: - return int(rows[0][b"season"]), int(rows[0][b"episode"]) + return int(rows[0][b'season']), int(rows[0][b'episode']) else: if fallback_to_xem: return get_indexer_numbering_for_xem(series_obj, sceneSeason, sceneEpisode) @@ -159,15 +159,15 @@ def get_indexer_absolute_numbering(series_obj, sceneAbsoluteNumber, fallback_to_ main_db_con = db.DBConnection() if scene_season is None: rows = main_db_con.select( - "SELECT absolute_number FROM scene_numbering WHERE indexer = ? and indexer_id = ? and scene_absolute_number = ?", + 'SELECT absolute_number FROM scene_numbering WHERE indexer = ? and indexer_id = ? and scene_absolute_number = ?', [series_obj.indexer, series_obj.series_id, sceneAbsoluteNumber]) else: rows = main_db_con.select( - "SELECT absolute_number FROM scene_numbering WHERE indexer = ? and indexer_id = ? and scene_absolute_number = ? 
and scene_season = ?", + 'SELECT absolute_number FROM scene_numbering WHERE indexer = ? and indexer_id = ? and scene_absolute_number = ? and scene_season = ?', [series_obj.indexer, series_obj.series_id, sceneAbsoluteNumber, scene_season]) if rows: - return int(rows[0][b"absolute_number"]) + return int(rows[0][b'absolute_number']) else: if fallback_to_xem: return get_indexer_absolute_numbering_for_xem(series_obj, sceneAbsoluteNumber, scene_season) @@ -188,20 +188,20 @@ def set_scene_numbering(series_obj, season=None, episode=None, # pylint:disable # Season/episode can be 0 so can't check "if season" if season is not None and episode is not None and absolute_number is None: main_db_con.action( - "INSERT OR IGNORE INTO scene_numbering (indexer, indexer_id, season, episode) VALUES (?,?,?,?)", + 'INSERT OR IGNORE INTO scene_numbering (indexer, indexer_id, season, episode) VALUES (?,?,?,?)', [series_obj.indexer, series_obj.series_id, season, episode]) main_db_con.action( - "UPDATE scene_numbering SET scene_season = ?, scene_episode = ? WHERE indexer = ? and indexer_id = ? and season = ? and episode = ?", + 'UPDATE scene_numbering SET scene_season = ?, scene_episode = ? WHERE indexer = ? and indexer_id = ? and season = ? and episode = ?', [sceneSeason, sceneEpisode, series_obj.indexer, series_obj.series_id, season, episode]) # absolute_number can be 0 so can't check "if absolute_number" else: main_db_con.action( - "INSERT OR IGNORE INTO scene_numbering (indexer, indexer_id, absolute_number) VALUES (?,?,?)", + 'INSERT OR IGNORE INTO scene_numbering (indexer, indexer_id, absolute_number) VALUES (?,?,?)', [series_obj.indexer, series_obj.series_id, absolute_number]) main_db_con.action( - "UPDATE scene_numbering SET scene_absolute_number = ? WHERE indexer = ? and indexer_id = ? and absolute_number = ?", + 'UPDATE scene_numbering SET scene_absolute_number = ? WHERE indexer = ? and indexer_id = ? and absolute_number = ?', [sceneAbsolute, series_obj.indexer, series_obj.series_id, absolute_number]) series_obj.flush_episodes() @@ -225,15 +225,15 @@ def find_xem_numbering(series_obj, season, episode): main_db_con = db.DBConnection() rows = main_db_con.select( - "SELECT scene_season, scene_episode " - "FROM tv_episodes " - "WHERE indexer = ? and showid = ? and season = ? " - "and episode = ? and (scene_season or scene_episode) != 0", + 'SELECT scene_season, scene_episode ' + 'FROM tv_episodes ' + 'WHERE indexer = ? and showid = ? and season = ? ' + 'and episode = ? and (scene_season or scene_episode) != 0', [series_obj.indexer, series_obj.series_id, season, episode] ) if rows: - return int(rows[0][b"scene_season"]), int(rows[0][b"scene_episode"]) + return int(rows[0][b'scene_season']), int(rows[0][b'scene_episode']) def find_xem_absolute_numbering(series_obj, absolute_number): @@ -252,14 +252,14 @@ def find_xem_absolute_numbering(series_obj, absolute_number): main_db_con = db.DBConnection() rows = main_db_con.select( - "SELECT scene_absolute_number " - "FROM tv_episodes " - "WHERE indexer = ? and showid = ? " - "and absolute_number = ? and scene_absolute_number != 0", + 'SELECT scene_absolute_number ' + 'FROM tv_episodes ' + 'WHERE indexer = ? and showid = ? ' + 'and absolute_number = ? 
and scene_absolute_number != 0', [series_obj.indexer, series_obj.series_id, absolute_number]) if rows: - return int(rows[0][b"scene_absolute_number"]) + return int(rows[0][b'scene_absolute_number']) def get_indexer_numbering_for_xem(series_obj, sceneSeason, sceneEpisode): @@ -278,14 +278,14 @@ def get_indexer_numbering_for_xem(series_obj, sceneSeason, sceneEpisode): main_db_con = db.DBConnection() rows = main_db_con.select( - "SELECT season, episode " - "FROM tv_episodes " - "WHERE indexer = ? and showid = ? " - "and scene_season = ? and scene_episode = ?", + 'SELECT season, episode ' + 'FROM tv_episodes ' + 'WHERE indexer = ? and showid = ? ' + 'and scene_season = ? and scene_episode = ?', [series_obj.indexer, series_obj.series_id, sceneSeason, sceneEpisode]) if rows: - return int(rows[0][b"season"]), int(rows[0][b"episode"]) + return int(rows[0][b'season']), int(rows[0][b'episode']) return sceneSeason, sceneEpisode @@ -306,21 +306,21 @@ def get_indexer_absolute_numbering_for_xem(series_obj, sceneAbsoluteNumber, scen main_db_con = db.DBConnection() if scene_season is None: rows = main_db_con.select( - "SELECT absolute_number " - "FROM tv_episodes " - "WHERE indexer = ? AND showid = ? " - "AND scene_absolute_number = ?", + 'SELECT absolute_number ' + 'FROM tv_episodes ' + 'WHERE indexer = ? AND showid = ? ' + 'AND scene_absolute_number = ?', [series_obj.indexer, series_obj.series_id, sceneAbsoluteNumber]) else: rows = main_db_con.select( - "SELECT absolute_number " - "FROM tv_episodes " - "WHERE indexer = ? " - "AND showid = ? AND scene_absolute_number = ? and scene_season = ?", + 'SELECT absolute_number ' + 'FROM tv_episodes ' + 'WHERE indexer = ? ' + 'AND showid = ? AND scene_absolute_number = ? and scene_season = ?', [series_obj.indexer, series_obj.series_id, sceneAbsoluteNumber, scene_season]) if rows: - return int(rows[0][b"absolute_number"]) + return int(rows[0][b'absolute_number']) return sceneAbsoluteNumber @@ -453,7 +453,7 @@ def xem_refresh(series_obj, force=False): MAX_REFRESH_AGE_SECS = 86400 # 1 day main_db_con = db.DBConnection() - rows = main_db_con.select("SELECT last_refreshed FROM xem_refresh WHERE indexer = ? and indexer_id = ?", + rows = main_db_con.select('SELECT last_refreshed FROM xem_refresh WHERE indexer = ? 
and indexer_id = ?', [indexer_id, series_id]) if rows: lastRefresh = int(rows[0][b'last_refreshed']) @@ -467,7 +467,7 @@ def xem_refresh(series_obj, force=False): # mark refreshed main_db_con.upsert( - "xem_refresh", + 'xem_refresh', {'last_refreshed': int(time.mktime(datetime.datetime.today().timetuple()))}, {'indexer': indexer_id, 'indexer_id': series_id} ) @@ -477,14 +477,14 @@ def xem_refresh(series_obj, force=False): logger.log(u'{0} is an unsupported indexer in XEM'.format(indexerApi(indexer_id).name), logger.DEBUG) return # XEM MAP URL - url = "http://thexem.de/map/havemap?origin={0}".format(indexerApi(indexer_id).config['xem_origin']) + url = 'http://thexem.de/map/havemap?origin={0}'.format(indexerApi(indexer_id).config['xem_origin']) parsed_json = safe_session.get_json(url) if not parsed_json or 'result' not in parsed_json or 'success' not in parsed_json['result'] or 'data' not in parsed_json or str(series_id) not in parsed_json['data']: logger.log(u'No XEM data for show ID {0} on {1}'.format(series_id, series_obj.indexer_name), logger.DEBUG) return # XEM API URL - url = "http://thexem.de/map/all?id={0}&origin={1}&destination=scene".format(series_id, indexerApi(indexer_id).config['xem_origin']) + url = 'http://thexem.de/map/all?id={0}&origin={1}&destination=scene'.format(series_id, indexerApi(indexer_id).config['xem_origin']) parsed_json = safe_session.get_json(url) if not parsed_json or 'result' not in parsed_json or 'success' not in parsed_json['result']: logger.log(u'No XEM data for show ID {0} on {1}'.format(indexer_id, series_obj.indexer_name), logger.DEBUG) @@ -494,24 +494,24 @@ def xem_refresh(series_obj, force=False): for entry in parsed_json['data']: if 'scene' in entry: cl.append([ - "UPDATE tv_episodes SET scene_season = ?, scene_episode = ?, scene_absolute_number = ? " - "WHERE indexer = ? AND showid = ? AND season = ? AND episode = ?", + 'UPDATE tv_episodes SET scene_season = ?, scene_episode = ?, scene_absolute_number = ? ' + 'WHERE indexer = ? AND showid = ? AND season = ? AND episode = ?', [entry['scene']['season'], entry['scene']['episode'], entry['scene']['absolute'], indexer_id, series_id, entry[indexerApi(indexer_id).config['xem_origin']]['season'], entry[indexerApi(indexer_id).config['xem_origin']]['episode']] ]) cl.append([ - "UPDATE tv_episodes SET absolute_number = ? " - "WHERE indexer = ? AND showid = ? AND season = ? AND episode = ? AND absolute_number = 0", + 'UPDATE tv_episodes SET absolute_number = ? ' + 'WHERE indexer = ? AND showid = ? AND season = ? AND episode = ? AND absolute_number = 0', [entry[indexerApi(indexer_id).config['xem_origin']]['absolute'], indexer_id, series_id, entry[indexerApi(indexer_id).config['xem_origin']]['season'], entry[indexerApi(indexer_id).config['xem_origin']]['episode']] ]) if 'scene_2' in entry: # for doubles cl.append([ - "UPDATE tv_episodes SET scene_season = ?, scene_episode = ?, scene_absolute_number = ? " - "WHERE indexer = ? AND showid = ? AND season = ? AND episode = ?", + 'UPDATE tv_episodes SET scene_season = ?, scene_episode = ?, scene_absolute_number = ? ' + 'WHERE indexer = ? AND showid = ? AND season = ? 
AND episode = ?', [entry['scene_2']['season'], entry['scene_2']['episode'], entry['scene_2']['absolute'], indexer_id, series_id, entry[indexerApi(indexer_id).config['xem_origin']]['season'], @@ -523,7 +523,7 @@ def xem_refresh(series_obj, force=False): main_db_con.mass_action(cl) except Exception as e: - logger.log(u"Exception while refreshing XEM data for show ID {0} on {1}: {2}".format + logger.log(u'Exception while refreshing XEM data for show ID {0} on {1}: {2}'.format (series_id, series_obj.indexer_name, ex(e)), logger.WARNING) logger.log(traceback.format_exc(), logger.DEBUG) @@ -608,28 +608,28 @@ def fix_xem_numbering(series_obj): # pylint:disable=too-many-locals, too-many-b if update_absolute_number: cl.append([ - "UPDATE tv_episodes SET absolute_number = ? WHERE indexer = ? AND showid = ? AND season = ? AND episode = ?", + 'UPDATE tv_episodes SET absolute_number = ? WHERE indexer = ? AND showid = ? AND season = ? AND episode = ?', [absolute_number, series_obj.indexer, series_obj.series_id, season, episode] ]) update_absolute_number = False if update_scene_season: cl.append([ - "UPDATE tv_episodes SET scene_season = ? WHERE indexer = ? AND showid = ? AND season = ? AND episode = ?", + 'UPDATE tv_episodes SET scene_season = ? WHERE indexer = ? AND showid = ? AND season = ? AND episode = ?', [scene_season, series_obj.indexer, series_obj.series_id, season, episode] ]) update_scene_season = False if update_scene_episode: cl.append([ - "UPDATE tv_episodes SET scene_episode = ? WHERE indexer = ? AND showid = ? AND season = ? AND episode = ?", + 'UPDATE tv_episodes SET scene_episode = ? WHERE indexer = ? AND showid = ? AND season = ? AND episode = ?', [scene_episode, series_obj.indexer, series_obj.series_id, season, episode] ]) update_scene_episode = False if update_scene_absolute_number: cl.append([ - "UPDATE tv_episodes SET scene_absolute_number = ? WHERE indexer = ? AND showid = ? AND season = ? AND episode = ?", + 'UPDATE tv_episodes SET scene_absolute_number = ? WHERE indexer = ? AND showid = ? AND season = ? 
AND episode = ?', [scene_absolute_number, series_obj.indexer, series_obj.series_id, season, episode] ]) update_scene_absolute_number = False From 96e9366f423c0ed1f500ec4c7acf964dc61d7884 Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sun, 22 Jul 2018 12:43:09 +0300 Subject: [PATCH 15/51] medusa/scheduler.py --- medusa/scheduler.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/medusa/scheduler.py b/medusa/scheduler.py index 411596887e..1a83ba3879 100644 --- a/medusa/scheduler.py +++ b/medusa/scheduler.py @@ -17,7 +17,7 @@ class Scheduler(threading.Thread): def __init__(self, action, cycleTime=datetime.timedelta(minutes=10), run_delay=datetime.timedelta(minutes=0), - start_time=None, threadName="ScheduledThread", silent=True): + start_time=None, threadName='ScheduledThread', silent=True): super(Scheduler, self).__init__() self.run_delay = run_delay From 698cf89063124d15f926c9da2d48ecdb553b6220 Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sun, 22 Jul 2018 12:44:01 +0300 Subject: [PATCH 16/51] medusa/show_queue.py --- medusa/show_queue.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/medusa/show_queue.py b/medusa/show_queue.py index 6a12fa378a..448d4f17bc 100644 --- a/medusa/show_queue.py +++ b/medusa/show_queue.py @@ -114,7 +114,7 @@ class ShowQueue(generic_queue.GenericQueue): def __init__(self): generic_queue.GenericQueue.__init__(self) - self.queue_name = "SHOWQUEUE" + self.queue_name = 'SHOWQUEUE' def _isInQueue(self, show, actions): if not show: @@ -187,13 +187,13 @@ def updateShow(self, show, season=None): if self.isBeingUpdated(show): raise CantUpdateShowException( - "{show_name} is already being updated by Post-processor or manually started," + '{show_name} is already being updated by Post-processor or manually started,' " can't update again until it's done.".format(show_name=show.name) ) if self.isInUpdateQueue(show): raise CantUpdateShowException( - "{show_name} is in process of being updated by Post-processor or manually started," + '{show_name} is in process of being updated by Post-processor or manually started,' " can't update again until it's done.".format(show_name=show.name) ) @@ -209,7 +209,7 @@ def refreshShow(self, show, force=False): raise CantRefreshShowException('This show is already being refreshed, not refreshing again.') if (self.isBeingUpdated(show) or self.isInUpdateQueue(show)) and not force: - log.debug("A refresh was attempted but there is already an update queued or in progress." + log.debug('A refresh was attempted but there is already an update queued or in progress.' " Since updates do a refresh at the end anyway I'm skipping this request.") return From 76517601c148cb357e44e90f8bd53bc1390dd009 Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sun, 22 Jul 2018 12:45:08 +0300 Subject: [PATCH 17/51] medusa/show_updater.py --- medusa/show_updater.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/medusa/show_updater.py b/medusa/show_updater.py index c76905e129..73134ec2c7 100644 --- a/medusa/show_updater.py +++ b/medusa/show_updater.py @@ -200,7 +200,7 @@ def run(self, force=False): else: logger.info(u'Show update skipped, show: {show} is paused.', show=show[1].name) - ui.ProgressIndicators.setIndicator('dailyUpdate', ui.QueueProgressIndicator("Daily Update", pi_list)) + ui.ProgressIndicators.setIndicator('dailyUpdate', ui.QueueProgressIndicator('Daily Update', pi_list)) # Only refresh updated shows that have been updated using the season updates. 
# The full refreshed shows, are updated from the queueItem. From 8b3cc7a148917a7a5ff3f4727e14a868d249cfd9 Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sun, 22 Jul 2018 12:50:30 +0300 Subject: [PATCH 18/51] medusa/subtitles.py --- medusa/subtitles.py | 56 ++++++++++++++++++++++----------------------- 1 file changed, 28 insertions(+), 28 deletions(-) diff --git a/medusa/subtitles.py b/medusa/subtitles.py index 09d374e425..a2ce2bfaac 100644 --- a/medusa/subtitles.py +++ b/medusa/subtitles.py @@ -334,7 +334,7 @@ def list_subtitles(tv_episode, video_path=None, limit=40): for subtitle, _ in scored_subtitles: cache.set(subtitle_key.format(id=subtitle.id).encode('utf-8'), subtitle) - logger.debug("Scores computed for release: {release}".format(release=os.path.basename(video_path))) + logger.debug('Scores computed for release: {release}'.format(release=os.path.basename(video_path))) max_score = episode_scores['hash'] max_scores = set(episode_scores) - {'hearing_impaired', 'hash'} @@ -879,7 +879,7 @@ def subtitles_download_in_pp(): # pylint: disable=too-many-locals, too-many-bra new_release_name = remove_extension(filename) if tv_episode.release_name and new_release_name != tv_episode.release_name: logger.debug(u"As this is a release replacement I'm not going to consider existing " - u"subtitles or release name from database to refine the new release") + u'subtitles or release name from database to refine the new release') logger.debug(u"Replacing old release name '%s' with new release name '%s'", tv_episode.release_name, new_release_name) tv_episode.subtitles = [] @@ -973,33 +973,33 @@ def dhm(td): sql_results = [] for args in sql_args: sql_results += database.select( - "SELECT " - "s.show_name, " - "e.indexer," - "e.showid, " - "e.season, " - "e.episode," - "e.release_name, " - "e.status, " - "e.subtitles, " - "e.subtitles_searchcount AS searchcount, " - "e.subtitles_lastsearch AS lastsearch, " - "e.location, (? - e.airdate) as age " - "FROM " - "tv_episodes AS e " - "INNER JOIN tv_shows AS s " - "ON (e.showid = s.indexer_id AND e.indexer = s.indexer) " - "WHERE " - "s.subtitles = 1 " - "AND s.paused = 0 " - "AND e.status = ? " - "AND e.season > 0 " + 'SELECT ' + 's.show_name, ' + 'e.indexer,' + 'e.showid, ' + 'e.season, ' + 'e.episode,' + 'e.release_name, ' + 'e.status, ' + 'e.subtitles, ' + 'e.subtitles_searchcount AS searchcount, ' + 'e.subtitles_lastsearch AS lastsearch, ' + 'e.location, (? - e.airdate) as age ' + 'FROM ' + 'tv_episodes AS e ' + 'INNER JOIN tv_shows AS s ' + 'ON (e.showid = s.indexer_id AND e.indexer = s.indexer) ' + 'WHERE ' + 's.subtitles = 1 ' + 'AND s.paused = 0 ' + 'AND e.status = ? ' + 'AND e.season > 0 ' "AND e.location != '' " - "AND age {} 30 " - "AND e.subtitles NOT LIKE ? " - "ORDER BY " - "lastsearch ASC " - "LIMIT {}".format + 'AND age {} 30 ' + 'AND e.subtitles NOT LIKE ? 
' + 'ORDER BY ' + 'lastsearch ASC ' + 'LIMIT {}'.format (args['age_comparison'], args['limit']), [datetime.datetime.now().toordinal(), DOWNLOADED, sql_like_languages] ) From 28714470af1e718622b922f7a142abebc3854d76 Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sun, 22 Jul 2018 12:55:23 +0300 Subject: [PATCH 19/51] W503 line break before binary operator * medusa/trakt_checker.py * medusa/init/logconfig.py * medusa/server/web/home/handler.py * medusa/session/handlers.py * medusa/show/recommendations/recommended.py --- medusa/init/logconfig.py | 4 ++-- medusa/server/web/home/handler.py | 4 ++-- medusa/session/handlers.py | 8 ++++---- medusa/show/recommendations/recommended.py | 4 ++-- medusa/trakt_checker.py | 4 ++-- 5 files changed, 12 insertions(+), 12 deletions(-) diff --git a/medusa/init/logconfig.py b/medusa/init/logconfig.py index 48c23f952c..28e65f4fb5 100644 --- a/medusa/init/logconfig.py +++ b/medusa/init/logconfig.py @@ -12,8 +12,8 @@ class StyleAdapter(logging.LoggerAdapter): """Logger Adapter with new string format style.""" - adapter_members = {attr: attr for attr in dir(logging.LoggerAdapter) if not callable(attr) - and not attr.startswith('__')} + adapter_members = {attr: attr for attr in dir(logging.LoggerAdapter) if not callable(attr) and + not attr.startswith('__')} adapter_members.update({'warn': 'warning', 'fatal': 'critical'}) reserved_keywords = getargspec(logging.Logger._log).args[1:] diff --git a/medusa/server/web/home/handler.py b/medusa/server/web/home/handler.py index c5c552aaa6..62d8dbb691 100644 --- a/medusa/server/web/home/handler.py +++ b/medusa/server/web/home/handler.py @@ -1952,8 +1952,8 @@ def setStatus(self, indexername=None, seriesid=None, eps=None, status=None, dire snatched_qualities = [SNATCHED, SNATCHED_PROPER, SNATCHED_BEST] if status == DOWNLOADED and not ( - ep_obj.status in snatched_qualities + [DOWNLOADED] - or os.path.isfile(ep_obj.location)): + ep_obj.status in snatched_qualities + [DOWNLOADED] or + os.path.isfile(ep_obj.location)): logger.log('Refusing to change status of {series} {episode} to DOWNLOADED' ' because it\'s not SNATCHED/DOWNLOADED or the file is missing'.format( series=series_obj.name, episode=cur_ep), logger.WARNING) diff --git a/medusa/session/handlers.py b/medusa/session/handlers.py index afa0f91bf7..cf7d120c25 100644 --- a/medusa/session/handlers.py +++ b/medusa/session/handlers.py @@ -78,8 +78,8 @@ def is_cloudflare_challenge(resp): Source: goo.gl/v8FvnD """ return ( - resp.status_code == 503 - and resp.headers.get('Server', '').startswith('cloudflare') - and b'jschl_vc' in resp.content - and b'jschl_answer' in resp.content + resp.status_code == 503 and + resp.headers.get('Server', '').startswith('cloudflare') and + b'jschl_vc' in resp.content and + b'jschl_answer' in resp.content ) diff --git a/medusa/show/recommendations/recommended.py b/medusa/show/recommendations/recommended.py index 481882095f..696852f799 100644 --- a/medusa/show/recommendations/recommended.py +++ b/medusa/show/recommendations/recommended.py @@ -112,8 +112,8 @@ def __init__(self, rec_show_prov, series_id, title, mapped_indexer, mapped_serie # Check if the show is currently already in the db self.show_in_list = bool([show.indexerid for show in app.showList - if show.series_id == self.mapped_series_id - and show.indexer == self.mapped_indexer]) + if show.series_id == self.mapped_series_id and + show.indexer == self.mapped_indexer]) self.session = session def cache_image(self, image_url, default=None): diff --git a/medusa/trakt_checker.py 
b/medusa/trakt_checker.py index 2528c6d112..d1ffab219d 100644 --- a/medusa/trakt_checker.py +++ b/medusa/trakt_checker.py @@ -120,8 +120,8 @@ def find_show(self, indexerid, indexer): log.info('No shows found in your Trakt library. Nothing to sync') return trakt_show = [x for x in trakt_library if - get_trakt_indexer(indexer) - and int(indexerid) in [int(x['show']['ids'].get(get_trakt_indexer(indexer)))]] + get_trakt_indexer(indexer) and + int(indexerid) in [int(x['show']['ids'].get(get_trakt_indexer(indexer)))]] return trakt_show if trakt_show else None From f4e0fa1244715ef1af03d4ad6a5118dbce43dfc8 Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sun, 22 Jul 2018 12:57:19 +0300 Subject: [PATCH 20/51] E226 missing whitespace around arithmetic operator * medusa/providers/nzb/binsearch.py * medusa/providers/torrent/html/bjshare.py --- medusa/providers/nzb/binsearch.py | 2 +- medusa/providers/torrent/html/bjshare.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/medusa/providers/nzb/binsearch.py b/medusa/providers/nzb/binsearch.py index 713c55b9f3..72e3de1fa8 100644 --- a/medusa/providers/nzb/binsearch.py +++ b/medusa/providers/nzb/binsearch.py @@ -192,7 +192,7 @@ def clean_title(title, mode): for extension in ('.nfo', '.par2', '.rar', '.zip', '.nzb', '.part'): # Strip extensions that aren't part of the file name if title.endswith(extension): - title = title[:len(title)-len(extension)] + title = title[:len(title) - len(extension)] return title except AttributeError: return None diff --git a/medusa/providers/torrent/html/bjshare.py b/medusa/providers/torrent/html/bjshare.py index ea1159b24f..de7659e98d 100644 --- a/medusa/providers/torrent/html/bjshare.py +++ b/medusa/providers/torrent/html/bjshare.py @@ -227,7 +227,7 @@ def process_column_header(td): torrent_details = torrent_details.replace('[', ' ').replace(']', ' ').replace('/', ' ') torrent_details = torrent_details.replace('Full HD ', '1080p').replace('HD ', '720p') - torrent_size = cells[labels.index('Tamanho')+group_index].get_text(strip=True) + torrent_size = cells[labels.index('Tamanho') + group_index].get_text(strip=True) size = convert_size(torrent_size) or -1 torrent_name = '{0} {1}'.format(title, torrent_details.strip()).strip() From 83c9c3df94f7934d1da5979ddae082e77ab3cdab Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sun, 22 Jul 2018 13:01:08 +0300 Subject: [PATCH 21/51] medusa/ui.py --- medusa/ui.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/medusa/ui.py b/medusa/ui.py index ba3576d0f8..9d2ab34246 100644 --- a/medusa/ui.py +++ b/medusa/ui.py @@ -182,7 +182,7 @@ def nextName(self): if curItem in self.queueItemList: return curItem.name - return "Unknown" + return 'Unknown' def percentComplete(self): numFinished = self.numFinished() From 1eefc905439e67ee02a352baff53011dfd9e5166 Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sun, 22 Jul 2018 13:06:35 +0300 Subject: [PATCH 22/51] medusa/version_checker.py --- medusa/version_checker.py | 40 +++++++++++++++++++-------------------- 1 file changed, 20 insertions(+), 20 deletions(-) diff --git a/medusa/version_checker.py b/medusa/version_checker.py index 5bb1f3bc46..1ae43a21e3 100644 --- a/medusa/version_checker.py +++ b/medusa/version_checker.py @@ -375,7 +375,7 @@ def get_github_repo(): @staticmethod def get_update_url(): - return app.WEB_ROOT + "/home/update/?pid=" + str(app.PID) + return app.WEB_ROOT + '/home/update/?pid=' + str(app.PID) class GitUpdateManager(UpdateManager): @@ -405,9 +405,9 @@ def get_cur_version(self): def 
get_newest_version(self): if self._newest_commit_hash: - self._cur_version = self._run_git(self._git_path, "describe --tags --abbrev=0 " + self._newest_commit_hash)[0] + self._cur_version = self._run_git(self._git_path, 'describe --tags --abbrev=0 ' + self._newest_commit_hash)[0] else: - self._cur_version = self._run_git(self._git_path, "describe --tags --abbrev=0 " + self._cur_commit_hash)[0] + self._cur_version = self._run_git(self._git_path, 'describe --tags --abbrev=0 ' + self._cur_commit_hash)[0] return self._cur_version def get_num_commits_behind(self): @@ -547,7 +547,7 @@ def _find_installed_branch(self): if branch: app.BRANCH = branch return branch - return "" + return '' def _check_github_for_update(self): """ @@ -588,8 +588,8 @@ def _check_github_for_update(self): if exit_status == 0 and output: try: - self._num_commits_behind = int(output.count("<")) - self._num_commits_ahead = int(output.count(">")) + self._num_commits_behind = int(output.count('<')) + self._num_commits_ahead = int(output.count('>')) except Exception: log.debug(u"git didn't return numbers for behind and ahead, not using it") @@ -612,14 +612,14 @@ def set_newest_text(self): url = base_url + '/commits/' newest_text = 'There is a newer version available ' - newest_text += " (you're " + str(self._num_commits_behind) + " commit" + newest_text += " (you're " + str(self._num_commits_behind) + ' commit' if self._num_commits_behind > 1: newest_text += 's' newest_text += ' behind' if self._num_commits_ahead > 0: newest_text += ' and {ahead} commit{s} ahead'.format(ahead=self._num_commits_ahead, s='s' if self._num_commits_ahead > 1 else '') - newest_text += ')' + "— Update Now" + newest_text += ') — Update Now' elif self._num_commits_ahead > 0: newest_text = u'Local branch is ahead of {0}. Automatic update not possible'.format(self.branch) @@ -687,7 +687,7 @@ def update(self): # Notify update successful if app.NOTIFY_ON_UPDATE: try: - notifiers.notify_git_update(app.CUR_COMMIT_HASH or "") + notifiers.notify_git_update(app.CUR_COMMIT_HASH or '') except Exception: log.debug(u'Unable to send update notification. Continuing the update process') return True @@ -769,7 +769,7 @@ def __init__(self): @staticmethod def _find_installed_branch(): - return app.CUR_COMMIT_BRANCH if app.CUR_COMMIT_BRANCH else "master" + return app.CUR_COMMIT_BRANCH if app.CUR_COMMIT_BRANCH else 'master' def get_cur_commit_hash(self): return self._cur_commit_hash @@ -779,11 +779,11 @@ def get_newest_commit_hash(self): @staticmethod def get_cur_version(): - return "" + return '' @staticmethod def get_newest_version(): - return "" + return '' def get_num_commits_behind(self): return self._num_commits_behind @@ -836,10 +836,10 @@ def _check_github_for_update(self): self._num_commits_behind = branch_compared.behind_by self._num_commits_ahead = branch_compared.ahead_by except Exception: # UnknownObjectException - self._newest_commit_hash = "" + self._newest_commit_hash = '' self._num_commits_behind = 0 self._num_commits_ahead = 0 - self._cur_commit_hash = "" + self._cur_commit_hash = '' # fall back and iterate over last 100 (items per page in gh_api) commits if not self._newest_commit_hash: @@ -868,8 +868,8 @@ def set_newest_text(self): log.debug(u"Unknown current version number, don't know if we should update or not") newest_text = "Unknown current version number: If you've never used the application " \ - "upgrade system before then current version is not set." - newest_text += "— Update Now" + 'upgrade system before then current version is not set. 
' \ + '— Update Now' elif self._num_commits_behind > 0: base_url = 'http://github.com/' + self.github_org + '/' + self.github_repo @@ -879,10 +879,10 @@ def set_newest_text(self): url = base_url + '/commits/' newest_text = 'There is a newer version available' - newest_text += " (you're " + str(self._num_commits_behind) + " commit" + newest_text += " (you're " + str(self._num_commits_behind) + ' commit' if self._num_commits_behind > 1: - newest_text += "s" - newest_text += " behind)" + "— Update Now" + newest_text += 's' + newest_text += ' behind) — Update Now' else: return @@ -973,7 +973,7 @@ def update(self): # Notify update successful try: - notifiers.notify_git_update(app.CUR_COMMIT_HASH or "") + notifiers.notify_git_update(app.CUR_COMMIT_HASH or '') except Exception: log.debug(u'Unable to send update notification. Continuing the update process') return True From 910fcbb80717a690e985414fc9e7330260258b43 Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sun, 22 Jul 2018 13:07:03 +0300 Subject: [PATCH 23/51] medusa/clients/torrent/deluge_client.py --- medusa/clients/torrent/deluge_client.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/medusa/clients/torrent/deluge_client.py b/medusa/clients/torrent/deluge_client.py index 571c5ed79e..18dacaf024 100644 --- a/medusa/clients/torrent/deluge_client.py +++ b/medusa/clients/torrent/deluge_client.py @@ -395,9 +395,9 @@ def _set_torrent_ratio(self, result): elif ratio and float(ratio) == -1: # Disable stop at ratio to seed forever - post_data = json.dumps({"method": "core.set_torrent_stop_at_ratio", - "params": [result.hash, False], - "id": 5}) + post_data = json.dumps({'method': 'core.set_torrent_stop_at_ratio', + 'params': [result.hash, False], + 'id': 5}) self._request(method='post', data=post_data) From cd8a0cd9cb71eb21b5f4f2b32d26c602e3dbafae Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sun, 22 Jul 2018 13:19:00 +0300 Subject: [PATCH 24/51] medusa/databases/* --- medusa/databases/__init__.py | 2 +- medusa/databases/cache_db.py | 98 +++++++++--------- medusa/databases/failed_db.py | 8 +- medusa/databases/main_db.py | 190 +++++++++++++++++----------------- 4 files changed, 149 insertions(+), 149 deletions(-) diff --git a/medusa/databases/__init__.py b/medusa/databases/__init__.py index af92c96958..c993d15d28 100644 --- a/medusa/databases/__init__.py +++ b/medusa/databases/__init__.py @@ -1,4 +1,4 @@ # coding=utf-8 from __future__ import unicode_literals -__all__ = ["main_db", "cache_db", "failed_db"] +__all__ = ['main_db', 'cache_db', 'failed_db'] diff --git a/medusa/databases/cache_db.py b/medusa/databases/cache_db.py index 2048495dd7..48aefb6fe1 100644 --- a/medusa/databases/cache_db.py +++ b/medusa/databases/cache_db.py @@ -17,19 +17,19 @@ # and subclass the previous migration. 
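 # For instance (an editor's illustration, not part of this patch), a
 # hypothetical migration appended after the ones below would subclass the
 # last one, AddIndexerIds, and follow the same test()/execute() pattern:
 #
 #     class AddExampleFlag(AddIndexerIds):
 #         def test(self):
 #             return self.hasColumn('scene_exceptions', 'example_flag')
 #
 #         def execute(self):
 #             self.addColumn('scene_exceptions', 'example_flag', 'NUMERIC', 0)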
class InitialSchema(db.SchemaUpgrade): def test(self): - return self.hasTable("db_version") + return self.hasTable('db_version') def execute(self): queries = [ - ("CREATE TABLE lastUpdate (provider TEXT, time NUMERIC);",), - ("CREATE TABLE lastSearch (provider TEXT, time NUMERIC);",), - ("CREATE TABLE scene_exceptions (exception_id INTEGER PRIMARY KEY, indexer_id INTEGER," - " show_name TEXT, season NUMERIC DEFAULT -1, custom NUMERIC DEFAULT 0);",), - ("CREATE TABLE scene_names (indexer_id INTEGER, name TEXT);",), - ("CREATE TABLE network_timezones (network_name TEXT PRIMARY KEY, timezone TEXT);",), - ("CREATE TABLE scene_exceptions_refresh (list TEXT PRIMARY KEY, last_refreshed INTEGER);",), - ("CREATE TABLE db_version (db_version INTEGER);",), - ("INSERT INTO db_version(db_version) VALUES (1);",), + ('CREATE TABLE lastUpdate (provider TEXT, time NUMERIC);',), + ('CREATE TABLE lastSearch (provider TEXT, time NUMERIC);',), + ('CREATE TABLE scene_exceptions (exception_id INTEGER PRIMARY KEY, indexer_id INTEGER,' + ' show_name TEXT, season NUMERIC DEFAULT -1, custom NUMERIC DEFAULT 0);',), + ('CREATE TABLE scene_names (indexer_id INTEGER, name TEXT);',), + ('CREATE TABLE network_timezones (network_name TEXT PRIMARY KEY, timezone TEXT);',), + ('CREATE TABLE scene_exceptions_refresh (list TEXT PRIMARY KEY, last_refreshed INTEGER);',), + ('CREATE TABLE db_version (db_version INTEGER);',), + ('INSERT INTO db_version(db_version) VALUES (1);',), ] for query in queries: if len(query) == 1: @@ -40,110 +40,110 @@ def execute(self): class AddSceneExceptions(InitialSchema): def test(self): - return self.hasTable("scene_exceptions") + return self.hasTable('scene_exceptions') def execute(self): self.connection.action( - "CREATE TABLE scene_exceptions (exception_id INTEGER PRIMARY KEY, indexer_id INTEGER, show_name TEXT);") + 'CREATE TABLE scene_exceptions (exception_id INTEGER PRIMARY KEY, indexer_id INTEGER, show_name TEXT);') class AddSceneNameCache(AddSceneExceptions): def test(self): - return self.hasTable("scene_names") + return self.hasTable('scene_names') def execute(self): - self.connection.action("CREATE TABLE scene_names (indexer_id INTEGER, name TEXT);") + self.connection.action('CREATE TABLE scene_names (indexer_id INTEGER, name TEXT);') class AddNetworkTimezones(AddSceneNameCache): def test(self): - return self.hasTable("network_timezones") + return self.hasTable('network_timezones') def execute(self): - self.connection.action("CREATE TABLE network_timezones (network_name TEXT PRIMARY KEY, timezone TEXT);") + self.connection.action('CREATE TABLE network_timezones (network_name TEXT PRIMARY KEY, timezone TEXT);') class AddLastSearch(AddNetworkTimezones): def test(self): - return self.hasTable("lastSearch") + return self.hasTable('lastSearch') def execute(self): - self.connection.action("CREATE TABLE lastSearch (provider TEXT, time NUMERIC);") + self.connection.action('CREATE TABLE lastSearch (provider TEXT, time NUMERIC);') class AddSceneExceptionsSeasons(AddLastSearch): def test(self): - return self.hasColumn("scene_exceptions", "season") + return self.hasColumn('scene_exceptions', 'season') def execute(self): - self.addColumn("scene_exceptions", "season", "NUMERIC", -1) + self.addColumn('scene_exceptions', 'season', 'NUMERIC', -1) class AddSceneExceptionsCustom(AddSceneExceptionsSeasons): # pylint:disable=too-many-ancestors def test(self): - return self.hasColumn("scene_exceptions", "custom") + return self.hasColumn('scene_exceptions', 'custom') def execute(self): - 
self.addColumn("scene_exceptions", "custom", "NUMERIC", 0) + self.addColumn('scene_exceptions', 'custom', 'NUMERIC', 0) class AddSceneExceptionsRefresh(AddSceneExceptionsCustom): # pylint:disable=too-many-ancestors def test(self): - return self.hasTable("scene_exceptions_refresh") + return self.hasTable('scene_exceptions_refresh') def execute(self): self.connection.action( - "CREATE TABLE scene_exceptions_refresh (list TEXT PRIMARY KEY, last_refreshed INTEGER);") + 'CREATE TABLE scene_exceptions_refresh (list TEXT PRIMARY KEY, last_refreshed INTEGER);') class ConvertSceneExeptionsToIndexerScheme(AddSceneExceptionsRefresh): # pylint:disable=too-many-ancestors def test(self): - return self.hasColumn("scene_exceptions", "indexer_id") + return self.hasColumn('scene_exceptions', 'indexer_id') def execute(self): - self.connection.action("DROP TABLE IF EXISTS tmp_scene_exceptions;") - self.connection.action("ALTER TABLE scene_exceptions RENAME TO tmp_scene_exceptions;") - self.connection.action("CREATE TABLE scene_exceptions (exception_id INTEGER PRIMARY KEY, indexer_id INTEGER," - " show_name TEXT, season NUMERIC DEFAULT -1, custom NUMERIC DEFAULT 0);") - self.connection.action("INSERT INTO scene_exceptions SELECT exception_id, tvdb_id as indexer_id, show_name," - " season, custom FROM tmp_scene_exceptions;") - self.connection.action("DROP TABLE tmp_scene_exceptions;") + self.connection.action('DROP TABLE IF EXISTS tmp_scene_exceptions;') + self.connection.action('ALTER TABLE scene_exceptions RENAME TO tmp_scene_exceptions;') + self.connection.action('CREATE TABLE scene_exceptions (exception_id INTEGER PRIMARY KEY, indexer_id INTEGER,' + ' show_name TEXT, season NUMERIC DEFAULT -1, custom NUMERIC DEFAULT 0);') + self.connection.action('INSERT INTO scene_exceptions SELECT exception_id, tvdb_id as indexer_id, show_name,' + ' season, custom FROM tmp_scene_exceptions;') + self.connection.action('DROP TABLE tmp_scene_exceptions;') class ConvertSceneNamesToIndexerScheme(AddSceneExceptionsRefresh): # pylint:disable=too-many-ancestors def test(self): - return self.hasColumn("scene_names", "indexer_id") + return self.hasColumn('scene_names', 'indexer_id') def execute(self): - self.connection.action("DROP TABLE IF EXISTS tmp_scene_names;") - self.connection.action("ALTER TABLE scene_names RENAME TO tmp_scene_names;") - self.connection.action("CREATE TABLE scene_names (indexer_id INTEGER, name TEXT);") - self.connection.action("INSERT INTO scene_names SELECT * FROM tmp_scene_names;") - self.connection.action("DROP TABLE tmp_scene_names;") + self.connection.action('DROP TABLE IF EXISTS tmp_scene_names;') + self.connection.action('ALTER TABLE scene_names RENAME TO tmp_scene_names;') + self.connection.action('CREATE TABLE scene_names (indexer_id INTEGER, name TEXT);') + self.connection.action('INSERT INTO scene_names SELECT * FROM tmp_scene_names;') + self.connection.action('DROP TABLE tmp_scene_names;') class RemoveIndexerUpdateSchema(ConvertSceneNamesToIndexerScheme): # pylint:disable=too-many-ancestors def test(self): - return not self.hasTable("indexer_update") + return not self.hasTable('indexer_update') def execute(self): - self.connection.action("DROP TABLE indexer_update;") + self.connection.action('DROP TABLE indexer_update;') class AddIndexerSceneExceptions(RemoveIndexerUpdateSchema): # pylint:disable=too-many-ancestors def test(self): - return self.hasColumn("scene_exceptions", "indexer") + return self.hasColumn('scene_exceptions', 'indexer') def execute(self): - self.connection.action("DROP TABLE 
IF EXISTS tmp_scene_exceptions;") - self.connection.action("ALTER TABLE scene_exceptions RENAME TO tmp_scene_exceptions;") + self.connection.action('DROP TABLE IF EXISTS tmp_scene_exceptions;') + self.connection.action('ALTER TABLE scene_exceptions RENAME TO tmp_scene_exceptions;') self.connection.action( - "CREATE TABLE scene_exceptions (exception_id INTEGER PRIMARY KEY, indexer INTEGER, indexer_id INTEGER, " - "show_name TEXT, season NUMERIC DEFAULT -1, custom NUMERIC DEFAULT 0);") + 'CREATE TABLE scene_exceptions (exception_id INTEGER PRIMARY KEY, indexer INTEGER, indexer_id INTEGER, ' + 'show_name TEXT, season NUMERIC DEFAULT -1, custom NUMERIC DEFAULT 0);') self.connection.action( - "INSERT INTO scene_exceptions SELECT exception_id, 1, indexer_id, show_name, season," - "custom FROM tmp_scene_exceptions;") - self.connection.action("DROP TABLE tmp_scene_exceptions;") + 'INSERT INTO scene_exceptions SELECT exception_id, 1, indexer_id, show_name, season,' + 'custom FROM tmp_scene_exceptions;') + self.connection.action('DROP TABLE tmp_scene_exceptions;') class AddIndexerIds(AddIndexerSceneExceptions): @@ -196,5 +196,5 @@ def clear_provider_tables(self): def inc_major_version(self): major_version, minor_version = self.connection.version major_version += 1 - self.connection.action("UPDATE db_version SET db_version = ?;", [major_version]) + self.connection.action('UPDATE db_version SET db_version = ?;', [major_version]) return self.connection.version diff --git a/medusa/databases/failed_db.py b/medusa/databases/failed_db.py index b4df13d77a..d258531cad 100644 --- a/medusa/databases/failed_db.py +++ b/medusa/databases/failed_db.py @@ -158,7 +158,7 @@ def translate_status(self): def inc_major_version(self): major_version, minor_version = self.connection.version major_version += 1 - self.connection.action("UPDATE db_version SET db_version = ?;", [major_version]) + self.connection.action('UPDATE db_version SET db_version = ?;', [major_version]) return self.connection.version @@ -185,16 +185,16 @@ def shift_history_qualities(self): This makes it possible to set UNKNOWN as 1, making it the lowest quality. """ log.info('Shift qualities in history one place to the left.') - sql_results = self.connection.select("SELECT quality FROM history GROUP BY quality ORDER BY quality DESC;") + sql_results = self.connection.select('SELECT quality FROM history GROUP BY quality ORDER BY quality DESC;') for result in sql_results: quality = result[b'quality'] new_quality = quality << 1 self.connection.action( - "UPDATE history SET quality = ? WHERE quality = ?;", + 'UPDATE history SET quality = ? WHERE quality = ?;', [new_quality, quality] ) def update_status_unknown(self): """Change any `UNKNOWN` quality to 1.""" log.info(u'Update status UNKONWN from tv_episodes') - self.connection.action("UPDATE history SET quality = 1 WHERE quality = 65536;") + self.connection.action('UPDATE history SET quality = 1 WHERE quality = 65536;') diff --git a/medusa/databases/main_db.py b/medusa/databases/main_db.py index f6c410b455..d55edcb384 100644 --- a/medusa/databases/main_db.py +++ b/medusa/databases/main_db.py @@ -67,12 +67,12 @@ def update_old_propers(self): proper_tags = '|'.join(parse_result.proper_tags) log.debug(u'Add proper tags {0!r} to {1!r}', proper_tags, proper_release) - self.connection.action("UPDATE history SET proper_tags = ? WHERE resource = ?", + self.connection.action('UPDATE history SET proper_tags = ? 
WHERE resource = ?', [proper_tags, proper_release]) def fix_subtitle_reference(self): log.debug(u'Checking for delete episodes with subtitle reference') - query = "SELECT episode_id, showid, location, subtitles, subtitles_searchcount, subtitles_lastsearch " + \ + query = 'SELECT episode_id, showid, location, subtitles, subtitles_searchcount, subtitles_lastsearch ' + \ "FROM tv_episodes WHERE location = '' AND subtitles is not ''" sql_results = self.connection.select(query) @@ -83,14 +83,14 @@ def fix_subtitle_reference(self): sql_result[b'episode_id'], sql_result[b'showid']) self.connection.action("UPDATE tv_episodes SET subtitles = '', " "subtitles_searchcount = 0, subtitles_lastsearch = '' " - "WHERE episode_id = %i" % (sql_result[b'episode_id']) + 'WHERE episode_id = %i' % (sql_result[b'episode_id']) ) def fix_duplicate_episodes(self): sql_results = self.connection.select( - "SELECT indexer, showid, season, episode, COUNT(showid) as count FROM tv_episodes GROUP BY indexer," - " showid, season, episode HAVING count > 1") + 'SELECT indexer, showid, season, episode, COUNT(showid) as count FROM tv_episodes GROUP BY indexer,' + ' showid, season, episode HAVING count > 1') for cur_duplicate in sql_results: @@ -99,21 +99,21 @@ def fix_duplicate_episodes(self): cur_duplicate[b'showid'], cur_duplicate[b'season'], cur_duplicate[b'episode'], cur_duplicate[b'count']) cur_dupe_results = self.connection.select( - "SELECT episode_id FROM tv_episodes WHERE indexer = ? AND showid = ? AND season = ? and episode = ? ORDER BY episode_id DESC LIMIT ?", + 'SELECT episode_id FROM tv_episodes WHERE indexer = ? AND showid = ? AND season = ? and episode = ? ORDER BY episode_id DESC LIMIT ?', [cur_duplicate[b'indexer'], cur_duplicate[b'showid'], cur_duplicate[b'season'], cur_duplicate[b'episode'], int(cur_duplicate[b'count']) - 1] ) for cur_dupe_id in cur_dupe_results: log.info('Deleting duplicate episode with episode_id: {0!s}', cur_dupe_id[b'episode_id']) - self.connection.action("DELETE FROM tv_episodes WHERE episode_id = ?", [cur_dupe_id[b'episode_id']]) + self.connection.action('DELETE FROM tv_episodes WHERE episode_id = ?', [cur_dupe_id[b'episode_id']]) def fix_orphan_episodes(self): sql_results = self.connection.select( - "SELECT episode_id, showid, tv_shows.indexer_id FROM tv_episodes" - " LEFT JOIN tv_shows ON tv_episodes.showid=tv_shows.indexer_id" - " WHERE tv_shows.indexer_id IS NULL;") + 'SELECT episode_id, showid, tv_shows.indexer_id FROM tv_episodes' + ' LEFT JOIN tv_shows ON tv_episodes.showid=tv_shows.indexer_id' + ' WHERE tv_shows.indexer_id IS NULL;') for cur_orphan in sql_results: log.debug(u'Orphan episode detected! 
episode_id: {0!s}' @@ -121,49 +121,49 @@ def fix_orphan_episodes(self): cur_orphan[b'showid']) log.info(u'Deleting orphan episode with episode_id: {0!s}', cur_orphan[b'episode_id']) - self.connection.action("DELETE FROM tv_episodes WHERE episode_id = ?", [cur_orphan[b'episode_id']]) + self.connection.action('DELETE FROM tv_episodes WHERE episode_id = ?', [cur_orphan[b'episode_id']]) def fix_missing_table_indexes(self): if not self.connection.select("PRAGMA index_info('idx_tv_episodes_showid_airdate')"): log.info(u'Missing idx_tv_episodes_showid_airdate for TV Episodes table detected, fixing...') - self.connection.action("CREATE INDEX idx_tv_episodes_showid_airdate ON tv_episodes(showid, airdate);") + self.connection.action('CREATE INDEX idx_tv_episodes_showid_airdate ON tv_episodes(showid, airdate);') if not self.connection.select("PRAGMA index_info('idx_showid')"): log.info(u'Missing idx_showid for TV Episodes table detected, fixing...') - self.connection.action("CREATE INDEX idx_showid ON tv_episodes (showid);") + self.connection.action('CREATE INDEX idx_showid ON tv_episodes (showid);') if not self.connection.select("PRAGMA index_info('idx_status')"): log.info(u'Missing idx_status for TV Episodes table detected, fixing...') - self.connection.action("CREATE INDEX idx_status ON tv_episodes (status, quality, season, episode, airdate)") + self.connection.action('CREATE INDEX idx_status ON tv_episodes (status, quality, season, episode, airdate)') if not self.connection.select("PRAGMA index_info('idx_sta_epi_air')"): log.info(u'Missing idx_sta_epi_air for TV Episodes table detected, fixing...') - self.connection.action("CREATE INDEX idx_sta_epi_air ON tv_episodes (status, quality, episode, airdate)") + self.connection.action('CREATE INDEX idx_sta_epi_air ON tv_episodes (status, quality, episode, airdate)') if not self.connection.select("PRAGMA index_info('idx_sta_epi_sta_air')"): log.info(u'Missing idx_sta_epi_sta_air for TV Episodes table detected, fixing...') - self.connection.action("CREATE INDEX idx_sta_epi_sta_air ON tv_episodes (season, episode, status, quality, airdate)") + self.connection.action('CREATE INDEX idx_sta_epi_sta_air ON tv_episodes (season, episode, status, quality, airdate)') def fix_unaired_episodes(self): cur_date = datetime.date.today() sql_results = self.connection.select( - "SELECT episode_id FROM tv_episodes WHERE (airdate > ? OR airdate = 1) AND status in (?, ?) AND season > 0", + 'SELECT episode_id FROM tv_episodes WHERE (airdate > ? OR airdate = 1) AND status in (?, ?) AND season > 0', [cur_date.toordinal(), common.SKIPPED, common.WANTED]) for cur_unaired in sql_results: log.info(u'Fixing unaired episode status for episode_id: {0!s}', cur_unaired[b'episode_id']) - self.connection.action("UPDATE tv_episodes SET status = ? WHERE episode_id = ?", + self.connection.action('UPDATE tv_episodes SET status = ? WHERE episode_id = ?', [common.UNAIRED, cur_unaired[b'episode_id']]) def fix_indexer_show_statues(self): for old_status, new_status in iteritems(STATUS_MAP): - self.connection.action("UPDATE tv_shows SET status = ? WHERE LOWER(status) = ?", [new_status, old_status]) + self.connection.action('UPDATE tv_shows SET status = ? 
WHERE LOWER(status) = ?', [new_status, old_status]) def fix_episode_statuses(self): - sql_results = self.connection.select("SELECT episode_id, showid FROM tv_episodes WHERE status IS NULL") + sql_results = self.connection.select('SELECT episode_id, showid FROM tv_episodes WHERE status IS NULL') for cur_ep in sql_results: log.debug(u'MALFORMED episode status detected! episode_id: {0!s}' @@ -171,13 +171,13 @@ def fix_episode_statuses(self): cur_ep[b'showid']) log.info(u'Fixing malformed episode status with' u' episode_id: {0!s}', cur_ep[b'episode_id']) - self.connection.action("UPDATE tv_episodes SET status = ? WHERE episode_id = ?", + self.connection.action('UPDATE tv_episodes SET status = ? WHERE episode_id = ?', [common.UNSET, cur_ep[b'episode_id']]) def fix_invalid_airdates(self): sql_results = self.connection.select( - "SELECT episode_id, showid FROM tv_episodes WHERE airdate >= ? OR airdate < 1", + 'SELECT episode_id, showid FROM tv_episodes WHERE airdate >= ? OR airdate < 1', [datetime.date.max.toordinal()]) for bad_airdate in sql_results: @@ -215,7 +215,7 @@ def fix_subtitles_codes(self): langs.append(subcode) - self.connection.action("UPDATE tv_episodes SET subtitles = ?, subtitles_lastsearch = ? WHERE episode_id = ?;", + self.connection.action('UPDATE tv_episodes SET subtitles = ?, subtitles_lastsearch = ? WHERE episode_id = ?;', [','.join(langs), datetime.datetime.now().strftime(dateTimeFormat), sql_result[b'episode_id']]) def fix_show_nfo_lang(self): @@ -229,29 +229,29 @@ def fix_show_nfo_lang(self): class InitialSchema(db.SchemaUpgrade): def test(self): - return self.hasTable("db_version") + return self.hasTable('db_version') def execute(self): - if not self.hasTable("tv_shows") and not self.hasTable("db_version"): + if not self.hasTable('tv_shows') and not self.hasTable('db_version'): queries = [ - "CREATE TABLE db_version(db_version INTEGER);", - "CREATE TABLE history(action NUMERIC, date NUMERIC, showid NUMERIC, season NUMERIC, episode NUMERIC, quality NUMERIC, resource TEXT, provider TEXT, version NUMERIC DEFAULT -1);", - "CREATE TABLE imdb_info(indexer_id INTEGER PRIMARY KEY, imdb_id TEXT, title TEXT, year NUMERIC, akas TEXT, runtimes NUMERIC, genres TEXT, countries TEXT, country_codes TEXT, certificates TEXT, rating TEXT, votes INTEGER, last_update NUMERIC, plot TEXT);", - "CREATE TABLE info(last_backlog NUMERIC, last_indexer NUMERIC, last_proper_search NUMERIC);", - "CREATE TABLE scene_numbering(indexer TEXT, indexer_id INTEGER, season INTEGER, episode INTEGER, scene_season INTEGER, scene_episode INTEGER, absolute_number NUMERIC, scene_absolute_number NUMERIC, PRIMARY KEY(indexer_id, season, episode));", - "CREATE TABLE tv_shows(show_id INTEGER PRIMARY KEY, indexer_id NUMERIC, indexer NUMERIC, show_name TEXT, location TEXT, network TEXT, genre TEXT, classification TEXT, runtime NUMERIC, quality NUMERIC, airs TEXT, status TEXT, flatten_folders NUMERIC, paused NUMERIC, startyear NUMERIC, air_by_date NUMERIC, lang TEXT, subtitles NUMERIC, notify_list TEXT, imdb_id TEXT, last_update_indexer NUMERIC, dvdorder NUMERIC, archive_firstmatch NUMERIC, rls_require_words TEXT, rls_ignore_words TEXT, sports NUMERIC, anime NUMERIC, scene NUMERIC, default_ep_status NUMERIC DEFAULT -1);", - "CREATE TABLE tv_episodes(episode_id INTEGER PRIMARY KEY, showid NUMERIC, indexerid INTEGER, indexer INTEGER, name TEXT, season NUMERIC, episode NUMERIC, description TEXT, airdate NUMERIC, hasnfo NUMERIC, hastbn NUMERIC, status NUMERIC, location TEXT, file_size NUMERIC, release_name TEXT, subtitles 
TEXT, subtitles_searchcount NUMERIC, subtitles_lastsearch TIMESTAMP, is_proper NUMERIC, scene_season NUMERIC, scene_episode NUMERIC, absolute_number NUMERIC, scene_absolute_number NUMERIC, version NUMERIC DEFAULT -1, release_group TEXT);", - "CREATE TABLE blacklist (show_id INTEGER, range TEXT, keyword TEXT);", - "CREATE TABLE whitelist (show_id INTEGER, range TEXT, keyword TEXT);", - "CREATE TABLE xem_refresh (indexer TEXT, indexer_id INTEGER PRIMARY KEY, last_refreshed INTEGER);", - "CREATE TABLE indexer_mapping (indexer_id INTEGER, indexer INTEGER, mindexer_id INTEGER, mindexer INTEGER, PRIMARY KEY (indexer_id, indexer, mindexer));", - "CREATE UNIQUE INDEX idx_indexer_id ON tv_shows(indexer_id);", - "CREATE INDEX idx_showid ON tv_episodes(showid);", - "CREATE INDEX idx_sta_epi_air ON tv_episodes(status, episode, airdate);", - "CREATE INDEX idx_sta_epi_sta_air ON tv_episodes(season, episode, status, airdate);", - "CREATE INDEX idx_status ON tv_episodes(status,season,episode,airdate);", - "CREATE INDEX idx_tv_episodes_showid_airdate ON tv_episodes(showid, airdate);", - "INSERT INTO db_version(db_version) VALUES (42);" + 'CREATE TABLE db_version(db_version INTEGER);', + 'CREATE TABLE history(action NUMERIC, date NUMERIC, showid NUMERIC, season NUMERIC, episode NUMERIC, quality NUMERIC, resource TEXT, provider TEXT, version NUMERIC DEFAULT -1);', + 'CREATE TABLE imdb_info(indexer_id INTEGER PRIMARY KEY, imdb_id TEXT, title TEXT, year NUMERIC, akas TEXT, runtimes NUMERIC, genres TEXT, countries TEXT, country_codes TEXT, certificates TEXT, rating TEXT, votes INTEGER, last_update NUMERIC, plot TEXT);', + 'CREATE TABLE info(last_backlog NUMERIC, last_indexer NUMERIC, last_proper_search NUMERIC);', + 'CREATE TABLE scene_numbering(indexer TEXT, indexer_id INTEGER, season INTEGER, episode INTEGER, scene_season INTEGER, scene_episode INTEGER, absolute_number NUMERIC, scene_absolute_number NUMERIC, PRIMARY KEY(indexer_id, season, episode));', + 'CREATE TABLE tv_shows(show_id INTEGER PRIMARY KEY, indexer_id NUMERIC, indexer NUMERIC, show_name TEXT, location TEXT, network TEXT, genre TEXT, classification TEXT, runtime NUMERIC, quality NUMERIC, airs TEXT, status TEXT, flatten_folders NUMERIC, paused NUMERIC, startyear NUMERIC, air_by_date NUMERIC, lang TEXT, subtitles NUMERIC, notify_list TEXT, imdb_id TEXT, last_update_indexer NUMERIC, dvdorder NUMERIC, archive_firstmatch NUMERIC, rls_require_words TEXT, rls_ignore_words TEXT, sports NUMERIC, anime NUMERIC, scene NUMERIC, default_ep_status NUMERIC DEFAULT -1);', + 'CREATE TABLE tv_episodes(episode_id INTEGER PRIMARY KEY, showid NUMERIC, indexerid INTEGER, indexer INTEGER, name TEXT, season NUMERIC, episode NUMERIC, description TEXT, airdate NUMERIC, hasnfo NUMERIC, hastbn NUMERIC, status NUMERIC, location TEXT, file_size NUMERIC, release_name TEXT, subtitles TEXT, subtitles_searchcount NUMERIC, subtitles_lastsearch TIMESTAMP, is_proper NUMERIC, scene_season NUMERIC, scene_episode NUMERIC, absolute_number NUMERIC, scene_absolute_number NUMERIC, version NUMERIC DEFAULT -1, release_group TEXT);', + 'CREATE TABLE blacklist (show_id INTEGER, range TEXT, keyword TEXT);', + 'CREATE TABLE whitelist (show_id INTEGER, range TEXT, keyword TEXT);', + 'CREATE TABLE xem_refresh (indexer TEXT, indexer_id INTEGER PRIMARY KEY, last_refreshed INTEGER);', + 'CREATE TABLE indexer_mapping (indexer_id INTEGER, indexer INTEGER, mindexer_id INTEGER, mindexer INTEGER, PRIMARY KEY (indexer_id, indexer, mindexer));', + 'CREATE UNIQUE INDEX idx_indexer_id ON 
tv_shows(indexer_id);', + 'CREATE INDEX idx_showid ON tv_episodes(showid);', + 'CREATE INDEX idx_sta_epi_air ON tv_episodes(status, episode, airdate);', + 'CREATE INDEX idx_sta_epi_sta_air ON tv_episodes(season, episode, status, airdate);', + 'CREATE INDEX idx_status ON tv_episodes(status,season,episode,airdate);', + 'CREATE INDEX idx_tv_episodes_showid_airdate ON tv_episodes(showid, airdate);', + 'INSERT INTO db_version(db_version) VALUES (42);' ] for query in queries: self.connection.action(query) @@ -289,9 +289,9 @@ def execute(self): utils.backup_database(self.connection.path, self.checkDBVersion()) log.info(u'Adding column version to tv_episodes and history') - self.addColumn("tv_episodes", "version", "NUMERIC", "-1") - self.addColumn("tv_episodes", "release_group", "TEXT", "") - self.addColumn("history", "version", "NUMERIC", "-1") + self.addColumn('tv_episodes', 'version', 'NUMERIC', '-1') + self.addColumn('tv_episodes', 'release_group', 'TEXT', '') + self.addColumn('history', 'version', 'NUMERIC', '-1') self.incDBVersion() @@ -304,7 +304,7 @@ def execute(self): utils.backup_database(self.connection.path, self.checkDBVersion()) log.info(u'Adding column default_ep_status to tv_shows') - self.addColumn("tv_shows", "default_ep_status", "NUMERIC", "-1") + self.addColumn('tv_shows', 'default_ep_status', 'NUMERIC', '-1') self.incDBVersion() @@ -317,18 +317,18 @@ def execute(self): utils.backup_database(self.connection.path, self.checkDBVersion()) log.info(u'Converting column indexer and default_ep_status field types to numeric') - self.connection.action("DROP TABLE IF EXISTS tmp_tv_shows") - self.connection.action("ALTER TABLE tv_shows RENAME TO tmp_tv_shows") - self.connection.action("CREATE TABLE tv_shows (show_id INTEGER PRIMARY KEY, indexer_id NUMERIC," - " indexer NUMERIC, show_name TEXT, location TEXT, network TEXT, genre TEXT," - " classification TEXT, runtime NUMERIC, quality NUMERIC, airs TEXT, status TEXT," - " flatten_folders NUMERIC, paused NUMERIC, startyear NUMERIC, air_by_date NUMERIC," - " lang TEXT, subtitles NUMERIC, notify_list TEXT, imdb_id TEXT," - " last_update_indexer NUMERIC, dvdorder NUMERIC, archive_firstmatch NUMERIC," - " rls_require_words TEXT, rls_ignore_words TEXT, sports NUMERIC, anime NUMERIC," - " scene NUMERIC, default_ep_status NUMERIC)") - self.connection.action("INSERT INTO tv_shows SELECT * FROM tmp_tv_shows") - self.connection.action("DROP TABLE tmp_tv_shows") + self.connection.action('DROP TABLE IF EXISTS tmp_tv_shows') + self.connection.action('ALTER TABLE tv_shows RENAME TO tmp_tv_shows') + self.connection.action('CREATE TABLE tv_shows (show_id INTEGER PRIMARY KEY, indexer_id NUMERIC,' + ' indexer NUMERIC, show_name TEXT, location TEXT, network TEXT, genre TEXT,' + ' classification TEXT, runtime NUMERIC, quality NUMERIC, airs TEXT, status TEXT,' + ' flatten_folders NUMERIC, paused NUMERIC, startyear NUMERIC, air_by_date NUMERIC,' + ' lang TEXT, subtitles NUMERIC, notify_list TEXT, imdb_id TEXT,' + ' last_update_indexer NUMERIC, dvdorder NUMERIC, archive_firstmatch NUMERIC,' + ' rls_require_words TEXT, rls_ignore_words TEXT, sports NUMERIC, anime NUMERIC,' + ' scene NUMERIC, default_ep_status NUMERIC)') + self.connection.action('INSERT INTO tv_shows SELECT * FROM tmp_tv_shows') + self.connection.action('DROP TABLE tmp_tv_shows') self.incDBVersion() @@ -338,20 +338,20 @@ def test(self): return self.checkDBVersion() >= 43 and self.hasColumn(b'db_version', b'db_minor_version') def incDBVersion(self): - warnings.warn("Deprecated: Use inc_major_version 
or inc_minor_version instead", DeprecationWarning) + warnings.warn('Deprecated: Use inc_major_version or inc_minor_version instead', DeprecationWarning) def inc_major_version(self): major_version, minor_version = self.connection.version major_version += 1 minor_version = 0 - self.connection.action("UPDATE db_version SET db_version = ?, db_minor_version = ?;", + self.connection.action('UPDATE db_version SET db_version = ?, db_minor_version = ?;', [major_version, minor_version]) return self.connection.version def inc_minor_version(self): major_version, minor_version = self.connection.version minor_version += 1 - self.connection.action("UPDATE db_version SET db_version = ?, db_minor_version = ?;", + self.connection.action('UPDATE db_version SET db_version = ?, db_minor_version = ?;', [major_version, minor_version]) return self.connection.version @@ -461,8 +461,8 @@ def execute(self): utils.backup_database(self.connection.path, self.connection.version) log.info(u'Adding column info_hash in history') - if not self.hasColumn("history", "info_hash"): - self.addColumn("history", "info_hash", 'TEXT', None) + if not self.hasColumn('history', 'info_hash'): + self.addColumn('history', 'info_hash', 'TEXT', None) self.inc_minor_version() @@ -506,9 +506,9 @@ def test(self): def execute(self): utils.backup_database(self.connection.path, self.connection.version) - log.info(u"Adding column size in history") - if not self.hasColumn("history", "size"): - self.addColumn("history", "size", 'NUMERIC', -1) + log.info(u'Adding column size in history') + if not self.hasColumn('history', 'size'): + self.addColumn('history', 'size', 'NUMERIC', -1) self.inc_minor_version() @@ -526,14 +526,14 @@ def execute(self): utils.backup_database(self.connection.path, self.connection.version) log.info(u'Adding PK to mindexer column in indexer_mapping table') - self.connection.action("DROP TABLE IF EXISTS new_indexer_mapping;") - self.connection.action("CREATE TABLE IF NOT EXISTS new_indexer_mapping" - "(indexer_id INTEGER, indexer INTEGER, mindexer_id INTEGER, mindexer INTEGER," - "PRIMARY KEY (indexer_id, indexer, mindexer));") - self.connection.action("INSERT INTO new_indexer_mapping SELECT * FROM indexer_mapping;") - self.connection.action("DROP TABLE IF EXISTS indexer_mapping;") - self.connection.action("ALTER TABLE new_indexer_mapping RENAME TO indexer_mapping;") - self.connection.action("DROP TABLE IF EXISTS new_indexer_mapping;") + self.connection.action('DROP TABLE IF EXISTS new_indexer_mapping;') + self.connection.action('CREATE TABLE IF NOT EXISTS new_indexer_mapping' + '(indexer_id INTEGER, indexer INTEGER, mindexer_id INTEGER, mindexer INTEGER,' + 'PRIMARY KEY (indexer_id, indexer, mindexer));') + self.connection.action('INSERT INTO new_indexer_mapping SELECT * FROM indexer_mapping;') + self.connection.action('DROP TABLE IF EXISTS indexer_mapping;') + self.connection.action('ALTER TABLE new_indexer_mapping RENAME TO indexer_mapping;') + self.connection.action('DROP TABLE IF EXISTS new_indexer_mapping;') self.inc_minor_version() @@ -551,19 +551,19 @@ def execute(self): utils.backup_database(self.connection.path, self.connection.version) log.info(u'Make indexer and indexer_id as INTEGER in tv_episodes table') - self.connection.action("DROP TABLE IF EXISTS new_tv_episodes;") + self.connection.action('DROP TABLE IF EXISTS new_tv_episodes;') self.connection.action( - "CREATE TABLE new_tv_episodes " - "(episode_id INTEGER PRIMARY KEY, showid NUMERIC, indexerid INTEGER, indexer INTEGER, name TEXT, " - "season NUMERIC, 
episode NUMERIC, description TEXT, airdate NUMERIC, hasnfo NUMERIC, hastbn NUMERIC, " - "status NUMERIC, location TEXT, file_size NUMERIC, release_name TEXT, subtitles TEXT, " - "subtitles_searchcount NUMERIC, subtitles_lastsearch TIMESTAMP, is_proper NUMERIC, " - "scene_season NUMERIC, scene_episode NUMERIC, absolute_number NUMERIC, scene_absolute_number NUMERIC, " - "version NUMERIC DEFAULT -1, release_group TEXT, manually_searched NUMERIC);") - self.connection.action("INSERT INTO new_tv_episodes SELECT * FROM tv_episodes;") - self.connection.action("DROP TABLE IF EXISTS tv_episodes;") - self.connection.action("ALTER TABLE new_tv_episodes RENAME TO tv_episodes;") - self.connection.action("DROP TABLE IF EXISTS new_tv_episodoes;") + 'CREATE TABLE new_tv_episodes ' + '(episode_id INTEGER PRIMARY KEY, showid NUMERIC, indexerid INTEGER, indexer INTEGER, name TEXT, ' + 'season NUMERIC, episode NUMERIC, description TEXT, airdate NUMERIC, hasnfo NUMERIC, hastbn NUMERIC, ' + 'status NUMERIC, location TEXT, file_size NUMERIC, release_name TEXT, subtitles TEXT, ' + 'subtitles_searchcount NUMERIC, subtitles_lastsearch TIMESTAMP, is_proper NUMERIC, ' + 'scene_season NUMERIC, scene_episode NUMERIC, absolute_number NUMERIC, scene_absolute_number NUMERIC, ' + 'version NUMERIC DEFAULT -1, release_group TEXT, manually_searched NUMERIC);') + self.connection.action('INSERT INTO new_tv_episodes SELECT * FROM tv_episodes;') + self.connection.action('DROP TABLE IF EXISTS tv_episodes;') + self.connection.action('ALTER TABLE new_tv_episodes RENAME TO tv_episodes;') + self.connection.action('DROP TABLE IF EXISTS new_tv_episodoes;') self.inc_minor_version() @@ -744,7 +744,7 @@ def execute(self): # Update `history` table: Remove the quality value from `action` log.info(u'Removing the quality from the action field, as this is a composite status') - sql_results = self.connection.select("SELECT action FROM history GROUP BY action;") + sql_results = self.connection.select('SELECT action FROM history GROUP BY action;') for item in sql_results: composite_action = item[b'action'] status, quality = utils.split_composite_status(composite_action) @@ -780,7 +780,7 @@ def shift_tv_qualities(self): This makes it possible to set UNKNOWN as 1, making it the lowest quality. """ log.info('Shift qualities in tv_shows one place to the left.') - sql_results = self.connection.select("SELECT quality FROM tv_shows GROUP BY quality ORDER BY quality DESC;") + sql_results = self.connection.select('SELECT quality FROM tv_shows GROUP BY quality ORDER BY quality DESC;') for result in sql_results: quality = result[b'quality'] new_quality = quality << 1 @@ -793,7 +793,7 @@ def shift_tv_qualities(self): new_quality |= common.Quality.UNKNOWN # Then re-add it using the correct value self.connection.action( - "UPDATE tv_shows SET quality = ? WHERE quality = ?;", + 'UPDATE tv_shows SET quality = ? WHERE quality = ?;', [new_quality, quality] ) @@ -804,8 +804,8 @@ def shift_episode_qualities(self): This makes it possible to set UNKNOWN as 1, making it the lowest quality. 
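 
         For example, with illustrative values: a quality stored as 2 becomes
         4 after the shift (2 << 1), freeing the value 1 to later represent
         UNKNOWN.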
""" log.info('Shift qualities in tv_episodes one place to the left.') - sql_results = self.connection.select("SELECT quality FROM tv_episodes WHERE quality != 0 GROUP BY quality" - " ORDER BY quality DESC;") + sql_results = self.connection.select('SELECT quality FROM tv_episodes WHERE quality != 0 GROUP BY quality' + ' ORDER BY quality DESC;') for result in sql_results: quality = result[b'quality'] new_quality = quality << 1 @@ -816,7 +816,7 @@ def shift_episode_qualities(self): new_quality = quality << 1 self.connection.action( - "UPDATE tv_episodes SET quality = ? WHERE quality = ?;", + 'UPDATE tv_episodes SET quality = ? WHERE quality = ?;', [new_quality, quality] ) @@ -827,7 +827,7 @@ def shift_history_qualities(self): This makes it possible to set UNKNOWN as 1, making it the lowest quality. """ log.info('Shift qualities in history one place to the left.') - sql_results = self.connection.select("SELECT quality FROM history GROUP BY quality ORDER BY quality DESC;") + sql_results = self.connection.select('SELECT quality FROM history GROUP BY quality ORDER BY quality DESC;') for result in sql_results: quality = result[b'quality'] @@ -837,6 +837,6 @@ def shift_history_qualities(self): new_quality = quality << 1 self.connection.action( - "UPDATE history SET quality = ? WHERE quality = ?;", + 'UPDATE history SET quality = ? WHERE quality = ?;', [new_quality, quality] ) From 5494c3cd7c87321143e1a05249ab55624c05d279 Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sun, 22 Jul 2018 13:23:20 +0300 Subject: [PATCH 25/51] medusa/helpers/* --- medusa/helpers/__init__.py | 38 ++++++++++++++++++------------------- medusa/helpers/anidb.py | 2 +- medusa/helpers/externals.py | 2 +- 3 files changed, 21 insertions(+), 21 deletions(-) diff --git a/medusa/helpers/__init__.py b/medusa/helpers/__init__.py index 76f514409d..b08be977f5 100644 --- a/medusa/helpers/__init__.py +++ b/medusa/helpers/__init__.py @@ -69,10 +69,10 @@ def indent_xml(elem, level=0): """Do our pretty printing and make Matt very happy.""" - i = "\n" + level * " " + i = '\n' + level * ' ' if elem: if not elem.text or not elem.text.strip(): - elem.text = i + " " + elem.text = i + ' ' if not elem.tail or not elem.tail.strip(): elem.tail = i for elem in elem: @@ -105,7 +105,7 @@ def is_media_file(filename): if filename.startswith('._'): return False - sep_file = filename.rpartition(".") + sep_file = filename.rpartition('.') if re.search('extras?$', sep_file[0], re.I): return False @@ -489,7 +489,7 @@ def make_dirs(path): {'path': path}) os.makedirs(path) except (OSError, IOError) as msg: - log.error(u"Failed creating {path} : {error!r}", + log.error(u'Failed creating {path} : {error!r}', {'path': path, 'error': msg}) return False @@ -890,7 +890,7 @@ def backup_versioned_file(old_file, version): log.debug(u'Trying to back up {old} to new', {'old': old_file, 'new': new_file}) shutil.copy(old_file, new_file) - log.debug(u"Backup done") + log.debug(u'Backup done') break except OSError as error: log.warning(u'Error while trying to back up {old} to {new}:' @@ -911,7 +911,7 @@ def backup_versioned_file(old_file, version): def get_lan_ip(): """Return IP of system.""" try: - return [ip for ip in socket.gethostbyname_ex(socket.gethostname())[2] if not ip.startswith("127.")][0] + return [ip for ip in socket.gethostbyname_ex(socket.gethostname())[2] if not ip.startswith('127.')][0] except Exception: return socket.gethostname() @@ -1260,7 +1260,7 @@ def download_file(url, filename, session, method='GET', data=None, headers=None, def 
handle_requests_exception(requests_exception): - default = "Request failed: {0}" + default = 'Request failed: {0}' try: raise requests_exception except requests.exceptions.SSLError as error: @@ -1423,7 +1423,7 @@ def is_file_locked(check_file, write_lock_check=False): return True if write_lock_check: - lock_file = check_file + ".lckchk" + lock_file = check_file + '.lckchk' if os.path.exists(lock_file): os.remove(lock_file) try: @@ -1462,34 +1462,34 @@ def get_tvdb_from_id(indexer_id, indexer): tvdb_id = '' if indexer == 'IMDB': - url = "http://www.thetvdb.com/api/GetSeriesByRemoteID.php?imdbid=%s" % indexer_id + url = 'http://www.thetvdb.com/api/GetSeriesByRemoteID.php?imdbid=%s' % indexer_id data = session.get_content(url) if data is None: return tvdb_id with suppress(SyntaxError): tree = ET.fromstring(data) - for show in tree.iter("Series"): - tvdb_id = show.findtext("seriesid") + for show in tree.iter('Series'): + tvdb_id = show.findtext('seriesid') if tvdb_id: return tvdb_id elif indexer == 'ZAP2IT': - url = "http://www.thetvdb.com/api/GetSeriesByRemoteID.php?zap2it=%s" % indexer_id + url = 'http://www.thetvdb.com/api/GetSeriesByRemoteID.php?zap2it=%s' % indexer_id data = session.get_content(url) if data is None: return tvdb_id with suppress(SyntaxError): tree = ET.fromstring(data) - for show in tree.iter("Series"): - tvdb_id = show.findtext("seriesid") + for show in tree.iter('Series'): + tvdb_id = show.findtext('seriesid') return tvdb_id elif indexer == 'TVMAZE': - url = "http://api.tvmaze.com/shows/%s" % indexer_id + url = 'http://api.tvmaze.com/shows/%s' % indexer_id data = session.get_json(url) if data is None: return tvdb_id @@ -1591,10 +1591,10 @@ def is_ip_private(ip): :return: :rtype: bool """ - priv_lo = re.compile(r"^127\.\d{1,3}\.\d{1,3}\.\d{1,3}$") - priv_24 = re.compile(r"^10\.\d{1,3}\.\d{1,3}\.\d{1,3}$") - priv_20 = re.compile(r"^192\.168\.\d{1,3}.\d{1,3}$") - priv_16 = re.compile(r"^172.(1[6-9]|2[0-9]|3[0-1]).[0-9]{1,3}.[0-9]{1,3}$") + priv_lo = re.compile(r'^127\.\d{1,3}\.\d{1,3}\.\d{1,3}$') + priv_24 = re.compile(r'^10\.\d{1,3}\.\d{1,3}\.\d{1,3}$') + priv_20 = re.compile(r'^192\.168\.\d{1,3}.\d{1,3}$') + priv_16 = re.compile(r'^172.(1[6-9]|2[0-9]|3[0-1]).[0-9]{1,3}.[0-9]{1,3}$') return bool(priv_lo.match(ip) or priv_24.match(ip) or priv_20.match(ip) or priv_16.match(ip)) diff --git a/medusa/helpers/anidb.py b/medusa/helpers/anidb.py index e875668c38..bd605afe9a 100644 --- a/medusa/helpers/anidb.py +++ b/medusa/helpers/anidb.py @@ -51,7 +51,7 @@ def anidb_logger(msg): def create_key_encode_utf_8(namespace, fn, **kw): def generate_key(*args, **kw): - return namespace + "|" + args[0].encode('utf-8') + return namespace + '|' + args[0].encode('utf-8') return generate_key diff --git a/medusa/helpers/externals.py b/medusa/helpers/externals.py index 4541361ca6..c49f8cd3cb 100644 --- a/medusa/helpers/externals.py +++ b/medusa/helpers/externals.py @@ -103,7 +103,7 @@ def get_externals(show=None, indexer=None, indexed_show=None): except IndexerUnavailable: continue if hasattr(t, 'get_id_by_external'): - log.debug(u"Trying other indexer: {indexer} get_id_by_external", + log.debug(u'Trying other indexer: {indexer} get_id_by_external', {'indexer': indexerApi(other_indexer).name}) # Call the get_id_by_external and pass all the externals we have, # except for the indexers own. 
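A note on the convention these patches apply: flake8-quotes flags
double-quoted inline strings, but its avoid-escape behaviour still lets a
string keep double quotes when it contains a single quote, which is why
lines such as the u"git didn't return numbers..." message earlier remain
untouched. A small illustrative sketch (not taken from any patch in this
series):

    # Inline strings: single quotes.
    name = 'KODI'

    # A string containing an apostrophe may keep double quotes, since
    # rewriting it with single quotes would just force a backslash escape.
    message = u"git didn't return numbers for behind and ahead, not using it"

    # Docstrings keep double quotes.
    def diagnose():
        """Check the environment for reasons libnotify isn't working."""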
From 0d1f9d20e1dcede6a45c57edf36985cee806a9f6 Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sun, 22 Jul 2018 13:26:55 +0300 Subject: [PATCH 26/51] medusa/indexers/* --- medusa/indexers/indexer_base.py | 2 +- medusa/indexers/indexer_config.py | 4 ++-- medusa/indexers/indexer_ui.py | 2 +- medusa/indexers/tmdb/tmdb.py | 2 +- medusa/indexers/tmdb/tmdb_exceptions.py | 8 ++++---- medusa/indexers/tvdbv2/fallback.py | 6 +++--- medusa/indexers/tvdbv2/tvdbv2_exceptions.py | 8 ++++---- medusa/indexers/tvmaze/tvmaze_exceptions.py | 8 ++++---- 8 files changed, 20 insertions(+), 20 deletions(-) diff --git a/medusa/indexers/indexer_base.py b/medusa/indexers/indexer_base.py index e580c0a7ad..f96bf6e14e 100644 --- a/medusa/indexers/indexer_base.py +++ b/medusa/indexers/indexer_base.py @@ -417,7 +417,7 @@ def get_last_updated_series(self, from_time, weeks=1, filter_show_list=None): :param weeks: number of weeks to get updates for. :param filter_show_list: Optional list of show objects, to use for filtering the returned list. """ - raise IndexerSeasonUpdatesNotSupported("Method get_last_updated_series not implemented by this indexer") + raise IndexerSeasonUpdatesNotSupported('Method get_last_updated_series not implemented by this indexer') def get_episodes_for_season(self, show_id, *args, **kwargs): self._get_episodes(show_id, *args, **kwargs) diff --git a/medusa/indexers/indexer_config.py b/medusa/indexers/indexer_config.py index 4065e30dcb..04d0d9a707 100644 --- a/medusa/indexers/indexer_config.py +++ b/medusa/indexers/indexer_config.py @@ -16,8 +16,8 @@ initConfig = { 'valid_languages': [ - "da", "fi", "nl", "de", "it", "es", "fr", "pl", "hu", "el", "tr", - "ru", "he", "ja", "pt", "zh", "cs", "sl", "hr", "ko", "en", "sv", "no" + 'da', 'fi', 'nl', 'de', 'it', 'es', 'fr', 'pl', 'hu', 'el', 'tr', + 'ru', 'he', 'ja', 'pt', 'zh', 'cs', 'sl', 'hr', 'ko', 'en', 'sv', 'no' ], 'langabbv_to_id': { 'el': 20, 'en': 7, 'zh': 27, diff --git a/medusa/indexers/indexer_ui.py b/medusa/indexers/indexer_ui.py index b3e6f2ccf5..f1a780b3fc 100644 --- a/medusa/indexers/indexer_ui.py +++ b/medusa/indexers/indexer_ui.py @@ -25,7 +25,7 @@ def __init__(self, config, enable_logging=None): if enable_logging is not None: warnings.warn("the UI's log parameter is deprecated, instead use\n" "use import logging; logging.getLogger('ui').info('blah')\n" - "The self.log attribute will be removed in the next version") + 'The self.log attribute will be removed in the next version') self.log = logging.getLogger(__name__) def select_series(self, all_series): diff --git a/medusa/indexers/tmdb/tmdb.py b/medusa/indexers/tmdb/tmdb.py index 69b2c3df1c..5f2481a0a0 100644 --- a/medusa/indexers/tmdb/tmdb.py +++ b/medusa/indexers/tmdb/tmdb.py @@ -92,7 +92,7 @@ def _map_results(tmdb_response, key_mappings=None, list_separator='|'): :type tmdb_response: object """ def week_day(input_date): - days = ["Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday"] + days = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday'] week_day_number = parser.parse(input_date).weekday() return days[week_day_number] diff --git a/medusa/indexers/tmdb/tmdb_exceptions.py b/medusa/indexers/tmdb/tmdb_exceptions.py index 8234e856b8..b17df3d1cb 100644 --- a/medusa/indexers/tmdb/tmdb_exceptions.py +++ b/medusa/indexers/tmdb/tmdb_exceptions.py @@ -19,11 +19,11 @@ """Custom exceptions used or raised by tvdbv2_api.""" from __future__ import unicode_literals -__author__ = "p0psicles" -__version__ = "1.0" +__author__ = 'p0psicles' 
+__version__ = '1.0' -__all__ = ["tvdbv2_error", "tvdbv2_userabort", "tvdbv2_shownotfound", "tvdbv2_showincomplete", - "tvdbv2_seasonnotfound", "tvdbv2_episodenotfound", "tvdbv2_attributenotfound"] +__all__ = ['tvdbv2_error', 'tvdbv2_userabort', 'tvdbv2_shownotfound', 'tvdbv2_showincomplete', + 'tvdbv2_seasonnotfound', 'tvdbv2_episodenotfound', 'tvdbv2_attributenotfound'] class tvdbv2_exception(Exception): diff --git a/medusa/indexers/tvdbv2/fallback.py b/medusa/indexers/tvdbv2/fallback.py index daf4befe49..76dbd2d0eb 100644 --- a/medusa/indexers/tvdbv2/fallback.py +++ b/medusa/indexers/tvdbv2/fallback.py @@ -101,11 +101,11 @@ def fallback_notification(): if fallback_config['api_base_url'] == app.FALLBACK_PLEX_API_URL: if (fallback_config['plex_fallback_time'] + datetime.timedelta(hours=fallback_config['fallback_plex_timeout']) < datetime.datetime.now()): - logger.debug("Disabling Plex fallback as fallback timeout was reached") + logger.debug('Disabling Plex fallback as fallback timeout was reached') session.api_client.host = 'https://api.thetvdb.com' session.auth = TVDBAuth(api_key=app.TVDB_API_KEY) else: - logger.debug("Keeping Plex fallback enabled as fallback timeout not reached") + logger.debug('Keeping Plex fallback enabled as fallback timeout not reached') try: # Run api request @@ -129,7 +129,7 @@ def fallback_notification(): # Send notification back to user. if fallback_config['fallback_plex_notifications']: - logger.warning("Enabling Plex fallback as TheTvdb.com API is having some connectivity issues") + logger.warning('Enabling Plex fallback as TheTvdb.com API is having some connectivity issues') fallback_notification() # Run api request diff --git a/medusa/indexers/tvdbv2/tvdbv2_exceptions.py b/medusa/indexers/tvdbv2/tvdbv2_exceptions.py index ae9980f14a..29d6531738 100644 --- a/medusa/indexers/tvdbv2/tvdbv2_exceptions.py +++ b/medusa/indexers/tvdbv2/tvdbv2_exceptions.py @@ -19,11 +19,11 @@ """Custom exceptions used or raised by tvdbv2_api.""" from __future__ import unicode_literals -__author__ = "p0psicles" -__version__ = "1.0" +__author__ = 'p0psicles' +__version__ = '1.0' -__all__ = ["Tvdb2Error", "Tvdb2UserAbort", "Tvdb2ShowNotFound", "Tvdb2ShowIncomplete", - "Tvdb2SeasonNotFound", "Tvdb2EpisodeNotFound", "Tvdb2AttributeNotFound"] +__all__ = ['Tvdb2Error', 'Tvdb2UserAbort', 'Tvdb2ShowNotFound', 'Tvdb2ShowIncomplete', + 'Tvdb2SeasonNotFound', 'Tvdb2EpisodeNotFound', 'Tvdb2AttributeNotFound'] class Tvdb2Exception(Exception): diff --git a/medusa/indexers/tvmaze/tvmaze_exceptions.py b/medusa/indexers/tvmaze/tvmaze_exceptions.py index 8234e856b8..b17df3d1cb 100644 --- a/medusa/indexers/tvmaze/tvmaze_exceptions.py +++ b/medusa/indexers/tvmaze/tvmaze_exceptions.py @@ -19,11 +19,11 @@ """Custom exceptions used or raised by tvdbv2_api.""" from __future__ import unicode_literals -__author__ = "p0psicles" -__version__ = "1.0" +__author__ = 'p0psicles' +__version__ = '1.0' -__all__ = ["tvdbv2_error", "tvdbv2_userabort", "tvdbv2_shownotfound", "tvdbv2_showincomplete", - "tvdbv2_seasonnotfound", "tvdbv2_episodenotfound", "tvdbv2_attributenotfound"] +__all__ = ['tvdbv2_error', 'tvdbv2_userabort', 'tvdbv2_shownotfound', 'tvdbv2_showincomplete', + 'tvdbv2_seasonnotfound', 'tvdbv2_episodenotfound', 'tvdbv2_attributenotfound'] class tvdbv2_exception(Exception): From df2b30f7ce1c6254834d6abfa9e3afc61feab475 Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sun, 22 Jul 2018 13:27:39 +0300 Subject: [PATCH 27/51] medusa/init/__init__.py --- medusa/init/__init__.py | 12 ++++++------ 1 file changed, 6 
insertions(+), 6 deletions(-) diff --git a/medusa/init/__init__.py b/medusa/init/__init__.py index b15120c87c..36f6dd50aa 100644 --- a/medusa/init/__init__.py +++ b/medusa/init/__init__.py @@ -78,13 +78,13 @@ def _ssl_configuration(): def _configure_mimetypes(): # Fix mimetypes on misconfigured systems - mimetypes.add_type("text/css", ".css") - mimetypes.add_type("application/sfont", ".otf") - mimetypes.add_type("application/sfont", ".ttf") - mimetypes.add_type("application/javascript", ".js") - mimetypes.add_type("application/font-woff", ".woff") + mimetypes.add_type('text/css', '.css') + mimetypes.add_type('application/sfont', '.otf') + mimetypes.add_type('application/sfont', '.ttf') + mimetypes.add_type('application/javascript', '.js') + mimetypes.add_type('application/font-woff', '.woff') # Not sure about this one, but we also have halflings in .woff so I think it wont matter - # mimetypes.add_type("application/font-woff2", ".woff2") + # mimetypes.add_type('application/font-woff2', '.woff2') def _handle_old_tornado(): From 0136c74738555d6edea5eeb3eb54ee5d077e6dea Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sun, 22 Jul 2018 13:27:45 +0300 Subject: [PATCH 28/51] medusa/logger/__init__.py --- medusa/logger/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/medusa/logger/__init__.py b/medusa/logger/__init__.py index cbc666d504..8776924bcd 100644 --- a/medusa/logger/__init__.py +++ b/medusa/logger/__init__.py @@ -474,7 +474,7 @@ def format_to_html(self, base_url): def __repr__(self): """Object representation.""" - return "%s(%r)" % (self.__class__, self.__dict__) + return '%s(%r)' % (self.__class__, self.__dict__) def __str__(self): """String representation.""" From 330095076a836d55710dab889455554dcc8b566b Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sun, 22 Jul 2018 13:28:57 +0300 Subject: [PATCH 29/51] medusa/metadata/* --- medusa/metadata/kodi.py | 24 ++++++++++++------------ medusa/metadata/ps3.py | 26 +++++++++++++------------- 2 files changed, 25 insertions(+), 25 deletions(-) diff --git a/medusa/metadata/kodi.py b/medusa/metadata/kodi.py index fd2d6d3e42..dfdd04e1ed 100644 --- a/medusa/metadata/kodi.py +++ b/medusa/metadata/kodi.py @@ -51,20 +51,20 @@ def __init__(self, season_all_banner) self.name = 'KODI' - self.poster_name = self.banner_name = "folder.jpg" - self.season_all_poster_name = "season-all.tbn" + self.poster_name = self.banner_name = 'folder.jpg' + self.season_all_poster_name = 'season-all.tbn' # web-ui metadata template - self.eg_show_metadata = "tvshow.nfo" - self.eg_episode_metadata = "Season##\\filename.nfo" - self.eg_fanart = "fanart.jpg" - self.eg_poster = "folder.jpg" - self.eg_banner = "folder.jpg" - self.eg_episode_thumbnails = "Season##\\filename.tbn" - self.eg_season_posters = "season##.tbn" - self.eg_season_banners = "not supported" - self.eg_season_all_poster = "season-all.tbn" - self.eg_season_all_banner = "not supported" + self.eg_show_metadata = 'tvshow.nfo' + self.eg_episode_metadata = 'Season##\\filename.nfo' + self.eg_fanart = 'fanart.jpg' + self.eg_poster = 'folder.jpg' + self.eg_banner = 'folder.jpg' + self.eg_episode_thumbnails = 'Season##\\filename.tbn' + self.eg_season_posters = 'season##.tbn' + self.eg_season_banners = 'not supported' + self.eg_season_all_poster = 'season-all.tbn' + self.eg_season_all_banner = 'not supported' # Override with empty methods for unsupported features def create_season_banners(self, ep_obj): diff --git a/medusa/metadata/ps3.py b/medusa/metadata/ps3.py index 303e96f1f4..4d8628d519 100644 
--- a/medusa/metadata/ps3.py +++ b/medusa/metadata/ps3.py @@ -43,21 +43,21 @@ def __init__(self, season_all_poster, season_all_banner) - self.name = "Sony PS3" + self.name = 'Sony PS3' - self.poster_name = "cover.jpg" + self.poster_name = 'cover.jpg' # web-ui metadata template - self.eg_show_metadata = "not supported" - self.eg_episode_metadata = "not supported" - self.eg_fanart = "not supported" - self.eg_poster = "cover.jpg" - self.eg_banner = "not supported" - self.eg_episode_thumbnails = "Season##\\filename.ext.cover.jpg" - self.eg_season_posters = "not supported" - self.eg_season_banners = "not supported" - self.eg_season_all_poster = "not supported" - self.eg_season_all_banner = "not supported" + self.eg_show_metadata = 'not supported' + self.eg_episode_metadata = 'not supported' + self.eg_fanart = 'not supported' + self.eg_poster = 'cover.jpg' + self.eg_banner = 'not supported' + self.eg_episode_thumbnails = 'Season##\\filename.ext.cover.jpg' + self.eg_season_posters = 'not supported' + self.eg_season_banners = 'not supported' + self.eg_season_all_poster = 'not supported' + self.eg_season_all_banner = 'not supported' # Override with empty methods for unsupported features def retrieveShowMetadata(self, folder): @@ -103,7 +103,7 @@ def get_episode_thumb_path(ep_obj): ep_obj: a Episode instance for which to create the thumbnail """ if os.path.isfile(ep_obj.location): - tbn_filename = ep_obj.location + ".cover.jpg" + tbn_filename = ep_obj.location + '.cover.jpg' else: return None From ec397d7d185f60981adea9c59267ec977b77312f Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sun, 22 Jul 2018 13:31:12 +0300 Subject: [PATCH 30/51] medusa/notifiers/* --- medusa/notifiers/__init__.py | 2 +- medusa/notifiers/libnotify.py | 24 ++++++++++++------------ medusa/notifiers/trakt.py | 2 +- 3 files changed, 14 insertions(+), 14 deletions(-) diff --git a/medusa/notifiers/__init__.py b/medusa/notifiers/__init__.py index aee743525b..2c914056ab 100644 --- a/medusa/notifiers/__init__.py +++ b/medusa/notifiers/__init__.py @@ -110,7 +110,7 @@ def notify_snatch(ep_name, is_proper): log.debug(u'Unable to send snatch notification. Error: {0}', error.message) -def notify_git_update(new_version=""): +def notify_git_update(new_version=''): for n in notifiers: if app.NOTIFY_ON_UPDATE: try: diff --git a/medusa/notifiers/libnotify.py b/medusa/notifiers/libnotify.py index 0d17d145bc..f049c1e00f 100644 --- a/medusa/notifiers/libnotify.py +++ b/medusa/notifiers/libnotify.py @@ -23,12 +23,12 @@ def diagnose(): from gi.repository import Notify # @UnusedImport except ImportError: return (u"
<p>Error: gir-notify isn't installed. On Ubuntu/Debian, install the "
-                u"gir1.2-notify-0.7 or "
-                u"gir1.0-notify-0.4 package.")
+                u'gir1.2-notify-0.7 or '
+                u'gir1.0-notify-0.4 package.')
 
     if 'DISPLAY' not in os.environ and 'DBUS_SESSION_BUS_ADDRESS' not in os.environ:
-        return (u"<p>Error: Environment variables DISPLAY and DBUS_SESSION_BUS_ADDRESS "
+        return (u'<p>Error: Environment variables DISPLAY and DBUS_SESSION_BUS_ADDRESS '
                 u"aren't set. libnotify will only work when you run Medusa "
-                u"from a desktop login.")
+                u'from a desktop login.')
     try:
         import dbus
     except ImportError:
@@ -37,15 +37,15 @@ def diagnose():
     try:
         bus = dbus.SessionBus()
     except dbus.DBusException as e:
-        return (u"<p>Error: unable to connect to D-Bus session bus: %s."
-                u"<p>Are you running Medusa in a desktop session?") % (cgi.escape(e),)
+        return (u'<p>Error: unable to connect to D-Bus session bus: %s.'
+                u'<p>Are you running Medusa in a desktop session?') % (cgi.escape(e),)
     try:
         bus.get_object('org.freedesktop.Notifications', '/org/freedesktop/Notifications')
     except dbus.DBusException as e:
         return (u"<p>Error: there doesn't seem to be a notification daemon available: %s "
-                u"<p>Try installing notification-daemon or notify-osd.") % (cgi.escape(e),)
-    return u"<p>Error: Unable to send notification."
+                u'<p>Try installing notification-daemon or notify-osd.') % (cgi.escape(e),)
+    return u'<p>
Error: Unable to send notification.' class Notifier(object): @@ -83,22 +83,22 @@ def notify_download(self, ep_name): def notify_subtitle_download(self, ep_name, lang): if app.LIBNOTIFY_NOTIFY_ONSUBTITLEDOWNLOAD: - self._notify(common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD], ep_name + ": " + lang) + self._notify(common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD], ep_name + ': ' + lang) - def notify_git_update(self, new_version="??"): + def notify_git_update(self, new_version='??'): if app.USE_LIBNOTIFY: update_text = common.notifyStrings[common.NOTIFY_GIT_UPDATE_TEXT] title = common.notifyStrings[common.NOTIFY_GIT_UPDATE] self._notify(title, update_text + new_version) - def notify_login(self, ipaddress=""): + def notify_login(self, ipaddress=''): if app.USE_LIBNOTIFY: update_text = common.notifyStrings[common.NOTIFY_LOGIN_TEXT] title = common.notifyStrings[common.NOTIFY_LOGIN] self._notify(title, update_text.format(ipaddress)) def test_notify(self): - return self._notify('Test notification', "This is a test notification from Medusa", force=True) + return self._notify('Test notification', 'This is a test notification from Medusa', force=True) def _notify(self, title, message, force=False): if not app.USE_LIBNOTIFY and not force: diff --git a/medusa/notifiers/trakt.py b/medusa/notifiers/trakt.py index fe62568594..bfb8b3dad5 100644 --- a/medusa/notifiers/trakt.py +++ b/medusa/notifiers/trakt.py @@ -138,7 +138,7 @@ def update_watchlist(show_obj=None, s=None, e=None, data_show=None, data_episode else: log.warning( "There's a coding problem contact developer. It's needed to be provided at" - " least one of the two: data_show or show_obj", + ' least one of the two: data_show or show_obj', ) return False From 70ee19956f27d980c6c89f0a73c182113e513274 Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sun, 22 Jul 2018 13:35:20 +0300 Subject: [PATCH 31/51] medusa/providers/* --- medusa/providers/generic_provider.py | 12 ++++++------ medusa/providers/torrent/html/abnormal.py | 2 +- medusa/providers/torrent/html/alpharatio.py | 2 +- medusa/providers/torrent/html/anidex.py | 2 +- medusa/providers/torrent/html/animetorrents.py | 2 +- medusa/providers/torrent/html/archetorrent.py | 2 +- medusa/providers/torrent/html/bithdtv.py | 2 +- medusa/providers/torrent/html/bjshare.py | 2 +- medusa/providers/torrent/html/elitetracker.py | 2 +- medusa/providers/torrent/html/hdspace.py | 2 +- medusa/providers/torrent/html/hdtorrents.py | 2 +- medusa/providers/torrent/html/hebits.py | 2 +- medusa/providers/torrent/html/iptorrents.py | 2 +- medusa/providers/torrent/html/limetorrents.py | 2 +- medusa/providers/torrent/html/morethantv.py | 2 +- medusa/providers/torrent/html/nebulance.py | 2 +- medusa/providers/torrent/html/pretome.py | 2 +- medusa/providers/torrent/html/scenetime.py | 2 +- medusa/providers/torrent/html/sdbits.py | 2 +- medusa/providers/torrent/html/speedcd.py | 2 +- medusa/providers/torrent/html/thepiratebay.py | 2 +- medusa/providers/torrent/html/tntvillage.py | 2 +- medusa/providers/torrent/html/tokyotoshokan.py | 2 +- medusa/providers/torrent/html/torrent9.py | 2 +- medusa/providers/torrent/html/torrentbytes.py | 2 +- medusa/providers/torrent/html/torrenting.py | 2 +- medusa/providers/torrent/html/tvchaosuk.py | 2 +- medusa/providers/torrent/html/yggtorrent.py | 2 +- medusa/providers/torrent/html/zooqle.py | 2 +- medusa/providers/torrent/json/animebytes.py | 4 ++-- medusa/providers/torrent/json/bitcannon.py | 2 +- medusa/providers/torrent/json/btn.py | 2 +- 
medusa/providers/torrent/json/danishbits.py | 2 +- medusa/providers/torrent/json/hdbits.py | 2 +- medusa/providers/torrent/json/norbits.py | 2 +- medusa/providers/torrent/json/rarbg.py | 2 +- medusa/providers/torrent/json/torrentday.py | 2 +- medusa/providers/torrent/json/torrentleech.py | 2 +- medusa/providers/torrent/rss/nyaa.py | 2 +- medusa/providers/torrent/torznab/torznab.py | 2 +- medusa/providers/torrent/xml/torrentz2.py | 4 ++-- 41 files changed, 48 insertions(+), 48 deletions(-) diff --git a/medusa/providers/generic_provider.py b/medusa/providers/generic_provider.py index 19cb612cc5..27f049e2d3 100644 --- a/medusa/providers/generic_provider.py +++ b/medusa/providers/generic_provider.py @@ -369,7 +369,7 @@ def find_search_results(self, series, episodes, search_mode, forced_search=False if search_result.parsed_result.season_number is None: log.debug( "The result {0} doesn't seem to have a valid season that we are currently trying to " - "snatch, skipping it", search_result.name + 'snatch, skipping it', search_result.name ) search_result.result_wanted = False continue @@ -378,7 +378,7 @@ def find_search_results(self, series, episodes, search_mode, forced_search=False if not search_result.parsed_result.episode_numbers: log.debug( "The result {0} doesn't seem to match an episode that we are currently trying to " - "snatch, skipping it", search_result.name + 'snatch, skipping it', search_result.name ) search_result.result_wanted = False continue @@ -391,7 +391,7 @@ def find_search_results(self, series, episodes, search_mode, forced_search=False search_result.parsed_result.episode_numbers]: log.debug( "The result {0} doesn't seem to match an episode that we are currently trying to " - "snatch, skipping it", search_result.name + 'snatch, skipping it', search_result.name ) search_result.result_wanted = False continue @@ -408,7 +408,7 @@ def find_search_results(self, series, episodes, search_mode, forced_search=False if not search_result.parsed_result.is_air_by_date: log.debug( "This is supposed to be a date search but the result {0} didn't parse as one, " - "skipping it", search_result.name + 'skipping it', search_result.name ) search_result.result_wanted = False continue @@ -433,7 +433,7 @@ def find_search_results(self, series, episodes, search_mode, forced_search=False elif len(sql_results) != 1: log.warning( "Tried to look up the date for the episode {0} but the database didn't return proper " - "results, skipping it", search_result.name + 'results, skipping it', search_result.name ) search_result.result_wanted = False continue @@ -806,7 +806,7 @@ def add_cookies_from_ui(self): return { 'result': False, 'message': "You haven't configured the requied cookies. Please login at {provider_url}, " - "and make sure you have copied the following cookies: {required_cookies!r}" + 'and make sure you have copied the following cookies: {required_cookies!r}' .format(provider_url=self.name, required_cookies=self.required_cookies) } diff --git a/medusa/providers/torrent/html/abnormal.py b/medusa/providers/torrent/html/abnormal.py index 557581cdbc..c0d2becf2d 100644 --- a/medusa/providers/torrent/html/abnormal.py +++ b/medusa/providers/torrent/html/abnormal.py @@ -148,7 +148,7 @@ def parse(self, data, mode): if seeders < min(self.minseed, 1): if mode != 'RSS': log.debug("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1}", + ' minimum seeders: {0}. 
Seeders: {1}', title, seeders) continue diff --git a/medusa/providers/torrent/html/alpharatio.py b/medusa/providers/torrent/html/alpharatio.py index c004a38c78..2b18d8ce2b 100644 --- a/medusa/providers/torrent/html/alpharatio.py +++ b/medusa/providers/torrent/html/alpharatio.py @@ -149,7 +149,7 @@ def process_column_header(td): if seeders < min(self.minseed, 1): if mode != 'RSS': log.debug("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1}", + ' minimum seeders: {0}. Seeders: {1}', title, seeders) continue diff --git a/medusa/providers/torrent/html/anidex.py b/medusa/providers/torrent/html/anidex.py index 2860734e69..11fdb9fb3b 100644 --- a/medusa/providers/torrent/html/anidex.py +++ b/medusa/providers/torrent/html/anidex.py @@ -123,7 +123,7 @@ def parse(self, data, mode): if seeders < min(self.minseed, 1): if mode != 'RSS': log.debug("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1}", + ' minimum seeders: {0}. Seeders: {1}', title, seeders) continue diff --git a/medusa/providers/torrent/html/animetorrents.py b/medusa/providers/torrent/html/animetorrents.py index 3f711dd1e5..690311ea00 100644 --- a/medusa/providers/torrent/html/animetorrents.py +++ b/medusa/providers/torrent/html/animetorrents.py @@ -156,7 +156,7 @@ def parse(self, data, mode): if seeders < min(self.minseed, 1): if mode != 'RSS': log.debug("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1}", + ' minimum seeders: {0}. Seeders: {1}', title, seeders) continue diff --git a/medusa/providers/torrent/html/archetorrent.py b/medusa/providers/torrent/html/archetorrent.py index d553ed7e08..81aa07b873 100644 --- a/medusa/providers/torrent/html/archetorrent.py +++ b/medusa/providers/torrent/html/archetorrent.py @@ -148,7 +148,7 @@ def parse(self, data, mode): if seeders < min(self.minseed, 1): if mode != 'RSS': log.debug("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1}", + ' minimum seeders: {0}. Seeders: {1}', title, seeders) continue diff --git a/medusa/providers/torrent/html/bithdtv.py b/medusa/providers/torrent/html/bithdtv.py index 766def6290..7fd776b388 100644 --- a/medusa/providers/torrent/html/bithdtv.py +++ b/medusa/providers/torrent/html/bithdtv.py @@ -135,7 +135,7 @@ def parse(self, data, mode): if seeders < min(self.minseed, 1): if mode != 'RSS': log.debug("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1}", + ' minimum seeders: {0}. Seeders: {1}', title, seeders) continue diff --git a/medusa/providers/torrent/html/bjshare.py b/medusa/providers/torrent/html/bjshare.py index de7659e98d..9c52180980 100644 --- a/medusa/providers/torrent/html/bjshare.py +++ b/medusa/providers/torrent/html/bjshare.py @@ -211,7 +211,7 @@ def process_column_header(td): if seeders < min(self.minseed, 1): if mode != 'RSS': log.debug("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1}", + ' minimum seeders: {0}. Seeders: {1}', title, seeders) continue diff --git a/medusa/providers/torrent/html/elitetracker.py b/medusa/providers/torrent/html/elitetracker.py index 57be18343f..3de9f9765c 100644 --- a/medusa/providers/torrent/html/elitetracker.py +++ b/medusa/providers/torrent/html/elitetracker.py @@ -153,7 +153,7 @@ def parse(self, data, mode, **kwargs): if seeders < min(self.minseed, 1): if mode != 'RSS': log.debug("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1}", + ' minimum seeders: {0}. 
Seeders: {1}', title, seeders) continue diff --git a/medusa/providers/torrent/html/hdspace.py b/medusa/providers/torrent/html/hdspace.py index bf791d2930..e9c857f17f 100644 --- a/medusa/providers/torrent/html/hdspace.py +++ b/medusa/providers/torrent/html/hdspace.py @@ -148,7 +148,7 @@ def parse(self, data, mode): if seeders < min(self.minseed, 1): if mode != 'RSS': log.debug("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1}", + ' minimum seeders: {0}. Seeders: {1}', title, seeders) continue diff --git a/medusa/providers/torrent/html/hdtorrents.py b/medusa/providers/torrent/html/hdtorrents.py index 81d9815624..813713b3ed 100644 --- a/medusa/providers/torrent/html/hdtorrents.py +++ b/medusa/providers/torrent/html/hdtorrents.py @@ -153,7 +153,7 @@ def parse(self, data, mode): if seeders < min(self.minseed, 1): if mode != 'RSS': log.debug("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1}", + ' minimum seeders: {0}. Seeders: {1}', title, seeders) continue diff --git a/medusa/providers/torrent/html/hebits.py b/medusa/providers/torrent/html/hebits.py index e8e0f8a815..e6e0c2f3c6 100644 --- a/medusa/providers/torrent/html/hebits.py +++ b/medusa/providers/torrent/html/hebits.py @@ -144,7 +144,7 @@ def parse(self, data, mode): if seeders < min(self.minseed, 1): if mode != 'RSS': log.debug("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1}", + ' minimum seeders: {0}. Seeders: {1}', title, seeders) continue diff --git a/medusa/providers/torrent/html/iptorrents.py b/medusa/providers/torrent/html/iptorrents.py index 08d28080f2..35233249d8 100644 --- a/medusa/providers/torrent/html/iptorrents.py +++ b/medusa/providers/torrent/html/iptorrents.py @@ -124,7 +124,7 @@ def parse(self, data, mode): if seeders < min(self.minseed, 1): if mode != 'RSS': log.debug("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1}", + ' minimum seeders: {0}. Seeders: {1}', title, seeders) continue diff --git a/medusa/providers/torrent/html/limetorrents.py b/medusa/providers/torrent/html/limetorrents.py index afbc1da60b..c64115c238 100644 --- a/medusa/providers/torrent/html/limetorrents.py +++ b/medusa/providers/torrent/html/limetorrents.py @@ -156,7 +156,7 @@ def process_column_header(th): if seeders < min(self.minseed, 1): if mode != 'RSS': log.debug("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1}", + ' minimum seeders: {0}. Seeders: {1}', title, seeders) continue diff --git a/medusa/providers/torrent/html/morethantv.py b/medusa/providers/torrent/html/morethantv.py index 692d5f626b..3dc55266c6 100644 --- a/medusa/providers/torrent/html/morethantv.py +++ b/medusa/providers/torrent/html/morethantv.py @@ -161,7 +161,7 @@ def process_column_header(td): if seeders < min(self.minseed, 1): if mode != 'RSS': log.debug("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1}", + ' minimum seeders: {0}. Seeders: {1}', title, seeders) continue diff --git a/medusa/providers/torrent/html/nebulance.py b/medusa/providers/torrent/html/nebulance.py index bf92a1d1e3..810d794c8b 100644 --- a/medusa/providers/torrent/html/nebulance.py +++ b/medusa/providers/torrent/html/nebulance.py @@ -152,7 +152,7 @@ def parse(self, data, mode): if seeders < min(self.minseed, 1): if mode != 'RSS': log.debug("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1}", + ' minimum seeders: {0}. 
Seeders: {1}', title, seeders) continue diff --git a/medusa/providers/torrent/html/pretome.py b/medusa/providers/torrent/html/pretome.py index 3419a62a4c..c4fef8e384 100644 --- a/medusa/providers/torrent/html/pretome.py +++ b/medusa/providers/torrent/html/pretome.py @@ -129,7 +129,7 @@ def parse(self, data, mode): if seeders < min(self.minseed, 1): if mode != 'RSS': log.debug("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1}", + ' minimum seeders: {0}. Seeders: {1}', title, seeders) continue diff --git a/medusa/providers/torrent/html/scenetime.py b/medusa/providers/torrent/html/scenetime.py index fee59ae63b..ac53d1c0eb 100644 --- a/medusa/providers/torrent/html/scenetime.py +++ b/medusa/providers/torrent/html/scenetime.py @@ -149,7 +149,7 @@ def parse(self, data, mode): if seeders < min(self.minseed, 1): if mode != 'RSS': log.debug("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1}", + ' minimum seeders: {0}. Seeders: {1}', title, seeders) continue diff --git a/medusa/providers/torrent/html/sdbits.py b/medusa/providers/torrent/html/sdbits.py index 0decfbfd1d..951cdc7536 100644 --- a/medusa/providers/torrent/html/sdbits.py +++ b/medusa/providers/torrent/html/sdbits.py @@ -143,7 +143,7 @@ def parse(self, data, mode): if seeders < min(self.minseed, 1): if mode != 'RSS': log.debug("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1}", + ' minimum seeders: {0}. Seeders: {1}', title, seeders) continue diff --git a/medusa/providers/torrent/html/speedcd.py b/medusa/providers/torrent/html/speedcd.py index 06f0472e45..2d3d9b95ac 100644 --- a/medusa/providers/torrent/html/speedcd.py +++ b/medusa/providers/torrent/html/speedcd.py @@ -142,7 +142,7 @@ def parse(self, data, mode): if seeders < min(self.minseed, 1): if mode != 'RSS': log.debug("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1}", + ' minimum seeders: {0}. Seeders: {1}', title, seeders) continue diff --git a/medusa/providers/torrent/html/thepiratebay.py b/medusa/providers/torrent/html/thepiratebay.py index 7fa3f0ed36..f7b6caccfc 100644 --- a/medusa/providers/torrent/html/thepiratebay.py +++ b/medusa/providers/torrent/html/thepiratebay.py @@ -153,7 +153,7 @@ def process_column_header(th): if seeders < min(self.minseed, 1): if mode != 'RSS': log.debug("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1}", + ' minimum seeders: {0}. Seeders: {1}', title, seeders) continue diff --git a/medusa/providers/torrent/html/tntvillage.py b/medusa/providers/torrent/html/tntvillage.py index 9321fd5241..2292f3c44d 100644 --- a/medusa/providers/torrent/html/tntvillage.py +++ b/medusa/providers/torrent/html/tntvillage.py @@ -148,7 +148,7 @@ def parse(self, data, mode): if seeders < min(self.minseed, 1): if mode != 'RSS': log.debug("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1}", + ' minimum seeders: {0}. Seeders: {1}', title, seeders) continue diff --git a/medusa/providers/torrent/html/tokyotoshokan.py b/medusa/providers/torrent/html/tokyotoshokan.py index 2816744e83..df1afb17aa 100644 --- a/medusa/providers/torrent/html/tokyotoshokan.py +++ b/medusa/providers/torrent/html/tokyotoshokan.py @@ -128,7 +128,7 @@ def parse(self, data, mode): if seeders < min(self.minseed, 1): if mode != 'RSS': log.debug("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1}", + ' minimum seeders: {0}. 
Seeders: {1}', title, seeders) continue diff --git a/medusa/providers/torrent/html/torrent9.py b/medusa/providers/torrent/html/torrent9.py index c97ddcadda..f69ecd3cf7 100644 --- a/medusa/providers/torrent/html/torrent9.py +++ b/medusa/providers/torrent/html/torrent9.py @@ -133,7 +133,7 @@ def parse(self, data, mode): if seeders < min(self.minseed, 1): if mode != 'RSS': log.debug("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1}", + ' minimum seeders: {0}. Seeders: {1}', title, seeders) continue diff --git a/medusa/providers/torrent/html/torrentbytes.py b/medusa/providers/torrent/html/torrentbytes.py index de50f04865..bba94f24b7 100644 --- a/medusa/providers/torrent/html/torrentbytes.py +++ b/medusa/providers/torrent/html/torrentbytes.py @@ -150,7 +150,7 @@ def parse(self, data, mode): if seeders < min(self.minseed, 1): if mode != 'RSS': log.debug("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1}", + ' minimum seeders: {0}. Seeders: {1}', title, seeders) continue diff --git a/medusa/providers/torrent/html/torrenting.py b/medusa/providers/torrent/html/torrenting.py index ecab0478ab..1c0cee4b3f 100644 --- a/medusa/providers/torrent/html/torrenting.py +++ b/medusa/providers/torrent/html/torrenting.py @@ -129,7 +129,7 @@ def parse(self, data, mode): if seeders < min(self.minseed, 1): if mode != 'RSS': log.debug("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1}", + ' minimum seeders: {0}. Seeders: {1}', title, seeders) continue diff --git a/medusa/providers/torrent/html/tvchaosuk.py b/medusa/providers/torrent/html/tvchaosuk.py index 94c8563034..5d5ea09a35 100644 --- a/medusa/providers/torrent/html/tvchaosuk.py +++ b/medusa/providers/torrent/html/tvchaosuk.py @@ -156,7 +156,7 @@ def parse(self, data, mode, **kwargs): if seeders < min(self.minseed, 1): if mode != 'RSS': log.debug("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1}", + ' minimum seeders: {0}. Seeders: {1}', title, seeders) continue diff --git a/medusa/providers/torrent/html/yggtorrent.py b/medusa/providers/torrent/html/yggtorrent.py index 660900a389..d42c35d190 100644 --- a/medusa/providers/torrent/html/yggtorrent.py +++ b/medusa/providers/torrent/html/yggtorrent.py @@ -138,7 +138,7 @@ def parse(self, data, mode): if seeders < min(self.minseed, 1): if mode != 'RSS': log.debug("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1}", + ' minimum seeders: {0}. Seeders: {1}', title, seeders) continue diff --git a/medusa/providers/torrent/html/zooqle.py b/medusa/providers/torrent/html/zooqle.py index 7653e95160..26f1f445f6 100644 --- a/medusa/providers/torrent/html/zooqle.py +++ b/medusa/providers/torrent/html/zooqle.py @@ -134,7 +134,7 @@ def parse(self, data, mode): if seeders < min(self.minseed, 1): if mode != 'RSS': log.debug("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1}", + ' minimum seeders: {0}. 
Seeders: {1}', title, seeders) continue diff --git a/medusa/providers/torrent/json/animebytes.py b/medusa/providers/torrent/json/animebytes.py index 121c4ee8af..8693b89cda 100644 --- a/medusa/providers/torrent/json/animebytes.py +++ b/medusa/providers/torrent/json/animebytes.py @@ -170,7 +170,7 @@ def parse(self, data, mode): # Attempt and get an season or episode number title_info = row.get('EditionData').get('EditionTitle') - if title_info != "": + if title_info != '': if title_info.startswith('Episodes'): episode = re.match('Episodes 1-(\d+)', title_info).group(1) release_type = MULTI_EP @@ -241,7 +241,7 @@ def parse(self, data, mode): if seeders < min(self.minseed, 1): if mode != 'RSS': log.debug("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1}", + ' minimum seeders: {0}. Seeders: {1}', title, seeders) continue diff --git a/medusa/providers/torrent/json/bitcannon.py b/medusa/providers/torrent/json/bitcannon.py index 2a44aa71bc..5d663d3674 100644 --- a/medusa/providers/torrent/json/bitcannon.py +++ b/medusa/providers/torrent/json/bitcannon.py @@ -129,7 +129,7 @@ def parse(self, data, mode): if seeders < min(self.minseed, 1): if mode != 'RSS': log.debug("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1}", + ' minimum seeders: {0}. Seeders: {1}', title, seeders) continue diff --git a/medusa/providers/torrent/json/btn.py b/medusa/providers/torrent/json/btn.py index d2992941d7..7f7f139c8a 100644 --- a/medusa/providers/torrent/json/btn.py +++ b/medusa/providers/torrent/json/btn.py @@ -124,7 +124,7 @@ def parse(self, data, mode): # Filter unseeded torrent if seeders < min(self.minseed, 1): log.debug("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1}", + ' minimum seeders: {0}. Seeders: {1}', title, seeders) continue diff --git a/medusa/providers/torrent/json/danishbits.py b/medusa/providers/torrent/json/danishbits.py index 300ce4112b..8918cdab7b 100644 --- a/medusa/providers/torrent/json/danishbits.py +++ b/medusa/providers/torrent/json/danishbits.py @@ -134,7 +134,7 @@ def parse(self, data, mode): if seeders < min(self.minseed, 1): if mode != 'RSS': log.debug("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1}", + ' minimum seeders: {0}. Seeders: {1}', title, seeders) continue diff --git a/medusa/providers/torrent/json/hdbits.py b/medusa/providers/torrent/json/hdbits.py index e07d82fa6f..f7f7959e0e 100644 --- a/medusa/providers/torrent/json/hdbits.py +++ b/medusa/providers/torrent/json/hdbits.py @@ -135,7 +135,7 @@ def parse(self, data, mode): if seeders < min(self.minseed, 1): log.debug( "Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1}", + ' minimum seeders: {0}. Seeders: {1}', title, seeders) continue diff --git a/medusa/providers/torrent/json/norbits.py b/medusa/providers/torrent/json/norbits.py index 28f59fbe9f..35e1ca14e2 100644 --- a/medusa/providers/torrent/json/norbits.py +++ b/medusa/providers/torrent/json/norbits.py @@ -125,7 +125,7 @@ def parse(self, data, mode): if seeders < min(self.minseed, 1): if mode != 'RSS': log.debug("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1}", + ' minimum seeders: {0}. 
Seeders: {1}', title, seeders) continue diff --git a/medusa/providers/torrent/json/rarbg.py b/medusa/providers/torrent/json/rarbg.py index e1a50c3cc4..a1d82cb615 100644 --- a/medusa/providers/torrent/json/rarbg.py +++ b/medusa/providers/torrent/json/rarbg.py @@ -171,7 +171,7 @@ def parse(self, data, mode): if seeders < min(self.minseed, 1): if mode != 'RSS': log.debug("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1}", + ' minimum seeders: {0}. Seeders: {1}', title, seeders) continue diff --git a/medusa/providers/torrent/json/torrentday.py b/medusa/providers/torrent/json/torrentday.py index 6b880ec448..e9f04dcd56 100644 --- a/medusa/providers/torrent/json/torrentday.py +++ b/medusa/providers/torrent/json/torrentday.py @@ -140,7 +140,7 @@ def parse(self, data, mode): if seeders < min(self.minseed, 1): if mode != 'RSS': log.debug("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1}", + ' minimum seeders: {0}. Seeders: {1}', title, seeders) continue diff --git a/medusa/providers/torrent/json/torrentleech.py b/medusa/providers/torrent/json/torrentleech.py index fe6d2a1920..89fca0c101 100644 --- a/medusa/providers/torrent/json/torrentleech.py +++ b/medusa/providers/torrent/json/torrentleech.py @@ -156,7 +156,7 @@ def parse(self, data, mode): if seeders < min(self.minseed, 1): if mode != 'RSS': log.debug("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1}", title, seeders) + ' minimum seeders: {0}. Seeders: {1}', title, seeders) continue size = convert_size(torrent['size']) or -1 diff --git a/medusa/providers/torrent/rss/nyaa.py b/medusa/providers/torrent/rss/nyaa.py index 0ce803fac4..f06a5afb65 100644 --- a/medusa/providers/torrent/rss/nyaa.py +++ b/medusa/providers/torrent/rss/nyaa.py @@ -116,7 +116,7 @@ def parse(self, data, mode): if seeders < min(self.minseed, 1): if mode != 'RSS': log.debug("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1}", title, seeders) + ' minimum seeders: {0}. Seeders: {1}', title, seeders) continue size = convert_size(item['nyaa_size'], default=-1, units=units) diff --git a/medusa/providers/torrent/torznab/torznab.py b/medusa/providers/torrent/torznab/torznab.py index 7c1cc53631..0d35f39331 100644 --- a/medusa/providers/torrent/torznab/torznab.py +++ b/medusa/providers/torrent/torznab/torznab.py @@ -185,7 +185,7 @@ def parse(self, data, mode): if seeders < min(self.minseed, 1): if mode != 'RSS': log.debug("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1}", + ' minimum seeders: {0}. Seeders: {1}', title, seeders) continue diff --git a/medusa/providers/torrent/xml/torrentz2.py b/medusa/providers/torrent/xml/torrentz2.py index c2857561d1..38f910fb42 100644 --- a/medusa/providers/torrent/xml/torrentz2.py +++ b/medusa/providers/torrent/xml/torrentz2.py @@ -111,7 +111,7 @@ def parse(self, data, mode): # Add "-" after codec and add missing "." 
title = re.sub(r'([xh][ .]?264|xvid)( )', r'\1-', title_raw).replace(' ', '.') if title_raw else '' info_hash = row.guid.text.rsplit('/', 1)[-1] - download_url = "magnet:?xt=urn:btih:" + info_hash + "&dn=" + title + self._custom_trackers + download_url = 'magnet:?xt=urn:btih:' + info_hash + '&dn=' + title + self._custom_trackers if not all([title, download_url]): continue @@ -125,7 +125,7 @@ def parse(self, data, mode): if seeders < min(self.minseed, 1): if mode != 'RSS': log.debug("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1}", + ' minimum seeders: {0}. Seeders: {1}', title, seeders) continue From a8c03fe066053359ae556f84a02acac0139f2a8f Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sun, 22 Jul 2018 13:37:34 +0300 Subject: [PATCH 32/51] medusa/search/* --- medusa/search/manual.py | 24 ++++++++++++------------ medusa/search/proper.py | 8 ++++---- 2 files changed, 16 insertions(+), 16 deletions(-) diff --git a/medusa/search/manual.py b/medusa/search/manual.py index 7c130a872f..4da92c5924 100644 --- a/medusa/search/manual.py +++ b/medusa/search/manual.py @@ -201,10 +201,10 @@ def get_provider_cache_results(series_obj, show_all_results=None, perform_search # Let's check if this provider table already exists table_exists = main_db_con.select( - b"SELECT name " - b"FROM sqlite_master " + b'SELECT name ' + b'FROM sqlite_master ' b"WHERE type='table'" - b" AND name=?", + b' AND name=?', [cur_provider.get_id()] ) columns = [i[1] for i in main_db_con.select("PRAGMA table_info('{0}')".format(cur_provider.get_id()))] if table_exists else [] @@ -220,10 +220,10 @@ def get_provider_cache_results(series_obj, show_all_results=None, perform_search b"SELECT rowid, ? AS 'provider_type', ? AS 'provider_image'," b" ? AS 'provider', ? AS 'provider_id', ? 'provider_minseed'," b" ? 'provider_minleech', name, season, episodes, indexer, indexerid," - b" url, proper_tags, quality, release_group, version," - b" seeders, leechers, size, time, pubdate, date_added " + b' url, proper_tags, quality, release_group, version,' + b' seeders, leechers, size, time, pubdate, date_added ' b"FROM '{provider_id}' " - b"WHERE indexer = ? AND indexerid = ? AND quality > 0 ".format( + b'WHERE indexer = ? AND indexerid = ? AND quality > 0 '.format( provider_id=cur_provider.get_id() ) ) @@ -237,8 +237,8 @@ def get_provider_cache_results(series_obj, show_all_results=None, perform_search # If were not looking for all results, meaning don't do the filter on season + ep, add sql if not int(show_all_results): # If it's an episode search, pass season and episode. - common_sql += " AND season = ? AND episodes LIKE ? " - add_params += [season, "%|{0}|%".format(episode)] + common_sql += ' AND season = ? AND episodes LIKE ? ' + add_params += [season, '%|{0}|%'.format(episode)] else: # If were not looking for all results, meaning don't do the filter on season + ep, add sql @@ -247,7 +247,7 @@ def get_provider_cache_results(series_obj, show_all_results=None, perform_search ['?' for _ in series_obj.get_all_episodes(season)] )) - common_sql += " AND season = ? AND (episodes LIKE ? OR {list_of_episodes})".format( + common_sql += ' AND season = ? AND (episodes LIKE ? OR {list_of_episodes})'.format( list_of_episodes=list_of_episodes ) add_params += [season, '||'] # When the episodes field is empty. 
@@ -258,14 +258,14 @@ def get_provider_cache_results(series_obj, show_all_results=None, perform_search combined_sql_params += add_params # Get the last updated cache items timestamp - last_update = main_db_con.select(b"SELECT max(time) AS lastupdate " + last_update = main_db_con.select(b'SELECT max(time) AS lastupdate ' b"FROM '{provider_id}'".format(provider_id=cur_provider.get_id())) provider_results['last_prov_updates'][cur_provider.get_id()] = last_update[0][b'lastupdate'] if last_update[0][b'lastupdate'] else 0 # Check if we have the combined sql strings if combined_sql_q: - sql_prepend = b"SELECT * FROM (" - sql_append = b") ORDER BY quality DESC, proper_tags DESC, seeders DESC" + sql_prepend = b'SELECT * FROM (' + sql_append = b') ORDER BY quality DESC, proper_tags DESC, seeders DESC' # Add all results sql_total += main_db_con.select(b'{0} {1} {2}'. diff --git a/medusa/search/proper.py b/medusa/search/proper.py index 0e74fd1f17..e2d4ed10c3 100644 --- a/medusa/search/proper.py +++ b/medusa/search/proper.py @@ -227,10 +227,10 @@ def _get_proper_results(self): # pylint: disable=too-many-locals, too-many-bran # check if we have the episode as DOWNLOADED main_db_con = db.DBConnection() - sql_results = main_db_con.select(b"SELECT quality, release_name " - b"FROM tv_episodes WHERE indexer = ? " - b"AND showid = ? AND season = ? " - b"AND episode = ? AND status = ?", + sql_results = main_db_con.select(b'SELECT quality, release_name ' + b'FROM tv_episodes WHERE indexer = ? ' + b'AND showid = ? AND season = ? ' + b'AND episode = ? AND status = ?', [best_result.indexer, best_result.series.indexerid, best_result.actual_season, From 2ff9017bce579606c7d22e4bcfdb0de06e4c2434 Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sun, 22 Jul 2018 13:41:01 +0300 Subject: [PATCH 33/51] medusa/server/* --- medusa/server/core.py | 2 +- medusa/server/web/home/add_recommended.py | 4 +- medusa/server/web/home/add_shows.py | 62 +++++++++++------------ medusa/server/web/home/handler.py | 34 ++++++------- 4 files changed, 51 insertions(+), 51 deletions(-) diff --git a/medusa/server/core.py b/medusa/server/core.py index f217185490..96afc8b23f 100644 --- a/medusa/server/core.py +++ b/medusa/server/core.py @@ -194,7 +194,7 @@ def __init__(self, options=None): self.app.add_handlers('.*$', get_apiv2_handlers(self.options['api_v2_root'])) # Websocket handler - self.app.add_handlers(".*$", [ + self.app.add_handlers('.*$', [ (r'{base}/ui(/?.*)'.format(base=self.options['web_socket']), WebSocketUIHandler) ]) diff --git a/medusa/server/web/home/add_recommended.py b/medusa/server/web/home/add_recommended.py index 74926f9d3b..e249c07b9f 100644 --- a/medusa/server/web/home/add_recommended.py +++ b/medusa/server/web/home/add_recommended.py @@ -37,5 +37,5 @@ def index(self): [Converted to VueRouter] """ - t = PageTemplate(rh=self, filename="index.mako") - return t.render(controller="addShows", action="index") + t = PageTemplate(rh=self, filename='index.mako') + return t.render(controller='addShows', action='index') diff --git a/medusa/server/web/home/add_shows.py b/medusa/server/web/home/add_shows.py index 0581497b9a..2953908f92 100644 --- a/medusa/server/web/home/add_shows.py +++ b/medusa/server/web/home/add_shows.py @@ -141,11 +141,11 @@ def trendingShows(self, traktList=None): else: page_title = 'Trakt Most Anticipated Shows' - t = PageTemplate(rh=self, filename="addShows_trendingShows.mako") + t = PageTemplate(rh=self, filename='addShows_trendingShows.mako') return t.render(title=page_title, header=page_title, 
enable_anime_options=True, blacklist=[], whitelist=[], groups=[], - traktList=traktList, controller="addShows", action="trendingShows", - realpage="trendingShows") + traktList=traktList, controller='addShows', action='trendingShows', + realpage='trendingShows') def getTrendingShows(self, traktList=None): """ @@ -153,32 +153,32 @@ def getTrendingShows(self, traktList=None): posts them to addNewShow """ e = None - t = PageTemplate(rh=self, filename="addShows_recommended.mako") + t = PageTemplate(rh=self, filename='addShows_recommended.mako') if traktList is None: - traktList = "" + traktList = '' traktList = traktList.lower() - if traktList == "trending": - page_url = "shows/trending" - elif traktList == "popular": - page_url = "shows/popular" - elif traktList == "anticipated": - page_url = "shows/anticipated" - elif traktList == "collected": - page_url = "shows/collected" - elif traktList == "watched": - page_url = "shows/watched" - elif traktList == "played": - page_url = "shows/played" - elif traktList == "recommended": - page_url = "recommendations/shows" - elif traktList == "newshow": - page_url = 'calendars/all/shows/new/%s/30' % datetime.date.today().strftime("%Y-%m-%d") - elif traktList == "newseason": - page_url = 'calendars/all/shows/premieres/%s/30' % datetime.date.today().strftime("%Y-%m-%d") + if traktList == 'trending': + page_url = 'shows/trending' + elif traktList == 'popular': + page_url = 'shows/popular' + elif traktList == 'anticipated': + page_url = 'shows/anticipated' + elif traktList == 'collected': + page_url = 'shows/collected' + elif traktList == 'watched': + page_url = 'shows/watched' + elif traktList == 'played': + page_url = 'shows/played' + elif traktList == 'recommended': + page_url = 'recommendations/shows' + elif traktList == 'newshow': + page_url = 'calendars/all/shows/new/%s/30' % datetime.date.today().strftime('%Y-%m-%d') + elif traktList == 'newseason': + page_url = 'calendars/all/shows/premieres/%s/30' % datetime.date.today().strftime('%Y-%m-%d') else: - page_url = "shows/anticipated" + page_url = 'shows/anticipated' try: (trakt_blacklist, recommended_shows, removed_from_medusa) = TraktPopular().fetch_popular_shows(page_url=page_url, trakt_list=traktList) @@ -189,13 +189,13 @@ def getTrendingShows(self, traktList=None): removed_from_medusa = None return t.render(trakt_blacklist=trakt_blacklist, recommended_shows=recommended_shows, removed_from_medusa=removed_from_medusa, - exception=e, enable_anime_options=False, blacklist=[], whitelist=[], realpage="getTrendingShows") + exception=e, enable_anime_options=False, blacklist=[], whitelist=[], realpage='getTrendingShows') def popularShows(self): """ Fetches data from IMDB to show a list of popular shows. """ - t = PageTemplate(rh=self, filename="addShows_recommended.mako") + t = PageTemplate(rh=self, filename='addShows_recommended.mako') e = None try: @@ -203,16 +203,16 @@ def popularShows(self): except (RequestException, Exception) as e: recommended_shows = None - return t.render(title="Popular Shows", header="Popular Shows", + return t.render(title='Popular Shows', header='Popular Shows', recommended_shows=recommended_shows, exception=e, groups=[], enable_anime_options=True, blacklist=[], whitelist=[], - controller="addShows", action="recommendedShows", realpage="popularShows") + controller='addShows', action='recommendedShows', realpage='popularShows') def popularAnime(self, list_type=REQUEST_HOT): """ Fetches list recommeded shows from anidb.info. 
""" - t = PageTemplate(rh=self, filename="addShows_recommended.mako") + t = PageTemplate(rh=self, filename='addShows_recommended.mako') e = None try: @@ -221,10 +221,10 @@ def popularAnime(self, list_type=REQUEST_HOT): # print traceback.format_exc() recommended_shows = None - return t.render(title="Popular Anime Shows", header="Popular Anime Shows", + return t.render(title='Popular Anime Shows', header='Popular Anime Shows', recommended_shows=recommended_shows, exception=e, groups=[], enable_anime_options=True, blacklist=[], whitelist=[], - controller="addShows", action="recommendedShows", realpage="popularAnime") + controller='addShows', action='recommendedShows', realpage='popularAnime') def addShowToBlacklist(self, seriesid): # URL parameters diff --git a/medusa/server/web/home/handler.py b/medusa/server/web/home/handler.py index 62d8dbb691..67c173b756 100644 --- a/medusa/server/web/home/handler.py +++ b/medusa/server/web/home/handler.py @@ -525,20 +525,20 @@ def settingsNMJv2(host=None, dbloc=None, instance=None): result = notifiers.nmjv2_notifier.notify_settings(unquote_plus(host), dbloc, instance) if result: return json.dumps({ - "message": "NMJ Database found at: {host}".format(host=host), - "database": app.NMJv2_DATABASE, + 'message': 'NMJ Database found at: {host}'.format(host=host), + 'database': app.NMJv2_DATABASE, }) else: return json.dumps({ - "message": "Unable to find NMJ Database at location: {db_loc}. " - "Is the right location selected and PCH running?".format(db_loc=dbloc), - "database": "" + 'message': 'Unable to find NMJ Database at location: {db_loc}. ' + 'Is the right location selected and PCH running?'.format(db_loc=dbloc), + 'database': '' }) @staticmethod def getTraktToken(trakt_pin=None): - trakt_settings = {"trakt_api_key": app.TRAKT_API_KEY, - "trakt_api_secret": app.TRAKT_API_SECRET} + trakt_settings = {'trakt_api_key': app.TRAKT_API_KEY, + 'trakt_api_secret': app.TRAKT_API_SECRET} trakt_api = TraktApi(app.SSL_VERIFY, app.TRAKT_TIMEOUT, **trakt_settings) response = None try: @@ -561,9 +561,9 @@ def getTraktToken(trakt_pin=None): return "Connection error. Click 'Authorize Medusa' button again" if response: ui.notifications.message('Trakt Authorized') - return "Trakt Authorized" + return 'Trakt Authorized' ui.notifications.error('Connection error. Reload the page to get new token!') - return "Trakt Not Authorized!" + return 'Trakt Not Authorized!' @staticmethod def testTrakt(username=None, blacklist_name=None): @@ -1625,7 +1625,7 @@ def editShow(self, indexername=None, seriesid=None, location=None, allowed_quali logger.log(u"Unable to create the show directory '{location}'. Error: {msg}".format (location=new_location, msg=error), logger.WARNING) else: - logger.log(u"New show directory created", logger.INFO) + logger.log(u'New show directory created', logger.INFO) helpers.chmod_as_parent(new_location) else: changed_location = False @@ -1690,7 +1690,7 @@ def editShow(self, indexername=None, seriesid=None, location=None, allowed_quali except CantRefreshShowException as e: errors += 1 logger.log("Unable to refresh show '{show}'. Please manually trigger a full show refresh. 
" - "Error: {error}".format(show=series_obj.name, error=e.message), logger.WARNING) + 'Error: {error}'.format(show=series_obj.name, error=e.message), logger.WARNING) if directCall: return errors @@ -1702,7 +1702,7 @@ def editShow(self, indexername=None, seriesid=None, location=None, allowed_quali ) ) - logger.log(u"Finished editing show: {show}".format(show=series_obj.name), logger.DEBUG) + logger.log(u'Finished editing show: {show}'.format(show=series_obj.name), logger.DEBUG) return self.redirect( '/home/displayShow?indexername={series_obj.indexer_name}&seriesid={series_obj.series_id}'.format( series_obj=series_obj)) @@ -1973,7 +1973,7 @@ def setStatus(self, indexername=None, seriesid=None, eps=None, status=None, dire if ep_obj.manually_searched: logger.log("Resetting 'manually searched' flag of {series} {episode}" - " as episode was changed to WANTED".format( + ' as episode was changed to WANTED'.format( series=series_obj.name, episode=cur_ep), logger.DEBUG) ep_obj.manually_searched = False @@ -2201,7 +2201,7 @@ def searchEpisodeSubtitles(self, indexername=None, seriesid=None, season=None, e try: if lang: - logger.log("Manual re-downloading subtitles for {show} with language {lang}".format + logger.log('Manual re-downloading subtitles for {show} with language {lang}'.format (show=ep_obj.series.name, lang=lang)) new_subtitles = ep_obj.download_subtitles(lang=lang) except Exception: @@ -2249,7 +2249,7 @@ def manual_search_subtitles(self, indexername=None, seriesid=None, season=None, logger.log('Outdated list. Please refresh page and try again', logger.WARNING) return json.dumps({'result': 'failure'}) except (ValueError, TypeError) as e: - ui.notifications.message('Error', "Please check logs") + ui.notifications.message('Error', 'Please check logs') logger.log('Error while manual {mode} subtitles. 
Error: {error_msg}'.format (mode=mode, error_msg=e), logger.ERROR) return json.dumps({'result': 'failure'}) @@ -2260,7 +2260,7 @@ def manual_search_subtitles(self, indexername=None, seriesid=None, season=None, return json.dumps({'result': 'failure'}) if mode == 'searching': - logger.log("Manual searching subtitles for: {0}".format(release_name)) + logger.log('Manual searching subtitles for: {0}'.format(release_name)) found_subtitles = subtitles.list_subtitles(tv_episode=ep_obj, video_path=video_path) if found_subtitles: ui.notifications.message(ep_obj.series.name, 'Found {} subtitles'.format(len(found_subtitles))) @@ -2269,7 +2269,7 @@ def manual_search_subtitles(self, indexername=None, seriesid=None, season=None, result = 'success' if found_subtitles else 'failure' subtitles_result = found_subtitles else: - logger.log("Manual downloading subtitles for: {0}".format(release_name)) + logger.log('Manual downloading subtitles for: {0}'.format(release_name)) new_manual_subtitle = subtitles.save_subtitle(tv_episode=ep_obj, subtitle_id=picked_id, video_path=video_path) if new_manual_subtitle: From 645b16ec6c340ff68f0339b8e5da8727e1f19d66 Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sun, 22 Jul 2018 13:41:26 +0300 Subject: [PATCH 34/51] medusa/show/recommendations/* --- medusa/show/recommendations/anidb.py | 2 +- medusa/show/recommendations/trakt.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/medusa/show/recommendations/anidb.py b/medusa/show/recommendations/anidb.py index 10c51d0195..df9f098d56 100644 --- a/medusa/show/recommendations/anidb.py +++ b/medusa/show/recommendations/anidb.py @@ -34,7 +34,7 @@ def __init__(self): """ self.cache_subfolder = __name__.split('.')[-1] if '.' in __name__ else __name__ self.session = MedusaSession() - self.recommender = "Anidb Popular" + self.recommender = 'Anidb Popular' self.base_url = 'https://anidb.net/perl-bin/animedb.pl?show=anime&aid={aid}' self.default_img_src = 'poster.png' diff --git a/medusa/show/recommendations/trakt.py b/medusa/show/recommendations/trakt.py index b4aa896a08..6fab3ee98d 100644 --- a/medusa/show/recommendations/trakt.py +++ b/medusa/show/recommendations/trakt.py @@ -41,7 +41,7 @@ class TraktPopular(object): def __init__(self): """Initialize the trakt recommended list object.""" self.cache_subfolder = __name__.split('.')[-1] if '.' in __name__ else __name__ - self.recommender = "Trakt Popular" + self.recommender = 'Trakt Popular' self.default_img_src = 'trakt-default.png' self.tvdb_api_v2 = indexerApi(INDEXER_TVDBV2).indexer() From 56370bbb00d6648493429d1d8550043a2e1c5a2a Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sun, 22 Jul 2018 13:42:17 +0300 Subject: [PATCH 35/51] medusa/themes/base.py --- medusa/themes/base.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/medusa/themes/base.py b/medusa/themes/base.py index b13e528f97..cbb9a74f94 100644 --- a/medusa/themes/base.py +++ b/medusa/themes/base.py @@ -55,7 +55,7 @@ def validate_theme(theme_path): try: dir_list = os.listdir(theme_path) except Exception as err: - raise Exception("Unable to list directories in {path}: {err!r}".format(path=theme_path, err=err)) + raise Exception('Unable to list directories in {path}: {err!r}'.format(path=theme_path, err=err)) # If the folder is completely empty, then the theme was probably removed, so just skip it if not dir_list: @@ -76,11 +76,11 @@ def validate_theme(theme_path): # Validate if they mandatory keys are configured in the package.json. 
if not package_json.get('name') or not package_json.get('version'): - raise Exception("As a bare minimum you'l need at least to provide the 'name' and and 'version' key. " - "Please refer to the medusa theming documentation.") + raise Exception("As a bare minimum you'll need at least to provide the 'name' and and 'version' key. " + 'Please refer to the medusa theming documentation.') if not os.path.isdir(os.path.join(theme_path, 'templates')) and not os.path.isfile(os.path.join(theme_path, 'index.html')): - raise Exception("You need to have at least a templates folder with mako temnplates, " + raise Exception('You need to have at least a templates folder with mako temnplates, ' "or an index.html in your [theme's] root. Please refer to the medusa theming documentation.") return True From f8f5517c6d10dcde1904c3501ce4ea5b5ec84fee Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sun, 22 Jul 2018 13:43:17 +0300 Subject: [PATCH 36/51] medusa/tv/series.py --- medusa/tv/series.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/medusa/tv/series.py b/medusa/tv/series.py index a1aae9c28c..b1c74f2fd5 100644 --- a/medusa/tv/series.py +++ b/medusa/tv/series.py @@ -912,7 +912,7 @@ def write_metadata(self, show_only=False): def __write_episode_nfos(self): - log.debug(u"{id}: Writing NFOs for all episodes", + log.debug(u'{id}: Writing NFOs for all episodes', {'id': self.series_id}) main_db_con = db.DBConnection() @@ -952,7 +952,7 @@ def __update_show_nfo(self): result = False - log.info(u"{id}: Updating NFOs for show with new indexer info", + log.info(u'{id}: Updating NFOs for show with new indexer info', {'id': self.series_id}) # You may only call .values() on metadata_provider_dict! As on values() call the indexer_api attribute # is reset. This will prevent errors, when using multiple indexers and caching. @@ -968,7 +968,7 @@ def load_episodes_from_dir(self): {'id': self.series_id}) return - log.debug(u"{id}: Loading all episodes from the show directory: {location}", + log.debug(u'{id}: Loading all episodes from the show directory: {location}', {'id': self.series_id, 'location': self.location}) # get file list @@ -981,13 +981,13 @@ def load_episodes_from_dir(self): for media_file in media_files: cur_episode = None - log.debug(u"{id}: Creating episode from: {location}", + log.debug(u'{id}: Creating episode from: {location}', {'id': self.series_id, 'location': media_file}) try: cur_episode = self.make_ep_from_file(os.path.join(self.location, media_file)) except (ShowNotFoundException, EpisodeNotFoundException) as error: log.warning( - u"{id}: Episode {location} returned an exception {error_msg}", { + u'{id}: Episode {location} returned an exception {error_msg}', { 'id': self.series_id, 'location': media_file, 'error_msg': ex(error), @@ -1542,7 +1542,7 @@ def load_imdb_info(self): try: imdb_info = imdb_api.get_title(self.imdb_id) except LookupError as error: - log.warning(u"{id}: IMDbPie error while loading show info: {error}", + log.warning(u'{id}: IMDbPie error while loading show info: {error}', {'id': self.series_id, 'error': error}) imdb_info = None @@ -2188,7 +2188,7 @@ def want_episode(self, season, episode, quality, forced_search=False, log.debug( u"{id}: '{show}' {ep} status is: '{status}'." u" {action} result with quality '{new_quality}'." 
- u" Reason: {reason}", { + u' Reason: {reason}', { 'id': self.series_id, 'show': self.name, 'ep': episode_num(season, episode), From 135500cf597e38732541b0997c8b20008d4ee1a8 Mon Sep 17 00:00:00 2001 From: sharkykh Date: Tue, 24 Jul 2018 10:59:17 +0300 Subject: [PATCH 37/51] Move most ignores to `pytest` only, so errors show up in editors --- setup.cfg | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index c8c579ecf3..0b8a1bd0c7 100644 --- a/setup.cfg +++ b/setup.cfg @@ -7,11 +7,15 @@ multiline-quotes = double docstring-quotes = double ; flake8 max-line-length = 160 +; If per-file ignores are needed, use: https://github.com/snoack/flake8-per-file-ignores ignore = ; Error codes reference: https://git.io/fNlTP - D107 ; Q002: Handled by flake8-docstrings Q002 + +[tool:pytest] +flake8-ignore = + D107 medusa/__init__.py D104 F401 medusa/bs4_parser.py D100 D101 D102 D105 medusa/cache.py D401 E305 From 11203b2159667c3931fe9a041b247b7dd8977eb0 Mon Sep 17 00:00:00 2001 From: sharkykh Date: Tue, 24 Jul 2018 11:49:11 +0300 Subject: [PATCH 38/51] Fix E123 and E126 E123: closing bracket does not match indentation of opening bracket's line * medusa/providers/generic_provider.py E126: continuation line over-indented for hanging indent * medusa/server/web/home/handler.py * medusa/show/recommendations/trakt.py --- medusa/providers/generic_provider.py | 3 +-- medusa/server/web/home/handler.py | 6 +++--- medusa/show/recommendations/trakt.py | 4 ++-- 3 files changed, 6 insertions(+), 7 deletions(-) diff --git a/medusa/providers/generic_provider.py b/medusa/providers/generic_provider.py index 27f049e2d3..dc2431f55e 100644 --- a/medusa/providers/generic_provider.py +++ b/medusa/providers/generic_provider.py @@ -235,8 +235,7 @@ def remove_duplicate_mappings(items, pk='link'): itervalues(OrderedDict( (item[pk], item) for item in items - ) - ) + )) ) def find_search_results(self, series, episodes, search_mode, forced_search=False, download_current_quality=False, diff --git a/medusa/server/web/home/handler.py b/medusa/server/web/home/handler.py index 67c173b756..204498647c 100644 --- a/medusa/server/web/home/handler.py +++ b/medusa/server/web/home/handler.py @@ -1962,7 +1962,7 @@ def setStatus(self, indexername=None, seriesid=None, eps=None, status=None, dire if status == FAILED and ep_obj.status not in snatched_qualities + [DOWNLOADED, ARCHIVED]: logger.log('Refusing to change status of {series} {episode} to FAILED' ' because it\'s not SNATCHED/DOWNLOADED/ARCHIVED'.format( - series=series_obj.name, episode=cur_ep), logger.WARNING) + series=series_obj.name, episode=cur_ep), logger.WARNING) continue if status == WANTED: @@ -1974,7 +1974,7 @@ def setStatus(self, indexername=None, seriesid=None, eps=None, status=None, dire if ep_obj.manually_searched: logger.log("Resetting 'manually searched' flag of {series} {episode}" ' as episode was changed to WANTED'.format( - series=series_obj.name, episode=cur_ep), logger.DEBUG) + series=series_obj.name, episode=cur_ep), logger.DEBUG) ep_obj.manually_searched = False # Only in failed_history we set to FAILED. 
@@ -1995,7 +1995,7 @@ def setStatus(self, indexername=None, seriesid=None, eps=None, status=None, dire upd = 'Remove' logger.log('{action} episodes, showid: indexerid {show.indexerid}, Title {show.name} to Watchlist'.format( - action=upd, show=series_obj), logger.DEBUG) + action=upd, show=series_obj), logger.DEBUG) if data: notifiers.trakt_notifier.update_watchlist(series_obj, data_episode=data, update=upd.lower()) diff --git a/medusa/show/recommendations/trakt.py b/medusa/show/recommendations/trakt.py index 6fab3ee98d..ddbc308105 100644 --- a/medusa/show/recommendations/trakt.py +++ b/medusa/show/recommendations/trakt.py @@ -67,8 +67,8 @@ def _create_recommended_show(self, series, storage_key=None): try: if not missing_posters.has(series['show']['ids']['tvdb']): image = self.check_cache_for_poster(series['show']['ids']['tvdb']) or \ - self.tvdb_api_v2.config['session'].series_api.series_id_images_query_get( - series['show']['ids']['tvdb'], key_type='poster').data[0].file_name + self.tvdb_api_v2.config['session'].series_api.series_id_images_query_get( + series['show']['ids']['tvdb'], key_type='poster').data[0].file_name else: log.info('CACHE: Missing poster on TVDB for show {0}', series['show']['title']) use_default = self.default_img_src From 07369bd96449158c9fbc11b7f0fd962749757ceb Mon Sep 17 00:00:00 2001 From: sharkykh Date: Tue, 24 Jul 2018 12:08:49 +0300 Subject: [PATCH 39/51] Caching .eggs can cause issues --- .travis.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index d8e2b0aa83..ff2183eafd 100644 --- a/.travis.yml +++ b/.travis.yml @@ -5,7 +5,6 @@ cache: directories: - $HOME/.cache/pip - node_modules - - .eggs before_install: - python .github/check_version.py install: false From 45ba12d951845dbbaabb53860a95bf801327d644 Mon Sep 17 00:00:00 2001 From: sharkykh Date: Tue, 24 Jul 2018 12:14:33 +0300 Subject: [PATCH 40/51] Update test dependencies --- setup.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/setup.py b/setup.py index 656e7f1500..f5433000b5 100644 --- a/setup.py +++ b/setup.py @@ -38,7 +38,6 @@ def run_tests(self): install_requires=['tornado==5.1', 'six', 'profilehooks', 'contextlib2', ], cmdclass={'test': PyTest}, tests_require=[ - 'dredd_hooks', 'flake8', 'flake8-docstrings', 'flake8-import-order', @@ -47,7 +46,7 @@ def run_tests(self): 'pycodestyle==2.3.1', 'pytest', 'pytest-cov', - 'pytest-flake8==0.9.1', + 'pytest-flake8', 'pytest-tornado5', 'PyYAML<4', 'vcrpy', From a4c85334bc683703c93712a6430f9b5200c135eb Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sat, 28 Jul 2018 08:57:03 +0300 Subject: [PATCH 41/51] Update select-list component Add csv-enabled flag, to start with the csv list. 
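
A minimal usage sketch (the list prop and change handler names below are
illustrative placeholders, not part of this change):

    <!-- `list-items` and `onSyncFilesChange` are placeholder names -->
    <select-list :list-items="syncFiles" csv-enabled
                 @change="onSyncFilesChange"></select-list>

Since `csvEnabled` defaults to `false`, existing usages of the component
keep the current behaviour.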
--- themes-default/slim/static/js/templates/select-list.vue | 8 ++++++-- themes/dark/assets/js/templates/select-list.vue | 8 ++++++-- themes/light/assets/js/templates/select-list.vue | 8 ++++++-- 3 files changed, 18 insertions(+), 6 deletions(-) diff --git a/themes-default/slim/static/js/templates/select-list.vue b/themes-default/slim/static/js/templates/select-list.vue index 11f91f8ff9..cfdf1131b7 100644 --- a/themes-default/slim/static/js/templates/select-list.vue +++ b/themes-default/slim/static/js/templates/select-list.vue @@ -46,6 +46,11 @@ module.exports = { type: Boolean, default: true, required: false + }, + csvEnabled: { + type: Boolean, + default: false, + required: false } }, data() { @@ -54,8 +59,7 @@ module.exports = { editItems: [], newItem: '', indexCounter: 0, - csv: '', - csvEnabled: false + csv: '' }; }, created() { diff --git a/themes/dark/assets/js/templates/select-list.vue b/themes/dark/assets/js/templates/select-list.vue index 11f91f8ff9..cfdf1131b7 100644 --- a/themes/dark/assets/js/templates/select-list.vue +++ b/themes/dark/assets/js/templates/select-list.vue @@ -46,6 +46,11 @@ module.exports = { type: Boolean, default: true, required: false + }, + csvEnabled: { + type: Boolean, + default: false, + required: false } }, data() { @@ -54,8 +59,7 @@ module.exports = { editItems: [], newItem: '', indexCounter: 0, - csv: '', - csvEnabled: false + csv: '' }; }, created() { diff --git a/themes/light/assets/js/templates/select-list.vue b/themes/light/assets/js/templates/select-list.vue index 11f91f8ff9..cfdf1131b7 100644 --- a/themes/light/assets/js/templates/select-list.vue +++ b/themes/light/assets/js/templates/select-list.vue @@ -46,6 +46,11 @@ module.exports = { type: Boolean, default: true, required: false + }, + csvEnabled: { + type: Boolean, + default: false, + required: false } }, data() { @@ -54,8 +59,7 @@ module.exports = { editItems: [], newItem: '', indexCounter: 0, - csv: '', - csvEnabled: false + csv: '' }; }, created() { From c263451fb337164c04de38296bcf70fa92069b91 Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sat, 28 Jul 2018 09:02:28 +0300 Subject: [PATCH 42/51] Move select-list component registration to `main.mako` --- themes-default/slim/views/editShow.mako | 1 - themes-default/slim/views/layouts/main.mako | 1 + themes/dark/templates/editShow.mako | 1 - themes/dark/templates/layouts/main.mako | 1 + themes/light/templates/editShow.mako | 1 - themes/light/templates/layouts/main.mako | 1 + 6 files changed, 3 insertions(+), 3 deletions(-) diff --git a/themes-default/slim/views/editShow.mako b/themes-default/slim/views/editShow.mako index a681502d6f..b0f70ff38d 100644 --- a/themes-default/slim/views/editShow.mako +++ b/themes-default/slim/views/editShow.mako @@ -1,7 +1,6 @@ <%inherit file="/layouts/main.mako"/> <%block name="scripts"> <%include file="/vue-components/sub-menu.mako"/> <%include file="/vue-components/quality-chooser.mako"/> diff --git a/themes/dark/templates/editShow.mako b/themes/dark/templates/editShow.mako index a681502d6f..b0f70ff38d 100644 --- a/themes/dark/templates/editShow.mako +++ b/themes/dark/templates/editShow.mako @@ -1,7 +1,6 @@ <%inherit file="/layouts/main.mako"/> <%block name="scripts"> <%include file="/vue-components/sub-menu.mako"/> <%include file="/vue-components/quality-chooser.mako"/> diff --git a/themes/light/templates/editShow.mako b/themes/light/templates/editShow.mako index a681502d6f..b0f70ff38d 100644 --- a/themes/light/templates/editShow.mako +++ b/themes/light/templates/editShow.mako @@ -1,7 +1,6 @@ <%inherit 
file="/layouts/main.mako"/> <%block name="scripts"> <%include file="/vue-components/sub-menu.mako"/> <%include file="/vue-components/quality-chooser.mako"/> From e3303bee13e8f4b4659ba1d3b1ff79230755aa71 Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sat, 28 Jul 2018 09:28:15 +0300 Subject: [PATCH 43/51] Update config API - postProcessing, metadata --- dredd/api-description.yml | 76 ++++++++++++++++++ medusa/metadata/generic.py | 30 +++++++ medusa/server/api/v2/base.py | 34 +++++++- medusa/server/api/v2/config.py | 104 +++++++++++++++++++++++-- themes-default/slim/static/js/store.js | 40 +++++++++- themes/dark/assets/js/store.js | 40 +++++++++- themes/dark/assets/js/store.js.map | 2 +- themes/light/assets/js/store.js | 40 +++++++++- themes/light/assets/js/store.js.map | 2 +- 9 files changed, 352 insertions(+), 16 deletions(-) diff --git a/dredd/api-description.yml b/dredd/api-description.yml index 1c0a9a7589..59ad711817 100644 --- a/dredd/api-description.yml +++ b/dredd/api-description.yml @@ -1406,6 +1406,79 @@ definitions: numWarnings: description: Number of logged warnings type: integer + postProcessing: + type: object + properties: + naming: + type: object + properties: + stripYear: + type: boolean + pattern: + type: string + multiEp: + type: integer + patternAirByDate: + type: string + patternSports: + type: string + patternAnime: + type: string + enableCustomNamingAirByDate: + type: boolean + enableCustomNamingSports: + type: boolean + enableCustomNamingAnime: + type: boolean + animeMultiEp: + type: integer + animeNamingType: + type: integer + seriesDownloadDir: + type: string + processAutomatically: + type: boolean + postponeIfSyncFiles: + type: boolean + postponeIfNoSubs: + type: boolean + renameEpisodes: + type: boolean + createMissingShowDirs: + type: boolean + addShowsWithoutDir: + type: boolean + moveAssociatedFiles: + type: boolean + nfoRename: + type: boolean + airdateEpisodes: + type: boolean + unpack: + type: boolean + deleteRarContent: + type: boolean + noDelete: + type: boolean + processMethod: + type: string + reflinkAvailable: + type: boolean + autoPostprocessorFrequency: + type: integer + syncFiles: + type: array + fileTimestampTimezone: + type: string + allowedExtensions: + type: array + extraScripts: + type: array + extraScriptsUrl: + type: string + multiEpStrings: + type: object + Log: type: object properties: @@ -1834,6 +1907,9 @@ parameters: type: string enum: - main + - statuses + - qualities + - metadata log-level: name: level in: query diff --git a/medusa/metadata/generic.py b/medusa/metadata/generic.py index 33a6fb4f94..d85fe34a66 100644 --- a/medusa/metadata/generic.py +++ b/medusa/metadata/generic.py @@ -1004,3 +1004,33 @@ def size_str_to_int(x): u'Could not find any {type} images on TMDB for {series}', {u'type': img_type, u'series': show.name} ) + + def to_json(self): + """Return JSON representation.""" + data = {} + data['id'] = self.get_id() + data['name'] = self.name + data['showMetadata'] = self.show_metadata + data['episodeMetadata'] = self.episode_metadata + data['fanart'] = self.fanart + data['poster'] = self.poster + data['banner'] = self.banner + data['episodeThumbnails'] = self.episode_thumbnails + data['seasonPosters'] = self.season_posters + data['seasonBanners'] = self.season_banners + data['seasonAllPoster'] = self.season_all_poster + data['seasonAllBanner'] = self.season_all_banner + + data['example'] = {} + data['example']['banner'] = self.eg_banner + data['example']['episodeMetadata'] = self.eg_episode_metadata + 
data['example']['episodeThumbnails'] = self.eg_episode_thumbnails + data['example']['fanart'] = self.eg_fanart + data['example']['poster'] = self.eg_poster + data['example']['seasonAllBanner'] = self.eg_season_all_banner + data['example']['seasonAllPoster'] = self.eg_season_all_poster + data['example']['seasonBanners'] = self.eg_season_banners + data['example']['seasonPosters'] = self.eg_season_posters + data['example']['showMetadata'] = self.eg_show_metadata + + return data diff --git a/medusa/server/api/v2/base.py b/medusa/server/api/v2/base.py index ec07b7bb3d..9a3dc05ede 100644 --- a/medusa/server/api/v2/base.py +++ b/medusa/server/api/v2/base.py @@ -19,7 +19,7 @@ from medusa import app from medusa.logger.adapters.style import BraceAdapter -from six import string_types, text_type, viewitems +from six import itervalues, string_types, text_type, viewitems from tornado.httpclient import HTTPError from tornado.httputil import url_concat @@ -492,3 +492,35 @@ def __init__(self, target, attr, enums, attr_type=text_type, converter=None, super(EnumField, self).__init__(target, attr, attr_type, validator=lambda v: v in enums, converter=converter, default_value=default_value, setter=setter, post_processor=post_processor) + + +# @TODO: Make this field more dynamic (a dict patch field) +class MetadataStructureField(PatchField): + """Process the metadata structure.""" + + def __init__(self, target, attr): + """Constructor.""" + super(MetadataStructureField, self).__init__(target, attr, dict, validator=None, converter=None, + default_value=None, setter=None, post_processor=None) + + def patch(self, target, value): + """Patch the field with the specified value.""" + map_values = { + 'showMetadata': 'show_metadata', + 'episodeMetadata': 'episode_metadata', + 'episodeThumbnails': 'episode_thumbnails', + 'seasonPosters': 'season_posters', + 'seasonBanners': 'season_banners', + 'seasonAllPoster': 'season_all_poster', + 'seasonAllBanner': 'season_all_banner', + } + + try: + for new_provider_config in itervalues(value): + for k, v in viewitems(new_provider_config): + setattr(target.metadata_provider_dict[new_provider_config['name']], map_values.get(k, k), v) + except Exception as error: + log.warning('Error trying to change attribute app.metadata_provider_dict: {0!r}', error) + return False + + return True diff --git a/medusa/server/api/v2/config.py b/medusa/server/api/v2/config.py index 7fc491b5cf..e12a16d24b 100644 --- a/medusa/server/api/v2/config.py +++ b/medusa/server/api/v2/config.py @@ -4,6 +4,7 @@ import inspect import logging +import pkgutil import platform import sys @@ -25,12 +26,13 @@ EnumField, IntegerField, ListField, + MetadataStructureField, StringField, iter_nested_items, set_nested_value, ) -from six import iteritems, text_type +from six import iteritems, itervalues, text_type from tornado.escape import json_decode @@ -114,6 +116,38 @@ class ConfigHandler(BaseRequestHandler): 'backlogOverview.period': StringField(app, 'BACKLOG_PERIOD'), 'backlogOverview.status': StringField(app, 'BACKLOG_STATUS'), 'rootDirs': ListField(app, 'ROOT_DIRS'), + 'postProcessing.seriesDownloadDir': StringField(app, 'TV_DOWNLOAD_DIR'), + 'postProcessing.processAutomatically': BooleanField(app, 'PROCESS_AUTOMATICALLY'), + 'postProcessing.processMethod': StringField(app, 'PROCESS_METHOD'), + 'postProcessing.deleteRarContent': BooleanField(app, 'DELRARCONTENTS'), + 'postProcessing.unpack': BooleanField(app, 'UNPACK'), + 'postProcessing.noDelete': BooleanField(app, 'NO_DELETE'), + 'postProcessing.postponeIfSyncFiles': 
BooleanField(app, 'POSTPONE_IF_SYNC_FILES'), + 'postProcessing.autoPostprocessorFrequency': IntegerField(app, 'AUTOPOSTPROCESSOR_FREQUENCY'), + 'postProcessing.airdateEpisodes': BooleanField(app, 'AIRDATE_EPISODES'), + + 'postProcessing.moveAssociatedFiles': BooleanField(app, 'MOVE_ASSOCIATED_FILES'), + 'postProcessing.allowedExtensions': ListField(app, 'ALLOWED_EXTENSIONS'), + 'postProcessing.addShowsWithoutDir': BooleanField(app, 'ADD_SHOWS_WO_DIR'), + 'postProcessing.createMissingShowDirs': BooleanField(app, 'CREATE_MISSING_SHOW_DIRS'), + 'postProcessing.renameEpisodes': BooleanField(app, 'RENAME_EPISODES'), + 'postProcessing.postponeIfNoSubs': BooleanField(app, 'POSTPONE_IF_NO_SUBS'), + 'postProcessing.nfoRename': BooleanField(app, 'NFO_RENAME'), + 'postProcessing.syncFiles': ListField(app, 'SYNC_FILES'), + 'postProcessing.fileTimestampTimezone': StringField(app, 'FILE_TIMESTAMP_TIMEZONE'), + 'postProcessing.extraScripts': ListField(app, 'EXTRA_SCRIPTS'), + 'postProcessing.extraScriptsUrl': StringField(app, 'EXTRA_SCRIPTS_URL'), + 'postProcessing.naming.pattern': StringField(app, 'NAMING_PATTERN'), + 'postProcessing.naming.enableCustomNamingAnime': BooleanField(app, 'NAMING_CUSTOM_ANIME'), + 'postProcessing.naming.enableCustomNamingSports': BooleanField(app, 'NAMING_CUSTOM_SPORTS'), + 'postProcessing.naming.enableCustomNamingAirByDate': BooleanField(app, 'NAMING_CUSTOM_ABD'), + 'postProcessing.naming.patternSports': StringField(app, 'NAMING_SPORTS_PATTERN'), + 'postProcessing.naming.patternAirByDate': StringField(app, 'NAMING_ABD_PATTERN'), + 'postProcessing.naming.patternAnime': StringField(app, 'NAMING_ANIME_PATTERN'), + 'postProcessing.naming.animeMultiEp': IntegerField(app, 'NAMING_ANIME_MULTI_EP'), + 'postProcessing.naming.animeNamingType': IntegerField(app, 'NAMING_ANIME'), + 'postProcessing.naming.multiEp': IntegerField(app, 'NAMING_MULTI_EP'), + 'postProcessing.naming.stripYear': BooleanField(app, 'NAMING_STRIP_YEAR') } def get(self, identifier, path_param=None): @@ -158,6 +192,18 @@ def patch(self, identifier, *args, **kwargs): accepted = {} ignored = {} + # Remove the metadata providers from the nested items. + # It's ugly but I don't see a better solution for it right now. 
+ if data.get('metadata'): + metadata_providers = data['metadata'].pop('metadataProviders') + + if metadata_providers: + patch_metadata_providers = MetadataStructureField(app, 'metadata_provider_dict') + if patch_metadata_providers and patch_metadata_providers.patch(app, metadata_providers): + set_nested_value(accepted, 'metadata.metadataProviders', metadata_providers) + else: + set_nested_value(ignored, 'metadata.metadataProviders', metadata_providers) + for key, value in iter_nested_items(data): patch_field = self.patches.get(key) if patch_field and patch_field.patch(app, value): @@ -187,11 +233,10 @@ class DataGenerator(object): @classmethod def sections(cls): """Get the available section names.""" - return [ - name[5:] - for (name, function) in inspect.getmembers(cls, predicate=inspect.isfunction) - if name.startswith('data_') - ] + cls.data__ = [name[5:] for (name, function) in inspect.getmembers(cls, predicate=inspect.isfunction) if + name.startswith('data_')] + cls.cls_data__ = cls.data__ + return cls.cls_data__ @classmethod def get_data(cls, section): @@ -357,8 +402,40 @@ def data_main(): section_data['indexers']['config'] = get_indexer_config() section_data['postProcessing'] = NonEmptyDict() - section_data['postProcessing']['processMethod'] = app.PROCESS_METHOD + section_data['postProcessing']['naming'] = NonEmptyDict() + section_data['postProcessing']['naming']['pattern'] = app.NAMING_PATTERN + section_data['postProcessing']['naming']['multiEp'] = int(app.NAMING_MULTI_EP) + section_data['postProcessing']['naming']['patternAirByDate'] = app.NAMING_ABD_PATTERN + section_data['postProcessing']['naming']['patternSports'] = app.NAMING_SPORTS_PATTERN + section_data['postProcessing']['naming']['patternAnime'] = app.NAMING_ANIME_PATTERN + section_data['postProcessing']['naming']['enableCustomNamingAirByDate'] = bool(app.NAMING_CUSTOM_ABD) + section_data['postProcessing']['naming']['enableCustomNamingSports'] = bool(app.NAMING_CUSTOM_SPORTS) + section_data['postProcessing']['naming']['enableCustomNamingAnime'] = bool(app.NAMING_CUSTOM_ANIME) + section_data['postProcessing']['naming']['animeMultiEp'] = int(app.NAMING_ANIME_MULTI_EP) + section_data['postProcessing']['naming']['animeNamingType'] = int(app.NAMING_ANIME) + section_data['postProcessing']['naming']['stripYear'] = bool(app.NAMING_STRIP_YEAR) + section_data['postProcessing']['seriesDownloadDir'] = app.TV_DOWNLOAD_DIR + section_data['postProcessing']['processAutomatically'] = bool(app.PROCESS_AUTOMATICALLY) + section_data['postProcessing']['postponeIfSyncFiles'] = bool(app.POSTPONE_IF_SYNC_FILES) section_data['postProcessing']['postponeIfNoSubs'] = bool(app.POSTPONE_IF_NO_SUBS) + section_data['postProcessing']['renameEpisodes'] = bool(app.RENAME_EPISODES) + section_data['postProcessing']['createMissingShowDirs'] = bool(app.CREATE_MISSING_SHOW_DIRS) + section_data['postProcessing']['addShowsWithoutDir'] = bool(app.ADD_SHOWS_WO_DIR) + section_data['postProcessing']['moveAssociatedFiles'] = bool(app.MOVE_ASSOCIATED_FILES) + section_data['postProcessing']['nfoRename'] = bool(app.NFO_RENAME) + section_data['postProcessing']['airdateEpisodes'] = bool(app.AIRDATE_EPISODES) + section_data['postProcessing']['unpack'] = bool(app.UNPACK) + section_data['postProcessing']['deleteRarContent'] = bool(app.DELRARCONTENTS) + section_data['postProcessing']['noDelete'] = bool(app.NO_DELETE) + section_data['postProcessing']['processMethod'] = app.PROCESS_METHOD + section_data['postProcessing']['reflinkAvailable'] = bool(pkgutil.find_loader('reflink')) 
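# Aside for illustration (not part of the patch): pkgutil.find_loader returns
# a loader object when the named module can be imported and None otherwise,
# so bool(pkgutil.find_loader('reflink')) reports whether the optional
# reflink dependency is installed without actually importing it:
import pkgutil
reflink_available = pkgutil.find_loader('reflink') is not None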
+ section_data['postProcessing']['autoPostprocessorFrequency'] = app.AUTOPOSTPROCESSOR_FREQUENCY + section_data['postProcessing']['syncFiles'] = app.SYNC_FILES + section_data['postProcessing']['fileTimestampTimezone'] = app.FILE_TIMESTAMP_TIMEZONE + section_data['postProcessing']['allowedExtensions'] = app.ALLOWED_EXTENSIONS + section_data['postProcessing']['extraScripts'] = app.EXTRA_SCRIPTS + section_data['postProcessing']['extraScriptsUrl'] = app.EXTRA_SCRIPTS_URL + section_data['postProcessing']['multiEpStrings'] = common.MULTI_EP_STRINGS return section_data @@ -430,3 +507,16 @@ def data_statuses(): section_data['strings'] = common.statusStrings return section_data + + @staticmethod + def data_metadata(): + """Metadata.""" + section_data = NonEmptyDict() + + section_data['metadataProviders'] = NonEmptyDict() + + for provider in itervalues(app.metadata_provider_dict): + json_repr = provider.to_json() + section_data['metadataProviders'][json_repr['id']] = json_repr + + return section_data diff --git a/themes-default/slim/static/js/store.js b/themes-default/slim/static/js/store.js index bd9377eb60..e21bbc878e 100644 --- a/themes-default/slim/static/js/store.js +++ b/themes-default/slim/static/js/store.js @@ -73,6 +73,9 @@ const store = new Store({ }, qualities: {}, statuses: {}, + metadata: { + metadataProviders: {} + }, // Main config config: { wikiUrl: null, @@ -278,8 +281,41 @@ const store = new Store({ deleteFailed: null }, postProcessing: { + naming: { + pattern: null, + multiEp: null, + enableCustomNamingSports: null, + enableCustomNamingAirByDate: null, + patternSports: null, + patternAirByDate: null, + enableCustomNamingAnime: null, + patternAnime: null, + animeMultiEp: null, + animeNamingType: null, + stripYear: null + }, + seriesDownloadDir: null, + processAutomatically: null, processMethod: null, - postponeIfNoSubs: null + deleteRarContent: null, + unpack: null, + noDelete: null, + reflinkAvailable: null, + postponeIfSyncFiles: null, + autoPostprocessorFrequency: 10, + airdateEpisodes: null, + moveAssociatedFiles: null, + allowedExtensions: [], + addShowsWithoutDir: null, + createMissingShowDirs: null, + renameEpisodes: null, + postponeIfNoSubs: null, + nfoRename: null, + syncFiles: [], + fileTimestampTimezone: 'local', + extraScripts: [], + extraScriptsUrl: null, + multiEpStrings: null }, sslVersion: null, pythonVersion: null, @@ -403,7 +439,7 @@ const store = new Store({ if (section === 'main') { state.config = config; } - if (['qualities', 'statuses'].includes(section)) { + if (['qualities', 'statuses', 'metadata'].includes(section)) { state[section] = config; } }, diff --git a/themes/dark/assets/js/store.js b/themes/dark/assets/js/store.js index bd9377eb60..e21bbc878e 100644 --- a/themes/dark/assets/js/store.js +++ b/themes/dark/assets/js/store.js @@ -73,6 +73,9 @@ const store = new Store({ }, qualities: {}, statuses: {}, + metadata: { + metadataProviders: {} + }, // Main config config: { wikiUrl: null, @@ -278,8 +281,41 @@ const store = new Store({ deleteFailed: null }, postProcessing: { + naming: { + pattern: null, + multiEp: null, + enableCustomNamingSports: null, + enableCustomNamingAirByDate: null, + patternSports: null, + patternAirByDate: null, + enableCustomNamingAnime: null, + patternAnime: null, + animeMultiEp: null, + animeNamingType: null, + stripYear: null + }, + seriesDownloadDir: null, + processAutomatically: null, processMethod: null, - postponeIfNoSubs: null + deleteRarContent: null, + unpack: null, + noDelete: null, + reflinkAvailable: null, + 
postponeIfSyncFiles: null, + autoPostprocessorFrequency: 10, + airdateEpisodes: null, + moveAssociatedFiles: null, + allowedExtensions: [], + addShowsWithoutDir: null, + createMissingShowDirs: null, + renameEpisodes: null, + postponeIfNoSubs: null, + nfoRename: null, + syncFiles: [], + fileTimestampTimezone: 'local', + extraScripts: [], + extraScriptsUrl: null, + multiEpStrings: null }, sslVersion: null, pythonVersion: null, @@ -403,7 +439,7 @@ const store = new Store({ if (section === 'main') { state.config = config; } - if (['qualities', 'statuses'].includes(section)) { + if (['qualities', 'statuses', 'metadata'].includes(section)) { state[section] = config; } }, diff --git a/themes/dark/assets/js/store.js.map b/themes/dark/assets/js/store.js.map index 87e4d0bee1..3368609b7e 100644 --- a/themes/dark/assets/js/store.js.map +++ b/themes/dark/assets/js/store.js.map @@ -1 +1 @@ -{"version":3,"names":[],"mappings":"","sources":["js/store.js"],"sourcesContent":["(function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c=\"function\"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error(\"Cannot find module '\"+i+\"'\");throw a.code=\"MODULE_NOT_FOUND\",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u=\"function\"==typeof require&&require,i=0;ihello world

<ul><li>item 1</li><li>item 2</li></ul>
', 'notification-test');\n },\n setLayout: function setLayout(context, _ref7) {\n var page = _ref7.page,\n layout = _ref7.layout;\n\n return api.patch('config/main', {\n layout: _defineProperty({}, page, layout)\n // For now we reload the page since the layouts use python still\n }).then(setTimeout(function () {\n return location.reload();\n }, 500));\n }\n },\n // @TODO Add logging here\n plugins: []\n});\n\nvar websocketUrl = function () {\n var proto = window.location.protocol === 'https:' ? 'wss:' : 'ws:';\n var WSMessageUrl = '/ui';\n return proto + '//' + window.location.hostname + ':' + window.location.port + webRoot + '/ws' + WSMessageUrl;\n}();\n\nVue.use(VueNativeSock, websocketUrl, {\n store: store,\n format: 'json',\n reconnection: true, // (Boolean) whether to reconnect automatically (false)\n reconnectionAttempts: 2, // (Number) number of reconnection attempts before giving up (Infinity),\n reconnectionDelay: 1000 // (Number) how long to initially wait before attempting a new (1000)\n});\n\nwindow.store = store;\n\n},{}]},{},[1]);\n"],"file":"store.js"} \ No newline at end of file +{"version":3,"names":[],"mappings":"","sources":["js/store.js"],"sourcesContent":["(function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c=\"function\"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error(\"Cannot find module '\"+i+\"'\");throw a.code=\"MODULE_NOT_FOUND\",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u=\"function\"==typeof require&&require,i=0;ihello world
<ul><li>item 1</li><li>item 2</li></ul>
', 'notification-test');\n },\n setLayout: function setLayout(context, _ref7) {\n var page = _ref7.page,\n layout = _ref7.layout;\n\n return api.patch('config/main', {\n layout: _defineProperty({}, page, layout)\n // For now we reload the page since the layouts use python still\n }).then(setTimeout(function () {\n return location.reload();\n }, 500));\n }\n },\n // @TODO Add logging here\n plugins: []\n});\n\nvar websocketUrl = function () {\n var proto = window.location.protocol === 'https:' ? 'wss:' : 'ws:';\n var WSMessageUrl = '/ui';\n return proto + '//' + window.location.hostname + ':' + window.location.port + webRoot + '/ws' + WSMessageUrl;\n}();\n\nVue.use(VueNativeSock, websocketUrl, {\n store: store,\n format: 'json',\n reconnection: true, // (Boolean) whether to reconnect automatically (false)\n reconnectionAttempts: 2, // (Number) number of reconnection attempts before giving up (Infinity),\n reconnectionDelay: 1000 // (Number) how long to initially wait before attempting a new (1000)\n});\n\nwindow.store = store;\n\n},{}]},{},[1]);\n"],"file":"store.js"} \ No newline at end of file diff --git a/themes/light/assets/js/store.js b/themes/light/assets/js/store.js index bd9377eb60..e21bbc878e 100644 --- a/themes/light/assets/js/store.js +++ b/themes/light/assets/js/store.js @@ -73,6 +73,9 @@ const store = new Store({ }, qualities: {}, statuses: {}, + metadata: { + metadataProviders: {} + }, // Main config config: { wikiUrl: null, @@ -278,8 +281,41 @@ const store = new Store({ deleteFailed: null }, postProcessing: { + naming: { + pattern: null, + multiEp: null, + enableCustomNamingSports: null, + enableCustomNamingAirByDate: null, + patternSports: null, + patternAirByDate: null, + enableCustomNamingAnime: null, + patternAnime: null, + animeMultiEp: null, + animeNamingType: null, + stripYear: null + }, + seriesDownloadDir: null, + processAutomatically: null, processMethod: null, - postponeIfNoSubs: null + deleteRarContent: null, + unpack: null, + noDelete: null, + reflinkAvailable: null, + postponeIfSyncFiles: null, + autoPostprocessorFrequency: 10, + airdateEpisodes: null, + moveAssociatedFiles: null, + allowedExtensions: [], + addShowsWithoutDir: null, + createMissingShowDirs: null, + renameEpisodes: null, + postponeIfNoSubs: null, + nfoRename: null, + syncFiles: [], + fileTimestampTimezone: 'local', + extraScripts: [], + extraScriptsUrl: null, + multiEpStrings: null }, sslVersion: null, pythonVersion: null, @@ -403,7 +439,7 @@ const store = new Store({ if (section === 'main') { state.config = config; } - if (['qualities', 'statuses'].includes(section)) { + if (['qualities', 'statuses', 'metadata'].includes(section)) { state[section] = config; } }, diff --git a/themes/light/assets/js/store.js.map b/themes/light/assets/js/store.js.map index 87e4d0bee1..3368609b7e 100644 --- a/themes/light/assets/js/store.js.map +++ b/themes/light/assets/js/store.js.map @@ -1 +1 @@ -{"version":3,"names":[],"mappings":"","sources":["js/store.js"],"sourcesContent":["(function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c=\"function\"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error(\"Cannot find module '\"+i+\"'\");throw a.code=\"MODULE_NOT_FOUND\",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u=\"function\"==typeof require&&require,i=0;ihello world
<ul><li>item 1</li><li>item 2</li></ul>
', 'notification-test');\n },\n setLayout: function setLayout(context, _ref7) {\n var page = _ref7.page,\n layout = _ref7.layout;\n\n return api.patch('config/main', {\n layout: _defineProperty({}, page, layout)\n // For now we reload the page since the layouts use python still\n }).then(setTimeout(function () {\n return location.reload();\n }, 500));\n }\n },\n // @TODO Add logging here\n plugins: []\n});\n\nvar websocketUrl = function () {\n var proto = window.location.protocol === 'https:' ? 'wss:' : 'ws:';\n var WSMessageUrl = '/ui';\n return proto + '//' + window.location.hostname + ':' + window.location.port + webRoot + '/ws' + WSMessageUrl;\n}();\n\nVue.use(VueNativeSock, websocketUrl, {\n store: store,\n format: 'json',\n reconnection: true, // (Boolean) whether to reconnect automatically (false)\n reconnectionAttempts: 2, // (Number) number of reconnection attempts before giving up (Infinity),\n reconnectionDelay: 1000 // (Number) how long to initially wait before attempting a new (1000)\n});\n\nwindow.store = store;\n\n},{}]},{},[1]);\n"],"file":"store.js"} \ No newline at end of file +{"version":3,"names":[],"mappings":"","sources":["js/store.js"],"sourcesContent":["(function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c=\"function\"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error(\"Cannot find module '\"+i+\"'\");throw a.code=\"MODULE_NOT_FOUND\",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u=\"function\"==typeof require&&require,i=0;ihello world
<ul><li>item 1</li><li>item 2</li></ul>
', 'notification-test');\n },\n setLayout: function setLayout(context, _ref7) {\n var page = _ref7.page,\n layout = _ref7.layout;\n\n return api.patch('config/main', {\n layout: _defineProperty({}, page, layout)\n // For now we reload the page since the layouts use python still\n }).then(setTimeout(function () {\n return location.reload();\n }, 500));\n }\n },\n // @TODO Add logging here\n plugins: []\n});\n\nvar websocketUrl = function () {\n var proto = window.location.protocol === 'https:' ? 'wss:' : 'ws:';\n var WSMessageUrl = '/ui';\n return proto + '//' + window.location.hostname + ':' + window.location.port + webRoot + '/ws' + WSMessageUrl;\n}();\n\nVue.use(VueNativeSock, websocketUrl, {\n store: store,\n format: 'json',\n reconnection: true, // (Boolean) whether to reconnect automatically (false)\n reconnectionAttempts: 2, // (Number) number of reconnection attempts before giving up (Infinity),\n reconnectionDelay: 1000 // (Number) how long to initially wait before attempting a new (1000)\n});\n\nwindow.store = store;\n\n},{}]},{},[1]);\n"],"file":"store.js"} \ No newline at end of file From b7c562f036637f9d18a4f0d33abeb40676724847 Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sat, 28 Jul 2018 09:28:25 +0300 Subject: [PATCH 44/51] Standardize metadata examples --- medusa/metadata/generic.py | 12 ++++++++++++ medusa/metadata/kodi.py | 4 ++-- medusa/metadata/mede8er.py | 2 +- medusa/metadata/media_browser.py | 20 ++++++++++---------- medusa/metadata/ps3.py | 16 ++++++++-------- medusa/metadata/tivo.py | 18 +++++++++--------- medusa/metadata/wdtv.py | 12 ++++++------ 7 files changed, 48 insertions(+), 36 deletions(-) diff --git a/medusa/metadata/generic.py b/medusa/metadata/generic.py index d85fe34a66..dc1ab5b004 100644 --- a/medusa/metadata/generic.py +++ b/medusa/metadata/generic.py @@ -76,6 +76,18 @@ def __init__(self, show_metadata=False, episode_metadata=False, fanart=False, self.season_all_poster = season_all_poster self.season_all_banner = season_all_banner + # Web UI metadata template (override when subclassing) + self.eg_show_metadata = 'not supported' + self.eg_episode_metadata = 'not supported' + self.eg_fanart = 'not supported' + self.eg_poster = 'not supported' + self.eg_banner = 'not supported' + self.eg_episode_thumbnails = 'not supported' + self.eg_season_posters = 'not supported' + self.eg_season_banners = 'not supported' + self.eg_season_all_poster = 'not supported' + self.eg_season_all_banner = 'not supported' + # Reuse indexer api, as it's crazy to hit the api with a full search, for every season search. 
self.indexer_api = None diff --git a/medusa/metadata/kodi.py b/medusa/metadata/kodi.py index dfdd04e1ed..9605643636 100644 --- a/medusa/metadata/kodi.py +++ b/medusa/metadata/kodi.py @@ -62,9 +62,9 @@ def __init__(self, self.eg_banner = 'folder.jpg' self.eg_episode_thumbnails = 'Season##\\filename.tbn' self.eg_season_posters = 'season##.tbn' - self.eg_season_banners = 'not supported' + # self.eg_season_banners = 'not supported' self.eg_season_all_poster = 'season-all.tbn' - self.eg_season_all_banner = 'not supported' + # self.eg_season_all_banner = 'not supported' # Override with empty methods for unsupported features def create_season_banners(self, ep_obj): diff --git a/medusa/metadata/mede8er.py b/medusa/metadata/mede8er.py index 3bedbe70e2..29c36692cd 100644 --- a/medusa/metadata/mede8er.py +++ b/medusa/metadata/mede8er.py @@ -63,7 +63,7 @@ def __init__(self, self.fanart_name = 'fanart.jpg' - # web-ui metadata template + # web-ui metadata template (based on MediaBrowser) # self.eg_show_metadata = 'series.xml' self.eg_episode_metadata = 'Season##\\filename.xml' self.eg_fanart = 'fanart.jpg' diff --git a/medusa/metadata/media_browser.py b/medusa/metadata/media_browser.py index c6e59da2ff..abf23e90b5 100644 --- a/medusa/metadata/media_browser.py +++ b/medusa/metadata/media_browser.py @@ -71,16 +71,16 @@ def __init__(self, self.poster_name = u'folder.jpg' # web-ui metadata template - self.eg_show_metadata = u'series.xml' - self.eg_episode_metadata = u'Season##\\metadata\\filename.xml' - self.eg_fanart = u'backdrop.jpg' - self.eg_poster = u'folder.jpg' - self.eg_banner = u'banner.jpg' - self.eg_episode_thumbnails = u'Season##\\metadata\\filename.jpg' - self.eg_season_posters = u'Season##\\folder.jpg' - self.eg_season_banners = u'Season##\\banner.jpg' - self.eg_season_all_poster = u'not supported' - self.eg_season_all_banner = u'not supported' + self.eg_show_metadata = 'series.xml' + self.eg_episode_metadata = 'Season##\\metadata\\filename.xml' + self.eg_fanart = 'backdrop.jpg' + self.eg_poster = 'folder.jpg' + self.eg_banner = 'banner.jpg' + self.eg_episode_thumbnails = 'Season##\\metadata\\filename.jpg' + self.eg_season_posters = 'Season##\\folder.jpg' + self.eg_season_banners = 'Season##\\banner.jpg' + # self.eg_season_all_poster = 'not supported' + # self.eg_season_all_banner = 'not supported' # Override with empty methods for unsupported features def retrieveShowMetadata(self, folder): diff --git a/medusa/metadata/ps3.py b/medusa/metadata/ps3.py index 4d8628d519..bab72be55a 100644 --- a/medusa/metadata/ps3.py +++ b/medusa/metadata/ps3.py @@ -48,16 +48,16 @@ def __init__(self, self.poster_name = 'cover.jpg' # web-ui metadata template - self.eg_show_metadata = 'not supported' - self.eg_episode_metadata = 'not supported' - self.eg_fanart = 'not supported' + # self.eg_show_metadata = 'not supported' + # self.eg_episode_metadata = 'not supported' + # self.eg_fanart = 'not supported' self.eg_poster = 'cover.jpg' - self.eg_banner = 'not supported' + # self.eg_banner = 'not supported' self.eg_episode_thumbnails = 'Season##\\filename.ext.cover.jpg' - self.eg_season_posters = 'not supported' - self.eg_season_banners = 'not supported' - self.eg_season_all_poster = 'not supported' - self.eg_season_all_banner = 'not supported' + # self.eg_season_posters = 'not supported' + # self.eg_season_banners = 'not supported' + # self.eg_season_all_poster = 'not supported' + # self.eg_season_all_banner = 'not supported' # Override with empty methods for unsupported features def retrieveShowMetadata(self, 
folder): diff --git a/medusa/metadata/tivo.py b/medusa/metadata/tivo.py index ee463b4815..ff150e1e86 100644 --- a/medusa/metadata/tivo.py +++ b/medusa/metadata/tivo.py @@ -62,16 +62,16 @@ def __init__(self, self._ep_nfo_extension = 'txt' # web-ui metadata template - self.eg_show_metadata = 'not supported' + # self.eg_show_metadata = 'not supported' self.eg_episode_metadata = 'Season##\\.meta\\filename.ext.txt' - self.eg_fanart = 'not supported' - self.eg_poster = 'not supported' - self.eg_banner = 'not supported' - self.eg_episode_thumbnails = 'not supported' - self.eg_season_posters = 'not supported' - self.eg_season_banners = 'not supported' - self.eg_season_all_poster = 'not supported' - self.eg_season_all_banner = 'not supported' + # self.eg_fanart = 'not supported' + # self.eg_poster = 'not supported' + # self.eg_banner = 'not supported' + # self.eg_episode_thumbnails = 'not supported' + # self.eg_season_posters = 'not supported' + # self.eg_season_banners = 'not supported' + # self.eg_season_all_poster = 'not supported' + # self.eg_season_all_banner = 'not supported' # Override with empty methods for unsupported features def retrieveShowMetadata(self, folder): diff --git a/medusa/metadata/wdtv.py b/medusa/metadata/wdtv.py index 0dd6f00ce8..3f3c03fa90 100644 --- a/medusa/metadata/wdtv.py +++ b/medusa/metadata/wdtv.py @@ -67,16 +67,16 @@ def __init__(self, self.poster_name = 'folder.jpg' # web-ui metadata template - self.eg_show_metadata = 'not supported' + # self.eg_show_metadata = 'not supported' self.eg_episode_metadata = 'Season##\\filename.xml' - self.eg_fanart = 'not supported' + # self.eg_fanart = 'not supported' self.eg_poster = 'folder.jpg' - self.eg_banner = 'not supported' + # self.eg_banner = 'not supported' self.eg_episode_thumbnails = 'Season##\\filename.metathumb' self.eg_season_posters = 'Season##\\folder.jpg' - self.eg_season_banners = 'not supported' - self.eg_season_all_poster = 'not supported' - self.eg_season_all_banner = 'not supported' + # self.eg_season_banners = 'not supported' + # self.eg_season_all_poster = 'not supported' + # self.eg_season_all_banner = 'not supported' # Override with empty methods for unsupported features def retrieveShowMetadata(self, folder): From 1995cd1830b3a41f2d2c1aae4ebb8e844f3e6914 Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sat, 28 Jul 2018 09:29:58 +0300 Subject: [PATCH 45/51] Update store config actions --- themes-default/slim/static/js/store.js | 12 +++++++++--- themes/dark/assets/js/store.js | 12 +++++++++--- themes/dark/assets/js/store.js.map | 2 +- themes/light/assets/js/store.js | 12 +++++++++--- themes/light/assets/js/store.js.map | 2 +- 5 files changed, 29 insertions(+), 11 deletions(-) diff --git a/themes-default/slim/static/js/store.js b/themes-default/slim/static/js/store.js index e21bbc878e..097c9a0dc5 100644 --- a/themes-default/slim/static/js/store.js +++ b/themes-default/slim/static/js/store.js @@ -480,18 +480,24 @@ const store = new Store({ return api.get('/config/' + (section || '')).then(res => { if (section) { const config = res.data; - return commit(ADD_CONFIG, { section, config }); + commit(ADD_CONFIG, { section, config }); + return config; } - Object.keys(res.data).forEach(section => { - const config = res.data[section]; + const sections = res.data; + Object.keys(sections).forEach(section => { + const config = sections[section]; commit(ADD_CONFIG, { section, config }); }); + + return sections; }); }, setConfig(context, { section, config }) { if (section !== 'main') { return; } + config = 
Object.keys(config).length ? config : store.state.config; + return api.patch('config/' + section, config); }, updateConfig(context, { section, config }) { diff --git a/themes/dark/assets/js/store.js b/themes/dark/assets/js/store.js index e21bbc878e..097c9a0dc5 100644 --- a/themes/dark/assets/js/store.js +++ b/themes/dark/assets/js/store.js @@ -480,18 +480,24 @@ const store = new Store({ return api.get('/config/' + (section || '')).then(res => { if (section) { const config = res.data; - return commit(ADD_CONFIG, { section, config }); + commit(ADD_CONFIG, { section, config }); + return config; } - Object.keys(res.data).forEach(section => { - const config = res.data[section]; + const sections = res.data; + Object.keys(sections).forEach(section => { + const config = sections[section]; commit(ADD_CONFIG, { section, config }); }); + + return sections; }); }, setConfig(context, { section, config }) { if (section !== 'main') { return; } + config = Object.keys(config).length ? config : store.state.config; + return api.patch('config/' + section, config); }, updateConfig(context, { section, config }) { diff --git a/themes/dark/assets/js/store.js.map b/themes/dark/assets/js/store.js.map index 3368609b7e..56339e1f4e 100644 --- a/themes/dark/assets/js/store.js.map +++ b/themes/dark/assets/js/store.js.map @@ -1 +1 @@ -{"version":3,"names":[],"mappings":"","sources":["js/store.js"],"sourcesContent":["(function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c=\"function\"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error(\"Cannot find module '\"+i+\"'\");throw a.code=\"MODULE_NOT_FOUND\",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u=\"function\"==typeof require&&require,i=0;ihello world
<ul><li>item 1</li><li>item 2</li></ul>
', 'notification-test');\n },\n setLayout: function setLayout(context, _ref7) {\n var page = _ref7.page,\n layout = _ref7.layout;\n\n return api.patch('config/main', {\n layout: _defineProperty({}, page, layout)\n // For now we reload the page since the layouts use python still\n }).then(setTimeout(function () {\n return location.reload();\n }, 500));\n }\n },\n // @TODO Add logging here\n plugins: []\n});\n\nvar websocketUrl = function () {\n var proto = window.location.protocol === 'https:' ? 'wss:' : 'ws:';\n var WSMessageUrl = '/ui';\n return proto + '//' + window.location.hostname + ':' + window.location.port + webRoot + '/ws' + WSMessageUrl;\n}();\n\nVue.use(VueNativeSock, websocketUrl, {\n store: store,\n format: 'json',\n reconnection: true, // (Boolean) whether to reconnect automatically (false)\n reconnectionAttempts: 2, // (Number) number of reconnection attempts before giving up (Infinity),\n reconnectionDelay: 1000 // (Number) how long to initially wait before attempting a new (1000)\n});\n\nwindow.store = store;\n\n},{}]},{},[1]);\n"],"file":"store.js"} \ No newline at end of file +{"version":3,"names":[],"mappings":"","sources":["js/store.js"],"sourcesContent":["(function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c=\"function\"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error(\"Cannot find module '\"+i+\"'\");throw a.code=\"MODULE_NOT_FOUND\",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u=\"function\"==typeof require&&require,i=0;ihello world
<ul><li>item 1</li><li>item 2</li></ul>
', 'notification-test');\n },\n setLayout: function setLayout(context, _ref7) {\n var page = _ref7.page,\n layout = _ref7.layout;\n\n return api.patch('config/main', {\n layout: _defineProperty({}, page, layout)\n // For now we reload the page since the layouts use python still\n }).then(setTimeout(function () {\n return location.reload();\n }, 500));\n }\n },\n // @TODO Add logging here\n plugins: []\n});\n\nvar websocketUrl = function () {\n var proto = window.location.protocol === 'https:' ? 'wss:' : 'ws:';\n var WSMessageUrl = '/ui';\n return proto + '//' + window.location.hostname + ':' + window.location.port + webRoot + '/ws' + WSMessageUrl;\n}();\n\nVue.use(VueNativeSock, websocketUrl, {\n store: store,\n format: 'json',\n reconnection: true, // (Boolean) whether to reconnect automatically (false)\n reconnectionAttempts: 2, // (Number) number of reconnection attempts before giving up (Infinity),\n reconnectionDelay: 1000 // (Number) how long to initially wait before attempting a new (1000)\n});\n\nwindow.store = store;\n\n},{}]},{},[1]);\n"],"file":"store.js"} \ No newline at end of file diff --git a/themes/light/assets/js/store.js b/themes/light/assets/js/store.js index e21bbc878e..097c9a0dc5 100644 --- a/themes/light/assets/js/store.js +++ b/themes/light/assets/js/store.js @@ -480,18 +480,24 @@ const store = new Store({ return api.get('/config/' + (section || '')).then(res => { if (section) { const config = res.data; - return commit(ADD_CONFIG, { section, config }); + commit(ADD_CONFIG, { section, config }); + return config; } - Object.keys(res.data).forEach(section => { - const config = res.data[section]; + const sections = res.data; + Object.keys(sections).forEach(section => { + const config = sections[section]; commit(ADD_CONFIG, { section, config }); }); + + return sections; }); }, setConfig(context, { section, config }) { if (section !== 'main') { return; } + config = Object.keys(config).length ? config : store.state.config; + return api.patch('config/' + section, config); }, updateConfig(context, { section, config }) { diff --git a/themes/light/assets/js/store.js.map b/themes/light/assets/js/store.js.map index 3368609b7e..56339e1f4e 100644 --- a/themes/light/assets/js/store.js.map +++ b/themes/light/assets/js/store.js.map @@ -1 +1 @@ -{"version":3,"names":[],"mappings":"","sources":["js/store.js"],"sourcesContent":["(function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c=\"function\"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error(\"Cannot find module '\"+i+\"'\");throw a.code=\"MODULE_NOT_FOUND\",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u=\"function\"==typeof require&&require,i=0;ihello world
<ul><li>item 1</li><li>item 2</li></ul>
', 'notification-test');\n },\n setLayout: function setLayout(context, _ref7) {\n var page = _ref7.page,\n layout = _ref7.layout;\n\n return api.patch('config/main', {\n layout: _defineProperty({}, page, layout)\n // For now we reload the page since the layouts use python still\n }).then(setTimeout(function () {\n return location.reload();\n }, 500));\n }\n },\n // @TODO Add logging here\n plugins: []\n});\n\nvar websocketUrl = function () {\n var proto = window.location.protocol === 'https:' ? 'wss:' : 'ws:';\n var WSMessageUrl = '/ui';\n return proto + '//' + window.location.hostname + ':' + window.location.port + webRoot + '/ws' + WSMessageUrl;\n}();\n\nVue.use(VueNativeSock, websocketUrl, {\n store: store,\n format: 'json',\n reconnection: true, // (Boolean) whether to reconnect automatically (false)\n reconnectionAttempts: 2, // (Number) number of reconnection attempts before giving up (Infinity),\n reconnectionDelay: 1000 // (Number) how long to initially wait before attempting a new (1000)\n});\n\nwindow.store = store;\n\n},{}]},{},[1]);\n"],"file":"store.js"} \ No newline at end of file +{"version":3,"names":[],"mappings":"","sources":["js/store.js"],"sourcesContent":["(function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c=\"function\"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error(\"Cannot find module '\"+i+\"'\");throw a.code=\"MODULE_NOT_FOUND\",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u=\"function\"==typeof require&&require,i=0;ihello world
<ul><li>item 1</li><li>item 2</li></ul>
', 'notification-test');\n },\n setLayout: function setLayout(context, _ref7) {\n var page = _ref7.page,\n layout = _ref7.layout;\n\n return api.patch('config/main', {\n layout: _defineProperty({}, page, layout)\n // For now we reload the page since the layouts use python still\n }).then(setTimeout(function () {\n return location.reload();\n }, 500));\n }\n },\n // @TODO Add logging here\n plugins: []\n});\n\nvar websocketUrl = function () {\n var proto = window.location.protocol === 'https:' ? 'wss:' : 'ws:';\n var WSMessageUrl = '/ui';\n return proto + '//' + window.location.hostname + ':' + window.location.port + webRoot + '/ws' + WSMessageUrl;\n}();\n\nVue.use(VueNativeSock, websocketUrl, {\n store: store,\n format: 'json',\n reconnection: true, // (Boolean) whether to reconnect automatically (false)\n reconnectionAttempts: 2, // (Number) number of reconnection attempts before giving up (Infinity),\n reconnectionDelay: 1000 // (Number) how long to initially wait before attempting a new (1000)\n});\n\nwindow.store = store;\n\n},{}]},{},[1]);\n"],"file":"store.js"} \ No newline at end of file From b0afdef95ab44467e68c1ccc76d71d30dde63c7b Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sat, 28 Jul 2018 11:18:35 +0300 Subject: [PATCH 46/51] Fix config test errors --- tests/apiv2/test_config.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/tests/apiv2/test_config.py b/tests/apiv2/test_config.py index 40a0a3bcb4..b60ce1681c 100644 --- a/tests/apiv2/test_config.py +++ b/tests/apiv2/test_config.py @@ -26,6 +26,9 @@ def config(monkeypatch, app_config): app_config('LOCALE', (None, 'ABC')) app_locale = 'Unknown.ABC' + # postProcessing.naming + app_config('NAMING_ANIME', 3) + config_data = NonEmptyDict() config_data['anonRedirect'] = app.ANON_REDIRECT config_data['animeSplitHome'] = bool(app.ANIME_SPLIT_HOME) @@ -232,7 +235,7 @@ def test_config_get_detailed(http_client, create_url, auth_headers, config, quer @pytest.mark.gen_test -def test_config_get_detailed_bad_request(http_client, create_url, auth_headers): +def test_config_get_detailed_bad_request(http_client, create_url, auth_headers, config): # given url = create_url('/config/main/abcdef/') @@ -245,7 +248,7 @@ def test_config_get_detailed_bad_request(http_client, create_url, auth_headers): @pytest.mark.gen_test -def test_config_get_not_found(http_client, create_url, auth_headers): +def test_config_get_not_found(http_client, create_url, auth_headers, config): # given url = create_url('/config/abcdef/') From 00df4f30bba59acf17ac4da07f86b46ee0043efc Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sat, 28 Jul 2018 11:34:19 +0300 Subject: [PATCH 47/51] Update API config test --- tests/apiv2/test_config.py | 40 +++++++++++++++++++++++++++++++++++--- 1 file changed, 37 insertions(+), 3 deletions(-) diff --git a/tests/apiv2/test_config.py b/tests/apiv2/test_config.py index b60ce1681c..7902dd4eff 100644 --- a/tests/apiv2/test_config.py +++ b/tests/apiv2/test_config.py @@ -4,15 +4,16 @@ import json import platform +import pkgutil import sys -from medusa import app, classes, db, logger +from medusa import app, classes, common, db, logger from medusa.helper.mappings import NonEmptyDict from medusa.indexers.indexer_config import get_indexer_config import pytest -from six import iteritems +from six import iteritems, text_type from tornado.httpclient import HTTPError @@ -192,8 +193,40 @@ def config(monkeypatch, app_config): config_data['indexers']['config'] = get_indexer_config() config_data['postProcessing'] = NonEmptyDict() - 
config_data['postProcessing']['processMethod'] = app.PROCESS_METHOD + config_data['postProcessing']['naming'] = NonEmptyDict() + config_data['postProcessing']['naming']['pattern'] = app.NAMING_PATTERN + config_data['postProcessing']['naming']['multiEp'] = int(app.NAMING_MULTI_EP) + config_data['postProcessing']['naming']['patternAirByDate'] = app.NAMING_ABD_PATTERN + config_data['postProcessing']['naming']['patternSports'] = app.NAMING_SPORTS_PATTERN + config_data['postProcessing']['naming']['patternAnime'] = app.NAMING_ANIME_PATTERN + config_data['postProcessing']['naming']['enableCustomNamingAirByDate'] = bool(app.NAMING_CUSTOM_ABD) + config_data['postProcessing']['naming']['enableCustomNamingSports'] = bool(app.NAMING_CUSTOM_SPORTS) + config_data['postProcessing']['naming']['enableCustomNamingAnime'] = bool(app.NAMING_CUSTOM_ANIME) + config_data['postProcessing']['naming']['animeMultiEp'] = int(app.NAMING_ANIME_MULTI_EP) + config_data['postProcessing']['naming']['animeNamingType'] = int(app.NAMING_ANIME) + config_data['postProcessing']['naming']['stripYear'] = bool(app.NAMING_STRIP_YEAR) + config_data['postProcessing']['seriesDownloadDir'] = app.TV_DOWNLOAD_DIR + config_data['postProcessing']['processAutomatically'] = bool(app.PROCESS_AUTOMATICALLY) + config_data['postProcessing']['postponeIfSyncFiles'] = bool(app.POSTPONE_IF_SYNC_FILES) config_data['postProcessing']['postponeIfNoSubs'] = bool(app.POSTPONE_IF_NO_SUBS) + config_data['postProcessing']['renameEpisodes'] = bool(app.RENAME_EPISODES) + config_data['postProcessing']['createMissingShowDirs'] = bool(app.CREATE_MISSING_SHOW_DIRS) + config_data['postProcessing']['addShowsWithoutDir'] = bool(app.ADD_SHOWS_WO_DIR) + config_data['postProcessing']['moveAssociatedFiles'] = bool(app.MOVE_ASSOCIATED_FILES) + config_data['postProcessing']['nfoRename'] = bool(app.NFO_RENAME) + config_data['postProcessing']['airdateEpisodes'] = bool(app.AIRDATE_EPISODES) + config_data['postProcessing']['unpack'] = bool(app.UNPACK) + config_data['postProcessing']['deleteRarContent'] = bool(app.DELRARCONTENTS) + config_data['postProcessing']['noDelete'] = bool(app.NO_DELETE) + config_data['postProcessing']['processMethod'] = app.PROCESS_METHOD + config_data['postProcessing']['reflinkAvailable'] = bool(pkgutil.find_loader('reflink')) + config_data['postProcessing']['autoPostprocessorFrequency'] = app.AUTOPOSTPROCESSOR_FREQUENCY + config_data['postProcessing']['syncFiles'] = app.SYNC_FILES + config_data['postProcessing']['fileTimestampTimezone'] = app.FILE_TIMESTAMP_TIMEZONE + config_data['postProcessing']['allowedExtensions'] = list(app.ALLOWED_EXTENSIONS) + config_data['postProcessing']['extraScripts'] = app.EXTRA_SCRIPTS + config_data['postProcessing']['extraScriptsUrl'] = app.EXTRA_SCRIPTS_URL + config_data['postProcessing']['multiEpStrings'] = {text_type(k): v for k, v in iteritems(common.MULTI_EP_STRINGS)} return config_data @@ -220,6 +253,7 @@ def test_config_get(http_client, create_url, auth_headers, config): 'localUser', 'githubUrl', 'dbPath', + 'postProcessing' ]) def test_config_get_detailed(http_client, create_url, auth_headers, config, query): # given From c67e082e52cc897314e1918badf82a437f2d21ab Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sat, 28 Jul 2018 12:02:15 +0300 Subject: [PATCH 48/51] Add test for metadata section --- tests/apiv2/test_config.py | 65 ++++++++++++++++++++++++++++++++------ 1 file changed, 56 insertions(+), 9 deletions(-) diff --git a/tests/apiv2/test_config.py b/tests/apiv2/test_config.py index 7902dd4eff..7ab9d59b95 
100644 --- a/tests/apiv2/test_config.py +++ b/tests/apiv2/test_config.py @@ -7,19 +7,19 @@ import pkgutil import sys -from medusa import app, classes, common, db, logger +from medusa import app, classes, common, db, logger, metadata from medusa.helper.mappings import NonEmptyDict from medusa.indexers.indexer_config import get_indexer_config import pytest -from six import iteritems, text_type +from six import iteritems, itervalues, text_type from tornado.httpclient import HTTPError @pytest.fixture -def config(monkeypatch, app_config): +def config_main(monkeypatch, app_config): python_version = 'Python Test v1.2.3.4' monkeypatch.setattr(sys, 'version', python_version) app_config('PID', 4321) @@ -232,9 +232,9 @@ def config(monkeypatch, app_config): @pytest.mark.gen_test -def test_config_get(http_client, create_url, auth_headers, config): +def test_config_get(http_client, create_url, auth_headers, config_main): # given - expected = config + expected = config_main url = create_url('/config/main') @@ -255,9 +255,9 @@ def test_config_get(http_client, create_url, auth_headers, config): 'dbPath', 'postProcessing' ]) -def test_config_get_detailed(http_client, create_url, auth_headers, config, query): +def test_config_get_detailed(http_client, create_url, auth_headers, config_main, query): # given - expected = config[query] + expected = config_main[query] url = create_url('/config/main/{0}/'.format(query)) # when @@ -269,7 +269,7 @@ def test_config_get_detailed(http_client, create_url, auth_headers, config, quer @pytest.mark.gen_test -def test_config_get_detailed_bad_request(http_client, create_url, auth_headers, config): +def test_config_get_detailed_bad_request(http_client, create_url, auth_headers, config_main): # given url = create_url('/config/main/abcdef/') @@ -282,7 +282,7 @@ def test_config_get_detailed_bad_request(http_client, create_url, auth_headers, @pytest.mark.gen_test -def test_config_get_not_found(http_client, create_url, auth_headers, config): +def test_config_get_not_found(http_client, create_url, auth_headers, config_main): # given url = create_url('/config/abcdef/') @@ -292,3 +292,50 @@ def test_config_get_not_found(http_client, create_url, auth_headers, config): # then assert 404 == error.value.code + + +@pytest.fixture +def config_metadata(monkeypatch, app_config): + # initialize metadata_providers + default_config = ['0'] * 10 + providers = [ + (default_config, metadata.kodi), + (default_config, metadata.kodi_12plus), + (default_config, metadata.media_browser), + (default_config, metadata.ps3), + (default_config, metadata.wdtv), + (default_config, metadata.tivo), + (default_config, metadata.mede8er) + ] + + metadata_provider_dict = app_config('metadata_provider_dict', metadata.get_metadata_generator_dict()) + for cur_metadata_tuple in providers: + (cur_metadata_config, cur_metadata_class) = cur_metadata_tuple + tmp_provider = cur_metadata_class.metadata_class() + tmp_provider.set_config(cur_metadata_config) + monkeypatch.setitem(metadata_provider_dict, tmp_provider.name, tmp_provider) + + section_data = NonEmptyDict() + + section_data['metadataProviders'] = NonEmptyDict() + + for provider in itervalues(app.metadata_provider_dict): + json_repr = provider.to_json() + section_data['metadataProviders'][json_repr['id']] = json_repr + + return section_data + + +@pytest.mark.gen_test +def test_config_get_metadata(http_client, create_url, auth_headers, config_metadata): + # given + expected = config_metadata + + url = create_url('/config/metadata') + + # when + response = yield 
From 6a1f67214e4431113d05dfed29c2d373085296bc Mon Sep 17 00:00:00 2001
From: sharkykh
Date: Sat, 28 Jul 2018 12:15:44 +0300
Subject: [PATCH 49/51] Update store.setConfig

---
 themes-default/slim/static/js/store.js | 6 ++++--
 themes/dark/assets/js/store.js         | 6 ++++--
 themes/dark/assets/js/store.js.map     | 2 +-
 themes/light/assets/js/store.js        | 6 ++++--
 themes/light/assets/js/store.js.map    | 2 +-
 5 files changed, 14 insertions(+), 8 deletions(-)

diff --git a/themes-default/slim/static/js/store.js b/themes-default/slim/static/js/store.js
index 097c9a0dc5..b19086fbef 100644
--- a/themes-default/slim/static/js/store.js
+++ b/themes-default/slim/static/js/store.js
@@ -483,12 +483,12 @@ const store = new Store({
                 commit(ADD_CONFIG, { section, config });
                 return config;
             }
+
             const sections = res.data;
             Object.keys(sections).forEach(section => {
                 const config = sections[section];
                 commit(ADD_CONFIG, { section, config });
             });
-
             return sections;
         });
     },
@@ -496,7 +496,9 @@ const store = new Store({
         if (section !== 'main') {
             return;
         }
-        config = Object.keys(config).length ? config : store.state.config;
+
+        // If an empty config object was passed, use the current state config
+        config = Object.keys(config).length === 0 ? context.state.config : config;
 
         return api.patch('config/' + section, config);
     },
diff --git a/themes/dark/assets/js/store.js b/themes/dark/assets/js/store.js
index 097c9a0dc5..b19086fbef 100644
--- a/themes/dark/assets/js/store.js
+++ b/themes/dark/assets/js/store.js
[identical hunks to themes-default/slim/static/js/store.js above]
diff --git a/themes/dark/assets/js/store.js.map b/themes/dark/assets/js/store.js.map
index 56339e1f4e..d76b3d8af4 100644
--- a/themes/dark/assets/js/store.js.map
+++ b/themes/dark/assets/js/store.js.map
@@ -1 +1 @@
[regenerated single-line source map; its minified contents were mangled during extraction and are omitted]
diff --git a/themes/light/assets/js/store.js b/themes/light/assets/js/store.js
index 097c9a0dc5..b19086fbef 100644
--- a/themes/light/assets/js/store.js
+++ b/themes/light/assets/js/store.js
[identical hunks to themes-default/slim/static/js/store.js above]
diff --git a/themes/light/assets/js/store.js.map b/themes/light/assets/js/store.js.map
index 56339e1f4e..d76b3d8af4 100644
--- a/themes/light/assets/js/store.js.map
+++ b/themes/light/assets/js/store.js.map
@@ -1 +1 @@
[regenerated single-line source map; its minified contents were mangled during extraction and are omitted]
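The `setConfig` change above does two things: it takes the fallback from the action's own `context.state` instead of the module-level `store`, and it only falls back when the passed object is genuinely empty (`Object.keys(config).length === 0`). A language-agnostic sketch of that guard, written in Python for consistency with the rest of the series; the function and its return shape are illustrative, not the actual store API:

```python
def set_config(context_state, section, config):
    """Sketch of the store.setConfig fallback logic (illustrative only)."""
    if section != 'main':
        return None
    # If an empty config object was passed, use the current state config
    if len(config) == 0:
        config = context_state['config']
    # Stand-in for api.patch('config/' + section, config)
    return ('PATCH', 'config/' + section, config)


state = {'config': {'layout': {'home': 'poster'}}}
assert set_config(state, 'main', {}) == ('PATCH', 'config/main', state['config'])
assert set_config(state, 'main', {'x': 1}) == ('PATCH', 'config/main', {'x': 1})
```

Checking `length === 0` explicitly avoids the original truthiness test, which silently replaced any falsy value rather than only empty objects.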
From d29fe3c2faba9cf4252ebe8dcd76d228c8e7e4cb Mon Sep 17 00:00:00 2001
From: sharkykh
Date: Sat, 28 Jul 2018 16:01:46 +0300
Subject: [PATCH 50/51] Revert change to `DataGenerator.sections`

---
 medusa/server/api/v2/config.py | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git a/medusa/server/api/v2/config.py b/medusa/server/api/v2/config.py
index e12a16d24b..072028fec9 100644
--- a/medusa/server/api/v2/config.py
+++ b/medusa/server/api/v2/config.py
@@ -233,10 +233,11 @@ class DataGenerator(object):
     @classmethod
     def sections(cls):
         """Get the available section names."""
-        cls.data__ = [name[5:] for (name, function) in inspect.getmembers(cls, predicate=inspect.isfunction) if
-                      name.startswith('data_')]
-        cls.cls_data__ = cls.data__
-        return cls.cls_data__
+        return [
+            name[5:]
+            for (name, function) in inspect.getmembers(cls, predicate=inspect.isfunction)
+            if name.startswith('data_')
+        ]
 
     @classmethod
     def get_data(cls, section):
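The restored `sections` implementation discovers its section names by introspection: `inspect.getmembers` with the `isfunction` predicate collects every `data_*` member and strips the `data_` prefix. A runnable toy version of the same pattern; the `data_*` methods here are hypothetical examples, not the real API sections:

```python
import inspect


class DataGenerator(object):
    """Toy stand-in demonstrating the name-based section discovery."""

    @classmethod
    def sections(cls):
        # Collect every data_* function and strip the 'data_' prefix,
        # exactly the pattern restored by the revert above.
        return [
            name[5:]
            for (name, function) in inspect.getmembers(cls, predicate=inspect.isfunction)
            if name.startswith('data_')
        ]

    @staticmethod
    def data_main():
        return {}

    @staticmethod
    def data_metadata():
        return {}


print(DataGenerator.sections())  # ['main', 'metadata']
```

Recomputing the list on each call keeps `sections()` free of the class-level caching attributes the reverted version introduced.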
From b7275b89a4ad96f8f1eae96ac4bb1ad43ac3427f Mon Sep 17 00:00:00 2001
From: sharkykh
Date: Sat, 28 Jul 2018 19:08:49 +0300
Subject: [PATCH 51/51] Fix incorrect identifier on `changeEpisodeStatuses`
 (#4783)

Fixes #4776
---
 medusa/server/web/manage/handler.py          | 10 ++++++++--
 themes-default/slim/static/js/manage/init.js |  2 +-
 themes/dark/assets/js/manage/init.js         |  2 +-
 themes/dark/assets/js/manage/init.js.map     |  2 +-
 themes/light/assets/js/manage/init.js        |  2 +-
 themes/light/assets/js/manage/init.js.map    |  2 +-
 6 files changed, 13 insertions(+), 7 deletions(-)

diff --git a/medusa/server/web/manage/handler.py b/medusa/server/web/manage/handler.py
index d4256f9f50..d1b97007c2 100644
--- a/medusa/server/web/manage/handler.py
+++ b/medusa/server/web/manage/handler.py
@@ -170,10 +170,16 @@ def changeEpisodeStatuses(self, oldStatus, newStatus, *args, **kwargs):
                 status_list + [cur_indexer_id, cur_series_id]
             )
 
-            all_eps = ['{season}x{episode}'.format(season=x[b'season'], episode=x[b'episode']) for x in all_eps_results]
+            all_eps = ['s{season}e{episode}'.format(season=x[b'season'], episode=x[b'episode']) for x in all_eps_results]
             to_change[cur_indexer_id, cur_series_id] = all_eps
 
-            self.setStatus(indexer_id_to_name(int(cur_indexer_id)), cur_series_id, '|'.join(to_change[(cur_indexer_id, cur_series_id)]), newStatus, direct=True)
+            self.setStatus(
+                indexername=indexer_id_to_name(int(cur_indexer_id)),
+                seriesid=cur_series_id,
+                eps='|'.join(to_change[(cur_indexer_id, cur_series_id)]),
+                status=newStatus,
+                direct=True
+            )
 
         return self.redirect('/manage/episodeStatuses/')
 
diff --git a/themes-default/slim/static/js/manage/init.js b/themes-default/slim/static/js/manage/init.js
index ae2631236f..9a8fd09bc1 100644
--- a/themes-default/slim/static/js/manage/init.js
+++ b/themes-default/slim/static/js/manage/init.js
@@ -4,7 +4,7 @@ MEDUSA.manage.init = function() {
         const series = indexerId + '-' + seriesId;
 
         row += ' ';
-        row += ' ';
+        row += ' ';
         row += ' ' + season + 'x' + episode + '';
         row += ' ' + name + '';
         row += ' ';
[the HTML inside these string literals was stripped during extraction; judging from the handler.py change above, the -/+ pair updates the episode part of the checkbox's name attribute from the old <season>x<episode> form to the new s<season>e<episode> form]
diff --git a/themes/dark/assets/js/manage/init.js b/themes/dark/assets/js/manage/init.js
index ae2631236f..9a8fd09bc1 100644
--- a/themes/dark/assets/js/manage/init.js
+++ b/themes/dark/assets/js/manage/init.js
[identical hunk to themes-default/slim/static/js/manage/init.js above]
diff --git a/themes/dark/assets/js/manage/init.js.map b/themes/dark/assets/js/manage/init.js.map
index 7740f91baf..1bed467529 100644
--- a/themes/dark/assets/js/manage/init.js.map
+++ b/themes/dark/assets/js/manage/init.js.map
@@ -1 +1 @@
[regenerated single-line source map; its minified contents were mangled during extraction and are omitted]
diff --git a/themes/light/assets/js/manage/init.js b/themes/light/assets/js/manage/init.js
index ae2631236f..9a8fd09bc1 100644
--- a/themes/light/assets/js/manage/init.js
+++ b/themes/light/assets/js/manage/init.js
[identical hunk to themes-default/slim/static/js/manage/init.js above]
diff --git a/themes/light/assets/js/manage/init.js.map b/themes/light/assets/js/manage/init.js.map
index 7740f91baf..1bed467529 100644
--- a/themes/light/assets/js/manage/init.js.map
+++ b/themes/light/assets/js/manage/init.js.map
@@ -1 +1 @@
[regenerated single-line source map; its minified contents were mangled during extraction and are omitted]