pylint manual_snatch.py
fernandog committed Mar 18, 2016
1 parent 8199ed4 commit f7d90d5
Showing 2 changed files with 18 additions and 17 deletions.
2 changes: 1 addition & 1 deletion contrib/nzbToMedia
33 changes: 17 additions & 16 deletions sickbeard/manual_snatch.py
@@ -50,7 +50,7 @@ def getQualityClass(ep_obj):
 
 
 def getEpisode(show, season=None, episode=None, absolute=None):
-    """ Get a specific episode object based on show, season and episode number
+    """ Get a specific episode object based on show, season and episode number
     :param show: Season number
     :param season: Season number
@@ -86,7 +86,7 @@ def getEpisodes(searchThread, searchstatus):
     showObj = Show.find(sickbeard.showList, int(searchThread.show.indexerid))
 
     if not showObj:
-        logger.log(u'No Show Object found for show with indexerID: ' + str(searchThread.show.indexerid), logger.ERROR)
+        logger.log(u'No Show Object found for show with indexerID: {}'.format(searchThread.show.indexerid), logger.ERROR)
         return results
 
     if isinstance(searchThread, (sickbeard.search_queue.ManualSearchQueueItem, sickbeard.search_queue.ManualSnatchQueueItem)):
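
The hunk above swaps manual string concatenation, which needs an explicit str() cast, for str.format(); this is the kind of cleanup pylint's checks tend to prompt. A minimal standalone sketch of the two styles, using a plain local variable instead of SickRage's logger (the indexer id value here is made up):

    # Stand-in value; the real code formats searchThread.show.indexerid.
    indexerid = 12345

    old_style = u'No Show Object found for show with indexerID: ' + str(indexerid)
    new_style = u'No Show Object found for show with indexerID: {}'.format(indexerid)

    # Both build the same message, but format() does the int-to-text
    # conversion itself, so no explicit str() cast is needed.
    assert old_style == new_style
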
@@ -109,8 +109,7 @@ def getEpisodes(searchThread, searchstatus):
             'searchstatus': searchstatus,
             'status': statusStrings[epObj.status],
             'quality': getQualityClass(epObj),
-            'overview': Overview.overviewStrings[showObj.getOverview(epObj.status)]
-        })
+            'overview': Overview.overviewStrings[showObj.getOverview(epObj.status)]})
 
     return results

@@ -155,7 +154,7 @@ def collectEpisodesFromSearchThread(show):
     return episodes
 
 
-def get_provider_cache_results(indexer, show_all_results=None, perform_search=None, **search_show):
+def get_provider_cache_results(indexer, show_all_results=None, perform_search=None, **search_show):  # pylint: disable=too-many-locals,unused-argument
     """
     Check all provider cache tables for search results
     """
@@ -168,9 +167,8 @@ def get_provider_cache_results(indexer, show_all_results=None, perform_search=None, **search_show):
     showObj = Show.find(sickbeard.showList, int(show))
 
     main_db_con = db.DBConnection('cache.db')
-    sql_return = {}
+    sql_return = found_items = []
     provider_results = {'last_prov_updates': {}, 'error': {}, 'found_items': []}
-    found_items = []
 
     providers = [x for x in sickbeard.providers.sortedProviderList(sickbeard.RANDOMIZE_PROVIDERS) if x.is_active() and x.enable_daily]
     for curProvider in providers:
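
One behavioural note on the new initialisation above: the chained assignment sql_return = found_items = [] binds both names to a single shared list, whereas the two separate statements it replaces created independent objects, so an append through either name is visible through the other until one of them is rebound. A small standalone sketch of that aliasing (the names are reused purely for illustration):

    # Chained assignment: both names refer to the very same list object.
    sql_return = found_items = []
    found_items.append('cached result')
    print(sql_return)                 # ['cached result'] -- the alias sees the append
    print(sql_return is found_items)  # True

    # The replaced code created two distinct objects, so no aliasing.
    sql_return = {}
    found_items = []
    print(sql_return is found_items)  # False
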
@@ -182,7 +180,7 @@ def get_provider_cache_results(indexer, show_all_results=None, perform_search=None, **search_show):
         # TODO: the implicit sqlite rowid is used, should be replaced with an explicit PK column
         # If table doesn't exist, start a search to create table and new columns seeders, leechers and size
         if table_exists and 'seeders' in columns and 'leechers' in columns and 'size' in columns:
 
             common_sql = "SELECT rowid, ? as 'provider_type', ? as 'provider_image', \
                           ? as 'provider', ? as 'provider_id', name, season, \
                           episodes, indexerid, url, time, (select max(time) from '{provider_id}') as lastupdate, \
@@ -191,11 +189,13 @@ def get_provider_cache_results(indexer, show_all_results=None, perform_search=None, **search_show):
             additional_sql = " AND episodes LIKE ? AND season = ?"
 
             if not int(show_all_results):
-                sql_return = main_db_con.select(common_sql + additional_sql, \
-                    (curProvider.provider_type.title(),curProvider.image_name(), curProvider.name, curProvider.get_id(), show, "|%" + episode + "|%", season))
+                sql_return = main_db_con.select(common_sql + additional_sql,
+                                                (curProvider.provider_type.title(), curProvider.image_name(),
+                                                 curProvider.name, curProvider.get_id(), show, "|%" + episode + "|%", season))
             else:
-                sql_return = main_db_con.select(common_sql, \
-                    (curProvider.provider_type.title(),curProvider.image_name(), curProvider.name, curProvider.get_id(), show))
+                sql_return = main_db_con.select(common_sql,
+                                                (curProvider.provider_type.title(), curProvider.image_name(),
+                                                 curProvider.name, curProvider.get_id(), show))
 
             if sql_return:
                 for item in sql_return:
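
The cache query above relies on qmark ('?') placeholders: the provider metadata, the show id, and the '|%' + episode + '|%' LIKE pattern are passed as a parameter tuple rather than spliced into the SQL text, and the SELECT reads the implicit sqlite rowid that the TODO wants replaced with an explicit primary key. A minimal sqlite3 sketch of that binding style, assuming DBConnection.select ultimately hands the query string and tuple to a sqlite3 cursor; the table name and row below are invented for illustration:

    import sqlite3

    con = sqlite3.connect(':memory:')
    con.execute("CREATE TABLE provider_cache (name TEXT, season NUMERIC, episodes TEXT, time NUMERIC)")
    con.execute("INSERT INTO provider_cache VALUES ('Some.Show.S01E02.720p', 1, '|2|', 1458259200)")

    # qmark placeholders: the driver binds the values, nothing is concatenated into the SQL.
    sql = "SELECT rowid, ? as 'provider', name, season FROM provider_cache WHERE episodes LIKE ? AND season = ?"
    rows = con.execute(sql, ('provider_cache', '|%' + '2' + '|%', 1)).fetchall()
    print(rows)  # [(1, 'provider_cache', 'Some.Show.S01E02.720p', 1)] -- rowid comes for free
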
@@ -211,12 +211,13 @@ def get_provider_cache_results(indexer, show_all_results=None, perform_search=None, **search_show):
         # retrieve the episode object and fail if we can't get one
         ep_obj = getEpisode(show, season, episode)
         if isinstance(ep_obj, str):
-            #ui.notifications.error(u"Something went wrong when starting the manual search for show {0}, and episode: {1}x{2}".
-            # format(showObj.name, season, episode))
-            provider_results['error'] = 'Something went wrong when starting the manual search for show {0}, and episode: {1}x{2}'.format(showObj.name, season, episode)
+            # ui.notifications.error(u"Something went wrong when starting the manual search for show {0}, and episode: {1}x{2}".
+            # format(showObj.name, season, episode))
+            provider_results['error'] = 'Something went wrong when starting the manual search for show {0}, \
+                and episode: {1}x{2}'.format(showObj.name, season, episode)
 
         # make a queue item for it and put it on the queue
-        ep_queue_item = search_queue.ManualSearchQueueItem(ep_obj.show, ep_obj, bool(int(down_cur_quality)), True)
+        ep_queue_item = search_queue.ManualSearchQueueItem(ep_obj.show, ep_obj, bool(int(down_cur_quality)), True)  # pylint: disable=maybe-no-member
 
         sickbeard.searchQueueScheduler.action.add_item(ep_queue_item)

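Both pylint pragmas added in this commit (too-many-locals,unused-argument on the def line and maybe-no-member on the queue-item line) are scoped suppressions: a trailing # pylint: disable= comment silences the named messages only for that line, or for the block whose opening line carries it, rather than for the whole module. A toy sketch of the idiom; the function and arguments here are invented and are not SickRage code:

    def cache_lookup(indexer, show_all_results=None, **search_show):  # pylint: disable=unused-argument
        # The pragma sits on the def line, so unused-argument is silenced for
        # this function only; the rest of the module is still checked.
        return indexer


    print(cache_lookup(1, extra_key='ignored'))  # prints 1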