Moved the creation of the searchResult to the searchQueue item.
* Changed searchResult to snake_case.
* Removed unneeded params from the snatchSelection() method.
* Changed the mako template to stop sending unneeded params; rowid is enough.
* Added a check when getting the show_obj; chances are slim, but a user might leave a snatchSelection window open while deleting the show.
* Some PEP 8 changes.
* Added docstrings to webserve.py and search_queue.py methods.
p0psicles authored and fernandog committed Apr 1, 2016
1 parent e5c203f commit 7b8878e
Showing 3 changed files with 75 additions and 42 deletions.
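Taken together, the changes boil down to this: the web handler now only fetches the cached provider row, and the new-style ManualSnatchQueueItem rebuilds the SearchResult inside run(). A condensed, non-literal sketch assembled from the diff below (error handling omitted):

    # webserve.Home.pickManualSnatch, simplified: fetch the cached row by provider + rowid
    main_db_con = db.DBConnection('cache.db')
    cached_result = main_db_con.action(
        "SELECT * FROM '%s' WHERE rowid = ?" % provider, [rowid], fetchone=True)

    # resolve the show and build the episode segment from the pipe-delimited 'episodes' column
    show_obj = Show.find(sickbeard.showList, int(cached_result['indexerid']))
    ep_objs = [TVEpisode(show_obj, int(cached_result['season']), int(ep))
               for ep in cached_result['episodes'].strip("|").split("|") if ep]

    # the queue item receives the raw pieces; ManualSnatchQueueItem.run() turns them into a
    # SearchResult via providers.getProviderClass(provider).get_result(segment) and snatches it
    snatch_queue_item = search_queue.ManualSnatchQueueItem(show_obj, ep_objs, provider, cached_result)
    sickbeard.searchQueueScheduler.action.add_item(snatch_queue_item)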
2 changes: 1 addition & 1 deletion gui/slick/views/snatchSelection.mako
@@ -214,7 +214,7 @@
<td align="center">${pretty_file_size(hItem["size"]) if hItem["size"] > -1 else 'N/A'}</td>
<td align="center">${hItem["provider_type"]}</td>
<td align="center">${datetime.datetime.fromtimestamp(hItem["time"]).strftime(sickbeard.DATE_PRESET+" "+sickbeard.TIME_PRESET)}</td>
<td align="center" class="col-search" width="5%"><a class="epManualSnatch" id="${str(show.indexerid)}x${season}x${episode}" name="${str(show.indexerid)}x${season}x${episode}" href="${srRoot}/home/pickManualSnatch?provider=${hItem["provider_id"]}&amp;rowid=${hItem["rowid"]}&show=${show.indexerid}&amp;season=${season}&amp;episode=${episode}"><img src="${srRoot}/images/download.png" width="16" height="16" alt="search" title="Download selected episode" /></a></td>
<td align="center" class="col-search" width="5%"><a class="epManualSnatch" id="${str(show.indexerid)}x${season}x${episode}" name="${str(show.indexerid)}x${season}x${episode}" href="${srRoot}/home/pickManualSnatch?provider=${hItem["provider_id"]}&amp;rowid=${hItem["rowid"]}"><img src="${srRoot}/images/download.png" width="16" height="16" alt="search" title="Download selected episode" /></a></td>
</tr>
% endfor
</tbody>
59 changes: 43 additions & 16 deletions sickbeard/search_queue.py
Expand Up @@ -56,7 +56,7 @@ def is_in_queue(self, show, segment):

def is_ep_in_queue(self, segment):
for cur_item in self.queue:
if isinstance(cur_item, (ManualSearchQueueItem, FailedQueueItem)) and cur_item.segment == segment:
if isinstance(cur_item, (ManualSearchQueueItem, FailedQueueItem, ManualSnatchQueueItem)) and cur_item.segment == segment:
return True
return False

@@ -126,7 +126,7 @@ def add_item(self, item):
elif isinstance(item, (ManualSearchQueueItem, FailedQueueItem)) and not self.is_ep_in_queue(item.segment):
# manual and failed searches
generic_queue.GenericQueue.add_item(self, item)
elif isinstance(item, ManualSnatchQueueItem):
elif isinstance(item, ManualSnatchQueueItem) and not self.is_ep_in_queue(item.segment):
# manual and failed searches
generic_queue.GenericQueue.add_item(self, item)
else:
@@ -203,7 +203,7 @@ def run(self):
# just use the first result for now
if searchResult[0].seeders not in (-1, None) and searchResult[0].leechers not in (-1, None):
logger.log(u"Downloading {0} with {1} seeders and {2} leechers from {3}".
format(searchResult[0].name,
format(searchResult[0].name,
searchResult[0].seeders, searchResult[0].leechers, searchResult[0].provider.name))
else:
logger.log(u"Downloading {0} from {1}".format(searchResult[0].name, searchResult[0].provider.name))
@@ -234,40 +234,66 @@ def run(self):


class ManualSnatchQueueItem(generic_queue.QueueItem):
def __init__(self, searchResult):
"""
A queue item that can be used to queue the snatch of a search result.
Currently used for the snatchSelection feature.
@param show: A show object
@param segment: A list of episode objects
@param provider: The provider id. For example nyaatorrent and not NyaaTorrent. Or usernet_crawler and not Usenet-Crawler
@param cached_result: An sql result of the searched result retrieved from the provider cache table.
@return: The run() methods snatches the episode(s) if possible.
"""
def __init__(self, show, segment, provider, cached_result):
generic_queue.QueueItem.__init__(self, u'Manual Snatch', MANUAL_SNATCH)
self.priority = generic_queue.QueuePriorities.HIGH

self.success = None
self.started = None
self.results = None
self.searchResult = searchResult
self.provider = provider
self.segment = segment
self.show = show
self.cached_result = cached_result

def run(self):
generic_queue.QueueItem.run(self)
self.started = True

search_result = sickbeard.providers.getProviderClass(self.provider).get_result(self.segment)
search_result.show = self.show
search_result.url = self.cached_result['url']
search_result.quality = int(self.cached_result['quality'])
search_result.name = self.cached_result['name']
search_result.size = int(self.cached_result['size'])
search_result.seeders = int(self.cached_result['seeders'])
search_result.leechers = int(self.cached_result['leechers'])
search_result.release_group = self.cached_result['release_group']
search_result.version = int(self.cached_result['version'])

try:
logger.log(u"Beginning to manual snatch release: {}".format(self.searchResult.name))
logger.log(u"Beginning to manual snatch release: {0}".format(search_result.name))

if self.searchResult:
if self.searchResult.seeders not in (-1, None) and self.searchResult.leechers not in (-1, None):
if search_result:
if search_result.seeders not in (-1, None) and search_result.leechers not in (-1, None):
logger.log(u"Downloading {0} with {1} seeders and {2} leechers from {3}".
format(self.searchResult.name,
self.searchResult.seeders, self.searchResult.seeders, self.searchResult.provider.name))
format(search_result.name,
search_result.seeders, search_result.leechers, search_result.provider.name))
else:
logger.log(u"Downloading {0} from {1}".format(self.searchResult.name, self.searchResult.provider.name))
self.success = search.snatchEpisode(self.searchResult)
logger.log(u"Downloading {0} from {1}".format(search_result.name, search_result.provider.name))
self.success = search.snatchEpisode(search_result)
else:
logger.log(u"Unable to snatch release: {}".format(self.searchResult.name))
logger.log(u"Unable to snatch release: {0}".format(search_result.name))

# give the CPU a break
time.sleep(common.cpu_presets[sickbeard.CPU_PRESET])

except Exception:
self.success = False
logger.log(traceback.format_exc(), logger.DEBUG)
ui.notifications.message('Error while snatching selected result', "Couldn't snatch the result for <i>%s</i>".format(self.searchResult.name))
ui.notifications.message('Error while snatching selected result',
"Couldn't snatch the result for <i>{0}</i>".format(search_result.name))

if self.success is None:
self.success = False
@@ -300,8 +326,9 @@ def run(self):
for result in searchResult:
# just use the first result for now
if result.seeders not in (-1, None) and result.leechers not in (-1, None):
logger.log(u"Downloading {0} with {1} seeders and {2} leechers from {3}".format(result.name,
result.seeders, result.leechers, result.provider.name))
logger.log(u"Downloading {0} with {1} seeders and {2} leechers from {3}".
format(result.name,
result.seeders, result.leechers, result.provider.name))
else:
logger.log(u"Downloading {0} from {1}".format(result.name, result.provider.name))
self.success = search.snatchEpisode(result)
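One side effect of extending is_ep_in_queue() above is that add_item() now declines to queue a second manual snatch for a segment that is already being searched or snatched. A hedged illustration (show_obj, ep_objs, provider and cached_result are assumed to come from the webserve handler shown further down):

    queue = sickbeard.searchQueueScheduler.action  # the SearchQueue instance

    item = search_queue.ManualSnatchQueueItem(show_obj, ep_objs, provider, cached_result)
    queue.add_item(item)  # queued: nothing else is searching/snatching this segment yet
    queue.add_item(item)  # skipped: is_ep_in_queue(item.segment) is now True for snatch items too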
56 changes: 31 additions & 25 deletions sickbeard/webserve.py
@@ -1401,55 +1401,61 @@ def titler(x):
action="displayShow"
)

def pickManualSnatch(self, show=None, season=None, episode=None, provider=None, rowid=None):
def pickManualSnatch(self, provider=None, rowid=None):
"""
Tries to Perform the snatch for a manualSelected episode, episodes or season pack.
@param provider: The provider id, passed as usenet_crawler and not the provider name (Usenet-Crawler)
@param rowid: The provider's cache table's rowid. (currently the implicit sqlites rowid is used, needs to be replaced in future)
@return: A json with a {'success': true} or false.
"""

# Try to retrieve the cached result from the providers cache table.
# TODO: the implicit sqlite rowid is used, should be replaced with an explicit PK column

try:
main_db_con = db.DBConnection('cache.db')
sql_return = main_db_con.action("SELECT * FROM '%s' WHERE rowid = ?" %
(sickbeard.providers.getProviderClass(provider).get_id()), [rowid], fetchone=True)
cached_result = main_db_con.action("SELECT * FROM '%s' WHERE rowid = ?" %
provider, [rowid], fetchone=True)
except Exception as e:
return self._genericMessage("Error", "Couldn't read cached results. Error: {}".format(e))

if not (sql_return['url'] or sql_return['quality'] or sql_return['name'] or provider or episode):
if not all([cached_result['url'],
cached_result['quality'],
cached_result['name'],
cached_result['indexerid'],
cached_result['season'],
provider]):
return self._genericMessage("Error", "Cached result doesn't have all needed info to snatch episode")

try:
show = int(show) # fails if show id ends in a period SickRage/sickrage-issues#65
show = int(cached_result['indexerid']) # fails if show id ends in a period SickRage/sickrage-issues#65
show_obj = Show.find(sickbeard.showList, show)
except (ValueError, TypeError):
return self._genericMessage("Error", "Invalid show ID: {}".format(show))
return self._genericMessage("Error", "Invalid show ID: {0}".format(show))

if not show_obj:
return self._genericMessage("Error", "Could not find a show with id {0} in the list of shows, did you remove the show?".format(show))

# Create a list of episode object(s)
# if multi-episode: |1|2|
# if single-episode: |1|
# TODO: Handle Season Packs: || (no episode)
episodes = sql_return['episodes'].strip("|").split("|")
episodes = cached_result['episodes'].strip("|").split("|")
ep_objs = []
for episode in episodes:
if episode:
ep_objs.append(TVEpisode(show_obj, int(season), int(episode)))

# TODO: Can this be moved to the ManualSnatchQueueItem?
search_result = sickbeard.providers.getProviderClass(provider).get_result(ep_objs)
search_result.show = show_obj
search_result.url = sql_return['url']
search_result.quality = int(sql_return['quality'])
search_result.name = sql_return['name']
search_result.size = int(sql_return['size'])
search_result.seeders = int(sql_return['seeders'])
search_result.leechers = int(sql_return['leechers'])
search_result.release_group = sql_return['release_group']
search_result.version = int(sql_return['version'])

ep_queue_item = search_queue.ManualSnatchQueueItem(search_result)
ep_objs.append(TVEpisode(show_obj, int(cached_result['season']), int(episode)))

sickbeard.searchQueueScheduler.action.add_item(ep_queue_item)
# Create the queue item
snatch_queue_item = search_queue.ManualSnatchQueueItem(show_obj, ep_objs, provider, cached_result)

# Add the queue item to the queue
sickbeard.searchQueueScheduler.action.add_item(snatch_queue_item)

while ep_queue_item.success is not False:
if ep_queue_item.started and ep_queue_item.success:
while snatch_queue_item.success is not False:
if snatch_queue_item.started and snatch_queue_item.success:
return json.dumps({'result': 'success'})
time.sleep(1)

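For reference, the 'episodes' column in the provider cache is pipe-delimited, so the parsing above yields one TVEpisode per non-empty entry; a quick worked example (values hypothetical):

    >>> "|1|2|".strip("|").split("|")   # multi-episode result
    ['1', '2']
    >>> "|1|".strip("|").split("|")     # single episode
    ['1']
    >>> "||".strip("|").split("|")      # season pack (no episode), handling is still a TODO
    ['']                                # the `if episode:` guard skips this empty entry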
