Improve multi-episode and season snatches. Fixes #229. Fixes #4750 (#4675)

* Rename Season search mode to Backlog search mode

* Improve multi-episode and season snatches. Fixes #229

* Remove unused import

* Add support for multi-episode cache results, remove naive quality comparisons

* Add TODO

* Better formatting

* Remove support for downloading whole seasons in place of multi-episodes (torrents only)

* Use plurals for better representation

* Review

* Update CHANGELOG.md

* Moved v0.2.9 changelog items to 'Unreleased'
medariox authored and p0psicles committed Aug 1, 2018
1 parent 7daab93 commit 55bfdaf
Showing 5 changed files with 129 additions and 140 deletions.
8 changes: 6 additions & 2 deletions CHANGELOG.md
@@ -1,12 +1,16 @@
 ## Unreleased
 
-#### Fixes
-
 #### New Features
 
 #### Improvements
 - Converted /config/postProcessing to a Vue component ([#4259](https://github.com/pymedusa/Medusa/pull/4259))
 
+#### Fixes
+- Fixed error when changing episode status from episode status management ([#4783](https://github.com/pymedusa/Medusa/pull/4783))
+- Fixed multi-episode snatches not being marked as snatched in history ([#229](https://github.com/pymedusa/Medusa/issues/229))
+- Fixed whole seasons being downloaded as multi-episode replacement ([#4750](https://github.com/pymedusa/Medusa/issues/4750))
+
+
 -----
 
 ## 0.2.8 (2018-07-28)
7 changes: 5 additions & 2 deletions medusa/classes.py
@@ -211,8 +211,11 @@ def add_result_to_cache(self, cache):
 
     def create_episode_object(self):
         """Use this result to create an episode segment out of it."""
-        if self.actual_season and self.actual_episodes and self.series:
-            self.episodes = [self.series.get_episode(self.actual_season, ep) for ep in self.actual_episodes]
+        if self.actual_season and self.series:
+            if self.actual_episodes:
+                self.episodes = [self.series.get_episode(self.actual_season, ep) for ep in self.actual_episodes]
+            else:
+                self.episodes = self.series.get_all_episodes(self.actual_season)
         return self.episodes
 
     def finish_search_result(self, provider):
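Note: the net effect of the classes.py change is that a result carrying no explicit episode numbers now expands to every episode of its season instead of yielding nothing. A minimal sketch of that control flow, using invented stand-ins rather than Medusa's real Series and search-result objects:

class StubSeries(object):
    """Invented stand-in for Medusa's Series object."""

    def __init__(self, episodes_by_season):
        self.episodes_by_season = episodes_by_season  # e.g. {1: [1, 2, 3]}

    def get_episode(self, season, episode):
        return (season, episode)

    def get_all_episodes(self, season):
        return [(season, ep) for ep in self.episodes_by_season[season]]


def create_episode_object(series, actual_season, actual_episodes):
    # Explicit episode numbers win; no episode numbers now means "whole season".
    if actual_season and series:
        if actual_episodes:
            return [series.get_episode(actual_season, ep) for ep in actual_episodes]
        return series.get_all_episodes(actual_season)
    return []


series = StubSeries({1: [1, 2, 3]})
print(create_episode_object(series, 1, [2, 3]))  # multi-episode: [(1, 2), (1, 3)]
print(create_episode_object(series, 1, []))      # season pack: [(1, 1), (1, 2), (1, 3)]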
40 changes: 17 additions & 23 deletions medusa/providers/generic_provider.py
@@ -246,21 +246,22 @@ def find_search_results(self, series, episodes, search_mode, forced_search=False
 
         results = {}
         items_list = []
-        season_search = (len(episodes) > 1 or manual_search_type == 'season') and search_mode == 'sponly'
 
         for episode in episodes:
             if not manual_search:
-                cache_result = self.cache.search_cache(episode, forced_search=forced_search,
-                                                       down_cur_quality=download_current_quality)
-                if cache_result:
-                    if episode.episode not in results:
-                        results[episode.episode] = cache_result
-                    else:
-                        results[episode.episode].extend(cache_result)
-
+                cache_results = self.cache.find_needed_episodes(
+                    episode, forced_search=forced_search, down_cur_quality=download_current_quality
+                )
+                if cache_results:
+                    for episode_no in cache_results:
+                        if episode_no not in results:
+                            results[episode_no] = cache_results[episode_no]
+                        else:
+                            results[episode_no] += cache_results[episode_no]
+                    continue
+
             search_strings = []
+            season_search = (len(episodes) > 1 or manual_search_type == 'season') and search_mode == 'sponly'
             if season_search:
                 search_strings = self._get_season_search_strings(episode)
             elif search_mode == 'eponly':
@@ -272,13 +273,11 @@ def find_search_results(self, series, episodes, search_mode, forced_search=False
                     search_string, ep_obj=episode, manual_search=manual_search
                 )
 
-            # In season search, we can't loop in episodes lists as we only need one episode to get the season string
+            # In season search, we can't loop in episodes lists as we
+            # only need one episode to get the season string
             if search_mode == 'sponly':
                 break
 
-        if len(results) == len(episodes):
-            return results
-
         # Remove duplicate items
         unique_items = self.remove_duplicate_mappings(items_list)
         log.debug('Found {0} unique items', len(unique_items))
@@ -302,8 +301,6 @@ def find_search_results(self, series, episodes, search_mode, forced_search=False
         # unpack all of the quality lists into a single sorted list
         items_list = list(sorted_items)
 
-        cl = []
-
         # Move through each item and parse it into a quality
         search_results = []
         for item in items_list:
@@ -442,6 +439,7 @@ def find_search_results(self, series, episodes, search_mode, forced_search=False
             search_result.actual_season = int(sql_results[0][b'season'])
             search_result.actual_episodes = [int(sql_results[0][b'episode'])]
 
+        cl = []
         # Iterate again over the search results, and see if there is anything we want.
         for search_result in search_results:
 
@@ -457,15 +455,15 @@ def find_search_results(self, series, episodes, search_mode, forced_search=False
 
             log.debug('Found result {0} at {1}', search_result.name, search_result.url)
 
-            episode_object = search_result.create_episode_object()
+            search_result.create_episode_object()
             # result = self.get_result(episode_object, search_result)
             search_result.finish_search_result(self)
 
-            if not episode_object:
+            if not search_result.actual_episodes:
                 episode_number = SEASON_RESULT
                 log.debug('Found season pack result {0} at {1}', search_result.name, search_result.url)
-            elif len(episode_object) == 1:
-                episode_number = episode_object[0].episode
+            elif len(search_result.actual_episodes) == 1:
+                episode_number = search_result.actual_episode
                 log.debug('Found single episode result {0} at {1}', search_result.name, search_result.url)
             else:
                 episode_number = MULTI_EP_RESULT
@@ -520,10 +518,6 @@ def make_id(name):
 
         return re.sub(r'[^\w\d_]', '_', str(name).strip().lower())
 
-    def search_rss(self, episodes):
-        """Find cached needed episodes."""
-        return self.cache.find_needed_episodes(episodes)
-
     def seed_ratio(self):
        """Return ratio."""
        return ''
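Note: after this change the provider cache and the daily search exchange one shape, a dict keyed by episode number, where season packs and multi-episode hits sit under dedicated sentinel keys and value lists merge across providers. A rough sketch of that merge; the sentinel values and release names below are invented for illustration, Medusa defines its own constants:

# Invented sentinel keys standing in for Medusa's real constants.
SEASON_RESULT = -1
MULTI_EP_RESULT = -2


def merge_cache_results(results, cache_results):
    """Fold one provider's {episode_no: [hits]} dict into the running total."""
    for episode_no, hits in cache_results.items():
        if episode_no not in results:
            results[episode_no] = list(hits)
        else:
            results[episode_no] += hits
    return results


results = {}
merge_cache_results(results, {1: ['Show.S01E01.720p'], MULTI_EP_RESULT: ['Show.S01E01E02.720p']})
merge_cache_results(results, {1: ['Show.S01E01.1080p'], SEASON_RESULT: ['Show.S01.1080p']})
print(sorted(results))  # [-2, -1, 1]
print(results[1])       # ['Show.S01E01.720p', 'Show.S01E01.1080p'], merged across providers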
131 changes: 57 additions & 74 deletions medusa/search/core.py
@@ -11,7 +11,6 @@
 import os
 import threading
 import time
-from builtins import str
 
 from medusa import (
     app,
@@ -476,87 +475,108 @@ def wanted_episodes(series_obj, from_date):
                 u'reason': should_search_reason,
             }
         )
 
         ep_obj = series_obj.get_episode(episode[b'season'], episode[b'episode'])
-        ep_obj.wanted_quality = [i for i in all_qualities if i > cur_quality]
+        ep_obj.wanted_quality = [
+            quality
+            for quality in all_qualities
+            if Quality.is_higher_quality(
+                cur_quality, quality, allowed_qualities, preferred_qualities
+            )
+        ]
         wanted.append(ep_obj)
 
     return wanted
 
 
 def search_for_needed_episodes(force=False):
-    """
-    Check providers for details on wanted episodes.
+    """Search providers for needed episodes.
 
-    :return: episodes we have a search hit for
+    :param force: run the search even if no episodes are needed
+    :return: list of found episodes
     """
-    found_results = {}
-
     show_list = app.showList
     from_date = datetime.date.fromordinal(1)
     episodes = []
 
     for cur_show in show_list:
         if cur_show.paused:
-            log.debug(u'Not checking for needed episodes of {0} because the show is paused', cur_show.name)
+            log.debug(
+                u'Not checking for needed episodes of {0} because the show is paused',
+                cur_show.name,
+            )
             continue
         episodes.extend(wanted_episodes(cur_show, from_date))
 
     if not episodes and not force:
         # nothing wanted so early out, ie: avoid whatever arbitrarily
         # complex thing a provider cache update entails, for example,
         # reading rss feeds
-        return list(itervalues(found_results))
-
-    original_thread_name = threading.currentThread().name
+        return []
 
     providers = enabled_providers(u'daily')
 
     if not providers:
-        log.warning(u'No NZB/Torrent providers found or enabled in the application config for daily searches.'
-                    u' Please check your settings')
-        return list(itervalues(found_results))
+        log.warning(
+            u'No NZB/Torrent providers found or enabled in the application config for daily searches.'
+            u' Please check your settings'
+        )
+        return []
 
+    original_thread_name = threading.currentThread().name
     log.info(u'Using daily search providers')
 
     for cur_provider in providers:
-        threading.currentThread().name = u'{thread} :: [{provider}]'.format(thread=original_thread_name,
-                                                                            provider=cur_provider.name)
+        threading.currentThread().name = u'{thread} :: [{provider}]'.format(
+            thread=original_thread_name, provider=cur_provider.name
+        )
        cur_provider.cache.update_cache()
 
+    single_results = {}
+    multi_results = []
     for cur_provider in providers:
-        threading.currentThread().name = u'{thread} :: [{provider}]'.format(thread=original_thread_name,
-                                                                            provider=cur_provider.name)
+        threading.currentThread().name = u'{thread} :: [{provider}]'.format(
+            thread=original_thread_name, provider=cur_provider.name
+        )
         try:
-            cur_found_results = cur_provider.search_rss(episodes)
+            found_results = cur_provider.cache.find_needed_episodes(episodes)
         except AuthException as error:
             log.error(u'Authentication error: {0}', ex(error))
             continue
 
-        # pick a single result for each episode, respecting existing results
-        for cur_ep in cur_found_results:
-            if not cur_ep.series or cur_ep.series.paused:
-                log.debug(u'Skipping {0} because the show is paused ', cur_ep.pretty_name())
+        for episode_no, results in iteritems(found_results):
+            if results[0].series.paused:
+                log.debug(u'Skipping {0} because the show is paused.', results[0].series.name)
                 continue
 
             # if all results were rejected move on to the next episode
-            wanted_results = filter_results(cur_found_results[cur_ep])
+            wanted_results = filter_results(results)
            if not wanted_results:
-                log.debug(u'All found results for {0} were rejected.', cur_ep.pretty_name())
+                log.debug(u'All found results for {0} were rejected.', results[0].series.name)
                continue
 
             best_result = pick_result(wanted_results)
-            # if it's already in the list (from another provider) and the newly found quality is no better then skip it
-            if cur_ep in found_results and best_result.quality <= found_results[cur_ep].quality:
-                continue
-
             # Skip the result if search delay is enabled for the provider.
             if delay_search(best_result):
                 continue
 
-            found_results[cur_ep] = best_result
+            if episode_no in (SEASON_RESULT, MULTI_EP_RESULT):
+                multi_results.append(best_result)
+            else:
+                # if it's already in the list (from another provider) and
+                # the newly found quality is no better then skip it
+                if episode_no in single_results:
+                    allowed_qualities, preferred_qualities = results[0].series.current_qualities
+                    if not Quality.is_higher_quality(single_results[episode_no].quality,
+                                                     best_result.quality, allowed_qualities,
+                                                     preferred_qualities):
+                        continue
+
+                single_results[episode_no] = best_result
 
     threading.currentThread().name = original_thread_name
 
-    return list(itervalues(found_results))
+    return combine_results(multi_results, list(itervalues(single_results)))
 
 
 def delay_search(best_result):
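Note: the recurring substitution in this file is replacing naive numeric checks (i > cur_quality, best_result.quality <= ...) with Quality.is_higher_quality, which also honors the user's preferred qualities. The toy rule below only imitates that idea; the numeric values and the preference logic are invented, not Medusa's actual quality system:

HDTV, WEBDL_1080, BLURAY_1080 = 4, 32, 64  # invented quality values


def is_higher_quality(current, new, allowed, preferred):
    """Toy rule: 'higher' means closer to what the user prefers, not just a bigger number."""
    if preferred:
        # Only a preferred quality can beat the current one; among preferred
        # qualities the larger value wins.
        return new in preferred and (current not in preferred or new > current)
    # Without a preferred list, fall back to plain ordering within the allowed list.
    return new in allowed and new > current


allowed = [HDTV, WEBDL_1080, BLURAY_1080]
preferred = [WEBDL_1080]
print(BLURAY_1080 > WEBDL_1080)                                        # True: a naive check would upgrade
print(is_higher_quality(WEBDL_1080, BLURAY_1080, allowed, preferred))  # False: keeps the preferred WEB-DL
print(is_higher_quality(HDTV, WEBDL_1080, allowed, preferred))         # True: moves toward the preference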
@@ -805,47 +825,22 @@ def collect_multi_candidates(candidates, series_obj, episodes, down_cur_quality)
     if not wanted_candidates:
         return multi_candidates, single_candidates
 
-    searched_seasons = {str(x.season) for x in episodes}
-    main_db_con = db.DBConnection()
-    selection = main_db_con.select(
-        'SELECT episode '
-        'FROM tv_episodes '
-        'WHERE indexer = ?'
-        ' AND showid = ?'
-        ' AND ( season IN ( {0} ) )'.format(','.join(searched_seasons)),
-        [series_obj.indexer, series_obj.series_id]
-    )
-    all_eps = [int(x[b'episode']) for x in selection]
-    log.debug(u'Episodes list: {0}', all_eps)
-
     for candidate in wanted_candidates:
-        season_quality = candidate.quality
-
-        all_wanted = True
-        any_wanted = False
-        for cur_ep_num in all_eps:
-            for season in {x.season for x in episodes}:
-                if not series_obj.want_episode(season, cur_ep_num, season_quality,
-                                               down_cur_quality):
-                    all_wanted = False
-                else:
-                    any_wanted = True
+        wanted_episodes = [
+            series_obj.want_episode(ep_obj.season, ep_obj.episode, candidate.quality, down_cur_quality)
+            for ep_obj in candidate.episodes
+        ]
 
-        if all_wanted:
+        if all(wanted_episodes):
             log.info(u'All episodes in this season are needed, adding {0} {1}',
                      candidate.provider.provider_type,
                      candidate.name)
-            ep_objs = []
-            for cur_ep_num in all_eps:
-                for season in {x.season for x in episodes}:
-                    ep_objs.append(series_obj.get_episode(season, cur_ep_num))
-            candidate.episodes = ep_objs
 
             # Skip the result if search delay is enabled for the provider
             if not delay_search(candidate):
                 multi_candidates.append(candidate)
 
-        elif not any_wanted:
+        elif not any(wanted_episodes):
             log.debug(u'No episodes in this season are needed at this quality, ignoring {0} {1}',
                       candidate.provider.provider_type,
                       candidate.name)
@@ -866,18 +861,6 @@ def collect_multi_candidates(candidates, series_obj, episodes, down_cur_quality)
         elif len(cur_result.episodes) > 1:
             multi_candidates.append(cur_result)
 
-        # If this is a torrent all we can do is get the entire torrent,
-        # user will have to select which eps not to download in his torrent client
-        else:
-            log.info(u'Adding multi-episode result for full-season torrent.'
-                     u' Undesired episodes can be skipped in the torrent client if desired!')
-            ep_objs = []
-            for cur_ep_num in all_eps:
-                for season in {x.season for x in episodes}:
-                    ep_objs.append(series_obj.get_episode(season, cur_ep_num))
-            candidate.episodes = ep_objs
-            multi_candidates.append(candidate)
-
     return multi_candidates, single_candidates
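Note: collect_multi_candidates now judges a season pack purely by the episodes the candidate itself carries: snatch it when all of them are wanted, discard it when none are, and fall through to per-episode handling otherwise. A small self-contained illustration (the stub objects and the "already snatched" set are invented for the example):

from collections import namedtuple

Episode = namedtuple('Episode', 'season episode')


def classify_season_pack(candidate_episodes, want_episode):
    """Return 'all', 'none' or 'partial' wantedness for a season-pack candidate."""
    # A list, not a generator: all() would otherwise consume items
    # that the later any() check never gets to see.
    wanted = [want_episode(ep.season, ep.episode) for ep in candidate_episodes]
    if all(wanted):
        return 'all'
    elif not any(wanted):
        return 'none'
    return 'partial'


pack = [Episode(1, 1), Episode(1, 2), Episode(1, 3)]
already_snatched = {(1, 1)}  # pretend S01E01 was snatched earlier
print(classify_season_pack(pack, lambda s, e: (s, e) not in already_snatched))  # partial
print(classify_season_pack(pack, lambda s, e: True))                           # all

Materializing the checks in a list is deliberate: all() and any() over one shared generator would leave any() inspecting only whatever all() had not yet consumed.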