Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[Manual Search] Keep original function updateCache and create another for manual search #88

Merged
merged 1 commit into from
Mar 4, 2016
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion sickbeard/search.py
Original file line number Diff line number Diff line change
Expand Up @@ -564,7 +564,7 @@ def searchProviders(show, episodes, manualSearch=False, downCurQuality=False, ma

# Update the cache if a manual search is being run
if manualSelect:
results = curProvider.cache.updateCache(searchResults[curEp])
results = curProvider.cache.update_cache_manual_search(searchResults[curEp])
if results:
# If we have at least a result from one provider, it's good enough to be marked as result
finalResults.append(results)
Expand Down
63 changes: 36 additions & 27 deletions sickbeard/tvcache.py
Original file line number Diff line number Diff line change
Expand Up @@ -130,44 +130,53 @@ def _checkAuth(self, data): # pylint:disable=unused-argument, no-self-use
def _checkItemAuth(self, title, url): # pylint:disable=unused-argument, no-self-use
return True

def updateCache(self, manualData = None):
def updateCache(self):
# check if we should update
if not self.shouldUpdate():
return

cl = []
try:
data = self._getRSSData()
if self._checkAuth(data):
# clear cache
self._clearCache()

# set updated
self.setLastUpdate()

cl = []
for item in data['entries'] or []:
ci = self._parseItem(item)
if ci is not None:
cl.append(ci)

if len(cl) > 0:
cache_db_con = self._getDB()
cache_db_con.mass_action(cl)

except AuthException as e:
logger.log(u"Authentication error: " + ex(e), logger.ERROR)
except Exception as e:
logger.log(u"Error while searching " + self.provider.name + ", skipping: " + repr(e), logger.DEBUG)

def update_cache_manual_search(self, manual_data = None):

if manualData:
for item in manualData:
try:
cl = []
for item in manual_data:
logger.log(u"Adding to cache item found in manual search: {}".format(item.name), logger.DEBUG)
ci = self._addCacheEntry(item.name, item.url, item.seeders, item.leechers, item.size)
if ci is not None:
cl.append(ci)
# check if we should update (providers min time check)
elif self.shouldUpdate():
try:
data = self._getRSSData()
if self._checkAuth(data):
# clear cache
self._clearCache()

# set updated
self.setLastUpdate()

for item in data['entries'] or []:
ci = self._parseItem(item)
if ci is not None:
cl.append(ci)
except AuthException as e:
logger.log(u"Authentication error: " + ex(e), logger.ERROR)
except Exception as e:
logger.log(u"Error while searching " + self.provider.name + ", skipping: " + repr(e), logger.DEBUG)
except Exception as e:
logger.log(u"Error while adding to cache item found in manual search for provider " + self.provider.name + ", skipping: " + repr(e), logger.WARNING)

results = []
if len(cl) > 0:
cache_db_con = self._getDB()
results = cache_db_con.mass_action(cl)

if results:
return True

return False
return any(results)

def getRSSFeed(self, url, params=None):
return getFeed(url, params=params, request_hook=self.provider.get_url)
Expand Down