release: v2.41.0
newt-sc committed Jan 24, 2021
1 parent b221513 commit 55f6b7a
Showing 5 changed files with 64 additions and 11 deletions.
3 changes: 3 additions & 0 deletions CHANGELOG.md
@@ -1,3 +1,6 @@
+* [v2.41.0](https://github.com/a4k-openproject/a4kScrapers/releases/tag/a4kScrapers-2.41.0):
+  * add caching fallbacks
+
 * [v2.40.0](https://github.com/a4k-openproject/a4kScrapers/releases/tag/a4kScrapers-2.40.0):
   * fix file open issues on KODI 19
 
2 changes: 1 addition & 1 deletion meta.json
@@ -1,6 +1,6 @@
 {
   "author": "Unknown",
-  "version":"2.40.0",
+  "version":"2.41.0",
   "name":"a4kScrapers",
   "update_directory": "https://github.com/a4k-openproject/a4kScrapers/archive/",
   "remote_meta": "https://raw.githubusercontent.com/newt-sc/a4kScrapers/master/meta.json",
14 changes: 11 additions & 3 deletions providerModules/a4kScrapers/request.py
@@ -43,8 +43,15 @@ def _update_request_options(request_options):
     request_options.setdefault('headers', {})
     request_options['headers'].update(headers)
 
-lock = filelock.FileLock(_request_cache_path + '.lock')
+lock = filelock.SoftFileLock(_request_cache_path + '.lock')
+def remove_lock():
+    try: os.remove(_request_cache_path + '.lock')
+    except: pass
+remove_lock()
+
 def _save_cf_cookies(cfscrape, response):
+    global lock
+
     with lock:
         cookies = ''
 
@@ -176,10 +183,11 @@ def _request_core(self, request, sequental = None, cf_retries=3):
             response_err = response
             self._verify_response(response)
 
-            try:
+            try:
                 if self.exc_msg == '' and response.request.headers.get('X-Domain', None) is not None:
                     _save_cf_cookies(self._cfscrape, response)
-            except: pass
+            except:
+                remove_lock()
 
             return response
         except:
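In short, request.py now uses filelock.SoftFileLock instead of filelock.FileLock and deletes the lock file whenever locking or cookie caching fails, so a stale lock left behind by a crashed process cannot block later requests. A minimal standalone sketch of that pattern (the path and wrapper name below are illustrative, not the add-on's actual identifiers):

# Illustrative sketch of the lock-recovery pattern; CACHE_PATH and
# save_cookies are placeholders, not a4kScrapers identifiers.
import os
import filelock

CACHE_PATH = '/tmp/example_request_cache'  # hypothetical path

lock = filelock.SoftFileLock(CACHE_PATH + '.lock')

def remove_lock():
    # A SoftFileLock is only a marker file, so deleting it recovers from a
    # lock that a crashed process never released.
    try:
        os.remove(CACHE_PATH + '.lock')
    except OSError:
        pass

def save_cookies(write_fn):
    try:
        with lock:
            write_fn()
    except Exception:
        # If acquiring the lock or writing fails, drop the lock file so the
        # next call is not blocked by a stale lock.
        remove_lock()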
50 changes: 46 additions & 4 deletions providerModules/a4kScrapers/utils.py
@@ -28,6 +28,23 @@
 from html import unescape
 from urllib.parse import quote_plus, quote, unquote
 
+try:
+    from resources.lib.modules import database as alt_database
+except:
+    alt_database_dict = {}
+    def alt_get_or_add(fn, *args, **kwargs):
+        key = _hash_function(fn, *args)
+
+        if alt_database_dict.get(key, None):
+            return alt_database_dict[key]
+
+        return alt_database_dict.setdefault(key, fn(*args, **kwargs))
+
+    alt_database = lambda: None
+    alt_database.get = lambda fn, duration, *args, **kwargs: alt_get_or_add(fn, *args, **kwargs)
+    alt_database.cache_get = lambda key: None
+    alt_database.cache_insert = lambda key, value: None
+
 def _generate_md5(*args):
     md5_hash = hashlib.md5()
     try:
@@ -62,8 +79,15 @@ def _cache_get():
     except:
         return {}
 
-lock = filelock.FileLock(_cache_path + '.lock')
+lock = filelock.SoftFileLock(_cache_path + '.lock')
+def remove_lock():
+    try: os.remove(_cache_path + '.lock')
+    except: pass
+remove_lock()
+
 def get_or_add(key, value, fn, duration, *args, **kwargs):
+    global lock
+
     with lock:
         database_dict = _cache_get()
         key = _hash_function(fn, *args) if not key else key
@@ -81,9 +105,27 @@ def get_or_add(key, value, fn, duration, *args, **kwargs):
         return value
 
 database = lambda: None
-database.get = lambda fn, duration, *args, **kwargs: get_or_add(None, None, fn, duration, *args, **kwargs)
-database.cache_get = lambda key: get_or_add(key, None, None, None)
-database.cache_insert = lambda key, value: get_or_add(key, value, None, None)
+def db_get(fn, duration, *args, **kwargs):
+    try: return get_or_add(None, None, fn, duration, *args, **kwargs)
+    except:
+        remove_lock()
+        try: return alt_database.get(fn, duration, *args, **kwargs)
+        except: return None
+database.get = db_get
+def db_cache_get(key):
+    try: return get_or_add(key, None, None, None)
+    except:
+        remove_lock()
+        try: return alt_database.cache_get(key)
+        except: return None
+database.cache_get = db_cache_get
+def db_cache_insert(key, value):
+    try: return get_or_add(key, value, None, None)
+    except:
+        remove_lock()
+        try: alt_database.cache_insert(key, value)
+        except: return None
+database.cache_insert = db_cache_insert
 
 DEV_MODE = os.getenv('A4KSCRAPERS_TEST') == '1'
 DEV_MODE_ALL = os.getenv('A4KSCRAPERS_TEST_ALL') == '1'
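The utils.py change is the "add caching fallbacks" entry from the changelog: every cache call first tries the file-backed get_or_add, and if that raises it clears the lock file and falls back to a secondary cache (resources.lib.modules.database when importable, otherwise a plain in-memory dict). A minimal self-contained sketch of that fallback chain, with illustrative names:

# Sketch only: _primary_cache_get stands in for the file-backed cache and is
# hard-wired to fail here; the names are not the add-on's real API.
import hashlib

_memory_cache = {}

def _key(fn, *args):
    return hashlib.md5(repr((fn.__name__, args)).encode('utf-8')).hexdigest()

def _primary_cache_get(fn, *args, **kwargs):
    raise IOError('cache file locked or unreadable')

def cached(fn, *args, **kwargs):
    try:
        return _primary_cache_get(fn, *args, **kwargs)
    except Exception:
        # Fallback: memoize in-process so a broken cache never breaks a scrape.
        key = _key(fn, *args)
        if key not in _memory_cache:
            _memory_cache[key] = fn(*args, **kwargs)
        return _memory_cache[key]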
6 changes: 3 additions & 3 deletions providers/a4kScrapers/en/torrent/torrentapi.py
@@ -14,7 +14,7 @@ def _get_token(self, url):
         response = self._request.get(url.base + '&get_token=get_token')
         return core.json.loads(response.text)['token']
 
-    def _search_request(self, url, query, force_token_refresh=False, too_many_requests_max_retries=2, no_results_max_retries=2):
+    def _search_request(self, url, query, force_token_refresh=False, too_many_requests_max_retries=3, no_results_max_retries=2):
         token = core.database.get(self._get_token, 0 if force_token_refresh else 1, url)
 
         search = url.search
@@ -56,8 +56,8 @@ def _search_request(self, url, query, force_token_refresh=False, too_many_reques
                 too_many_requests_max_retries -= 1
                 return self._search_request(url, original_query, force_token_refresh, too_many_requests_max_retries, no_results_max_retries)
             # no results found
-            elif core.DEV_MODE and error_code == 20 and no_results_max_retries > 0:
-                core.time.sleep(6)
+            elif error_code == 20 and no_results_max_retries > 0:
+                core.time.sleep(2)
                 core.tools.log('Retrying after no results from %s' % search_url, 'info')
                 no_results_max_retries -= 1
                 return self._search_request(url, original_query, force_token_refresh, too_many_requests_max_retries, no_results_max_retries)
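The torrentapi.py change raises the rate-limit retry budget from 2 to 3 and lets the "no results" retry run outside DEV_MODE, with a shorter 2-second back-off. The overall shape is a recursive search with a separate budget per error class, sketched here with stand-in names (fake_search is not the provider's real request):

# Sketch of the per-error retry budgets; fake_search simulates the API.
import random
import time

def fake_search(query):
    return random.choice(['rate_limited', 'no_results', 'ok'])

def search(query, too_many_requests_retries=3, no_results_retries=2):
    result = fake_search(query)
    if result == 'rate_limited' and too_many_requests_retries > 0:
        time.sleep(2)
        return search(query, too_many_requests_retries - 1, no_results_retries)
    if result == 'no_results' and no_results_retries > 0:
        time.sleep(2)
        return search(query, too_many_requests_retries, no_results_retries - 1)
    return result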
