
2.020 fix timeout not functional

fffonion committed Mar 11, 2018
1 parent 64852c6 commit 4c88fc61185059f25c0da1168f83a241cbc7f9ce
Showing with 18 additions and 9 deletions.
  1. +3 −3 xeHentai/const.py
  2. +9 −4 xeHentai/filters.py
  3. +6 −2 xeHentai/worker.py
xeHentai/const.py
@@ -48,10 +48,10 @@
 RESTR_SITE = "https*://(?:[g\.]*e\-|ex)hentai\.org"
-_FALLBACK_CF_IP = ("104.24.255.11", "104.24.254.11")
+FALLBACK_CF_IP = ("104.24.255.11", "104.24.254.11")
 FALLBACK_IP_MAP = {
-    'e-hentai.org': _FALLBACK_CF_IP,
-    'forums.e-hentai.org': ("94.100.18.243", ) + _FALLBACK_CF_IP,
+    'e-hentai.org': FALLBACK_CF_IP,
+    'forums.e-hentai.org': ("94.100.18.243", ),
     'exhentai.org': ("217.23.13.91","217.23.13.45","109.236.84.136","109.236.92.143","109.236.84.145","109.236.92.166")
 }
xeHentai/filters.py
@@ -197,11 +197,16 @@ def download_file(r, suc, fail, dirpath = dirpath):
         # merge the iter_content iterator with our custom stream_cb
         def _yield(chunk_size=16384, _r=r):
+            from requests.exceptions import ConnectionError
             length_read = 0
-            for _ in _r.iter_content(chunk_size):
-                length_read += len(_)
-                _r.iter_content_cb(_)
-                yield _
+            try:
+                for _ in _r.iter_content(chunk_size):
+                    length_read += len(_)
+                    _r.iter_content_cb(_)
+                    yield _
+            except ConnectionError: # read timeout
+                fail((ERR_IMAGE_BROKEN, r._real_url, r.url))
+                raise DownloadAbortedException()
             if length_read != r.content_length:
                 fail((ERR_IMAGE_BROKEN, r._real_url, r.url))
                 raise DownloadAbortedException()
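Note on the hunk above: when a response is consumed with stream=True, requests re-raises urllib3's ReadTimeoutError from iter_content as requests.exceptions.ConnectionError, which is why the read timeout is caught as ConnectionError here rather than as a Timeout. A minimal stand-alone sketch of the same pattern (the function name, URL handling and error reporting are placeholders, not xeHentai code):

import requests
from requests.exceptions import ConnectionError

def stream_download(url, timeout=20, chunk_size=16384):
    # timeout bounds the connect and each individual socket read,
    # not the total download time
    r = requests.get(url, stream=True, timeout=timeout)
    buf = bytearray()
    try:
        for chunk in r.iter_content(chunk_size):
            buf.extend(chunk)
    except ConnectionError:
        # a read timeout in the middle of the stream is re-raised
        # by iter_content as ConnectionError
        raise RuntimeError("download aborted: read timed out")
    return bytes(buf)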
xeHentai/worker.py
@@ -8,6 +8,7 @@
 import time
 import random
 import requests
+from requests.adapters import HTTPAdapter
 import traceback
 from threading import Thread, RLock
 from . import util
@@ -30,9 +31,10 @@ def __init__(self, url):
         self.url = self._real_url = url
         self.headers = {}
 
-class FallbackIpAdapter(requests.adapters.HTTPAdapter):
+class FallbackIpAdapter(HTTPAdapter):
     def __init__(self, ip_map=FALLBACK_IP_MAP, **kwargs):
         self.ip_map = ip_map
+        kwargs.update({'max_retries': 1})
         requests.adapters.HTTPAdapter.__init__(self, **kwargs)
 
     # override
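For context on the max_retries=1 change above: HTTPAdapter's max_retries is documented to cover failed DNS lookups, socket connections and connection timeouts, not requests whose data already reached the server, so it complements rather than replaces the per-request timeout added in this commit. A minimal sketch of mounting adapters with different retry counts per URL prefix (the hostname is a placeholder); requests uses the adapter registered for the longest matching prefix:

import requests
from requests.adapters import HTTPAdapter

session = requests.Session()
# hypothetical host used only for illustration: one connection-level retry
session.mount('https://example.org', HTTPAdapter(max_retries=1))
# everything else on this session: no automatic retries
session.mount('https://', HTTPAdapter(max_retries=0))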
@@ -68,11 +70,12 @@ class HttpReq(object):
     def __init__(self, headers = {}, proxy = None, proxy_policy = None, retry = 10, timeout = 20, logger = None, tname = "main"):
         self.session = requests.Session()
         self.session.headers = headers
-        self.session.timeout = timeout
         for u in ('forums.e-hentai.org', 'e-hentai.org', 'exhentai.org'):
             self.session.mount('http://%s' % u, FallbackIpAdapter())
             self.session.mount('https://%s' % u, FallbackIpAdapter())
+        self.session.mount('http://', HTTPAdapter(max_retries=0))
         self.retry = retry
+        self.timeout = timeout
         self.proxy = proxy
         self.proxy_policy = proxy_policy
         self.logger = logger
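The hunk above and the one below are the actual fix for "timeout not functional": requests.Session does not honour a timeout attribute assigned to the session object, so self.session.timeout = timeout had no effect; the value has to be stored on the wrapper and passed to every request() call. An alternative way to get the same behaviour is a Session subclass that injects a default timeout; the sketch below is a hypothetical illustration of that idea, not code from this repository:

import requests

class TimeoutSession(requests.Session):
    # hypothetical helper, not part of xeHentai: requests has no
    # session-wide timeout, so inject a default on every request
    def __init__(self, timeout=20):
        super(TimeoutSession, self).__init__()
        self._timeout = timeout

    def request(self, method, url, **kwargs):
        kwargs.setdefault('timeout', self._timeout)
        return super(TimeoutSession, self).request(method, url, **kwargs)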
@@ -92,6 +95,7 @@ def request(self, method, url, _filter, suc, fail, data=None, stream_cb=None):
             r = f(method, url,
                 allow_redirects=False,
                 data=data,
+                timeout=self.timeout,
                 stream=stream_cb != None)
         except requests.RequestException as ex:
             self.logger.warning("%s-%s %s %s: %s" % (i18n.THREAD, self.tname, method, url, ex))
