minor changes related to pep8
constverum committed Sep 26, 2017
1 parent d560297 commit d71e38f
Showing 12 changed files with 137 additions and 103 deletions.
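The diffs below apply the two standard remedies for PEP 8's 79-character limit (flake8 error E501): wrap the long line, or append `# noqa` to silence the linter where wrapping is not worth it. Both patterns, drawn from the changes below:

    # wrap the line across a natural break
    providers = ['http://www.proxylists.net/',
                 'http://fineproxy.org/eng/fresh-proxies/']

    # or keep it long and mute the warning for just this line
    __short_description__ = '[Finder/Checker/Server] Finds public proxies ...'  # noqa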
3 changes: 2 additions & 1 deletion examples/basic.py
@@ -6,7 +6,8 @@
 async def show(proxies):
     while True:
         proxy = await proxies.get()
-        if proxy is None: break
+        if proxy is None:
+            break
         print('Found proxy: %s' % proxy)

 proxies = asyncio.Queue()
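For context, a self-contained version of this example looks roughly like the following; the `Broker` construction and `find()` call are assumed from the surrounding file, which this hunk does not show:

    import asyncio
    from proxybroker import Broker

    async def show(proxies):
        while True:
            proxy = await proxies.get()
            if proxy is None:
                break
            print('Found proxy: %s' % proxy)

    proxies = asyncio.Queue()
    broker = Broker(proxies)
    tasks = asyncio.gather(
        broker.find(types=['HTTP', 'HTTPS'], limit=10),
        show(proxies))
    loop = asyncio.get_event_loop()
    loop.run_until_complete(tasks)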
3 changes: 2 additions & 1 deletion examples/find_and_use.py
@@ -47,7 +47,8 @@ def main():

     judges = ['http://httpbin.org/get?show_env',
               'https://httpbin.org/get?show_env']
-    providers = ['http://www.proxylists.net/', 'http://fineproxy.org/eng/fresh-proxies/']
+    providers = ['http://www.proxylists.net/',
+                 'http://fineproxy.org/eng/fresh-proxies/']

     broker = Broker(
         proxies, timeout=8, max_conn=200, max_tries=3, verify_ssl=False,
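The wrapped `providers` list feeds the `Broker` constructed just below it. A plausible shape for the rest of `main()` (a hedged sketch, since the file is only partially shown; `fetch` is a hypothetical consumer coroutine):

    loop = asyncio.get_event_loop()
    proxies = asyncio.Queue()
    broker = Broker(
        proxies, timeout=8, max_conn=200, max_tries=3, verify_ssl=False,
        judges=judges, providers=providers, loop=loop)
    tasks = asyncio.gather(
        broker.find(types=['HTTP', 'HTTPS'], limit=10),
        fetch(proxies))  # hypothetical consumer, e.g. like show() in basic.py
    loop.run_until_complete(tasks)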
31 changes: 21 additions & 10 deletions proxybroker/__init__.py
@@ -17,28 +17,39 @@
 __title__ = 'ProxyBroker'
 __package__ = 'proxybroker'
 __version__ = '0.2.0'
-__short_description__ = '[Finder/Checker/Server] Finds public proxies from multiple sources and concurrently checks them. Supports HTTP(S) and SOCKS4/5.'
+__short_description__ = '[Finder/Checker/Server] Finds public proxies from multiple sources and concurrently checks them. Supports HTTP(S) and SOCKS4/5.'  # noqa
 __author__ = 'Constverum'
 __author_email__ = 'constverum@gmail.com'
 __url__ = 'https://github.com/constverum/ProxyBroker'
 __license__ = 'Apache License, Version 2.0'
-__copyright__ = 'Copyright 2015-2016 Constverum'
+__copyright__ = 'Copyright 2015-2017 Constverum'


-from .proxy import Proxy
-from .judge import Judge
-from .providers import Provider
-from .checker import Checker
-from .server import Server, ProxyPool
-from .api import Broker
+from .proxy import Proxy  # noqa
+from .judge import Judge  # noqa
+from .providers import Provider  # noqa
+from .checker import Checker  # noqa
+from .server import Server, ProxyPool  # noqa
+from .api import Broker  # noqa


-import logging
-import warnings
+import logging  # noqa
+import warnings  # noqa


 logger = logging.getLogger('asyncio')
 logger.addFilter(logging.Filter('has no effect when using ssl'))

 warnings.simplefilter('always', UserWarning)
 warnings.simplefilter('once', DeprecationWarning)
+
+
+__all__ = (
+    Proxy,
+    Judge,
+    Provider,
+    Checker,
+    Server,
+    ProxyPool,
+    Broker,
+)
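A note on the new `__all__`: by convention it holds the public *names* as strings; with the class objects themselves, as added here, `from proxybroker import *` raises `TypeError` on Python 3, since attribute names must be strings. The `# noqa` comments above serve a related purpose: they suppress flake8's F401 unused-import warning for these deliberate re-exports. The conventional string form would be:

    __all__ = ('Proxy', 'Judge', 'Provider', 'Checker', 'Server',
               'ProxyPool', 'Broker')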
9 changes: 5 additions & 4 deletions proxybroker/api.py
@@ -6,7 +6,7 @@
 from functools import partial
 from collections import defaultdict, Counter

-from .errors import *
+from .errors import ResolveError
 from .proxy import Proxy
 from .server import Server
 from .checker import Checker
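Dropping the wildcard in favour of a named import fixes flake8's F403 ("`from module import *` used") and F405 ("name may be undefined from star imports") warnings, and makes the module's single real dependency on `errors` explicit:

    # before: every name in .errors leaks into this namespace
    from .errors import *

    # after: the one name actually used is visible to readers and linters
    from .errors import ResolveError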
@@ -124,7 +124,8 @@ async def find(self, *, types=None, data=None, countries=None,
             (optional) String or list with proxies. Also can be a file-like
             object supports `read()` method. Used instead of providers
         :param list countries:
-            (optional) List of ISO country codes where should be located proxies
+            (optional) List of ISO country codes where the proxies should
+            be located
         :param bool post:
             (optional) Flag indicating use POST instead of GET for requests
             when checking proxies
@@ -190,8 +191,8 @@ def serve(self, host='127.0.0.1', port=8888, limit=100, **kwargs):
             proxies, checking of new proxies will be lazily paused.
             Checking will be resumed if all the found proxies will be discarded
             in the process of working with them (see :attr:`max_error_rate`,
-            :attr:`max_resp_time`). And will continue until it finds one working
-            proxy and paused again. The default value is 100
+            :attr:`max_resp_time`). Checking will continue until it finds
+            one working proxy, then pause again. The default value is 100
         :param int max_tries:
             (optional) The maximum number of attempts to handle an incoming
             request. If not specified, it will use the value specified during
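Based on this signature and docstring, a typical invocation of the built-in proxy server would look like the sketch below (argument values are illustrative):

    proxies = asyncio.Queue()
    broker = Broker(proxies)
    broker.serve(host='127.0.0.1', port=8888, limit=100, max_tries=3)
    # HTTP clients can now use 127.0.0.1:8888 as their proxy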
38 changes: 20 additions & 18 deletions proxybroker/checker.py
@@ -1,12 +1,11 @@
-import zlib
-import time
 import asyncio
+import time
+import zlib
 import warnings

 import aiohttp

-from .errors import *
-from .proxy import Proxy
+from .errors import (
+    BadStatusError, BadResponseError, ProxyEmptyRecvError, ProxyConnError,
+    ProxyTimeoutError, ProxyRecvError, ProxySendError, ResolveError)
 from .judge import Judge, get_judges
 from .utils import log, get_headers, get_all_ip, get_status_code, parse_headers
 from .resolver import Resolver
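The reordered header now follows PEP 8's import conventions: standard-library modules first and alphabetized, then third-party packages, then local imports, with a blank line between groups and explicit names instead of a star import. Schematically:

    import asyncio   # stdlib, alphabetical
    import time
    import zlib

    import aiohttp   # third-party

    from .errors import BadStatusError  # local, explicit names only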
@@ -33,7 +32,7 @@ def __init__(self, judges, max_tries=3, timeout=8, verify_ssl=False,
         self._req_http_proto = not types or bool(
             ('HTTP', 'CONNECT:80', 'SOCKS4', 'SOCKS5') & types.keys())
         self._req_https_proto = not types or bool(('HTTPS',) & types.keys())
-        self._req_smtp_proto = not types or bool(('CONNECT:25',) & types.keys())
+        self._req_smtp_proto = not types or bool(('CONNECT:25',) & types.keys())  # noqa

         self._ngtrs = {proto for proto in types or NGTRS}

@@ -53,7 +52,8 @@ async def check_judges(self):

         if len(Judge.available['HTTP']) == 0:
             nojudges.append('HTTP')
-            disable_protocols.extend(['HTTP', 'CONNECT:80', 'SOCKS4', 'SOCKS5'])
+            disable_protocols.extend(
+                ['HTTP', 'CONNECT:80', 'SOCKS4', 'SOCKS5'])
             self._req_http_proto = False
             # for coroutines, which is already waiting
             Judge.ev['HTTP'].set()
@@ -89,18 +89,16 @@ def _types_passed(self, proxy):
             return True
         for proto, lvl in proxy.types.copy().items():
             req_levels = self._types.get(proto)
-            # log.debug('proxy: %s; proto: %s; lvl: %s; req_levels: %s;' % (proxy, proto, lvl, req_levels))
             if not req_levels or (lvl in req_levels):
-                # log.debug('TRUE!')
                 if not self._strict:
                     return True
             else:
-                # log.debug('FALSE!')
                 if self._strict:
                     del proxy.types[proto]
         if self._strict and proxy.types:
             return True
-        proxy.log('Protocol or the level of anonymity differs from the requested')
+        proxy.log(
+            'Protocol or the level of anonymity differs from the requested')
         return False

     async def _in_DNSBL(self, host):
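With the commented-out debug logging removed, the rule reads cleanly: a proxy passes if, for some protocol, either no anonymity levels were requested or its level is among those requested; in strict mode each non-matching protocol is deleted and the proxy passes only if some protocol survives. The same logic as a standalone sketch (a hypothetical helper, not part of the codebase):

    def types_passed(proxy_types, requested, strict=False):
        # proxy_types: {'HTTP': 'High', ...}; requested: {'HTTP': {'Anonymous', 'High'}, ...}
        for proto, lvl in dict(proxy_types).items():
            req_levels = requested.get(proto)
            if not req_levels or lvl in req_levels:
                if not strict:
                    return True     # any match is enough in non-strict mode
            elif strict:
                del proxy_types[proto]  # strict mode discards mismatches
        return bool(strict and proxy_types)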
@@ -158,7 +156,7 @@ async def _check_conn_25(self, proxy, proto):
             except ProxyTimeoutError:
                 continue
             except (ProxyConnError, ProxyRecvError, ProxySendError,
-                    ProxyEmptyRecvError, BadStatusError, BadResponseError) as e:
+                    ProxyEmptyRecvError, BadStatusError, BadResponseError):
                 break
             else:
                 proxy.types[proxy.ngtr.name] = None
@@ -182,7 +180,7 @@ async def _check(self, proxy, proto):
             except ProxyTimeoutError:
                 continue
             except (ProxyConnError, ProxyRecvError, ProxySendError,
-                    ProxyEmptyRecvError, BadStatusError, BadResponseError) as e:
+                    ProxyEmptyRecvError, BadStatusError, BadResponseError):
                 break
             else:
                 content = _decompress_content(headers, content)
@@ -211,7 +209,8 @@ def _request(method, host, path, fullpath=False, data=''):
          'path': 'http://%s%s' % (host, path) if fullpath else path,  # HTTP
          'headers': '\r\n'.join(('%s: %s' % (k, v) for k, v in hdrs.items())),
          'data': data}
-    req = ('{method} {path} HTTP/1.1\r\n{headers}\r\n\r\n{data}').format(**kw).encode()
+    req = (('{method} {path} HTTP/1.1\r\n{headers}\r\n\r\n{data}')
+           .format(**kw).encode())
     return req, rv


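`_request` assembles a raw HTTP/1.1 request by hand: the request line, headers joined with CRLF, then a blank line before the body. Assuming `get_headers()` (called earlier in the function) supplies the `hdrs` dict and the `rv` verification token, a call produces bytes shaped like this (values illustrative):

    req, rv = _request('GET', 'httpbin.org', '/get?show_env')
    # req == b'GET /get?show_env HTTP/1.1\r\n'
    #        b'Host: httpbin.org\r\n'
    #        b'...remaining headers...\r\n'
    #        b'\r\n'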
@@ -243,7 +242,9 @@ def _decompress_content(headers, content):
     is_compressed = headers.get('Content-Encoding') in ('gzip', 'deflate')
     is_chunked = headers.get('Transfer-Encoding') == 'chunked'
     if is_compressed:
-        # gzip: zlib.MAX_WBITS|16; deflate: -zlib.MAX_WBITS; auto: zlib.MAX_WBITS|32;
+        # gzip: zlib.MAX_WBITS|16;
+        # deflate: -zlib.MAX_WBITS;
+        # auto: zlib.MAX_WBITS|32;
         if is_chunked:
             # b'278\r\n\x1f\x8b...\x00\r\n0\r\n\r\n' => b'\x1f\x8b...\x00
             content = b''.join(content.split(b'\r\n')[1::2])
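The rewrapped comment lists the `wbits` values zlib expects for each encoding; `zlib.MAX_WBITS | 32` auto-detects either a zlib or a gzip wrapper, so one call covers both cases. A minimal demonstration:

    import zlib

    data = zlib.compress(b'response body')               # zlib-wrapped
    assert zlib.decompress(data, zlib.MAX_WBITS | 32) == b'response body'
    # gzip-wrapped data decompresses with zlib.MAX_WBITS | 16 (or | 32)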
@@ -264,8 +265,9 @@ def _check_test_response(proxy, headers, content, rv):
         proxy.log('Response: correct')
         return True
     else:
-        proxy.log('Response: not correct; ip: %s, rv: %s, ref: %s, cookie: %s' % (
-            bool(foundIP), verIsCorrect, refSupported, cookieSupported))
+        proxy.log(
+            'Response: not correct; ip: %s, rv: %s, ref: %s, cookie: %s' % (
+                bool(foundIP), verIsCorrect, refSupported, cookieSupported))
         return False


10 changes: 6 additions & 4 deletions proxybroker/judge.py
@@ -1,10 +1,10 @@
-import random
 import asyncio
+import random
 from urllib.parse import urlparse

 import aiohttp

-from .errors import *
+from .errors import ResolveError
 from .utils import log, get_headers
 from .resolver import Resolver

@@ -70,12 +70,14 @@ async def check(self, real_ext_ip):
             loop=self._loop, verify_ssl=self.verify_ssl, force_close=True)
         try:
             with aiohttp.Timeout(self.timeout, loop=self._loop):
-                async with aiohttp.ClientSession(connector=connector, loop=self._loop) as session,\
+                async with aiohttp.ClientSession(connector=connector,
+                                                 loop=self._loop) as session,\
                         session.get(url=self.url, headers=headers,
                                     allow_redirects=False) as resp:
                     page = await resp.text()
         except (asyncio.TimeoutError, aiohttp.ClientOSError,
-                aiohttp.ClientResponseError, aiohttp.ServerDisconnectedError) as e:
+                aiohttp.ClientResponseError,
+                aiohttp.ServerDisconnectedError) as e:
             log.debug('%s is failed. Error: %r;' % (self, e))
             return

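`aiohttp.Timeout` is the timeout context manager from the aiohttp 1.x/2.x line this code targets; aiohttp 3.x replaced it with `ClientTimeout` passed to the session. A rough modern equivalent of the wrapped call, noted as an assumption about later aiohttp versions rather than anything this commit touches:

    timeout = aiohttp.ClientTimeout(total=8)
    async with aiohttp.ClientSession(timeout=timeout) as session:
        async with session.get(url, headers=headers,
                               allow_redirects=False) as resp:
            page = await resp.text()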
6 changes: 3 additions & 3 deletions proxybroker/negotiators.py
@@ -2,7 +2,7 @@
 from socket import inet_aton
 from abc import ABC, abstractmethod

-from .errors import *
+from .errors import BadStatusError, BadResponseError
 from .utils import get_headers, get_status_code


@@ -63,7 +63,7 @@ async def negotiate(self, **kwargs):
             self._proxy.log('Failed (invalid data)', err=BadResponseError)
             raise BadResponseError
         else:
-            self._proxy.log('Request granted')
+            self._proxy.log('Request is granted')


 class Socks4Ngtr(BaseNegotiator):
@@ -83,7 +83,7 @@ async def negotiate(self, **kwargs):
             raise BadResponseError
         # resp = b'\x00Z\x00\x00\x00\x00\x00\x00' // ord('Z') == 90 == 0x5A
         else:
-            self._proxy.log('Request granted')
+            self._proxy.log('Request is granted')


 class Connect80Ngtr(BaseNegotiator):
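The inline comment in `Socks4Ngtr` decodes the success reply: a SOCKS4 response is 8 bytes, and the second byte (the CD status field) equals 0x5A — `ord('Z') == 90` — when the request is granted. As a standalone check (hypothetical helper, not part of the codebase):

    def socks4_granted(resp: bytes) -> bool:
        # byte 0 (VN) is 0x00 in replies; byte 1 (CD) is 0x5A on success
        return len(resp) == 8 and resp[1] == 0x5A

    assert socks4_granted(b'\x00Z\x00\x00\x00\x00\x00\x00')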
