
Commit

drop bad proxy sites
JarbasAl committed Mar 25, 2024
1 parent f6714e1 commit 64892f4
Showing 4 changed files with 29 additions and 327 deletions.
6 changes: 2 additions & 4 deletions anon_requests/proxy.py
@@ -2,10 +2,8 @@
 from anon_requests.proxy_sources import ProxyGetter, ProxyType, ProxyAnonymity
 from anon_requests.proxy_sources.free_proxy_list import SSLProxies, \
     SocksProxy, FreeProxyList, UKProxy, USProxy, AnonProxies
-from anon_requests.proxy_sources.free_proxy_cz import FreeProxyCZ
 from anon_requests.proxy_sources.hidemyname import HideMyName
 from anon_requests.proxy_sources.proxydb import ProxyDB
-from anon_requests.proxy_sources.proxyscan import ProxyScan
 from anon_requests.proxy_sources.pubproxy import PubProxy
 from anon_requests.proxy_sources.spysme import SpysMe
 from anon_requests.proxy_sources.spysone import SpysOne
@@ -24,8 +22,8 @@ def scrap_proxy_list(self):
         print("scrapping proxies")
         with concurrent.futures.ThreadPoolExecutor(max_workers=self.workers) \
                 as executor:
-            sources = [SSLProxies(), HideMyName(), FreeProxyCZ(), SSHOcean(),
-                       ProxyDB(), ProxyScan(), PubProxy(), MyProxy(),
+            sources = [SSLProxies(), HideMyName(), SSHOcean(),
+                       ProxyDB(), PubProxy(), MyProxy(),
                        SpysMe(), FreeProxyList(), SocksProxy(), USProxy(),
                        UKProxy(), AnonProxies(), SpysOne(), ProxyNova()]
             # Start the scrap operations and mark each future with its source
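For context, the sources list above is fanned out over a thread pool. The hunk is cut off right after the submit comment, so the collection step below is an assumed sketch rather than the repository's actual code, and the standalone helper name scrap_all is hypothetical.

import concurrent.futures

def scrap_all(sources, workers=8):
    """Assumed sketch: run every source's scrap_proxy_list() in parallel."""
    proxies = []
    with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as executor:
        # Start the scrap operations and mark each future with its source
        future_to_source = {executor.submit(s.scrap_proxy_list): s
                            for s in sources}
        for future in concurrent.futures.as_completed(future_to_source):
            source = future_to_source[future]
            try:
                proxies += future.result()
            except Exception:
                # one failing source should not abort the whole scrape
                print(f"{source.__class__.__name__} failed")
    return proxies
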
156 changes: 0 additions & 156 deletions anon_requests/proxy_sources/free_proxy_cz.py

This file was deleted.

143 changes: 0 additions & 143 deletions anon_requests/proxy_sources/proxyscan.py

This file was deleted.

51 changes: 27 additions & 24 deletions anon_requests/proxy_sources/spysme.py
@@ -10,30 +10,33 @@ def scrap_proxy_list(self):
         data = [p.strip() for p in page.text.split("\n") if p and p[
             0].isdigit()]
         for p in data:
-            p = p.replace("!", "")
-            proxy, fields = p.split(" ")[:2]
-            ip, port = proxy.split(":")
-            if fields.endswith("-S"):
-                proxy_type = ProxyType.HTTPS
-                fields = fields[:-2]
-            else:
-                proxy_type = ProxyType.HTTP
-
-            country_code, anon = fields.split("-")
-            if anon == "H":
-                anon = ProxyAnonymity.ELITE
-            elif anon == "A":
-                anon = ProxyAnonymity.ANONYMOUS
-            else:
-                anon = ProxyAnonymity.TRANSPARENT
-            proxies.append({"ip": ip,
-                            "port": port,
-                            "country_code": country_code,
-                            "proxy_anonymity": anon,
-                            "proxy_type": proxy_type,
-                            "urls": {"http": proxy,
-                                     "https": proxy}
-                            })
+            try:
+                p = p.replace("!", "")
+                proxy, fields = p.split(" ")[:2]
+                ip, port = proxy.split(":")
+                if fields.endswith("-S"):
+                    proxy_type = ProxyType.HTTPS
+                    fields = fields[:-2]
+                else:
+                    proxy_type = ProxyType.HTTP
+
+                country_code, anon = fields.split("-")
+                if anon == "H":
+                    anon = ProxyAnonymity.ELITE
+                elif anon == "A":
+                    anon = ProxyAnonymity.ANONYMOUS
+                else:
+                    anon = ProxyAnonymity.TRANSPARENT
+                proxies.append({"ip": ip,
+                                "port": port,
+                                "country_code": country_code,
+                                "proxy_anonymity": anon,
+                                "proxy_type": proxy_type,
+                                "urls": {"http": proxy,
+                                         "https": proxy}
+                                })
+            except:
+                pass
         return proxies


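The change to spysme.py wraps the per-line parsing in a try/except so one malformed entry in the spys.me feed is skipped instead of aborting the whole scrape. As a self-contained illustration of the line format that parser expects ("ip:port country-anonymity[-S]"), here is a minimal sketch; the sample line, the parse_line helper name, and the simplified enums are illustrative assumptions, not taken from the repository or the live feed.

from enum import Enum

class ProxyType(Enum):
    HTTP = "http"
    HTTPS = "https"

class ProxyAnonymity(Enum):
    TRANSPARENT = 0
    ANONYMOUS = 1
    ELITE = 2

def parse_line(line):
    """Parse one spys.me-style entry, e.g. '1.2.3.4:8080 US-H-S!'."""
    line = line.replace("!", "")
    proxy, fields = line.split(" ")[:2]
    ip, port = proxy.split(":")
    if fields.endswith("-S"):          # trailing -S marks HTTPS support
        proxy_type = ProxyType.HTTPS
        fields = fields[:-2]
    else:
        proxy_type = ProxyType.HTTP
    country_code, anon = fields.split("-")
    anon = {"H": ProxyAnonymity.ELITE,
            "A": ProxyAnonymity.ANONYMOUS}.get(anon, ProxyAnonymity.TRANSPARENT)
    return {"ip": ip, "port": port, "country_code": country_code,
            "proxy_anonymity": anon, "proxy_type": proxy_type,
            "urls": {"http": proxy, "https": proxy}}

print(parse_line("1.2.3.4:8080 US-H-S!"))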
