
Commit

chore: del redundant code
shengchenyang committed Apr 2, 2024
1 parent f783546 commit 5cd28cc
Showing 1 changed file with 1 addition and 2 deletions.
3 changes: 1 addition & 2 deletions ayugespidertools/scraper/middlewares/proxy/exclusive.py
@@ -37,8 +37,7 @@ def from_crawler(cls, crawler: "Crawler") -> "Self":
     def get_proxy_ip(self, proxy_url: str, index: int) -> str:
         """Fetch the proxy entry whose index is proxy_index from the exclusive proxy API."""
         try:
-            req = urllib.request.Request(url=proxy_url)
-            r = urllib.request.urlopen(req)
+            r = urllib.request.urlopen(url=proxy_url)
             content = r.read().decode(errors="ignore")
             proxy_list = json.loads(content).get("data").get("proxy_list")
             proxy_list.sort()
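For context, urllib.request.urlopen accepts a plain URL string as well as a Request object, so wrapping the URL in a Request that carries no custom headers, body, or method is redundant. Below is a minimal, self-contained sketch of the simplified call; the helper name fetch_proxy_list is hypothetical, and the {"data": {"proxy_list": [...]}} JSON shape simply mirrors the code in the diff:

import json
import urllib.request

def fetch_proxy_list(proxy_url: str) -> list:
    # urlopen takes a URL string directly; an explicit Request object is
    # only needed for custom headers, a request body, or a non-GET method.
    r = urllib.request.urlopen(url=proxy_url)
    content = r.read().decode(errors="ignore")
    # Parse the assumed {"data": {"proxy_list": [...]}} response shape.
    proxy_list = json.loads(content).get("data").get("proxy_list")
    proxy_list.sort()
    return proxy_list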
