-
Notifications
You must be signed in to change notification settings - Fork 6
/
scraper.py
42 lines (28 loc) · 945 Bytes
/
scraper.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
import requests
from bs4 import BeautifulSoup
from tabulate import *
import csv
class ProxyScraper:
    """Scrape the proxy table from free-proxy-list.net and save it as proxies.csv."""

    def __init__(self):
        # Instance-level list: the original class-level `results = []` was a
        # shared mutable attribute, so rows would accumulate across instances.
        self.results = []

    def fetch(self, url):
        """GET *url* and return the requests.Response.

        Raises requests.HTTPError on 4xx/5xx so we never parse an error page.
        """
        response = requests.get(url)
        response.raise_for_status()
        return response

    def parse(self, html):
        """Extract the header row and all data rows from the first <table> in *html*."""
        content = BeautifulSoup(html, 'lxml')
        table = content.find('table')
        rows = table.find_all('tr')
        # Fix: the original iterated rows[0] directly (picking up whitespace
        # text nodes between cells) and stored the headers in a discarded
        # local variable, so the header row never made it into the CSV.
        headers = [header.text for header in rows[0].find_all(['th', 'td'])]
        self.results.append(headers)
        for row in rows:
            cells = row.find_all('td')
            if cells:  # skip header/empty rows, which have no <td> cells
                self.results.append([data.text for data in cells])

    def to_csv(self):
        """Write all accumulated rows to proxies.csv in the working directory."""
        # newline='' is required by the csv module; without it Windows
        # output gets an extra blank line after every row.
        with open('proxies.csv', 'w', newline='') as csv_file:
            writer = csv.writer(csv_file)
            writer.writerows(self.results)

    def run(self):
        """Fetch the proxy list page, parse it, and persist the rows to CSV."""
        response = self.fetch('https://www.free-proxy-list.net/')
        self.parse(response.text)
        self.to_csv()
# Script entry point: perform one scrape-and-save run when executed directly.
if __name__ == '__main__':
    ProxyScraper().run()