/
scraper.py
80 lines (56 loc) · 1.69 KB
/
scraper.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
import requests
from bs4 import BeautifulSoup
import pandas as pd
import numpy as np
import string
import re
import datetime
import sqlite3
import time
# Module-level accumulators: written by scrape_data(), read by create_df().
all_links = []  # event-detail URLs harvested from the upcoming-events listing
e_name = []     # event names (<h2> text of each event page)
f1 = []         # first fighter's name per fight ("null" fallback on IndexError)
f2 = []         # second fighter's name per fight ("null" fallback on IndexError)
all_rows = []   # never written in this file - TODO confirm it is still needed
fighters = []   # dead: scrape_data() binds a local of the same name instead
fights=[]       # dead: scrape_data() binds a local of the same name instead
def scrape_data():
    """Scrape upcoming UFC events from ufcstats.com into the module lists.

    Populates ``all_links`` (event URLs), then for every event appends one
    entry per fight to ``e_name``, ``f1`` and ``f2`` so the three lists stay
    the same length. Returns None; results are communicated via the globals.
    """
    listing = requests.get("http://ufcstats.com/statistics/events/upcoming")
    listing_soup = BeautifulSoup(listing.text, 'html.parser')
    table = listing_soup.find('table', {"class": "b-statistics__table-events"})
    for link in table.find_all('a', href=True):
        all_links.append(link.get('href'))
    for link in all_links:
        print(f"Now currently scraping link: {link}")
        time.sleep(1)  # be polite: throttle requests to the site
        # BUG FIX: the original reused the listing-page soup here, so the
        # event page was never fetched and the fight-table lookup below
        # returned None (the original then crashed iterating it).
        page = requests.get(link)
        soup = BeautifulSoup(page.text, 'html.parser')
        event_name = soup.find("h2").text.strip()
        fight_table = soup.find('table', {'class': "b-fight-details__table b-fight-details__table_style_margin-top b-fight-details__table_type_event-details js-fight-table"})
        if fight_table is None:
            continue  # layout changed or no fights listed yet - skip event
        # BUG FIX: search each row (not the whole table, as the original's
        # `fights.find_all` did) so every fight contributes its own pair,
        # and append the event name per fight to keep the lists aligned.
        for row in fight_table.find_all('tr'):
            links_in_row = row.find_all('a', {"href": re.compile("http://ufcstats.com/fighter-details")})
            if not links_in_row:
                continue  # header/blank rows carry no fighter links
            e_name.append(event_name)
            f1.append(links_in_row[0].text.strip())
            try:
                f2.append(links_in_row[1].text.strip())
            except IndexError:
                f2.append("null")  # keep lists aligned when a row has one link
    return None
# TODO(preprocessing): remove rows where DOB is null; impute missing stances
# as "orthodox". (DOB/stance are not scraped in this file - presumably this
# step belongs to a downstream fighter-stats dataset; confirm where it lives.)
def create_df(events=None, fighters1=None, fighters2=None):
    """Assemble the scraped results into a pandas DataFrame.

    Args:
        events: event name per fight; defaults to the module-level ``e_name``.
        fighters1: first fighter per fight; defaults to module-level ``f1``.
        fighters2: second fighter per fight; defaults to module-level ``f2``.

    Returns:
        DataFrame with columns ``Event``, ``Fighter1`` and ``Fighter2``.

    Raises:
        ValueError: if the three lists are not the same length (the original
            column-by-column assignment raised the same type from pandas).
    """
    events = e_name if events is None else events
    fighters1 = f1 if fighters1 is None else fighters1
    fighters2 = f2 if fighters2 is None else fighters2
    if not (len(events) == len(fighters1) == len(fighters2)):
        raise ValueError(
            "scraped lists are out of sync: "
            f"{len(events)} events vs {len(fighters1)}/{len(fighters2)} fighters"
        )
    return pd.DataFrame({"Event": events, "Fighter1": fighters1, "Fighter2": fighters2})
def main():
    """Scrape upcoming UFC events and persist them to a local SQLite db."""
    scrape_data()
    df = create_df()
    print("Scraping completed")
    conn = sqlite3.connect('data.sqlite')
    try:
        df.to_sql('data', conn, if_exists='replace')
        print('Db successfully constructed and saved')
    finally:
        conn.close()  # close the connection even if to_sql raises


if __name__ == "__main__":
    # Guard the scrape behind __main__ so importing this module for its
    # functions no longer triggers network and database side effects.
    main()