/
request_logger.py
30 lines (25 loc) · 1.13 KB
/
request_logger.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
from datetime import datetime
from nba_ss_db import db
def log_request(api_request, table_name):
    """
    Log an API request with a timestamp to the "scrape_log" table.

    Parameters
    ----------
    api_request : str
        The API request string being recorded.
    table_name : str
        Name of the table the scraped data was written to.
    """
    # Use an explicit 24-hour format rather than locale-dependent '%X':
    # get_last_scraped() relies on string MAX(date), which only works if
    # every stored timestamp uses the same, lexicographically sortable
    # format. ('%X' equals '%H:%M:%S' only in the C locale.)
    # NOTE(review): timestamp is naive local time — confirm whether UTC
    # is expected by downstream consumers.
    curr_time = datetime.now().strftime('%Y-%m-%d %H:%M:%S %f')
    db.utils.execute_sql("""INSERT INTO scrape_log VALUES (?, ?, ?);""",
                         params=(curr_time, api_request, table_name))
def already_scraped(api_request):
    """
    Return True if *api_request* has already been scraped.

    A request counts as scraped if its string appears at least once in
    the "scrape_log" table.

    Parameters
    ----------
    api_request : str
        The API request string to look up.

    Returns
    -------
    bool
        True when a matching row exists, False otherwise.
    """
    # LIMIT 1: we only need existence, not every matching log row.
    api_request_log = db.utils.execute_sql(
        """SELECT * FROM scrape_log WHERE api_request = ? LIMIT 1;""",
        params=(api_request, ))
    return len(api_request_log.rows) != 0
def get_last_scraped(api_request):
    """
    Return the most recent date on which *api_request* was scraped.

    Parameters
    ----------
    api_request : str
        The API request string to look up.

    Returns
    -------
    str or None
        The latest timestamp string logged for the request, or None if
        the request has never been scraped (SQL MAX over zero rows
        yields NULL, which always comes back as a single-row result).
    """
    # MAX() on the text column is a lexicographic comparison; it is
    # correct only because log_request stores timestamps in a sortable
    # 'YYYY-MM-DD HH:MM:SS ffffff'-style format.
    api_request_date_query = db.utils.execute_sql(
        """SELECT MAX(date) FROM scrape_log WHERE api_request = ?;""",
        params=(api_request, ))
    return api_request_date_query.rows[0][0]