Permalink
Browse files

refactor: `symbol_id` -> `symbol`

  • Loading branch information...
stnbu committed Sep 7, 2018
1 parent acdb556 commit c84a906d40190b5e24da9c0f74af0842fdc399be
Showing with 40 additions and 41 deletions.
  1. +20 −21 coincharts/daemonize.py
  2. +15 −15 coincharts/data.py
  3. +1 −1 coincharts/models.py
  4. +1 −1 coincharts/urls.py
  5. +3 −3 coincharts/views.py
@@ -17,8 +17,7 @@
import daemon
import daemon.pidfile
from coincharts.daemon import db
from coincharts import config
from coincharts import config, db
# NOTE: we rebind the name `config` to a plain dict so that merely importing this module does not trigger disk reads or other side effects.
config = config.get_config()
@@ -67,12 +66,12 @@ def round_up_hour(cls, dt):
dt = dt.replace(minute=0, second=0, microsecond=0)
return dt
def __init__(self, symbol_ids, dir_path):
self.symbol_ids = symbol_ids
def __init__(self, symbols, dir_path):
self.symbols = symbols
self.dir_path = dir_path
def get_url(self, symbol_id, query_data):
url_beginning = ('https', 'rest.coinapi.io/v1', 'ohlcv/{}/history'.format(symbol_id))
def get_url(self, symbol, query_data):
url_beginning = ('https', 'rest.coinapi.io/v1', 'ohlcv/{}/history'.format(symbol))
query = []
for key, value in query_data.items():
if not value:
@@ -86,13 +85,13 @@ def get_url(self, symbol_id, query_data):
url = urllib.parse.urlunparse(url_beginning + url_end)
return url
def fetch(self, symbol_id):
last_date = self.get_last_date_from_store(symbol_id)
def fetch(self, symbol):
last_date = self.get_last_date_from_store(symbol)
if last_date is None:
logger.debug('last date for {} not found. using default of {}'.format(symbol_id, self.first_date))
logger.debug('last date for {} not found. using default of {}'.format(symbol, self.first_date))
last_date = parse_dt(self.first_date)
else:
logger.debug('date of last record for {} is {}'.format(symbol_id, last_date))
logger.debug('date of last record for {} is {}'.format(symbol, last_date))
self.validate_datetime_object(last_date)
now = datetime.datetime.now(tz=pytz.UTC)
@@ -105,7 +104,7 @@ def fetch(self, symbol_id):
query_data = dict(self.query_template)
query_data['time_start'] = first_fetch_date
query_data['limit'] = 1500 # just over one year of records @6hrs
url = self.get_url(symbol_id, query_data)
url = self.get_url(symbol, query_data)
logger.debug('getting url {}'.format(url))
response = requests.get(url, headers=self.headers)
if response.status_code != 200:
@@ -115,31 +114,31 @@ def fetch(self, symbol_id):
response.headers['X-RateLimit-Remaining']))
data = response.json()
# validate the FIRST date from the data returned. Not perfect, but will prevent future heartache.
self.validate_datetime_object(data[0]['time'])
self.validate_datetime_object(data[0]['time_period_start']) # TODO. how to track that we want *this* time field
return data
def get_last_date_from_store(self, symbol_id):
def get_last_date_from_store(self, symbol):
try:
obj = db.Prices.objects.filter(symbol_id=symbol_id).order_by('id').latest()
obj = db.Prices.objects.filter(symbol=symbol).order_by('id').latest()
except db.Prices.DoesNotExist:
logging.info('No `time_period_end` value found for {}'.format(symbol_id))
logging.info('No `time_period_end` value found for {}'.format(symbol))
return None
dt = getattr(obj, 'time')
return parse_dt(dt)
def insert(self, symbol_id, data):
logger.debug('inserting {} records for symbol_id {}'.format(len(data), symbol_id))
def insert(self, symbol, data):
logger.debug('inserting {} records for symbol {}'.format(len(data), symbol))
insertions = []
for row in data:
insertions.append(db.Prices(symbol_id=symbol_id, **row))
insertions.append(db.Prices(symbol=symbol, **row))
# No explicit `.save()` needed: `bulk_create` inserts all rows in one query (note it does NOT call each object's `.save()` or fire save signals — verify that's acceptable here)
db.Prices.objects.bulk_create(insertions)
def update(self):
# TODO: probably opportunities for parallelization
for symbol_id in self.symbol_ids:
data = self.fetch(symbol_id)
self.insert(symbol_id, data)
for symbol in self.symbols:
data = self.fetch(symbol)
self.insert(symbol, data)
def worker(dir_path, daemonize=True):
@@ -10,12 +10,12 @@
from coincharts import config
config = config.get_config()
symbol_ids = config['history_symbols']
symbols = config['history_symbols']
class SymbolIdInfo(object):
class SymbolInfo(object):
def __init__(self, symbol_id):
self.symbol_id = symbol_id
def __init__(self, symbol):
self.symbol = symbol
def normalize_price(self, price):
return (price - self.min) / (self.max - self.min)
@@ -43,7 +43,7 @@ def max(self):
@property
@memoize
def history(self):
return Prices.objects.filter(symbol_id=self.symbol_id)
return Prices.objects.filter(symbol=self.symbol)
@property
def normalized_price_history(self):
@@ -52,7 +52,7 @@ def normalized_price_history(self):
yield (price.price - self.min) / price_delta
class SymbolIdComparison(dict):
class SymbolComparison(dict):
def __init__(self, *args, **kwargs):
self.update(*args, **kwargs)
@@ -61,12 +61,12 @@ def __init__(self, *args, **kwargs):
@memoize
def start_date_indexes(self):
indexes = {}
for symbol_id, data in self.items():
for symbol, data in self.items():
try:
indexes[symbol_id] = [s.time for s in data.history].index(self.earliest_common_time)
indexes[symbol] = [s.time for s in data.history].index(self.earliest_common_time)
except ValueError:
raise ValueError('Could not find date {} in history of {}'.format(
self.earliest_common_time, symbol_id))
self.earliest_common_time, symbol))
return indexes
@property
@@ -76,7 +76,7 @@ def earliest_common_time(self):
def normalized_price_history_averages(self):
normalized_price_history_generators = []
for symbol_id, data in self.items():
for symbol, data in self.items():
normalized_price_history_generators.append(
data.normalized_price_history
)
@@ -93,18 +93,18 @@ def normalized_price_history_averages(self):
if __name__ == '__main__':
symbol_id_info = SymbolIdComparison()
for symbol_id in symbol_ids:
symbol_id_info[symbol_id] = SymbolIdInfo(symbol_id)
symbol_info = SymbolComparison()
for symbol in symbols:
symbol_info[symbol] = SymbolInfo(symbol)
# print('name\t\t\tmin\tmax\t\t\trange')
# for name, info in symbol_id_info.items():
# for name, info in symbol_info.items():
# print(name,
# info.min,
# info.max, '\t',
# info.date_range, sep='\t')
comparison = SymbolIdComparison(symbol_id_info)
comparison = SymbolComparison(symbol_info)
for p in comparison.normalized_price_history_averages():
print(p)
@@ -4,7 +4,7 @@
class Prices(models.Model):
symbol_id = models.CharField(db_index=True, max_length=100)
symbol = models.CharField(db_index=True, max_length=100)
time_period_start = models.CharField(max_length=100)
time_period_end = models.CharField(max_length=100)
time_open = models.CharField(max_length=100)
@@ -3,5 +3,5 @@
from . import views
urlpatterns = [
path('<str:symbol_id>/', views.index, name='index'),
path('<str:symbol>/', views.index, name='index'),
]
@@ -9,8 +9,8 @@
import svg_graph
def index(request, symbol_id):
prices = Prices.objects.filter(symbol_id=symbol_id)[:1000]
def index(request, symbol):
prices = Prices.objects.filter(symbol=symbol)[:1000]
def string_to_epoch(string):
return time.mktime(parse_dt(string).timetuple())
@@ -22,7 +22,7 @@ def prices_gen(prices):
return x
title = '{} from {} to {}'.format(
symbol_id,
symbol,
prices[0].time,
prices[len(prices)-1].time, # "negative indexing is not supported"
)

0 comments on commit c84a906

Please sign in to comment.