-
Notifications
You must be signed in to change notification settings - Fork 1
/
orm.py
205 lines (168 loc) · 6.64 KB
/
orm.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
# -*- coding: utf-8 -*-
import shutil
from datetime import datetime
from pathlib import Path
from peewee import AutoField
from peewee import SqliteDatabase
from peewee import Model
from peewee import IntegerField
from peewee import FloatField
from peewee import TimestampField
from peewee import ForeignKeyField
from peewee import CharField
from peewee import OperationalError
from peewee_moves import DatabaseManager
from trendlines import logger
from trendlines import utils
from trendlines.__about__ import __project_url__
# SQLite PRAGMA settings applied to every connection (passed to
# ``db.init(..., pragmas=DB_OPTS)`` in :func:`create_db`).
DB_OPTS = {
    'journal_mode': 'wal',                # write-ahead logging for better concurrency
    'cache_size': -1 * 64000,  # 64MB    (negative value means "in KiB" to SQLite)
    'foreign_keys': 1,                    # enforce FK constraints (off by default in SQLite)
    'ignore_check_constraints': 0,        # keep CHECK constraints active
    'synchronous': 0,                     # OFF: faster writes, less durable on power loss
}
# Deferred-initialization database: the file path and pragmas are supplied
# later by ``db.init(...)`` inside :func:`create_db`.
db = SqliteDatabase(None)
class BaseModel(Model):
    """
    Common base class for every model in this module.

    Binds all subclasses to the single module-level ``db`` object, which
    is initialized (pointed at an actual file) later in :func:`create_db`.
    """
    class Meta(object):
        database = db
class DataModel(BaseModel):
    """
    Model for storing data points.

    I've kept this broken out into a separate class because there may be
    a point in the future where I want to switch to one-file-per-metric
    and having a separate DataModel class will make this easier.
    """
    # NOTE: the docstring is a sufficient class body; the redundant
    # ``pass`` statement was removed.
class InternalModel(BaseModel):
    """
    Model for internal data tables.

    *IF* we were to move to one-file-per-metric, then this model would
    hold all of the non-dynamically-generated tables.
    """
    # NOTE: the docstring is a sufficient class body; the redundant
    # ``pass`` statement was removed.
class Metric(InternalModel):
    """
    Table holding all of the Metric information.

    ``name`` must be unique; ``units`` and the two limit fields are
    optional (nullable).
    """
    metric_id = AutoField()
    name = CharField(max_length=120, unique=True)
    units = CharField(max_length=24, null=True)
    upper_limit = FloatField(null=True)
    lower_limit = FloatField(null=True)

    def __repr__(self):
        return "<Metric: {id}, {name}, units={units}>".format(
            id=self.metric_id,
            name=self.name,
            units=self.units,
        )

    def __str__(self):
        return self.__repr__()
class DataPoint(DataModel):
    """
    Table holding all of the data points.

    The ``timestamp`` field stores values as UTC but queries return
    naive :class:`datetime.datetime` objects (no timezone info).
    """
    datapoint_id = AutoField()
    metric = ForeignKeyField(Metric, backref="datapoints",
                             on_delete="CASCADE")
    value = FloatField()
    timestamp = TimestampField(utc=True)

    def __repr__(self):
        return "<DataPoint: {id}, {metric}, {value}, {timestamp}>".format(
            id=self.datapoint_id,
            metric=self.metric.name,
            value=self.value,
            timestamp=self.timestamp,
        )

    def __str__(self):
        return self.__repr__()
def create_db(name):
    """
    Create the database and the tables.

    Applies any missing migrations. Does nothing if all migrations
    have been applied. When the file already exists, a backup is made
    *before* migrations run so a failed upgrade can be rolled back.

    Parameters
    ----------
    name : str
        The name/path of the database, as given by ``app.config['DATABASE']``.

    Raises
    ------
    peewee.OperationalError
        If the database file cannot be created or opened.
    PermissionError
        If no writable migrations directory can be found.
    """
    # Convert to a Path object because I like working with those better.
    full_path = Path(name).resolve()
    file_exists = full_path.exists()
    if file_exists:
        logger.debug("Connecting to existing database: '%s'." % full_path)
    else:
        logger.debug("Creating new database: '%s'" % full_path)

    db.init(str(full_path), pragmas=DB_OPTS)
    try:
        # This will create the file if it doesn't exist.
        db.connect()
    except OperationalError:
        # Try to figure out why OperationalError happened.
        if file_exists:
            msg = ("Database file %s exists, but we're unable to connect."
                   " Perhaps the permissions are incorrect?")
            logger.error(msg % full_path)
        else:
            msg = ("Unable to create %s. Perhaps the parent folder is missing"
                   " or permissions are incorrect?")
            logger.error(msg % full_path)
        logger.error("Unable to create/open database file '%s'" % full_path)
        raise

    # Either way, we want to run migrations. However, we only need to make a
    # backup if the file already exists.
    if file_exists:
        # Create a backup before doing anything.
        backup_file = utils.backup_file(full_path)
        logger.debug("Created database backup file: {}".format(backup_file))

    # This will edit the database file, creating the `migration_history`
    # table if needed. Hence why we do it *after* the backup.
    try:
        manager = DatabaseManager(db)
    except PermissionError:
        # When running in the docker container, this will attempt to create
        # a `/migrations` directory. It should be `/trendlines/migrations`.
        # If the fallback also fails, the error propagates on its own;
        # the ``finally`` clause guarantees the db connection is closed.
        # (A bare ``except PermissionError: raise`` here would be redundant.)
        try:
            msg = "Failed to open default migration directory, trying '%s'"
            alt_dir = "/trendlines/migrations"
            logger.debug(msg % alt_dir)
            manager = DatabaseManager(db, directory=alt_dir)
            logger.debug("Success")
        finally:
            db.close()

    try:
        # Check the status. Creating a new file means we'll need migrations.
        # However, we don't need to check for that because it's guaranteed
        # that a new file will have len(manager.diff) > 0
        needs_migrations = len(manager.diff) > 0
        if needs_migrations:
            logger.info("Missing migrations: {}".format(manager.diff))
            # Apply the migrations
            success = manager.upgrade()
            if success:
                logger.info("Successfully applied database migrations.")
            elif file_exists:
                # revert our changes by restoring the backup
                msg = ("Failed to apply database migrations. Reverting to backup"
                       " file. Please submit an issue at {} with details.")
                logger.critical(msg.format(__project_url__))
                shutil.copy(str(backup_file), str(full_path))
            else:
                # It's a new file, so no backup was made.
                msg = ("Failed to apply database migrations to the new file."
                       " Please see the logs for more info.")
                logger.critical(msg)
        else:
            logger.info("Database is up to date. No migrations to apply.")
            # Since we didn't make any changes, we can remove the backup file.
            # Not applying migrations implies the file already existed, and an
            # existing file implies a backup was made, so FileNotFoundError
            # cannot happen here.
            backup_file.unlink()
            logger.debug("Removed superfluous backup file: %s" % backup_file)
    finally:
        # Always close the connection, even if checking/applying migrations
        # raised — the original only closed on the success path, leaking the
        # connection on error. Closing an already-closed peewee database is
        # a harmless no-op, so the alt-dir path above stays correct.
        db.close()