26
26
def get_device_id() -> str:
    """Return the unique device ID, generating and persisting one on first use.

    The ID is stored in the aw-server data directory so it stays stable
    across server restarts.
    """
    path = Path(get_data_dir("aw-server")) / "device_id"
    if path.exists():
        # Reuse the previously generated ID (returned verbatim, including
        # any trailing whitespace, to match what was written).
        return path.read_text()
    # First run: generate a fresh UUID and persist it for future calls.
    # (Named device_id rather than `uuid` to avoid shadowing the uuid module.)
    device_id = str(uuid4())
    path.write_text(device_id)
    return device_id
36
36
def check_bucket_exists(f):
    """Decorator for API methods taking a bucket_id as their first argument.

    Raises NotFound("NoSuchBucket", ...) before calling the wrapped method
    when the bucket does not exist in the database.
    """

    @functools.wraps(f)
    def g(self, bucket_id, *args, **kwargs):
        # Guard clause: reject unknown buckets up front.
        if bucket_id not in self.db.buckets():
            raise NotFound(
                "NoSuchBucket", "There's no bucket named {}".format(bucket_id)
            )
        return f(self, bucket_id, *args, **kwargs)

    return g
45
48
46
49
@@ -53,10 +56,10 @@ def __init__(self, db, testing) -> None:
53
56
def get_info (self ) -> Dict [str , Dict ]:
54
57
"""Get server info"""
55
58
payload = {
56
- ' hostname' : gethostname (),
57
- ' version' : __version__ ,
58
- ' testing' : self .testing ,
59
- ' device_id' : get_device_id (),
59
+ " hostname" : gethostname (),
60
+ " version" : __version__ ,
61
+ " testing" : self .testing ,
62
+ " device_id" : get_device_id (),
60
63
}
61
64
return payload
62
65
@@ -106,18 +109,29 @@ def import_bucket(self, bucket_data: Any):
106
109
type = bucket_data ["type" ],
107
110
client = bucket_data ["client" ],
108
111
hostname = bucket_data ["hostname" ],
109
- created = (bucket_data ["created" ]
110
- if isinstance (bucket_data ["created" ], datetime )
111
- else iso8601 .parse_date (bucket_data ["created" ])),
112
+ created = (
113
+ bucket_data ["created" ]
114
+ if isinstance (bucket_data ["created" ], datetime )
115
+ else iso8601 .parse_date (bucket_data ["created" ])
116
+ ),
117
+ )
118
+ self .create_events (
119
+ bucket_id ,
120
+ [Event (** e ) if isinstance (e , dict ) else e for e in bucket_data ["events" ]],
112
121
)
113
- self .create_events (bucket_id , [Event (** e ) if isinstance (e , dict ) else e for e in bucket_data ["events" ]])
114
122
115
123
def import_all (self , buckets : Dict [str , Any ]):
116
124
for bid , bucket in buckets .items ():
117
125
self .import_bucket (bucket )
118
126
119
- def create_bucket (self , bucket_id : str , event_type : str , client : str ,
120
- hostname : str , created : Optional [datetime ] = None )-> bool :
127
+ def create_bucket (
128
+ self ,
129
+ bucket_id : str ,
130
+ event_type : str ,
131
+ client : str ,
132
+ hostname : str ,
133
+ created : Optional [datetime ] = None ,
134
+ ) -> bool :
121
135
"""
122
136
Create bucket.
123
137
Returns True if successful, otherwise false if a bucket with the given ID already existed.
@@ -131,7 +145,7 @@ def create_bucket(self, bucket_id: str, event_type: str, client: str,
131
145
type = event_type ,
132
146
client = client ,
133
147
hostname = hostname ,
134
- created = created
148
+ created = created ,
135
149
)
136
150
return True
137
151
@@ -143,14 +157,20 @@ def delete_bucket(self, bucket_id: str) -> None:
143
157
return None
144
158
145
159
@check_bucket_exists
146
- def get_events (self , bucket_id : str , limit : int = - 1 ,
147
- start : datetime = None , end : datetime = None ) -> List [Event ]:
160
+ def get_events (
161
+ self ,
162
+ bucket_id : str ,
163
+ limit : int = - 1 ,
164
+ start : datetime = None ,
165
+ end : datetime = None ,
166
+ ) -> List [Event ]:
148
167
"""Get events from a bucket"""
149
168
logger .debug ("Received get request for events in bucket '{}'" .format (bucket_id ))
150
169
if limit is None : # Let limit = None also mean "no limit"
151
170
limit = - 1
152
- events = [event .to_json_dict () for event in
153
- self .db [bucket_id ].get (limit , start , end )]
171
+ events = [
172
+ event .to_json_dict () for event in self .db [bucket_id ].get (limit , start , end )
173
+ ]
154
174
return events
155
175
156
176
@check_bucket_exists
@@ -161,10 +181,13 @@ def create_events(self, bucket_id: str, events: List[Event]) -> Optional[Event]:
161
181
return self .db [bucket_id ].insert (events [0 ] if len (events ) == 1 else events )
162
182
163
183
@check_bucket_exists
164
- def get_eventcount (self , bucket_id : str ,
165
- start : datetime = None , end : datetime = None ) -> int :
184
+ def get_eventcount (
185
+ self , bucket_id : str , start : datetime = None , end : datetime = None
186
+ ) -> int :
166
187
"""Get eventcount from a bucket"""
167
- logger .debug ("Received get request for eventcount in bucket '{}'" .format (bucket_id ))
188
+ logger .debug (
189
+ "Received get request for eventcount in bucket '{}'" .format (bucket_id )
190
+ )
168
191
return self .db [bucket_id ].get_eventcount (start , end )
169
192
170
193
@check_bucket_exists
@@ -195,8 +218,15 @@ def heartbeat(self, bucket_id: str, heartbeat: Event, pulsetime: float) -> Event
195
218
196
219
Inspired by: https://wakatime.com/developers#heartbeats
197
220
"""
198
- logger .debug ("Received heartbeat in bucket '{}'\n \t timestamp: {}, duration: {}, pulsetime: {}\n \t data: {}" .format (
199
- bucket_id , heartbeat .timestamp , heartbeat .duration , pulsetime , heartbeat .data ))
221
+ logger .debug (
222
+ "Received heartbeat in bucket '{}'\n \t timestamp: {}, duration: {}, pulsetime: {}\n \t data: {}" .format (
223
+ bucket_id ,
224
+ heartbeat .timestamp ,
225
+ heartbeat .duration ,
226
+ pulsetime ,
227
+ heartbeat .data ,
228
+ )
229
+ )
200
230
201
231
# The endtime here is set such that in the event that the heartbeat is older than an
202
232
# existing event we should try to merge it with the last event before the heartbeat instead.
@@ -220,16 +250,32 @@ def heartbeat(self, bucket_id: str, heartbeat: Event, pulsetime: float) -> Event
220
250
merged = heartbeat_merge (last_event , heartbeat , pulsetime )
221
251
if merged is not None :
222
252
# Heartbeat was merged into last_event
223
- logger .debug ("Received valid heartbeat, merging. (bucket: {})" .format (bucket_id ))
253
+ logger .debug (
254
+ "Received valid heartbeat, merging. (bucket: {})" .format (
255
+ bucket_id
256
+ )
257
+ )
224
258
self .last_event [bucket_id ] = merged
225
259
self .db [bucket_id ].replace_last (merged )
226
260
return merged
227
261
else :
228
- logger .info ("Received heartbeat after pulse window, inserting as new event. (bucket: {})" .format (bucket_id ))
262
+ logger .info (
263
+ "Received heartbeat after pulse window, inserting as new event. (bucket: {})" .format (
264
+ bucket_id
265
+ )
266
+ )
229
267
else :
230
- logger .debug ("Received heartbeat with differing data, inserting as new event. (bucket: {})" .format (bucket_id ))
268
+ logger .debug (
269
+ "Received heartbeat with differing data, inserting as new event. (bucket: {})" .format (
270
+ bucket_id
271
+ )
272
+ )
231
273
else :
232
- logger .info ("Received heartbeat, but bucket was previously empty, inserting as new event. (bucket: {})" .format (bucket_id ))
274
+ logger .info (
275
+ "Received heartbeat, but bucket was previously empty, inserting as new event. (bucket: {})" .format (
276
+ bucket_id
277
+ )
278
+ )
233
279
234
280
self .db [bucket_id ].insert (heartbeat )
235
281
self .last_event [bucket_id ] = heartbeat
@@ -238,7 +284,9 @@ def heartbeat(self, bucket_id: str, heartbeat: Event, pulsetime: float) -> Event
238
284
def query2 (self , name , query , timeperiods , cache ):
239
285
result = []
240
286
for timeperiod in timeperiods :
241
- period = timeperiod .split ("/" )[:2 ] # iso8601 timeperiods are separated by a slash
287
+ period = timeperiod .split ("/" )[
288
+ :2
289
+ ] # iso8601 timeperiods are separated by a slash
242
290
starttime = iso8601 .parse_date (period [0 ])
243
291
endtime = iso8601 .parse_date (period [1 ])
244
292
query = str ().join (query )
@@ -249,7 +297,7 @@ def query2(self, name, query, timeperiods, cache):
249
297
def get_log (self ):
250
298
"""Get the server log in json format"""
251
299
payload = []
252
- with open (get_log_file_path (), 'r' ) as log_file :
300
+ with open (get_log_file_path (), "r" ) as log_file :
253
301
for line in log_file .readlines ()[::- 1 ]:
254
302
payload .append (json .loads (line ))
255
303
return payload , 200
0 commit comments