Skip to content

Commit

Permalink
Fully working pure python aggregation algorithm with correct averaging of multiple datapoints in bucket
Browse files Browse the repository at this point in the history
  • Loading branch information
Panos committed Jul 22, 2015
1 parent 7963e7e commit a4377aa
Show file tree
Hide file tree
Showing 3 changed files with 16 additions and 19 deletions.
23 changes: 12 additions & 11 deletions graphite_influxdb.py
Expand Up @@ -220,16 +220,12 @@ def _fill_end_gaps(data, index, target_end_time, step):
while step_end <= target_end_time:
from pprint import pprint
# import ipdb; ipdb.set_trace()
vals = []
try:
while (data[index][0] + datetime.timedelta(seconds=step/2.0)) >= step_end \
> (data[index+1][0] - datetime.timedelta(seconds=step/2.0)):
val1 = data[index][1] if data[index][1] else 0
val = (val1 + data[index+1][1])/2.0
del data[index+1]
while (step_end - datetime.timedelta(seconds=step/2.0)) <= \
data[index][0] <= (step_end + datetime.timedelta(seconds=step/2.0)):
vals.append(data[index][1])
del data[index]
data.insert(index, (step_end, val))
index -= 1
continue
else:
if data[index][0] < step_end:
index += 1
Expand All @@ -239,12 +235,17 @@ def _fill_end_gaps(data, index, target_end_time, step):
index += 1
step_end += datetime.timedelta(seconds=step)
continue
data.insert(index, (step_end, None))
if not vals:
data.insert(index, (step_end, None))
if vals:
data.insert(index, (step_end, sum(vals)/len(vals)))
except IndexError:
# import ipdb; ipdb.set_trace()
if index < len(data) and ((data[index][0] - datetime.timedelta(seconds=step/2)) <= step_end):
break
data.insert(index, (step_end, None))
if vals:
data.insert(index, (step_end, sum(vals)/len(vals)))
else:
data.insert(index, (step_end, None))
step_end += datetime.timedelta(seconds=step)
index += 1

Expand Down
1 change: 0 additions & 1 deletion requirements.txt
@@ -1,3 +1,2 @@
influxdb>=2.6.0
graphite-api
pandas
11 changes: 4 additions & 7 deletions tests/test_graphite_influxdb.py
Expand Up @@ -2,6 +2,7 @@
import graphite_influxdb
import datetime
import random
from pprint import pprint

class GraphiteInfluxdbTestCase(unittest.TestCase):

Expand All @@ -10,23 +11,20 @@ def setUp(self):
self.reader = graphite_influxdb.InfluxdbReader(None, None, self.step, None)
self.start_time, self.end_time, self.series_name = datetime.datetime.now(), \
datetime.datetime.now() + datetime.timedelta(hours=1), 'my_series'
self.steps = int(round((int(self.end_time.strftime("%s")) - int(self.start_time.strftime("%s"))) * 1.0 / self.step)) + 1
self.steps = int(round((int(self.end_time.strftime("%s")) - int(self.start_time.strftime("%s"))) * 1.0 / self.step))
self.datapoints = [(self.start_time + datetime.timedelta(minutes=20), random.randint(1,5)),
# Two points in same step
# Multiple points in same step
(self.start_time + datetime.timedelta(minutes=40), random.randint(1,5)),
(self.start_time + datetime.timedelta(minutes=40, seconds=30), random.randint(1,5)),
(self.start_time + datetime.timedelta(minutes=40, seconds=40), random.randint(1,5)),
(self.start_time + datetime.timedelta(minutes=59), random.randint(1,5)),
]

def test_fix_datapoints(self):
"""Test that filling datapoints gives expected results"""
# self.datapoints = self.reader.fix_datapoints(self.datapoints,
# int(self.start_time.strftime("%s")),
# int(self.end_time.strftime("%s")), self.step, self.series_name)
self.reader.fix_datapoints(self.datapoints,
int(self.start_time.strftime("%s")),
int(self.end_time.strftime("%s")), self.step, self.series_name)
# import ipdb; ipdb.set_trace()
self.assertTrue(self.steps == len(self.datapoints),
msg="Expected %s datapoints, got %s instead" % (
self.steps, len(self.datapoints),))
Expand All @@ -40,7 +38,6 @@ def test_fix_datapoints_multi(self):
int(self.start_time.strftime("%s")),
int(self.end_time.strftime("%s")), self.step)
for series_name in data:
# import ipdb; ipdb.set_trace()
self.assertTrue(self.steps == len(list(data[series_name])),
msg="Expected %s datapoints, got %s instead" % (
self.steps, len(list(data[series_name])),))

0 comments on commit a4377aa

Please sign in to comment.