Skip to content

Commit

Permalink
Merge remote-tracking branch 'origin/master' into info_json
Browse files Browse the repository at this point in the history
  • Loading branch information
piotr1212 committed May 8, 2017
2 parents 959a5a8 + eafbc03 commit f9ff46e
Show file tree
Hide file tree
Showing 18 changed files with 356 additions and 210 deletions.
11 changes: 7 additions & 4 deletions bin/rrd2whisper.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
#!/usr/bin/env python

import errno
import os
import sys
import time
Expand Down Expand Up @@ -38,8 +39,9 @@
', '.join(aggregationMethods),
default='average',
type='string')
option_parser.add_option('--destinationPath',
help="Path to place created whisper file. Defaults to the " +
option_parser.add_option(
'--destinationPath',
help="Path to place created whisper file. Defaults to the " +
"RRD file's source path.",
default=None,
type='string')
Expand Down Expand Up @@ -115,10 +117,11 @@
if not os.path.isdir(destination_path):
try:
os.makedirs(destination_path)
except OSError as exc: # Python >2.5
except OSError as exc: # Python >2.5
if exc.errno == errno.EEXIST and os.path.isdir(destination_path):
pass
else: raise
else:
raise
rrd_file = os.path.basename(rrd_path).replace('.rrd', '%s.wsp' % suffix)
path = destination_path + '/' + rrd_file
else:
Expand Down
32 changes: 21 additions & 11 deletions bin/whisper-create.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,9 @@
except ImportError:
raise SystemExit('[ERROR] Please make sure whisper is installed properly')


def byte_format(num):
for x in ['bytes','KB','MB']:
for x in ['bytes', 'KB', 'MB']:
if num < 1024.0:
return "%.3f%s" % (num, x)
num /= 1024.0
Expand All @@ -22,7 +23,7 @@ def byte_format(num):
try:
signal.signal(signal.SIGPIPE, signal.SIG_DFL)
except AttributeError:
#OS=windows
# OS=windows
pass

option_parser = optparse.OptionParser(
Expand All @@ -38,12 +39,17 @@ def byte_format(num):
''')
option_parser.add_option('--xFilesFactor', default=0.5, type='float')
option_parser.add_option('--aggregationMethod', default='average',
type='string', help="Function to use when aggregating values (%s)" %
', '.join(whisper.aggregationMethods))
type='string',
help="Function to use when aggregating values (%s)" %
', '.join(whisper.aggregationMethods))
option_parser.add_option('--overwrite', default=False, action='store_true')
option_parser.add_option('--estimate', default=False, action='store_true', help="Don't create a whisper file, estimate storage requirements based on archive definitions")
option_parser.add_option('--sparse', default=False, action='store_true', help="Create new whisper as sparse file")
option_parser.add_option('--fallocate', default=False, action='store_true', help="Create new whisper and use fallocate")
option_parser.add_option('--estimate', default=False, action='store_true',
help="Don't create a whisper file, estimate storage "
"requirements based on archive definitions")
option_parser.add_option('--sparse', default=False, action='store_true',
help="Create new whisper as sparse file")
option_parser.add_option('--fallocate', default=False, action='store_true',
help="Create new whisper and use fallocate")

(options, args) = option_parser.parse_args()

Expand All @@ -63,9 +69,11 @@ def byte_format(num):

size = 16 + (archives * 12) + (total_points * 12)
disk_size = int(math.ceil(size / 4096.0) * 4096)
print("\nEstimated Whisper DB Size: %s (%s bytes on disk with 4k blocks)\n" % (byte_format(size), disk_size))
print("\nEstimated Whisper DB Size: %s (%s bytes on disk with 4k blocks)\n" %
(byte_format(size), disk_size))
for x in [1, 5, 10, 50, 100, 500]:
print("Estimated storage requirement for %sk metrics: %s" % (x, byte_format(x * 1000 * disk_size)))
print("Estimated storage requirement for %sk metrics: %s" %
(x, byte_format(x * 1000 * disk_size)))
sys.exit(0)

if len(args) < 2:
Expand All @@ -81,9 +89,11 @@ def byte_format(num):
os.unlink(path)

try:
whisper.create(path, archives, xFilesFactor=options.xFilesFactor, aggregationMethod=options.aggregationMethod, sparse=options.sparse, useFallocate=options.fallocate)
whisper.create(path, archives, xFilesFactor=options.xFilesFactor,
aggregationMethod=options.aggregationMethod, sparse=options.sparse,
useFallocate=options.fallocate)
except whisper.WhisperException as exc:
raise SystemExit('[ERROR] %s' % str(exc))

size = os.stat(path).st_size
print('Created: %s (%d bytes)' % (path,size))
print('Created: %s (%d bytes)' % (path, size))
58 changes: 35 additions & 23 deletions bin/whisper-diff.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
#!/usr/bin/python -tt

import sys
import time
import optparse
import json

Expand All @@ -20,7 +19,8 @@
option_parser.add_option('--no-headers', default=False, action='store_true',
help="do not print column headers")
option_parser.add_option('--until', default=None, type='int',
help="Unix epoch time of the end of your requested interval (default: None)")
help="Unix epoch time of the end of your requested "
"interval (default: None)")
option_parser.add_option('--json', default=False, action='store_true',
help="Output results in JSON form")

Expand All @@ -30,44 +30,47 @@
option_parser.print_help()
sys.exit(1)

# Positional arguments are the two whisper files to compare.
# args[0::1] is a full-list copy; unpacking enforces exactly two paths
# (a third path raises ValueError, matching historical behaviour).
(path_a, path_b) = args[0::1]

# --until is an optional end-of-interval bound (unix epoch seconds);
# None means "no upper bound".
if options.until:
    until_time = int(options.until)
else:
    until_time = None

def print_diffs(diffs,pretty=True,headers=True):


def print_diffs(diffs, pretty=True, headers=True):
    """Write every differing datapoint to stdout.

    diffs: iterable of (archive_index, points, total) triples, where
        points is a sequence of (timestamp, value_a, value_b) tuples
        and total is the number of datapoints examined in that archive.
    pretty: use fixed-width columns plus a per-archive sub-header.
    headers: emit a column-header line before any data.
    """
    if pretty:
        h = "%7s %11s %13s %13s\n"
        f = "%7s %11d %13s %13s\n"
    else:
        h = "%s %s %s %s\n"
        f = "%s %d %s %s\n"
    if headers:
        sys.stdout.write(h % ('archive', 'timestamp', 'value_a', 'value_b'))
    for archive, points, total in diffs:
        # (Removed a dead chained assignment `count = count = points.__len__()`
        # whose result was never used.)
        if pretty:
            sys.stdout.write('Archive %d (%d of %d datapoints differ)\n' %
                             (archive, len(points), total))
            sys.stdout.write(h % ('', 'timestamp', 'value_a', 'value_b'))
        for p in points:
            if pretty:
                # Pretty rows leave the archive column blank; the sub-header
                # above already identifies the archive.
                sys.stdout.write(f % ('', p[0], p[1], p[2]))
            else:
                sys.stdout.write(f % (archive, p[0], p[1], p[2]))


def print_summary(diffs,pretty=True,headers=True):
def print_summary(diffs, pretty=True, headers=True):
if pretty:
f = "%7s %9s %9s\n"
else:
f = "%s %s %s\n"
if headers:
sys.stdout.write(f%('archive','total','differing'))
sys.stdout.write(f % ('archive', 'total', 'differing'))
for archive, points, total in diffs:
sys.stdout.write(f%(archive,total,points.__len__()))
sys.stdout.write(f % (archive, total, points.__len__()))

def print_summary_json(diffs,path_a,path_b):

def print_summary_json(diffs, path_a, path_b):
print json.dumps({'path_a': path_a,
'path_b': path_b,
'archives': [{'archive': archive,
Expand All @@ -76,28 +79,37 @@ def print_summary_json(diffs,path_a,path_b):
for archive, points, total in diffs]},
sort_keys=True, indent=2, separators=(',', ' : '))

def print_diffs_json(diffs,path_a,path_b):

def print_diffs_json(diffs, path_a, path_b):
    """Dump every differing datapoint as a JSON document on stdout.

    diffs: iterable of (archive_index, points, total) triples, where
        points is a sequence of (timestamp, value_a, value_b) tuples.
    path_a, path_b: the two whisper file paths, echoed into the output.
    """
    # print() call form works on both Python 2 (parenthesized print
    # statement) and Python 3; the bare `print json.dumps(...)` statement
    # was Python-2-only syntax.
    print(json.dumps({'path_a': path_a,
                      'path_b': path_b,
                      'archives': [{'archive': archive,
                                    'total': total,
                                    'points': len(points),
                                    'datapoint': [{
                                        'timestamp': p[0],
                                        'value_a': p[1],
                                        'value_b': p[2]
                                    } for p in points]}
                                   for archive, points, total in diffs]},
                     sort_keys=True, indent=2, separators=(',', ' : ')))


def main():
    """Diff the two whisper files given on the command line and print the result.

    Dispatches on the --summary and --json flags to one of the four
    printer helpers; --columns and --no-headers control plain-text layout.
    Relies on module-level `options`, `path_a`, `path_b` and `until_time`
    set up by the option parsing above.
    """
    archive_diffs = whisper.diff(path_a, path_b,
                                 ignore_empty=options.ignore_empty,
                                 until_time=until_time)
    if options.summary:
        if options.json:
            print_summary_json(archive_diffs, path_a, path_b)
        else:
            print_summary(archive_diffs, pretty=(not options.columns),
                          headers=(not options.no_headers))
    else:
        if options.json:
            print_diffs_json(archive_diffs, path_a, path_b)
        else:
            print_diffs(archive_diffs, pretty=(not options.columns),
                        headers=(not options.no_headers))

# Entry point: run only when executed as a script, so the module can be
# imported without side effects.
if __name__ == "__main__":
    main()
42 changes: 27 additions & 15 deletions bin/whisper-dump.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,15 +26,18 @@
else:
path = args[0]


def mmap_file(filename):
    """Memory-map *filename* read-only and return the mmap object.

    The file descriptor is closed before returning (and also if mmap fails,
    e.g. on an empty file) — the mmap object keeps its own reference to the
    underlying file, so closing the fd is safe.
    """
    fd = os.open(filename, os.O_RDONLY)
    try:
        # Renamed from `map`, which shadowed the builtin.
        mapped = mmap.mmap(fd, os.fstat(fd).st_size, prot=mmap.PROT_READ)
    finally:
        os.close(fd)
    return mapped


def read_header(map):
try:
(aggregationType,maxRetention,xFilesFactor,archiveCount) = struct.unpack(whisper.metadataFormat,map[:whisper.metadataSize])
(aggregationType, maxRetention, xFilesFactor, archiveCount) \
= struct.unpack(whisper.metadataFormat, map[:whisper.metadataSize])
except:
raise whisper.CorruptWhisperFile("Unable to unpack header")

Expand All @@ -43,28 +46,32 @@ def read_header(map):

for i in xrange(archiveCount):
try:
(offset, secondsPerPoint, points) = struct.unpack(whisper.archiveInfoFormat, map[archiveOffset:archiveOffset+whisper.archiveInfoSize])
(offset, secondsPerPoint, points) = struct.unpack(
whisper.archiveInfoFormat,
map[archiveOffset:archiveOffset + whisper.archiveInfoSize]
)
except:
raise whisper.CorruptWhisperFile("Unable to read archive %d metadata" % i)

archiveInfo = {
'offset' : offset,
'secondsPerPoint' : secondsPerPoint,
'points' : points,
'retention' : secondsPerPoint * points,
'size' : points * whisper.pointSize,
'offset': offset,
'secondsPerPoint': secondsPerPoint,
'points': points,
'retention': secondsPerPoint * points,
'size': points * whisper.pointSize,
}
archives.append(archiveInfo)
archiveOffset += whisper.archiveInfoSize

header = {
'aggregationMethod' : whisper.aggregationTypeToMethod.get(aggregationType, 'average'),
'maxRetention' : maxRetention,
'xFilesFactor' : xFilesFactor,
'archives' : archives,
'aggregationMethod': whisper.aggregationTypeToMethod.get(aggregationType, 'average'),
'maxRetention': maxRetention,
'xFilesFactor': xFilesFactor,
'archives': archives,
}
return header


def dump_header(header):
print('Meta data:')
print(' aggregation method: %s' % header['aggregationMethod'])
Expand All @@ -73,8 +80,9 @@ def dump_header(header):
print("")
dump_archive_headers(header['archives'])


def dump_archive_headers(archives):
for i,archive in enumerate(archives):
for i, archive in enumerate(archives):
print('Archive %d info:' % i)
print(' offset: %d' % archive['offset'])
print(' seconds per point: %d' % archive['secondsPerPoint'])
Expand All @@ -83,12 +91,16 @@ def dump_archive_headers(archives):
print(' size: %d' % archive['size'])
print("")


def dump_archives(archives):
    """Print every (timestamp, value) datapoint stored in each archive.

    archives: list of archive-header dicts (as built by read_header) with
        'offset' (byte position of the archive's first point) and 'points'
        (number of points in the archive).
    Reads raw points from the module-level mmap'd file ``map`` using
    whisper's point format.
    """
    for i, archive in enumerate(archives):
        print('Archive %d data:' % i)
        offset = archive['offset']
        # range: identical iteration to the old xrange on py2, valid on py3.
        for point in range(archive['points']):
            (timestamp, value) = struct.unpack(
                whisper.pointFormat,
                map[offset:offset + whisper.pointSize]
            )
            print('%d: %d, %10.35g' % (point, timestamp, value))
            offset += whisper.pointSize
        # Blank line between archives. Was a bare `print` — a Python-2-only
        # statement that silently becomes a no-op expression on Python 3.
        print('')
Expand Down
Loading

0 comments on commit f9ff46e

Please sign in to comment.