
Merge pull request #11 from kolomenkin/r_speedtest
add speedtest script
siddhantgoel committed May 20, 2018
2 parents 57abec0 + 90b201a commit 6425e15
Showing 2 changed files with 111 additions and 0 deletions.
1 change: 1 addition & 0 deletions requirements.dev.txt
@@ -7,3 +7,4 @@ sphinx-autobuild==0.6.0
 sphinx-rtd-theme==0.2.4
 sphinxcontrib-websupport==1.0.1
 twine==1.8.1
+numpy==1.11.3
110 changes: 110 additions & 0 deletions utils/speedtest.py
@@ -0,0 +1,110 @@
from io import BytesIO
from numpy import random
from requests_toolbelt import MultipartEncoder
from streaming_form_data.parser import StreamingFormDataParser
from streaming_form_data.targets import NullTarget, BaseTarget
from time import time


class DummyTarget(BaseTarget):
    """Target that optionally logs every chunk and/or collects the decoded bytes."""

    def __init__(self, print_report, gather_data):
        super().__init__()
        self.print_report = print_report
        self.gather_data = gather_data
        self.result = bytearray()

    def start(self):
        if self.print_report:
            print('DummyTarget: start')

    def data_received(self, chunk):
        if self.print_report:
            print('DummyTarget: data_received:', len(chunk), 'bytes')
        if self.gather_data:
            self.result += chunk

    def finish(self):
        if self.print_report:
            print('DummyTarget: finish')

    def get_result(self):
        return self.result


def fill_bytes_random_fast(size):
    # Fixed seed: every run generates (and parses) the same pseudo-random payload.
    random.seed(42)
    return random.bytes(size)


def main():
    print('Prepare data...')
    begin_time = time()

    kibibyte = 1024
    mebibyte = kibibyte * kibibyte
    filedata_size = 40 * mebibyte

    filedata = fill_bytes_random_fast(filedata_size)
    # print('data sample:', filedata[0:100])

    with BytesIO(filedata) as fd:
        content_type = 'binary/octet-stream'

        encoder = MultipartEncoder(fields={
            'file': ('file', fd, content_type)
        })
        headers = {'Content-Type': encoder.content_type}
        body = encoder.to_string()

    print_report = False
    gather_data = False
    if not gather_data:
        filedata = None  # free memory
    target = DummyTarget(print_report=print_report, gather_data=gather_data)

    parser = StreamingFormDataParser(headers)
    # Only 'file' is present in the generated body; 'name' and 'lines' stay empty.
    parser.register('name', NullTarget())
    parser.register('lines', NullTarget())
    parser.register('file', target)

    default_chunk_size = 32 * kibibyte
    position = 0
    body_length = len(body)
    remaining = body_length

    end_time = time()
    print('Data prepared')
    time_diff = end_time - begin_time
    print('Preparation took: %.3f sec; speed: %.3f MiB/s; body size: %.3f MiB' %
          (time_diff,
           (body_length / time_diff / mebibyte if time_diff > 0 else 0),
           body_length / mebibyte))

    print('Begin test...')

    begin_time = time()
    # Feed the body to the parser in fixed-size chunks, as a server would receive it.
    while remaining > 0:
        chunk_size = min(default_chunk_size, remaining)
        parser.data_received(body[position: position + chunk_size])
        remaining -= chunk_size
        position += chunk_size
    end_time = time()

    print('End test')

    if gather_data:
        result = target.get_result()
        if result != filedata:
            print('-------------------------------------------')
            print('ERROR! Decoded data mismatch! Orig size: ',
                  len(filedata), '; got size:', len(result))
            print('-------------------------------------------')

    time_diff = end_time - begin_time
    print('Test took: %.3f sec; speed: %.3f MiB/s; body size: %.3f MiB' %
          (time_diff,
           (body_length / time_diff / mebibyte if time_diff > 0 else 0),
           body_length / mebibyte))


if __name__ == '__main__':
    main()
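
For context, the chunked loop in the script mirrors how the parser is normally driven inside an upload handler: body chunks arrive from the network and are handed to data_received() one at a time. A minimal sketch along those lines, using only the calls already exercised above; headers and read_chunk() are placeholders for the incoming request's headers (which must include the multipart Content-Type) and its body stream:

from streaming_form_data.parser import StreamingFormDataParser
from streaming_form_data.targets import NullTarget

parser = StreamingFormDataParser(headers)
parser.register('file', NullTarget())  # swap in a real target to keep the bytes

while True:
    chunk = read_chunk()  # hypothetical source of raw request-body chunks
    if not chunk:
        break
    parser.data_received(chunk)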
