#!/usr/bin/env python
"""
Really simple rudimentary benchmark to compare ConnectionPool versus standard
urllib to demonstrate the usefulness of connection re-using.
"""
from __future__ import print_function

import sys
import time
import urllib

import urllib3

# URLs to download. Doesn't matter as long as they're from the same host, so we
# can take advantage of connection re-using.
TO_DOWNLOAD = [
    'http://code.google.com/apis/apps/',
    'http://code.google.com/apis/base/',
    'http://code.google.com/apis/blogger/',
    'http://code.google.com/apis/calendar/',
]
def urllib_get(url_list):
    """Download every URL in *url_list* with plain urllib.

    Each request opens a fresh connection — this is the no-pooling
    baseline the pool_get() timings are compared against.  Prints the
    per-request wall-clock time.

    :param url_list: non-empty iterable of URL strings.
    """
    assert url_list
    for url in url_list:
        now = time.time()
        # NOTE(review): urllib.urlopen is the Python 2 API
        # (urllib.request.urlopen on Python 3); kept as-is since the file
        # targets Python 2 (from __future__ import print_function era).
        urllib.urlopen(url)
        elapsed = time.time() - now
        # "s" unit suffix added so the output matches pool_get's format.
        print("Got in %0.3fs: %s" % (elapsed, url))
def pool_get(url_list):
    """Download every URL in *url_list* through one urllib3 PoolManager.

    A single manager is created up front so connections to the same host
    are re-used across requests.  Prints the per-request wall-clock time.

    :param url_list: non-empty iterable of URL strings.
    """
    assert url_list
    manager = urllib3.PoolManager()
    for target in url_list:
        started = time.time()
        response = manager.request('GET', target, assert_same_host=False)
        took = time.time() - started
        print("Got in %0.3fs: %s" % (took, target))
if __name__ == '__main__':
    # Same-host URLs so the pooled run can actually benefit from
    # connection re-use (any reachable same-host URLs would do).
    urls = [
        'http://code.google.com/apis/apps/',
        'http://code.google.com/apis/base/',
        'http://code.google.com/apis/blogger/',
        'http://code.google.com/apis/calendar/',
    ]

    print("Running pool_get ...")
    now = time.time()
    # The actual benchmark call was missing: without it, pool_elapsed
    # only measured the time between two adjacent time.time() calls.
    pool_get(urls)
    pool_elapsed = time.time() - now

    print("Running urllib_get ...")
    now = time.time()
    urllib_get(urls)
    urllib_elapsed = time.time() - now

    print("Completed pool_get in %0.3fs" % pool_elapsed)
    print("Completed urllib_get in %0.3fs" % urllib_elapsed)
# Example results:
#
# Completed pool_get in 1.163s
# Completed urllib_get in 2.318s