
Add timeouts to urllib2 calls, otherwise we run into situations where gmond is hosed
1 parent 4828cb4 · commit 246721e429d3883a3f844e149748c35d7517be0a · vvuksan committed Nov 7, 2012
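For context: the pattern applied throughout this commit relies on the timeout parameter that `urllib2.urlopen` gained in Python 2.6. The second positional argument is the POST `data` (left as `None` for a plain GET) and the third is a socket timeout in seconds. A minimal sketch of the resulting behaviour, using a hypothetical status URL:

```python
import urllib2

# Hypothetical endpoint, for illustration only.
STATUS_URL = "http://localhost/server-status?auto"

req = urllib2.Request(STATUS_URL)
# data=None keeps this a GET; 2 is the timeout in seconds.
# If the server stops answering, the call now fails fast with
# urllib2.URLError instead of blocking the gmond metric thread forever.
res = urllib2.urlopen(req, None, 2)
body = res.read()
res.close()
```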
4 apache_status/python_modules/apache_status.py
@@ -68,7 +68,7 @@ def get_metrics():
req = urllib2.Request(SERVER_STATUS_URL + "?auto")
# Download the status file
- res = urllib2.urlopen(req)
+ res = urllib2.urlopen(req, None, 2)
for line in res:
split_line = line.rstrip().split(": ")
@@ -93,7 +93,7 @@ def get_metrics():
req2 = urllib2.Request(SERVER_STATUS_URL)
# Download the status file
- res = urllib2.urlopen(req2)
+ res = urllib2.urlopen(req2, None, 2)
for line in res:
regMatch = SSL_REGEX.match(line)
3 couchdb/python_modules/couchdb.py
@@ -54,7 +54,8 @@ def _get_couchdb_stats(url, refresh_rate):
logging.warning('The specified refresh_rate of %d is invalid and has been substituted with 60!' % refresh_rate)
url += '?range=60'
- c = urllib2.urlopen(url)
+ # Set time out for urlopen to 2 seconds otherwise we run into the possibility of hosing gmond
+ c = urllib2.urlopen(url, None, 2)
json_data = c.read()
c.close()
6 elasticsearch/python_modules/elasticsearch.py
@@ -127,7 +127,7 @@ def update_result(result, url):
diff = now - last_update
if diff > 20:
print '[elasticsearch] ' + str(diff) + ' seconds passed - Fetching ' + url
- result = json.load(urllib.urlopen(url))
+ result = json.load(urllib.urlopen(url, None, 2))
last_update = now
return result
@@ -199,7 +199,7 @@ def metric_init(params):
# First iteration - Grab statistics
print('[elasticsearch] Fetching ' + url_cluster)
- result = json.load(urllib.urlopen(url_cluster))
+ result = json.load(urllib.urlopen(url_cluster, None, 2))
metric_group = params.get('metric_group', 'elasticsearch')
@@ -220,7 +220,7 @@ def metric_init(params):
url_indices = '{0}{1}/_stats'.format(host, index)
print('[elasticsearch] Fetching ' + url_indices)
- r_indices = json.load(urllib.urlopen(url_indices))
+ r_indices = json.load(urllib.urlopen(url_indices, None, 2))
descriptors += get_indices_descriptors(index,
Desc_Skel,
r_indices,
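A caveat worth flagging: `elasticsearch.py` (like `httpd.py` and `rabbitmq.py` below) calls `urllib.urlopen`, not `urllib2.urlopen`. In Python 2 the `urllib` version has the signature `urlopen(url, data=None, proxies=None)` — there is no timeout parameter, so the `2` lands in the `proxies` argument, where a non-mapping value is rejected by urllib rather than interpreted as a timeout. A sketch of two ways these call sites could get a real 2-second timeout:

```python
import json
import socket
import urllib2

# Hypothetical URL, for illustration only.
url = "http://localhost:9200/_cluster/health"

# Option 1: switch to urllib2, whose third positional argument
# really is a timeout (Python 2.6+).
result = json.load(urllib2.urlopen(url, None, 2))

# Option 2: keep urllib, but set a process-wide default socket
# timeout first. Note this affects every socket in the process.
socket.setdefaulttimeout(2)
```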
2 httpd/python_modules/httpd.py
@@ -74,7 +74,7 @@ def update_stats():
try:
httpd_stats = {}
logging.debug(' opening URL: ' + str(STATUS_URL))
- f = urllib.urlopen(STATUS_URL)
+ f = urllib.urlopen(STATUS_URL, None, 2)
for line in f.readlines():
diff = False
2 jenkins/python_modules/jenkins.py
@@ -52,7 +52,7 @@ def _get_jenkins_statistics(url):
url += '/api/json'
url += '?tree=jobs[color],overallLoad[busyExecutors[min[latest]],queueLength[min[latest]],totalExecutors[min[latest]]]'
- c = urllib2.urlopen(url)
+ c = urllib2.urlopen(url, None, 2)
json_data = c.read()
c.close()
5 network/netstats/conf.d/netstats.pyconf
@@ -26,6 +26,11 @@ collection_group {
value_threshold = 1.0
}
metric {
+ name_match = "icmpmsg_(.+)"
+ value_threshold = 1.0
+ }
+
+ metric {
name_match = "icmp_(.+)"
value_threshold = 1.0
}
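Why a separate block: gmond's `name_match` applies a regex to the metric name, and `icmp_(.+)` can never match the `icmpmsg_*` counters (those names contain no `icmp_` substring), so without this addition they would not be collected. A quick check with a representative metric name:

```python
import re

# "icmpmsg_intype8" is a representative name; the exact counters
# depend on what the netstats module reads from /proc/net/snmp.
assert re.match("icmp_(.+)", "icmpmsg_intype8") is None
assert re.match("icmpmsg_(.+)", "icmpmsg_intype8") is not None
```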
2 nginx_status/python_modules/nginx_status.py
@@ -49,7 +49,7 @@ def run(self):
@staticmethod
def _get_nginx_status_stub_response(url):
- c = urllib2.urlopen(url)
+ c = urllib2.urlopen(url, None, 2)
data = c.read()
c.close()
2 rabbit/python_modules/rabbitmq.py
@@ -99,7 +99,7 @@ def refreshStats(stats = ('nodes', 'queues'), vhosts = ['/']):
result_dict = {}
urlstring = url_template.safe_substitute(stats = stat, vhost = vhost)
print urlstring
- result = json.load(urllib.urlopen(urlstring))
+ result = json.load(urllib.urlopen(urlstring, None, 2))
# Rearrange results so entry is held in a dict keyed by name - queue name, host name, etc.
if stat in ("queues", "nodes", "exchanges"):
for entry in result:
2 riak/riak.py
@@ -63,7 +63,7 @@ def run(self):
def update_metric(self):
try:
req = urllib2.Request(url = self.url)
- res = urllib2.urlopen(req)
+ res = urllib2.urlopen(req, None, 2)
stats = res.read()
dprint("%s", stats)
json_stats = json.loads(stats)
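One follow-on consideration, not part of this commit: with a timeout in place, a stalled endpoint now raises instead of hanging, so call sites that are not already inside a try/except (riak.py's is, per the hunk above) will lose the collection cycle to an uncaught exception. A minimal sketch of the handling, assuming urllib2 and the same 2-second budget:

```python
import socket
import urllib2

def fetch_status(url):
    """Fetch a status page with a 2-second timeout; return None on failure."""
    try:
        res = urllib2.urlopen(url, None, 2)
        try:
            return res.read()
        finally:
            res.close()
    except urllib2.URLError:
        # Connect timeouts surface as URLError (reason: socket.timeout),
        # as do DNS failures and refused connections.
        return None
    except socket.timeout:
        # A read that stalls after the connection is up raises
        # socket.timeout directly in Python 2.
        return None
```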
