Skip to content

Commit

Permalink
chore(daemon): Python3 fixes.
Browse files Browse the repository at this point in the history
  • Loading branch information
Eric Wiseblatt committed Apr 16, 2019
1 parent 99d9aad commit ecba3ea
Show file tree
Hide file tree
Showing 18 changed files with 287 additions and 202 deletions.
Original file line number Diff line number Diff line change
def output(self, options, content):
  """Emit command result content to stdout and/or a file.

  Args:
    options: [dict] May contain:
        'quiet': [bool] If true, suppress stdout output.
        'output_path': [string] If set, also write content to this path.
    content: [string] The text to emit.
  """
  do_print = not options.get('quiet', False)
  if do_print:
    print(content)
  output_path = options.get('output_path', None)

  if output_path:
    # pylint: disable=invalid-name
    with open(output_path, 'w') as f:
      f.write(content)
    if do_print:
      print('Wrote {0}'.format(output_path))


def process_command(command, options, command_registry):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,11 +16,15 @@


# pip install datadog
import ConfigParser
import logging
import os
import socket

try:
from ConfigParser import ConfigParser
except ImportError:
from configparser import ConfigParser

try:
import datadog
datadog_available = True
Expand All @@ -44,7 +48,7 @@ def __init__(self, options):
assert 'dd_agent_config' in self.options, \
'Key "dd_agent_config" is mandatory in supplied options'

self.datadog_config = ConfigParser.ConfigParser()
self.datadog_config = ConfigParser()
self.datadog_config.read(options['dd_agent_config'])

if not self.datadog_config.sections():
Expand Down
28 changes: 19 additions & 9 deletions spinnaker-monitoring-daemon/spinnaker-monitoring/google_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,15 +16,25 @@

from datetime import datetime
import collections
import httplib2
import json
import logging
import os
import urllib2
import httplib2

import spectator_client
from spectator_metric_transformer import PercentileDecoder

try:
from urllib2 import (
Request as urllibRequest,
urlopen as urllibUrlopen
)

except ImportError:
from urllib.request import (
Request as urllibRequest,
urlopen as urllibUrlopen)


try:
import apiclient
Expand Down Expand Up @@ -95,31 +105,31 @@ def normalize_options(options, embedded_options_key='stackdriver'):
# http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/instance-identity-documents.html
def get_aws_identity_document():
  """Fetch and decode this instance's AWS identity document.

  Returns:
    [dict] The parsed JSON identity document from the EC2 metadata service.

  Raises:
    IOError: If the metadata endpoint is unreachable
        (i.e. not running on Amazon Web Services).
  """
  url = 'http://169.254.169.254/latest/dynamic/instance-identity/document'
  request = urllibRequest(url)
  try:
    response = urllibUrlopen(request)
  except IOError as ioex:
    logging.info('Cannot read AWS Identity Document,'
                 ' probably not on Amazon Web Services.'
                 ' url=%s: %s', url, ioex)
    # Bare re-raise preserves the original traceback (raise ioex would not).
    raise
  # Python3 urlopen returns bytes; decode before parsing JSON.
  return json.loads(response.read().decode('utf-8'))


# https://cloud.google.com/compute/docs/storing-retrieving-metadata
def get_google_metadata(attribute):
  """Fetch a value from the Google Compute Engine metadata server.

  Args:
    attribute: [string] Metadata path relative to 'computeMetadata/v1/'.

  Returns:
    [string] The attribute value decoded as utf-8 text.

  Raises:
    IOError: If the metadata endpoint is unreachable
        (i.e. not running on Google Cloud Platform).
  """
  url = 'http://169.254.169.254/computeMetadata/v1/' + attribute
  request = urllibRequest(url)
  # GCE requires this header to guard against accidental/SSRF requests.
  request.add_header('Metadata-Flavor', 'Google')
  try:
    response = urllibUrlopen(request)
  except IOError as ioex:
    logging.info('Cannot read google metadata,'
                 ' probably not on Google Cloud Platform.'
                 ' url=%s: %s', url, ioex)
    # Bare re-raise preserves the original traceback (raise ioex would not).
    raise

  # Python3 urlopen returns bytes; callers expect text.
  return response.read().decode('utf-8')


def determine_local_project():
Expand Down Expand Up @@ -198,7 +208,7 @@ class GoogleMonitoringService(object):

@staticmethod
def millis_to_time(millis):
return datetime.fromtimestamp(millis / 1000).isoformat('T') + 'Z'
return datetime.fromtimestamp(millis // 1000).isoformat('T') + 'Z'

@property
def project(self):
Expand Down
23 changes: 17 additions & 6 deletions spinnaker-monitoring-daemon/spinnaker-monitoring/http_server.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,11 +15,20 @@
"""Implements HTTP Server."""

import textwrap
import BaseHTTPServer
import logging
import traceback
import urllib2

try:
from BaseHTTPServer import (
HTTPServer,
BaseHTTPRequestHandler)
from urllib2 import unquote as urllibUnquote
except ImportError:
from http.server import (
HTTPServer,
BaseHTTPRequestHandler)
from urllib.request import unquote as urllibUnquote


def build_html_document(body, title=None):
"""Produces the HTML document wrapper for a text/html response."""
Expand Down Expand Up @@ -54,7 +63,7 @@ def build_html_document(body, title=None):
return '\n'.join(html)


class DelegatingRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
class DelegatingRequestHandler(BaseHTTPRequestHandler):
"""An HttpServer request handler that delegates to our CommandHandler."""

def respond(self, code, headers, body=None):
Expand All @@ -64,6 +73,8 @@ def respond(self, code, headers, body=None):
self.send_header(key, value)
self.end_headers()
if body:
if isinstance(body, str):
body = body.encode('utf-8')
self.wfile.write(body)

def decode_request(self, request):
Expand All @@ -76,7 +87,7 @@ def decode_request(self, request):

for part in query.split('&'):
key, _, value = part.partition('=')
parameters[key] = urllib2.unquote(value)
parameters[key] = urllibUnquote(value)

return path, parameters, fragment or None

Expand Down Expand Up @@ -110,7 +121,7 @@ def log_message(self, msg_format, *args):
pass


class HttpServer(BaseHTTPServer.HTTPServer):
class HttpServer(HTTPServer):
"""Implements HTTP Server that will delegate to injected request handlers."""

PATH_HANDLERS = {}
Expand All @@ -125,6 +136,6 @@ def get_option(name):
host = get_option('host') or '0.0.0.0'

logging.info('Starting HTTP server on host=%s, port=%d', host, port)
BaseHTTPServer.HTTPServer.__init__(
HTTPServer.__init__(
self, (host, port), DelegatingRequestHandler)
HttpServer.PATH_HANDLERS.update(handlers or {})
Original file line number Diff line number Diff line change
Expand Up @@ -65,6 +65,10 @@

import logging
import re
import sys

if sys.version_info[0] > 2:
basestring = str


def get_as_list(container, key):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -43,14 +43,14 @@ def __init__(self, all_handlers, url_path, command_name, description):
def process_web_request(self, request, path, params, fragment):
"""Implements CommandHandler."""
query = self.params_to_query(params)
rows = [(handler.url_path, handler.description)
rows = [(handler.url_path or '', handler.description)
for handler in self.__all_handlers]
rows = sorted(rows)
row_html = [('<tr>'
'<td><A href="{path}{params}">{path}</A></td>'
'<td>{info}</td>'
'</tr>'.format(path=row[0], params=query, info=row[1]))
for row in rows if row[0] is not None]
for row in rows if row[0]]

html_body = ('<table>\n'
'<tr><th>Path</th><th>Description</th></tr>'
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,10 +26,23 @@
import threading
import time
import traceback
import urllib2
import urlparse
import yaml

try:
from urllib2 import (
Request as urllibRequest,
quote as urllibQuote,
urlopen as urllibUrlopen)
from urlparse import urlsplit

except ImportError:
from urllib.request import (
Request as urllibRequest,
quote as urllibQuote,
urlopen as urllibUrlopen)

from urllib.parse import urlsplit

from metric_filter import MetricFilter
from spectator_metric_transformer import SpectatorMetricTransformer

Expand Down Expand Up @@ -457,16 +470,16 @@ def create_request(self, url, authorization):
authorization: [string] None or the base64 encoded authorization string.
Returns:
urllib2.Request instance
urllibRequest instance
"""
request = urllib2.Request(url)
request = urllibRequest(url)
if authorization:
request.add_header('Authorization', 'Basic %s' % authorization)
return request

def collect_metrics(self, service, base_url, params=None):
"""Return JSON metrics from the given server."""
info = urlparse.urlsplit(base_url)
info = urlsplit(base_url)
host = info.hostname
port = info.port or 80
netloc = host
Expand All @@ -478,8 +491,8 @@ def collect_metrics(self, service, base_url, params=None):

authorization = None
if info.username or info.password:
authorization = base64.encodestring(
'%s:%s' % (info.username, info.password)).replace('\n', '')
text = '%s:%s' % (info.username, info.password)
authorization = base64.encodestring(text.encode('utf-8')).replace(b'\n', b'')

query = '?' + info.query if info.query else ''
sep = '&' if info.query else '?'
Expand All @@ -494,17 +507,17 @@ def collect_metrics(self, service, base_url, params=None):
query_params[key] = params[key]

for key, value in query_params.items():
query += sep + key + "=" + urllib2.quote(value)
query += sep + key + "=" + urllibQuote(value)
sep = "&"

url = '{base_url}{query}'.format(base_url=base_url, query=query)
collect_start_time = time.time()
response = urllib2.urlopen(self.create_request(url, authorization))
response = urllibUrlopen(self.create_request(url, authorization))
collect_end_time = time.time()

json_text = response.read()
json_text = response.read().decode('utf-8')
try:
spectator_response = json.JSONDecoder(encoding='utf-8').decode(json_text)
spectator_response = json.JSONDecoder().decode(json_text)
spectator_response['__collectStartTime'] = int(collect_start_time * 1000)
spectator_response['__collectEndTime'] = int(collect_end_time * 1000)
except ValueError:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -414,7 +414,7 @@ def __to_row_html(self, params, columns, meter_name,
transforms_to=None, transforms_from=None):
tag_service_map = self.to_tag_service_map(columns, service_tag_map)
num_labels = len(tag_service_map)
_, info = type_map[meter_name].items()[0]
_, info = next(iter(type_map[meter_name].items()))
kind = info[0].get('kind')
row_html = ['<tr>']
row_span = ' rowspan={0}'.format(num_labels) if num_labels > 1 else ''
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -81,7 +81,7 @@ def __init_buckets(self):
# It doesnt really matter what the algorithm is here. We are not
# making this decision. Rather we are decoding the decision already
# made and given to us.
buckets = [1,2,3]
buckets = [1, 2, 3]

digits = 2
exp = digits
Expand Down Expand Up @@ -205,7 +205,7 @@ def add_per_tags(self, value_json, sorted_tags, per_tags):

augmented_tags = list(sorted_tags)
augmented_tags.append(tag)
sorted_augmented_tags = sorted(augmented_tags)
sorted_augmented_tags = sorted(augmented_tags, key=lambda tag: tag['key'])
normalized_key = str(sorted_augmented_tags)
info = tag_container.get(normalized_key)
if not info:
Expand All @@ -228,7 +228,8 @@ def encode_as_spectator_response(self):
if self.__per_tag_values:
response['__per_tag_values'] = {
key: sorted([v.encode_as_spectator_response()
for v in value.values()])
for v in value.values()],
key=lambda d: (d['values'][0]['t'], d['values'][0]['v']))
for key, value in self.__per_tag_values.items()
}
return response
Expand Down Expand Up @@ -272,7 +273,7 @@ def find_tag_value(tag):
if self.__rule.discard_tag_value(key, find_tag_value(key)):
return

sorted_tags = sorted(tags) if tags else None
sorted_tags = sorted(tags, key=lambda tag: tag['key']) if tags else None
normalized_key = str(sorted_tags)

metric = self.__tags_to_metric.get(normalized_key)
Expand Down Expand Up @@ -420,7 +421,7 @@ class TransformationRule(object):
case the <extract_regex> should have a capture group for each
element. The <oneof_regex> offers multiple capture group
possibilities, and uses whichever value was matched.
* <type_name_or_names> is the type for the <target_tag_name_or_names>.
This should match the structure of <target_tag_name_or_names>.
types are as follows:
Expand Down Expand Up @@ -645,7 +646,7 @@ def __init__(self, transformer, rule_spec):
# (e.g. if the value indicates success we'd probably want AND
# but if the value indicates a failure we'd probably want OR)
self.combine_values = lambda x, y: x + y
if (rule_spec.get('value_type') == 'BOOL'):
if rule_spec.get('value_type') == 'BOOL':
self.combine_values = {
True: lambda x, y: x or y, # Any are true
False: lambda x, y: x and y, # All are true
Expand Down Expand Up @@ -835,7 +836,7 @@ def add_if_present(key, tag_dict, target_tags):
# extraction above, then that application tag will be dropped.

if target_tags:
target_metric['tags'] = sorted(target_tags)
target_metric['tags'] = sorted(target_tags, key=lambda tag: tag['key'])

metric_builder.add(
self.determine_measurement(target_metric['values'][-1]),
Expand Down Expand Up @@ -976,7 +977,7 @@ def add_meter_mapping(meter_name, spectator_metric, response):
response[meter_name]['values'].extend(spectator_metric['values'])
else:
response[meter_name] = spectator_metric

rule_list = self.__rulebase.get(meter_name, self.__default_rule_list)
if not rule_list:
# if None then discard if not mentioned and default rule was discard
Expand All @@ -991,7 +992,7 @@ def add_meter_mapping(meter_name, spectator_metric, response):
continue

transformed = {
'kind': rule.rule_specification.get('kind') or spectator_metric['kind'],
'values': rule.apply(spectator_metric)
'kind': rule.rule_specification.get('kind') or spectator_metric['kind'],
'values': rule.apply(spectator_metric)
}
add_meter_mapping(transformed_name, transformed, response)
Loading

0 comments on commit ecba3ea

Please sign in to comment.