diff --git a/.circleci/config.yml b/.circleci/config.yml index 2a011edcd4b..0676f908bd1 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -134,7 +134,7 @@ jobs: - checkout - *restore_cache_step - run: tox -e 'boto_contrib-{py27,py34}-boto' --result-json /tmp/boto.1.results - - run: tox -e 'botocore_contrib-{py27,py34}-botocore' --result-json /tmp/boto.2.results + - run: tox -e 'botocore_contrib-{py27,py34,py35,py36}-botocore' --result-json /tmp/boto.2.results - persist_to_workspace: root: /tmp paths: @@ -420,6 +420,20 @@ jobs: - grpc.results - *save_cache_step + molten: + docker: + - *test_runner + resource_class: *resource_class + steps: + - checkout + - *restore_cache_step + - run: tox -e 'molten_contrib-{py36}-molten{070,072}' --result-json /tmp/molten.results + - persist_to_workspace: + root: /tmp + paths: + - molten.results + - *save_cache_step + mysqlconnector: docker: - *test_runner @@ -985,6 +999,9 @@ workflows: - kombu: requires: - flake8 + - molten: + requires: + - flake8 - mongoengine: requires: - flake8 @@ -1082,6 +1099,7 @@ workflows: - integration - jinja2 - kombu + - molten - mongoengine - msgpack - mysqlconnector diff --git a/.gitignore b/.gitignore index ea7c27c3c22..e86740572e3 100644 --- a/.gitignore +++ b/.gitignore @@ -37,6 +37,7 @@ pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .tox/ +.ddtox/ .coverage .coverage.* .cache diff --git a/conftest.py b/conftest.py new file mode 100644 index 00000000000..556608e1dc6 --- /dev/null +++ b/conftest.py @@ -0,0 +1,55 @@ +""" +This file configures a local pytest plugin, which allows us to configure plugin hooks to control the +execution of our tests. 
Either by loading in fixtures, configuring directories to ignore, etc + +Local plugins: https://docs.pytest.org/en/3.10.1/writing_plugins.html#local-conftest-plugins +Hook reference: https://docs.pytest.org/en/3.10.1/reference.html#hook-reference +""" +import os +import re +import sys + +import pytest + +PY_DIR_PATTERN = re.compile(r'^py[23][0-9]$') + + +# Determine if the folder should be ignored +# https://docs.pytest.org/en/3.10.1/reference.html#_pytest.hookspec.pytest_ignore_collect +# DEV: We can only ignore folders/modules, we cannot ignore individual files +# DEV: We must wrap with `@pytest.mark.hookwrapper` to inherit from default (e.g. honor `--ignore`) +# https://github.com/pytest-dev/pytest/issues/846#issuecomment-122129189 +@pytest.mark.hookwrapper +def pytest_ignore_collect(path, config): + """ + Skip directories defining a required minimum Python version + + Example:: + + File: tests/contrib/vertica/py35/test.py + Python 2.7: Skip + Python 3.4: Skip + Python 3.5: Collect + Python 3.6: Collect + """ + # Execute original behavior first + # DEV: We need to set `outcome.force_result(True)` if we need to override + # these results and skip this directory + outcome = yield + + # Was not ignored by default behavior + if not outcome.get_result(): + # DEV: `path` is a `LocalPath` + path = str(path) + if not os.path.isdir(path): + path = os.path.dirname(path) + dirname = os.path.basename(path) + + # Directory name match `py[23][0-9]` + if PY_DIR_PATTERN.match(dirname): + # Split out version numbers into a tuple: `py35` -> `(3, 5)` + min_required = tuple((int(v) for v in dirname.strip('py'))) + + # If the current Python version does not meet the minimum required, skip this directory + if sys.version_info[0:2] < min_required: + outcome.force_result(True) diff --git a/ddtrace/__init__.py b/ddtrace/__init__.py index cec9fda339f..03a9bfc961a 100644 --- a/ddtrace/__init__.py +++ b/ddtrace/__init__.py @@ -4,7 +4,7 @@ from .tracer import Tracer from .settings import 
config -__version__ = '0.17.1' +__version__ = '0.18.0' # a global tracer instance with integration settings tracer = Tracer() diff --git a/ddtrace/compat.py b/ddtrace/compat.py index be875ff7d39..62c9c10479a 100644 --- a/ddtrace/compat.py +++ b/ddtrace/compat.py @@ -1,5 +1,20 @@ import platform import sys +import textwrap + +import six + +__all__ = [ + 'httplib', + 'iteritems', + 'PY2', + 'Queue', + 'stringify', + 'StringIO', + 'urlencode', + 'parse', + 'reraise', +] PYTHON_VERSION_INFO = sys.version_info PY2 = sys.version_info[0] == 2 @@ -8,32 +23,58 @@ PYTHON_VERSION = platform.python_version() PYTHON_INTERPRETER = platform.python_implementation() -stringify = str - -if PY2: - from urllib import urlencode - import httplib - stringify = unicode - from Queue import Queue - try: - from cStringIO import StringIO - except ImportError: - from StringIO import StringIO -else: - from queue import Queue - from urllib.parse import urlencode - import http.client as httplib - from io import StringIO - try: - import urlparse as parse + StringIO = six.moves.cStringIO except ImportError: - from urllib import parse + StringIO = six.StringIO -try: +httplib = six.moves.http_client +urlencode = six.moves.urllib.parse.urlencode +parse = six.moves.urllib.parse +Queue = six.moves.queue.Queue +iteritems = six.iteritems +reraise = six.reraise + +stringify = six.text_type +string_type = six.string_types[0] +msgpack_type = six.binary_type +# DEV: `six` doesn't have `float` in `integer_types` +numeric_types = six.integer_types + (float, ) + + +if PYTHON_VERSION_INFO[0:2] >= (3, 4): from asyncio import iscoroutinefunction - from .compat_async import _make_async_decorator as make_async_decorator -except ImportError: + + # Execute from a string to get around syntax errors from `yield from` + # DEV: The idea to do this was stolen from `six` + # https://github.com/benjaminp/six/blob/15e31431af97e5e64b80af0a3f598d382bcdd49a/six.py#L719-L737 + six.exec_(textwrap.dedent(""" + import functools + 
import asyncio + + + def make_async_decorator(tracer, coro, *params, **kw_params): + \"\"\" + Decorator factory that creates an asynchronous wrapper that yields + a coroutine result. This factory is required to handle Python 2 + compatibilities. + + :param object tracer: the tracer instance that is used + :param function f: the coroutine that must be executed + :param tuple params: arguments given to the Tracer.trace() + :param dict kw_params: keyword arguments given to the Tracer.trace() + \"\"\" + @functools.wraps(coro) + @asyncio.coroutine + def func_wrapper(*args, **kwargs): + with tracer.trace(*params, **kw_params): + result = yield from coro(*args, **kwargs) # noqa: E999 + return result + + return func_wrapper + """)) + +else: # asyncio is missing so we can't have coroutines; these # functions are used only to ensure code executions in case # of an unexpected behavior @@ -44,20 +85,14 @@ def make_async_decorator(tracer, fn, *params, **kw_params): return fn -def iteritems(obj, **kwargs): - func = getattr(obj, "iteritems", None) - if not func: - func = obj.items - return func(**kwargs) - - +# DEV: There is `six.u()` which does something similar, but doesn't have the guard around `hasattr(s, 'decode')` def to_unicode(s): """ Return a unicode string for the given bytes or string instance. """ # No reason to decode if we already have the unicode compatible object we expect - # DEV: `stringify` will be a `str` for python 3 and `unicode` for python 2 + # DEV: `six.text_type` will be a `str` for python 3 and `unicode` for python 2 # DEV: Double decoding a `unicode` can cause a `UnicodeEncodeError` # e.g. 
`'\xc3\xbf'.decode('utf-8').decode('utf-8')` - if isinstance(s, stringify): + if isinstance(s, six.text_type): return s # If the object has a `decode` method, then decode into `utf-8` @@ -65,9 +100,9 @@ def to_unicode(s): if hasattr(s, 'decode'): return s.decode('utf-8') - # Always try to coerce the object into the `stringify` object we expect + # Always try to coerce the object into the `six.text_type` object we expect # e.g. `to_unicode(1)`, `to_unicode(dict(key='value'))` - return stringify(s) + return six.text_type(s) def get_connection_response(conn): @@ -86,45 +121,3 @@ def get_connection_response(conn): return conn.getresponse(buffering=True) else: return conn.getresponse() - - -if PY2: - string_type = basestring - msgpack_type = basestring - numeric_types = (int, long, float) -else: - string_type = str - msgpack_type = bytes - numeric_types = (int, float) - -if PY2: - # avoids Python 3 `SyntaxError` - # this block will be replaced with the `six` library - from .utils.reraise import _reraise as reraise -else: - def reraise(tp, value, tb=None): - """Python 3 re-raise function. This function is internal and - will be replaced entirely with the `six` library. - """ - try: - if value is None: - value = tp() - if value.__traceback__ is not tb: - raise value.with_traceback(tb) - raise value - finally: - value = None - tb = None - - -__all__ = [ - 'httplib', - 'iteritems', - 'PY2', - 'Queue', - 'stringify', - 'StringIO', - 'urlencode', - 'parse', - 'reraise', -] diff --git a/ddtrace/compat_async.py b/ddtrace/compat_async.py deleted file mode 100644 index 7d8be42c921..00000000000 --- a/ddtrace/compat_async.py +++ /dev/null @@ -1,28 +0,0 @@ -""" -Async compat module that includes all asynchronous syntax that is not -Python 2 compatible. It MUST be used only in the ``compat`` -module that owns the logic to import it or not. 
-""" -import functools -import asyncio - - -def _make_async_decorator(tracer, coro, *params, **kw_params): - """ - Decorator factory that creates an asynchronous wrapper that yields - a coroutine result. This factory is required to handle Python 2 - compatibilities. - - :param object tracer: the tracer instance that is used - :param function f: the coroutine that must be executed - :param tuple params: arguments given to the Tracer.trace() - :param dict kw_params: keyword arguments given to the Tracer.trace() - """ - @functools.wraps(coro) - @asyncio.coroutine - def func_wrapper(*args, **kwargs): - with tracer.trace(*params, **kw_params): - result = yield from coro(*args, **kwargs) # noqa: E999 - return result - - return func_wrapper diff --git a/ddtrace/contrib/aiobotocore/patch.py b/ddtrace/contrib/aiobotocore/patch.py index bcd543b8ddf..dd30efbd489 100644 --- a/ddtrace/contrib/aiobotocore/patch.py +++ b/ddtrace/contrib/aiobotocore/patch.py @@ -66,16 +66,6 @@ def __aexit__(self, *args, **kwargs): return response -def truncate_arg_value(value, max_len=1024): - """Truncate values which are bytes and greater than `max_len`. - Useful for parameters like 'Body' in `put_object` operations. - """ - if isinstance(value, bytes) and len(value) > max_len: - return b'...' 
- - return value - - @asyncio.coroutine def _wrapped_api_call(original_func, instance, args, kwargs): pin = Pin.get_from(instance) @@ -96,12 +86,7 @@ def _wrapped_api_call(original_func, instance, args, kwargs): operation = None span.resource = endpoint_name - # add args in TRACED_ARGS if exist to the span - if not aws.is_blacklist(endpoint_name): - for name, value in aws.unpacking_args(args, ARGS_NAME, TRACED_ARGS): - if name == 'params': - value = {k: truncate_arg_value(v) for k, v in value.items()} - span.set_tag(name, (value)) + aws.add_span_arg_tags(span, endpoint_name, args, ARGS_NAME, TRACED_ARGS) region_name = deep_getattr(instance, 'meta.region_name') diff --git a/ddtrace/contrib/aiohttp/middlewares.py b/ddtrace/contrib/aiohttp/middlewares.py index a05f1f6a6a1..b71a6485e63 100644 --- a/ddtrace/contrib/aiohttp/middlewares.py +++ b/ddtrace/contrib/aiohttp/middlewares.py @@ -83,6 +83,9 @@ def on_prepare(request, response): elif res_info.get('prefix'): resource = res_info.get('prefix') + # prefix the resource name by the http method + resource = '{} {}'.format(request.method, resource) + request_span.resource = resource request_span.set_tag('http.method', request.method) request_span.set_tag('http.status_code', response.status) diff --git a/ddtrace/contrib/aiopg/connection.py b/ddtrace/contrib/aiopg/connection.py index 0ec58a05068..583a7f0ee4c 100644 --- a/ddtrace/contrib/aiopg/connection.py +++ b/ddtrace/contrib/aiopg/connection.py @@ -63,11 +63,14 @@ def callproc(self, proc, args): class AIOTracedConnection(wrapt.ObjectProxy): """ TracedConnection wraps a Connection with tracing code. 
""" - def __init__(self, conn, pin=None): + def __init__(self, conn, pin=None, cursor_cls=AIOTracedCursor): super(AIOTracedConnection, self).__init__(conn) name = dbapi._get_vendor(conn) db_pin = pin or Pin(service=name, app=name, app_type=AppTypes.db) db_pin.onto(self) + # wrapt requires prefix of `_self` for attributes that are only in the + # proxy (since some of our source objects will use `__slots__`) + self._self_cursor_cls = cursor_cls def cursor(self, *args, **kwargs): # unfortunately we also need to patch this method as otherwise "self" @@ -81,4 +84,4 @@ def _cursor(self, *args, **kwargs): pin = Pin.get_from(self) if not pin: return cursor - return AIOTracedCursor(cursor, pin) + return self._self_cursor_cls(cursor, pin) diff --git a/ddtrace/contrib/boto/patch.py b/ddtrace/contrib/boto/patch.py index 3a614ad2c02..d781b1b779a 100644 --- a/ddtrace/contrib/boto/patch.py +++ b/ddtrace/contrib/boto/patch.py @@ -78,12 +78,7 @@ def patched_query_request(original_func, instance, args, kwargs): else: span.resource = endpoint_name - # Adding the args in AWS_QUERY_TRACED_ARGS if exist to the span - if not aws.is_blacklist(endpoint_name): - for arg in aws.unpacking_args( - args, AWS_QUERY_ARGS_NAME, AWS_QUERY_TRACED_ARGS - ): - span.set_tag(arg[0], arg[1]) + aws.add_span_arg_tags(span, endpoint_name, args, AWS_QUERY_ARGS_NAME, AWS_QUERY_TRACED_ARGS) # Obtaining region name region_name = _get_instance_region_name(instance) @@ -127,19 +122,14 @@ def patched_auth_request(original_func, instance, args, kwargs): span_type=SPAN_TYPE, ) as span: - # Adding the args in AWS_AUTH_TRACED_ARGS if exist to the span - if not aws.is_blacklist(endpoint_name): - for arg in aws.unpacking_args( - args, AWS_AUTH_ARGS_NAME, AWS_AUTH_TRACED_ARGS - ): - span.set_tag(arg[0], arg[1]) - if args: http_method = args[0] span.resource = "%s.%s" % (endpoint_name, http_method.lower()) else: span.resource = endpoint_name + aws.add_span_arg_tags(span, endpoint_name, args, AWS_AUTH_ARGS_NAME, 
AWS_AUTH_TRACED_ARGS) + # Obtaining region name region_name = _get_instance_region_name(instance) diff --git a/ddtrace/contrib/botocore/patch.py b/ddtrace/contrib/botocore/patch.py index 748cc6aae9d..f1c1e010c8e 100644 --- a/ddtrace/contrib/botocore/patch.py +++ b/ddtrace/contrib/botocore/patch.py @@ -15,9 +15,9 @@ # Original botocore client class _Botocore_client = botocore.client.BaseClient -SPAN_TYPE = "http" -ARGS_NAME = ("action", "params", "path", "verb") -TRACED_ARGS = ["params", "path", "verb"] +SPAN_TYPE = 'http' +ARGS_NAME = ('action', 'params', 'path', 'verb') +TRACED_ARGS = ['params', 'path', 'verb'] def patch(): @@ -55,10 +55,7 @@ def patched_api_call(original_func, instance, args, kwargs): else: span.resource = endpoint_name - # Adding the args in TRACED_ARGS if exist to the span - if not aws.is_blacklist(endpoint_name): - for arg in aws.unpacking_args(args, ARGS_NAME, TRACED_ARGS): - span.set_tag(arg[0], arg[1]) + aws.add_span_arg_tags(span, endpoint_name, args, ARGS_NAME, TRACED_ARGS) region_name = deep_getattr(instance, "meta.region_name") diff --git a/ddtrace/contrib/dbapi/__init__.py b/ddtrace/contrib/dbapi/__init__.py index 849c8b98690..76c31dd010f 100644 --- a/ddtrace/contrib/dbapi/__init__.py +++ b/ddtrace/contrib/dbapi/__init__.py @@ -119,12 +119,15 @@ def __enter__(self): class TracedConnection(wrapt.ObjectProxy): """ TracedConnection wraps a Connection with tracing code. 
""" - def __init__(self, conn, pin=None): + def __init__(self, conn, pin=None, cursor_cls=TracedCursor): super(TracedConnection, self).__init__(conn) name = _get_vendor(conn) self._self_datadog_name = '{}.connection'.format(name) db_pin = pin or Pin(service=name, app=name, app_type=AppTypes.db) db_pin.onto(self) + # wrapt requires prefix of `_self` for attributes that are only in the + # proxy (since some of our source objects will use `__slots__`) + self._self_cursor_cls = cursor_cls def _trace_method(self, method, name, extra_tags, *args, **kwargs): pin = Pin.get_from(self) @@ -143,7 +146,7 @@ def cursor(self, *args, **kwargs): pin = Pin.get_from(self) if not pin: return cursor - return TracedCursor(cursor, pin) + return self._self_cursor_cls(cursor, pin) def commit(self, *args, **kwargs): span_name = '{}.{}'.format(self._self_datadog_name, 'commit') diff --git a/ddtrace/contrib/molten/__init__.py b/ddtrace/contrib/molten/__init__.py new file mode 100644 index 00000000000..8570c87a86d --- /dev/null +++ b/ddtrace/contrib/molten/__init__.py @@ -0,0 +1,30 @@ +""" +The molten web framework is automatically traced by ``ddtrace`` when calling ``patch``:: + + from molten import App, Route + from ddtrace import patch_all; patch_all(molten=True) + + def hello(name: str, age: int) -> str: + return f'Hello {age} year old named {name}!' + app = App(routes=[Route('/hello/{name}/{age}', hello)]) + + +You may also enable molten tracing automatically via ``ddtrace-run``:: + + ddtrace-run python app.py + +To enable distributed tracing when using autopatching, set the +``DD_MOLTEN_DISTRIBUTED_TRACING`` environment variable to ``True``. +""" +from ...utils.importlib import require_modules + +required_modules = ['molten'] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from . 
import patch as _patch + + patch = _patch.patch + unpatch = _patch.unpatch + + __all__ = ['patch', 'unpatch'] diff --git a/ddtrace/contrib/molten/patch.py b/ddtrace/contrib/molten/patch.py new file mode 100644 index 00000000000..9ec58a50d93 --- /dev/null +++ b/ddtrace/contrib/molten/patch.py @@ -0,0 +1,159 @@ +import wrapt +from wrapt import wrap_function_wrapper as _w + +import molten + +from ... import Pin, config +from ...ext import AppTypes, http +from ...propagation.http import HTTPPropagator +from ...utils.formats import asbool, get_env +from ...utils.importlib import func_name +from ...utils.wrappers import unwrap as _u +from .wrappers import WrapperComponent, WrapperRenderer, WrapperMiddleware, WrapperRouter, MOLTEN_ROUTE + +MOLTEN_VERSION = tuple(map(int, molten.__version__.split()[0].split('.'))) + +# Configure default configuration +config._add('molten', dict( + service_name=get_env('molten', 'service_name', 'molten'), + app='molten', + app_type=AppTypes.web, + distributed_tracing=asbool(get_env('molten', 'distributed_tracing', False)), +)) + + +def patch(): + """Patch the instrumented methods + """ + if getattr(molten, '_datadog_patch', False): + return + setattr(molten, '_datadog_patch', True) + + pin = Pin( + service=config.molten['service_name'], + app=config.molten['app'], + app_type=config.molten['app_type'], + ) + + # add pin to module since many classes use __slots__ + pin.onto(molten) + + _w(molten.BaseApp, '__init__', patch_app_init) + _w(molten.App, '__call__', patch_app_call) + + +def unpatch(): + """Remove instrumentation + """ + if getattr(molten, '_datadog_patch', False): + setattr(molten, '_datadog_patch', False) + + # remove pin + pin = Pin.get_from(molten) + if pin: + pin.remove_from(molten) + + _u(molten.BaseApp, '__init__') + _u(molten.App, '__call__') + _u(molten.Router, 'add_route') + + +def patch_app_call(wrapped, instance, args, kwargs): + """Patch wsgi interface for app + """ + pin = Pin.get_from(molten) + + if not pin or not 
pin.enabled(): + return wrapped(*args, **kwargs) + + # DEV: This is safe because this is the args for a WSGI handler + # https://www.python.org/dev/peps/pep-3333/ + environ, start_response = args + + request = molten.http.Request.from_environ(environ) + resource = func_name(wrapped) + + # Configure distributed tracing + if config.molten.get('distributed_tracing', False): + propagator = HTTPPropagator() + # request.headers is type Iterable[Tuple[str, str]] + context = propagator.extract(dict(request.headers)) + # Only need to activate the new context if something was propagated + if context.trace_id: + pin.tracer.context_provider.activate(context) + + with pin.tracer.trace('molten.request', service=pin.service, resource=resource) as span: + @wrapt.function_wrapper + def _w_start_response(wrapped, instance, args, kwargs): + """ Patch respond handling to set metadata """ + + pin = Pin.get_from(molten) + if not pin or not pin.enabled(): + return wrapped(*args, **kwargs) + + status, headers, exc_info = args + code, _, _ = status.partition(' ') + + try: + code = int(code) + except ValueError: + pass + + if not span.get_tag(MOLTEN_ROUTE): + # if route never resolve, update root resource + span.resource = u'{} {}'.format(request.method, code) + + span.set_tag(http.STATUS_CODE, code) + + # mark 5xx spans as error + if 500 <= code < 600: + span.error = 1 + + return wrapped(*args, **kwargs) + + # patching for extracting response code + start_response = _w_start_response(start_response) + + span.set_tag(http.METHOD, request.method) + span.set_tag(http.URL, request.path) + span.set_tag('molten.version', molten.__version__) + return wrapped(environ, start_response, **kwargs) + + +def patch_app_init(wrapped, instance, args, kwargs): + """Patch app initialization of middleware, components and renderers + """ + # allow instance to be initialized before wrapping them + wrapped(*args, **kwargs) + + # add Pin to instance + pin = Pin.get_from(molten) + + if not pin or not 
pin.enabled(): + return + + # Wrappers here allow us to trace objects without altering class or instance + # attributes, which presents a problem when classes in molten use + # ``__slots__`` + + instance.router = WrapperRouter(instance.router) + + # wrap middleware functions/callables + instance.middleware = [ + WrapperMiddleware(mw) + for mw in instance.middleware + ] + + # wrap components objects within injector + # NOTE: the app instance also contains a list of components but it does not + # appear to be used for anything passing along to the dependency injector + instance.injector.components = [ + WrapperComponent(c) + for c in instance.injector.components + ] + + # wrap renderers objects + instance.renderers = [ + WrapperRenderer(r) + for r in instance.renderers + ] diff --git a/ddtrace/contrib/molten/wrappers.py b/ddtrace/contrib/molten/wrappers.py new file mode 100644 index 00000000000..f5a61c5195d --- /dev/null +++ b/ddtrace/contrib/molten/wrappers.py @@ -0,0 +1,95 @@ +import wrapt +import molten + +from ... 
import Pin +from ...utils.importlib import func_name + +MOLTEN_ROUTE = 'molten.route' + + +def trace_wrapped(resource, wrapped, *args, **kwargs): + pin = Pin.get_from(molten) + if not pin or not pin.enabled(): + return wrapped(*args, **kwargs) + + with pin.tracer.trace(func_name(wrapped), service=pin.service, resource=resource): + return wrapped(*args, **kwargs) + + +def trace_func(resource): + """Trace calls to function using provided resource name + """ + @wrapt.function_wrapper + def _trace_func(wrapped, instance, args, kwargs): + pin = Pin.get_from(molten) + + if not pin or not pin.enabled(): + return wrapped(*args, **kwargs) + + with pin.tracer.trace(func_name(wrapped), service=pin.service, resource=resource): + return wrapped(*args, **kwargs) + + return _trace_func + + +class WrapperComponent(wrapt.ObjectProxy): + """ Tracing of components """ + def can_handle_parameter(self, *args, **kwargs): + func = self.__wrapped__.can_handle_parameter + cname = func_name(self.__wrapped__) + resource = '{}.{}'.format(cname, func.__name__) + return trace_wrapped(resource, func, *args, **kwargs) + + # TODO[tahir]: the signature of a wrapped resolve method causes DIError to + # be thrown since parameter types cannot be determined + + +class WrapperRenderer(wrapt.ObjectProxy): + """ Tracing of renderers """ + def render(self, *args, **kwargs): + func = self.__wrapped__.render + cname = func_name(self.__wrapped__) + resource = '{}.{}'.format(cname, func.__name__) + return trace_wrapped(resource, func, *args, **kwargs) + + +class WrapperMiddleware(wrapt.ObjectProxy): + """ Tracing of callable functional-middleware """ + def __call__(self, *args, **kwargs): + func = self.__wrapped__.__call__ + resource = func_name(self.__wrapped__) + return trace_wrapped(resource, func, *args, **kwargs) + + +class WrapperRouter(wrapt.ObjectProxy): + """ Tracing of router on the way back from a matched route """ + def match(self, *args, **kwargs): + # catch matched route and wrap tracer around its 
handler and set root span resource + func = self.__wrapped__.match + route_and_params = func(*args, **kwargs) + + pin = Pin.get_from(molten) + if not pin or not pin.enabled(): + return route_and_params + + if route_and_params is not None: + route, params = route_and_params + + route.handler = trace_func(func_name(route.handler))(route.handler) + + # update root span resource while we know the matched route + resource = '{} {}'.format( + route.method, + route.template, + ) + root_span = pin.tracer.current_root_span() + root_span.resource = resource + + # if no root route set make sure we record it based on this resolved + # route + if root_span and not root_span.get_tag(MOLTEN_ROUTE): + root_span.set_tag(MOLTEN_ROUTE, route.name) + + return route, params + + return route_and_params diff --git a/ddtrace/contrib/psycopg/patch.py b/ddtrace/contrib/psycopg/patch.py index bbf1fca19f2..d10415a7275 100644 --- a/ddtrace/contrib/psycopg/patch.py +++ b/ddtrace/contrib/psycopg/patch.py @@ -10,6 +10,18 @@ # Original connect method _connect = psycopg2.connect +# psycopg2 versions can end in `-betaN` where `N` is a number +# in such cases we simply skip version specific patching +PSYCOPG2_VERSION = (0, 0, 0) + +try: + PSYCOPG2_VERSION = tuple(map(int, psycopg2.__version__.split()[0].split('.'))) +except Exception: + pass + +if PSYCOPG2_VERSION >= (2, 7): + from psycopg2.sql import Composable + def patch(): """ Patch monkey patches psycopg's connection function @@ -29,7 +41,27 @@ def unpatch(): psycopg2.connect = _connect -def patch_conn(conn, traced_conn_cls=dbapi.TracedConnection): +class Psycopg2TracedCursor(dbapi.TracedCursor): + """ TracedCursor for psycopg2 """ + def _trace_method(self, method, name, resource, extra_tags, *args, **kwargs): + # treat psycopg2.sql.Composable resource objects as strings + if PSYCOPG2_VERSION >= (2, 7) and isinstance(resource, Composable): + resource = resource.as_string(self.__wrapped__) + + return super(Psycopg2TracedCursor, 
self)._trace_method(method, name, resource, extra_tags, *args, **kwargs) + + +class Psycopg2TracedConnection(dbapi.TracedConnection): + """ TracedConnection wraps a Connection with tracing code. """ + + def __init__(self, conn, pin=None, cursor_cls=Psycopg2TracedCursor): + super(Psycopg2TracedConnection, self).__init__(conn, pin) + # wrapt requires prefix of `_self` for attributes that are only in the + # proxy (since some of our source objects will use `__slots__`) + self._self_cursor_cls = cursor_cls + + +def patch_conn(conn, traced_conn_cls=Psycopg2TracedConnection): """ Wrap will patch the instance so that it's queries are traced.""" # ensure we've patched extensions (this is idempotent) in # case we're only tracing some connections. diff --git a/ddtrace/ext/aws.py b/ddtrace/ext/aws.py index f75b1c29298..eec82b8c28d 100644 --- a/ddtrace/ext/aws.py +++ b/ddtrace/ext/aws.py @@ -1,30 +1,37 @@ -BLACKLIST_ENDPOINT = ["kms", "sts"] +from ..utils.formats import flatten_dict -def is_blacklist(endpoint_name): - """Protecting the args sent to kms, sts to avoid security leaks - if kms disabled test_kms_client in test/contrib/botocore will fail - if sts disabled test_sts_client in test/contrib/boto contrib will fail - """ - return endpoint_name in BLACKLIST_ENDPOINT +BLACKLIST_ENDPOINT = ['kms', 'sts'] +BLACKLIST_ENDPOINT_TAGS = { + 's3': ['params.Body'], +} -def unpacking_args(args, args_name, traced_args_list): +def truncate_arg_value(value, max_len=1024): + """Truncate values which are bytes and greater than `max_len`. + Useful for parameters like 'Body' in `put_object` operations. 
""" - @params: - args: tuple of args sent to a patched function - args_name: tuple containing the names of all the args that can be sent - traced_args_list: list of names of the args we want to trace - Returns a list of (arg name, arg) of the args we want to trace - The number of args being variable from one call to another, this function - will parse t""" - index = 0 - response = [] - for arg in args: - if arg and args_name[index] in traced_args_list: - response += [(args_name[index], arg)] - index += 1 - return response + if isinstance(value, bytes) and len(value) > max_len: + return b'...' + + return value + + +def add_span_arg_tags(span, endpoint_name, args, args_names, args_traced): + if endpoint_name not in BLACKLIST_ENDPOINT: + blacklisted = BLACKLIST_ENDPOINT_TAGS.get(endpoint_name, []) + tags = dict( + (name, value) + for (name, value) in zip(args_names, args) + if name in args_traced + ) + tags = flatten_dict(tags) + tags = { + k: truncate_arg_value(v) + for k, v in tags.items() + if k not in blacklisted + } + span.set_tags(tags) REGION = "aws.region" diff --git a/ddtrace/monkey.py b/ddtrace/monkey.py index 6eef0a8d9b3..5c2cd353e9f 100644 --- a/ddtrace/monkey.py +++ b/ddtrace/monkey.py @@ -44,6 +44,7 @@ 'aiobotocore': False, 'httplib': False, 'vertica': True, + 'molten': True, 'jinja2': True, 'flask': True, 'kombu': False, diff --git a/ddtrace/opentracer/tracer.py b/ddtrace/opentracer/tracer.py index 90355378c99..09b70cbe0a2 100644 --- a/ddtrace/opentracer/tracer.py +++ b/ddtrace/opentracer/tracer.py @@ -80,6 +80,7 @@ def __init__(self, service_name=None, config=None, scope_manager=None, dd_tracer dd_context_provider = get_context_provider_for_scope_manager(self._scope_manager) self._dd_tracer = dd_tracer or ddtrace.tracer or DatadogTracer() + self._dd_tracer.set_tags(self._config.get(keys.GLOBAL_TAGS)) self._dd_tracer.configure(enabled=self._enabled, hostname=self._config.get(keys.AGENT_HOSTNAME), port=self._config.get(keys.AGENT_PORT), diff --git 
a/ddtrace/pin.py b/ddtrace/pin.py index bb5215ff8cf..04dbd397503 100644 --- a/ddtrace/pin.py +++ b/ddtrace/pin.py @@ -156,6 +156,17 @@ def onto(self, obj, send=True): except AttributeError: log.debug("can't pin onto object. skipping", exc_info=True) + def remove_from(self, obj): + # Remove pin from the object. + try: + pin_name = _DD_PIN_PROXY_NAME if isinstance(obj, wrapt.ObjectProxy) else _DD_PIN_NAME + + pin = Pin.get_from(obj) + if pin is not None: + delattr(obj, pin_name) + except AttributeError: + log.debug('can\'t remove pin from object. skipping', exc_info=True) + def clone(self, service=None, app=None, app_type=None, tags=None, tracer=None): """Return a clone of the pin with the given attributes replaced.""" # do a shallow copy of Pin dicts diff --git a/ddtrace/settings.py b/ddtrace/settings.py index ddf6e4f3664..3e132db5557 100644 --- a/ddtrace/settings.py +++ b/ddtrace/settings.py @@ -4,6 +4,7 @@ from .pin import Pin from .span import Span +from .utils.attrdict import AttrDict from .utils.merge import deepmerge from .utils.http import normalize_header_name @@ -102,7 +103,7 @@ def __repr__(self): return '{}.{}({})'.format(cls.__module__, cls.__name__, integrations) -class IntegrationConfig(dict): +class IntegrationConfig(AttrDict): """ Integration specific configuration object. 
@@ -113,8 +114,9 @@ class IntegrationConfig(dict): # This is an `IntegrationConfig` config.flask - # `IntegrationConfig` supports item accessors + # `IntegrationConfig` supports both attribute and item accessors config.flask['service_name'] = 'my-service-name' + config.flask.service_name = 'my-service-name' """ def __init__(self, global_config, *args, **kwargs): """ diff --git a/ddtrace/utils/attrdict.py b/ddtrace/utils/attrdict.py new file mode 100644 index 00000000000..0f02aefa68c --- /dev/null +++ b/ddtrace/utils/attrdict.py @@ -0,0 +1,29 @@ +class AttrDict(dict): + """ + dict implementation that allows for item attribute access + + + Example:: + + data = AttrDict() + data['key'] = 'value' + print(data['key']) + + data.key = 'new-value' + print(data.key) + + # Convert an existing `dict` + data = AttrDict(dict(key='value')) + print(data.key) + """ + def __getattr__(self, key): + if key in self: + return self[key] + return object.__getattribute__(self, key) + + def __setattr__(self, key, value): + # Allow overwriting an existing attribute, e.g. 
`self.global_config = dict()` + if hasattr(self, key): + object.__setattr__(self, key, value) + else: + self[key] = value diff --git a/ddtrace/utils/formats.py b/ddtrace/utils/formats.py index 4ad21a41827..bf12399e2c6 100644 --- a/ddtrace/utils/formats.py +++ b/ddtrace/utils/formats.py @@ -65,3 +65,16 @@ def asbool(value): return value return value.lower() in ("true", "1") + + +def flatten_dict(d, sep='.', prefix=''): + """ + Returns a normalized dict of depth 1 with keys in order of embedding + + """ + # adapted from https://stackoverflow.com/a/19647596 + return { + prefix + sep + k if prefix else k: v + for kk, vv in d.items() + for k, v in flatten_dict(vv, sep, kk).items() + } if isinstance(d, dict) else {prefix: d} diff --git a/ddtrace/utils/reraise.py b/ddtrace/utils/reraise.py deleted file mode 100644 index 9fe3de6efb9..00000000000 --- a/ddtrace/utils/reraise.py +++ /dev/null @@ -1,5 +0,0 @@ -def _reraise(tp, value, tb=None): - """Python 2 re-raise function. This function is internal and - will be replaced entirely with the `six` library. - """ - raise tp, value, tb diff --git a/docker-compose.yml b/docker-compose.yml index d138950e04f..42c9f844d3b 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -94,6 +94,7 @@ services: - ./tests:/src/tests:ro - ./setup.cfg:/src/setup.cfg:ro - ./setup.py:/src/setup.py:ro + - ./conftest.py:/src/conftest.py:ro - ./tox.ini:/src/tox.ini:ro - ./.ddtox:/src/.tox command: bash diff --git a/docs/index.rst b/docs/index.rst index 299215d958e..7f2295ad2ab 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -76,6 +76,8 @@ contacting support. 
+--------------------------------------------------+---------------+----------------+ | :ref:`kombu` | >= 4.0 | No | +--------------------------------------------------+---------------+----------------+ +| :ref:`molten` | >= 0.7.0 | Yes | ++--------------------------------------------------+---------------+----------------+ | :ref:`mongoengine` | >= 0.11 | Yes | +--------------------------------------------------+---------------+----------------+ | :ref:`mysql-connector` | >= 2.1 | No | diff --git a/docs/web_integrations.rst b/docs/web_integrations.rst index a8b9798da06..4145558ccab 100644 --- a/docs/web_integrations.rst +++ b/docs/web_integrations.rst @@ -52,6 +52,12 @@ Flask .. automodule:: ddtrace.contrib.flask +.. _molten: + +Molten +^^^^^^ + +.. automodule:: ddtrace.contrib.molten .. _pylons: diff --git a/setup.py b/setup.py index 5ad4a757565..5c1a452f52b 100644 --- a/setup.py +++ b/setup.py @@ -82,13 +82,14 @@ def run_tests(self): license='BSD', packages=find_packages(exclude=['tests*']), install_requires=[ - "wrapt", - "msgpack-python", + 'msgpack-python', + 'six', + 'wrapt', ], extras_require={ # users can include opentracing by having: - # install_requires=["ddtrace[opentracing]", ...] - "opentracing": ["opentracing>=2.0.0"], + # install_requires=['ddtrace[opentracing]', ...] 
+ 'opentracing': ['opentracing>=2.0.0'], }, # plugin tox tests_require=['tox', 'flake8'], diff --git a/tests/base.py b/tests/base/__init__.py similarity index 95% rename from tests/base.py rename to tests/base/__init__.py index f205514743d..511c821810f 100644 --- a/tests/base.py +++ b/tests/base/__init__.py @@ -3,8 +3,8 @@ from ddtrace import config -from .utils.tracer import DummyTracer -from .utils.span import TestSpanContainer, TestSpan, NO_CHILDREN +from ..utils.tracer import DummyTracer +from ..utils.span import TestSpanContainer, TestSpan, NO_CHILDREN class BaseTestCase(unittest.TestCase): diff --git a/tests/benchmark.py b/tests/benchmark.py index 8e46e591ddb..5e40210cb4b 100644 --- a/tests/benchmark.py +++ b/tests/benchmark.py @@ -1,4 +1,3 @@ -import time import timeit from ddtrace import Tracer @@ -73,6 +72,7 @@ def m(self): result = timer.repeat(repeat=REPEAT, number=NUMBER) print("- method execution time: {:8.6f}".format(min(result))) + def benchmark_getpid(): timer = timeit.Timer(getpid) result = timer.repeat(repeat=REPEAT, number=NUMBER) diff --git a/tests/commands/ddtrace_run_argv.py b/tests/commands/ddtrace_run_argv.py index deeff688cd2..c31b3af0c6e 100644 --- a/tests/commands/ddtrace_run_argv.py +++ b/tests/commands/ddtrace_run_argv.py @@ -1,10 +1,8 @@ from __future__ import print_function -from ddtrace import tracer - from nose.tools import eq_ import sys if __name__ == '__main__': eq_(sys.argv[1:], ['foo', 'bar']) - print("Test success") + print('Test success') diff --git a/tests/commands/ddtrace_run_env.py b/tests/commands/ddtrace_run_env.py index bc52af24829..1ca5e1345c7 100644 --- a/tests/commands/ddtrace_run_env.py +++ b/tests/commands/ddtrace_run_env.py @@ -1,10 +1,9 @@ from __future__ import print_function -import os from ddtrace import tracer from nose.tools import eq_ if __name__ == '__main__': - eq_(tracer.tags["env"], "test") - print("Test success") + eq_(tracer.tags['env'], 'test') + print('Test success') diff --git 
a/tests/commands/ddtrace_run_service.py b/tests/commands/ddtrace_run_service.py index 5983eb909f4..8ff0653cff6 100644 --- a/tests/commands/ddtrace_run_service.py +++ b/tests/commands/ddtrace_run_service.py @@ -1,10 +1,9 @@ from __future__ import print_function import os -from ddtrace import tracer from nose.tools import eq_ if __name__ == '__main__': eq_(os.environ['DATADOG_SERVICE_NAME'], 'my_test_service') - print("Test success") + print('Test success') diff --git a/tests/commands/ddtrace_run_sitecustomize.py b/tests/commands/ddtrace_run_sitecustomize.py index 2d66caec973..a1f035a3999 100644 --- a/tests/commands/ddtrace_run_sitecustomize.py +++ b/tests/commands/ddtrace_run_sitecustomize.py @@ -1,7 +1,6 @@ from __future__ import print_function import sys -from ddtrace import tracer from nose.tools import ok_ diff --git a/tests/commands/test_runner.py b/tests/commands/test_runner.py index d29fa70c778..bc6aeabaa78 100644 --- a/tests/commands/test_runner.py +++ b/tests/commands/test_runner.py @@ -1,6 +1,5 @@ #!/usr/bin/env python import os -import sys import subprocess import unittest @@ -15,7 +14,14 @@ def tearDown(self): """ Clear DATADOG_* env vars between tests """ - for k in ('DATADOG_ENV', 'DATADOG_TRACE_ENABLED', 'DATADOG_SERVICE_NAME', 'DATADOG_TRACE_DEBUG', 'DD_TRACE_GLOBAL_TAGS'): + keys = ( + 'DATADOG_ENV', + 'DATADOG_TRACE_ENABLED', + 'DATADOG_SERVICE_NAME', + 'DATADOG_TRACE_DEBUG', + 'DD_TRACE_GLOBAL_TAGS', + ) + for k in keys: if k in os.environ: del os.environ[k] @@ -154,21 +160,21 @@ def test_patch_modules_from_env(self): # overrides work in either direction os.environ["DATADOG_PATCH_MODULES"] = "django:false" update_patched_modules() - assert EXTRA_PATCHED_MODULES["django"] == False + assert EXTRA_PATCHED_MODULES["django"] is False os.environ["DATADOG_PATCH_MODULES"] = "boto:true" update_patched_modules() - assert EXTRA_PATCHED_MODULES["boto"] == True + assert EXTRA_PATCHED_MODULES["boto"] is True os.environ["DATADOG_PATCH_MODULES"] = 
"django:true,boto:false" update_patched_modules() - assert EXTRA_PATCHED_MODULES["boto"] == False - assert EXTRA_PATCHED_MODULES["django"] == True + assert EXTRA_PATCHED_MODULES["boto"] is False + assert EXTRA_PATCHED_MODULES["django"] is True os.environ["DATADOG_PATCH_MODULES"] = "django:false,boto:true" update_patched_modules() - assert EXTRA_PATCHED_MODULES["boto"] == True - assert EXTRA_PATCHED_MODULES["django"] == False + assert EXTRA_PATCHED_MODULES["boto"] is True + assert EXTRA_PATCHED_MODULES["django"] is False def test_sitecustomize_without_ddtrace_run_command(self): # [Regression test]: ensure `sitecustomize` path is removed only if it's @@ -222,8 +228,6 @@ def test_got_app_name(self): def test_global_trace_tags(self): """ Ensure global tags are passed in from environment """ - - os.environ["DD_TRACE_GLOBAL_TAGS"] = 'a:True,b:0,c:C' out = subprocess.check_output( diff --git a/tests/contrib/__init__.py b/tests/contrib/__init__.py index e69de29bb2d..da962b5037f 100644 --- a/tests/contrib/__init__.py +++ b/tests/contrib/__init__.py @@ -0,0 +1,7 @@ +from .patch import PatchMixin, PatchTestCase + + +__all__ = [ + 'PatchMixin', + 'PatchTestCase', +] diff --git a/tests/contrib/aiobotocore/py35/__init__.py b/tests/contrib/aiobotocore/py35/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/contrib/aiobotocore/test_35.py b/tests/contrib/aiobotocore/py35/test.py similarity index 87% rename from tests/contrib/aiobotocore/test_35.py rename to tests/contrib/aiobotocore/py35/test.py index fe5e0aadb6f..2eda934a793 100644 --- a/tests/contrib/aiobotocore/test_35.py +++ b/tests/contrib/aiobotocore/py35/test.py @@ -1,11 +1,12 @@ -from nose.tools import eq_, ok_, assert_raises -from botocore.errorfactory import ClientError +# flake8: noqa +# DEV: Skip linting, we lint with Python 2, we'll get SyntaxErrors from `async` +from nose.tools import eq_ from ddtrace.contrib.aiobotocore.patch import patch, unpatch -from .utils import aiobotocore_client 
-from ..asyncio.utils import AsyncioTestCase, mark_asyncio -from ...test_tracer import get_dummy_tracer +from ..utils import aiobotocore_client +from ...asyncio.utils import AsyncioTestCase, mark_asyncio +from ....test_tracer import get_dummy_tracer class AIOBotocoreTest(AsyncioTestCase): diff --git a/tests/contrib/aiobotocore/test.py b/tests/contrib/aiobotocore/test.py index 7e0b7ebf5ca..dbd26618727 100644 --- a/tests/contrib/aiobotocore/test.py +++ b/tests/contrib/aiobotocore/test.py @@ -1,7 +1,11 @@ +# flake8: noqa +# DEV: Skip linting, we lint with Python 2, we'll get SyntaxErrors from `yield from` from nose.tools import eq_, ok_, assert_raises from botocore.errorfactory import ClientError from ddtrace.contrib.aiobotocore.patch import patch, unpatch +from ddtrace.ext import http +from ddtrace.compat import stringify from .utils import aiobotocore_client from ..asyncio.utils import AsyncioTestCase, mark_asyncio @@ -57,11 +61,33 @@ def test_s3_client(self): eq_(span.resource, 's3.listbuckets') eq_(span.name, 's3.command') + @mark_asyncio + def test_s3_put(self): + params = dict(Key='foo', Bucket='mybucket', Body=b'bar') + + with aiobotocore_client('s3', self.tracer) as s3: + yield from s3.create_bucket(Bucket='mybucket') + yield from s3.put_object(**params) + + spans = [trace[0] for trace in self.tracer.writer.pop_traces()] + assert spans + self.assertEqual(len(spans), 2) + self.assertEqual(spans[0].get_tag('aws.operation'), 'CreateBucket') + self.assertEqual(spans[0].get_tag(http.STATUS_CODE), '200') + self.assertEqual(spans[0].service, 'aws.s3') + self.assertEqual(spans[0].resource, 's3.createbucket') + self.assertEqual(spans[1].get_tag('aws.operation'), 'PutObject') + self.assertEqual(spans[1].resource, 's3.putobject') + self.assertEqual(spans[1].get_tag('params.Key'), stringify(params['Key'])) + self.assertEqual(spans[1].get_tag('params.Bucket'), stringify(params['Bucket'])) + self.assertIsNone(spans[1].get_tag('params.Body')) + @mark_asyncio def 
test_s3_client_error(self): with aiobotocore_client('s3', self.tracer) as s3: with assert_raises(ClientError): - yield from s3.list_objects(Bucket='mybucket') + # FIXME: add proper clean-up to tearDown + yield from s3.list_objects(Bucket='doesnotexist') traces = self.tracer.writer.pop_traces() eq_(len(traces), 1) diff --git a/tests/contrib/aiohttp/app/web.py b/tests/contrib/aiohttp/app/web.py index 6020c4dc309..baae86bc079 100644 --- a/tests/contrib/aiohttp/app/web.py +++ b/tests/contrib/aiohttp/app/web.py @@ -1,3 +1,4 @@ +# flake8: noqa import os import jinja2 import asyncio diff --git a/tests/contrib/aiohttp/test_middleware.py b/tests/contrib/aiohttp/test_middleware.py index 951d9126225..25eed9dcf2e 100644 --- a/tests/contrib/aiohttp/test_middleware.py +++ b/tests/contrib/aiohttp/test_middleware.py @@ -1,3 +1,4 @@ +# flake8: noqa import asyncio from nose.tools import eq_, ok_ @@ -49,7 +50,7 @@ def test_handler(self): eq_('aiohttp.request', span.name) eq_('aiohttp-web', span.service) eq_('http', span.span_type) - eq_('/', span.resource) + eq_('GET /', span.resource) eq_('/', span.get_tag('http.url')) eq_('GET', span.get_tag('http.method')) eq_('200', span.get_tag('http.status_code')) @@ -69,7 +70,7 @@ def test_param_handler(self): eq_(1, len(traces[0])) span = traces[0][0] # with the right fields - eq_('/echo/{name}', span.resource) + eq_('GET /echo/{name}', span.resource) eq_('/echo/team', span.get_tag('http.url')) eq_('200', span.get_tag('http.status_code')) @@ -107,7 +108,7 @@ def test_coroutine_chaining(self): coroutine = traces[0][2] # root span created in the middleware eq_('aiohttp.request', root.name) - eq_('/chaining/', root.resource) + eq_('GET /chaining/', root.resource) eq_('/chaining/', root.get_tag('http.url')) eq_('GET', root.get_tag('http.method')) eq_('200', root.get_tag('http.status_code')) @@ -135,7 +136,7 @@ def test_static_handler(self): span = traces[0][0] # root span created in the middleware eq_('aiohttp.request', span.name) - 
eq_('/statics', span.resource) + eq_('GET /statics', span.resource) eq_('/statics/empty.txt', span.get_tag('http.url')) eq_('GET', span.get_tag('http.method')) eq_('200', span.get_tag('http.status_code')) @@ -171,7 +172,7 @@ def test_exception(self): eq_(1, len(spans)) span = spans[0] eq_(1, span.error) - eq_('/exception', span.resource) + eq_('GET /exception', span.resource) eq_('error', span.get_tag('error.msg')) ok_('Exception: error' in span.get_tag('error.stack')) @@ -188,7 +189,7 @@ def test_async_exception(self): eq_(1, len(spans)) span = spans[0] eq_(1, span.error) - eq_('/async_exception', span.resource) + eq_('GET /async_exception', span.resource) eq_('error', span.get_tag('error.msg')) ok_('Exception: error' in span.get_tag('error.stack')) @@ -205,7 +206,7 @@ def test_wrapped_coroutine(self): spans = traces[0] eq_(2, len(spans)) span = spans[0] - eq_('/wrapped_coroutine', span.resource) + eq_('GET /wrapped_coroutine', span.resource) span = spans[1] eq_('nested', span.name) ok_(span.duration > 0.25, @@ -366,7 +367,7 @@ def _assert_200_parenting(self, traces): eq_('aiohttp.request', inner_span.name) eq_('aiohttp-web', inner_span.service) eq_('http', inner_span.span_type) - eq_('/', inner_span.resource) + eq_('GET /', inner_span.resource) eq_('/', inner_span.get_tag('http.url')) eq_('GET', inner_span.get_tag('http.method')) eq_('200', inner_span.get_tag('http.status_code')) diff --git a/tests/contrib/aiohttp/test_request.py b/tests/contrib/aiohttp/test_request.py index 5bd9e3227fe..17d0da83a62 100644 --- a/tests/contrib/aiohttp/test_request.py +++ b/tests/contrib/aiohttp/test_request.py @@ -1,3 +1,4 @@ +# flake8: noqa import threading import asyncio import aiohttp_jinja2 @@ -45,7 +46,7 @@ def test_full_request(self): # request eq_('aiohttp-web', request_span.service) eq_('aiohttp.request', request_span.name) - eq_('/template/', request_span.resource) + eq_('GET /template/', request_span.resource) # template eq_('aiohttp-web', template_span.service) 
eq_('aiohttp.template', template_span.name) diff --git a/tests/contrib/aiohttp/test_request_safety.py b/tests/contrib/aiohttp/test_request_safety.py index 3e373b05dcb..76c955f20de 100644 --- a/tests/contrib/aiohttp/test_request_safety.py +++ b/tests/contrib/aiohttp/test_request_safety.py @@ -1,3 +1,4 @@ +# flake8: noqa import threading import asyncio import aiohttp_jinja2 @@ -47,7 +48,7 @@ def test_full_request(self): # request eq_('aiohttp-web', request_span.service) eq_('aiohttp.request', request_span.name) - eq_('/template/', request_span.resource) + eq_('GET /template/', request_span.resource) # template eq_('aiohttp-web', template_span.service) eq_('aiohttp.template', template_span.name) diff --git a/tests/contrib/aiohttp/test_templates.py b/tests/contrib/aiohttp/test_templates.py index 2b2fc30dfad..84b648628f1 100644 --- a/tests/contrib/aiohttp/test_templates.py +++ b/tests/contrib/aiohttp/test_templates.py @@ -1,3 +1,4 @@ +# flake8: noqa import asyncio import aiohttp_jinja2 diff --git a/tests/contrib/aiopg/py35/__init__.py b/tests/contrib/aiopg/py35/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/contrib/aiopg/test_aiopg_35.py b/tests/contrib/aiopg/py35/test.py similarity index 94% rename from tests/contrib/aiopg/test_aiopg_35.py rename to tests/contrib/aiopg/py35/test.py index 9a1f018ee8a..8b56ed1af80 100644 --- a/tests/contrib/aiopg/test_aiopg_35.py +++ b/tests/contrib/aiopg/py35/test.py @@ -1,3 +1,5 @@ +# flake8: noqa +# DEV: Skip linting, we lint with Python 2, we'll get SyntaxErrors from `async` # stdlib import asyncio diff --git a/tests/contrib/aiopg/test_aiopg.py b/tests/contrib/aiopg/test.py similarity index 98% rename from tests/contrib/aiopg/test_aiopg.py rename to tests/contrib/aiopg/test.py index 219345a8748..2c9923c7740 100644 --- a/tests/contrib/aiopg/test_aiopg.py +++ b/tests/contrib/aiopg/test.py @@ -1,3 +1,5 @@ +# flake8: noqa +# DEV: Skip linting, we lint with Python 2, we'll get SyntaxErrors from `yield 
from` # stdlib import time import asyncio diff --git a/tests/contrib/asyncio/test_helpers.py b/tests/contrib/asyncio/test_helpers.py index dc22943fa27..6a26fba1cb7 100644 --- a/tests/contrib/asyncio/test_helpers.py +++ b/tests/contrib/asyncio/test_helpers.py @@ -1,3 +1,5 @@ +# flake8: noqa +# DEV: Skip linting, we lint with Python 2, we'll get SyntaxErrors from `yield from` import asyncio from nose.tools import eq_, ok_ diff --git a/tests/contrib/asyncio/test_tracer.py b/tests/contrib/asyncio/test_tracer.py index 1dcf85f2cd3..d8b477c375c 100644 --- a/tests/contrib/asyncio/test_tracer.py +++ b/tests/contrib/asyncio/test_tracer.py @@ -1,3 +1,5 @@ +# flake8: noqa +# DEV: Skip linting, we lint with Python 2, we'll get SyntaxErrors from `yield from` import asyncio from asyncio import BaseEventLoop diff --git a/tests/contrib/asyncio/test_tracer_safety.py b/tests/contrib/asyncio/test_tracer_safety.py index 7962886f59d..f3c9369a499 100644 --- a/tests/contrib/asyncio/test_tracer_safety.py +++ b/tests/contrib/asyncio/test_tracer_safety.py @@ -1,3 +1,5 @@ +# flake8: noqa +# DEV: Skip linting, we lint with Python 2, we'll get SyntaxErrors from `yield from` import asyncio from nose.tools import eq_, ok_ diff --git a/tests/contrib/boto/test.py b/tests/contrib/boto/test.py index ca373a2de12..ba76b391080 100644 --- a/tests/contrib/boto/test.py +++ b/tests/contrib/boto/test.py @@ -2,7 +2,6 @@ import unittest # 3p -from nose.tools import eq_ import boto.ec2 import boto.s3 import boto.awslambda @@ -41,31 +40,31 @@ def test_ec2_client(self): ec2.get_all_instances() spans = writer.pop() assert spans - eq_(len(spans), 1) + self.assertEqual(len(spans), 1) span = spans[0] - eq_(span.get_tag('aws.operation'), "DescribeInstances") - eq_(span.get_tag(http.STATUS_CODE), "200") - eq_(span.get_tag(http.METHOD), "POST") - eq_(span.get_tag('aws.region'), "us-west-2") + self.assertEqual(span.get_tag('aws.operation'), 'DescribeInstances') + self.assertEqual(span.get_tag(http.STATUS_CODE), '200') + 
self.assertEqual(span.get_tag(http.METHOD), 'POST') + self.assertEqual(span.get_tag('aws.region'), 'us-west-2') # Create an instance ec2.run_instances(21) spans = writer.pop() assert spans - eq_(len(spans), 1) + self.assertEqual(len(spans), 1) span = spans[0] - eq_(span.get_tag('aws.operation'), "RunInstances") - eq_(span.get_tag(http.STATUS_CODE), "200") - eq_(span.get_tag(http.METHOD), "POST") - eq_(span.get_tag('aws.region'), "us-west-2") - eq_(span.service, "test-boto-tracing.ec2") - eq_(span.resource, "ec2.runinstances") - eq_(span.name, "ec2.command") - eq_(span.span_type, 'boto') + self.assertEqual(span.get_tag('aws.operation'), 'RunInstances') + self.assertEqual(span.get_tag(http.STATUS_CODE), '200') + self.assertEqual(span.get_tag(http.METHOD), 'POST') + self.assertEqual(span.get_tag('aws.region'), 'us-west-2') + self.assertEqual(span.service, 'test-boto-tracing.ec2') + self.assertEqual(span.resource, 'ec2.runinstances') + self.assertEqual(span.name, 'ec2.command') + self.assertEqual(span.span_type, 'boto') @mock_s3 def test_s3_client(self): - s3 = boto.s3.connect_to_region("us-east-1") + s3 = boto.s3.connect_to_region('us-east-1') tracer = get_dummy_tracer() writer = tracer.writer Pin(service=self.TEST_SERVICE, tracer=tracer).onto(s3) @@ -73,48 +72,75 @@ def test_s3_client(self): s3.get_all_buckets() spans = writer.pop() assert spans - eq_(len(spans), 1) + self.assertEqual(len(spans), 1) span = spans[0] - eq_(span.get_tag(http.STATUS_CODE), "200") - eq_(span.get_tag(http.METHOD), "GET") - eq_(span.get_tag('aws.operation'), "get_all_buckets") + self.assertEqual(span.get_tag(http.STATUS_CODE), '200') + self.assertEqual(span.get_tag(http.METHOD), 'GET') + self.assertEqual(span.get_tag('aws.operation'), 'get_all_buckets') # Create a bucket command - s3.create_bucket("cheese") + s3.create_bucket('cheese') spans = writer.pop() assert spans - eq_(len(spans), 1) + self.assertEqual(len(spans), 1) span = spans[0] - eq_(span.get_tag(http.STATUS_CODE), "200") - 
eq_(span.get_tag(http.METHOD), "PUT") - eq_(span.get_tag('path'), '/') - eq_(span.get_tag('aws.operation'), "create_bucket") + self.assertEqual(span.get_tag(http.STATUS_CODE), '200') + self.assertEqual(span.get_tag(http.METHOD), 'PUT') + self.assertEqual(span.get_tag('path'), '/') + self.assertEqual(span.get_tag('aws.operation'), 'create_bucket') # Get the created bucket - s3.get_bucket("cheese") + s3.get_bucket('cheese') spans = writer.pop() assert spans - eq_(len(spans), 1) + self.assertEqual(len(spans), 1) span = spans[0] - eq_(span.get_tag(http.STATUS_CODE), "200") - eq_(span.get_tag(http.METHOD), "HEAD") - eq_(span.get_tag('aws.operation'), "head_bucket") - eq_(span.service, "test-boto-tracing.s3") - eq_(span.resource, "s3.head") - eq_(span.name, "s3.command") + self.assertEqual(span.get_tag(http.STATUS_CODE), '200') + self.assertEqual(span.get_tag(http.METHOD), 'HEAD') + self.assertEqual(span.get_tag('aws.operation'), 'head_bucket') + self.assertEqual(span.service, 'test-boto-tracing.s3') + self.assertEqual(span.resource, 's3.head') + self.assertEqual(span.name, 's3.command') # Checking for resource incase of error try: - s3.get_bucket("big_bucket") + s3.get_bucket('big_bucket') except Exception: spans = writer.pop() assert spans span = spans[0] - eq_(span.resource, "s3.head") + self.assertEqual(span.resource, 's3.head') + + @mock_s3 + def test_s3_put(self): + s3 = boto.s3.connect_to_region('us-east-1') + tracer = get_dummy_tracer() + writer = tracer.writer + Pin(service=self.TEST_SERVICE, tracer=tracer).onto(s3) + s3.create_bucket('mybucket') + bucket = s3.get_bucket('mybucket') + k = boto.s3.key.Key(bucket) + k.key = 'foo' + k.set_contents_from_string('bar') + + spans = writer.pop() + assert spans + # create bucket + self.assertEqual(len(spans), 3) + self.assertEqual(spans[0].get_tag('aws.operation'), 'create_bucket') + self.assertEqual(spans[0].get_tag(http.STATUS_CODE), '200') + self.assertEqual(spans[0].service, 'test-boto-tracing.s3') + 
self.assertEqual(spans[0].resource, 's3.put') + # get bucket + self.assertEqual(spans[1].get_tag('aws.operation'), 'head_bucket') + self.assertEqual(spans[1].resource, 's3.head') + # put object + self.assertEqual(spans[2].get_tag('aws.operation'), '_send_file_internal') + self.assertEqual(spans[2].resource, 's3.put') @mock_lambda def test_unpatch(self): - lamb = boto.awslambda.connect_to_region("us-east-2") + lamb = boto.awslambda.connect_to_region('us-east-2') tracer = get_dummy_tracer() writer = tracer.writer Pin(service=self.TEST_SERVICE, tracer=tracer).onto(lamb) @@ -127,7 +153,7 @@ def test_unpatch(self): @mock_s3 def test_double_patch(self): - s3 = boto.s3.connect_to_region("us-east-1") + s3 = boto.s3.connect_to_region('us-east-1') tracer = get_dummy_tracer() writer = tracer.writer Pin(service=self.TEST_SERVICE, tracer=tracer).onto(s3) @@ -136,14 +162,14 @@ def test_double_patch(self): patch() # Get the created bucket - s3.create_bucket("cheese") + s3.create_bucket('cheese') spans = writer.pop() assert spans - eq_(len(spans), 1) + self.assertEqual(len(spans), 1) @mock_lambda def test_lambda_client(self): - lamb = boto.awslambda.connect_to_region("us-east-2") + lamb = boto.awslambda.connect_to_region('us-east-2') tracer = get_dummy_tracer() writer = tracer.writer Pin(service=self.TEST_SERVICE, tracer=tracer).onto(lamb) @@ -153,14 +179,14 @@ def test_lambda_client(self): lamb.list_functions() spans = writer.pop() assert spans - eq_(len(spans), 2) + self.assertEqual(len(spans), 2) span = spans[0] - eq_(span.get_tag(http.STATUS_CODE), "200") - eq_(span.get_tag(http.METHOD), "GET") - eq_(span.get_tag('aws.region'), "us-east-2") - eq_(span.get_tag('aws.operation'), "list_functions") - eq_(span.service, "test-boto-tracing.lambda") - eq_(span.resource, "lambda.get") + self.assertEqual(span.get_tag(http.STATUS_CODE), '200') + self.assertEqual(span.get_tag(http.METHOD), 'GET') + self.assertEqual(span.get_tag('aws.region'), 'us-east-2') + 
self.assertEqual(span.get_tag('aws.operation'), 'list_functions') + self.assertEqual(span.service, 'test-boto-tracing.lambda') + self.assertEqual(span.resource, 'lambda.get') @mock_sts def test_sts_client(self): @@ -174,17 +200,19 @@ def test_sts_client(self): spans = writer.pop() assert spans span = spans[0] - eq_(span.get_tag('aws.region'), 'us-west-2') - eq_(span.get_tag('aws.operation'), 'GetFederationToken') - eq_(span.service, "test-boto-tracing.sts") - eq_(span.resource, "sts.getfederationtoken") + self.assertEqual(span.get_tag('aws.region'), 'us-west-2') + self.assertEqual(span.get_tag('aws.operation'), 'GetFederationToken') + self.assertEqual(span.service, 'test-boto-tracing.sts') + self.assertEqual(span.resource, 'sts.getfederationtoken') # checking for protection on sts against security leak - eq_(span.get_tag('args.path'), None) + self.assertIsNone(span.get_tag('args.path')) @skipUnless( False, - "Test to reproduce the case where args sent to patched function are None, can't be mocked: needs AWS crendentials") + ('Test to reproduce the case where args sent to patched function are None,' + 'can\'t be mocked: needs AWS crendentials'), + ) def test_elasticache_client(self): elasticache = boto.elasticache.connect_to_region('us-west-2') tracer = get_dummy_tracer() @@ -196,15 +224,15 @@ def test_elasticache_client(self): spans = writer.pop() assert spans span = spans[0] - eq_(span.get_tag('aws.region'), 'us-west-2') - eq_(span.service, "test-boto-tracing.elasticache") - eq_(span.resource, "elasticache") + self.assertEqual(span.get_tag('aws.region'), 'us-west-2') + self.assertEqual(span.service, 'test-boto-tracing.elasticache') + self.assertEqual(span.resource, 'elasticache') @mock_ec2 def test_ec2_client_ot(self): """OpenTracing compatibility check of the test_ec2_client test.""" - ec2 = boto.ec2.connect_to_region("us-west-2") + ec2 = boto.ec2.connect_to_region('us-west-2') tracer = get_dummy_tracer() ot_tracer = init_tracer('my_svc', tracer) writer = 
tracer.writer @@ -214,34 +242,34 @@ def test_ec2_client_ot(self): ec2.get_all_instances() spans = writer.pop() assert spans - eq_(len(spans), 2) + self.assertEqual(len(spans), 2) ot_span, dd_span = spans # confirm the parenting - eq_(ot_span.parent_id, None) - eq_(dd_span.parent_id, ot_span.span_id) + self.assertIsNone(ot_span.parent_id) + self.assertEqual(dd_span.parent_id, ot_span.span_id) - eq_(ot_span.resource, "ot_span") - eq_(dd_span.get_tag('aws.operation'), "DescribeInstances") - eq_(dd_span.get_tag(http.STATUS_CODE), "200") - eq_(dd_span.get_tag(http.METHOD), "POST") - eq_(dd_span.get_tag('aws.region'), "us-west-2") + self.assertEqual(ot_span.resource, 'ot_span') + self.assertEqual(dd_span.get_tag('aws.operation'), 'DescribeInstances') + self.assertEqual(dd_span.get_tag(http.STATUS_CODE), '200') + self.assertEqual(dd_span.get_tag(http.METHOD), 'POST') + self.assertEqual(dd_span.get_tag('aws.region'), 'us-west-2') with ot_tracer.start_active_span('ot_span'): ec2.run_instances(21) spans = writer.pop() assert spans - eq_(len(spans), 2) + self.assertEqual(len(spans), 2) ot_span, dd_span = spans # confirm the parenting - eq_(ot_span.parent_id, None) - eq_(dd_span.parent_id, ot_span.span_id) - - eq_(dd_span.get_tag('aws.operation'), "RunInstances") - eq_(dd_span.get_tag(http.STATUS_CODE), "200") - eq_(dd_span.get_tag(http.METHOD), "POST") - eq_(dd_span.get_tag('aws.region'), "us-west-2") - eq_(dd_span.service, "test-boto-tracing.ec2") - eq_(dd_span.resource, "ec2.runinstances") - eq_(dd_span.name, "ec2.command") + self.assertIsNone(ot_span.parent_id) + self.assertEqual(dd_span.parent_id, ot_span.span_id) + + self.assertEqual(dd_span.get_tag('aws.operation'), 'RunInstances') + self.assertEqual(dd_span.get_tag(http.STATUS_CODE), '200') + self.assertEqual(dd_span.get_tag(http.METHOD), 'POST') + self.assertEqual(dd_span.get_tag('aws.region'), 'us-west-2') + self.assertEqual(dd_span.service, 'test-boto-tracing.ec2') + self.assertEqual(dd_span.resource, 
'ec2.runinstances') + self.assertEqual(dd_span.name, 'ec2.command') diff --git a/tests/contrib/botocore/test.py b/tests/contrib/botocore/test.py index acb7844d199..0323b2c09db 100644 --- a/tests/contrib/botocore/test.py +++ b/tests/contrib/botocore/test.py @@ -1,8 +1,7 @@ # stdlib -import unittest +from unittest import TestCase # 3p -from nose.tools import eq_ import botocore.session from moto import mock_s3, mock_ec2, mock_lambda, mock_sqs, mock_kinesis, mock_kms @@ -10,13 +9,14 @@ from ddtrace import Pin from ddtrace.contrib.botocore.patch import patch, unpatch from ddtrace.ext import http +from ddtrace.compat import stringify # testing from tests.opentracer.utils import init_tracer from ...test_tracer import get_dummy_tracer -class BotocoreTest(unittest.TestCase): +class BotocoreTest(TestCase): """Botocore integration testsuite""" TEST_SERVICE = "test-botocore-tracing" @@ -24,6 +24,7 @@ class BotocoreTest(unittest.TestCase): def setUp(self): patch() self.session = botocore.session.get_session() + self.session.set_credentials(access_key='access-key', secret_key='secret-key') def tearDown(self): unpatch() @@ -41,16 +42,16 @@ def test_traced_client(self): spans = writer.pop() assert spans span = spans[0] - eq_(len(spans), 1) - eq_(span.get_tag('aws.agent'), "botocore") - eq_(span.get_tag('aws.region'), 'us-west-2') - eq_(span.get_tag('aws.operation'), 'DescribeInstances') - eq_(span.get_tag(http.STATUS_CODE), '200') - eq_(span.get_tag('retry_attempts'), '0') - eq_(span.service, "test-botocore-tracing.ec2") - eq_(span.resource, "ec2.describeinstances") - eq_(span.name, "ec2.command") - eq_(span.span_type, 'http') + self.assertEqual(len(spans), 1) + self.assertEqual(span.get_tag('aws.agent'), "botocore") + self.assertEqual(span.get_tag('aws.region'), 'us-west-2') + self.assertEqual(span.get_tag('aws.operation'), 'DescribeInstances') + self.assertEqual(span.get_tag(http.STATUS_CODE), '200') + self.assertEqual(span.get_tag('retry_attempts'), '0') + 
self.assertEqual(span.service, 'test-botocore-tracing.ec2') + self.assertEqual(span.resource, 'ec2.describeinstances') + self.assertEqual(span.name, 'ec2.command') + self.assertEqual(span.span_type, 'http') @mock_s3 def test_s3_client(self): @@ -65,11 +66,11 @@ def test_s3_client(self): spans = writer.pop() assert spans span = spans[0] - eq_(len(spans), 2) - eq_(span.get_tag('aws.operation'), 'ListBuckets') - eq_(span.get_tag(http.STATUS_CODE), '200') - eq_(span.service, "test-botocore-tracing.s3") - eq_(span.resource, "s3.listbuckets") + self.assertEqual(len(spans), 2) + self.assertEqual(span.get_tag('aws.operation'), 'ListBuckets') + self.assertEqual(span.get_tag(http.STATUS_CODE), '200') + self.assertEqual(span.service, 'test-botocore-tracing.s3') + self.assertEqual(span.resource, 's3.listbuckets') # testing for span error try: @@ -78,8 +79,33 @@ def test_s3_client(self): spans = writer.pop() assert spans span = spans[0] - eq_(span.error, 1) - eq_(span.resource, "s3.listobjects") + self.assertEqual(span.error, 1) + self.assertEqual(span.resource, 's3.listobjects') + + @mock_s3 + def test_s3_put(self): + params = dict(Key='foo', Bucket='mybucket', Body=b'bar') + s3 = self.session.create_client('s3', region_name='us-west-2') + tracer = get_dummy_tracer() + writer = tracer.writer + Pin(service=self.TEST_SERVICE, tracer=tracer).onto(s3) + s3.create_bucket(Bucket='mybucket') + s3.put_object(**params) + + spans = writer.pop() + assert spans + span = spans[0] + self.assertEqual(len(spans), 2) + self.assertEqual(span.get_tag('aws.operation'), 'CreateBucket') + self.assertEqual(span.get_tag(http.STATUS_CODE), '200') + self.assertEqual(span.service, 'test-botocore-tracing.s3') + self.assertEqual(span.resource, 's3.createbucket') + self.assertEqual(spans[1].get_tag('aws.operation'), 'PutObject') + self.assertEqual(spans[1].resource, 's3.putobject') + self.assertEqual(spans[1].get_tag('params.Key'), stringify(params['Key'])) + 
self.assertEqual(spans[1].get_tag('params.Bucket'), stringify(params['Bucket'])) + # confirm blacklisted + self.assertIsNone(spans[1].get_tag('params.Body')) @mock_sqs def test_sqs_client(self): @@ -93,12 +119,12 @@ def test_sqs_client(self): spans = writer.pop() assert spans span = spans[0] - eq_(len(spans), 1) - eq_(span.get_tag('aws.region'), 'us-east-1') - eq_(span.get_tag('aws.operation'), 'ListQueues') - eq_(span.get_tag(http.STATUS_CODE), '200') - eq_(span.service, "test-botocore-tracing.sqs") - eq_(span.resource, "sqs.listqueues") + self.assertEqual(len(spans), 1) + self.assertEqual(span.get_tag('aws.region'), 'us-east-1') + self.assertEqual(span.get_tag('aws.operation'), 'ListQueues') + self.assertEqual(span.get_tag(http.STATUS_CODE), '200') + self.assertEqual(span.service, 'test-botocore-tracing.sqs') + self.assertEqual(span.resource, 'sqs.listqueues') @mock_kinesis def test_kinesis_client(self): @@ -112,12 +138,12 @@ def test_kinesis_client(self): spans = writer.pop() assert spans span = spans[0] - eq_(len(spans), 1) - eq_(span.get_tag('aws.region'), 'us-east-1') - eq_(span.get_tag('aws.operation'), 'ListStreams') - eq_(span.get_tag(http.STATUS_CODE), '200') - eq_(span.service, "test-botocore-tracing.kinesis") - eq_(span.resource, "kinesis.liststreams") + self.assertEqual(len(spans), 1) + self.assertEqual(span.get_tag('aws.region'), 'us-east-1') + self.assertEqual(span.get_tag('aws.operation'), 'ListStreams') + self.assertEqual(span.get_tag(http.STATUS_CODE), '200') + self.assertEqual(span.service, 'test-botocore-tracing.kinesis') + self.assertEqual(span.resource, 'kinesis.liststreams') @mock_kinesis def test_unpatch(self): @@ -146,7 +172,7 @@ def test_double_patch(self): spans = writer.pop() assert spans - eq_(len(spans), 1) + self.assertEqual(len(spans), 1) @mock_lambda def test_lambda_client(self): @@ -160,12 +186,12 @@ def test_lambda_client(self): spans = writer.pop() assert spans span = spans[0] - eq_(len(spans), 1) - 
eq_(span.get_tag('aws.region'), 'us-east-1') - eq_(span.get_tag('aws.operation'), 'ListFunctions') - eq_(span.get_tag(http.STATUS_CODE), '200') - eq_(span.service, "test-botocore-tracing.lambda") - eq_(span.resource, "lambda.listfunctions") + self.assertEqual(len(spans), 1) + self.assertEqual(span.get_tag('aws.region'), 'us-east-1') + self.assertEqual(span.get_tag('aws.operation'), 'ListFunctions') + self.assertEqual(span.get_tag(http.STATUS_CODE), '200') + self.assertEqual(span.service, 'test-botocore-tracing.lambda') + self.assertEqual(span.resource, 'lambda.listfunctions') @mock_kms def test_kms_client(self): @@ -179,15 +205,15 @@ def test_kms_client(self): spans = writer.pop() assert spans span = spans[0] - eq_(len(spans), 1) - eq_(span.get_tag('aws.region'), 'us-east-1') - eq_(span.get_tag('aws.operation'), 'ListKeys') - eq_(span.get_tag(http.STATUS_CODE), '200') - eq_(span.service, "test-botocore-tracing.kms") - eq_(span.resource, "kms.listkeys") + self.assertEqual(len(spans), 1) + self.assertEqual(span.get_tag('aws.region'), 'us-east-1') + self.assertEqual(span.get_tag('aws.operation'), 'ListKeys') + self.assertEqual(span.get_tag(http.STATUS_CODE), '200') + self.assertEqual(span.service, 'test-botocore-tracing.kms') + self.assertEqual(span.resource, 'kms.listkeys') # checking for protection on sts against security leak - eq_(span.get_tag('params'), None) + self.assertIsNone(span.get_tag('params')) @mock_ec2 def test_traced_client_ot(self): @@ -203,26 +229,22 @@ def test_traced_client_ot(self): spans = writer.pop() assert spans - eq_(len(spans), 2) + self.assertEqual(len(spans), 2) ot_span, dd_span = spans # confirm the parenting - eq_(ot_span.parent_id, None) - eq_(dd_span.parent_id, ot_span.span_id) - - eq_(ot_span.name, 'ec2_op') - eq_(ot_span.service, 'ec2_svc') - - eq_(dd_span.get_tag('aws.agent'), "botocore") - eq_(dd_span.get_tag('aws.region'), 'us-west-2') - eq_(dd_span.get_tag('aws.operation'), 'DescribeInstances') - 
eq_(dd_span.get_tag(http.STATUS_CODE), '200') - eq_(dd_span.get_tag('retry_attempts'), '0') - eq_(dd_span.service, "test-botocore-tracing.ec2") - eq_(dd_span.resource, "ec2.describeinstances") - eq_(dd_span.name, "ec2.command") - - -if __name__ == '__main__': - unittest.main() + self.assertIsNone(ot_span.parent_id) + self.assertEqual(dd_span.parent_id, ot_span.span_id) + + self.assertEqual(ot_span.name, 'ec2_op') + self.assertEqual(ot_span.service, 'ec2_svc') + + self.assertEqual(dd_span.get_tag('aws.agent'), 'botocore') + self.assertEqual(dd_span.get_tag('aws.region'), 'us-west-2') + self.assertEqual(dd_span.get_tag('aws.operation'), 'DescribeInstances') + self.assertEqual(dd_span.get_tag(http.STATUS_CODE), '200') + self.assertEqual(dd_span.get_tag('retry_attempts'), '0') + self.assertEqual(dd_span.service, 'test-botocore-tracing.ec2') + self.assertEqual(dd_span.resource, 'ec2.describeinstances') + self.assertEqual(dd_span.name, 'ec2.command') diff --git a/tests/contrib/bottle/test_autopatch.py b/tests/contrib/bottle/test_autopatch.py index 98fd6d8b883..16aaff7a2cb 100644 --- a/tests/contrib/bottle/test_autopatch.py +++ b/tests/contrib/bottle/test_autopatch.py @@ -7,7 +7,6 @@ from tests.test_tracer import get_dummy_tracer from ddtrace import compat -from ddtrace.contrib.bottle import TracePlugin SERVICE = 'bottle-app' diff --git a/tests/contrib/cassandra/test.py b/tests/contrib/cassandra/test.py index a92519218da..8673659fcde 100644 --- a/tests/contrib/cassandra/test.py +++ b/tests/contrib/cassandra/test.py @@ -30,6 +30,7 @@ logging.getLogger('cassandra').setLevel(logging.INFO) + def setUpModule(): # skip all the modules if the Cluster is not available if not Cluster: @@ -39,13 +40,18 @@ def setUpModule(): cluster = Cluster(port=CASSANDRA_CONFIG['port'], connect_timeout=CONNECTION_TIMEOUT_SECS) session = cluster.connect() session.execute('DROP KEYSPACE IF EXISTS test', timeout=10) - session.execute("CREATE KEYSPACE if not exists test WITH REPLICATION = { 'class' : 
'SimpleStrategy', 'replication_factor': 1};") + session.execute( + "CREATE KEYSPACE if not exists test WITH REPLICATION = { 'class' : 'SimpleStrategy', 'replication_factor': 1};" + ) session.execute('CREATE TABLE if not exists test.person (name text PRIMARY KEY, age int, description text)') session.execute('CREATE TABLE if not exists test.person_write (name text PRIMARY KEY, age int, description text)') session.execute("INSERT INTO test.person (name, age, description) VALUES ('Cassandra', 100, 'A cruel mistress')") - session.execute("INSERT INTO test.person (name, age, description) VALUES ('Athena', 100, 'Whose shield is thunder')") + session.execute( + "INSERT INTO test.person (name, age, description) VALUES ('Athena', 100, 'Whose shield is thunder')" + ) session.execute("INSERT INTO test.person (name, age, description) VALUES ('Calypso', 100, 'Softly-braided nymph')") + def tearDownModule(): # destroy the KEYSPACE cluster = Cluster(port=CASSANDRA_CONFIG['port'], connect_timeout=CONNECTION_TIMEOUT_SECS) @@ -152,9 +158,11 @@ def execute_fn(session, query): event = Event() result = [] future = session.execute_async(query) + def callback(results): result.append(ResultSet(future, results)) event.set() + future.add_callback(callback) event.wait() return result[0] @@ -179,7 +187,7 @@ def test_paginated_query(self): writer = tracer.writer statement = SimpleStatement(self.TEST_QUERY_PAGINATED, fetch_size=1) result = session.execute(statement) - #iterate over all pages + # iterate over all pages results = list(result) eq_(len(results), 3) @@ -257,8 +265,14 @@ def test_batch_statement(self): writer = tracer.writer batch = BatchStatement() - batch.add(SimpleStatement('INSERT INTO test.person_write (name, age, description) VALUES (%s, %s, %s)'), ('Joe', 1, 'a')) - batch.add(SimpleStatement('INSERT INTO test.person_write (name, age, description) VALUES (%s, %s, %s)'), ('Jane', 2, 'b')) + batch.add( + SimpleStatement('INSERT INTO test.person_write (name, age, description) 
VALUES (%s, %s, %s)'), + ('Joe', 1, 'a'), + ) + batch.add( + SimpleStatement('INSERT INTO test.person_write (name, age, description) VALUES (%s, %s, %s)'), + ('Jane', 2, 'b'), + ) session.execute(batch) spans = writer.pop() @@ -286,6 +300,7 @@ def _traced_session(self): Pin.get_from(self.cluster).clone(tracer=tracer).onto(self.cluster) return self.cluster.connect(self.TEST_KEYSPACE), tracer + class TestCassPatchAll(TestCassPatchDefault): """Test Cassandra instrumentation with patching and custom service on all clusters""" diff --git a/tests/contrib/celery/test_integration.py b/tests/contrib/celery/test_integration.py index 806a403e9d0..2590a36225b 100644 --- a/tests/contrib/celery/test_integration.py +++ b/tests/contrib/celery/test_integration.py @@ -286,7 +286,7 @@ def run(self): def test_shared_task(self): # Ensure Django Shared Task are supported @celery.shared_task - def add(x ,y): + def add(x, y): return x + y res = add.apply([2, 2]) diff --git a/tests/contrib/celery/test_patch.py b/tests/contrib/celery/test_patch.py index de3712f187a..0ade556af34 100644 --- a/tests/contrib/celery/test_patch.py +++ b/tests/contrib/celery/test_patch.py @@ -19,4 +19,3 @@ def test_patch_before_import(self): app = celery.Celery() ok_(Pin.get_from(app) is not None) - diff --git a/tests/contrib/dbapi/test_unit.py b/tests/contrib/dbapi/test_unit.py index 318d861c6b0..fe9fc75b25e 100644 --- a/tests/contrib/dbapi/test_unit.py +++ b/tests/contrib/dbapi/test_unit.py @@ -1,9 +1,8 @@ import unittest import mock -from ddtrace import Pin, Span +from ddtrace import Pin from ddtrace.contrib.dbapi import TracedCursor, TracedConnection -from ddtrace.ext import AppTypes, sql from tests.test_tracer import get_dummy_tracer @@ -49,7 +48,6 @@ def test_fetchall_wrapped_is_called_and_returned(self): def test_fetchmany_wrapped_is_called_and_returned(self): cursor = self.cursor - tracer = self.tracer cursor.rowcount = 0 cursor.fetchmany.return_value = '__result__' pin = Pin('pin_name', 
tracer=self.tracer) @@ -178,8 +176,8 @@ def method(): assert span.get_metric('db.rowcount') == 123, 'Row count is set as a metric' assert span.get_tag('sql.rows') == '123', 'Row count is set as a tag (for legacy django cursor replacement)' -class TestTracedConnection(unittest.TestCase): +class TestTracedConnection(unittest.TestCase): def setUp(self): self.connection = mock.Mock() self.tracer = get_dummy_tracer() diff --git a/tests/contrib/django/app/middlewares.py b/tests/contrib/django/app/middlewares.py index 787aa9557b1..c3169972915 100644 --- a/tests/contrib/django/app/middlewares.py +++ b/tests/contrib/django/app/middlewares.py @@ -11,6 +11,7 @@ class CatchExceptionMiddleware(MiddlewareClass): def process_exception(self, request, exception): return HttpResponse(status=500) + class HandleErrorMiddlewareSuccess(MiddlewareClass): """ Converts an HttpError (that may be returned from an exception handler) generated by a view or previous middleware and returns a 200 @@ -22,6 +23,7 @@ def process_response(self, request, response): return response + class HandleErrorMiddlewareClientError(MiddlewareClass): """ Converts an HttpError (that may be returned from an exception handler) generated by a view or previous middleware and returns a 404 diff --git a/tests/contrib/django/app/views.py b/tests/contrib/django/app/views.py index 585ee1fb803..8e570a536b7 100644 --- a/tests/contrib/django/app/views.py +++ b/tests/contrib/django/app/views.py @@ -32,6 +32,7 @@ def get(self, request, *args, **kwargs): def function_view(request): return HttpResponse(status=200) + def error_500(request): raise Exception('Error 500') @@ -53,6 +54,7 @@ def item_title(self, item): def item_description(self, item): return 'empty' + partial_view = partial(function_view) # disabling flake8 test below, yes, declaring a func like this is bad, we know diff --git a/tests/contrib/django/compat.py b/tests/contrib/django/compat.py index 205f2b531b6..c591277ff72 100644 --- a/tests/contrib/django/compat.py 
+++ b/tests/contrib/django/compat.py @@ -1,3 +1,5 @@ +__all__ = ['reverse'] + try: from django.core.urlresolvers import reverse except ImportError: diff --git a/tests/contrib/django/test_autopatching.py b/tests/contrib/django/test_autopatching.py index d414febeeb8..b4346954ad8 100644 --- a/tests/contrib/django/test_autopatching.py +++ b/tests/contrib/django/test_autopatching.py @@ -20,7 +20,6 @@ def test_autopatching_middleware_classes(self): eq_(settings.MIDDLEWARE_CLASSES[0], 'ddtrace.contrib.django.TraceMiddleware') eq_(settings.MIDDLEWARE_CLASSES[-1], 'ddtrace.contrib.django.TraceExceptionMiddleware') - @skipIf(django.VERSION >= (1, 10), 'skip if version above 1.10') def test_autopatching_twice_middleware_classes(self): ok_(django._datadog_patch) @@ -51,7 +50,6 @@ def test_autopatching_middleware(self): ok_(not getattr(settings, 'MIDDLEWARE_CLASSES', None) or 'ddtrace.contrib.django.TraceExceptionMiddleware' not in settings.MIDDLEWARE_CLASSES) - @skipIf(django.VERSION < (1, 10), 'skip if version is below 1.10') def test_autopatching_twice_middleware(self): ok_(django._datadog_patch) diff --git a/tests/contrib/django/test_cache_backends.py b/tests/contrib/django/test_cache_backends.py index c25e660485f..655cdedc972 100644 --- a/tests/contrib/django/test_cache_backends.py +++ b/tests/contrib/django/test_cache_backends.py @@ -1,7 +1,7 @@ import time # 3rd party -from nose.tools import eq_, ok_ +from nose.tools import eq_ from django.core.cache import caches # testing @@ -20,7 +20,7 @@ def test_cache_redis_get(self): # (trace) the cache miss start = time.time() - hit = cache.get('missing_key') + cache.get('missing_key') end = time.time() # tests @@ -49,7 +49,7 @@ def test_cache_redis_get_many(self): # (trace) the cache miss start = time.time() - hit = cache.get_many(['missing_key', 'another_key']) + cache.get_many(['missing_key', 'another_key']) end = time.time() # tests @@ -78,7 +78,7 @@ def test_cache_pylibmc_get(self): # (trace) the cache miss start = 
time.time() - hit = cache.get('missing_key') + cache.get('missing_key') end = time.time() # tests @@ -107,7 +107,7 @@ def test_cache_pylibmc_get_many(self): # (trace) the cache miss start = time.time() - hit = cache.get_many(['missing_key', 'another_key']) + cache.get_many(['missing_key', 'another_key']) end = time.time() # tests @@ -136,7 +136,7 @@ def test_cache_memcached_get(self): # (trace) the cache miss start = time.time() - hit = cache.get('missing_key') + cache.get('missing_key') end = time.time() # tests @@ -165,7 +165,7 @@ def test_cache_memcached_get_many(self): # (trace) the cache miss start = time.time() - hit = cache.get_many(['missing_key', 'another_key']) + cache.get_many(['missing_key', 'another_key']) end = time.time() # tests @@ -194,7 +194,7 @@ def test_cache_django_pylibmc_get(self): # (trace) the cache miss start = time.time() - hit = cache.get('missing_key') + cache.get('missing_key') end = time.time() # tests @@ -223,7 +223,7 @@ def test_cache_django_pylibmc_get_many(self): # (trace) the cache miss start = time.time() - hit = cache.get_many(['missing_key', 'another_key']) + cache.get_many(['missing_key', 'another_key']) end = time.time() # tests diff --git a/tests/contrib/django/test_cache_client.py b/tests/contrib/django/test_cache_client.py index f79f1cbe52e..64e2ff15116 100644 --- a/tests/contrib/django/test_cache_client.py +++ b/tests/contrib/django/test_cache_client.py @@ -19,7 +19,7 @@ def test_cache_get(self): # (trace) the cache miss start = time.time() - hit = cache.get('missing_key') + cache.get('missing_key') end = time.time() # tests @@ -48,7 +48,7 @@ def test_cache_service_can_be_overriden(self): cache = caches['default'] # (trace) the cache miss - hit = cache.get('missing_key') + cache.get('missing_key') # tests spans = self.tracer.writer.pop() @@ -63,9 +63,7 @@ def test_cache_disabled(self): cache = caches['default'] # (trace) the cache miss - start = time.time() - hit = cache.get('missing_key') - end = time.time() + 
cache.get('missing_key') # tests spans = self.tracer.writer.pop() @@ -77,7 +75,7 @@ def test_cache_set(self): # (trace) the cache miss start = time.time() - hit = cache.set('a_new_key', 50) + cache.set('a_new_key', 50) end = time.time() # tests @@ -106,7 +104,7 @@ def test_cache_add(self): # (trace) the cache miss start = time.time() - hit = cache.add('a_new_key', 50) + cache.add('a_new_key', 50) end = time.time() # tests @@ -135,7 +133,7 @@ def test_cache_delete(self): # (trace) the cache miss start = time.time() - hit = cache.delete('an_existing_key') + cache.delete('an_existing_key') end = time.time() # tests @@ -166,7 +164,7 @@ def test_cache_incr(self): # (trace) the cache miss start = time.time() - hit = cache.incr('value') + cache.incr('value') end = time.time() # tests @@ -206,7 +204,7 @@ def test_cache_decr(self): # (trace) the cache miss start = time.time() - hit = cache.decr('value') + cache.decr('value') end = time.time() # tests @@ -251,7 +249,7 @@ def test_cache_get_many(self): # (trace) the cache miss start = time.time() - hit = cache.get_many(['missing_key', 'another_key']) + cache.get_many(['missing_key', 'another_key']) end = time.time() # tests @@ -294,7 +292,7 @@ def test_cache_set_many(self): # (trace) the cache miss start = time.time() - hit = cache.set_many({'first_key': 1, 'second_key': 2}) + cache.set_many({'first_key': 1, 'second_key': 2}) end = time.time() # tests @@ -333,7 +331,7 @@ def test_cache_delete_many(self): # (trace) the cache miss start = time.time() - hit = cache.delete_many(['missing_key', 'another_key']) + cache.delete_many(['missing_key', 'another_key']) end = time.time() # tests diff --git a/tests/contrib/django/test_cache_views.py b/tests/contrib/django/test_cache_views.py index 007fa920d58..611c06d3312 100644 --- a/tests/contrib/django/test_cache_views.py +++ b/tests/contrib/django/test_cache_views.py @@ -1,7 +1,5 @@ -import time - # 3rd party -from nose.tools import eq_, ok_ +from nose.tools import eq_ # testing from 
.compat import reverse @@ -47,7 +45,10 @@ def test_cached_view(self): expected_meta_view = { 'django.cache.backend': 'django.core.cache.backends.locmem.LocMemCache', - 'django.cache.key': 'views.decorators.cache.cache_page..GET.03cdc1cc4aab71b038a6764e5fcabb82.d41d8cd98f00b204e9800998ecf8427e.en-us', + 'django.cache.key': ( + 'views.decorators.cache.cache_page..' + 'GET.03cdc1cc4aab71b038a6764e5fcabb82.d41d8cd98f00b204e9800998ecf8427e.en-us' + ), 'env': 'test', } diff --git a/tests/contrib/django/test_instrumentation.py b/tests/contrib/django/test_instrumentation.py index 3578af04a5b..e4dfd73e982 100644 --- a/tests/contrib/django/test_instrumentation.py +++ b/tests/contrib/django/test_instrumentation.py @@ -1,9 +1,5 @@ -import os -import time - # 3rd party from nose.tools import eq_, ok_ -from django.test import override_settings # project from ddtrace.contrib.django.conf import settings, DatadogSettings @@ -29,7 +25,8 @@ def test_environment_vars(self): # environment strings are properly converted with set_env( DATADOG_TRACE_AGENT_HOSTNAME='agent.consul.local', - DATADOG_TRACE_AGENT_PORT='58126'): + DATADOG_TRACE_AGENT_PORT='58126' + ): settings = DatadogSettings() eq_(settings.AGENT_HOSTNAME, 'agent.consul.local') eq_(settings.AGENT_PORT, 58126) diff --git a/tests/contrib/django/test_templates.py b/tests/contrib/django/test_templates.py index 417866c2490..8866db0318d 100644 --- a/tests/contrib/django/test_templates.py +++ b/tests/contrib/django/test_templates.py @@ -2,12 +2,8 @@ # 3rd party from nose.tools import eq_ -from django.test import SimpleTestCase from django.template import Context, Template -# project -from ddtrace.contrib.django.templates import patch_template - # testing from .utils import DjangoTraceTestCase, override_ddtrace_settings diff --git a/tests/contrib/django/utils.py b/tests/contrib/django/utils.py index 93fb0349fc2..98bd6fb5269 100644 --- a/tests/contrib/django/utils.py +++ b/tests/contrib/django/utils.py @@ -43,6 +43,7 @@ def 
tearDown(self): self.tracer.writer.spans = [] self.tracer.writer.pop_traces() + class override_ddtrace_settings(object): def __init__(self, *args, **kwargs): self.items = list(kwargs.items()) diff --git a/tests/contrib/djangorestframework/app/exceptions.py b/tests/contrib/djangorestframework/app/exceptions.py index 0443b109c4f..0f4fce70e48 100644 --- a/tests/contrib/djangorestframework/app/exceptions.py +++ b/tests/contrib/djangorestframework/app/exceptions.py @@ -1,6 +1,5 @@ from rest_framework.views import exception_handler from rest_framework.response import Response -from rest_framework.exceptions import APIException from rest_framework import status diff --git a/tests/contrib/djangorestframework/app/views.py b/tests/contrib/djangorestframework/app/views.py index c9ca758a860..88179c67716 100644 --- a/tests/contrib/djangorestframework/app/views.py +++ b/tests/contrib/djangorestframework/app/views.py @@ -1,9 +1,7 @@ from django.conf.urls import url, include -from django.contrib.auth.models import User, Group -from django.http import HttpResponse +from django.contrib.auth.models import User from rest_framework import viewsets, routers, serializers -from rest_framework.exceptions import APIException class UserSerializer(serializers.HyperlinkedModelSerializer): diff --git a/tests/contrib/djangorestframework/runtests.py b/tests/contrib/djangorestframework/runtests.py index b2fff2ba72c..84005b47bf1 100755 --- a/tests/contrib/djangorestframework/runtests.py +++ b/tests/contrib/djangorestframework/runtests.py @@ -8,7 +8,7 @@ app_to_test = "tests/contrib/djangorestframework" # project_root is the path of dd-trace-py (ex: ~/go/src/DataDog/dd-trace-py/) - # We need to append the project_root path to the PYTHONPATH + # We need to append the project_root path to the PYTHONPATH # in order to specify all our modules import from the project_root. 
current_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) project_root = os.path.join(current_dir, '..', '..') diff --git a/tests/contrib/djangorestframework/test_djangorestframework.py b/tests/contrib/djangorestframework/test_djangorestframework.py index 4b0ce87cf4b..41841456751 100644 --- a/tests/contrib/djangorestframework/test_djangorestframework.py +++ b/tests/contrib/djangorestframework/test_djangorestframework.py @@ -5,6 +5,7 @@ from tests.contrib.django.utils import DjangoTraceTestCase + @skipIf(django.VERSION < (1, 10), 'requires django version >= 1.10') class RestFrameworkTest(DjangoTraceTestCase): def setUp(self): diff --git a/tests/contrib/elasticsearch/test.py b/tests/contrib/elasticsearch/test.py index 18c6e840acc..8d2747a60db 100644 --- a/tests/contrib/elasticsearch/test.py +++ b/tests/contrib/elasticsearch/test.py @@ -52,7 +52,7 @@ def test_elasticsearch(self): es = elasticsearch.Elasticsearch(transport_class=transport_class, port=ELASTICSEARCH_CONFIG['port']) # Test index creation - mapping = {"mapping": {"properties": {"created": {"type":"date", "format": "yyyy-MM-dd"}}}} + mapping = {'mapping': {'properties': {'created': {'type': 'date', 'format': 'yyyy-MM-dd'}}}} es.indices.create(index=self.ES_INDEX, ignore=400, body=mapping) spans = writer.pop() @@ -68,7 +68,7 @@ def test_elasticsearch(self): eq_(span.resource, "PUT /%s" % self.ES_INDEX) # Put data - args = {'index':self.ES_INDEX, 'doc_type':self.ES_TYPE} + args = {'index': self.ES_INDEX, 'doc_type': self.ES_TYPE} es.index(id=10, body={'name': 'ten', 'created': datetime.date(2016, 1, 1)}, **args) es.index(id=11, body={'name': 'eleven', 'created': datetime.date(2016, 2, 1)}, **args) es.index(id=12, body={'name': 'twelve', 'created': datetime.date(2016, 3, 1)}, **args) @@ -94,8 +94,11 @@ def test_elasticsearch(self): eq_(span.get_tag('elasticsearch.url'), "/%s/_refresh" % self.ES_INDEX) # Search data - result = es.search(sort=['name:desc'], size=100, - 
body={"query":{"match_all":{}}}, **args) + result = es.search( + sort=['name:desc'], size=100, + body={'query': {'match_all': {}}}, + **args + ) assert len(result["hits"]["hits"]) == 3, result @@ -103,11 +106,15 @@ def test_elasticsearch(self): assert spans eq_(len(spans), 1) span = spans[0] - eq_(span.resource, - "GET /%s/%s/_search" % (self.ES_INDEX, self.ES_TYPE)) + eq_( + span.resource, + 'GET /%s/%s/_search' % (self.ES_INDEX, self.ES_TYPE), + ) eq_(span.get_tag('elasticsearch.method'), "GET") - eq_(span.get_tag('elasticsearch.url'), - "/%s/%s/_search" % (self.ES_INDEX, self.ES_TYPE)) + eq_( + span.get_tag('elasticsearch.url'), + '/%s/%s/_search' % (self.ES_INDEX, self.ES_TYPE), + ) eq_(span.get_tag('elasticsearch.body').replace(" ", ""), '{"query":{"match_all":{}}}') eq_(set(span.get_tag('elasticsearch.params').split('&')), {'sort=name%3Adesc', 'size=100'}) @@ -124,7 +131,7 @@ def test_elasticsearch(self): try: es.get(index="non_existent_index", id=100, doc_type="_all") eq_("error_not_raised", "elasticsearch.exceptions.TransportError") - except elasticsearch.exceptions.TransportError as e: + except elasticsearch.exceptions.TransportError: spans = writer.pop() assert spans span = spans[0] @@ -135,7 +142,7 @@ def test_elasticsearch(self): es.indices.create(index=10) es.indices.create(index=10) eq_("error_not_raised", "elasticsearch.exceptions.TransportError") - except elasticsearch.exceptions.TransportError as e: + except elasticsearch.exceptions.TransportError: spans = writer.pop() assert spans span = spans[-1] @@ -158,7 +165,7 @@ def test_elasticsearch_ot(self): es = elasticsearch.Elasticsearch(transport_class=transport_class, port=ELASTICSEARCH_CONFIG['port']) # Test index creation - mapping = {"mapping": {"properties": {"created": {"type":"date", "format": "yyyy-MM-dd"}}}} + mapping = {'mapping': {'properties': {'created': {'type': 'date', 'format': 'yyyy-MM-dd'}}}} with ot_tracer.start_active_span('ot_span'): es.indices.create(index=self.ES_INDEX, 
ignore=400, body=mapping) @@ -223,9 +230,8 @@ def test_elasticsearch(self): writer = tracer.writer Pin(service=self.TEST_SERVICE, tracer=tracer).onto(es.transport) - # Test index creation - mapping = {"mapping": {"properties": {"created": {"type":"date", "format": "yyyy-MM-dd"}}}} + mapping = {'mapping': {'properties': {'created': {'type': 'date', 'format': 'yyyy-MM-dd'}}}} es.indices.create(index=self.ES_INDEX, ignore=400, body=mapping) spans = writer.pop() @@ -241,7 +247,7 @@ def test_elasticsearch(self): eq_(span.resource, "PUT /%s" % self.ES_INDEX) # Put data - args = {'index':self.ES_INDEX, 'doc_type':self.ES_TYPE} + args = {'index': self.ES_INDEX, 'doc_type': self.ES_TYPE} es.index(id=10, body={'name': 'ten', 'created': datetime.date(2016, 1, 1)}, **args) es.index(id=11, body={'name': 'eleven', 'created': datetime.date(2016, 2, 1)}, **args) es.index(id=12, body={'name': 'twelve', 'created': datetime.date(2016, 3, 1)}, **args) @@ -267,8 +273,12 @@ def test_elasticsearch(self): eq_(span.get_tag('elasticsearch.url'), "/%s/_refresh" % self.ES_INDEX) # Search data - result = es.search(sort=['name:desc'], size=100, - body={"query":{"match_all":{}}}, **args) + result = es.search( + sort=['name:desc'], + size=100, + body={'query': {'match_all': {}}}, + **args + ) assert len(result["hits"]["hits"]) == 3, result diff --git a/tests/contrib/falcon/test_distributed_tracing.py b/tests/contrib/falcon/test_distributed_tracing.py index 02fb870a3e1..3634c726cc8 100644 --- a/tests/contrib/falcon/test_distributed_tracing.py +++ b/tests/contrib/falcon/test_distributed_tracing.py @@ -1,5 +1,3 @@ -from ddtrace.propagation.http import HTTPPropagator -from ddtrace.ext import errors as errx, http as httpx, AppTypes from falcon import testing from nose.tools import eq_, ok_ from tests.test_tracer import get_dummy_tracer diff --git a/tests/contrib/flask/test_flask_helpers.py b/tests/contrib/flask/test_flask_helpers.py index a493f4f6e7c..976e7f168ef 100644 --- 
a/tests/contrib/flask/test_flask_helpers.py +++ b/tests/contrib/flask/test_flask_helpers.py @@ -1,5 +1,4 @@ import flask -import wrapt from ddtrace import Pin from ddtrace.contrib.flask import unpatch diff --git a/tests/contrib/flask/test_hooks.py b/tests/contrib/flask/test_hooks.py index be17f6995e9..797e385c926 100644 --- a/tests/contrib/flask/test_hooks.py +++ b/tests/contrib/flask/test_hooks.py @@ -2,6 +2,7 @@ from . import BaseFlaskTestCase + class FlaskHookTestCase(BaseFlaskTestCase): def setUp(self): super(FlaskHookTestCase, self).setUp() @@ -11,6 +12,7 @@ def index(): return 'Hello Flask', 200 self.bp = Blueprint(__name__, 'bp') + @self.bp.route('/bp') def bp(): return 'Hello Blueprint', 200 @@ -304,7 +306,7 @@ def bp_before_app_request(): # Assert correct parent span self.assertEqual(parent.name, 'flask.preprocess_request') - def test_before_first_request(self): + def test_before_app_first_request(self): """ When Blueprint before_first_request hook is registered We create the expected spans @@ -341,7 +343,11 @@ def bp_before_app_first_request(): spans = self.get_spans() self.assertEqual(len(spans), 8) - span = self.find_span_by_name(spans, 'tests.contrib.flask.test_hooks.bp_before_app_first_request', required=False) + span = self.find_span_by_name( + spans, + 'tests.contrib.flask.test_hooks.bp_before_app_first_request', + required=False, + ) self.assertIsNone(span) def test_bp_after_request(self): diff --git a/tests/contrib/flask/test_middleware.py b/tests/contrib/flask/test_middleware.py index 24c5c6d71b5..3eef44a7036 100644 --- a/tests/contrib/flask/test_middleware.py +++ b/tests/contrib/flask/test_middleware.py @@ -70,7 +70,7 @@ def test_child(self): spans = self.tracer.writer.pop() eq_(len(spans), 2) - spans_by_name = {s.name:s for s in spans} + spans_by_name = {s.name: s for s in spans} s = spans_by_name['flask.request'] assert s.span_id @@ -116,7 +116,7 @@ def test_success(self): services = self.tracer.writer.pop_services() expected = { - 
"test.flask.service": {"app":"flask", "app_type":"web"} + 'test.flask.service': {'app': 'flask', 'app_type': 'web'}, } eq_(services, expected) @@ -133,7 +133,7 @@ def test_template(self): assert not self.tracer.current_span(), self.tracer.current_span().pprint() spans = self.tracer.writer.pop() eq_(len(spans), 2) - by_name = {s.name:s for s in spans} + by_name = {s.name: s for s in spans} s = by_name["flask.request"] eq_(s.service, "test.flask.service") eq_(s.resource, "tmpl") @@ -185,7 +185,7 @@ def test_template_err(self): assert not self.tracer.current_span(), self.tracer.current_span().pprint() spans = self.tracer.writer.pop() eq_(len(spans), 1) - by_name = {s.name:s for s in spans} + by_name = {s.name: s for s in spans} s = by_name["flask.request"] eq_(s.service, "test.flask.service") eq_(s.resource, "tmpl_err") @@ -210,7 +210,7 @@ def test_template_render_err(self): assert not self.tracer.current_span(), self.tracer.current_span().pprint() spans = self.tracer.writer.pop() eq_(len(spans), 2) - by_name = {s.name:s for s in spans} + by_name = {s.name: s for s in spans} s = by_name["flask.request"] eq_(s.service, "test.flask.service") eq_(s.resource, "tmpl_render_err") diff --git a/tests/contrib/flask/test_template.py b/tests/contrib/flask/test_template.py index 64ebe216e83..fa878143f52 100644 --- a/tests/contrib/flask/test_template.py +++ b/tests/contrib/flask/test_template.py @@ -1,7 +1,4 @@ -import mock - import flask -import wrapt from ddtrace import Pin from ddtrace.contrib.flask import unpatch diff --git a/tests/contrib/flask_cache/test_utils.py b/tests/contrib/flask_cache/test_utils.py index 28be1a6e8fe..f6006049e01 100644 --- a/tests/contrib/flask_cache/test_utils.py +++ b/tests/contrib/flask_cache/test_utils.py @@ -1,13 +1,11 @@ import unittest -from nose.tools import eq_, ok_ +from nose.tools import eq_ # project -from ddtrace.ext import net -from ddtrace.tracer import Tracer, Span +from ddtrace.tracer import Tracer from ddtrace.contrib.flask_cache 
import get_traced_cache from ddtrace.contrib.flask_cache.utils import _extract_conn_tags, _resource_from_cache_prefix -from ddtrace.contrib.flask_cache.tracers import TYPE, CACHE_BACKEND # 3rd party from flask import Flask @@ -107,10 +105,6 @@ def test_resource_from_cache_without_prefix(self): tracer = Tracer() Cache = get_traced_cache(tracer, service=self.SERVICE) app = Flask(__name__) - config = { - "CACHE_REDIS_PORT": REDIS_CONFIG['port'], - "CACHE_TYPE": "redis", - } traced_cache = Cache(app, config={"CACHE_TYPE": "redis"}) # expect only the resource name expected_resource = "get" diff --git a/tests/contrib/flask_cache/test_wrapper_safety.py b/tests/contrib/flask_cache/test_wrapper_safety.py index d092789dddb..77137de6d18 100644 --- a/tests/contrib/flask_cache/test_wrapper_safety.py +++ b/tests/contrib/flask_cache/test_wrapper_safety.py @@ -7,7 +7,7 @@ from ddtrace.ext import net from ddtrace.tracer import Tracer from ddtrace.contrib.flask_cache import get_traced_cache -from ddtrace.contrib.flask_cache.tracers import TYPE, CACHE_BACKEND +from ddtrace.contrib.flask_cache.tracers import CACHE_BACKEND # 3rd party from flask import Flask @@ -217,7 +217,6 @@ def test_memcached_cache_tracing_with_a_wrong_connection(self): except Exception: pass - # ensure that the error is not caused by our tracer spans = writer.pop() eq_(len(spans), 1) diff --git a/tests/contrib/gevent/test_tracer.py b/tests/contrib/gevent/test_tracer.py index 39e8a85d967..08aba68e5fd 100644 --- a/tests/contrib/gevent/test_tracer.py +++ b/tests/contrib/gevent/test_tracer.py @@ -136,7 +136,7 @@ def greenlet(_): for func in funcs: with self.tracer.trace('outer', resource='base') as span: # Use a list to force evaluation - list(func(greenlet, [0,1,2])) + list(func(greenlet, [0, 1, 2])) traces = self.tracer.writer.pop_traces() eq_(4, len(traces)) @@ -298,7 +298,7 @@ def test_propagation_with_new_context(self): self.tracer.context_provider.activate(ctx) def greenlet(): - with 
self.tracer.trace('greenlet') as span: + with self.tracer.trace('greenlet'): gevent.sleep(0.01) jobs = [gevent.spawn(greenlet) for x in range(1)] diff --git a/tests/contrib/grpc/hello_pb2.py b/tests/contrib/grpc/hello_pb2.py index 6cac01113f0..91c63eb4b9b 100644 --- a/tests/contrib/grpc/hello_pb2.py +++ b/tests/contrib/grpc/hello_pb2.py @@ -1,3 +1,4 @@ +# flake8: noqa # Generated by the protocol buffer compiler. DO NOT EDIT! # source: hello.proto diff --git a/tests/contrib/grpc/hello_pb2_grpc.py b/tests/contrib/grpc/hello_pb2_grpc.py index ab6ede900d2..7e57bce7d72 100644 --- a/tests/contrib/grpc/hello_pb2_grpc.py +++ b/tests/contrib/grpc/hello_pb2_grpc.py @@ -1,3 +1,4 @@ +# flake8: noqa # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! import grpc diff --git a/tests/contrib/grpc/test_grpc.py b/tests/contrib/grpc/test_grpc.py index 502b631da17..55cbbe5a5da 100644 --- a/tests/contrib/grpc/test_grpc.py +++ b/tests/contrib/grpc/test_grpc.py @@ -1,26 +1,24 @@ # Standard library -import time import unittest # Thirdparty import grpc from grpc.framework.foundation import logging_pool from nose.tools import eq_ -import wrapt # Internal from ddtrace.contrib.grpc import patch, unpatch -from ddtrace.contrib.grpc import client_interceptor from ddtrace import Pin from ...test_tracer import get_dummy_tracer, DummyWriter from .hello_pb2 import HelloRequest, HelloReply -from .hello_pb2_grpc import add_HelloServicer_to_server, HelloServicer, HelloStub +from .hello_pb2_grpc import add_HelloServicer_to_server, HelloStub GRPC_PORT = 50531 + class GrpcBaseMixin(object): def setUp(self): patch() @@ -59,7 +57,7 @@ def test_secure_channel(self): writer = self._tracer.writer spans = writer.pop() eq_(len(spans), 1) - + span = spans[0] eq_(response.message, 'x-datadog-trace-id=%d;x-datadog-parent-id=%d' % (span.trace_id, span.span_id)) _check_span(span) @@ -81,7 +79,10 @@ def test_priority_sampling(self): eq_( response.message, - 
'x-datadog-trace-id=%d;x-datadog-parent-id=%d;x-datadog-sampling-priority=1' % (span.trace_id, span.span_id), + ( + 'x-datadog-trace-id=%d;x-datadog-parent-id=%d;x-datadog-sampling-priority=1' % + (span.trace_id, span.span_id) + ), ) _check_span(span) @@ -89,15 +90,13 @@ def test_span_in_error(self): # Create a channel and send one request to the server with grpc.secure_channel('localhost:%d' % (GRPC_PORT), credentials=grpc.ChannelCredentials(None)) as channel: stub = HelloStub(channel) - try: + with self.assertRaises(Exception): stub.SayError(HelloRequest(name='test')) - except: - pass # excepted to throw writer = self._tracer.writer spans = writer.pop() eq_(len(spans), 1) - + span = spans[0] eq_(span.error, 1) self.assertIsNotNone(span.meta['error.stack']) @@ -139,7 +138,7 @@ def test_pin_can_be_defined_per_channel(self): writer = self._tracer.writer spans = writer.pop() - + eq_(len(spans), 2) span1 = spans[0] span2 = spans[1] diff --git a/tests/contrib/httplib/test_httplib.py b/tests/contrib/httplib/test_httplib.py index e91c3e5a34e..d551d70e2eb 100644 --- a/tests/contrib/httplib/test_httplib.py +++ b/tests/contrib/httplib/test_httplib.py @@ -497,6 +497,7 @@ def test_httplib_request_get_request_ot(self): } ) + # Additional Python2 test cases for urllib if PY2: import urllib diff --git a/tests/contrib/molten/__init__.py b/tests/contrib/molten/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/contrib/molten/test_molten.py b/tests/contrib/molten/test_molten.py new file mode 100644 index 00000000000..2f62bbe246f --- /dev/null +++ b/tests/contrib/molten/test_molten.py @@ -0,0 +1,215 @@ +# flake8: noqa +# DEV: Skip linting, we lint with Python 2, we'll get SyntaxErrors from annotations +from unittest import TestCase + +import molten +from molten.testing import TestClient + +from ddtrace import Pin +from ddtrace.ext import errors +from ddtrace.propagation.http import HTTP_HEADER_TRACE_ID, HTTP_HEADER_PARENT_ID +from 
ddtrace.contrib.molten import patch, unpatch +from ddtrace.contrib.molten.patch import MOLTEN_VERSION + +from ...test_tracer import get_dummy_tracer +from ...util import override_config + + +# NOTE: Type annotations required by molten otherwise parameters cannot be coerced +def hello(name: str, age: int) -> str: + return f'Hello {age} year old named {name}!' + + +def molten_client(headers=None): + app = molten.App(routes=[molten.Route('/hello/{name}/{age}', hello)]) + client = TestClient(app) + uri = app.reverse_uri('hello', name='Jim', age=24) + if headers: + return client.request('GET', uri, headers=headers) + return client.get(uri) + + +class TestMolten(TestCase): + """"Ensures Molten is properly instrumented.""" + + TEST_SERVICE = 'molten-patch' + + def setUp(self): + patch() + self.tracer = get_dummy_tracer() + Pin.override(molten, tracer=self.tracer) + + def tearDown(self): + unpatch() + self.tracer.writer.pop() + delattr(self, 'tracer') + + def test_route_success(self): + """ Tests request was a success with the expected span tags """ + response = molten_client() + spans = self.tracer.writer.pop() + self.assertEqual(response.status_code, 200) + # TestResponse from TestClient is wrapper around Response so we must + # access data property + self.assertEqual(response.data, '"Hello 24 year old named Jim!"') + span = spans[0] + self.assertEqual(span.service, 'molten') + self.assertEqual(span.name, 'molten.request') + self.assertEqual(span.resource, 'GET /hello/{name}/{age}') + self.assertEqual(span.get_tag('http.method'), 'GET') + self.assertEqual(span.get_tag('http.url'), '/hello/Jim/24') + self.assertEqual(span.get_tag('http.status_code'), '200') + + # See test_resources below for specifics of this difference + if MOLTEN_VERSION >= (0, 7, 2): + self.assertEqual(len(spans), 18) + else: + self.assertEqual(len(spans), 16) + + # test override of service name + Pin.override(molten, service=self.TEST_SERVICE) + response = molten_client() + spans = 
self.tracer.writer.pop() + self.assertEqual(spans[0].service, 'molten-patch') + + def test_route_failure(self): + app = molten.App(routes=[molten.Route('/hello/{name}/{age}', hello)]) + client = TestClient(app) + response = client.get('/goodbye') + spans = self.tracer.writer.pop() + self.assertEqual(response.status_code, 404) + span = spans[0] + self.assertEqual(span.service, 'molten') + self.assertEqual(span.name, 'molten.request') + self.assertEqual(span.resource, 'GET 404') + self.assertEqual(span.get_tag('http.url'), '/goodbye') + self.assertEqual(span.get_tag('http.method'), 'GET') + self.assertEqual(span.get_tag('http.status_code'), '404') + + def test_route_exception(self): + def route_error() -> str: + raise Exception('Error message') + app = molten.App(routes=[molten.Route('/error', route_error)]) + client = TestClient(app) + response = client.get('/error') + spans = self.tracer.writer.pop() + self.assertEqual(response.status_code, 500) + span = spans[0] + route_error_span = spans[-1] + self.assertEqual(span.service, 'molten') + self.assertEqual(span.name, 'molten.request') + self.assertEqual(span.resource, 'GET /error') + self.assertEqual(span.error, 1) + # error tags only set for route function span and not root span + self.assertIsNone(span.get_tag(errors.ERROR_MSG)) + self.assertEqual(route_error_span.get_tag(errors.ERROR_MSG), 'Error message') + + def test_resources(self): + """ Tests request has expected span resources """ + response = molten_client() + spans = self.tracer.writer.pop() + + # `can_handle_parameter` appears twice since two parameters are in request + # TODO[tahir]: missing ``resolve` method for components + + expected = [ + 'GET /hello/{name}/{age}', + 'molten.middleware.ResponseRendererMiddleware', + 'molten.components.HeaderComponent.can_handle_parameter', + 'molten.components.CookiesComponent.can_handle_parameter', + 'molten.components.QueryParamComponent.can_handle_parameter', + 
'molten.components.RequestBodyComponent.can_handle_parameter', + 'molten.components.RequestDataComponent.can_handle_parameter', + 'molten.components.SchemaComponent.can_handle_parameter', + 'molten.components.UploadedFileComponent.can_handle_parameter', + 'molten.components.HeaderComponent.can_handle_parameter', + 'molten.components.CookiesComponent.can_handle_parameter', + 'molten.components.QueryParamComponent.can_handle_parameter', + 'molten.components.RequestBodyComponent.can_handle_parameter', + 'molten.components.RequestDataComponent.can_handle_parameter', + 'molten.components.SchemaComponent.can_handle_parameter', + 'molten.components.UploadedFileComponent.can_handle_parameter', + 'tests.contrib.molten.test_molten.hello', + 'molten.renderers.JSONRenderer.render' + ] + + # Addition of `UploadedFileComponent` in 0.7.2 changes expected spans + if MOLTEN_VERSION < (0, 7, 2): + expected = [ + r + for r in expected + if not r.startswith('molten.components.UploadedFileComponent') + ] + + self.assertEqual(len(spans), len(expected)) + self.assertEqual([s.resource for s in spans], expected) + + def test_distributed_tracing(self): + """ Tests whether span IDs are propogated when distributed tracing is on """ + with override_config('molten', dict(distributed_tracing=True)): + response = molten_client(headers={ + HTTP_HEADER_TRACE_ID: '100', + HTTP_HEADER_PARENT_ID: '42', + }) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json(), 'Hello 24 year old named Jim!') + + spans = self.tracer.writer.pop() + span = spans[0] + self.assertEqual(span.name, 'molten.request') + self.assertEqual(span.trace_id, 100) + self.assertEqual(span.parent_id, 42) + + # Now without tracing on + with override_config('molten', dict(distributed_tracing=False)): + response = molten_client(headers={ + HTTP_HEADER_TRACE_ID: '100', + HTTP_HEADER_PARENT_ID: '42', + }) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json(), 'Hello 24 year old named 
Jim!') + + spans = self.tracer.writer.pop() + span = spans[0] + self.assertEqual(span.name, 'molten.request') + self.assertNotEqual(span.trace_id, 100) + self.assertNotEqual(span.parent_id, 42) + + def test_unpatch_patch(self): + """ Tests unpatch-patch cycle """ + unpatch() + self.assertIsNone(Pin.get_from(molten)) + molten_client() + spans = self.tracer.writer.pop() + self.assertEqual(len(spans), 0) + + patch() + # Need to override Pin here as we do in setUp + Pin.override(molten, tracer=self.tracer) + self.assertTrue(Pin.get_from(molten) is not None) + molten_client() + spans = self.tracer.writer.pop() + self.assertTrue(len(spans) > 0) + + def test_patch_unpatch(self): + """ Tests repatch-unpatch cycle """ + # Already call patch in setUp + self.assertTrue(Pin.get_from(molten) is not None) + molten_client() + spans = self.tracer.writer.pop() + self.assertTrue(len(spans) > 0) + + # Test unpatch + unpatch() + self.assertTrue(Pin.get_from(molten) is None) + molten_client() + spans = self.tracer.writer.pop() + self.assertEqual(len(spans), 0) + + def test_patch_idempotence(self): + """ Tests repatching """ + # Already call patch in setUp but patch again + patch() + molten_client() + spans = self.tracer.writer.pop() + self.assertTrue(len(spans) > 0) diff --git a/tests/contrib/molten/test_molten_di.py b/tests/contrib/molten/test_molten_di.py new file mode 100644 index 00000000000..dd2f17b5fed --- /dev/null +++ b/tests/contrib/molten/test_molten_di.py @@ -0,0 +1,120 @@ +# flake8: noqa +# DEV: Skip linting, we lint with Python 2, we'll get SyntaxErrors from annotations +from unittest import TestCase + +# Test base adapted from molten/tests/test_dependency_injection.py + +from inspect import Parameter + +import molten +from molten import DependencyInjector + +from ddtrace import Pin +from ddtrace.contrib.molten import patch, unpatch + +from ...test_tracer import get_dummy_tracer + + +class Settings(dict): + pass + + +class SettingsComponent: + is_singleton = True + + def 
can_handle_parameter(self, parameter: Parameter) -> bool: + return parameter.annotation is Settings + + def resolve(self) -> Settings: + return Settings() + + +class Metrics: + __slots__ = ['settings'] + + def __init__(self, settings: Settings) -> None: + self.settings = settings + + +class MetricsComponent: + is_singleton = True + + def can_handle_parameter(self, parameter: Parameter) -> bool: + return parameter.annotation is Metrics + + def resolve(self, settings: Settings) -> Metrics: + return Metrics(settings) + + +class DB: + __slots__ = ['settings', 'metrics'] + + def __init__(self, settings: Settings, metrics: Metrics) -> None: + self.settings = settings + self.metrics = metrics + + +class DBComponent: + is_singleton = True + + def can_handle_parameter(self, parameter: Parameter) -> bool: + return parameter.annotation is DB + + def resolve(self, settings: Settings, metrics: Metrics) -> DB: + return DB(settings, metrics) + + +class Accounts: + def __init__(self, db: DB) -> None: + self.db = db + + def get_all(self): + return [] + + +class AccountsComponent: + def can_handle_parameter(self, parameter: Parameter) -> bool: + return parameter.annotation is Accounts + + def resolve(self, db: DB) -> Accounts: + return Accounts(db) + + +class TestMoltenDI(TestCase): + """"Ensures Molten dependency injection is properly instrumented.""" + + TEST_SERVICE = 'molten-patch-di' + + def setUp(self): + patch() + self.tracer = get_dummy_tracer() + Pin.override(molten, tracer=self.tracer, service=self.TEST_SERVICE) + + def tearDown(self): + unpatch() + self.tracer.writer.pop() + + def test_di_can_inject_dependencies(self): + # Given that I have a DI instance + di = DependencyInjector(components=[ + SettingsComponent(), + MetricsComponent(), + DBComponent(), + AccountsComponent(), + ]) + + # And a function that uses DI + def example(accounts: Accounts): + assert accounts.get_all() == [] + return accounts + + # When I resolve that function + # Then all the parameters should 
resolve as expected + resolver = di.get_resolver() + resolved_example = resolver.resolve(example) + accounts_1 = resolved_example() + + spans = self.tracer.writer.pop() + + # TODO[tahir]: We could in future trace the resolve method on components + self.assertEqual(len(spans), 0) diff --git a/tests/contrib/mongoengine/test.py b/tests/contrib/mongoengine/test.py index eac9541cf8b..478e2bbd0af 100644 --- a/tests/contrib/mongoengine/test.py +++ b/tests/contrib/mongoengine/test.py @@ -7,7 +7,7 @@ import pymongo # project -from ddtrace import Tracer, Pin +from ddtrace import Pin from ddtrace.contrib.mongoengine.patch import patch, unpatch from ddtrace.ext import mongo as mongox @@ -21,6 +21,7 @@ class Artist(mongoengine.Document): first_name = mongoengine.StringField(max_length=50) last_name = mongoengine.StringField(max_length=50) + class MongoEngineCore(object): # Define the service at the class level, so that each test suite can use a different service @@ -209,6 +210,7 @@ def get_tracer_and_connect(self): return tracer + class TestMongoEnginePatchClient(TestMongoEnginePatchClientDefault): """Test suite with a Pin local to a specific client with custom service""" diff --git a/tests/contrib/mongoengine/test_backwards.py b/tests/contrib/mongoengine/test_backwards.py index c480ae5982c..130126c7d7b 100644 --- a/tests/contrib/mongoengine/test_backwards.py +++ b/tests/contrib/mongoengine/test_backwards.py @@ -1,13 +1,12 @@ """ ensure old interfaces exist and won't break things. 
""" - - import mongoengine from tests.test_tracer import get_dummy_tracer from tests.contrib import config + class Singer(mongoengine.Document): first_name = mongoengine.StringField(max_length=50) last_name = mongoengine.StringField(max_length=50) diff --git a/tests/contrib/mysql/test_mysql.py b/tests/contrib/mysql/test_mysql.py index d8e4b174b47..26f3c9a668c 100644 --- a/tests/contrib/mysql/test_mysql.py +++ b/tests/contrib/mysql/test_mysql.py @@ -23,7 +23,7 @@ def tearDown(self): if self.conn: try: self.conn.ping() - except MySQLdb.InterfaceError: + except mysql.InterfaceError: pass else: self.conn.close() @@ -85,8 +85,10 @@ def test_query_many(self): tracer.enabled = True stmt = "INSERT INTO dummy (dummy_key, dummy_value) VALUES (%s, %s)" - data = [("foo","this is foo"), - ("bar","this is bar")] + data = [ + ('foo', 'this is foo'), + ('bar', 'this is bar'), + ] cursor.executemany(stmt, data) query = "SELECT dummy_key, dummy_value FROM dummy ORDER BY dummy_key" cursor.execute(query) diff --git a/tests/contrib/mysqldb/test_mysql.py b/tests/contrib/mysqldb/test_mysql.py index d56efe4ea29..9f500aa29ba 100644 --- a/tests/contrib/mysqldb/test_mysql.py +++ b/tests/contrib/mysqldb/test_mysql.py @@ -112,8 +112,8 @@ def test_query_many(self): stmt = "INSERT INTO dummy (dummy_key, dummy_value) VALUES (%s, %s)" data = [ - ("foo","this is foo"), - ("bar","this is bar"), + ('foo', 'this is foo'), + ('bar', 'this is bar'), ] cursor.executemany(stmt, data) query = "SELECT dummy_key, dummy_value FROM dummy ORDER BY dummy_key" diff --git a/tests/contrib/patch.py b/tests/contrib/patch.py new file mode 100644 index 00000000000..807923c29fa --- /dev/null +++ b/tests/contrib/patch.py @@ -0,0 +1,648 @@ +import functools +import importlib +import sys +import unittest + +import wrapt + +from tests.subprocesstest import SubprocessTestCase, run_in_subprocess + + +class PatchMixin(unittest.TestCase): + """ + TestCase for testing the patch logic of an integration. 
+ """ + def module_imported(self, modname): + """ + Returns whether a module is imported or not. + """ + return modname in sys.modules + + def assert_module_imported(self, modname): + """ + Asserts that the module, given its name is imported. + """ + assert self.module_imported(modname), '{} module not imported'.format(modname) + + def assert_not_module_imported(self, modname): + """ + Asserts that the module, given its name is not imported. + """ + assert not self.module_imported(modname), '{} module is imported'.format(modname) + + def is_wrapped(self, obj): + return isinstance(obj, wrapt.ObjectProxy) + + def assert_wrapped(self, obj): + """ + Helper to assert that a given object is properly wrapped by wrapt. + """ + self.assertTrue(self.is_wrapped(obj), '{} is not wrapped'.format(obj)) + + def assert_not_wrapped(self, obj): + """ + Helper to assert that a given object is not wrapped by wrapt. + """ + self.assertFalse(self.is_wrapped(obj), '{} is wrapped'.format(obj)) + + def assert_not_double_wrapped(self, obj): + """ + Helper to assert that a given already wrapped object is not wrapped twice. + + This is useful for asserting idempotence. + """ + self.assert_wrapped(obj) + self.assert_not_wrapped(obj.__wrapped__) + + +def raise_if_no_attrs(f): + """ + A helper for PatchTestCase test methods that will check if there are any + modules to use else raise a NotImplementedError. + + :param f: method to wrap with a check + """ + required_attrs = [ + '__module_name__', + '__integration_name__', + '__unpatch_func__', + ] + + @functools.wraps(f) + def checked_method(self, *args, **kwargs): + for attr in required_attrs: + if not getattr(self, attr): + raise NotImplementedError(f.__doc__) + return f(self, *args, **kwargs) + return checked_method + + +class PatchTestCase(object): + """ + unittest or other test runners will pick up the base test case as a testcase + since it inherits from unittest.TestCase unless we wrap it with this empty + parent class. 
+ """ + @run_in_subprocess + class Base(SubprocessTestCase, PatchMixin): + """PatchTestCase provides default test methods to be used for testing + common integration patching logic. + + Each test method provides a default implementation which will use the + provided attributes (described below). If the attributes are not + provided a NotImplementedError will be raised for each method that is + not overridden. + + Attributes: + __integration_name__ the name of the integration. + __module_name__ module which the integration patches. + __unpatch_func__ unpatch function from the integration. + + Example: + + A simple implementation inheriting this TestCase looks like:: + + from ddtrace.contrib.redis import unpatch + + class RedisPatchTestCase(PatchTestCase.Base): + __integration_name__ = 'redis' + __module_name__ 'redis' + __unpatch_func__ = unpatch + + def assert_module_patched(self, redis): + # assert patching logic + # self.assert_wrapped(...) + + def assert_not_module_patched(self, redis): + # assert patching logic + # self.assert_not_wrapped(...) + + def assert_not_module_double_patched(self, redis): + # assert patching logic + # self.assert_not_double_wrapped(...) + + # override this particular test case + def test_patch_import(self): + # custom patch before import check + + # optionally override other test methods... + """ + __integration_name__ = None + __module_name__ = None + __unpatch_func__ = None + + def __init__(self, *args, **kwargs): + # DEV: Python will wrap a function when assigning to a class as an + # attribute. So we cannot call self.__unpatch_func__() as the `self` + # reference will be passed as an argument. + # So we need to unwrap the function and then wrap it in a function + # that will absorb the unpatch function. 
+ if self.__unpatch_func__: + unpatch_func = self.__unpatch_func__.__func__ + + def unpatch(): + unpatch_func() + self.__unpatch_func__ = unpatch + super(PatchTestCase.Base, self).__init__(*args, **kwargs) + + def patch(self, *args, **kwargs): + from ddtrace import patch + return patch(*args, **kwargs) + + def _gen_test_attrs(self, ops): + """ + A helper to return test names for tests given a list of different + operations. + :return: + """ + from itertools import permutations + return [ + 'test_{}'.format('_'.join(c)) for c in permutations(ops, len(ops)) + ] + + def test_verify_test_coverage(self): + """ + This TestCase should cover a variety of combinations of importing, + patching and unpatching. + """ + tests = [] + tests += self._gen_test_attrs(['import', 'patch']) + tests += self._gen_test_attrs(['import', 'patch', 'patch']) + tests += self._gen_test_attrs(['import', 'patch', 'unpatch']) + tests += self._gen_test_attrs(['import', 'patch', 'unpatch', 'unpatch']) + + # TODO: it may be possible to generate test cases dynamically. For + # now focus on the important ones. + test_ignore = set([ + 'test_unpatch_import_patch', + 'test_import_unpatch_patch_unpatch', + 'test_import_unpatch_unpatch_patch', + 'test_patch_import_unpatch_unpatch', + 'test_unpatch_import_patch_unpatch', + 'test_unpatch_import_unpatch_patch', + 'test_unpatch_patch_import_unpatch', + 'test_unpatch_patch_unpatch_import', + 'test_unpatch_unpatch_import_patch', + 'test_unpatch_unpatch_patch_import', + ]) + + for test_attr in tests: + if test_attr in test_ignore: + continue + assert hasattr(self, test_attr), '{} not found in expected test attrs'.format(test_attr) + + def assert_module_patched(self, module): + """ + Asserts that the given module is patched. 
+ + For example, the redis integration patches the following methods: + - redis.StrictRedis.execute_command + - redis.StrictRedis.pipeline + - redis.Redis.pipeline + - redis.client.BasePipeline.execute + - redis.client.BasePipeline.immediate_execute_command + + So an appropriate assert_module_patched would look like:: + + def assert_module_patched(self, redis): + self.assert_wrapped(redis.StrictRedis.execute_command) + self.assert_wrapped(redis.StrictRedis.pipeline) + self.assert_wrapped(redis.Redis.pipeline) + self.assert_wrapped(redis.client.BasePipeline.execute) + self.assert_wrapped(redis.client.BasePipeline.immediate_execute_command) + + :param module: module to check + :return: None + """ + raise NotImplementedError(self.assert_module_patched.__doc__) + + def assert_not_module_patched(self, module): + """ + Asserts that the given module is not patched. + + For example, the redis integration patches the following methods: + - redis.StrictRedis.execute_command + - redis.StrictRedis.pipeline + - redis.Redis.pipeline + - redis.client.BasePipeline.execute + - redis.client.BasePipeline.immediate_execute_command + + So an appropriate assert_not_module_patched would look like:: + + def assert_not_module_patched(self, redis): + self.assert_not_wrapped(redis.StrictRedis.execute_command) + self.assert_not_wrapped(redis.StrictRedis.pipeline) + self.assert_not_wrapped(redis.Redis.pipeline) + self.assert_not_wrapped(redis.client.BasePipeline.execute) + self.assert_not_wrapped(redis.client.BasePipeline.immediate_execute_command) + + :param module: + :return: None + """ + raise NotImplementedError(self.assert_not_module_patched.__doc__) + + def assert_not_module_double_patched(self, module): + """ + Asserts that the given module is not patched twice. 
+ + For example, the redis integration patches the following methods: + - redis.StrictRedis.execute_command + - redis.StrictRedis.pipeline + - redis.Redis.pipeline + - redis.client.BasePipeline.execute + - redis.client.BasePipeline.immediate_execute_command + + So an appropriate assert_not_module_double_patched would look like:: + + def assert_not_module_double_patched(self, redis): + self.assert_not_double_wrapped(redis.StrictRedis.execute_command) + self.assert_not_double_wrapped(redis.StrictRedis.pipeline) + self.assert_not_double_wrapped(redis.Redis.pipeline) + self.assert_not_double_wrapped(redis.client.BasePipeline.execute) + self.assert_not_double_wrapped(redis.client.BasePipeline.immediate_execute_command) + + :param module: module to check + :return: None + """ + raise NotImplementedError(self.assert_not_module_double_patched.__doc__) + + @raise_if_no_attrs + def test_import_patch(self): + """ + The integration should test that each class, method or function that + is to be patched is in fact done so when ddtrace.patch() is called + before the module is imported. + + For example: + + an appropriate ``test_patch_import`` would be:: + + import redis + ddtrace.patch(redis=True) + self.assert_module_patched(redis) + """ + self.assert_not_module_imported(self.__module_name__) + module = importlib.import_module(self.__module_name__) + self.assert_not_module_patched(module) + self.patch(**{self.__integration_name__: True}) + self.assert_module_patched(module) + + @raise_if_no_attrs + def test_patch_import(self): + """ + The integration should test that each class, method or function that + is to be patched is in fact done so when ddtrace.patch() is called + after the module is imported. 
+ + an appropriate ``test_patch_import`` would be:: + + import redis + ddtrace.patch(redis=True) + self.assert_module_patched(redis) + """ + self.assert_not_module_imported(self.__module_name__) + module = importlib.import_module(self.__module_name__) + self.patch(**{self.__integration_name__: True}) + self.assert_module_patched(module) + + @raise_if_no_attrs + def test_import_patch_patch(self): + """ + Proper testing should be done to ensure that multiple calls to the + integration.patch() method are idempotent. That is, that the + integration does not patch its library more than once. + + An example for what this might look like for the redis integration:: + + import redis + ddtrace.patch(redis=True) + self.assert_module_patched(redis) + ddtrace.patch(redis=True) + self.assert_not_module_double_patched(redis) + """ + self.assert_not_module_imported(self.__module_name__) + self.patch(**{self.__module_name__: True}) + module = importlib.import_module(self.__module_name__) + self.assert_module_patched(module) + self.patch(**{self.__module_name__: True}) + self.assert_not_module_double_patched(module) + + @raise_if_no_attrs + def test_patch_import_patch(self): + """ + Proper testing should be done to ensure that multiple calls to the + integration.patch() method are idempotent. That is, that the + integration does not patch its library more than once. 
+ + An example for what this might look like for the redis integration:: + + ddtrace.patch(redis=True) + import redis + self.assert_module_patched(redis) + ddtrace.patch(redis=True) + self.assert_not_module_double_patched(redis) + """ + self.assert_not_module_imported(self.__module_name__) + self.patch(**{self.__module_name__: True}) + module = importlib.import_module(self.__module_name__) + self.assert_module_patched(module) + self.patch(**{self.__module_name__: True}) + self.assert_not_module_double_patched(module) + + @raise_if_no_attrs + def test_patch_patch_import(self): + """ + Proper testing should be done to ensure that multiple calls to the + integration.patch() method are idempotent. That is, that the + integration does not patch its library more than once. + + An example for what this might look like for the redis integration:: + + ddtrace.patch(redis=True) + ddtrace.patch(redis=True) + import redis + self.assert_not_double_wrapped(redis.StrictRedis.execute_command) + """ + self.assert_not_module_imported(self.__module_name__) + self.patch(**{self.__module_name__: True}) + self.patch(**{self.__module_name__: True}) + module = importlib.import_module(self.__module_name__) + self.assert_module_patched(module) + self.assert_not_module_double_patched(module) + + @raise_if_no_attrs + def test_import_patch_unpatch_patch(self): + """ + To ensure that we can thoroughly test the installation/patching of + an integration we must be able to unpatch it and then subsequently + patch it again. 
+ + For example:: + + import redis + from ddtrace.contrib.redis import unpatch + + ddtrace.patch(redis=True) + unpatch() + ddtrace.patch(redis=True) + self.assert_module_patched(redis) + """ + self.assert_not_module_imported(self.__module_name__) + module = importlib.import_module(self.__module_name__) + self.patch(**{self.__integration_name__: True}) + self.assert_module_patched(module) + self.__unpatch_func__() + self.assert_not_module_patched(module) + self.patch(**{self.__integration_name__: True}) + self.assert_module_patched(module) + + @raise_if_no_attrs + def test_patch_import_unpatch_patch(self): + """ + To ensure that we can thoroughly test the installation/patching of + an integration we must be able to unpatch it and then subsequently + patch it again. + + For example:: + + from ddtrace.contrib.redis import unpatch + + ddtrace.patch(redis=True) + import redis + unpatch() + ddtrace.patch(redis=True) + self.assert_module_patched(redis) + """ + self.assert_not_module_imported(self.__module_name__) + self.patch(**{self.__integration_name__: True}) + module = importlib.import_module(self.__module_name__) + self.assert_module_patched(module) + self.__unpatch_func__() + self.assert_not_module_patched(module) + self.patch(**{self.__integration_name__: True}) + self.assert_module_patched(module) + + @raise_if_no_attrs + def test_patch_unpatch_import_patch(self): + """ + To ensure that we can thoroughly test the installation/patching of + an integration we must be able to unpatch it and then subsequently + patch it again. 
+ + For example:: + + from ddtrace.contrib.redis import unpatch + + ddtrace.patch(redis=True) + import redis + unpatch() + ddtrace.patch(redis=True) + self.assert_module_patched(redis) + """ + self.assert_not_module_imported(self.__module_name__) + self.patch(**{self.__integration_name__: True}) + self.__unpatch_func__() + module = importlib.import_module(self.__module_name__) + self.assert_not_module_patched(module) + self.patch(**{self.__integration_name__: True}) + self.assert_module_patched(module) + + @raise_if_no_attrs + def test_patch_unpatch_patch_import(self): + """ + To ensure that we can thoroughly test the installation/patching of + an integration we must be able to unpatch it and then subsequently + patch it again. + + For example:: + + from ddtrace.contrib.redis import unpatch + + ddtrace.patch(redis=True) + unpatch() + ddtrace.patch(redis=True) + import redis + self.assert_module_patched(redis) + """ + self.assert_not_module_imported(self.__module_name__) + self.patch(**{self.__integration_name__: True}) + self.__unpatch_func__() + self.patch(**{self.__integration_name__: True}) + module = importlib.import_module(self.__module_name__) + self.assert_module_patched(module) + + @raise_if_no_attrs + def test_unpatch_patch_import(self): + """ + Make sure unpatching before patch does not break patching. + + For example:: + + from ddtrace.contrib.redis import unpatch + unpatch() + ddtrace.patch(redis=True) + import redis + self.assert_not_module_patched(redis) + """ + self.assert_not_module_imported(self.__module_name__) + self.__unpatch_func__() + self.patch(**{self.__integration_name__: True}) + module = importlib.import_module(self.__module_name__) + self.assert_module_patched(module) + + @raise_if_no_attrs + def test_patch_unpatch_import(self): + """ + To ensure that we can thoroughly test the installation/patching of + an integration we must be able to unpatch it before importing the + library. 
+ + For example:: + + ddtrace.patch(redis=True) + from ddtrace.contrib.redis import unpatch + unpatch() + import redis + self.assert_not_module_patched(redis) + """ + self.assert_not_module_imported(self.__module_name__) + self.patch(**{self.__integration_name__: True}) + self.__unpatch_func__() + module = importlib.import_module(self.__module_name__) + self.assert_not_module_patched(module) + + @raise_if_no_attrs + def test_import_unpatch_patch(self): + """ + To ensure that we can thoroughly test the installation/patching of + an integration we must be able to unpatch it before patching. + + For example:: + + import redis + from ddtrace.contrib.redis import unpatch + ddtrace.patch(redis=True) + unpatch() + self.assert_not_module_patched(redis) + """ + self.assert_not_module_imported(self.__module_name__) + module = importlib.import_module(self.__module_name__) + self.__unpatch_func__() + self.assert_not_module_patched(module) + self.patch(**{self.__integration_name__: True}) + self.assert_module_patched(module) + + @raise_if_no_attrs + def test_import_patch_unpatch(self): + """ + To ensure that we can thoroughly test the installation/patching of + an integration we must be able to unpatch it after patching. + + For example:: + + import redis + from ddtrace.contrib.redis import unpatch + ddtrace.patch(redis=True) + unpatch() + self.assert_not_module_patched(redis) + """ + self.assert_not_module_imported(self.__module_name__) + module = importlib.import_module(self.__module_name__) + self.assert_not_module_patched(module) + self.patch(**{self.__integration_name__: True}) + self.assert_module_patched(module) + self.__unpatch_func__() + self.assert_not_module_patched(module) + + @raise_if_no_attrs + def test_patch_import_unpatch(self): + """ + To ensure that we can thoroughly test the installation/patching of + an integration we must be able to unpatch it after patching. 
+ + For example:: + + from ddtrace.contrib.redis import unpatch + ddtrace.patch(redis=True) + import redis + unpatch() + self.assert_not_module_patched(redis) + """ + self.assert_not_module_imported(self.__module_name__) + self.patch(**{self.__integration_name__: True}) + module = importlib.import_module(self.__module_name__) + self.assert_module_patched(module) + self.__unpatch_func__() + self.assert_not_module_patched(module) + + @raise_if_no_attrs + def test_import_patch_unpatch_unpatch(self): + """ + Unpatching twice should be a no-op. + + For example:: + + import redis + from ddtrace.contrib.redis import unpatch + + ddtrace.patch(redis=True) + self.assert_module_patched(redis) + unpatch() + self.assert_not_module_patched(redis) + unpatch() + self.assert_not_module_patched(redis) + """ + self.assert_not_module_imported(self.__module_name__) + module = importlib.import_module(self.__module_name__) + self.patch(**{self.__integration_name__: True}) + self.assert_module_patched(module) + self.__unpatch_func__() + self.assert_not_module_patched(module) + self.__unpatch_func__() + self.assert_not_module_patched(module) + + @raise_if_no_attrs + def test_patch_unpatch_import_unpatch(self): + """ + Unpatching twice should be a no-op. + + For example:: + + from ddtrace.contrib.redis import unpatch + + ddtrace.patch(redis=True) + unpatch() + import redis + self.assert_not_module_patched(redis) + unpatch() + self.assert_not_module_patched(redis) + """ + self.assert_not_module_imported(self.__module_name__) + self.patch(**{self.__integration_name__: True}) + self.__unpatch_func__() + module = importlib.import_module(self.__module_name__) + self.assert_not_module_patched(module) + self.__unpatch_func__() + self.assert_not_module_patched(module) + + @raise_if_no_attrs + def test_patch_unpatch_unpatch_import(self): + """ + Unpatching twice should be a no-op. 
+ + For example:: + + from ddtrace.contrib.redis import unpatch + + ddtrace.patch(redis=True) + unpatch() + unpatch() + import redis + self.assert_not_module_patched(redis) + """ + self.assert_not_module_imported(self.__module_name__) + self.patch(**{self.__integration_name__: True}) + self.__unpatch_func__() + self.__unpatch_func__() + module = importlib.import_module(self.__module_name__) + self.assert_not_module_patched(module) diff --git a/tests/contrib/psycopg/test_psycopg.py b/tests/contrib/psycopg/test_psycopg.py index 33d322e960d..8e8123d8d35 100644 --- a/tests/contrib/psycopg/test_psycopg.py +++ b/tests/contrib/psycopg/test_psycopg.py @@ -6,12 +6,12 @@ from psycopg2 import extensions from psycopg2 import extras +import unittest from unittest import skipIf -from nose.tools import eq_, ok_ # project from ddtrace.contrib.psycopg import connection_factory -from ddtrace.contrib.psycopg.patch import patch, unpatch +from ddtrace.contrib.psycopg.patch import patch, unpatch, PSYCOPG2_VERSION from ddtrace import Pin # testing @@ -19,78 +19,128 @@ from tests.contrib.config import POSTGRES_CONFIG from tests.test_tracer import get_dummy_tracer +if PSYCOPG2_VERSION >= (2, 7): + from psycopg2.sql import SQL -PSYCOPG_VERSION = tuple(map(int, psycopg2.__version__.split()[0].split('.'))) TEST_PORT = str(POSTGRES_CONFIG['port']) -class PsycopgCore(object): + + +class PsycopgCore(unittest.TestCase): # default service TEST_SERVICE = 'postgres' + def setUp(self): + patch() + + def tearDown(self): + unpatch() + def _get_conn_and_tracer(self): - # implement me - pass + conn = psycopg2.connect(**POSTGRES_CONFIG) + tracer = get_dummy_tracer() + Pin.get_from(conn).clone(tracer=tracer).onto(conn) + + return conn, tracer + + def test_patch_unpatch(self): + tracer = get_dummy_tracer() + writer = tracer.writer + + # Test patch idempotence + patch() + patch() + + service = 'fo' + + conn = psycopg2.connect(**POSTGRES_CONFIG) + Pin.get_from(conn).clone(service=service, 
tracer=tracer).onto(conn) + conn.cursor().execute("""select 'blah'""") + + spans = writer.pop() + assert spans, spans + self.assertEquals(len(spans), 1) + + # Test unpatch + unpatch() + + conn = psycopg2.connect(**POSTGRES_CONFIG) + conn.cursor().execute("""select 'blah'""") + + spans = writer.pop() + assert not spans, spans + + # Test patch again + patch() + + conn = psycopg2.connect(**POSTGRES_CONFIG) + Pin.get_from(conn).clone(service=service, tracer=tracer).onto(conn) + conn.cursor().execute("""select 'blah'""") + + spans = writer.pop() + assert spans, spans + self.assertEquals(len(spans), 1) def assert_conn_is_traced(self, tracer, db, service): # ensure the trace pscyopg client doesn't add non-standard # methods try: - db.execute("select 'foobar'") + db.execute("""select 'foobar'""") except AttributeError: pass writer = tracer.writer # Ensure we can run a query and it's correctly traced - q = "select 'foobarblah'" + q = """select 'foobarblah'""" start = time.time() cursor = db.cursor() cursor.execute(q) rows = cursor.fetchall() end = time.time() - eq_(rows, [('foobarblah',)]) + self.assertEquals(rows, [('foobarblah',)]) assert rows spans = writer.pop() assert spans - eq_(len(spans), 2) + self.assertEquals(len(spans), 2) span = spans[0] - eq_(span.name, "postgres.query") - eq_(span.resource, q) - eq_(span.service, service) - ok_(span.get_tag("sql.query") is None) - eq_(span.error, 0) - eq_(span.span_type, "sql") + self.assertEquals(span.name, 'postgres.query') + self.assertEquals(span.resource, q) + self.assertEquals(span.service, service) + self.assertIsNone(span.get_tag('sql.query')) + self.assertEquals(span.error, 0) + self.assertEquals(span.span_type, 'sql') assert start <= span.start <= end assert span.duration <= end - start fetch_span = spans[1] - eq_(fetch_span.name, "postgres.query.fetchall") + self.assertEquals(fetch_span.name, "postgres.query.fetchall") # run a query with an error and ensure all is well - q = "select * from some_non_existant_table" + 
q = """select * from some_non_existant_table""" cur = db.cursor() try: cur.execute(q) except Exception: pass else: - assert 0, "should have an error" + assert 0, 'should have an error' spans = writer.pop() assert spans, spans - eq_(len(spans), 1) + self.assertEquals(len(spans), 1) span = spans[0] - eq_(span.name, "postgres.query") - eq_(span.resource, q) - eq_(span.service, service) - ok_(span.get_tag("sql.query") is None) - eq_(span.error, 1) - eq_(span.meta["out.host"], "localhost") - eq_(span.meta["out.port"], TEST_PORT) - eq_(span.span_type, "sql") + self.assertEquals(span.name, 'postgres.query') + self.assertEquals(span.resource, q) + self.assertEquals(span.service, service) + self.assertIsNone(span.get_tag('sql.query')) + self.assertEquals(span.error, 1) + self.assertEquals(span.meta['out.host'], 'localhost') + self.assertEquals(span.meta['out.port'], TEST_PORT) + self.assertEquals(span.span_type, 'sql') def test_opentracing_propagation(self): # ensure OpenTracing plays well with our integration - query = "SELECT 'tracing'" + query = """SELECT 'tracing'""" db, tracer = self._get_conn_and_tracer() ot_tracer = init_tracer('psycopg-svc', tracer) @@ -99,54 +149,54 @@ def test_opentracing_propagation(self): cursor.execute(query) rows = cursor.fetchall() - eq_(rows, [('tracing',)]) + self.assertEquals(rows, [('tracing',)]) spans = tracer.writer.pop() - eq_(len(spans), 3) + self.assertEquals(len(spans), 3) ot_span, dd_span, fetch_span = spans # confirm the parenting - eq_(ot_span.parent_id, None) - eq_(dd_span.parent_id, ot_span.span_id) + self.assertEquals(ot_span.parent_id, None) + self.assertEquals(dd_span.parent_id, ot_span.span_id) # check the OpenTracing span - eq_(ot_span.name, "db.access") - eq_(ot_span.service, "psycopg-svc") + self.assertEquals(ot_span.name, "db.access") + self.assertEquals(ot_span.service, "psycopg-svc") # make sure the Datadog span is unaffected by OpenTracing - eq_(dd_span.name, "postgres.query") - eq_(dd_span.resource, query) - 
eq_(dd_span.service, 'postgres') - ok_(dd_span.get_tag("sql.query") is None) - eq_(dd_span.error, 0) - eq_(dd_span.span_type, "sql") + self.assertEquals(dd_span.name, "postgres.query") + self.assertEquals(dd_span.resource, query) + self.assertEquals(dd_span.service, 'postgres') + self.assertTrue(dd_span.get_tag("sql.query") is None) + self.assertEquals(dd_span.error, 0) + self.assertEquals(dd_span.span_type, "sql") - eq_(fetch_span.name, 'postgres.query.fetchall') + self.assertEquals(fetch_span.name, 'postgres.query.fetchall') - @skipIf(PSYCOPG_VERSION < (2, 5), 'context manager not available in psycopg2==2.4') + @skipIf(PSYCOPG2_VERSION < (2, 5), 'context manager not available in psycopg2==2.4') def test_cursor_ctx_manager(self): # ensure cursors work with context managers # https://github.com/DataDog/dd-trace-py/issues/228 conn, tracer = self._get_conn_and_tracer() t = type(conn.cursor()) with conn.cursor() as cur: - assert t == type(cur), "%s != %s" % (t, type(cur)) - cur.execute(query="select 'blah'") + assert t == type(cur), '{} != {}'.format(t, type(cur)) + cur.execute(query="""select 'blah'""") rows = cur.fetchall() - assert len(rows) == 1, row + assert len(rows) == 1, rows assert rows[0][0] == 'blah' spans = tracer.writer.pop() assert len(spans) == 2 span, fetch_span = spans - eq_(span.name, "postgres.query") - eq_(fetch_span.name, 'postgres.query.fetchall') + self.assertEquals(span.name, 'postgres.query') + self.assertEquals(fetch_span.name, 'postgres.query.fetchall') def test_disabled_execute(self): conn, tracer = self._get_conn_and_tracer() tracer.enabled = False # these calls were crashing with a previous version of the code. 
- conn.cursor().execute(query="select 'blah'") - conn.cursor().execute("select 'blah'") + conn.cursor().execute(query="""select 'blah'""") + conn.cursor().execute("""select 'blah'""") assert not tracer.writer.pop() - @skipIf(PSYCOPG_VERSION < (2, 5), '_json is not available in psycopg2==2.4') + @skipIf(PSYCOPG2_VERSION < (2, 5), '_json is not available in psycopg2==2.4') def test_manual_wrap_extension_types(self): conn, _ = self._get_conn_and_tracer() # NOTE: this will crash if it doesn't work. @@ -175,7 +225,7 @@ def test_manual_wrap_extension_adapt(self): binary = extensions.adapt(b'12345') binary.prepare(conn) - @skipIf(PSYCOPG_VERSION < (2, 7), 'quote_ident not available in psycopg2<2.7') + @skipIf(PSYCOPG2_VERSION < (2, 7), 'quote_ident not available in psycopg2<2.7') def test_manual_wrap_extension_quote_ident(self): from ddtrace import patch_all patch_all() @@ -189,7 +239,7 @@ def test_manual_wrap_extension_quote_ident(self): def test_connect_factory(self): tracer = get_dummy_tracer() - services = ["db", "another"] + services = ['db', 'another'] for service in services: conn, _ = self._get_conn_and_tracer() Pin.get_from(conn).clone(service=service, tracer=tracer).onto(conn) @@ -198,94 +248,62 @@ def test_connect_factory(self): # ensure we have the service types service_meta = tracer.writer.pop_services() expected = { - "db" : {"app":"postgres", "app_type":"db"}, - "another" : {"app":"postgres", "app_type":"db"}, + 'db': {'app': 'postgres', 'app_type': 'db'}, + 'another': {'app': 'postgres', 'app_type': 'db'}, } - eq_(service_meta, expected) + self.assertEquals(service_meta, expected) def test_commit(self): conn, tracer = self._get_conn_and_tracer() writer = tracer.writer conn.commit() spans = writer.pop() - eq_(len(spans), 1) + self.assertEquals(len(spans), 1) span = spans[0] - eq_(span.service, self.TEST_SERVICE) - eq_(span.name, 'postgres.connection.commit') + self.assertEquals(span.service, self.TEST_SERVICE) + self.assertEquals(span.name, 
'postgres.connection.commit') def test_rollback(self): conn, tracer = self._get_conn_and_tracer() writer = tracer.writer conn.rollback() spans = writer.pop() - eq_(len(spans), 1) + self.assertEquals(len(spans), 1) span = spans[0] - eq_(span.service, self.TEST_SERVICE) - eq_(span.name, 'postgres.connection.rollback') - - -class TestPsycopgPatch(PsycopgCore): - - def setUp(self): - patch() - - def tearDown(self): - unpatch() - - def _get_conn_and_tracer(self): - conn = psycopg2.connect(**POSTGRES_CONFIG) - tracer = get_dummy_tracer() - Pin.get_from(conn).clone(tracer=tracer).onto(conn) - - return conn, tracer - - def test_patch_unpatch(self): - tracer = get_dummy_tracer() - writer = tracer.writer - - # Test patch idempotence - patch() - patch() - - service = "fo" - - conn = psycopg2.connect(**POSTGRES_CONFIG) - Pin.get_from(conn).clone(service=service, tracer=tracer).onto(conn) - conn.cursor().execute("select 'blah'") - - spans = writer.pop() - assert spans, spans - eq_(len(spans), 1) - - # Test unpatch - unpatch() - - conn = psycopg2.connect(**POSTGRES_CONFIG) - conn.cursor().execute("select 'blah'") - - spans = writer.pop() - assert not spans, spans - - # Test patch again - patch() + self.assertEquals(span.service, self.TEST_SERVICE) + self.assertEquals(span.name, 'postgres.connection.rollback') + + @skipIf(PSYCOPG2_VERSION < (2, 7), 'SQL string composition not available in psycopg2<2.7') + def test_composed_query(self): + """ Checks whether execution of composed SQL string is traced """ + query = SQL(' union all ').join( + [SQL("""select 'one' as x"""), + SQL("""select 'two' as x""")]) + db, tracer = self._get_conn_and_tracer() - conn = psycopg2.connect(**POSTGRES_CONFIG) - Pin.get_from(conn).clone(service=service, tracer=tracer).onto(conn) - conn.cursor().execute("select 'blah'") + with db.cursor() as cur: + cur.execute(query=query) + rows = cur.fetchall() + assert len(rows) == 2, rows + assert rows[0][0] == 'one' + assert rows[1][0] == 'two' - spans = 
writer.pop() - assert spans, spans - eq_(len(spans), 1) + spans = tracer.writer.pop() + assert len(spans) == 2 + span, fetch_span = spans + self.assertEquals(span.name, 'postgres.query') + self.assertEquals(span.resource, query.as_string(db)) + self.assertEquals(fetch_span.name, 'postgres.query.fetchall') def test_backwards_compatibilty_v3(): tracer = get_dummy_tracer() - factory = connection_factory(tracer, service="my-postgres-db") + factory = connection_factory(tracer, service='my-postgres-db') conn = psycopg2.connect(connection_factory=factory, **POSTGRES_CONFIG) - conn.cursor().execute("select 'blah'") + conn.cursor().execute("""select 'blah'""") -@skipIf(PSYCOPG_VERSION < (2, 7), 'quote_ident not available in psycopg2<2.7') +@skipIf(PSYCOPG2_VERSION < (2, 7), 'quote_ident not available in psycopg2<2.7') def test_manual_wrap_extension_quote_ident_standalone(): from ddtrace import patch_all patch_all() diff --git a/tests/contrib/pylibmc/test.py b/tests/contrib/pylibmc/test.py index f659da15cfc..0baaba2a0dc 100644 --- a/tests/contrib/pylibmc/test.py +++ b/tests/contrib/pylibmc/test.py @@ -1,4 +1,3 @@ - # stdlib import time from unittest.case import SkipTest @@ -125,9 +124,9 @@ def test_get_set_multi(self): client, tracer = self.get_client() # test start = time.time() - client.set_multi({"a":1, "b":2}) + client.set_multi({'a': 1, 'b': 2}) out = client.get_multi(["a", "c"]) - eq_(out, {"a":1}) + eq_(out, {'a': 1}) client.delete_multi(["a", "c"]) end = time.time() # verify @@ -142,9 +141,9 @@ def test_get_set_multi_prefix(self): client, tracer = self.get_client() # test start = time.time() - client.set_multi({"a":1, "b":2}, key_prefix='foo') + client.set_multi({'a': 1, 'b': 2}, key_prefix='foo') out = client.get_multi(["a", "c"], key_prefix='foo') - eq_(out, {"a":1}) + eq_(out, {'a': 1}) client.delete_multi(["a", "c"], key_prefix='foo') end = time.time() # verify @@ -156,14 +155,13 @@ def test_get_set_multi_prefix(self): resources = sorted(s.resource for s in 
spans) eq_(expected_resources, resources) - def test_get_set_delete(self): client, tracer = self.get_client() # test k = u'cafe' v = "val-foo" start = time.time() - client.delete(k) # just in case + client.delete(k) # just in case out = client.get(k) assert out is None, out client.set(k, v) @@ -179,7 +177,6 @@ def test_get_set_delete(self): resources = sorted(s.resource for s in spans) eq_(expected_resources, resources) - def _verify_cache_span(self, s, start, end): assert s.start > start assert s.start + s.duration < end @@ -190,7 +187,6 @@ def _verify_cache_span(self, s, start, end): eq_(s.get_tag("out.port"), str(cfg["port"])) - class TestPylibmcLegacy(PylibmcCore): """Test suite for the tracing of pylibmc with the legacy TracedClient interface""" @@ -226,6 +222,7 @@ def get_client(self): return client, tracer + class TestPylibmcPatch(TestPylibmcPatchDefault): """Test suite for the tracing of pylibmc with a configured lib patching""" @@ -277,4 +274,3 @@ def test_patch_unpatch(self): spans = writer.pop() assert spans, spans eq_(len(spans), 1) - diff --git a/tests/contrib/pylons/app/middleware.py b/tests/contrib/pylons/app/middleware.py index 7e1fc41824d..13fd8c6fbbc 100644 --- a/tests/contrib/pylons/app/middleware.py +++ b/tests/contrib/pylons/app/middleware.py @@ -1,5 +1,6 @@ from webob import Request, Response + class ExceptionMiddleware(object): """A middleware which raises an exception.""" def __init__(self, app): @@ -8,6 +9,7 @@ def __init__(self, app): def __call__(self, environ, start_response): raise Exception('Middleware exception') + class ExceptionToSuccessMiddleware(object): """A middleware which catches any exceptions that occur in a later middleware and returns a successful request. 
diff --git a/tests/contrib/pymemcache/autopatch/__init__.py b/tests/contrib/pymemcache/autopatch/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/contrib/pymemcache/test_autopatch.py b/tests/contrib/pymemcache/autopatch/test.py similarity index 100% rename from tests/contrib/pymemcache/test_autopatch.py rename to tests/contrib/pymemcache/autopatch/test.py diff --git a/tests/contrib/pymemcache/utils.py b/tests/contrib/pymemcache/utils.py index 0607c3938bd..3d93ffb4f9b 100644 --- a/tests/contrib/pymemcache/utils.py +++ b/tests/contrib/pymemcache/utils.py @@ -1,8 +1,6 @@ import collections import socket -from ddtrace import Pin - class MockSocket(object): def __init__(self, recv_bufs, connect_failure=None): @@ -60,12 +58,3 @@ def _str(s): return s.decode() else: return str(s) - - -def check_spans(client): - pin = Pin.get_from(client) - tracer = pin.tracer - spans = tracer.writer.pop() - for span in spans: - assert span.service_name is memcachedx.CMD - return spans diff --git a/tests/contrib/pymongo/test.py b/tests/contrib/pymongo/test.py index d8b9470f428..3ee80b8296a 100644 --- a/tests/contrib/pymongo/test.py +++ b/tests/contrib/pymongo/test.py @@ -22,24 +22,24 @@ def test_normalize_filter(): cases = [ (None, {}), ( - {"team":"leafs"}, - {"team": "?"}, + {'team': 'leafs'}, + {'team': '?'}, ), ( - {"age": {"$gt" : 20}}, - {"age": {"$gt" : "?"}}, + {'age': {'$gt': 20}}, + {'age': {'$gt': '?'}}, ), ( - {"age": {"$gt" : 20}}, - {"age": {"$gt" : "?"}}, + {'age': {'$gt': 20}}, + {'age': {'$gt': '?'}}, ), ( - {"_id": {"$in" : [1, 2, 3]}}, - {"_id": {"$in" : "?"}}, + {'_id': {'$in': [1, 2, 3]}}, + {'_id': {'$in': '?'}}, ), ( - {"_id": {"$nin" : [1, 2, 3]}}, - {"_id": {"$nin" : "?"}}, + {'_id': {'$nin': [1, 2, 3]}}, + {'_id': {'$nin': '?'}}, ), ( @@ -48,14 +48,14 @@ def test_normalize_filter(): ), ( { - "status": "A", - "$or": [ { "age": { "$lt": 30 } }, { "type": 1 } ] + 'status': 'A', + '$or': [{'age': {'$lt': 30}}, {'type': 1}], }, { - 
"status": "?", - "$or": [ { "age": { "$lt": "?" } }, { "type": "?" } ] - } - ) + 'status': '?', + '$or': [{'age': {'$lt': '?'}}, {'type': '?'}], + }, + ), ] for i, expected in cases: out = normalize_filter(i) @@ -79,19 +79,19 @@ def test_update(self): # ensure we trace deletes tracer, client = self.get_tracer_and_client() writer = tracer.writer - db = client["testdb"] - db.drop_collection("songs") + db = client['testdb'] + db.drop_collection('songs') input_songs = [ - {'name' : 'Powderfinger', 'artist':'Neil'}, - {'name' : 'Harvest', 'artist':'Neil'}, - {'name' : 'Suzanne', 'artist':'Leonard'}, - {'name' : 'Partisan', 'artist':'Leonard'}, + {'name': 'Powderfinger', 'artist': 'Neil'}, + {'name': 'Harvest', 'artist': 'Neil'}, + {'name': 'Suzanne', 'artist': 'Leonard'}, + {'name': 'Partisan', 'artist': 'Leonard'}, ] db.songs.insert_many(input_songs) result = db.songs.update_many( - {"artist":"Neil"}, - {"$set": {"artist":"Shakey"}}, + {'artist': 'Neil'}, + {'$set': {'artist': 'Shakey'}}, ) eq_(result.matched_count, 2) @@ -103,16 +103,16 @@ def test_update(self): for span in spans: # ensure all the of the common metadata is set eq_(span.service, self.TEST_SERVICE) - eq_(span.span_type, "mongodb") - eq_(span.meta.get("mongodb.collection"), "songs") - eq_(span.meta.get("mongodb.db"), "testdb") - assert span.meta.get("out.host") - assert span.meta.get("out.port") + eq_(span.span_type, 'mongodb') + eq_(span.meta.get('mongodb.collection'), 'songs') + eq_(span.meta.get('mongodb.db'), 'testdb') + assert span.meta.get('out.host') + assert span.meta.get('out.port') expected_resources = set([ - "drop songs", + 'drop songs', 'update songs {"artist": "?"}', - "insert songs", + 'insert songs', ]) eq_(expected_resources, {s.resource for s in spans}) @@ -121,27 +121,27 @@ def test_delete(self): # ensure we trace deletes tracer, client = self.get_tracer_and_client() writer = tracer.writer - db = client["testdb"] - collection_name = "here.are.songs" + db = client['testdb'] + 
collection_name = 'here.are.songs' db.drop_collection(collection_name) input_songs = [ - {'name' : 'Powderfinger', 'artist':'Neil'}, - {'name' : 'Harvest', 'artist':'Neil'}, - {'name' : 'Suzanne', 'artist':'Leonard'}, - {'name' : 'Partisan', 'artist':'Leonard'}, + {'name': 'Powderfinger', 'artist': 'Neil'}, + {'name': 'Harvest', 'artist': 'Neil'}, + {'name': 'Suzanne', 'artist': 'Leonard'}, + {'name': 'Partisan', 'artist': 'Leonard'}, ] songs = db[collection_name] songs.insert_many(input_songs) # test delete one - af = {'artist':'Neil'} + af = {'artist': 'Neil'} eq_(songs.count(af), 2) songs.delete_one(af) eq_(songs.count(af), 1) # test delete many - af = {'artist':'Leonard'} + af = {'artist': 'Leonard'} eq_(songs.count(af), 2) songs.delete_many(af) eq_(songs.count(af), 0) @@ -152,21 +152,21 @@ def test_delete(self): for span in spans: # ensure all the of the common metadata is set eq_(span.service, self.TEST_SERVICE) - eq_(span.span_type, "mongodb") - eq_(span.meta.get("mongodb.collection"), collection_name) - eq_(span.meta.get("mongodb.db"), "testdb") - assert span.meta.get("out.host") - assert span.meta.get("out.port") + eq_(span.span_type, 'mongodb') + eq_(span.meta.get('mongodb.collection'), collection_name) + eq_(span.meta.get('mongodb.db'), 'testdb') + assert span.meta.get('out.host') + assert span.meta.get('out.port') expected_resources = [ - "drop here.are.songs", - "count here.are.songs", - "count here.are.songs", - "count here.are.songs", - "count here.are.songs", + 'drop here.are.songs', + 'count here.are.songs', + 'count here.are.songs', + 'count here.are.songs', + 'count here.are.songs', 'delete here.are.songs {"artist": "?"}', 'delete here.are.songs {"artist": "?"}', - "insert here.are.songs", + 'insert here.are.songs', ] eq_(sorted(expected_resources), sorted(s.resource for s in spans)) @@ -177,19 +177,19 @@ def test_insert_find(self): start = time.time() db = client.testdb - db.drop_collection("teams") + db.drop_collection('teams') teams = [ { - 
'name' : 'Toronto Maple Leafs', - 'established' : 1917, + 'name': 'Toronto Maple Leafs', + 'established': 1917, }, { - 'name' : 'Montreal Canadiens', - 'established' : 1910, + 'name': 'Montreal Canadiens', + 'established': 1910, }, { - 'name' : 'New York Rangers', - 'established' : 1926, + 'name': 'New York Rangers', + 'established': 1926, } ] @@ -199,36 +199,36 @@ def test_insert_find(self): db.teams.insert_many(teams[1:]) # wildcard query (using the [] syntax) - cursor = db["teams"].find() + cursor = db['teams'].find() count = 0 for row in cursor: count += 1 eq_(count, len(teams)) # scoped query (using the getattr syntax) - q = {"name": "Toronto Maple Leafs"} + q = {'name': 'Toronto Maple Leafs'} queried = list(db.teams.find(q)) end = time.time() eq_(len(queried), 1) - eq_(queried[0]["name"], "Toronto Maple Leafs") - eq_(queried[0]["established"], 1917) + eq_(queried[0]['name'], 'Toronto Maple Leafs') + eq_(queried[0]['established'], 1917) spans = writer.pop() for span in spans: # ensure all the of the common metadata is set eq_(span.service, self.TEST_SERVICE) - eq_(span.span_type, "mongodb") - eq_(span.meta.get("mongodb.collection"), "teams") - eq_(span.meta.get("mongodb.db"), "testdb") - assert span.meta.get("out.host"), span.pprint() - assert span.meta.get("out.port"), span.pprint() + eq_(span.span_type, 'mongodb') + eq_(span.meta.get('mongodb.collection'), 'teams') + eq_(span.meta.get('mongodb.db'), 'testdb') + assert span.meta.get('out.host'), span.pprint() + assert span.meta.get('out.port'), span.pprint() assert span.start > start assert span.duration < end - start expected_resources = [ - "drop teams", - "insert teams", - "insert teams", + 'drop teams', + 'insert teams', + 'insert teams', ] # query names should be used in >3.1 @@ -245,7 +245,7 @@ def test_insert_find(self): eq_(spans[-2].get_tag('mongodb.query'), None) # confirm query tag find with query criteria on name - eq_(spans[-1].get_tag('mongodb.query'), "{'name': '?'}") + 
eq_(spans[-1].get_tag('mongodb.query'), '{\'name\': \'?\'}') def test_update_ot(self): """OpenTracing version of test_update.""" @@ -254,18 +254,18 @@ def test_update_ot(self): writer = tracer.writer with ot_tracer.start_active_span('mongo_op'): - db = client["testdb"] - db.drop_collection("songs") + db = client['testdb'] + db.drop_collection('songs') input_songs = [ - {'name' : 'Powderfinger', 'artist':'Neil'}, - {'name' : 'Harvest', 'artist':'Neil'}, - {'name' : 'Suzanne', 'artist':'Leonard'}, - {'name' : 'Partisan', 'artist':'Leonard'}, + {'name': 'Powderfinger', 'artist': 'Neil'}, + {'name': 'Harvest', 'artist': 'Neil'}, + {'name': 'Suzanne', 'artist': 'Leonard'}, + {'name': 'Partisan', 'artist': 'Leonard'}, ] db.songs.insert_many(input_songs) result = db.songs.update_many( - {"artist":"Neil"}, - {"$set": {"artist":"Shakey"}}, + {'artist': 'Neil'}, + {'$set': {'artist': 'Shakey'}}, ) eq_(result.matched_count, 2) @@ -286,16 +286,16 @@ def test_update_ot(self): eq_(span.parent_id, ot_span.span_id) # ensure all the of the common metadata is set eq_(span.service, self.TEST_SERVICE) - eq_(span.span_type, "mongodb") - eq_(span.meta.get("mongodb.collection"), "songs") - eq_(span.meta.get("mongodb.db"), "testdb") - assert span.meta.get("out.host") - assert span.meta.get("out.port") + eq_(span.span_type, 'mongodb') + eq_(span.meta.get('mongodb.collection'), 'songs') + eq_(span.meta.get('mongodb.db'), 'testdb') + assert span.meta.get('out.host') + assert span.meta.get('out.port') expected_resources = set([ - "drop songs", + 'drop songs', 'update songs {"artist": "?"}', - "insert songs", + 'insert songs', ]) eq_(expected_resources, {s.resource for s in spans[1:]}) @@ -333,8 +333,8 @@ def get_tracer_and_client(self): def test_service(self): tracer, client = self.get_tracer_and_client() writer = tracer.writer - db = client["testdb"] - db.drop_collection("songs") + db = client['testdb'] + db.drop_collection('songs') services = writer.pop_services() eq_(len(services), 1) @@ 
-346,12 +346,12 @@ def test_service(self): def test_host_kwarg(self): # simulate what celery and django do when instantiating a new client conf = { - 'host': 'localhost' + 'host': 'localhost', } client = pymongo.MongoClient(**conf) conf = { - 'host': None + 'host': None, } client = pymongo.MongoClient(**conf) @@ -385,7 +385,7 @@ def test_patch_unpatch(self): client = pymongo.MongoClient(port=MONGO_CONFIG['port']) Pin.get_from(client).clone(tracer=tracer).onto(client) - client["testdb"].drop_collection("whatever") + client['testdb'].drop_collection('whatever') spans = writer.pop() assert spans, spans @@ -395,7 +395,7 @@ def test_patch_unpatch(self): unpatch() client = pymongo.MongoClient(port=MONGO_CONFIG['port']) - client["testdb"].drop_collection("whatever") + client['testdb'].drop_collection('whatever') spans = writer.pop() assert not spans, spans @@ -405,9 +405,8 @@ def test_patch_unpatch(self): client = pymongo.MongoClient(port=MONGO_CONFIG['port']) Pin.get_from(client).clone(tracer=tracer).onto(client) - client["testdb"].drop_collection("whatever") + client['testdb'].drop_collection('whatever') spans = writer.pop() assert spans, spans eq_(len(spans), 1) - diff --git a/tests/contrib/pymongo/test_spec.py b/tests/contrib/pymongo/test_spec.py index fd6d1b19573..88ec55b0ece 100644 --- a/tests/contrib/pymongo/test_spec.py +++ b/tests/contrib/pymongo/test_spec.py @@ -12,12 +12,14 @@ def test_empty(): cmd = parse_spec(SON([])) assert cmd is None + def test_create(): - cmd = parse_spec(SON([("create", "foo")])) - eq_(cmd.name, "create") - eq_(cmd.coll, "foo") + cmd = parse_spec(SON([('create', 'foo')])) + eq_(cmd.name, 'create') + eq_(cmd.coll, 'foo') eq_(cmd.tags, {}) - eq_(cmd.metrics ,{}) + eq_(cmd.metrics, {}) + def test_insert(): spec = SON([ @@ -26,10 +28,11 @@ def test_insert(): ('documents', ['a', 'b']), ]) cmd = parse_spec(spec) - eq_(cmd.name, "insert") - eq_(cmd.coll, "bla") - eq_(cmd.tags, {'mongodb.ordered':True}) - eq_(cmd.metrics, 
{'mongodb.documents':2}) + eq_(cmd.name, 'insert') + eq_(cmd.coll, 'bla') + eq_(cmd.tags, {'mongodb.ordered': True}) + eq_(cmd.metrics, {'mongodb.documents': 2}) + def test_update(): spec = SON([ @@ -45,6 +48,6 @@ def test_update(): ]) ]) cmd = parse_spec(spec) - eq_(cmd.name, "update") - eq_(cmd.coll, "songs") - eq_(cmd.query, {'artist':'Neil'}) + eq_(cmd.name, 'update') + eq_(cmd.coll, 'songs') + eq_(cmd.query, {'artist': 'Neil'}) diff --git a/tests/contrib/pymysql/test_pymysql.py b/tests/contrib/pymysql/test_pymysql.py index 47404aa77db..6fce206ce9f 100644 --- a/tests/contrib/pymysql/test_pymysql.py +++ b/tests/contrib/pymysql/test_pymysql.py @@ -214,6 +214,7 @@ def test_rollback(self): eq_(span.service, self.TEST_SERVICE) eq_(span.name, 'pymysql.connection.rollback') + class TestPyMysqlPatch(PyMySQLCore, TestCase): def _get_conn_tracer(self): if not self.conn: diff --git a/tests/contrib/pyramid/app/web.py b/tests/contrib/pyramid/app/web.py index f06b02c6b30..a88d9526be6 100644 --- a/tests/contrib/pyramid/app/web.py +++ b/tests/contrib/pyramid/app/web.py @@ -7,7 +7,6 @@ HTTPInternalServerError, HTTPFound, HTTPNotFound, - HTTPException, HTTPNoContent, ) diff --git a/tests/contrib/pyramid/test_pyramid.py b/tests/contrib/pyramid/test_pyramid.py index 957509999d3..03e6e826ee7 100644 --- a/tests/contrib/pyramid/test_pyramid.py +++ b/tests/contrib/pyramid/test_pyramid.py @@ -12,7 +12,6 @@ from tests.opentracer.utils import init_tracer from ...test_tracer import get_dummy_tracer -from ...util import override_global_tracer class PyramidBase(object): @@ -39,6 +38,7 @@ def get_settings(self): def override_settings(self, settings): self.create_app(settings) + class PyramidTestCase(PyramidBase): """Pyramid TestCase that includes tests for automatic instrumentation""" @@ -224,7 +224,10 @@ def test_insert_tween_if_needed_none(self): def test_insert_tween_if_needed_excview(self): settings = {'pyramid.tweens': 'pyramid.tweens.excview_tween_factory'} 
insert_tween_if_needed(settings) - eq_(settings['pyramid.tweens'], 'ddtrace.contrib.pyramid:trace_tween_factory\npyramid.tweens.excview_tween_factory') + eq_( + settings['pyramid.tweens'], + 'ddtrace.contrib.pyramid:trace_tween_factory\npyramid.tweens.excview_tween_factory', + ) def test_insert_tween_if_needed_excview_and_other(self): settings = {'pyramid.tweens': 'a.first.tween\npyramid.tweens.excview_tween_factory\na.last.tween\n'} @@ -277,6 +280,7 @@ def test_200_ot(self): eq_(dd_span.meta.get('http.url'), '/') eq_(dd_span.meta.get('pyramid.route.name'), 'index') + def includeme(config): pass @@ -314,7 +318,7 @@ def test_distributed_tracing(self): 'x-datadog-parent-id': '42', 'x-datadog-sampling-priority': '2', } - res = self.app.get('/', headers=headers, status=200) + self.app.get('/', headers=headers, status=200) writer = self.tracer.writer spans = writer.pop() eq_(len(spans), 1) diff --git a/tests/contrib/pyramid/test_pyramid_autopatch.py b/tests/contrib/pyramid/test_pyramid_autopatch.py index b0609350515..36106a162ae 100644 --- a/tests/contrib/pyramid/test_pyramid_autopatch.py +++ b/tests/contrib/pyramid/test_pyramid_autopatch.py @@ -1,18 +1,6 @@ -# stdlib -import sys -import webtest -import ddtrace - from nose.tools import eq_ from pyramid.config import Configurator -# 3p -from wsgiref.simple_server import make_server - -# project -from ...test_tracer import get_dummy_tracer -from ...util import override_global_tracer - from .test_pyramid import PyramidTestCase, PyramidBase @@ -40,7 +28,7 @@ def test_distributed_tracing(self): 'x-datadog-parent-id': '42', 'x-datadog-sampling-priority': '2', } - res = self.app.get('/', headers=headers, status=200) + self.app.get('/', headers=headers, status=200) writer = self.tracer.writer spans = writer.pop() eq_(len(spans), 1) diff --git a/tests/contrib/redis/test.py b/tests/contrib/redis/test.py index 9d3d361e9da..67cd7a9f09b 100644 --- a/tests/contrib/redis/test.py +++ b/tests/contrib/redis/test.py @@ -175,6 +175,7 @@ 
def _assert_pipeline_immediate(conn, tracer, service): eq_(span.get_tag('out.redis_db'), '0') eq_(span.get_tag('out.host'), 'localhost') + def _assert_pipeline_traced(conn, tracer, service): writer = tracer.writer @@ -197,6 +198,7 @@ def _assert_pipeline_traced(conn, tracer, service): eq_(span.get_tag('redis.raw_command'), u'SET blah 32\nRPUSH foo éé\nHGETALL xxx') eq_(span.get_metric('redis.pipeline_length'), 3) + def _assert_conn_traced(conn, tracer, service): us = conn.get('cheese') eq_(us, None) diff --git a/tests/contrib/sqlalchemy/__init__.py b/tests/contrib/sqlalchemy/__init__.py index 8b137891791..e69de29bb2d 100644 --- a/tests/contrib/sqlalchemy/__init__.py +++ b/tests/contrib/sqlalchemy/__init__.py @@ -1 +0,0 @@ - diff --git a/tests/contrib/sqlalchemy/mixins.py b/tests/contrib/sqlalchemy/mixins.py index 86d5107f4e4..743b39c8935 100644 --- a/tests/contrib/sqlalchemy/mixins.py +++ b/tests/contrib/sqlalchemy/mixins.py @@ -134,7 +134,10 @@ def test_session_query(self): # span fields eq_(span.name, '{}.query'.format(self.VENDOR)) eq_(span.service, self.SERVICE) - ok_('SELECT players.id AS players_id, players.name AS players_name \nFROM players \nWHERE players.name' in span.resource) + ok_( + 'SELECT players.id AS players_id, players.name AS players_name \nFROM players \nWHERE players.name' + in span.resource + ) eq_(span.get_tag('sql.db'), self.SQL_DB) self.check_meta(span) eq_(span.span_type, 'sql') diff --git a/tests/contrib/sqlite3/test_sqlite3.py b/tests/contrib/sqlite3/test_sqlite3.py index cc6c2597fdd..b0445ac8c5d 100644 --- a/tests/contrib/sqlite3/test_sqlite3.py +++ b/tests/contrib/sqlite3/test_sqlite3.py @@ -28,6 +28,7 @@ def test_backwards_compat(): assert not rows.fetchall() assert not tracer.writer.pop() + class TestSQLite(object): def setUp(self): patch() @@ -40,7 +41,7 @@ def test_service_info(self): backup_tracer = ddtrace.tracer ddtrace.tracer = tracer - db = sqlite3.connect(':memory:') + sqlite3.connect(':memory:') services = 
tracer.writer.pop_services() eq_(len(services), 1) @@ -121,10 +122,10 @@ def test_sqlite_fetchall_is_traced(self): spans = tracer.writer.pop() eq_(len(spans), 2) - + execute_span = spans[0] fetchall_span = spans[1] - + # Execute span eq_(execute_span.name, 'sqlite.query') eq_(execute_span.span_type, 'sql') @@ -260,7 +261,6 @@ def test_rollback(self): eq_(span.service, 'sqlite') eq_(span.name, 'sqlite.connection.rollback') - def test_patch_unpatch(self): tracer = get_dummy_tracer() writer = tracer.writer diff --git a/tests/contrib/test_utils.py b/tests/contrib/test_utils.py index 3bbda22de2c..d907ffad31e 100644 --- a/tests/contrib/test_utils.py +++ b/tests/contrib/test_utils.py @@ -2,7 +2,6 @@ from functools import partial from ddtrace.utils.importlib import func_name -from ddtrace.utils.formats import asbool class SomethingCallable(object): @@ -32,13 +31,16 @@ def some_function(): """ return 'nothing' -def minus(a,b): + +def minus(a, b): return a - b -minus_two = partial(minus, b=2) # partial funcs need special handling (no module) + +minus_two = partial(minus, b=2) # partial funcs need special handling (no module) # disabling flake8 test below, yes, declaring a func like this is bad, we know -plus_three = lambda x : x + 3 # NOQA +plus_three = lambda x : x + 3 # noqa + class TestContrib(object): """ @@ -55,7 +57,7 @@ def test_func_name(self): eq_(f, f.me()) eq_('tests.contrib.test_utils.me', func_name(f.me)) - eq_(3, f.add(1,2)) + eq_(3, f.add(1, 2)) eq_('tests.contrib.test_utils.add', func_name(f.add)) eq_(42, f.answer()) eq_('tests.contrib.test_utils.answer', func_name(f.answer)) diff --git a/tests/contrib/tornado/test_executor_decorator.py b/tests/contrib/tornado/test_executor_decorator.py index b671616c0f6..bfe21b9bcf3 100644 --- a/tests/contrib/tornado/test_executor_decorator.py +++ b/tests/contrib/tornado/test_executor_decorator.py @@ -167,7 +167,7 @@ def test_on_executor_custom_args_kwarg(self): def test_futures_double_instrumentation(self): # it should not 
double wrap `ThreadpPoolExecutor.submit` method if # `futures` is already instrumented - from ddtrace import patch; patch(futures=True) + from ddtrace import patch; patch(futures=True) # noqa from concurrent.futures import ThreadPoolExecutor from wrapt import BoundFunctionWrapper diff --git a/tests/contrib/vertica/fixtures.py b/tests/contrib/vertica/fixtures.py deleted file mode 100644 index 54dd7b44b74..00000000000 --- a/tests/contrib/vertica/fixtures.py +++ /dev/null @@ -1,41 +0,0 @@ -# 3p - -# project -import ddtrace -from ddtrace.contrib.vertica.patch import patch, unpatch - -# testing -import pytest -from tests.contrib.config import VERTICA_CONFIG -from tests.test_tracer import get_dummy_tracer - - -TEST_TABLE = "test_table" - - -@pytest.fixture -def test_tracer(): - return get_dummy_tracer() - - -@pytest.fixture -def test_conn(test_tracer): - ddtrace.tracer = test_tracer - patch() - - import vertica_python # must happen AFTER installing with patch() - - conn = vertica_python.connect(**VERTICA_CONFIG) - cur = conn.cursor() - cur.execute("DROP TABLE IF EXISTS {}".format(TEST_TABLE)) - cur.execute( - """CREATE TABLE {} ( - a INT, - b VARCHAR(32) - ) - """.format( - TEST_TABLE - ) - ) - test_tracer.writer.pop() - return conn, cur diff --git a/tests/contrib/vertica/test_vertica.py b/tests/contrib/vertica/test_vertica.py index dc4972aa157..d110ff7c184 100644 --- a/tests/contrib/vertica/test_vertica.py +++ b/tests/contrib/vertica/test_vertica.py @@ -1,24 +1,60 @@ -# stdlib - # 3p +import pytest import wrapt # project -from ddtrace import Pin +import ddtrace +from ddtrace import Pin, config from ddtrace.contrib.vertica.patch import patch, unpatch from ddtrace.ext import errors +from ddtrace.utils.merge import deepmerge # testing -import pytest +from tests.base import BaseTestCase from tests.contrib.config import VERTICA_CONFIG from tests.opentracer.utils import init_tracer from tests.test_tracer import get_dummy_tracer -from .fixtures import test_conn, test_tracer, 
TEST_TABLE -from .utils import override_config +TEST_TABLE = "test_table" + + +@pytest.fixture(scope='function') +def test_tracer(request): + request.cls.test_tracer = get_dummy_tracer() + return request.cls.test_tracer + + +@pytest.fixture(scope='function') +def test_conn(request, test_tracer): + ddtrace.tracer = test_tracer + patch() + + import vertica_python # must happen AFTER installing with patch() + + conn = vertica_python.connect(**VERTICA_CONFIG) + + cur = conn.cursor() + cur.execute("DROP TABLE IF EXISTS {}".format(TEST_TABLE)) + cur.execute( + """CREATE TABLE {} ( + a INT, + b VARCHAR(32) + ) + """.format( + TEST_TABLE + ) + ) + test_tracer.writer.pop() + + request.cls.test_conn = (conn, cur) + return conn, cur -class TestVerticaPatching(object): +class TestVerticaPatching(BaseTestCase): + def tearDown(self): + super(TestVerticaPatching, self).tearDown() + unpatch() + def test_not_patched(self): """Ensure that vertica is not patched somewhere before our tests.""" import vertica_python @@ -95,68 +131,75 @@ def test_unpatch_after_import(self): ) -class TestVertica(object): - def teardown_method(self, method): +@pytest.mark.usefixtures('test_tracer', 'test_conn') +class TestVertica(BaseTestCase): + def tearDown(self): + super(TestVertica, self).tearDown() + unpatch() - @override_config({"service_name": "test_svc_name"}) def test_configuration_service_name(self): """Ensure that the integration can be configured.""" - patch() - import vertica_python + with self.override_config('vertica', dict(service_name='test_svc_name')): + patch() + import vertica_python - test_tracer = get_dummy_tracer() + test_tracer = get_dummy_tracer() - conn = vertica_python.connect(**VERTICA_CONFIG) - cur = conn.cursor() - Pin.override(cur, tracer=test_tracer) - with conn: - cur.execute("DROP TABLE IF EXISTS {}".format(TEST_TABLE)) + conn = vertica_python.connect(**VERTICA_CONFIG) + cur = conn.cursor() + Pin.override(cur, tracer=test_tracer) + with conn: + cur.execute("DROP TABLE IF 
EXISTS {}".format(TEST_TABLE)) spans = test_tracer.writer.pop() assert len(spans) == 1 assert spans[0].service == "test_svc_name" - @override_config( - { - "patch": { - "vertica_python.vertica.connection.Connection": { - "routines": { - "cursor": { - "operation_name": "get_cursor", - "trace_enabled": True, - } - } - } - } - } - ) def test_configuration_routine(self): """Ensure that the integration routines can be configured.""" - patch() - import vertica_python + routine_config = dict( + patch={ + 'vertica_python.vertica.connection.Connection': dict( + routines=dict( + cursor=dict( + operation_name='get_cursor', + trace_enabled=True, + ), + ), + ), + }, + ) - test_tracer = get_dummy_tracer() + # Make a copy of the vertica config first before we merge our settings over + # DEV: First argument gets merged into the second + copy = deepmerge(config.vertica, dict()) + overrides = deepmerge(routine_config, copy) + with self.override_config('vertica', overrides): + patch() + import vertica_python - conn = vertica_python.connect(**VERTICA_CONFIG) - Pin.override(conn, service="mycustomservice", tracer=test_tracer) - conn.cursor() # should be traced now - conn.close() + test_tracer = get_dummy_tracer() + + conn = vertica_python.connect(**VERTICA_CONFIG) + Pin.override(conn, service="mycustomservice", tracer=test_tracer) + conn.cursor() # should be traced now + conn.close() spans = test_tracer.writer.pop() assert len(spans) == 1 assert spans[0].name == "get_cursor" assert spans[0].service == "mycustomservice" - def test_execute_metadata(self, test_conn, test_tracer): + def test_execute_metadata(self): """Metadata related to an `execute` call should be captured.""" - conn, cur = test_conn + conn, cur = self.test_conn - Pin.override(cur, tracer=test_tracer) + Pin.override(cur, tracer=self.test_tracer) with conn: cur.execute("INSERT INTO {} (a, b) VALUES (1, 'aa');".format(TEST_TABLE)) cur.execute("SELECT * FROM {};".format(TEST_TABLE)) - spans = test_tracer.writer.pop() + spans 
= self.test_tracer.writer.pop() assert len(spans) == 2 # check all the metadata @@ -173,18 +216,17 @@ def test_execute_metadata(self, test_conn, test_tracer): assert spans[1].resource == "SELECT * FROM test_table;" - def test_cursor_override(self, test_conn): + def test_cursor_override(self): """Test overriding the tracer with our own.""" - conn, cur = test_conn + conn, cur = self.test_conn - test_tracer = get_dummy_tracer() - Pin.override(cur, tracer=test_tracer) + Pin.override(cur, tracer=self.test_tracer) with conn: cur.execute("INSERT INTO {} (a, b) VALUES (1, 'aa');".format(TEST_TABLE)) cur.execute("SELECT * FROM {};".format(TEST_TABLE)) - spans = test_tracer.writer.pop() + spans = self.test_tracer.writer.pop() assert len(spans) == 2 # check all the metadata @@ -199,16 +241,16 @@ def test_cursor_override(self, test_conn): assert spans[1].resource == "SELECT * FROM test_table;" - def test_execute_exception(self, test_conn, test_tracer): + def test_execute_exception(self): """Exceptions should result in appropriate span tagging.""" from vertica_python.errors import VerticaSyntaxError - conn, cur = test_conn + conn, cur = self.test_conn with conn, pytest.raises(VerticaSyntaxError): cur.execute("INVALID QUERY") - spans = test_tracer.writer.pop() + spans = self.test_tracer.writer.pop() assert len(spans) == 2 # check all the metadata @@ -221,12 +263,12 @@ def test_execute_exception(self, test_conn, test_tracer): assert spans[1].resource == "COMMIT;" - def test_rowcount_oddity(self, test_conn, test_tracer): + def test_rowcount_oddity(self): """Vertica treats rowcount specially. Ensure we handle it. 
See https://github.com/vertica/vertica-python/tree/029a65a862da893e7bd641a68f772019fd9ecc99#rowcount-oddities """ - conn, cur = test_conn + conn, cur = self.test_conn with conn: cur.execute( @@ -256,7 +298,7 @@ def test_rowcount_oddity(self, test_conn, test_tracer): cur.fetchall() cur.rowcount == 5 - spans = test_tracer.writer.pop() + spans = self.test_tracer.writer.pop() assert len(spans) == 9 # check all the rowcounts @@ -273,15 +315,15 @@ def test_rowcount_oddity(self, test_conn, test_tracer): assert spans[4].name == "vertica.fetchall" assert spans[4].get_metric("db.rowcount") == 5 - def test_nextset(self, test_conn, test_tracer): + def test_nextset(self): """cursor.nextset() should be traced.""" - conn, cur = test_conn + conn, cur = self.test_conn with conn: cur.execute("SELECT * FROM {0}; SELECT * FROM {0}".format(TEST_TABLE)) cur.nextset() - spans = test_tracer.writer.pop() + spans = self.test_tracer.writer.pop() assert len(spans) == 3 # check all the rowcounts @@ -292,9 +334,9 @@ def test_nextset(self, test_conn, test_tracer): assert spans[2].name == "vertica.query" assert spans[2].resource == "COMMIT;" - def test_copy(self, test_conn, test_tracer): + def test_copy(self): """cursor.copy() should be traced.""" - conn, cur = test_conn + conn, cur = self.test_conn with conn: cur.copy( @@ -302,7 +344,7 @@ def test_copy(self, test_conn, test_tracer): "1,foo\n2,bar", ) - spans = test_tracer.writer.pop() + spans = self.test_tracer.writer.pop() assert len(spans) == 2 # check all the rowcounts @@ -312,17 +354,17 @@ def test_copy(self, test_conn, test_tracer): assert spans[1].name == "vertica.query" assert spans[1].resource == "COMMIT;" - def test_opentracing(self, test_conn, test_tracer): + def test_opentracing(self): """Ensure OpenTracing works with vertica.""" - conn, cur = test_conn + conn, cur = self.test_conn - ot_tracer = init_tracer("vertica_svc", test_tracer) + ot_tracer = init_tracer("vertica_svc", self.test_tracer) with 
ot_tracer.start_active_span("vertica_execute"): cur.execute("INSERT INTO {} (a, b) VALUES (1, 'aa');".format(TEST_TABLE)) conn.close() - spans = test_tracer.writer.pop() + spans = self.test_tracer.writer.pop() assert len(spans) == 2 ot_span, dd_span = spans diff --git a/tests/contrib/vertica/utils.py b/tests/contrib/vertica/utils.py deleted file mode 100644 index 6653d3d0bd8..00000000000 --- a/tests/contrib/vertica/utils.py +++ /dev/null @@ -1,36 +0,0 @@ -from copy import deepcopy - -# https://stackoverflow.com/a/7205107 -def merge(a, b, path=None): - """merges b into a""" - if path is None: - path = [] - for key in b: - if key in a: - if isinstance(a[key], dict) and isinstance(b[key], dict): - merge(a[key], b[key], path + [str(key)]) - elif a[key] == b[key]: - pass # same leaf value - else: - a[key] = b[key] - else: - a[key] = b[key] - return a - - -def override_config(custom_conf): - """Overrides the vertica configuration and reinstalls the previous - afterwards.""" - from ddtrace import config - - def provide_config(func): - def wrapper(*args, **kwargs): - orig = deepcopy(config.vertica) - merge(config.vertica, custom_conf) - r = func(*args, **kwargs) - config._add("vertica", orig) - return r - - return wrapper - - return provide_config diff --git a/tests/ddtrace_run.py b/tests/ddtrace_run.py index 9652e137e90..0b5c625c5a7 100644 --- a/tests/ddtrace_run.py +++ b/tests/ddtrace_run.py @@ -1,8 +1,9 @@ import os import sys +# DEV: We must append to sys path before importing ddtrace_run sys.path.append('.') -from ddtrace.commands import ddtrace_run +from ddtrace.commands import ddtrace_run # noqa os.environ['PYTHONPATH'] = "{}:{}".format(os.getenv('PYTHONPATH'), os.path.abspath('.')) ddtrace_run.main() diff --git a/tests/memory.py b/tests/memory.py index 00848545d2c..bfa50887f36 100644 --- a/tests/memory.py +++ b/tests/memory.py @@ -65,6 +65,7 @@ def _ping_pylibmc(self, i): self._pylibmc.incr("a", 2) self._pylibmc.decr("a", 1) + if __name__ == '__main__': k = 
KitchenSink() t = pympler.tracker.SummaryTracker() diff --git a/tests/monkey.py b/tests/monkey.py deleted file mode 100644 index ab1f611ed54..00000000000 --- a/tests/monkey.py +++ /dev/null @@ -1,22 +0,0 @@ -""" auto patch things. """ - -# manual test for monkey patching -import logging -import sys - -# project -import ddtrace - -# allow logging -logging.basicConfig(stream=sys.stdout, level=logging.DEBUG) - -ddtrace.tracer.debug_logging = True - -# Patch nothing -ddtrace.patch() - -# Patch all except Redis -ddtrace.patch_all(redis=False) - -# Patch Redis -ddtrace.patch(redis=True) diff --git a/tests/opentracer/conftest.py b/tests/opentracer/conftest.py new file mode 100644 index 00000000000..f1d052415be --- /dev/null +++ b/tests/opentracer/conftest.py @@ -0,0 +1,61 @@ +""" +pytest local plugin used to automatically make the following fixtures +available for all tests in this directory + +https://docs.pytest.org/en/latest/writing_plugins.html#testing-plugins +""" +import pytest + +from ddtrace.opentracer import Tracer, set_global_tracer + +from tests.test_tracer import get_dummy_tracer + + +@pytest.fixture() +def ot_tracer_factory(): + """Fixture which returns an opentracer ready to use for testing.""" + + def make_ot_tracer( + service_name="my_svc", config=None, scope_manager=None, context_provider=None + ): + config = config or {} + tracer = Tracer( + service_name=service_name, config=config, scope_manager=scope_manager + ) + + # similar to how we test the ddtracer, use a dummy tracer + dd_tracer = get_dummy_tracer() + if context_provider: + dd_tracer.configure(context_provider=context_provider) + + # attach the dummy tracer to the opentracer + tracer._dd_tracer = dd_tracer + return tracer + + return make_ot_tracer + + +@pytest.fixture() +def ot_tracer(ot_tracer_factory): + """Fixture for a default opentracer.""" + return ot_tracer_factory() + + +@pytest.fixture() +def global_tracer(ot_tracer): + """A function similar to one OpenTracing users would write to 
initialize + their OpenTracing tracer. + """ + set_global_tracer(ot_tracer) + + return ot_tracer + + +@pytest.fixture() +def writer(ot_tracer): + return ot_tracer._dd_tracer.writer + + +@pytest.fixture() +def dd_tracer(ot_tracer): + return ot_tracer._dd_tracer diff --git a/tests/opentracer/test_dd_compatibility.py b/tests/opentracer/test_dd_compatibility.py index 22579166c32..1e01c57b993 100644 --- a/tests/opentracer/test_dd_compatibility.py +++ b/tests/opentracer/test_dd_compatibility.py @@ -4,8 +4,6 @@ from ddtrace.opentracer.span_context import SpanContext -from tests.opentracer.utils import ot_tracer_factory, ot_tracer, dd_tracer, writer, global_tracer - class TestTracerCompatibility(object): """Ensure that our opentracer produces results in the underlying ddtracer.""" @@ -178,7 +176,7 @@ def test_distributed_trace_propagation(self, ot_tracer, dd_tracer, writer): # extract should activate the span so that a subsequent start_span # will inherit from the propagated span context - ext_span_ctx = ot_tracer.extract(Format.HTTP_HEADERS, carrier) + ot_tracer.extract(Format.HTTP_HEADERS, carrier) with dd_tracer.trace('test') as span: pass diff --git a/tests/opentracer/test_span.py b/tests/opentracer/test_span.py index 60b918dfcc2..99931b59e8f 100644 --- a/tests/opentracer/test_span.py +++ b/tests/opentracer/test_span.py @@ -11,12 +11,14 @@ def nop_tracer(): tracer._tracer = get_dummy_tracer() return tracer + @pytest.fixture def nop_span_ctx(): from ddtrace.ext.priority import AUTO_KEEP from ddtrace.opentracer.span_context import SpanContext return SpanContext(sampling_priority=AUTO_KEEP, sampled=True) + @pytest.fixture def nop_span(nop_tracer, nop_span_ctx): return Span(nop_tracer, nop_span_ctx, 'my_op_name') diff --git a/tests/opentracer/test_tracer.py b/tests/opentracer/test_tracer.py index bcf7a0d91aa..0fa306d390d 100644 --- a/tests/opentracer/test_tracer.py +++ b/tests/opentracer/test_tracer.py @@ -15,7 +15,6 @@ from ddtrace.settings import ConfigException import 
pytest -from .utils import ot_tracer_factory, ot_tracer, writer class TestTracerConfig(object): @@ -71,6 +70,25 @@ def test_invalid_config_key(self): assert ["enabeld", "setttings"] in str(ce_info) assert tracer is not None + def test_global_tags(self): + """Global tags should be passed from the opentracer to the tracer.""" + config = { + 'global_tags': { + 'tag1': 'value1', + 'tag2': 2, + }, + } + + tracer = Tracer(service_name='mysvc', config=config) + with tracer.start_span('myop') as span: + # global tags should be attached to generated all datadog spans + assert span._dd_span.get_tag('tag1') == 'value1' + assert span._dd_span.get_tag('tag2') == '2' + + with tracer.start_span('myop2') as span2: + assert span2._dd_span.get_tag('tag1') == 'value1' + assert span2._dd_span.get_tag('tag2') == '2' + class TestTracer(object): def test_start_span(self, ot_tracer, writer): diff --git a/tests/opentracer/test_tracer_asyncio.py b/tests/opentracer/test_tracer_asyncio.py index 74a87d759e3..8e716a469d3 100644 --- a/tests/opentracer/test_tracer_asyncio.py +++ b/tests/opentracer/test_tracer_asyncio.py @@ -1,3 +1,5 @@ +# flake8: noqa +# DEV: Skip linting, we lint with Python 2, we'll get SyntaxErrors from `yield from` import asyncio import pytest from opentracing.scope_managers.asyncio import AsyncioScopeManager @@ -6,7 +8,7 @@ from ddtrace.opentracer.utils import get_context_provider_for_scope_manager from tests.contrib.asyncio.utils import AsyncioTestCase, mark_asyncio -from .utils import ot_tracer_factory, dd_tracer, writer +from .conftest import dd_tracer, ot_tracer_factory, writer @pytest.fixture() diff --git a/tests/opentracer/test_tracer_gevent.py b/tests/opentracer/test_tracer_gevent.py index 85c649f5587..9c3b7f04ecb 100644 --- a/tests/opentracer/test_tracer_gevent.py +++ b/tests/opentracer/test_tracer_gevent.py @@ -6,8 +6,6 @@ from ddtrace.contrib.gevent import patch, unpatch from ddtrace.opentracer.utils import get_context_provider_for_scope_manager -from .utils import 
ot_tracer_factory, dd_tracer, writer - @pytest.fixture() def ot_tracer(ot_tracer_factory): diff --git a/tests/opentracer/test_tracer_tornado.py b/tests/opentracer/test_tracer_tornado.py index cdabc210715..051741416ed 100644 --- a/tests/opentracer/test_tracer_tornado.py +++ b/tests/opentracer/test_tracer_tornado.py @@ -1,10 +1,6 @@ import pytest from opentracing.scope_managers.tornado import TornadoScopeManager -import ddtrace - -from tests.opentracer.utils import ot_tracer_factory, ot_tracer, writer - @pytest.fixture() def ot_tracer(ot_tracer_factory): diff --git a/tests/opentracer/test_utils.py b/tests/opentracer/test_utils.py index 28651bcd670..d38c0e55cb0 100644 --- a/tests/opentracer/test_utils.py +++ b/tests/opentracer/test_utils.py @@ -5,6 +5,7 @@ get_context_provider_for_scope_manager, ) + class TestOpentracerUtils(object): def test_get_context_provider_for_scope_manager_thread(self): scope_manager = ThreadLocalScopeManager() diff --git a/tests/opentracer/utils.py b/tests/opentracer/utils.py index f43ffb0b4aa..884f2406664 100644 --- a/tests/opentracer/utils.py +++ b/tests/opentracer/utils.py @@ -1,58 +1,4 @@ -import pytest - -from ddtrace.opentracer import Tracer, set_global_tracer - -from tests.test_tracer import get_dummy_tracer - - -@pytest.fixture() -def ot_tracer_factory(): - """Fixture which returns an opentracer ready to use for testing.""" - - def make_ot_tracer( - service_name="my_svc", config=None, scope_manager=None, context_provider=None - ): - config = config or {} - tracer = Tracer( - service_name=service_name, config=config, scope_manager=scope_manager - ) - - # similar to how we test the ddtracer, use a dummy tracer - dd_tracer = get_dummy_tracer() - if context_provider: - dd_tracer.configure(context_provider=context_provider) - - # attach the dummy tracer to the opentracer - tracer._dd_tracer = dd_tracer - return tracer - - return make_ot_tracer - - -@pytest.fixture() -def ot_tracer(ot_tracer_factory): - """Fixture for a default 
opentracer.""" - return ot_tracer_factory() - - -@pytest.fixture() -def global_tracer(ot_tracer): - """A function similar to one OpenTracing users would write to initialize - their OpenTracing tracer. - """ - set_global_tracer(ot_tracer) - - return ot_tracer - - -@pytest.fixture() -def writer(ot_tracer): - return ot_tracer._dd_tracer.writer - - -@pytest.fixture() -def dd_tracer(ot_tracer): - return ot_tracer._dd_tracer +from ddtrace.opentracer import Tracer def init_tracer(service_name, dd_tracer, scope_manager=None): diff --git a/tests/subprocesstest.py b/tests/subprocesstest.py new file mode 100644 index 00000000000..bae0e4037aa --- /dev/null +++ b/tests/subprocesstest.py @@ -0,0 +1,127 @@ +""" +subprocesstest enables unittest test cases and suites to be run in separate +python interpreter instances. + +A base class SubprocessTestCase is provided that, when extended, will run test +cases marked with @run_in_subprocess in a separate python interpreter. +""" +import os +import subprocess +import sys +import unittest + + +SUBPROC_TEST_ATTR = '_subproc_test' +SUBPROC_ENV_VAR = 'SUBPROCESS_TEST' + + +def run_in_subprocess(obj): + """ + Marks a test case that is to be run in its own 'clean' interpreter instance. + + When applied to a TestCase class, each method will be run in a separate + interpreter instance. + + Usage on a class:: + + from tests.subprocesstest import SubprocessTestCase, run_in_subprocess + + @run_in_subprocess + class PatchTests(SubprocessTestCase): + # will be run in new interpreter + def test_patch_before_import(self): + patch() + import module + + # will be run in new interpreter as well + def test_patch_after_import(self): + import module + patch() + + + Usage on a test method:: + + class OtherTests(SubprocessTestCase): + @run_in_subprocess + def test_case(self): + pass + + + :param obj: method or class to run in a separate python interpreter. 
+ :return: + """ + setattr(obj, SUBPROC_TEST_ATTR, True) + return obj + + +class SubprocessTestCase(unittest.TestCase): + def _full_method_name(self): + test = getattr(self, self._testMethodName) + # DEV: we have to use the internal self reference of the bound test + # method to pull out the class and module since using a mix of `self` + # and the test attributes will result in inconsistencies when the test + # method is defined on another class. + # A concrete case of this is a parent and child TestCase where the child + # doesn't override a parent test method. The full_method_name we want + # is that of the child test method (even though it exists on the parent) + modpath = test.__self__.__class__.__module__ + clsname = test.__self__.__class__.__name__ + testname = test.__name__ + testcase_name = '{}.{}.{}'.format(modpath, clsname, testname) + return testcase_name + + def _run_test_in_subprocess(self, result): + full_testcase_name = self._full_method_name() + + # copy the environment and include the special subprocess environment + # variable for the subprocess to detect + sp_test_env = os.environ.copy() + sp_test_env[SUBPROC_ENV_VAR] = 'True' + sp_test_cmd = ['python', '-m', 'unittest', full_testcase_name] + sp = subprocess.Popen( + sp_test_cmd, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + env=sp_test_env, + ) + _, stderr = sp.communicate() + + if sp.returncode: + try: + cmdf = ' '.join(sp_test_cmd) + raise Exception('Subprocess Test "{}" Failed'.format(cmdf)) + except Exception: + exc_info = sys.exc_info() + sys.stderr.write(stderr) + result.addFailure(self, exc_info) + else: + result.addSuccess(self) + + def _in_subprocess(self): + """Determines if the test is being run in a subprocess. + + This is done by checking for an environment variable that we call the + subprocess test with. 
+ + :return: whether the test is a subprocess test + """ + return os.getenv(SUBPROC_ENV_VAR, None) is not None + + def _is_subprocess_test(self): + if hasattr(self, SUBPROC_TEST_ATTR): + return True + + test = getattr(self, self._testMethodName) + if hasattr(test, SUBPROC_TEST_ATTR): + return True + + return False + + def run(self, result=None): + if not self._is_subprocess_test(): + return super(SubprocessTestCase, self).run(result=result) + + if self._in_subprocess(): + return super(SubprocessTestCase, self).run(result=result) + else: + self._run_test_in_subprocess(result) diff --git a/tests/test_api.py b/tests/test_api.py index 327585183cc..ee71316c57a 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -8,6 +8,7 @@ from ddtrace.api import _parse_response_json, API from ddtrace.compat import iteritems, httplib + class ResponseMock: def __init__(self, content): self.content = content @@ -15,6 +16,7 @@ def __init__(self, content): def read(self): return self.content + class APITests(TestCase): def setUp(self): @@ -30,28 +32,38 @@ def tearDown(self): def test_parse_response_json(self, log): tracer = get_dummy_tracer() tracer.debug_logging = True - test_cases = {'OK': {'js': None, 'log': "please make sure trace-agent is up to date"}, - 'OK\n': {'js': None, 'log': "please make sure trace-agent is up to date"}, - 'error:unsupported-endpoint': {'js': None, 'log': "unable to load JSON 'error:unsupported-endpoint'"}, - 42: {'js': None, 'log': "unable to load JSON '42'"}, # int as key to trigger TypeError - '{}': {'js': {}}, - '[]': {'js': []}, - '{"rate_by_service": {"service:,env:":0.5, "service:mcnulty,env:test":0.9, "service:postgres,env:test":0.6}}': - {'js': {"rate_by_service": - {"service:,env:":0.5, - "service:mcnulty,env:test":0.9, - "service:postgres,env:test":0.6}}}, - ' [4,2,1] ': {'js': [4,2,1]}} - - for k,v in iteritems(test_cases): + + test_cases = { + 'OK': {'js': None, 'log': "please make sure trace-agent is up to date"}, + 'OK\n': {'js': None, 
'log': "please make sure trace-agent is up to date"}, + 'error:unsupported-endpoint': {'js': None, 'log': "unable to load JSON 'error:unsupported-endpoint'"}, + 42: {'js': None, 'log': "unable to load JSON '42'"}, # int as key to trigger TypeError + '{}': {'js': {}}, + '[]': {'js': []}, + '{"rate_by_service": {"service:,env:":0.5, "service:mcnulty,env:test":0.9, "service:postgres,env:test":0.6}}': { # noqa + 'js': { + 'rate_by_service': { + 'service:,env:': 0.5, + 'service:mcnulty,env:test': 0.9, + 'service:postgres,env:test': 0.6, + }, + }, + }, + ' [4,2,1] ': {'js': [4, 2, 1]}, + } + + for k, v in iteritems(test_cases): r = ResponseMock(k) - js =_parse_response_json(r) + js = _parse_response_json(r) eq_(v['js'], js) if 'log' in v: - ok_(1<=len(log.call_args_list), "not enough elements in call_args_list: %s" % log.call_args_list) + ok_( + 1 <= len(log.call_args_list), + 'not enough elements in call_args_list: {}'.format(log.call_args_list), + ) print(log.call_args_list) - l = log.call_args_list[-1][0][0] - ok_(v['log'] in l, "unable to find %s in %s" % (v['log'], l)) + args = log.call_args_list[-1][0][0] + ok_(v['log'] in args, 'unable to find {} in {}'.format(v['log'], args)) @mock.patch('ddtrace.compat.httplib.HTTPConnection') def test_put_connection_close(self, HTTPConnection): diff --git a/tests/test_compat.py b/tests/test_compat.py index 4510f0f758f..d174028802c 100644 --- a/tests/test_compat.py +++ b/tests/test_compat.py @@ -124,7 +124,7 @@ def test_reraise(self): with assert_raises(Exception) as ex: try: raise Exception('Ouch!') - except Exception as e: + except Exception: # original exception we want to re-raise (typ, val, tb) = sys.exc_info() try: diff --git a/tests/test_context.py b/tests/test_context.py index c4c022830b4..c9177f3df91 100644 --- a/tests/test_context.py +++ b/tests/test_context.py @@ -272,7 +272,6 @@ def test_partial_flush_remaining(self): set([span.name for span in ctx._trace]), ) - def test_finished(self): # a Context is finished if all 
spans inside are finished ctx = Context() diff --git a/tests/test_filters.py b/tests/test_filters.py index 80435fb79af..162d0c19087 100644 --- a/tests/test_filters.py +++ b/tests/test_filters.py @@ -4,6 +4,7 @@ from ddtrace.span import Span from ddtrace.ext.http import URL + class FilterRequestOnUrlTests(TestCase): def test_is_match(self): span = Span(name='Name', tracer=None) diff --git a/tests/test_global_config.py b/tests/test_global_config.py index f5e32b0306d..8bc62a75117 100644 --- a/tests/test_global_config.py +++ b/tests/test_global_config.py @@ -4,7 +4,7 @@ from nose.tools import eq_, ok_, assert_raises from ddtrace import config as global_config -from ddtrace.settings import Config, ConfigException +from ddtrace.settings import Config from .test_tracer import get_dummy_tracer @@ -40,14 +40,14 @@ def test_settings_copy(self): ok_(self.config.requests['distributed_tracing'] is True) ok_(self.config.requests['experimental']['request_enqueuing'] is True) - def test_missing_integration(self): + def test_missing_integration_key(self): # ensure a meaningful exception is raised when an integration # that is not available is retrieved in the configuration # object - with assert_raises(ConfigException) as e: + with assert_raises(KeyError) as e: self.config.new_integration['some_key'] - ok_(isinstance(e.exception, ConfigException)) + ok_(isinstance(e.exception, KeyError)) def test_global_configuration(self): # ensure a global configuration is available in the `ddtrace` module @@ -226,9 +226,8 @@ def test_settings_no_hook(self): # Emit the span # DEV: This is the test, to ensure no exceptions are raised self.config.web.hooks._emit('request', span) - on_web_request.assert_called() - def test_settings_no_hook(self): + def test_settings_no_span(self): """ When calling `Hooks._emit()` When no span is provided diff --git a/tests/test_helpers.py b/tests/test_helpers.py index 7ad6a85a083..81b01439e34 100644 --- a/tests/test_helpers.py +++ b/tests/test_helpers.py @@ -6,6 
+6,7 @@ from .util import override_global_tracer from .test_tracer import get_dummy_tracer + class HelpersTestCase(TestCase): """Test suite for ``ddtrace`` helpers""" def setUp(self): diff --git a/tests/test_integration.py b/tests/test_integration.py index 6025fab8bad..5f6b049a2fc 100644 --- a/tests/test_integration.py +++ b/tests/test_integration.py @@ -48,8 +48,6 @@ def _put(self, endpoint, data, count=0): os.environ.get('TEST_DATADOG_INTEGRATION', False), 'You should have a running trace agent and set TEST_DATADOG_INTEGRATION=1 env variable' ) - - class TestWorkers(TestCase): """ Ensures that a workers interacts correctly with the main thread. These are part @@ -138,7 +136,7 @@ def test_worker_single_trace_multiple_spans(self): # make a single send() if a single trace with multiple spans is created before the flush tracer = self.tracer parent = tracer.trace('client.testing') - child = tracer.trace('client.testing').finish() + tracer.trace('client.testing').finish() parent.finish() # one send is expected @@ -215,6 +213,7 @@ def test_worker_filter_request(self): eq_(len(payload), 1) eq_(payload[0][0]['name'], 'testing.nonfilteredurl') + @skipUnless( os.environ.get('TEST_DATADOG_INTEGRATION', False), 'You should have a running trace agent and set TEST_DATADOG_INTEGRATION=1 env variable' @@ -242,7 +241,7 @@ def test_send_presampler_headers(self, mocked_http): traces = [trace] # make a call and retrieve the `conn` Mock object - response = self.api_msgpack.send_traces(traces) + self.api_msgpack.send_traces(traces) request_call = mocked_http.return_value.request eq_(request_call.call_count, 1) @@ -272,7 +271,7 @@ def test_send_presampler_headers_not_in_services(self, mocked_http): }] # make a call and retrieve the `conn` Mock object - response = self.api_msgpack.send_services(services) + self.api_msgpack.send_services(services) request_call = mocked_http.return_value.request eq_(request_call.call_count, 1) @@ -430,6 +429,7 @@ def 
test_send_service_called_multiple_times(self): ok_(response) eq_(response.status, 200) + @skipUnless( os.environ.get('TEST_DATADOG_INTEGRATION', False), 'You should have a running trace agent and set TEST_DATADOG_INTEGRATION=1 env variable' @@ -473,6 +473,7 @@ def test_downgrade_api(self): eq_(response.status, 200) ok_(isinstance(api._encoder, JSONEncoder)) + @skipUnless( os.environ.get('TEST_DATADOG_INTEGRATION', False), 'You should have a running trace agent and set TEST_DATADOG_INTEGRATION=1 env variable' @@ -511,6 +512,7 @@ def test_send_single_trace(self): ok_(response) eq_(response.status, 200) + @skipUnless( os.environ.get('TEST_DATADOG_INTEGRATION', False), 'You should have a running trace agent and set TEST_DATADOG_INTEGRATION=1 env variable' @@ -521,7 +523,7 @@ class TestConfigure(TestCase): previous overrides have been kept. """ def test_configure_keeps_api_hostname_and_port(self): - tracer = Tracer() # use real tracer with real api + tracer = Tracer() # use real tracer with real api eq_('localhost', tracer.writer.api.hostname) eq_(8126, tracer.writer.api.port) tracer.configure(hostname='127.0.0.1', port=8127) diff --git a/tests/test_sampler.py b/tests/test_sampler.py index ac7a1a0e3ec..d0130207dad 100644 --- a/tests/test_sampler.py +++ b/tests/test_sampler.py @@ -4,10 +4,9 @@ import random from ddtrace.span import Span -from ddtrace.sampler import RateSampler, AllSampler, RateByServiceSampler, _key, _default_key +from ddtrace.sampler import RateSampler, AllSampler, _key, _default_key from ddtrace.compat import iteritems from tests.test_tracer import get_dummy_tracer -from .util import patch_time from ddtrace.constants import SAMPLING_PRIORITY_KEY, SAMPLE_RATE_METRIC_KEY @@ -55,7 +54,10 @@ def test_deterministic_behavior(self): sampled = (1 == len(samples)) for j in range(10): other_span = Span(tracer, i, trace_id=span.trace_id) - assert sampled == tracer.sampler.sample(other_span), "sampling should give the same result for a given trace_id" + assert ( + 
sampled == tracer.sampler.sample(other_span) + ), 'sampling should give the same result for a given trace_id' + class RateByServiceSamplerTest(unittest.TestCase): def test_default_key(self): @@ -96,7 +98,9 @@ def test_sample_rate_deviation(self): if sample.get_metric(SAMPLING_PRIORITY_KEY) > 0: samples_with_high_priority += 1 else: - assert 0 == sample.get_metric(SAMPLING_PRIORITY_KEY), "when priority sampling is on, priority should be 0 when trace is to be dropped" + assert ( + 0 == sample.get_metric(SAMPLING_PRIORITY_KEY) + ), 'when priority sampling is on, priority should be 0 when trace is to be dropped' # We must have at least 1 sample, check that it has its sample rate properly assigned assert samples[0].get_metric(SAMPLE_RATE_METRIC_KEY) is None @@ -107,9 +111,20 @@ def test_sample_rate_deviation(self): def test_set_sample_rate_by_service(self): cases = [ - {"service:,env:":1}, - {"service:,env:":1, "service:mcnulty,env:dev":0.33, "service:postgres,env:dev":0.7}, - {"service:,env:":1, "service:mcnulty,env:dev": 0.25, "service:postgres,env:dev": 0.5, "service:redis,env:prod": 0.75} + { + 'service:,env:': 1, + }, + { + 'service:,env:': 1, + 'service:mcnulty,env:dev': 0.33, + 'service:postgres,env:dev': 0.7, + }, + { + 'service:,env:': 1, + 'service:mcnulty,env:dev': 0.25, + 'service:postgres,env:dev': 0.5, + 'service:redis,env:prod': 0.75, + }, ] tracer = get_dummy_tracer() @@ -118,7 +133,7 @@ def test_set_sample_rate_by_service(self): for case in cases: priority_sampler.set_sample_rate_by_service(case) rates = {} - for k,v in iteritems(priority_sampler._by_service_samplers): + for k, v in iteritems(priority_sampler._by_service_samplers): rates[k] = v.sample_rate assert case == rates, "%s != %s" % (case, rates) # It's important to also test in reverse mode for we want to make sure key deletion @@ -127,6 +142,6 @@ def test_set_sample_rate_by_service(self): for case in cases: priority_sampler.set_sample_rate_by_service(case) rates = {} - for k,v in 
iteritems(priority_sampler._by_service_samplers): + for k, v in iteritems(priority_sampler._by_service_samplers): rates[k] = v.sample_rate assert case == rates, "%s != %s" % (case, rates) diff --git a/tests/test_span.py b/tests/test_span.py index 8a27762b5d1..6e4a3cae761 100644 --- a/tests/test_span.py +++ b/tests/test_span.py @@ -9,57 +9,60 @@ def test_ids(): - s = Span(tracer=None, name="span.test") + s = Span(tracer=None, name='span.test') assert s.trace_id assert s.span_id assert not s.parent_id - s2 = Span(tracer=None, name="t", trace_id=1, span_id=2, parent_id=1) + s2 = Span(tracer=None, name='t', trace_id=1, span_id=2, parent_id=1) eq_(s2.trace_id, 1) eq_(s2.span_id, 2) eq_(s2.parent_id, 1) + def test_tags(): - s = Span(tracer=None, name="test.span") - s.set_tag("a", "a") - s.set_tag("b", 1) - s.set_tag("c", "1") + s = Span(tracer=None, name='test.span') + s.set_tag('a', 'a') + s.set_tag('b', 1) + s.set_tag('c', '1') d = s.to_dict() expected = { - "a" : "a", - "b" : "1", - "c" : "1", + 'a': 'a', + 'b': '1', + 'c': '1', } - eq_(d["meta"], expected) + eq_(d['meta'], expected) + def test_set_valid_metrics(): - s = Span(tracer=None, name="test.span") - s.set_metric("a", 0) - s.set_metric("b", -12) - s.set_metric("c", 12.134) - s.set_metric("d", 1231543543265475686787869123) - s.set_metric("e", "12.34") + s = Span(tracer=None, name='test.span') + s.set_metric('a', 0) + s.set_metric('b', -12) + s.set_metric('c', 12.134) + s.set_metric('d', 1231543543265475686787869123) + s.set_metric('e', '12.34') d = s.to_dict() expected = { - "a": 0, - "b": -12, - "c": 12.134, - "d": 1231543543265475686787869123, - "e": 12.34, + 'a': 0, + 'b': -12, + 'c': 12.134, + 'd': 1231543543265475686787869123, + 'e': 12.34, } - eq_(d["metrics"], expected) + eq_(d['metrics'], expected) + def test_set_invalid_metric(): - s = Span(tracer=None, name="test.span") + s = Span(tracer=None, name='test.span') invalid_metrics = [ None, {}, [], s, - "quarante-douze", - float("nan"), - float("inf"), + 
'quarante-douze', + float('nan'), + float('inf'), 1j ] @@ -68,15 +71,17 @@ def test_set_invalid_metric(): s.set_metric(k, m) eq_(s.get_metric(k), None) + def test_set_numpy_metric(): try: import numpy as np except ImportError: - raise SkipTest("numpy not installed") - s = Span(tracer=None, name="test.span") - s.set_metric("a", np.int64(1)) - eq_(s.get_metric("a"), 1) - eq_(type(s.get_metric("a")), float) + raise SkipTest('numpy not installed') + s = Span(tracer=None, name='test.span') + s.set_metric('a', np.int64(1)) + eq_(s.get_metric('a'), 1) + eq_(type(s.get_metric('a')), float) + def test_tags_not_string(): # ensure we can cast as strings @@ -84,14 +89,15 @@ class Foo(object): def __repr__(self): 1 / 0 - s = Span(tracer=None, name="test.span") - s.set_tag("a", Foo()) + s = Span(tracer=None, name='test.span') + s.set_tag('a', Foo()) + def test_finish(): # ensure finish will record a span dt = DummyTracer() ctx = Context() - s = Span(dt, "test.span", context=ctx) + s = Span(dt, 'test.span', context=ctx) ctx.add_span(s) assert s.duration is None @@ -99,15 +105,16 @@ def test_finish(): with s as s1: assert s is s1 time.sleep(sleep) - assert s.duration >= sleep, "%s < %s" % (s.duration, sleep) + assert s.duration >= sleep, '%s < %s' % (s.duration, sleep) eq_(1, dt.spans_recorded) def test_finish_no_tracer(): # ensure finish works with no tracer without raising exceptions - s = Span(tracer=None, name="test.span") + s = Span(tracer=None, name='test.span') s.finish() + def test_finish_called_multiple_times(): # we should only record a span the first time finish is called on it dt = DummyTracer() @@ -127,34 +134,37 @@ def test_finish_set_span_duration(): s.finish() assert s.duration == 1337.0 + def test_traceback_with_error(): - s = Span(None, "test.span") + s = Span(None, 'test.span') try: 1 / 0 except ZeroDivisionError: s.set_traceback() else: - assert 0, "should have failed" + assert 0, 'should have failed' assert s.error assert 'by zero' in 
s.get_tag(errors.ERROR_MSG) - assert "ZeroDivisionError" in s.get_tag(errors.ERROR_TYPE) + assert 'ZeroDivisionError' in s.get_tag(errors.ERROR_TYPE) + def test_traceback_without_error(): - s = Span(None, "test.span") + s = Span(None, 'test.span') s.set_traceback() assert not s.error assert not s.get_tag(errors.ERROR_MSG) assert not s.get_tag(errors.ERROR_TYPE) - assert "in test_traceback_without_error" in s.get_tag(errors.ERROR_STACK) + assert 'in test_traceback_without_error' in s.get_tag(errors.ERROR_STACK) + def test_ctx_mgr(): dt = DummyTracer() - s = Span(dt, "bar") + s = Span(dt, 'bar') assert not s.duration assert not s.error - e = Exception("boo") + e = Exception('boo') try: with s: time.sleep(0.01) @@ -163,75 +173,62 @@ def test_ctx_mgr(): eq_(out, e) assert s.duration > 0, s.duration assert s.error - eq_(s.get_tag(errors.ERROR_MSG), "boo") - assert "Exception" in s.get_tag(errors.ERROR_TYPE) + eq_(s.get_tag(errors.ERROR_MSG), 'boo') + assert 'Exception' in s.get_tag(errors.ERROR_TYPE) assert s.get_tag(errors.ERROR_STACK) else: - assert 0, "should have failed" + assert 0, 'should have failed' + def test_span_to_dict(): - s = Span(tracer=None, name="test.span", service="s", resource="r") - s.span_type = "foo" - s.set_tag("a", "1") - s.set_meta("b", "2") + s = Span(tracer=None, name='test.span', service='s', resource='r') + s.span_type = 'foo' + s.set_tag('a', '1') + s.set_meta('b', '2') s.finish() d = s.to_dict() assert d - eq_(d["span_id"], s.span_id) - eq_(d["trace_id"], s.trace_id) - eq_(d["parent_id"], s.parent_id) - eq_(d["meta"], {"a": "1", "b": "2"}) - eq_(d["type"], "foo") - eq_(d["error"], 0) - eq_(type(d["error"]), int) + eq_(d['span_id'], s.span_id) + eq_(d['trace_id'], s.trace_id) + eq_(d['parent_id'], s.parent_id) + eq_(d['meta'], {'a': '1', 'b': '2'}) + eq_(d['type'], 'foo') + eq_(d['error'], 0) + eq_(type(d['error']), int) + def test_span_to_dict_sub(): - parent = Span(tracer=None, name="test.span", service="s", resource="r") - s = 
Span(tracer=None, name="test.span", service="s", resource="r") + parent = Span(tracer=None, name='test.span', service='s', resource='r') + s = Span(tracer=None, name='test.span', service='s', resource='r') s._parent = parent - s.span_type = "foo" - s.set_tag("a", "1") - s.set_meta("b", "2") + s.span_type = 'foo' + s.set_tag('a', '1') + s.set_meta('b', '2') s.finish() d = s.to_dict() assert d - eq_(d["span_id"], s.span_id) - eq_(d["trace_id"], s.trace_id) - eq_(d["parent_id"], s.parent_id) - eq_(d["meta"], {"a": "1", "b": "2"}) - eq_(d["type"], "foo") - eq_(d["error"], 0) - eq_(type(d["error"]), int) + eq_(d['span_id'], s.span_id) + eq_(d['trace_id'], s.trace_id) + eq_(d['parent_id'], s.parent_id) + eq_(d['meta'], {'a': '1', 'b': '2'}) + eq_(d['type'], 'foo') + eq_(d['error'], 0) + eq_(type(d['error']), int) + def test_span_boolean_err(): - s = Span(tracer=None, name="foo.bar", service="s", resource="r") + s = Span(tracer=None, name='foo.bar', service='s', resource='r') s.error = True s.finish() d = s.to_dict() assert d - eq_(d["error"], 1) - eq_(type(d["error"]), int) - -def test_span_to_dict(): - s = Span(tracer=None, name="test.span", service="s", resource="r") - s.span_type = "foo" - s.set_tag("a", "1") - s.set_meta("b", "2") - s.finish() + eq_(d['error'], 1) + eq_(type(d['error']), int) - d = s.to_dict() - assert d - eq_(d["span_id"], s.span_id) - eq_(d["trace_id"], s.trace_id) - eq_(d["parent_id"], s.parent_id) - eq_(d["meta"], {"a": "1", "b": "2"}) - eq_(d["type"], "foo") - eq_(d["error"], 0) - eq_(type(d["error"]), int) class DummyTracer(object): def __init__(self): diff --git a/tests/test_tracer.py b/tests/test_tracer.py index 8bebdbc6bba..81618c0f245 100644 --- a/tests/test_tracer.py +++ b/tests/test_tracer.py @@ -10,7 +10,6 @@ from ddtrace.context import Context from .base import BaseTracerTestCase -from .utils.span import TestSpan from .utils.tracer import DummyTracer from .utils.tracer import DummyWriter # noqa @@ -283,7 +282,7 @@ def 
test_unserializable_span_with_finish(self): # a weird case where manually calling finish with an unserializable # span was causing an loop of serialization. with self.trace('parent') as span: - span.metrics['as'] = np.int64(1) # circumvent the data checks + span.metrics['as'] = np.int64(1) # circumvent the data checks span.finish() def test_tracer_disabled_mem_leak(self): diff --git a/tests/test_utils.py b/tests/test_utils.py index 9d06ee1c5b7..959c8acc7da 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -2,38 +2,36 @@ import unittest import warnings -from nose.tools import eq_, ok_ - from ddtrace.utils.deprecation import deprecation, deprecated, format_message -from ddtrace.utils.formats import asbool, get_env +from ddtrace.utils.formats import asbool, get_env, flatten_dict -class TestUtilities(unittest.TestCase): +class TestUtils(unittest.TestCase): def test_asbool(self): # ensure the value is properly cast - eq_(asbool("True"), True) - eq_(asbool("true"), True) - eq_(asbool("1"), True) - eq_(asbool("False"), False) - eq_(asbool("false"), False) - eq_(asbool(None), False) - eq_(asbool(""), False) - eq_(asbool(True), True) - eq_(asbool(False), False) + self.assertTrue(asbool('True')) + self.assertTrue(asbool('true')) + self.assertTrue(asbool('1')) + self.assertFalse(asbool('False')) + self.assertFalse(asbool('false')) + self.assertFalse(asbool(None)) + self.assertFalse(asbool('')) + self.assertTrue(asbool(True)) + self.assertFalse(asbool(False)) def test_get_env(self): # ensure `get_env` returns a default value if environment variables # are not set value = get_env('django', 'distributed_tracing') - ok_(value is None) + self.assertIsNone(value) value = get_env('django', 'distributed_tracing', False) - ok_(value is False) + self.assertFalse(value) def test_get_env_found(self): # ensure `get_env` returns a value if the environment variable is set os.environ['DD_REQUESTS_DISTRIBUTED_TRACING'] = '1' value = get_env('requests', 'distributed_tracing') - 
eq_(value, '1') + self.assertEqual(value, '1') def test_get_env_found_legacy(self): # ensure `get_env` returns a value if legacy environment variables @@ -42,17 +40,17 @@ def test_get_env_found_legacy(self): warnings.simplefilter('always') os.environ['DATADOG_REQUESTS_DISTRIBUTED_TRACING'] = '1' value = get_env('requests', 'distributed_tracing') - eq_(value, '1') - ok_(len(w) == 1) - ok_(issubclass(w[-1].category, DeprecationWarning)) - ok_('Use `DD_` prefix instead' in str(w[-1].message)) + self.assertEqual(value, '1') + self.assertEqual(len(w), 1) + self.assertTrue(issubclass(w[-1].category, DeprecationWarning)) + self.assertTrue('Use `DD_` prefix instead' in str(w[-1].message)) def test_get_env_key_priority(self): # ensure `get_env` use `DD_` with highest priority os.environ['DD_REQUESTS_DISTRIBUTED_TRACING'] = 'highest' os.environ['DATADOG_REQUESTS_DISTRIBUTED_TRACING'] = 'lowest' value = get_env('requests', 'distributed_tracing') - eq_(value, 'highest') + self.assertEqual(value, 'highest') def test_deprecation_formatter(self): # ensure the formatter returns the proper message @@ -61,8 +59,11 @@ def test_deprecation_formatter(self): 'use something else instead', '1.0.0', ) - expected = "'deprecated_function' is deprecated and will be remove in future versions (1.0.0). use something else instead" - eq_(msg, expected) + expected = ( + '\'deprecated_function\' is deprecated and will be remove in future versions (1.0.0). 
' + 'use something else instead' + ) + self.assertEqual(msg, expected) def test_deprecation(self): # ensure `deprecation` properly raise a DeprecationWarning @@ -73,9 +74,9 @@ def test_deprecation(self): message='message', version='1.0.0' ) - ok_(len(w) == 1) - ok_(issubclass(w[-1].category, DeprecationWarning)) - ok_('message' in str(w[-1].message)) + self.assertEqual(len(w), 1) + self.assertTrue(issubclass(w[-1].category, DeprecationWarning)) + self.assertIn('message', str(w[-1].message)) def test_deprecated_decorator(self): # ensure `deprecated` decorator properly raise a DeprecationWarning @@ -86,6 +87,12 @@ def fxn(): with warnings.catch_warnings(record=True) as w: warnings.simplefilter('always') fxn() - ok_(len(w) == 1) - ok_(issubclass(w[-1].category, DeprecationWarning)) - ok_('decorator' in str(w[-1].message)) + self.assertEqual(len(w), 1) + self.assertTrue(issubclass(w[-1].category, DeprecationWarning)) + self.assertIn('decorator', str(w[-1].message)) + + def test_flatten_dict(self): + """ ensure that flattening of a nested dict results in a normalized, 1-level dict """ + d = dict(A=1, B=2, C=dict(A=3, B=4, C=dict(A=5, B=6))) + e = dict(A=1, B=2, C_A=3, C_B=4, C_C_A=5, C_C_B=6) + self.assertEquals(flatten_dict(d, sep='_'), e) diff --git a/tests/test_writer.py b/tests/test_writer.py index d00ca7177e4..3e4c8912710 100644 --- a/tests/test_writer.py +++ b/tests/test_writer.py @@ -3,6 +3,7 @@ from ddtrace.span import Span from ddtrace.writer import AsyncWorker, Q + class RemoveAllFilter(): def __init__(self): self.filtered_traces = 0 @@ -11,6 +12,7 @@ def process_trace(self, trace): self.filtered_traces += 1 return None + class KeepAllFilter(): def __init__(self): self.filtered_traces = 0 @@ -19,6 +21,7 @@ def process_trace(self, trace): self.filtered_traces += 1 return trace + class AddTagFilter(): def __init__(self, tag_name): self.tag_name = tag_name @@ -30,6 +33,7 @@ def process_trace(self, trace): span.set_tag(self.tag_name, "A value") return trace + 
class DummmyAPI(): def __init__(self): self.traces = [] @@ -38,15 +42,20 @@ def send_traces(self, traces): for trace in traces: self.traces.append(trace) + N_TRACES = 11 + class AsyncWorkerTests(TestCase): def setUp(self): self.api = DummmyAPI() self.traces = Q() self.services = Q() for i in range(N_TRACES): - self.traces.add([Span(tracer=None, name="name", trace_id=i, span_id=j, parent_id=j-1 or None) for j in range(7)]) + self.traces.add([ + Span(tracer=None, name="name", trace_id=i, span_id=j, parent_id=j - 1 or None) + for j in range(7) + ]) def test_filters_keep_all(self): filtr = KeepAllFilter() diff --git a/tests/unit/test_settings.py b/tests/unit/test_settings.py index 2e9b7955272..90ae0addce6 100644 --- a/tests/unit/test_settings.py +++ b/tests/unit/test_settings.py @@ -61,6 +61,33 @@ def test_is_a_dict(self): integration_config = IntegrationConfig(Config()) assert isinstance(integration_config, dict) + def test_allow_item_access(self): + config = IntegrationConfig(Config()) + config['setting'] = 'value' + + # Can be accessed both as item and attr accessor + assert config.setting == 'value' + assert config['setting'] == 'value' + + def test_allow_attr_access(self): + config = IntegrationConfig(Config()) + config.setting = 'value' + + # Can be accessed both as item and attr accessor + assert config.setting == 'value' + assert config['setting'] == 'value' + + def test_allow_both_access(self): + config = IntegrationConfig(Config()) + + config.setting = 'value' + assert config['setting'] == 'value' + assert config.setting == 'value' + + config['setting'] = 'new-value' + assert config.setting == 'new-value' + assert config['setting'] == 'new-value' + def test_allow_configuring_http(self): global_config = Config() integration_config = IntegrationConfig(global_config) diff --git a/tests/util.py b/tests/util.py index 0fb7bc99f49..d57170eddd3 100644 --- a/tests/util.py +++ b/tests/util.py @@ -41,13 +41,17 @@ def patch_time(): def assert_dict_issuperset(a, b): - 
ok_(set(a.items()).issuperset(set(b.items())), - msg="{a} is not a superset of {b}".format(a=a, b=b)) + ok_( + set(a.items()).issuperset(set(b.items())), + msg="{a} is not a superset of {b}".format(a=a, b=b), + ) def assert_list_issuperset(a, b): - ok_(set(a).issuperset(set(b)), - msg="{a} is not a superset of {b}".format(a=a, b=b)) + ok_( + set(a).issuperset(set(b)), + msg="{a} is not a superset of {b}".format(a=a, b=b), + ) @contextmanager diff --git a/tests/utils/span.py b/tests/utils/span.py index c5a20900e5b..e93ec044f72 100644 --- a/tests/utils/span.py +++ b/tests/utils/span.py @@ -164,7 +164,6 @@ def assert_meta(self, meta, exact=False): ) - class TestSpanContainer(object): """ Helper class for a container of Spans. @@ -289,7 +288,6 @@ def find_span(self, *args, **kwargs): return span - class TestSpanNode(TestSpan, TestSpanContainer): """ A :class:`tests.utils.span.TestSpan` which is used as part of a span tree. diff --git a/tests/utils/tracer.py b/tests/utils/tracer.py index a2cc21f9237..0e917c6cf67 100644 --- a/tests/utils/tracer.py +++ b/tests/utils/tracer.py @@ -2,8 +2,6 @@ from ddtrace.tracer import Tracer from ddtrace.writer import AgentWriter -from .span import TestSpan - class DummyWriter(AgentWriter): """DummyWriter is a small fake writer used for tests. 
not thread-safe.""" diff --git a/tests/wait-for-services.py b/tests/wait-for-services.py index a85232e8113..9a0457a4d71 100644 --- a/tests/wait-for-services.py +++ b/tests/wait-for-services.py @@ -24,8 +24,9 @@ def try_until_timeout(exception): """ def wrap(fn): - err = None def wrapper(*args, **kwargs): + err = None + for i in range(100): try: fn() @@ -55,6 +56,7 @@ def check_cassandra(): with Cluster(**CASSANDRA_CONFIG).connect() as conn: conn.execute("SELECT now() FROM system.local") + @try_until_timeout(Exception) def check_mysql(): conn = mysql.connector.connect(**MYSQL_CONFIG) @@ -63,6 +65,7 @@ def check_mysql(): finally: conn.close() + @try_until_timeout(Exception) def check_rediscluster(): test_host = REDISCLUSTER_CONFIG['host'] @@ -74,6 +77,7 @@ def check_rediscluster(): r = rediscluster.StrictRedisCluster(startup_nodes=startup_nodes) r.flushall() + @try_until_timeout(Exception) def check_vertica(): conn = vertica_python.connect(**VERTICA_CONFIG) @@ -82,6 +86,7 @@ def check_vertica(): finally: conn.close() + @try_until_timeout(Exception) def check_rabbitmq(): url = "amqp://{user}:{password}@{host}:{port}//".format(**RABBITMQ_CONFIG) diff --git a/tox.ini b/tox.ini index 67797da38ea..d888d6c3d3f 100644 --- a/tox.ini +++ b/tox.ini @@ -39,7 +39,7 @@ envlist = aiopg_contrib-{py34,py35,py36}-aiopg{012,015} asyncio_contrib-{py34,py35,py36} boto_contrib-{py27,py34}-boto - botocore_contrib-{py27,py34}-botocore + botocore_contrib-{py27,py34,py35,py36}-botocore bottle_contrib{,_autopatch}-{py27,py34,py35,py36}-bottle{11,12}-webtest cassandra_contrib-{py27,py34,py35,py36}-cassandra{35,36,37,38,315} celery_contrib-{py27,py34,py35,py36}-celery{31,40,41,42}-redis{210} @@ -66,6 +66,7 @@ envlist = grpc_contrib-{py27,py34,py35,py36}-grpc httplib_contrib-{py27,py34,py35,py36} jinja2_contrib-{py27,py34,py35,py36}-jinja{27,28,29,210} + molten_contrib-{py36}-molten{070,072} mongoengine_contrib-{py27,py34,py35,py36}-mongoengine{015} msgpack_contrib-{py27,py34}-msgpack{03,04,05} 
mysql_contrib-{py27,py34,py35,py36}-mysqlconnector{21} @@ -142,7 +143,7 @@ deps = boto: boto boto: moto<1.0 botocore: botocore - botocore: moto<1.0 + botocore: moto>=1.0,<2 bottle11: bottle>=0.11,<0.12 bottle12: bottle>=0.12,<0.13 cassandra35: cassandra-driver>=3.5,<3.6 @@ -206,6 +207,8 @@ deps = jinja29: jinja2>=2.9,<2.10 jinja210: jinja2>=2.10,<2.11 memcached: python-memcached + molten070: molten>=0.7.0,<0.7.2 + molten072: molten>=0.7.2,<0.8.0 mongoengine015: mongoengine>=0.15<0.16 msgpack03: msgpack-python>=0.3,<0.4 msgpack04: msgpack-python>=0.4,<0.5 @@ -289,67 +292,66 @@ passenv=TEST_* commands = # run only essential tests related to the tracing client - tracer: nosetests {posargs} --exclude=".*(contrib|integration|commands|opentracer|unit).*" tests + tracer: pytest {posargs} --ignore="tests/contrib" --ignore="tests/integration" --ignore="tests/commands" --ignore="tests/opentracer" --ignore="tests/unit" tests # run only the opentrace tests opentracer: pytest {posargs} tests/opentracer/test_tracer.py tests/opentracer/test_span.py tests/opentracer/test_span_context.py tests/opentracer/test_dd_compatibility.py tests/opentracer/test_utils.py opentracer_asyncio: pytest {posargs} tests/opentracer/test_tracer_asyncio.py opentracer_tornado-tornado{40,41,42,43,44}: pytest {posargs} tests/opentracer/test_tracer_tornado.py opentracer_gevent: pytest {posargs} tests/opentracer/test_tracer_gevent.py # integration tests - integration: nosetests {posargs} tests/test_integration.py + integration: pytest {posargs} tests/test_integration.py # Contribs - aiobotocore_contrib-{py34}: nosetests {posargs} --exclude=".*(test_35).*" tests/contrib/aiobotocore - aiobotocore_contrib-{py35,py36}: nosetests {posargs} tests/contrib/aiobotocore - aiopg_contrib-{py34}: nosetests {posargs} --exclude=".*(test_aiopg_35).*" tests/contrib/aiopg - aiopg_contrib-{py35,py36}: nosetests {posargs} tests/contrib/aiopg - aiohttp_contrib: nosetests {posargs} tests/contrib/aiohttp - asyncio_contrib: 
nosetests {posargs} tests/contrib/asyncio - boto_contrib: nosetests {posargs} tests/contrib/boto - botocore_contrib: nosetests {posargs} tests/contrib/botocore - bottle_contrib: nosetests {posargs} tests/contrib/bottle/test.py - bottle_contrib_autopatch: python tests/ddtrace_run.py nosetests {posargs} tests/contrib/bottle/test_autopatch.py + aiobotocore_contrib-{py34,py35,py36}: pytest {posargs} tests/contrib/aiobotocore + aiopg_contrib-{py34,py35,py36}: pytest {posargs} tests/contrib/aiopg + aiohttp_contrib: pytest {posargs} tests/contrib/aiohttp + asyncio_contrib: pytest {posargs} tests/contrib/asyncio + boto_contrib: pytest {posargs} tests/contrib/boto + botocore_contrib: pytest {posargs} tests/contrib/botocore + bottle_contrib: pytest {posargs} tests/contrib/bottle/test.py + bottle_contrib_autopatch: python tests/ddtrace_run.py pytest {posargs} tests/contrib/bottle/test_autopatch.py cassandra_contrib: nosetests {posargs} tests/contrib/cassandra - celery_contrib: nosetests {posargs} tests/contrib/celery + celery_contrib: pytest {posargs} tests/contrib/celery dbapi_contrib: pytest {posargs} tests/contrib/dbapi django_contrib: python tests/contrib/django/runtests.py {posargs} django_contrib_autopatch: python tests/ddtrace_run.py python tests/contrib/django/runtests.py {posargs} django_drf_contrib: python tests/contrib/djangorestframework/runtests.py {posargs} - elasticsearch_contrib: nosetests {posargs} tests/contrib/elasticsearch - falcon_contrib: nosetests {posargs} tests/contrib/falcon/test_middleware.py tests/contrib/falcon/test_distributed_tracing.py - falcon_contrib_autopatch: python tests/ddtrace_run.py nosetests {posargs} tests/contrib/falcon/test_autopatch.py + elasticsearch_contrib: pytest {posargs} tests/contrib/elasticsearch + falcon_contrib: pytest {posargs} tests/contrib/falcon/test_middleware.py tests/contrib/falcon/test_distributed_tracing.py + falcon_contrib_autopatch: python tests/ddtrace_run.py pytest {posargs} 
tests/contrib/falcon/test_autopatch.py flask_contrib: pytest {posargs} tests/contrib/flask flask_contrib_autopatch: python tests/ddtrace_run.py pytest {posargs} tests/contrib/flask_autopatch - flask_cache_contrib: nosetests {posargs} tests/contrib/flask_cache - futures_contrib: nosetests {posargs} tests/contrib/futures - gevent_contrib: nosetests {posargs} tests/contrib/gevent - grpc_contrib: nosetests {posargs} tests/contrib/grpc - httplib_contrib: nosetests {posargs} tests/contrib/httplib - jinja2_contrib: nosetests {posargs} tests/contrib/jinja2 + flask_cache_contrib: pytest {posargs} tests/contrib/flask_cache + futures_contrib: pytest {posargs} tests/contrib/futures + gevent_contrib: pytest {posargs} tests/contrib/gevent + grpc_contrib: pytest {posargs} tests/contrib/grpc + httplib_contrib: pytest {posargs} tests/contrib/httplib + jinja2_contrib: pytest {posargs} tests/contrib/jinja2 + molten_contrib: pytest {posargs} tests/contrib/molten mongoengine_contrib: nosetests {posargs} tests/contrib/mongoengine - msgpack_contrib: nosetests {posargs} tests/test_encoders.py + msgpack_contrib: pytest {posargs} tests/test_encoders.py mysql_contrib: nosetests {posargs} tests/contrib/mysql mysqldb_contrib: nosetests {posargs} tests/contrib/mysqldb - psycopg_contrib: nosetests {posargs} tests/contrib/psycopg + psycopg_contrib: pytest {posargs} tests/contrib/psycopg pylibmc_contrib: nosetests {posargs} tests/contrib/pylibmc - pylons_contrib: nosetests {posargs} tests/contrib/pylons - pymemcache_contrib: nosetests {posargs} --exclude="test_autopatch.py" tests/contrib/pymemcache/ - pymemcache_contrib_autopatch: python tests/ddtrace_run.py nosetests {posargs} tests/contrib/pymemcache/test_autopatch.py + pylons_contrib: pytest {posargs} tests/contrib/pylons + pymemcache_contrib: pytest {posargs} --ignore="tests/contrib/pymemcache/autopatch" tests/contrib/pymemcache/ + pymemcache_contrib_autopatch: python tests/ddtrace_run.py pytest {posargs} tests/contrib/pymemcache/autopatch/ 
pymongo_contrib: nosetests {posargs} tests/contrib/pymongo - pymysql_contrib: nosetests {posargs} tests/contrib/pymysql + pymysql_contrib: pytest {posargs} tests/contrib/pymysql pyramid_contrib: nosetests {posargs} tests/contrib/pyramid/test_pyramid.py pyramid_contrib_autopatch: python tests/ddtrace_run.py nosetests {posargs} tests/contrib/pyramid/test_pyramid_autopatch.py redis_contrib: nosetests {posargs} tests/contrib/redis rediscluster_contrib: nosetests {posargs} tests/contrib/rediscluster - requests_contrib: nosetests {posargs} tests/contrib/requests + requests_contrib: pytest {posargs} tests/contrib/requests requests_gevent_contrib: nosetests {posargs} tests/contrib/requests_gevent kombu_contrib: nosetests {posargs} tests/contrib/kombu - sqlalchemy_contrib: nosetests {posargs} tests/contrib/sqlalchemy + sqlalchemy_contrib: pytest {posargs} tests/contrib/sqlalchemy sqlite3_contrib: nosetests {posargs} tests/contrib/sqlite3 - tornado_contrib: nosetests {posargs} tests/contrib/tornado + tornado_contrib: pytest {posargs} tests/contrib/tornado vertica_contrib: pytest {posargs} tests/contrib/vertica/ # run subsets of the tests for particular library versions - ddtracerun: nosetests {posargs} tests/commands/test_runner.py - test_utils: nosetests {posargs} tests/contrib/test_utils.py + ddtracerun: pytest {posargs} tests/commands/test_runner.py + test_utils: pytest {posargs} tests/contrib/test_utils.py # Unit tests: pytest based test suite that do not require any additional dependency. unit_tests: pytest {posargs} tests/unit @@ -373,7 +375,7 @@ ignore_outcome=true [testenv:flake8] deps=flake8==3.5.0 -commands=flake8 ddtrace +commands=flake8 . 
basepython=python2 [falcon_autopatch] @@ -609,6 +611,16 @@ setenv = {[bottle_autopatch]setenv} +# DEV: We use `conftest.py` as a local pytest plugin to configure hooks for collection +[pytest] +# Common directories to ignore +addopts = --ignore "tests/utils" --ignore "tests/base" +# DEV: The default is `test_*.py` which will miss `test.py` files +python_files = test*.py + [flake8] max-line-length=120 -exclude=tests +exclude= + .ddtox,.tox, + .git,__pycache__, + .eggs,*.egg