diff --git a/.travis.yml b/.travis.yml index 153d54c1d116c8..334da25976755a 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,4 +1,10 @@ language: python +services: + - memcached + - riak + - mysql + - postgresql + - redis-server python: - "2.6" - "2.7" diff --git a/setup.py b/setup.py index d14f3ece73a50f..b89e59eba7e105 100755 --- a/setup.py +++ b/setup.py @@ -58,6 +58,7 @@ 'nydus', 'mock>=0.8.0', 'redis', + 'riak', 'unittest2', ] diff --git a/src/sentry/app.py b/src/sentry/app.py index 8d6583fa11678b..ac6cf7d28e94f9 100644 --- a/src/sentry/app.py +++ b/src/sentry/app.py @@ -25,4 +25,6 @@ def get_instance(path, options): buffer = get_instance(settings.SENTRY_BUFFER, settings.SENTRY_BUFFER_OPTIONS) quotas = get_instance(settings.SENTRY_QUOTAS, settings.SENTRY_QUOTA_OPTIONS) +nodestore = get_instance( + settings.SENTRY_NODESTORE, settings.SENTRY_NODESTORE_OPTIONS) search = get_instance(settings.SENTRY_SEARCH, settings.SENTRY_SEARCH_OPTIONS) diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index b3ab5dec3fe4fb..3812615d7c9b5e 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -150,6 +150,7 @@ 'kombu.transport.django', 'raven.contrib.django.raven_compat', 'sentry', + 'sentry.nodestore', 'sentry.search', 'sentry.plugins.sentry_interface_types', 'sentry.plugins.sentry_mail', @@ -508,10 +509,11 @@ # Redis connection information (see Nydus documentation) SENTRY_REDIS_OPTIONS = {} -# Buffer backend to use +# Buffer backend SENTRY_BUFFER = 'sentry.buffer.Buffer' SENTRY_BUFFER_OPTIONS = {} +# Quota backend SENTRY_QUOTAS = 'sentry.quotas.Quota' SENTRY_QUOTA_OPTIONS = {} @@ -521,6 +523,10 @@ # The maximum number of events per minute the system should accept. SENTRY_SYSTEM_MAX_EVENTS_PER_MINUTE = 0 +# Node storage backend +SENTRY_NODESTORE = 'sentry.nodestore.django.DjangoNodeStorage' +SENTRY_NODESTORE_OPTIONS = {} + # Search backend SENTRY_SEARCH = 'sentry.search.django.DjangoSearchBackend' SENTRY_SEARCH_OPTIONS = {} diff --git a/src/sentry/db/models/fields/__init__.py b/src/sentry/db/models/fields/__init__.py index ed17cb9bdd93aa..38484294560958 100644 --- a/src/sentry/db/models/fields/__init__.py +++ b/src/sentry/db/models/fields/__init__.py @@ -10,3 +10,4 @@ from .bounded import * # NOQA from .gzippeddict import * # NOQA +from .node import * # NOQA diff --git a/src/sentry/db/models/fields/node.py b/src/sentry/db/models/fields/node.py new file mode 100644 index 00000000000000..56fd80c9f4566a --- /dev/null +++ b/src/sentry/db/models/fields/node.py @@ -0,0 +1,131 @@ +""" +sentry.db.models.fields.node +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +:copyright: (c) 2010-2013 by the Sentry Team, see AUTHORS for more details. +:license: BSD, see LICENSE for more details. 
+""" + +from __future__ import absolute_import + +import collections +import logging +import warnings + +from django.db import models +from django.db.models.signals import post_delete + +from sentry.utils.cache import memoize +from sentry.utils.compat import pickle +from sentry.utils.strings import decompress, compress + +from .gzippeddict import GzippedDictField + +__all__ = ('NodeField',) + +logger = logging.getLogger('sentry.errors') + + +class NodeData(collections.MutableMapping): + def __init__(self, id, data=None): + self.id = id + self._node_data = data + + def __getitem__(self, key): + return self.data[key] + + def __setitem__(self, key, value): + self.data[key] = value + + def __delitem__(self, key): + del self.data[key] + + def __iter__(self): + return iter(self.data) + + def __len__(self): + return len(self.data) + + def __repr__(self): + cls_name = type(self).__name__ + if self._node_data: + return '<%s: id=%s data=%r>' % ( + cls_name, self.id, repr(self._node_data)) + return '<%s: id=%s>' % (cls_name, self.id,) + + @memoize + def data(self): + from sentry import app + + if self._node_data is not None: + return self._node_data + + elif self.id: + warnings.warn('You should populate node data before accessing it.') + return app.nodestore.get(self.id) or {} + + return {} + + def bind_data(self, data): + self._node_data = data + + +class NodeField(GzippedDictField): + """ + Similar to the gzippedictfield except that it stores a reference + to an external node. + """ + __metaclass__ = models.SubfieldBase + + def contribute_to_class(self, cls, name): + super(NodeField, self).contribute_to_class(cls, name) + post_delete.connect( + self.on_delete, + sender=self.model, + weak=False) + + def on_delete(self, instance, **kwargs): + from sentry import app + + value = getattr(instance, self.name) + if not value.id: + return + + app.nodestore.delete(value.id) + + def to_python(self, value): + if isinstance(value, basestring) and value: + try: + value = pickle.loads(decompress(value)) + except Exception, e: + logger.exception(e) + value = {} + elif not value: + value = {} + + if 'node_id' in value: + node_id = value.pop('node_id') + data = None + else: + node_id = None + data = value + + return NodeData(node_id, data) + + def get_prep_value(self, value): + from sentry import app + + if not value and self.null: + # save ourselves some storage + return None + + # TODO(dcramer): we should probably do this more intelligently + # and manually + if not value.id: + value.id = app.nodestore.create(value.data) + else: + app.nodestore.set(value.id, value.data) + + return compress(pickle.dumps({ + 'node_id': value.id + })) diff --git a/src/sentry/db/models/manager.py b/src/sentry/db/models/manager.py index 2fbbc6325b5e7e..3fd69100aaa08f 100644 --- a/src/sentry/db/models/manager.py +++ b/src/sentry/db/models/manager.py @@ -224,3 +224,19 @@ def get_from_cache(self, **kwargs): def create_or_update(self, **kwargs): return create_or_update(self.model, **kwargs) + + def bind_nodes(self, object_list, *node_names): + from sentry import app + + object_node_list = [] + for name in node_names: + object_node_list.extend((getattr(i, name) for i in object_list if getattr(i, name).id)) + + node_ids = [n.id for n in object_node_list] + if not node_ids: + return + + node_results = app.nodestore.get_multi(node_ids) + + for node in object_node_list: + node.bind_data(node_results.get(node.id) or {}) diff --git a/src/sentry/models.py b/src/sentry/models.py index 3e69d6716b9b47..ca835194e90bb1 100644 --- 
a/src/sentry/models.py +++ b/src/sentry/models.py @@ -41,7 +41,7 @@ ) from sentry.db.models import ( Model, GzippedDictField, BoundedIntegerField, BoundedPositiveIntegerField, - update, sane_repr + NodeField, update, sane_repr ) from sentry.manager import ( GroupManager, ProjectManager, MetaManager, InstanceMetaManager, BaseManager, @@ -482,7 +482,6 @@ class EventBase(Model): max_length=MAX_CULPRIT_LENGTH, blank=True, null=True, db_column='view') checksum = models.CharField(max_length=32, db_index=True) - data = GzippedDictField(blank=True, null=True) num_comments = BoundedPositiveIntegerField(default=0, null=True) platform = models.CharField(max_length=64, null=True) @@ -583,6 +582,7 @@ class Group(EventBase): time_spent_count = BoundedIntegerField(default=0) score = BoundedIntegerField(default=0) is_public = models.NullBooleanField(default=False, null=True) + data = GzippedDictField(blank=True, null=True) objects = GroupManager() @@ -726,6 +726,7 @@ class Event(EventBase): time_spent = BoundedIntegerField(null=True) server_name = models.CharField(max_length=128, db_index=True, null=True) site = models.CharField(max_length=128, db_index=True, null=True) + data = NodeField(blank=True, null=True) objects = BaseManager() diff --git a/src/sentry/nodestore/__init__.py b/src/sentry/nodestore/__init__.py new file mode 100644 index 00000000000000..a28d6b91781b34 --- /dev/null +++ b/src/sentry/nodestore/__init__.py @@ -0,0 +1,9 @@ +""" +sentry.nodestore +~~~~~~~~~~~~~~~~ + +:copyright: (c) 2010-2013 by the Sentry Team, see AUTHORS for more details. +:license: BSD, see LICENSE for more details. +""" + +from __future__ import absolute_import diff --git a/src/sentry/nodestore/base.py b/src/sentry/nodestore/base.py new file mode 100644 index 00000000000000..5105ecbefa4f76 --- /dev/null +++ b/src/sentry/nodestore/base.py @@ -0,0 +1,67 @@ +""" +sentry.nodestore.base +~~~~~~~~~~~~~~~~~~~~~ + +:copyright: (c) 2010-2013 by the Sentry Team, see AUTHORS for more details. +:license: BSD, see LICENSE for more details. +""" + +from __future__ import absolute_import + +import uuid + + +class NodeStorage(object): + def create(self, data): + """ + >>> key = nodestore.create({'foo': 'bar'}) + """ + node_id = self.generate_id() + self.set(node_id, data) + return node_id + + def delete(self, id): + """ + >>> nodestore.delete('key1') + """ + raise NotImplementedError + + def get(self, id): + """ + >>> data = nodestore.get('key1') + >>> print data + """ + raise NotImplementedError + + def get_multi(self, id_list): + """ + >>> data_map = nodestore.get_multi(['key1', 'key2') + >>> print 'key1', data_map['key1'] + >>> print 'key2', data_map['key2'] + """ + return dict( + (id, self.get(id)) + for id in id_list + ) + + def set(self, id, data): + """ + >>> nodestore.set('key1', {'foo': 'bar'}) + """ + raise NotImplementedError + + def set_multi(self, values): + """ + >>> nodestore.set_multi({ + >>> 'key1': {'foo': 'bar'}, + >>> 'key2': {'foo': 'baz'}, + >>> }) + """ + for id, data in values.iteritems(): + self.set(id=id, data=data) + + def generate_id(self): + return uuid.uuid4().hex + + def cleanup(self, cutoff_timestamp): + raise NotImplementedError diff --git a/src/sentry/nodestore/django/__init__.py b/src/sentry/nodestore/django/__init__.py new file mode 100644 index 00000000000000..d03f3a57f8c4b7 --- /dev/null +++ b/src/sentry/nodestore/django/__init__.py @@ -0,0 +1,9 @@ +""" +sentry.nodestore.django +~~~~~~~~~~~~~~~~~~~~~~~ + +:copyright: (c) 2010-2013 by the Sentry Team, see AUTHORS for more details. 
+:license: BSD, see LICENSE for more details. +""" + +from .backend import DjangoNodeStorage # NOQA diff --git a/src/sentry/nodestore/django/backend.py b/src/sentry/nodestore/django/backend.py new file mode 100644 index 00000000000000..d4c612cc64b7c6 --- /dev/null +++ b/src/sentry/nodestore/django/backend.py @@ -0,0 +1,46 @@ +""" +sentry.nodestore.django.backend +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +:copyright: (c) 2010-2013 by the Sentry Team, see AUTHORS for more details. +:license: BSD, see LICENSE for more details. +""" + +from __future__ import absolute_import + +from django.utils import timezone + +from sentry.db.models import create_or_update +from sentry.nodestore.base import NodeStorage + +from .models import Node + + +class DjangoNodeStorage(NodeStorage): + def delete(self, id): + Node.objects.filter(id=id).delete() + + def get(self, id): + try: + return Node.objects.get(id=id).data + except Node.DoesNotExist: + return None + + def get_multi(self, id_list): + return dict( + (n.id, n.data) + for n in Node.objects.filter(id__in=id_list) + ) + + def set(self, id, data): + create_or_update( + Node, + id=id, + defaults={ + 'data': data, + 'timestamp': timezone.now(), + }, + ) + + def cleanup(self, cutoff_timestamp): + Node.objects.filter(timestamp__lte=cutoff_timestamp).delete() diff --git a/src/sentry/nodestore/django/models.py b/src/sentry/nodestore/django/models.py new file mode 100644 index 00000000000000..1c70e65913b71d --- /dev/null +++ b/src/sentry/nodestore/django/models.py @@ -0,0 +1,26 @@ +""" +sentry.nodestore.django.models +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +:copyright: (c) 2010-2013 by the Sentry Team, see AUTHORS for more details. +:license: BSD, see LICENSE for more details. +""" + +from __future__ import absolute_import + +from django.db import models +from django.utils import timezone + +from sentry.db.models import ( + BaseModel, GzippedDictField, sane_repr) + + +class Node(BaseModel): + id = models.CharField(max_length=40, primary_key=True) + data = GzippedDictField() + timestamp = models.DateTimeField(default=timezone.now, db_index=True) + + __repr__ = sane_repr('timestamp') + + class Meta: + app_label = 'nodestore' diff --git a/src/sentry/nodestore/migrations/0001_initial.py b/src/sentry/nodestore/migrations/0001_initial.py new file mode 100644 index 00000000000000..22ba0d08aa9b4e --- /dev/null +++ b/src/sentry/nodestore/migrations/0001_initial.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +import datetime +from south.db import db +from south.v2 import SchemaMigration +from django.db import models + + +class Migration(SchemaMigration): + + def forwards(self, orm): + # Adding model 'Node' + db.create_table(u'nodestore_node', ( + ('id', self.gf('django.db.models.fields.CharField')(unique=True, max_length=40, primary_key=True)), + ('data', self.gf('django.db.models.fields.TextField')()), + ('timestamp', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now, db_index=True)), + )) + db.send_create_signal('nodestore', ['Node']) + + + def backwards(self, orm): + # Deleting model 'Node' + db.delete_table(u'nodestore_node') + + + models = { + 'nodestore.node': { + 'Meta': {'object_name': 'Node'}, + 'data': ('django.db.models.fields.TextField', [], {}), + 'id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40', 'primary_key': 'True'}), + 'timestamp': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}) + } + } + + complete_apps = ['nodestore'] diff --git 
a/src/sentry/nodestore/migrations/__init__.py b/src/sentry/nodestore/migrations/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/src/sentry/nodestore/models.py b/src/sentry/nodestore/models.py new file mode 100644 index 00000000000000..8d9a8e502f5f65 --- /dev/null +++ b/src/sentry/nodestore/models.py @@ -0,0 +1,13 @@ +""" +sentry.nodestore.models +~~~~~~~~~~~~~~~~~~~~~~~ + +:copyright: (c) 2010-2013 by the Sentry Team, see AUTHORS for more details. +:license: BSD, see LICENSE for more details. +""" + +from __future__ import absolute_import + +# HACK(dcramer): Django doesn't play well with our naming schemes, and we prefer +# our methods ways over Django's limited scoping +from .django.models import * # NOQA diff --git a/src/sentry/nodestore/multi/__init__.py b/src/sentry/nodestore/multi/__init__.py new file mode 100644 index 00000000000000..23e12bb3775a05 --- /dev/null +++ b/src/sentry/nodestore/multi/__init__.py @@ -0,0 +1,7 @@ +""" +sentry.nodestore.multi +~~~~~~~~~~~~~~~~~~~~~~ + +:copyright: (c) 2010-2013 by the Sentry Team, see AUTHORS for more details. +:license: BSD, see LICENSE for more details. +""" diff --git a/src/sentry/nodestore/multi/backend.py b/src/sentry/nodestore/multi/backend.py new file mode 100644 index 00000000000000..b8b971ab6422f0 --- /dev/null +++ b/src/sentry/nodestore/multi/backend.py @@ -0,0 +1,92 @@ +""" +sentry.nodestore.multi.backend +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +:copyright: (c) 2010-2013 by the Sentry Team, see AUTHORS for more details. +:license: BSD, see LICENSE for more details. +""" + +from __future__ import absolute_import + +import random + +from sentry.nodestore.base import NodeStorage +from sentry.utils.imports import import_string + + +class MultiNodeStorage(NodeStorage): + """ + A backend which will write to multiple backends, and read from a random + choice. + + This is not intended for consistency, but is instead designed to allow you + to dual-write for purposes of migrations. 
+ + >>> MultiNodeStorage(backends=[ + >>> ('sentry.nodestore.django.backend.DjangoNodeStorage', {}), + >>> ('sentry.nodestore.riak.backend.RiakNodeStorage', {}), + >>> ], read_selector=random.choice) + """ + def __init__(self, backends, read_selector=random.choice, **kwargs): + assert backends, "you should provide at least one backend" + + self.backends = [] + for backend, backend_options in backends: + if isinstance(backend, basestring): + backend = import_string(backend) + self.backends.append(backend(**backend_options)) + self.read_selector = read_selector + super(MultiNodeStorage, self).__init__(**kwargs) + + def get(self, id): + # just fetch it from a random backend, we're not aiming for consistency + backend = self.read_selector(self.backends) + return backend.get(id) + + def get_multi(self, id_list): + backend = self.read_selector(self.backends) + return backend.get_multi(id_list=id_list) + + def set(self, id, data): + should_raise = False + for backend in self.backends: + try: + backend.set(id, data) + except Exception: + should_raise = True + + if should_raise: + raise + + def set_multi(self, values): + should_raise = False + for backend in self.backends: + try: + backend.set_multi(values) + except Exception: + should_raise = True + + if should_raise: + raise + + def delete(self, id): + should_raise = False + for backend in self.backends: + try: + backend.delete(id) + except Exception: + should_raise = True + + if should_raise: + raise + + def cleanup(self, cutoff_timestamp): + should_raise = False + for backend in self.backends: + try: + backend.cleanup(cutoff_timestamp) + except Exception: + should_raise = True + + if should_raise: + raise diff --git a/src/sentry/nodestore/riak/__init__.py b/src/sentry/nodestore/riak/__init__.py new file mode 100644 index 00000000000000..0f526e2f155c52 --- /dev/null +++ b/src/sentry/nodestore/riak/__init__.py @@ -0,0 +1,7 @@ +""" +sentry.nodestore.riak +~~~~~~~~~~~~~~~~~~~~~ + +:copyright: (c) 2010-2013 by the Sentry Team, see AUTHORS for more details. +:license: BSD, see LICENSE for more details. +""" diff --git a/src/sentry/nodestore/riak/backend.py b/src/sentry/nodestore/riak/backend.py new file mode 100644 index 00000000000000..0ad5951c5fc072 --- /dev/null +++ b/src/sentry/nodestore/riak/backend.py @@ -0,0 +1,60 @@ +""" +sentry.nodestore.riak.backend +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +:copyright: (c) 2010-2013 by the Sentry Team, see AUTHORS for more details. +:license: BSD, see LICENSE for more details. +""" + +from __future__ import absolute_import + +import riak +import riak.resolver + +from sentry.nodestore.base import NodeStorage + + +class RiakNodeStorage(NodeStorage): + """ + A Riak-based backend for storing node data. 
+ + >>> RiakNodeStorage(nodes=[{'host':'127.0.0.1','http_port':8098}]) + """ + def __init__(self, nodes, bucket='nodes', + resolver=riak.resolver.last_written_resolver, **kwargs): + self.conn = riak.RiakClient( + nodes=nodes, resolver=resolver, **kwargs) + self.bucket = self.conn.bucket(bucket) + super(RiakNodeStorage, self).__init__(**kwargs) + + def create(self, data): + obj = self.bucket.new(data=data) + obj.store() + return obj.key + + def delete(self, id): + obj = self.bucket.new(key=id) + obj.delete() + + def get(self, id): + # just fetch it from a random backend, we're not aiming for consistency + obj = self.bucket.get(key=id) + if not obj: + return None + return obj.data + + def get_multi(self, id_list): + result = self.bucket.multiget(id_list) + return dict( + (obj.key, obj.data) + for obj in result + ) + + def set(self, id, data): + obj = self.bucket.new(key=id, data=data) + obj.store() + + def cleanup(self, cutoff_timestamp): + # TODO(dcramer): we should either index timestamps or have this run + # a map/reduce (probably the latter) + raise NotImplementedError diff --git a/src/sentry/tasks/cleanup.py b/src/sentry/tasks/cleanup.py index 6bdd8d42b04bdc..8fc80db8564018 100644 --- a/src/sentry/tasks/cleanup.py +++ b/src/sentry/tasks/cleanup.py @@ -23,6 +23,7 @@ def cleanup(days=30, project=None, chunk_size=1000, **kwargs): from django.utils import timezone + from sentry import app # TODO: TagKey and GroupTagKey need cleaned up from sentry.models import ( Group, Event, GroupCountByMinute, EventMapping, @@ -48,6 +49,18 @@ def cleanup(days=30, project=None, chunk_size=1000, **kwargs): ts = timezone.now() - datetime.timedelta(days=days) + log.info("Removing expired values for %r", LostPasswordHash) + LostPasswordHash.objects.filter( + date_added__lte=timezone.now() - datetime.timedelta(days=1) + ).delete() + + # TODO: we should move this into individual backends + log.info("Removing old Node values") + try: + app.nodestore.cleanup(ts) + except NotImplementedError: + log.warning("Node backend does not support cleanup operation") + # Remove types which can easily be bound to project + date for model, date_col in GENERIC_DELETES: log.info("Removing %r for days=%s project=%r", model, days, project or '*') @@ -59,8 +72,3 @@ def cleanup(days=30, project=None, chunk_size=1000, **kwargs): for obj in list(qs[:chunk_size]): log.info("Removing %r", obj) obj.delete() - - log.info("Removing expired values for %r", LostPasswordHash) - LostPasswordHash.objects.filter( - date_added__lte=timezone.now() - datetime.timedelta(days=1) - ).delete() diff --git a/src/sentry/testutils.py b/src/sentry/testutils.py index c4a02c39494580..9ecfb82d39d0d5 100644 --- a/src/sentry/testutils.py +++ b/src/sentry/testutils.py @@ -9,6 +9,7 @@ from __future__ import absolute_import import base64 +import pytest import os.path from exam import Exam, fixture, before # NOQA @@ -129,12 +130,18 @@ def group(self): @fixture def event(self): + return self.create_event(event_id='a' * 32) + + def create_event(self, event_id, **kwargs): + if 'group' not in kwargs: + kwargs['group'] = self.group + kwargs.setdefault('project', kwargs['group'].project) + kwargs.setdefault('message', 'Foo bar') + kwargs.setdefault('data', LEGACY_DATA) + return Event.objects.create( - event_id='a' * 32, - group=self.group, - message='Foo bar', - project=self.project, - data=LEGACY_DATA, + event_id=event_id, + **kwargs ) def assertRequiresAuthentication(self, path, method='GET'): @@ -284,3 +291,18 @@ def wrapped(*args, **kwargs): finally: 
app.conf.CELERY_ALWAYS_EAGER = prev return wrapped + + +def riak_is_available(): + import socket + try: + socket.create_connection(('127.0.0.1', 8098), 1.0) + except socket.error: + return False + else: + return True + + +requires_riak = pytest.mark.skipif( + lambda x: not riak_is_available(), + reason="requires riak server running") diff --git a/src/sentry/web/frontend/events.py b/src/sentry/web/frontend/events.py index 851e9971f4d149..748c9508be6c37 100644 --- a/src/sentry/web/frontend/events.py +++ b/src/sentry/web/frontend/events.py @@ -25,6 +25,8 @@ def replay_event(request, team, project, group, event_id): except Event.DoesNotExist: return HttpResponseRedirect(reverse('sentry')) + Event.objects.bind_nodes([event], 'data') + interfaces = event.interfaces if 'sentry.interfaces.Http' not in interfaces: # TODO: show a proper error diff --git a/src/sentry/web/frontend/groups.py b/src/sentry/web/frontend/groups.py index 3498b6cb8a51ec..79f445634ce198 100644 --- a/src/sentry/web/frontend/groups.py +++ b/src/sentry/web/frontend/groups.py @@ -386,6 +386,8 @@ def group(request, team, project, group, event_id=None): else: event = group.get_latest_event() or Event() + Event.objects.bind_nodes([event], 'data') + # bind params to group in case they get hit event.group = group event.project = project @@ -509,6 +511,8 @@ def group_tag_details(request, team, project, group, tag_name): def group_event_list(request, team, project, group): event_list = group.event_set.all().order_by('-datetime') + Event.objects.bind_nodes(event_list, 'data') + return render_with_group_context(group, 'sentry/groups/event_list.html', { 'event_list': event_list, 'page': 'event_list', @@ -529,6 +533,8 @@ def group_event_list_json(request, team, project, group_id): events = group.event_set.order_by('-id')[:limit] + Event.objects.bind_nodes(events, 'data') + return HttpResponse(json.dumps([event.as_dict() for event in events]), mimetype='application/json') @@ -543,6 +549,8 @@ def group_event_details_json(request, team, project, group_id, event_id_or_lates else: event = get_object_or_404(group.event_set, pk=event_id_or_latest) + Event.objects.bind_nodes([event], 'data') + return HttpResponse(json.dumps(event.as_dict()), mimetype='application/json') diff --git a/tests/sentry/models/tests.py b/tests/sentry/models/tests.py index 36062e2c4445d8..45770a116b7b74 100644 --- a/tests/sentry/models/tests.py +++ b/tests/sentry/models/tests.py @@ -8,13 +8,17 @@ from django.conf import settings from django.core import mail from django.core.urlresolvers import reverse +from django.db import connection from django.utils import timezone from sentry.constants import MINUTE_NORMALIZATION +from sentry.db.models.fields.node import NodeData from sentry.models import ( Project, ProjectKey, Group, Event, Team, GroupTag, GroupCountByMinute, TagValue, PendingTeamMember, LostPasswordHash, Alert, User, create_default_project) from sentry.testutils import TestCase, fixture +from sentry.utils.compat import pickle +from sentry.utils.strings import compress class ProjectTest(TestCase): @@ -169,3 +173,49 @@ def test_simple(self): team = project.team assert team.owner == user assert team.slug == 'sentry' + + +class EventNodeStoreTest(TestCase): + def test_does_transition_data_to_node(self): + group = self.group + data = {'key': 'value'} + + query_bits = [ + "INSERT INTO sentry_message (group_id, project_id, data, logger, level, message, checksum, datetime)", + "VALUES(%s, %s, %s, '', 0, %s, %s, %s)", + ] + params = [group.id, group.project_id, 
compress(pickle.dumps(data)), 'test', 'a' * 32, timezone.now()] + + # This is pulled from SQLInsertCompiler + if connection.features.can_return_id_from_insert: + r_fmt, r_params = connection.ops.return_insert_id() + if r_fmt: + query_bits.append(r_fmt % Event._meta.pk.column) + params += r_params + + cursor = connection.cursor() + cursor.execute(' '.join(query_bits), params) + + if connection.features.can_return_id_from_insert: + event_id = connection.ops.fetch_returned_insert_id(cursor) + else: + event_id = connection.ops.last_insert_id( + cursor, Event._meta.db_table, Event._meta.pk.column) + + event = Event.objects.get(id=event_id) + assert type(event.data) == NodeData + assert event.data == data + assert event.data.id is None + + event.save() + + assert event.data == data + assert event.data.id is not None + + node_id = event.data.id + event = Event.objects.get(id=event_id) + + Event.objects.bind_nodes([event], 'data') + + assert event.data == data + assert event.data.id == node_id diff --git a/tests/sentry/nodestore/__init__.py b/tests/sentry/nodestore/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/tests/sentry/nodestore/django/__init__.py b/tests/sentry/nodestore/django/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/tests/sentry/nodestore/django/backend/__init__.py b/tests/sentry/nodestore/django/backend/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/tests/sentry/nodestore/django/backend/tests.py b/tests/sentry/nodestore/django/backend/tests.py new file mode 100644 index 00000000000000..d44893831ca724 --- /dev/null +++ b/tests/sentry/nodestore/django/backend/tests.py @@ -0,0 +1,87 @@ +# -*- coding: utf-8 -*- + +from __future__ import absolute_import + +from sentry.nodestore.django.models import Node +from sentry.nodestore.django.backend import DjangoNodeStorage +from sentry.testutils import TestCase + + +class DjangoNodeStorageTest(TestCase): + def setUp(self): + self.ns = DjangoNodeStorage() + + def test_get(self): + node = Node.objects.create( + id='d2502ebbd7df41ceba8d3275595cac33', + data={ + 'foo': 'bar', + } + ) + + result = self.ns.get(node.id) + assert result == node.data + + def test_get_multi(self): + nodes = [ + Node.objects.create( + id='d2502ebbd7df41ceba8d3275595cac33', + data={ + 'foo': 'bar', + } + ), + Node.objects.create( + id='5394aa025b8e401ca6bc3ddee3130edc', + data={ + 'foo': 'baz', + } + ), + ] + + result = self.ns.get_multi([ + 'd2502ebbd7df41ceba8d3275595cac33', '5394aa025b8e401ca6bc3ddee3130edc' + ]) + assert result == dict((n.id, n.data) for n in nodes) + + def test_set(self): + self.ns.set('d2502ebbd7df41ceba8d3275595cac33', { + 'foo': 'bar', + }) + assert Node.objects.get(id='d2502ebbd7df41ceba8d3275595cac33').data == { + 'foo': 'bar', + } + + def test_set_multi(self): + self.ns.set_multi({ + 'd2502ebbd7df41ceba8d3275595cac33': { + 'foo': 'bar', + }, + '5394aa025b8e401ca6bc3ddee3130edc': { + 'foo': 'baz', + }, + }) + assert Node.objects.get(id='d2502ebbd7df41ceba8d3275595cac33').data == { + 'foo': 'bar', + } + assert Node.objects.get(id='5394aa025b8e401ca6bc3ddee3130edc').data == { + 'foo': 'baz', + } + + def test_create(self): + node_id = self.ns.create({ + 'foo': 'bar', + }) + assert Node.objects.get(id=node_id).data == { + 'foo': 'bar', + } + + def test_delete(self): + node = Node.objects.create( + id='d2502ebbd7df41ceba8d3275595cac33', + data={ + 'foo': 'bar', + } + ) + + self.ns.delete(node.id) + assert not 
Node.objects.filter(id=node.id).exists() diff --git a/tests/sentry/nodestore/multi/__init__.py b/tests/sentry/nodestore/multi/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/tests/sentry/nodestore/multi/backend/__init__.py b/tests/sentry/nodestore/multi/backend/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/tests/sentry/nodestore/multi/backend/tests.py b/tests/sentry/nodestore/multi/backend/tests.py new file mode 100644 index 00000000000000..1a282ff22aa36e --- /dev/null +++ b/tests/sentry/nodestore/multi/backend/tests.py @@ -0,0 +1,82 @@ +# -*- coding: utf-8 -*- + +from __future__ import absolute_import + +from sentry.nodestore.base import NodeStorage +from sentry.nodestore.multi.backend import MultiNodeStorage +from sentry.testutils import TestCase + + +class InMemoryBackend(NodeStorage): + def __init__(self): + self._data = {} + + def set(self, id, data): + self._data[id] = data + + def get(self, id): + return self._data.get(id) + + +class MultiNodeStorageTest(TestCase): + def setUp(self): + self.ns = MultiNodeStorage([ + (InMemoryBackend, {}), + (InMemoryBackend, {}), + ]) + + def test_basic_integration(self): + node_id = self.ns.create({ + 'foo': 'bar', + }) + assert node_id is not None + for backend in self.ns.backends: + assert backend.get(node_id) == { + 'foo': 'bar', + } + + self.ns.set(node_id, { + 'foo': 'baz', + }) + for backend in self.ns.backends: + assert backend.get(node_id) == { + 'foo': 'baz', + } + + result = self.ns.get(node_id) + assert result == { + 'foo': 'baz', + } + + node_id2 = self.ns.create({ + 'foo': 'bar', + }) + for backend in self.ns.backends: + assert backend.get(node_id2) == { + 'foo': 'bar', + } + + result = self.ns.get_multi([node_id, node_id2]) + assert result[node_id] == { + 'foo': 'baz', + } + assert result[node_id2] == { + 'foo': 'bar', + } + + result = self.ns.set_multi({ + node_id: { + 'foo': 'biz', + }, + node_id2: { + 'foo': 'bir', + }, + }) + + for backend in self.ns.backends: + assert backend.get(node_id) == { + 'foo': 'biz', + } + assert backend.get(node_id2) == { + 'foo': 'bir', + } diff --git a/tests/sentry/nodestore/riak/__init__.py b/tests/sentry/nodestore/riak/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/tests/sentry/nodestore/riak/backend/__init__.py b/tests/sentry/nodestore/riak/backend/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/tests/sentry/nodestore/riak/backend/tests.py b/tests/sentry/nodestore/riak/backend/tests.py new file mode 100644 index 00000000000000..92a76302181ec2 --- /dev/null +++ b/tests/sentry/nodestore/riak/backend/tests.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- + +from __future__ import absolute_import + +from sentry.nodestore.riak.backend import RiakNodeStorage +from sentry.testutils import TestCase, requires_riak + + +@requires_riak +class RiakNodeStorageTest(TestCase): + def setUp(self): + self.ns = RiakNodeStorage(nodes=[{ + 'host': '127.0.0.1', + 'http_port': 8098, + }]) + + def test_integration(self): + node_id = self.ns.create({ + 'foo': 'bar', + }) + assert node_id is not None + + self.ns.set(node_id, { + 'foo': 'baz', + }) + + result = self.ns.get(node_id) + assert result == { + 'foo': 'baz', + } + + node_id2 = self.ns.create({ + 'foo': 'bar', + }) + + result = self.ns.get_multi([node_id, node_id2]) + assert result[node_id] == { + 'foo': 'baz', + } + assert result[node_id2] == { + 'foo': 'bar', + } diff --git a/tests/sentry/web/frontend/groups/tests.py 
b/tests/sentry/web/frontend/groups/tests.py index c7054223de42b1..3c9e65829d992d 100644 --- a/tests/sentry/web/frontend/groups/tests.py +++ b/tests/sentry/web/frontend/groups/tests.py @@ -113,6 +113,11 @@ def path(self): }) def test_does_render(self): + event = self.create_event( + event_id='a' * 32, datetime=timezone.now() - timedelta(minutes=1)) + event2 = self.create_event( + event_id='b' * 32, datetime=timezone.now()) + self.login() resp = self.client.get(self.path) assert resp.status_code == 200 @@ -124,6 +129,10 @@ def test_does_render(self): assert resp.context['project'] == self.project assert resp.context['team'] == self.team assert resp.context['group'] == self.group + event_list = resp.context['event_list'] + assert len(event_list) == 2 + assert event_list[0] == event2 + assert event_list[1] == event class GroupTagListTest(TestCase): @@ -185,14 +194,19 @@ def path(self): def test_does_render(self): self.login() - # HACK: force fixture creation - self.event + + event = self.create_event( + event_id='a' * 32, datetime=timezone.now() - timedelta(minutes=1)) + event2 = self.create_event( + event_id='b' * 32, datetime=timezone.now()) + resp = self.client.get(self.path) assert resp.status_code == 200 assert resp['Content-Type'] == 'application/json' data = json.loads(resp.content) - assert len(data) == 1 - assert data[0]['id'] == str(self.event.event_id) + assert len(data) == 2 + assert data[0]['id'] == str(event2.event_id) + assert data[1]['id'] == str(event.event_id) def test_does_not_allow_beyond_limit(self): self.login()
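
Usage notes (illustrative sketches, not part of the patch):

The patch exposes the new storage layer as sentry.app.nodestore, constructed from SENTRY_NODESTORE / SENTRY_NODESTORE_OPTIONS in the same way as the existing buffer and quota backends. A minimal sketch of the NodeStorage API introduced in sentry/nodestore/base.py, assuming the shipped Django backend is in use:

    from sentry import app

    # create() generates a hex UUID, stores the blob via set(), and returns the id
    node_id = app.nodestore.create({'foo': 'bar'})

    # single and batched reads; the base get_multi() simply calls get() per id,
    # while the Django and Riak backends override it with a real batch query
    data = app.nodestore.get(node_id)
    data_map = app.nodestore.get_multi([node_id])

    # overwrite and remove by id
    app.nodestore.set(node_id, {'foo': 'baz'})
    app.nodestore.delete(node_id)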
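
Event.data is now a NodeField: on save the dict is written to the node store and only a pickled {'node_id': ...} reference remains in the event row, while unbound reads fall back to a per-row nodestore.get() and emit a warning. That is why the frontend views in the patch call bind_nodes() before touching event.data. A sketch of that pattern, assuming a group whose events are already loaded (variable and key names here are illustrative):

    from sentry.models import Event

    event_list = list(group.event_set.all().order_by('-datetime'))

    # one nodestore.get_multi() round trip instead of a lazy get() per event
    Event.objects.bind_nodes(event_list, 'data')

    for event in event_list:
        # once bound, NodeData behaves like a plain mapping: get(), iteration, len()
        value = event.data.get('extra')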
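
Backends are chosen purely through settings; following the existing buffer/quota pattern, the options dict appears to be passed to the backend constructor as keyword arguments. A configuration sketch (host and port values are illustrative; the shipped default remains DjangoNodeStorage):

    # keep the default Django-backed store
    SENTRY_NODESTORE = 'sentry.nodestore.django.DjangoNodeStorage'
    SENTRY_NODESTORE_OPTIONS = {}

    # or move node blobs to Riak
    SENTRY_NODESTORE = 'sentry.nodestore.riak.backend.RiakNodeStorage'
    SENTRY_NODESTORE_OPTIONS = {
        'nodes': [{'host': '127.0.0.1', 'http_port': 8098}],
    }

    # or dual-write during a migration, reading from a randomly chosen backend
    SENTRY_NODESTORE = 'sentry.nodestore.multi.backend.MultiNodeStorage'
    SENTRY_NODESTORE_OPTIONS = {
        'backends': [
            ('sentry.nodestore.django.backend.DjangoNodeStorage', {}),
            ('sentry.nodestore.riak.backend.RiakNodeStorage',
             {'nodes': [{'host': '127.0.0.1', 'http_port': 8098}]}),
        ],
    }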