Merge c57c36f into 1a918c4
dhakim87 committed Jun 29, 2020
2 parents 1a918c4 + c57c36f commit a7c55db
Showing 5 changed files with 332 additions and 0 deletions.
17 changes: 17 additions & 0 deletions microsetta_private_api/db/patches/0065.sql
@@ -0,0 +1,17 @@
CREATE TABLE ag.event_log(
    id uuid PRIMARY KEY NOT NULL,
    event_type varchar(100) NOT NULL,
    event_subtype varchar(100) NOT NULL,
    event_time timestamptz DEFAULT current_timestamp NOT NULL,
    event_state jsonb);

-- Full event log sorted by time
CREATE INDEX idx_event_log_event_time ON ag.event_log (event_time);
-- Event log filtered by type sorted by time
CREATE INDEX idx_event_log_event_type_event_time ON ag.event_log (event_type, event_time);
-- Event log filtered by type and subtype sorted by time
CREATE INDEX idx_event_log_event_type_event_subtype_event_time ON ag.event_log (event_type, event_subtype, event_time);
-- Event log filtered by user email sorted by time
CREATE INDEX idx_events_state_email_time ON ag.event_log ((event_state->>'email'), event_time);
-- Event log filtered by user account id sorted by time
CREATE INDEX idx_events_state_account_id_time ON ag.event_log ((event_state->>'account_id'), event_time);
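The comments above spell out which access pattern each index serves. As a quick sanity check, the sketch below (not part of this patch) runs EXPLAIN through psycopg2 against the expression index on event_state->>'email'; the connection settings and email address are placeholder values for a local test database.

# Sketch: inspect the query plan for an exact email lookup (placeholders only).
import psycopg2

conn = psycopg2.connect(host="localhost", dbname="ag_test",
                        user="postgres", password="postgres")
try:
    with conn.cursor() as cur:
        # An exact match on the extracted email can use
        # idx_events_state_email_time; ILIKE 'prefix%' patterns generally
        # cannot (see the notes in event_log_repo.py below).
        cur.execute("EXPLAIN SELECT id, event_time FROM ag.event_log "
                    "WHERE event_state->>'email' = %s "
                    "ORDER BY event_time DESC",
                    ("someone@example.com",))
        for (plan_line,) in cur.fetchall():
            print(plan_line)
finally:
    conn.close()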
51 changes: 51 additions & 0 deletions microsetta_private_api/model/log_event.py
@@ -0,0 +1,51 @@
import datetime
import uuid

from microsetta_private_api.model.model_base import ModelBase
from enum import Enum, unique


# NOTE: The string values of these enums are persisted to the database;
# therefore, they MUST NOT BE CHANGED.
@unique
class EventType(Enum):
    # The event type indicating an email was sent to an end user
    EMAIL = "email"


@unique
class EventSubtype(Enum):
    # Email event subtypes refer to the various email templates we can send
    # out.

    # indicates a sample was received and is good, but is being banked
    EMAIL_SAMPLE_RECEIVED_BANKED = "sample_received_banked"
    # indicates a sample was received, is good, and is being plated
    EMAIL_SAMPLE_RECEIVED_PLATED = "sample_received_plated"
    # indicates a previously banked sample is now being plated
    EMAIL_BANKED_SAMPLE_NOW_PLATED = "banked_sample_plated"
    # indicates there is a problem with a sample
    # (messaging should be tailored to the problem)
    EMAIL_SAMPLE_RECEIVED_WITH_PROBLEMS = "sample_received_with_problems"


class LogEvent(ModelBase):
    def __init__(self,
                 event_id: uuid.UUID,
                 event_type: EventType,
                 event_subtype: EventSubtype,
                 event_time: datetime.datetime,
                 event_state: dict):
        self.event_id = event_id
        self.event_type = event_type
        self.event_subtype = event_subtype
        self.event_time = event_time
        self.event_state = event_state

    def to_api(self):
        return {
            "event_id": str(self.event_id),
            "event_type": self.event_type.value,
            "event_subtype": self.event_subtype.value,
            "event_time": self.event_time,
            "event_state": self.event_state
        }
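A small usage sketch (not part of this commit) showing how the model flattens enum members to their persisted string values; the timestamp and email are made up, and in normal operation event_time is filled in by the database default rather than by the caller.

import datetime
import uuid

from microsetta_private_api.model.log_event import LogEvent, EventType, \
    EventSubtype

# Build an event by hand; event_time is supplied here only for illustration.
event = LogEvent(uuid.uuid4(),
                 EventType.EMAIL,
                 EventSubtype.EMAIL_SAMPLE_RECEIVED_BANKED,
                 datetime.datetime.now(datetime.timezone.utc),
                 {"email": "someone@example.com"})

# event_type and event_subtype come back as their string values,
# e.g. "email" and "sample_received_banked".
print(event.to_api())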
121 changes: 121 additions & 0 deletions microsetta_private_api/repo/event_log_repo.py
@@ -0,0 +1,121 @@
from uuid import UUID

from psycopg2.extras import Json

from microsetta_private_api.model.log_event import LogEvent, EventType, \
    EventSubtype
from microsetta_private_api.repo.base_repo import BaseRepo


_read_cols = "id, event_type, event_subtype, event_time, event_state"


def _event_to_row(event: LogEvent):
    return {
        "id": str(event.event_id),
        "event_type": event.event_type.value,
        "event_subtype": event.event_subtype.value,
        # event_time is set by db upon creation, need not be passed in.
        "event_state": Json(event.event_state),
    }


def _row_to_event(row):
    return LogEvent(UUID(row['id']),
                    EventType(row['event_type']),
                    EventSubtype(row['event_subtype']),
                    row['event_time'],
                    row['event_state'])


class EventLogRepo(BaseRepo):
    def __init__(self, transaction):
        super().__init__(transaction)

    def add_event(self, event: LogEvent):
        with self._transaction.cursor() as cur:
            cur.execute("INSERT INTO event_log("
                        "id, "
                        "event_type, "
                        "event_subtype, "
                        "event_state"
                        ") VALUES ("
                        "%(id)s, "
                        "%(event_type)s, "
                        "%(event_subtype)s, "
                        "%(event_state)s"
                        ")",
                        _event_to_row(event))
            return cur.rowcount == 1

    def get_events(self):
        with self._transaction.dict_cursor() as cur:
            cur.execute("SELECT " + _read_cols + " FROM "
                        "event_log "
                        "ORDER BY "
                        "event_time DESC")
            return [_row_to_event(row) for row in cur.fetchall()]

    def get_events_by_type(self, event_type: EventType):
        with self._transaction.dict_cursor() as cur:
            cur.execute("SELECT " + _read_cols + " FROM "
                        "event_log "
                        "WHERE "
                        "event_type = %s "
                        "ORDER BY "
                        "event_time DESC",
                        (event_type.value,))
            return [_row_to_event(row) for row in cur.fetchall()]

    def get_events_by_subtype(self,
                              event_type: EventType,
                              event_subtype: EventSubtype):
        with self._transaction.dict_cursor() as cur:
            cur.execute("SELECT " + _read_cols + " FROM "
                        "event_log "
                        "WHERE "
                        "event_type = %s AND "
                        "event_subtype = %s "
                        "ORDER BY "
                        "event_time DESC",
                        (event_type.value, event_subtype.value))
            return [_row_to_event(row) for row in cur.fetchall()]

    # See https://www.postgresql.org/docs/9.5/functions-json.html#FUNCTIONS-JSON-OP-TABLE  # noqa
    # for how the email field is referenced from the jsonb representation.

    # Based on EXPLAIN output on postgres 9.5, the expression index is used
    # for exact matches but cannot be used for anything with a wildcard.
    # TODO: Should test against the newest postgresql; may need to look into
    # GIN indexes to improve this if performance becomes an issue.
    # See https://stackoverflow.com/questions/33025890/indexing-jsonb-data-for-pattern-matching-searches  # noqa
    def get_events_by_email(self, email: str):
        with self._transaction.dict_cursor() as cur:
            cur.execute("SELECT " + _read_cols + " FROM "
                        "event_log "
                        "WHERE "
                        "event_state->>'email' ILIKE %s "
                        "ORDER BY "
                        "event_state->>'email', event_time DESC",
                        (email + "%",))
            return [_row_to_event(row) for row in cur.fetchall()]

    def get_events_by_account(self, account_id: UUID):
        with self._transaction.dict_cursor() as cur:
            cur.execute("SELECT " + _read_cols + " FROM "
                        "event_log "
                        "WHERE "
                        "event_state->>'account_id' = %s "
                        "ORDER BY "
                        "event_time DESC",
                        (str(account_id),))
            return [_row_to_event(row) for row in cur.fetchall()]

    def delete_event(self, event_id: UUID):
        with self._transaction.cursor() as cur:
            cur.execute("DELETE FROM event_log "
                        "WHERE "
                        "id = %s",
                        (str(event_id),))
            return cur.rowcount == 1
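For context, here is a hedged usage sketch (not part of this commit) that mirrors the tests below: add an email event inside a Transaction and read it back through the filtered queries. On the wildcard TODO above, a trigram GIN expression index over event_state->>'email' (via the pg_trgm extension) is one option the linked Stack Overflow discussion points at, should ILIKE prefix searches ever become a bottleneck.

import uuid

from microsetta_private_api.model.log_event import LogEvent, EventType, \
    EventSubtype
from microsetta_private_api.repo.event_log_repo import EventLogRepo
from microsetta_private_api.repo.transaction import Transaction

event = LogEvent(uuid.uuid4(),
                 EventType.EMAIL,
                 EventSubtype.EMAIL_SAMPLE_RECEIVED_PLATED,
                 None,  # event_time is assigned by the database default
                 {"email": "someone@example.com",
                  "account_id": str(uuid.uuid4())})

with Transaction() as t:
    events = EventLogRepo(t)
    assert events.add_event(event)

    # Reads within the same transaction see the new row.
    by_email = events.get_events_by_email("someone@example.com")
    by_account = events.get_events_by_account(
        uuid.UUID(event.event_state["account_id"]))

    # Rolled back here to keep the sketch side-effect free; a real caller
    # would presumably commit instead.
    t.rollback()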
Empty file.
143 changes: 143 additions & 0 deletions microsetta_private_api/repo/tests/test_event_log_repo.py
@@ -0,0 +1,143 @@
import json

import psycopg2
import unittest

from microsetta_private_api.model.log_event import LogEvent, EventType, \
    EventSubtype
from microsetta_private_api.repo.event_log_repo import EventLogRepo
from microsetta_private_api.repo.transaction import Transaction

import uuid

from microsetta_private_api.util.util import json_converter


class EventLogTests(unittest.TestCase):
    def test_event_log(self):
        event_id = uuid.uuid4()
        acct_id = uuid.uuid4()
        event = LogEvent(
            event_id,
            EventType.EMAIL,
            EventSubtype.EMAIL_BANKED_SAMPLE_NOW_PLATED,
            None,
            {
                "email": "foobarbaz@kasdgklhasg.com",
                "account_id": str(acct_id),
                "blahblah": "Blah blah blah",
                "ski": "ball"
            }
        )

        with Transaction() as t:
            events = EventLogRepo(t)

            insertion = events.add_event(event)
            self.assertTrue(insertion)

            # Check full event log
            self.assertEqual(events.get_events()[0].event_id, event_id)

            # Check event log filtered by primary type
            primary_type = events.get_events_by_type(EventType.EMAIL)[0]
            self.assertEqual(primary_type.event_id, event_id)

            # Check fields
            client_obj = json.loads(json.dumps(primary_type.to_api(),
                                               default=json_converter))
            self.assertEqual(client_obj['event_id'], str(event_id))
            self.assertEqual(client_obj['event_type'],
                             EventType.EMAIL.value)
            self.assertEqual(client_obj['event_subtype'],
                             EventSubtype.EMAIL_BANKED_SAMPLE_NOW_PLATED.value)
            self.assertEqual(client_obj['event_state']['ski'], 'ball')

            # Check event log filtered by subtype
            subtype = events.get_events_by_subtype(
                EventType.EMAIL,
                EventSubtype.EMAIL_BANKED_SAMPLE_NOW_PLATED)[0]
            self.assertEqual(subtype.event_id, event_id)

            # Check event log filtered by exact email
            exact = events.get_events_by_email("foobarbaz@kasdgklhasg.com")[0]
            self.assertEqual(exact.event_id, event_id)

            # Check event log filtered by email prefix
            partial = events.get_events_by_email("foobarbaz@ka")[0]
            self.assertEqual(partial.event_id, event_id)

            # Check event log filtered by account
            acct = events.get_events_by_account(acct_id)[0]
            self.assertEqual(acct.event_id, event_id)

            # Check event can be deleted
            deletion = events.delete_event(event_id)
            self.assertTrue(deletion)

            # Check event is no longer there
            remaining = events.get_events()
            if len(remaining) > 0:
                self.assertNotEqual(remaining[0].event_id, event_id)
            t.rollback()

    def test_dups_rejected(self):
        event = LogEvent(
            uuid.uuid4(),
            EventType.EMAIL,
            EventSubtype.EMAIL_BANKED_SAMPLE_NOW_PLATED,
            None,
            {}
        )
        with Transaction() as t:
            events = EventLogRepo(t)
            events.add_event(event)
            with self.assertRaises(psycopg2.errors.UniqueViolation):
                events.add_event(event)
            t.rollback()

    def test_empty_emails(self):
        acct_id = uuid.uuid4()
        event1 = LogEvent(
            uuid.uuid4(),
            EventType.EMAIL,
            EventSubtype.EMAIL_BANKED_SAMPLE_NOW_PLATED,
            None,
            {
                'email': "foobarbaz@itzatest.com"
            }
        )
        event2 = LogEvent(
            uuid.uuid4(),
            EventType.EMAIL,
            EventSubtype.EMAIL_BANKED_SAMPLE_NOW_PLATED,
            None,
            {
                'email': "foobarbaz@itzatest.com"
            }
        )
        event3 = LogEvent(
            uuid.uuid4(),
            EventType.EMAIL,
            EventSubtype.EMAIL_BANKED_SAMPLE_NOW_PLATED,
            None,
            {
                'account_id': str(acct_id)
            }
        )
        with Transaction() as t:
            events = EventLogRepo(t)
            events.add_event(event1)
            events.add_event(event2)
            events.add_event(event3)

            self.assertEqual(
                len(events.get_events_by_email("foobarbaz@itzatest.com")),
                2
            )
            self.assertEqual(
                len(events.get_events_by_account(acct_id)),
                1
            )

            t.rollback()
