Merge pull request #22 from narratorai/index-events
Add indexes to event table
Mike Nason committed Nov 19, 2020
2 parents 0f9552b + b77f32a commit 9fc6154
Showing 8 changed files with 146 additions and 45 deletions.
60 changes: 33 additions & 27 deletions Dockerfile
@@ -29,35 +29,41 @@ ARG skip_dev_deps
RUN useradd --create-home redash

# Ubuntu packages
-RUN apt-get update && apt-get install -y ca-certificates curl && \
+RUN apt-get update && \
  apt-get install -y \
-  curl \
-  gnupg \
-  build-essential \
-  pwgen \
-  libffi-dev \
-  sudo \
-  git-core \
-  wget \
-  unzip \
-  # Postgres client
-  libpq-dev \
-  # ODBC support:
-  g++ unixodbc-dev \
-  # for SAML
-  xmlsec1 \
-  # Additional packages required for data sources:
-  libssl-dev \
-  default-libmysqlclient-dev \
-  freetds-dev \
-  libsasl2-dev \
-  unzip \
-  libsasl2-modules-gssapi-mit \
-  # Narrator Custom:
-  jq && \
-  # End Narrator Custom
+  curl \
+  gnupg \
+  build-essential \
+  pwgen \
+  libffi-dev \
+  sudo \
+  git-core \
+  wget \
+  unzip \
+  # Postgres client
+  libpq-dev \
+  # ODBC support:
+  g++ unixodbc-dev \
+  # for SAML
+  xmlsec1 \
+  # Additional packages required for data sources:
+  libssl-dev \
+  default-libmysqlclient-dev \
+  freetds-dev \
+  libsasl2-dev \
+  unzip \
+  libsasl2-modules-gssapi-mit \
+  # Narrator Custom:
+  ca-certificates \
+  jq && \
+  # End Narrator Custom
  # MSSQL ODBC Driver:
  curl https://packages.microsoft.com/keys/microsoft.asc | apt-key add - && \
  curl https://packages.microsoft.com/config/ubuntu/18.04/prod.list > /etc/apt/sources.list.d/mssql-release.list && \
  apt-get update && \
  ACCEPT_EULA=Y apt-get install -y msodbcsql17 && \
  apt-get clean && \
-  rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
+  rm -rf /var/lib/apt/lists/*

WORKDIR /app

40 changes: 40 additions & 0 deletions migrations/versions/73567079e0f3_add_event_indexes.py
@@ -0,0 +1,40 @@
"""add event indexes
Revision ID: 73567079e0f3
Revises: e5c7a4e2df4d
Create Date: 2020-11-19 16:53:12.180504
"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '73567079e0f3'
down_revision = 'e5c7a4e2df4d'
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_index(op.f('ix_events_action'), 'events', ['action'], unique=False)
    op.create_index('ix_events_created_at_desc', 'events', [sa.text('created_at DESC')], unique=False)
    op.create_index(op.f('ix_events_object_id'), 'events', ['object_id'], unique=False)
    op.create_index(op.f('ix_events_object_type'), 'events', ['object_type'], unique=False)
    op.create_index('ix_events_org_action', 'events', ['org_id', 'action'], unique=False)
    op.create_index('ix_events_org_created_at_desc', 'events', ['org_id', sa.text('created_at DESC')], unique=False)
    op.create_index(op.f('ix_events_org_id'), 'events', ['org_id'], unique=False)
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_events_org_id'), table_name='events')
    op.drop_index('ix_events_org_created_at_desc', table_name='events')
    op.drop_index('ix_events_org_action', table_name='events')
    op.drop_index(op.f('ix_events_object_type'), table_name='events')
    op.drop_index(op.f('ix_events_object_id'), table_name='events')
    op.drop_index('ix_events_created_at_desc', table_name='events')
    op.drop_index(op.f('ix_events_action'), table_name='events')
    # ### end Alembic commands ###
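
For anyone applying the migration by hand, a quick sanity check is to reflect the events table and list its indexes — a minimal sketch, assuming a reachable Redash metadata database (the connection URL below is a placeholder):

# Sketch: confirm the migration created the expected indexes.
# The DSN is a placeholder -- point it at your Redash metadata database.
import sqlalchemy as sa

engine = sa.create_engine("postgresql://localhost/redash")
inspector = sa.inspect(engine)

for index in inspector.get_indexes("events"):
    print(index["name"], index["column_names"])
# Expected to include: ix_events_action, ix_events_created_at_desc,
# ix_events_object_id, ix_events_object_type, ix_events_org_action,
# ix_events_org_created_at_desc, ix_events_org_id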
11 changes: 7 additions & 4 deletions redash/__init__.py
@@ -1,10 +1,13 @@
from __future__ import absolute_import
+import os

-if os.environ.get('SERVER_EXTRA_OPTIONS') and "gevent" in os.environ.get('SERVER_EXTRA_OPTIONS'):
-    import gevent.monkey
-    gevent.monkey.patch_all()  # noqa

+import gevent.monkey
+gevent.monkey.patch_all()
+import psycogreen.gevent
+psycogreen.gevent.patch_psycopg()  # noqa

import logging
-import os
import sys

import redis
7 changes: 5 additions & 2 deletions redash/handlers/api.py
@@ -86,7 +86,7 @@
from redash.utils import json_dumps

# Narrator Internal
-from redash.handlers.internal import BigQueryCreateDatasetResource
+from redash.handlers.internal import BigQueryCreateDatasetResource, QueryHistoryEventsResource

class ApiExt(Api):
    def add_org_resource(self, resource, *urls, **kwargs):
@@ -319,4 +319,7 @@ def json_representation(data, code, headers=None):
# NARRATOR INTERNAL
api.add_org_resource(
    BigQueryCreateDatasetResource, "/api/narrator/create_bigquery_dataset/<data_source_id>", endpoint="internal_create_bigquery_dataset"
-)
+)
+api.add_org_resource(
+    QueryHistoryEventsResource, "/api/narrator/query_history", endpoint="query_history"
+)
30 changes: 29 additions & 1 deletion redash/handlers/internal.py
@@ -3,13 +3,14 @@
"""

import logging
-import time
+from datetime import datetime as dt, timedelta

from flask import request
import apiclient.errors

from redash import models
from redash.handlers.base import BaseResource, require_fields
+from redash.handlers.events import serialize_event
from redash.permissions import (
    require_admin,
)
@@ -56,3 +57,30 @@ def post(self, data_source_id):
            raise e



+class QueryHistoryEventsResource(BaseResource):
+    def _to_date(self, input):
+        """
+        Parse an ISO 8601 string to a datetime object
+        """
+        return dt.strptime(input, "%Y-%m-%dT%H:%M:%S%z")

+    @require_admin
+    def get(self):
+        after_time = request.args.get("after", type=self._to_date)
+        page_size = request.args.get("page_size", 50, type=int)

+        query = self.current_org.events.filter(models.Event.action == 'execute_query')
+        if after_time is not None:
+            query = query.filter(models.Event.created_at < after_time)

+        results = query.limit(page_size).all()

+        next_ts = None if len(results) == 0 else results[-1].created_at
+        items = [
+            {**serialize_event(item)['details'], "ts": item.created_at}
+            for item in results
+        ]

+        return {"next_ts": next_ts, "events": items}

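A hedged sketch of how the new admin-only endpoint might be called — the host and API key are placeholders, and the Authorization header follows Redash's standard key-based auth. Note the cursor semantics: the handler filters created_at < after, so feeding next_ts back as after pages backwards through history.

# Sketch: fetch a page of execute_query events (host and key are placeholders).
import requests

resp = requests.get(
    "https://redash.example.com/api/narrator/query_history",
    params={
        "page_size": 100,
        # Optional cursor; must match the handler's strptime format
        # "%Y-%m-%dT%H:%M:%S%z" (no fractional seconds).
        "after": "2020-11-19T00:00:00+0000",
    },
    headers={"Authorization": "Key <admin-api-key>"},
)
resp.raise_for_status()
page = resp.json()
print(page["next_ts"], len(page["events"]))
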
26 changes: 22 additions & 4 deletions redash/models/__init__.py
@@ -1237,20 +1237,38 @@ def get_by_id_and_org(cls, object_id, org):
)
class Event(db.Model):
    id = primary_key("Event")
-    org_id = Column(key_type("Organization"), db.ForeignKey("organizations.id"))
+    org_id = Column(key_type("Organization"), db.ForeignKey("organizations.id"), index=True)
    org = db.relationship(Organization, back_populates="events")
    user_id = Column(key_type("User"), db.ForeignKey("users.id"), nullable=True)
    user = db.relationship(User, backref="events")
-    action = Column(db.String(255))
-    object_type = Column(db.String(255))
-    object_id = Column(db.String(255), nullable=True)
+    action = Column(db.String(255), index=True)
+    object_type = Column(db.String(255), index=True)
+    object_id = Column(db.String(255), nullable=True, index=True)
    additional_properties = Column(
        MutableDict.as_mutable(PseudoJSON), nullable=True, default={}
    )
    created_at = Column(db.DateTime(True), default=db.func.now())

    __tablename__ = "events"

+    # NARRATOR INTERNAL: indexes added
+    __table_args__ = (
+        db.Index(
+            "ix_events_created_at_desc",
+            created_at.desc()
+        ),
+        db.Index(
+            "ix_events_org_created_at_desc",
+            "org_id",
+            created_at.desc(),
+        ),
+        db.Index(
+            "ix_events_org_action",
+            "org_id",
+            "action",
+        )
+    )

    def __str__(self):
        return "%s,%s,%s,%s" % (
            self.user_id,
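The new column indexes and composite indexes line up with the access pattern in QueryHistoryEventsResource above: filter one org's events by action and walk them by created_at. A minimal sketch of the kind of query they are built to serve (the org object and Flask-SQLAlchemy session are assumed from the surrounding application):

# Sketch: one org's query executions, newest first -- the shape that
# ix_events_org_action and ix_events_org_created_at_desc serve.
recent = (
    Event.query
    .filter(Event.org_id == org.id, Event.action == "execute_query")
    .order_by(Event.created_at.desc())
    .limit(50)
    .all()
)
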
14 changes: 8 additions & 6 deletions redash/settings/gunicorn.py
@@ -1,8 +1,10 @@
-import gevent.monkey
-gevent.monkey.patch_all()
+# TODO remove this file

-import logging
-import os
+# import gevent.monkey
+# gevent.monkey.patch_all()

-logger = logging.getLogger('redash.gunicorn')
-logger.setLevel(logging.INFO)
+# import logging
+# import os

+# logger = logging.getLogger('redash.gunicorn')
+# logger.setLevel(logging.INFO)
3 changes: 2 additions & 1 deletion requirements.txt
@@ -19,7 +19,7 @@ Flask-Limiter==1.2.1
passlib==1.7.1
aniso8601==8.0.0
blinker==1.4
-psycopg2==2.8.5
+psycopg2==2.8.6
python-dateutil==2.8.1
pytz>=2019.3
PyYAML==5.1.2
@@ -58,6 +58,7 @@ sshtunnel==0.1.5
supervisor==4.2.0
supervisor_checks==0.8.1
werkzeug==0.16.1
+psycogreen==1.0.2
# Install the dependencies of the bin/bundle-extensions script here.
# It has its own requirements file to simplify the frontend client build process
-r requirements_bundles.txt
