Skip to content

Commit

Permalink
Merge pull request #14 from romanchyla/limits
Browse files Browse the repository at this point in the history
Added limits to the persistence layer
  • Loading branch information
romanchyla committed Jun 15, 2016
2 parents a3efd1b + 49c61ce commit bbb25d4
Show file tree
Hide file tree
Showing 10 changed files with 296 additions and 10 deletions.
1 change: 1 addition & 0 deletions alembic/README
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Generic single-database configuration.
90 changes: 90 additions & 0 deletions alembic/env.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,90 @@
from __future__ import with_statement
from alembic import context
from sqlalchemy import engine_from_config, pool
from logging.config import fileConfig
import os
import sys

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = None

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.

def run_migrations_offline():
    """Run migrations in 'offline' mode.

    Configures the alembic context with only a database URL and no
    Engine, so no DBAPI needs to be installed; ``context.execute()``
    calls emit the generated SQL to the script output instead of a
    live connection.
    """
    db_url = config.get_main_option("sqlalchemy.url")
    context.configure(url=db_url, target_metadata=target_metadata)

    with context.begin_transaction():
        context.run_migrations()

def get_app_config(key):
    """Load the Flask application and return the config value for *key*.

    Puts the repository root on ``sys.path`` so the service package can
    be imported, builds the app through its factory, and reads the value
    inside an application context.

    :param key: name of the Flask config entry to look up
    :return: the config value, or ``None`` when the key is absent
    """
    opath = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
    if opath not in sys.path:
        sys.path.insert(0, opath)

    # NOTE(review): the original imported ``orcid_service`` but every other
    # file in this change lives under ``solr`` -- this looks like a copy-paste
    # from the orcid service; import the local package instead.
    from solr import app as application
    app = application.create_app()

    with app.app_context():
        value = app.config.get(key)  # look the value up once, not twice
        # parenthesized single-argument print works under both py2 and py3
        print('Getting actual config for %s %s' % (key, value))
        return value

def run_migrations_online():
    """Run migrations in 'online' mode.

    Creates an Engine from the alembic ini section and associates a real
    connection with the migration context.
    """
    cfg = config.get_section(config.config_ini_section)
    # Optionally pull the DB URL from the Flask app config instead of
    # alembic.ini (set ``use_flask_db_url = true`` in the ini section).
    if cfg.get('use_flask_db_url') == 'true':
        # The bind key must match SQLALCHEMY_BINDS in solr/config.py
        # ('solr_service'); the original 'orcid' key would raise KeyError.
        cfg['sqlalchemy.url'] = get_app_config('SQLALCHEMY_BINDS')['solr_service']

    engine = engine_from_config(
        cfg,
        prefix='sqlalchemy.',
        poolclass=pool.NullPool)

    connection = engine.connect()
    context.configure(
        connection=connection,
        target_metadata=target_metadata
    )

    try:
        with context.begin_transaction():
            context.run_migrations()
    finally:
        # always release the connection, even if a migration fails
        connection.close()

# Entry point: alembic executes this module and we dispatch on the mode
# it was started in (``--sql`` invocations run offline).
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

28 changes: 28 additions & 0 deletions alembic/script.py.mako
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision}
Create Date: ${create_date}

"""

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}

from alembic import op
import sqlalchemy as sa


${imports if imports else ""}

def upgrade():
    # Apply this revision's schema changes (body substituted by alembic).
    #with app.app_context() as c:
    #    db.session.add(Model())
    #    db.session.commit()

    ${upgrades if upgrades else "pass"}


def downgrade():
    # Revert this revision's schema changes (body substituted by alembic).
    ${downgrades if downgrades else "pass"}
31 changes: 31 additions & 0 deletions alembic/versions/51f3b3b5cd5d_creating_basic_db_structure.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
"""Creating basic DB structure
Revision ID: 51f3b3b5cd5d
Revises: None
Create Date: 2015-09-13 20:13:58.241566
"""
# revision identifiers, used by Alembic.
revision = '51f3b3b5cd5d'
down_revision = None

from alembic import op
import sqlalchemy as sa
import datetime

from sqlalchemy.sql import table, column
from sqlalchemy import String, Integer, Index
from sqlalchemy_utils import URLType


def upgrade():
    """Create the ``limits`` table, indexed by user id."""
    op.create_table(
        'limits',
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('uid', sa.String(255), nullable=False),
        sa.Column('field', sa.String(255), nullable=True),
        sa.Column('filter', sa.Text, nullable=False),
        Index('ix_uid', 'uid'),
    )

def downgrade():
    """Drop the ``limits`` table created by :func:`upgrade`."""
    op.drop_table('limits')
3 changes: 3 additions & 0 deletions requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -4,3 +4,6 @@ Flask-RESTful==0.3.5
requests==2.8.1
flask-discoverer==0.0.2
flask-consulate==0.1.2
Flask-SQLAlchemy
sqlalchemy-utils
psycopg2
33 changes: 31 additions & 2 deletions solr/app.py
Original file line number Diff line number Diff line change
@@ -1,13 +1,15 @@

import logging.config
from flask import Flask, make_response, jsonify
from views import StatusView, Tvrh, Search, Qtree, BigQuery
from flask.ext.restful import Api
from flask.ext.discoverer import Discoverer
from flask.ext.consulate import Consul, ConsulConnectionError
from flask.ext.sqlalchemy import SQLAlchemy
from views import StatusView, Tvrh, Search, Qtree, BigQuery

db = SQLAlchemy()

def create_app():
def create_app(**config):
"""
Application factory
:return configured flask.Flask application instance
Expand All @@ -17,10 +19,37 @@ def create_app():
app.url_map.strict_slashes = False
Consul(app) # load_config expects consul to be registered
load_config(app)
if config:
app.config.update(config)

db.init_app(app)
logging.config.dictConfig(
app.config['SOLR_SERVICE_LOGGING']
)

## pysqlite driver breaks transactions, we have to apply some hacks as per
## http://docs.sqlalchemy.org/en/rel_0_9/dialects/sqlite.html#pysqlite-serializable

if 'sqlite' in (app.config.get('SQLALCHEMY_BINDS') or {'solr_service':''})['solr_service']:
from sqlalchemy import event

binds = app.config.get('SQLALCHEMY_BINDS')
if binds and 'solr_service' in binds:
engine = db.get_engine(app, bind=(app.config.get('SQLALCHEMY_BINDS') and 'solr_service'))
else:
engine = db.get_engine(app)

@event.listens_for(engine, "connect")
def do_connect(dbapi_connection, connection_record):
# disable pysqlite's emitting of the BEGIN statement entirely.
# also stops it from emitting COMMIT before any DDL.
dbapi_connection.isolation_level = None

@event.listens_for(engine, "begin")
def do_begin(conn):
# emit our own BEGIN
conn.execute("BEGIN")

api = Api(app)

@api.representation('application/json')
Expand Down
4 changes: 4 additions & 0 deletions solr/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,10 @@
SOLR_SERVICE_BIGQUERY_HANDLER = SOLR_SERVICE_URL + '/bigquery'
SOLR_SERVICE_FORWARD_COOKIE_NAME = 'session'
SOLR_SERVICE_DISALLOWED_FIELDS = ['body', 'full', 'reader']
SQLALCHEMY_TRACK_MODIFICATIONS = False
SQLALCHEMY_BINDS = {
'solr_service': 'sqlite:///'
}
SOLR_SERVICE_ALLOWED_FIELDS = [
'abstract', 'ack', 'aff', 'alternate_bibcode', 'alternate_title',
'arxiv_class', 'author', 'bibcode', 'bibgroup', 'bibstem',
Expand Down
26 changes: 26 additions & 0 deletions solr/models.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
# -*- coding: utf-8 -*-
"""
solr.models
~~~~~~~~~~~~~~~~~~~~~
Models for the solr service persistence layer (per-user limits)
"""
from flask_sqlalchemy import SQLAlchemy

db = SQLAlchemy() # must be run in the context of a flask application

class Limits(db.Model):
    """Per-user restriction on what solr data may be retrieved.

    Each row applies to one ``uid``: a solr ``field`` the user is limited
    on, and a ``filter`` query that gets appended to their requests
    (see how the test suite exercises ``cleanup_solr_request``).
    """
    __bind_key__ = 'solr_service'
    __tablename__ = 'limits'

    id = db.Column(db.Integer, primary_key=True)  # surrogate key
    uid = db.Column(db.String(255))               # user identifier
    field = db.Column(db.String(255))             # restricted solr field
    filter = db.Column(db.Text)                   # fq clause to enforce

    def toJSON(self):
        """Returns value formatted as python dict."""
        payload = dict(uid=self.uid, field=self.field)
        payload['filter'] = self.filter if self.filter else None
        return payload
46 changes: 44 additions & 2 deletions solr/tests/unittests/test_solr.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,15 +12,26 @@
from StringIO import StringIO
from ..mocks import MockSolrResponse
from views import SolrInterface

from models import Limits, db

class TestSolrInterface(TestCase):

def create_app(self):
    """Build the wsgi application with test-only config overrides."""
    overrides = {
        'SQLALCHEMY_BINDS': {'solr_service': 'sqlite:///'},
        'SQLALCHEMY_ECHO': True,
        'TESTING': True,
        'PROPAGATE_EXCEPTIONS': True,
        'TRAP_BAD_REQUEST_ERRORS': True,
        'SOLR_SERVICE_DISALLOWED_FIELDS': ['full', 'bar'],
    }
    return app.create_app(**overrides)

def setUp(self):
    # Create the schema for the 'solr_service' bind before each test; the
    # bind URL is the bare sqlite one set in create_app, so every test
    # presumably starts from an empty database -- TODO confirm.
    db.create_all(app=self.app, bind=['solr_service'])


def test_cleanup_solr_request(self):
"""
Simple test of the cleanup classmethod
Expand All @@ -41,6 +52,36 @@ def test_cleanup_solr_request(self):
cleaned = SolrInterface.cleanup_solr_request(payload)
self.assertNotIn('*', cleaned['fl'])


def test_limits(self):
    """
    Prevent users from getting certain data
    """
    # One limit for uid '9': the 'full' field paired with an apj filter.
    db.session.add(Limits(uid='9', field='full', filter='bibstem:apj'))
    db.session.commit()
    self.assertTrue(len(db.session.query(Limits).filter_by(uid='9').all()) == 1)

    # 'bar' is dropped (globally disallowed via create_app overrides) while
    # 'full' survives -- presumably because uid '9' has an explicit limit
    # granting a filtered view of it; the limit's filter lands in 'fq'.
    payload = {'fl': ['id,bibcode,title,full,bar'], 'q': '*:*'}
    cleaned = SolrInterface.cleanup_solr_request(payload, user_id='9')
    self.assertEqual(cleaned['fl'], u'id,bibcode,title,full')
    self.assertEqual(cleaned['fq'], [u'bibstem:apj'])

    # When the request already carries an fq, the limit is appended to it.
    cleaned = SolrInterface.cleanup_solr_request(
        {'fl': ['id,bibcode,full'], 'fq': ['*:*']},
        user_id='9')
    self.assertEqual(cleaned['fl'], u'id,bibcode,full')
    self.assertEqual(cleaned['fq'], ['*:*', u'bibstem:apj'])

    # multiple entries for the user
    db.session.add(Limits(uid='9', field='bar', filter='bibstem:apr'))
    db.session.commit()

    # Mixed-case field names are normalized to lowercase and every filter
    # belonging to the user is applied.
    cleaned = SolrInterface.cleanup_solr_request(
        {'fl': ['id,bibcode,fuLL,BAR'], 'fq': ['*:*']},
        user_id='9')
    self.assertEqual(cleaned['fl'], u'id,bibcode,full,bar')
    self.assertEqual(cleaned['fq'], ['*:*', u'bibstem:apj', u'bibstem:apr'])


class TestWebservices(TestCase):

Expand Down Expand Up @@ -331,5 +372,6 @@ def request_callback(request, uri, headers):
self.assertEqual(resp.json['error'],
'You can only pass one content stream.')


if __name__ == '__main__':
unittest.main()
Loading

0 comments on commit bbb25d4

Please sign in to comment.