This repository has been archived by the owner on Mar 24, 2021. It is now read-only.

Commit 43bba6a

Log sql queries at DEBUG level, env var LOG_LEVEL
richardTowers committed Aug 30, 2018
1 parent 0a26ffa commit 43bba6a
Showing 5 changed files with 43 additions and 22 deletions.
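
The change is mechanical: every method in the Postgres storage engine that previously passed SQL straight to cursor.execute() now builds the statement into a local `query` variable, logs it at DEBUG level through a module-level logger, and then executes it. A minimal sketch of the pattern, assuming a hypothetical run_query helper (the commit inlines the pattern in each method rather than extracting one):

    import logging

    logger = logging.getLogger(__name__)

    def run_query(cursor, method_name, query):
        # Log the fully-built SQL before executing it. At the default
        # LOG_LEVEL (ERROR or INFO, see the config changes below) these
        # messages are suppressed; LOG_LEVEL=DEBUG makes them visible.
        logger.debug(method_name + ' - executing sql query: ' + query)
        cursor.execute(query)
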
57 changes: 39 additions & 18 deletions backdrop/core/storage/postgres.py
@@ -5,6 +5,7 @@
 import psycopg2
 import psycopg2.extras
 import pytz
+import logging
 
 from uuid import uuid4
 
@@ -22,6 +23,8 @@
 )
 from .. import timeutils
 
+logger = logging.getLogger(__name__)
+
 
 class PostgresStorageEngine(object):
 
@@ -37,39 +40,48 @@ def create_table_and_indices(self):
         support this project, who are the losers).
         """
         with self.connection.cursor() as cursor:
-            cursor.execute(CREATE_TABLE_SQL)
+            query = CREATE_TABLE_SQL
+            logger.debug(
+                'create_table_and_indices - executing sql query: ' + query)
+            cursor.execute(query)
         self.connection.commit()
 
     def drop_table_and_indices(self):
         """
         As with the create above, this is likely only used during tests.
         """
         with self.connection.cursor() as cursor:
-            cursor.execute(DROP_TABLE_SQL)
+            query = DROP_TABLE_SQL
+            logger.debug(
+                'drop_table_and_indices - executing sql query: ' + query)
+            cursor.execute(query)
         self.connection.commit()
 
     def data_set_exists(self, data_set_id):
         # This is slightly different to the mongo implementation
         # in that it will return False if `create_data_set` has
         # been called, but no records have been saved.
         with self.connection.cursor() as cursor:
-            cursor.execute(
-                create_data_set_exists_query(cursor.mogrify, data_set_id))
+            query = create_data_set_exists_query(cursor.mogrify, data_set_id)
+            logger.debug('data_set_exists - executing sql query: ' + query)
+            cursor.execute(query)
             return cursor.rowcount > 0
 
     def create_data_set(self, data_set_id, size):
         pass
 
     def delete_data_set(self, data_set_id):
         with self.connection.cursor() as cursor:
-            cursor.execute(
-                create_delete_data_set_query(cursor.mogrify, data_set_id))
+            query = create_delete_data_set_query(cursor.mogrify, data_set_id)
+            logger.debug('delete_data_set - executing sql query: ' + query)
+            cursor.execute(query)
         self.connection.commit()
 
     def get_last_updated(self, data_set_id):
         with self.connection.cursor() as cursor:
-            cursor.execute(
-                create_get_last_updated_query(cursor.mogrify, data_set_id))
+            query = create_get_last_updated_query(cursor.mogrify, data_set_id)
+            logger.debug('get_last_updated - executing sql query: ' + query)
+            cursor.execute(query)
 
             if cursor.rowcount == 0:
                 return None
@@ -80,8 +92,10 @@ def get_last_updated(self, data_set_id):
     def batch_last_updated(self, data_sets):
         collections = [collection.name for collection in data_sets]
         with self.connection.cursor() as cursor:
-            cursor.execute(
-                create_batch_last_updated_query(cursor.mogrify, collections))
+            query = create_batch_last_updated_query(
+                cursor.mogrify, collections)
+            logger.debug('batch_last_updated - executing sql query: ' + query)
+            cursor.execute(query)
             results = cursor.fetchall()
             timestamp_by_collection = {
                 collection: max_timestamp for [collection, max_timestamp] in results}
@@ -103,8 +117,10 @@ def save_record(self, data_set_id, record):
 
     def find_record(self, data_set_id, record_id):
         with self.connection.cursor() as cursor:
-            cursor.execute(
-                create_find_record_query(cursor.mogrify, data_set_id, record_id))
+            query = create_find_record_query(
+                cursor.mogrify, data_set_id, record_id)
+            logger.debug('find_record - executing sql query: ' + query)
+            cursor.execute(query)
             (record,) = cursor.fetchone()
             return _parse_datetime_fields(record)
 
@@ -114,21 +130,26 @@ def update_record(self, data_set_id, record_id, record):
         updated_at = timeutils.now()
         ts = record['_timestamp'] if '_timestamp' in record else updated_at
 
         with self.connection.cursor() as cursor:
-            cursor.execute(create_update_record_query(
-                cursor.mogrify, data_set_id, record, record_id, ts, updated_at))
+            query = create_update_record_query(
+                cursor.mogrify, data_set_id, record, record_id, ts, updated_at)
+            logger.debug('update_record - executing sql query: ' + query)
+            cursor.execute(query)
         self.connection.commit()
 
     def delete_record(self, data_set_id, record_id):
         with self.connection.cursor() as cursor:
-            cursor.execute(
-                create_delete_record_query(cursor.mogrify, data_set_id, record_id))
+            query = create_delete_record_query(
+                cursor.mogrify, data_set_id, record_id)
+            logger.debug('delete_record - executing sql query: ' + query)
+            cursor.execute(query)
         self.connection.commit()
 
     def execute_query(self, data_set_id, query):
         with self.connection.cursor() as cursor:
-            pg_query, convert_query_result_to_dictionaries = create_sql_query(
+            query, convert_query_result_to_dictionaries = create_sql_query(
                 cursor.mogrify, data_set_id, query)
-            cursor.execute(pg_query)
+            logger.debug('execute_query - executing sql query: ' + query)
+            cursor.execute(query)
             records = convert_query_result_to_dictionaries(cursor.fetchall())
             return [_parse_datetime_fields(record) for record in records]

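The query builders such as create_data_set_exists_query take cursor.mogrify and return a complete SQL string with the parameters already bound (psycopg2's mogrify returns exactly the statement that would be sent to the server after argument binding), which is what makes the result both safe to execute and meaningful to log. A sketch of the shape of one such builder; the table and column names are hypothetical, since the real builders are defined elsewhere in the repository:

    def create_data_set_exists_query(mogrify, data_set_id):
        # mogrify binds the parameters client-side and returns the
        # resulting SQL, so callers can log it verbatim before running it.
        # 'mongo' and 'collection' are placeholder names, not the schema.
        return mogrify(
            'SELECT 1 FROM mongo WHERE collection = %(collection)s LIMIT 1',
            {'collection': data_set_id})

Note that under Python 3 mogrify returns bytes, so the string concatenation in the logging calls assumes Python 2 (or an explicit decode).
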
2 changes: 1 addition & 1 deletion backdrop/read/config/production.py
@@ -7,5 +7,5 @@
 CA_CERTIFICATE = PAAS.get('CA_CERTIFICATE')
 STAGECRAFT_URL = os.getenv('STAGECRAFT_URL')
 SIGNON_API_USER_TOKEN = os.getenv('SIGNON_API_USER_TOKEN')
-LOG_LEVEL = "ERROR"
+LOG_LEVEL = os.getenv("LOG_LEVEL", "ERROR")
 SESSION_COOKIE_SECURE = True
2 changes: 1 addition & 1 deletion backdrop/read/config/staging.py
@@ -7,5 +7,5 @@
 CA_CERTIFICATE = PAAS.get('CA_CERTIFICATE')
 STAGECRAFT_URL = os.getenv('STAGECRAFT_URL')
 SIGNON_API_USER_TOKEN = os.getenv('SIGNON_API_USER_TOKEN')
-LOG_LEVEL = "ERROR"
+LOG_LEVEL = os.getenv("LOG_LEVEL", "INFO")
 SESSION_COOKIE_SECURE = True
2 changes: 1 addition & 1 deletion backdrop/write/config/production.py
@@ -9,7 +9,7 @@
 BROKER_URL = PAAS.get('REDIS_URL') or os.getenv('REDIS_URL')
 BROKER_FAILOVER_STRATEGY = "round-robin"
 SIGNON_API_USER_TOKEN = os.getenv('SIGNON_API_USER_TOKEN')
-LOG_LEVEL = "INFO"
+LOG_LEVEL = os.getenv("LOG_LEVEL", "ERROR")
 DATA_SET_UPLOAD_FORMAT = {
     "ithc_excel": "excel",
 }
2 changes: 1 addition & 1 deletion backdrop/write/config/staging.py
@@ -9,7 +9,7 @@
 BROKER_URL = PAAS.get('REDIS_URL') or os.getenv('REDIS_URL')
 BROKER_FAILOVER_STRATEGY = "round-robin"
 SIGNON_API_USER_TOKEN = os.getenv('SIGNON_API_USER_TOKEN')
-LOG_LEVEL = "INFO"
+LOG_LEVEL = os.getenv("LOG_LEVEL", "INFO")
 SESSION_COOKIE_SECURE = True
 SECRET_KEY = os.getenv('SECRET_KEY')
 STAGECRAFT_COLLECTION_ENDPOINT_TOKEN = os.getenv(
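
The four config changes make the level overridable per environment while normalising the defaults: with this commit both apps default to ERROR in production and INFO in staging unless LOG_LEVEL is set. How the setting is applied is outside this diff; assuming the app maps LOG_LEVEL onto the standard logging module at startup, the wiring would look something like:

    # Hypothetical wiring - this commit only defines LOG_LEVEL; the code
    # that feeds it to the logging module lives elsewhere in the app.
    import logging
    import os

    logging.basicConfig(level=getattr(logging, os.getenv('LOG_LEVEL', 'ERROR')))

With that in place, exporting LOG_LEVEL=DEBUG before starting the service surfaces every SQL statement the storage engine runs.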
