Merge pull request #614 from EverythingMe/fix/caching
Fix: don't cache /results API endpoint
arikfr committed Oct 18, 2015
2 parents 4f4dc13 + 799ce3e commit 56b51f6
Showing 3 changed files with 54 additions and 28 deletions.
53 changes: 28 additions & 25 deletions redash/handlers/query_results.py
@@ -62,25 +62,8 @@ def post(self):

ONE_YEAR = 60 * 60 * 24 * 365.25

cache_headers = {
'Cache-Control': 'max-age=%d' % ONE_YEAR
}

class QueryResultAPI(BaseResource):
@staticmethod
def csv_response(query_result):
s = cStringIO.StringIO()

query_data = json.loads(query_result.data)
writer = csv.DictWriter(s, fieldnames=[col['name'] for col in query_data['columns']])
writer.writer = utils.UnicodeWriter(s)
writer.writeheader()
for row in query_data['rows']:
writer.writerow(row)

headers = {'Content-Type': "text/csv; charset=UTF-8"}
headers.update(cache_headers)
return make_response(s.getvalue(), 200, headers)

@staticmethod
def add_cors_headers(headers):
@@ -106,6 +89,7 @@ def options(self, query_id=None, query_result_id=None, filetype='json'):

@require_permission('view_query')
def get(self, query_id=None, query_result_id=None, filetype='json'):
should_cache = query_result_id is not None
if query_result_id is None and query_id is not None:
query = models.Query.get(models.Query.id == query_id)
if query:
@@ -133,21 +117,40 @@ def get(self, query_id=None, query_result_id=None, filetype='json'):

record_event.delay(event)

headers = {}
if filetype == 'json':
response = self.make_json_response(query_result)
else:
response = self.make_csv_response(query_result)

if len(settings.ACCESS_CONTROL_ALLOW_ORIGIN) > 0:
self.add_cors_headers(headers)
self.add_cors_headers(response.headers)

if filetype == 'json':
data = json.dumps({'query_result': query_result.to_dict()}, cls=utils.JSONEncoder)
headers.update(cache_headers)
return make_response(data, 200, headers)
else:
return self.csv_response(query_result)
if should_cache:
response.headers.add_header('Cache-Control', 'max-age=%d' % ONE_YEAR)

return response

else:
abort(404)

def make_json_response(self, query_result):
data = json.dumps({'query_result': query_result.to_dict()}, cls=utils.JSONEncoder)
return make_response(data, 200, {})

@staticmethod
def make_csv_response(query_result):
s = cStringIO.StringIO()

query_data = json.loads(query_result.data)
writer = csv.DictWriter(s, fieldnames=[col['name'] for col in query_data['columns']])
writer.writer = utils.UnicodeWriter(s)
writer.writeheader()
for row in query_data['rows']:
writer.writerow(row)

headers = {'Content-Type': "text/csv; charset=UTF-8"}
return make_response(s.getvalue(), 200, headers)


api.add_resource(QueryResultListAPI, '/api/query_results', endpoint='query_results')
api.add_resource(QueryResultAPI,
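The behavioral change in the handler above: the far-future Cache-Control header is now attached only when the request names a specific query_result_id, so /api/queries/<id>/results — whose payload changes on every refresh — is no longer cached for a year. A minimal sketch of the same rule in plain Flask (hypothetical data names, not Redash's actual BaseResource/permission plumbing):

    import flask

    app = flask.Flask(__name__)

    ONE_YEAR = 60 * 60 * 24 * 365.25  # same max-age constant as the handler above

    RESULTS = {7: {'columns': [], 'rows': []}}  # stand-in store: result id -> data
    LATEST = {42: 7}                            # stand-in map: query id -> latest result id

    @app.route('/api/queries/<int:query_id>/results.json')
    @app.route('/api/queries/<int:query_id>/results/<int:query_result_id>.json')
    def query_results(query_id, query_result_id=None):
        # Only an immutable, explicitly addressed result may be cached aggressively.
        should_cache = query_result_id is not None
        result_id = query_result_id if should_cache else LATEST[query_id]
        response = flask.jsonify(query_result=RESULTS[result_id])
        if should_cache:
            response.headers.add('Cache-Control', 'max-age=%d' % ONE_YEAR)
        return response
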
22 changes: 22 additions & 0 deletions tests/handlers/test_query_results.py
@@ -0,0 +1,22 @@
from tests import BaseTestCase
from tests.factories import query_result_factory, query_factory
from tests.handlers import authenticated_user, json_request

from redash.wsgi import app


class TestQueryResultsCacheHeaders(BaseTestCase):
def test_uses_cache_headers_for_specific_result(self):
query_result = query_result_factory.create()
query = query_factory.create(latest_query_data=query_result)
with app.test_client() as c, authenticated_user(c):
rv = json_request(c.get, '/api/queries/{}/results/{}.json'.format(query.id, query_result.id))
self.assertIn('Cache-Control', rv.headers)

def test_doesnt_use_cache_headers_for_non_specific_result(self):
query_result = query_result_factory.create()
query = query_factory.create(latest_query_data=query_result)
with app.test_client() as c, authenticated_user(c):
rv = json_request(c.get, '/api/queries/{}/results.json'.format(query.id))
self.assertNotIn('Cache-Control', rv.headers)

7 changes: 4 additions & 3 deletions tests/test_models.py
@@ -3,6 +3,7 @@
import json
from unittest import TestCase
import mock
from dateutil.parser import parse as date_parse
from tests import BaseTestCase
from redash import models
from factories import dashboard_factory, query_factory, data_source_factory, query_result_factory, user_factory, widget_factory
@@ -99,9 +100,9 @@ def test_exact_time_that_needs_reschedule(self):
self.assertTrue(models.should_schedule_next(yesterday, now, schedule))

def test_exact_time_that_doesnt_need_reschedule(self):
now = datetime.datetime.now()
yesterday = (now - datetime.timedelta(days=1)).replace(hour=now.hour+3, minute=now.minute+1)
schedule = "{:02d}:00".format(now.hour + 3)
now = date_parse("2015-10-16 20:10")
yesterday = date_parse("2015-10-15 23:07")
schedule = "23:00"
self.assertFalse(models.should_schedule_next(yesterday, now, schedule))

def test_exact_time_with_day_change(self):
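The test_models.py change swaps live wall-clock arithmetic for fixed timestamps parsed with dateutil. The old setup called .replace(hour=now.hour + 3, minute=now.minute + 1), which raises ValueError whenever the suite runs from 21:00 onward or at minute 59, so the test could fail purely depending on when it ran. A quick illustration of that failure mode (the timestamp below is an arbitrary example):

    import datetime

    now = datetime.datetime(2015, 10, 16, 23, 59)  # example run time late in the day
    try:
        (now - datetime.timedelta(days=1)).replace(hour=now.hour + 3, minute=now.minute + 1)
    except ValueError as exc:
        print('old test setup would crash here:', exc)  # hour must be in 0..23
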
