fix: respect chart cache timeout setting #21637

Merged 4 commits on Sep 29, 2022
6 changes: 6 additions & 0 deletions superset/common/query_context.py
@@ -27,6 +27,7 @@
QueryContextProcessor,
)
from superset.common.query_object import QueryObject
from superset.models.slice import Slice

if TYPE_CHECKING:
from superset.connectors.base.models import BaseDatasource
@@ -46,6 +47,7 @@ class QueryContext:
enforce_numerical_metrics: ClassVar[bool] = True

datasource: BaseDatasource
slice_: Optional[Slice] = None
queries: List[QueryObject]
form_data: Optional[Dict[str, Any]]
result_type: ChartDataResultType
@@ -64,6 +66,7 @@ def __init__(
*,
datasource: BaseDatasource,
queries: List[QueryObject],
slice_: Optional[Slice],
form_data: Optional[Dict[str, Any]],
result_type: ChartDataResultType,
result_format: ChartDataResultFormat,
@@ -72,6 +75,7 @@ def __init__(
cache_values: Dict[str, Any],
) -> None:
self.datasource = datasource
self.slice_ = slice_
self.result_type = result_type
self.result_format = result_format
self.queries = queries
@@ -98,6 +102,8 @@ def get_payload(
def get_cache_timeout(self) -> Optional[int]:
if self.custom_cache_timeout is not None:
return self.custom_cache_timeout
if self.slice_ and self.slice_.cache_timeout is not None:
return self.slice_.cache_timeout
if self.datasource.cache_timeout is not None:
return self.datasource.cache_timeout
if hasattr(self.datasource, "database"):
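
Taken together, the patched get_cache_timeout resolves the timeout from the most specific setting to the most general: the per-query override, then the chart (new in this PR), then the datasource, and finally the database (the hunk is truncated above that last step). A minimal standalone sketch of that precedence, using illustrative names rather than Superset's actual API:

from typing import Optional

def resolve_cache_timeout(
    custom_timeout: Optional[int],      # per-query override
    slice_timeout: Optional[int],       # chart-level setting added by this PR
    datasource_timeout: Optional[int],  # dataset-level setting
    database_timeout: Optional[int],    # database-level setting
) -> Optional[int]:
    # The first explicitly configured value wins; None means "not set here".
    for timeout in (
        custom_timeout,
        slice_timeout,
        datasource_timeout,
        database_timeout,
    ):
        if timeout is not None:
            return timeout
    return None  # caller falls back to the cache's global default timeout

With this ordering, a chart's cache_timeout now takes effect even when the dataset or database also defines one, which is the behavior the tests below pin down.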
11 changes: 11 additions & 0 deletions superset/common/query_context_factory.py
@@ -19,10 +19,12 @@
from typing import Any, Dict, List, Optional, TYPE_CHECKING

from superset import app, db
from superset.charts.dao import ChartDAO
from superset.common.chart_data import ChartDataResultFormat, ChartDataResultType
from superset.common.query_context import QueryContext
from superset.common.query_object_factory import QueryObjectFactory
from superset.datasource.dao import DatasourceDAO
from superset.models.slice import Slice
from superset.utils.core import DatasourceDict, DatasourceType

if TYPE_CHECKING:
@@ -55,6 +57,11 @@ def create(
datasource_model_instance = None
if datasource:
datasource_model_instance = self._convert_to_model(datasource)

slice_ = None
if form_data and form_data.get("slice_id") is not None:
slice_ = self._get_slice(form_data.get("slice_id"))

result_type = result_type or ChartDataResultType.FULL
result_format = result_format or ChartDataResultFormat.JSON
queries_ = [
@@ -72,6 +79,7 @@
return QueryContext(
datasource=datasource_model_instance,
queries=queries_,
slice_=slice_,
form_data=form_data,
result_type=result_type,
result_format=result_format,
@@ -88,3 +96,6 @@ def _convert_to_model(self, datasource: DatasourceDict) -> BaseDatasource:
datasource_type=DatasourceType(datasource["type"]),
datasource_id=int(datasource["id"]),
)

def _get_slice(self, slice_id: Any) -> Optional[Slice]:
return ChartDAO.find_by_id(slice_id)
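
For orientation, the request that exercises this new lookup carries the chart id inside form_data, as in the tests below; a rough sketch of the payload shape, with hypothetical ids and an abbreviated query:

payload = {
    "datasource": {"type": "table", "id": 1},  # hypothetical dataset id
    "queries": [{"metrics": ["count"]}],       # abbreviated query shape
    "form_data": {"slice_id": 42},             # hypothetical chart id
}

# POSTed to the chart data endpoint (CHART_DATA_URI in the tests), the factory
# resolves slice 42 via ChartDAO.find_by_id; an unknown id yields None, so the
# timeout resolution simply skips the chart level instead of failing.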
85 changes: 83 additions & 2 deletions tests/integration_tests/charts/data/api_tests.py
@@ -21,7 +21,7 @@
import copy
from datetime import datetime
from io import BytesIO
-from typing import Any, Dict, Optional
+from typing import Any, Dict, Optional, List
from unittest import mock
from zipfile import ZipFile

@@ -38,8 +38,12 @@
load_birth_names_data,
)
from tests.integration_tests.test_app import app

from tests.integration_tests.fixtures.energy_dashboard import (
load_energy_table_with_slice,
load_energy_table_data,
)
import pytest
from superset.models.slice import Slice

from superset.charts.data.commands.get_data_command import ChartDataCommand
from superset.connectors.sqla.models import TableColumn, SqlaTable
@@ -976,3 +980,80 @@ def test_data_cache_default_timeout(
):
rv = test_client.post(CHART_DATA_URI, json=physical_query_context)
assert rv.json["result"][0]["cache_timeout"] == 3456


def test_chart_cache_timeout(
test_client,
login_as_admin,
physical_query_context,
load_energy_table_with_slice: List[Slice],
):
# should override datasource cache timeout

slice_with_cache_timeout = load_energy_table_with_slice[0]
slice_with_cache_timeout.cache_timeout = 20
db.session.merge(slice_with_cache_timeout)

datasource: SqlaTable = (
db.session.query(SqlaTable)
.filter(SqlaTable.id == physical_query_context["datasource"]["id"])
.first()
)
datasource.cache_timeout = 1254
db.session.merge(datasource)

db.session.commit()

physical_query_context["form_data"] = {"slice_id": slice_with_cache_timeout.id}

rv = test_client.post(CHART_DATA_URI, json=physical_query_context)
assert rv.json["result"][0]["cache_timeout"] == 20


@mock.patch(
"superset.common.query_context_processor.config",
{
**app.config,
"DATA_CACHE_CONFIG": {
**app.config["DATA_CACHE_CONFIG"],
"CACHE_DEFAULT_TIMEOUT": 1010,
},
},
)
def test_chart_cache_timeout_not_present(
test_client, login_as_admin, physical_query_context
):
# should use datasource cache, if it's present

datasource: SqlaTable = (
db.session.query(SqlaTable)
.filter(SqlaTable.id == physical_query_context["datasource"]["id"])
.first()
)
datasource.cache_timeout = 1980
db.session.merge(datasource)
db.session.commit()

rv = test_client.post(CHART_DATA_URI, json=physical_query_context)
assert rv.json["result"][0]["cache_timeout"] == 1980


@mock.patch(
"superset.common.query_context_processor.config",
{
**app.config,
"DATA_CACHE_CONFIG": {
**app.config["DATA_CACHE_CONFIG"],
"CACHE_DEFAULT_TIMEOUT": 1010,
},
},
)
def test_chart_cache_timeout_chart_not_found(
test_client, login_as_admin, physical_query_context
):
# should use default timeout

physical_query_context["form_data"] = {"slice_id": 0}

rv = test_client.post(CHART_DATA_URI, json=physical_query_context)
assert rv.json["result"][0]["cache_timeout"] == 1010
14 changes: 9 additions & 5 deletions tests/integration_tests/fixtures/energy_dashboard.py
@@ -15,7 +15,7 @@
# specific language governing permissions and limitations
# under the License.
import random
-from typing import Dict, Set
+from typing import Dict, List, Set

import pandas as pd
import pytest
@@ -59,8 +59,8 @@ def load_energy_table_data():
@pytest.fixture()
def load_energy_table_with_slice(load_energy_table_data):
with app.app_context():
-        _create_energy_table()
-        yield
+        slices = _create_energy_table()
+        yield slices
_cleanup()


@@ -69,7 +69,7 @@ def _get_dataframe():
return pd.DataFrame.from_dict(data)


-def _create_energy_table():
+def _create_energy_table() -> List[Slice]:
table = create_table_metadata(
table_name=ENERGY_USAGE_TBL_NAME,
database=get_example_database(),
@@ -86,13 +86,17 @@ def _create_energy_table():
db.session.commit()
table.fetch_metadata()

+    slices = []
     for slice_data in _get_energy_slices():
-        _create_and_commit_energy_slice(
+        slice = _create_and_commit_energy_slice(
             table,
             slice_data["slice_title"],
             slice_data["viz_type"],
             slice_data["params"],
         )
+        slices.append(slice)
+    return slices


def _create_and_commit_energy_slice(