fix: Ensure verbose mapping exists for SQL Lab Query model #23597

Merged
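For context, the diff below widens several `datasource` parameters from `Optional[BaseDatasource]` to `Optional[Union[BaseDatasource, Query]]` and adds an (empty) `verbose_map` entry to `Query.data`, so chart-data post-processing that runs against a SQL Lab query can read the label mapping without a KeyError. A minimal sketch of that pattern, using stand-in classes (`SimpleDatasource`, `SimpleQuery`, and `get_verbose_names` are illustrative names, not Superset API):

# Hypothetical sketch of why both models must expose data["verbose_map"];
# the class and function names here are illustrative, not Superset code.
from typing import Dict, Optional, Union


class SimpleDatasource:
    """Stand-in for superset.connectors.base.models.BaseDatasource."""

    @property
    def data(self) -> Dict[str, Dict[str, str]]:
        return {"verbose_map": {"sum__num": "Sum of num"}}


class SimpleQuery:
    """Stand-in for superset.models.sql_lab.Query after this PR."""

    @property
    def data(self) -> Dict[str, Dict[str, str]]:
        # The PR adds an empty verbose_map so callers can index it safely.
        return {"verbose_map": {}}


def get_verbose_names(
    datasource: Optional[Union[SimpleDatasource, SimpleQuery]] = None,
) -> Dict[str, str]:
    """Return the column-label mapping, falling back to an empty dict."""
    if datasource is None:
        return {}
    return datasource.data["verbose_map"]  # no KeyError for either model


print(get_verbose_names(SimpleDatasource()))  # {'sum__num': 'Sum of num'}
print(get_verbose_names(SimpleQuery()))  # {}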
7 changes: 4 additions & 3 deletions superset/charts/data/api.py
@@ -18,7 +18,7 @@

import json
import logging
-from typing import Any, Dict, Optional, TYPE_CHECKING
+from typing import Any, Dict, Optional, TYPE_CHECKING, Union

import simplejson
from flask import current_app, make_response, request, Response
@@ -44,6 +44,7 @@
from superset.dao.exceptions import DatasourceNotFound
from superset.exceptions import QueryObjectValidationError
from superset.extensions import event_logger
+from superset.models.sql_lab import Query
from superset.utils.async_query_manager import AsyncQueryTokenException
from superset.utils.core import create_zip, get_user_id, json_int_dttm_ser
from superset.views.base import CsvResponse, generate_download_headers, XlsxResponse
@@ -342,7 +343,7 @@ def _send_chart_response(
self,
result: Dict[Any, Any],
form_data: Optional[Dict[str, Any]] = None,
-datasource: Optional[BaseDatasource] = None,
+datasource: Optional[Union[BaseDatasource, Query]] = None,
) -> Response:
result_type = result["query_context"].result_type
result_format = result["query_context"].result_format
@@ -405,7 +406,7 @@ def _get_data_response(
command: ChartDataCommand,
force_cached: bool = False,
form_data: Optional[Dict[str, Any]] = None,
-datasource: Optional[BaseDatasource] = None,
+datasource: Optional[Union[BaseDatasource, Query]] = None,
) -> Response:
try:
result = command.run(force_cached=force_cached)
13 changes: 8 additions & 5 deletions superset/charts/post_processing.py
@@ -27,7 +27,7 @@
"""

from io import StringIO
-from typing import Any, Dict, List, Optional, Tuple, TYPE_CHECKING
+from typing import Any, Dict, List, Optional, Tuple, TYPE_CHECKING, Union

import pandas as pd
from flask_babel import gettext as __
@@ -42,6 +42,7 @@

if TYPE_CHECKING:
from superset.connectors.base.models import BaseDatasource
+    from superset.models.sql_lab import Query


def get_column_key(label: Tuple[str, ...], metrics: List[str]) -> Tuple[Any, ...]:
@@ -223,7 +224,7 @@ def list_unique_values(series: pd.Series) -> str:
def pivot_table_v2(
df: pd.DataFrame,
form_data: Dict[str, Any],
datasource: Optional["BaseDatasource"] = None,
datasource: Optional[Union["BaseDatasource", "Query"]] = None,
) -> pd.DataFrame:
"""
Pivot table v2.
@@ -249,7 +250,7 @@ def pivot_table(
def pivot_table(
df: pd.DataFrame,
form_data: Dict[str, Any],
datasource: Optional["BaseDatasource"] = None,
datasource: Optional[Union["BaseDatasource", "Query"]] = None,
) -> pd.DataFrame:
"""
Pivot table (v1).
@@ -285,7 +286,9 @@ def pivot_table(
def table(
df: pd.DataFrame,
form_data: Dict[str, Any],
datasource: Optional["BaseDatasource"] = None, # pylint: disable=unused-argument
datasource: Optional[ # pylint: disable=unused-argument
Union["BaseDatasource", "Query"]
] = None,
) -> pd.DataFrame:
"""
Table.
@@ -314,7 +317,7 @@ def apply_post_process(
def apply_post_process(
result: Dict[Any, Any],
form_data: Optional[Dict[str, Any]] = None,
datasource: Optional["BaseDatasource"] = None,
datasource: Optional[Union["BaseDatasource", "Query"]] = None,
) -> Dict[Any, Any]:
form_data = form_data or {}

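For illustration, here is a simplified, assumed example of how a post-processing step can use the datasource's verbose_map to relabel DataFrame columns; it is not the actual pivot_table_v2/table implementation. An empty map, as the Query model now provides, simply leaves names unchanged:

# Simplified, assumed illustration of verbose-map-based relabelling;
# not the real Superset post-processing code.
from typing import Dict

import pandas as pd


def relabel_columns(df: pd.DataFrame, verbose_map: Dict[str, str]) -> pd.DataFrame:
    """Rename raw metric/column names to their verbose labels when available."""
    return df.rename(columns=lambda name: verbose_map.get(name, name))


df = pd.DataFrame({"sum__num": [1, 2], "state": ["CA", "NY"]})
print(list(relabel_columns(df, {"sum__num": "Sum of num"}).columns))
# ['Sum of num', 'state'] -- an empty verbose_map leaves names unchanged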
1 change: 1 addition & 0 deletions superset/models/sql_lab.py
@@ -246,6 +246,7 @@ def data(self) -> Dict[str, Any]:
"database": {"id": self.database_id, "backend": self.database.backend},
"order_by_choices": order_by_choices,
"schema": self.schema,
"verbose_map": {},
Review comment (PR author): Present here for parity with the superset.connectors.base.models.BaseDatasource model.

}

def raise_for_access(self) -> None:
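The one-line change above gives Query.data the same verbose_map key that BaseDatasource.data exposes. A rough sketch of the resulting dictionary shape, showing only the keys visible in the diff (values are made up, other keys omitted):

# Illustrative shape of Query.data after this PR; values are made up and
# other keys are omitted.
query_data = {
    "database": {"id": 1, "backend": "postgresql"},
    "order_by_choices": [],
    "schema": "public",
    "verbose_map": {},  # new: parity with BaseDatasource.data
}

# Shared chart post-processing can now index the key on either model:
assert query_data["verbose_map"] == {}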
7 changes: 4 additions & 3 deletions superset/utils/core.py
@@ -126,6 +126,7 @@

if TYPE_CHECKING:
from superset.connectors.base.models import BaseColumn, BaseDatasource
+    from superset.models.sql_lab import Query

logging.getLogger("MARKDOWN").setLevel(logging.INFO)
logger = logging.getLogger(__name__)
@@ -1711,7 +1712,7 @@ def get_column_names_from_metrics(metrics: List[Metric]) -> List[str]:

def extract_dataframe_dtypes(
df: pd.DataFrame,
datasource: Optional["BaseDatasource"] = None,
datasource: Optional[Union[BaseDatasource, Query]] = None,
) -> List[GenericDataType]:
"""Serialize pandas/numpy dtypes to generic types"""

@@ -1731,13 +1732,13 @@ def extract_dataframe_dtypes(
if datasource:
for column in datasource.columns:
if isinstance(column, dict):
-columns_by_name[column.get("column_name")] = column
+columns_by_name[column.get("column_name")] = column  # type: ignore
else:
columns_by_name[column.column_name] = column

generic_types: List[GenericDataType] = []
for column in df.columns:
-column_object = columns_by_name.get(column)
+column_object = columns_by_name.get(column)  # type: ignore
series = df[column]
inferred_type = infer_dtype(series)
if isinstance(column_object, dict):
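The # type: ignore hints above reflect that the column lookup now mixes two shapes: Query.columns can yield plain dicts while BaseDatasource.columns yields ORM column objects. A hedged sketch of that pattern (FakeColumn and index_columns are illustrative names, not Superset code):

# Sketch of keying a mixed list of dict and object columns by name,
# mirroring the branching in extract_dataframe_dtypes.
from typing import Any, Dict, List, Union


class FakeColumn:
    """Stand-in for an ORM column object exposing a column_name attribute."""

    def __init__(self, column_name: str) -> None:
        self.column_name = column_name


def index_columns(
    columns: List[Union[Dict[str, Any], FakeColumn]],
) -> Dict[str, Union[Dict[str, Any], FakeColumn]]:
    """Key columns by name, accepting either dicts or column objects."""
    by_name: Dict[str, Union[Dict[str, Any], FakeColumn]] = {}
    for column in columns:
        if isinstance(column, dict):
            by_name[column["column_name"]] = column
        else:
            by_name[column.column_name] = column
    return by_name


print(sorted(index_columns([{"column_name": "ds"}, FakeColumn("num")])))
# ['ds', 'num']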