feat(native-filters): add support for import/export dashboard (apache#15253)

* feat(native-filters): add support for import/export

* fix test and non-dataset filters

* lint
villebro authored and cccs-RyanS committed Dec 17, 2021
1 parent bf91faf commit 46f2af3
Showing 4 changed files with 70 additions and 3 deletions.
8 changes: 7 additions & 1 deletion superset/connectors/base/models.py
@@ -22,7 +22,7 @@
 from flask_appbuilder.security.sqla.models import User
 from sqlalchemy import and_, Boolean, Column, Integer, String, Text
 from sqlalchemy.ext.declarative import declared_attr
-from sqlalchemy.orm import foreign, Query, relationship, RelationshipProperty
+from sqlalchemy.orm import foreign, Query, relationship, RelationshipProperty, Session
 
 from superset import security_manager
 from superset.constants import NULL_STRING
@@ -516,6 +516,12 @@ def raise_for_access(self) -> None:
 
         security_manager.raise_for_access(datasource=self)
 
+    @classmethod
+    def get_datasource_by_name(
+        cls, session: Session, datasource_name: str, schema: str, database_name: str
+    ) -> Optional["BaseDatasource"]:
+        raise NotImplementedError()
+
 
 class BaseColumn(AuditMixinNullable, ImportExportMixin):
     """Interface for column"""
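The base class only declares the lookup contract; each registered connector is meant to override it. A minimal sketch of what a concrete override could look like, with illustrative column names rather than the actual SqlaTable implementation:

from typing import Optional

from sqlalchemy.orm import Session

from superset.connectors.base.models import BaseDatasource


class ExampleDatasource(BaseDatasource):  # hypothetical connector class
    @classmethod
    def get_datasource_by_name(
        cls, session: Session, datasource_name: str, schema: str, database_name: str
    ) -> Optional["ExampleDatasource"]:
        # Filter on illustrative columns; a real connector would also join to
        # its database model. Returning None rather than raising is the
        # contract implied by the Optional return type on the base class.
        return (
            session.query(cls)
            .filter(
                cls.name == datasource_name,
                cls.schema == schema,
                cls.database_name == database_name,
            )
            .one_or_none()
        )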
26 changes: 26 additions & 0 deletions superset/connectors/connector_registry.py
@@ -16,8 +16,10 @@
 # under the License.
 from typing import Dict, List, Optional, Set, Type, TYPE_CHECKING
 
+from flask_babel import _
 from sqlalchemy import or_
 from sqlalchemy.orm import Session, subqueryload
+from sqlalchemy.orm.exc import NoResultFound
 
 from superset.datasets.commands.exceptions import DatasetNotFoundError
 
@@ -73,6 +75,30 @@ def get_all_datasources(cls, session: Session) -> List["BaseDatasource"]:
             datasources.extend(qry.all())
         return datasources
 
+    @classmethod
+    def get_datasource_by_id(  # pylint: disable=too-many-arguments
+        cls, session: Session, datasource_id: int,
+    ) -> "BaseDatasource":
+        """
+        Find a datasource instance based on the unique id.
+
+        :param session: Session to use
+        :param datasource_id: unique id of datasource
+        :return: Datasource corresponding to the id
+        :raises NoResultFound: if no datasource is found corresponding to the id
+        """
+        for datasource_class in ConnectorRegistry.sources.values():
+            try:
+                return (
+                    session.query(datasource_class)
+                    .filter(datasource_class.id == datasource_id)
+                    .one()
+                )
+            except NoResultFound:
+                # proceed to next datasource type
+                pass
+        raise NoResultFound(_("Datasource id not found: %(id)s", id=datasource_id))
+
     @classmethod
     def get_datasource_by_name(  # pylint: disable=too-many-arguments
         cls,
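Since ConnectorRegistry.sources maps several datasource types, the new lookup probes each registered class in turn and only raises NoResultFound after all of them miss. A usage sketch (the helper name is illustrative, not part of this commit):

from sqlalchemy.orm.exc import NoResultFound

from superset import db
from superset.connectors.connector_registry import ConnectorRegistry


def resolve_datasource(datasource_id: int):
    # Illustrative helper: get_datasource_by_id raises the same NoResultFound
    # it catches internally once every registered type has been checked.
    try:
        return ConnectorRegistry.get_datasource_by_id(db.session, datasource_id)
    except NoResultFound:
        return None  # caller decides how to recover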
25 changes: 23 additions & 2 deletions superset/dashboards/commands/importers/v0.py
@@ -84,6 +84,7 @@ def import_chart(
 def import_dashboard(
     # pylint: disable=too-many-locals,too-many-branches,too-many-statements
     dashboard_to_import: Dashboard,
+    dataset_id_mapping: Optional[Dict[int, int]] = None,
     import_time: Optional[int] = None,
 ) -> int:
     """Imports the dashboard from the object to the database.
@@ -140,6 +141,20 @@ def alter_positions(
                     value["meta"]["chartId"] = old_to_new_slc_id_dict[old_slice_id]
         dashboard.position_json = json.dumps(position_data)
 
+    def alter_native_filters(dashboard: Dashboard) -> None:
+        json_metadata = json.loads(dashboard.json_metadata)
+        native_filter_configuration = json_metadata.get("native_filter_configuration")
+        if not native_filter_configuration:
+            return
+        for native_filter in native_filter_configuration:
+            for target in native_filter.get("targets", []):
+                old_dataset_id = target.get("datasetId")
+                if dataset_id_mapping and old_dataset_id is not None:
+                    target["datasetId"] = dataset_id_mapping.get(
+                        old_dataset_id, old_dataset_id,
+                    )
+        dashboard.json_metadata = json.dumps(json_metadata)
+
     logger.info("Started import of the dashboard: %s", dashboard_to_import.to_json())
     session = db.session
     logger.info("Dashboard has %d slices", len(dashboard_to_import.slices))
@@ -235,6 +250,8 @@ def alter_positions(
         timed_refresh_immune_slices=new_timed_refresh_immune_slices
     )
 
+    alter_native_filters(dashboard_to_import)
+
     new_slices = (
         session.query(Slice).filter(Slice.id.in_(old_to_new_slc_id_dict.values())).all()
     )
@@ -301,11 +318,15 @@ def import_dashboards(
     data = json.loads(content, object_hook=decode_dashboards)
     if not data:
         raise DashboardImportException(_("No data in file"))
+    dataset_id_mapping: Dict[int, int] = {}
     for table in data["datasources"]:
-        import_dataset(table, database_id, import_time=import_time)
+        new_dataset_id = import_dataset(table, database_id, import_time=import_time)
+        params = json.loads(table.params)
+        dataset_id_mapping[params["remote_id"]] = new_dataset_id
 
     session.commit()
     for dashboard in data["dashboards"]:
-        import_dashboard(dashboard, import_time=import_time)
+        import_dashboard(dashboard, dataset_id_mapping, import_time=import_time)
     session.commit()
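The remapping is easiest to see on a concrete payload. A self-contained sketch of what alter_native_filters does to the dashboard's json_metadata, with illustrative ids (exported dataset 12 becomes dataset 42 on the importing instance):

# Values below are illustrative. The mapping is built in import_dashboards
# from each exported dataset's params["remote_id"].
dataset_id_mapping = {12: 42}

json_metadata = {
    "native_filter_configuration": [
        {
            "id": "NATIVE_FILTER-abc123",  # hypothetical filter id
            "targets": [{"column": {"name": "country"}, "datasetId": 12}],
        }
    ]
}

# The same remapping loop that alter_native_filters applies to the metadata:
for native_filter in json_metadata["native_filter_configuration"]:
    for target in native_filter.get("targets", []):
        old_dataset_id = target.get("datasetId")
        if old_dataset_id is not None:
            target["datasetId"] = dataset_id_mapping.get(old_dataset_id, old_dataset_id)

assert json_metadata["native_filter_configuration"][0]["targets"][0]["datasetId"] == 42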
14 changes: 14 additions & 0 deletions superset/models/dashboard.py
@@ -334,6 +334,20 @@ def export_dashboards(  # pylint: disable=too-many-locals
                 # set slices without creating ORM relations
                 slices = copied_dashboard.__dict__.setdefault("slices", [])
                 slices.append(copied_slc)
+
+            json_metadata = json.loads(dashboard.json_metadata)
+            native_filter_configuration: List[Dict[str, Any]] = json_metadata.get(
+                "native_filter_configuration", []
+            )
+            for native_filter in native_filter_configuration:
+                session = db.session()
+                for target in native_filter.get("targets", []):
+                    id_ = target.get("datasetId")
+                    if id_ is None:
+                        continue
+                    datasource = ConnectorRegistry.get_datasource_by_id(session, id_)
+                    datasource_ids.add((datasource.id, datasource.type))
+
             copied_dashboard.alter_params(remote_id=dashboard_id)
             copied_dashboards.append(copied_dashboard)
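Together, the two sides close the loop: export bundles every dataset a native filter targets, even when no chart on the dashboard uses it, and import rewrites the filter targets to the newly created dataset ids. A rough round-trip sketch, assuming the v0 entry points keep the signatures shown above (the dashboard id is illustrative):

from superset import db
from superset.dashboards.commands.importers.v0 import import_dashboards
from superset.models.dashboard import Dashboard

# Export: native-filter targets now contribute their datasets to the bundle.
exported = Dashboard.export_dashboards([42])  # illustrative dashboard id

# Import: remote ids recorded in each dataset's params are remapped onto the
# fresh dataset ids before the dashboard's json_metadata is saved.
import_dashboards(db.session, exported, database_id=None)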
