From b04fd2ea540e5dfd958d9ba36eef1c5f7c9f33ad Mon Sep 17 00:00:00 2001 From: Jesse Yang Date: Wed, 29 Jun 2022 12:27:51 -0700 Subject: [PATCH] feat(report): capture dashboard reports in specific states --- .../dashboards/permalink/commands/create.py | 6 +- ...be71abde154_fix_report_schedule_and_log.py | 2 +- ..._rename_report_schedule_extra_to_extra_.py | 53 ++++++ superset/models/dashboard.py | 6 +- superset/models/helpers.py | 3 +- superset/reports/api.py | 5 +- superset/reports/commands/alert.py | 2 +- superset/reports/commands/base.py | 3 +- superset/reports/commands/bulk_delete.py | 2 +- superset/reports/commands/create.py | 35 ++-- superset/reports/commands/delete.py | 2 +- superset/reports/commands/exceptions.py | 2 +- superset/reports/commands/execute.py | 109 ++++++------ superset/reports/commands/log_prune.py | 2 +- superset/reports/commands/update.py | 2 +- superset/reports/dao.py | 6 +- superset/reports/filters.py | 2 +- superset/reports/logs/api.py | 2 +- .../{models/reports.py => reports/models.py} | 26 +-- superset/reports/notifications/__init__.py | 2 +- superset/reports/notifications/base.py | 2 +- superset/reports/notifications/email.py | 2 +- superset/reports/notifications/slack.py | 2 +- superset/reports/schemas.py | 3 +- superset/reports/types.py | 23 +++ superset/views/base.py | 2 +- tests/integration_tests/charts/api_tests.py | 2 +- tests/integration_tests/dashboard_utils.py | 4 +- .../integration_tests/dashboards/api_tests.py | 2 +- .../integration_tests/databases/api_tests.py | 2 +- .../fixtures/tabbed_dashboard.py | 101 ++++++++--- tests/integration_tests/reports/api_tests.py | 85 +--------- .../commands/create_dashboard_report_tests.py | 74 +++++++++ .../execute_dashboard_report_tests.py | 66 ++++++++ .../reports/commands_tests.py | 157 ++---------------- .../reports/scheduler_tests.py | 3 +- tests/integration_tests/reports/utils.py | 114 ++++++++++++- tests/integration_tests/test_app.py | 5 +- 38 files changed, 561 insertions(+), 360 
deletions(-) create mode 100644 superset/migrations/versions/2022-07-11_11-26_ffa79af61a56_rename_report_schedule_extra_to_extra_.py rename superset/{models/reports.py => reports/models.py} (93%) create mode 100644 superset/reports/types.py create mode 100644 tests/integration_tests/reports/commands/create_dashboard_report_tests.py create mode 100644 tests/integration_tests/reports/commands/execute_dashboard_report_tests.py diff --git a/superset/dashboards/permalink/commands/create.py b/superset/dashboards/permalink/commands/create.py index 75cb4f5748ab..51dac2d5dee7 100644 --- a/superset/dashboards/permalink/commands/create.py +++ b/superset/dashboards/permalink/commands/create.py @@ -31,8 +31,10 @@ class CreateDashboardPermalinkCommand(BaseDashboardPermalinkCommand): """ - Get or create a permalink key for the given dashboard in certain state. - Will reuse the key for the same user and dashboard state. + Get or create a permalink key for the dashboard. + + The same dashboard_id and state for the same user will return the + same permalink. 
""" def __init__( diff --git a/superset/migrations/versions/2022-05-03_19-39_cbe71abde154_fix_report_schedule_and_log.py b/superset/migrations/versions/2022-05-03_19-39_cbe71abde154_fix_report_schedule_and_log.py index 5c88606511d9..d00b60cd1887 100644 --- a/superset/migrations/versions/2022-05-03_19-39_cbe71abde154_fix_report_schedule_and_log.py +++ b/superset/migrations/versions/2022-05-03_19-39_cbe71abde154_fix_report_schedule_and_log.py @@ -31,7 +31,7 @@ from sqlalchemy.ext.declarative import declarative_base from superset import db -from superset.models.reports import ReportState +from superset.reports.models import ReportState Base = declarative_base() diff --git a/superset/migrations/versions/2022-07-11_11-26_ffa79af61a56_rename_report_schedule_extra_to_extra_.py b/superset/migrations/versions/2022-07-11_11-26_ffa79af61a56_rename_report_schedule_extra_to_extra_.py new file mode 100644 index 000000000000..2e485d46ab23 --- /dev/null +++ b/superset/migrations/versions/2022-07-11_11-26_ffa79af61a56_rename_report_schedule_extra_to_extra_.py @@ -0,0 +1,53 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+"""rename_report_schedule_extra_to_extra_json + +So we can reuse the ExtraJSONMixin + +Revision ID: ffa79af61a56 +Revises: 06e1e70058c7 +Create Date: 2022-07-11 11:26:00.010714 + +""" + +# revision identifiers, used by Alembic. +revision = "ffa79af61a56" +down_revision = "06e1e70058c7" + +from alembic import op +from sqlalchemy.types import Text + + +def upgrade(): + op.alter_column( + "report_schedule", + "extra", + new_column_name="extra_json", + # existing info is required for MySQL + existing_type=Text, + existing_nullable=False, + ) + + +def downgrade(): + op.alter_column( + "report_schedule", + "extra_json", + new_column_name="extra", + existing_type=Text, + existing_nullable=False, + ) diff --git a/superset/models/dashboard.py b/superset/models/dashboard.py index 6c733ff4e1af..b8dbf37e7d6f 100644 --- a/superset/models/dashboard.py +++ b/superset/models/dashboard.py @@ -438,13 +438,15 @@ def export_dashboards( # pylint: disable=too-many-locals ) @classmethod - def get(cls, id_or_slug: str) -> Dashboard: + def get(cls, id_or_slug: Union[str, int]) -> Dashboard: session = db.session() qry = session.query(Dashboard).filter(id_or_slug_filter(id_or_slug)) return qry.one_or_none() -def id_or_slug_filter(id_or_slug: str) -> BinaryExpression: +def id_or_slug_filter(id_or_slug: Union[int, str]) -> BinaryExpression: + if isinstance(id_or_slug, int): + return Dashboard.id == id_or_slug if id_or_slug.isdigit(): return Dashboard.id == int(id_or_slug) return Dashboard.slug == id_or_slug diff --git a/superset/models/helpers.py b/superset/models/helpers.py index 051f8fb7c3bf..bc6ecb64526b 100644 --- a/superset/models/helpers.py +++ b/superset/models/helpers.py @@ -485,7 +485,8 @@ def extra(self) -> Dict[str, Any]: ) return {} - def set_extra_json(self, extras: Dict[str, Any]) -> None: + @extra.setter + def extra(self, extras: Dict[str, Any]) -> None: self.extra_json = json.dumps(extras) def set_extra_json_key(self, key: str, value: Any) -> None: diff --git 
a/superset/reports/api.py b/superset/reports/api.py index 9f6fb86a77ad..46480da9995c 100644 --- a/superset/reports/api.py +++ b/superset/reports/api.py @@ -30,7 +30,6 @@ from superset.dashboards.filters import DashboardAccessFilter from superset.databases.filters import DatabaseFilter from superset.extensions import event_logger -from superset.models.reports import ReportSchedule from superset.reports.commands.bulk_delete import BulkDeleteReportScheduleCommand from superset.reports.commands.create import CreateReportScheduleCommand from superset.reports.commands.delete import DeleteReportScheduleCommand @@ -45,6 +44,7 @@ ) from superset.reports.commands.update import UpdateReportScheduleCommand from superset.reports.filters import ReportScheduleAllTextFilter +from superset.reports.models import ReportSchedule from superset.reports.schemas import ( get_delete_ids_schema, openapi_spec_methods_override, @@ -94,6 +94,7 @@ def ensure_alert_reports_enabled(self) -> Optional[Response]: "database.database_name", "database.id", "description", + "extra", "force_screenshot", "grace_period", "last_eval_dttm", @@ -135,6 +136,7 @@ def ensure_alert_reports_enabled(self) -> Optional[Response]: "crontab_humanized", "dashboard_id", "description", + "extra", "id", "last_eval_dttm", "last_state", @@ -156,6 +158,7 @@ def ensure_alert_reports_enabled(self) -> Optional[Response]: "dashboard", "database", "description", + "extra", "force_screenshot", "grace_period", "log_retention", diff --git a/superset/reports/commands/alert.py b/superset/reports/commands/alert.py index 6382826aad54..88ece787d378 100644 --- a/superset/reports/commands/alert.py +++ b/superset/reports/commands/alert.py @@ -27,7 +27,6 @@ from superset import app, jinja_context, security_manager from superset.commands.base import BaseCommand -from superset.models.reports import ReportSchedule, ReportScheduleValidatorType from superset.reports.commands.exceptions import ( AlertQueryError, AlertQueryInvalidTypeError, @@ -36,6 
+35,7 @@ AlertQueryTimeout, AlertValidatorConfigError, ) +from superset.reports.models import ReportSchedule, ReportScheduleValidatorType from superset.utils.core import override_user from superset.utils.retries import retry_call diff --git a/superset/reports/commands/base.py b/superset/reports/commands/base.py index ee2eba7139ed..d685c7e696f0 100644 --- a/superset/reports/commands/base.py +++ b/superset/reports/commands/base.py @@ -22,7 +22,6 @@ from superset.charts.dao import ChartDAO from superset.commands.base import BaseCommand from superset.dashboards.dao import DashboardDAO -from superset.models.reports import ReportCreationMethod from superset.reports.commands.exceptions import ( ChartNotFoundValidationError, ChartNotSavedValidationError, @@ -30,6 +29,7 @@ DashboardNotSavedValidationError, ReportScheduleChartOrDashboardValidationError, ) +from superset.reports.models import ReportCreationMethod logger = logging.getLogger(__name__) @@ -63,6 +63,7 @@ def validate_chart_dashboard( if chart_id and dashboard_id: exceptions.append(ReportScheduleChartOrDashboardValidationError()) + if chart_id: chart = ChartDAO.find_by_id(chart_id) if not chart: diff --git a/superset/reports/commands/bulk_delete.py b/superset/reports/commands/bulk_delete.py index 131a97af2d50..28a39a2fb6ea 100644 --- a/superset/reports/commands/bulk_delete.py +++ b/superset/reports/commands/bulk_delete.py @@ -21,13 +21,13 @@ from superset.commands.base import BaseCommand from superset.dao.exceptions import DAODeleteFailedError from superset.exceptions import SupersetSecurityException -from superset.models.reports import ReportSchedule from superset.reports.commands.exceptions import ( ReportScheduleBulkDeleteFailedError, ReportScheduleForbiddenError, ReportScheduleNotFoundError, ) from superset.reports.dao import ReportScheduleDAO +from superset.reports.models import ReportSchedule logger = logging.getLogger(__name__) diff --git a/superset/reports/commands/create.py 
b/superset/reports/commands/create.py index a67aabef9865..04b050c135ec 100644 --- a/superset/reports/commands/create.py +++ b/superset/reports/commands/create.py @@ -18,13 +18,12 @@ import logging from typing import Any, Dict, List, Optional -from flask_appbuilder.models.sqla import Model +from flask_babel import gettext as _ from marshmallow import ValidationError from superset.commands.base import CreateMixin from superset.dao.exceptions import DAOCreateFailedError from superset.databases.dao import DatabaseDAO -from superset.models.reports import ReportCreationMethod, ReportScheduleType from superset.reports.commands.base import BaseReportScheduleCommand from superset.reports.commands.exceptions import ( DatabaseNotFoundValidationError, @@ -36,6 +35,12 @@ ReportScheduleRequiredTypeValidationError, ) from superset.reports.dao import ReportScheduleDAO +from superset.reports.models import ( + ReportCreationMethod, + ReportSchedule, + ReportScheduleType, +) +from superset.reports.types import ReportScheduleExtra logger = logging.getLogger(__name__) @@ -44,7 +49,7 @@ class CreateReportScheduleCommand(CreateMixin, BaseReportScheduleCommand): def __init__(self, data: Dict[str, Any]): self._properties = data.copy() - def run(self) -> Model: + def run(self) -> ReportSchedule: self.validate() try: report_schedule = ReportScheduleDAO.create(self._properties) @@ -117,20 +122,28 @@ def validate(self) -> None: raise exception def _validate_report_extra(self, exceptions: List[ValidationError]) -> None: - extra = self._properties.get("extra") + extra: Optional[ReportScheduleExtra] = self._properties.get("extra") dashboard = self._properties.get("dashboard") if extra is None or dashboard is None: return - dashboard_tab_ids = extra.get("dashboard_tab_ids") - if dashboard_tab_ids is None: + dashboard_state = extra.get("dashboard") + if not dashboard_state: return - position_data = json.loads(dashboard.position_json) - invalid_tab_ids = [ - tab_id for tab_id in dashboard_tab_ids if 
tab_id not in position_data - ] + + position_data = json.loads(dashboard.position_json or "{}") + active_tabs = dashboard_state.get("activeTabs") or [] + anchor = dashboard_state.get("anchor") + invalid_tab_ids = { + tab_id for tab_id in active_tabs if tab_id not in position_data + } + if anchor and anchor not in position_data: + invalid_tab_ids.add(anchor) if invalid_tab_ids: exceptions.append( - ValidationError(f"Invalid tab IDs selected: {invalid_tab_ids}", "extra") + ValidationError( + _("Invalid tab ids: %(tab_ids)s", tab_ids=str(invalid_tab_ids)), + "extra", + ) ) diff --git a/superset/reports/commands/delete.py b/superset/reports/commands/delete.py index 4c38f9b7cd69..4adf17683a6a 100644 --- a/superset/reports/commands/delete.py +++ b/superset/reports/commands/delete.py @@ -23,13 +23,13 @@ from superset.commands.base import BaseCommand from superset.dao.exceptions import DAODeleteFailedError from superset.exceptions import SupersetSecurityException -from superset.models.reports import ReportSchedule from superset.reports.commands.exceptions import ( ReportScheduleDeleteFailedError, ReportScheduleForbiddenError, ReportScheduleNotFoundError, ) from superset.reports.dao import ReportScheduleDAO +from superset.reports.models import ReportSchedule logger = logging.getLogger(__name__) diff --git a/superset/reports/commands/exceptions.py b/superset/reports/commands/exceptions.py index db46503b93fd..0763e9be8a82 100644 --- a/superset/reports/commands/exceptions.py +++ b/superset/reports/commands/exceptions.py @@ -24,7 +24,7 @@ ForbiddenError, ValidationError, ) -from superset.models.reports import ReportScheduleType +from superset.reports.models import ReportScheduleType class DatabaseNotFoundValidationError(ValidationError): diff --git a/superset/reports/commands/execute.py b/superset/reports/commands/execute.py index ad1b1edcd2a7..0bf6b7956661 100644 --- a/superset/reports/commands/execute.py +++ b/superset/reports/commands/execute.py @@ -17,7 +17,7 @@ import json 
import logging from datetime import datetime, timedelta -from typing import Any, List, Optional +from typing import Any, List, Optional, Union from uuid import UUID import pandas as pd @@ -29,16 +29,10 @@ from superset.commands.base import BaseCommand from superset.commands.exceptions import CommandException from superset.common.chart_data import ChartDataResultFormat, ChartDataResultType -from superset.extensions import feature_flag_manager, machine_auth_provider_factory -from superset.models.reports import ( - ReportDataFormat, - ReportExecutionLog, - ReportRecipients, - ReportRecipientType, - ReportSchedule, - ReportScheduleType, - ReportState, +from superset.dashboards.permalink.commands.create import ( + CreateDashboardPermalinkCommand, ) +from superset.extensions import feature_flag_manager, machine_auth_provider_factory from superset.reports.commands.alert import AlertCommand from superset.reports.commands.exceptions import ( ReportScheduleAlertGracePeriodError, @@ -61,16 +55,21 @@ REPORT_SCHEDULE_ERROR_NOTIFICATION_MARKER, ReportScheduleDAO, ) +from superset.reports.models import ( + ReportDataFormat, + ReportExecutionLog, + ReportRecipients, + ReportRecipientType, + ReportSchedule, + ReportScheduleType, + ReportState, +) from superset.reports.notifications import create_notification from superset.reports.notifications.base import NotificationContent from superset.reports.notifications.exceptions import NotificationError from superset.utils.celery import session_scope from superset.utils.csv import get_chart_csv_data, get_chart_dataframe -from superset.utils.screenshots import ( - BaseScreenshot, - ChartScreenshot, - DashboardScreenshot, -) +from superset.utils.screenshots import ChartScreenshot, DashboardScreenshot from superset.utils.urls import get_url_path from superset.utils.webdriver import DashboardStandaloneMode @@ -174,6 +173,16 @@ def _get_url( force=force, **kwargs, ) + + # If we need to render dashboard in a specific state, use stateful permalink 
+ dashboard_state = self._report_schedule.extra.get("dashboard") + if dashboard_state: + permalink_key = CreateDashboardPermalinkCommand( + dashboard_id=self._report_schedule.dashboard_id, + state=dashboard_state, + ).run() + return get_url_path("Superset.dashboard_permalink", key=permalink_key) + return get_url_path( "Superset.dashboard", user_friendly=user_friendly, @@ -198,49 +207,33 @@ def _get_screenshots(self) -> List[bytes]: :raises: ReportScheduleScreenshotFailedError """ image_data = [] - screenshots: List[BaseScreenshot] = [] + url = self._get_url() + user = self._get_user() if self._report_schedule.chart: - url = self._get_url() - logger.info("Screenshotting chart at %s", url) - screenshots = [ - ChartScreenshot( - url, - self._report_schedule.chart.digest, - window_size=app.config["WEBDRIVER_WINDOW"]["slice"], - thumb_size=app.config["WEBDRIVER_WINDOW"]["slice"], - ) - ] + screenshot: Union[ChartScreenshot, DashboardScreenshot] = ChartScreenshot( + url, + self._report_schedule.chart.digest, + window_size=app.config["WEBDRIVER_WINDOW"]["slice"], + thumb_size=app.config["WEBDRIVER_WINDOW"]["slice"], + ) else: - tabs: Optional[List[str]] = json.loads(self._report_schedule.extra).get( - "dashboard_tab_ids", None + screenshot = DashboardScreenshot( + url, + self._report_schedule.dashboard.digest, + window_size=app.config["WEBDRIVER_WINDOW"]["dashboard"], + thumb_size=app.config["WEBDRIVER_WINDOW"]["dashboard"], ) - dashboard_base_url = self._get_url() - if tabs is None: - urls = [dashboard_base_url] - else: - urls = [f"{dashboard_base_url}#{tab_id}" for tab_id in tabs] - screenshots = [ - DashboardScreenshot( - url, - self._report_schedule.dashboard.digest, - window_size=app.config["WEBDRIVER_WINDOW"]["dashboard"], - thumb_size=app.config["WEBDRIVER_WINDOW"]["dashboard"], - ) - for url in urls - ] - user = self._get_user() - for screenshot in screenshots: - try: - image = screenshot.get_screenshot(user=user) - except SoftTimeLimitExceeded as ex: - 
logger.warning("A timeout occurred while taking a screenshot.") - raise ReportScheduleScreenshotTimeout() from ex - except Exception as ex: - raise ReportScheduleScreenshotFailedError( - f"Failed taking a screenshot {str(ex)}" - ) from ex - if image is not None: - image_data.append(image) + try: + image = screenshot.get_screenshot(user=user) + except SoftTimeLimitExceeded as ex: + logger.warning("A timeout occurred while taking a screenshot.") + raise ReportScheduleScreenshotTimeout() from ex + except Exception as ex: + raise ReportScheduleScreenshotFailedError( + f"Failed taking a screenshot {str(ex)}" + ) from ex + if image is not None: + image_data.append(image) if not image_data: raise ReportScheduleScreenshotFailedError() return image_data @@ -640,11 +633,13 @@ class AsyncExecuteReportScheduleCommand(BaseCommand): - On Alerts uses related Command AlertCommand and sends configured notifications """ - def __init__(self, task_id: str, model_id: int, scheduled_dttm: datetime): + def __init__( + self, task_id: Union[UUID, str], model_id: int, scheduled_dttm: datetime + ): self._model_id = model_id self._model: Optional[ReportSchedule] = None self._scheduled_dttm = scheduled_dttm - self._execution_id = UUID(task_id) + self._execution_id = task_id if isinstance(task_id, UUID) else UUID(task_id) def run(self) -> None: with session_scope(nullpool=True) as session: diff --git a/superset/reports/commands/log_prune.py b/superset/reports/commands/log_prune.py index 4b577b1f55dc..badd267ecfdd 100644 --- a/superset/reports/commands/log_prune.py +++ b/superset/reports/commands/log_prune.py @@ -19,9 +19,9 @@ from superset.commands.base import BaseCommand from superset.dao.exceptions import DAODeleteFailedError -from superset.models.reports import ReportSchedule from superset.reports.commands.exceptions import ReportSchedulePruneLogError from superset.reports.dao import ReportScheduleDAO +from superset.reports.models import ReportSchedule from superset.utils.celery import 
session_scope logger = logging.getLogger(__name__) diff --git a/superset/reports/commands/update.py b/superset/reports/commands/update.py index c43ee47a0acc..3399eca7b72c 100644 --- a/superset/reports/commands/update.py +++ b/superset/reports/commands/update.py @@ -26,7 +26,6 @@ from superset.dao.exceptions import DAOUpdateFailedError from superset.databases.dao import DatabaseDAO from superset.exceptions import SupersetSecurityException -from superset.models.reports import ReportSchedule, ReportScheduleType, ReportState from superset.reports.commands.base import BaseReportScheduleCommand from superset.reports.commands.exceptions import ( DatabaseNotFoundValidationError, @@ -37,6 +36,7 @@ ReportScheduleUpdateFailedError, ) from superset.reports.dao import ReportScheduleDAO +from superset.reports.models import ReportSchedule, ReportScheduleType, ReportState logger = logging.getLogger(__name__) diff --git a/superset/reports/dao.py b/superset/reports/dao.py index 21b1473f3281..c096e486d222 100644 --- a/superset/reports/dao.py +++ b/superset/reports/dao.py @@ -26,7 +26,7 @@ from superset.dao.base import BaseDAO from superset.dao.exceptions import DAOCreateFailedError, DAODeleteFailedError from superset.extensions import db -from superset.models.reports import ( +from superset.reports.models import ( ReportExecutionLog, ReportRecipients, ReportSchedule, @@ -155,7 +155,7 @@ def validate_update_uniqueness( return found_id is None or found_id == expect_id @classmethod - def create(cls, properties: Dict[str, Any], commit: bool = True) -> Model: + def create(cls, properties: Dict[str, Any], commit: bool = True) -> ReportSchedule: """ create a report schedule and nested recipients :raises: DAOCreateFailedError @@ -187,7 +187,7 @@ def create(cls, properties: Dict[str, Any], commit: bool = True) -> Model: @classmethod def update( cls, model: Model, properties: Dict[str, Any], commit: bool = True - ) -> Model: + ) -> ReportSchedule: """ create a report schedule and nested 
recipients :raises: DAOCreateFailedError diff --git a/superset/reports/filters.py b/superset/reports/filters.py index 82ea73a91a9f..699d10fc9afb 100644 --- a/superset/reports/filters.py +++ b/superset/reports/filters.py @@ -20,7 +20,7 @@ from sqlalchemy import or_ from sqlalchemy.orm.query import Query -from superset.models.reports import ReportSchedule +from superset.reports.models import ReportSchedule from superset.views.base import BaseFilter diff --git a/superset/reports/logs/api.py b/superset/reports/logs/api.py index 6b11da556f67..41c47e36522d 100644 --- a/superset/reports/logs/api.py +++ b/superset/reports/logs/api.py @@ -25,8 +25,8 @@ from superset import is_feature_enabled from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP, RouteMethod -from superset.models.reports import ReportExecutionLog from superset.reports.logs.schemas import openapi_spec_methods_override +from superset.reports.models import ReportExecutionLog from superset.views.base_api import BaseSupersetModelRestApi logger = logging.getLogger(__name__) diff --git a/superset/models/reports.py b/superset/reports/models.py similarity index 93% rename from superset/models/reports.py rename to superset/reports/models.py index b0aa94d9a77c..9f7f48282672 100644 --- a/superset/models/reports.py +++ b/superset/reports/models.py @@ -16,8 +16,7 @@ # under the License. 
"""A collection of ORM sqlalchemy models for Superset""" import enum -import json -from typing import Any, Dict, Optional +from typing import Any, Optional from cron_descriptor import get_description from flask_appbuilder import Model @@ -40,8 +39,9 @@ from superset.extensions import security_manager from superset.models.core import Database from superset.models.dashboard import Dashboard -from superset.models.helpers import AuditMixinNullable +from superset.models.helpers import AuditMixinNullable, ExtraJSONMixin from superset.models.slice import Slice +from superset.reports.types import ReportScheduleExtra metadata = Model.metadata # pylint: disable=no-member @@ -95,7 +95,7 @@ class ReportCreationMethod(str, enum.Enum): ) -class ReportSchedule(Model, AuditMixinNullable): +class ReportSchedule(Model, AuditMixinNullable, ExtraJSONMixin): """ Report Schedules, supports alerts and reports @@ -147,12 +147,11 @@ class ReportSchedule(Model, AuditMixinNullable): # (Alerts/Reports) Unlock a possible stalled working state working_timeout = Column(Integer, default=60 * 60 * 1) - # Store the selected dashboard tabs etc. - extra = Column(Text, default="{}") - # (Reports) When generating a screenshot, bypass the cache? 
force_screenshot = Column(Boolean, default=False) + extra: ReportScheduleExtra # type: ignore + def __repr__(self) -> str: return str(self.name) @@ -160,12 +159,13 @@ def __repr__(self) -> str: def crontab_humanized(self) -> str: return get_description(self.crontab) - @validates("extra") - # pylint: disable=unused-argument,no-self-use - def validate_extra(self, key: str, value: Dict[Any, Any]) -> Optional[str]: - if value is not None: - return json.dumps(value) - return None + @validates("extra_json") + def orm_validate_extra_json( + self, _: str, value: Optional[ReportScheduleExtra] + ) -> Any: # noqa + if value is None: + return {} + return value class ReportRecipients(Model, AuditMixinNullable): diff --git a/superset/reports/notifications/__init__.py b/superset/reports/notifications/__init__.py index dae43d64c7ca..c466f59abd5b 100644 --- a/superset/reports/notifications/__init__.py +++ b/superset/reports/notifications/__init__.py @@ -15,7 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
-from superset.models.reports import ReportRecipients +from superset.reports.models import ReportRecipients from superset.reports.notifications.base import BaseNotification, NotificationContent from superset.reports.notifications.email import EmailNotification from superset.reports.notifications.slack import SlackNotification diff --git a/superset/reports/notifications/base.py b/superset/reports/notifications/base.py index 06bfecf79014..f5c9b79fd7c9 100644 --- a/superset/reports/notifications/base.py +++ b/superset/reports/notifications/base.py @@ -20,7 +20,7 @@ import pandas as pd -from superset.models.reports import ReportRecipients, ReportRecipientType +from superset.reports.models import ReportRecipients, ReportRecipientType @dataclass diff --git a/superset/reports/notifications/email.py b/superset/reports/notifications/email.py index 3991f24b9264..0fd571c38643 100644 --- a/superset/reports/notifications/email.py +++ b/superset/reports/notifications/email.py @@ -26,7 +26,7 @@ from flask_babel import gettext as __ from superset import app -from superset.models.reports import ReportRecipientType +from superset.reports.models import ReportRecipientType from superset.reports.notifications.base import BaseNotification from superset.reports.notifications.exceptions import NotificationError from superset.utils.core import send_email_smtp diff --git a/superset/reports/notifications/slack.py b/superset/reports/notifications/slack.py index 2a198d66453c..d4bed7fed806 100644 --- a/superset/reports/notifications/slack.py +++ b/superset/reports/notifications/slack.py @@ -26,7 +26,7 @@ from slack.errors import SlackApiError, SlackClientError from superset import app -from superset.models.reports import ReportRecipientType +from superset.reports.models import ReportRecipientType from superset.reports.notifications.base import BaseNotification from superset.reports.notifications.exceptions import NotificationError from superset.utils.decorators import statsd_gauge diff --git 
a/superset/reports/schemas.py b/superset/reports/schemas.py index a0ce67d80dbb..c64593f7f961 100644 --- a/superset/reports/schemas.py +++ b/superset/reports/schemas.py @@ -23,7 +23,7 @@ from marshmallow_enum import EnumField from pytz import all_timezones -from superset.models.reports import ( +from superset.reports.models import ( ReportCreationMethod, ReportDataFormat, ReportRecipientType, @@ -297,4 +297,5 @@ class ReportSchedulePutSchema(Schema): default=ReportDataFormat.VISUALIZATION, validate=validate.OneOf(choices=tuple(key.value for key in ReportDataFormat)), ) + extra = fields.Dict(default=None) force_screenshot = fields.Boolean(default=False) diff --git a/superset/reports/types.py b/superset/reports/types.py new file mode 100644 index 000000000000..d487e3ad2376 --- /dev/null +++ b/superset/reports/types.py @@ -0,0 +1,23 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from typing import TypedDict + +from superset.dashboards.permalink.types import DashboardPermalinkState + + +class ReportScheduleExtra(TypedDict): + dashboard: DashboardPermalinkState diff --git a/superset/views/base.py b/superset/views/base.py index 26e22e698a89..8a754af5a5e6 100644 --- a/superset/views/base.py +++ b/superset/views/base.py @@ -71,7 +71,7 @@ SupersetSecurityException, ) from superset.models.helpers import ImportExportMixin -from superset.models.reports import ReportRecipientType +from superset.reports.models import ReportRecipientType from superset.superset_typing import FlaskResponse from superset.translations.utils import get_language_pack from superset.utils import core as utils diff --git a/tests/integration_tests/charts/api_tests.py b/tests/integration_tests/charts/api_tests.py index b935579a0041..aabe0f719a6b 100644 --- a/tests/integration_tests/charts/api_tests.py +++ b/tests/integration_tests/charts/api_tests.py @@ -30,7 +30,7 @@ from superset.extensions import cache_manager, db from superset.models.core import Database, FavStar, FavStarClassName from superset.models.dashboard import Dashboard -from superset.models.reports import ReportSchedule, ReportScheduleType +from superset.reports.models import ReportSchedule, ReportScheduleType from superset.models.slice import Slice from superset.utils.core import get_example_default_schema diff --git a/tests/integration_tests/dashboard_utils.py b/tests/integration_tests/dashboard_utils.py index 115d3269f2e5..bafe0fdc0f36 100644 --- a/tests/integration_tests/dashboard_utils.py +++ b/tests/integration_tests/dashboard_utils.py @@ -17,9 +17,7 @@ """Utils to provide dashboards for tests""" import json -from typing import Any, Dict, List, Optional - -from pandas import DataFrame +from typing import Dict, List, Optional from superset import db from superset.connectors.sqla.models import SqlaTable diff --git a/tests/integration_tests/dashboards/api_tests.py 
b/tests/integration_tests/dashboards/api_tests.py index a027dcffae60..2e57167db935 100644 --- a/tests/integration_tests/dashboards/api_tests.py +++ b/tests/integration_tests/dashboards/api_tests.py @@ -34,7 +34,7 @@ from superset import db, security_manager from superset.models.dashboard import Dashboard from superset.models.core import FavStar, FavStarClassName -from superset.models.reports import ReportSchedule, ReportScheduleType +from superset.reports.models import ReportSchedule, ReportScheduleType from superset.models.slice import Slice from superset.utils.core import backend from superset.views.base import generate_download_headers diff --git a/tests/integration_tests/databases/api_tests.py b/tests/integration_tests/databases/api_tests.py index de754bd60a3a..fe9228ebf4a0 100644 --- a/tests/integration_tests/databases/api_tests.py +++ b/tests/integration_tests/databases/api_tests.py @@ -43,7 +43,7 @@ from superset.db_engine_specs.hana import HanaEngineSpec from superset.errors import SupersetError from superset.models.core import Database, ConfigurationMethod -from superset.models.reports import ReportSchedule, ReportScheduleType +from superset.reports.models import ReportSchedule, ReportScheduleType from superset.utils.database import get_example_database, get_main_database from tests.integration_tests.base_tests import SupersetTestCase from tests.integration_tests.fixtures.birth_names_dashboard import ( diff --git a/tests/integration_tests/fixtures/tabbed_dashboard.py b/tests/integration_tests/fixtures/tabbed_dashboard.py index 4f9c4465c2d0..3342ce78f99e 100644 --- a/tests/integration_tests/fixtures/tabbed_dashboard.py +++ b/tests/integration_tests/fixtures/tabbed_dashboard.py @@ -21,15 +21,14 @@ from superset import db from superset.models.dashboard import Dashboard from tests.integration_tests.dashboard_utils import create_dashboard -from tests.integration_tests.test_app import app -@pytest.fixture(scope="session") -def tabbed_dashboard(): 
+@pytest.fixture() +def tabbed_dashboard(app_context): position_json = { "DASHBOARD_VERSION_KEY": "v2", "GRID_ID": { - "children": ["TABS-IpViLohnyP"], + "children": ["TABS-L1A", "TABS-L1B"], "id": "GRID_ID", "parents": ["ROOT_ID"], "type": "GRID", @@ -40,38 +39,102 @@ def tabbed_dashboard(): "type": "HEADER", }, "ROOT_ID": {"children": ["GRID_ID"], "id": "ROOT_ID", "type": "ROOT"}, - "TAB-j53G4gtKGF": { + "TAB-L1AA": { "children": [], - "id": "TAB-j53G4gtKGF", + "id": "TAB-L1AA", "meta": { "defaultText": "Tab title", "placeholder": "Tab title", - "text": "Tab 1", + "text": "Tab L1AA", }, - "parents": ["ROOT_ID", "GRID_ID", "TABS-IpViLohnyP"], + "parents": ["ROOT_ID", "GRID_ID", "TABS-L1A"], "type": "TAB", }, - "TAB-nerWR09Ju": { + "TAB-L1AB": { "children": [], - "id": "TAB-nerWR09Ju", + "id": "TAB-L1AB", + "meta": { + "defaultText": "Tab title", + "placeholder": "Tab title", + "text": "Tab L1AB", + }, + "parents": ["ROOT_ID", "GRID_ID", "TABS-L1A"], + "type": "TAB", + }, + "TABS-L1A": { + "children": ["TAB-L1AA", "TAB-L1AB"], + "id": "TABS-L1A", + "meta": {}, + "parents": ["ROOT_ID", "GRID_ID"], + "type": "TABS", + }, + "TAB-L1BA": { + "children": [], + "id": "TAB-L1BA", + "meta": { + "defaultText": "Tab title", + "placeholder": "Tab title", + "text": "Tab L1B", + }, + "parents": ["ROOT_ID", "GRID_ID", "TABS-L1B"], + "type": "TAB", + }, + "TAB-L1BB": { + "children": ["TABS-L2A"], + "id": "TAB-L1BB", "meta": { "defaultText": "Tab title", "placeholder": "Tab title", "text": "Tab 2", }, - "parents": ["ROOT_ID", "GRID_ID", "TABS-IpViLohnyP"], + "parents": ["ROOT_ID", "GRID_ID", "TABS-L1B"], "type": "TAB", }, - "TABS-IpViLohnyP": { - "children": ["TAB-j53G4gtKGF", "TAB-nerWR09Ju"], - "id": "TABS-IpViLohnyP", + "TABS-L1B": { + "children": ["TAB-L1BA", "TAB-L1BB"], + "id": "TABS-L1B", "meta": {}, "parents": ["ROOT_ID", "GRID_ID"], "type": "TABS", }, + "TAB-L2AA": { + "children": [], + "id": "TAB-L2AA", + "meta": { + "defaultText": "Tab title", + "placeholder": "Tab 
title", + "text": "Tab L2AA", + }, + "parents": ["ROOT_ID", "GRID_ID", "TABS-L2A"], + "type": "TAB", + }, + "TAB-L2AB": { + "children": [], + "id": "TAB-L2AB", + "meta": { + "defaultText": "Tab title", + "placeholder": "Tab title", + "text": "Tab L2AB", + }, + "parents": ["ROOT_ID", "GRID_ID", "TABS-L2A"], + "type": "TAB", + }, + "TABS-L2A": { + "children": ["TAB-L2AA", "TAB-L2AB"], + "id": "TABS-L2A", + "meta": {}, + "parents": ["ROOT_ID", "GRID_ID", "TABS-L1BB"], + "type": "TABS", + }, } - with app.app_context(): - dash = create_dashboard( - "tabbed-dash-test", "Tabbed Dash Test", json.dumps(position_json), [] - ) - yield dash + dash = create_dashboard( + "test-tabbed-dash", "Test tabbed dash", json.dumps(position_json), [] + ) + err = None + try: + yield dash + except Exception as ex: + err = ex + db.session.query(Dashboard).filter_by(id=dash.id).delete() + if err: + raise err diff --git a/tests/integration_tests/reports/api_tests.py b/tests/integration_tests/reports/api_tests.py index 2ca41610a2e4..b39ea5308a8e 100644 --- a/tests/integration_tests/reports/api_tests.py +++ b/tests/integration_tests/reports/api_tests.py @@ -29,7 +29,7 @@ from superset.models.core import Database from superset.models.slice import Slice from superset.models.dashboard import Dashboard -from superset.models.reports import ( +from superset.reports.models import ( ReportSchedule, ReportCreationMethod, ReportRecipients, @@ -45,7 +45,6 @@ load_birth_names_dashboard_with_slices, load_birth_names_data, ) -from tests.integration_tests.fixtures.tabbed_dashboard import tabbed_dashboard from tests.integration_tests.reports.utils import insert_report_schedule REPORTS_COUNT = 10 @@ -281,6 +280,7 @@ def test_get_list_report_schedule(self): "crontab_humanized", "dashboard_id", "description", + "extra", "id", "last_eval_dttm", "last_state", @@ -1528,84 +1528,3 @@ def test_report_schedule_logs_no_mutations(self): assert rv.status_code == 405 rv = self.client.delete(uri) assert rv.status_code == 405 
- - @pytest.mark.usefixtures("create_report_schedules") - @pytest.mark.usefixtures("tabbed_dashboard") - def test_when_invalid_tab_ids_are_given_it_raises_bad_request(self): - """ - when tab ids are specified in the extra argument, make sure that the - tab ids are valid. - """ - self.login(username="admin") - dashboard = ( - db.session.query(Dashboard) - .filter(Dashboard.slug == "tabbed-dash-test") - .first() - ) - example_db = get_example_database() - report_schedule_data = { - "type": ReportScheduleType.ALERT, - "name": "new3", - "description": "description", - "crontab": "0 9 * * *", - "creation_method": ReportCreationMethod.ALERTS_REPORTS, - "recipients": [ - { - "type": ReportRecipientType.EMAIL, - "recipient_config_json": {"target": "target@superset.org"}, - }, - ], - "grace_period": 14400, - "working_timeout": 3600, - "chart": None, - "dashboard": dashboard.id, - "database": example_db.id, - "extra": {"dashboard_tab_ids": ["INVALID-TAB-ID-1", "TABS-IpViLohnyP"]}, - } - response = self.post_assert_metric( - "api/v1/report/", report_schedule_data, "post" - ) - assert response.status_code == 422 - assert response.json == { - "message": {"extra": ["Invalid tab IDs selected: ['INVALID-TAB-ID-1']"]} - } - - @pytest.mark.usefixtures("create_report_schedules") - @pytest.mark.usefixtures("tabbed_dashboard") - def test_when_tab_ids_are_given_it_gets_added_to_extra(self): - self.login(username="admin") - dashboard = ( - db.session.query(Dashboard) - .filter(Dashboard.slug == "tabbed-dash-test") - .first() - ) - example_db = get_example_database() - report_schedule_data = { - "type": ReportScheduleType.ALERT, - "name": "new3", - "description": "description", - "crontab": "0 9 * * *", - "creation_method": ReportCreationMethod.ALERTS_REPORTS, - "recipients": [ - { - "type": ReportRecipientType.EMAIL, - "recipient_config_json": {"target": "target@superset.org"}, - }, - ], - "grace_period": 14400, - "working_timeout": 3600, - "chart": None, - "dashboard": dashboard.id, - 
"database": example_db.id, - "extra": {"dashboard_tab_ids": ["TABS-IpViLohnyP"]}, - } - response = self.post_assert_metric( - "api/v1/report/", report_schedule_data, "post" - ) - assert response.status_code == 201 - assert json.loads( - db.session.query(ReportSchedule) - .filter(ReportSchedule.id == response.json["id"]) - .first() - .extra - ) == {"dashboard_tab_ids": ["TABS-IpViLohnyP"]} diff --git a/tests/integration_tests/reports/commands/create_dashboard_report_tests.py b/tests/integration_tests/reports/commands/create_dashboard_report_tests.py new file mode 100644 index 000000000000..56596389408b --- /dev/null +++ b/tests/integration_tests/reports/commands/create_dashboard_report_tests.py @@ -0,0 +1,74 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +import pytest + +from superset import db +from superset.models.dashboard import Dashboard +from superset.reports.commands.create import CreateReportScheduleCommand +from superset.reports.commands.exceptions import ReportScheduleInvalidError +from superset.reports.models import ( + ReportCreationMethod, + ReportRecipientType, + ReportScheduleType, +) +from tests.integration_tests.fixtures.tabbed_dashboard import tabbed_dashboard + +DASHBOARD_REPORT_SCHEDULE_DEFAULTS = { + "type": ReportScheduleType.REPORT, + "description": "description", + "crontab": "0 9 * * *", + "creation_method": ReportCreationMethod.ALERTS_REPORTS, + "recipients": [ + { + "type": ReportRecipientType.EMAIL, + "recipient_config_json": {"target": "target@example.com"}, + }, + ], + "grace_period": 14400, + "working_timeout": 3600, +} + + +@pytest.mark.usefixtures("login_as_admin") +def test_accept_valid_tab_ids(tabbed_dashboard: Dashboard) -> None: + report_schedule = CreateReportScheduleCommand( + { + **DASHBOARD_REPORT_SCHEDULE_DEFAULTS, + "name": "tabbed dashboard report (valid tab ids)", + "dashboard": tabbed_dashboard.id, + "extra": {"dashboard": {"activeTabs": ["TAB-L1AA", "TAB-L2AB"]}}, + } + ).run() + assert report_schedule.extra == { + "dashboard": {"activeTabs": ["TAB-L1AA", "TAB-L2AB"]} + } + db.session.delete(report_schedule) + + +@pytest.mark.usefixtures("login_as_admin") +def test_raise_exception_for_invalid_tab_ids(tabbed_dashboard: Dashboard) -> None: + with pytest.raises(ReportScheduleInvalidError) as exc_info: + CreateReportScheduleCommand( + { + **DASHBOARD_REPORT_SCHEDULE_DEFAULTS, + "name": "tabbed dashboard report (invalid tab ids)", + "dashboard": tabbed_dashboard.id, + "extra": {"dashboard": {"activeTabs": ["TAB-INVALID_ID"]}}, + } + ).run() + assert "Invalid tab ids" in str(exc_info.value.normalized_messages()) diff --git a/tests/integration_tests/reports/commands/execute_dashboard_report_tests.py 
b/tests/integration_tests/reports/commands/execute_dashboard_report_tests.py new file mode 100644 index 000000000000..88f3ad583ac5 --- /dev/null +++ b/tests/integration_tests/reports/commands/execute_dashboard_report_tests.py @@ -0,0 +1,66 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from datetime import datetime +from unittest.mock import MagicMock, patch +from uuid import uuid4 + +from superset.dashboards.permalink.commands.create import ( + CreateDashboardPermalinkCommand, +) +from superset.models.dashboard import Dashboard +from superset.reports.commands.execute import AsyncExecuteReportScheduleCommand +from tests.integration_tests.fixtures.tabbed_dashboard import tabbed_dashboard +from tests.integration_tests.reports.utils import create_dashboard_report + + +@patch("superset.reports.notifications.email.send_email_smtp") +@patch( + "superset.reports.commands.execute.DashboardScreenshot", +) +@patch( + "superset.dashboards.permalink.commands.create.CreateDashboardPermalinkCommand.run" +) +def test_report_for_dashboard_with_tabs( + create_dashboard_permalink_mock: MagicMock, + dashboard_screenshot_mock: MagicMock, + send_email_smtp_mock: MagicMock, + tabbed_dashboard: Dashboard, +) -> None: + create_dashboard_permalink_mock.return_value = "permalink" + dashboard_screenshot_mock.return_value.get_screenshot.return_value = b"test-image" + + with create_dashboard_report( + dashboard=tabbed_dashboard, + extra={"activeTabs": ["TAB-L1BB", "TAB-L2AB"]}, + name="test report tabbed dashboard", + ) as report_schedule: + dashboard: Dashboard = report_schedule.dashboard + AsyncExecuteReportScheduleCommand( + uuid4(), report_schedule.id, datetime.utcnow() + ).run() + dashboard_state = report_schedule.extra.get("dashboard", {}) + permalink_key = CreateDashboardPermalinkCommand( + dashboard.id, dashboard_state + ).run() + + assert dashboard_screenshot_mock.call_count == 1 + assert dashboard_screenshot_mock.call_args_list[0].args == ( + f"http://0.0.0.0:8080/superset/dashboard/p/{permalink_key}/", + f"{dashboard.digest}", + ) + assert send_email_smtp_mock.called is True + assert len(send_email_smtp_mock.call_args.kwargs["images"]) == 1 diff --git a/tests/integration_tests/reports/commands_tests.py index 
63a54ff56182..3eebfb0594c6 100644 --- a/tests/integration_tests/reports/commands_tests.py +++ b/tests/integration_tests/reports/commands_tests.py @@ -27,19 +27,9 @@ from freezegun import freeze_time from sqlalchemy.sql import func -from superset import db, security_manager +from superset import db from superset.models.core import Database from superset.models.dashboard import Dashboard -from superset.models.reports import ( - ReportDataFormat, - ReportExecutionLog, - ReportRecipients, - ReportRecipientType, - ReportSchedule, - ReportScheduleType, - ReportScheduleValidatorType, - ReportState, -) from superset.models.slice import Slice from superset.reports.commands.exceptions import ( AlertQueryError, @@ -57,29 +47,37 @@ ) from superset.reports.commands.execute import AsyncExecuteReportScheduleCommand from superset.reports.commands.log_prune import AsyncPruneReportScheduleLogCommand +from superset.reports.models import ( + ReportDataFormat, + ReportExecutionLog, + ReportSchedule, + ReportScheduleType, + ReportScheduleValidatorType, + ReportState, +) from superset.utils.database import get_example_database from tests.integration_tests.fixtures.birth_names_dashboard import ( load_birth_names_dashboard_with_slices, load_birth_names_data, ) -from tests.integration_tests.fixtures.tabbed_dashboard import tabbed_dashboard from tests.integration_tests.fixtures.world_bank_dashboard import ( load_world_bank_dashboard_with_slices_module_scope, load_world_bank_data, ) -from tests.integration_tests.reports.utils import insert_report_schedule +from tests.integration_tests.reports.utils import ( + cleanup_report_schedule, + create_report_notification, + CSV_FILE, + OWNER_EMAIL, + SCREENSHOT_FILE, + TEST_ID, +) from tests.integration_tests.test_app import app -from tests.integration_tests.utils import read_fixture pytestmark = pytest.mark.usefixtures( "load_world_bank_dashboard_with_slices_module_scope" ) -TEST_ID = str(uuid4()) -CSV_FILE = read_fixture("trends.csv") 
-SCREENSHOT_FILE = read_fixture("sample.png") -OWNER_EMAIL = "admin@fab.org" - def get_target_from_report_schedule(report_schedule: ReportSchedule) -> List[str]: return [ @@ -129,78 +127,6 @@ def assert_log(state: str, error_message: Optional[str] = None): assert log.value_row_json is None -def create_report_notification( - email_target: Optional[str] = None, - slack_channel: Optional[str] = None, - chart: Optional[Slice] = None, - dashboard: Optional[Dashboard] = None, - database: Optional[Database] = None, - sql: Optional[str] = None, - report_type: Optional[str] = None, - validator_type: Optional[str] = None, - validator_config_json: Optional[str] = None, - grace_period: Optional[int] = None, - report_format: Optional[ReportDataFormat] = None, - name: Optional[str] = None, - extra: Optional[Dict[str, Any]] = None, - force_screenshot: bool = False, -) -> ReportSchedule: - report_type = report_type or ReportScheduleType.REPORT - target = email_target or slack_channel - config_json = {"target": target} - owner = ( - db.session.query(security_manager.user_model) - .filter_by(email=OWNER_EMAIL) - .one_or_none() - ) - - if slack_channel: - recipient = ReportRecipients( - type=ReportRecipientType.SLACK, - recipient_config_json=json.dumps(config_json), - ) - else: - recipient = ReportRecipients( - type=ReportRecipientType.EMAIL, - recipient_config_json=json.dumps(config_json), - ) - - if name is None: - name = "report_with_csv" if report_format else "report" - - report_schedule = insert_report_schedule( - type=report_type, - name=name, - crontab="0 9 * * *", - description="Daily report", - sql=sql, - chart=chart, - dashboard=dashboard, - database=database, - recipients=[recipient], - owners=[owner], - validator_type=validator_type, - validator_config_json=validator_config_json, - grace_period=grace_period, - report_format=report_format or ReportDataFormat.VISUALIZATION, - extra=extra, - force_screenshot=force_screenshot, - ) - return report_schedule - - -def 
cleanup_report_schedule(report_schedule: ReportSchedule) -> None: - db.session.query(ReportExecutionLog).filter( - ReportExecutionLog.report_schedule == report_schedule - ).delete() - db.session.query(ReportRecipients).filter( - ReportRecipients.report_schedule == report_schedule - ).delete() - - db.session.delete(report_schedule) - db.session.commit() - - @contextmanager def create_test_table_context(database: Database): database.get_sqla_engine().execute( @@ -308,23 +234,6 @@ def create_report_email_dashboard_force_screenshot(): cleanup_report_schedule(report_schedule) -@pytest.fixture() -def create_report_email_tabbed_dashboard(tabbed_dashboard): - with app.app_context(): - report_schedule = create_report_notification( - email_target="target@email.com", - dashboard=tabbed_dashboard, - extra={ - "dashboard_tab_ids": [ - "TAB-j53G4gtKGF", - "TAB-nerWR09Ju", - ] - }, - ) - yield report_schedule - cleanup_report_schedule(report_schedule) - - @pytest.fixture() def create_report_slack_chart(): with app.app_context(): @@ -1900,41 +1809,9 @@ def test_grace_period_error_flap( ) @patch("superset.reports.dao.ReportScheduleDAO.bulk_delete_logs") def test_prune_log_soft_time_out(bulk_delete_logs, create_report_email_dashboard): - from datetime import datetime, timedelta - from celery.exceptions import SoftTimeLimitExceeded bulk_delete_logs.side_effect = SoftTimeLimitExceeded() with pytest.raises(SoftTimeLimitExceeded) as excinfo: AsyncPruneReportScheduleLogCommand().run() assert str(excinfo.value) == "SoftTimeLimitExceeded()" - - -@pytest.mark.usefixtures( - "create_report_email_tabbed_dashboard", -) -@patch("superset.reports.notifications.email.send_email_smtp") -@patch( - "superset.reports.commands.execute.DashboardScreenshot", -) -def test_when_tabs_are_selected_it_takes_screenshots_for_every_tabs( - dashboard_screenshot_mock, - send_email_smtp_mock, - create_report_email_tabbed_dashboard, -): - dashboard_screenshot_mock.get_screenshot.return_value = b"test-image" - 
dashboard = create_report_email_tabbed_dashboard.dashboard - - AsyncExecuteReportScheduleCommand( - TEST_ID, create_report_email_tabbed_dashboard.id, datetime.utcnow() - ).run() - - tabs = json.loads(create_report_email_tabbed_dashboard.extra)["dashboard_tab_ids"] - assert dashboard_screenshot_mock.call_count == 2 - for index, tab in enumerate(tabs): - assert dashboard_screenshot_mock.call_args_list[index].args == ( - f"http://0.0.0.0:8080/superset/dashboard/{dashboard.id}/?standalone=3&force=false#{tab}", - f"{dashboard.digest}", - ) - assert send_email_smtp_mock.called is True - assert len(send_email_smtp_mock.call_args.kwargs["images"]) == 2 diff --git a/tests/integration_tests/reports/scheduler_tests.py b/tests/integration_tests/reports/scheduler_tests.py index 77894a6d2ef3..f8c7873fe5ee 100644 --- a/tests/integration_tests/reports/scheduler_tests.py +++ b/tests/integration_tests/reports/scheduler_tests.py @@ -17,12 +17,11 @@ from typing import List from unittest.mock import patch -import pytest from freezegun import freeze_time from freezegun.api import FakeDatetime # type: ignore from superset.extensions import db -from superset.models.reports import ReportScheduleType +from superset.reports.models import ReportScheduleType from superset.tasks.scheduler import scheduler from tests.integration_tests.reports.utils import insert_report_schedule from tests.integration_tests.test_app import app diff --git a/tests/integration_tests/reports/utils.py b/tests/integration_tests/reports/utils.py index 1f04f0798860..4d4b977b8291 100644 --- a/tests/integration_tests/reports/utils.py +++ b/tests/integration_tests/reports/utils.py @@ -15,21 +15,33 @@ # specific language governing permissions and limitations # under the License. 
+import json +from contextlib import contextmanager from typing import Any, Dict, List, Optional +from uuid import uuid4 from flask_appbuilder.security.sqla.models import User -from superset import db +from superset import db, security_manager from superset.models.core import Database from superset.models.dashboard import Dashboard -from superset.models.reports import ( +from superset.models.slice import Slice +from superset.reports.models import ( ReportDataFormat, ReportExecutionLog, ReportRecipients, + ReportRecipientType, ReportSchedule, + ReportScheduleType, ReportState, ) -from superset.models.slice import Slice +from tests.integration_tests.test_app import app +from tests.integration_tests.utils import read_fixture + +TEST_ID = str(uuid4()) +CSV_FILE = read_fixture("trends.csv") +SCREENSHOT_FILE = read_fixture("sample.png") +OWNER_EMAIL = "admin@fab.org" def insert_report_schedule( @@ -83,3 +95,99 @@ def insert_report_schedule( db.session.add(report_schedule) db.session.commit() return report_schedule + + +def create_report_notification( + email_target: Optional[str] = None, + slack_channel: Optional[str] = None, + chart: Optional[Slice] = None, + dashboard: Optional[Dashboard] = None, + database: Optional[Database] = None, + sql: Optional[str] = None, + report_type: Optional[ReportScheduleType] = None, + validator_type: Optional[str] = None, + validator_config_json: Optional[str] = None, + grace_period: Optional[int] = None, + report_format: Optional[ReportDataFormat] = None, + name: Optional[str] = None, + extra: Optional[Dict[str, Any]] = None, + force_screenshot: bool = False, +) -> ReportSchedule: + report_type = report_type or ReportScheduleType.REPORT + target = email_target or slack_channel + config_json = {"target": target} + owner = ( + db.session.query(security_manager.user_model) + .filter_by(email=OWNER_EMAIL) + .one_or_none() + ) + + if slack_channel: + recipient = ReportRecipients( + type=ReportRecipientType.SLACK, + 
recipient_config_json=json.dumps(config_json), + ) + else: + recipient = ReportRecipients( + type=ReportRecipientType.EMAIL, + recipient_config_json=json.dumps(config_json), + ) + + if name is None: + name = "report_with_csv" if report_format else "report" + + report_schedule = insert_report_schedule( + report_type, + name=name, + crontab="0 9 * * *", + description="Daily report", + sql=sql, + chart=chart, + dashboard=dashboard, + database=database, + recipients=[recipient], + owners=[owner], + validator_type=validator_type, + validator_config_json=validator_config_json, + grace_period=grace_period, + report_format=report_format or ReportDataFormat.VISUALIZATION, + extra=extra, + force_screenshot=force_screenshot, + ) + return report_schedule + + +def cleanup_report_schedule(report_schedule: ReportSchedule) -> None: + db.session.query(ReportExecutionLog).filter( + ReportExecutionLog.report_schedule == report_schedule + ).delete() + db.session.query(ReportRecipients).filter( + ReportRecipients.report_schedule == report_schedule + ).delete() + + db.session.delete(report_schedule) + db.session.commit() + + +@contextmanager +def create_dashboard_report(dashboard, extra, **kwargs): + report_schedule = create_report_notification( + email_target="target@example.com", + dashboard=dashboard, + extra={ + "dashboard": extra, + }, + **kwargs + ) + error = None + + try: + yield report_schedule + except Exception as ex: # pylint: disable=broad-except + error = ex + + # make sure to clean up in case of yield exceptions + cleanup_report_schedule(report_schedule) + + if error: + raise error diff --git a/tests/integration_tests/test_app.py b/tests/integration_tests/test_app.py index c64076ec360a..fb7b47b67cb9 100644 --- a/tests/integration_tests/test_app.py +++ b/tests/integration_tests/test_app.py @@ -29,5 +29,8 @@ def login( client: "FlaskClient[Any]", username: str = "admin", password: str = "general" ): - resp = client.post("/login/", data=dict(username=username, 
password=password)) + resp = client.post( + "/login/", + data=dict(username=username, password=password), + ).get_data(as_text=True) assert "User confirmation needed" not in resp