feat: add endpoint to export saved queries using new format #11447

Merged · 15 commits · Oct 30, 2020
5 changes: 2 additions & 3 deletions superset/charts/api.py
@@ -174,6 +174,7 @@ class ChartRestApi(BaseSupersetModelRestApi):
apispec_parameter_schemas = {
"screenshot_query_schema": screenshot_query_schema,
"get_delete_ids_schema": get_delete_ids_schema,
"get_export_ids_schema": get_export_ids_schema,
}
""" Add extra schemas to the OpenAPI components schema section """
openapi_spec_methods = openapi_spec_methods_override
@@ -732,9 +733,7 @@ def export(self, **kwargs: Any) -> Response:
content:
application/json:
schema:
type: array
items:
type: integer
$ref: '#/components/schemas/get_export_ids_schema'
responses:
200:
description: A zip file with chart(s), dataset(s) and database(s) as YAML
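For context, the $ref above resolves to the same array-of-integers shape that the removed inline schema described; registering the dict under apispec_parameter_schemas is what exposes it as an OpenAPI component named after the dict key. A rough sketch of the equivalence (assuming the chart module's get_export_ids_schema matches the saved-query one added later in this PR):

# '#/components/schemas/get_export_ids_schema' points at the registered rison schema,
# which is presumably the same shape as the inline definition it replaces:
get_export_ids_schema = {"type": "array", "items": {"type": "integer"}}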
5 changes: 2 additions & 3 deletions superset/databases/api.py
@@ -166,6 +166,7 @@ class DatabaseRestApi(BaseSupersetModelRestApi):

apispec_parameter_schemas = {
"database_schemas_query_schema": database_schemas_query_schema,
"get_export_ids_schema": get_export_ids_schema,
}
openapi_spec_tag = "Database"
openapi_spec_component_schemas = (
@@ -677,9 +678,7 @@ def export(self, **kwargs: Any) -> Response:
content:
application/json:
schema:
type: array
items:
type: integer
$ref: '#/components/schemas/get_export_ids_schema'
responses:
200:
description: A zip file with database(s) and dataset(s) as YAML
7 changes: 4 additions & 3 deletions superset/datasets/api.py
@@ -164,6 +164,9 @@ class DatasetRestApi(BaseSupersetModelRestApi):
allowed_rel_fields = {"database", "owners"}
allowed_distinct_fields = {"schema"}

apispec_parameter_schemas = {
"get_export_ids_schema": get_export_ids_schema,
}
openapi_spec_component_schemas = (DatasetRelatedObjectsResponse,)

@expose("/", methods=["POST"])
@@ -358,9 +361,7 @@ def export(self, **kwargs: Any) -> Response:
content:
application/json:
schema:
type: array
items:
type: integer
$ref: '#/components/schemas/get_export_ids_schema'
responses:
200:
description: Dataset export
1 change: 0 additions & 1 deletion superset/models/sql_lab.py
@@ -188,7 +188,6 @@ class SavedQuery(Model, AuditMixinNullable, ExtraJSONMixin, ImportMixin):

export_parent = "database"
export_fields = [
"db_id",
"schema",
"label",
"description",
68 changes: 67 additions & 1 deletion superset/queries/saved_queries/api.py
@@ -15,9 +15,12 @@
# specific language governing permissions and limitations
# under the License.
import logging
from datetime import datetime
from io import BytesIO
from typing import Any
from zipfile import ZipFile

from flask import g, Response
from flask import g, Response, send_file
from flask_appbuilder.api import expose, protect, rison, safe
from flask_appbuilder.models.sqla.interface import SQLAInterface
from flask_babel import ngettext
@@ -32,13 +35,15 @@
SavedQueryBulkDeleteFailedError,
SavedQueryNotFoundError,
)
from superset.queries.saved_queries.commands.export import ExportSavedQueriesCommand
from superset.queries.saved_queries.filters import (
SavedQueryAllTextFilter,
SavedQueryFavoriteFilter,
SavedQueryFilter,
)
from superset.queries.saved_queries.schemas import (
get_delete_ids_schema,
get_export_ids_schema,
Review comment from @dpgaspar (Member), Oct 30, 2020:
Not related to this PR; sort of a note to self: this is asking to be DRY'ed.

openapi_spec_methods_override,
)
from superset.views.base_api import BaseSupersetModelRestApi, statsd_metrics
@@ -50,6 +55,7 @@ class SavedQueryRestApi(BaseSupersetModelRestApi):
datamodel = SQLAInterface(SavedQuery)

include_route_methods = RouteMethod.REST_MODEL_VIEW_CRUD_SET | {
RouteMethod.EXPORT,
RouteMethod.RELATED,
RouteMethod.DISTINCT,
"bulk_delete", # not using RouteMethod since locally defined
@@ -114,6 +120,7 @@ class SavedQueryRestApi(BaseSupersetModelRestApi):

apispec_parameter_schemas = {
"get_delete_ids_schema": get_delete_ids_schema,
"get_export_ids_schema": get_export_ids_schema,
}
openapi_spec_tag = "Queries"
openapi_spec_methods = openapi_spec_methods_override
@@ -183,3 +190,62 @@ def bulk_delete(self, **kwargs: Any) -> Response:
return self.response_404()
except SavedQueryBulkDeleteFailedError as ex:
return self.response_422(message=str(ex))

@expose("/export/", methods=["GET"])
@protect()
@safe
@statsd_metrics
@rison(get_export_ids_schema)
def export(self, **kwargs: Any) -> Response:
"""Export saved queries
---
get:
description: >-
Exports multiple saved queries and downloads them as YAML files
parameters:
- in: query
name: q
content:
application/json:
schema:
$ref: '#/components/schemas/get_export_ids_schema'
responses:
200:
description: A zip file with saved query(ies) and database(s) as YAML
content:
application/zip:
schema:
type: string
format: binary
400:
$ref: '#/components/responses/400'
401:
$ref: '#/components/responses/401'
404:
$ref: '#/components/responses/404'
500:
$ref: '#/components/responses/500'
"""
requested_ids = kwargs["rison"]
timestamp = datetime.now().strftime("%Y%m%dT%H%M%S")
root = f"saved_query_export_{timestamp}"
filename = f"{root}.zip"

buf = BytesIO()
with ZipFile(buf, "w") as bundle:
try:
for file_name, file_content in ExportSavedQueriesCommand(
requested_ids
).run():
with bundle.open(f"{root}/{file_name}", "w") as fp:
fp.write(file_content.encode())
except SavedQueryNotFoundError:
return self.response_404()
buf.seek(0)

return send_file(
buf,
mimetype="application/zip",
as_attachment=True,
attachment_filename=filename,
)
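For reference, the new endpoint is called the same way the tests below call it: the list of saved query ids is rison-encoded into the q query parameter, and the response body is a zip archive. A minimal client sketch (the local URL and the absence of authentication are assumptions for illustration, not part of this PR):

import prison
import requests

# Hypothetical local Superset instance; authentication is omitted for brevity.
ids = [1, 2, 3]
url = "http://localhost:8088/api/v1/saved_query/export/"
response = requests.get(url, params={"q": prison.dumps(ids)})

# On success (HTTP 200) the body is a zip with saved queries and their databases as YAML.
with open("saved_query_export.zip", "wb") as fp:
    fp.write(response.content)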
92 changes: 92 additions & 0 deletions superset/queries/saved_queries/commands/export.py
@@ -0,0 +1,92 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# isort:skip_file

import json
import logging
from typing import Iterator, List, Tuple

import yaml

from superset.commands.base import BaseCommand
from superset.queries.saved_queries.commands.exceptions import SavedQueryNotFoundError
from superset.queries.saved_queries.dao import SavedQueryDAO
from superset.utils.dict_import_export import IMPORT_EXPORT_VERSION, sanitize
from superset.models.sql_lab import SavedQuery

logger = logging.getLogger(__name__)


class ExportSavedQueriesCommand(BaseCommand):
def __init__(self, query_ids: List[int]):
self.query_ids = query_ids

# this will be set when calling validate()
self._models: List[SavedQuery] = []

@staticmethod
def export_saved_query(query: SavedQuery) -> Iterator[Tuple[str, str]]:
# build filename based on database, optional schema, and label
database_slug = sanitize(query.database.database_name)
schema_slug = sanitize(query.schema)
query_slug = sanitize(query.label) or str(query.uuid)
file_name = f"queries/{database_slug}/{schema_slug}/{query_slug}.yaml"

payload = query.export_to_dict(
recursive=False,
include_parent_ref=False,
include_defaults=True,
export_uuids=True,
)
payload["version"] = IMPORT_EXPORT_VERSION
payload["database_uuid"] = str(query.database.uuid)

file_content = yaml.safe_dump(payload, sort_keys=False)
yield file_name, file_content

# include database as well
file_name = f"databases/{database_slug}.yaml"

payload = query.database.export_to_dict(
recursive=False,
include_parent_ref=False,
include_defaults=True,
export_uuids=True,
)
# TODO (betodealmeida): move this logic to export_to_dict once this
# becomes the default export endpoint
if "extra" in payload:
try:
payload["extra"] = json.loads(payload["extra"])
except json.decoder.JSONDecodeError:
logger.info("Unable to decode `extra` field: %s", payload["extra"])

payload["version"] = IMPORT_EXPORT_VERSION

file_content = yaml.safe_dump(payload, sort_keys=False)
yield file_name, file_content

def run(self) -> Iterator[Tuple[str, str]]:
self.validate()

for query in self._models:
yield from self.export_saved_query(query)

def validate(self) -> None:
self._models = SavedQueryDAO.find_by_ids(self.query_ids)
if len(self._models) != len(self.query_ids):
raise SavedQueryNotFoundError()
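To illustrate what the command produces, it can also be run on its own: run() yields (path, YAML content) pairs, one per saved query plus one for its database, with paths following the slugs built in export_saved_query. A small sketch (the id, label, schema, and database name below are invented for illustration):

# Assuming a saved query with id 1, labelled "weekly signups", on an "examples" database.
for file_name, file_content in ExportSavedQueriesCommand([1]).run():
    print(file_name)
# queries/examples/public/weekly_signups.yaml
# databases/examples.yaml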
1 change: 1 addition & 0 deletions superset/queries/saved_queries/schemas.py
@@ -31,3 +31,4 @@
}

get_delete_ids_schema = {"type": "array", "items": {"type": "integer"}}
get_export_ids_schema = {"type": "array", "items": {"type": "integer"}}
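Picking up on the review note above about DRY'ing these schemas: the delete and export id-list schemas are identical here and in the chart, database, and dataset modules, so one possible follow-up (a sketch only, not part of this PR; the module placement is hypothetical) would be to define the shape once and alias it:

# Hypothetical shared location, e.g. a common schemas module (placement is an assumption).
get_ids_schema = {"type": "array", "items": {"type": "integer"}}

# Each API's schemas.py could then alias the shared definition.
get_delete_ids_schema = get_ids_schema
get_export_ids_schema = get_ids_schema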
49 changes: 49 additions & 0 deletions tests/queries/saved_queries/api_tests.py
@@ -17,7 +17,9 @@
# isort:skip_file
"""Unit tests for Superset"""
import json
from io import BytesIO
from typing import Optional
from zipfile import is_zipfile

import pytest
import prison
@@ -680,3 +682,50 @@ def test_delete_bulk_saved_query_not_found(self):
uri = f"api/v1/saved_query/?q={prison.dumps(saved_query_ids)}"
rv = self.delete_assert_metric(uri, "bulk_delete")
assert rv.status_code == 404

@pytest.mark.usefixtures("create_saved_queries")
def test_export(self):
"""
Saved Query API: Test export
"""
admin = self.get_user("admin")
sample_query = (
db.session.query(SavedQuery).filter(SavedQuery.created_by == admin).first()
)

self.login(username="admin")
argument = [sample_query.id]
uri = f"api/v1/saved_query/export/?q={prison.dumps(argument)}"
rv = self.client.get(uri)
assert rv.status_code == 200
buf = BytesIO(rv.data)
assert is_zipfile(buf)

@pytest.mark.usefixtures("create_saved_queries")
def test_export_not_found(self):
"""
Saved Query API: Test export not found
"""
max_id = db.session.query(func.max(SavedQuery.id)).scalar()

self.login(username="admin")
argument = [max_id + 1, max_id + 2]
uri = f"api/v1/saved_query/export/?q={prison.dumps(argument)}"
rv = self.client.get(uri)
assert rv.status_code == 404

@pytest.mark.usefixtures("create_saved_queries")
def test_export_not_allowed(self):
"""
Saved Query API: Test export not allowed
"""
admin = self.get_user("admin")
sample_query = (
db.session.query(SavedQuery).filter(SavedQuery.created_by == admin).first()
)

self.login(username="gamma")
argument = [sample_query.id]
uri = f"api/v1/saved_query/export/?q={prison.dumps(argument)}"
rv = self.client.get(uri)
assert rv.status_code == 404