feat: Sqllab to Explore UX improvements api changes (#11836)
hughhhh committed Nov 30, 2020
1 parent 5ebc09b commit 0689738
Showing 4 changed files with 87 additions and 12 deletions.
14 changes: 13 additions & 1 deletion superset/datasets/api.py
@@ -16,6 +16,7 @@
# under the License.
import logging
from datetime import datetime
from distutils.util import strtobool
from io import BytesIO
from typing import Any
from zipfile import ZipFile
@@ -252,6 +253,10 @@ def put(self, pk: int) -> Response:
schema:
type: integer
name: pk
- in: query
schema:
type: boolean
name: override_columns
requestBody:
description: Dataset schema
required: true
@@ -284,6 +289,11 @@ def put(self, pk: int) -> Response:
500:
$ref: '#/components/responses/500'
"""
override_columns = (
bool(strtobool(request.args["override_columns"]))
if "override_columns" in request.args
else False
)
if not request.is_json:
return self.response_400(message="Request is not JSON")
try:
@@ -292,7 +302,9 @@
except ValidationError as error:
return self.response_400(message=error.messages)
try:
changed_model = UpdateDatasetCommand(g.user, pk, item).run()
changed_model = UpdateDatasetCommand(
g.user, pk, item, override_columns
).run()
response = self.response(200, id=changed_model.id, result=item)
except DatasetNotFoundError:
response = self.response_404()
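The handler reads override_columns from the query string and coerces it with strtobool, so values such as "true", "1", or "yes" enable the flag, and it defaults to False when the parameter is absent. Below is a minimal client-side sketch of calling the updated endpoint, assuming a Superset instance at localhost:8088, a valid bearer token, and a dataset id of 1 (all placeholder values):

import requests

BASE_URL = "http://localhost:8088"  # placeholder host
HEADERS = {"Authorization": "Bearer <ACCESS_TOKEN>"}  # placeholder token

payload = {
    "description": "changed description",
    "columns": [
        {"column_name": "new_col", "type": "INTEGER"},
    ],
}

# override_columns=true asks the API to replace the dataset's existing
# columns with exactly the ones in the payload instead of merging them.
resp = requests.put(
    f"{BASE_URL}/api/v1/dataset/1?override_columns=true",
    json=payload,
    headers=HEADERS,
)
print(resp.status_code, resp.json())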
32 changes: 23 additions & 9 deletions superset/datasets/commands/update.py
@@ -48,17 +48,29 @@


class UpdateDatasetCommand(BaseCommand):
def __init__(self, user: User, model_id: int, data: Dict[str, Any]):
def __init__(
self,
user: User,
model_id: int,
data: Dict[str, Any],
override_columns: bool = False,
):
self._actor = user
self._model_id = model_id
self._properties = data.copy()
self._model: Optional[SqlaTable] = None
self.override_columns = override_columns

def run(self) -> Model:
self.validate()
if self._model:
try:
dataset = DatasetDAO.update(self._model, self._properties)
dataset = DatasetDAO.update(
model=self._model,
properties=self._properties,
commit=True,
override_columns=self.override_columns,
)
return dataset
except DAOUpdateFailedError as ex:
logger.exception(ex.exception)
@@ -123,14 +135,16 @@ def _validate_columns(
]
if not DatasetDAO.validate_columns_exist(self._model_id, columns_ids):
exceptions.append(DatasetColumnNotFoundValidationError())

# validate new column names uniqueness
columns_names: List[str] = [
column["column_name"] for column in columns if "id" not in column
]
if not DatasetDAO.validate_columns_uniqueness(
self._model_id, columns_names
):
exceptions.append(DatasetColumnsExistsValidationError())
if not self.override_columns:
columns_names: List[str] = [
column["column_name"] for column in columns if "id" not in column
]
if not DatasetDAO.validate_columns_uniqueness(
self._model_id, columns_names
):
exceptions.append(DatasetColumnsExistsValidationError())

def _validate_metrics(
self, metrics: List[Dict[str, Any]], exceptions: List[ValidationError]
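The command now takes an override_columns flag, stores it, and forwards it to DatasetDAO.update; when the flag is set, the check that new column names do not collide with existing ones is skipped, since those existing columns are about to be replaced. A minimal sketch of invoking the command directly, for example from a test or a script, assuming admin_user is a valid User instance and dataset_id refers to an existing dataset (both assumptions):

from superset.datasets.commands.update import UpdateDatasetCommand

changed_model = UpdateDatasetCommand(
    admin_user,             # User performing the change (assumed to exist)
    dataset_id,             # primary key of an existing dataset (assumed)
    {"columns": [{"column_name": "new_col", "type": "INTEGER"}]},
    override_columns=True,  # replace the column list instead of merging it
).run()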
15 changes: 13 additions & 2 deletions superset/datasets/dao.py
@@ -143,8 +143,12 @@ def validate_metrics_uniqueness(dataset_id: int, metrics_names: List[str]) -> bool:
return len(dataset_query) == 0

@classmethod
def update(
cls, model: SqlaTable, properties: Dict[str, Any], commit: bool = True
def update( # pylint: disable=W:279
cls,
model: SqlaTable,
properties: Dict[str, Any],
commit: bool = True,
override_columns: bool = False,
) -> Optional[SqlaTable]:
"""
Updates a Dataset model on the metadata DB
@@ -175,6 +179,13 @@ def update(
new_metrics.append(metric_obj)
properties["metrics"] = new_metrics

if override_columns:
# remove columns initially for full refresh
original_properties = properties["columns"]
properties["columns"] = []
super().update(model, properties, commit=commit)
properties["columns"] = original_properties

return super().update(model, properties, commit=commit)

@classmethod
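With override_columns set, the DAO writes the dataset twice: first with an empty columns list so the existing columns are detached, then with the payload columns restored so only the supplied columns remain. The same two-pass pattern in isolation, as a generic sketch (not Superset code; update_fn stands in for a persistence call like BaseDAO.update):

from typing import Any, Callable, Dict

def replace_columns(
    update_fn: Callable[[Any, Dict[str, Any]], Any],
    model: Any,
    properties: Dict[str, Any],
) -> Any:
    # First pass: persist the model with no columns, dropping the old ones.
    new_columns = properties["columns"]
    properties["columns"] = []
    update_fn(model, properties)
    # Second pass: persist again so only the new columns are attached.
    properties["columns"] = new_columns
    return update_fn(model, properties)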
38 changes: 38 additions & 0 deletions tests/datasets/api_tests.py
@@ -547,6 +547,44 @@ def test_update_dataset_item(self):
db.session.delete(dataset)
db.session.commit()

def test_update_dataset_item_w_override_columns(self):
"""
Dataset API: Test update dataset with override columns
"""
# Add default dataset
dataset = self.insert_default_dataset()
self.login(username="admin")
dataset_data = {
"columns": [
{
"column_name": "new_col",
"description": "description",
"expression": "expression",
"type": "INTEGER",
"verbose_name": "New Col",
}
],
"description": "changed description",
}
uri = f"api/v1/dataset/{dataset.id}?override_columns=true"
rv = self.put_assert_metric(uri, dataset_data, "put")
assert rv.status_code == 200

columns = (
db.session.query(TableColumn)
.filter_by(table_id=dataset.id)
.order_by("column_name")
.all()
)

assert columns[0].column_name == dataset_data["columns"][0]["column_name"]
assert columns[0].description == dataset_data["columns"][0]["description"]
assert columns[0].expression == dataset_data["columns"][0]["expression"]
assert columns[0].type == dataset_data["columns"][0]["type"]

db.session.delete(dataset)
db.session.commit()

def test_update_dataset_create_column(self):
"""
Dataset API: Test update dataset create column
