Skip to content

Commit

Permalink
Merge branch 'master' of https://github.com/apache/superset into add-…
Browse files Browse the repository at this point in the history
…cypress-cmds
  • Loading branch information
hughhhh committed Sep 1, 2021
2 parents 08380c8 + e024f8c commit 7d3fb3e
Show file tree
Hide file tree
Showing 19 changed files with 599 additions and 55 deletions.
6 changes: 3 additions & 3 deletions .github/CODEOWNERS
Validating CODEOWNERS rules …
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,6 @@
/superset/migrations/ @apache/superset-committers

# Notify Preset team when ephemeral env settings are changed
.github/workflows/ecs-task-definition.json @robdiciuccio @craig-rueda @willbarrett @rusackas @eschutho @dpgaspar @nytai @mistercrunch
.github/workflows/docker-ephemeral-env.yml @robdiciuccio @craig-rueda @willbarrett @rusackas @eschutho @dpgaspar @nytai @mistercrunch
.github/workflows/ephemeral*.yml @robdiciuccio @craig-rueda @willbarrett @rusackas @eschutho @dpgaspar @nytai @mistercrunch
.github/workflows/ecs-task-definition.json @robdiciuccio @craig-rueda @rusackas @eschutho @dpgaspar @nytai @mistercrunch
.github/workflows/docker-ephemeral-env.yml @robdiciuccio @craig-rueda @rusackas @eschutho @dpgaspar @nytai @mistercrunch
.github/workflows/ephemeral*.yml @robdiciuccio @craig-rueda @rusackas @eschutho @dpgaspar @nytai @mistercrunch
2 changes: 1 addition & 1 deletion docs/src/pages/docs/Connecting to Databases/snowflake.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ version: 1
## Snowflake

The recommended connector library for Snowflake is
[snowflake-sqlalchemy](https://pypi.org/project/snowflake-sqlalchemy/).
[snowflake-sqlalchemy](https://pypi.org/project/snowflake-sqlalchemy/1.2.4/)<=1.2.4. (This version is required until Superset migrates to sqlalchemy>=1.4.0)

The connection string for Snowflake looks like this:

Expand Down
4 changes: 3 additions & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -151,7 +151,9 @@ def get_git_sha() -> str:
"shillelagh": [
"shillelagh[datasetteapi,gsheetsapi,socrata,weatherapi]>=1.0.3, <2"
],
"snowflake": ["snowflake-sqlalchemy>=1.2.3, <1.3"],
"snowflake": [
"snowflake-sqlalchemy==1.2.4"
], # PINNED! 1.2.5 introduced breaking changes requiring sqlalchemy>=1.4.0
"teradata": ["sqlalchemy-teradata==0.9.0.dev0"],
"thumbnails": ["Pillow>=7.0.0, <8.0.0"],
"vertica": ["sqlalchemy-vertica-python>=0.5.9, < 0.6"],
Expand Down
15 changes: 14 additions & 1 deletion superset-frontend/src/SqlLab/reducers/sqlLab.js
Original file line number Diff line number Diff line change
Expand Up @@ -520,7 +520,20 @@ export default function sqlLabReducer(state = {}, action) {
if (changedQuery.changedOn > queriesLastUpdate) {
queriesLastUpdate = changedQuery.changedOn;
}
newQueries[id] = { ...state.queries[id], ...changedQuery };
const prevState = state.queries[id]?.state;
const currentState = changedQuery.state;
newQueries[id] = {
...state.queries[id],
...changedQuery,
// race condition:
// because of async behavior, sql lab may still poll a couple of seconds
// when it started fetching or finished rendering results
state:
currentState === 'success' &&
['fetching', 'success'].includes(prevState)
? prevState
: currentState,
};
change = true;
}
});
Expand Down
9 changes: 8 additions & 1 deletion superset-frontend/src/chart/Chart.jsx
Original file line number Diff line number Diff line change
Expand Up @@ -68,6 +68,9 @@ const propTypes = {
};

const BLANK = {};
const NONEXISTENT_DATASET = t(
'The dataset associated with this chart no longer exists',
);

const defaultProps = {
addFilter: () => BLANK,
Expand Down Expand Up @@ -178,7 +181,11 @@ class Chart extends React.PureComponent {
const message = chartAlert || queryResponse?.message;

// if datasource is still loading, don't render JS errors
if (chartAlert && datasource === PLACEHOLDER_DATASOURCE) {
if (
chartAlert !== undefined &&
chartAlert !== NONEXISTENT_DATASET &&
datasource === PLACEHOLDER_DATASOURCE
) {
return (
<Styles
data-ui-anchor="chart"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -299,6 +299,8 @@ class SliceHeaderControls extends React.PureComponent<
modalBody={
<ViewQueryModal latestQueryFormData={this.props.formData} />
}
draggable
resizable
responsive
/>
</Menu.Item>
Expand Down
1 change: 1 addition & 0 deletions superset-frontend/src/explore/controlPanels/TimeTable.js
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@ export default {
config: {
type: 'CollectionControl',
label: t('Time series columns'),
renderTrigger: true,
validators: [validateNonEmpty],
controlName: 'TimeSeriesColumnControl',
},
Expand Down
3 changes: 2 additions & 1 deletion superset/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -556,7 +556,8 @@ def _try_json_readsha(filepath: str, length: int) -> Optional[str]:
# Allowed format types for upload on Database view
EXCEL_EXTENSIONS = {"xlsx", "xls"}
CSV_EXTENSIONS = {"csv", "tsv", "txt"}
ALLOWED_EXTENSIONS = {*EXCEL_EXTENSIONS, *CSV_EXTENSIONS}
COLUMNAR_EXTENSIONS = {"parquet", "zip"}
ALLOWED_EXTENSIONS = {*EXCEL_EXTENSIONS, *CSV_EXTENSIONS, *COLUMNAR_EXTENSIONS}

# CSV Options: key/value pairs that will be passed as argument to DataFrame.to_csv
# method.
Expand Down
17 changes: 16 additions & 1 deletion superset/initialization/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -167,6 +167,7 @@ def init_views(self) -> None:
DashboardModelViewAsync,
)
from superset.views.database.views import (
ColumnarToDatabaseView,
CsvToDatabaseView,
DatabaseView,
ExcelToDatabaseView,
Expand Down Expand Up @@ -281,6 +282,7 @@ def init_views(self) -> None:
appbuilder.add_view_no_menu(CssTemplateAsyncModelView)
appbuilder.add_view_no_menu(CsvToDatabaseView)
appbuilder.add_view_no_menu(ExcelToDatabaseView)
appbuilder.add_view_no_menu(ColumnarToDatabaseView)
appbuilder.add_view_no_menu(Dashboard)
appbuilder.add_view_no_menu(DashboardModelViewAsync)
appbuilder.add_view_no_menu(Datasource)
Expand Down Expand Up @@ -371,7 +373,20 @@ def init_views(self) -> None:
)
),
)

appbuilder.add_link(
"Upload a Columnar file",
label=__("Upload a Columnar file"),
href="/columnartodatabaseview/form",
icon="fa-upload",
category="Data",
category_label=__("Data"),
category_icon="fa-wrench",
cond=lambda: bool(
self.config["COLUMNAR_EXTENSIONS"].intersection(
self.config["ALLOWED_EXTENSIONS"]
)
),
)
try:
import xlrd # pylint: disable=unused-import

Expand Down
2 changes: 1 addition & 1 deletion superset/jinja_context.py
Original file line number Diff line number Diff line change
Expand Up @@ -297,7 +297,7 @@ def get_filters(self, column: str, remove_filter: bool = False) -> List[Filter]:

for flt in form_data.get("adhoc_filters", []):
val: Union[Any, List[Any]] = flt.get("comparator")
op: str = flt["operator"].upper() if "operator" in flt else None
op: str = flt["operator"].upper() if flt.get("operator") else None
# fltOpName: str = flt.get("filterOptionName")
if (
flt.get("expressionType") == "SIMPLE"
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,64 @@
{#
  Licensed to the Apache Software Foundation (ASF) under one
  or more contributor license agreements.  See the NOTICE file
  distributed with this work for additional information
  regarding copyright ownership.  The ASF licenses this file
  to you under the Apache License, Version 2.0 (the
  "License"); you may not use this file except in compliance
  with the License.  You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

  Unless required by applicable law or agreed to in writing,
  software distributed under the License is distributed on an
  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
  KIND, either express or implied.  See the License for the
  specific language governing permissions and limitations
  under the License.
#}
{# Upload-a-Columnar-file form: extends the generic FAB edit view and swaps
   the free-text "schema" input for a dropdown restricted to the schemas the
   selected database allows for file uploads. #}
{% extends 'appbuilder/general/model/edit.html' %}

{% block tail_js %}
{{ super() }}
<script>
// jQuery handles for the database selector (#con) and the schema field.
var db = $("#con");
var schema = $("#schema");

// this element is a text input
// copy it here so it can be reused later
var any_schema_is_allowed = schema.clone();

// Populate the schema field for the initially selected database, then
// refresh it whenever the user picks a different database.
update_schemas_allowed_for_columnar_upload(db.val());
db.change(function(){
update_schemas_allowed_for_columnar_upload(db.val());
});

// Fetch the list of upload-allowed schemas for db_id and rebuild the
// schema field from the response.
// NOTE(review): on failure this dereferences error.responseJSON.error,
// which throws if the server returned a non-JSON error body — confirm.
function update_schemas_allowed_for_columnar_upload(db_id) {
$.ajax({
method: "GET",
url: "/superset/schemas_access_for_file_upload",
data: {db_id: db_id},
dataType: 'json',
contentType: "application/json; charset=utf-8"
}).done(function(data) {
change_schema_field_in_formview(data)
}).fail(function(error) {
var errorMsg = error.responseJSON.error;
alert("ERROR: " + errorMsg);
});
}

// Replace #schema with a required <select> of the allowed schemas, or
// restore the original free-text input when no restriction applies.
// NOTE(review): schema names are concatenated into HTML without escaping;
// values come from our own endpoint, but escaping them would be safer.
function change_schema_field_in_formview(schemas_allowed){
if (schemas_allowed && schemas_allowed.length > 0) {
var dropdown_schema_lists = '<select id="schema" name="schema" required>';
schemas_allowed.forEach(function(schema_allowed) {
dropdown_schema_lists += ('<option value="' + schema_allowed + '">' + schema_allowed + '</option>');
});
dropdown_schema_lists += '</select>';
$("#schema").replaceWith(dropdown_schema_lists);
} else {
// No restriction: fall back to the cloned free-text input.
$("#schema").replaceWith(any_schema_is_allowed)
}
}
</script>
{% endblock %}
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@
function update_schemas_allowed_for_csv_upload(db_id) {
$.ajax({
method: "GET",
url: "/superset/schemas_access_for_csv_upload",
url: "/superset/schemas_access_for_file_upload",
data: {db_id: db_id},
dataType: 'json',
contentType: "application/json; charset=utf-8"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@
function update_schemas_allowed_for_excel_upload(db_id) {
$.ajax({
method: "GET",
url: "/superset/schemas_access_for_excel_upload",
url: "/superset/schemas_access_for_file_upload",
data: {db_id: db_id},
dataType: 'json',
contentType: "application/json; charset=utf-8"
Expand Down
88 changes: 55 additions & 33 deletions superset/views/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -2564,6 +2564,14 @@ def sql_json(self) -> FlaskResponse:
execution_context = SqlJsonExecutionContext(request.json)
return self.sql_json_exec(execution_context, request.json, log_params)

@classmethod
def is_query_handled(cls, query: Optional[Query]) -> bool:
    """Return True when an existing query is already being processed.

    A query whose status is RUNNING, PENDING, or TIMED_OUT counts as
    handled, so callers can reuse it instead of launching a duplicate.
    """
    if query is None:
        return False
    return query.status in (
        QueryStatus.RUNNING,
        QueryStatus.PENDING,
        QueryStatus.TIMED_OUT,
    )

def sql_json_exec( # pylint: disable=too-many-statements,too-many-locals
self,
execution_context: SqlJsonExecutionContext,
Expand All @@ -2574,35 +2582,16 @@ def sql_json_exec( # pylint: disable=too-many-statements,too-many-locals

session = db.session()

# check to see if this query is already running
query = (
session.query(Query)
.filter_by(
client_id=execution_context.client_id,
user_id=execution_context.user_id,
sql_editor_id=execution_context.sql_editor_id,
)
.one_or_none()
)
if query is not None and query.status in [
QueryStatus.RUNNING,
QueryStatus.PENDING,
QueryStatus.TIMED_OUT,
]:
query = self._get_existing_query(execution_context, session)

if self.is_query_handled(query):
# return the existing query
payload = json.dumps(
{"query": query.to_dict()}, default=utils.json_int_dttm_ser
{"query": query.to_dict()}, default=utils.json_int_dttm_ser # type: ignore
)
return json_success(payload)

mydb = session.query(Database).get(execution_context.database_id)
if not mydb:
raise SupersetGenericErrorException(
__(
"The database referenced in this query was not found. Please "
"contact an administrator for further assistance or try again."
)
)
mydb = self._get_the_query_db(execution_context, session)

# Set tmp_schema_name for CTA
# TODO(bkyryliuk): consider parsing, splitting tmp_schema_name from
Expand Down Expand Up @@ -2708,9 +2697,9 @@ def sql_json_exec( # pylint: disable=too-many-statements,too-many-locals
mydb.db_engine_spec.get_limit_from_sql(rendered_query),
execution_context.limit,
]
if limits[0] is None or limits[0] > limits[1]:
if limits[0] is None or limits[0] > limits[1]: # type: ignore
query.limiting_factor = LimitingFactor.DROPDOWN
elif limits[1] > limits[0]:
elif limits[1] > limits[0]: # type: ignore
query.limiting_factor = LimitingFactor.QUERY
else: # limits[0] == limits[1]
query.limiting_factor = LimitingFactor.QUERY_AND_DROPDOWN
Expand All @@ -2734,6 +2723,39 @@ def sql_json_exec( # pylint: disable=too-many-statements,too-many-locals
session, rendered_query, query, expand_data, log_params
)

@classmethod
def _get_the_query_db(
    cls, execution_context: SqlJsonExecutionContext, session: Session
) -> Database:
    """Resolve the Database targeted by the execution context.

    Delegates existence checking to _validate_query_db, which raises
    when the id does not resolve to a database.
    """
    database = session.query(Database).get(execution_context.database_id)
    cls._validate_query_db(database)
    return database

@classmethod
def _validate_query_db(cls, database: Optional[Database]) -> None:
    """Raise SupersetGenericErrorException when no database was resolved."""
    if database:
        return
    raise SupersetGenericErrorException(
        __(
            "The database referenced in this query was not found. Please "
            "contact an administrator for further assistance or try again."
        )
    )

@classmethod
def _get_existing_query(
    cls, execution_context: SqlJsonExecutionContext, session: Session
) -> Optional[Query]:
    """Fetch the query matching this client/user/editor triple, if any.

    Returns None when no matching Query row exists; raises if the
    filter unexpectedly matches more than one row (one_or_none).
    """
    return (
        session.query(Query)
        .filter_by(
            client_id=execution_context.client_id,
            user_id=execution_context.user_id,
            sql_editor_id=execution_context.sql_editor_id,
        )
        .one_or_none()
    )

@has_access
@event_logger.log_this
@expose("/csv/<client_id>")
Expand Down Expand Up @@ -3004,12 +3026,12 @@ def _get_sqllab_tabs(user_id: int) -> Dict[str, Any]:
.first()
)

databases: Dict[int, Any] = {
database.id: {
databases: Dict[int, Any] = {}
for database in DatabaseDAO.find_all():
databases[database.id] = {
k: v for k, v in database.to_json().items() if k in DATABASE_KEYS
}
for database in DatabaseDAO.find_all()
}
databases[database.id]["backend"] = database.backend
queries: Dict[str, Any] = {}

# These are unnecessary if sqllab backend persistence is disabled
Expand Down Expand Up @@ -3072,11 +3094,11 @@ def sqllab_history(self) -> FlaskResponse:
@api
@has_access_api
@event_logger.log_this
@expose("/schemas_access_for_csv_upload")
def schemas_access_for_csv_upload(self) -> FlaskResponse:
@expose("/schemas_access_for_file_upload")
def schemas_access_for_file_upload(self) -> FlaskResponse:
"""
This method exposes an API endpoint to
get the schema access control settings for csv upload in this database
get the schema access control settings for file upload in this database
"""
if not request.args.get("db_id"):
return json_error_response("No database is allowed for your csv upload")
Expand Down

0 comments on commit 7d3fb3e

Please sign in to comment.