style: enforcing mypy typing for connectors (apache#9824)
Co-authored-by: John Bodley <john.bodley@airbnb.com>
2 people authored and pkdotson committed May 26, 2020
1 parent 17812e9 commit 1d06e4a
Showing 17 changed files with 392 additions and 240 deletions.
4 changes: 2 additions & 2 deletions setup.cfg
@@ -45,15 +45,15 @@ combine_as_imports = true
include_trailing_comma = true
line_length = 88
known_first_party = superset
known_third_party =alembic,apispec,backoff,bleach,cachelib,celery,click,colorama,contextlib2,croniter,cryptography,dateutil,flask,flask_appbuilder,flask_babel,flask_caching,flask_compress,flask_login,flask_migrate,flask_sqlalchemy,flask_talisman,flask_testing,flask_wtf,geohash,geopy,humanize,isodate,jinja2,markdown,markupsafe,marshmallow,msgpack,numpy,pandas,parsedatetime,pathlib2,polyline,prison,pyarrow,pyhive,pytz,retry,selenium,setuptools,simplejson,sphinx_rtd_theme,sqlalchemy,sqlalchemy_utils,sqlparse,werkzeug,wtforms,wtforms_json,yaml
known_third_party =alembic,apispec,backoff,bleach,cachelib,celery,click,colorama,contextlib2,croniter,cryptography,dataclasses,dateutil,flask,flask_appbuilder,flask_babel,flask_caching,flask_compress,flask_login,flask_migrate,flask_sqlalchemy,flask_talisman,flask_testing,flask_wtf,geohash,geopy,humanize,isodate,jinja2,markdown,markupsafe,marshmallow,msgpack,numpy,pandas,parsedatetime,pathlib2,polyline,prison,pyarrow,pyhive,pytz,retry,selenium,setuptools,simplejson,sphinx_rtd_theme,sqlalchemy,sqlalchemy_utils,sqlparse,werkzeug,wtforms,wtforms_json,yaml
multi_line_output = 3
order_by_type = false

[mypy]
ignore_missing_imports = true
no_implicit_optional = true

[mypy-superset.bin.*,superset.charts.*,superset.datasets.*,superset.dashboards.*,superset.commands.*,superset.common.*,superset.dao.*,superset.db_engine_specs.*,superset.db_engines.*,superset.examples.*,superset.migrations.*,superset.models.*,superset.queries.*,superset.security.*,superset.sql_validators.*,superset.tasks.*,superset.translations.*]
[mypy-superset.bin.*,superset.charts.*,superset.commands.*,superset.common.*,superset.connectors.*,superset.dao.*,superset.dashboards.*,superset.datasets.*,superset.db_engine_specs.*,superset.db_engines.*,superset.examples.*,superset.migrations.*,superset.models.*,superset.queries.*,superset.security.*,superset.sql_validators.*,superset.tasks.*,superset.translations.*]
check_untyped_defs = true
disallow_untyped_calls = true
disallow_untyped_defs = true
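
The three flags above make mypy strict for the listed packages, which now include superset.connectors.*: disallow_untyped_defs rejects function definitions without annotations, check_untyped_defs makes mypy check function bodies even when the function is unannotated, and disallow_untyped_calls rejects calls from annotated code to functions that remain untyped. A minimal sketch of the effect, using made-up names rather than code from this commit:

from typing import Dict, List


class Column:
    def __init__(self, column_name: str, type_: str) -> None:
        self.column_name = column_name
        self.type = type_


# Rejected under disallow_untyped_defs: no parameter or return annotations.
# def columns_types(columns):
#     return {c.column_name: c.type for c in columns}


# Accepted: fully annotated, so mypy checks the body and its call sites.
def columns_types(columns: List[Column]) -> Dict[str, str]:
    return {c.column_name: c.type for c in columns}
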
61 changes: 35 additions & 26 deletions superset/connectors/base/models.py
@@ -20,12 +20,12 @@
from flask_appbuilder.security.sqla.models import User
from sqlalchemy import and_, Boolean, Column, Integer, String, Text
from sqlalchemy.ext.declarative import declared_attr
from sqlalchemy.orm import foreign, Query, relationship
from sqlalchemy.orm import foreign, Query, relationship, RelationshipProperty

from superset.constants import NULL_STRING
from superset.models.helpers import AuditMixinNullable, ImportMixin, QueryResult
from superset.models.slice import Slice
from superset.typing import FilterValue, FilterValues
from superset.typing import FilterValue, FilterValues, QueryObjectDict
from superset.utils import core as utils

METRIC_FORM_DATA_PARAMS = [
@@ -93,7 +93,7 @@ class BaseDatasource(
update_from_object_fields: List[str]

@declared_attr
def slices(self):
def slices(self) -> RelationshipProperty:
return relationship(
"Slice",
primaryjoin=lambda: and_(
@@ -117,15 +117,15 @@ def column_names(self) -> List[str]:
return sorted([c.column_name for c in self.columns], key=lambda x: x or "")

@property
def columns_types(self) -> Dict:
def columns_types(self) -> Dict[str, str]:
return {c.column_name: c.type for c in self.columns}

@property
def main_dttm_col(self) -> str:
return "timestamp"

@property
def datasource_name(self):
def datasource_name(self) -> str:
raise NotImplementedError()

@property
@@ -143,7 +143,7 @@ def filterable_column_names(self) -> List[str]:
return sorted([c.column_name for c in self.columns if c.filterable])

@property
def dttm_cols(self) -> List:
def dttm_cols(self) -> List[str]:
return []

@property
@@ -182,7 +182,7 @@ def short_data(self) -> Dict[str, Any]:
}

@property
def select_star(self):
def select_star(self) -> Optional[str]:
pass

@property
@@ -336,18 +336,18 @@ def handle_single_value(value: Optional[FilterValue]) -> Optional[FilterValue]:
values = None
return values

def external_metadata(self):
def external_metadata(self) -> List[Dict[str, str]]:
"""Returns column information from the external system"""
raise NotImplementedError()

def get_query_str(self, query_obj) -> str:
def get_query_str(self, query_obj: QueryObjectDict) -> str:
"""Returns a query as a string
This is used to be displayed to the user so that she/he can
understand what is taking place behind the scene"""
raise NotImplementedError()

def query(self, query_obj) -> QueryResult:
def query(self, query_obj: QueryObjectDict) -> QueryResult:
"""Executes the query and returns a dataframe
query_obj is a dictionary representing Superset's query interface.
@@ -363,7 +363,7 @@ def values_for_column(self, column_name: str, limit: int = 10000) -> List:
raise NotImplementedError()

@staticmethod
def default_query(qry) -> Query:
def default_query(qry: Query) -> Query:
return qry

def get_column(self, column_name: Optional[str]) -> Optional["BaseColumn"]:
@@ -376,8 +376,8 @@ def get_column(self, column_name: Optional[str]) -> Optional["BaseColumn"]:

@staticmethod
def get_fk_many_from_list(
object_list, fkmany, fkmany_class, key_attr
): # pylint: disable=too-many-locals
object_list: List[Any], fkmany: List[Column], fkmany_class: Type, key_attr: str,
) -> List[Column]: # pylint: disable=too-many-locals
"""Update ORM one-to-many list from object list
Used for syncing metrics and columns using the same code"""
@@ -390,8 +390,9 @@ def get_fk_many_from_list(
# sync existing fks
for fk in fkmany:
obj = object_dict.get(getattr(fk, key_attr))
for attr in fkmany_class.update_from_object_fields:
setattr(fk, attr, obj.get(attr))
if obj:
for attr in fkmany_class.update_from_object_fields:
setattr(fk, attr, obj.get(attr))

# create new fks
new_fks = []
@@ -409,7 +410,7 @@ def get_fk_many_from_list(
fkmany += new_fks
return fkmany

def update_from_object(self, obj) -> None:
def update_from_object(self, obj: Dict[str, Any]) -> None:
"""Update datasource from a data structure
The UI's table editor crafts a complex data structure that
@@ -426,18 +427,26 @@ def update_from_object(self, obj) -> None:
self.owners = obj.get("owners", [])

# Syncing metrics
metrics = self.get_fk_many_from_list(
obj.get("metrics"), self.metrics, self.metric_class, "metric_name"
metrics = (
self.get_fk_many_from_list(
obj["metrics"], self.metrics, self.metric_class, "metric_name"
)
if self.metric_class and "metrics" in obj
else []
)
self.metrics = metrics

# Syncing columns
self.columns = self.get_fk_many_from_list(
obj.get("columns"), self.columns, self.column_class, "column_name"
self.columns = (
self.get_fk_many_from_list(
obj["columns"], self.columns, self.column_class, "column_name"
)
if self.column_class and "columns" in obj
else []
)

def get_extra_cache_keys( # pylint: disable=no-self-use
self, query_obj: Dict[str, Any] # pylint: disable=unused-argument
self, query_obj: QueryObjectDict # pylint: disable=unused-argument
) -> List[Hashable]:
""" If a datasource needs to provide additional keys for calculation of
cache keys, those can be provided via this method
@@ -474,7 +483,7 @@ class BaseColumn(AuditMixinNullable, ImportMixin):
# [optional] Set this to support import/export functionality
export_fields: List[Any] = []

def __repr__(self):
def __repr__(self) -> str:
return self.column_name

num_types = (
@@ -505,11 +514,11 @@ def is_string(self) -> bool:
return self.type and any(map(lambda t: t in self.type.upper(), self.str_types))

@property
def expression(self):
def expression(self) -> Column:
raise NotImplementedError()

@property
def python_date_format(self):
def python_date_format(self) -> Column:
raise NotImplementedError()

@property
@@ -557,11 +566,11 @@ class BaseMetric(AuditMixinNullable, ImportMixin):
"""

@property
def perm(self):
def perm(self) -> Optional[str]:
raise NotImplementedError()

@property
def expression(self):
def expression(self) -> Column:
raise NotImplementedError()

@property
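
The bare query_obj parameters above are now annotated with QueryObjectDict from superset.typing. A rough, self-contained sketch of a connector method satisfying the new signature, with QueryObjectDict approximated as a plain dict alias purely for illustration (the real alias lives in superset.typing):

from typing import Any, Dict, List

# Stand-in for superset.typing.QueryObjectDict, assumed here to be a dict alias.
QueryObjectDict = Dict[str, Any]


class ExampleDatasource:
    """Hypothetical connector implementing the typed get_query_str contract."""

    def get_query_str(self, query_obj: QueryObjectDict) -> str:
        # Render something readable for the user; real connectors build full SQL.
        columns: List[str] = query_obj.get("columns", ["*"])
        return f"SELECT {', '.join(columns)} FROM example_table"


print(ExampleDatasource().get_query_str({"columns": ["ds", "num_boys"]}))
# SELECT ds, num_boys FROM example_table
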
3 changes: 2 additions & 1 deletion superset/connectors/base/views.py
@@ -16,12 +16,13 @@
# under the License.
from flask import Markup

from superset.connectors.base.models import BaseDatasource
from superset.exceptions import SupersetException
from superset.views.base import SupersetModelView


class DatasourceModelView(SupersetModelView):
def pre_delete(self, item):
def pre_delete(self, item: BaseDatasource) -> None:
if item.slices:
raise SupersetException(
Markup(
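
As a self-contained illustration (stand-in classes, not the real Flask-AppBuilder or Superset ones) of the rule the newly typed pre_delete hook enforces: a datasource cannot be deleted while any chart (slice) still references it.

from typing import List


class SupersetError(Exception):
    """Stand-in for superset.exceptions.SupersetException."""


class FakeDatasource:
    """Stand-in for BaseDatasource, exposing the attribute pre_delete inspects."""

    def __init__(self, slices: List[str]) -> None:
        self.slices = slices


def pre_delete(item: FakeDatasource) -> None:
    # Block deletion while charts still point at the datasource.
    if item.slices:
        raise SupersetError("Cannot delete a datasource that has slices attached to it")


pre_delete(FakeDatasource(slices=[]))            # passes: nothing references it
# pre_delete(FakeDatasource(slices=["a chart"])) # would raise SupersetError
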
