Skip to content

Commit

Permalink
chore: Upgrade pylint to 2.5.3 and fix most new rules (apache#10101)
Browse files Browse the repository at this point in the history
* Bump pylint version to 2.5.3

* Add a global disable for the most common new pylint error

* Fix a bunch of files containing very few errors

* More pylint tweakage, low-hanging fruit

* More easy stuff...

* Fix more erroring files

* Fix the last couple of errors, clean pylint!

* Black

* Fix mypy issue in connectors/druid/models.py
  • Loading branch information
Will Barrett authored and auxten committed Nov 20, 2020
1 parent 68152b6 commit 8198cd7
Show file tree
Hide file tree
Showing 40 changed files with 166 additions and 135 deletions.
4 changes: 2 additions & 2 deletions .pylintrc
Expand Up @@ -81,7 +81,7 @@ confidence=
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use "--disable=all --enable=classes
# --disable=W"
disable=standarderror-builtin,long-builtin,dict-view-method,intern-builtin,suppressed-message,no-absolute-import,unpacking-in-except,apply-builtin,delslice-method,indexing-exception,old-raise-syntax,print-statement,cmp-builtin,reduce-builtin,useless-suppression,coerce-method,input-builtin,cmp-method,raw_input-builtin,nonzero-method,backtick,basestring-builtin,setslice-method,reload-builtin,oct-method,map-builtin-not-iterating,execfile-builtin,old-octal-literal,zip-builtin-not-iterating,buffer-builtin,getslice-method,metaclass-assignment,xrange-builtin,long-suffix,round-builtin,range-builtin-not-iterating,next-method-called,dict-iter-method,parameter-unpacking,unicode-builtin,unichr-builtin,import-star-module-level,raising-string,filter-builtin-not-iterating,old-ne-operator,using-cmp-argument,coerce-builtin,file-builtin,old-division,hex-method,invalid-unary-operand-type,missing-docstring,too-many-lines,duplicate-code,bad-continuation,ungrouped-imports
disable=standarderror-builtin,long-builtin,dict-view-method,intern-builtin,suppressed-message,no-absolute-import,unpacking-in-except,apply-builtin,delslice-method,indexing-exception,old-raise-syntax,print-statement,cmp-builtin,reduce-builtin,useless-suppression,coerce-method,input-builtin,cmp-method,raw_input-builtin,nonzero-method,backtick,basestring-builtin,setslice-method,reload-builtin,oct-method,map-builtin-not-iterating,execfile-builtin,old-octal-literal,zip-builtin-not-iterating,buffer-builtin,getslice-method,metaclass-assignment,xrange-builtin,long-suffix,round-builtin,range-builtin-not-iterating,next-method-called,dict-iter-method,parameter-unpacking,unicode-builtin,unichr-builtin,import-star-module-level,raising-string,filter-builtin-not-iterating,old-ne-operator,using-cmp-argument,coerce-builtin,file-builtin,old-division,hex-method,invalid-unary-operand-type,missing-docstring,too-many-lines,duplicate-code,bad-continuation,ungrouped-imports,import-outside-toplevel


[REPORTS]
Expand Down Expand Up @@ -254,7 +254,7 @@ notes=FIXME,XXX
[SIMILARITIES]

# Minimum lines number of a similarity.
min-similarity-lines=4
min-similarity-lines=5

# Ignore comments when computing similarities.
ignore-comments=yes
Expand Down
2 changes: 1 addition & 1 deletion requirements-dev.txt
Expand Up @@ -28,7 +28,7 @@ psycopg2-binary==2.8.5
pycodestyle==2.5.0
pydruid==0.6.1
pyhive==0.6.2
pylint==1.9.2
pylint==2.5.3
redis==3.5.1
requests==2.23.0
statsd==3.3.0
Expand Down
3 changes: 1 addition & 2 deletions superset/app.py
Expand Up @@ -95,7 +95,6 @@ def post_init(self) -> None:
"""
Called after any other init tasks
"""
pass

def configure_celery(self) -> None:
celery_app.config_from_object(self.config["CELERY_CONFIG"])
Expand Down Expand Up @@ -593,7 +592,7 @@ def configure_wtf(self) -> None:
def register_blueprints(self) -> None:
for bp in self.config["BLUEPRINTS"]:
try:
logger.info(f"Registering blueprint: '{bp.name}'")
logger.info("Registering blueprint: %s", bp.name)
self.flask_app.register_blueprint(bp)
except Exception: # pylint: disable=broad-except
logger.exception("blueprint registration failed")
Expand Down
20 changes: 13 additions & 7 deletions superset/charts/api.py
Expand Up @@ -219,7 +219,9 @@ def post(self) -> Response:
except ChartInvalidError as ex:
return self.response_422(message=ex.normalized_messages())
except ChartCreateFailedError as ex:
logger.error(f"Error creating model {self.__class__.__name__}: {ex}")
logger.error(
"Error creating model %s: %s", self.__class__.__name__, str(ex)
)
return self.response_422(message=str(ex))

@expose("/<pk>", methods=["PUT"])
Expand Down Expand Up @@ -287,7 +289,9 @@ def put( # pylint: disable=too-many-return-statements, arguments-differ
except ChartInvalidError as ex:
return self.response_422(message=ex.normalized_messages())
except ChartUpdateFailedError as ex:
logger.error(f"Error updating model {self.__class__.__name__}: {ex}")
logger.error(
"Error updating model %s: %s", self.__class__.__name__, str(ex)
)
return self.response_422(message=str(ex))

@expose("/<pk>", methods=["DELETE"])
Expand Down Expand Up @@ -334,7 +338,9 @@ def delete(self, pk: int) -> Response: # pylint: disable=arguments-differ
except ChartForbiddenError:
return self.response_403()
except ChartDeleteFailedError as ex:
logger.error(f"Error deleting model {self.__class__.__name__}: {ex}")
logger.error(
"Error deleting model %s: %s", self.__class__.__name__, str(ex)
)
return self.response_422(message=str(ex))

@expose("/", methods=["DELETE"])
Expand Down Expand Up @@ -386,9 +392,7 @@ def bulk_delete(
return self.response(
200,
message=ngettext(
f"Deleted %(num)d chart",
f"Deleted %(num)d charts",
num=len(item_ids),
"Deleted %(num)d chart", "Deleted %(num)d charts", num=len(item_ids)
),
)
except ChartNotFoundError:
Expand Down Expand Up @@ -464,13 +468,15 @@ def data(self) -> Response:
headers=generate_download_headers("csv"),
mimetype="application/csv",
)
elif result_format == ChartDataResultFormat.JSON:

if result_format == ChartDataResultFormat.JSON:
response_data = simplejson.dumps(
{"result": payload}, default=json_int_dttm_ser, ignore_nan=True
)
resp = make_response(response_data, 200)
resp.headers["Content-Type"] = "application/json; charset=utf-8"
return resp

raise self.response_400(message=f"Unsupported result_format: {result_format}")

@expose("/<pk>/thumbnail/<digest>/", methods=["GET"])
Expand Down
2 changes: 0 additions & 2 deletions superset/commands/base.py
Expand Up @@ -29,7 +29,6 @@ def run(self) -> Any:
Run executes the command. Can raise command exceptions
:raises: CommandException
"""
pass

@abstractmethod
def validate(self) -> None:
Expand All @@ -38,4 +37,3 @@ def validate(self) -> None:
Will raise exception if validation fails
:raises: CommandException
"""
pass
29 changes: 18 additions & 11 deletions superset/common/query_object.py
Expand Up @@ -141,8 +141,8 @@ def __init__(
if is_sip_38 and groupby:
self.columns += groupby
logger.warning(
f"The field `groupby` is deprecated. Viz plugins should "
f"pass all selectables via the `columns` field"
"The field `groupby` is deprecated. Viz plugins should "
"pass all selectables via the `columns` field"
)

self.orderby = orderby or []
Expand All @@ -151,32 +151,39 @@ def __init__(
for field in DEPRECATED_FIELDS:
if field.old_name in kwargs:
logger.warning(
f"The field `{field.old_name}` is deprecated, please use "
f"`{field.new_name}` instead."
"The field `%s` is deprecated, please use `%s` instead.",
field.old_name,
field.new_name,
)
value = kwargs[field.old_name]
if value:
if hasattr(self, field.new_name):
logger.warning(
f"The field `{field.new_name}` is already populated, "
f"replacing value with contents from `{field.old_name}`."
"The field `%s` is already populated, "
"replacing value with contents from `%s`.",
field.new_name,
field.old_name,
)
setattr(self, field.new_name, value)

# move deprecated extras fields to extras
for field in DEPRECATED_EXTRAS_FIELDS:
if field.old_name in kwargs:
logger.warning(
f"The field `{field.old_name}` is deprecated and should be "
f"passed to `extras` via the `{field.new_name}` property."
"The field `%s` is deprecated and should "
"be passed to `extras` via the `%s` property.",
field.old_name,
field.new_name,
)
value = kwargs[field.old_name]
if value:
if hasattr(self.extras, field.new_name):
logger.warning(
f"The field `{field.new_name}` is already populated in "
f"`extras`, replacing value with contents "
f"from `{field.old_name}`."
"The field `%s` is already populated in "
"`extras`, replacing value with contents "
"from `%s`.",
field.new_name,
field.old_name,
)
self.extras[field.new_name] = value

Expand Down
2 changes: 1 addition & 1 deletion superset/config.py
Expand Up @@ -898,7 +898,7 @@ class CeleryConfig: # pylint: disable=too-few-public-methods
print(f"Loaded your LOCAL configuration at [{cfg_path}]")
except Exception:
logger.exception(
f"Failed to import config for {CONFIG_PATH_ENV_VAR}={cfg_path}"
"Failed to import config for %s=%s", CONFIG_PATH_ENV_VAR, cfg_path
)
raise
elif importlib.util.find_spec("superset_config"):
Expand Down
4 changes: 2 additions & 2 deletions superset/connectors/base/models.py
Expand Up @@ -348,7 +348,7 @@ def handle_single_value(value: Optional[FilterValue]) -> Optional[FilterValue]:
value = utils.cast_to_num(value)
if value == NULL_STRING:
return None
elif value == "<empty string>":
if value == "<empty string>":
return ""
return value

Expand Down Expand Up @@ -516,7 +516,7 @@ class BaseColumn(AuditMixinNullable, ImportMixin):
export_fields: List[Any] = []

def __repr__(self) -> str:
return self.column_name
return str(self.column_name)

num_types = (
"DOUBLE",
Expand Down
15 changes: 5 additions & 10 deletions superset/connectors/druid/models.py
Expand Up @@ -14,8 +14,7 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=C,R,W
# pylint: disable=invalid-unary-operand-type
# pylint: skip-file
import json
import logging
import re
Expand Down Expand Up @@ -81,12 +80,7 @@
pass

try:
from superset.utils.core import (
DimSelector,
DTTM_ALIAS,
FilterOperator,
flasher,
)
from superset.utils.core import DimSelector, DTTM_ALIAS, FilterOperator, flasher
except ImportError:
pass

Expand Down Expand Up @@ -845,7 +839,8 @@ def granularity(
else:
granularity["type"] = "duration"
granularity["duration"] = (
utils.parse_human_timedelta(period_name).total_seconds() * 1000 # type: ignore
utils.parse_human_timedelta(period_name).total_seconds() # type: ignore
* 1000
)
return granularity

Expand Down Expand Up @@ -950,7 +945,7 @@ def resolve_postagg(

@staticmethod
def metrics_and_post_aggs(
metrics: List[Metric], metrics_dict: Dict[str, DruidMetric],
metrics: List[Metric], metrics_dict: Dict[str, DruidMetric]
) -> Tuple["OrderedDict[str, Any]", "OrderedDict[str, Any]"]:
# Separate metrics into those that are aggregations
# and those that are post aggregations
Expand Down
2 changes: 0 additions & 2 deletions superset/dao/exceptions.py
Expand Up @@ -22,8 +22,6 @@ class DAOException(SupersetException):
Base DAO exception class
"""

pass


class DAOCreateFailedError(DAOException):
"""
Expand Down
16 changes: 11 additions & 5 deletions superset/dashboards/api.py
Expand Up @@ -206,7 +206,9 @@ def post(self) -> Response:
except DashboardInvalidError as ex:
return self.response_422(message=ex.normalized_messages())
except DashboardCreateFailedError as ex:
logger.error(f"Error creating model {self.__class__.__name__}: {ex}")
logger.error(
"Error creating model %s: %s", self.__class__.__name__, str(ex)
)
return self.response_422(message=str(ex))

@expose("/<pk>", methods=["PUT"])
Expand Down Expand Up @@ -274,7 +276,9 @@ def put( # pylint: disable=too-many-return-statements, arguments-differ
except DashboardInvalidError as ex:
return self.response_422(message=ex.normalized_messages())
except DashboardUpdateFailedError as ex:
logger.error(f"Error updating model {self.__class__.__name__}: {ex}")
logger.error(
"Error updating model %s: %s", self.__class__.__name__, str(ex)
)
return self.response_422(message=str(ex))

@expose("/<pk>", methods=["DELETE"])
Expand Down Expand Up @@ -321,7 +325,9 @@ def delete(self, pk: int) -> Response: # pylint: disable=arguments-differ
except DashboardForbiddenError:
return self.response_403()
except DashboardDeleteFailedError as ex:
logger.error(f"Error deleting model {self.__class__.__name__}: {ex}")
logger.error(
"Error deleting model %s: %s", self.__class__.__name__, str(ex)
)
return self.response_422(message=str(ex))

@expose("/", methods=["DELETE"])
Expand Down Expand Up @@ -373,8 +379,8 @@ def bulk_delete(
return self.response(
200,
message=ngettext(
f"Deleted %(num)d dashboard",
f"Deleted %(num)d dashboards",
"Deleted %(num)d dashboard",
"Deleted %(num)d dashboards",
num=len(item_ids),
),
)
Expand Down
2 changes: 1 addition & 1 deletion superset/dashboards/dao.py
Expand Up @@ -49,7 +49,7 @@ def validate_update_slug_uniqueness(dashboard_id: int, slug: Optional[str]) -> b

@staticmethod
def update_charts_owners(model: Dashboard, commit: bool = True) -> Dashboard:
owners = [owner for owner in model.owners]
owners = list(model.owners)
for slc in model.slices:
slc.owners = list(set(owners) | set(slc.owners))
if commit:
Expand Down
16 changes: 12 additions & 4 deletions superset/datasets/api.py
Expand Up @@ -186,7 +186,9 @@ def post(self) -> Response:
except DatasetInvalidError as ex:
return self.response_422(message=ex.normalized_messages())
except DatasetCreateFailedError as ex:
logger.error(f"Error creating model {self.__class__.__name__}: {ex}")
logger.error(
"Error creating model %s: %s", self.__class__.__name__, str(ex)
)
return self.response_422(message=str(ex))

@expose("/<pk>", methods=["PUT"])
Expand Down Expand Up @@ -254,7 +256,9 @@ def put( # pylint: disable=too-many-return-statements, arguments-differ
except DatasetInvalidError as ex:
return self.response_422(message=ex.normalized_messages())
except DatasetUpdateFailedError as ex:
logger.error(f"Error updating model {self.__class__.__name__}: {ex}")
logger.error(
"Error updating model %s: %s", self.__class__.__name__, str(ex)
)
return self.response_422(message=str(ex))

@expose("/<pk>", methods=["DELETE"])
Expand Down Expand Up @@ -301,7 +305,9 @@ def delete(self, pk: int) -> Response: # pylint: disable=arguments-differ
except DatasetForbiddenError:
return self.response_403()
except DatasetDeleteFailedError as ex:
logger.error(f"Error deleting model {self.__class__.__name__}: {ex}")
logger.error(
"Error deleting model %s: %s", self.__class__.__name__, str(ex)
)
return self.response_422(message=str(ex))

@expose("/export/", methods=["GET"])
Expand Down Expand Up @@ -401,5 +407,7 @@ def refresh(self, pk: int) -> Response:
except DatasetForbiddenError:
return self.response_403()
except DatasetRefreshFailedError as ex:
logger.error(f"Error refreshing dataset {self.__class__.__name__}: {ex}")
logger.error(
"Error refreshing dataset %s: %s", self.__class__.__name__, str(ex)
)
return self.response_422(message=str(ex))
4 changes: 2 additions & 2 deletions superset/datasets/dao.py
Expand Up @@ -46,7 +46,7 @@ def get_database_by_id(database_id: int) -> Optional[Database]:
try:
return db.session.query(Database).filter_by(id=database_id).one_or_none()
except SQLAlchemyError as ex: # pragma: no cover
logger.error(f"Could not get database by id: {ex}")
logger.error("Could not get database by id: %s", str(ex))
return None

@staticmethod
Expand All @@ -55,7 +55,7 @@ def validate_table_exists(database: Database, table_name: str, schema: str) -> b
database.get_table(table_name, schema=schema)
return True
except SQLAlchemyError as ex: # pragma: no cover
logger.error(f"Got an error {ex} validating table: {table_name}")
logger.error("Got an error %s validating table: %s", str(ex), table_name)
return False

@staticmethod
Expand Down

0 comments on commit 8198cd7

Please sign in to comment.