Skip to content

Commit

Permalink
Linting pylint errors
Browse files Browse the repository at this point in the history
  • Loading branch information
mistercrunch committed Apr 3, 2017
1 parent 9ba5b49 commit e1d76c9
Show file tree
Hide file tree
Showing 18 changed files with 112 additions and 71 deletions.
2 changes: 1 addition & 1 deletion .pylintrc
Original file line number Diff line number Diff line change
Expand Up @@ -277,7 +277,7 @@ ignore-mixin-members=yes
# (useful for modules/projects where namespaces are manipulated during runtime
# and thus existing member attributes cannot be deduced by static analysis. It
# supports qualified module names, as well as Unix pattern matching.
ignored-modules=
ignored-modules=numpy,pandas,alembic.op,sqlalchemy,alembic.context,flask_appbuilder.security.sqla.PermissionView.role,flask_appbuilder.Model.metadata,flask_appbuilder.Base.metadata

# List of class names for which member attributes should not be checked (useful
# for classes with dynamically set attributes). This supports the use of
Expand Down
1 change: 1 addition & 0 deletions .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@ env:
- TRAVIS_NODE_VERSION="5.11"
matrix:
- TOX_ENV=javascript
- TOX_ENV=pylint
- TOX_ENV=py34-postgres
- TOX_ENV=py34-sqlite
- TOX_ENV=py27-mysql
Expand Down
3 changes: 3 additions & 0 deletions dev-reqs.txt
Original file line number Diff line number Diff line change
@@ -1,10 +1,13 @@
codeclimate-test-reporter
coveralls
flake8
flask_cors
mock
mysqlclient
nose
psycopg2
pylint
pythrifthiveapi
pyyaml
# Also install everything we need to build Sphinx docs
-r dev-reqs-for-docs.txt
2 changes: 2 additions & 0 deletions pylint-errors.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
#!/bin/bash
# Run pylint over the superset package, reporting only errors
# (warnings/conventions/refactors are suppressed by --errors-only).
pylint superset --errors-only
8 changes: 4 additions & 4 deletions superset/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -133,8 +133,8 @@ def load_examples(load_test_data):
def refresh_druid(datasource, merge):
"""Refresh druid datasources"""
session = db.session()
from superset import models
for cluster in session.query(models.DruidCluster).all():
from superset.connectors.druid.models import DruidCluster
for cluster in session.query(DruidCluster).all():
try:
cluster.refresh_datasources(datasource_name=datasource,
merge_flag=merge)
Expand All @@ -153,8 +153,8 @@ def refresh_druid(datasource, merge):
@manager.command
def update_datasources_cache():
"""Refresh sqllab datasources cache"""
from superset import models
for database in db.session.query(models.Database).all():
from superset.models.core import Database
for database in db.session.query(Database).all():
print('Fetching {} datasources ...'.format(database.name))
try:
database.all_table_names(force=True)
Expand Down
29 changes: 22 additions & 7 deletions superset/connectors/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,14 +11,23 @@ class BaseDatasource(AuditMixinNullable, ImportMixin):

"""A common interface to objects that are queryable (tables and datasources)"""

# ---------------------------------------------------------------
# class attributes to define when deriving BaseDatasource
# ---------------------------------------------------------------
__tablename__ = None # {connector_name}_datasource
type = None  # datasource type, str to be defined when deriving this class
baselink = None # url portion pointing to ModelView endpoint

column_class = None # link to derivative of BaseColumn
metric_class = None # link to derivative of BaseMetric

# Used to do code highlighting when displaying the query in the UI
query_language = None

name = None # can be a Column or a property pointing to one

# ---------------------------------------------------------------

# Columns
id = Column(Integer, primary_key=True)
description = Column(Text)
Expand All @@ -30,6 +39,11 @@ class BaseDatasource(AuditMixinNullable, ImportMixin):
params = Column(String(1000))
perm = Column(String(1000))

# placeholder for a relationship to a derivative of BaseColumn
columns = []
# placeholder for a relationship to a derivative of BaseMetric
metrics = []

@property
def column_names(self):
return sorted([c.column_name for c in self.columns])
Expand Down Expand Up @@ -69,6 +83,14 @@ def column_formats(self):
if m.d3format
}

@property
def metrics_combo(self):
    """Return (metric_name, label) pairs for every metric, sorted by label.

    The label is the metric's verbose_name when set, otherwise the
    metric_name itself; suitable for populating UI select widgets.
    """
    pairs = []
    for metric in self.metrics:
        label = metric.verbose_name or metric.metric_name
        pairs.append((metric.metric_name, label))
    pairs.sort(key=lambda pair: pair[1])
    return pairs

@property
def data(self):
"""Data representation of the datasource sent to the frontend"""
Expand All @@ -91,13 +113,6 @@ def data(self):
'type': self.type,
}

# TODO move this block to SqlaTable.data
if self.type == 'table':
grains = self.database.grains() or []
if grains:
grains = [(g.name, g.name) for g in grains]
d['granularity_sqla'] = utils.choicify(self.dttm_cols)
d['time_grain_sqla'] = grains
return d


Expand Down
35 changes: 15 additions & 20 deletions superset/connectors/druid/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -127,7 +127,7 @@ class DruidColumn(Model, BaseColumn):
# Setting enable_typechecks=False disables polymorphic inheritance.
datasource = relationship(
'DruidDatasource',
backref=backref('columns', cascade='all, delete-orphan'),
back_populates='columns',
enable_typechecks=False)
dimension_spec_json = Column(Text)

Expand Down Expand Up @@ -264,20 +264,10 @@ class DruidMetric(Model, BaseMetric):
# Setting enable_typechecks=False disables polymorphic inheritance.
datasource = relationship(
'DruidDatasource',
backref=backref('metrics', cascade='all, delete-orphan'),
back_populates='metrics',
enable_typechecks=False)
json = Column(Text)

def refresh_datasources(self, datasource_name=None, merge_flag=False):
"""Refresh metadata of all datasources in the cluster
If ``datasource_name`` is specified, only that datasource is updated
"""
self.druid_version = self.get_druid_version()
for datasource in self.get_datasources():
if datasource not in conf.get('DRUID_DATA_SOURCE_BLACKLIST'):
if not datasource_name or datasource_name == datasource:
DruidDatasource.sync_to_db(datasource, self, merge_flag)
export_fields = (
'metric_name', 'verbose_name', 'metric_type', 'datasource_name',
'json', 'description', 'is_restricted', 'd3format'
Expand Down Expand Up @@ -336,17 +326,22 @@ class DruidDatasource(Model, BaseDatasource):
backref=backref('datasources', cascade='all, delete-orphan'),
foreign_keys=[user_id])

columns = relationship(
'DruidColumn',
cascade='all, delete-orphan',
back_populates='datasource',
enable_typechecks=False)
metrics = relationship(
'DruidMetric',
cascade='all, delete-orphan',
back_populates='datasource',
enable_typechecks=False)

export_fields = (
'datasource_name', 'is_hidden', 'description', 'default_endpoint',
'cluster_name', 'offset', 'cache_timeout', 'params'
)

@property
def metrics_combo(self):
return sorted(
[(m.metric_name, m.verbose_name) for m in self.metrics],
key=lambda x: x[1])

@property
def database(self):
return self.cluster
Expand Down Expand Up @@ -784,15 +779,15 @@ def recursive_get_fields(_conf):
mconf.get('probabilities', ''),
)
elif mconf.get('type') == 'fieldAccess':
post_aggs[metric_name] = Field(mconf.get('name'), '')
post_aggs[metric_name] = Field(mconf.get('name'))
elif mconf.get('type') == 'constant':
post_aggs[metric_name] = Const(
mconf.get('value'),
output_name=mconf.get('name', '')
)
elif mconf.get('type') == 'hyperUniqueCardinality':
post_aggs[metric_name] = HyperUniqueCardinality(
mconf.get('name'), ''
mconf.get('name')
)
else:
post_aggs[metric_name] = Postaggregator(
Expand Down
7 changes: 4 additions & 3 deletions superset/connectors/druid/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -97,11 +97,12 @@ class DruidMetricInlineView(CompactCRUDMixin, SupersetModelView): # noqa
}

def post_add(self, metric):
utils.init_metrics_perm(superset, [metric])
if metric.is_restricted:
security.merge_perm(sm, 'metric_access', metric.get_perm())

def post_update(self, metric):
utils.init_metrics_perm(superset, [metric])

if metric.is_restricted:
security.merge_perm(sm, 'metric_access', metric.get_perm())

appbuilder.add_view_no_menu(DruidMetricInlineView)

Expand Down
19 changes: 11 additions & 8 deletions superset/connectors/sqla/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -251,14 +251,6 @@ def html(self):
"dataframe table table-striped table-bordered "
"table-condensed"))

@property
def metrics_combo(self):
return sorted(
[
(m.metric_name, m.verbose_name or m.metric_name)
for m in self.metrics],
key=lambda x: x[1])

@property
def sql_url(self):
return self.database.sql_url + "?table_name=" + str(self.table_name)
Expand All @@ -276,6 +268,17 @@ def get_col(self, col_name):
if col_name == col.column_name:
return col

@property
def data(self):
# Frontend-facing representation: start from the BaseDatasource payload
# and extend it with SQLA-specific time-granularity options.
d = super(SqlaTable, self).data
if self.type == 'table':
# grains() may return None — normalize to an empty list.
grains = self.database.grains() or []
if grains:
# Convert grain objects to (name, name) choice tuples for the UI.
grains = [(g.name, g.name) for g in grains]
# dttm_cols presumably lists datetime-typed columns — defined on the class, not visible here.
d['granularity_sqla'] = utils.choicify(self.dttm_cols)
d['time_grain_sqla'] = grains
return d

def values_for_column(self, column_name, limit=10000):
"""Runs query against sqla to retrieve some
sample values for the given column.
Expand Down
4 changes: 2 additions & 2 deletions superset/data/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -818,7 +818,7 @@ def load_unicode_test_data():
# generate date/numeric data
df['date'] = datetime.datetime.now().date()
df['value'] = [random.randint(1, 100) for _ in range(len(df))]
df.to_sql(
df.to_sql( # pylint: disable=no-member
'unicode_test',
db.engine,
if_exists='replace',
Expand Down Expand Up @@ -953,7 +953,7 @@ def load_long_lat_data():
pdf['date'] = datetime.datetime.now().date()
pdf['occupancy'] = [random.randint(1, 6) for _ in range(len(pdf))]
pdf['radius_miles'] = [random.uniform(1, 3) for _ in range(len(pdf))]
pdf.to_sql(
pdf.to_sql( # pylint: disable=no-member
'long_lat',
db.engine,
if_exists='replace',
Expand Down
2 changes: 2 additions & 0 deletions superset/db_engines/presto.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,8 @@ def cancel(self):
return

response = presto.requests.delete(self._nextUri)

# pylint: disable=no-member
if response.status_code != presto.requests.codes.no_content:
fmt = "Unexpected status code after cancel {}\n{}"
raise presto.OperationalError(
Expand Down
3 changes: 2 additions & 1 deletion superset/migrations/env.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,9 +20,10 @@
# for 'autogenerate' support
# from myapp import mymodel
from flask import current_app

config.set_main_option('sqlalchemy.url',
current_app.config.get('SQLALCHEMY_DATABASE_URI'))
target_metadata = Base.metadata
target_metadata = Base.metadata # pylint: disable=no-member

# other values from the config, defined by the needs of env.py,
# can be acquired:
Expand Down
Loading

0 comments on commit e1d76c9

Please sign in to comment.