Skip to content

Commit db6b2f3

Browse files
authored
pylint errors will now break the build (apache#2543)
* Linting pylint errors * Backing off of an unnecessary change
1 parent c31210b commit db6b2f3

File tree

18 files changed

+99
-69
lines changed

18 files changed

+99
-69
lines changed

.pylintrc

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -277,7 +277,7 @@ ignore-mixin-members=yes
277277
# (useful for modules/projects where namespaces are manipulated during runtime
278278
# and thus existing member attributes cannot be deduced by static analysis. It
279279
# supports qualified module names, as well as Unix pattern matching.
280-
ignored-modules=
280+
ignored-modules=numpy,pandas,alembic.op,sqlalchemy,alembic.context,flask_appbuilder.security.sqla.PermissionView.role,flask_appbuilder.Model.metadata,flask_appbuilder.Base.metadata
281281

282282
# List of class names for which member attributes should not be checked (useful
283283
# for classes with dynamically set attributes). This supports the use of

.travis.yml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,7 @@ env:
1717
- TRAVIS_NODE_VERSION="5.11"
1818
matrix:
1919
- TOX_ENV=javascript
20+
- TOX_ENV=pylint
2021
- TOX_ENV=py34-postgres
2122
- TOX_ENV=py34-sqlite
2223
- TOX_ENV=py27-mysql

dev-reqs.txt

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,13 @@
11
codeclimate-test-reporter
22
coveralls
33
flake8
4+
flask_cors
45
mock
56
mysqlclient
67
nose
78
psycopg2
9+
pylint
10+
pythrifthiveapi
811
pyyaml
912
# Also install everything we need to build Sphinx docs
1013
-r dev-reqs-for-docs.txt

pylint-errors.sh

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,2 @@
1+
#!/bin/bash
2+
pylint superset --errors-only

superset/cli.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -133,8 +133,8 @@ def load_examples(load_test_data):
133133
def refresh_druid(datasource, merge):
134134
"""Refresh druid datasources"""
135135
session = db.session()
136-
from superset import models
137-
for cluster in session.query(models.DruidCluster).all():
136+
from superset.connectors.druid.models import DruidCluster
137+
for cluster in session.query(DruidCluster).all():
138138
try:
139139
cluster.refresh_datasources(datasource_name=datasource,
140140
merge_flag=merge)
@@ -153,8 +153,8 @@ def refresh_druid(datasource, merge):
153153
@manager.command
154154
def update_datasources_cache():
155155
"""Refresh sqllab datasources cache"""
156-
from superset import models
157-
for database in db.session.query(models.Database).all():
156+
from superset.models.core import Database
157+
for database in db.session.query(Database).all():
158158
print('Fetching {} datasources ...'.format(database.name))
159159
try:
160160
database.all_table_names(force=True)

superset/connectors/base.py

Lines changed: 22 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -11,14 +11,23 @@ class BaseDatasource(AuditMixinNullable, ImportMixin):
1111

1212
"""A common interface to objects that are queryable (tables and datasources)"""
1313

14+
# ---------------------------------------------------------------
15+
# class attributes to define when deriving BaseDatasource
16+
# ---------------------------------------------------------------
1417
__tablename__ = None # {connector_name}_datasource
18+
type = None  # datasource type, str to be defined when deriving this class
19+
baselink = None # url portion pointing to ModelView endpoint
1520

1621
column_class = None # link to derivative of BaseColumn
1722
metric_class = None # link to derivative of BaseMetric
1823

1924
# Used to do code highlighting when displaying the query in the UI
2025
query_language = None
2126

27+
name = None # can be a Column or a property pointing to one
28+
29+
# ---------------------------------------------------------------
30+
2231
# Columns
2332
id = Column(Integer, primary_key=True)
2433
description = Column(Text)
@@ -30,6 +39,11 @@ class BaseDatasource(AuditMixinNullable, ImportMixin):
3039
params = Column(String(1000))
3140
perm = Column(String(1000))
3241

42+
# placeholder for a relationship to a derivative of BaseColumn
43+
columns = []
44+
# placeholder for a relationship to a derivative of BaseMetric
45+
metrics = []
46+
3347
@property
3448
def column_names(self):
3549
return sorted([c.column_name for c in self.columns])
@@ -69,6 +83,14 @@ def column_formats(self):
6983
if m.d3format
7084
}
7185

86+
@property
87+
def metrics_combo(self):
88+
return sorted(
89+
[
90+
(m.metric_name, m.verbose_name or m.metric_name)
91+
for m in self.metrics],
92+
key=lambda x: x[1])
93+
7294
@property
7395
def data(self):
7496
"""Data representation of the datasource sent to the frontend"""
@@ -91,13 +113,6 @@ def data(self):
91113
'type': self.type,
92114
}
93115

94-
# TODO move this block to SqlaTable.data
95-
if self.type == 'table':
96-
grains = self.database.grains() or []
97-
if grains:
98-
grains = [(g.name, g.name) for g in grains]
99-
d['granularity_sqla'] = utils.choicify(self.dttm_cols)
100-
d['time_grain_sqla'] = grains
101116
return d
102117

103118

superset/connectors/druid/models.py

Lines changed: 2 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -268,16 +268,6 @@ class DruidMetric(Model, BaseMetric):
268268
enable_typechecks=False)
269269
json = Column(Text)
270270

271-
def refresh_datasources(self, datasource_name=None, merge_flag=False):
272-
"""Refresh metadata of all datasources in the cluster
273-
274-
If ``datasource_name`` is specified, only that datasource is updated
275-
"""
276-
self.druid_version = self.get_druid_version()
277-
for datasource in self.get_datasources():
278-
if datasource not in conf.get('DRUID_DATA_SOURCE_BLACKLIST'):
279-
if not datasource_name or datasource_name == datasource:
280-
DruidDatasource.sync_to_db(datasource, self, merge_flag)
281271
export_fields = (
282272
'metric_name', 'verbose_name', 'metric_type', 'datasource_name',
283273
'json', 'description', 'is_restricted', 'd3format'
@@ -341,12 +331,6 @@ class DruidDatasource(Model, BaseDatasource):
341331
'cluster_name', 'offset', 'cache_timeout', 'params'
342332
)
343333

344-
@property
345-
def metrics_combo(self):
346-
return sorted(
347-
[(m.metric_name, m.verbose_name) for m in self.metrics],
348-
key=lambda x: x[1])
349-
350334
@property
351335
def database(self):
352336
return self.cluster
@@ -784,15 +768,15 @@ def recursive_get_fields(_conf):
784768
mconf.get('probabilities', ''),
785769
)
786770
elif mconf.get('type') == 'fieldAccess':
787-
post_aggs[metric_name] = Field(mconf.get('name'), '')
771+
post_aggs[metric_name] = Field(mconf.get('name'))
788772
elif mconf.get('type') == 'constant':
789773
post_aggs[metric_name] = Const(
790774
mconf.get('value'),
791775
output_name=mconf.get('name', '')
792776
)
793777
elif mconf.get('type') == 'hyperUniqueCardinality':
794778
post_aggs[metric_name] = HyperUniqueCardinality(
795-
mconf.get('name'), ''
779+
mconf.get('name')
796780
)
797781
else:
798782
post_aggs[metric_name] = Postaggregator(

superset/connectors/druid/views.py

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -100,11 +100,12 @@ class DruidMetricInlineView(CompactCRUDMixin, SupersetModelView): # noqa
100100
}
101101

102102
def post_add(self, metric):
103-
utils.init_metrics_perm(superset, [metric])
103+
if metric.is_restricted:
104+
security.merge_perm(sm, 'metric_access', metric.get_perm())
104105

105106
def post_update(self, metric):
106-
utils.init_metrics_perm(superset, [metric])
107-
107+
if metric.is_restricted:
108+
security.merge_perm(sm, 'metric_access', metric.get_perm())
108109

109110
appbuilder.add_view_no_menu(DruidMetricInlineView)
110111

superset/connectors/sqla/models.py

Lines changed: 11 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -251,14 +251,6 @@ def html(self):
251251
"dataframe table table-striped table-bordered "
252252
"table-condensed"))
253253

254-
@property
255-
def metrics_combo(self):
256-
return sorted(
257-
[
258-
(m.metric_name, m.verbose_name or m.metric_name)
259-
for m in self.metrics],
260-
key=lambda x: x[1])
261-
262254
@property
263255
def sql_url(self):
264256
return self.database.sql_url + "?table_name=" + str(self.table_name)
@@ -276,6 +268,17 @@ def get_col(self, col_name):
276268
if col_name == col.column_name:
277269
return col
278270

271+
@property
272+
def data(self):
273+
d = super(SqlaTable, self).data
274+
if self.type == 'table':
275+
grains = self.database.grains() or []
276+
if grains:
277+
grains = [(g.name, g.name) for g in grains]
278+
d['granularity_sqla'] = utils.choicify(self.dttm_cols)
279+
d['time_grain_sqla'] = grains
280+
return d
281+
279282
def values_for_column(self, column_name, limit=10000):
280283
"""Runs query against sqla to retrieve some
281284
sample values for the given column.

superset/data/__init__.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -818,7 +818,7 @@ def load_unicode_test_data():
818818
# generate date/numeric data
819819
df['date'] = datetime.datetime.now().date()
820820
df['value'] = [random.randint(1, 100) for _ in range(len(df))]
821-
df.to_sql(
821+
df.to_sql( # pylint: disable=no-member
822822
'unicode_test',
823823
db.engine,
824824
if_exists='replace',
@@ -953,7 +953,7 @@ def load_long_lat_data():
953953
pdf['date'] = datetime.datetime.now().date()
954954
pdf['occupancy'] = [random.randint(1, 6) for _ in range(len(pdf))]
955955
pdf['radius_miles'] = [random.uniform(1, 3) for _ in range(len(pdf))]
956-
pdf.to_sql(
956+
pdf.to_sql( # pylint: disable=no-member
957957
'long_lat',
958958
db.engine,
959959
if_exists='replace',

0 commit comments

Comments
 (0)