From cd41cdf211b8de6c9802cff1973735b7def09865 Mon Sep 17 00:00:00 2001 From: Will Barrett Date: Wed, 17 Jun 2020 19:57:09 -0700 Subject: [PATCH 1/9] Bump pylint version to 2.5.3 --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 207b6a967addf..5325ea698a2c3 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -28,7 +28,7 @@ psycopg2-binary==2.8.5 pycodestyle==2.5.0 pydruid==0.6.1 pyhive==0.6.2 -pylint==1.9.2 +pylint==2.5.3 redis==3.5.1 requests==2.23.0 statsd==3.3.0 From b0c7ae1dc4c4d4b96714462fa8008d40996b0712 Mon Sep 17 00:00:00 2001 From: Will Barrett Date: Wed, 17 Jun 2020 20:16:27 -0700 Subject: [PATCH 2/9] Add a global disable for the most common new pylint error --- .pylintrc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pylintrc b/.pylintrc index 42b8783e7d9f4..e887176d655cd 100644 --- a/.pylintrc +++ b/.pylintrc @@ -81,7 +81,7 @@ confidence= # --enable=similarities". If you want to run only the classes checker, but have # no Warning level messages displayed, use"--disable=all --enable=classes # --disable=W" -disable=standarderror-builtin,long-builtin,dict-view-method,intern-builtin,suppressed-message,no-absolute-import,unpacking-in-except,apply-builtin,delslice-method,indexing-exception,old-raise-syntax,print-statement,cmp-builtin,reduce-builtin,useless-suppression,coerce-method,input-builtin,cmp-method,raw_input-builtin,nonzero-method,backtick,basestring-builtin,setslice-method,reload-builtin,oct-method,map-builtin-not-iterating,execfile-builtin,old-octal-literal,zip-builtin-not-iterating,buffer-builtin,getslice-method,metaclass-assignment,xrange-builtin,long-suffix,round-builtin,range-builtin-not-iterating,next-method-called,dict-iter-method,parameter-unpacking,unicode-builtin,unichr-builtin,import-star-module-level,raising-string,filter-builtin-not-iterating,old-ne-operator,using-cmp-argument,coerce-builtin,file-builtin,old-division,hex-method,invalid-unary-operand-type,missing-docstring,too-many-lines,duplicate-code,bad-continuation,ungrouped-imports +disable=standarderror-builtin,long-builtin,dict-view-method,intern-builtin,suppressed-message,no-absolute-import,unpacking-in-except,apply-builtin,delslice-method,indexing-exception,old-raise-syntax,print-statement,cmp-builtin,reduce-builtin,useless-suppression,coerce-method,input-builtin,cmp-method,raw_input-builtin,nonzero-method,backtick,basestring-builtin,setslice-method,reload-builtin,oct-method,map-builtin-not-iterating,execfile-builtin,old-octal-literal,zip-builtin-not-iterating,buffer-builtin,getslice-method,metaclass-assignment,xrange-builtin,long-suffix,round-builtin,range-builtin-not-iterating,next-method-called,dict-iter-method,parameter-unpacking,unicode-builtin,unichr-builtin,import-star-module-level,raising-string,filter-builtin-not-iterating,old-ne-operator,using-cmp-argument,coerce-builtin,file-builtin,old-division,hex-method,invalid-unary-operand-type,missing-docstring,too-many-lines,duplicate-code,bad-continuation,ungrouped-imports,import-outside-toplevel [REPORTS] @@ -254,7 +254,7 @@ notes=FIXME,XXX [SIMILARITIES] # Minimum lines number of a similarity. -min-similarity-lines=4 +min-similarity-lines=5 # Ignore comments when computing similarities. 
ignore-comments=yes From 8c65325cd3a5890994ae7ffbccd432d99994eaa8 Mon Sep 17 00:00:00 2001 From: Will Barrett Date: Wed, 17 Jun 2020 20:28:06 -0700 Subject: [PATCH 3/9] Fix a bunch of files containing very few errors --- superset/dashboards/dao.py | 2 +- superset/db_engine_specs/drill.py | 2 +- superset/db_engine_specs/impala.py | 2 +- superset/db_engine_specs/pinot.py | 4 +++- superset/db_engine_specs/sqlite.py | 2 +- superset/tasks/schedules.py | 2 +- superset/tasks/slack_util.py | 2 +- superset/utils/dashboard_filter_scopes_converter.py | 2 +- superset/views/annotations.py | 5 ++--- superset/views/dashboard/views.py | 7 +++---- 10 files changed, 15 insertions(+), 15 deletions(-) diff --git a/superset/dashboards/dao.py b/superset/dashboards/dao.py index 6e12cd547ff3f..28b685f177f0f 100644 --- a/superset/dashboards/dao.py +++ b/superset/dashboards/dao.py @@ -49,7 +49,7 @@ def validate_update_slug_uniqueness(dashboard_id: int, slug: Optional[str]) -> b @staticmethod def update_charts_owners(model: Dashboard, commit: bool = True) -> Dashboard: - owners = [owner for owner in model.owners] + owners = list(model.owners) for slc in model.slices: slc.owners = list(set(owners) | set(slc.owners)) if commit: diff --git a/superset/db_engine_specs/drill.py b/superset/db_engine_specs/drill.py index 8da7cd8bb4bb8..e43fe1def4731 100644 --- a/superset/db_engine_specs/drill.py +++ b/superset/db_engine_specs/drill.py @@ -56,7 +56,7 @@ def convert_dttm(cls, target_type: str, dttm: datetime) -> Optional[str]: tt = target_type.upper() if tt == "DATE": return f"TO_DATE('{dttm.date().isoformat()}', 'yyyy-MM-dd')" - elif tt == "TIMESTAMP": + if tt == "TIMESTAMP": return f"""TO_TIMESTAMP('{dttm.isoformat(sep=" ", timespec="seconds")}', 'yyyy-MM-dd HH:mm:ss')""" # pylint: disable=line-too-long return None diff --git a/superset/db_engine_specs/impala.py b/superset/db_engine_specs/impala.py index fd2dce5e324ea..59eaf0106efb3 100644 --- a/superset/db_engine_specs/impala.py +++ b/superset/db_engine_specs/impala.py @@ -47,7 +47,7 @@ def convert_dttm(cls, target_type: str, dttm: datetime) -> Optional[str]: tt = target_type.upper() if tt == "DATE": return f"CAST('{dttm.date().isoformat()}' AS DATE)" - elif tt == "TIMESTAMP": + if tt == "TIMESTAMP": return f"""CAST('{dttm.isoformat(timespec="microseconds")}' AS TIMESTAMP)""" return None diff --git a/superset/db_engine_specs/pinot.py b/superset/db_engine_specs/pinot.py index 9243fefa7da14..853d756e340fb 100644 --- a/superset/db_engine_specs/pinot.py +++ b/superset/db_engine_specs/pinot.py @@ -84,7 +84,9 @@ def get_timestamp_expr( if not granularity: raise NotImplementedError("No pinot grain spec for " + str(time_grain)) else: - return TimestampExpression(f"{{col}}", col) + return TimestampExpression( + f"{{col}}", col # pylint: disable=f-string-without-interpolation + ) # In pinot the output is a string since there is no timestamp column like pg time_expr = f"DATETIMECONVERT({{col}}, '{tf}', '{tf}', '{granularity}')" return TimestampExpression(time_expr, col) diff --git a/superset/db_engine_specs/sqlite.py b/superset/db_engine_specs/sqlite.py index d824c3203e690..11856b084acdc 100644 --- a/superset/db_engine_specs/sqlite.py +++ b/superset/db_engine_specs/sqlite.py @@ -64,7 +64,7 @@ def get_all_datasource_names( cache=database.table_cache_enabled, cache_timeout=database.table_cache_timeout, ) - elif datasource_type == "view": + if datasource_type == "view": return database.get_all_view_names_in_schema( schema=schema, force=True, diff --git a/superset/tasks/schedules.py 
b/superset/tasks/schedules.py index 3930bebafd3d4..9948e94639e17 100644 --- a/superset/tasks/schedules.py +++ b/superset/tasks/schedules.py @@ -488,7 +488,7 @@ def schedule_email_report( # pylint: disable=unused-argument recipients = recipients or schedule.recipients slack_channel = slack_channel or schedule.slack_channel logger.info( - f"Starting report for slack: {slack_channel} and recipients: {recipients}." + "Starting report for slack: %s and recipients: %s.", slack_channel, recipients ) if report_type == ScheduleType.dashboard: diff --git a/superset/tasks/slack_util.py b/superset/tasks/slack_util.py index 018308d2beb81..5c3d3af4191d5 100644 --- a/superset/tasks/slack_util.py +++ b/superset/tasks/slack_util.py @@ -19,10 +19,10 @@ from typing import cast, Union from retry.api import retry + from slack import WebClient from slack.errors import SlackApiError from slack.web.slack_response import SlackResponse - from superset import app # Globals diff --git a/superset/utils/dashboard_filter_scopes_converter.py b/superset/utils/dashboard_filter_scopes_converter.py index d95582ddd79fa..db897fa64c5a7 100644 --- a/superset/utils/dashboard_filter_scopes_converter.py +++ b/superset/utils/dashboard_filter_scopes_converter.py @@ -49,7 +49,7 @@ def add_filter_scope( "immune": current_filter_immune, } else: - logging.info(f"slice [{filter_id}] has invalid field: {filter_field}") + logging.info("slice [%i] has invalid field: %s", filter_id, filter_field) for filter_slice in filters: filter_fields: Dict[str, Dict[str, Any]] = {} diff --git a/superset/views/annotations.py b/superset/views/annotations.py index a8c26c7c0032d..03473bd600813 100644 --- a/superset/views/annotations.py +++ b/superset/views/annotations.py @@ -23,8 +23,7 @@ from superset.constants import RouteMethod from superset.models.annotations import Annotation, AnnotationLayer - -from .base import SupersetModelView +from superset.views.base import SupersetModelView class StartEndDttmValidator: # pylint: disable=too-few-public-methods @@ -35,7 +34,7 @@ class StartEndDttmValidator: # pylint: disable=too-few-public-methods def __call__(self, form: Dict[str, Any], field: Any) -> None: if not form["start_dttm"].data and not form["end_dttm"].data: raise StopValidation(_("annotation start time or end time is required.")) - elif ( + if ( form["end_dttm"].data and form["start_dttm"].data and form["end_dttm"].data < form["start_dttm"].data diff --git a/superset/views/dashboard/views.py b/superset/views/dashboard/views.py index 2d68cbcb2d912..a446bf37b9377 100644 --- a/superset/views/dashboard/views.py +++ b/superset/views/dashboard/views.py @@ -29,15 +29,14 @@ from superset.constants import RouteMethod from superset.typing import FlaskResponse from superset.utils import core as utils - -from ..base import ( +from superset.views.base import ( BaseSupersetView, check_ownership, DeleteMixin, generate_download_headers, SupersetModelView, ) -from .mixin import DashboardMixin +from superset.views.dashboard.mixin import DashboardMixin class DashboardModelView( @@ -95,7 +94,7 @@ def pre_add(self, item: "DashboardModelView") -> None: item.owners.append(g.user) utils.validate_json(item.json_metadata) utils.validate_json(item.position_json) - owners = [o for o in item.owners] + owners = list(item.owners) for slc in item.slices: slc.owners = list(set(owners) | set(slc.owners)) From 4350b808d3cdd0752697ee615d5e34a1570d1ddf Mon Sep 17 00:00:00 2001 From: Will Barrett Date: Wed, 17 Jun 2020 20:46:42 -0700 Subject: [PATCH 4/9] More pylint tweakage, low-hanging 
fruit --- superset/app.py | 3 +-- superset/commands/base.py | 2 -- superset/config.py | 4 +++- superset/dao/exceptions.py | 2 -- superset/datasets/dao.py | 4 ++-- superset/db_engine_specs/sqlite.py | 3 +-- superset/models/dashboard.py | 2 +- superset/models/helpers.py | 1 + superset/models/schedules.py | 2 +- superset/tasks/cache.py | 4 ++-- superset/utils/log.py | 2 +- superset/views/base.py | 5 ++--- superset/views/database/decorators.py | 8 +++++--- superset/views/utils.py | 10 ++++------ 14 files changed, 24 insertions(+), 28 deletions(-) diff --git a/superset/app.py b/superset/app.py index 3e83fd581a88c..7dc7aec5f4250 100644 --- a/superset/app.py +++ b/superset/app.py @@ -95,7 +95,6 @@ def post_init(self) -> None: """ Called after any other init tasks """ - pass def configure_celery(self) -> None: celery_app.config_from_object(self.config["CELERY_CONFIG"]) @@ -593,7 +592,7 @@ def configure_wtf(self) -> None: def register_blueprints(self) -> None: for bp in self.config["BLUEPRINTS"]: try: - logger.info(f"Registering blueprint: '{bp.name}'") + logger.info("Registering blueprint: %s", bp.name) self.flask_app.register_blueprint(bp) except Exception: # pylint: disable=broad-except logger.exception("blueprint registration failed") diff --git a/superset/commands/base.py b/superset/commands/base.py index 998a7568597b7..0bcf3a7bee0d4 100644 --- a/superset/commands/base.py +++ b/superset/commands/base.py @@ -29,7 +29,6 @@ def run(self) -> Any: Run executes the command. Can raise command exceptions :raises: CommandException """ - pass @abstractmethod def validate(self) -> None: @@ -38,4 +37,3 @@ def validate(self) -> None: Will raise exception if validation fails :raises: CommandException """ - pass diff --git a/superset/config.py b/superset/config.py index b862a9c98449c..0a1eada08efdf 100644 --- a/superset/config.py +++ b/superset/config.py @@ -898,7 +898,9 @@ class CeleryConfig: # pylint: disable=too-few-public-methods print(f"Loaded your LOCAL configuration at [{cfg_path}]") except Exception: logger.exception( - f"Failed to import config for {CONFIG_PATH_ENV_VAR}={cfg_path}" + "Failed to import config for %s=%s", + CONFIG_PATH_ENV_VAR, + cfg_path ) raise elif importlib.util.find_spec("superset_config"): diff --git a/superset/dao/exceptions.py b/superset/dao/exceptions.py index 5a6bbbe222bfe..822b23982e5d8 100644 --- a/superset/dao/exceptions.py +++ b/superset/dao/exceptions.py @@ -22,8 +22,6 @@ class DAOException(SupersetException): Base DAO exception class """ - pass - class DAOCreateFailedError(DAOException): """ diff --git a/superset/datasets/dao.py b/superset/datasets/dao.py index ef20a69dd19cf..0a253fa5a4b0f 100644 --- a/superset/datasets/dao.py +++ b/superset/datasets/dao.py @@ -46,7 +46,7 @@ def get_database_by_id(database_id: int) -> Optional[Database]: try: return db.session.query(Database).filter_by(id=database_id).one_or_none() except SQLAlchemyError as ex: # pragma: no cover - logger.error(f"Could not get database by id: {ex}") + logger.error("Could not get database by id: %s", str(ex)) return None @staticmethod @@ -55,7 +55,7 @@ def validate_table_exists(database: Database, table_name: str, schema: str) -> b database.get_table(table_name, schema=schema) return True except SQLAlchemyError as ex: # pragma: no cover - logger.error(f"Got an error {ex} validating table: {table_name}") + logger.error("Got an error %s validating table: %s", str(ex), table_name) return False @staticmethod diff --git a/superset/db_engine_specs/sqlite.py b/superset/db_engine_specs/sqlite.py index 
11856b084acdc..863e5e624c74e 100644 --- a/superset/db_engine_specs/sqlite.py +++ b/superset/db_engine_specs/sqlite.py @@ -71,8 +71,7 @@ def get_all_datasource_names( cache=database.table_cache_enabled, cache_timeout=database.table_cache_timeout, ) - else: - raise Exception(f"Unsupported datasource_type: {datasource_type}") + raise Exception(f"Unsupported datasource_type: {datasource_type}") @classmethod def convert_dttm(cls, target_type: str, dttm: datetime) -> Optional[str]: diff --git a/superset/models/dashboard.py b/superset/models/dashboard.py index d10809e2dda51..b34906fc3f0ff 100644 --- a/superset/models/dashboard.py +++ b/superset/models/dashboard.py @@ -42,7 +42,7 @@ from superset import app, ConnectorRegistry, db, is_feature_enabled, security_manager from superset.models.helpers import AuditMixinNullable, ImportMixin -from superset.models.slice import Slice as Slice +from superset.models.slice import Slice from superset.models.tags import DashboardUpdater from superset.models.user_attributes import UserAttribute from superset.tasks.thumbnails import cache_dashboard_thumbnail diff --git a/superset/models/helpers.py b/superset/models/helpers.py index 4ffe5e9c8dcff..d003f55e47e29 100644 --- a/superset/models/helpers.py +++ b/superset/models/helpers.py @@ -363,6 +363,7 @@ def changed_by_fk(self) -> sa.Column: nullable=True, ) + @property def changed_by_name(self) -> str: if self.created_by: return escape("{}".format(self.created_by)) diff --git a/superset/models/schedules.py b/superset/models/schedules.py index f66a23768b3ed..57ff52d1bbb96 100644 --- a/superset/models/schedules.py +++ b/superset/models/schedules.py @@ -90,6 +90,6 @@ class SliceEmailSchedule(Model, AuditMixinNullable, ImportMixin, EmailSchedule): def get_scheduler_model(report_type: ScheduleType) -> Optional[Type[EmailSchedule]]: if report_type == ScheduleType.dashboard: return DashboardEmailSchedule - elif report_type == ScheduleType.slice: + if report_type == ScheduleType.slice: return SliceEmailSchedule return None diff --git a/superset/tasks/cache.py b/superset/tasks/cache.py index a5b0199c51c41..54b0dc1b17822 100644 --- a/superset/tasks/cache.py +++ b/superset/tasks/cache.py @@ -275,7 +275,7 @@ def cache_warmup( logger.error(message) return message - logger.info(f"Loading {class_.__name__}") + logger.info("Loading %s", class_.__name__) try: strategy = class_(*args, **kwargs) logger.info("Success!") @@ -287,7 +287,7 @@ def cache_warmup( results: Dict[str, List[str]] = {"success": [], "errors": []} for url in strategy.get_urls(): try: - logger.info(f"Fetching {url}") + logger.info("Fetching %s", url) request.urlopen(url) results["success"].append(url) except URLError: diff --git a/superset/utils/log.py b/superset/utils/log.py index b31abcec8cdc0..382d7801db697 100644 --- a/superset/utils/log.py +++ b/superset/utils/log.py @@ -134,7 +134,7 @@ def get_event_logger_from_cfg_value(cfg_value: Any) -> AbstractEventLogger: "of superset.utils.log.AbstractEventLogger." 
) - logging.info(f"Configured event logger of type {type(result)}") + logging.info("Configured event logger of type %s", type(result)) return cast(AbstractEventLogger, result) diff --git a/superset/views/base.py b/superset/views/base.py index b95be11dfa89c..6747f44ce7847 100644 --- a/superset/views/base.py +++ b/superset/views/base.py @@ -212,7 +212,7 @@ def validate_sqlatable(table: models.SqlaTable) -> None: try: table.get_sqla_table_object() except Exception as ex: - logger.exception(f"Got an error in pre_add for {table.name}") + logger.exception("Got an error in pre_add for %s", table.name) raise Exception( _( "Table [%{table}s] could not be found, " @@ -498,8 +498,7 @@ def check_ownership(obj: Any, raise_if_false: bool = True) -> bool: return True if raise_if_false: raise security_exception - else: - return False + return False def bind_field( diff --git a/superset/views/database/decorators.py b/superset/views/database/decorators.py index 071b661c75d18..2cea2f96f8eb7 100644 --- a/superset/views/database/decorators.py +++ b/superset/views/database/decorators.py @@ -51,14 +51,16 @@ def wraps( ) return self.response_404() if not self.appbuilder.sm.can_access_table( - database, Table(table_name_parsed, schema_name_parsed), + database, Table(table_name_parsed, schema_name_parsed) ): self.stats_logger.incr( f"permisssion_denied_{self.__class__.__name__}.select_star" ) logger.warning( - f"Permission denied for user {g.user} on table: {table_name_parsed} " - f"schema: {schema_name_parsed}" + "Permission denied for user %s on table: %s schema: %s", + g.user, + table_name_parsed, + schema_name_parsed, ) return self.response_404() return f(self, database, table_name_parsed, schema_name_parsed) diff --git a/superset/views/utils.py b/superset/views/utils.py index 73c3a54c5cbea..6248865401532 100644 --- a/superset/views/utils.py +++ b/superset/views/utils.py @@ -460,7 +460,7 @@ def check_slice_perms(_self: Any, slice_id: int) -> None: def _deserialize_results_payload( payload: Union[bytes, str], query: Query, use_msgpack: Optional[bool] = False ) -> Dict[str, Any]: - logger.debug(f"Deserializing from msgpack: {use_msgpack}") + logger.debug("Deserializing from msgpack: %r", use_msgpack) if use_msgpack: with stats_timing( "sqllab.query.results_backend_msgpack_deserialize", stats_logger @@ -482,11 +482,9 @@ def _deserialize_results_payload( ) return ds_payload - else: - with stats_timing( - "sqllab.query.results_backend_json_deserialize", stats_logger - ): - return json.loads(payload) + + with stats_timing("sqllab.query.results_backend_json_deserialize", stats_logger): + return json.loads(payload) def get_cta_schema_name( From dff8fd8299b0bba84c7b2ad0cdf5f3ed43ef3cad Mon Sep 17 00:00:00 2001 From: Will Barrett Date: Wed, 17 Jun 2020 21:04:12 -0700 Subject: [PATCH 5/9] More easy stuff... 
--- superset/dashboards/api.py | 12 +++++++--- superset/datasets/api.py | 12 +++++++--- superset/db_engine_specs/base.py | 10 ++++---- superset/db_engine_specs/presto.py | 24 +++++++++++-------- superset/models/sql_types/presto_sql_types.py | 5 ++++ superset/sql_validators/presto_db.py | 6 ++--- 6 files changed, 44 insertions(+), 25 deletions(-) diff --git a/superset/dashboards/api.py b/superset/dashboards/api.py index 21d4e7959fe4c..a897a2d333f70 100644 --- a/superset/dashboards/api.py +++ b/superset/dashboards/api.py @@ -206,7 +206,9 @@ def post(self) -> Response: except DashboardInvalidError as ex: return self.response_422(message=ex.normalized_messages()) except DashboardCreateFailedError as ex: - logger.error(f"Error creating model {self.__class__.__name__}: {ex}") + logger.error( + "Error creating model %s: %s", self.__class__.__name__, str(ex) + ) return self.response_422(message=str(ex)) @expose("/", methods=["PUT"]) @@ -274,7 +276,9 @@ def put( # pylint: disable=too-many-return-statements, arguments-differ except DashboardInvalidError as ex: return self.response_422(message=ex.normalized_messages()) except DashboardUpdateFailedError as ex: - logger.error(f"Error updating model {self.__class__.__name__}: {ex}") + logger.error( + "Error updating model %s: %s", self.__class__.__name__, str(ex) + ) return self.response_422(message=str(ex)) @expose("/", methods=["DELETE"]) @@ -321,7 +325,9 @@ def delete(self, pk: int) -> Response: # pylint: disable=arguments-differ except DashboardForbiddenError: return self.response_403() except DashboardDeleteFailedError as ex: - logger.error(f"Error deleting model {self.__class__.__name__}: {ex}") + logger.error( + "Error deleting model %s: %s", self.__class__.__name__, str(ex) + ) return self.response_422(message=str(ex)) @expose("/", methods=["DELETE"]) diff --git a/superset/datasets/api.py b/superset/datasets/api.py index bafcebfb7a50d..28c64c86545a6 100644 --- a/superset/datasets/api.py +++ b/superset/datasets/api.py @@ -186,7 +186,9 @@ def post(self) -> Response: except DatasetInvalidError as ex: return self.response_422(message=ex.normalized_messages()) except DatasetCreateFailedError as ex: - logger.error(f"Error creating model {self.__class__.__name__}: {ex}") + logger.error( + "Error creating model %s: %s", self.__class__.__name__, str(ex) + ) return self.response_422(message=str(ex)) @expose("/", methods=["PUT"]) @@ -254,7 +256,9 @@ def put( # pylint: disable=too-many-return-statements, arguments-differ except DatasetInvalidError as ex: return self.response_422(message=ex.normalized_messages()) except DatasetUpdateFailedError as ex: - logger.error(f"Error updating model {self.__class__.__name__}: {ex}") + logger.error( + "Error updating model %s: %s", self.__class__.__name__, str(ex) + ) return self.response_422(message=str(ex)) @expose("/", methods=["DELETE"]) @@ -301,7 +305,9 @@ def delete(self, pk: int) -> Response: # pylint: disable=arguments-differ except DatasetForbiddenError: return self.response_403() except DatasetDeleteFailedError as ex: - logger.error(f"Error deleting model {self.__class__.__name__}: {ex}") + logger.error( + "Error deleting model %s: %s", self.__class__.__name__, str(ex) + ) return self.response_422(message=str(ex)) @expose("/export/", methods=["GET"]) diff --git a/superset/db_engine_specs/base.py b/superset/db_engine_specs/base.py index 86bbd11fbd353..c86ee9d0d3171 100644 --- a/superset/db_engine_specs/base.py +++ b/superset/db_engine_specs/base.py @@ -332,7 +332,6 @@ def alter_new_orm_column(cls, orm_col: 
"TableColumn") -> None: set to is_dttm=True. Note that this only gets called when new columns are detected/created""" # TODO: Fix circular import caused by importing TableColumn - pass @classmethod def epoch_to_dttm(cls) -> str: @@ -401,9 +400,11 @@ def apply_limit_to_sql(cls, sql: str, limit: int, database: "Database") -> str: .limit(limit) ) return database.compile_sqla_query(qry) - elif LimitMethod.FORCE_LIMIT: + + if LimitMethod.FORCE_LIMIT: parsed_query = sql_parse.ParsedQuery(sql) sql = parsed_query.set_or_update_query_limit(limit) + return sql @classmethod @@ -465,7 +466,7 @@ def create_table_from_csv( # pylint: disable=too-many-arguments Create table from contents of a csv. Note: this method does not create metadata for the table. """ - df = cls.csv_to_df(filepath_or_buffer=filename, **csv_to_df_kwargs,) + df = cls.csv_to_df(filepath_or_buffer=filename, **csv_to_df_kwargs) engine = cls.get_engine(database) if table.schema: # only add schema when it is preset and non empty @@ -529,7 +530,6 @@ def handle_cursor(cls, cursor: Any, query: Query, session: Session) -> None: for handling the cursor and updating progress information in the query object""" # TODO: Fix circular import error caused by importing sql_lab.Query - pass @classmethod def extract_error_message(cls, ex: Exception) -> str: @@ -573,14 +573,12 @@ def adjust_database_uri(cls, uri: URL, selected_schema: Optional[str]) -> None: Some database drivers like presto accept '{catalog}/{schema}' in the database component of the URL, that can be handled here. """ - pass @classmethod def patch(cls) -> None: """ TODO: Improve docstring and refactor implementation in Hive """ - pass @classmethod def get_schema_names(cls, inspector: Inspector) -> List[str]: diff --git a/superset/db_engine_specs/presto.py b/superset/db_engine_specs/presto.py index e8c9603df3d4b..ee474d53ae6db 100644 --- a/superset/db_engine_specs/presto.py +++ b/superset/db_engine_specs/presto.py @@ -79,7 +79,8 @@ def get_children(column: Dict[str, str]) -> List[Dict[str, str]]: children_type = group["children"] if type_ == "ARRAY": return [{"name": column["name"], "type": children_type}] - elif type_ == "ROW": + + if type_ == "ROW": nameless_columns = 0 columns = [] for child in utils.split(children_type, ","): @@ -93,8 +94,8 @@ def get_children(column: Dict[str, str]) -> List[Dict[str, str]]: nameless_columns += 1 columns.append({"name": f"{column['name']}.{name.lower()}", "type": type_}) return columns - else: - raise Exception(f"Unknown type {type_}!") + + raise Exception(f"Unknown type {type_}!") class PrestoEngineSpec(BaseEngineSpec): @@ -278,7 +279,7 @@ def _parse_structural_column( # pylint: disable=too-many-locals,too-many-branch if not (inner_type.endswith("array") or inner_type.endswith("row")): stack.pop() # We have an array of row objects (i.e. array(row(...))) - elif inner_type == "array" or inner_type == "row": + elif inner_type in ("array", "row"): # Push a dummy object to represent the structural data type stack.append(("", inner_type)) # We have an array of a basic data types(i.e. array(varchar)). 
@@ -339,8 +340,9 @@ def get_columns( ) result[structural_column_index]["default"] = None continue - else: # otherwise column is a basic data type - column_type = presto_type_map[column.Type]() + + # otherwise column is a basic data type + column_type = presto_type_map[column.Type]() except KeyError: logger.info( "Did not recognize type {} of column {}".format( # pylint: disable=logging-format-interpolation @@ -727,7 +729,7 @@ def get_create_view( def handle_cursor(cls, cursor: Any, query: Query, session: Session) -> None: """Updates progress information""" query_id = query.id - logger.info(f"Query {query_id}: Polling the cursor for progress") + logger.info("Query %i: Polling the cursor for progress", query_id) polled = cursor.poll() # poll returns dict -- JSON status information or ``None`` # if the query is done @@ -761,7 +763,7 @@ def handle_cursor(cls, cursor: Any, query: Query, session: Session) -> None: query.progress = progress session.commit() time.sleep(1) - logger.info(f"Query {query_id}: Polling the cursor for progress") + logger.info("Query %i: Polling the cursor for progress", query_id) polled = cursor.poll() @classmethod @@ -903,12 +905,14 @@ def latest_partition( raise SupersetTemplateException( "The table should have one partitioned field" ) - elif not show_first and len(indexes[0]["column_names"]) > 1: + + if not show_first and len(indexes[0]["column_names"]) > 1: raise SupersetTemplateException( "The table should have a single partitioned field " "to use this function. You may want to use " "`presto.latest_sub_partition`" ) + column_names = indexes[0]["column_names"] part_fields = [(column_name, True) for column_name in column_names] sql = cls._partition_query(table_name, database, 1, part_fields) @@ -947,7 +951,7 @@ def latest_sub_partition( indexes = database.get_indexes(table_name, schema) part_fields = indexes[0]["column_names"] for k in kwargs.keys(): # pylint: disable=consider-iterating-dictionary - if k not in k in part_fields: + if k not in k in part_fields: # pylint: disable=comparison-with-itself msg = "Field [{k}] is not part of the portioning key" raise SupersetTemplateException(msg) if len(kwargs.keys()) != len(part_fields) - 1: diff --git a/superset/models/sql_types/presto_sql_types.py b/superset/models/sql_types/presto_sql_types.py index 47486cf156840..d6f6d3995f6d7 100644 --- a/superset/models/sql_types/presto_sql_types.py +++ b/superset/models/sql_types/presto_sql_types.py @@ -29,6 +29,7 @@ class TinyInteger(Integer): A type for tiny ``int`` integers. """ + @property def python_type(self) -> Type[int]: return int @@ -42,6 +43,7 @@ class Interval(TypeEngine): A type for intervals. """ + @property def python_type(self) -> Optional[Type[Any]]: return None @@ -55,6 +57,7 @@ class Array(TypeEngine): A type for arrays. """ + @property def python_type(self) -> Optional[Type[List[Any]]]: return list @@ -68,6 +71,7 @@ class Map(TypeEngine): A type for maps. """ + @property def python_type(self) -> Optional[Type[Dict[Any, Any]]]: return dict @@ -81,6 +85,7 @@ class Row(TypeEngine): A type for rows. 
""" + @property def python_type(self) -> Optional[Type[Any]]: return None diff --git a/superset/sql_validators/presto_db.py b/superset/sql_validators/presto_db.py index 6c5bb309bbfd6..ccafb98474a61 100644 --- a/superset/sql_validators/presto_db.py +++ b/superset/sql_validators/presto_db.py @@ -138,7 +138,7 @@ def validate_statement( end_column=end_column, ) except Exception as ex: - logger.exception(f"Unexpected error running validation query: {ex}") + logger.exception("Unexpected error running validation query: %s", str(ex)) raise ex @classmethod @@ -156,7 +156,7 @@ def validate( parsed_query = ParsedQuery(sql) statements = parsed_query.get_statements() - logger.info(f"Validating {len(statements)} statement(s)") + logger.info("Validating %i statement(s)", len(statements)) engine = database.get_sqla_engine( schema=schema, nullpool=True, @@ -174,6 +174,6 @@ def validate( ) if annotation: annotations.append(annotation) - logger.debug(f"Validation found {len(annotations)} error(s)") + logger.debug("Validation found %i error(s)", len(annotations)) return annotations From e89afe84083890f555cea464dc47ee082a3932f3 Mon Sep 17 00:00:00 2001 From: Will Barrett Date: Thu, 18 Jun 2020 08:07:13 -0700 Subject: [PATCH 6/9] Fix more erroring files --- superset/charts/api.py | 20 +++++++++++++------- superset/common/query_object.py | 29 ++++++++++++++++++----------- superset/connectors/base/models.py | 4 ++-- superset/connectors/druid/models.py | 15 +++++---------- superset/db_engine_specs/hive.py | 19 ++++++++++++------- superset/models/annotations.py | 2 +- superset/models/core.py | 11 ++++++----- superset/sql_lab.py | 20 +++++++++++--------- superset/tasks/slack_util.py | 4 ++-- superset/utils/screenshots.py | 18 +++++++++--------- 10 files changed, 79 insertions(+), 63 deletions(-) diff --git a/superset/charts/api.py b/superset/charts/api.py index 148a337fdc0f0..d309ef17ab08d 100644 --- a/superset/charts/api.py +++ b/superset/charts/api.py @@ -219,7 +219,9 @@ def post(self) -> Response: except ChartInvalidError as ex: return self.response_422(message=ex.normalized_messages()) except ChartCreateFailedError as ex: - logger.error(f"Error creating model {self.__class__.__name__}: {ex}") + logger.error( + "Error creating model %s: %s", self.__class__.__name__, str(ex) + ) return self.response_422(message=str(ex)) @expose("/", methods=["PUT"]) @@ -287,7 +289,9 @@ def put( # pylint: disable=too-many-return-statements, arguments-differ except ChartInvalidError as ex: return self.response_422(message=ex.normalized_messages()) except ChartUpdateFailedError as ex: - logger.error(f"Error updating model {self.__class__.__name__}: {ex}") + logger.error( + "Error updating model %s: %s", self.__class__.__name__, str(ex) + ) return self.response_422(message=str(ex)) @expose("/", methods=["DELETE"]) @@ -334,7 +338,9 @@ def delete(self, pk: int) -> Response: # pylint: disable=arguments-differ except ChartForbiddenError: return self.response_403() except ChartDeleteFailedError as ex: - logger.error(f"Error deleting model {self.__class__.__name__}: {ex}") + logger.error( + "Error deleting model %s: %s", self.__class__.__name__, str(ex) + ) return self.response_422(message=str(ex)) @expose("/", methods=["DELETE"]) @@ -386,9 +392,7 @@ def bulk_delete( return self.response( 200, message=ngettext( - f"Deleted %(num)d chart", - f"Deleted %(num)d charts", - num=len(item_ids), + "Deleted %(num)d chart", "Deleted %(num)d charts", num=len(item_ids) ), ) except ChartNotFoundError: @@ -464,13 +468,15 @@ def data(self) -> Response: 
headers=generate_download_headers("csv"), mimetype="application/csv", ) - elif result_format == ChartDataResultFormat.JSON: + + if result_format == ChartDataResultFormat.JSON: response_data = simplejson.dumps( {"result": payload}, default=json_int_dttm_ser, ignore_nan=True ) resp = make_response(response_data, 200) resp.headers["Content-Type"] = "application/json; charset=utf-8" return resp + raise self.response_400(message=f"Unsupported result_format: {result_format}") @expose("//thumbnail//", methods=["GET"]) diff --git a/superset/common/query_object.py b/superset/common/query_object.py index d09c75b312765..8de21659d0c0a 100644 --- a/superset/common/query_object.py +++ b/superset/common/query_object.py @@ -141,8 +141,8 @@ def __init__( if is_sip_38 and groupby: self.columns += groupby logger.warning( - f"The field `groupby` is deprecated. Viz plugins should " - f"pass all selectables via the `columns` field" + "The field `groupby` is deprecated. Viz plugins should " + "pass all selectables via the `columns` field" ) self.orderby = orderby or [] @@ -151,15 +151,18 @@ def __init__( for field in DEPRECATED_FIELDS: if field.old_name in kwargs: logger.warning( - f"The field `{field.old_name}` is deprecated, please use " - f"`{field.new_name}` instead." + "The field `%s` is deprecated, please use `%s` instead.", + field.old_name, + field.new_name, ) value = kwargs[field.old_name] if value: if hasattr(self, field.new_name): logger.warning( - f"The field `{field.new_name}` is already populated, " - f"replacing value with contents from `{field.old_name}`." + "The field `%s` is already populated, " + "replacing value with contents from `%s`.", + field.new_name, + field.old_name, ) setattr(self, field.new_name, value) @@ -167,16 +170,20 @@ def __init__( for field in DEPRECATED_EXTRAS_FIELDS: if field.old_name in kwargs: logger.warning( - f"The field `{field.old_name}` is deprecated and should be " - f"passed to `extras` via the `{field.new_name}` property." + "The field `%s` is deprecated and should " + "be passed to `extras` via the `%s` property.", + field.old_name, + field.new_name, ) value = kwargs[field.old_name] if value: if hasattr(self.extras, field.new_name): logger.warning( - f"The field `{field.new_name}` is already populated in " - f"`extras`, replacing value with contents " - f"from `{field.old_name}`." + "The field `%s` is already populated in " + "`extras`, replacing value with contents " + "from `%s`.", + field.new_name, + field.old_name, ) self.extras[field.new_name] = value diff --git a/superset/connectors/base/models.py b/superset/connectors/base/models.py index 5ba28e8c6fcd0..f2dad886654e5 100644 --- a/superset/connectors/base/models.py +++ b/superset/connectors/base/models.py @@ -348,7 +348,7 @@ def handle_single_value(value: Optional[FilterValue]) -> Optional[FilterValue]: value = utils.cast_to_num(value) if value == NULL_STRING: return None - elif value == "": + if value == "": return "" return value @@ -516,7 +516,7 @@ class BaseColumn(AuditMixinNullable, ImportMixin): export_fields: List[Any] = [] def __repr__(self) -> str: - return self.column_name + return str(self.column_name) num_types = ( "DOUBLE", diff --git a/superset/connectors/druid/models.py b/superset/connectors/druid/models.py index ccfff6e0da465..1c423a1fc5413 100644 --- a/superset/connectors/druid/models.py +++ b/superset/connectors/druid/models.py @@ -14,8 +14,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
-# pylint: disable=C,R,W -# pylint: disable=invalid-unary-operand-type +# pylint: skip-file import json import logging import re @@ -81,12 +80,7 @@ pass try: - from superset.utils.core import ( - DimSelector, - DTTM_ALIAS, - FilterOperator, - flasher, - ) + from superset.utils.core import DimSelector, DTTM_ALIAS, FilterOperator, flasher except ImportError: pass @@ -845,7 +839,8 @@ def granularity( else: granularity["type"] = "duration" granularity["duration"] = ( - utils.parse_human_timedelta(period_name).total_seconds() * 1000 # type: ignore + utils.parse_human_timedelta(period_name).total_seconds() + * 1000 # type: ignore ) return granularity @@ -950,7 +945,7 @@ def resolve_postagg( @staticmethod def metrics_and_post_aggs( - metrics: List[Metric], metrics_dict: Dict[str, DruidMetric], + metrics: List[Metric], metrics_dict: Dict[str, DruidMetric] ) -> Tuple["OrderedDict[str, Any]", "OrderedDict[str, Any]"]: # Separate metrics into those that are aggregations # and those that are post aggregations diff --git a/superset/db_engine_specs/hive.py b/superset/db_engine_specs/hive.py index f99b1803ba3d4..4cfcb4ce5004f 100644 --- a/superset/db_engine_specs/hive.py +++ b/superset/db_engine_specs/hive.py @@ -200,7 +200,7 @@ def convert_dttm(cls, target_type: str, dttm: datetime) -> Optional[str]: tt = target_type.upper() if tt == "DATE": return f"CAST('{dttm.date().isoformat()}' AS DATE)" - elif tt == "TIMESTAMP": + if tt == "TIMESTAMP": return f"""CAST('{dttm.isoformat(sep=" ", timespec="microseconds")}' AS TIMESTAMP)""" # pylint: disable=line-too-long return None @@ -284,7 +284,9 @@ def handle_cursor( # pylint: disable=too-many-locals if log: log_lines = log.splitlines() progress = cls.progress(log_lines) - logger.info(f"Query {query_id}: Progress total: {progress}") + logger.info( + "Query %s: Progress total: %s", str(query_id), str(progress) + ) needs_commit = False if progress > query.progress: query.progress = progress @@ -294,21 +296,25 @@ def handle_cursor( # pylint: disable=too-many-locals if tracking_url: job_id = tracking_url.split("/")[-2] logger.info( - f"Query {query_id}: Found the tracking url: {tracking_url}" + "Query %s: Found the tracking url: %s", + str(query_id), + tracking_url, ) tracking_url = tracking_url_trans(tracking_url) logger.info( - f"Query {query_id}: Transformation applied: {tracking_url}" + "Query %s: Transformation applied: %s", + str(query_id), + tracking_url, ) query.tracking_url = tracking_url - logger.info(f"Query {query_id}: Job id: {job_id}") + logger.info("Query %s: Job id: %s", str(query_id), str(job_id)) needs_commit = True if job_id and len(log_lines) > last_log_line: # Wait for job id before logging things out # this allows for prefixing all log lines and becoming # searchable in something like Kibana for l in log_lines[last_log_line:]: - logger.info(f"Query {query_id}: [{job_id}] {l}") + logger.info("Query %s: [%s] %s", str(query_id), str(job_id), l) last_log_line = len(log_lines) if needs_commit: session.commit() @@ -414,7 +420,6 @@ def modify_url_for_impersonation( """ # Do nothing in the URL object since instead this should modify # the configuraiton dictionary. 
See get_configuration_for_impersonation - pass @classmethod def get_configuration_for_impersonation( diff --git a/superset/models/annotations.py b/superset/models/annotations.py index ec8d3c0457a05..b2672fbe5ed38 100644 --- a/superset/models/annotations.py +++ b/superset/models/annotations.py @@ -34,7 +34,7 @@ class AnnotationLayer(Model, AuditMixinNullable): descr = Column(Text) def __repr__(self) -> str: - return self.name + return str(self.name) class Annotation(Model, AuditMixinNullable): diff --git a/superset/models/core.py b/superset/models/core.py index ed90e4a4290e7..3fe92a3cafd91 100755 --- a/superset/models/core.py +++ b/superset/models/core.py @@ -170,7 +170,9 @@ def function_names(self) -> List[str]: except Exception as ex: # pylint: disable=broad-except # function_names property is used in bulk APIs and should not hard crash # more info in: https://github.com/apache/incubator-superset/issues/9678 - logger.error(f"Failed to fetch database function names with error: {ex}") + logger.error( + "Failed to fetch database function names with error: %s", str(ex) + ) return [] @property @@ -207,7 +209,7 @@ def url_object(self) -> URL: @property def backend(self) -> str: sqlalchemy_url = make_url(self.sqlalchemy_uri_decrypted) - return sqlalchemy_url.get_backend_name() + return sqlalchemy_url.get_backend_name() # pylint: disable=no-member @property def metadata_cache_timeout(self) -> Dict[str, Any]: @@ -452,8 +454,7 @@ def get_all_table_names_in_database( return self.db_engine_spec.get_all_datasource_names(self, "table") @cache_util.memoized_func( - key=lambda *args, **kwargs: "db:{}:schema:None:view_list", - attribute_in_key="id", + key=lambda *args, **kwargs: "db:{}:schema:None:view_list", attribute_in_key="id" ) def get_all_view_names_in_database( self, @@ -652,7 +653,7 @@ def has_table_by_name(self, table_name: str, schema: Optional[str] = None) -> bo @utils.memoized def get_dialect(self) -> Dialect: sqla_url = url.make_url(self.sqlalchemy_uri_decrypted) - return sqla_url.get_dialect()() + return sqla_url.get_dialect()() # pylint: disable=no-member sqla.event.listen(Database, "after_insert", security_manager.set_perm) diff --git a/superset/sql_lab.py b/superset/sql_lab.py index d9f5b38b6d966..4d3597c43ebf7 100644 --- a/superset/sql_lab.py +++ b/superset/sql_lab.py @@ -96,9 +96,9 @@ def handle_query_error( def get_query_backoff_handler(details: Dict[Any, Any]) -> None: query_id = details["kwargs"]["query_id"] - logger.error(f"Query with id `{query_id}` could not be retrieved") + logger.error("Query with id `%s` could not be retrieved", str(query_id)) stats_logger.incr("error_attempting_orm_query_{}".format(details["tries"] - 1)) - logger.error(f"Query {query_id}: Sleeping for a sec before retrying...") + logger.error("Query %s: Sleeping for a sec before retrying...", str(query_id)) def get_query_giveup_handler(_: Any) -> None: @@ -287,7 +287,7 @@ def execute_sql_statement( def _serialize_payload( payload: Dict[Any, Any], use_msgpack: Optional[bool] = False ) -> Union[bytes, str]: - logger.debug(f"Serializing to msgpack: {use_msgpack}") + logger.debug("Serializing to msgpack: %r", use_msgpack) if use_msgpack: return msgpack.dumps(payload, default=json_iso_dttm_ser, use_bin_type=True) @@ -360,9 +360,9 @@ def execute_sql_statements( # pylint: disable=too-many-arguments, too-many-loca # Breaking down into multiple statements parsed_query = ParsedQuery(rendered_query) statements = parsed_query.get_statements() - logger.info(f"Query {query_id}: Executing {len(statements)} statement(s)") 
+ logger.info("Query %s: Executing %i statement(s)", str(query_id), len(statements)) - logger.info(f"Query {query_id}: Set query to 'running'") + logger.info("Query %s: Set query to 'running'", str(query_id)) query.status = QueryStatus.RUNNING query.start_running_time = now_as_float() session.commit() @@ -386,7 +386,7 @@ def execute_sql_statements( # pylint: disable=too-many-arguments, too-many-loca # Run statement msg = f"Running statement {i+1} out of {statement_count}" - logger.info(f"Query {query_id}: {msg}") + logger.info("Query %s: %s", str(query_id), msg) query.set_extra_json_key("progress", msg) session.commit() try: @@ -437,7 +437,9 @@ def execute_sql_statements( # pylint: disable=too-many-arguments, too-many-loca if store_results and results_backend: key = str(uuid.uuid4()) - logger.info(f"Query {query_id}: Storing results in results backend, key: {key}") + logger.info( + "Query %s: Storing results in results backend, key: %s", str(query_id), key + ) with stats_timing("sqllab.query.results_backend_write", stats_logger): with stats_timing( "sqllab.query.results_backend_write_serialization", stats_logger @@ -451,9 +453,9 @@ def execute_sql_statements( # pylint: disable=too-many-arguments, too-many-loca compressed = zlib_compress(serialized_payload) logger.debug( - f"*** serialized payload size: {getsizeof(serialized_payload)}" + "*** serialized payload size: %i", getsizeof(serialized_payload) ) - logger.debug(f"*** compressed payload size: {getsizeof(compressed)}") + logger.debug("*** compressed payload size: %i", getsizeof(compressed)) results_backend.set(key, compressed, cache_timeout) query.results_key = key diff --git a/superset/tasks/slack_util.py b/superset/tasks/slack_util.py index 5c3d3af4191d5..ef647ebd2a5a2 100644 --- a/superset/tasks/slack_util.py +++ b/superset/tasks/slack_util.py @@ -19,10 +19,10 @@ from typing import cast, Union from retry.api import retry - from slack import WebClient from slack.errors import SlackApiError from slack.web.slack_response import SlackResponse + from superset import app # Globals @@ -42,5 +42,5 @@ def deliver_slack_msg( channels=slack_channel, file=file, initial_comment=body, title=subject ), ) - logger.info(f"Sent the report to the slack {slack_channel}") + logger.info("Sent the report to the slack %s", slack_channel) assert response["file"], str(response) # the uploaded file diff --git a/superset/utils/screenshots.py b/superset/utils/screenshots.py index b2f222f219522..3db41b15657b6 100644 --- a/superset/utils/screenshots.py +++ b/superset/utils/screenshots.py @@ -155,14 +155,14 @@ def get_screenshot( driver.set_window_size(*self._window) driver.get(url) img: Optional[bytes] = None - logger.debug(f"Sleeping for {SELENIUM_HEADSTART} seconds") + logger.debug("Sleeping for %i seconds", SELENIUM_HEADSTART) time.sleep(SELENIUM_HEADSTART) try: - logger.debug(f"Wait for the presence of {element_name}") + logger.debug("Wait for the presence of %s", element_name) element = WebDriverWait(driver, 10).until( EC.presence_of_element_located((By.CLASS_NAME, element_name)) ) - logger.debug(f"Wait for .loading to be done") + logger.debug("Wait for .loading to be done") WebDriverWait(driver, 60).until_not( EC.presence_of_all_elements_located((By.CLASS_NAME, "loading")) ) @@ -226,7 +226,7 @@ def get( user=user, thumb_size=thumb_size, cache=cache ) else: - logger.info(f"Loaded thumbnail from cache: {self.cache_key}") + logger.info("Loaded thumbnail from cache: %s", self.cache_key) if payload: return BytesIO(payload) return None @@ -259,7 +259,7 @@ def 
compute_and_cache( # pylint: disable=too-many-arguments logger.info("Thumb already cached, skipping...") return None thumb_size = thumb_size or self.thumb_size - logger.info(f"Processing url for thumbnail: {cache_key}") + logger.info("Processing url for thumbnail: %s", cache_key) payload = None @@ -277,7 +277,7 @@ def compute_and_cache( # pylint: disable=too-many-arguments payload = None if payload and cache: - logger.info(f"Caching thumbnail: {cache_key} {cache}") + logger.info("Caching thumbnail: %s %s", cache_key, str(cache)) cache.set(cache_key, payload) return payload @@ -291,13 +291,13 @@ def resize_image( ) -> bytes: thumb_size = thumb_size or cls.thumb_size img = Image.open(BytesIO(img_bytes)) - logger.debug(f"Selenium image size: {img.size}") + logger.debug("Selenium image size: %s", str(img.size)) if crop and img.size[1] != cls.window_size[1]: desired_ratio = float(cls.window_size[1]) / cls.window_size[0] desired_width = int(img.size[0] * desired_ratio) - logger.debug(f"Cropping to: {img.size[0]}*{desired_width}") + logger.debug("Cropping to: %s*%s", str(img.size[0]), str(desired_width)) img = img.crop((0, 0, img.size[0], desired_width)) - logger.debug(f"Resizing to {thumb_size}") + logger.debug("Resizing to %s", str(thumb_size)) img = img.resize(thumb_size, Image.ANTIALIAS) new_img = BytesIO() if output != "png": From babb5605b20ac3bb0ef0792810b9b940242ef459 Mon Sep 17 00:00:00 2001 From: Will Barrett Date: Thu, 18 Jun 2020 08:28:02 -0700 Subject: [PATCH 7/9] Fix the last couple of errors, clean pylint! --- superset/dashboards/api.py | 4 ++-- superset/datasets/api.py | 4 +++- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/superset/dashboards/api.py b/superset/dashboards/api.py index a897a2d333f70..53154aefdac3f 100644 --- a/superset/dashboards/api.py +++ b/superset/dashboards/api.py @@ -379,8 +379,8 @@ def bulk_delete( return self.response( 200, message=ngettext( - f"Deleted %(num)d dashboard", - f"Deleted %(num)d dashboards", + "Deleted %(num)d dashboard", + "Deleted %(num)d dashboards", num=len(item_ids), ), ) diff --git a/superset/datasets/api.py b/superset/datasets/api.py index 28c64c86545a6..b1f4ef2e6906c 100644 --- a/superset/datasets/api.py +++ b/superset/datasets/api.py @@ -407,5 +407,7 @@ def refresh(self, pk: int) -> Response: except DatasetForbiddenError: return self.response_403() except DatasetRefreshFailedError as ex: - logger.error(f"Error refreshing dataset {self.__class__.__name__}: {ex}") + logger.error( + "Error refreshing dataset %s: %s", self.__class__.__name__, str(ex) + ) return self.response_422(message=str(ex)) From a4f90da8fa760d2d2183a879fd0588db52df6383 Mon Sep 17 00:00:00 2001 From: Will Barrett Date: Thu, 18 Jun 2020 08:36:41 -0700 Subject: [PATCH 8/9] Black --- superset/config.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/superset/config.py b/superset/config.py index 0a1eada08efdf..ad4c58fe13420 100644 --- a/superset/config.py +++ b/superset/config.py @@ -898,9 +898,7 @@ class CeleryConfig: # pylint: disable=too-few-public-methods print(f"Loaded your LOCAL configuration at [{cfg_path}]") except Exception: logger.exception( - "Failed to import config for %s=%s", - CONFIG_PATH_ENV_VAR, - cfg_path + "Failed to import config for %s=%s", CONFIG_PATH_ENV_VAR, cfg_path ) raise elif importlib.util.find_spec("superset_config"): From b62578cd1f0be7ac704f54acf75a507521983187 Mon Sep 17 00:00:00 2001 From: Will Barrett Date: Thu, 18 Jun 2020 08:51:13 -0700 Subject: [PATCH 9/9] Fix mypy issue in 
connectors/druid/models.py --- superset/connectors/druid/models.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/superset/connectors/druid/models.py b/superset/connectors/druid/models.py index 1c423a1fc5413..9237a1f715b0d 100644 --- a/superset/connectors/druid/models.py +++ b/superset/connectors/druid/models.py @@ -839,8 +839,8 @@ def granularity( else: granularity["type"] = "duration" granularity["duration"] = ( - utils.parse_human_timedelta(period_name).total_seconds() - * 1000 # type: ignore + utils.parse_human_timedelta(period_name).total_seconds() # type: ignore + * 1000 ) return granularity
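Taken together, the nine patches lean on a small set of recurring pylint 2.5 fixes rather than one large refactor. The simplest is unnecessary-comprehension (R1721, new in pylint 2.4): a comprehension that only copies an iterable, as in dashboards/dao.py and dashboard/views.py above, becomes a list() call. A sketch with a hypothetical model object, not Superset's:

    class Model:  # stand-in for a SQLAlchemy model with an owners relation
        owners = ["alice", "bob"]

    model = Model()
    owners = [owner for owner in model.owners]  # R1721: copies and nothing else
    owners = list(model.owners)                 # same shallow copy, idiomatic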
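The most frequent structural edit flattens if/elif/else ladders whose branches all return or raise (pylint's no-else-return and no-else-raise checks). The drill, impala, hive, and sqlite engine specs all get the same treatment; the impala hunk above reduces to this shape:

    from datetime import datetime
    from typing import Optional

    def convert_dttm(target_type: str, dttm: datetime) -> Optional[str]:
        tt = target_type.upper()
        if tt == "DATE":
            return f"CAST('{dttm.date().isoformat()}' AS DATE)"
        if tt == "TIMESTAMP":  # was elif; the early return makes it redundant
            return f"CAST('{dttm.isoformat(timespec='microseconds')}' AS TIMESTAMP)"
        return None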
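A pylint 2.5 addition, f-string-without-interpolation (W1309), catches f-strings with no {} fields. Two variants appear above: pinot.py keeps its escaped-brace template and silences the check inline, while the ngettext() calls in charts/api.py and dashboards/api.py simply drop the stray f prefix, because %(num)d is a placeholder for Babel to fill at translation time, not for Python. A runnable illustration of both:

    TEMPLATE = f"{{col}}"  # pylint: disable=f-string-without-interpolation

    template = f"Deleted %(num)d charts"  # W1309: the f prefix is a no-op here
    template = "Deleted %(num)d charts"   # fixed: %-substitution stays deferred
    print(template % {"num": 3})          # -> "Deleted 3 charts"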
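Patch 2's subject calls import-outside-toplevel (C0415, new in pylint 2.4) the most common new error: Superset deliberately imports inside functions in many places to break circular imports, so the check is switched off globally in .pylintrc rather than fixed call site by call site. The flagged shape, sketched with a real but here-arbitrary dependency:

    def load_dataframe(path: str):
        # C0415: imports normally sit at module top level; deferring the
        # import keeps the dependency off the module's import-time path.
        import pandas as pd  # pylint: disable=import-outside-toplevel

        return pd.read_csv(path)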
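Finally, the bare @property decorators added in models/helpers.py and models/sql_types/presto_sql_types.py most likely answer invalid-overridden-method (W0236, new in pylint 2.4): SQLAlchemy declares TypeEngine.python_type as a property, so an override has to be a property as well. A self-contained sketch of the rule, assuming nothing from SQLAlchemy:

    class Base:
        @property
        def python_type(self) -> type:
            return object


    class TinyInteger(Base):
        # A plain method here would trip W0236; the decorator keeps the
        # override the same kind of attribute as in the base class.
        @property
        def python_type(self) -> type:
            return int

    assert TinyInteger().python_type is int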