From 24b43beff9d10ad16a3ba3d0f44dacc584f68753 Mon Sep 17 00:00:00 2001
From: John Bodley <4567245+john-bodley@users.noreply.github.com>
Date: Fri, 13 Aug 2021 15:32:28 -0700
Subject: [PATCH] chore(pylint): Bump Pylint to 2.9.6 (#16146)

Co-authored-by: John Bodley
---
 .pylintrc                                    |  1 +
 requirements/testing.txt                     |  4 ++--
 superset/cli.py                              | 20 +++++++++++++------
 superset/config.py                           |  2 +-
 superset/connectors/connector_registry.py    |  5 ++---
 superset/connectors/sqla/models.py           |  6 ++----
 superset/dashboards/api.py                   |  2 +-
 .../datasets/commands/importers/v1/utils.py  |  3 +--
 superset/examples/flights.py                 |  7 ++-----
 superset/examples/helpers.py                 |  4 +++-
 superset/jinja_context.py                    |  4 ++--
 superset/models/core.py                      |  6 ++++--
 superset/models/schedules.py                 |  3 +++
 superset/models/tags.py                      |  2 ++
 superset/tasks/alerts/validator.py           | 10 +++++-----
 superset/tasks/cache.py                      |  2 +-
 superset/tasks/schedules.py                  |  4 ++--
 superset/utils/core.py                       |  9 ++++-----
 superset/utils/memoized.py                   |  2 +-
 superset/utils/pandas_postprocessing.py      |  2 +-
 superset/views/database/views.py             |  2 +-
 superset/views/utils.py                      |  4 ++--
 tests/integration_tests/alerts_tests.py      |  2 +-
 23 files changed, 58 insertions(+), 48 deletions(-)

diff --git a/.pylintrc b/.pylintrc
index 59c215e30d..c075529867 100644
--- a/.pylintrc
+++ b/.pylintrc
@@ -90,6 +90,7 @@ disable=
     super-with-arguments,
     too-few-public-methods,
     too-many-locals,
+    duplicate-code,
 
 [REPORTS]
 
diff --git a/requirements/testing.txt b/requirements/testing.txt
index 1b56b39593..ccbd88989e 100644
--- a/requirements/testing.txt
+++ b/requirements/testing.txt
@@ -11,7 +11,7 @@
     # via -r requirements/base.in
 appnope==0.1.2
     # via ipython
-astroid==2.5
+astroid==2.6.6
     # via pylint
 backcall==0.2.0
     # via ipython
@@ -71,7 +71,7 @@ pyhive[hive,presto]==0.6.4
     # via
     #   -r requirements/development.in
     #   -r requirements/testing.in
-pylint==2.6.0
+pylint==2.9.6
     # via -r requirements/testing.in
 pytest==6.2.4
     # via
diff --git a/superset/cli.py b/superset/cli.py
index c97bbb6b42..69051a3342 100755
--- a/superset/cli.py
+++ b/superset/cli.py
@@ -221,7 +221,7 @@ def import_directory(directory: str, overwrite: bool, force: bool) -> None:
     help="Create the DB if it doesn't exist",
 )
 def set_database_uri(database_name: str, uri: str, skip_create: bool) -> None:
-    """Updates a database connection URI """
+    """Updates a database connection URI"""
     utils.get_or_create_db(database_name, uri, not skip_create)
 
 
@@ -341,7 +341,8 @@ if feature_flags.get("VERSIONED_EXPORT"):
             with ZipFile(path) as bundle:
                 contents = get_contents_from_bundle(bundle)
         else:
-            contents = {path: open(path).read()}
+            with open(path) as file:
+                contents = {path: file.read()}
         try:
             ImportDashboardsCommand(contents, overwrite=True).run()
         except Exception:  # pylint: disable=broad-except
@@ -366,7 +367,8 @@ if feature_flags.get("VERSIONED_EXPORT"):
             with ZipFile(path) as bundle:
                 contents = get_contents_from_bundle(bundle)
         else:
-            contents = {path: open(path).read()}
+            with open(path) as file:
+                contents = {path: file.read()}
         try:
             ImportDatasetsCommand(contents, overwrite=True).run()
         except Exception:  # pylint: disable=broad-except
@@ -491,7 +493,10 @@ else:
             files.extend(path_object.rglob("*.json"))
         if username is not None:
             g.user = security_manager.find_user(username=username)
-        contents = {path.name: open(path).read() for path in files}
+        contents = {}
+        for path_ in files:
+            with open(path_) as file:
+                contents[path_.name] = file.read()
         try:
             ImportDashboardsCommand(contents).run()
         except Exception:  # pylint: disable=broad-except
@@ -539,7 +544,10 @@ else:
         elif path_object.exists() and recursive:
             files.extend(path_object.rglob("*.yaml"))
             files.extend(path_object.rglob("*.yml"))
-        contents = {path.name: open(path).read() for path in files}
+        contents = {}
+        for path_ in files:
+            with open(path_) as file:
+                contents[path_.name] = file.read()
         try:
             ImportDatasetsCommand(contents, sync_columns, sync_metrics).run()
         except Exception:  # pylint: disable=broad-except
@@ -632,7 +640,7 @@ def flower(port: int, address: str) -> None:
     print(Fore.BLUE + "-=" * 40)
     print(Fore.YELLOW + cmd)
     print(Fore.BLUE + "-=" * 40)
-    Popen(cmd, shell=True).wait()
+    Popen(cmd, shell=True).wait()  # pylint: disable=consider-using-with
 
 
 @superset.command()
diff --git a/superset/config.py b/superset/config.py
index 632f74cf64..58d9693f66 100644
--- a/superset/config.py
+++ b/superset/config.py
@@ -20,7 +20,7 @@ All configuration in this file can be overridden by providing a superset_config
 in your PYTHONPATH as there is a ``from superset_config import *``
 at the end of this file.
 """
-import imp
+import imp  # pylint: disable=deprecated-module
 import importlib.util
 import json
 import logging
diff --git a/superset/connectors/connector_registry.py b/superset/connectors/connector_registry.py
index 86081cd1b5..5f40644dca 100644
--- a/superset/connectors/connector_registry.py
+++ b/superset/connectors/connector_registry.py
@@ -31,7 +31,7 @@ if TYPE_CHECKING:
 
 
 class ConnectorRegistry:
-    """ Central Registry for all available datasource engines"""
+    """Central Registry for all available datasource engines"""
 
     sources: Dict[str, Type["BaseDatasource"]] = {}
 
@@ -68,8 +68,7 @@ class ConnectorRegistry:
     @classmethod
     def get_all_datasources(cls, session: Session) -> List["BaseDatasource"]:
         datasources: List["BaseDatasource"] = []
-        for source_type in ConnectorRegistry.sources:
-            source_class = ConnectorRegistry.sources[source_type]
+        for source_class in ConnectorRegistry.sources.values():
             qry = session.query(source_class)
             qry = source_class.default_query(qry)
             datasources.extend(qry.all())
diff --git a/superset/connectors/sqla/models.py b/superset/connectors/sqla/models.py
index 7a91ac0016..c9b30852ac 100644
--- a/superset/connectors/sqla/models.py
+++ b/superset/connectors/sqla/models.py
@@ -404,9 +404,7 @@ class SqlMetric(Model, BaseMetric):
         "extra",
         "warning_text",
     ]
-    update_from_object_fields = list(
-        [s for s in export_fields if s not in ("table_id",)]
-    )
+    update_from_object_fields = list(s for s in export_fields if s != "table_id")
     export_parent = "table"
 
     def get_sqla_col(self, label: Optional[str] = None) -> Column:
@@ -1151,7 +1149,7 @@ class SqlaTable(  # pylint: disable=too-many-public-methods,too-many-instance-at
         having_clause_and = []
 
         for flt in filter:  # type: ignore
-            if not all([flt.get(s) for s in ["col", "op"]]):
+            if not all(flt.get(s) for s in ["col", "op"]):
                 continue
             col = flt["col"]
             val = flt.get("val")
diff --git a/superset/dashboards/api.py b/superset/dashboards/api.py
index bca0334310..9cb1a7e387 100644
--- a/superset/dashboards/api.py
+++ b/superset/dashboards/api.py
@@ -237,7 +237,7 @@ class DashboardRestApi(BaseSupersetModelRestApi):
     @statsd_metrics
     @event_logger.log_this_with_context(
         action=lambda self, *args, **kwargs: f"{self.__class__.__name__}.get",
         log_to_statsd=False,
     )
-    def get(self, id_or_slug: str) -> Response:
+    def get(self, id_or_slug: str) -> Response:  # pylint: disable=arguments-renamed
         """Gets a dashboard
diff --git a/superset/datasets/commands/importers/v1/utils.py b/superset/datasets/commands/importers/v1/utils.py
index d61be518a2..e6917292c5 100644
--- a/superset/datasets/commands/importers/v1/utils.py
+++ b/superset/datasets/commands/importers/v1/utils.py
@@ -135,8 +135,7 @@ def import_dataset(
 def load_data(
     data_uri: str, dataset: SqlaTable, example_database: Database, session: Session
 ) -> None:
-
-    data = request.urlopen(data_uri)
+    data = request.urlopen(data_uri)  # pylint: disable=consider-using-with
     if data_uri.endswith(".gz"):
         data = gzip.open(data)
     df = pd.read_csv(data, encoding="utf-8")
diff --git a/superset/examples/flights.py b/superset/examples/flights.py
index b5e5c0e2ff..ef993d314e 100644
--- a/superset/examples/flights.py
+++ b/superset/examples/flights.py
@@ -38,14 +38,11 @@ def load_flights(only_metadata: bool = False, force: bool = False) -> None:
         airports = pd.read_csv(airports_bytes, encoding="latin-1")
         airports = airports.set_index("IATA_CODE")
 
-        pdf["ds"] = (
+        pdf["ds"] = (  # pylint: disable=unsupported-assignment-operation
            pdf.YEAR.map(str) + "-0" + pdf.MONTH.map(str) + "-0" + pdf.DAY.map(str)
         )
         pdf.ds = pd.to_datetime(pdf.ds)
-        del pdf["YEAR"]
-        del pdf["MONTH"]
-        del pdf["DAY"]
-
+        pdf = pdf.drop(columns=["DAY", "MONTH", "YEAR"])
         pdf = pdf.join(airports, on="ORIGIN_AIRPORT", rsuffix="_ORIG")
         pdf = pdf.join(airports, on="DESTINATION_AIRPORT", rsuffix="_DEST")
         pdf.to_sql(
diff --git a/superset/examples/helpers.py b/superset/examples/helpers.py
index 5ab2364dba..d8b2c59fb7 100644
--- a/superset/examples/helpers.py
+++ b/superset/examples/helpers.py
@@ -69,7 +69,9 @@ def get_slice_json(defaults: Dict[Any, Any], **kwargs: Any) -> str:
 def get_example_data(
     filepath: str, is_gzip: bool = True, make_bytes: bool = False
 ) -> BytesIO:
-    content = request.urlopen(f"{BASE_URL}{filepath}?raw=true").read()
+    content = request.urlopen(  # pylint: disable=consider-using-with
+        f"{BASE_URL}{filepath}?raw=true"
+    ).read()
     if is_gzip:
         content = zlib.decompress(content, zlib.MAX_WBITS | 16)
     if make_bytes:
diff --git a/superset/jinja_context.py b/superset/jinja_context.py
index 6938c478fd..84fa9b6c9a 100644
--- a/superset/jinja_context.py
+++ b/superset/jinja_context.py
@@ -527,10 +527,10 @@ DEFAULT_PROCESSORS = {"presto": PrestoTemplateProcessor, "hive": HiveTemplatePro
 @memoized
 def get_template_processors() -> Dict[str, Any]:
     processors = current_app.config.get("CUSTOM_TEMPLATE_PROCESSORS", {})
-    for engine in DEFAULT_PROCESSORS:
+    for engine, processor in DEFAULT_PROCESSORS.items():
         # do not overwrite engine-specific CUSTOM_TEMPLATE_PROCESSORS
         if not engine in processors:
-            processors[engine] = DEFAULT_PROCESSORS[engine]
+            processors[engine] = processor
     return processors
 
 
diff --git a/superset/models/core.py b/superset/models/core.py
index 87711561f7..77cba41763 100755
--- a/superset/models/core.py
+++ b/superset/models/core.py
@@ -506,7 +506,7 @@ class Database(
         return self.db_engine_spec.get_all_datasource_names(self, "view")
 
     @cache_util.memoized_func(
-        key=lambda self, schema, *args, **kwargs: f"db:{self.id}:schema:{schema}:table_list",  # type: ignore
+        key=lambda self, schema, *args, **kwargs: f"db:{self.id}:schema:{schema}:table_list",
         cache=cache_manager.data_cache,
     )
     def get_all_table_names_in_schema(
@@ -536,9 +536,10 @@ class Database(
             ]
         except Exception as ex:  # pylint: disable=broad-except
             logger.warning(ex)
+            return []
 
     @cache_util.memoized_func(
-        key=lambda self, schema, *args, **kwargs: f"db:{self.id}:schema:{schema}:view_list",  # type: ignore
+        key=lambda self, schema, *args, **kwargs: f"db:{self.id}:schema:{schema}:view_list",
         cache=cache_manager.data_cache,
     )
     def get_all_view_names_in_schema(
@@ -566,6 +567,7 @@ class Database(
             return [utils.DatasourceName(table=view, schema=schema) for view in views]
         except Exception as ex:  # pylint: disable=broad-except
             logger.warning(ex)
+            return []
 
     @cache_util.memoized_func(
         key=lambda self, *args, **kwargs: f"db:{self.id}:schema_list",
diff --git a/superset/models/schedules.py b/superset/models/schedules.py
index fdc90f00ae..f60890bfc3 100644
--- a/superset/models/schedules.py
+++ b/superset/models/schedules.py
@@ -31,17 +31,20 @@ metadata = Model.metadata  # pylint: disable=no-member
 
 
 class ScheduleType(str, enum.Enum):
+    # pylint: disable=invalid-name
     slice = "slice"
     dashboard = "dashboard"
     alert = "alert"
 
 
 class EmailDeliveryType(str, enum.Enum):
+    # pylint: disable=invalid-name
     attachment = "Attachment"
     inline = "Inline"
 
 
 class SliceEmailReportFormat(str, enum.Enum):
+    # pylint: disable=invalid-name
     visualization = "Visualization"
     data = "Raw data"
 
diff --git a/superset/models/tags.py b/superset/models/tags.py
index 722c5b099e..86fc411222 100644
--- a/superset/models/tags.py
+++ b/superset/models/tags.py
@@ -46,6 +46,7 @@ class TagTypes(enum.Enum):
     can find all their objects by querying for the tag `owner:alice`.
     """
 
+    # pylint: disable=invalid-name
     # explicit tags, added manually by the owner
     custom = 1
 
@@ -59,6 +60,7 @@ class ObjectTypes(enum.Enum):
 
     """Object types."""
 
+    # pylint: disable=invalid-name
     query = 1
     chart = 2
     dashboard = 3
diff --git a/superset/tasks/alerts/validator.py b/superset/tasks/alerts/validator.py
index 36cfa2a439..38b5791341 100644
--- a/superset/tasks/alerts/validator.py
+++ b/superset/tasks/alerts/validator.py
@@ -28,8 +28,8 @@ OPERATOR_FUNCTIONS = {">=": ge, ">": gt, "<=": le, "<": lt, "==": eq, "!=": ne}
 
 
 class AlertValidatorType(str, enum.Enum):
-    not_null = "not null"
-    operator = "operator"
+    NOT_NULL = "not null"
+    OPERATOR = "operator"
 
     @classmethod
     def valid_type(cls, validator_type: str) -> bool:
@@ -44,7 +44,7 @@ def check_validator(validator_type: str, config: str) -> None:
 
     config_dict = json.loads(config)
 
-    if validator_type == AlertValidatorType.operator.value:
+    if validator_type == AlertValidatorType.OPERATOR.value:
 
         if not (config_dict.get("op") and config_dict.get("threshold") is not None):
             raise SupersetException(
@@ -102,8 +102,8 @@ def get_validator_function(
     """Returns a validation function based on validator_type"""
 
     alert_validators = {
-        AlertValidatorType.not_null.value: not_null_validator,
-        AlertValidatorType.operator.value: operator_validator,
+        AlertValidatorType.NOT_NULL.value: not_null_validator,
+        AlertValidatorType.OPERATOR.value: operator_validator,
     }
     if alert_validators.get(validator_type.lower()):
         return alert_validators[validator_type.lower()]
diff --git a/superset/tasks/cache.py b/superset/tasks/cache.py
index 546eaebdb0..b93e6a633c 100644
--- a/superset/tasks/cache.py
+++ b/superset/tasks/cache.py
@@ -288,7 +288,7 @@ def cache_warmup(
     for url in strategy.get_urls():
         try:
             logger.info("Fetching %s", url)
-            request.urlopen(url)
+            request.urlopen(url)  # pylint: disable=consider-using-with
             results["success"].append(url)
         except URLError:
             logger.exception("Error warming up cache!")
diff --git a/superset/tasks/schedules.py b/superset/tasks/schedules.py
index 8d85ded4c5..fe75fbe4a2 100644
--- a/superset/tasks/schedules.py
+++ b/superset/tasks/schedules.py
@@ -827,7 +827,7 @@ def get_scheduler_action(report_type: str) -> Optional[Callable[..., Any]]:
 
 @celery_app.task(name="email_reports.schedule_hourly")
 def schedule_hourly() -> None:
-    """ Celery beat job meant to be invoked hourly """
+    """Celery beat job meant to be invoked hourly"""
     if not config["ENABLE_SCHEDULED_EMAIL_REPORTS"]:
         logger.info("Scheduled email reports not enabled in config")
         return
@@ -845,7 +845,7 @@ def schedule_hourly() -> None:
 
 @celery_app.task(name="alerts.schedule_check")
 def schedule_alerts() -> None:
-    """ Celery beat job meant to be invoked every minute to check alerts """
+    """Celery beat job meant to be invoked every minute to check alerts"""
     resolution = 0
     now = datetime.utcnow()
     start_at = now - timedelta(
diff --git a/superset/utils/core.py b/superset/utils/core.py
index d48606ec0d..2e34fd4f89 100644
--- a/superset/utils/core.py
+++ b/superset/utils/core.py
@@ -1321,8 +1321,8 @@ def get_first_metric_name(metrics: Sequence[Metric]) -> Optional[str]:
 def ensure_path_exists(path: str) -> None:
     try:
         os.makedirs(path)
-    except OSError as exc:
-        if not (os.path.isdir(path) and exc.errno == errno.EEXIST):
+    except OSError as ex:
+        if not (os.path.isdir(path) and ex.errno == errno.EEXIST):
             raise
 
 
@@ -1440,9 +1440,8 @@ def create_ssl_cert_file(certificate: str) -> str:
     if not os.path.exists(path):
         # Validate certificate prior to persisting to temporary directory
         parse_ssl_cert(certificate)
-        cert_file = open(path, "w")
-        cert_file.write(certificate)
-        cert_file.close()
+        with open(path, "w") as cert_file:
+            cert_file.write(certificate)
     return path
 
 
diff --git a/superset/utils/memoized.py b/superset/utils/memoized.py
index 15cd877709..916984c9b8 100644
--- a/superset/utils/memoized.py
+++ b/superset/utils/memoized.py
@@ -39,7 +39,7 @@ class _memoized:
     def __call__(self, *args: Any, **kwargs: Any) -> Any:
         key = [args, frozenset(kwargs.items())]
         if self.is_method:
-            key.append(tuple([getattr(args[0], v, None) for v in self.watch]))
+            key.append(tuple(getattr(args[0], v, None) for v in self.watch))
         key = tuple(key)  # type: ignore
         try:
             if key in self.cache:
diff --git a/superset/utils/pandas_postprocessing.py b/superset/utils/pandas_postprocessing.py
index 0e03dea61c..2969beb011 100644
--- a/superset/utils/pandas_postprocessing.py
+++ b/superset/utils/pandas_postprocessing.py
@@ -593,7 +593,7 @@ def geohash_encode(
         )
         return _append_columns(df, encode_df, {"geohash": geohash})
     except ValueError:
-        QueryObjectValidationError(_("Invalid longitude/latitude"))
+        raise QueryObjectValidationError(_("Invalid longitude/latitude"))
 
 
 def geodetic_parse(
diff --git a/superset/views/database/views.py b/superset/views/database/views.py
index 5236f31c49..0a0a50d81f 100644
--- a/superset/views/database/views.py
+++ b/superset/views/database/views.py
@@ -291,7 +291,7 @@ class ExcelToDatabaseView(SimpleFormView):
             flash(message, "danger")
             return redirect("/exceltodatabaseview/form")
 
-        uploaded_tmp_file_path = tempfile.NamedTemporaryFile(
+        uploaded_tmp_file_path = tempfile.NamedTemporaryFile(  # pylint: disable=consider-using-with
             dir=app.config["UPLOAD_FOLDER"],
             suffix=os.path.splitext(form.excel_file.data.filename)[1].lower(),
             delete=False,
diff --git a/superset/views/utils.py b/superset/views/utils.py
index 7a461f5ae7..4d4bc17ab7 100644
--- a/superset/views/utils.py
+++ b/superset/views/utils.py
@@ -352,7 +352,7 @@ def get_dashboard_extra_filters(
         dashboard is None
         or not dashboard.json_metadata
         or not dashboard.slices
-        or not any([slc for slc in dashboard.slices if slc.id == slice_id])
+        or not any(slc for slc in dashboard.slices if slc.id == slice_id)
     ):
         return []
 
@@ -455,7 +455,7 @@ def is_slice_in_container(
 
 
 def is_owner(obj: Union[Dashboard, Slice], user: User) -> bool:
-    """ Check if user is owner of the slice """
+    """Check if user is owner of the slice"""
     return obj and user in obj.owners
 
 
diff --git a/tests/integration_tests/alerts_tests.py b/tests/integration_tests/alerts_tests.py
index c847b71875..5f8f36fe74 100644
--- a/tests/integration_tests/alerts_tests.py
+++ b/tests/integration_tests/alerts_tests.py
@@ -76,7 +76,7 @@ def setup_database():
 def create_alert(
     db_session: Session,
     sql: str,
-    validator_type: AlertValidatorType = AlertValidatorType.operator,
+    validator_type: AlertValidatorType = AlertValidatorType.OPERATOR,
     validator_config: str = "",
 ) -> Alert:
     db_session.commit()
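
Reviewer's note (not part of the patch): most of the churn above answers checks added to pylint since the 2.6 baseline, chiefly consider-using-with (R1732). The patch applies two remedies, sketched below with hypothetical helper names: rewrite with a context manager where the handle can be closed on the spot, or keep the bare call and add an inline disable where the handle must outlive the enclosing block (the NamedTemporaryFile in views/database/views.py, or urlopen results streamed into pandas).

import tempfile


def read_all(path: str) -> str:
    """Preferred fix: a context manager closes the handle deterministically."""
    with open(path) as file:
        return file.read()


def make_scratch_file() -> str:
    """When the handle must outlive the block (e.g. a temp file consumed by
    later code), keep the plain call and suppress the check instead."""
    tmp = tempfile.NamedTemporaryFile(  # pylint: disable=consider-using-with
        suffix=".xlsx", delete=False
    )
    tmp.close()  # the file persists on disk because delete=False
    return tmp.name

The same trade-off separates the cli.py and utils/core.py rewrites from the inline disables elsewhere in the diff.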