chore(pylint): Bump Pylint to 2.9.6 (#16146)

Co-authored-by: John Bodley <john.bodley@airbnb.com>
John Bodley 2021-08-13 15:32:28 -07:00 committed by GitHub
parent a5dbe6a14d
commit 24b43beff9
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
23 changed files with 58 additions and 48 deletions
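
Most of the per-file changes below silence or satisfy checks that are new or stricter in Pylint 2.8/2.9, in particular `consider-using-with` for file handles opened without a context manager. A minimal sketch of that pattern (illustrative only; `read_text` and its argument are made-up names, not code from this commit):

    # Flagged by consider-using-with: the handle may never be closed on error.
    def read_text_old(path: str) -> str:
        return open(path).read()

    # Preferred form: the context manager closes the handle deterministically.
    def read_text(path: str) -> str:
        with open(path) as file:
            return file.read()

Calls such as `urllib.request.urlopen(...)` that intentionally keep the handle open are instead annotated with `# pylint: disable=consider-using-with` in the diff.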

View File

@@ -90,6 +90,7 @@ disable=
     super-with-arguments,
     too-few-public-methods,
     too-many-locals,
+    duplicate-code,
 
 [REPORTS]

View File

@@ -11,7 +11,7 @@
     # via -r requirements/base.in
 appnope==0.1.2
     # via ipython
-astroid==2.5
+astroid==2.6.6
     # via pylint
 backcall==0.2.0
     # via ipython
@@ -71,7 +71,7 @@ pyhive[hive,presto]==0.6.4
     # via
     #   -r requirements/development.in
     #   -r requirements/testing.in
-pylint==2.6.0
+pylint==2.9.6
     # via -r requirements/testing.in
 pytest==6.2.4
     # via

View File

@@ -221,7 +221,7 @@ def import_directory(directory: str, overwrite: bool, force: bool) -> None:
     help="Create the DB if it doesn't exist",
 )
 def set_database_uri(database_name: str, uri: str, skip_create: bool) -> None:
-    """Updates a database connection URI """
+    """Updates a database connection URI"""
     utils.get_or_create_db(database_name, uri, not skip_create)
@@ -341,7 +341,8 @@ if feature_flags.get("VERSIONED_EXPORT"):
             with ZipFile(path) as bundle:
                 contents = get_contents_from_bundle(bundle)
         else:
-            contents = {path: open(path).read()}
+            with open(path) as file:
+                contents = {path: file.read()}
         try:
             ImportDashboardsCommand(contents, overwrite=True).run()
         except Exception:  # pylint: disable=broad-except
@@ -366,7 +367,8 @@ if feature_flags.get("VERSIONED_EXPORT"):
             with ZipFile(path) as bundle:
                 contents = get_contents_from_bundle(bundle)
         else:
-            contents = {path: open(path).read()}
+            with open(path) as file:
+                contents = {path: file.read()}
         try:
             ImportDatasetsCommand(contents, overwrite=True).run()
         except Exception:  # pylint: disable=broad-except
@@ -491,7 +493,10 @@ else:
             files.extend(path_object.rglob("*.json"))
         if username is not None:
             g.user = security_manager.find_user(username=username)
-        contents = {path.name: open(path).read() for path in files}
+        contents = {}
+        for path_ in files:
+            with open(path_) as file:
+                contents[path_.name] = file.read()
         try:
             ImportDashboardsCommand(contents).run()
         except Exception:  # pylint: disable=broad-except
@@ -539,7 +544,10 @@ else:
         elif path_object.exists() and recursive:
             files.extend(path_object.rglob("*.yaml"))
             files.extend(path_object.rglob("*.yml"))
-        contents = {path.name: open(path).read() for path in files}
+        contents = {}
+        for path_ in files:
+            with open(path_) as file:
+                contents[path_.name] = file.read()
         try:
             ImportDatasetsCommand(contents, sync_columns, sync_metrics).run()
         except Exception:  # pylint: disable=broad-except
@@ -632,7 +640,7 @@ def flower(port: int, address: str) -> None:
     print(Fore.BLUE + "-=" * 40)
     print(Fore.YELLOW + cmd)
     print(Fore.BLUE + "-=" * 40)
-    Popen(cmd, shell=True).wait()
+    Popen(cmd, shell=True).wait()  # pylint: disable=consider-using-with
 
 
 @superset.command()

View File

@@ -20,7 +20,7 @@ All configuration in this file can be overridden by providing a superset_config
 in your PYTHONPATH as there is a ``from superset_config import *``
 at the end of this file.
 """
-import imp
+import imp  # pylint: disable=deprecated-module
 import importlib.util
 import json
 import logging

View File

@@ -31,7 +31,7 @@ if TYPE_CHECKING:
 class ConnectorRegistry:
-    """ Central Registry for all available datasource engines"""
+    """Central Registry for all available datasource engines"""
 
     sources: Dict[str, Type["BaseDatasource"]] = {}
@@ -68,8 +68,7 @@ class ConnectorRegistry:
     @classmethod
     def get_all_datasources(cls, session: Session) -> List["BaseDatasource"]:
         datasources: List["BaseDatasource"] = []
-        for source_type in ConnectorRegistry.sources:
-            source_class = ConnectorRegistry.sources[source_type]
+        for source_class in ConnectorRegistry.sources.values():
             qry = session.query(source_class)
             qry = source_class.default_query(qry)
             datasources.extend(qry.all())

View File

@@ -404,9 +404,7 @@ class SqlMetric(Model, BaseMetric):
         "extra",
         "warning_text",
     ]
-    update_from_object_fields = list(
-        [s for s in export_fields if s not in ("table_id",)]
-    )
+    update_from_object_fields = list(s for s in export_fields if s != "table_id")
     export_parent = "table"
 
     def get_sqla_col(self, label: Optional[str] = None) -> Column:
@@ -1151,7 +1149,7 @@ class SqlaTable(  # pylint: disable=too-many-public-methods,too-many-instance-at
         having_clause_and = []
         for flt in filter:  # type: ignore
-            if not all([flt.get(s) for s in ["col", "op"]]):
+            if not all(flt.get(s) for s in ["col", "op"]):
                 continue
             col = flt["col"]
             val = flt.get("val")

View File

@@ -237,7 +237,7 @@ class DashboardRestApi(BaseSupersetModelRestApi):
     @statsd_metrics
     @event_logger.log_this_with_context(
         action=lambda self, *args, **kwargs: f"{self.__class__.__name__}.get",
-        log_to_statsd=False,
+        log_to_statsd=False,  # pylint: disable=arguments-renamed
     )
     def get(self, id_or_slug: str) -> Response:
         """Gets a dashboard

View File

@@ -135,8 +135,7 @@ def import_dataset(
 def load_data(
     data_uri: str, dataset: SqlaTable, example_database: Database, session: Session
 ) -> None:
-    data = request.urlopen(data_uri)
+    data = request.urlopen(data_uri)  # pylint: disable=consider-using-with
     if data_uri.endswith(".gz"):
         data = gzip.open(data)
     df = pd.read_csv(data, encoding="utf-8")

View File

@@ -38,14 +38,11 @@ def load_flights(only_metadata: bool = False, force: bool = False) -> None:
     airports = pd.read_csv(airports_bytes, encoding="latin-1")
     airports = airports.set_index("IATA_CODE")
 
-    pdf["ds"] = (
+    pdf["ds"] = (  # pylint: disable=unsupported-assignment-operation
         pdf.YEAR.map(str) + "-0" + pdf.MONTH.map(str) + "-0" + pdf.DAY.map(str)
     )
     pdf.ds = pd.to_datetime(pdf.ds)
-    del pdf["YEAR"]
-    del pdf["MONTH"]
-    del pdf["DAY"]
+    pdf.drop(columns=["DAY", "MONTH", "YEAR"])
 
     pdf = pdf.join(airports, on="ORIGIN_AIRPORT", rsuffix="_ORIG")
     pdf = pdf.join(airports, on="DESTINATION_AIRPORT", rsuffix="_DEST")
     pdf.to_sql(

View File

@@ -69,7 +69,9 @@ def get_slice_json(defaults: Dict[Any, Any], **kwargs: Any) -> str:
 def get_example_data(
     filepath: str, is_gzip: bool = True, make_bytes: bool = False
 ) -> BytesIO:
-    content = request.urlopen(f"{BASE_URL}{filepath}?raw=true").read()
+    content = request.urlopen(  # pylint: disable=consider-using-with
+        f"{BASE_URL}{filepath}?raw=true"
+    ).read()
     if is_gzip:
         content = zlib.decompress(content, zlib.MAX_WBITS | 16)
     if make_bytes:

View File

@@ -527,10 +527,10 @@ DEFAULT_PROCESSORS = {"presto": PrestoTemplateProcessor, "hive": HiveTemplatePro
 @memoized
 def get_template_processors() -> Dict[str, Any]:
     processors = current_app.config.get("CUSTOM_TEMPLATE_PROCESSORS", {})
-    for engine in DEFAULT_PROCESSORS:
+    for engine, processor in DEFAULT_PROCESSORS.items():
         # do not overwrite engine-specific CUSTOM_TEMPLATE_PROCESSORS
         if not engine in processors:
-            processors[engine] = DEFAULT_PROCESSORS[engine]
+            processors[engine] = processor
     return processors

View File

@@ -506,7 +506,7 @@ class Database(
         return self.db_engine_spec.get_all_datasource_names(self, "view")
 
     @cache_util.memoized_func(
-        key=lambda self, schema, *args, **kwargs: f"db:{self.id}:schema:{schema}:table_list",  # type: ignore
+        key=lambda self, schema, *args, **kwargs: f"db:{self.id}:schema:{schema}:table_list",
         cache=cache_manager.data_cache,
     )
     def get_all_table_names_in_schema(
@@ -536,9 +536,10 @@ class Database(
             ]
         except Exception as ex:  # pylint: disable=broad-except
             logger.warning(ex)
+            return []
 
     @cache_util.memoized_func(
-        key=lambda self, schema, *args, **kwargs: f"db:{self.id}:schema:{schema}:view_list",  # type: ignore
+        key=lambda self, schema, *args, **kwargs: f"db:{self.id}:schema:{schema}:view_list",
         cache=cache_manager.data_cache,
     )
     def get_all_view_names_in_schema(
@@ -566,6 +567,7 @@ class Database(
             return [utils.DatasourceName(table=view, schema=schema) for view in views]
         except Exception as ex:  # pylint: disable=broad-except
             logger.warning(ex)
+            return []
 
     @cache_util.memoized_func(
         key=lambda self, *args, **kwargs: f"db:{self.id}:schema_list",

View File

@@ -31,17 +31,20 @@ metadata = Model.metadata  # pylint: disable=no-member
 class ScheduleType(str, enum.Enum):
+    # pylint: disable=invalid-name
     slice = "slice"
     dashboard = "dashboard"
     alert = "alert"
 
 
 class EmailDeliveryType(str, enum.Enum):
+    # pylint: disable=invalid-name
     attachment = "Attachment"
     inline = "Inline"
 
 
 class SliceEmailReportFormat(str, enum.Enum):
+    # pylint: disable=invalid-name
     visualization = "Visualization"
     data = "Raw data"

View File

@@ -46,6 +46,7 @@ class TagTypes(enum.Enum):
     can find all their objects by querying for the tag `owner:alice`.
     """
 
+    # pylint: disable=invalid-name
     # explicit tags, added manually by the owner
     custom = 1
@@ -59,6 +60,7 @@ class ObjectTypes(enum.Enum):
     """Object types."""
 
+    # pylint: disable=invalid-name
     query = 1
     chart = 2
     dashboard = 3

View File

@@ -28,8 +28,8 @@ OPERATOR_FUNCTIONS = {">=": ge, ">": gt, "<=": le, "<": lt, "==": eq, "!=": ne}
 class AlertValidatorType(str, enum.Enum):
-    not_null = "not null"
-    operator = "operator"
+    NOT_NULL = "not null"
+    OPERATOR = "operator"
 
     @classmethod
     def valid_type(cls, validator_type: str) -> bool:
@@ -44,7 +44,7 @@ def check_validator(validator_type: str, config: str) -> None:
     config_dict = json.loads(config)
-    if validator_type == AlertValidatorType.operator.value:
+    if validator_type == AlertValidatorType.OPERATOR.value:
         if not (config_dict.get("op") and config_dict.get("threshold") is not None):
             raise SupersetException(
@@ -102,8 +102,8 @@ def get_validator_function(
     """Returns a validation function based on validator_type"""
     alert_validators = {
-        AlertValidatorType.not_null.value: not_null_validator,
-        AlertValidatorType.operator.value: operator_validator,
+        AlertValidatorType.NOT_NULL.value: not_null_validator,
+        AlertValidatorType.OPERATOR.value: operator_validator,
     }
     if alert_validators.get(validator_type.lower()):
         return alert_validators[validator_type.lower()]

View File

@@ -288,7 +288,7 @@ def cache_warmup(
     for url in strategy.get_urls():
         try:
             logger.info("Fetching %s", url)
-            request.urlopen(url)
+            request.urlopen(url)  # pylint: disable=consider-using-with
             results["success"].append(url)
         except URLError:
             logger.exception("Error warming up cache!")

View File

@@ -827,7 +827,7 @@ def get_scheduler_action(report_type: str) -> Optional[Callable[..., Any]]:
 @celery_app.task(name="email_reports.schedule_hourly")
 def schedule_hourly() -> None:
-    """ Celery beat job meant to be invoked hourly """
+    """Celery beat job meant to be invoked hourly"""
     if not config["ENABLE_SCHEDULED_EMAIL_REPORTS"]:
         logger.info("Scheduled email reports not enabled in config")
         return
@@ -845,7 +845,7 @@ def schedule_hourly() -> None:
 @celery_app.task(name="alerts.schedule_check")
 def schedule_alerts() -> None:
-    """ Celery beat job meant to be invoked every minute to check alerts """
+    """Celery beat job meant to be invoked every minute to check alerts"""
     resolution = 0
     now = datetime.utcnow()
     start_at = now - timedelta(

View File

@@ -1321,8 +1321,8 @@ def get_first_metric_name(metrics: Sequence[Metric]) -> Optional[str]:
 def ensure_path_exists(path: str) -> None:
     try:
         os.makedirs(path)
-    except OSError as exc:
-        if not (os.path.isdir(path) and exc.errno == errno.EEXIST):
+    except OSError as ex:
+        if not (os.path.isdir(path) and ex.errno == errno.EEXIST):
             raise
@@ -1440,9 +1440,8 @@ def create_ssl_cert_file(certificate: str) -> str:
     if not os.path.exists(path):
         # Validate certificate prior to persisting to temporary directory
         parse_ssl_cert(certificate)
-        cert_file = open(path, "w")
-        cert_file.write(certificate)
-        cert_file.close()
+        with open(path, "w") as cert_file:
+            cert_file.write(certificate)
     return path

View File

@@ -39,7 +39,7 @@ class _memoized:
     def __call__(self, *args: Any, **kwargs: Any) -> Any:
         key = [args, frozenset(kwargs.items())]
         if self.is_method:
-            key.append(tuple([getattr(args[0], v, None) for v in self.watch]))
+            key.append(tuple(getattr(args[0], v, None) for v in self.watch))
         key = tuple(key)  # type: ignore
         try:
             if key in self.cache:

View File

@@ -593,7 +593,7 @@ def geohash_encode(
         )
         return _append_columns(df, encode_df, {"geohash": geohash})
     except ValueError:
-        QueryObjectValidationError(_("Invalid longitude/latitude"))
+        raise QueryObjectValidationError(_("Invalid longitude/latitude"))
 
 
 def geodetic_parse(

View File

@@ -291,7 +291,7 @@ class ExcelToDatabaseView(SimpleFormView):
             flash(message, "danger")
             return redirect("/exceltodatabaseview/form")
 
-        uploaded_tmp_file_path = tempfile.NamedTemporaryFile(
+        uploaded_tmp_file_path = tempfile.NamedTemporaryFile(  # pylint: disable=consider-using-with
             dir=app.config["UPLOAD_FOLDER"],
             suffix=os.path.splitext(form.excel_file.data.filename)[1].lower(),
             delete=False,

View File

@@ -352,7 +352,7 @@ def get_dashboard_extra_filters(
         dashboard is None
         or not dashboard.json_metadata
         or not dashboard.slices
-        or not any([slc for slc in dashboard.slices if slc.id == slice_id])
+        or not any(slc for slc in dashboard.slices if slc.id == slice_id)
     ):
         return []
@@ -455,7 +455,7 @@ def is_slice_in_container(
 def is_owner(obj: Union[Dashboard, Slice], user: User) -> bool:
-    """ Check if user is owner of the slice """
+    """Check if user is owner of the slice"""
     return obj and user in obj.owners

View File

@@ -76,7 +76,7 @@ def setup_database():
 def create_alert(
     db_session: Session,
     sql: str,
-    validator_type: AlertValidatorType = AlertValidatorType.operator,
+    validator_type: AlertValidatorType = AlertValidatorType.OPERATOR,
     validator_config: str = "",
 ) -> Alert:
     db_session.commit()