mirror of https://github.com/apache/superset.git
chore(pylint): Bump Pylint to 2.9.6 (#16146)
Co-authored-by: John Bodley <john.bodley@airbnb.com>
commit 24b43beff9
parent a5dbe6a14d
@@ -90,6 +90,7 @@ disable=
         super-with-arguments,
         too-few-public-methods,
         too-many-locals,
+        duplicate-code,

 [REPORTS]

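Note: bumping Pylint from 2.6 to 2.9 pulls in the checkers added in releases 2.7 through 2.9, among them use-a-generator (2.7), consider-using-with (2.8), consider-using-dict-items (2.9), and arguments-renamed (2.9). The commit fixes most of the new warnings in code and reaches for suppression only where a fix is impractical; the one check that appears to be disabled globally here is duplicate-code. The inline suppressions used throughout the rest of the diff take the two usual forms (illustrative sketch, not from the commit):

    import enum

    # Line-scoped suppression: applies to this statement only.
    fh = open("example.txt")  # pylint: disable=consider-using-with

    class LegacyEnum(enum.Enum):
        # Scope-level suppression: applies to the rest of the class body.
        # pylint: disable=invalid-name
        first = 1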
@@ -11,7 +11,7 @@
     # via -r requirements/base.in
 appnope==0.1.2
     # via ipython
-astroid==2.5
+astroid==2.6.6
     # via pylint
 backcall==0.2.0
     # via ipython
@@ -71,7 +71,7 @@ pyhive[hive,presto]==0.6.4
     # via
     #   -r requirements/development.in
     #   -r requirements/testing.in
-pylint==2.6.0
+pylint==2.9.6
     # via -r requirements/testing.in
 pytest==6.2.4
     # via
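The `# via` annotations mark these as pip-compile-generated lockfiles: the version bump is made in the corresponding `.in` source file and the pins are regenerated rather than edited by hand, which is why astroid, the AST library Pylint is built on, moves in lockstep from 2.5 to 2.6.6.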
@@ -341,7 +341,8 @@ if feature_flags.get("VERSIONED_EXPORT"):
             with ZipFile(path) as bundle:
                 contents = get_contents_from_bundle(bundle)
         else:
-            contents = {path: open(path).read()}
+            with open(path) as file:
+                contents = {path: file.read()}
         try:
             ImportDashboardsCommand(contents, overwrite=True).run()
         except Exception:  # pylint: disable=broad-except
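This is the canonical fix for consider-using-with (R1732, new in Pylint 2.8): a bare `open()` leaves closing the file to the garbage collector. A minimal sketch of the before/after pattern, using the names from the hunk:

    # Flagged: the file object is never explicitly closed.
    contents = {path: open(path).read()}

    # Fixed: the context manager closes the file even if read() raises.
    with open(path) as file:
        contents = {path: file.read()}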
@@ -366,7 +367,8 @@ if feature_flags.get("VERSIONED_EXPORT"):
             with ZipFile(path) as bundle:
                 contents = get_contents_from_bundle(bundle)
         else:
-            contents = {path: open(path).read()}
+            with open(path) as file:
+                contents = {path: file.read()}
         try:
             ImportDatasetsCommand(contents, overwrite=True).run()
         except Exception:  # pylint: disable=broad-except
@@ -491,7 +493,10 @@ else:
         files.extend(path_object.rglob("*.json"))
     if username is not None:
         g.user = security_manager.find_user(username=username)
-    contents = {path.name: open(path).read() for path in files}
+    contents = {}
+    for path_ in files:
+        with open(path_) as file:
+            contents[path_.name] = file.read()
     try:
         ImportDashboardsCommand(contents).run()
     except Exception:  # pylint: disable=broad-except
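The dict comprehension has to become a statement-level loop because a `with` block cannot appear inside a comprehension; each file now gets its own context manager. The loop variable is spelled `path_` rather than `path`, presumably to avoid shadowing a `path` name already bound in the enclosing scope.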
@@ -539,7 +544,10 @@ else:
     elif path_object.exists() and recursive:
         files.extend(path_object.rglob("*.yaml"))
         files.extend(path_object.rglob("*.yml"))
-    contents = {path.name: open(path).read() for path in files}
+    contents = {}
+    for path_ in files:
+        with open(path_) as file:
+            contents[path_.name] = file.read()
     try:
         ImportDatasetsCommand(contents, sync_columns, sync_metrics).run()
     except Exception:  # pylint: disable=broad-except
@@ -632,7 +640,7 @@ def flower(port: int, address: str) -> None:
     print(Fore.BLUE + "-=" * 40)
     print(Fore.YELLOW + cmd)
     print(Fore.BLUE + "-=" * 40)
-    Popen(cmd, shell=True).wait()
+    Popen(cmd, shell=True).wait()  # pylint: disable=consider-using-with


 @superset.command()
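Here the same warning is suppressed instead of fixed: the subprocess is launched fire-and-wait, and `.wait()` already reaps it. Since Python 3.2, `Popen` is itself a context manager, so an equivalent rewrite (not the one the commit chose) would be:

    from subprocess import Popen

    with Popen(cmd, shell=True) as proc:  # closes pipes and waits on exit
        proc.wait()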
@@ -20,7 +20,7 @@ All configuration in this file can be overridden by providing a superset_config
 in your PYTHONPATH as there is a ``from superset_config import *``
 at the end of this file.
 """
-import imp
+import imp  # pylint: disable=deprecated-module
 import importlib.util
 import json
 import logging
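`imp` has been deprecated since Python 3.4 in favor of `importlib`, which is what triggers Pylint's deprecated-module (W0402); the commit silences the warning rather than migrating. For reference, the importlib way to load a module from an explicit file path (hypothetical names, a sketch rather than the commit's code):

    import importlib.util

    def load_module(module_name: str, file_path: str):
        spec = importlib.util.spec_from_file_location(module_name, file_path)
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)  # executes the module body
        return module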
@@ -68,8 +68,7 @@ class ConnectorRegistry:
     @classmethod
     def get_all_datasources(cls, session: Session) -> List["BaseDatasource"]:
         datasources: List["BaseDatasource"] = []
-        for source_type in ConnectorRegistry.sources:
-            source_class = ConnectorRegistry.sources[source_type]
+        for source_class in ConnectorRegistry.sources.values():
             qry = session.query(source_class)
             qry = source_class.default_query(qry)
             datasources.extend(qry.all())
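Iterating a dict and then indexing it with the loop key is what Pylint 2.9's consider-using-dict-items (C0206) targets; when the key itself is unused, as here, iterating `.values()` is the tighter fix. A sketch with an illustrative stand-in registry:

    registry = {"table": object, "druid": object}  # illustrative

    for source_type in registry:             # flagged: lookup inside the loop
        source_class = registry[source_type]

    for source_class in registry.values():   # key never needed, iterate values
        ...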
@@ -404,9 +404,7 @@ class SqlMetric(Model, BaseMetric):
         "extra",
         "warning_text",
     ]
-    update_from_object_fields = list(
-        [s for s in export_fields if s not in ("table_id",)]
-    )
+    update_from_object_fields = list(s for s in export_fields if s != "table_id")
     export_parent = "table"

     def get_sqla_col(self, label: Optional[str] = None) -> Column:
@@ -1151,7 +1149,7 @@ class SqlaTable(  # pylint: disable=too-many-public-methods,too-many-instance-at
         having_clause_and = []

         for flt in filter:  # type: ignore
-            if not all([flt.get(s) for s in ["col", "op"]]):
+            if not all(flt.get(s) for s in ["col", "op"]):
                 continue
             col = flt["col"]
             val = flt.get("val")
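`all()` over a list comprehension builds the entire list before testing anything; Pylint 2.7's use-a-generator (R1729) points out that a generator expression short-circuits instead:

    values = [1, 2, 0, 3]

    all([v > 0 for v in values])   # flagged: full list built first
    all(v > 0 for v in values)     # stops at the first falsy element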
@@ -237,7 +237,7 @@ class DashboardRestApi(BaseSupersetModelRestApi):
     @statsd_metrics
     @event_logger.log_this_with_context(
         action=lambda self, *args, **kwargs: f"{self.__class__.__name__}.get",
-        log_to_statsd=False,
+        log_to_statsd=False,  # pylint: disable=arguments-renamed
     )
     def get(self, id_or_slug: str) -> Response:
         """Gets a dashboard
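arguments-renamed (W0237) is new in Pylint 2.9: it fires when an override renames a parameter of the parent method, which breaks any caller passing that argument by keyword. Here `get(self, id_or_slug)` presumably renames the parameter declared by the base REST API class, and the disable comment inside the decorator call applies to the decorated function. The shape of the warning (illustrative class names):

    class Base:
        def get(self, pk): ...

    class Child(Base):
        def get(self, id_or_slug): ...  # W0237: 'pk' renamed to 'id_or_slug'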
@@ -135,8 +135,7 @@ def import_dataset(
     def load_data(
         data_uri: str, dataset: SqlaTable, example_database: Database, session: Session
     ) -> None:
-
-        data = request.urlopen(data_uri)
+        data = request.urlopen(data_uri)  # pylint: disable=consider-using-with
         if data_uri.endswith(".gz"):
             data = gzip.open(data)
         df = pd.read_csv(data, encoding="utf-8")
@@ -38,14 +38,11 @@ def load_flights(only_metadata: bool = False, force: bool = False) -> None:
     airports = pd.read_csv(airports_bytes, encoding="latin-1")
     airports = airports.set_index("IATA_CODE")

-    pdf["ds"] = (
+    pdf["ds"] = (  # pylint: disable=unsupported-assignment-operation
         pdf.YEAR.map(str) + "-0" + pdf.MONTH.map(str) + "-0" + pdf.DAY.map(str)
     )
     pdf.ds = pd.to_datetime(pdf.ds)
-    del pdf["YEAR"]
-    del pdf["MONTH"]
-    del pdf["DAY"]
-
+    pdf.drop(columns=["DAY", "MONTH", "YEAR"])
     pdf = pdf.join(airports, on="ORIGIN_AIRPORT", rsuffix="_ORIG")
     pdf = pdf.join(airports, on="DESTINATION_AIRPORT", rsuffix="_DEST")
     pdf.to_sql(
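One caveat in this hunk: `DataFrame.drop` returns a new frame rather than mutating in place, so as written the result is discarded and the three columns survive, unlike the `del` statements it replaces. A faithful replacement would keep the assignment:

    pdf = pdf.drop(columns=["DAY", "MONTH", "YEAR"])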
@@ -69,7 +69,9 @@ def get_slice_json(defaults: Dict[Any, Any], **kwargs: Any) -> str:
 def get_example_data(
     filepath: str, is_gzip: bool = True, make_bytes: bool = False
 ) -> BytesIO:
-    content = request.urlopen(f"{BASE_URL}{filepath}?raw=true").read()
+    content = request.urlopen(  # pylint: disable=consider-using-with
+        f"{BASE_URL}{filepath}?raw=true"
+    ).read()
     if is_gzip:
         content = zlib.decompress(content, zlib.MAX_WBITS | 16)
     if make_bytes:
@@ -527,10 +527,10 @@ DEFAULT_PROCESSORS = {"presto": PrestoTemplateProcessor, "hive": HiveTemplatePro
 @memoized
 def get_template_processors() -> Dict[str, Any]:
     processors = current_app.config.get("CUSTOM_TEMPLATE_PROCESSORS", {})
-    for engine in DEFAULT_PROCESSORS:
+    for engine, processor in DEFAULT_PROCESSORS.items():
         # do not overwrite engine-specific CUSTOM_TEMPLATE_PROCESSORS
         if not engine in processors:
-            processors[engine] = DEFAULT_PROCESSORS[engine]
+            processors[engine] = processor

     return processors

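Same consider-using-dict-items fix as above, but here the value is used, so `.items()` unpacks key and value in a single lookup. Incidentally, the surviving `if not engine in processors` would read better in its idiomatic form:

    if engine not in processors:
        processors[engine] = processor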
@@ -506,7 +506,7 @@ class Database(
         return self.db_engine_spec.get_all_datasource_names(self, "view")

     @cache_util.memoized_func(
-        key=lambda self, schema, *args, **kwargs: f"db:{self.id}:schema:{schema}:table_list",  # type: ignore
+        key=lambda self, schema, *args, **kwargs: f"db:{self.id}:schema:{schema}:table_list",
         cache=cache_manager.data_cache,
     )
     def get_all_table_names_in_schema(
@@ -536,9 +536,10 @@ class Database(
             ]
         except Exception as ex:  # pylint: disable=broad-except
             logger.warning(ex)
             return []

     @cache_util.memoized_func(
-        key=lambda self, schema, *args, **kwargs: f"db:{self.id}:schema:{schema}:view_list",  # type: ignore
+        key=lambda self, schema, *args, **kwargs: f"db:{self.id}:schema:{schema}:view_list",
         cache=cache_manager.data_cache,
     )
     def get_all_view_names_in_schema(
@@ -566,6 +567,7 @@ class Database(
             return [utils.DatasourceName(table=view, schema=schema) for view in views]
         except Exception as ex:  # pylint: disable=broad-except
             logger.warning(ex)
             return []

     @cache_util.memoized_func(
         key=lambda self, *args, **kwargs: f"db:{self.id}:schema_list",
@@ -31,17 +31,20 @@ metadata = Model.metadata  # pylint: disable=no-member


 class ScheduleType(str, enum.Enum):
+    # pylint: disable=invalid-name
     slice = "slice"
     dashboard = "dashboard"
     alert = "alert"


 class EmailDeliveryType(str, enum.Enum):
+    # pylint: disable=invalid-name
     attachment = "Attachment"
     inline = "Inline"


 class SliceEmailReportFormat(str, enum.Enum):
+    # pylint: disable=invalid-name
     visualization = "Visualization"
     data = "Raw data"

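Pylint's invalid-name (C0103) expects enum members, being class attributes treated as constants, to be UPPER_CASE. These members stay lowercase, presumably because the names themselves, not just the string values, are already relied on elsewhere (persisted schedules or public API surface), so the check is disabled per class instead of renaming.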
@@ -46,6 +46,7 @@ class TagTypes(enum.Enum):
     can find all their objects by querying for the tag `owner:alice`.
     """

+    # pylint: disable=invalid-name
     # explicit tags, added manually by the owner
     custom = 1

@@ -59,6 +60,7 @@ class ObjectTypes(enum.Enum):

     """Object types."""

+    # pylint: disable=invalid-name
     query = 1
     chart = 2
     dashboard = 3
@@ -28,8 +28,8 @@ OPERATOR_FUNCTIONS = {">=": ge, ">": gt, "<=": le, "<": lt, "==": eq, "!=": ne}


 class AlertValidatorType(str, enum.Enum):
-    not_null = "not null"
-    operator = "operator"
+    NOT_NULL = "not null"
+    OPERATOR = "operator"

     @classmethod
     def valid_type(cls, validator_type: str) -> bool:
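This enum takes the opposite approach and renames its members to satisfy invalid-name. That is safe for anything keyed on the values, which are unchanged; only attribute references need updating, which the following hunks do:

    AlertValidatorType.OPERATOR.value == "operator"                  # True, value unchanged
    AlertValidatorType("operator") is AlertValidatorType.OPERATOR    # lookup by value still works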
@@ -44,7 +44,7 @@ def check_validator(validator_type: str, config: str) -> None:

     config_dict = json.loads(config)

-    if validator_type == AlertValidatorType.operator.value:
+    if validator_type == AlertValidatorType.OPERATOR.value:

         if not (config_dict.get("op") and config_dict.get("threshold") is not None):
             raise SupersetException(
@@ -102,8 +102,8 @@ def get_validator_function(
     """Returns a validation function based on validator_type"""

     alert_validators = {
-        AlertValidatorType.not_null.value: not_null_validator,
-        AlertValidatorType.operator.value: operator_validator,
+        AlertValidatorType.NOT_NULL.value: not_null_validator,
+        AlertValidatorType.OPERATOR.value: operator_validator,
     }
     if alert_validators.get(validator_type.lower()):
         return alert_validators[validator_type.lower()]
@@ -288,7 +288,7 @@ def cache_warmup(
     for url in strategy.get_urls():
         try:
             logger.info("Fetching %s", url)
-            request.urlopen(url)
+            request.urlopen(url)  # pylint: disable=consider-using-with
             results["success"].append(url)
         except URLError:
             logger.exception("Error warming up cache!")
@@ -1321,8 +1321,8 @@ def get_first_metric_name(metrics: Sequence[Metric]) -> Optional[str]:
 def ensure_path_exists(path: str) -> None:
     try:
         os.makedirs(path)
-    except OSError as exc:
-        if not (os.path.isdir(path) and exc.errno == errno.EEXIST):
+    except OSError as ex:
+        if not (os.path.isdir(path) and ex.errno == errno.EEXIST):
             raise

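A rename for consistency with the `ex` alias used elsewhere in the file. Worth noting that since Python 3.2 the whole EEXIST dance can be replaced outright (a possible simplification, not part of this commit):

    import os

    os.makedirs(path, exist_ok=True)  # no error if the directory already exists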
@@ -1440,9 +1440,8 @@ def create_ssl_cert_file(certificate: str) -> str:
     if not os.path.exists(path):
         # Validate certificate prior to persisting to temporary directory
         parse_ssl_cert(certificate)
-        cert_file = open(path, "w")
-        cert_file.write(certificate)
-        cert_file.close()
+        with open(path, "w") as cert_file:
+            cert_file.write(certificate)
     return path

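Unlike the suppressed cases above, this consider-using-with hit is fixed outright. The context manager is shorthand for the try/finally that the original code lacked; with the manual open/close, an exception in `write()` would have leaked the handle:

    # What the with-statement guarantees, spelled out:
    cert_file = open(path, "w")
    try:
        cert_file.write(certificate)
    finally:
        cert_file.close()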
@@ -39,7 +39,7 @@ class _memoized:
     def __call__(self, *args: Any, **kwargs: Any) -> Any:
         key = [args, frozenset(kwargs.items())]
         if self.is_method:
-            key.append(tuple([getattr(args[0], v, None) for v in self.watch]))
+            key.append(tuple(getattr(args[0], v, None) for v in self.watch))
         key = tuple(key)  # type: ignore
         try:
             if key in self.cache:
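consider-using-generator (R1728, also from Pylint 2.7) is the same idea as use-a-generator but for constructors: `tuple()` can consume a generator directly instead of a temporary list. Sketch with illustrative names:

    obj = object()
    watch = ("name", "uid")  # illustrative attribute names

    tuple([getattr(obj, v, None) for v in watch])   # flagged: builds a temporary list
    tuple(getattr(obj, v, None) for v in watch)     # generator consumed directly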
@@ -593,7 +593,7 @@ def geohash_encode(
         )
         return _append_columns(df, encode_df, {"geohash": geohash})
     except ValueError:
-        QueryObjectValidationError(_("Invalid longitude/latitude"))
+        raise QueryObjectValidationError(_("Invalid longitude/latitude"))


 def geodetic_parse(
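This one is a genuine bug fix surfaced by the lint pass: the original line constructed the exception object and immediately discarded it, so the `except ValueError` block swallowed the error silently. Adding `raise` makes the validation failure propagate to the caller as intended.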
@@ -291,7 +291,7 @@ class ExcelToDatabaseView(SimpleFormView):
             flash(message, "danger")
             return redirect("/exceltodatabaseview/form")

-        uploaded_tmp_file_path = tempfile.NamedTemporaryFile(
+        uploaded_tmp_file_path = tempfile.NamedTemporaryFile(  # pylint: disable=consider-using-with
             dir=app.config["UPLOAD_FOLDER"],
             suffix=os.path.splitext(form.excel_file.data.filename)[1].lower(),
             delete=False,
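Suppressed, presumably because the temporary file is created with `delete=False` precisely so that it outlives this block: it is the path, not the handle, that gets used downstream, so the context-manager idiom buys little here. The underlying pattern (illustrative sketch):

    import tempfile

    tmp = tempfile.NamedTemporaryFile(delete=False)  # file persists after the handle closes
    upload_path = tmp.name                           # path consumed by a later step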
@@ -352,7 +352,7 @@ def get_dashboard_extra_filters(
         dashboard is None
         or not dashboard.json_metadata
         or not dashboard.slices
-        or not any([slc for slc in dashboard.slices if slc.id == slice_id])
+        or not any(slc for slc in dashboard.slices if slc.id == slice_id)
     ):
         return []

@@ -76,7 +76,7 @@ def setup_database():
 def create_alert(
     db_session: Session,
     sql: str,
-    validator_type: AlertValidatorType = AlertValidatorType.operator,
+    validator_type: AlertValidatorType = AlertValidatorType.OPERATOR,
     validator_config: str = "",
 ) -> Alert:
     db_session.commit()