chore: Upgrade pylint to 2.5.3 and fix most new rules (#10101)

* Bump pylint version to 2.5.3

* Add a global disable for the most common new pylint error

* Fix a bunch of files containing very few errors

* More pylint tweakage, low-hanging fruit

* More easy stuff...

* Fix more erroring files

* Fix the last couple of errors, clean pylint!

* Black

* Fix mypy issue in connectors/druid/models.py
Will Barrett 2020-06-18 14:03:42 -07:00 committed by GitHub
parent 02fee35314
commit 8e23d4f369
40 changed files with 166 additions and 135 deletions

View File

@ -81,7 +81,7 @@ confidence=
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use "--disable=all --enable=classes
# --disable=W"
disable=standarderror-builtin,long-builtin,dict-view-method,intern-builtin,suppressed-message,no-absolute-import,unpacking-in-except,apply-builtin,delslice-method,indexing-exception,old-raise-syntax,print-statement,cmp-builtin,reduce-builtin,useless-suppression,coerce-method,input-builtin,cmp-method,raw_input-builtin,nonzero-method,backtick,basestring-builtin,setslice-method,reload-builtin,oct-method,map-builtin-not-iterating,execfile-builtin,old-octal-literal,zip-builtin-not-iterating,buffer-builtin,getslice-method,metaclass-assignment,xrange-builtin,long-suffix,round-builtin,range-builtin-not-iterating,next-method-called,dict-iter-method,parameter-unpacking,unicode-builtin,unichr-builtin,import-star-module-level,raising-string,filter-builtin-not-iterating,old-ne-operator,using-cmp-argument,coerce-builtin,file-builtin,old-division,hex-method,invalid-unary-operand-type,missing-docstring,too-many-lines,duplicate-code,bad-continuation,ungrouped-imports
disable=standarderror-builtin,long-builtin,dict-view-method,intern-builtin,suppressed-message,no-absolute-import,unpacking-in-except,apply-builtin,delslice-method,indexing-exception,old-raise-syntax,print-statement,cmp-builtin,reduce-builtin,useless-suppression,coerce-method,input-builtin,cmp-method,raw_input-builtin,nonzero-method,backtick,basestring-builtin,setslice-method,reload-builtin,oct-method,map-builtin-not-iterating,execfile-builtin,old-octal-literal,zip-builtin-not-iterating,buffer-builtin,getslice-method,metaclass-assignment,xrange-builtin,long-suffix,round-builtin,range-builtin-not-iterating,next-method-called,dict-iter-method,parameter-unpacking,unicode-builtin,unichr-builtin,import-star-module-level,raising-string,filter-builtin-not-iterating,old-ne-operator,using-cmp-argument,coerce-builtin,file-builtin,old-division,hex-method,invalid-unary-operand-type,missing-docstring,too-many-lines,duplicate-code,bad-continuation,ungrouped-imports,import-outside-toplevel
[REPORTS]
@ -254,7 +254,7 @@ notes=FIXME,XXX
[SIMILARITIES]
# Minimum lines number of a similarity.
min-similarity-lines=4
min-similarity-lines=5
# Ignore comments when computing similarities.
ignore-comments=yes
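The only new checker silenced globally here is import-outside-toplevel (C0415), introduced in the pylint 2.x series; Superset defers many imports into function bodies to break circular dependencies, so one repo-wide disable was cheaper than annotating every call site. The duplicate-code threshold (min-similarity-lines) is also relaxed from 4 to 5. Below is a minimal sketch, not code from this commit, of the deferred-import pattern the new check flags:

# Hypothetical example: pylint 2.x reports C0415 (import-outside-toplevel)
# for any import placed inside a function or method body.
def compute_total(values):
    # Deferred import, e.g. to avoid a circular dependency at module load time;
    # without the global disable above, pylint would flag this line.
    import math
    return math.fsum(values)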

View File

@ -28,7 +28,7 @@ psycopg2-binary==2.8.5
pycodestyle==2.5.0
pydruid==0.6.1
pyhive==0.6.2
pylint==1.9.2
pylint==2.5.3
redis==3.5.1
requests==2.23.0
statsd==3.3.0

View File

@ -95,7 +95,6 @@ class SupersetAppInitializer:
"""
Called after any other init tasks
"""
pass
def configure_celery(self) -> None:
celery_app.config_from_object(self.config["CELERY_CONFIG"])
@ -593,7 +592,7 @@ class SupersetAppInitializer:
def register_blueprints(self) -> None:
for bp in self.config["BLUEPRINTS"]:
try:
logger.info(f"Registering blueprint: '{bp.name}'")
logger.info("Registering blueprint: %s", bp.name)
self.flask_app.register_blueprint(bp)
except Exception: # pylint: disable=broad-except
logger.exception("blueprint registration failed")
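This is the first of many conversions from f-string logging to lazy %-style arguments; pylint 2.x reports the former as logging-fstring-interpolation (W1203) because the message is formatted even when the log level filters the record out. A short sketch of both styles, assuming a module-level logger:

import logging

logger = logging.getLogger(__name__)

def register_blueprint(name):
    # Flagged by W1203: the f-string is built even if INFO logging is disabled.
    # logger.info(f"Registering blueprint: {name}")

    # Preferred lazy form: formatting only happens if the record is emitted.
    logger.info("Registering blueprint: %s", name)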

View File

@ -219,7 +219,9 @@ class ChartRestApi(BaseSupersetModelRestApi):
except ChartInvalidError as ex:
return self.response_422(message=ex.normalized_messages())
except ChartCreateFailedError as ex:
logger.error(f"Error creating model {self.__class__.__name__}: {ex}")
logger.error(
"Error creating model %s: %s", self.__class__.__name__, str(ex)
)
return self.response_422(message=str(ex))
@expose("/<pk>", methods=["PUT"])
@ -287,7 +289,9 @@ class ChartRestApi(BaseSupersetModelRestApi):
except ChartInvalidError as ex:
return self.response_422(message=ex.normalized_messages())
except ChartUpdateFailedError as ex:
logger.error(f"Error updating model {self.__class__.__name__}: {ex}")
logger.error(
"Error updating model %s: %s", self.__class__.__name__, str(ex)
)
return self.response_422(message=str(ex))
@expose("/<pk>", methods=["DELETE"])
@ -334,7 +338,9 @@ class ChartRestApi(BaseSupersetModelRestApi):
except ChartForbiddenError:
return self.response_403()
except ChartDeleteFailedError as ex:
logger.error(f"Error deleting model {self.__class__.__name__}: {ex}")
logger.error(
"Error deleting model %s: %s", self.__class__.__name__, str(ex)
)
return self.response_422(message=str(ex))
@expose("/", methods=["DELETE"])
@ -386,9 +392,7 @@ class ChartRestApi(BaseSupersetModelRestApi):
return self.response(
200,
message=ngettext(
f"Deleted %(num)d chart",
f"Deleted %(num)d charts",
num=len(item_ids),
"Deleted %(num)d chart", "Deleted %(num)d charts", num=len(item_ids)
),
)
except ChartNotFoundError:
@ -464,13 +468,15 @@ class ChartRestApi(BaseSupersetModelRestApi):
headers=generate_download_headers("csv"),
mimetype="application/csv",
)
elif result_format == ChartDataResultFormat.JSON:
if result_format == ChartDataResultFormat.JSON:
response_data = simplejson.dumps(
{"result": payload}, default=json_int_dttm_ser, ignore_nan=True
)
resp = make_response(response_data, 200)
resp.headers["Content-Type"] = "application/json; charset=utf-8"
return resp
raise self.response_400(message=f"Unsupported result_format: {result_format}")
@expose("/<pk>/thumbnail/<digest>/", methods=["GET"])

View File

@ -29,7 +29,6 @@ class BaseCommand(ABC):
Run executes the command. Can raise command exceptions
:raises: CommandException
"""
pass
@abstractmethod
def validate(self) -> None:
@ -38,4 +37,3 @@ class BaseCommand(ABC):
Will raise exception if validation fails
:raises: CommandException
"""
pass
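The deleted pass statements fall under unnecessary-pass (W0107): a docstring already gives the method a body, so the trailing pass is redundant even in abstract methods. A minimal sketch, not taken from this commit:

from abc import ABC, abstractmethod

class ExampleCommand(ABC):
    @abstractmethod
    def run(self) -> None:
        """Execute the command."""
        # No `pass` needed: the docstring alone is a valid body.

    @abstractmethod
    def validate(self) -> None:
        """Validate inputs, raising on failure."""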

View File

@ -141,8 +141,8 @@ class QueryObject:
if is_sip_38 and groupby:
self.columns += groupby
logger.warning(
f"The field `groupby` is deprecated. Viz plugins should "
f"pass all selectables via the `columns` field"
"The field `groupby` is deprecated. Viz plugins should "
"pass all selectables via the `columns` field"
)
self.orderby = orderby or []
@ -151,15 +151,18 @@ class QueryObject:
for field in DEPRECATED_FIELDS:
if field.old_name in kwargs:
logger.warning(
f"The field `{field.old_name}` is deprecated, please use "
f"`{field.new_name}` instead."
"The field `%s` is deprecated, please use `%s` instead.",
field.old_name,
field.new_name,
)
value = kwargs[field.old_name]
if value:
if hasattr(self, field.new_name):
logger.warning(
f"The field `{field.new_name}` is already populated, "
f"replacing value with contents from `{field.old_name}`."
"The field `%s` is already populated, "
"replacing value with contents from `%s`.",
field.new_name,
field.old_name,
)
setattr(self, field.new_name, value)
@ -167,16 +170,20 @@ class QueryObject:
for field in DEPRECATED_EXTRAS_FIELDS:
if field.old_name in kwargs:
logger.warning(
f"The field `{field.old_name}` is deprecated and should be "
f"passed to `extras` via the `{field.new_name}` property."
"The field `%s` is deprecated and should "
"be passed to `extras` via the `%s` property.",
field.old_name,
field.new_name,
)
value = kwargs[field.old_name]
if value:
if hasattr(self.extras, field.new_name):
logger.warning(
f"The field `{field.new_name}` is already populated in "
f"`extras`, replacing value with contents "
f"from `{field.old_name}`."
"The field `%s` is already populated in "
"`extras`, replacing value with contents "
"from `%s`.",
field.new_name,
field.old_name,
)
self.extras[field.new_name] = value

View File

@ -898,7 +898,7 @@ if CONFIG_PATH_ENV_VAR in os.environ:
print(f"Loaded your LOCAL configuration at [{cfg_path}]")
except Exception:
logger.exception(
f"Failed to import config for {CONFIG_PATH_ENV_VAR}={cfg_path}"
"Failed to import config for %s=%s", CONFIG_PATH_ENV_VAR, cfg_path
)
raise
elif importlib.util.find_spec("superset_config"):

View File

@ -348,7 +348,7 @@ class BaseDatasource(
value = utils.cast_to_num(value)
if value == NULL_STRING:
return None
elif value == "<empty string>":
if value == "<empty string>":
return ""
return value
@ -516,7 +516,7 @@ class BaseColumn(AuditMixinNullable, ImportMixin):
export_fields: List[Any] = []
def __repr__(self) -> str:
return self.column_name
return str(self.column_name)
num_types = (
"DOUBLE",

View File

@ -14,8 +14,7 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=C,R,W
# pylint: disable=invalid-unary-operand-type
# pylint: skip-file
import json
import logging
import re
@ -81,12 +80,7 @@ except ImportError:
pass
try:
from superset.utils.core import (
DimSelector,
DTTM_ALIAS,
FilterOperator,
flasher,
)
from superset.utils.core import DimSelector, DTTM_ALIAS, FilterOperator, flasher
except ImportError:
pass
@ -845,7 +839,8 @@ class DruidDatasource(Model, BaseDatasource):
else:
granularity["type"] = "duration"
granularity["duration"] = (
utils.parse_human_timedelta(period_name).total_seconds() * 1000 # type: ignore
utils.parse_human_timedelta(period_name).total_seconds() # type: ignore
* 1000
)
return granularity
@ -950,7 +945,7 @@ class DruidDatasource(Model, BaseDatasource):
@staticmethod
def metrics_and_post_aggs(
metrics: List[Metric], metrics_dict: Dict[str, DruidMetric],
metrics: List[Metric], metrics_dict: Dict[str, DruidMetric]
) -> Tuple["OrderedDict[str, Any]", "OrderedDict[str, Any]"]:
# Separate metrics into those that are aggregations
# and those that are post aggregations

View File

@ -22,8 +22,6 @@ class DAOException(SupersetException):
Base DAO exception class
"""
pass
class DAOCreateFailedError(DAOException):
"""

View File

@ -206,7 +206,9 @@ class DashboardRestApi(BaseSupersetModelRestApi):
except DashboardInvalidError as ex:
return self.response_422(message=ex.normalized_messages())
except DashboardCreateFailedError as ex:
logger.error(f"Error creating model {self.__class__.__name__}: {ex}")
logger.error(
"Error creating model %s: %s", self.__class__.__name__, str(ex)
)
return self.response_422(message=str(ex))
@expose("/<pk>", methods=["PUT"])
@ -274,7 +276,9 @@ class DashboardRestApi(BaseSupersetModelRestApi):
except DashboardInvalidError as ex:
return self.response_422(message=ex.normalized_messages())
except DashboardUpdateFailedError as ex:
logger.error(f"Error updating model {self.__class__.__name__}: {ex}")
logger.error(
"Error updating model %s: %s", self.__class__.__name__, str(ex)
)
return self.response_422(message=str(ex))
@expose("/<pk>", methods=["DELETE"])
@ -321,7 +325,9 @@ class DashboardRestApi(BaseSupersetModelRestApi):
except DashboardForbiddenError:
return self.response_403()
except DashboardDeleteFailedError as ex:
logger.error(f"Error deleting model {self.__class__.__name__}: {ex}")
logger.error(
"Error deleting model %s: %s", self.__class__.__name__, str(ex)
)
return self.response_422(message=str(ex))
@expose("/", methods=["DELETE"])
@ -373,8 +379,8 @@ class DashboardRestApi(BaseSupersetModelRestApi):
return self.response(
200,
message=ngettext(
f"Deleted %(num)d dashboard",
f"Deleted %(num)d dashboards",
"Deleted %(num)d dashboard",
"Deleted %(num)d dashboards",
num=len(item_ids),
),
)

View File

@ -49,7 +49,7 @@ class DashboardDAO(BaseDAO):
@staticmethod
def update_charts_owners(model: Dashboard, commit: bool = True) -> Dashboard:
owners = [owner for owner in model.owners]
owners = list(model.owners)
for slc in model.slices:
slc.owners = list(set(owners) | set(slc.owners))
if commit:
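Replacing [owner for owner in model.owners] with list(model.owners) addresses unnecessary-comprehension (R1721): a comprehension that only copies an iterable element by element is clearer as a direct constructor call. A small illustration with dummy data:

owners = ["alice", "bob"]

# Flagged by R1721: the comprehension adds nothing over a plain copy.
copy_via_comprehension = [owner for owner in owners]

# Equivalent and preferred.
copy_via_constructor = list(owners)

assert copy_via_comprehension == copy_via_constructor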

View File

@ -186,7 +186,9 @@ class DatasetRestApi(BaseSupersetModelRestApi):
except DatasetInvalidError as ex:
return self.response_422(message=ex.normalized_messages())
except DatasetCreateFailedError as ex:
logger.error(f"Error creating model {self.__class__.__name__}: {ex}")
logger.error(
"Error creating model %s: %s", self.__class__.__name__, str(ex)
)
return self.response_422(message=str(ex))
@expose("/<pk>", methods=["PUT"])
@ -254,7 +256,9 @@ class DatasetRestApi(BaseSupersetModelRestApi):
except DatasetInvalidError as ex:
return self.response_422(message=ex.normalized_messages())
except DatasetUpdateFailedError as ex:
logger.error(f"Error updating model {self.__class__.__name__}: {ex}")
logger.error(
"Error updating model %s: %s", self.__class__.__name__, str(ex)
)
return self.response_422(message=str(ex))
@expose("/<pk>", methods=["DELETE"])
@ -301,7 +305,9 @@ class DatasetRestApi(BaseSupersetModelRestApi):
except DatasetForbiddenError:
return self.response_403()
except DatasetDeleteFailedError as ex:
logger.error(f"Error deleting model {self.__class__.__name__}: {ex}")
logger.error(
"Error deleting model %s: %s", self.__class__.__name__, str(ex)
)
return self.response_422(message=str(ex))
@expose("/export/", methods=["GET"])
@ -401,5 +407,7 @@ class DatasetRestApi(BaseSupersetModelRestApi):
except DatasetForbiddenError:
return self.response_403()
except DatasetRefreshFailedError as ex:
logger.error(f"Error refreshing dataset {self.__class__.__name__}: {ex}")
logger.error(
"Error refreshing dataset %s: %s", self.__class__.__name__, str(ex)
)
return self.response_422(message=str(ex))

View File

@ -46,7 +46,7 @@ class DatasetDAO(BaseDAO):
try:
return db.session.query(Database).filter_by(id=database_id).one_or_none()
except SQLAlchemyError as ex: # pragma: no cover
logger.error(f"Could not get database by id: {ex}")
logger.error("Could not get database by id: %s", str(ex))
return None
@staticmethod
@ -55,7 +55,7 @@ class DatasetDAO(BaseDAO):
database.get_table(table_name, schema=schema)
return True
except SQLAlchemyError as ex: # pragma: no cover
logger.error(f"Got an error {ex} validating table: {table_name}")
logger.error("Got an error %s validating table: %s", str(ex), table_name)
return False
@staticmethod

View File

@ -332,7 +332,6 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
set to is_dttm=True. Note that this only gets called when new
columns are detected/created"""
# TODO: Fix circular import caused by importing TableColumn
pass
@classmethod
def epoch_to_dttm(cls) -> str:
@ -401,9 +400,11 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
.limit(limit)
)
return database.compile_sqla_query(qry)
elif LimitMethod.FORCE_LIMIT:
if LimitMethod.FORCE_LIMIT:
parsed_query = sql_parse.ParsedQuery(sql)
sql = parsed_query.set_or_update_query_limit(limit)
return sql
@classmethod
@ -465,7 +466,7 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
Create table from contents of a csv. Note: this method does not create
metadata for the table.
"""
df = cls.csv_to_df(filepath_or_buffer=filename, **csv_to_df_kwargs,)
df = cls.csv_to_df(filepath_or_buffer=filename, **csv_to_df_kwargs)
engine = cls.get_engine(database)
if table.schema:
# only add schema when it is preset and non empty
@ -529,7 +530,6 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
for handling the cursor and updating progress information in the
query object"""
# TODO: Fix circular import error caused by importing sql_lab.Query
pass
@classmethod
def extract_error_message(cls, ex: Exception) -> str:
@ -573,14 +573,12 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
Some database drivers like presto accept '{catalog}/{schema}' in
the database component of the URL, that can be handled here.
"""
pass
@classmethod
def patch(cls) -> None:
"""
TODO: Improve docstring and refactor implementation in Hive
"""
pass
@classmethod
def get_schema_names(cls, inspector: Inspector) -> List[str]:

View File

@ -56,7 +56,7 @@ class DrillEngineSpec(BaseEngineSpec):
tt = target_type.upper()
if tt == "DATE":
return f"TO_DATE('{dttm.date().isoformat()}', 'yyyy-MM-dd')"
elif tt == "TIMESTAMP":
if tt == "TIMESTAMP":
return f"""TO_TIMESTAMP('{dttm.isoformat(sep=" ", timespec="seconds")}', 'yyyy-MM-dd HH:mm:ss')""" # pylint: disable=line-too-long
return None
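Dropping elif/else after a branch that returns or raises follows the no-else-return and no-else-raise checks (R1705/R1720): once a branch exits, the remaining branches can be un-indented without changing behavior. A sketch of the transformation applied throughout this commit, using a made-up converter:

def convert(target_type):
    # Before (flagged as R1705):
    #     if target_type == "DATE":
    #         return "date"
    #     elif target_type == "TIMESTAMP":
    #         return "timestamp"
    #     else:
    #         return None

    # After: every guard returns, so no elif/else is needed.
    if target_type == "DATE":
        return "date"
    if target_type == "TIMESTAMP":
        return "timestamp"
    return None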

View File

@ -200,7 +200,7 @@ class HiveEngineSpec(PrestoEngineSpec):
tt = target_type.upper()
if tt == "DATE":
return f"CAST('{dttm.date().isoformat()}' AS DATE)"
elif tt == "TIMESTAMP":
if tt == "TIMESTAMP":
return f"""CAST('{dttm.isoformat(sep=" ", timespec="microseconds")}' AS TIMESTAMP)""" # pylint: disable=line-too-long
return None
@ -284,7 +284,9 @@ class HiveEngineSpec(PrestoEngineSpec):
if log:
log_lines = log.splitlines()
progress = cls.progress(log_lines)
logger.info(f"Query {query_id}: Progress total: {progress}")
logger.info(
"Query %s: Progress total: %s", str(query_id), str(progress)
)
needs_commit = False
if progress > query.progress:
query.progress = progress
@ -294,21 +296,25 @@ class HiveEngineSpec(PrestoEngineSpec):
if tracking_url:
job_id = tracking_url.split("/")[-2]
logger.info(
f"Query {query_id}: Found the tracking url: {tracking_url}"
"Query %s: Found the tracking url: %s",
str(query_id),
tracking_url,
)
tracking_url = tracking_url_trans(tracking_url)
logger.info(
f"Query {query_id}: Transformation applied: {tracking_url}"
"Query %s: Transformation applied: %s",
str(query_id),
tracking_url,
)
query.tracking_url = tracking_url
logger.info(f"Query {query_id}: Job id: {job_id}")
logger.info("Query %s: Job id: %s", str(query_id), str(job_id))
needs_commit = True
if job_id and len(log_lines) > last_log_line:
# Wait for job id before logging things out
# this allows for prefixing all log lines and becoming
# searchable in something like Kibana
for l in log_lines[last_log_line:]:
logger.info(f"Query {query_id}: [{job_id}] {l}")
logger.info("Query %s: [%s] %s", str(query_id), str(job_id), l)
last_log_line = len(log_lines)
if needs_commit:
session.commit()
@ -414,7 +420,6 @@ class HiveEngineSpec(PrestoEngineSpec):
"""
# Do nothing in the URL object since instead this should modify
# the configuration dictionary. See get_configuration_for_impersonation
pass
@classmethod
def get_configuration_for_impersonation(

View File

@ -47,7 +47,7 @@ class ImpalaEngineSpec(BaseEngineSpec):
tt = target_type.upper()
if tt == "DATE":
return f"CAST('{dttm.date().isoformat()}' AS DATE)"
elif tt == "TIMESTAMP":
if tt == "TIMESTAMP":
return f"""CAST('{dttm.isoformat(timespec="microseconds")}' AS TIMESTAMP)"""
return None

View File

@ -84,7 +84,9 @@ class PinotEngineSpec(BaseEngineSpec): # pylint: disable=abstract-method
if not granularity:
raise NotImplementedError("No pinot grain spec for " + str(time_grain))
else:
return TimestampExpression(f"{{col}}", col)
return TimestampExpression(
f"{{col}}", col # pylint: disable=f-string-without-interpolation
)
# In pinot the output is a string since there is no timestamp column like pg
time_expr = f"DATETIMECONVERT({{col}}, '{tf}', '{tf}', '{granularity}')"
return TimestampExpression(time_expr, col)
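The stray f prefixes removed from the ngettext strings earlier in this commit, and the inline disable added here for the Pinot template, both concern f-string-without-interpolation (W1309): an f-string with no placeholders is usually a mistake, but f"{{col}}" is intentional, producing the literal text {col} for later substitution. A brief illustration, not code from this commit:

# Flagged by W1309: nothing is interpolated, so the `f` prefix is useless.
# label = f"Deleted %(num)d chart"

# Fine as a plain string: %(num)d is filled in later by gettext/ngettext.
label = "Deleted %(num)d chart"

# Deliberate f-string with escaped braces: yields the literal "{col}", which a
# downstream templating step replaces; hence the inline pylint disable above.
column_template = f"{{col}}"  # pylint: disable=f-string-without-interpolation

assert column_template == "{col}"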

View File

@ -79,7 +79,8 @@ def get_children(column: Dict[str, str]) -> List[Dict[str, str]]:
children_type = group["children"]
if type_ == "ARRAY":
return [{"name": column["name"], "type": children_type}]
elif type_ == "ROW":
if type_ == "ROW":
nameless_columns = 0
columns = []
for child in utils.split(children_type, ","):
@ -93,8 +94,8 @@ def get_children(column: Dict[str, str]) -> List[Dict[str, str]]:
nameless_columns += 1
columns.append({"name": f"{column['name']}.{name.lower()}", "type": type_})
return columns
else:
raise Exception(f"Unknown type {type_}!")
raise Exception(f"Unknown type {type_}!")
class PrestoEngineSpec(BaseEngineSpec):
@ -278,7 +279,7 @@ class PrestoEngineSpec(BaseEngineSpec):
if not (inner_type.endswith("array") or inner_type.endswith("row")):
stack.pop()
# We have an array of row objects (i.e. array(row(...)))
elif inner_type == "array" or inner_type == "row":
elif inner_type in ("array", "row"):
# Push a dummy object to represent the structural data type
stack.append(("", inner_type))
# We have an array of a basic data types(i.e. array(varchar)).
@ -339,8 +340,9 @@ class PrestoEngineSpec(BaseEngineSpec):
)
result[structural_column_index]["default"] = None
continue
else: # otherwise column is a basic data type
column_type = presto_type_map[column.Type]()
# otherwise column is a basic data type
column_type = presto_type_map[column.Type]()
except KeyError:
logger.info(
"Did not recognize type {} of column {}".format( # pylint: disable=logging-format-interpolation
@ -727,7 +729,7 @@ class PrestoEngineSpec(BaseEngineSpec):
def handle_cursor(cls, cursor: Any, query: Query, session: Session) -> None:
"""Updates progress information"""
query_id = query.id
logger.info(f"Query {query_id}: Polling the cursor for progress")
logger.info("Query %i: Polling the cursor for progress", query_id)
polled = cursor.poll()
# poll returns dict -- JSON status information or ``None``
# if the query is done
@ -761,7 +763,7 @@ class PrestoEngineSpec(BaseEngineSpec):
query.progress = progress
session.commit()
time.sleep(1)
logger.info(f"Query {query_id}: Polling the cursor for progress")
logger.info("Query %i: Polling the cursor for progress", query_id)
polled = cursor.poll()
@classmethod
@ -903,12 +905,14 @@ class PrestoEngineSpec(BaseEngineSpec):
raise SupersetTemplateException(
"The table should have one partitioned field"
)
elif not show_first and len(indexes[0]["column_names"]) > 1:
if not show_first and len(indexes[0]["column_names"]) > 1:
raise SupersetTemplateException(
"The table should have a single partitioned field "
"to use this function. You may want to use "
"`presto.latest_sub_partition`"
)
column_names = indexes[0]["column_names"]
part_fields = [(column_name, True) for column_name in column_names]
sql = cls._partition_query(table_name, database, 1, part_fields)
@ -947,7 +951,7 @@ class PrestoEngineSpec(BaseEngineSpec):
indexes = database.get_indexes(table_name, schema)
part_fields = indexes[0]["column_names"]
for k in kwargs.keys(): # pylint: disable=consider-iterating-dictionary
if k not in k in part_fields:
if k not in k in part_fields: # pylint: disable=comparison-with-itself
msg = "Field [{k}] is not part of the portioning key"
raise SupersetTemplateException(msg)
if len(kwargs.keys()) != len(part_fields) - 1:
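The if k not in k in part_fields line trips comparison-with-itself (R0124) because Python reads it as a chained comparison, (k not in k) and (k in part_fields); the commit only silences the warning rather than rewriting the expression. A sketch of how the chaining evaluates, using made-up values:

part_fields = ["ds", "hour"]
k = "ds"

# Chained comparison: equivalent to (k not in k) and (k in part_fields).
chained = k not in k in part_fields
explicit = (k not in k) and (k in part_fields)

# For a string key, `k not in k` is always False (a string contains itself),
# so the condition can never be True -- the warning points at a real oddity
# that this change leaves in place.
assert chained is False and explicit is False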

View File

@ -64,15 +64,14 @@ class SqliteEngineSpec(BaseEngineSpec):
cache=database.table_cache_enabled,
cache_timeout=database.table_cache_timeout,
)
elif datasource_type == "view":
if datasource_type == "view":
return database.get_all_view_names_in_schema(
schema=schema,
force=True,
cache=database.table_cache_enabled,
cache_timeout=database.table_cache_timeout,
)
else:
raise Exception(f"Unsupported datasource_type: {datasource_type}")
raise Exception(f"Unsupported datasource_type: {datasource_type}")
@classmethod
def convert_dttm(cls, target_type: str, dttm: datetime) -> Optional[str]:

View File

@ -34,7 +34,7 @@ class AnnotationLayer(Model, AuditMixinNullable):
descr = Column(Text)
def __repr__(self) -> str:
return self.name
return str(self.name)
class Annotation(Model, AuditMixinNullable):

View File

@ -170,7 +170,9 @@ class Database(
except Exception as ex: # pylint: disable=broad-except
# function_names property is used in bulk APIs and should not hard crash
# more info in: https://github.com/apache/incubator-superset/issues/9678
logger.error(f"Failed to fetch database function names with error: {ex}")
logger.error(
"Failed to fetch database function names with error: %s", str(ex)
)
return []
@property
@ -207,7 +209,7 @@ class Database(
@property
def backend(self) -> str:
sqlalchemy_url = make_url(self.sqlalchemy_uri_decrypted)
return sqlalchemy_url.get_backend_name()
return sqlalchemy_url.get_backend_name() # pylint: disable=no-member
@property
def metadata_cache_timeout(self) -> Dict[str, Any]:
@ -452,8 +454,7 @@ class Database(
return self.db_engine_spec.get_all_datasource_names(self, "table")
@cache_util.memoized_func(
key=lambda *args, **kwargs: "db:{}:schema:None:view_list",
attribute_in_key="id",
key=lambda *args, **kwargs: "db:{}:schema:None:view_list", attribute_in_key="id"
)
def get_all_view_names_in_database(
self,
@ -652,7 +653,7 @@ class Database(
@utils.memoized
def get_dialect(self) -> Dialect:
sqla_url = url.make_url(self.sqlalchemy_uri_decrypted)
return sqla_url.get_dialect()()
return sqla_url.get_dialect()() # pylint: disable=no-member
sqla.event.listen(Database, "after_insert", security_manager.set_perm)

View File

@ -42,7 +42,7 @@ from sqlalchemy.orm.mapper import Mapper
from superset import app, ConnectorRegistry, db, is_feature_enabled, security_manager
from superset.models.helpers import AuditMixinNullable, ImportMixin
from superset.models.slice import Slice as Slice
from superset.models.slice import Slice
from superset.models.tags import DashboardUpdater
from superset.models.user_attributes import UserAttribute
from superset.tasks.thumbnails import cache_dashboard_thumbnail
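Importing a name as itself (Slice as Slice) is reported as useless-import-alias (C0414); dropping the alias is behaviorally identical. A tiny stdlib-based illustration, not code from this commit:

# Flagged by C0414: the alias merely repeats the imported name.
# from collections import OrderedDict as OrderedDict

# Equivalent, without the redundant alias.
from collections import OrderedDict

ordered = OrderedDict(a=1)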

View File

@ -363,6 +363,7 @@ class AuditMixinNullable(AuditMixin):
nullable=True,
)
@property
def changed_by_name(self) -> str:
if self.created_by:
return escape("{}".format(self.created_by))

View File

@ -90,6 +90,6 @@ class SliceEmailSchedule(Model, AuditMixinNullable, ImportMixin, EmailSchedule):
def get_scheduler_model(report_type: ScheduleType) -> Optional[Type[EmailSchedule]]:
if report_type == ScheduleType.dashboard:
return DashboardEmailSchedule
elif report_type == ScheduleType.slice:
if report_type == ScheduleType.slice:
return SliceEmailSchedule
return None

View File

@ -29,6 +29,7 @@ class TinyInteger(Integer):
A type for tiny ``int`` integers.
"""
@property
def python_type(self) -> Type[int]:
return int
@ -42,6 +43,7 @@ class Interval(TypeEngine):
A type for intervals.
"""
@property
def python_type(self) -> Optional[Type[Any]]:
return None
@ -55,6 +57,7 @@ class Array(TypeEngine):
A type for arrays.
"""
@property
def python_type(self) -> Optional[Type[List[Any]]]:
return list
@ -68,6 +71,7 @@ class Map(TypeEngine):
A type for maps.
"""
@property
def python_type(self) -> Optional[Type[Dict[Any, Any]]]:
return dict
@ -81,6 +85,7 @@ class Row(TypeEngine):
A type for rows.
"""
@property
def python_type(self) -> Optional[Type[Any]]:
return None

View File

@ -96,9 +96,9 @@ def handle_query_error(
def get_query_backoff_handler(details: Dict[Any, Any]) -> None:
query_id = details["kwargs"]["query_id"]
logger.error(f"Query with id `{query_id}` could not be retrieved")
logger.error("Query with id `%s` could not be retrieved", str(query_id))
stats_logger.incr("error_attempting_orm_query_{}".format(details["tries"] - 1))
logger.error(f"Query {query_id}: Sleeping for a sec before retrying...")
logger.error("Query %s: Sleeping for a sec before retrying...", str(query_id))
def get_query_giveup_handler(_: Any) -> None:
@ -287,7 +287,7 @@ def execute_sql_statement(
def _serialize_payload(
payload: Dict[Any, Any], use_msgpack: Optional[bool] = False
) -> Union[bytes, str]:
logger.debug(f"Serializing to msgpack: {use_msgpack}")
logger.debug("Serializing to msgpack: %r", use_msgpack)
if use_msgpack:
return msgpack.dumps(payload, default=json_iso_dttm_ser, use_bin_type=True)
@ -360,9 +360,9 @@ def execute_sql_statements( # pylint: disable=too-many-arguments, too-many-loca
# Breaking down into multiple statements
parsed_query = ParsedQuery(rendered_query)
statements = parsed_query.get_statements()
logger.info(f"Query {query_id}: Executing {len(statements)} statement(s)")
logger.info("Query %s: Executing %i statement(s)", str(query_id), len(statements))
logger.info(f"Query {query_id}: Set query to 'running'")
logger.info("Query %s: Set query to 'running'", str(query_id))
query.status = QueryStatus.RUNNING
query.start_running_time = now_as_float()
session.commit()
@ -386,7 +386,7 @@ def execute_sql_statements( # pylint: disable=too-many-arguments, too-many-loca
# Run statement
msg = f"Running statement {i+1} out of {statement_count}"
logger.info(f"Query {query_id}: {msg}")
logger.info("Query %s: %s", str(query_id), msg)
query.set_extra_json_key("progress", msg)
session.commit()
try:
@ -437,7 +437,9 @@ def execute_sql_statements( # pylint: disable=too-many-arguments, too-many-loca
if store_results and results_backend:
key = str(uuid.uuid4())
logger.info(f"Query {query_id}: Storing results in results backend, key: {key}")
logger.info(
"Query %s: Storing results in results backend, key: %s", str(query_id), key
)
with stats_timing("sqllab.query.results_backend_write", stats_logger):
with stats_timing(
"sqllab.query.results_backend_write_serialization", stats_logger
@ -451,9 +453,9 @@ def execute_sql_statements( # pylint: disable=too-many-arguments, too-many-loca
compressed = zlib_compress(serialized_payload)
logger.debug(
f"*** serialized payload size: {getsizeof(serialized_payload)}"
"*** serialized payload size: %i", getsizeof(serialized_payload)
)
logger.debug(f"*** compressed payload size: {getsizeof(compressed)}")
logger.debug("*** compressed payload size: %i", getsizeof(compressed))
results_backend.set(key, compressed, cache_timeout)
query.results_key = key

View File

@ -138,7 +138,7 @@ class PrestoDBSQLValidator(BaseSQLValidator):
end_column=end_column,
)
except Exception as ex:
logger.exception(f"Unexpected error running validation query: {ex}")
logger.exception("Unexpected error running validation query: %s", str(ex))
raise ex
@classmethod
@ -156,7 +156,7 @@ class PrestoDBSQLValidator(BaseSQLValidator):
parsed_query = ParsedQuery(sql)
statements = parsed_query.get_statements()
logger.info(f"Validating {len(statements)} statement(s)")
logger.info("Validating %i statement(s)", len(statements))
engine = database.get_sqla_engine(
schema=schema,
nullpool=True,
@ -174,6 +174,6 @@ class PrestoDBSQLValidator(BaseSQLValidator):
)
if annotation:
annotations.append(annotation)
logger.debug(f"Validation found {len(annotations)} error(s)")
logger.debug("Validation found %i error(s)", len(annotations))
return annotations

View File

@ -275,7 +275,7 @@ def cache_warmup(
logger.error(message)
return message
logger.info(f"Loading {class_.__name__}")
logger.info("Loading %s", class_.__name__)
try:
strategy = class_(*args, **kwargs)
logger.info("Success!")
@ -287,7 +287,7 @@ def cache_warmup(
results: Dict[str, List[str]] = {"success": [], "errors": []}
for url in strategy.get_urls():
try:
logger.info(f"Fetching {url}")
logger.info("Fetching %s", url)
request.urlopen(url)
results["success"].append(url)
except URLError:

View File

@ -488,7 +488,7 @@ def schedule_email_report( # pylint: disable=unused-argument
recipients = recipients or schedule.recipients
slack_channel = slack_channel or schedule.slack_channel
logger.info(
f"Starting report for slack: {slack_channel} and recipients: {recipients}."
"Starting report for slack: %s and recipients: %s.", slack_channel, recipients
)
if report_type == ScheduleType.dashboard:

View File

@ -42,5 +42,5 @@ def deliver_slack_msg(
channels=slack_channel, file=file, initial_comment=body, title=subject
),
)
logger.info(f"Sent the report to the slack {slack_channel}")
logger.info("Sent the report to the slack %s", slack_channel)
assert response["file"], str(response) # the uploaded file

View File

@ -49,7 +49,7 @@ def convert_filter_scopes(
"immune": current_filter_immune,
}
else:
logging.info(f"slice [{filter_id}] has invalid field: {filter_field}")
logging.info("slice [%i] has invalid field: %s", filter_id, filter_field)
for filter_slice in filters:
filter_fields: Dict[str, Dict[str, Any]] = {}

View File

@ -134,7 +134,7 @@ def get_event_logger_from_cfg_value(cfg_value: Any) -> AbstractEventLogger:
"of superset.utils.log.AbstractEventLogger."
)
logging.info(f"Configured event logger of type {type(result)}")
logging.info("Configured event logger of type %s", type(result))
return cast(AbstractEventLogger, result)

View File

@ -155,14 +155,14 @@ class AuthWebDriverProxy:
driver.set_window_size(*self._window)
driver.get(url)
img: Optional[bytes] = None
logger.debug(f"Sleeping for {SELENIUM_HEADSTART} seconds")
logger.debug("Sleeping for %i seconds", SELENIUM_HEADSTART)
time.sleep(SELENIUM_HEADSTART)
try:
logger.debug(f"Wait for the presence of {element_name}")
logger.debug("Wait for the presence of %s", element_name)
element = WebDriverWait(driver, 10).until(
EC.presence_of_element_located((By.CLASS_NAME, element_name))
)
logger.debug(f"Wait for .loading to be done")
logger.debug("Wait for .loading to be done")
WebDriverWait(driver, 60).until_not(
EC.presence_of_all_elements_located((By.CLASS_NAME, "loading"))
)
@ -226,7 +226,7 @@ class BaseScreenshot:
user=user, thumb_size=thumb_size, cache=cache
)
else:
logger.info(f"Loaded thumbnail from cache: {self.cache_key}")
logger.info("Loaded thumbnail from cache: %s", self.cache_key)
if payload:
return BytesIO(payload)
return None
@ -259,7 +259,7 @@ class BaseScreenshot:
logger.info("Thumb already cached, skipping...")
return None
thumb_size = thumb_size or self.thumb_size
logger.info(f"Processing url for thumbnail: {cache_key}")
logger.info("Processing url for thumbnail: %s", cache_key)
payload = None
@ -277,7 +277,7 @@ class BaseScreenshot:
payload = None
if payload and cache:
logger.info(f"Caching thumbnail: {cache_key} {cache}")
logger.info("Caching thumbnail: %s %s", cache_key, str(cache))
cache.set(cache_key, payload)
return payload
@ -291,13 +291,13 @@ class BaseScreenshot:
) -> bytes:
thumb_size = thumb_size or cls.thumb_size
img = Image.open(BytesIO(img_bytes))
logger.debug(f"Selenium image size: {img.size}")
logger.debug("Selenium image size: %s", str(img.size))
if crop and img.size[1] != cls.window_size[1]:
desired_ratio = float(cls.window_size[1]) / cls.window_size[0]
desired_width = int(img.size[0] * desired_ratio)
logger.debug(f"Cropping to: {img.size[0]}*{desired_width}")
logger.debug("Cropping to: %s*%s", str(img.size[0]), str(desired_width))
img = img.crop((0, 0, img.size[0], desired_width))
logger.debug(f"Resizing to {thumb_size}")
logger.debug("Resizing to %s", str(thumb_size))
img = img.resize(thumb_size, Image.ANTIALIAS)
new_img = BytesIO()
if output != "png":

View File

@ -23,8 +23,7 @@ from wtforms.validators import StopValidation
from superset.constants import RouteMethod
from superset.models.annotations import Annotation, AnnotationLayer
from .base import SupersetModelView
from superset.views.base import SupersetModelView
class StartEndDttmValidator: # pylint: disable=too-few-public-methods
@ -35,7 +34,7 @@ class StartEndDttmValidator: # pylint: disable=too-few-public-methods
def __call__(self, form: Dict[str, Any], field: Any) -> None:
if not form["start_dttm"].data and not form["end_dttm"].data:
raise StopValidation(_("annotation start time or end time is required."))
elif (
if (
form["end_dttm"].data
and form["start_dttm"].data
and form["end_dttm"].data < form["start_dttm"].data

View File

@ -212,7 +212,7 @@ def validate_sqlatable(table: models.SqlaTable) -> None:
try:
table.get_sqla_table_object()
except Exception as ex:
logger.exception(f"Got an error in pre_add for {table.name}")
logger.exception("Got an error in pre_add for %s", table.name)
raise Exception(
_(
"Table [%{table}s] could not be found, "
@ -498,8 +498,7 @@ def check_ownership(obj: Any, raise_if_false: bool = True) -> bool:
return True
if raise_if_false:
raise security_exception
else:
return False
return False
def bind_field(

View File

@ -29,15 +29,14 @@ from superset import app, db, event_logger
from superset.constants import RouteMethod
from superset.typing import FlaskResponse
from superset.utils import core as utils
from ..base import (
from superset.views.base import (
BaseSupersetView,
check_ownership,
DeleteMixin,
generate_download_headers,
SupersetModelView,
)
from .mixin import DashboardMixin
from superset.views.dashboard.mixin import DashboardMixin
class DashboardModelView(
@ -95,7 +94,7 @@ class DashboardModelView(
item.owners.append(g.user)
utils.validate_json(item.json_metadata)
utils.validate_json(item.position_json)
owners = [o for o in item.owners]
owners = list(item.owners)
for slc in item.slices:
slc.owners = list(set(owners) | set(slc.owners))

View File

@ -51,14 +51,16 @@ def check_datasource_access(f: Callable[..., Any]) -> Callable[..., Any]:
)
return self.response_404()
if not self.appbuilder.sm.can_access_table(
database, Table(table_name_parsed, schema_name_parsed),
database, Table(table_name_parsed, schema_name_parsed)
):
self.stats_logger.incr(
f"permisssion_denied_{self.__class__.__name__}.select_star"
)
logger.warning(
f"Permission denied for user {g.user} on table: {table_name_parsed} "
f"schema: {schema_name_parsed}"
"Permission denied for user %s on table: %s schema: %s",
g.user,
table_name_parsed,
schema_name_parsed,
)
return self.response_404()
return f(self, database, table_name_parsed, schema_name_parsed)

View File

@ -460,7 +460,7 @@ def check_slice_perms(_self: Any, slice_id: int) -> None:
def _deserialize_results_payload(
payload: Union[bytes, str], query: Query, use_msgpack: Optional[bool] = False
) -> Dict[str, Any]:
logger.debug(f"Deserializing from msgpack: {use_msgpack}")
logger.debug("Deserializing from msgpack: %r", use_msgpack)
if use_msgpack:
with stats_timing(
"sqllab.query.results_backend_msgpack_deserialize", stats_logger
@ -482,11 +482,9 @@ def _deserialize_results_payload(
)
return ds_payload
else:
with stats_timing(
"sqllab.query.results_backend_json_deserialize", stats_logger
):
return json.loads(payload)
with stats_timing("sqllab.query.results_backend_json_deserialize", stats_logger):
return json.loads(payload)
def get_cta_schema_name(