chore: Upgrade pylint to 2.5.3 and fix most new rules (#10101)

* Bump pylint version to 2.5.3

* Add a global disable for the most common new pylint error

* Fix a bunch of files containing very few errors

* More pylint tweaks, low-hanging fruit

* More easy stuff...

* Fix more files with lint errors

* Fix the last couple of errors, clean pylint!

* Run Black formatting

* Fix mypy issue in connectors/druid/models.py
Will Barrett authored on 2020-06-18 14:03:42 -07:00 (committed by GitHub)
parent 02fee35314
commit 8e23d4f369
40 changed files with 166 additions and 135 deletions
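The single most common fix in this diff is replacing eager f-string formatting inside logging calls with lazy %-style arguments, which pylint 2.x reports as logging-fstring-interpolation. A minimal sketch of the pattern, using illustrative names rather than code from this commit:

import logging

logger = logging.getLogger(__name__)
query_id = 42

# Flagged: the f-string is rendered even if INFO records are discarded.
logger.info(f"Query {query_id}: polling the cursor for progress")

# Preferred: formatting is deferred until the record is actually emitted.
logger.info("Query %s: polling the cursor for progress", query_id)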

View File

@@ -81,7 +81,7 @@ confidence=
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use"--disable=all --enable=classes
# --disable=W"
-disable=standarderror-builtin,long-builtin,dict-view-method,intern-builtin,suppressed-message,no-absolute-import,unpacking-in-except,apply-builtin,delslice-method,indexing-exception,old-raise-syntax,print-statement,cmp-builtin,reduce-builtin,useless-suppression,coerce-method,input-builtin,cmp-method,raw_input-builtin,nonzero-method,backtick,basestring-builtin,setslice-method,reload-builtin,oct-method,map-builtin-not-iterating,execfile-builtin,old-octal-literal,zip-builtin-not-iterating,buffer-builtin,getslice-method,metaclass-assignment,xrange-builtin,long-suffix,round-builtin,range-builtin-not-iterating,next-method-called,dict-iter-method,parameter-unpacking,unicode-builtin,unichr-builtin,import-star-module-level,raising-string,filter-builtin-not-iterating,old-ne-operator,using-cmp-argument,coerce-builtin,file-builtin,old-division,hex-method,invalid-unary-operand-type,missing-docstring,too-many-lines,duplicate-code,bad-continuation,ungrouped-imports
+disable=standarderror-builtin,long-builtin,dict-view-method,intern-builtin,suppressed-message,no-absolute-import,unpacking-in-except,apply-builtin,delslice-method,indexing-exception,old-raise-syntax,print-statement,cmp-builtin,reduce-builtin,useless-suppression,coerce-method,input-builtin,cmp-method,raw_input-builtin,nonzero-method,backtick,basestring-builtin,setslice-method,reload-builtin,oct-method,map-builtin-not-iterating,execfile-builtin,old-octal-literal,zip-builtin-not-iterating,buffer-builtin,getslice-method,metaclass-assignment,xrange-builtin,long-suffix,round-builtin,range-builtin-not-iterating,next-method-called,dict-iter-method,parameter-unpacking,unicode-builtin,unichr-builtin,import-star-module-level,raising-string,filter-builtin-not-iterating,old-ne-operator,using-cmp-argument,coerce-builtin,file-builtin,old-division,hex-method,invalid-unary-operand-type,missing-docstring,too-many-lines,duplicate-code,bad-continuation,ungrouped-imports,import-outside-toplevel
[REPORTS]
@@ -254,7 +254,7 @@ notes=FIXME,XXX
[SIMILARITIES]
# Minimum lines number of a similarity.
-min-similarity-lines=4
+min-similarity-lines=5
# Ignore comments when computing similarities.
ignore-comments=yes
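The one new check silenced globally above is import-outside-toplevel, which fires on imports placed inside functions. Where only a few deferred imports need it, a scoped inline pragma is the narrower alternative; a hypothetical sketch, not code from this repository:

def load_serializer():
    # Deferred import (stands in for a heavy or circular dependency),
    # silenced locally instead of through the global disable list.
    import json  # pylint: disable=import-outside-toplevel
    return json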

View File

@@ -28,7 +28,7 @@ psycopg2-binary==2.8.5
pycodestyle==2.5.0
pydruid==0.6.1
pyhive==0.6.2
-pylint==1.9.2
+pylint==2.5.3
redis==3.5.1
requests==2.23.0
statsd==3.3.0

View File

@@ -95,7 +95,6 @@ class SupersetAppInitializer:
"""
Called after any other init tasks
"""
-pass
def configure_celery(self) -> None:
celery_app.config_from_object(self.config["CELERY_CONFIG"])
@@ -593,7 +592,7 @@ class SupersetAppInitializer:
def register_blueprints(self) -> None:
for bp in self.config["BLUEPRINTS"]:
try:
-logger.info(f"Registering blueprint: '{bp.name}'")
+logger.info("Registering blueprint: %s", bp.name)
self.flask_app.register_blueprint(bp)
except Exception: # pylint: disable=broad-except
logger.exception("blueprint registration failed")

View File

@@ -219,7 +219,9 @@ class ChartRestApi(BaseSupersetModelRestApi):
except ChartInvalidError as ex:
return self.response_422(message=ex.normalized_messages())
except ChartCreateFailedError as ex:
-logger.error(f"Error creating model {self.__class__.__name__}: {ex}")
+logger.error(
+"Error creating model %s: %s", self.__class__.__name__, str(ex)
+)
return self.response_422(message=str(ex))
@expose("/<pk>", methods=["PUT"])
@@ -287,7 +289,9 @@ class ChartRestApi(BaseSupersetModelRestApi):
except ChartInvalidError as ex:
return self.response_422(message=ex.normalized_messages())
except ChartUpdateFailedError as ex:
-logger.error(f"Error updating model {self.__class__.__name__}: {ex}")
+logger.error(
+"Error updating model %s: %s", self.__class__.__name__, str(ex)
+)
return self.response_422(message=str(ex))
@expose("/<pk>", methods=["DELETE"])
@@ -334,7 +338,9 @@ class ChartRestApi(BaseSupersetModelRestApi):
except ChartForbiddenError:
return self.response_403()
except ChartDeleteFailedError as ex:
-logger.error(f"Error deleting model {self.__class__.__name__}: {ex}")
+logger.error(
+"Error deleting model %s: %s", self.__class__.__name__, str(ex)
+)
return self.response_422(message=str(ex))
@expose("/", methods=["DELETE"])
@@ -386,9 +392,7 @@ class ChartRestApi(BaseSupersetModelRestApi):
return self.response(
200,
message=ngettext(
-f"Deleted %(num)d chart",
-f"Deleted %(num)d charts",
-num=len(item_ids),
+"Deleted %(num)d chart", "Deleted %(num)d charts", num=len(item_ids)
),
)
except ChartNotFoundError:
@@ -464,13 +468,15 @@ class ChartRestApi(BaseSupersetModelRestApi):
headers=generate_download_headers("csv"),
mimetype="application/csv",
)
-elif result_format == ChartDataResultFormat.JSON:
+if result_format == ChartDataResultFormat.JSON:
response_data = simplejson.dumps(
{"result": payload}, default=json_int_dttm_ser, ignore_nan=True
)
resp = make_response(response_data, 200)
resp.headers["Content-Type"] = "application/json; charset=utf-8"
return resp
raise self.response_400(message=f"Unsupported result_format: {result_format}")
@expose("/<pk>/thumbnail/<digest>/", methods=["GET"])

View File

@@ -29,7 +29,6 @@ class BaseCommand(ABC):
Run executes the command. Can raise command exceptions
:raises: CommandException
"""
-pass
@abstractmethod
def validate(self) -> None:
@@ -38,4 +37,3 @@ class BaseCommand(ABC):
Will raise exception if validation fails
:raises: CommandException
"""
-pass

View File

@@ -141,8 +141,8 @@ class QueryObject:
if is_sip_38 and groupby:
self.columns += groupby
logger.warning(
-f"The field `groupby` is deprecated. Viz plugins should "
-f"pass all selectables via the `columns` field"
+"The field `groupby` is deprecated. Viz plugins should "
+"pass all selectables via the `columns` field"
)
self.orderby = orderby or []
@@ -151,15 +151,18 @@ class QueryObject:
for field in DEPRECATED_FIELDS:
if field.old_name in kwargs:
logger.warning(
-f"The field `{field.old_name}` is deprecated, please use "
-f"`{field.new_name}` instead."
+"The field `%s` is deprecated, please use `%s` instead.",
+field.old_name,
+field.new_name,
)
value = kwargs[field.old_name]
if value:
if hasattr(self, field.new_name):
logger.warning(
-f"The field `{field.new_name}` is already populated, "
-f"replacing value with contents from `{field.old_name}`."
+"The field `%s` is already populated, "
+"replacing value with contents from `%s`.",
+field.new_name,
+field.old_name,
)
setattr(self, field.new_name, value)
@@ -167,16 +170,20 @@ class QueryObject:
for field in DEPRECATED_EXTRAS_FIELDS:
if field.old_name in kwargs:
logger.warning(
-f"The field `{field.old_name}` is deprecated and should be "
-f"passed to `extras` via the `{field.new_name}` property."
+"The field `%s` is deprecated and should "
+"be passed to `extras` via the `%s` property.",
+field.old_name,
+field.new_name,
)
value = kwargs[field.old_name]
if value:
if hasattr(self.extras, field.new_name):
logger.warning(
-f"The field `{field.new_name}` is already populated in "
-f"`extras`, replacing value with contents "
-f"from `{field.old_name}`."
+"The field `%s` is already populated in "
+"`extras`, replacing value with contents "
+"from `%s`.",
+field.new_name,
+field.old_name,
)
self.extras[field.new_name] = value

View File

@@ -898,7 +898,7 @@ if CONFIG_PATH_ENV_VAR in os.environ:
print(f"Loaded your LOCAL configuration at [{cfg_path}]")
except Exception:
logger.exception(
-f"Failed to import config for {CONFIG_PATH_ENV_VAR}={cfg_path}"
+"Failed to import config for %s=%s", CONFIG_PATH_ENV_VAR, cfg_path
)
raise
elif importlib.util.find_spec("superset_config"):

View File

@@ -348,7 +348,7 @@ class BaseDatasource(
value = utils.cast_to_num(value)
if value == NULL_STRING:
return None
-elif value == "<empty string>":
+if value == "<empty string>":
return ""
return value
@@ -516,7 +516,7 @@ class BaseColumn(AuditMixinNullable, ImportMixin):
export_fields: List[Any] = []
def __repr__(self) -> str:
-return self.column_name
+return str(self.column_name)
num_types = (
"DOUBLE",

View File

@@ -14,8 +14,7 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
-# pylint: disable=C,R,W
-# pylint: disable=invalid-unary-operand-type
+# pylint: skip-file
import json
import logging
import re
@@ -81,12 +80,7 @@ except ImportError:
pass
try:
-from superset.utils.core import (
-DimSelector,
-DTTM_ALIAS,
-FilterOperator,
-flasher,
-)
+from superset.utils.core import DimSelector, DTTM_ALIAS, FilterOperator, flasher
except ImportError:
pass
@@ -845,7 +839,8 @@ class DruidDatasource(Model, BaseDatasource):
else:
granularity["type"] = "duration"
granularity["duration"] = (
-utils.parse_human_timedelta(period_name).total_seconds() * 1000 # type: ignore
+utils.parse_human_timedelta(period_name).total_seconds() # type: ignore
+* 1000
)
return granularity
@@ -950,7 +945,7 @@ class DruidDatasource(Model, BaseDatasource):
@staticmethod
def metrics_and_post_aggs(
-metrics: List[Metric], metrics_dict: Dict[str, DruidMetric],
+metrics: List[Metric], metrics_dict: Dict[str, DruidMetric]
) -> Tuple["OrderedDict[str, Any]", "OrderedDict[str, Any]"]:
# Separate metrics into those that are aggregations
# and those that are post aggregations

View File

@@ -22,8 +22,6 @@ class DAOException(SupersetException):
Base DAO exception class
"""
-pass
class DAOCreateFailedError(DAOException):
"""

View File

@@ -206,7 +206,9 @@ class DashboardRestApi(BaseSupersetModelRestApi):
except DashboardInvalidError as ex:
return self.response_422(message=ex.normalized_messages())
except DashboardCreateFailedError as ex:
-logger.error(f"Error creating model {self.__class__.__name__}: {ex}")
+logger.error(
+"Error creating model %s: %s", self.__class__.__name__, str(ex)
+)
return self.response_422(message=str(ex))
@expose("/<pk>", methods=["PUT"])
@@ -274,7 +276,9 @@ class DashboardRestApi(BaseSupersetModelRestApi):
except DashboardInvalidError as ex:
return self.response_422(message=ex.normalized_messages())
except DashboardUpdateFailedError as ex:
-logger.error(f"Error updating model {self.__class__.__name__}: {ex}")
+logger.error(
+"Error updating model %s: %s", self.__class__.__name__, str(ex)
+)
return self.response_422(message=str(ex))
@expose("/<pk>", methods=["DELETE"])
@@ -321,7 +325,9 @@ class DashboardRestApi(BaseSupersetModelRestApi):
except DashboardForbiddenError:
return self.response_403()
except DashboardDeleteFailedError as ex:
-logger.error(f"Error deleting model {self.__class__.__name__}: {ex}")
+logger.error(
+"Error deleting model %s: %s", self.__class__.__name__, str(ex)
+)
return self.response_422(message=str(ex))
@expose("/", methods=["DELETE"])
@@ -373,8 +379,8 @@ class DashboardRestApi(BaseSupersetModelRestApi):
return self.response(
200,
message=ngettext(
-f"Deleted %(num)d dashboard",
-f"Deleted %(num)d dashboards",
+"Deleted %(num)d dashboard",
+"Deleted %(num)d dashboards",
num=len(item_ids),
),
)

View File

@@ -49,7 +49,7 @@ class DashboardDAO(BaseDAO):
@staticmethod
def update_charts_owners(model: Dashboard, commit: bool = True) -> Dashboard:
-owners = [owner for owner in model.owners]
+owners = list(model.owners)
for slc in model.slices:
slc.owners = list(set(owners) | set(slc.owners))
if commit:

View File

@@ -186,7 +186,9 @@ class DatasetRestApi(BaseSupersetModelRestApi):
except DatasetInvalidError as ex:
return self.response_422(message=ex.normalized_messages())
except DatasetCreateFailedError as ex:
-logger.error(f"Error creating model {self.__class__.__name__}: {ex}")
+logger.error(
+"Error creating model %s: %s", self.__class__.__name__, str(ex)
+)
return self.response_422(message=str(ex))
@expose("/<pk>", methods=["PUT"])
@@ -254,7 +256,9 @@ class DatasetRestApi(BaseSupersetModelRestApi):
except DatasetInvalidError as ex:
return self.response_422(message=ex.normalized_messages())
except DatasetUpdateFailedError as ex:
-logger.error(f"Error updating model {self.__class__.__name__}: {ex}")
+logger.error(
+"Error updating model %s: %s", self.__class__.__name__, str(ex)
+)
return self.response_422(message=str(ex))
@expose("/<pk>", methods=["DELETE"])
@@ -301,7 +305,9 @@ class DatasetRestApi(BaseSupersetModelRestApi):
except DatasetForbiddenError:
return self.response_403()
except DatasetDeleteFailedError as ex:
-logger.error(f"Error deleting model {self.__class__.__name__}: {ex}")
+logger.error(
+"Error deleting model %s: %s", self.__class__.__name__, str(ex)
+)
return self.response_422(message=str(ex))
@expose("/export/", methods=["GET"])
@@ -401,5 +407,7 @@ class DatasetRestApi(BaseSupersetModelRestApi):
except DatasetForbiddenError:
return self.response_403()
except DatasetRefreshFailedError as ex:
-logger.error(f"Error refreshing dataset {self.__class__.__name__}: {ex}")
+logger.error(
+"Error refreshing dataset %s: %s", self.__class__.__name__, str(ex)
+)
return self.response_422(message=str(ex))

View File

@@ -46,7 +46,7 @@ class DatasetDAO(BaseDAO):
try:
return db.session.query(Database).filter_by(id=database_id).one_or_none()
except SQLAlchemyError as ex: # pragma: no cover
-logger.error(f"Could not get database by id: {ex}")
+logger.error("Could not get database by id: %s", str(ex))
return None
@staticmethod
@@ -55,7 +55,7 @@ class DatasetDAO(BaseDAO):
database.get_table(table_name, schema=schema)
return True
except SQLAlchemyError as ex: # pragma: no cover
-logger.error(f"Got an error {ex} validating table: {table_name}")
+logger.error("Got an error %s validating table: %s", str(ex), table_name)
return False
@staticmethod

View File

@@ -332,7 +332,6 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
set to is_dttm=True. Note that this only gets called when new
columns are detected/created"""
# TODO: Fix circular import caused by importing TableColumn
-pass
@classmethod
def epoch_to_dttm(cls) -> str:
@@ -401,9 +400,11 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
.limit(limit)
)
return database.compile_sqla_query(qry)
-elif LimitMethod.FORCE_LIMIT:
+if LimitMethod.FORCE_LIMIT:
parsed_query = sql_parse.ParsedQuery(sql)
sql = parsed_query.set_or_update_query_limit(limit)
return sql
@classmethod
@@ -465,7 +466,7 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
Create table from contents of a csv. Note: this method does not create
metadata for the table.
"""
-df = cls.csv_to_df(filepath_or_buffer=filename, **csv_to_df_kwargs,)
+df = cls.csv_to_df(filepath_or_buffer=filename, **csv_to_df_kwargs)
engine = cls.get_engine(database)
if table.schema:
# only add schema when it is preset and non empty
@@ -529,7 +530,6 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
for handling the cursor and updating progress information in the
query object"""
# TODO: Fix circular import error caused by importing sql_lab.Query
-pass
@classmethod
def extract_error_message(cls, ex: Exception) -> str:
@@ -573,14 +573,12 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
Some database drivers like presto accept '{catalog}/{schema}' in
the database component of the URL, that can be handled here.
"""
-pass
@classmethod
def patch(cls) -> None:
"""
TODO: Improve docstring and refactor implementation in Hive
"""
-pass
@classmethod
def get_schema_names(cls, inspector: Inspector) -> List[str]:

View File

@@ -56,7 +56,7 @@ class DrillEngineSpec(BaseEngineSpec):
tt = target_type.upper()
if tt == "DATE":
return f"TO_DATE('{dttm.date().isoformat()}', 'yyyy-MM-dd')"
-elif tt == "TIMESTAMP":
+if tt == "TIMESTAMP":
return f"""TO_TIMESTAMP('{dttm.isoformat(sep=" ", timespec="seconds")}', 'yyyy-MM-dd HH:mm:ss')""" # pylint: disable=line-too-long
return None

View File

@@ -200,7 +200,7 @@ class HiveEngineSpec(PrestoEngineSpec):
tt = target_type.upper()
if tt == "DATE":
return f"CAST('{dttm.date().isoformat()}' AS DATE)"
-elif tt == "TIMESTAMP":
+if tt == "TIMESTAMP":
return f"""CAST('{dttm.isoformat(sep=" ", timespec="microseconds")}' AS TIMESTAMP)""" # pylint: disable=line-too-long
return None
@@ -284,7 +284,9 @@ class HiveEngineSpec(PrestoEngineSpec):
if log:
log_lines = log.splitlines()
progress = cls.progress(log_lines)
-logger.info(f"Query {query_id}: Progress total: {progress}")
+logger.info(
+"Query %s: Progress total: %s", str(query_id), str(progress)
+)
needs_commit = False
if progress > query.progress:
query.progress = progress
@@ -294,21 +296,25 @@ class HiveEngineSpec(PrestoEngineSpec):
if tracking_url:
job_id = tracking_url.split("/")[-2]
logger.info(
-f"Query {query_id}: Found the tracking url: {tracking_url}"
+"Query %s: Found the tracking url: %s",
+str(query_id),
+tracking_url,
)
tracking_url = tracking_url_trans(tracking_url)
logger.info(
-f"Query {query_id}: Transformation applied: {tracking_url}"
+"Query %s: Transformation applied: %s",
+str(query_id),
+tracking_url,
)
query.tracking_url = tracking_url
-logger.info(f"Query {query_id}: Job id: {job_id}")
+logger.info("Query %s: Job id: %s", str(query_id), str(job_id))
needs_commit = True
if job_id and len(log_lines) > last_log_line:
# Wait for job id before logging things out
# this allows for prefixing all log lines and becoming
# searchable in something like Kibana
for l in log_lines[last_log_line:]:
-logger.info(f"Query {query_id}: [{job_id}] {l}")
+logger.info("Query %s: [%s] %s", str(query_id), str(job_id), l)
last_log_line = len(log_lines)
if needs_commit:
session.commit()
@@ -414,7 +420,6 @@ class HiveEngineSpec(PrestoEngineSpec):
"""
# Do nothing in the URL object since instead this should modify
# the configuraiton dictionary. See get_configuration_for_impersonation
-pass
@classmethod
def get_configuration_for_impersonation(

View File

@@ -47,7 +47,7 @@ class ImpalaEngineSpec(BaseEngineSpec):
tt = target_type.upper()
if tt == "DATE":
return f"CAST('{dttm.date().isoformat()}' AS DATE)"
-elif tt == "TIMESTAMP":
+if tt == "TIMESTAMP":
return f"""CAST('{dttm.isoformat(timespec="microseconds")}' AS TIMESTAMP)"""
return None

View File

@@ -84,7 +84,9 @@ class PinotEngineSpec(BaseEngineSpec): # pylint: disable=abstract-method
if not granularity:
raise NotImplementedError("No pinot grain spec for " + str(time_grain))
else:
-return TimestampExpression(f"{{col}}", col)
+return TimestampExpression(
+f"{{col}}", col # pylint: disable=f-string-without-interpolation
+)
# In pinot the output is a string since there is no timestamp column like pg
time_expr = f"DATETIMECONVERT({{col}}, '{tf}', '{tf}', '{granularity}')"
return TimestampExpression(time_expr, col)

View File

@@ -79,7 +79,8 @@ def get_children(column: Dict[str, str]) -> List[Dict[str, str]]:
children_type = group["children"]
if type_ == "ARRAY":
return [{"name": column["name"], "type": children_type}]
-elif type_ == "ROW":
+if type_ == "ROW":
nameless_columns = 0
columns = []
for child in utils.split(children_type, ","):
@@ -93,8 +94,8 @@ def get_children(column: Dict[str, str]) -> List[Dict[str, str]]:
nameless_columns += 1
columns.append({"name": f"{column['name']}.{name.lower()}", "type": type_})
return columns
-else:
raise Exception(f"Unknown type {type_}!")
class PrestoEngineSpec(BaseEngineSpec):
@@ -278,7 +279,7 @@ class PrestoEngineSpec(BaseEngineSpec):
if not (inner_type.endswith("array") or inner_type.endswith("row")):
stack.pop()
# We have an array of row objects (i.e. array(row(...)))
-elif inner_type == "array" or inner_type == "row":
+elif inner_type in ("array", "row"):
# Push a dummy object to represent the structural data type
stack.append(("", inner_type))
# We have an array of a basic data types(i.e. array(varchar)).
@@ -339,8 +340,9 @@ class PrestoEngineSpec(BaseEngineSpec):
)
result[structural_column_index]["default"] = None
continue
-else: # otherwise column is a basic data type
-column_type = presto_type_map[column.Type]()
+# otherwise column is a basic data type
+column_type = presto_type_map[column.Type]()
except KeyError:
logger.info(
"Did not recognize type {} of column {}".format( # pylint: disable=logging-format-interpolation
@@ -727,7 +729,7 @@ class PrestoEngineSpec(BaseEngineSpec):
def handle_cursor(cls, cursor: Any, query: Query, session: Session) -> None:
"""Updates progress information"""
query_id = query.id
-logger.info(f"Query {query_id}: Polling the cursor for progress")
+logger.info("Query %i: Polling the cursor for progress", query_id)
polled = cursor.poll()
# poll returns dict -- JSON status information or ``None``
# if the query is done
@@ -761,7 +763,7 @@ class PrestoEngineSpec(BaseEngineSpec):
query.progress = progress
session.commit()
time.sleep(1)
-logger.info(f"Query {query_id}: Polling the cursor for progress")
+logger.info("Query %i: Polling the cursor for progress", query_id)
polled = cursor.poll()
@classmethod
@@ -903,12 +905,14 @@ class PrestoEngineSpec(BaseEngineSpec):
raise SupersetTemplateException(
"The table should have one partitioned field"
)
-elif not show_first and len(indexes[0]["column_names"]) > 1:
+if not show_first and len(indexes[0]["column_names"]) > 1:
raise SupersetTemplateException(
"The table should have a single partitioned field "
"to use this function. You may want to use "
"`presto.latest_sub_partition`"
)
column_names = indexes[0]["column_names"]
part_fields = [(column_name, True) for column_name in column_names]
sql = cls._partition_query(table_name, database, 1, part_fields)
@@ -947,7 +951,7 @@ class PrestoEngineSpec(BaseEngineSpec):
indexes = database.get_indexes(table_name, schema)
part_fields = indexes[0]["column_names"]
for k in kwargs.keys(): # pylint: disable=consider-iterating-dictionary
-if k not in k in part_fields:
+if k not in k in part_fields: # pylint: disable=comparison-with-itself
msg = "Field [{k}] is not part of the portioning key"
raise SupersetTemplateException(msg)
if len(kwargs.keys()) != len(part_fields) - 1:

View File

@@ -64,15 +64,14 @@ class SqliteEngineSpec(BaseEngineSpec):
cache=database.table_cache_enabled,
cache_timeout=database.table_cache_timeout,
)
-elif datasource_type == "view":
+if datasource_type == "view":
return database.get_all_view_names_in_schema(
schema=schema,
force=True,
cache=database.table_cache_enabled,
cache_timeout=database.table_cache_timeout,
)
-else:
-raise Exception(f"Unsupported datasource_type: {datasource_type}")
+raise Exception(f"Unsupported datasource_type: {datasource_type}")
@classmethod
def convert_dttm(cls, target_type: str, dttm: datetime) -> Optional[str]:

View File

@@ -34,7 +34,7 @@ class AnnotationLayer(Model, AuditMixinNullable):
descr = Column(Text)
def __repr__(self) -> str:
-return self.name
+return str(self.name)
class Annotation(Model, AuditMixinNullable):

View File

@@ -170,7 +170,9 @@ class Database(
except Exception as ex: # pylint: disable=broad-except
# function_names property is used in bulk APIs and should not hard crash
# more info in: https://github.com/apache/incubator-superset/issues/9678
-logger.error(f"Failed to fetch database function names with error: {ex}")
+logger.error(
+"Failed to fetch database function names with error: %s", str(ex)
+)
return []
@property
@@ -207,7 +209,7 @@ class Database(
@property
def backend(self) -> str:
sqlalchemy_url = make_url(self.sqlalchemy_uri_decrypted)
-return sqlalchemy_url.get_backend_name()
+return sqlalchemy_url.get_backend_name() # pylint: disable=no-member
@property
def metadata_cache_timeout(self) -> Dict[str, Any]:
@@ -452,8 +454,7 @@ class Database(
return self.db_engine_spec.get_all_datasource_names(self, "table")
@cache_util.memoized_func(
-key=lambda *args, **kwargs: "db:{}:schema:None:view_list",
-attribute_in_key="id",
+key=lambda *args, **kwargs: "db:{}:schema:None:view_list", attribute_in_key="id"
)
def get_all_view_names_in_database(
self,
@@ -652,7 +653,7 @@ class Database(
@utils.memoized
def get_dialect(self) -> Dialect:
sqla_url = url.make_url(self.sqlalchemy_uri_decrypted)
-return sqla_url.get_dialect()()
+return sqla_url.get_dialect()() # pylint: disable=no-member
sqla.event.listen(Database, "after_insert", security_manager.set_perm)

View File

@@ -42,7 +42,7 @@ from sqlalchemy.orm.mapper import Mapper
from superset import app, ConnectorRegistry, db, is_feature_enabled, security_manager
from superset.models.helpers import AuditMixinNullable, ImportMixin
-from superset.models.slice import Slice as Slice
+from superset.models.slice import Slice
from superset.models.tags import DashboardUpdater
from superset.models.user_attributes import UserAttribute
from superset.tasks.thumbnails import cache_dashboard_thumbnail

View File

@@ -363,6 +363,7 @@ class AuditMixinNullable(AuditMixin):
nullable=True,
)
+@property
def changed_by_name(self) -> str:
if self.created_by:
return escape("{}".format(self.created_by))

View File

@@ -90,6 +90,6 @@ class SliceEmailSchedule(Model, AuditMixinNullable, ImportMixin, EmailSchedule):
def get_scheduler_model(report_type: ScheduleType) -> Optional[Type[EmailSchedule]]:
if report_type == ScheduleType.dashboard:
return DashboardEmailSchedule
-elif report_type == ScheduleType.slice:
+if report_type == ScheduleType.slice:
return SliceEmailSchedule
return None

View File

@@ -29,6 +29,7 @@ class TinyInteger(Integer):
A type for tiny ``int`` integers.
"""
+@property
def python_type(self) -> Type[int]:
return int
@@ -42,6 +43,7 @@ class Interval(TypeEngine):
A type for intervals.
"""
+@property
def python_type(self) -> Optional[Type[Any]]:
return None
@@ -55,6 +57,7 @@ class Array(TypeEngine):
A type for arrays.
"""
+@property
def python_type(self) -> Optional[Type[List[Any]]]:
return list
@@ -68,6 +71,7 @@ class Map(TypeEngine):
A type for maps.
"""
+@property
def python_type(self) -> Optional[Type[Dict[Any, Any]]]:
return dict
@@ -81,6 +85,7 @@ class Row(TypeEngine):
A type for rows.
"""
+@property
def python_type(self) -> Optional[Type[Any]]:
return None
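The @property decorators added above bring these python_type overrides in line with SQLAlchemy's TypeEngine.python_type, which is itself a property; overriding a property with a plain method is what newer pylint releases flag as invalid-overridden-method. A minimal standalone sketch of the rule, independent of SQLAlchemy:

class Base:
    @property
    def python_type(self) -> type:
        return object

class Derived(Base):
    # Without @property here, pylint would report invalid-overridden-method.
    @property
    def python_type(self) -> type:
        return int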

View File

@@ -96,9 +96,9 @@ def handle_query_error(
def get_query_backoff_handler(details: Dict[Any, Any]) -> None:
query_id = details["kwargs"]["query_id"]
-logger.error(f"Query with id `{query_id}` could not be retrieved")
+logger.error("Query with id `%s` could not be retrieved", str(query_id))
stats_logger.incr("error_attempting_orm_query_{}".format(details["tries"] - 1))
-logger.error(f"Query {query_id}: Sleeping for a sec before retrying...")
+logger.error("Query %s: Sleeping for a sec before retrying...", str(query_id))
def get_query_giveup_handler(_: Any) -> None:
@@ -287,7 +287,7 @@ def execute_sql_statement(
def _serialize_payload(
payload: Dict[Any, Any], use_msgpack: Optional[bool] = False
) -> Union[bytes, str]:
-logger.debug(f"Serializing to msgpack: {use_msgpack}")
+logger.debug("Serializing to msgpack: %r", use_msgpack)
if use_msgpack:
return msgpack.dumps(payload, default=json_iso_dttm_ser, use_bin_type=True)
@@ -360,9 +360,9 @@ def execute_sql_statements( # pylint: disable=too-many-arguments, too-many-loca
# Breaking down into multiple statements
parsed_query = ParsedQuery(rendered_query)
statements = parsed_query.get_statements()
-logger.info(f"Query {query_id}: Executing {len(statements)} statement(s)")
-logger.info(f"Query {query_id}: Set query to 'running'")
+logger.info("Query %s: Executing %i statement(s)", str(query_id), len(statements))
+logger.info("Query %s: Set query to 'running'", str(query_id))
query.status = QueryStatus.RUNNING
query.start_running_time = now_as_float()
session.commit()
@@ -386,7 +386,7 @@ def execute_sql_statements( # pylint: disable=too-many-arguments, too-many-loca
# Run statement
msg = f"Running statement {i+1} out of {statement_count}"
-logger.info(f"Query {query_id}: {msg}")
+logger.info("Query %s: %s", str(query_id), msg)
query.set_extra_json_key("progress", msg)
session.commit()
try:
@@ -437,7 +437,9 @@ def execute_sql_statements( # pylint: disable=too-many-arguments, too-many-loca
if store_results and results_backend:
key = str(uuid.uuid4())
-logger.info(f"Query {query_id}: Storing results in results backend, key: {key}")
+logger.info(
+"Query %s: Storing results in results backend, key: %s", str(query_id), key
+)
with stats_timing("sqllab.query.results_backend_write", stats_logger):
with stats_timing(
"sqllab.query.results_backend_write_serialization", stats_logger
@@ -451,9 +453,9 @@ def execute_sql_statements( # pylint: disable=too-many-arguments, too-many-loca
compressed = zlib_compress(serialized_payload)
logger.debug(
-f"*** serialized payload size: {getsizeof(serialized_payload)}"
+"*** serialized payload size: %i", getsizeof(serialized_payload)
)
-logger.debug(f"*** compressed payload size: {getsizeof(compressed)}")
+logger.debug("*** compressed payload size: %i", getsizeof(compressed))
results_backend.set(key, compressed, cache_timeout)
query.results_key = key

View File

@@ -138,7 +138,7 @@ class PrestoDBSQLValidator(BaseSQLValidator):
end_column=end_column,
)
except Exception as ex:
-logger.exception(f"Unexpected error running validation query: {ex}")
+logger.exception("Unexpected error running validation query: %s", str(ex))
raise ex
@classmethod
@@ -156,7 +156,7 @@ class PrestoDBSQLValidator(BaseSQLValidator):
parsed_query = ParsedQuery(sql)
statements = parsed_query.get_statements()
-logger.info(f"Validating {len(statements)} statement(s)")
+logger.info("Validating %i statement(s)", len(statements))
engine = database.get_sqla_engine(
schema=schema,
nullpool=True,
@@ -174,6 +174,6 @@ class PrestoDBSQLValidator(BaseSQLValidator):
)
if annotation:
annotations.append(annotation)
-logger.debug(f"Validation found {len(annotations)} error(s)")
+logger.debug("Validation found %i error(s)", len(annotations))
return annotations

View File

@@ -275,7 +275,7 @@ def cache_warmup(
logger.error(message)
return message
-logger.info(f"Loading {class_.__name__}")
+logger.info("Loading %s", class_.__name__)
try:
strategy = class_(*args, **kwargs)
logger.info("Success!")
@@ -287,7 +287,7 @@ def cache_warmup(
results: Dict[str, List[str]] = {"success": [], "errors": []}
for url in strategy.get_urls():
try:
-logger.info(f"Fetching {url}")
+logger.info("Fetching %s", url)
request.urlopen(url)
results["success"].append(url)
except URLError:

View File

@@ -488,7 +488,7 @@ def schedule_email_report( # pylint: disable=unused-argument
recipients = recipients or schedule.recipients
slack_channel = slack_channel or schedule.slack_channel
logger.info(
-f"Starting report for slack: {slack_channel} and recipients: {recipients}."
+"Starting report for slack: %s and recipients: %s.", slack_channel, recipients
)
if report_type == ScheduleType.dashboard:

View File

@@ -42,5 +42,5 @@ def deliver_slack_msg(
channels=slack_channel, file=file, initial_comment=body, title=subject
),
)
-logger.info(f"Sent the report to the slack {slack_channel}")
+logger.info("Sent the report to the slack %s", slack_channel)
assert response["file"], str(response) # the uploaded file

View File

@@ -49,7 +49,7 @@ def convert_filter_scopes(
"immune": current_filter_immune,
}
else:
-logging.info(f"slice [{filter_id}] has invalid field: {filter_field}")
+logging.info("slice [%i] has invalid field: %s", filter_id, filter_field)
for filter_slice in filters:
filter_fields: Dict[str, Dict[str, Any]] = {}

View File

@@ -134,7 +134,7 @@ def get_event_logger_from_cfg_value(cfg_value: Any) -> AbstractEventLogger:
"of superset.utils.log.AbstractEventLogger."
)
-logging.info(f"Configured event logger of type {type(result)}")
+logging.info("Configured event logger of type %s", type(result))
return cast(AbstractEventLogger, result)

View File

@@ -155,14 +155,14 @@ class AuthWebDriverProxy:
driver.set_window_size(*self._window)
driver.get(url)
img: Optional[bytes] = None
-logger.debug(f"Sleeping for {SELENIUM_HEADSTART} seconds")
+logger.debug("Sleeping for %i seconds", SELENIUM_HEADSTART)
time.sleep(SELENIUM_HEADSTART)
try:
-logger.debug(f"Wait for the presence of {element_name}")
+logger.debug("Wait for the presence of %s", element_name)
element = WebDriverWait(driver, 10).until(
EC.presence_of_element_located((By.CLASS_NAME, element_name))
)
-logger.debug(f"Wait for .loading to be done")
+logger.debug("Wait for .loading to be done")
WebDriverWait(driver, 60).until_not(
EC.presence_of_all_elements_located((By.CLASS_NAME, "loading"))
)
@@ -226,7 +226,7 @@ class BaseScreenshot:
user=user, thumb_size=thumb_size, cache=cache
)
else:
-logger.info(f"Loaded thumbnail from cache: {self.cache_key}")
+logger.info("Loaded thumbnail from cache: %s", self.cache_key)
if payload:
return BytesIO(payload)
return None
@@ -259,7 +259,7 @@ class BaseScreenshot:
logger.info("Thumb already cached, skipping...")
return None
thumb_size = thumb_size or self.thumb_size
-logger.info(f"Processing url for thumbnail: {cache_key}")
+logger.info("Processing url for thumbnail: %s", cache_key)
payload = None
@@ -277,7 +277,7 @@ class BaseScreenshot:
payload = None
if payload and cache:
-logger.info(f"Caching thumbnail: {cache_key} {cache}")
+logger.info("Caching thumbnail: %s %s", cache_key, str(cache))
cache.set(cache_key, payload)
return payload
@@ -291,13 +291,13 @@ class BaseScreenshot:
) -> bytes:
thumb_size = thumb_size or cls.thumb_size
img = Image.open(BytesIO(img_bytes))
-logger.debug(f"Selenium image size: {img.size}")
+logger.debug("Selenium image size: %s", str(img.size))
if crop and img.size[1] != cls.window_size[1]:
desired_ratio = float(cls.window_size[1]) / cls.window_size[0]
desired_width = int(img.size[0] * desired_ratio)
-logger.debug(f"Cropping to: {img.size[0]}*{desired_width}")
+logger.debug("Cropping to: %s*%s", str(img.size[0]), str(desired_width))
img = img.crop((0, 0, img.size[0], desired_width))
-logger.debug(f"Resizing to {thumb_size}")
+logger.debug("Resizing to %s", str(thumb_size))
img = img.resize(thumb_size, Image.ANTIALIAS)
new_img = BytesIO()
if output != "png":

View File

@@ -23,8 +23,7 @@ from wtforms.validators import StopValidation
from superset.constants import RouteMethod
from superset.models.annotations import Annotation, AnnotationLayer
-from .base import SupersetModelView
+from superset.views.base import SupersetModelView
class StartEndDttmValidator: # pylint: disable=too-few-public-methods
@@ -35,7 +34,7 @@ class StartEndDttmValidator: # pylint: disable=too-few-public-methods
def __call__(self, form: Dict[str, Any], field: Any) -> None:
if not form["start_dttm"].data and not form["end_dttm"].data:
raise StopValidation(_("annotation start time or end time is required."))
-elif (
+if (
form["end_dttm"].data
and form["start_dttm"].data
and form["end_dttm"].data < form["start_dttm"].data

View File

@@ -212,7 +212,7 @@ def validate_sqlatable(table: models.SqlaTable) -> None:
try:
table.get_sqla_table_object()
except Exception as ex:
-logger.exception(f"Got an error in pre_add for {table.name}")
+logger.exception("Got an error in pre_add for %s", table.name)
raise Exception(
_(
"Table [%{table}s] could not be found, "
@@ -498,8 +498,7 @@ def check_ownership(obj: Any, raise_if_false: bool = True) -> bool:
return True
if raise_if_false:
raise security_exception
-else:
-return False
+return False
def bind_field( def bind_field(

View File

@@ -29,15 +29,14 @@ from superset import app, db, event_logger
from superset.constants import RouteMethod
from superset.typing import FlaskResponse
from superset.utils import core as utils
-from ..base import (
+from superset.views.base import (
BaseSupersetView,
check_ownership,
DeleteMixin,
generate_download_headers,
SupersetModelView,
)
-from .mixin import DashboardMixin
+from superset.views.dashboard.mixin import DashboardMixin
class DashboardModelView(
@@ -95,7 +94,7 @@ class DashboardModelView(
item.owners.append(g.user)
utils.validate_json(item.json_metadata)
utils.validate_json(item.position_json)
-owners = [o for o in item.owners]
+owners = list(item.owners)
for slc in item.slices:
slc.owners = list(set(owners) | set(slc.owners))

View File

@@ -51,14 +51,16 @@ def check_datasource_access(f: Callable[..., Any]) -> Callable[..., Any]:
)
return self.response_404()
if not self.appbuilder.sm.can_access_table(
-database, Table(table_name_parsed, schema_name_parsed),
+database, Table(table_name_parsed, schema_name_parsed)
):
self.stats_logger.incr(
f"permisssion_denied_{self.__class__.__name__}.select_star"
)
logger.warning(
-f"Permission denied for user {g.user} on table: {table_name_parsed} "
-f"schema: {schema_name_parsed}"
+"Permission denied for user %s on table: %s schema: %s",
+g.user,
+table_name_parsed,
+schema_name_parsed,
)
return self.response_404()
return f(self, database, table_name_parsed, schema_name_parsed)

View File

@@ -460,7 +460,7 @@ def check_slice_perms(_self: Any, slice_id: int) -> None:
def _deserialize_results_payload(
payload: Union[bytes, str], query: Query, use_msgpack: Optional[bool] = False
) -> Dict[str, Any]:
-logger.debug(f"Deserializing from msgpack: {use_msgpack}")
+logger.debug("Deserializing from msgpack: %r", use_msgpack)
if use_msgpack:
with stats_timing(
"sqllab.query.results_backend_msgpack_deserialize", stats_logger
@@ -482,11 +482,9 @@ def _deserialize_results_payload(
)
return ds_payload
-else:
-with stats_timing(
-"sqllab.query.results_backend_json_deserialize", stats_logger
-):
-return json.loads(payload)
+with stats_timing("sqllab.query.results_backend_json_deserialize", stats_logger):
+return json.loads(payload)
def get_cta_schema_name(