chore: Update pylint to 2.17.4 (#24700)

Co-authored-by: John Bodley <john.bodley@gmail.com>
EugeneTorap authored on 2023-07-25 03:13:49 +03:00, committed by GitHub
parent c17accc0b4
commit fc89718d48
99 changed files with 297 additions and 336 deletions

View File

@ -50,15 +50,6 @@ unsafe-load-any-extension=no
# run arbitrary code
extension-pkg-whitelist=pyarrow
# Allow optimization of some AST trees. This will activate a peephole AST
# optimizer, which will apply various small optimizations. For instance, it can
# be used to obtain the result of joining multiple strings with the addition
# operator. Joining a lot of strings can lead to a maximum recursion error in
# Pylint and this flag can prevent that. It has one side effect, the resulting
# AST will be different than the one from reality. This option is deprecated
# and it will be removed in Pylint 2.0.
optimize-ast=no
[MESSAGES CONTROL]
@ -83,11 +74,11 @@ enable=
# no Warning level messages displayed, use "--disable=all --enable=classes
# --disable=W"
disable=
cyclic-import, # re-enable once this no longer raises false positives
missing-docstring,
duplicate-code,
unspecified-encoding,
# re-enable once this no longer raises false positives
too-many-instance-attributes
too-many-instance-attributes # re-enable once this no longer raises false positives
[REPORTS]
@ -96,12 +87,6 @@ disable=
# mypackage.mymodule.MyReporterClass.
output-format=text
# Put messages in a separate file for each module / package specified on the
# command line instead of printing them on stdout. Reports (if any) will be
# written in a file name "pylint_global.[txt|html]". This option is deprecated
# and it will be removed in Pylint 2.0.
files-output=no
# Tells whether to display a full report or only the messages
reports=yes
@ -141,63 +126,33 @@ property-classes=
# Regular expression matching correct argument names
argument-rgx=[a-z_][a-z0-9_]{2,30}$
# Naming hint for argument names
argument-name-hint=[a-z_][a-z0-9_]{2,30}$
# Regular expression matching correct method names
method-rgx=[a-z_][a-z0-9_]{2,30}$
# Naming hint for method names
method-name-hint=[a-z_][a-z0-9_]{2,30}$
# Regular expression matching correct variable names
variable-rgx=[a-z_][a-z0-9_]{1,30}$
# Naming hint for variable names
variable-name-hint=[a-z_][a-z0-9_]{2,30}$
# Regular expression matching correct inline iteration names
inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
# Naming hint for inline iteration names
inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$
# Regular expression matching correct constant names
const-rgx=(([A-Za-z_][A-Za-z0-9_]*)|(__.*__))$
# Naming hint for constant names
const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$
# Regular expression matching correct class names
class-rgx=[A-Z_][a-zA-Z0-9]+$
# Naming hint for class names
class-name-hint=[A-Z_][a-zA-Z0-9]+$
# Regular expression matching correct class attribute names
class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
# Naming hint for class attribute names
class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
# Regular expression matching correct module names
module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
# Naming hint for module names
module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
# Regular expression matching correct attribute names
attr-rgx=[a-z_][a-z0-9_]{2,30}$
# Naming hint for attribute names
attr-name-hint=[a-z_][a-z0-9_]{2,30}$
# Regular expression matching correct function names
function-rgx=[a-z_][a-z0-9_]{2,30}$
# Naming hint for function names
function-name-hint=[a-z_][a-z0-9_]{2,30}$
# Regular expression which should only match function or class names that do
# not require a docstring.
no-docstring-rgx=^_
@ -225,12 +180,6 @@ ignore-long-lines=^\s*(# )?<?https?://\S+>?$
# else.
single-line-if-stmt=no
# List of optional constructs for which whitespace checking is disabled. `dict-
# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}.
# `trailing-comma` allows a space between comma and closing bracket: (a, ).
# `empty-line` allows space-only lines.
no-space-check=trailing-comma,dict-separator
# Maximum number of lines in a module
max-module-lines=1000
@ -427,4 +376,4 @@ analyse-fallback-blocks=no
# Exceptions that will emit a warning when being caught. Defaults to
# "Exception"
overgeneral-exceptions=Exception
overgeneral-exceptions=builtins.Exception
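
Pylint 2.16 reworked the broad-exception checks, which is why the option above now needs the fully qualified builtins.Exception: unqualified names in overgeneral-exceptions are deprecated, raise Exception is flagged as broad-exception-raised (W0719), and the catch-side check was renamed to broad-exception-caught. A minimal sketch of what the setting governs (the class and function names are illustrative, not from this commit):

class ExamplesNotFoundError(Exception):
    """A narrow exception type that keeps both checks quiet."""

def load_examples(db):
    if db is None:
        # `raise Exception(...)` here would be broad-exception-raised (W0719)
        raise ExamplesNotFoundError("Cannot find examples database")

def run(db):
    try:
        load_examples(db)
    except Exception:  # flagged because builtins.Exception is overgeneral
        raise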

View File

@ -320,7 +320,7 @@ werkzeug==2.3.3
# flask
# flask-jwt-extended
# flask-login
wrapt==1.12.1
wrapt==1.15.0
# via deprecated
wtforms==2.3.3
# via

View File

@ -21,7 +21,7 @@
ipython
progress>=1.5,<2
pyinstrument>=4.0.2,<5
pylint==2.9.6
pylint
python-ldap>=3.4.3
setuptools>=65.5.1
sqloxide

View File

@ -1,4 +1,4 @@
# SHA1:4c0ce3a84b01a5a3fe6c72cbf2fc96e5eada2dbe
# SHA1:e35d6e709dc86002ca35ad59f7119aa6cc1e7179
#
# This file is autogenerated by pip-compile-multi
# To update, run:
@ -10,7 +10,9 @@
# via
# -r requirements/base.in
# -r requirements/development.in
astroid==2.6.6
appnope==0.1.3
# via ipython
astroid==2.15.6
# via pylint
asttokens==2.2.1
# via stack-data
@ -32,6 +34,8 @@ charset-normalizer==3.1.0
# via requests
decorator==5.1.1
# via ipython
dill==0.3.6
# via pylint
et-xmlfile==1.1.0
# via openpyxl
executing==1.2.0
@ -60,7 +64,7 @@ linear-tsv==1.1.0
# via tabulator
matplotlib-inline==0.1.6
# via ipython
mccabe==0.6.1
mccabe==0.7.0
# via pylint
mysqlclient==2.1.0
# via apache-superset
@ -74,6 +78,8 @@ pickleshare==0.7.5
# via ipython
pillow==9.5.0
# via apache-superset
platformdirs==3.8.1
# via pylint
progress==1.6
# via -r requirements/development.in
psycopg2-binary==2.9.6
@ -96,7 +102,7 @@ pyhive[hive]==0.6.5
# via apache-superset
pyinstrument==4.4.0
# via -r requirements/development.in
pylint==2.9.6
pylint==2.17.4
# via -r requirements/development.in
python-ldap==3.4.3
# via -r requirements/development.in
@ -126,7 +132,9 @@ thrift==0.16.0
# thrift-sasl
thrift-sasl==0.4.3
# via pyhive
toml==0.10.2
tomli==2.0.1
# via pylint
tomlkit==0.11.8
# via pylint
traitlets==5.9.0
# via

View File

@ -64,7 +64,7 @@ class DeleteChartCommand(BaseCommand):
if reports := ReportScheduleDAO.find_by_chart_ids(self._model_ids):
report_names = [report.name for report in reports]
raise ChartDeleteFailedReportsExistError(
_("There are associated alerts or reports: %s" % ",".join(report_names))
_(f"There are associated alerts or reports: {','.join(report_names)}")
)
# Check ownership
for model in self._models:
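
This hunk is one of many in the commit that replace %-interpolation with f-strings, the modernization pylint suggests via consider-using-f-string (C0209). A before/after sketch with a hypothetical report_names list:

report_names = ["daily sales", "weekly kpis"]  # hypothetical values
legacy = "There are associated alerts or reports: %s" % ",".join(report_names)  # C0209
modern = f"There are associated alerts or reports: {','.join(report_names)}"
assert legacy == modern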

View File

@ -64,7 +64,7 @@ class ChartWarmUpCacheCommand(BaseCommand):
else get_dashboard_extra_filters(chart.id, self._dashboard_id)
)
g.form_data = form_data # pylint: disable=assigning-non-slot
g.form_data = form_data
payload = get_viz(
datasource_type=chart.datasource.type,
datasource_id=chart.datasource.id,

View File

@ -143,7 +143,7 @@ class ChartDataRestApi(ChartRestApi):
query_context = self._create_query_context_from_form(json_body)
command = ChartDataCommand(query_context)
command.validate()
except DatasourceNotFound as error:
except DatasourceNotFound:
return self.response_404()
except QueryObjectValidationError as error:
return self.response_400(message=error.message)
@ -233,7 +233,7 @@ class ChartDataRestApi(ChartRestApi):
query_context = self._create_query_context_from_form(json_body)
command = ChartDataCommand(query_context)
command.validate()
except DatasourceNotFound as error:
except DatasourceNotFound:
return self.response_404()
except QueryObjectValidationError as error:
return self.response_400(message=error.message)
@ -420,11 +420,10 @@ class ChartDataRestApi(ChartRestApi):
return self._send_chart_response(result, form_data, datasource)
# pylint: disable=invalid-name, no-self-use
# pylint: disable=invalid-name
def _load_query_context_form_from_cache(self, cache_key: str) -> dict[str, Any]:
return QueryContextCacheLoader.load(cache_key)
# pylint: disable=no-self-use
def _create_query_context_from_form(
self, form_data: dict[str, Any]
) -> QueryContext:
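
Dropping the unused `as error` alias is what silences pylint's unused-variable (W0612); the alias stays only in the handler that actually reads the exception. A self-contained sketch of the pattern (the exception classes stand in for Superset's):

class DatasourceNotFound(Exception):
    pass

class QueryObjectValidationError(Exception):
    @property
    def message(self):
        return str(self)

def respond(command):
    try:
        command.validate()
    except DatasourceNotFound:  # alias dropped: the handler never uses it
        return 404, None
    except QueryObjectValidationError as error:  # alias kept: message is read
        return 400, error.message
    return 200, None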

View File

@ -160,7 +160,8 @@ class ChartOwnedCreatedFavoredByMeFilter(
Slice.id == FavStar.obj_id,
),
isouter=True,
).filter( # pylint: disable=comparison-with-callable
).filter(
# pylint: disable=comparison-with-callable
or_(
Slice.id.in_(owner_ids_query),
Slice.created_by_fk == get_user_id(),

View File

@ -289,7 +289,9 @@ def apply_post_process(
for query in result["queries"]:
if query["result_format"] not in (rf.value for rf in ChartDataResultFormat):
raise Exception(f"Result format {query['result_format']} not supported")
raise Exception( # pylint: disable=broad-exception-raised
f"Result format {query['result_format']} not supported"
)
data = query["data"]

View File

@ -74,7 +74,6 @@ if feature_flags.get("VERSIONED_EXPORT"):
from superset.dashboards.commands.export import ExportDashboardsCommand
from superset.models.dashboard import Dashboard
# pylint: disable=assigning-non-slot
g.user = security_manager.find_user(username="admin")
dashboard_ids = [id_ for (id_,) in db.session.query(Dashboard.id).all()]
@ -109,7 +108,6 @@ if feature_flags.get("VERSIONED_EXPORT"):
from superset.connectors.sqla.models import SqlaTable
from superset.datasets.commands.export import ExportDatasetsCommand
# pylint: disable=assigning-non-slot
g.user = security_manager.find_user(username="admin")
dataset_ids = [id_ for (id_,) in db.session.query(SqlaTable.id).all()]
@ -151,7 +149,6 @@ if feature_flags.get("VERSIONED_EXPORT"):
)
if username is not None:
# pylint: disable=assigning-non-slot
g.user = security_manager.find_user(username=username)
if is_zipfile(path):
with ZipFile(path) as bundle:
@ -317,7 +314,6 @@ else:
elif path_object.exists() and recursive:
files.extend(path_object.rglob("*.json"))
if username is not None:
# pylint: disable=assigning-non-slot
g.user = security_manager.find_user(username=username)
contents = {}
for path_ in files:
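
Every deletion in this file is the same cleanup: earlier pylint releases raised a false-positive assigning-non-slot (E0237) when code assigned attributes on flask.g, so the disables were sprinkled throughout; with astroid 2.15 modeling the proxy correctly, they can all go. A sketch assuming Flask is installed:

from flask import Flask, g

app = Flask(__name__)

with app.app_context():
    # No `# pylint: disable=assigning-non-slot` needed under pylint 2.17:
    # attribute assignment on flask.g is now understood by astroid.
    g.user = "admin"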

View File

@ -41,7 +41,7 @@ def superset() -> None:
@app.shell_context_processor
def make_shell_context() -> dict[str, Any]:
return dict(app=app, db=db)
return {"app": app, "db": db}
# add sub-commands
@ -71,12 +71,7 @@ def init() -> None:
def version(verbose: bool) -> None:
"""Prints the current version number"""
print(Fore.BLUE + "-=" * 15)
print(
Fore.YELLOW
+ "Superset "
+ Fore.CYAN
+ "{version}".format(version=app.config["VERSION_STRING"])
)
print(Fore.YELLOW + "Superset " + Fore.CYAN + f"{app.config['VERSION_STRING']}")
print(Fore.BLUE + "-=" * 15)
if verbose:
print("[DB] : " + f"{db.engine}")

View File

@ -81,7 +81,7 @@ def update_api_docs() -> None:
title=current_app.appbuilder.app_name,
version=api_version,
openapi_version="3.0.2",
info=dict(description=current_app.appbuilder.app_name),
info={"description": current_app.appbuilder.app_name},
plugins=[MarshmallowPlugin(schema_name_resolver=resolver)],
servers=[{"url": "http://localhost:8088"}],
)

View File

@ -44,7 +44,7 @@ class ObjectNotFoundError(CommandException):
super().__init__(
_(
self.message_format.format(
object_type, '"%s" ' % object_id if object_id else ""
object_type, f'"{object_id}" ' if object_id else ""
)
),
exception,

View File

@ -91,7 +91,7 @@ class ImportExamplesCommand(ImportModelsCommand):
)
@staticmethod
def _import( # pylint: disable=arguments-differ, too-many-locals, too-many-branches
def _import( # pylint: disable=too-many-locals, too-many-branches
session: Session,
configs: dict[str, Any],
overwrite: bool = False,
@ -120,7 +120,9 @@ class ImportExamplesCommand(ImportModelsCommand):
# find the ID of the corresponding database
if config["database_uuid"] not in database_ids:
if examples_db is None:
raise Exception("Cannot find examples database")
raise Exception( # pylint: disable=broad-exception-raised
"Cannot find examples database"
)
config["database_id"] = examples_db.id
else:
config["database_id"] = database_ids[config["database_uuid"]]

View File

@ -93,7 +93,6 @@ class QueryContextFactory: # pylint: disable=too-few-public-methods
cache_values=cache_values,
)
# pylint: disable=no-self-use
def _convert_to_model(self, datasource: DatasourceDict) -> BaseDatasource:
return DatasourceDAO.get_datasource(
session=db.session,
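
no-self-use (R0201) moved out of pylint's default checkers and into the optional pylint.extensions.no_self_use plugin in 2.14, so inline disables like the one deleted above no longer suppress anything. A sketch of the opt-in and the kind of method the plugin would flag (names illustrative):

# To keep the check, a project can load the plugin in .pylintrc:
#   [MASTER]
#   load-plugins=pylint.extensions.no_self_use

class QueryContextFactory:
    def _convert_to_model(self, datasource):  # R0201 only with the plugin loaded
        return {"datasource": datasource}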

View File

@ -687,7 +687,7 @@ class QueryContextProcessor:
def get_viz_annotation_data(
annotation_layer: dict[str, Any], force: bool
) -> dict[str, Any]:
# pylint: disable=import-outside-toplevel,superfluous-parens
# pylint: disable=import-outside-toplevel
from superset.charts.data.commands.get_data_command import ChartDataCommand
if not (chart := ChartDAO.find_by_id(annotation_layer["value"])):

View File

@ -82,7 +82,7 @@ class QueryObjectFactory: # pylint: disable=too-few-public-methods
session=self._session_maker(),
)
def _process_extras( # pylint: disable=no-self-use
def _process_extras(
self,
extras: dict[str, Any] | None,
) -> dict[str, Any]:

View File

@ -1147,6 +1147,7 @@ BLUEPRINTS: list[Blueprint] = []
# TRACKING_URL_TRANSFORMER = (
# lambda url, query: url if is_fresh(query) else None
# )
# pylint: disable-next=unnecessary-lambda-assignment
TRACKING_URL_TRANSFORMER = lambda url: url
@ -1466,6 +1467,7 @@ SSL_CERT_PATH: str | None = None
# This can be used to set any properties of the object based on naming
# conventions and such. You can find examples in the tests.
# pylint: disable-next=unnecessary-lambda-assignment
SQLA_TABLE_MUTATOR = lambda table: table
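
unnecessary-lambda-assignment (C3001) landed in pylint 2.14; config.py deliberately keeps these lambdas as one-line defaults for user-overridable hooks, so the commit adds targeted disable-next pragmas rather than rewriting them. The def form pylint would otherwise ask for looks like:

# What C3001 suggests instead of `TRACKING_URL_TRANSFORMER = lambda url: url`:
def tracking_url_transformer(url):
    return url

TRACKING_URL_TRANSFORMER = tracking_url_transformer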

View File

@ -589,7 +589,7 @@ class BaseDatasource(
else []
)
def get_extra_cache_keys( # pylint: disable=no-self-use
def get_extra_cache_keys(
self, query_obj: QueryObjectDict # pylint: disable=unused-argument
) -> list[Hashable]:
"""If a datasource needs to provide additional keys for calculation of

View File

@ -300,7 +300,7 @@ class TableColumn(Model, BaseColumn, CertificationMixin):
return GenericDataType.TEMPORAL
return (
column_spec.generic_type # pylint: disable=used-before-assignment
column_spec.generic_type
if (
column_spec := self.db_engine_spec.get_column_spec(
self.type,
@ -1464,7 +1464,9 @@ class SqlaTable(
if not DatasetDAO.validate_uniqueness(
target.database_id, target.schema, target.table_name, target.id
):
raise Exception(get_dataset_exist_error_msg(target.full_name))
raise Exception( # pylint: disable=broad-exception-raised
get_dataset_exist_error_msg(target.full_name)
)
@staticmethod
def update_column( # pylint: disable=unused-argument

View File

@ -335,7 +335,7 @@ class TableModelView( # pylint: disable=too-many-ancestors
"offset": _("Timezone offset (in hours) for this datasource"),
"table_name": _("Name of the table that exists in the source database"),
"schema": _(
"Schema, as used only in some databases like Postgres, Redshift " "and DB2"
"Schema, as used only in some databases like Postgres, Redshift and DB2"
),
"description": Markup(
'Supports <a href="https://daringfireball.net/projects/markdown/">'
@ -361,7 +361,7 @@ class TableModelView( # pylint: disable=too-many-ancestors
"from the backend on the fly"
),
"is_sqllab_view": _(
"Whether the table was generated by the 'Visualize' flow " "in SQL Lab"
"Whether the table was generated by the 'Visualize' flow in SQL Lab"
),
"template_params": _(
"A set of parameters that become available in the query using "
@ -410,7 +410,7 @@ class TableModelView( # pylint: disable=too-many-ancestors
)
}
def post_add( # pylint: disable=arguments-differ
def post_add(
self,
item: "TableModelView",
flash_message: bool = True,

View File

@ -33,7 +33,7 @@ from superset.daos.exceptions import (
from superset.extensions import db
from superset.utils.core import get_iterable
T = TypeVar("T", bound=Model) # pylint: disable=invalid-name
T = TypeVar("T", bound=Model)
class BaseDAO(Generic[T]):

View File

@ -122,9 +122,11 @@ class DatabaseDAO(BaseDAO[Database]):
db.session.query(TabState).filter(TabState.database_id == database_id).all()
)
return dict(
charts=charts, dashboards=dashboards, sqllab_tab_states=sqllab_tab_states
)
return {
"charts": charts,
"dashboards": dashboards,
"sqllab_tab_states": sqllab_tab_states,
}
@classmethod
def get_ssh_tunnel(cls, database_id: int) -> Optional[SSHTunnel]:

View File

@ -66,7 +66,7 @@ class DatasetDAO(BaseDAO[SqlaTable]): # pylint: disable=too-many-public-methods
.distinct()
.all()
)
return dict(charts=charts, dashboards=dashboards)
return {"charts": charts, "dashboards": dashboards}
@staticmethod
def validate_table_exists(

View File

@ -284,6 +284,7 @@ class DashboardRestApi(BaseSupersetModelRestApi):
def __repr__(self) -> str:
"""Deterministic string representation of the API instance for etag_cache."""
# pylint: disable=consider-using-f-string
return "Superset.dashboards.api.DashboardRestApi@v{}{}".format(
self.appbuilder.app.config["VERSION_STRING"],
self.appbuilder.app.config["VERSION_SHA"],
@ -305,7 +306,7 @@ class DashboardRestApi(BaseSupersetModelRestApi):
@statsd_metrics
@with_dashboard
@event_logger.log_this_with_extra_payload
# pylint: disable=arguments-differ
# pylint: disable=arguments-differ,arguments-renamed
def get(
self,
dash: Dashboard,
@ -756,8 +757,8 @@ class DashboardRestApi(BaseSupersetModelRestApi):
@event_logger.log_this_with_context(
action=lambda self, *args, **kwargs: f"{self.__class__.__name__}.export",
log_to_statsd=False,
) # pylint: disable=too-many-locals
def export(self, **kwargs: Any) -> Response:
)
def export(self, **kwargs: Any) -> Response: # pylint: disable=too-many-locals
"""Export dashboards
---
get:
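
Moving the pragma off the decorator's closing parenthesis matters because too-many-locals is reported on the def line of the decorated function, and a trailing `# pylint: disable=` comment only covers the statement it sits on. A sketch with a hypothetical stand-in decorator:

def log_this_with_context(**_kwargs):  # stand-in for the real decorator
    return lambda func: func

@log_this_with_context(
    log_to_statsd=False,
)
# A disable trailing the `)` above covers only the decorator expression;
# placed on the def line it suppresses the function-level message.
def export(**kwargs):  # pylint: disable=too-many-locals
    return kwargs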

View File

@ -60,7 +60,7 @@ class DeleteDashboardCommand(BaseCommand):
if reports := ReportScheduleDAO.find_by_dashboard_ids(self._model_ids):
report_names = [report.name for report in reports]
raise DashboardDeleteFailedReportsExistError(
_("There are associated alerts or reports: %s" % ",".join(report_names))
_(f"There are associated alerts or reports: {','.join(report_names)}")
)
# Check ownership
for model in self._models:

View File

@ -123,7 +123,6 @@ class FilterSetRestApi(BaseSupersetModelRestApi):
super().__init__()
def _init_properties(self) -> None:
# pylint: disable=bad-super-call
super(BaseSupersetModelRestApi, self)._init_properties()
@expose("/<int:dashboard_id>/filtersets", methods=("GET",))
@ -181,7 +180,7 @@ class FilterSetRestApi(BaseSupersetModelRestApi):
$ref: '#/components/responses/404'
"""
if not DashboardDAO.find_by_id(cast(int, dashboard_id)):
return self.response(404, message="dashboard '%s' not found" % dashboard_id)
return self.response(404, message=f"dashboard '{dashboard_id}' not found")
rison_data = kwargs.setdefault("rison", {})
rison_data.setdefault("filters", [])
rison_data["filters"].append(

View File

@ -49,7 +49,6 @@ class DeleteFilterSetCommand(BaseFilterSetCommand):
except FilterSetNotFoundError as err:
if FilterSetDAO.find_by_id(self._filter_set_id): # type: ignore
raise FilterSetForbiddenError(
'the filter-set does not related to dashboard "%s"'
% str(self._dashboard_id)
f"the filter-set does not related to dashboard {self._dashboard_id}"
) from err
raise err

View File

@ -46,7 +46,6 @@ class FilterSetSchema(Schema):
class FilterSetPostSchema(FilterSetSchema):
json_metadata_schema: JsonMetadataSchema = JsonMetadataSchema()
# pylint: disable=W0613
name = fields.String(
required=True,
allow_none=False,
@ -83,7 +82,7 @@ class FilterSetPutSchema(FilterSetSchema):
)
@post_load
def validate( # pylint: disable=unused-argument
def validate(
self, data: Mapping[Any, Any], *, many: Any, partial: Any
) -> dict[str, Any]:
if JSON_METADATA_FIELD in data:

View File

@ -141,7 +141,7 @@ class DashboardJSONMetadataSchema(Schema):
native_filter_migration = fields.Dict()
@pre_load
def remove_show_native_filters( # pylint: disable=unused-argument, no-self-use
def remove_show_native_filters( # pylint: disable=unused-argument
self,
data: dict[str, Any],
**kwargs: Any,
@ -193,10 +193,10 @@ class DashboardGetResponseSchema(Schema):
metadata={"description": certification_details_description}
)
changed_by_name = fields.String()
changed_by = fields.Nested(UserSchema(exclude=(["username"])))
changed_by = fields.Nested(UserSchema(exclude=["username"]))
changed_on = fields.DateTime()
charts = fields.List(fields.String(metadata={"description": charts_description}))
owners = fields.List(fields.Nested(UserSchema(exclude=(["username"]))))
owners = fields.List(fields.Nested(UserSchema(exclude=["username"])))
roles = fields.List(fields.Nested(RolesSchema))
tags = fields.Nested(TagSchema, many=True)
changed_on_humanized = fields.String(data_key="changed_on_delta_humanized")
@ -251,7 +251,7 @@ class DashboardDatasetSchema(Schema):
class BaseDashboardSchema(Schema):
# pylint: disable=no-self-use,unused-argument
# pylint: disable=unused-argument
@post_load
def post_load(self, data: dict[str, Any], **kwargs: Any) -> dict[str, Any]:
if data.get("slug"):

View File

@ -143,6 +143,7 @@ class CreateDatabaseCommand(BaseCommand):
exception = DatabaseInvalidError()
exception.extend(exceptions)
event_logger.log_with_context(
# pylint: disable=consider-using-f-string
action="db_connection_failed.{}.{}".format(
exception.__class__.__name__,
".".join(exception.get_list_classnames()),

View File

@ -59,7 +59,7 @@ class DeleteDatabaseCommand(BaseCommand):
if reports := ReportScheduleDAO.find_by_database_id(self._model_id):
report_names = [report.name for report in reports]
raise DatabaseDeleteFailedReportsExistError(
_("There are associated alerts or reports: %s" % ",".join(report_names))
_(f"There are associated alerts or reports: {','.join(report_names)}")
)
# Check if there are datasets for this database
if self._model.tables:

View File

@ -108,10 +108,8 @@ class ValidateSQLCommand(BaseCommand):
raise NoValidatorFoundError(
SupersetError(
message=__(
"No validator named {} found "
"(configured for the {} engine)".format(
validator_name, spec.engine
)
f"No validator named {validator_name} found "
f"(configured for the {spec.engine} engine)"
),
error_type=SupersetErrorType.GENERIC_DB_ENGINE_ERROR,
level=ErrorLevel.ERROR,

View File

@ -15,7 +15,7 @@
# specific language governing permissions and limitations
# under the License.
# pylint: disable=no-self-use, unused-argument
# pylint: disable=unused-argument
import inspect
import json
@ -212,20 +212,20 @@ def extra_validator(value: str) -> str:
raise ValidationError(
[_("Field cannot be decoded by JSON. %(msg)s", msg=str(ex))]
) from ex
else:
metadata_signature = inspect.signature(MetaData)
for key in extra_.get("metadata_params", {}):
if key not in metadata_signature.parameters:
raise ValidationError(
[
_(
"The metadata_params in Extra field "
"is not configured correctly. The key "
"%(key)s is invalid.",
key=key,
)
]
)
metadata_signature = inspect.signature(MetaData)
for key in extra_.get("metadata_params", {}):
if key not in metadata_signature.parameters:
raise ValidationError(
[
_(
"The metadata_params in Extra field "
"is not configured correctly. The key "
"%(key)s is invalid.",
key=key,
)
]
)
return value

View File

@ -84,6 +84,7 @@ class CreateSSHTunnelCommand(BaseCommand):
exception = SSHTunnelInvalidError()
exception.extend(exceptions)
event_logger.log_with_context(
# pylint: disable=consider-using-f-string
action="ssh_tunnel_creation_failed.{}.{}".format(
exception.__class__.__name__,
".".join(exception.get_list_classnames()),

View File

@ -474,8 +474,8 @@ class DatasetRestApi(BaseSupersetModelRestApi):
@event_logger.log_this_with_context(
action=lambda self, *args, **kwargs: f"{self.__class__.__name__}.export",
log_to_statsd=False,
) # pylint: disable=too-many-locals
def export(self, **kwargs: Any) -> Response:
)
def export(self, **kwargs: Any) -> Response: # pylint: disable=too-many-locals
"""Export datasets
---
get:

View File

@ -106,7 +106,9 @@ def import_metric(session: Session, metric: BaseMetric) -> BaseMetric:
if isinstance(metric, SqlMetric):
lookup_metric = lookup_sqla_metric
else:
raise Exception(f"Invalid metric type: {metric}")
raise Exception( # pylint: disable=broad-exception-raised
f"Invalid metric type: {metric}"
)
return import_simple_obj(session, metric, lookup_metric)
@ -125,7 +127,9 @@ def import_column(session: Session, column: BaseColumn) -> BaseColumn:
if isinstance(column, TableColumn):
lookup_column = lookup_sqla_column
else:
raise Exception(f"Invalid column type: {column}")
raise Exception( # pylint: disable=broad-exception-raised
f"Invalid column type: {column}"
)
return import_simple_obj(session, column, lookup_column)

View File

@ -66,7 +66,9 @@ def get_sqla_type(native_type: str) -> VisitableType:
size = int(match.group(1))
return String(size)
raise Exception(f"Unknown type: {native_type}")
raise Exception( # pylint: disable=broad-exception-raised
f"Unknown type: {native_type}"
)
def get_dtype(df: pd.DataFrame, dataset: SqlaTable) -> dict[str, VisitableType]:

View File

@ -149,7 +149,7 @@ class DatasetRelatedObjectsResponse(Schema):
class ImportV1ColumnSchema(Schema):
# pylint: disable=no-self-use, unused-argument
# pylint: disable=unused-argument
@pre_load
def fix_extra(self, data: dict[str, Any], **kwargs: Any) -> dict[str, Any]:
"""
@ -175,7 +175,7 @@ class ImportV1ColumnSchema(Schema):
class ImportV1MetricSchema(Schema):
# pylint: disable=no-self-use, unused-argument
# pylint: disable=unused-argument
@pre_load
def fix_extra(self, data: dict[str, Any], **kwargs: Any) -> dict[str, Any]:
"""
@ -198,7 +198,7 @@ class ImportV1MetricSchema(Schema):
class ImportV1DatasetSchema(Schema):
# pylint: disable=no-self-use, unused-argument
# pylint: disable=unused-argument
@pre_load
def fix_extra(self, data: dict[str, Any], **kwargs: Any) -> dict[str, Any]:
"""

View File

@ -1381,7 +1381,9 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
:param cursor: Cursor instance
:return: Dictionary with different costs
"""
raise Exception("Database does not support cost estimation")
raise Exception( # pylint: disable=broad-exception-raised
"Database does not support cost estimation"
)
@classmethod
def query_cost_formatter(
@ -1393,7 +1395,9 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
:param raw_cost: Raw estimate from `estimate_query_cost`
:return: Human readable cost estimate
"""
raise Exception("Database does not support cost estimation")
raise Exception( # pylint: disable=broad-exception-raised
"Database does not support cost estimation"
)
@classmethod
def process_statement(cls, statement: str, database: Database) -> str:
@ -1435,7 +1439,9 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
"""
extra = database.get_extra() or {}
if not cls.get_allow_cost_estimate(extra):
raise Exception("Database does not support cost estimation")
raise Exception( # pylint: disable=broad-exception-raised
"Database does not support cost estimation"
)
parsed_query = sql_parse.ParsedQuery(sql)
statements = parsed_query.get_statements()
@ -1971,7 +1977,9 @@ class BasicParametersMixin:
query = parameters.get("query", {}).copy()
if parameters.get("encryption"):
if not cls.encryption_parameters:
raise Exception("Unable to build a URL with encryption enabled")
raise Exception( # pylint: disable=broad-exception-raised
"Unable to build a URL with encryption enabled"
)
query.update(cls.encryption_parameters)
return str(

View File

@ -198,7 +198,9 @@ class DatabricksNativeEngineSpec(DatabricksODBCEngineSpec, BasicParametersMixin)
query = {}
if parameters.get("encryption"):
if not cls.encryption_parameters:
raise Exception("Unable to build a URL with encryption enabled")
raise Exception( # pylint: disable=broad-exception-raised
"Unable to build a URL with encryption enabled"
)
query.update(cls.encryption_parameters)
return str(

View File

@ -76,7 +76,7 @@ def upload_to_s3(filename: str, upload_prefix: str, table: Table) -> str:
if not bucket_path:
logger.info("No upload bucket specified")
raise Exception(
raise Exception( # pylint: disable=broad-exception-raised
"No upload bucket specified. You can specify one in the config file."
)
@ -159,7 +159,9 @@ class HiveEngineSpec(PrestoEngineSpec):
state = cursor.poll()
if state.operationState == ttypes.TOperationState.ERROR_STATE:
raise Exception("Query error", state.errorMessage)
raise Exception( # pylint: disable=broad-exception-raised
"Query error", state.errorMessage
)
try:
return super().fetch_data(cursor, limit)
except pyhive.exc.ProgrammingError:
@ -312,9 +314,10 @@ class HiveEngineSpec(PrestoEngineSpec):
reduce_progress = int(match.groupdict()["reduce_progress"])
stages[stage_number] = (map_progress + reduce_progress) / 2
logger.info(
"Progress detail: {}, " # pylint: disable=logging-format-interpolation
"current job {}, "
"total jobs: {}".format(stages, current_job, total_jobs)
"Progress detail: %s, current job %s, total jobs: %s",
stages,
current_job,
total_jobs,
)
stage_progress = sum(stages.values()) / len(stages.values()) if stages else 0
@ -484,9 +487,7 @@ class HiveEngineSpec(PrestoEngineSpec):
latest_partition: bool = True,
cols: list[ResultSetColumnType] | None = None,
) -> str:
return super( # pylint: disable=bad-super-call
PrestoEngineSpec, cls
).select_star(
return super(PrestoEngineSpec, cls).select_star(
database,
table_name,
engine,
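
The logging rewrite in this file addresses logging-format-interpolation (W1202): passing the format string and its arguments to the logger separately defers interpolation until a handler actually emits the record. Sketch:

import logging

logger = logging.getLogger(__name__)
stages, current_job, total_jobs = {1: 50}, 1, 3  # hypothetical progress values

# Eager (W1202): the string is built even when INFO is disabled.
# logger.info("Progress detail: {}, current job {}, total jobs: {}".format(
#     stages, current_job, total_jobs))

# Lazy: the logging framework formats only if the record is handled.
logger.info(
    "Progress detail: %s, current job %s, total jobs: %s",
    stages,
    current_job,
    total_jobs,
)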

View File

@ -238,7 +238,7 @@ class OcientEngineSpec(BaseEngineSpec):
# Store mapping of superset Query id -> Ocient ID
# These are inserted into the cache when executing the query
# They are then removed, either upon cancellation or query completion
query_id_mapping: dict[str, str] = dict()
query_id_mapping: dict[str, str] = {}
query_id_mapping_lock = threading.Lock()
custom_errors: dict[Pattern[str], tuple[str, SupersetErrorType, dict[str, Any]]] = {

View File

@ -247,7 +247,7 @@ class PostgresEngineSpec(PostgresBaseEngineSpec, BasicParametersMixin):
if search_path := options.get("search_path"):
schemas = search_path.split(",")
if len(schemas) > 1:
raise Exception(
raise Exception( # pylint: disable=broad-exception-raised
"Multiple schemas are configured in the search path, which means "
"Superset is unable to determine the schema of unqualified table "
"names and enforce permissions."

View File

@ -120,7 +120,9 @@ def get_children(column: ResultSetColumnType) -> list[ResultSetColumnType]:
raise ValueError
match = pattern.match(column["type"])
if not match:
raise Exception(f"Unable to parse column type {column['type']}")
raise Exception( # pylint: disable=broad-exception-raised
f"Unable to parse column type {column['type']}"
)
group = match.groupdict()
type_ = group["type"].upper()
@ -156,7 +158,7 @@ def get_children(column: ResultSetColumnType) -> list[ResultSetColumnType]:
columns.append(_column)
return columns
raise Exception(f"Unknown type {type_}!")
raise Exception(f"Unknown type {type_}!") # pylint: disable=broad-exception-raised
class PrestoBaseEngineSpec(BaseEngineSpec, metaclass=ABCMeta):
@ -618,13 +620,14 @@ class PrestoBaseEngineSpec(BaseEngineSpec, metaclass=ABCMeta):
msg = f"Field [{k}] is not part of the portioning key"
raise SupersetTemplateException(msg)
if len(kwargs.keys()) != len(part_fields) - 1:
# pylint: disable=consider-using-f-string
msg = (
"A filter needs to be specified for {} out of the " "{} fields."
).format(len(part_fields) - 1, len(part_fields))
raise SupersetTemplateException(msg)
for field in part_fields:
if field not in kwargs.keys():
if field not in kwargs:
field_to_return = field
sql = cls._partition_query(
@ -931,9 +934,7 @@ class PrestoEngineSpec(PrestoBaseEngineSpec):
)
else: # otherwise this field is a basic data type
full_parent_path = cls._get_full_name(stack)
column_name = "{}.{}".format(
full_parent_path, field_info[0]
)
column_name = f"{full_parent_path}.{field_info[0]}"
result.append(
cls._create_column_info(column_name, column_type)
)
@ -1319,8 +1320,10 @@ class PrestoEngineSpec(PrestoBaseEngineSpec):
if total_splits and completed_splits:
progress = 100 * (completed_splits / total_splits)
logger.info(
"Query {} progress: {} / {} " # pylint: disable=logging-format-interpolation
"splits".format(query_id, completed_splits, total_splits)
"Query %s progress: %s / %s splits",
query_id,
completed_splits,
total_splits,
)
if progress > query.progress:
query.progress = progress
@ -1337,6 +1340,7 @@ class PrestoEngineSpec(PrestoBaseEngineSpec):
and isinstance(ex.orig[0], dict)
):
error_dict = ex.orig[0]
# pylint: disable=consider-using-f-string
return "{} at {}: {}".format(
error_dict.get("errorName"),
error_dict.get("errorLocation"),

View File

@ -63,7 +63,7 @@ class PERCENTILE(Numeric):
__visit_name__ = "PERCENTILE"
class ARRAY(TypeEngine): # pylint: disable=no-init
class ARRAY(TypeEngine):
__visit_name__ = "ARRAY"
@property
@ -71,7 +71,7 @@ class ARRAY(TypeEngine): # pylint: disable=no-init
return list
class MAP(TypeEngine): # pylint: disable=no-init
class MAP(TypeEngine):
__visit_name__ = "MAP"
@property
@ -79,7 +79,7 @@ class MAP(TypeEngine): # pylint: disable=no-init
return dict
class STRUCT(TypeEngine): # pylint: disable=no-init
class STRUCT(TypeEngine):
__visit_name__ = "STRUCT"
@property

View File

@ -80,11 +80,13 @@ class UIManifestProcessor:
loaded_chunks.add(f)
return filtered_files
return dict(
js_manifest=lambda bundle: get_files(bundle, "js"),
css_manifest=lambda bundle: get_files(bundle, "css"),
assets_prefix=self.app.config["STATIC_ASSETS_PREFIX"] if self.app else "",
)
return {
"js_manifest": lambda bundle: get_files(bundle, "js"),
"css_manifest": lambda bundle: get_files(bundle, "css"),
"assets_prefix": self.app.config["STATIC_ASSETS_PREFIX"]
if self.app
else "",
}
def parse_manifest_json(self) -> None:
try:

View File

@ -37,7 +37,7 @@ class SSHManager:
sshtunnel.TUNNEL_TIMEOUT = app.config["SSH_TUNNEL_TIMEOUT_SEC"]
sshtunnel.SSH_TIMEOUT = app.config["SSH_TUNNEL_PACKET_TIMEOUT_SEC"]
def build_sqla_url( # pylint: disable=no-self-use
def build_sqla_url(
self, sqlalchemy_url: str, server: sshtunnel.SSHTunnelForwarder
) -> str:
# override any ssh tunnel configuration object

View File

@ -539,7 +539,7 @@ class SupersetAppInitializer: # pylint: disable=too-many-public-methods
custom_sm = self.config["CUSTOM_SECURITY_MANAGER"] or SupersetSecurityManager
if not issubclass(custom_sm, SupersetSecurityManager):
raise Exception(
raise Exception( # pylint: disable=broad-exception-raised
"""Your CUSTOM_SECURITY_MANAGER must now extend SupersetSecurityManager,
not FAB's security manager.
See [4565] in UPDATING.md"""

View File

@ -21,6 +21,6 @@ from typing import Any
def update_time_range(form_data: dict[str, Any]) -> None:
"""Move since and until to time_range."""
if "since" in form_data or "until" in form_data:
form_data["time_range"] = "{} : {}".format(
form_data.pop("since", "") or "", form_data.pop("until", "") or ""
)
form_data[
"time_range"
] = f'{form_data.pop("since", "") or ""} : {form_data.pop("until", "") or ""}'

View File

@ -74,7 +74,7 @@ class AuditMixin:
def created_by(cls):
return relationship(
"User",
primaryjoin="%s.created_by_fk == User.id" % cls.__name__,
primaryjoin=f"{cls.__name__}.created_by_fk == User.id",
enable_typechecks=False,
)

View File

@ -106,7 +106,7 @@ def format_seconds(value):
else:
period = "second"
return "{} {}{}".format(value, period, "s" if value > 1 else "")
return f"{value} {period}{'s' if value > 1 else ''}"
def compute_time_compare(granularity, periods):
@ -120,7 +120,7 @@ def compute_time_compare(granularity, periods):
obj = isodate.parse_duration(granularity) * periods
except isodate.isoerror.ISO8601Error:
# if parse_human_timedelta can parse it, return it directly
delta = "{} {}{}".format(periods, granularity, "s" if periods > 1 else "")
delta = f"{periods} {granularity}{'s' if periods > 1 else ''}"
obj = parse_human_timedelta(delta)
if obj:
return delta

View File

@ -375,7 +375,7 @@ class Database(
:return: The effective username
"""
return ( # pylint: disable=used-before-assignment
return (
username
if (username := get_username())
else object_url.username

View File

@ -455,7 +455,7 @@ def _user_link(user: User) -> Union[Markup, str]:
if not user:
return ""
url = f"/superset/profile/{user.username}/"
return Markup('<a href="{}">{}</a>'.format(url, escape(user) or ""))
return Markup(f"<a href=\"{url}\">{escape(user) or ''}</a>")
class AuditMixinNullable(AuditMixin):
@ -470,7 +470,7 @@ class AuditMixinNullable(AuditMixin):
)
@declared_attr
def created_by_fk(self) -> sa.Column:
def created_by_fk(self) -> sa.Column: # pylint: disable=arguments-renamed
return sa.Column(
sa.Integer,
sa.ForeignKey("ab_user.id"),
@ -479,7 +479,7 @@ class AuditMixinNullable(AuditMixin):
)
@declared_attr
def changed_by_fk(self) -> sa.Column:
def changed_by_fk(self) -> sa.Column: # pylint: disable=arguments-renamed
return sa.Column(
sa.Integer,
sa.ForeignKey("ab_user.id"),
@ -598,7 +598,7 @@ class ExtraJSONMixin:
self.extra_json = json.dumps(extra)
@validates("extra_json")
def ensure_extra_json_is_not_none( # pylint: disable=no-self-use
def ensure_extra_json_is_not_none(
self,
_: str,
value: Optional[dict[str, Any]],
@ -824,7 +824,7 @@ class ExploreMixin: # pylint: disable=too-many-public-methods
)
) from ex
def _process_sql_expression( # pylint: disable=no-self-use
def _process_sql_expression(
self,
expression: Optional[str],
database_id: int,

View File

@ -299,14 +299,13 @@ class Query(
def default_endpoint(self) -> str:
return ""
@staticmethod
def get_extra_cache_keys(query_obj: dict[str, Any]) -> list[Hashable]:
def get_extra_cache_keys(self, query_obj: dict[str, Any]) -> list[Hashable]:
return []
@property
def tracking_url(self) -> Optional[str]:
"""
Transfrom tracking url at run time because the exact URL may depends
Transform tracking url at run time because the exact URL may depend
on query properties such as execution and finish time.
"""
transform = current_app.config.get("TRACKING_URL_TRANSFORMER")
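
Turning get_extra_cache_keys back into an instance method resolves an override mismatch that pylint 2.17 reports. The hunk doesn't show the base class, but assuming it declares the method with self (as BaseDatasource does elsewhere in this diff), a @staticmethod override trips arguments-differ; aligned signatures keep the override clean:

from typing import Any, Hashable

class BaseDatasource:
    def get_extra_cache_keys(self, query_obj: dict[str, Any]) -> list[Hashable]:
        return []

class Query(BaseDatasource):
    # Same arity and binding as the base method: no arguments-differ.
    def get_extra_cache_keys(self, query_obj: dict[str, Any]) -> list[Hashable]:
        return []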

View File

@ -15,7 +15,7 @@
# specific language governing permissions and limitations
# under the License.
# pylint: disable=abstract-method, no-init
# pylint: disable=abstract-method
from typing import Any, Optional
from sqlalchemy.engine.interfaces import Dialect
@ -105,6 +105,7 @@ class TimeStamp(TypeDecorator):
impl = TIMESTAMP
@classmethod
# pylint: disable=arguments-differ
def process_bind_param(cls, value: str, dialect: Dialect) -> str:
"""
Used for in-line rendering of TIMESTAMP data type
@ -121,6 +122,7 @@ class Date(TypeDecorator):
impl = DATE
@classmethod
# pylint: disable=arguments-differ
def process_bind_param(cls, value: str, dialect: Dialect) -> str:
"""
Used for in-line rendering of DATE data type

View File

@ -71,7 +71,6 @@ class QuerySchema(Schema):
load_instance = True
include_relationships = True
# pylint: disable=no-self-use
def get_sql_tables(self, obj: Query) -> list[Table]:
return obj.sql_tables

View File

@ -96,8 +96,7 @@ class AlertCommand(BaseCommand):
if len(rows) > 1:
raise AlertQueryMultipleRowsError(
message=_(
"Alert query returned more than one row. %s rows returned"
% len(rows),
f"Alert query returned more than one row. {len(rows)} rows returned"
)
)
# check if query returned more than one column
@ -105,8 +104,8 @@ class AlertCommand(BaseCommand):
raise AlertQueryMultipleColumnsError(
# len is subtracted by 1 to discard pandas index column
_(
"Alert query returned more than one column. %s columns returned"
% (len(rows[0]) - 1)
f"Alert query returned more than one column. "
f"{(len(rows[0]) - 1)} columns returned"
)
)

View File

@ -734,9 +734,7 @@ class AsyncExecuteReportScheduleCommand(BaseCommand):
except Exception as ex:
raise ReportScheduleUnexpectedError(str(ex)) from ex
def validate( # pylint: disable=arguments-differ
self, session: Session = None
) -> None:
def validate(self, session: Session = None) -> None:
# Validate/populate model exists
logger.info(
"session is validated: id %s, executionid: %s",

View File

@ -30,4 +30,6 @@ def create_notification(
for plugin in BaseNotification.plugins:
if plugin.type == recipient.type:
return plugin(recipient, notification_content)
raise Exception("Recipient type not supported")
raise Exception( # pylint: disable=broad-exception-raised
"Recipient type not supported"
)

View File

@ -220,7 +220,7 @@ class ReportSchedulePostSchema(Schema):
)
@validates("custom_width")
def validate_custom_width( # pylint: disable=no-self-use
def validate_custom_width(
self,
value: Optional[int],
) -> None:
@ -239,7 +239,7 @@ class ReportSchedulePostSchema(Schema):
)
@validates_schema
def validate_report_references( # pylint: disable=unused-argument,no-self-use
def validate_report_references( # pylint: disable=unused-argument
self,
data: dict[str, Any],
**kwargs: Any,
@ -350,7 +350,7 @@ class ReportSchedulePutSchema(Schema):
)
@validates("custom_width")
def validate_custom_width( # pylint: disable=no-self-use
def validate_custom_width(
self,
value: Optional[int],
) -> None:

View File

@ -54,7 +54,7 @@ class ResourceSchema(PermissiveSchema):
id = fields.String(required=True)
@post_load
def convert_enum_to_value( # pylint: disable=no-self-use
def convert_enum_to_value(
self, data: dict[str, Any], **kwargs: Any # pylint: disable=unused-argument
) -> dict[str, Any]:
# we don't care about the enum, we want the value inside

View File

@ -284,7 +284,7 @@ class SupersetSecurityManager( # pylint: disable=too-many-public-methods
return self.get_guest_user_from_request(request)
return None
def get_schema_perm( # pylint: disable=no-self-use
def get_schema_perm(
self, database: Union["Database", str], schema: Optional[str] = None
) -> Optional[str]:
"""
@ -308,9 +308,7 @@ class SupersetSecurityManager( # pylint: disable=too-many-public-methods
def get_dataset_perm(dataset_id: int, dataset_name: str, database_name: str) -> str:
return f"[{database_name}].[{dataset_name}](id:{dataset_id})"
def unpack_database_and_schema( # pylint: disable=no-self-use
self, schema_permission: str
) -> DatabaseAndSchema:
def unpack_database_and_schema(self, schema_permission: str) -> DatabaseAndSchema:
# [database_name].[schema|table]
schema_name = schema_permission.split(".")[1][1:-1]
@ -469,9 +467,7 @@ class SupersetSecurityManager( # pylint: disable=too-many-public-methods
},
)
def get_table_access_error_msg( # pylint: disable=no-self-use
self, tables: set["Table"]
) -> str:
def get_table_access_error_msg(self, tables: set["Table"]) -> str:
"""
Return the error message for the denied SQL tables.
@ -500,7 +496,7 @@ class SupersetSecurityManager( # pylint: disable=too-many-public-methods
},
)
def get_table_access_link( # pylint: disable=unused-argument,no-self-use
def get_table_access_link( # pylint: disable=unused-argument
self, tables: set["Table"]
) -> Optional[str]:
"""
@ -1871,7 +1867,7 @@ class SupersetSecurityManager( # pylint: disable=too-many-public-methods
.one_or_none()
)
def get_anonymous_user(self) -> User: # pylint: disable=no-self-use
def get_anonymous_user(self) -> User:
return AnonymousUserMixin()
def get_user_roles(self, user: Optional[User] = None) -> list[Role]:
@ -2151,8 +2147,8 @@ class SupersetSecurityManager( # pylint: disable=too-many-public-methods
# We don't need to send a special error message.
logger.warning("Invalid guest token", exc_info=True)
return None
else:
return self.get_guest_user_from_token(cast(GuestToken, token))
return self.get_guest_user_from_token(cast(GuestToken, token))
def get_guest_user_from_token(self, token: GuestToken) -> GuestUser:
return self.guest_user_cls(

View File

@ -127,7 +127,7 @@ def get_query_backoff_handler(details: dict[Any, Any]) -> None:
logger.error(
"Query with id `%s` could not be retrieved", str(query_id), exc_info=True
)
stats_logger.incr("error_attempting_orm_query_{}".format(details["tries"] - 1))
stats_logger.incr(f"error_attempting_orm_query_{details['tries'] - 1}")
logger.error(
"Query %s: Sleeping for a sec before retrying...", str(query_id), exc_info=True
)
@ -233,8 +233,8 @@ def execute_sql_statement( # pylint: disable=too-many-arguments,too-many-statem
if apply_ctas:
if not query.tmp_table_name:
start_dttm = datetime.fromtimestamp(query.start_time)
query.tmp_table_name = "tmp_{}_table_{}".format(
query.user_id, start_dttm.strftime("%Y_%m_%d_%H_%M_%S")
query.tmp_table_name = (
f'tmp_{query.user_id}_table_{start_dttm.strftime("%Y_%m_%d_%H_%M_%S")}'
)
sql = parsed_query.as_create_table(
query.tmp_table_name,
@ -391,7 +391,7 @@ def execute_sql_statements(
stats_logger.timing("sqllab.query.time_pending", now_as_float() - start_time)
query = get_query(query_id, session)
payload: dict[str, Any] = dict(query_id=query_id)
payload: dict[str, Any] = {"query_id": query_id}
database = query.database
db_engine_spec = database.db_engine_spec
db_engine_spec.patch()

View File

@ -113,8 +113,8 @@ def extract_top_from_query(
token = str_statement.rstrip().split(" ")
token = [part for part in token if part]
top = None
for i, _ in enumerate(token):
if token[i].upper() in top_keywords and len(token) - 1 > i:
for i, part in enumerate(token):
if part.upper() in top_keywords and len(token) - 1 > i:
try:
top = int(token[i + 1])
except ValueError:

View File

@ -107,7 +107,7 @@ class PrestoDBSQLValidator(BaseSQLValidator):
# we update at some point in the future.
if not db_error.args or not isinstance(db_error.args[0], dict):
raise PrestoSQLValidationError(
"The pyhive presto client returned an unhandled " "database error."
"The pyhive presto client returned an unhandled database error."
) from db_error
error_args: dict[str, Any] = db_error.args[0]
@ -120,7 +120,6 @@ class PrestoDBSQLValidator(BaseSQLValidator):
if "errorLocation" not in error_args:
# Pylint is confused about the type of error_args, despite the hints
# and checks above.
# pylint: disable=invalid-sequence-index
message = error_args["message"] + "\n(Error location unknown)"
# If we have a message but no error location, return the message and
# set the location as the beginning.
@ -128,7 +127,6 @@ class PrestoDBSQLValidator(BaseSQLValidator):
message=message, line_number=1, start_column=1, end_column=1
)
# pylint: disable=invalid-sequence-index
message = error_args["message"]
err_loc = error_args["errorLocation"]
line_number = err_loc.get("lineNumber", None)

View File

@ -14,7 +14,7 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=invalid-name, no-self-use, too-few-public-methods, too-many-arguments
# pylint: disable=invalid-name, too-few-public-methods, too-many-arguments
from __future__ import annotations
from typing import Any, Callable, TYPE_CHECKING

View File

@ -151,8 +151,9 @@ class SynchronousSqlJsonExecutor(SqlJsonExecutorBase):
)
def _get_timeout_error_msg(self) -> str:
return "The query exceeded the {timeout} seconds timeout.".format(
timeout=self._timeout_duration_in_seconds
return (
f"The query exceeded the {self._timeout_duration_in_seconds} "
"seconds timeout."
)

View File

@ -95,7 +95,7 @@ class SqlJsonExecutionContext: # pylint: disable=too-many-instance-attributes
template_params = json.loads(query_params.get("templateParams") or "{}")
except json.JSONDecodeError:
logger.warning(
"Invalid template parameter %s" " specified. Defaulting to empty dict",
"Invalid template parameter %s specified. Defaulting to empty dict",
str(query_params.get("templateParams")),
)
template_params = {}

View File

@ -164,7 +164,9 @@ class Table(Model, AuditMixinNullable, ExtraJSONMixin, ImportExportMixin):
return []
if not database.id:
raise Exception("Database must be already saved to metastore")
raise Exception( # pylint: disable=broad-exception-raised
"Database must be already saved to metastore"
)
default_props = default_props or {}
session: Session = inspect(database).session

View File

@ -121,7 +121,9 @@ def get_object_type(class_name: str) -> ObjectTypes:
try:
return mapping[class_name.lower()]
except KeyError as ex:
raise Exception(f"No mapping found for {class_name}") from ex
raise Exception( # pylint: disable=broad-exception-raised
f"No mapping found for {class_name}"
) from ex
class ObjectUpdater:

View File

@ -46,7 +46,6 @@ query_timeout = current_app.config[
def set_form_data(form_data: dict[str, Any]) -> None:
# pylint: disable=assigning-non-slot
g.form_data = form_data
@ -90,11 +89,7 @@ def load_chart_data_into_cache(
raise ex
except Exception as ex:
# TODO: QueryContext should support SIP-40 style errors
error = str(
ex.message # pylint: disable=no-member
if hasattr(ex, "message")
else ex
)
error = str(ex.message if hasattr(ex, "message") else ex)
errors = [{"message": error}]
async_query_manager.update_job(
job_metadata, async_query_manager.STATUS_ERROR, errors=errors
@ -159,13 +154,9 @@ def load_explore_json_into_cache( # pylint: disable=too-many-locals
raise ex
except Exception as ex:
if isinstance(ex, SupersetVizException):
errors = ex.errors # pylint: disable=no-member
errors = ex.errors
else:
error = (
ex.message # pylint: disable=no-member
if hasattr(ex, "message")
else str(ex)
)
error = ex.message if hasattr(ex, "message") else str(ex)
errors = [error]
async_query_manager.update_job(

View File

@ -221,7 +221,7 @@ def fetch_url(data: str, headers: dict[str, str]) -> dict[str, str]:
"""
result = {}
try:
baseurl = "{WEBDRIVER_BASEURL}".format(**app.config)
baseurl = app.config["WEBDRIVER_BASEURL"]
url = f"{baseurl}api/v1/chart/warm_up_cache"
logger.info("Fetching %s with payload %s", url, data)
req = request.Request(

View File

@ -90,9 +90,8 @@ def execute(self: Celery.task, report_schedule_id: int) -> None:
except CommandException as ex:
logger_func, level = get_logger_from_status(ex.status)
logger_func(
"A downstream {} occurred while generating a report: {}. {}".format(
level, task_id, ex.message
),
f"A downstream {level} occurred "
f"while generating a report: {task_id}. {ex.message}",
exc_info=True,
)
if level == LoggerLevel.EXCEPTION:
@ -105,5 +104,5 @@ def prune_log() -> None:
AsyncPruneReportScheduleLogCommand().run()
except SoftTimeLimitExceeded as ex:
logger.warning("A timeout occurred while pruning report schedule logs: %s", ex)
except CommandException as ex:
except CommandException:
logger.exception("An exception occurred while pruning report schedule logs")

View File

@ -89,7 +89,7 @@ class AsyncQueryManager:
config["CACHE_CONFIG"]["CACHE_TYPE"] == "null"
or config["DATA_CACHE_CONFIG"]["CACHE_TYPE"] == "null"
):
raise Exception(
raise Exception( # pylint: disable=broad-exception-raised
"""
Cache backends (CACHE_CONFIG, DATA_CACHE_CONFIG) must be configured
and non-null in order to enable async queries

View File

@ -116,7 +116,7 @@ TIME_COMPARISON = "__"
JS_MAX_INTEGER = 9007199254740991 # Largest int Java Script can handle 2^53-1
InputType = TypeVar("InputType")
InputType = TypeVar("InputType") # pylint: disable=invalid-name
ADHOC_FILTERS_REGEX = re.compile("^adhoc_filters")
@ -920,7 +920,7 @@ def send_email_smtp( # pylint: disable=invalid-name,too-many-arguments,too-many
msg.attach(
MIMEApplication(
f.read(),
Content_Disposition="attachment; filename='%s'" % basename,
Content_Disposition=f"attachment; filename='{basename}'",
Name=basename,
)
)
@ -929,7 +929,7 @@ def send_email_smtp( # pylint: disable=invalid-name,too-many-arguments,too-many
for name, body in (data or {}).items():
msg.attach(
MIMEApplication(
body, Content_Disposition="attachment; filename='%s'" % name, Name=name
body, Content_Disposition=f"attachment; filename='{name}'", Name=name
)
)
@ -939,7 +939,7 @@ def send_email_smtp( # pylint: disable=invalid-name,too-many-arguments,too-many
formatted_time = formatdate(localtime=True)
file_name = f"{subject} {formatted_time}"
image = MIMEImage(imgdata, name=file_name)
image.add_header("Content-ID", "<%s>" % msgid)
image.add_header("Content-ID", f"<{msgid}>")
image.add_header("Content-Disposition", "inline")
msg.attach(image)
msg_mutator = config["EMAIL_HEADER_MUTATOR"]
@ -1140,9 +1140,9 @@ def merge_extra_filters(form_data: dict[str, Any]) -> None:
def get_filter_key(f: dict[str, Any]) -> str:
if "expressionType" in f:
return "{}__{}".format(f["subject"], f["operator"])
return f"{f['subject']}__{f['operator']}"
return "{}__{}".format(f["col"], f["op"])
return f"{f['col']}__{f['op']}"
existing_filters = {}
for existing in adhoc_filters:
@ -1456,7 +1456,6 @@ def override_user(user: User | None, force: bool = True) -> Iterator[Any]:
:param force: Whether to override the current user if set
"""
# pylint: disable=assigning-non-slot
if hasattr(g, "user"):
if force or g.user is None:
current = g.user

View File

@ -17,7 +17,7 @@
import logging
import re
import urllib.request
from typing import Any, Optional
from typing import Any, Optional, Union
from urllib.error import URLError
import numpy as np
@ -65,7 +65,8 @@ def escape_value(value: str) -> str:
def df_to_escaped_csv(df: pd.DataFrame, **kwargs: Any) -> Any:
escape_values = lambda v: escape_value(v) if isinstance(v, str) else v
def escape_values(v: Any) -> Union[str, Any]:
return escape_value(v) if isinstance(v, str) else v
# Escape csv headers
df = df.rename(columns=escape_values)

View File

@ -50,7 +50,9 @@ class SQLAlchemyUtilsAdapter( # pylint: disable=too-few-public-methods
if app_config:
return EncryptedType(*args, app_config["SECRET_KEY"], **kwargs)
raise Exception("Missing app_config kwarg")
raise Exception( # pylint: disable=broad-exception-raised
"Missing app_config kwarg"
)
class EncryptedFieldFactory:
@ -70,7 +72,9 @@ class EncryptedFieldFactory:
if self._concrete_type_adapter:
return self._concrete_type_adapter.create(self._config, *args, **kwargs)
raise Exception("App not initialized yet. Please call init_app first")
raise Exception( # pylint: disable=broad-exception-raised
"App not initialized yet. Please call init_app first"
)
class SecretsMigrator:
@ -146,7 +150,7 @@ class SecretsMigrator:
unencrypted_value = previous_encrypted_type.process_result_value(
self._read_bytes(column_name, row[column_name]), self._dialect
)
except ValueError as exc:
except ValueError as ex:
# Failed to unencrypt
try:
encrypted_type.process_result_value(
@ -160,7 +164,7 @@ class SecretsMigrator:
)
return
except Exception:
raise Exception from exc
raise Exception from ex # pylint: disable=broad-exception-raised
re_encrypted_columns[column_name] = encrypted_type.process_bind_param(
unencrypted_value,

View File

@ -66,7 +66,7 @@ class MachineAuthProvider:
cookies = {}
for cookie_name, cookie_val in cookies.items():
driver.add_cookie(dict(name=cookie_name, value=cookie_val))
driver.add_cookie({"name": cookie_name, "value": cookie_val})
return driver

View File

@ -189,7 +189,7 @@ def add_data(
with database.get_sqla_engine_with_context() as engine:
if columns is None:
if not table_exists:
raise Exception(
raise Exception( # pylint: disable=broad-exception-raised
f"The table {table_name} does not exist. To create it you need to "
"pass a list of column names and types."
)

View File

@ -48,7 +48,9 @@ class SupersetProfiler: # pylint: disable=too-few-public-methods
return Response.from_app(self.app, request.environ)
if Profiler is None:
raise Exception("The module pyinstrument is not installed.")
raise Exception( # pylint: disable=broad-exception-raised
"The module pyinstrument is not installed."
)
profiler = Profiler(interval=self.interval)

View File

@ -36,7 +36,7 @@ def compute_hash(obj: Callable[..., Any]) -> str:
if isclass(obj):
return compute_class_hash(obj)
raise Exception(f"Invalid object: {obj}")
raise Exception(f"Invalid object: {obj}") # pylint: disable=broad-exception-raised
def compute_func_hash(function: Callable[..., Any]) -> str:

View File

@ -124,15 +124,17 @@ class WebDriverProxy:
options = firefox.options.Options()
profile = FirefoxProfile()
profile.set_preference("layout.css.devPixelsPerPx", str(pixel_density))
kwargs: dict[Any, Any] = dict(options=options, firefox_profile=profile)
kwargs: dict[Any, Any] = {"options": options, "firefox_profile": profile}
elif self._driver_type == "chrome":
driver_class = chrome.webdriver.WebDriver
options = chrome.options.Options()
options.add_argument(f"--force-device-scale-factor={pixel_density}")
options.add_argument(f"--window-size={self._window[0]},{self._window[1]}")
kwargs = dict(options=options)
kwargs = {"options": options}
else:
raise Exception(f"Webdriver name ({self._driver_type}) not supported")
raise Exception( # pylint: disable=broad-exception-raised
f"Webdriver name ({self._driver_type}) not supported"
)
# Prepare args for the webdriver init
# Add additional configured options

View File

@ -73,7 +73,7 @@ class Api(BaseSupersetView):
@handle_api_exception
@has_access_api
@expose("/v1/form_data/", methods=("GET",))
def query_form_data(self) -> FlaskResponse: # pylint: disable=no-self-use
def query_form_data(self) -> FlaskResponse:
"""
Get the formdata stored in the database for existing slice.
params: slice_id: integer
@ -105,7 +105,7 @@ class Api(BaseSupersetView):
}
return self.json_response({"result": result})
except (ValueError, TimeRangeParseFailError, TimeRangeAmbiguousError) as error:
error_msg = {"message": _("Unexpected time range: %s" % error)}
error_msg = {"message": _(f"Unexpected time range: {error}")}
return self.json_response(error_msg, 400)
def get_query_context_factory(self) -> QueryContextFactory:

View File

@ -293,14 +293,16 @@ def validate_sqlatable(table: models.SqlaTable) -> None:
models.SqlaTable.database_id == table.database.id,
)
if db.session.query(table_query.exists()).scalar():
raise Exception(get_dataset_exist_error_msg(table.full_name))
raise Exception( # pylint: disable=broad-exception-raised
get_dataset_exist_error_msg(table.full_name)
)
# Fail before adding if the table can't be found
try:
table.get_sqla_table_object()
except Exception as ex:
logger.exception("Got an error in pre_add for %s", table.name)
raise Exception(
raise Exception( # pylint: disable=broad-exception-raised
_(
"Table [%{table}s] could not be found, "
"please double check your "
@ -608,7 +610,9 @@ def validate_json(form: Form, field: Field) -> None: # pylint: disable=unused-a
json.loads(field.data)
except Exception as ex:
logger.exception(ex)
raise Exception(_("json isn't valid")) from ex
raise Exception( # pylint: disable=broad-exception-raised
_("json isn't valid")
) from ex
class YamlExportMixin: # pylint: disable=too-few-public-methods

@ -51,7 +51,7 @@ class BaseSupersetSchema(Schema):
self.instance: Optional[Model] = None
super().__init__(**kwargs)
def load( # pylint: disable=arguments-differ
def load(
self,
data: Union[Mapping[str, Any], Iterable[Mapping[str, Any]]],
many: Optional[bool] = None,

@ -145,13 +145,12 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods
@has_access
@event_logger.log_this
@expose("/slice/<int:slice_id>/")
def slice(self, slice_id: int) -> FlaskResponse: # pylint: disable=no-self-use
def slice(self, slice_id: int) -> FlaskResponse:
_, slc = get_form_data(slice_id, use_slice_data=True)
if not slc:
abort(404)
endpoint = "/explore/?form_data={}".format(
parse.quote(json.dumps({"slice_id": slice_id}))
)
form_data = parse.quote(json.dumps({"slice_id": slice_id}))
endpoint = f"/explore/?form_data={form_data}"
is_standalone_mode = ReservedUrlParameters.is_standalone_mode()
if is_standalone_mode:
@ -652,7 +651,7 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods
bootstrap_data, default=utils.pessimistic_json_iso_dttm_ser
),
entry="explore",
title=title.__str__(),
title=title,
standalone_mode=standalone_mode,
)
@ -743,7 +742,7 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods
)
flash(
_(
"Dashboard [{}] just got created and chart [{}] was added " "to it"
"Dashboard [{}] just got created and chart [{}] was added to it"
).format(dash.dashboard_title, slc.slice_name),
"success",
)
@ -771,9 +770,7 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods
@has_access_api
@expose("/warm_up_cache/", methods=("GET",))
@deprecated(new_target="api/v1/chart/warm_up_cache/")
def warm_up_cache( # pylint: disable=no-self-use
self,
) -> FlaskResponse:
def warm_up_cache(self) -> FlaskResponse:
"""Warms up the cache for the slice or table.
Note for slices a force refresh occurs.
@ -901,7 +898,7 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods
@has_access
@expose("/dashboard/p/<key>/", methods=("GET",))
def dashboard_permalink( # pylint: disable=no-self-use
def dashboard_permalink(
self,
key: str,
) -> FlaskResponse:
@ -926,7 +923,7 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods
@has_access
@event_logger.log_this
@expose("/log/", methods=("POST",))
def log(self) -> FlaskResponse: # pylint: disable=no-self-use
def log(self) -> FlaskResponse:
return Response(status=200)
@expose("/theme/")
@ -939,7 +936,7 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods
@event_logger.log_this
@expose("/fetch_datasource_metadata")
@deprecated(new_target="api/v1/database/<int:pk>/table/<table_name>/<schema_name>/")
def fetch_datasource_metadata(self) -> FlaskResponse: # pylint: disable=no-self-use
def fetch_datasource_metadata(self) -> FlaskResponse:
"""
Fetch the datasource metadata.
@ -958,7 +955,7 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods
return json_success(json.dumps(sanitize_datasource_data(datasource.data)))
@app.errorhandler(500)
def show_traceback(self) -> FlaskResponse: # pylint: disable=no-self-use
def show_traceback(self) -> FlaskResponse:
return (
render_template("superset/traceback.html", error_msg=get_error_msg()),
500,
@ -1009,7 +1006,7 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods
return self.render_template(
"superset/basic.html",
title=_("%(user)s's profile", user=get_username()).__str__(),
title=_("%(user)s's profile", user=get_username()),
entry="profile",
bootstrap_data=json.dumps(
payload, default=utils.pessimistic_json_iso_dttm_ser
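
Explicit dunder calls like title.__str__() are flagged by unnecessary-dunder-call (C2801) in the pylint 2.13 line and later; the translated value already renders as a string, so the call is simply dropped. A sketch with hypothetical values, assuming flask_babel supplies _:

from flask_babel import gettext as _  # assumed import

title = _("%(user)s's profile", user="alice")
page_title = title.__str__()  # flagged: unnecessary-dunder-call (C2801)
page_title = str(title)       # idiomatic spelling, if coercion is needed
page_title = title            # what this commit does: pass it through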

@ -88,5 +88,5 @@ class DashboardMixin: # pylint: disable=too-few-public-methods
"json_metadata": _("JSON Metadata"),
}
def pre_delete(self, item: "DashboardMixin") -> None: # pylint: disable=no-self-use
def pre_delete(self, item: "DashboardMixin") -> None:
security_manager.raise_for_ownership(item)

@ -64,7 +64,7 @@ class DashboardModelView(
return super().render_app_template()
@action("mulexport", __("Export"), __("Export dashboards?"), "fa-database")
def mulexport( # pylint: disable=no-self-use
def mulexport(
self,
items: Union["DashboardModelView", builtins.list["DashboardModelView"]],
) -> FlaskResponse:
@ -114,7 +114,7 @@ class Dashboard(BaseSupersetView):
@has_access
@expose("/new/")
def new(self) -> FlaskResponse: # pylint: disable=no-self-use
def new(self) -> FlaskResponse:
"""Creates a new, blank dashboard and redirects to it in edit mode"""
new_dashboard = DashboardModel(
dashboard_title="[ untitled dashboard ]",

@ -221,7 +221,7 @@ class DatabaseMixin:
def pre_update(self, database: Database) -> None:
self._pre_add_update(database)
def pre_delete(self, database: Database) -> None: # pylint: disable=no-self-use
def pre_delete(self, database: Database) -> None:
if database.tables:
raise SupersetException(
Markup(
@ -231,12 +231,12 @@ class DatabaseMixin:
)
)
def check_extra(self, database: Database) -> None: # pylint: disable=no-self-use
def check_extra(self, database: Database) -> None:
# this will check whether json.loads(extra) can succeed
try:
extra = database.get_extra()
except Exception as ex:
raise Exception(
raise Exception( # pylint: disable=broad-exception-raised
_("Extra field cannot be decoded by JSON. %(msg)s", msg=str(ex))
) from ex
@ -244,7 +244,7 @@ class DatabaseMixin:
metadata_signature = inspect.signature(MetaData)
for key in extra.get("metadata_params", {}):
if key not in metadata_signature.parameters:
raise Exception(
raise Exception( # pylint: disable=broad-exception-raised
_(
"The metadata_params in Extra field "
"is not configured correctly. The key "
@ -253,13 +253,11 @@ class DatabaseMixin:
)
)
def check_encrypted_extra( # pylint: disable=no-self-use
self, database: Database
) -> None:
def check_encrypted_extra(self, database: Database) -> None:
# this will check whether json.loads(secure_extra) can succeed
try:
database.get_encrypted_extra()
except Exception as ex:
raise Exception(
raise Exception( # pylint: disable=broad-exception-raised
_("Extra field cannot be decoded by JSON. %(msg)s", msg=str(ex))
) from ex

@ -455,9 +455,10 @@ class ColumnarToDatabaseView(SimpleFormView):
if file_type == {"zip"}:
zipfile_ob = zipfile.ZipFile( # pylint: disable=consider-using-with
form.columnar_file.data[0]
) # pylint: disable=consider-using-with
)
file_type = {filename.split(".")[-1] for filename in zipfile_ob.namelist()}
files = [
# pylint: disable=consider-using-with
io.BytesIO((zipfile_ob.open(filename).read(), filename)[0])
for filename in zipfile_ob.namelist()
]
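
Inline pylint pragmas attach to the physical line a message is reported on, and consider-using-with fires where the ZipFile call opens, so the pragma moves up from the closing parenthesis; a standalone pragma line covers the open() calls inside the comprehension. A runnable sketch with an in-memory archive (hypothetical contents):

import io
import zipfile

buf = io.BytesIO()
with zipfile.ZipFile(buf, "w") as zf:
    zf.writestr("data.csv", "a,b\n1,2\n")
buf.seek(0)

archive = zipfile.ZipFile(  # pylint: disable=consider-using-with
    buf
)
files = [
    # pylint: disable=consider-using-with
    io.BytesIO(archive.open(name).read())
    for name in archive.namelist()
]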

@ -45,7 +45,7 @@ class ExternalMetadataSchema(Schema):
schema_name = fields.Str(allow_none=True)
table_name = fields.Str(required=True)
# pylint: disable=no-self-use,unused-argument
# pylint: disable=unused-argument
@post_load
def normalize(
self,
@ -75,7 +75,7 @@ class SamplesPayloadSchema(Schema):
)
@pre_load
# pylint: disable=no-self-use, unused-argument
# pylint: disable=unused-argument
def handle_none(self, data: dict[str, Any], **kwargs: Any) -> dict[str, Any]:
if data is None:
return {}

@ -61,7 +61,6 @@ class DynamicPluginsView(ModelView):
edit_title = _("Edit Plugin")
@before_request
# pylint: disable=R0201
def ensure_dynamic_plugins_enabled(self) -> Optional[Response]:
if not is_feature_enabled("DYNAMIC_PLUGINS"):
return make_response("Not found", 404)

@ -44,7 +44,7 @@ class KV(BaseSupersetView):
@event_logger.log_this
@has_access_api
@expose("/store/", methods=("POST",))
def store(self) -> FlaskResponse: # pylint: disable=no-self-use
def store(self) -> FlaskResponse:
try:
value = request.form.get("data")
obj = models.KeyValue(value=value)
@ -57,7 +57,7 @@ class KV(BaseSupersetView):
@event_logger.log_this
@has_access_api
@expose("/<int:key_id>/", methods=("GET",))
def get_value(self, key_id: int) -> FlaskResponse: # pylint: disable=no-self-use
def get_value(self, key_id: int) -> FlaskResponse:
try:
kv = db.session.query(models.KeyValue).filter_by(id=key_id).scalar()
if not kv:

@ -139,7 +139,7 @@ def _get_owner_id(tab_state_id: int) -> int:
class TabStateView(BaseSupersetView):
@has_access_api
@expose("/", methods=("POST",))
def post(self) -> FlaskResponse: # pylint: disable=no-self-use
def post(self) -> FlaskResponse:
query_editor = json.loads(request.form["queryEditor"])
tab_state = TabState(
user_id=get_user_id(),
@ -166,7 +166,7 @@ class TabStateView(BaseSupersetView):
@has_access_api
@expose("/<int:tab_state_id>", methods=("DELETE",))
def delete(self, tab_state_id: int) -> FlaskResponse: # pylint: disable=no-self-use
def delete(self, tab_state_id: int) -> FlaskResponse:
if _get_owner_id(tab_state_id) != get_user_id():
return Response(status=403)
@ -181,7 +181,7 @@ class TabStateView(BaseSupersetView):
@has_access_api
@expose("/<int:tab_state_id>", methods=("GET",))
def get(self, tab_state_id: int) -> FlaskResponse: # pylint: disable=no-self-use
def get(self, tab_state_id: int) -> FlaskResponse:
if _get_owner_id(tab_state_id) != get_user_id():
return Response(status=403)
@ -194,9 +194,7 @@ class TabStateView(BaseSupersetView):
@has_access_api
@expose("<int:tab_state_id>/activate", methods=("POST",))
def activate( # pylint: disable=no-self-use
self, tab_state_id: int
) -> FlaskResponse:
def activate(self, tab_state_id: int) -> FlaskResponse:
owner_id = _get_owner_id(tab_state_id)
if owner_id is None:
return Response(status=404)
@ -213,7 +211,7 @@ class TabStateView(BaseSupersetView):
@has_access_api
@expose("<int:tab_state_id>", methods=("PUT",))
def put(self, tab_state_id: int) -> FlaskResponse: # pylint: disable=no-self-use
def put(self, tab_state_id: int) -> FlaskResponse:
if _get_owner_id(tab_state_id) != get_user_id():
return Response(status=403)
@ -224,9 +222,7 @@ class TabStateView(BaseSupersetView):
@has_access_api
@expose("<int:tab_state_id>/migrate_query", methods=("POST",))
def migrate_query( # pylint: disable=no-self-use
self, tab_state_id: int
) -> FlaskResponse:
def migrate_query(self, tab_state_id: int) -> FlaskResponse:
if _get_owner_id(tab_state_id) != get_user_id():
return Response(status=403)
@ -239,9 +235,7 @@ class TabStateView(BaseSupersetView):
@has_access_api
@expose("<int:tab_state_id>/query/<client_id>", methods=("DELETE",))
def delete_query( # pylint: disable=no-self-use
self, tab_state_id: int, client_id: str
) -> FlaskResponse:
def delete_query(self, tab_state_id: int, client_id: str) -> FlaskResponse:
# Before deleting the query, ensure it's not tied to any
# active tab as the last query. If so, replace the query
# with the latest one created in that tab
@ -277,7 +271,7 @@ class TabStateView(BaseSupersetView):
class TableSchemaView(BaseSupersetView):
@has_access_api
@expose("/", methods=("POST",))
def post(self) -> FlaskResponse: # pylint: disable=no-self-use
def post(self) -> FlaskResponse:
table = json.loads(request.form["table"])
# delete any existing table schema
@ -302,9 +296,7 @@ class TableSchemaView(BaseSupersetView):
@has_access_api
@expose("/<int:table_schema_id>", methods=("DELETE",))
def delete( # pylint: disable=no-self-use
self, table_schema_id: int
) -> FlaskResponse:
def delete(self, table_schema_id: int) -> FlaskResponse:
db.session.query(TableSchema).filter(TableSchema.id == table_schema_id).delete(
synchronize_session=False
)
@ -313,9 +305,7 @@ class TableSchemaView(BaseSupersetView):
@has_access_api
@expose("/<int:table_schema_id>/expanded", methods=("POST",))
def expanded( # pylint: disable=no-self-use
self, table_schema_id: int
) -> FlaskResponse:
def expanded(self, table_schema_id: int) -> FlaskResponse:
payload = json.loads(request.form["expanded"])
(
db.session.query(TableSchema)
@ -332,7 +322,7 @@ class SqlLab(BaseSupersetView):
@expose("/my_queries/")
@has_access
def my_queries(self) -> FlaskResponse: # pylint: disable=no-self-use
def my_queries(self) -> FlaskResponse:
"""Assigns a list of found users to the given role."""
logger.warning(
"This endpoint is deprecated and will be removed in the next major release"

@ -72,7 +72,7 @@ class TagView(BaseSupersetView):
@has_access_api
@expose("/tags/", methods=("GET",))
def tags(self) -> FlaskResponse: # pylint: disable=no-self-use
def tags(self) -> FlaskResponse:
query = db.session.query(Tag).all()
results = [
{

@ -260,7 +260,6 @@ def get_datasource_info(
:raises SupersetException: If the datasource no longer exists
"""
# pylint: disable=superfluous-parens
if "__" in (datasource := form_data.get("datasource", "")):
datasource_id, datasource_type = datasource.split("__")
# The case where the datasource has been deleted
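
Older pylint releases reported superfluous-parens for the parentheses around the assignment expression below, even though the syntax requires them next to the in operator; the false positive appears fixed by 2.17, so the disable comes out. A sketch with a hypothetical payload:

form_data = {"datasource": "12__table"}  # hypothetical payload
# The parentheses around the walrus target are syntactically required here;
# current pylint no longer flags them.
if "__" in (datasource := form_data.get("datasource", "")):
    datasource_id, datasource_type = datasource.split("__")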

@ -221,7 +221,7 @@ class BaseViz: # pylint: disable=too-many-public-methods
min_periods = int(self.form_data.get("min_periods") or 0)
if rolling_type in ("mean", "std", "sum") and rolling_periods:
kwargs = dict(window=rolling_periods, min_periods=min_periods)
kwargs = {"window": rolling_periods, "min_periods": min_periods}
if rolling_type == "mean":
df = df.rolling(**kwargs).mean()
elif rolling_type == "std":
@ -352,7 +352,6 @@ class BaseViz: # pylint: disable=too-many-public-methods
is_timeseries = self.is_timeseries
# pylint: disable=superfluous-parens
if DTTM_ALIAS in (groupby_labels := get_column_names(groupby)):
del groupby[groupby_labels.index(DTTM_ALIAS)]
is_timeseries = True
@ -680,7 +679,7 @@ class BaseViz: # pylint: disable=too-many-public-methods
return csv.df_to_escaped_csv(df, index=include_index, **config["CSV_EXPORT"])
@deprecated(deprecated_in="3.0")
def get_data(self, df: pd.DataFrame) -> VizData: # pylint: disable=no-self-use
def get_data(self, df: pd.DataFrame) -> VizData:
return df.to_dict(orient="records")
@property
@ -739,11 +738,11 @@ class TimeTableViz(BaseViz):
pt = df.pivot_table(index=DTTM_ALIAS, columns=columns, values=values)
pt.index = pt.index.map(str)
pt = pt.sort_index()
return dict(
records=pt.to_dict(orient="index"),
columns=list(pt.columns),
is_group_by=bool(self.form_data.get("groupby")),
)
return {
"records": pt.to_dict(orient="index"),
"columns": list(pt.columns),
"is_group_by": bool(self.form_data.get("groupby")),
}
class CalHeatmapViz(BaseViz):
@ -2027,7 +2026,7 @@ class BaseDeckGLViz(BaseViz):
return (point.latitude, point.longitude)
except Exception as ex:
raise SpatialException(
_("Invalid spatial point encountered: %s" % latlog)
_(f"Invalid spatial point encountered: {latlog}")
) from ex
@staticmethod
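
Both spellings interpolate before the translation lookup, so swapping %-formatting for an f-string inside _() satisfies consider-using-f-string (C0209) without changing what gettext receives. A sketch with a hypothetical value, assuming flask_babel supplies _:

from flask_babel import gettext as _  # assumed import

latlog = "not-a-point"  # hypothetical bad input
msg_old = _("Invalid spatial point encountered: %s" % latlog)  # C0209
msg_new = _(f"Invalid spatial point encountered: {latlog}")    # this commit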

@ -18,7 +18,7 @@ class Row:
def __init__(self, values):
self.values = values
def __name__(self): # pylint: disable=no-self-use
def __name__(self):
return "Row"
def __iter__(self):