chore(pylint): Enable useless-suppression check (#16388)

Co-authored-by: John Bodley <john.bodley@airbnb.com>
John Bodley authored 2021-08-23 08:58:41 -07:00, committed by GitHub
parent 970d762779
commit 7e4c940314
66 changed files with 128 additions and 226 deletions

View File

@ -70,7 +70,8 @@ confidence=
# either give multiple identifier separated by comma (,) or put this option
# multiple time (only on the command line, not in the configuration file where
# it should appear only once). See also the "--disable" option for examples.
#enable=
enable=
useless-suppression,
# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifiers separated by comma (,) or put this
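For context, the check enabled here, useless-suppression (pylint message I0021), reports # pylint: disable=... comments that no longer suppress anything, which is why most of the hunks below simply delete stale suppressions or trim them down. A minimal sketch of the kind of code it flags, using a hypothetical snippet that is not part of this commit:

# example.py -- hypothetical snippet, not from the Superset codebase
def add(x: int, y: int) -> int:  # pylint: disable=unused-argument
    # Both parameters are used, so the disable above suppresses nothing.
    # With useless-suppression enabled, pylint reports something like:
    #   example.py:2:0: I0021: Useless suppression of 'unused-argument' (useless-suppression)
    return x + y

Running pylint with --enable=useless-suppression over such a file surfaces the stale comment, mirroring the enable= entry added to the pylint configuration above.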

View File

@ -40,7 +40,6 @@ class ImportChartsCommand(BaseCommand):
until it finds one that matches.
"""
# pylint: disable=unused-argument
def __init__(self, contents: Dict[str, str], *args: Any, **kwargs: Any):
self.contents = contents
self.args = args

View File

@ -210,9 +210,7 @@ pivot_v2_aggfunc_map = {
}
def pivot_table_v2( # pylint: disable=too-many-branches
df: pd.DataFrame, form_data: Dict[str, Any]
) -> pd.DataFrame:
def pivot_table_v2(df: pd.DataFrame, form_data: Dict[str, Any]) -> pd.DataFrame:
"""
Pivot table v2.
"""

View File

@ -82,7 +82,7 @@ def superset() -> None:
"""This is a management script for the Superset application."""
@app.shell_context_processor
def make_shell_context() -> Dict[str, Any]: # pylint: disable=unused-variable
def make_shell_context() -> Dict[str, Any]:
return dict(app=app, db=db)
@ -297,7 +297,6 @@ if feature_flags.get("VERSIONED_EXPORT"):
"the exception traceback in the log"
)
# pylint: disable=too-many-locals
@superset.command()
@with_appcontext
@click.option(
@ -419,7 +418,6 @@ else:
with open(dashboard_file, "w") as data_stream:
data_stream.write(data)
# pylint: disable=too-many-locals
@superset.command()
@with_appcontext
@click.option(

View File

@ -70,7 +70,6 @@ class ImportModelsCommand(BaseCommand):
db.session.rollback()
raise self.import_error() from ex
# pylint: disable=too-many-branches
def validate(self) -> None:
exceptions: List[ValidationError] = []

View File

@ -85,7 +85,7 @@ class ImportExamplesCommand(ImportModelsCommand):
| ImportDashboardsCommand._get_uuids()
)
# pylint: disable=too-many-locals, arguments-differ, too-many-branches
# pylint: disable=too-many-locals, arguments-differ
@staticmethod
def _import(
session: Session,

View File

@ -14,7 +14,6 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=R
import logging
from datetime import datetime, timedelta
from typing import Any, Dict, List, NamedTuple, Optional
@ -66,7 +65,7 @@ DEPRECATED_EXTRAS_FIELDS = (
)
class QueryObject:
class QueryObject: # pylint: disable=too-many-instance-attributes
"""
The query object's schema matches the interfaces of DB connectors like sqla
and druid. The query objects are constructed on the client.
@ -99,7 +98,7 @@ class QueryObject:
is_rowcount: bool
time_offsets: List[str]
def __init__(
def __init__( # pylint: disable=too-many-arguments,too-many-locals
self,
datasource: Optional[DatasourceDict] = None,
result_type: Optional[ChartDataResultType] = None,

View File

@ -79,7 +79,7 @@ def add_types(engine: Engine, metadata: Metadata) -> None:
# add a tag for each object type
insert = tag.insert()
for type_ in ObjectTypes.__members__: # pylint: disable=not-an-iterable
for type_ in ObjectTypes.__members__:
try:
engine.execute(insert, name=f"type:{type_}", type=TagTypes.type)
except IntegrityError:

View File

@ -38,9 +38,7 @@ from flask import Blueprint
from flask_appbuilder.security.manager import AUTH_DB
from pandas.io.parsers import STR_NA_VALUES
from superset.jinja_context import ( # pylint: disable=unused-import
BaseTemplateProcessor,
)
from superset.jinja_context import BaseTemplateProcessor
from superset.stats_logger import DummyStatsLogger
from superset.typing import CacheConfig
from superset.utils.core import is_test, parse_boolean_string
@ -51,12 +49,9 @@ from superset.utils.logging_configurator import DefaultLoggingConfigurator
logger = logging.getLogger(__name__)
if TYPE_CHECKING:
from flask_appbuilder.security.sqla import models # pylint: disable=unused-import
from superset.connectors.sqla.models import ( # pylint: disable=unused-import
SqlaTable,
)
from superset.models.core import Database # pylint: disable=unused-import
from flask_appbuilder.security.sqla import models
from superset.connectors.sqla.models import SqlaTable
from superset.models.core import Database
# Realtime stats logger, a StatsD implementation exists
STATS_LOGGER = DummyStatsLogger()
@ -96,9 +91,7 @@ def _try_json_readversion(filepath: str) -> Optional[str]:
return None
def _try_json_readsha( # pylint: disable=unused-argument
filepath: str, length: int
) -> Optional[str]:
def _try_json_readsha(filepath: str, length: int) -> Optional[str]:
try:
with open(filepath, "r") as f:
return json.load(f).get("GIT_SHA")[:length]
@ -928,7 +921,7 @@ TRACKING_URL_TRANSFORMER = lambda x: x
HIVE_POLL_INTERVAL = int(timedelta(seconds=5).total_seconds())
# Interval between consecutive polls when using Presto Engine
# See here: https://github.com/dropbox/PyHive/blob/8eb0aeab8ca300f3024655419b93dad926c1a351/pyhive/presto.py#L93 # pylint: disable=line-too-long
# See here: https://github.com/dropbox/PyHive/blob/8eb0aeab8ca300f3024655419b93dad926c1a351/pyhive/presto.py#L93 # pylint: disable=line-too-long,useless-suppression
PRESTO_POLL_INTERVAL = int(timedelta(seconds=1).total_seconds())
# Allow for javascript controls components
@ -1266,7 +1259,7 @@ if CONFIG_PATH_ENV_VAR in os.environ:
elif importlib.util.find_spec("superset_config") and not is_test():
try:
import superset_config # pylint: disable=import-error
from superset_config import * # type: ignore # pylint: disable=import-error,wildcard-import,unused-wildcard-import
from superset_config import * # type: ignore # pylint: disable=import-error,wildcard-import
print(f"Loaded your LOCAL configuration at [{superset_config.__file__}]")
except Exception:

View File

@ -75,8 +75,8 @@ class ConnectorRegistry:
return datasources
@classmethod
def get_datasource_by_id( # pylint: disable=too-many-arguments
cls, session: Session, datasource_id: int,
def get_datasource_by_id(
cls, session: Session, datasource_id: int
) -> "BaseDatasource":
"""
Find a datasource instance based on the unique id.

View File

@ -19,7 +19,7 @@ import json
import logging
import re
from collections import defaultdict, OrderedDict
from dataclasses import dataclass, field # pylint: disable=wrong-import-order
from dataclasses import dataclass, field
from datetime import datetime, timedelta
from typing import (
Any,
@ -471,9 +471,7 @@ sqlatable_user = Table(
)
class SqlaTable( # pylint: disable=too-many-public-methods,too-many-instance-attributes
Model, BaseDatasource
):
class SqlaTable(Model, BaseDatasource): # pylint: disable=too-many-public-methods
"""An ORM object for SqlAlchemy table references"""
@ -1290,7 +1288,7 @@ class SqlaTable( # pylint: disable=too-many-public-methods,too-many-instance-at
qry = qry.offset(row_offset)
if (
is_timeseries # pylint: disable=too-many-boolean-expressions
is_timeseries
and timeseries_limit
and not time_groupby_inline
and groupby_exprs_sans_timestamp

View File

@ -53,9 +53,7 @@ from superset.views.base import (
logger = logging.getLogger(__name__)
class TableColumnInlineView( # pylint: disable=too-many-ancestors
CompactCRUDMixin, SupersetModelView
):
class TableColumnInlineView(CompactCRUDMixin, SupersetModelView):
datamodel = SQLAInterface(models.TableColumn)
# TODO TODO, review need for this on related_views
class_permission_name = "Dataset"
@ -196,9 +194,7 @@ class TableColumnInlineView( # pylint: disable=too-many-ancestors
check_ownership(item.table)
class SqlMetricInlineView( # pylint: disable=too-many-ancestors
CompactCRUDMixin, SupersetModelView
):
class SqlMetricInlineView(CompactCRUDMixin, SupersetModelView):
datamodel = SQLAInterface(models.SqlMetric)
class_permission_name = "Dataset"
method_permission_name = MODEL_VIEW_RW_METHOD_PERMISSION_MAP
@ -301,9 +297,7 @@ class RowLevelSecurityListWidget(
super().__init__(**kwargs)
class RowLevelSecurityFiltersModelView( # pylint: disable=too-many-ancestors
SupersetModelView, DeleteMixin
):
class RowLevelSecurityFiltersModelView(SupersetModelView, DeleteMixin):
datamodel = SQLAInterface(models.RowLevelSecurityFilter)
list_widget = cast(SupersetListWidget, RowLevelSecurityListWidget)
@ -561,7 +555,7 @@ class TableModelView( # pylint: disable=too-many-ancestors
@action(
"refresh", __("Refresh Metadata"), __("Refresh column metadata"), "fa-refresh"
)
def refresh( # pylint: disable=no-self-use, too-many-branches
def refresh( # pylint: disable=no-self-use,
self, tables: Union["TableModelView", List["TableModelView"]]
) -> FlaskResponse:
logger.warning(

View File

@ -222,7 +222,7 @@ class DashboardRestApi(BaseSupersetModelRestApi):
)
@etag_cache(
get_last_modified=lambda _self, id_or_slug: DashboardDAO.get_dashboard_changed_on( # pylint: disable=line-too-long
get_last_modified=lambda _self, id_or_slug: DashboardDAO.get_dashboard_changed_on( # pylint: disable=line-too-long,useless-suppression
id_or_slug
),
max_age=0,
@ -279,7 +279,7 @@ class DashboardRestApi(BaseSupersetModelRestApi):
return self.response_404()
@etag_cache(
get_last_modified=lambda _self, id_or_slug: DashboardDAO.get_dashboard_and_datasets_changed_on( # pylint: disable=line-too-long
get_last_modified=lambda _self, id_or_slug: DashboardDAO.get_dashboard_and_datasets_changed_on( # pylint: disable=line-too-long,useless-suppression
id_or_slug
),
max_age=0,
@ -340,7 +340,7 @@ class DashboardRestApi(BaseSupersetModelRestApi):
return self.response_404()
@etag_cache(
get_last_modified=lambda _self, id_or_slug: DashboardDAO.get_dashboard_and_slices_changed_on( # pylint: disable=line-too-long
get_last_modified=lambda _self, id_or_slug: DashboardDAO.get_dashboard_and_slices_changed_on( # pylint: disable=line-too-long,useless-suppression
id_or_slug
),
max_age=0,

View File

@ -43,7 +43,6 @@ class ImportDashboardsCommand(BaseCommand):
until it finds one that matches.
"""
# pylint: disable=unused-argument
def __init__(self, contents: Dict[str, str], *args: Any, **kwargs: Any):
self.contents = contents
self.args = args

View File

@ -82,7 +82,7 @@ def import_chart(
def import_dashboard(
# pylint: disable=too-many-locals,too-many-branches,too-many-statements
# pylint: disable=too-many-locals,too-many-statements
dashboard_to_import: Dashboard,
dataset_id_mapping: Optional[Dict[int, int]] = None,
import_time: Optional[int] = None,

View File

@ -217,8 +217,6 @@ class BaseDashboardSchema(Schema):
data["slug"] = re.sub(r"[^\w\-]+", "", data["slug"])
return data
# pylint: disable=no-self-use,unused-argument
class DashboardPostSchema(BaseDashboardSchema):
dashboard_title = fields.String(

View File

@ -275,9 +275,7 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
action=lambda self, *args, **kwargs: f"{self.__class__.__name__}.put",
log_to_statsd=False,
)
def put( # pylint: disable=too-many-return-statements, arguments-differ
self, pk: int
) -> Response:
def put(self, pk: int) -> Response:
"""Changes a Database
---
put:
@ -355,7 +353,7 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
action=lambda self, *args, **kwargs: f"{self.__class__.__name__}" f".delete",
log_to_statsd=False,
)
def delete(self, pk: int) -> Response: # pylint: disable=arguments-differ
def delete(self, pk: int) -> Response:
"""Deletes a Database
---
delete:
@ -591,9 +589,7 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
f".test_connection",
log_to_statsd=False,
)
def test_connection( # pylint: disable=too-many-return-statements
self,
) -> FlaskResponse:
def test_connection(self) -> FlaskResponse:
"""Tests a database connection
---
post:
@ -977,9 +973,7 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
f".validate_parameters",
log_to_statsd=False,
)
def validate_parameters( # pylint: disable=too-many-return-statements
self,
) -> FlaskResponse:
def validate_parameters(self) -> FlaskResponse:
"""validates database connection parameters
---
post:

View File

@ -48,7 +48,7 @@ class CreateDatabaseCommand(BaseCommand):
try:
# Test connection before starting create transaction
TestConnectionDatabaseCommand(self._actor, self._properties).run()
except Exception as ex: # pylint: disable=broad-except
except Exception as ex:
event_logger.log_with_context(
action=f"db_creation_failed.{ex.__class__.__name__}",
engine=self._properties.get("sqlalchemy_uri", "").split(":")[0],

View File

@ -38,7 +38,6 @@ class ImportDatabasesCommand(BaseCommand):
until it finds one that matches.
"""
# pylint: disable=unused-argument
def __init__(self, contents: Dict[str, str], *args: Any, **kwargs: Any):
self.contents = contents
self.args = args

View File

@ -110,7 +110,7 @@ class TestConnectionDatabaseCommand(BaseCommand):
engine=database.db_engine_spec.__name__,
)
raise DatabaseSecurityUnsafeError(message=str(ex)) from ex
except Exception as ex: # pylint: disable=broad-except
except Exception as ex:
event_logger.log_with_context(
action=f"test_connection_error.{ex.__class__.__name__}",
engine=database.db_engine_spec.__name__,

View File

@ -116,7 +116,7 @@ class ValidateDatabaseParametersCommand(BaseCommand):
try:
with closing(engine.raw_connection()) as conn:
alive = engine.dialect.do_ping(conn)
except Exception as ex: # pylint: disable=broad-except
except Exception as ex:
url = make_url(sqlalchemy_uri)
context = {
"hostname": url.host,

View File

@ -43,7 +43,6 @@ class ImportDatasetsCommand(BaseCommand):
until it finds one that matches.
"""
# pylint: disable=unused-argument
def __init__(self, contents: Dict[str, str], *args: Any, **kwargs: Any):
self.contents = contents
self.args = args

View File

@ -14,8 +14,6 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=too-many-branches
import gzip
import json
import logging

View File

@ -75,7 +75,7 @@ if TYPE_CHECKING:
logger = logging.getLogger()
class TimeGrain(NamedTuple): # pylint: disable=too-few-public-methods
class TimeGrain(NamedTuple):
name: str # TODO: redundant field, remove
label: str
function: str
@ -108,9 +108,7 @@ builtin_time_grains: Dict[Optional[str], str] = {
}
class TimestampExpression(
ColumnClause
): # pylint: disable=abstract-method,too-many-ancestors,too-few-public-methods
class TimestampExpression(ColumnClause): # pylint: disable=abstract-method
def __init__(self, expr: str, col: ColumnClause, **kwargs: Any) -> None:
"""Sqlalchemy class that can be can be used to render native column elements
respeting engine-specific quoting rules as part of a string-based expression.

View File

@ -108,8 +108,7 @@ class GSheetsEngineSpec(SqliteEngineSpec):
encrypted_extra: Optional[ # pylint: disable=unused-argument
Dict[str, Any]
] = None,
) -> str: # pylint: disable=unused-variable
) -> str:
return "gsheets://"
@classmethod

View File

@ -341,7 +341,7 @@ class PrestoEngineSpec(BaseEngineSpec): # pylint: disable=too-many-public-metho
)
@classmethod
def _parse_structural_column( # pylint: disable=too-many-locals,too-many-branches
def _parse_structural_column( # pylint: disable=too-many-locals
cls,
parent_column_name: str,
parent_data_type: str,
@ -655,9 +655,7 @@ class PrestoEngineSpec(BaseEngineSpec): # pylint: disable=too-many-public-metho
)
@classmethod
def estimate_statement_cost( # pylint: disable=too-many-locals
cls, statement: str, cursor: Any
) -> Dict[str, Any]:
def estimate_statement_cost(cls, statement: str, cursor: Any) -> Dict[str, Any]:
"""
Run a SQL query that estimates the cost of a given statement.
@ -749,7 +747,7 @@ class PrestoEngineSpec(BaseEngineSpec): # pylint: disable=too-many-public-metho
if tt == utils.TemporalType.DATE:
return f"""from_iso8601_date('{dttm.date().isoformat()}')"""
if tt == utils.TemporalType.TIMESTAMP:
return f"""from_iso8601_timestamp('{dttm.isoformat(timespec="microseconds")}')""" # pylint: disable=line-too-long
return f"""from_iso8601_timestamp('{dttm.isoformat(timespec="microseconds")}')""" # pylint: disable=line-too-long,useless-suppression
return None
@classmethod
@ -777,7 +775,7 @@ class PrestoEngineSpec(BaseEngineSpec): # pylint: disable=too-many-public-metho
return datasource_names
@classmethod
def expand_data( # pylint: disable=too-many-locals,too-many-branches
def expand_data( # pylint: disable=too-many-locals
cls, columns: List[Dict[Any, Any]], data: List[Dict[Any, Any]]
) -> Tuple[List[Dict[Any, Any]], List[Dict[Any, Any]], List[Dict[Any, Any]]]:
"""

View File

@ -37,7 +37,6 @@ class SqliteEngineSpec(BaseEngineSpec):
engine = "sqlite"
engine_name = "SQLite"
# pylint: disable=line-too-long
_time_grain_expressions = {
None: "{col}",
"PT1S": "DATETIME(STRFTIME('%Y-%m-%dT%H:%M:%S', {col}))",

View File

@ -29,7 +29,6 @@ class TrinoEngineSpec(BaseEngineSpec):
engine = "trino"
engine_name = "Trino"
# pylint: disable=line-too-long
_time_grain_expressions = {
None: "{col}",
"PT1S": "date_trunc('second', CAST({col} AS TIMESTAMP))",
@ -110,9 +109,7 @@ class TrinoEngineSpec(BaseEngineSpec):
return True
@classmethod
def estimate_statement_cost( # pylint: disable=too-many-locals
cls, statement: str, cursor: Any
) -> Dict[str, Any]:
def estimate_statement_cost(cls, statement: str, cursor: Any) -> Dict[str, Any]:
"""
Run a SQL query that estimates the cost of a given statement.

View File

@ -14,8 +14,7 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=too-few-public-methods,invalid-name
from dataclasses import dataclass # pylint: disable=wrong-import-order
from dataclasses import dataclass
from enum import Enum
from typing import Any, Dict, Optional

View File

@ -98,7 +98,6 @@ def load_birth_names(
only_metadata: bool = False, force: bool = False, sample: bool = False
) -> None:
"""Loading birth name dataset from a zip file in the repo"""
# pylint: disable=too-many-locals
tbl_name = "birth_names"
database = get_example_database()
table_exists = database.has_table_by_name(tbl_name)

View File

@ -46,7 +46,7 @@ def load_country_map_data(only_metadata: bool = False, force: bool = False) -> N
)
data = pd.read_csv(csv_bytes, encoding="utf-8")
data["dttm"] = datetime.datetime.now().date()
data.to_sql( # pylint: disable=no-member
data.to_sql(
tbl_name,
database.get_sqla_engine(),
if_exists="replace",

View File

@ -38,9 +38,9 @@ def load_flights(only_metadata: bool = False, force: bool = False) -> None:
airports = pd.read_csv(airports_bytes, encoding="latin-1")
airports = airports.set_index("IATA_CODE")
pdf["ds"] = ( # pylint: disable=unsupported-assignment-operation
pdf.YEAR.map(str) + "-0" + pdf.MONTH.map(str) + "-0" + pdf.DAY.map(str)
)
pdf[ # pylint: disable=unsupported-assignment-operation,useless-suppression
"ds"
] = (pdf.YEAR.map(str) + "-0" + pdf.MONTH.map(str) + "-0" + pdf.DAY.map(str))
pdf.ds = pd.to_datetime(pdf.ds)
pdf.drop(columns=["DAY", "MONTH", "YEAR"])
pdf = pdf.join(airports, on="ORIGIN_AIRPORT", rsuffix="_ORIG")

View File

@ -54,7 +54,7 @@ def load_long_lat_data(only_metadata: bool = False, force: bool = False) -> None
pdf["radius_miles"] = [random.uniform(1, 3) for _ in range(len(pdf))]
pdf["geohash"] = pdf[["LAT", "LON"]].apply(lambda x: geohash.encode(*x), axis=1)
pdf["delimited"] = pdf["LAT"].map(str).str.cat(pdf["LON"].map(str), sep=",")
pdf.to_sql( # pylint: disable=no-member
pdf.to_sql(
tbl_name,
database.get_sqla_engine(),
if_exists="replace",

View File

@ -41,7 +41,7 @@ from .helpers import (
)
def load_world_bank_health_n_pop( # pylint: disable=too-many-locals, too-many-statements
def load_world_bank_health_n_pop( # pylint: disable=too-many-locals
only_metadata: bool = False, force: bool = False, sample: bool = False,
) -> None:
"""Loads the world bank health dataset, slices and a dashboard"""

View File

@ -65,9 +65,7 @@ class UIManifestProcessor:
self.parse_manifest_json()
@app.context_processor
def get_manifest() -> Dict[ # pylint: disable=unused-variable
str, Callable[[str], List[str]]
]:
def get_manifest() -> Dict[str, Callable[[str], List[str]]]:
loaded_chunks = set()
def get_files(bundle: str, asset_type: str = "js") -> List[str]:

View File

@ -67,7 +67,7 @@ class SupersetAppInitializer:
self.config = app.config
self.manifest: Dict[Any, Any] = {}
@deprecated(details="use self.superset_app instead of self.flask_app") # type: ignore # pylint: disable=line-too-long
@deprecated(details="use self.superset_app instead of self.flask_app") # type: ignore # pylint: disable=line-too-long,useless-suppression
@property
def flask_app(self) -> SupersetApp:
return self.superset_app
@ -112,7 +112,7 @@ class SupersetAppInitializer:
# models which in turn try to import
# the global Flask app
#
# pylint: disable=import-outside-toplevel,too-many-branches,too-many-locals,too-many-statements
# pylint: disable=import-outside-toplevel,too-many-locals,too-many-statements
from superset.annotation_layers.api import AnnotationLayerRestApi
from superset.annotation_layers.annotations.api import AnnotationRestApi
from superset.async_events.api import AsyncEventsRestApi

View File

@ -378,7 +378,7 @@ def validate_template_context(
return validate_context_types(context)
class BaseTemplateProcessor: # pylint: disable=too-few-public-methods
class BaseTemplateProcessor:
"""
Base class for database-specific jinja context
"""
@ -442,9 +442,7 @@ class JinjaTemplateProcessor(BaseTemplateProcessor):
)
class NoOpTemplateProcessor(
BaseTemplateProcessor
): # pylint: disable=too-few-public-methods
class NoOpTemplateProcessor(BaseTemplateProcessor):
def process_template(self, sql: str, **kwargs: Any) -> str:
"""
Makes processing a template a noop

View File

@ -14,7 +14,7 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=line-too-long,unused-argument,ungrouped-imports
# pylint: disable=line-too-long,unused-argument
"""A collection of ORM sqlalchemy models for Superset"""
import enum
import json
@ -167,7 +167,7 @@ class Database(
extra_import_fields = ["password"]
export_children = ["tables"]
def __repr__(self) -> str: # pylint: disable=invalid-repr-returned
def __repr__(self) -> str:
return self.name
@property
@ -724,7 +724,7 @@ class Database(
@memoized
def get_dialect(self) -> Dialect:
sqla_url = url.make_url(self.sqlalchemy_uri_decrypted)
return sqla_url.get_dialect()() # pylint: disable=no-member
return sqla_url.get_dialect()()
sqla.event.listen(Database, "after_insert", security_manager.set_perm)

View File

@ -60,8 +60,6 @@ from superset.utils.decorators import debounce
from superset.utils.hashing import md5_sha_from_str
from superset.utils.urls import get_url_path
# pylint: disable=too-many-public-methods
metadata = Model.metadata # pylint: disable=no-member
config = app.config
logger = logging.getLogger(__name__)
@ -134,7 +132,6 @@ DashboardRoles = Table(
class Dashboard( # pylint: disable=too-many-instance-attributes
Model, AuditMixinNullable, ImportExportMixin
):
"""The dashboard object!"""
__tablename__ = "dashboards"

View File

@ -50,7 +50,7 @@ class ReportScheduleType(str, enum.Enum):
class ReportScheduleValidatorType(str, enum.Enum):
""" Validator types for alerts """
"""Validator types for alerts"""
NOT_NULL = "not null"
OPERATOR = "operator"
@ -153,10 +153,7 @@ class ReportSchedule(Model, AuditMixinNullable):
return get_description(self.crontab)
class ReportRecipients(
Model, AuditMixinNullable
): # pylint: disable=too-few-public-methods
class ReportRecipients(Model, AuditMixinNullable):
"""
Report Recipients, meant to support multiple notification types, eg: Slack, email
"""

View File

@ -40,7 +40,6 @@ class ImportSavedQueriesCommand(BaseCommand):
until it finds one that matches.
"""
# pylint: disable=unused-argument
def __init__(self, contents: Dict[str, str], *args: Any, **kwargs: Any):
self.contents = contents
self.args = args

View File

@ -336,7 +336,7 @@ class ReportScheduleRestApi(BaseSupersetModelRestApi):
@safe
@statsd_metrics
@permission_name("put")
def put(self, pk: int) -> Response: # pylint: disable=too-many-return-statements
def put(self, pk: int) -> Response:
"""Updates an Report Schedule
---
put:

View File

@ -117,8 +117,8 @@ class BaseReportState:
self._session.merge(self._report_schedule)
self._session.commit()
def create_log( # pylint: disable=too-many-arguments
self, state: ReportState, error_message: Optional[str] = None,
def create_log(
self, state: ReportState, error_message: Optional[str] = None
) -> None:
"""
Creates a Report execution log, uses the current computed last_value for Alerts
@ -578,7 +578,7 @@ class ReportScheduleStateMachine: # pylint: disable=too-few-public-methods
if (self._report_schedule.last_state is None and state_cls.initial) or (
self._report_schedule.last_state in state_cls.current_states
):
state_cls( # pylint: disable=not-callable
state_cls(
self._session,
self._report_schedule,
self._scheduled_dttm,

View File

@ -72,7 +72,7 @@ def destringify(obj: str) -> Any:
class SupersetResultSet:
def __init__( # pylint: disable=too-many-locals,too-many-branches
def __init__( # pylint: disable=too-many-locals
self,
data: DbapiResult,
cursor_description: DbapiDescription,

View File

@ -900,7 +900,7 @@ class SupersetSecurityManager( # pylint: disable=too-many-public-methods
return pvm.permission.name in {"can_override_role_permissions", "can_approve"}
def set_perm( # pylint: disable=no-self-use,unused-argument
def set_perm( # pylint: disable=unused-argument
self, mapper: Mapper, connection: Connection, target: "BaseDatasource"
) -> None:
"""
@ -910,7 +910,7 @@ class SupersetSecurityManager( # pylint: disable=too-many-public-methods
:param connection: The DB-API connection
:param target: The mapped instance being persisted
"""
link_table = target.__table__ # pylint: disable=no-member
link_table = target.__table__
if target.perm != target.get_perm():
connection.execute(
link_table.update()
@ -974,8 +974,7 @@ class SupersetSecurityManager( # pylint: disable=too-many-public-methods
)
def raise_for_access(
# pylint: disable=too-many-arguments,too-many-branches,
# pylint: disable=too-many-locals
# pylint: disable=too-many-arguments,too-many-locals
self,
database: Optional["Database"] = None,
datasource: Optional["BaseDatasource"] = None,

View File

@ -15,7 +15,7 @@
# specific language governing permissions and limitations
# under the License.
import logging
from dataclasses import dataclass # pylint: disable=wrong-import-order
from dataclasses import dataclass
from enum import Enum
from typing import List, Optional, Set
from urllib import parse
@ -79,7 +79,7 @@ def strip_comments_from_sql(statement: str) -> str:
@dataclass(eq=True, frozen=True)
class Table: # pylint: disable=too-few-public-methods
class Table:
"""
A fully qualified SQL table conforming to [[catalog.]schema.]table.
"""
@ -265,9 +265,7 @@ class ParsedQuery:
exec_sql += f"CREATE {method} {full_table_name} AS \n{sql}"
return exec_sql
def _extract_from_token( # pylint: disable=too-many-branches
self, token: Token
) -> None:
def _extract_from_token(self, token: Token) -> None:
"""
<Identifier> stores a list of subtokens and <IdentifierList> stores lists of
subtoken lists.

View File

@ -282,7 +282,7 @@ def deliver_dashboard( # pylint: disable=too-many-locals
except WebDriverException:
# Some webdrivers do not support screenshots for elements.
# In such cases, take a screenshot of the entire page.
screenshot = driver.screenshot() # pylint: disable=no-member
screenshot = driver.screenshot()
finally:
destroy_webdriver(driver)
@ -432,7 +432,7 @@ def _get_slice_visualization(
except WebDriverException:
# Some webdrivers do not support screenshots for elements.
# In such cases, take a screenshot of the entire page.
screenshot = driver.screenshot() # pylint: disable=no-member
screenshot = driver.screenshot()
finally:
destroy_webdriver(driver)

View File

@ -63,8 +63,6 @@ def increment_id(redis_id: str) -> str:
class AsyncQueryManager:
# pylint: disable=too-many-instance-attributes
MAX_EVENT_COUNT = 100
STATUS_PENDING = "pending"
STATUS_RUNNING = "running"
@ -114,9 +112,7 @@ class AsyncQueryManager:
self._jwt_secret = config["GLOBAL_ASYNC_QUERIES_JWT_SECRET"]
@app.after_request
def validate_session( # pylint: disable=unused-variable
response: Response,
) -> Response:
def validate_session(response: Response) -> Response:
user_id = None
try:

View File

@ -246,7 +246,7 @@ class FilterOperator(str, Enum):
ILIKE = "ILIKE"
IS_NULL = "IS NULL"
IS_NOT_NULL = "IS NOT NULL"
IN = "IN" # pylint: disable=invalid-name
IN = "IN"
NOT_IN = "NOT IN"
REGEX = "REGEX"
IS_TRUE = "IS TRUE"
@ -291,7 +291,7 @@ class QuerySource(Enum):
SQL_LAB = 2
class QueryStatus(str, Enum): # pylint: disable=too-few-public-methods
class QueryStatus(str, Enum):
"""Enum-type class for query statuses"""
STOPPED: str = "stopped"
@ -545,9 +545,7 @@ def format_timedelta(time_delta: timedelta) -> str:
return str(time_delta)
def base_json_conv( # pylint: disable=inconsistent-return-statements,too-many-return-statements
obj: Any,
) -> Any:
def base_json_conv(obj: Any,) -> Any: # pylint: disable=inconsistent-return-statements
if isinstance(obj, memoryview):
obj = obj.tobytes()
if isinstance(obj, np.int64):
@ -709,7 +707,7 @@ def generic_find_constraint_name(
return None
def generic_find_fk_constraint_name( # pylint: disable=invalid-name
def generic_find_fk_constraint_name(
table: str, columns: Set[str], referenced: str, insp: Inspector
) -> Optional[str]:
"""Utility to find a foreign-key constraint name in alembic migrations"""
@ -797,7 +795,7 @@ class SigalrmTimeout:
logger.warning("timeout can't be used in the current context")
logger.exception(ex)
def __exit__( # pylint: disable=redefined-outer-name,unused-variable,redefined-builtin
def __exit__( # pylint: disable=redefined-outer-name,redefined-builtin
self, type: Any, value: Any, traceback: TracebackType
) -> None:
try:
@ -816,7 +814,7 @@ class TimerTimeout:
def __enter__(self) -> None:
self.timer.start()
def __exit__( # pylint: disable=redefined-outer-name,unused-variable,redefined-builtin
def __exit__( # pylint: disable=redefined-outer-name,redefined-builtin
self, type: Any, value: Any, traceback: TracebackType
) -> None:
self.timer.cancel()
@ -837,9 +835,7 @@ timeout: Union[Type[TimerTimeout], Type[SigalrmTimeout]] = (
def pessimistic_connection_handling(some_engine: Engine) -> None:
@event.listens_for(some_engine, "engine_connect")
def ping_connection( # pylint: disable=unused-variable
connection: Connection, branch: bool
) -> None:
def ping_connection(connection: Connection, branch: bool) -> None:
if branch:
# 'branch' refers to a sub-connection of a connection,
# we don't want to bother pinging on these.
@ -1129,9 +1125,7 @@ def merge_extra_form_data(form_data: Dict[str, Any]) -> None:
)
def merge_extra_filters( # pylint: disable=too-many-branches
form_data: Dict[str, Any],
) -> None:
def merge_extra_filters(form_data: Dict[str, Any]) -> None:
# extra_filters are temporary/contextual filters (using the legacy constructs)
# that are external to the slice definition. We use those for dynamic
# interactive filters like the ones emitted by the "Filter Box" visualization.
@ -1613,7 +1607,7 @@ def is_test() -> bool:
return strtobool(os.environ.get("SUPERSET_TESTENV", "false"))
def get_time_filter_status( # pylint: disable=too-many-branches
def get_time_filter_status(
datasource: "BaseDatasource", applied_time_extras: Dict[str, str],
) -> Tuple[List[Dict[str, str]], List[Dict[str, str]]]:
temporal_columns = {col.column_name for col in datasource.columns if col.is_dttm}

View File

@ -53,7 +53,7 @@ logger = logging.getLogger(__name__)
def parse_human_datetime(human_readable: str) -> datetime:
""" Returns ``datetime.datetime`` from human readable strings """
"""Returns ``datetime.datetime`` from human readable strings"""
x_periods = r"^\s*([0-9]+)\s+(second|minute|hour|day|week|month|quarter|year)s?\s*$"
if re.search(x_periods, human_readable, re.IGNORECASE):
raise TimeRangeAmbiguousError(human_readable)
@ -75,7 +75,7 @@ def parse_human_datetime(human_readable: str) -> datetime:
def normalize_time_delta(human_readable: str) -> Dict[str, int]:
x_unit = r"^\s*([0-9]+)\s+(second|minute|hour|day|week|month|quarter|year)s?\s+(ago|later)*$" # pylint: disable=line-too-long
x_unit = r"^\s*([0-9]+)\s+(second|minute|hour|day|week|month|quarter|year)s?\s+(ago|later)*$" # pylint: disable=line-too-long,useless-suppression
matched = re.match(x_unit, human_readable, re.IGNORECASE)
if not matched:
raise TimeDeltaAmbiguousError(human_readable)
@ -189,19 +189,19 @@ def get_since_until(
and time_range.startswith("previous calendar week")
and separator not in time_range
):
time_range = "DATETRUNC(DATEADD(DATETIME('today'), -1, WEEK), WEEK) : DATETRUNC(DATETIME('today'), WEEK)" # pylint: disable=line-too-long
time_range = "DATETRUNC(DATEADD(DATETIME('today'), -1, WEEK), WEEK) : DATETRUNC(DATETIME('today'), WEEK)" # pylint: disable=line-too-long,useless-suppression
if (
time_range
and time_range.startswith("previous calendar month")
and separator not in time_range
):
time_range = "DATETRUNC(DATEADD(DATETIME('today'), -1, MONTH), MONTH) : DATETRUNC(DATETIME('today'), MONTH)" # pylint: disable=line-too-long
time_range = "DATETRUNC(DATEADD(DATETIME('today'), -1, MONTH), MONTH) : DATETRUNC(DATETIME('today'), MONTH)" # pylint: disable=line-too-long,useless-suppression
if (
time_range
and time_range.startswith("previous calendar year")
and separator not in time_range
):
time_range = "DATETRUNC(DATEADD(DATETIME('today'), -1, YEAR), YEAR) : DATETRUNC(DATETIME('today'), YEAR)" # pylint: disable=line-too-long
time_range = "DATETRUNC(DATEADD(DATETIME('today'), -1, YEAR), YEAR) : DATETRUNC(DATETIME('today'), YEAR)" # pylint: disable=line-too-long,useless-suppression
if time_range and separator in time_range:
time_range_lookup = [
@ -211,11 +211,11 @@ def get_since_until(
),
(
r"^last\s+([0-9]+)\s+(second|minute|hour|day|week|month|year)s?$",
lambda delta, unit: f"DATEADD(DATETIME('{_relative_start}'), -{int(delta)}, {unit})", # pylint: disable=line-too-long
lambda delta, unit: f"DATEADD(DATETIME('{_relative_start}'), -{int(delta)}, {unit})", # pylint: disable=line-too-long,useless-suppression
),
(
r"^next\s+([0-9]+)\s+(second|minute|hour|day|week|month|year)s?$",
lambda delta, unit: f"DATEADD(DATETIME('{_relative_end}'), {int(delta)}, {unit})", # pylint: disable=line-too-long
lambda delta, unit: f"DATEADD(DATETIME('{_relative_end}'), {int(delta)}, {unit})", # pylint: disable=line-too-long,useless-suppression
),
(
r"^(DATETIME.*|DATEADD.*|DATETRUNC.*|LASTDAY.*|HOLIDAY.*)$",

View File

@ -194,7 +194,7 @@ class AbstractEventLogger(ABC):
)
@contextmanager
def log_context( # pylint: disable=too-many-locals
def log_context(
self, action: str, object_ref: Optional[str] = None, log_to_statsd: bool = True,
) -> Iterator[Callable[..., None]]:
"""

View File

@ -21,7 +21,6 @@ from unittest import mock
from werkzeug.wrappers import Request, Response
try:
# pylint: disable=import-error,import-outside-toplevel
from pyinstrument import Profiler
except ModuleNotFoundError:
Profiler = None

View File

@ -25,9 +25,7 @@ from superset.views.base import DeleteMixin, SupersetModelView
from superset.views.core import DAR
class AccessRequestsModelView( # pylint: disable=too-many-ancestors
SupersetModelView, DeleteMixin
):
class AccessRequestsModelView(SupersetModelView, DeleteMixin):
datamodel = SQLAInterface(DAR)
include_route_methods = RouteMethod.CRUD_SET
list_columns = [

View File

@ -122,9 +122,7 @@ class ReportView(BaseAlertReportView):
class_permission_name = "ReportSchedule"
class AlertModelView(
EnsureEnabledMixin, SupersetModelView
): # pylint: disable=too-many-ancestors
class AlertModelView(EnsureEnabledMixin, SupersetModelView):
datamodel = SQLAInterface(Alert)
route_base = "/alerts"
include_route_methods = RouteMethod.CRUD_SET | {"log"}

View File

@ -48,9 +48,7 @@ class StartEndDttmValidator: # pylint: disable=too-few-public-methods
)
class AnnotationModelView(
SupersetModelView, CompactCRUDMixin
): # pylint: disable=too-many-ancestors
class AnnotationModelView(SupersetModelView, CompactCRUDMixin):
datamodel = SQLAInterface(Annotation)
include_route_methods = RouteMethod.CRUD_SET | {"annotation"}
@ -108,7 +106,7 @@ class AnnotationModelView(
return super().render_app_template()
class AnnotationLayerModelView(SupersetModelView): # pylint: disable=too-many-ancestors
class AnnotationLayerModelView(SupersetModelView):
datamodel = SQLAInterface(AnnotationLayer)
include_route_methods = RouteMethod.CRUD_SET | {RouteMethod.API_READ}
related_views = [AnnotationModelView]

View File

@ -14,7 +14,6 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=R
from typing import Any
import simplejson as json
@ -45,7 +44,7 @@ class Api(BaseSupersetView):
@handle_api_exception
@has_access_api
@expose("/v1/query/", methods=["POST"])
def query(self) -> FlaskResponse:
def query(self) -> FlaskResponse: # pylint: disable=no-self-use
"""
Takes a query_obj constructed in the client and returns payload data response
for the given query_obj.
@ -65,7 +64,7 @@ class Api(BaseSupersetView):
@handle_api_exception
@has_access_api
@expose("/v1/form_data/", methods=["GET"])
def query_form_data(self) -> FlaskResponse:
def query_form_data(self) -> FlaskResponse: # pylint: disable=no-self-use
"""
Get the formdata stored in the database for existing slice.
params: slice_id: integer

View File

@ -14,7 +14,7 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import dataclasses # pylint: disable=wrong-import-order
import dataclasses
import functools
import logging
import traceback
@ -77,9 +77,7 @@ from superset.utils import core as utils
from .utils import bootstrap_user_data
if TYPE_CHECKING:
from superset.connectors.druid.views import ( # pylint: disable=unused-import
DruidClusterModelView,
)
from superset.connectors.druid.views import DruidClusterModelView
FRONTEND_CONF_KEYS = (
"SUPERSET_WEBSERVER_TIMEOUT",
@ -180,7 +178,7 @@ def api(f: Callable[..., FlaskResponse]) -> Callable[..., FlaskResponse]:
def wraps(self: "BaseSupersetView", *args: Any, **kwargs: Any) -> FlaskResponse:
try:
return f(self, *args, **kwargs)
except NoAuthorizationError as ex: # pylint: disable=broad-except
except NoAuthorizationError as ex:
logger.warning(ex)
return json_error_response(get_error_msg(), status=401)
except Exception as ex: # pylint: disable=broad-except
@ -277,9 +275,7 @@ def is_user_admin() -> bool:
class BaseSupersetView(BaseView):
@staticmethod
def json_response(
obj: Any, status: int = 200
) -> FlaskResponse: # pylint: disable=no-self-use
def json_response(obj: Any, status: int = 200) -> FlaskResponse:
return Response(
json.dumps(obj, default=utils.json_int_dttm_ser, ignore_nan=True),
status=status,

View File

@ -164,7 +164,8 @@ class BaseSupersetModelRestApi(ModelRestApi):
"<RELATED_FIELD>": ("<RELATED_FIELD_FIELD>", "<asc|desc>"),
...
}
""" # pylint: disable=pointless-string-statement
"""
related_field_filters: Dict[str, Union[RelatedFieldFilter, str]] = {}
"""
Declare the filters for related fields::
@ -172,7 +173,8 @@ class BaseSupersetModelRestApi(ModelRestApi):
related_fields = {
"<RELATED_FIELD>": <RelatedFieldFilter>)
}
""" # pylint: disable=pointless-string-statement
"""
filter_rel_fields: Dict[str, BaseFilter] = {}
"""
Declare the related field base filter::
@ -180,11 +182,9 @@ class BaseSupersetModelRestApi(ModelRestApi):
filter_rel_fields_field = {
"<RELATED_FIELD>": "<FILTER>")
}
""" # pylint: disable=pointless-string-statement
allowed_rel_fields: Set[str] = set()
"""
Declare a set of allowed related fields that the `related` endpoint supports
""" # pylint: disable=pointless-string-statement
allowed_rel_fields: Set[str] = set()
# Declare a set of allowed related fields that the `related` endpoint supports.
text_field_rel_fields: Dict[str, str] = {}
"""
@ -193,15 +193,12 @@ class BaseSupersetModelRestApi(ModelRestApi):
text_field_rel_fields = {
"<RELATED_FIELD>": "<RELATED_OBJECT_FIELD>"
}
""" # pylint: disable=pointless-string-statement
"""
allowed_distinct_fields: Set[str] = set()
openapi_spec_component_schemas: Tuple[Type[Schema], ...] = tuple()
"""
Add extra schemas to the OpenAPI component schemas section
""" # pylint: disable=pointless-string-statement
# Add extra schemas to the OpenAPI component schemas section.
add_columns: List[str]
edit_columns: List[str]
list_columns: List[str]

View File

@ -713,7 +713,7 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods
@event_logger.log_this
@expose("/explore/<datasource_type>/<int:datasource_id>/", methods=["GET", "POST"])
@expose("/explore/", methods=["GET", "POST"])
def explore( # pylint: disable=too-many-locals,too-many-return-statements,too-many-statements
def explore( # pylint: disable=too-many-locals
self, datasource_type: Optional[str] = None, datasource_id: Optional[int] = None
) -> FlaskResponse:
user_id = g.user.get_id() if g.user else None
@ -918,7 +918,7 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods
return [f for f in filters if not f.get("isExtra")]
def save_or_overwrite_slice(
# pylint: disable=too-many-arguments,too-many-locals,no-self-use
# pylint: disable=too-many-arguments,too-many-locals
self,
slc: Optional[Slice],
slice_add_perm: bool,
@ -1286,9 +1286,7 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods
@has_access_api
@event_logger.log_this
@expose("/testconn", methods=["POST", "GET"])
def testconn( # pylint: disable=too-many-return-statements,no-self-use
self,
) -> FlaskResponse:
def testconn(self) -> FlaskResponse: # pylint: disable=no-self-use
"""Tests a sqla connection"""
db_name = request.json.get("name")
uri = request.json.get("uri")
@ -1547,11 +1545,7 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods
Dash = Dashboard
qry = (
db.session.query(Dash)
.filter(
or_( # pylint: disable=comparison-with-callable
Dash.created_by_fk == user_id, Dash.changed_by_fk == user_id
)
)
.filter(or_(Dash.created_by_fk == user_id, Dash.changed_by_fk == user_id))
.order_by(Dash.changed_on.desc())
)
payload = [
@ -1864,8 +1858,8 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods
utils.error_msg_from_exception(ex), status=403
)
)
def dashboard( # pylint: disable=too-many-locals
self, # pylint: disable=no-self-use
def dashboard(
self,
dashboard_id_or_slug: str, # pylint: disable=unused-argument
add_extra_log_payload: Callable[..., None] = lambda **kwargs: None,
dashboard: Optional[Dashboard] = None,
@ -2212,9 +2206,7 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods
return self.results_exec(key)
@staticmethod
def results_exec( # pylint: disable=too-many-return-statements
key: str,
) -> FlaskResponse:
def results_exec(key: str) -> FlaskResponse:
"""Serves a key off of the results backend
It is possible to pass the `rows` query argument to limit the number
@ -2358,7 +2350,7 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods
@event_logger.log_this
@expose("/validate_sql_json/", methods=["POST", "GET"])
def validate_sql_json(
# pylint: disable=too-many-locals,too-many-return-statements,no-self-use
# pylint: disable=too-many-locals,no-self-use
self,
) -> FlaskResponse:
"""Validates that arbitrary sql is acceptable for the given database.
@ -2426,7 +2418,7 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods
return json_error_response(f"{msg}")
@staticmethod
def _sql_json_async( # pylint: disable=too-many-arguments
def _sql_json_async(
session: Session,
rendered_query: str,
query: Query,
@ -2467,7 +2459,7 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods
"Unable to forget Celery task as backend"
"does not support this operation"
)
except Exception as ex: # pylint: disable=broad-except
except Exception as ex:
logger.exception("Query %i: %s", query.id, str(ex))
message = __("Failed to start remote query on a worker.")
@ -2550,7 +2542,7 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods
except SupersetTimeoutException as ex:
# re-raise exception for api exception handler
raise ex
except Exception as ex: # pylint: disable=broad-except
except Exception as ex:
logger.exception("Query %i failed unexpectedly", query.id)
raise SupersetGenericDBErrorException(
utils.error_msg_from_exception(ex)

View File

@ -26,9 +26,7 @@ from superset.typing import FlaskResponse
from superset.views.base import DeleteMixin, SupersetModelView
class CssTemplateModelView( # pylint: disable=too-many-ancestors
SupersetModelView, DeleteMixin
):
class CssTemplateModelView(SupersetModelView, DeleteMixin):
datamodel = SQLAInterface(models.CssTemplate)
include_route_methods = RouteMethod.CRUD_SET

View File

@ -43,7 +43,7 @@ from .mixins import DatabaseMixin
from .validators import schema_allows_csv_upload, sqlalchemy_uri_validator
if TYPE_CHECKING:
from werkzeug.datastructures import FileStorage # pylint: disable=unused-import
from werkzeug.datastructures import FileStorage
config = app.config
stats_logger = config["STATS_LOGGER"]

View File

@ -26,7 +26,7 @@ from superset.views.base import SupersetModelView
from . import LogMixin
class LogModelView(LogMixin, SupersetModelView): # pylint: disable=too-many-ancestors
class LogModelView(LogMixin, SupersetModelView):
datamodel = SQLAInterface(models.Log)
include_route_methods = {RouteMethod.LIST, RouteMethod.SHOW}
class_permission_name = "Log"

View File

@ -45,7 +45,7 @@ class R(BaseSupersetView): # pylint: disable=invalid-name
@event_logger.log_this
@expose("/<int:url_id>")
def index(self, url_id: int) -> FlaskResponse: # pylint: disable=no-self-use
def index(self, url_id: int) -> FlaskResponse:
url = db.session.query(models.Url).get(url_id)
if url and url.url:
explore_url = "//superset/explore/?"
@ -62,7 +62,7 @@ class R(BaseSupersetView): # pylint: disable=invalid-name
@event_logger.log_this
@has_access_api
@expose("/shortner/", methods=["POST"])
def shortner(self) -> FlaskResponse: # pylint: disable=no-self-use
def shortner(self) -> FlaskResponse:
url = request.form.get("data")
if not self._validate_url(url):
logger.warning("Invalid URL: %s", url)

View File

@ -50,9 +50,7 @@ from superset.views.core import json_success
from .base import DeleteMixin, SupersetModelView
class EmailScheduleView(
SupersetModelView, DeleteMixin
): # pylint: disable=too-many-ancestors
class EmailScheduleView(SupersetModelView, DeleteMixin):
include_route_methods = RouteMethod.CRUD_SET
_extra_data = {"test_email": False, "test_email_recipients": None}

View File

@ -30,9 +30,7 @@ from superset.utils import core as utils
from .base import BaseSupersetView, DeleteMixin, json_success, SupersetModelView
class SavedQueryView(
SupersetModelView, DeleteMixin
): # pylint: disable=too-many-ancestors
class SavedQueryView(SupersetModelView, DeleteMixin):
datamodel = SQLAInterface(SavedQuery)
include_route_methods = RouteMethod.CRUD_SET

View File

@ -14,7 +14,7 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=C,R,W
# pylint: disable=C,R,W,useless-suppression
"""This module contains the 'Viz' objects
These objects represent the backend of all the visualizations that