chore: Update pre-commit packages (#23173)

Author: Hugh A. Miles II, 2023-03-13 17:05:13 -06:00, committed by GitHub
Parent: 9ae81b7c33
Commit: b820eb8235
Signature: GPG key ID 4AEE18F83AFDEB23 (no known key found for this signature in database)
87 changed files with 112 additions and 218 deletions

View File

@ -20,16 +20,17 @@ repos:
hooks:
- id: isort
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v0.941
rev: v1.0.1
hooks:
- id: mypy
args: [--check-untyped-defs]
additional_dependencies: [types-all]
- repo: https://github.com/peterdemin/pip-compile-multi
rev: v2.4.1
rev: v2.6.2
hooks:
- id: pip-compile-multi-verify
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v3.2.0
rev: v4.4.0
hooks:
- id: check-docstring-first
- id: check-added-large-files
@ -41,7 +42,7 @@ repos:
- id: trailing-whitespace
args: ["--markdown-linebreak-ext=md"]
- repo: https://github.com/psf/black
rev: 22.3.0
rev: 23.1.0
hooks:
- id: black
language_version: python3
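
Note: this hunk bumps four hook revisions (mirrors-mypy v0.941 to v1.0.1, pip-compile-multi v2.4.1 to v2.6.2, pre-commit-hooks v3.2.0 to v4.4.0, and black 22.3.0 to 23.1.0). The rest of the commit is, presumably, the mechanical fallout of re-running the updated hooks over the repository. A minimal sketch (not part of the commit) of reproducing that locally, assuming pre-commit is installed in the active environment:

    # Re-run every configured hook against the working tree; hook ids such as
    # "mypy" and "black" come from .pre-commit-config.yaml.
    import subprocess

    # check=False because formatting hooks exit non-zero when they modify files.
    subprocess.run(["pre-commit", "run", "--all-files"], check=False)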

View File

@ -138,7 +138,7 @@ class GitChangeLog:
title = pr_info.title if pr_info else git_log.message
pr_type = re.match(SUPERSET_PULL_REQUEST_TYPES, title)
if pr_type:
pr_type = pr_type.group().strip('"')
pr_type = pr_type.group().strip('"') # type: ignore
labels = (" | ").join([label.name for label in pr_info.labels])
is_risky = self._is_risk_pull_request(pr_info.labels)
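
Note: mypy v1.0.1 now flags this line, presumably because pr_type is first bound to the Optional[Match[str]] returned by re.match and then rebound to the str returned by .group(); the commit silences the new error with an inline ignore. A minimal sketch of the pattern with illustrative names (PR_TYPE_PATTERN and extract_pr_type are placeholders, not Superset code), showing an ignore-free alternative that gives the match its own name:

    import re
    from typing import Optional

    PR_TYPE_PATTERN = r"^(chore|feat|fix)"  # placeholder for SUPERSET_PULL_REQUEST_TYPES

    def extract_pr_type(title: str) -> Optional[str]:
        match = re.match(PR_TYPE_PATTERN, title)  # Optional[re.Match[str]]
        if match:
            # Binding the str result to a new name keeps the Match and the
            # string apart, so no "# type: ignore" is needed.
            return match.group().strip('"')
        return None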

View File

@ -59,11 +59,9 @@ export function ColumnOption({
<Tooltip id="metric-name-tooltip" title={tooltipText}>
<span
className="option-label column-option-label"
css={(theme: SupersetTheme) =>
css`
margin-right: ${theme.gridUnit}px;
`
}
css={(theme: SupersetTheme) => css`
margin-right: ${theme.gridUnit}px;
`}
ref={labelRef}
>
{getColumnLabelText(column)}

View File

@ -71,11 +71,9 @@ export function MetricOption({
const label = (
<span
className="option-label metric-option-label"
css={(theme: SupersetTheme) =>
css`
margin-right: ${theme.gridUnit}px;
`
}
css={(theme: SupersetTheme) => css`
margin-right: ${theme.gridUnit}px;
`}
ref={labelRef}
>
{link}

View File

@ -158,11 +158,9 @@ export const EmptyStateBig = ({
<EmptyStateContainer className={className}>
{image && <ImageContainer image={image} size={EmptyStateSize.Big} />}
<TextContainer
css={(theme: SupersetTheme) =>
css`
max-width: ${theme.gridUnit * 150}px;
`
}
css={(theme: SupersetTheme) => css`
max-width: ${theme.gridUnit * 150}px;
`}
>
<BigTitle>{title}</BigTitle>
{description && <BigDescription>{description}</BigDescription>}
@ -189,11 +187,9 @@ export const EmptyStateMedium = ({
<EmptyStateContainer>
{image && <ImageContainer image={image} size={EmptyStateSize.Medium} />}
<TextContainer
css={(theme: SupersetTheme) =>
css`
max-width: ${theme.gridUnit * 100}px;
`
}
css={(theme: SupersetTheme) => css`
max-width: ${theme.gridUnit * 100}px;
`}
>
<Title>{title}</Title>
{description && <Description>{description}</Description>}
@ -218,11 +214,9 @@ export const EmptyStateSmall = ({
<EmptyStateContainer>
{image && <ImageContainer image={image} size={EmptyStateSize.Small} />}
<TextContainer
css={(theme: SupersetTheme) =>
css`
max-width: ${theme.gridUnit * 75}px;
`
}
css={(theme: SupersetTheme) => css`
max-width: ${theme.gridUnit * 75}px;
`}
>
<Title>{title}</Title>
{description && <SmallDescription>{description}</SmallDescription>}

View File

@ -23,11 +23,9 @@ import { NULL_DISPLAY } from 'src/constants';
function NullCell() {
return (
<span
css={(theme: SupersetTheme) =>
css`
color: ${theme.colors.grayscale.light1};
`
}
css={(theme: SupersetTheme) => css`
color: ${theme.colors.grayscale.light1};
`}
>
{NULL_DISPLAY}
</span>

View File

@ -226,13 +226,11 @@ const FilterControls: FC<FilterControlsProps> = ({
const renderHorizontalContent = () => (
<div
css={(theme: SupersetTheme) =>
css`
padding: 0 ${theme.gridUnit * 4}px;
min-width: 0;
flex: 1;
`
}
css={(theme: SupersetTheme) => css`
padding: 0 ${theme.gridUnit * 4}px;
min-width: 0;
flex: 1;
`}
>
<DropdownContainer
items={items}

View File

@ -47,12 +47,10 @@ export const FiltersDropdownContent = ({
forceRenderOutOfScope,
}: FiltersDropdownContentProps) => (
<div
css={(theme: SupersetTheme) =>
css`
width: ${theme.gridUnit * 56}px;
padding: ${theme.gridUnit}px 0;
`
}
css={(theme: SupersetTheme) => css`
width: ${theme.gridUnit * 56}px;
padding: ${theme.gridUnit}px 0;
`}
>
{overflowedCrossFilters.map(crossFilter =>
rendererCrossFilter(

View File

@ -44,20 +44,16 @@ export const NameRow = ({
return (
<Row
css={(theme: SupersetTheme) =>
css`
margin-bottom: ${theme.gridUnit * 3}px;
justify-content: space-between;
`
}
css={(theme: SupersetTheme) => css`
margin-bottom: ${theme.gridUnit * 3}px;
justify-content: space-between;
`}
>
<InternalRow>
<Icons.FilterSmall
css={(theme: SupersetTheme) =>
css`
margin-right: ${theme.gridUnit}px;
`
}
css={(theme: SupersetTheme) => css`
margin-right: ${theme.gridUnit}px;
`}
/>
<TooltipWithTruncation title={elementsTruncated ? filter.name : null}>
<FilterName ref={filterNameRef}>{filter.name}</FilterName>

View File

@ -134,12 +134,10 @@ const ControlHeader: FC<ControlHeaderProps> = ({
<div className="ControlHeader" data-test={`${name}-header`}>
<div className="pull-left">
<FormLabel
css={(theme: SupersetTheme) =>
css`
margin-bottom: ${theme.gridUnit * 0.5}px;
position: relative;
`
}
css={(theme: SupersetTheme) => css`
margin-bottom: ${theme.gridUnit * 0.5}px;
position: relative;
`}
>
{leftNode && <span>{leftNode}</span>}
<span

View File

@ -56,11 +56,9 @@ function VizSupportValidation({ vizType }: { vizType: string }) {
return (
<div
className="text-danger"
css={(theme: SupersetTheme) =>
css`
margin-top: ${theme.gridUnit}px;
`
}
css={(theme: SupersetTheme) => css`
margin-top: ${theme.gridUnit}px;
`}
>
<i className="fa fa-exclamation-circle text-danger" />{' '}
<small>{t('This visualization type is not supported.')}</small>
@ -114,15 +112,13 @@ const VizTypeControl = ({
{initialValue && <VizSupportValidation vizType={initialValue} />}
</div>
<div
css={(theme: SupersetTheme) =>
css`
display: flex;
justify-content: flex-end;
margin-top: ${theme.gridUnit * 3}px;
color: ${theme.colors.grayscale.base};
text-decoration: underline;
`
}
css={(theme: SupersetTheme) => css`
display: flex;
justify-content: flex-end;
margin-top: ${theme.gridUnit * 3}px;
color: ${theme.colors.grayscale.base};
text-decoration: underline;
`}
>
<span role="button" tabIndex={0} onClick={openModal}>
{t('View all charts')}

View File

@ -371,10 +371,9 @@ const timezoneHeaderStyle = (theme: SupersetTheme) => css`
margin: ${theme.gridUnit * 3}px 0;
`;
const inputSpacer = (theme: SupersetTheme) =>
css`
margin-right: ${theme.gridUnit * 3}px;
`;
const inputSpacer = (theme: SupersetTheme) => css`
margin-right: ${theme.gridUnit * 3}px;
`;
type NotificationAddStatus = 'active' | 'disabled' | 'hidden';

View File

@ -59,9 +59,10 @@ const StyledHeader = styled.header`
justify-content: center;
/* must be exactly the height of the Antd navbar */
min-height: 50px;
padding: ${theme.gridUnit}px ${theme.gridUnit * 2}px ${
theme.gridUnit
}px ${theme.gridUnit * 4}px;
padding: ${theme.gridUnit}px
${theme.gridUnit * 2}px
${theme.gridUnit}px
${theme.gridUnit * 4}px;
max-width: ${theme.gridUnit * theme.brandIconMaxWidth}px;
img {
height: 100%;

View File

@ -55,11 +55,13 @@ export default function transformProps(chartProps: TableChartProps) {
typeof column === 'object' ? column : { label: column },
);
} else {
/* eslint-disable */
const metricMap = datasource.metrics.reduce((acc, current) => {
const map = acc;
map[current.metric_name] = current;
return map;
}, {} as Record<string, Metric>);
/* eslint-disable */
rows = metrics.map(metric =>
typeof metric === 'object' ? metric : metricMap[metric],
);

View File

@ -38,7 +38,6 @@ REMOVE_KEYS = ["datasource_type", "datasource_name", "url_params"]
class ExportChartsCommand(ExportModelsCommand):
dao = ChartDAO
not_found = ChartNotFoundError

View File

@ -841,7 +841,6 @@ class ChartDataFilterSchema(Schema):
class ChartDataExtrasSchema(Schema):
relative_start = fields.String(
description="Start time for relative time deltas. "
'Default: `config["DEFAULT_RELATIVE_START_TIME"]`',

View File

@ -45,7 +45,6 @@ def load_test_users_run() -> None:
Syncs permissions for those users/roles
"""
if app.config["TESTING"]:
sm = security_manager
examples_db = database_utils.get_example_database()

View File

@ -37,7 +37,6 @@ class ExportAssetsCommand(BaseCommand):
"""
def run(self) -> Iterator[Tuple[str, str]]:
metadata = {
"version": EXPORT_VERSION,
"type": "assets",

View File

@ -30,7 +30,6 @@ METADATA_FILE_NAME = "metadata.yaml"
class ExportModelsCommand(BaseCommand):
dao: Type[BaseDAO] = BaseDAO
not_found: Type[CommandException] = CommandException

View File

@ -403,11 +403,9 @@ class QueryObject: # pylint: disable=too-many-instance-attributes
and hasattr(self.datasource, "database")
and self.datasource.database.impersonate_user
):
if key := self.datasource.database.db_engine_spec.get_impersonation_key(
getattr(g, "user", None)
):
logger.debug(
"Adding impersonation key to QueryObject cache dict: %s", key
)

View File

@ -1204,6 +1204,7 @@ def SQL_QUERY_MUTATOR( # pylint: disable=invalid-name,unused-argument
# functionality for both the SQL_Lab and Charts.
MUTATE_AFTER_SPLIT = False
# This allows for a user to add header data to any outgoing emails. For example,
# if you need to include metadata in the header or you want to change the specifications
# of the email title, header, or sender.

View File

@ -102,7 +102,6 @@ def append_charts(position: Dict[str, Any], charts: Set[Slice]) -> Dict[str, Any
class ExportDashboardsCommand(ExportModelsCommand):
dao = DashboardDAO
not_found = DashboardNotFoundError

View File

@ -1308,12 +1308,10 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
and hasattr(engine_spec, "sqlalchemy_uri_placeholder")
and getattr(engine_spec, "default_driver") in drivers
):
payload[
"parameters"
] = engine_spec.parameters_json_schema() # type: ignore
payload["parameters"] = engine_spec.parameters_json_schema()
payload[
"sqlalchemy_uri_placeholder"
] = engine_spec.sqlalchemy_uri_placeholder # type: ignore
] = engine_spec.sqlalchemy_uri_placeholder
available_databases.append(payload)

View File

@ -51,7 +51,6 @@ def parse_extra(extra_payload: str) -> Dict[str, Any]:
class ExportDatabasesCommand(ExportModelsCommand):
dao = DatabaseDAO
not_found = DatabaseNotFoundError

View File

@ -200,7 +200,6 @@ class TestConnectionDatabaseCommand(BaseCommand):
)
raise DatabaseSecurityUnsafeError(message=str(ex)) from ex
except SupersetTimeoutException as ex:
event_logger.log_with_context(
action=get_log_connection_action(
"test_connection_error", ssh_tunnel, ex

View File

@ -303,7 +303,7 @@ class DatabaseParametersSchemaMixin: # pylint: disable=too-few-public-methods
)
# validate parameters
parameters = engine_spec.parameters_schema.load(parameters) # type: ignore
parameters = engine_spec.parameters_schema.load(parameters)
serialized_encrypted_extra = data.get("masked_encrypted_extra") or "{}"
try:
@ -311,7 +311,7 @@ class DatabaseParametersSchemaMixin: # pylint: disable=too-few-public-methods
except json.decoder.JSONDecodeError:
encrypted_extra = {}
data["sqlalchemy_uri"] = engine_spec.build_sqlalchemy_uri( # type: ignore
data["sqlalchemy_uri"] = engine_spec.build_sqlalchemy_uri(
parameters,
encrypted_extra,
)
@ -488,7 +488,6 @@ class DatabasePutSchema(Schema, DatabaseParametersSchemaMixin):
class DatabaseTestConnectionSchema(Schema, DatabaseParametersSchemaMixin):
rename_encrypted_extra = pre_load(rename_encrypted_extra)
database_name = fields.String(

View File

@ -37,7 +37,6 @@ JSON_KEYS = {"params", "template_params", "extra"}
class ExportDatasetsCommand(ExportModelsCommand):
dao = DatasetDAO
not_found = DatasetNotFoundError

View File

@ -34,7 +34,6 @@ Datasource = Union[Dataset, SqlaTable, Table, Query, SavedQuery]
class DatasourceDAO(BaseDAO):
sources: Dict[Union[DatasourceType, str], Type[Datasource]] = {
DatasourceType.TABLE: SqlaTable,
DatasourceType.QUERY: Query,

View File

@ -19,7 +19,6 @@ from superset.db_engine_specs.postgres import PostgresEngineSpec
class AuroraMySQLDataAPI(MySQLEngineSpec):
engine = "mysql"
default_driver = "auroradataapi"
engine_name = "Aurora MySQL (Data API)"
@ -33,7 +32,6 @@ class AuroraMySQLDataAPI(MySQLEngineSpec):
class AuroraPostgresDataAPI(PostgresEngineSpec):
engine = "postgresql"
default_driver = "auroradataapi"
engine_name = "Aurora PostgreSQL (Data API)"

View File

@ -28,7 +28,6 @@ if TYPE_CHECKING:
class CrateEngineSpec(BaseEngineSpec):
engine = "crate"
engine_name = "CrateDB"

View File

@ -192,7 +192,6 @@ class DatabricksNativeEngineSpec(DatabricksODBCEngineSpec, BasicParametersMixin)
def build_sqlalchemy_uri( # type: ignore
cls, parameters: DatabricksParametersType, *_
) -> str:
query = {}
if parameters.get("encryption"):
if not cls.encryption_parameters:

View File

@ -23,7 +23,6 @@ from superset.db_engine_specs.base import BaseEngineSpec
class DremioEngineSpec(BaseEngineSpec):
engine = "dremio"
engine_name = "Dremio"

View File

@ -67,7 +67,6 @@ class ElasticSearchEngineSpec(BaseEngineSpec): # pylint: disable=abstract-metho
def convert_dttm(
cls, target_type: str, dttm: datetime, db_extra: Optional[Dict[str, Any]] = None
) -> Optional[str]:
db_extra = db_extra or {}
sqla_type = cls.get_sqla_column_type(target_type)
@ -99,7 +98,6 @@ class ElasticSearchEngineSpec(BaseEngineSpec): # pylint: disable=abstract-metho
class OpenDistroEngineSpec(BaseEngineSpec): # pylint: disable=abstract-method
time_groupby_inline = True
time_secondary_columns = True
allows_joins = False

View File

@ -257,7 +257,6 @@ class GSheetsEngineSpec(SqliteEngineSpec):
idx = 0
for name, url in table_catalog.items():
if not name:
errors.append(
SupersetError(

View File

@ -191,7 +191,6 @@ class HiveEngineSpec(PrestoEngineSpec):
raise SupersetException("Append operation not currently supported")
if to_sql_kwargs["if_exists"] == "fail":
# Ensure table doesn't already exist.
if table.schema:
table_exists = not database.get_df(
@ -425,7 +424,7 @@ class HiveEngineSpec(PrestoEngineSpec):
return BaseEngineSpec._get_fields(cols) # pylint: disable=protected-access
@classmethod
def latest_sub_partition(
def latest_sub_partition( # type: ignore
cls, table_name: str, schema: Optional[str], database: "Database", **kwargs: Any
) -> str:
# TODO(bogdan): implement`

View File

@ -18,7 +18,6 @@ from superset.db_engine_specs.postgres import PostgresBaseEngineSpec
class NetezzaEngineSpec(PostgresBaseEngineSpec):
engine = "netezza"
default_driver = "nzpy"
engine_name = "IBM Netezza Performance Server"

View File

@ -1267,10 +1267,10 @@ class PrestoEngineSpec(PrestoBaseEngineSpec):
def _extract_error_message(cls, ex: Exception) -> str:
if (
hasattr(ex, "orig")
and type(ex.orig).__name__ == "DatabaseError" # type: ignore
and isinstance(ex.orig[0], dict) # type: ignore
and type(ex.orig).__name__ == "DatabaseError"
and isinstance(ex.orig[0], dict)
):
error_dict = ex.orig[0] # type: ignore
error_dict = ex.orig[0]
return "{} at {}: {}".format(
error_dict.get("errorName"),
error_dict.get("errorLocation"),

View File

@ -26,7 +26,6 @@ if TYPE_CHECKING:
class RocksetEngineSpec(BaseEngineSpec):
engine = "rockset"
engine_name = "Rockset"

View File

@ -222,7 +222,6 @@ class SnowflakeEngineSpec(PostgresBaseEngineSpec):
Dict[str, Any]
] = None,
) -> str:
return str(
URL(
"snowflake",

View File

@ -52,7 +52,6 @@ def load_world_bank_health_n_pop( # pylint: disable=too-many-locals, too-many-s
tbl_name = "wb_health_population"
database = superset.utils.database.get_example_database()
with database.get_sqla_engine_with_context() as engine:
schema = inspect(engine).default_schema_name
table_exists = database.has_table_by_name(tbl_name)

View File

@ -54,7 +54,7 @@ class SupersetException(Exception):
if self.error_type:
rv["error_type"] = self.error_type
if self.exception is not None and hasattr(self.exception, "to_dict"):
rv = {**rv, **self.exception.to_dict()} # type: ignore
rv = {**rv, **self.exception.to_dict()}
return rv

View File

@ -65,7 +65,6 @@ def upgrade():
# datasources.datasource_name column.
for foreign in ["columns", "metrics"]:
with op.batch_alter_table(foreign, naming_convention=conv) as batch_op:
# Add the datasource_id column with the relevant constraints.
batch_op.add_column(sa.Column("datasource_id", sa.Integer))
@ -94,7 +93,6 @@ def upgrade():
)
with op.batch_alter_table(foreign, naming_convention=conv) as batch_op:
# Drop the datasource_name column and associated constraints. Note
# due to prior revisions (1226819ee0e3, 3b626e2a6783) there may
# incorrectly be multiple duplicate constraints.
@ -146,7 +144,6 @@ def downgrade():
# datasources.datasource_id column.
for foreign in ["columns", "metrics"]:
with op.batch_alter_table(foreign, naming_convention=conv) as batch_op:
# Add the datasource_name column with the relevant constraints.
batch_op.add_column(sa.Column("datasource_name", sa.String(255)))
@ -175,7 +172,6 @@ def downgrade():
)
with op.batch_alter_table(foreign, naming_convention=conv) as batch_op:
# Drop the datasource_id column and associated constraint.
batch_op.drop_constraint(
"fk_{}_datasource_id_datasources".format(foreign), type_="foreignkey"
@ -184,7 +180,6 @@ def downgrade():
batch_op.drop_column("datasource_id")
with op.batch_alter_table("datasources", naming_convention=conv) as batch_op:
# Prior to dropping the uniqueness constraint, the foreign key
# associated with the cluster_name column needs to be dropped.
batch_op.drop_constraint(

View File

@ -30,7 +30,6 @@ down_revision = "e866bd2d4976"
def upgrade():
op.add_column(
"dbs",
sa.Column(

View File

@ -36,7 +36,6 @@ names = {"columns": "column_name", "metrics": "metric_name"}
def upgrade():
# Reduce the size of the metric_name column for constraint viability.
with op.batch_alter_table("metrics", naming_convention=conv) as batch_op:
batch_op.alter_column(
@ -55,7 +54,6 @@ def upgrade():
def downgrade():
bind = op.get_bind()
insp = sa.engine.reflection.Inspector.from_engine(bind)

View File

@ -31,7 +31,6 @@ from alembic import op
def upgrade():
# Enforce that the datasource_name column be non-nullable.
with op.batch_alter_table("datasources") as batch_op:
batch_op.alter_column(
@ -40,7 +39,6 @@ def upgrade():
def downgrade():
# Forego that the datasource_name column be non-nullable.
with op.batch_alter_table("datasources") as batch_op:
batch_op.alter_column(

View File

@ -69,7 +69,6 @@ def downgrade():
form_data = json.loads(slc.params)
if "time_range" in form_data:
# Note defaults and relative dates are not compatible with since/until
# and thus the time range is persisted.
try:

View File

@ -48,7 +48,6 @@ def upgrade():
def downgrade():
# One cannot simply re-add the uniqueness constraint as it may not have previously
# existed.
pass

View File

@ -39,7 +39,6 @@ Base = declarative_base()
class Database(Base):
__tablename__ = "dbs"
id = Column(Integer, primary_key=True)
extra = Column(Text)

View File

@ -31,7 +31,6 @@ from alembic import op
def upgrade():
with op.batch_alter_table("table_columns") as batch_op:
batch_op.alter_column(
"type", existing_type=sa.VARCHAR(length=32), type_=sa.TEXT()

View File

@ -39,7 +39,6 @@ Base = declarative_base()
class Database(Base):
__tablename__ = "dbs"
id = Column(Integer, primary_key=True)
extra = Column(Text)

View File

@ -103,7 +103,6 @@ def insert_from_select(
class Database(Base):
__tablename__ = "dbs"
__table_args__ = (UniqueConstraint("database_name"),)
@ -118,7 +117,6 @@ class Database(Base):
class TableColumn(AuxiliaryColumnsMixin, Base):
__tablename__ = "table_columns"
__table_args__ = (UniqueConstraint("table_id", "column_name"),)
@ -138,7 +136,6 @@ class TableColumn(AuxiliaryColumnsMixin, Base):
class SqlMetric(AuxiliaryColumnsMixin, Base):
__tablename__ = "sql_metrics"
__table_args__ = (UniqueConstraint("table_id", "metric_name"),)
@ -164,7 +161,6 @@ sqlatable_user_table = sa.Table(
class SqlaTable(AuxiliaryColumnsMixin, Base):
__tablename__ = "tables"
__table_args__ = (UniqueConstraint("database_id", "schema", "table_name"),)
@ -213,7 +209,6 @@ dataset_user_association_table = sa.Table(
class NewColumn(AuxiliaryColumnsMixin, Base):
__tablename__ = "sl_columns"
id = sa.Column(sa.Integer, primary_key=True)
@ -243,7 +238,6 @@ class NewColumn(AuxiliaryColumnsMixin, Base):
class NewTable(AuxiliaryColumnsMixin, Base):
__tablename__ = "sl_tables"
id = sa.Column(sa.Integer, primary_key=True)
@ -264,7 +258,6 @@ class NewTable(AuxiliaryColumnsMixin, Base):
class NewDataset(Base, AuxiliaryColumnsMixin):
__tablename__ = "sl_datasets"
id = sa.Column(sa.Integer, primary_key=True)
@ -636,7 +629,6 @@ def postprocess_columns(session: Session) -> None:
return
def get_joined_tables(offset, limit):
# Import aliased from sqlalchemy
from sqlalchemy.orm import aliased
@ -788,7 +780,7 @@ def postprocess_columns(session: Session) -> None:
updates["external_url"] = external_url
# update extra json
for (key, val) in (
for key, val in (
{
"verbose_name": verbose_name,
"python_date_format": python_date_format,

View File

@ -323,8 +323,8 @@ class Dashboard(Model, AuditMixinNullable, ImportExportMixin):
return result
@property # type: ignore
def params(self) -> str: # type: ignore
@property
def params(self) -> str:
return self.json_metadata
@params.setter

View File

@ -430,7 +430,6 @@ class SavedQuery(Model, AuditMixinNullable, ExtraJSONMixin, ImportExportMixin):
class TabState(Model, AuditMixinNullable, ExtraJSONMixin):
__tablename__ = "tab_state"
# basic info
@ -493,7 +492,6 @@ class TabState(Model, AuditMixinNullable, ExtraJSONMixin):
class TableSchema(Model, AuditMixinNullable, ExtraJSONMixin):
__tablename__ = "table_schema"
id = Column(Integer, primary_key=True, autoincrement=True)

View File

@ -33,7 +33,6 @@ logger = logging.getLogger(__name__)
class ExportSavedQueriesCommand(ExportModelsCommand):
dao = SavedQueryDAO
not_found = SavedQueryNotFoundError

View File

@ -36,7 +36,6 @@ logger = logging.getLogger(__name__)
class BaseReportScheduleCommand(BaseCommand):
_properties: Dict[str, Any]
def run(self) -> Any:

View File

@ -2205,7 +2205,6 @@ class SupersetSecurityManager( # pylint: disable=too-many-public-methods
return hasattr(user, "is_guest_user") and user.is_guest_user
def get_current_guest_user_if_guest(self) -> Optional[GuestUser]:
if self.is_guest_user():
return g.user
return None

View File

@ -509,7 +509,6 @@ def has_table_query(token_list: TokenList) -> bool:
"""
state = InsertRLSState.SCANNING
for token in token_list.tokens:
# Recurse into child token list
if isinstance(token, TokenList) and has_table_query(token):
return True
@ -607,7 +606,6 @@ def insert_rls(
rls: Optional[TokenList] = None
state = InsertRLSState.SCANNING
for token in token_list.tokens:
# Recurse into child token list
if isinstance(token, TokenList):
i = token_list.tokens.index(token)

View File

@ -48,13 +48,13 @@ class SqlLabException(SupersetException):
if exception is not None:
if (
hasattr(exception, "error_type")
and exception.error_type is not None # type: ignore
and exception.error_type is not None
):
error_type = exception.error_type # type: ignore
error_type = exception.error_type
elif hasattr(exception, "error") and isinstance(
exception.error, SupersetError # type: ignore
exception.error, SupersetError
):
error_type = exception.error.error_type # type: ignore
error_type = exception.error.error_type
else:
error_type = SupersetErrorType.GENERIC_BACKEND_ERROR
@ -79,9 +79,9 @@ class SqlLabException(SupersetException):
return ": {}".format(reason_message)
if exception is not None:
if hasattr(exception, "get_message"):
return ": {}".format(exception.get_message()) # type: ignore
return ": {}".format(exception.get_message())
if hasattr(exception, "message"):
return ": {}".format(exception.message) # type: ignore
return ": {}".format(exception.message)
return ": {}".format(str(exception))
return ""

View File

@ -48,8 +48,7 @@ class SqlQueryRenderImpl(SqlQueryRender):
def __init__(
self, sql_template_factory: Callable[..., BaseTemplateProcessor]
) -> None:
self._sql_template_processor_factory = sql_template_factory # type: ignore
self._sql_template_processor_factory = sql_template_factory
def render(self, execution_context: SqlJsonExecutionContext) -> str:
query_model = execution_context.query

View File

@ -64,7 +64,7 @@ class SqlJsonExecutorBase(SqlJsonExecutor, ABC):
def __init__(self, query_dao: QueryDAO, get_sql_results_task: GetSqlResultsTask):
self._query_dao = query_dao
self._get_sql_results_task = get_sql_results_task # type: ignore
self._get_sql_results_task = get_sql_results_task
class SynchronousSqlJsonExecutor(SqlJsonExecutorBase):
@ -163,7 +163,6 @@ class ASynchronousSqlJsonExecutor(SqlJsonExecutorBase):
rendered_query: str,
log_params: Optional[Dict[str, Any]],
) -> SqlJsonExecutionStatus:
query_id = execution_context.query.id
logger.info("Query %i: Running query on a Celery worker", query_id)
try:

View File

@ -126,7 +126,6 @@ def get_object_type(class_name: str) -> ObjectTypes:
class ObjectUpdater:
object_type: Optional[str] = None
@classmethod
@ -218,7 +217,6 @@ class ObjectUpdater:
class ChartUpdater(ObjectUpdater):
object_type = "chart"
@classmethod
@ -227,7 +225,6 @@ class ChartUpdater(ObjectUpdater):
class DashboardUpdater(ObjectUpdater):
object_type = "dashboard"
@classmethod
@ -236,7 +233,6 @@ class DashboardUpdater(ObjectUpdater):
class QueryUpdater(ObjectUpdater):
object_type = "query"
@classmethod
@ -245,7 +241,6 @@ class QueryUpdater(ObjectUpdater):
class DatasetUpdater(ObjectUpdater):
object_type = "dataset"
@classmethod

View File

@ -90,7 +90,11 @@ def load_chart_data_into_cache(
raise ex
except Exception as ex:
# TODO: QueryContext should support SIP-40 style errors
error = ex.message if hasattr(ex, "message") else str(ex) # type: ignore # pylint: disable=no-member
error = (
ex.message # pylint: disable=no-member
if hasattr(ex, "message")
else str(ex)
)
errors = [{"message": error}]
async_query_manager.update_job(
job_metadata, async_query_manager.STATUS_ERROR, errors=errors
@ -157,7 +161,11 @@ def load_explore_json_into_cache( # pylint: disable=too-many-locals
if isinstance(ex, SupersetVizException):
errors = ex.errors # pylint: disable=no-member
else:
error = ex.message if hasattr(ex, "message") else str(ex) # type: ignore # pylint: disable=no-member
error = (
ex.message # pylint: disable=no-member
if hasattr(ex, "message")
else str(ex)
)
errors = [error]
async_query_manager.update_job(

View File

@ -27,6 +27,7 @@ from superset import app, db
logger = logging.getLogger(__name__)
# Null pool is used for the celery workers due process forking side effects.
# For more info see: https://github.com/apache/superset/issues/10530
@contextmanager

View File

@ -655,10 +655,10 @@ def error_msg_from_exception(ex: Exception) -> str:
"""
msg = ""
if hasattr(ex, "message"):
if isinstance(ex.message, dict): # type: ignore
if isinstance(ex.message, dict):
msg = ex.message.get("message") # type: ignore
elif ex.message: # type: ignore
msg = ex.message # type: ignore
elif ex.message:
msg = ex.message
return msg or str(ex)
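
Note: the inline ignores on the ex.message accesses are dropped here and in the SQL Lab exception hunks above, presumably because recent mypy narrows attribute access after a hasattr() check and warns about ignores that are no longer needed. A minimal sketch of the pattern, using an illustrative helper name rather than Superset code:

    def error_message(ex: Exception) -> str:
        # After the hasattr() guard, recent mypy accepts the attribute access
        # without an inline "# type: ignore".
        if hasattr(ex, "message") and ex.message:
            return str(ex.message)
        return str(ex)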
@ -1778,14 +1778,13 @@ def indexed(
def is_test() -> bool:
return strtobool(os.environ.get("SUPERSET_TESTENV", "false"))
return strtobool(os.environ.get("SUPERSET_TESTENV", "false")) # type: ignore
def get_time_filter_status(
datasource: "BaseDatasource",
applied_time_extras: Dict[str, str],
) -> Tuple[List[Dict[str, str]], List[Dict[str, str]]]:
temporal_columns: Set[Any]
if datasource.type == "query":
temporal_columns = {

View File

@ -77,7 +77,7 @@ def copy_filter_scopes(
old_filter_scopes: Dict[int, Dict[str, Dict[str, Any]]],
) -> Dict[str, Dict[Any, Any]]:
new_filter_scopes: Dict[str, Dict[Any, Any]] = {}
for (filter_id, scopes) in old_filter_scopes.items():
for filter_id, scopes in old_filter_scopes.items():
new_filter_key = old_to_new_slc_id_dict.get(int(filter_id))
if new_filter_key:
new_filter_scopes[str(new_filter_key)] = scopes
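
Note: dropping the parentheses around the for-loop target here (and in the migration hunk above) matches a 2023 stable-style change in Black 23.1, which appears to remove redundant parentheses from tuple unpacking in for loops. A minimal before/after sketch, not taken from the commit:

    pairs = {"a": 1, "b": 2}

    # Black 22.x left the wrapped target alone:
    #     for (key, val) in pairs.items():
    # Black 23.1 reformats it to:
    for key, val in pairs.items():
        print(key, val)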

View File

@ -47,7 +47,7 @@ def statsd_gauge(metric_prefix: Optional[str] = None) -> Callable[..., Any]:
except Exception as ex:
if (
hasattr(ex, "status")
and ex.status < 500 # type: ignore # pylint: disable=no-member
and ex.status < 500 # pylint: disable=no-member
):
current_app.config["STATS_LOGGER"].gauge(
f"{metric_prefix_}.warning", 1

View File

@ -52,7 +52,7 @@ class MachineAuthProvider:
:return: The WebDriver passed in (fluent)
"""
# Short-circuit this method if we have an override configured
if self._auth_webdriver_func_override:
if self._auth_webdriver_func_override: # type: ignore
return self._auth_webdriver_func_override(driver, user)
# Setting cookies requires doing a request first

View File

@ -114,10 +114,7 @@ def statsd_metrics(f: Callable[..., Any]) -> Callable[..., Any]:
try:
duration, response = time_function(f, self, *args, **kwargs)
except Exception as ex:
if (
hasattr(ex, "status")
and ex.status < 500 # type: ignore # pylint: disable=no-member
):
if hasattr(ex, "status") and ex.status < 500: # pylint: disable=no-member
self.incr_stats("warning", func_name)
else:
self.incr_stats("error", func_name)

View File

@ -22,7 +22,6 @@ from superset.views.chart.filters import SliceFilter
class SliceMixin: # pylint: disable=too-few-public-methods
list_title = _("Charts")
show_title = _("Show Chart")
add_title = _("Add Chart")

View File

@ -2397,7 +2397,6 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods
self,
command_result: CommandResult,
) -> FlaskResponse:
status_code = 200
if command_result["status"] == SqlJsonExecutionStatus.QUERY_IS_RUNNING:
status_code = 202

View File

@ -21,7 +21,6 @@ from superset.dashboards.filters import DashboardAccessFilter
class DashboardMixin: # pylint: disable=too-few-public-methods
list_title = _("Dashboards")
show_title = _("Show Dashboard")
add_title = _("Add Dashboard")

View File

@ -3030,7 +3030,6 @@ class PairedTTestViz(BaseViz):
class RoseViz(NVD3TimeSeriesViz):
viz_type = "rose"
verbose_name = _("Time Series - Nightingale Rose Chart")
sort_series = False

View File

@ -61,7 +61,6 @@ def log(
suffix_exit_msg: str = _DEFAULT_EXIT_MSG_SUFFIX,
return_value_msg_part=_DEFAULT_RETURN_VALUE_MSG_PART,
) -> Decorated:
decorator: Decorated = _make_decorator(
prefix_enter_msg,
suffix_enter_msg,

View File

@ -210,7 +210,6 @@ class TestPostChartDataApi(BaseTestChartDataApi):
{**app.config, "SAMPLES_ROW_LIMIT": 5, "SQL_MAX_ROW": 15},
)
def test_with_row_limit_as_samples__rowcount_as_row_limit(self):
expected_row_count = 10
self.query_context_payload["result_type"] = ChartDataResultType.SAMPLES
self.query_context_payload["queries"][0]["row_limit"] = expected_row_count
@ -234,7 +233,6 @@ class TestPostChartDataApi(BaseTestChartDataApi):
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_with_invalid_payload__400(self):
invalid_query_context = {"form_data": "NOT VALID JSON"}
rv = self.client.post(
@ -585,7 +583,6 @@ class TestPostChartDataApi(BaseTestChartDataApi):
def test_when_where_parameter_is_template_and_query_result_type__query_is_templated(
self,
):
self.query_context_payload["result_type"] = ChartDataResultType.QUERY
self.query_context_payload["queries"][0]["filters"] = [
{"col": "gender", "op": "==", "val": "boy"}

View File

@ -502,7 +502,6 @@ def test_failing_import_datasets_versioned_export(
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
@mock.patch("superset.tasks.thumbnails.cache_dashboard_thumbnail")
def test_compute_thumbnails(thumbnail_mock, app_context, fs):
thumbnail_mock.return_value = None
runner = app.test_cli_runner()
dashboard = db.session.query(Dashboard).filter_by(slug="births").first()

View File

@ -200,7 +200,7 @@ def mock_upload_to_s3(filename: str, upload_prefix: str, table: Table) -> str:
# only needed for the hive tests
import docker
client = docker.from_env()
client = docker.from_env() # type: ignore
container = client.containers.get("namenode")
# docker mounted volume that contains csv uploads
src = os.path.join("/tmp/superset_uploads", os.path.basename(filename))

View File

@ -61,7 +61,6 @@ from tests.integration_tests.fixtures.importexport import (
class TestDatasetApi(SupersetTestCase):
fixture_tables_names = ("ab_permission", "ab_permission_view", "ab_view_menu")
fixture_virtual_table_names = ("sql_virtual_dataset_1", "sql_virtual_dataset_2")

View File

@ -93,6 +93,7 @@ class TestBigQueryDbEngineSpec(TestDbEngineSpec):
"""
DB Eng Specs (bigquery): Test fetch data
"""
# Mock a google.cloud.bigquery.table.Row
class Row(object):
def __init__(self, value):

View File

@ -89,7 +89,6 @@ def _create_energy_table() -> List[Slice]:
slices = []
for slice_data in _get_energy_slices():
slice = _create_and_commit_energy_slice(
table,
slice_data["slice_title"],

View File

@ -23,7 +23,6 @@ from tests.integration_tests.test_app import app
@pytest.fixture()
def create_gamma_sqllab_no_data():
with app.app_context():
gamma_role = db.session.query(Role).filter(Role.name == "Gamma").one_or_none()
sqllab_role = (

View File

@ -64,7 +64,6 @@ def test_execute_query_as_report_executor(
app_context: None,
get_user,
) -> None:
from superset.reports.commands.alert import AlertCommand
from superset.reports.models import ReportSchedule
@ -104,7 +103,6 @@ def test_execute_query_as_report_executor(
def test_execute_query_succeeded_no_retry(
mocker: MockFixture, app_context: None
) -> None:
from superset.reports.commands.alert import AlertCommand
execute_query_mock = mocker.patch(

View File

@ -88,7 +88,6 @@ class TestReportSchedulesApi(SupersetTestCase):
@pytest.fixture()
def create_working_admin_report_schedule(self):
with self.create_app().app_context():
admin_user = self.get_user("admin")
chart = db.session.query(Slice).first()
example_db = get_example_database()
@ -114,7 +113,6 @@ class TestReportSchedulesApi(SupersetTestCase):
@pytest.fixture()
def create_working_gamma_report_schedule(self, gamma_user_with_alerts_role):
with self.create_app().app_context():
chart = db.session.query(Slice).first()
example_db = get_example_database()
@ -139,7 +137,6 @@ class TestReportSchedulesApi(SupersetTestCase):
@pytest.fixture()
def create_working_shared_report_schedule(self, gamma_user_with_alerts_role):
with self.create_app().app_context():
admin_user = self.get_user("admin")
alpha_user = self.get_user("alpha")
chart = db.session.query(Slice).first()
@ -213,7 +210,6 @@ class TestReportSchedulesApi(SupersetTestCase):
@pytest.fixture()
def create_alpha_users(self):
with self.create_app().app_context():
users = [
self.create_user(
"alpha1", "password", "Alpha", email="alpha1@superset.org"

View File

@ -466,7 +466,6 @@ def create_alert_email_chart(request):
chart = db.session.query(Slice).first()
example_database = get_example_database()
with create_test_table_context(example_database):
report_schedule = create_report_notification(
email_target="target@email.com",
chart=chart,
@ -549,7 +548,6 @@ def create_no_alert_email_chart(request):
chart = db.session.query(Slice).first()
example_database = get_example_database()
with create_test_table_context(example_database):
report_schedule = create_report_notification(
email_target="target@email.com",
chart=chart,
@ -584,7 +582,6 @@ def create_mul_alert_email_chart(request):
chart = db.session.query(Slice).first()
example_database = get_example_database()
with create_test_table_context(example_database):
report_schedule = create_report_notification(
email_target="target@email.com",
chart=chart,
@ -619,7 +616,6 @@ def create_invalid_sql_alert_email_chart(request):
chart = db.session.query(Slice).first()
example_database = get_example_database()
with create_test_table_context(example_database):
report_schedule = create_report_notification(
email_target="target@email.com",
chart=chart,
@ -1072,7 +1068,6 @@ def test_email_dashboard_report_schedule(
with freeze_time("2020-01-01T00:00:00Z"):
with patch.object(current_app.config["STATS_LOGGER"], "gauge") as statsd_mock:
AsyncExecuteReportScheduleCommand(
TEST_ID, create_report_email_dashboard.id, datetime.utcnow()
).run()
@ -1143,7 +1138,6 @@ def test_slack_chart_report_schedule(
with freeze_time("2020-01-01T00:00:00Z"):
with patch.object(current_app.config["STATS_LOGGER"], "gauge") as statsd_mock:
AsyncExecuteReportScheduleCommand(
TEST_ID, create_report_slack_chart.id, datetime.utcnow()
).run()
@ -1192,7 +1186,6 @@ def test_slack_chart_report_schedule_with_errors(
web_client_mock.side_effect = er
with pytest.raises(ReportScheduleClientErrorsException):
AsyncExecuteReportScheduleCommand(
TEST_ID, create_report_slack_chart.id, datetime.utcnow()
).run()
@ -1349,7 +1342,6 @@ def test_report_schedule_working_timeout(create_report_slack_chart_working):
seconds=create_report_slack_chart_working.working_timeout + 1
)
with freeze_time(current_time):
with pytest.raises(ReportScheduleWorkingTimeoutError):
AsyncExecuteReportScheduleCommand(
TEST_ID, create_report_slack_chart_working.id, datetime.utcnow()
@ -2020,7 +2012,6 @@ def test__send_with_multiple_errors(notification_mock, logger_mock):
@patch("superset.reports.commands.execute.logger")
@patch("superset.reports.commands.execute.create_notification")
def test__send_with_server_errors(notification_mock, logger_mock):
notification_content = "I am some content"
recipients = ["test@foo.com"]
notification_mock.return_value.send.side_effect = NotificationError()

View File

@ -43,7 +43,6 @@ def test_scheduler_celery_timeout_ny(execute_mock, owners):
Reports scheduler: Test scheduler setting celery soft and hard timeout
"""
with app.app_context():
report_schedule = insert_report_schedule(
type=ReportScheduleType.ALERT,
name="report",
@ -91,7 +90,6 @@ def test_scheduler_celery_timeout_utc(execute_mock, owners):
Reports scheduler: Test scheduler setting celery soft and hard timeout
"""
with app.app_context():
report_schedule = insert_report_schedule(
type=ReportScheduleType.ALERT,
name="report",

View File

@ -152,7 +152,6 @@ class TestRowLevelSecurity(SupersetTestCase):
@pytest.fixture()
def create_dataset(self):
with self.create_app().app_context():
dataset = SqlaTable(database_id=1, schema=None, table_name="table1")
db.session.add(dataset)
db.session.flush()

View File

@ -199,7 +199,6 @@ class TestWebDriverProxy(SupersetTestCase):
class TestThumbnails(SupersetTestCase):
mock_image = b"bytes mock image"
digest_return_value = "foo_bar"
digest_hash = "5c7d96a3dd7a87850a2ef34087565a6e"

View File

@ -86,7 +86,13 @@ def test_flat_should_drop_index_level():
df = pd.DataFrame(index=index, columns=columns, data=1)
# drop level by index
assert pp.flatten(df.copy(), drop_levels=(0, 1,)).equals(
assert pp.flatten(
df.copy(),
drop_levels=(
0,
1,
),
).equals(
pd.DataFrame(
{
"__timestamp": index,