mirror of https://github.com/apache/superset.git
chore: Update pre-commit packages (#23173)
This commit is contained in:
parent 9ae81b7c33
commit b820eb8235
@@ -20,16 +20,17 @@ repos:
     hooks:
       - id: isort
   - repo: https://github.com/pre-commit/mirrors-mypy
-    rev: v0.941
+    rev: v1.0.1
     hooks:
       - id: mypy
+        args: [--check-untyped-defs]
         additional_dependencies: [types-all]
   - repo: https://github.com/peterdemin/pip-compile-multi
-    rev: v2.4.1
+    rev: v2.6.2
     hooks:
       - id: pip-compile-multi-verify
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v3.2.0
+    rev: v4.4.0
     hooks:
       - id: check-docstring-first
       - id: check-added-large-files
@@ -41,7 +42,7 @@ repos:
       - id: trailing-whitespace
         args: ["--markdown-linebreak-ext=md"]
   - repo: https://github.com/psf/black
-    rev: 22.3.0
+    rev: 23.1.0
     hooks:
       - id: black
         language_version: python3
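Note: besides the version bump from v0.941 to v1.0.1, the one functional change in the mypy hook is the new `args: [--check-untyped-defs]`. By default mypy skips the bodies of functions that carry no annotations; this flag makes it check them too. A minimal sketch (the function and file name below are hypothetical, not from this commit):

```python
# demo.py
def total():  # no annotations anywhere, so mypy normally skips this body
    count = "3"
    return count + 1  # str + int: reported only under --check-untyped-defs
```

Running `mypy demo.py` stays silent, while `mypy --check-untyped-defs demo.py` reports the unsupported operand types.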
@@ -138,7 +138,7 @@ class GitChangeLog:
         title = pr_info.title if pr_info else git_log.message
         pr_type = re.match(SUPERSET_PULL_REQUEST_TYPES, title)
         if pr_type:
-            pr_type = pr_type.group().strip('"')
+            pr_type = pr_type.group().strip('"')  # type: ignore

         labels = (" | ").join([label.name for label in pr_info.labels])
         is_risky = self._is_risk_pull_request(pr_info.labels)
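Note: the `# type: ignore` added above is plausibly fallout from the mypy bump rather than a behavior change: `pr_type` is first bound to the `Optional[Match[str]]` returned by `re.match`, and rebinding it to the `str` produced by `.group().strip('"')` is an incompatible assignment once mypy checks the code. A reduced sketch of the pattern:

```python
import re

pr_type = re.match(r"(fix|feat|chore)", "chore: update pre-commit packages")
if pr_type:
    # Rebinding a Match[str] variable to str triggers mypy's
    # "Incompatible types in assignment" without the suppression.
    pr_type = pr_type.group().strip('"')  # type: ignore
print(pr_type)
```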
@@ -59,11 +59,9 @@ export function ColumnOption({
       <Tooltip id="metric-name-tooltip" title={tooltipText}>
         <span
           className="option-label column-option-label"
-          css={(theme: SupersetTheme) =>
-            css`
-              margin-right: ${theme.gridUnit}px;
-            `
-          }
+          css={(theme: SupersetTheme) => css`
+            margin-right: ${theme.gridUnit}px;
+          `}
           ref={labelRef}
         >
           {getColumnLabelText(column)}
@@ -71,11 +71,9 @@ export function MetricOption({
   const label = (
     <span
       className="option-label metric-option-label"
-      css={(theme: SupersetTheme) =>
-        css`
-          margin-right: ${theme.gridUnit}px;
-        `
-      }
+      css={(theme: SupersetTheme) => css`
+        margin-right: ${theme.gridUnit}px;
+      `}
       ref={labelRef}
     >
       {link}
@@ -158,11 +158,9 @@ export const EmptyStateBig = ({
   <EmptyStateContainer className={className}>
     {image && <ImageContainer image={image} size={EmptyStateSize.Big} />}
    <TextContainer
-      css={(theme: SupersetTheme) =>
-        css`
-          max-width: ${theme.gridUnit * 150}px;
-        `
-      }
+      css={(theme: SupersetTheme) => css`
+        max-width: ${theme.gridUnit * 150}px;
+      `}
    >
      <BigTitle>{title}</BigTitle>
      {description && <BigDescription>{description}</BigDescription>}
@@ -189,11 +187,9 @@ export const EmptyStateMedium = ({
   <EmptyStateContainer>
     {image && <ImageContainer image={image} size={EmptyStateSize.Medium} />}
    <TextContainer
-      css={(theme: SupersetTheme) =>
-        css`
-          max-width: ${theme.gridUnit * 100}px;
-        `
-      }
+      css={(theme: SupersetTheme) => css`
+        max-width: ${theme.gridUnit * 100}px;
+      `}
    >
      <Title>{title}</Title>
      {description && <Description>{description}</Description>}
@@ -218,11 +214,9 @@ export const EmptyStateSmall = ({
   <EmptyStateContainer>
     {image && <ImageContainer image={image} size={EmptyStateSize.Small} />}
    <TextContainer
-      css={(theme: SupersetTheme) =>
-        css`
-          max-width: ${theme.gridUnit * 75}px;
-        `
-      }
+      css={(theme: SupersetTheme) => css`
+        max-width: ${theme.gridUnit * 75}px;
+      `}
    >
      <Title>{title}</Title>
      {description && <SmallDescription>{description}</SmallDescription>}
@@ -23,11 +23,9 @@ import { NULL_DISPLAY } from 'src/constants';
 function NullCell() {
   return (
     <span
-      css={(theme: SupersetTheme) =>
-        css`
-          color: ${theme.colors.grayscale.light1};
-        `
-      }
+      css={(theme: SupersetTheme) => css`
+        color: ${theme.colors.grayscale.light1};
+      `}
    >
      {NULL_DISPLAY}
    </span>
@@ -226,13 +226,11 @@ const FilterControls: FC<FilterControlsProps> = ({

   const renderHorizontalContent = () => (
     <div
-      css={(theme: SupersetTheme) =>
-        css`
-          padding: 0 ${theme.gridUnit * 4}px;
-          min-width: 0;
-          flex: 1;
-        `
-      }
+      css={(theme: SupersetTheme) => css`
+        padding: 0 ${theme.gridUnit * 4}px;
+        min-width: 0;
+        flex: 1;
+      `}
    >
      <DropdownContainer
        items={items}
@@ -47,12 +47,10 @@ export const FiltersDropdownContent = ({
   forceRenderOutOfScope,
 }: FiltersDropdownContentProps) => (
   <div
-    css={(theme: SupersetTheme) =>
-      css`
-        width: ${theme.gridUnit * 56}px;
-        padding: ${theme.gridUnit}px 0;
-      `
-    }
+    css={(theme: SupersetTheme) => css`
+      width: ${theme.gridUnit * 56}px;
+      padding: ${theme.gridUnit}px 0;
+    `}
  >
    {overflowedCrossFilters.map(crossFilter =>
      rendererCrossFilter(
@@ -44,20 +44,16 @@ export const NameRow = ({

   return (
     <Row
-      css={(theme: SupersetTheme) =>
-        css`
-          margin-bottom: ${theme.gridUnit * 3}px;
-          justify-content: space-between;
-        `
-      }
+      css={(theme: SupersetTheme) => css`
+        margin-bottom: ${theme.gridUnit * 3}px;
+        justify-content: space-between;
+      `}
    >
      <InternalRow>
        <Icons.FilterSmall
-          css={(theme: SupersetTheme) =>
-            css`
-              margin-right: ${theme.gridUnit}px;
-            `
-          }
+          css={(theme: SupersetTheme) => css`
+            margin-right: ${theme.gridUnit}px;
+          `}
        />
        <TooltipWithTruncation title={elementsTruncated ? filter.name : null}>
          <FilterName ref={filterNameRef}>{filter.name}</FilterName>
@@ -134,12 +134,10 @@ const ControlHeader: FC<ControlHeaderProps> = ({
     <div className="ControlHeader" data-test={`${name}-header`}>
       <div className="pull-left">
         <FormLabel
-          css={(theme: SupersetTheme) =>
-            css`
-              margin-bottom: ${theme.gridUnit * 0.5}px;
-              position: relative;
-            `
-          }
+          css={(theme: SupersetTheme) => css`
+            margin-bottom: ${theme.gridUnit * 0.5}px;
+            position: relative;
+          `}
        >
          {leftNode && <span>{leftNode}</span>}
          <span
@@ -56,11 +56,9 @@ function VizSupportValidation({ vizType }: { vizType: string }) {
   return (
     <div
       className="text-danger"
-      css={(theme: SupersetTheme) =>
-        css`
-          margin-top: ${theme.gridUnit}px;
-        `
-      }
+      css={(theme: SupersetTheme) => css`
+        margin-top: ${theme.gridUnit}px;
+      `}
    >
      <i className="fa fa-exclamation-circle text-danger" />{' '}
      <small>{t('This visualization type is not supported.')}</small>
@@ -114,15 +112,13 @@ const VizTypeControl = ({
       {initialValue && <VizSupportValidation vizType={initialValue} />}
     </div>
     <div
-      css={(theme: SupersetTheme) =>
-        css`
-          display: flex;
-          justify-content: flex-end;
-          margin-top: ${theme.gridUnit * 3}px;
-          color: ${theme.colors.grayscale.base};
-          text-decoration: underline;
-        `
-      }
+      css={(theme: SupersetTheme) => css`
+        display: flex;
+        justify-content: flex-end;
+        margin-top: ${theme.gridUnit * 3}px;
+        color: ${theme.colors.grayscale.base};
+        text-decoration: underline;
+      `}
    >
      <span role="button" tabIndex={0} onClick={openModal}>
        {t('View all charts')}
@@ -371,10 +371,9 @@ const timezoneHeaderStyle = (theme: SupersetTheme) => css`
   margin: ${theme.gridUnit * 3}px 0;
 `;

-const inputSpacer = (theme: SupersetTheme) =>
-  css`
-    margin-right: ${theme.gridUnit * 3}px;
-  `;
+const inputSpacer = (theme: SupersetTheme) => css`
+  margin-right: ${theme.gridUnit * 3}px;
+`;

 type NotificationAddStatus = 'active' | 'disabled' | 'hidden';
@@ -59,9 +59,10 @@ const StyledHeader = styled.header`
     justify-content: center;
     /* must be exactly the height of the Antd navbar */
     min-height: 50px;
-    padding: ${theme.gridUnit}px ${theme.gridUnit * 2}px ${
-      theme.gridUnit
-    }px ${theme.gridUnit * 4}px;
+    padding: ${theme.gridUnit}px
+      ${theme.gridUnit * 2}px
+      ${theme.gridUnit}px
+      ${theme.gridUnit * 4}px;
     max-width: ${theme.gridUnit * theme.brandIconMaxWidth}px;
     img {
       height: 100%;
@@ -55,11 +55,13 @@ export default function transformProps(chartProps: TableChartProps) {
       typeof column === 'object' ? column : { label: column },
     );
   } else {
+    /* eslint-disable */
     const metricMap = datasource.metrics.reduce((acc, current) => {
       const map = acc;
       map[current.metric_name] = current;
       return map;
     }, {} as Record<string, Metric>);
+    /* eslint-disable */
     rows = metrics.map(metric =>
       typeof metric === 'object' ? metric : metricMap[metric],
     );
@@ -38,7 +38,6 @@ REMOVE_KEYS = ["datasource_type", "datasource_name", "url_params"]


 class ExportChartsCommand(ExportModelsCommand):
-
     dao = ChartDAO
     not_found = ChartNotFoundError

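Note: this hunk is the first of dozens below with the same shape. Black 23.1.0 is the first release of the 2023 stable style, which among other things removes blank lines immediately after a block opener such as a `class`, `def`, `if`, or `with` line; that accounts for most of the Python-side churn in this commit. A before/after sketch with a hypothetical class:

```python
# Formatted with black 22.3.0 (blank line after the class opener is kept):
class ExportThingsCommand:

    dao = None


# Formatted with black 23.1.0 (the leading blank line is removed):
class ExportThingsCommand:
    dao = None
```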
@@ -841,7 +841,6 @@ class ChartDataFilterSchema(Schema):


 class ChartDataExtrasSchema(Schema):
-
     relative_start = fields.String(
         description="Start time for relative time deltas. "
         'Default: `config["DEFAULT_RELATIVE_START_TIME"]`',
@@ -45,7 +45,6 @@ def load_test_users_run() -> None:
     Syncs permissions for those users/roles
     """
     if app.config["TESTING"]:
-
         sm = security_manager

         examples_db = database_utils.get_example_database()
@@ -37,7 +37,6 @@ class ExportAssetsCommand(BaseCommand):
     """

     def run(self) -> Iterator[Tuple[str, str]]:
-
         metadata = {
             "version": EXPORT_VERSION,
             "type": "assets",
@@ -30,7 +30,6 @@ METADATA_FILE_NAME = "metadata.yaml"


 class ExportModelsCommand(BaseCommand):
-
     dao: Type[BaseDAO] = BaseDAO
     not_found: Type[CommandException] = CommandException

@@ -403,11 +403,9 @@ class QueryObject:  # pylint: disable=too-many-instance-attributes
             and hasattr(self.datasource, "database")
             and self.datasource.database.impersonate_user
         ):
-
             if key := self.datasource.database.db_engine_spec.get_impersonation_key(
                 getattr(g, "user", None)
             ):
-
                 logger.debug(
                     "Adding impersonation key to QueryObject cache dict: %s", key
                 )
@@ -1204,6 +1204,7 @@ def SQL_QUERY_MUTATOR(  # pylint: disable=invalid-name,unused-argument
 # functionality for both the SQL_Lab and Charts.
 MUTATE_AFTER_SPLIT = False

+
 # This allows for a user to add header data to any outgoing emails. For example,
 # if you need to include metadata in the header or you want to change the specifications
 # of the email title, header, or sender.
@@ -102,7 +102,6 @@ def append_charts(position: Dict[str, Any], charts: Set[Slice]) -> Dict[str, Any]:


 class ExportDashboardsCommand(ExportModelsCommand):
-
     dao = DashboardDAO
     not_found = DashboardNotFoundError

@@ -1308,12 +1308,10 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
             and hasattr(engine_spec, "sqlalchemy_uri_placeholder")
             and getattr(engine_spec, "default_driver") in drivers
         ):
-            payload[
-                "parameters"
-            ] = engine_spec.parameters_json_schema()  # type: ignore
+            payload["parameters"] = engine_spec.parameters_json_schema()
             payload[
                 "sqlalchemy_uri_placeholder"
-            ] = engine_spec.sqlalchemy_uri_placeholder  # type: ignore
+            ] = engine_spec.sqlalchemy_uri_placeholder

             available_databases.append(payload)
@@ -51,7 +51,6 @@ def parse_extra(extra_payload: str) -> Dict[str, Any]:


 class ExportDatabasesCommand(ExportModelsCommand):
-
     dao = DatabaseDAO
     not_found = DatabaseNotFoundError

@@ -200,7 +200,6 @@ class TestConnectionDatabaseCommand(BaseCommand):
             )
             raise DatabaseSecurityUnsafeError(message=str(ex)) from ex
         except SupersetTimeoutException as ex:
-
             event_logger.log_with_context(
                 action=get_log_connection_action(
                     "test_connection_error", ssh_tunnel, ex
@@ -303,7 +303,7 @@ class DatabaseParametersSchemaMixin:  # pylint: disable=too-few-public-methods
             )

         # validate parameters
-        parameters = engine_spec.parameters_schema.load(parameters)  # type: ignore
+        parameters = engine_spec.parameters_schema.load(parameters)

         serialized_encrypted_extra = data.get("masked_encrypted_extra") or "{}"
         try:
@@ -311,7 +311,7 @@ class DatabaseParametersSchemaMixin:  # pylint: disable=too-few-public-methods
         except json.decoder.JSONDecodeError:
             encrypted_extra = {}

-        data["sqlalchemy_uri"] = engine_spec.build_sqlalchemy_uri(  # type: ignore
+        data["sqlalchemy_uri"] = engine_spec.build_sqlalchemy_uri(
             parameters,
             encrypted_extra,
         )
@@ -488,7 +488,6 @@ class DatabasePutSchema(Schema, DatabaseParametersSchemaMixin):


 class DatabaseTestConnectionSchema(Schema, DatabaseParametersSchemaMixin):
-
     rename_encrypted_extra = pre_load(rename_encrypted_extra)

     database_name = fields.String(
@@ -37,7 +37,6 @@ JSON_KEYS = {"params", "template_params", "extra"}


 class ExportDatasetsCommand(ExportModelsCommand):
-
     dao = DatasetDAO
     not_found = DatasetNotFoundError

@@ -34,7 +34,6 @@ Datasource = Union[Dataset, SqlaTable, Table, Query, SavedQuery]


 class DatasourceDAO(BaseDAO):
-
     sources: Dict[Union[DatasourceType, str], Type[Datasource]] = {
         DatasourceType.TABLE: SqlaTable,
         DatasourceType.QUERY: Query,
@@ -19,7 +19,6 @@ from superset.db_engine_specs.postgres import PostgresEngineSpec


 class AuroraMySQLDataAPI(MySQLEngineSpec):
-
     engine = "mysql"
     default_driver = "auroradataapi"
     engine_name = "Aurora MySQL (Data API)"
@@ -33,7 +32,6 @@ class AuroraMySQLDataAPI(MySQLEngineSpec):


 class AuroraPostgresDataAPI(PostgresEngineSpec):
-
     engine = "postgresql"
     default_driver = "auroradataapi"
     engine_name = "Aurora PostgreSQL (Data API)"
@@ -28,7 +28,6 @@ if TYPE_CHECKING:


 class CrateEngineSpec(BaseEngineSpec):
-
     engine = "crate"
     engine_name = "CrateDB"

@@ -192,7 +192,6 @@ class DatabricksNativeEngineSpec(DatabricksODBCEngineSpec, BasicParametersMixin):
     def build_sqlalchemy_uri(  # type: ignore
         cls, parameters: DatabricksParametersType, *_
     ) -> str:
-
         query = {}
         if parameters.get("encryption"):
             if not cls.encryption_parameters:
@@ -23,7 +23,6 @@ from superset.db_engine_specs.base import BaseEngineSpec


 class DremioEngineSpec(BaseEngineSpec):
-
     engine = "dremio"
     engine_name = "Dremio"

@@ -67,7 +67,6 @@ class ElasticSearchEngineSpec(BaseEngineSpec):  # pylint: disable=abstract-method
     def convert_dttm(
         cls, target_type: str, dttm: datetime, db_extra: Optional[Dict[str, Any]] = None
     ) -> Optional[str]:
-
         db_extra = db_extra or {}

         sqla_type = cls.get_sqla_column_type(target_type)
@@ -99,7 +98,6 @@ class ElasticSearchEngineSpec(BaseEngineSpec):  # pylint: disable=abstract-method


 class OpenDistroEngineSpec(BaseEngineSpec):  # pylint: disable=abstract-method
-
     time_groupby_inline = True
     time_secondary_columns = True
     allows_joins = False
@@ -257,7 +257,6 @@ class GSheetsEngineSpec(SqliteEngineSpec):
         idx = 0

         for name, url in table_catalog.items():
-
             if not name:
                 errors.append(
                     SupersetError(
@@ -191,7 +191,6 @@ class HiveEngineSpec(PrestoEngineSpec):
             raise SupersetException("Append operation not currently supported")

         if to_sql_kwargs["if_exists"] == "fail":
-
             # Ensure table doesn't already exist.
             if table.schema:
                 table_exists = not database.get_df(
@@ -425,7 +424,7 @@ class HiveEngineSpec(PrestoEngineSpec):
         return BaseEngineSpec._get_fields(cols)  # pylint: disable=protected-access

     @classmethod
-    def latest_sub_partition(
+    def latest_sub_partition(  # type: ignore
         cls, table_name: str, schema: Optional[str], database: "Database", **kwargs: Any
     ) -> str:
         # TODO(bogdan): implement`
@@ -18,7 +18,6 @@ from superset.db_engine_specs.postgres import PostgresBaseEngineSpec


 class NetezzaEngineSpec(PostgresBaseEngineSpec):
-
     engine = "netezza"
     default_driver = "nzpy"
     engine_name = "IBM Netezza Performance Server"
@@ -1267,10 +1267,10 @@ class PrestoEngineSpec(PrestoBaseEngineSpec):
     def _extract_error_message(cls, ex: Exception) -> str:
         if (
             hasattr(ex, "orig")
-            and type(ex.orig).__name__ == "DatabaseError"  # type: ignore
-            and isinstance(ex.orig[0], dict)  # type: ignore
+            and type(ex.orig).__name__ == "DatabaseError"
+            and isinstance(ex.orig[0], dict)
         ):
-            error_dict = ex.orig[0]  # type: ignore
+            error_dict = ex.orig[0]
             return "{} at {}: {}".format(
                 error_dict.get("errorName"),
                 error_dict.get("errorLocation"),
@@ -26,7 +26,6 @@ if TYPE_CHECKING:


 class RocksetEngineSpec(BaseEngineSpec):
-
     engine = "rockset"
     engine_name = "Rockset"

@@ -222,7 +222,6 @@ class SnowflakeEngineSpec(PostgresBaseEngineSpec):
             Dict[str, Any]
         ] = None,
     ) -> str:
-
         return str(
             URL(
                 "snowflake",
@@ -52,7 +52,6 @@ def load_world_bank_health_n_pop(  # pylint: disable=too-many-locals, too-many-statements
     tbl_name = "wb_health_population"
     database = superset.utils.database.get_example_database()
     with database.get_sqla_engine_with_context() as engine:
-
         schema = inspect(engine).default_schema_name
         table_exists = database.has_table_by_name(tbl_name)

@@ -54,7 +54,7 @@ class SupersetException(Exception):
         if self.error_type:
             rv["error_type"] = self.error_type
         if self.exception is not None and hasattr(self.exception, "to_dict"):
-            rv = {**rv, **self.exception.to_dict()}  # type: ignore
+            rv = {**rv, **self.exception.to_dict()}
         return rv

@@ -65,7 +65,6 @@ def upgrade():
     # datasources.datasource_name column.
     for foreign in ["columns", "metrics"]:
         with op.batch_alter_table(foreign, naming_convention=conv) as batch_op:
-
             # Add the datasource_id column with the relevant constraints.
             batch_op.add_column(sa.Column("datasource_id", sa.Integer))

@@ -94,7 +93,6 @@ def upgrade():
     )

     with op.batch_alter_table(foreign, naming_convention=conv) as batch_op:
-
         # Drop the datasource_name column and associated constraints. Note
         # due to prior revisions (1226819ee0e3, 3b626e2a6783) there may
         # incorrectly be multiple duplicate constraints.
@@ -146,7 +144,6 @@ def downgrade():
     # datasources.datasource_id column.
     for foreign in ["columns", "metrics"]:
         with op.batch_alter_table(foreign, naming_convention=conv) as batch_op:
-
             # Add the datasource_name column with the relevant constraints.
             batch_op.add_column(sa.Column("datasource_name", sa.String(255)))

@@ -175,7 +172,6 @@ def downgrade():
     )

     with op.batch_alter_table(foreign, naming_convention=conv) as batch_op:
-
         # Drop the datasource_id column and associated constraint.
         batch_op.drop_constraint(
             "fk_{}_datasource_id_datasources".format(foreign), type_="foreignkey"
@@ -184,7 +180,6 @@ def downgrade():
         batch_op.drop_column("datasource_id")

     with op.batch_alter_table("datasources", naming_convention=conv) as batch_op:
-
         # Prior to dropping the uniqueness constraint, the foreign key
         # associated with the cluster_name column needs to be dropped.
         batch_op.drop_constraint(
@@ -30,7 +30,6 @@ down_revision = "e866bd2d4976"


 def upgrade():
-
     op.add_column(
         "dbs",
         sa.Column(
@@ -36,7 +36,6 @@ names = {"columns": "column_name", "metrics": "metric_name"}


 def upgrade():
-
     # Reduce the size of the metric_name column for constraint viability.
     with op.batch_alter_table("metrics", naming_convention=conv) as batch_op:
         batch_op.alter_column(
@@ -55,7 +54,6 @@ def upgrade():


 def downgrade():
-
     bind = op.get_bind()
     insp = sa.engine.reflection.Inspector.from_engine(bind)

@@ -31,7 +31,6 @@ from alembic import op


 def upgrade():
-
     # Enforce that the datasource_name column be non-nullable.
     with op.batch_alter_table("datasources") as batch_op:
         batch_op.alter_column(
@@ -40,7 +39,6 @@ def upgrade():


 def downgrade():
-
     # Forego that the datasource_name column be non-nullable.
     with op.batch_alter_table("datasources") as batch_op:
         batch_op.alter_column(
@@ -69,7 +69,6 @@ def downgrade():
     form_data = json.loads(slc.params)

     if "time_range" in form_data:
-
         # Note defaults and relative dates are not compatible with since/until
         # and thus the time range is persisted.
         try:
@@ -48,7 +48,6 @@ def upgrade():


 def downgrade():
-
     # One cannot simply re-add the uniqueness constraint as it may not have previously
     # existed.
     pass
@@ -39,7 +39,6 @@ Base = declarative_base()


 class Database(Base):
-
     __tablename__ = "dbs"
     id = Column(Integer, primary_key=True)
     extra = Column(Text)
@@ -31,7 +31,6 @@ from alembic import op


 def upgrade():
-
     with op.batch_alter_table("table_columns") as batch_op:
         batch_op.alter_column(
             "type", existing_type=sa.VARCHAR(length=32), type_=sa.TEXT()
@@ -39,7 +39,6 @@ Base = declarative_base()


 class Database(Base):
-
     __tablename__ = "dbs"
     id = Column(Integer, primary_key=True)
     extra = Column(Text)
@@ -103,7 +103,6 @@ def insert_from_select(


 class Database(Base):
-
     __tablename__ = "dbs"
     __table_args__ = (UniqueConstraint("database_name"),)

@@ -118,7 +117,6 @@ class Database(Base):


 class TableColumn(AuxiliaryColumnsMixin, Base):
-
     __tablename__ = "table_columns"
     __table_args__ = (UniqueConstraint("table_id", "column_name"),)

@@ -138,7 +136,6 @@ class TableColumn(AuxiliaryColumnsMixin, Base):


 class SqlMetric(AuxiliaryColumnsMixin, Base):
-
     __tablename__ = "sql_metrics"
     __table_args__ = (UniqueConstraint("table_id", "metric_name"),)

@@ -164,7 +161,6 @@ sqlatable_user_table = sa.Table(


 class SqlaTable(AuxiliaryColumnsMixin, Base):
-
     __tablename__ = "tables"
     __table_args__ = (UniqueConstraint("database_id", "schema", "table_name"),)

@@ -213,7 +209,6 @@ dataset_user_association_table = sa.Table(


 class NewColumn(AuxiliaryColumnsMixin, Base):
-
     __tablename__ = "sl_columns"

     id = sa.Column(sa.Integer, primary_key=True)
@@ -243,7 +238,6 @@ class NewColumn(AuxiliaryColumnsMixin, Base):


 class NewTable(AuxiliaryColumnsMixin, Base):
-
     __tablename__ = "sl_tables"

     id = sa.Column(sa.Integer, primary_key=True)
@@ -264,7 +258,6 @@ class NewTable(AuxiliaryColumnsMixin, Base):


 class NewDataset(Base, AuxiliaryColumnsMixin):
-
     __tablename__ = "sl_datasets"

     id = sa.Column(sa.Integer, primary_key=True)
@@ -636,7 +629,6 @@ def postprocess_columns(session: Session) -> None:
         return

     def get_joined_tables(offset, limit):
-
         # Import aliased from sqlalchemy
         from sqlalchemy.orm import aliased

@@ -788,7 +780,7 @@ def postprocess_columns(session: Session) -> None:
             updates["external_url"] = external_url

         # update extra json
-        for (key, val) in (
+        for key, val in (
             {
                 "verbose_name": verbose_name,
                 "python_date_format": python_date_format,
@@ -323,8 +323,8 @@ class Dashboard(Model, AuditMixinNullable, ImportExportMixin):

         return result

-    @property  # type: ignore
-    def params(self) -> str:  # type: ignore
+    @property
+    def params(self) -> str:
         return self.json_metadata

     @params.setter
@@ -430,7 +430,6 @@ class SavedQuery(Model, AuditMixinNullable, ExtraJSONMixin, ImportExportMixin):


 class TabState(Model, AuditMixinNullable, ExtraJSONMixin):
-
     __tablename__ = "tab_state"

     # basic info
@@ -493,7 +492,6 @@ class TabState(Model, AuditMixinNullable, ExtraJSONMixin):


 class TableSchema(Model, AuditMixinNullable, ExtraJSONMixin):
-
     __tablename__ = "table_schema"

     id = Column(Integer, primary_key=True, autoincrement=True)
@@ -33,7 +33,6 @@ logger = logging.getLogger(__name__)


 class ExportSavedQueriesCommand(ExportModelsCommand):
-
     dao = SavedQueryDAO
     not_found = SavedQueryNotFoundError

@@ -36,7 +36,6 @@ logger = logging.getLogger(__name__)


 class BaseReportScheduleCommand(BaseCommand):
-
     _properties: Dict[str, Any]

     def run(self) -> Any:
@@ -2205,7 +2205,6 @@ class SupersetSecurityManager(  # pylint: disable=too-many-public-methods
         return hasattr(user, "is_guest_user") and user.is_guest_user

     def get_current_guest_user_if_guest(self) -> Optional[GuestUser]:
-
         if self.is_guest_user():
             return g.user
         return None
@@ -509,7 +509,6 @@ def has_table_query(token_list: TokenList) -> bool:
     """
     state = InsertRLSState.SCANNING
     for token in token_list.tokens:
-
         # Recurse into child token list
         if isinstance(token, TokenList) and has_table_query(token):
             return True
@@ -607,7 +606,6 @@ def insert_rls(
     rls: Optional[TokenList] = None
     state = InsertRLSState.SCANNING
     for token in token_list.tokens:
-
         # Recurse into child token list
         if isinstance(token, TokenList):
             i = token_list.tokens.index(token)
@@ -48,13 +48,13 @@ class SqlLabException(SupersetException):
         if exception is not None:
             if (
                 hasattr(exception, "error_type")
-                and exception.error_type is not None  # type: ignore
+                and exception.error_type is not None
             ):
-                error_type = exception.error_type  # type: ignore
+                error_type = exception.error_type
             elif hasattr(exception, "error") and isinstance(
-                exception.error, SupersetError  # type: ignore
+                exception.error, SupersetError
             ):
-                error_type = exception.error.error_type  # type: ignore
+                error_type = exception.error.error_type
             else:
                 error_type = SupersetErrorType.GENERIC_BACKEND_ERROR

@@ -79,9 +79,9 @@ class SqlLabException(SupersetException):
             return ": {}".format(reason_message)
         if exception is not None:
             if hasattr(exception, "get_message"):
-                return ": {}".format(exception.get_message())  # type: ignore
+                return ": {}".format(exception.get_message())
             if hasattr(exception, "message"):
-                return ": {}".format(exception.message)  # type: ignore
+                return ": {}".format(exception.message)
             return ": {}".format(str(exception))
         return ""

@@ -48,8 +48,7 @@ class SqlQueryRenderImpl(SqlQueryRender):
     def __init__(
         self, sql_template_factory: Callable[..., BaseTemplateProcessor]
     ) -> None:
-
-        self._sql_template_processor_factory = sql_template_factory  # type: ignore
+        self._sql_template_processor_factory = sql_template_factory

     def render(self, execution_context: SqlJsonExecutionContext) -> str:
         query_model = execution_context.query
@@ -64,7 +64,7 @@ class SqlJsonExecutorBase(SqlJsonExecutor, ABC):

     def __init__(self, query_dao: QueryDAO, get_sql_results_task: GetSqlResultsTask):
         self._query_dao = query_dao
-        self._get_sql_results_task = get_sql_results_task  # type: ignore
+        self._get_sql_results_task = get_sql_results_task


 class SynchronousSqlJsonExecutor(SqlJsonExecutorBase):
@@ -163,7 +163,6 @@ class ASynchronousSqlJsonExecutor(SqlJsonExecutorBase):
         rendered_query: str,
         log_params: Optional[Dict[str, Any]],
     ) -> SqlJsonExecutionStatus:
-
         query_id = execution_context.query.id
         logger.info("Query %i: Running query on a Celery worker", query_id)
         try:
@@ -126,7 +126,6 @@ def get_object_type(class_name: str) -> ObjectTypes:


 class ObjectUpdater:
-
     object_type: Optional[str] = None

     @classmethod
@@ -218,7 +217,6 @@ class ObjectUpdater:


 class ChartUpdater(ObjectUpdater):
-
     object_type = "chart"

     @classmethod
@@ -227,7 +225,6 @@ class ChartUpdater(ObjectUpdater):


 class DashboardUpdater(ObjectUpdater):
-
     object_type = "dashboard"

     @classmethod
@@ -236,7 +233,6 @@ class DashboardUpdater(ObjectUpdater):


 class QueryUpdater(ObjectUpdater):
-
     object_type = "query"

     @classmethod
@@ -245,7 +241,6 @@ class QueryUpdater(ObjectUpdater):


 class DatasetUpdater(ObjectUpdater):
-
     object_type = "dataset"

     @classmethod
@@ -90,7 +90,11 @@ def load_chart_data_into_cache(
         raise ex
     except Exception as ex:
         # TODO: QueryContext should support SIP-40 style errors
-        error = ex.message if hasattr(ex, "message") else str(ex)  # type: ignore # pylint: disable=no-member
+        error = (
+            ex.message  # pylint: disable=no-member
+            if hasattr(ex, "message")
+            else str(ex)
+        )
         errors = [{"message": error}]
         async_query_manager.update_job(
             job_metadata, async_query_manager.STATUS_ERROR, errors=errors
@@ -157,7 +161,11 @@ def load_explore_json_into_cache(  # pylint: disable=too-many-locals
         if isinstance(ex, SupersetVizException):
             errors = ex.errors  # pylint: disable=no-member
         else:
-            error = ex.message if hasattr(ex, "message") else str(ex)  # type: ignore # pylint: disable=no-member
+            error = (
+                ex.message  # pylint: disable=no-member
+                if hasattr(ex, "message")
+                else str(ex)
+            )
             errors = [error]

         async_query_manager.update_job(
@@ -27,6 +27,7 @@ from superset import app, db

 logger = logging.getLogger(__name__)

+
 # Null pool is used for the celery workers due process forking side effects.
 # For more info see: https://github.com/apache/superset/issues/10530
 @contextmanager
@@ -655,10 +655,10 @@ def error_msg_from_exception(ex: Exception) -> str:
     """
     msg = ""
     if hasattr(ex, "message"):
-        if isinstance(ex.message, dict):  # type: ignore
+        if isinstance(ex.message, dict):
             msg = ex.message.get("message")  # type: ignore
-        elif ex.message:  # type: ignore
-            msg = ex.message  # type: ignore
+        elif ex.message:
+            msg = ex.message
     return msg or str(ex)

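Note: most of the `# type: ignore` deletions in this commit sit directly under `hasattr()` guards, which is consistent with mypy 1.0's new ability to narrow types based on `hasattr()` checks; take that attribution as an inference from the diff rather than something the commit states. A small sketch of the idea:

```python
def describe(ex: Exception) -> str:
    if hasattr(ex, "message"):
        # mypy 1.0 narrows `ex` here to "an Exception that has .message",
        # so the attribute access type-checks without # type: ignore.
        return str(ex.message)
    return str(ex)
```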
@@ -1778,14 +1778,13 @@ def indexed(


 def is_test() -> bool:
-    return strtobool(os.environ.get("SUPERSET_TESTENV", "false"))
+    return strtobool(os.environ.get("SUPERSET_TESTENV", "false"))  # type: ignore


 def get_time_filter_status(
     datasource: "BaseDatasource",
     applied_time_extras: Dict[str, str],
 ) -> Tuple[List[Dict[str, str]], List[Dict[str, str]]]:
-
     temporal_columns: Set[Any]
     if datasource.type == "query":
         temporal_columns = {
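Note: `is_test` gains an ignore rather than losing one, presumably because `distutils.util.strtobool` is typed as returning `int` (0 or 1) while the function is annotated `-> bool`. A minimal reproduction of the mismatch:

```python
import os
from distutils.util import strtobool  # deprecated since Python 3.10


def is_test() -> bool:
    # strtobool returns int, so mypy reports an incompatible return
    # value ("int", expected "bool") unless it is suppressed.
    return strtobool(os.environ.get("SUPERSET_TESTENV", "false"))  # type: ignore
```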
@@ -77,7 +77,7 @@ def copy_filter_scopes(
     old_filter_scopes: Dict[int, Dict[str, Dict[str, Any]]],
 ) -> Dict[str, Dict[Any, Any]]:
     new_filter_scopes: Dict[str, Dict[Any, Any]] = {}
-    for (filter_id, scopes) in old_filter_scopes.items():
+    for filter_id, scopes in old_filter_scopes.items():
         new_filter_key = old_to_new_slc_id_dict.get(int(filter_id))
         if new_filter_key:
             new_filter_scopes[str(new_filter_key)] = scopes
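Note: dropping the parentheses in `for (filter_id, scopes) in ...` is purely cosmetic; a parenthesized tuple target unpacks exactly like a bare one:

```python
scopes_by_id = {1: {"chart": "a"}, 2: {"chart": "b"}}

for (filter_id, scopes) in scopes_by_id.items():  # parenthesized target
    print(filter_id, scopes)

for filter_id, scopes in scopes_by_id.items():  # identical behavior
    print(filter_id, scopes)
```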
@@ -47,7 +47,7 @@ def statsd_gauge(metric_prefix: Optional[str] = None) -> Callable[..., Any]:
         except Exception as ex:
             if (
                 hasattr(ex, "status")
-                and ex.status < 500  # type: ignore # pylint: disable=no-member
+                and ex.status < 500  # pylint: disable=no-member
             ):
                 current_app.config["STATS_LOGGER"].gauge(
                     f"{metric_prefix_}.warning", 1
@@ -52,7 +52,7 @@ class MachineAuthProvider:
         :return: The WebDriver passed in (fluent)
         """
         # Short-circuit this method if we have an override configured
-        if self._auth_webdriver_func_override:
+        if self._auth_webdriver_func_override:  # type: ignore
             return self._auth_webdriver_func_override(driver, user)

         # Setting cookies requires doing a request first
@@ -114,10 +114,7 @@ def statsd_metrics(f: Callable[..., Any]) -> Callable[..., Any]:
         try:
             duration, response = time_function(f, self, *args, **kwargs)
         except Exception as ex:
-            if (
-                hasattr(ex, "status")
-                and ex.status < 500  # type: ignore # pylint: disable=no-member
-            ):
+            if hasattr(ex, "status") and ex.status < 500:  # pylint: disable=no-member
                 self.incr_stats("warning", func_name)
             else:
                 self.incr_stats("error", func_name)
@@ -22,7 +22,6 @@ from superset.views.chart.filters import SliceFilter


 class SliceMixin:  # pylint: disable=too-few-public-methods
-
     list_title = _("Charts")
     show_title = _("Show Chart")
     add_title = _("Add Chart")
@@ -2397,7 +2397,6 @@ class Superset(BaseSupersetView):  # pylint: disable=too-many-public-methods
         self,
         command_result: CommandResult,
     ) -> FlaskResponse:
-
         status_code = 200
         if command_result["status"] == SqlJsonExecutionStatus.QUERY_IS_RUNNING:
             status_code = 202
@@ -21,7 +21,6 @@ from superset.dashboards.filters import DashboardAccessFilter


 class DashboardMixin:  # pylint: disable=too-few-public-methods
-
     list_title = _("Dashboards")
     show_title = _("Show Dashboard")
     add_title = _("Add Dashboard")
@@ -3030,7 +3030,6 @@ class PairedTTestViz(BaseViz):


 class RoseViz(NVD3TimeSeriesViz):
-
     viz_type = "rose"
     verbose_name = _("Time Series - Nightingale Rose Chart")
     sort_series = False
@@ -61,7 +61,6 @@ def log(
     suffix_exit_msg: str = _DEFAULT_EXIT_MSG_SUFFIX,
     return_value_msg_part=_DEFAULT_RETURN_VALUE_MSG_PART,
 ) -> Decorated:
-
     decorator: Decorated = _make_decorator(
         prefix_enter_msg,
         suffix_enter_msg,
@@ -210,7 +210,6 @@ class TestPostChartDataApi(BaseTestChartDataApi):
         {**app.config, "SAMPLES_ROW_LIMIT": 5, "SQL_MAX_ROW": 15},
     )
     def test_with_row_limit_as_samples__rowcount_as_row_limit(self):
-
         expected_row_count = 10
         self.query_context_payload["result_type"] = ChartDataResultType.SAMPLES
         self.query_context_payload["queries"][0]["row_limit"] = expected_row_count
@@ -234,7 +233,6 @@ class TestPostChartDataApi(BaseTestChartDataApi):

     @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
     def test_with_invalid_payload__400(self):
-
         invalid_query_context = {"form_data": "NOT VALID JSON"}

         rv = self.client.post(
@@ -585,7 +583,6 @@ class TestPostChartDataApi(BaseTestChartDataApi):
     def test_when_where_parameter_is_template_and_query_result_type__query_is_templated(
         self,
     ):
-
         self.query_context_payload["result_type"] = ChartDataResultType.QUERY
         self.query_context_payload["queries"][0]["filters"] = [
             {"col": "gender", "op": "==", "val": "boy"}
@@ -502,7 +502,6 @@ def test_failing_import_datasets_versioned_export(
 @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
 @mock.patch("superset.tasks.thumbnails.cache_dashboard_thumbnail")
 def test_compute_thumbnails(thumbnail_mock, app_context, fs):
-
     thumbnail_mock.return_value = None
     runner = app.test_cli_runner()
     dashboard = db.session.query(Dashboard).filter_by(slug="births").first()
@@ -200,7 +200,7 @@ def mock_upload_to_s3(filename: str, upload_prefix: str, table: Table) -> str:
     # only needed for the hive tests
     import docker

-    client = docker.from_env()
+    client = docker.from_env()  # type: ignore
     container = client.containers.get("namenode")
     # docker mounted volume that contains csv uploads
     src = os.path.join("/tmp/superset_uploads", os.path.basename(filename))
@@ -61,7 +61,6 @@ from tests.integration_tests.fixtures.importexport import (


 class TestDatasetApi(SupersetTestCase):
-
     fixture_tables_names = ("ab_permission", "ab_permission_view", "ab_view_menu")
     fixture_virtual_table_names = ("sql_virtual_dataset_1", "sql_virtual_dataset_2")

@@ -93,6 +93,7 @@ class TestBigQueryDbEngineSpec(TestDbEngineSpec):
         """
         DB Eng Specs (bigquery): Test fetch data
         """
+
         # Mock a google.cloud.bigquery.table.Row
         class Row(object):
             def __init__(self, value):
@@ -89,7 +89,6 @@ def _create_energy_table() -> List[Slice]:

     slices = []
     for slice_data in _get_energy_slices():
-
         slice = _create_and_commit_energy_slice(
             table,
             slice_data["slice_title"],
@@ -23,7 +23,6 @@ from tests.integration_tests.test_app import app

 @pytest.fixture()
 def create_gamma_sqllab_no_data():
-
     with app.app_context():
         gamma_role = db.session.query(Role).filter(Role.name == "Gamma").one_or_none()
         sqllab_role = (
@@ -64,7 +64,6 @@ def test_execute_query_as_report_executor(
     app_context: None,
     get_user,
 ) -> None:
-
     from superset.reports.commands.alert import AlertCommand
     from superset.reports.models import ReportSchedule

@@ -104,7 +103,6 @@ def test_execute_query_as_report_executor(
 def test_execute_query_succeeded_no_retry(
     mocker: MockFixture, app_context: None
 ) -> None:
-
     from superset.reports.commands.alert import AlertCommand

     execute_query_mock = mocker.patch(
@@ -88,7 +88,6 @@ class TestReportSchedulesApi(SupersetTestCase):
     @pytest.fixture()
     def create_working_admin_report_schedule(self):
         with self.create_app().app_context():
-
             admin_user = self.get_user("admin")
             chart = db.session.query(Slice).first()
             example_db = get_example_database()
@@ -114,7 +113,6 @@ class TestReportSchedulesApi(SupersetTestCase):
     @pytest.fixture()
     def create_working_gamma_report_schedule(self, gamma_user_with_alerts_role):
         with self.create_app().app_context():
-
             chart = db.session.query(Slice).first()
             example_db = get_example_database()

@@ -139,7 +137,6 @@ class TestReportSchedulesApi(SupersetTestCase):
     @pytest.fixture()
     def create_working_shared_report_schedule(self, gamma_user_with_alerts_role):
         with self.create_app().app_context():
-
             admin_user = self.get_user("admin")
             alpha_user = self.get_user("alpha")
             chart = db.session.query(Slice).first()
@@ -213,7 +210,6 @@ class TestReportSchedulesApi(SupersetTestCase):
     @pytest.fixture()
     def create_alpha_users(self):
         with self.create_app().app_context():
-
             users = [
                 self.create_user(
                     "alpha1", "password", "Alpha", email="alpha1@superset.org"
@@ -466,7 +466,6 @@ def create_alert_email_chart(request):
     chart = db.session.query(Slice).first()
     example_database = get_example_database()
     with create_test_table_context(example_database):
-
         report_schedule = create_report_notification(
             email_target="target@email.com",
             chart=chart,
@@ -549,7 +548,6 @@ def create_no_alert_email_chart(request):
     chart = db.session.query(Slice).first()
     example_database = get_example_database()
     with create_test_table_context(example_database):
-
         report_schedule = create_report_notification(
             email_target="target@email.com",
             chart=chart,
@@ -584,7 +582,6 @@ def create_mul_alert_email_chart(request):
     chart = db.session.query(Slice).first()
     example_database = get_example_database()
     with create_test_table_context(example_database):
-
         report_schedule = create_report_notification(
             email_target="target@email.com",
             chart=chart,
@@ -619,7 +616,6 @@ def create_invalid_sql_alert_email_chart(request):
     chart = db.session.query(Slice).first()
     example_database = get_example_database()
     with create_test_table_context(example_database):
-
         report_schedule = create_report_notification(
             email_target="target@email.com",
             chart=chart,
@@ -1072,7 +1068,6 @@ def test_email_dashboard_report_schedule(

     with freeze_time("2020-01-01T00:00:00Z"):
         with patch.object(current_app.config["STATS_LOGGER"], "gauge") as statsd_mock:
-
             AsyncExecuteReportScheduleCommand(
                 TEST_ID, create_report_email_dashboard.id, datetime.utcnow()
             ).run()
@@ -1143,7 +1138,6 @@ def test_slack_chart_report_schedule(

     with freeze_time("2020-01-01T00:00:00Z"):
         with patch.object(current_app.config["STATS_LOGGER"], "gauge") as statsd_mock:
-
             AsyncExecuteReportScheduleCommand(
                 TEST_ID, create_report_slack_chart.id, datetime.utcnow()
             ).run()
@@ -1192,7 +1186,6 @@ def test_slack_chart_report_schedule_with_errors(
     web_client_mock.side_effect = er

     with pytest.raises(ReportScheduleClientErrorsException):
-
         AsyncExecuteReportScheduleCommand(
             TEST_ID, create_report_slack_chart.id, datetime.utcnow()
         ).run()
@@ -1349,7 +1342,6 @@ def test_report_schedule_working_timeout(create_report_slack_chart_working):
         seconds=create_report_slack_chart_working.working_timeout + 1
     )
     with freeze_time(current_time):
-
         with pytest.raises(ReportScheduleWorkingTimeoutError):
             AsyncExecuteReportScheduleCommand(
                 TEST_ID, create_report_slack_chart_working.id, datetime.utcnow()
@@ -2020,7 +2012,6 @@ def test__send_with_multiple_errors(notification_mock, logger_mock):
 @patch("superset.reports.commands.execute.logger")
 @patch("superset.reports.commands.execute.create_notification")
 def test__send_with_server_errors(notification_mock, logger_mock):
-
     notification_content = "I am some content"
     recipients = ["test@foo.com"]
     notification_mock.return_value.send.side_effect = NotificationError()
@@ -43,7 +43,6 @@ def test_scheduler_celery_timeout_ny(execute_mock, owners):
     Reports scheduler: Test scheduler setting celery soft and hard timeout
     """
     with app.app_context():
-
         report_schedule = insert_report_schedule(
             type=ReportScheduleType.ALERT,
             name="report",
@@ -91,7 +90,6 @@ def test_scheduler_celery_timeout_utc(execute_mock, owners):
     Reports scheduler: Test scheduler setting celery soft and hard timeout
     """
     with app.app_context():
-
         report_schedule = insert_report_schedule(
             type=ReportScheduleType.ALERT,
             name="report",
@@ -152,7 +152,6 @@ class TestRowLevelSecurity(SupersetTestCase):
     @pytest.fixture()
     def create_dataset(self):
         with self.create_app().app_context():
-
             dataset = SqlaTable(database_id=1, schema=None, table_name="table1")
             db.session.add(dataset)
             db.session.flush()
@@ -199,7 +199,6 @@ class TestWebDriverProxy(SupersetTestCase):


 class TestThumbnails(SupersetTestCase):
-
     mock_image = b"bytes mock image"
     digest_return_value = "foo_bar"
     digest_hash = "5c7d96a3dd7a87850a2ef34087565a6e"
@@ -86,7 +86,13 @@ def test_flat_should_drop_index_level():
     df = pd.DataFrame(index=index, columns=columns, data=1)

     # drop level by index
-    assert pp.flatten(df.copy(), drop_levels=(0, 1,)).equals(
+    assert pp.flatten(
+        df.copy(),
+        drop_levels=(
+            0,
+            1,
+        ),
+    ).equals(
         pd.DataFrame(
             {
                 "__timestamp": index,
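Note: the expansion in the last hunk is black's "magic trailing comma": because `drop_levels=(0, 1,)` carries a trailing comma, black refuses to collapse the collection and instead puts one element per line, exploding the enclosing call as well. A standalone sketch (`flatten` here is an illustrative stand-in, not the pandas-postprocessing helper):

```python
def flatten(df, drop_levels=()):  # stand-in function for illustration only
    return df

df = {"col": [1, 2]}

# No trailing comma: black keeps the call on one line when it fits.
result = flatten(df, drop_levels=(0, 1))

# Magic trailing comma inside the tuple: black expands every level.
result = flatten(
    df,
    drop_levels=(
        0,
        1,
    ),
)
```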