chore: Ensure Mixins are ordered according to the MRO (#26288)

Author: John Bodley, 2023-12-16 14:11:58 +13:00 (committed by GitHub)
Commit: aafb54d042
Parent: 8450cca998
24 changed files with 31 additions and 31 deletions
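
Python builds a class's method resolution order (MRO) by C3-linearising its bases left to right, so a mixin listed after the concrete base class (Model, Base, Schema, an engine spec, a test case) sits behind that base in the MRO: the base can shadow the mixin's attributes, and a cooperative super() call in the mixin never wraps the base. Listing mixins first, as this commit does throughout, puts them ahead of the base. A minimal sketch with illustrative names (not Superset classes):

class Base:
    def label(self) -> str:
        return "base"


class LabelMixin:
    # Cooperative mixin: decorates whatever label() the next class in the MRO returns.
    def label(self) -> str:
        return f"mixin:{super().label()}"


class BaseFirst(Base, LabelMixin):   # base listed first: Base.label() shadows the mixin
    pass


class MixinFirst(LabelMixin, Base):  # mixin listed first: it wraps Base.label()
    pass


print(BaseFirst().label())    # "base"
print(MixinFirst().label())   # "mixin:base"
print([c.__name__ for c in MixinFirst.__mro__])  # ['MixinFirst', 'LabelMixin', 'Base', 'object']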


@@ -757,7 +757,7 @@ class AnnotationDatasource(BaseDatasource):
raise NotImplementedError()
-class TableColumn(Model, AuditMixinNullable, ImportExportMixin, CertificationMixin):
+class TableColumn(AuditMixinNullable, ImportExportMixin, CertificationMixin, Model):
"""ORM object for table columns, each table can have multiple columns"""
@@ -971,7 +971,7 @@ class TableColumn(Model, AuditMixinNullable, ImportExportMixin, CertificationMix
return {s: getattr(self, s) for s in attrs if hasattr(self, s)}
-class SqlMetric(Model, AuditMixinNullable, ImportExportMixin, CertificationMixin):
+class SqlMetric(AuditMixinNullable, ImportExportMixin, CertificationMixin, Model):
"""ORM object for metrics, each table can have multiple metrics"""


@@ -422,7 +422,7 @@ class DatabaseSSHTunnel(Schema):
private_key_password = fields.String(required=False)
-class DatabasePostSchema(Schema, DatabaseParametersSchemaMixin):
+class DatabasePostSchema(DatabaseParametersSchemaMixin, Schema):
class Meta: # pylint: disable=too-few-public-methods
unknown = EXCLUDE
@@ -479,7 +479,7 @@ class DatabasePostSchema(Schema, DatabaseParametersSchemaMixin):
ssh_tunnel = fields.Nested(DatabaseSSHTunnel, allow_none=True)
-class DatabasePutSchema(Schema, DatabaseParametersSchemaMixin):
+class DatabasePutSchema(DatabaseParametersSchemaMixin, Schema):
class Meta: # pylint: disable=too-few-public-methods
unknown = EXCLUDE
@@ -536,7 +536,7 @@ class DatabasePutSchema(Schema, DatabaseParametersSchemaMixin):
uuid = fields.String(required=False)
-class DatabaseTestConnectionSchema(Schema, DatabaseParametersSchemaMixin):
+class DatabaseTestConnectionSchema(DatabaseParametersSchemaMixin, Schema):
rename_encrypted_extra = pre_load(rename_encrypted_extra)
database_name = fields.String(


@@ -34,7 +34,7 @@ from superset.models.helpers import (
app_config = current_app.config
-class SSHTunnel(Model, AuditMixinNullable, ExtraJSONMixin, ImportExportMixin):
+class SSHTunnel(AuditMixinNullable, ExtraJSONMixin, ImportExportMixin, Model):
"""
A ssh tunnel configuration in a database.
"""


@@ -69,7 +69,7 @@ dataset_user_association_table = sa.Table(
)
-class Dataset(Model, AuditMixinNullable, ExtraJSONMixin, ImportExportMixin):
+class Dataset(AuditMixinNullable, ExtraJSONMixin, ImportExportMixin, Model):
"""
A table/view in a database.
"""


@@ -248,7 +248,7 @@ except ImportError: # ClickHouse Connect not installed, do nothing
pass
-class ClickHouseConnectEngineSpec(ClickHouseEngineSpec, BasicParametersMixin):
+class ClickHouseConnectEngineSpec(BasicParametersMixin, ClickHouseEngineSpec):
"""Engine spec for clickhouse-connect connector"""
engine = "clickhousedb"


@@ -194,7 +194,7 @@ class DatabendParametersSchema(Schema):
)
-class DatabendConnectEngineSpec(DatabendEngineSpec, BasicParametersMixin):
+class DatabendConnectEngineSpec(BasicParametersMixin, DatabendEngineSpec):
"""Engine spec for databend sqlalchemy connector"""
engine = "databend"


@@ -145,7 +145,7 @@ class DatabricksODBCEngineSpec(BaseEngineSpec):
return HiveEngineSpec.epoch_to_dttm()
-class DatabricksNativeEngineSpec(DatabricksODBCEngineSpec, BasicParametersMixin):
+class DatabricksNativeEngineSpec(BasicParametersMixin, DatabricksODBCEngineSpec):
engine_name = "Databricks"
engine = "databricks"


@@ -62,7 +62,7 @@ SYNTAX_ERROR_REGEX = re.compile(
)
-class MySQLEngineSpec(BaseEngineSpec, BasicParametersMixin):
+class MySQLEngineSpec(BasicParametersMixin, BaseEngineSpec):
engine = "mysql"
engine_name = "MySQL"
max_column_name_length = 64


@@ -183,7 +183,7 @@ class PostgresBaseEngineSpec(BaseEngineSpec):
return "(timestamp 'epoch' + {col} * interval '1 second')"
-class PostgresEngineSpec(PostgresBaseEngineSpec, BasicParametersMixin):
+class PostgresEngineSpec(BasicParametersMixin, PostgresBaseEngineSpec):
engine = "postgresql"
engine_aliases = {"postgres"}
supports_dynamic_schema = True


@@ -55,7 +55,7 @@ CONNECTION_UNKNOWN_DATABASE_REGEX = re.compile(
)
-class RedshiftEngineSpec(PostgresBaseEngineSpec, BasicParametersMixin):
+class RedshiftEngineSpec(BasicParametersMixin, PostgresBaseEngineSpec):
engine = "redshift"
engine_name = "Amazon Redshift"
max_column_name_length = 127


@@ -24,7 +24,7 @@ from superset.models.helpers import AuditMixinNullable, ImportExportMixin
VALUE_MAX_SIZE = 2**24 - 1
-class KeyValueEntry(Model, AuditMixinNullable, ImportExportMixin):
+class KeyValueEntry(AuditMixinNullable, ImportExportMixin, Model):
"""Key value store entity"""
__tablename__ = "key_value"


@@ -79,7 +79,7 @@ class AuditMixin:
)
-class Slice(Base, AuditMixin):
+class Slice(AuditMixin, Base):
"""Declarative class to do query in upgrade"""
__tablename__ = "slices"
@@ -87,7 +87,7 @@ class Slice(Base, AuditMixin):
owners = relationship("User", secondary=slice_user)
-class Dashboard(Base, AuditMixin):
+class Dashboard(AuditMixin, Base):
"""Declarative class to do query in upgrade"""
__tablename__ = "dashboards"


@@ -80,7 +80,7 @@ class Tag(Base, AuditMixinNullable):
type = Column(Enum(TagType))
-class TaggedObject(Base, AuditMixinNullable):
+class TaggedObject(AuditMixinNullable, Base):
__tablename__ = "tagged_object"
id = Column(Integer, primary_key=True)


@@ -68,7 +68,7 @@ table_names = [
"slice_email_schedules",
]
models = {
-table_name: type(table_name, (Base, ImportMixin), {"__tablename__": table_name})
+table_name: type(table_name, (ImportMixin, Base), {"__tablename__": table_name})
for table_name in table_names
}
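
The same ordering rule applies when classes are built dynamically: type(name, bases, namespace) linearises the bases tuple exactly as a class statement would, so the mixin goes first there too. A small sketch with stand-in classes (FakeBase is hypothetical, not the migration's SQLAlchemy declarative Base):

class ImportMixin:
    export_fields: list = []


class FakeBase:  # stand-in for the declarative Base used in the migration
    pass


table_names = ["slice_email_schedules"]
models = {
    name: type(name, (ImportMixin, FakeBase), {"__tablename__": name})
    for name in table_names
}

print([c.__name__ for c in models["slice_email_schedules"].__mro__])
# ['slice_email_schedules', 'ImportMixin', 'FakeBase', 'object']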


@@ -47,7 +47,7 @@ class ImportMixin:
uuid = sa.Column(UUIDType(binary=True), primary_key=False, default=uuid4)
-class SavedQuery(Base, ImportMixin):
+class SavedQuery(ImportMixin, Base):
__tablename__ = "saved_query"


@@ -257,7 +257,7 @@ class NewTable(AuxiliaryColumnsMixin, Base):
)
-class NewDataset(Base, AuxiliaryColumnsMixin):
+class NewDataset(AuxiliaryColumnsMixin, Base):
__tablename__ = "sl_datasets"
id = sa.Column(sa.Integer, primary_key=True)


@@ -147,7 +147,7 @@ DashboardRoles = Table(
# pylint: disable=too-many-public-methods
-class Dashboard(Model, AuditMixinNullable, ImportExportMixin):
+class Dashboard(AuditMixinNullable, ImportExportMixin, Model):
"""The dashboard object!"""
__tablename__ = "dashboards"


@@ -66,7 +66,7 @@ logger = logging.getLogger(__name__)
class Query(
-Model, ExtraJSONMixin, ExploreMixin
+ExtraJSONMixin, ExploreMixin, Model
): # pylint: disable=abstract-method,too-many-public-methods
"""ORM model for SQL query
@@ -355,7 +355,7 @@ class Query(
return self.make_sqla_column_compatible(sqla_column, label)
-class SavedQuery(Model, AuditMixinNullable, ExtraJSONMixin, ImportExportMixin):
+class SavedQuery(AuditMixinNullable, ExtraJSONMixin, ImportExportMixin, Model):
"""ORM model for SQL query"""
__tablename__ = "saved_query"
@@ -442,7 +442,7 @@ class SavedQuery(Model, AuditMixinNullable, ExtraJSONMixin, ImportExportMixin):
return self._last_run_delta_humanized
-class TabState(Model, AuditMixinNullable, ExtraJSONMixin):
+class TabState(AuditMixinNullable, ExtraJSONMixin, Model):
__tablename__ = "tab_state"
# basic info
@@ -505,7 +505,7 @@ class TabState(Model, AuditMixinNullable, ExtraJSONMixin):
}
-class TableSchema(Model, AuditMixinNullable, ExtraJSONMixin):
+class TableSchema(AuditMixinNullable, ExtraJSONMixin, Model):
__tablename__ = "table_schema"
id = Column(Integer, primary_key=True, autoincrement=True)


@@ -106,7 +106,7 @@ report_schedule_user = Table(
)
-class ReportSchedule(Model, AuditMixinNullable, ExtraJSONMixin):
+class ReportSchedule(AuditMixinNullable, ExtraJSONMixin, Model):
"""
Report Schedules, supports alerts and reports


@@ -64,7 +64,7 @@ table_column_association_table = sa.Table(
)
-class Table(Model, AuditMixinNullable, ExtraJSONMixin, ImportExportMixin):
+class Table(AuditMixinNullable, ExtraJSONMixin, ImportExportMixin, Model):
"""
A table/view in a database.
"""


@@ -73,7 +73,7 @@ CHART_DATA_URI = "api/v1/chart/data"
CHARTS_FIXTURE_COUNT = 10
-class TestChartApi(SupersetTestCase, ApiOwnersTestCaseMixin, InsertChartMixin):
+class TestChartApi(ApiOwnersTestCaseMixin, InsertChartMixin, SupersetTestCase):
resource_name = "chart"
@pytest.fixture(autouse=True)


@@ -63,7 +63,7 @@ from tests.integration_tests.fixtures.world_bank_dashboard import (
DASHBOARDS_FIXTURE_COUNT = 10
-class TestDashboardApi(SupersetTestCase, ApiOwnersTestCaseMixin, InsertChartMixin):
+class TestDashboardApi(ApiOwnersTestCaseMixin, InsertChartMixin, SupersetTestCase):
resource_name = "dashboard"
dashboards: list[Dashboard] = []


@@ -31,7 +31,7 @@ from tests.integration_tests.insert_chart_mixin import InsertChartMixin
from .base_tests import SupersetTestCase
-class TestProfile(SupersetTestCase, InsertChartMixin):
+class TestProfile(InsertChartMixin, SupersetTestCase):
def insert_dashboard_created_by(self, username: str) -> Dashboard:
user = self.get_user(username)
dashboard = self.insert_dashboard(


@@ -42,7 +42,7 @@ def dummy_schema() -> "DatabaseParametersSchemaMixin":
"""
from superset.databases.schemas import DatabaseParametersSchemaMixin
-class DummySchema(Schema, DatabaseParametersSchemaMixin):
+class DummySchema(DatabaseParametersSchemaMixin, Schema):
sqlalchemy_uri = fields.String()
return DummySchema()