chore(superset.utils.core): move all database utils to database utils module (#18058)

ofekisr 2022-01-16 08:32:50 +02:00 committed by GitHub
parent 0b67fe1beb
commit e53a50a155
53 changed files with 184 additions and 143 deletions
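
In practice, call sites that previously pulled the database helpers (get_example_database, get_main_database, get_or_create_db) out of superset.utils.core now import them from the new superset.utils.database module, while get_example_default_schema stays in superset.utils.core. A minimal sketch of the resulting usage, using only names that appear in this diff and assuming a configured Flask app context (as in the CLI and test changes below):

    # Before this change the helpers were part of the catch-all core utils:
    # from superset.utils.core import get_example_database, get_or_create_db

    # After this change they live in the dedicated database utils module:
    from superset.utils.database import get_example_database, get_or_create_db

    examples_db = get_example_database()  # resolves (or creates) the "examples" Database row
    print(examples_db.sqlalchemy_uri)     # Database model attribute, unchanged by this move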

View File

@@ -37,9 +37,9 @@ from flask_appbuilder import Model
from flask_appbuilder.api import BaseApi
from flask_appbuilder.api.manager import resolver
+import superset.utils.database as database_utils
from superset import app, appbuilder, config, security_manager
from superset.extensions import celery_app, db
-from superset.utils import core as utils
from superset.utils.celery import session_scope
from superset.utils.encrypt import SecretsMigrator
from superset.utils.urls import get_url_path
@@ -120,7 +120,7 @@ def load_examples_run(
    if only_metadata:
        print("Loading examples metadata")
    else:
-        examples_db = utils.get_example_database()
+        examples_db = database_utils.get_example_database()
        print(f"Loading examples metadata and related data into {examples_db}")
    # pylint: disable=import-outside-toplevel
@@ -227,7 +227,7 @@ def import_directory(directory: str, overwrite: bool, force: bool) -> None:
)
def set_database_uri(database_name: str, uri: str, skip_create: bool) -> None:
    """Updates a database connection URI"""
-    utils.get_or_create_db(database_name, uri, not skip_create)
+    database_utils.get_or_create_db(database_name, uri, not skip_create)
@superset.command()
@@ -768,7 +768,7 @@ def load_test_users_run() -> None:
    sm = security_manager
-    examples_db = utils.get_example_database()
+    examples_db = database_utils.get_example_database()
    examples_pv = sm.add_permission_view_menu("database_access", examples_db.perm)

View File

@@ -42,7 +42,8 @@ from superset.datasets.commands.importers.v1 import ImportDatasetsCommand
from superset.datasets.commands.importers.v1.utils import import_dataset
from superset.datasets.schemas import ImportV1DatasetSchema
from superset.models.dashboard import dashboard_slices
-from superset.utils.core import get_example_database, get_example_default_schema
+from superset.utils.core import get_example_default_schema
+from superset.utils.database import get_example_database
class ImportExamplesCommand(ImportModelsCommand):

View File

@@ -21,8 +21,8 @@ import polyline
from sqlalchemy import inspect, String, Text
from superset import db
-from superset.utils.core import get_example_database
+from ..utils.database import get_example_database
from .helpers import get_example_data, get_table_connector_registry

View File

@@ -29,8 +29,8 @@ from superset.exceptions import NoDataException
from superset.models.core import Database
from superset.models.dashboard import Dashboard
from superset.models.slice import Slice
-from superset.utils.core import get_example_database
+from ..utils.database import get_example_database
from .helpers import (
    get_example_data,
    get_slice_json,

View File

@@ -20,10 +20,10 @@ import pandas as pd
from sqlalchemy import BigInteger, Date, inspect, String
from sqlalchemy.sql import column
+import superset.utils.database as database_utils
from superset import db
from superset.connectors.sqla.models import SqlMetric
from superset.models.slice import Slice
-from superset.utils import core as utils
from .helpers import (
    get_example_data,
@@ -37,7 +37,7 @@ from .helpers import (
def load_country_map_data(only_metadata: bool = False, force: bool = False) -> None:
    """Loading data for map with country map"""
    tbl_name = "birth_france_by_region"
-    database = utils.get_example_database()
+    database = database_utils.get_example_database()
    engine = database.get_sqla_engine()
    schema = inspect(engine).default_schema_name
    table_exists = database.has_table_by_name(tbl_name)

View File

@@ -21,10 +21,10 @@ import pandas as pd
from sqlalchemy import Float, inspect, String
from sqlalchemy.sql import column
+import superset.utils.database as database_utils
from superset import db
from superset.connectors.sqla.models import SqlMetric
from superset.models.slice import Slice
-from superset.utils import core as utils
from .helpers import (
    get_example_data,
@@ -39,7 +39,7 @@ def load_energy(
) -> None:
    """Loads an energy related dataset to use with sankey and graphs"""
    tbl_name = "energy_usage"
-    database = utils.get_example_database()
+    database = database_utils.get_example_database()
    engine = database.get_sqla_engine()
    schema = inspect(engine).default_schema_name
    table_exists = database.has_table_by_name(tbl_name)

View File

@@ -17,8 +17,8 @@
import pandas as pd
from sqlalchemy import DateTime, inspect
+import superset.utils.database as database_utils
from superset import db
-from superset.utils import core as utils
from .helpers import get_example_data, get_table_connector_registry
@@ -26,7 +26,7 @@ from .helpers import get_example_data, get_table_connector_registry
def load_flights(only_metadata: bool = False, force: bool = False) -> None:
    """Loading random time series data from a zip file in the repo"""
    tbl_name = "flights"
-    database = utils.get_example_database()
+    database = database_utils.get_example_database()
    engine = database.get_sqla_engine()
    schema = inspect(engine).default_schema_name
    table_exists = database.has_table_by_name(tbl_name)

View File

@@ -21,9 +21,9 @@ import geohash
import pandas as pd
from sqlalchemy import DateTime, Float, inspect, String
+import superset.utils.database as database_utils
from superset import db
from superset.models.slice import Slice
-from superset.utils import core as utils
from .helpers import (
    get_example_data,
@@ -37,7 +37,7 @@ from .helpers import (
def load_long_lat_data(only_metadata: bool = False, force: bool = False) -> None:
    """Loading lat/long data from a csv file in the repo"""
    tbl_name = "long_lat"
-    database = utils.get_example_database()
+    database = database_utils.get_example_database()
    engine = database.get_sqla_engine()
    schema = inspect(engine).default_schema_name
    table_exists = database.has_table_by_name(tbl_name)

View File

@@ -21,8 +21,8 @@ from sqlalchemy import BigInteger, Date, DateTime, inspect, String
from superset import app, db
from superset.models.slice import Slice
-from superset.utils.core import get_example_database
+from ..utils.database import get_example_database
from .helpers import (
    get_example_data,
    get_slice_json,

View File

@@ -19,15 +19,15 @@ import json
import pandas as pd
from sqlalchemy import inspect, String, Text
+import superset.utils.database as database_utils
from superset import db
-from superset.utils import core as utils
from .helpers import get_example_data, get_table_connector_registry
def load_paris_iris_geojson(only_metadata: bool = False, force: bool = False) -> None:
    tbl_name = "paris_iris_mapping"
-    database = utils.get_example_database()
+    database = database_utils.get_example_database()
    engine = database.get_sqla_engine()
    schema = inspect(engine).default_schema_name
    table_exists = database.has_table_by_name(tbl_name)

View File

@@ -18,9 +18,9 @@
import pandas as pd
from sqlalchemy import DateTime, inspect, String
+import superset.utils.database as database_utils
from superset import app, db
from superset.models.slice import Slice
-from superset.utils import core as utils
from .helpers import (
    get_example_data,
@@ -35,7 +35,7 @@ def load_random_time_series_data(
) -> None:
    """Loading random time series data from a zip file in the repo"""
    tbl_name = "random_time_series"
-    database = utils.get_example_database()
+    database = database_utils.get_example_database()
    engine = database.get_sqla_engine()
    schema = inspect(engine).default_schema_name
    table_exists = database.has_table_by_name(tbl_name)

View File

@@ -19,8 +19,8 @@ import json
import pandas as pd
from sqlalchemy import BigInteger, Float, inspect, Text
+import superset.utils.database as database_utils
from superset import db
-from superset.utils import core as utils
from .helpers import get_example_data, get_table_connector_registry
@@ -29,7 +29,7 @@ def load_sf_population_polygons(
    only_metadata: bool = False, force: bool = False
) -> None:
    tbl_name = "sf_population_polygons"
-    database = utils.get_example_database()
+    database = database_utils.get_example_database()
    engine = database.get_sqla_engine()
    schema = inspect(engine).default_schema_name
    table_exists = database.has_table_by_name(tbl_name)

View File

@@ -23,6 +23,7 @@ import pandas as pd
from sqlalchemy import DateTime, inspect, String
from sqlalchemy.sql import column
+import superset.utils.database
from superset import app, db
from superset.connectors.sqla.models import SqlMetric
from superset.models.dashboard import Dashboard
@@ -46,7 +47,7 @@ def load_world_bank_health_n_pop( # pylint: disable=too-many-locals, too-many-s
) -> None:
    """Loads the world bank health dataset, slices and a dashboard"""
    tbl_name = "wb_health_population"
-    database = utils.get_example_database()
+    database = superset.utils.database.get_example_database()
    engine = database.get_sqla_engine()
    schema = inspect(engine).default_schema_name
    table_exists = database.has_table_by_name(tbl_name)

View File

@@ -86,7 +86,6 @@ from sqlalchemy.types import TEXT, TypeDecorator, TypeEngine
from typing_extensions import TypedDict, TypeGuard
from superset.constants import (
-    EXAMPLES_DB_UUID,
    EXTRA_FORM_DATA_APPEND_KEYS,
    EXTRA_FORM_DATA_OVERRIDE_EXTRA_KEYS,
    EXTRA_FORM_DATA_OVERRIDE_REGULAR_MAPPINGS,
@@ -107,6 +106,7 @@ from superset.typing import (
    FormData,
    Metric,
)
+from superset.utils.database import get_example_database
from superset.utils.dates import datetime_to_epoch, EPOCH
from superset.utils.hashing import md5_sha_from_dict, md5_sha_from_str
@@ -117,8 +117,6 @@ except ImportError:
if TYPE_CHECKING:
    from superset.connectors.base.models import BaseColumn, BaseDatasource
-    from superset.models.core import Database
logging.getLogger("MARKDOWN").setLevel(logging.INFO)
logger = logging.getLogger(__name__)
@@ -1207,49 +1205,6 @@ def user_label(user: User) -> Optional[str]:
    return None
-def get_or_create_db(
-    database_name: str, sqlalchemy_uri: str, always_create: Optional[bool] = True
-) -> "Database":
-    # pylint: disable=import-outside-toplevel
-    from superset import db
-    from superset.models import core as models
-    database = (
-        db.session.query(models.Database).filter_by(database_name=database_name).first()
-    )
-    # databases with a fixed UUID
-    uuids = {
-        "examples": EXAMPLES_DB_UUID,
-    }
-    if not database and always_create:
-        logger.info("Creating database reference for %s", database_name)
-        database = models.Database(
-            database_name=database_name, uuid=uuids.get(database_name)
-        )
-        db.session.add(database)
-    if database:
-        database.set_sqlalchemy_uri(sqlalchemy_uri)
-        db.session.commit()
-    return database
-def get_example_database() -> "Database":
-    db_uri = (
-        current_app.config.get("SQLALCHEMY_EXAMPLES_URI")
-        or current_app.config["SQLALCHEMY_DATABASE_URI"]
-    )
-    return get_or_create_db("examples", db_uri)
-def get_main_database() -> "Database":
-    db_uri = current_app.config["SQLALCHEMY_DATABASE_URI"]
-    return get_or_create_db("main", db_uri)
def get_example_default_schema() -> Optional[str]:
    """
    Return the default schema of the examples database, if any.

View File

@@ -0,0 +1,74 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+import logging
+from typing import Optional, TYPE_CHECKING
+from flask import current_app
+from superset.constants import EXAMPLES_DB_UUID
+if TYPE_CHECKING:
+    from superset.connectors.sqla.models import Database
+logging.getLogger("MARKDOWN").setLevel(logging.INFO)
+logger = logging.getLogger(__name__)
+# TODO: duplicate code with DatabaseDao, below function should be moved or use dao
+def get_or_create_db(
+    database_name: str, sqlalchemy_uri: str, always_create: Optional[bool] = True
+) -> Database:
+    # pylint: disable=import-outside-toplevel
+    from superset import db
+    from superset.models import core as models
+    database = (
+        db.session.query(models.Database).filter_by(database_name=database_name).first()
+    )
+    # databases with a fixed UUID
+    uuids = {
+        "examples": EXAMPLES_DB_UUID,
+    }
+    if not database and always_create:
+        logger.info("Creating database reference for %s", database_name)
+        database = models.Database(
+            database_name=database_name, uuid=uuids.get(database_name)
+        )
+        db.session.add(database)
+    if database:
+        database.set_sqlalchemy_uri(sqlalchemy_uri)
+        db.session.commit()
+    return database
+def get_example_database() -> Database:
+    db_uri = (
+        current_app.config.get("SQLALCHEMY_EXAMPLES_URI")
+        or current_app.config["SQLALCHEMY_DATABASE_URI"]
+    )
+    return get_or_create_db("examples", db_uri)
+def get_main_database() -> Database:
+    db_uri = current_app.config["SQLALCHEMY_DATABASE_URI"]
+    return get_or_create_db("main", db_uri)
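
For orientation, a small usage sketch of the new module as it is consumed elsewhere in this commit; the helper names and config keys come from the code above, a Flask app context is assumed, and the "reporting" database name and URI in the last line are purely illustrative:

    from superset.utils.database import (
        get_example_database,
        get_main_database,
        get_or_create_db,
    )

    # get_example_database() prefers SQLALCHEMY_EXAMPLES_URI and falls back to
    # SQLALCHEMY_DATABASE_URI; get_main_database() always uses the latter.
    examples_db = get_example_database()
    main_db = get_main_database()

    # get_or_create_db() looks the Database row up by name, creates it if missing
    # (unless always_create=False), and (re)sets its SQLAlchemy URI.
    reporting_db = get_or_create_db("reporting", "postgresql://user:pass@host/reporting")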

View File

@@ -181,7 +181,7 @@ def add_data(
    :param bool append: if the table already exists, append data or replace?
    """
    # pylint: disable=import-outside-toplevel
-    from superset.utils.core import get_example_database
+    from superset.utils.database import get_example_database
    database = get_example_database()
    table_exists = database.has_table_by_name(table_name)

View File

@@ -42,7 +42,7 @@ from superset.connectors.druid.models import DruidDatasource
from superset.connectors.sqla.models import SqlaTable
from superset.models import core as models
from superset.models.datasource_access_request import DatasourceAccessRequest
-from superset.utils.core import get_example_database
+from superset.utils.database import get_example_database
from .base_tests import SupersetTestCase

View File

@@ -22,6 +22,7 @@ from unittest.mock import patch
import pytest
from sqlalchemy.orm import Session
+import superset.utils.database
from superset import db
from superset.exceptions import SupersetException
from superset.models.alerts import Alert, AlertLog, SQLObservation
@@ -56,7 +57,7 @@ logger = logging.getLogger(__name__)
@pytest.yield_fixture(scope="module")
def setup_database():
    with app.app_context():
-        example_database = utils.get_example_database()
+        example_database = superset.utils.database.get_example_database()
        example_database.get_sqla_engine().execute(
            "CREATE TABLE test_table AS SELECT 1 as first, 2 as second"
        )
@@ -88,7 +89,7 @@ def create_alert(
        recipients="recipient1@superset.com",
        slack_channel="#test_channel",
        sql=sql,
-        database_id=utils.get_example_database().id,
+        database_id=superset.utils.database.get_example_database().id,
        validator_type=validator_type,
        validator_config=validator_config,
    )

View File

@@ -45,7 +45,8 @@ from superset.models.slice import Slice
from superset.models.core import Database
from superset.models.dashboard import Dashboard
from superset.models.datasource_access_request import DatasourceAccessRequest
-from superset.utils.core import get_example_database, get_example_default_schema
+from superset.utils.core import get_example_default_schema
+from superset.utils.database import get_example_database
from superset.views.base_api import BaseSupersetModelRestApi
FAKE_DB_NAME = "fake_db_100"

View File

@@ -44,7 +44,8 @@ from superset.errors import ErrorLevel, SupersetErrorType
from superset.extensions import celery_app
from superset.models.sql_lab import Query
from superset.sql_parse import ParsedQuery, CtasMethod
-from superset.utils.core import get_example_database, backend
+from superset.utils.core import backend
+from superset.utils.database import get_example_database
CELERY_SLEEP_TIME = 6
QUERY = "SELECT name FROM birth_names LIMIT 1"

View File

@@ -47,11 +47,10 @@ from superset.models.slice import Slice
from superset.typing import AdhocColumn
from superset.utils.core import (
    AnnotationType,
-    get_example_database,
    get_example_default_schema,
-    get_main_database,
    AdhocMetricExpressionType,
)
+from superset.utils.database import get_example_database, get_main_database
from superset.common.chart_data import ChartDataResultFormat, ChartDataResultType
from tests.common.query_context_generator import ANNOTATION_LAYERS

View File

@@ -24,7 +24,7 @@ from tests.integration_tests.test_app import app
from superset import db
from superset.connectors.sqla.models import SqlaTable
-from superset.utils.core import get_or_create_db
+from superset.utils.database import get_or_create_db
FULL_DTTM_DEFAULTS_EXAMPLE = {
    "main_dttm_col": "id",

View File

@@ -25,8 +25,8 @@ from unittest.mock import patch
from tests.integration_tests.test_app import app
from superset import db
from superset.extensions import feature_flag_manager
-from superset.utils.core import get_example_database, json_dumps_w_dates
+from superset.utils.core import json_dumps_w_dates
+from superset.utils.database import get_example_database
CTAS_SCHEMA_NAME = "sqllab_test_db"
ADMIN_SCHEMA_NAME = "admin_database"

View File

@@ -25,6 +25,8 @@ import json
import logging
from typing import Dict, List
from urllib.parse import quote
+import superset.utils.database
from tests.integration_tests.fixtures.birth_names_dashboard import (
    load_birth_names_dashboard_with_slices,
    load_birth_names_data,
@@ -41,7 +43,7 @@ import pandas as pd
import sqlalchemy as sqla
from sqlalchemy.exc import SQLAlchemyError
from superset.models.cache import CacheKey
-from superset.utils.core import get_example_database
+from superset.utils.database import get_example_database
from tests.integration_tests.conftest import with_feature_flags
from tests.integration_tests.fixtures.energy_dashboard import (
    load_energy_table_with_slice,
@@ -154,7 +156,7 @@ class TestCore(SupersetTestCase):
        self.assertEqual(cache_key_with_groupby, viz.cache_key(qobj))
    def test_get_superset_tables_not_allowed(self):
-        example_db = utils.get_example_database()
+        example_db = superset.utils.database.get_example_database()
        schema_name = self.default_schema_backend_map[example_db.backend]
        self.login(username="gamma")
        uri = f"superset/tables/{example_db.id}/{schema_name}/undefined/"
@@ -162,7 +164,7 @@ class TestCore(SupersetTestCase):
        self.assertEqual(rv.status_code, 404)
    def test_get_superset_tables_substr(self):
-        example_db = utils.get_example_database()
+        example_db = superset.utils.database.get_example_database()
        if example_db.backend in {"presto", "hive"}:
            # TODO: change table to the real table that is in examples.
            return
@@ -471,7 +473,7 @@ class TestCore(SupersetTestCase):
        # need to temporarily allow sqlite dbs, teardown will undo this
        app.config["PREVENT_UNSAFE_DB_CONNECTIONS"] = False
        self.login(username=username)
-        database = utils.get_example_database()
+        database = superset.utils.database.get_example_database()
        # validate that the endpoint works with the password-masked sqlalchemy uri
        data = json.dumps(
            {
@@ -560,7 +562,7 @@ class TestCore(SupersetTestCase):
        self.assertEqual(expected_body, response_body)
    def test_custom_password_store(self):
-        database = utils.get_example_database()
+        database = superset.utils.database.get_example_database()
        conn_pre = sqla.engine.url.make_url(database.sqlalchemy_uri_decrypted)
        def custom_password_store(uri):
@@ -578,13 +580,13 @@ class TestCore(SupersetTestCase):
        # validate that sending a password-masked uri does not over-write the decrypted
        # uri
        self.login(username=username)
-        database = utils.get_example_database()
+        database = superset.utils.database.get_example_database()
        sqlalchemy_uri_decrypted = database.sqlalchemy_uri_decrypted
        url = "databaseview/edit/{}".format(database.id)
        data = {k: database.__getattribute__(k) for k in DatabaseView.add_columns}
        data["sqlalchemy_uri"] = database.safe_sqlalchemy_uri()
        self.client.post(url, data=data)
-        database = utils.get_example_database()
+        database = superset.utils.database.get_example_database()
        self.assertEqual(sqlalchemy_uri_decrypted, database.sqlalchemy_uri_decrypted)
        # Need to clean up after ourselves
@@ -737,14 +739,14 @@ class TestCore(SupersetTestCase):
    @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
    def test_extra_table_metadata(self):
        self.login()
-        example_db = utils.get_example_database()
+        example_db = superset.utils.database.get_example_database()
        schema = "default" if example_db.backend in {"presto", "hive"} else "superset"
        self.get_json_resp(
            f"/superset/extra_table_metadata/{example_db.id}/birth_names/{schema}/"
        )
    def test_templated_sql_json(self):
-        if utils.get_example_database().backend == "presto":
+        if superset.utils.database.get_example_database().backend == "presto":
            # TODO: make it work for presto
            return
        self.login()
@@ -1222,7 +1224,7 @@ class TestCore(SupersetTestCase):
    @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
    def test_select_star(self):
        self.login(username="admin")
-        examples_db = utils.get_example_database()
+        examples_db = superset.utils.database.get_example_database()
        resp = self.get_resp(f"/superset/select_star/{examples_db.id}/birth_names")
        self.assertIn("gender", resp)
@@ -1231,7 +1233,7 @@ class TestCore(SupersetTestCase):
        Database API: Test get select star not allowed
        """
        self.login(username="gamma")
-        example_db = utils.get_example_database()
+        example_db = superset.utils.database.get_example_database()
        resp = self.client.get(f"/superset/select_star/{example_db.id}/birth_names")
        self.assertEqual(resp.status_code, 403)
@@ -1467,7 +1469,7 @@ class TestCore(SupersetTestCase):
    def test_virtual_table_explore_visibility(self):
        # test that default visibility it set to True
-        database = utils.get_example_database()
+        database = superset.utils.database.get_example_database()
        self.assertEqual(database.allows_virtual_table_explore, True)
        # test that visibility is disabled when extra is set to False
@@ -1489,8 +1491,8 @@ class TestCore(SupersetTestCase):
        self.assertEqual(database.allows_virtual_table_explore, True)
    def test_explore_database_id(self):
-        database = utils.get_example_database()
-        explore_database = utils.get_example_database()
+        database = superset.utils.database.get_example_database()
+        explore_database = superset.utils.database.get_example_database()
        # test that explore_database_id is the regular database
        # id if none is set in the extra

View File

@@ -24,7 +24,7 @@ from sqlalchemy.sql import func
import tests.integration_tests.test_app
from superset import db
from superset.models.core import CssTemplate
-from superset.utils.core import get_example_database
+from superset.utils.database import get_example_database
from tests.integration_tests.base_tests import SupersetTestCase

View File

@@ -27,6 +27,7 @@ from unittest import mock
import pandas as pd
import pytest
+import superset.utils.database
from superset.sql_parse import Table
from tests.integration_tests.conftest import ADMIN_SCHEMA_NAME
from tests.integration_tests.test_app import app  # isort:skip
@@ -61,11 +62,11 @@ def setup_csv_upload():
    with app.app_context():
        login(test_client, username="admin")
-        upload_db = utils.get_or_create_db(
+        upload_db = superset.utils.database.get_or_create_db(
            CSV_UPLOAD_DATABASE, app.config["SQLALCHEMY_EXAMPLES_URI"]
        )
        extra = upload_db.get_extra()
-        extra["explore_database_id"] = utils.get_example_database().id
+        extra["explore_database_id"] = superset.utils.database.get_example_database().id
        upload_db.extra = json.dumps(extra)
        upload_db.allow_file_upload = True
        db.session.commit()
@@ -275,7 +276,7 @@ def test_import_csv_explore_database(setup_csv_upload, create_csv_files):
    resp = upload_csv(CSV_FILENAME1, CSV_UPLOAD_TABLE_W_EXPLORE)
    assert f'CSV file "{CSV_FILENAME1}" uploaded to table "{full_table_name}"' in resp
    table = SupersetTestCase.get_table(name=CSV_UPLOAD_TABLE_W_EXPLORE)
-    assert table.database_id == utils.get_example_database().id
+    assert table.database_id == superset.utils.database.get_example_database().id
@pytest.mark.usefixtures("setup_csv_upload")

View File

@@ -43,7 +43,7 @@ from superset.db_engine_specs.hana import HanaEngineSpec
from superset.errors import SupersetError
from superset.models.core import Database, ConfigurationMethod
from superset.models.reports import ReportSchedule, ReportScheduleType
-from superset.utils.core import get_example_database, get_main_database
+from superset.utils.database import get_example_database, get_main_database
from tests.integration_tests.base_tests import SupersetTestCase
from tests.integration_tests.fixtures.birth_names_dashboard import (
    load_birth_names_dashboard_with_slices,

View File

@@ -45,7 +45,8 @@ from superset.exceptions import (
    SupersetTimeoutException,
)
from superset.models.core import Database
-from superset.utils.core import backend, get_example_database
+from superset.utils.core import backend
+from superset.utils.database import get_example_database
from tests.integration_tests.base_tests import SupersetTestCase
from tests.integration_tests.fixtures.birth_names_dashboard import (
    load_birth_names_dashboard_with_slices,

View File

@@ -35,12 +35,8 @@ from superset.dao.exceptions import (
)
from superset.extensions import db, security_manager
from superset.models.core import Database
-from superset.utils.core import (
-    backend,
-    get_example_database,
-    get_example_default_schema,
-    get_main_database,
-)
+from superset.utils.core import backend, get_example_default_schema
+from superset.utils.database import get_example_database, get_main_database
from superset.utils.dict_import_export import export_to_dict
from tests.integration_tests.base_tests import SupersetTestCase
from tests.integration_tests.conftest import CTAS_SCHEMA_NAME

View File

@@ -30,7 +30,8 @@ from superset.datasets.commands.exceptions import DatasetNotFoundError
from superset.datasets.commands.export import ExportDatasetsCommand
from superset.datasets.commands.importers import v0, v1
from superset.models.core import Database
-from superset.utils.core import get_example_database, get_example_default_schema
+from superset.utils.core import get_example_default_schema
+from superset.utils.database import get_example_database
from tests.integration_tests.base_tests import SupersetTestCase
from tests.integration_tests.fixtures.energy_dashboard import (
    load_energy_table_data,

View File

@@ -27,7 +27,8 @@ from superset.connectors.sqla.models import SqlaTable
from superset.datasets.commands.exceptions import DatasetNotFoundError
from superset.exceptions import SupersetGenericDBErrorException
from superset.models.core import Database
-from superset.utils.core import get_example_database, get_example_default_schema
+from superset.utils.core import get_example_default_schema
+from superset.utils.database import get_example_database
from tests.integration_tests.base_tests import db_insert_temp_object, SupersetTestCase
from tests.integration_tests.fixtures.birth_names_dashboard import (
    load_birth_names_dashboard_with_slices,

View File

@@ -31,7 +31,7 @@ from superset.db_engine_specs.mysql import MySQLEngineSpec
from superset.db_engine_specs.sqlite import SqliteEngineSpec
from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
from superset.sql_parse import ParsedQuery
-from superset.utils.core import get_example_database
+from superset.utils.database import get_example_database
from tests.integration_tests.db_engine_specs.base_tests import TestDbEngineSpec
from tests.integration_tests.test_app import app

View File

@@ -31,7 +31,7 @@ from superset.connectors.druid.models import (
    DruidCluster,
)
from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn
-from superset.utils.core import get_example_database
+from superset.utils.database import get_example_database
from superset.utils.dict_import_export import export_to_dict
from .base_tests import SupersetTestCase

View File

@@ -26,7 +26,8 @@ from superset.connectors.sqla.models import SqlaTable
from superset.models.core import Database
from superset.models.dashboard import Dashboard
from superset.models.slice import Slice
-from superset.utils.core import get_example_database, get_example_default_schema
+from superset.utils.core import get_example_default_schema
+from superset.utils.database import get_example_database
from tests.common.example_data_generator.birth_names.birth_names_generator_factory import (
    BirthNamesGeneratorFactory,
)

View File

@@ -17,7 +17,8 @@
"""Fixtures for test_datasource.py"""
from typing import Any, Dict
-from superset.utils.core import get_example_database, get_example_default_schema
+from superset.utils.core import get_example_default_schema
+from superset.utils.database import get_example_database
def get_datasource_post() -> Dict[str, Any]:

View File

@@ -24,7 +24,8 @@ from sqlalchemy import column, Float, String
from superset import db
from superset.connectors.sqla.models import SqlaTable, SqlMetric
from superset.models.slice import Slice
-from superset.utils.core import get_example_database, get_example_default_schema
+from superset.utils.core import get_example_default_schema
+from superset.utils.database import get_example_database
from tests.integration_tests.dashboard_utils import create_slice, create_table_metadata
from tests.integration_tests.test_app import app

View File

@@ -22,7 +22,8 @@ from superset import db
from superset.connectors.sqla.models import SqlaTable
from superset.models.dashboard import Dashboard
from superset.models.slice import Slice
-from superset.utils.core import get_example_database, get_example_default_schema
+from superset.utils.core import get_example_default_schema
+from superset.utils.database import get_example_database
from tests.integration_tests.dashboard_utils import (
    create_dashboard,
    create_slice,

View File

@@ -29,7 +29,8 @@ from superset.connectors.sqla.models import SqlaTable
from superset.models.core import Database
from superset.models.dashboard import Dashboard
from superset.models.slice import Slice
-from superset.utils.core import get_example_database, get_example_default_schema
+from superset.utils.core import get_example_default_schema
+from superset.utils.database import get_example_database
from tests.integration_tests.dashboard_utils import (
    create_dashboard,
    create_table_metadata,

View File

@@ -45,7 +45,8 @@ from superset.dashboards.commands.importers.v0 import import_chart, import_dashb
from superset.datasets.commands.importers.v0 import import_dataset
from superset.models.dashboard import Dashboard
from superset.models.slice import Slice
-from superset.utils.core import get_example_database, get_example_default_schema
+from superset.utils.core import get_example_default_schema
+from superset.utils.database import get_example_database
from tests.integration_tests.fixtures.world_bank_dashboard import (
    load_world_bank_dashboard_with_slices,

View File

@@ -22,7 +22,7 @@ from freezegun import freeze_time
from superset import security_manager
from superset.databases.commands.export import ExportDatabasesCommand
-from superset.utils.core import get_example_database
+from superset.utils.database import get_example_database
from tests.integration_tests.base_tests import SupersetTestCase

View File

@@ -22,6 +22,7 @@ from unittest import mock
import pytest
from sqlalchemy.dialects.postgresql import dialect
+import superset.utils.database
import tests.integration_tests.test_app
from superset import app
from superset.exceptions import SupersetTemplateException
@@ -264,77 +265,77 @@ class TestJinja2Context(SupersetTestCase):
        safe_proxy(func, {"foo": lambda: "bar"})
    def test_process_template(self) -> None:
-        maindb = utils.get_example_database()
+        maindb = superset.utils.database.get_example_database()
        sql = "SELECT '{{ 1+1 }}'"
        tp = get_template_processor(database=maindb)
        rendered = tp.process_template(sql)
        self.assertEqual("SELECT '2'", rendered)
    def test_get_template_kwarg(self) -> None:
-        maindb = utils.get_example_database()
+        maindb = superset.utils.database.get_example_database()
        s = "{{ foo }}"
        tp = get_template_processor(database=maindb, foo="bar")
        rendered = tp.process_template(s)
        self.assertEqual("bar", rendered)
    def test_template_kwarg(self) -> None:
-        maindb = utils.get_example_database()
+        maindb = superset.utils.database.get_example_database()
        s = "{{ foo }}"
        tp = get_template_processor(database=maindb)
        rendered = tp.process_template(s, foo="bar")
        self.assertEqual("bar", rendered)
    def test_get_template_kwarg_dict(self) -> None:
-        maindb = utils.get_example_database()
+        maindb = superset.utils.database.get_example_database()
        s = "{{ foo.bar }}"
        tp = get_template_processor(database=maindb, foo={"bar": "baz"})
        rendered = tp.process_template(s)
        self.assertEqual("baz", rendered)
    def test_template_kwarg_dict(self) -> None:
-        maindb = utils.get_example_database()
+        maindb = superset.utils.database.get_example_database()
        s = "{{ foo.bar }}"
        tp = get_template_processor(database=maindb)
        rendered = tp.process_template(s, foo={"bar": "baz"})
        self.assertEqual("baz", rendered)
    def test_get_template_kwarg_lambda(self) -> None:
-        maindb = utils.get_example_database()
+        maindb = superset.utils.database.get_example_database()
        s = "{{ foo() }}"
        tp = get_template_processor(database=maindb, foo=lambda: "bar")
        with pytest.raises(SupersetTemplateException):
            tp.process_template(s)
    def test_template_kwarg_lambda(self) -> None:
-        maindb = utils.get_example_database()
+        maindb = superset.utils.database.get_example_database()
        s = "{{ foo() }}"
        tp = get_template_processor(database=maindb)
        with pytest.raises(SupersetTemplateException):
            tp.process_template(s, foo=lambda: "bar")
    def test_get_template_kwarg_module(self) -> None:
-        maindb = utils.get_example_database()
+        maindb = superset.utils.database.get_example_database()
        s = "{{ dt(2017, 1, 1).isoformat() }}"
        tp = get_template_processor(database=maindb, dt=datetime)
        with pytest.raises(SupersetTemplateException):
            tp.process_template(s)
    def test_template_kwarg_module(self) -> None:
-        maindb = utils.get_example_database()
+        maindb = superset.utils.database.get_example_database()
        s = "{{ dt(2017, 1, 1).isoformat() }}"
        tp = get_template_processor(database=maindb)
        with pytest.raises(SupersetTemplateException):
            tp.process_template(s, dt=datetime)
    def test_get_template_kwarg_nested_module(self) -> None:
-        maindb = utils.get_example_database()
+        maindb = superset.utils.database.get_example_database()
        s = "{{ foo.dt }}"
        tp = get_template_processor(database=maindb, foo={"dt": datetime})
        with pytest.raises(SupersetTemplateException):
            tp.process_template(s)
    def test_template_kwarg_nested_module(self) -> None:
-        maindb = utils.get_example_database()
+        maindb = superset.utils.database.get_example_database()
        s = "{{ foo.dt }}"
        tp = get_template_processor(database=maindb)
        with pytest.raises(SupersetTemplateException):
@@ -353,7 +354,7 @@ class TestJinja2Context(SupersetTestCase):
    @mock.patch("superset.jinja_context.context_addons")
    def test_template_context_addons(self, addons_mock) -> None:
        addons_mock.return_value = {"datetime": datetime}
-        maindb = utils.get_example_database()
+        maindb = superset.utils.database.get_example_database()
        s = "SELECT '{{ datetime(2017, 1, 1).isoformat() }}'"
        tp = get_template_processor(database=maindb)
        rendered = tp.process_template(s)
@@ -414,7 +415,7 @@ class TestJinja2Context(SupersetTestCase):
    def test_custom_template_processors_ignored(self) -> None:
        """Test custom template processor is ignored for a difference backend
        database."""
-        maindb = utils.get_example_database()
+        maindb = superset.utils.database.get_example_database()
        sql = "SELECT '$DATE()'"
        tp = get_template_processor(database=maindb)
        rendered = tp.process_template(sql)

View File

@@ -36,7 +36,7 @@ from superset.common.db_query_status import QueryStatus
from superset.models.core import Database
from superset.models.slice import Slice
from superset.models.sql_types.base import literal_dttm_type_factory
-from superset.utils.core import get_example_database
+from superset.utils.database import get_example_database
from .base_tests import SupersetTestCase
from .fixtures.energy_dashboard import (

View File

@@ -29,7 +29,7 @@ import tests.integration_tests.test_app
from superset import db, security_manager
from superset.common.db_query_status import QueryStatus
from superset.models.core import Database
-from superset.utils.core import get_example_database, get_main_database
+from superset.utils.database import get_example_database, get_main_database
from superset.models.sql_lab import Query
from tests.integration_tests.base_tests import SupersetTestCase

View File

@@ -31,7 +31,7 @@ from superset import db
from superset.models.core import Database
from superset.models.core import FavStar
from superset.models.sql_lab import SavedQuery
-from superset.utils.core import get_example_database
+from superset.utils.database import get_example_database
from tests.integration_tests.base_tests import SupersetTestCase
from tests.integration_tests.fixtures.importexport import (

View File

@@ -30,7 +30,7 @@ from superset.queries.saved_queries.commands.export import ExportSavedQueriesCom
from superset.queries.saved_queries.commands.importers.v1 import (
    ImportSavedQueriesCommand,
)
-from superset.utils.core import get_example_database
+from superset.utils.database import get_example_database
from tests.integration_tests.base_tests import SupersetTestCase
from tests.integration_tests.fixtures.importexport import (
    database_config,

View File

@@ -38,7 +38,7 @@ from superset.models.reports import (
    ReportRecipientType,
    ReportState,
)
-from superset.utils.core import get_example_database
+from superset.utils.database import get_example_database
from tests.integration_tests.base_tests import SupersetTestCase
from tests.integration_tests.conftest import with_feature_flags
from tests.integration_tests.fixtures.birth_names_dashboard import (

View File

@@ -56,7 +56,7 @@ from superset.reports.commands.exceptions import (
)
from superset.reports.commands.execute import AsyncExecuteReportScheduleCommand
from superset.reports.commands.log_prune import AsyncPruneReportScheduleLogCommand
-from superset.utils.core import get_example_database
+from superset.utils.database import get_example_database
from tests.integration_tests.fixtures.birth_names_dashboard import (
    load_birth_names_dashboard_with_slices,
    load_birth_names_data,

View File

@@ -40,9 +40,9 @@ from superset.models.slice import Slice
from superset.sql_parse import Table
from superset.utils.core import (
    backend,
-    get_example_database,
    get_example_default_schema,
)
+from superset.utils.database import get_example_database
from superset.views.access_requests import AccessRequestsModelView
from .base_tests import SupersetTestCase

View File

@@ -30,7 +30,7 @@ from superset.sql_validators.presto_db import (
    PrestoDBSQLValidator,
    PrestoSQLValidationError,
)
-from superset.utils.core import get_example_database
+from superset.utils.database import get_example_database
from .base_tests import SupersetTestCase

View File

@@ -39,9 +39,9 @@ from superset.utils.core import (
    AdhocMetricExpressionType,
    FilterOperator,
    GenericDataType,
-    get_example_database,
    TemporalType,
)
+from superset.utils.database import get_example_database
from tests.integration_tests.fixtures.birth_names_dashboard import (
    load_birth_names_dashboard_with_slices,
    load_birth_names_data,

View File

@@ -50,9 +50,8 @@ from superset.sql_parse import CtasMethod
from superset.utils.core import (
    backend,
    datetime_to_epoch,
-    get_example_database,
-    get_main_database,
)
+from superset.utils.database import get_example_database, get_main_database
from .base_tests import SupersetTestCase
from .conftest import CTAS_SCHEMA_NAME

View File

@@ -30,7 +30,7 @@ import pytest
import pandas as pd
from superset.models.slice import Slice
-from superset.utils.core import get_example_database
+from superset.utils.database import get_example_database
from superset import db

View File

@@ -54,7 +54,6 @@ from superset.utils.core import (
    get_form_data_token,
    get_iterable,
    get_email_address_list,
-    get_or_create_db,
    get_stacktrace,
    json_int_dttm_ser,
    json_iso_dttm_ser,
@@ -72,6 +71,7 @@ from superset.utils.core import (
    zlib_compress,
    zlib_decompress,
)
+from superset.utils.database import get_or_create_db
from superset.utils import schema
from superset.utils.hashing import md5_sha_from_str
from superset.views.utils import (