2022-05-13 12:28:57 -04:00
|
|
|
# Licensed to the Apache Software Foundation (ASF) under one
|
|
|
|
# or more contributor license agreements. See the NOTICE file
|
|
|
|
# distributed with this work for additional information
|
|
|
|
# regarding copyright ownership. The ASF licenses this file
|
|
|
|
# to you under the Apache License, Version 2.0 (the
|
|
|
|
# "License"); you may not use this file except in compliance
|
|
|
|
# with the License. You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing,
|
|
|
|
# software distributed under the License is distributed on an
|
|
|
|
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
|
|
|
# KIND, either express or implied. See the License for the
|
|
|
|
# specific language governing permissions and limitations
|
|
|
|
# under the License.
|
|
|
|
|
2023-06-01 15:01:10 -04:00
|
|
|
from collections.abc import Iterator
|
2022-05-13 12:28:57 -04:00
|
|
|
|
|
|
|
import pytest
|
|
|
|
from sqlalchemy.orm.session import Session
|
|
|
|
|
|
|
|
from superset.utils.core import DatasourceType
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture
def session_with_data(session: Session) -> Iterator[Session]:
    """Yield a session pre-populated with one of each datasource kind.

    Creates a Database plus one SqlaTable, Query, SavedQuery, Table and
    Dataset (ids all start at 1), flushes them, and yields the session so
    tests can look them up through ``DatasourceDAO``.
    """
    from superset.columns.models import Column
    from superset.connectors.sqla.models import SqlaTable, TableColumn
    from superset.datasets.models import Dataset
    from superset.models.core import Database
    from superset.models.sql_lab import Query, SavedQuery
    from superset.tables.models import Table

    # Build the schema for every mapped model on this in-memory engine.
    engine = session.get_bind()
    SqlaTable.metadata.create_all(engine)  # pylint: disable=no-member

    database = Database(database_name="my_database", sqlalchemy_uri="sqlite://")

    sqla_table = SqlaTable(
        table_name="my_sqla_table",
        columns=[TableColumn(column_name="a", type="INTEGER")],
        metrics=[],
        database=database,
    )

    query = Query(
        client_id="foo",
        database=database,
        tab_name="test_tab",
        sql_editor_id="test_editor_id",
        sql="select * from bar",
        select_sql="select * from bar",
        executed_sql="select * from bar",
        limit=100,
        select_as_cta=False,
        rows=100,
        error_message="none",
        results_key="abc",
    )

    saved_query = SavedQuery(database=database, sql="select * from foo")

    table = Table(
        name="my_table",
        schema="my_schema",
        catalog="my_catalog",
        database=database,
        columns=[],
    )

    dataset = Dataset(
        database=table.database,
        name="positions",
        expression="""
SELECT array_agg(array[longitude,latitude]) AS position
FROM my_catalog.my_schema.my_table
""",
        tables=[table],
        columns=[
            Column(
                name="position",
                expression="array_agg(array[longitude,latitude])",
            ),
        ],
    )

    # Register everything in one go; flush assigns ids without committing.
    session.add_all([dataset, table, saved_query, query, database, sqla_table])
    session.flush()
    yield session
|
|
|
|
|
|
|
|
|
2022-08-02 18:42:50 -04:00
|
|
|
def test_get_datasource_sqlatable(session_with_data: Session) -> None:
    """The DAO resolves ``DatasourceType.TABLE`` to the fixture's SqlaTable."""
    from superset.connectors.sqla.models import SqlaTable
    from superset.daos.datasource import DatasourceDAO

    result = DatasourceDAO.get_datasource(
        datasource_type=DatasourceType.TABLE,
        datasource_id=1,
    )

    # Use `actual == expected` ordering, consistent with the sibling tests
    # below (the original had Yoda-style `1 == result.id`).
    assert result.id == 1
    assert result.table_name == "my_sqla_table"
    assert isinstance(result, SqlaTable)
|
|
|
|
|
|
|
|
|
2022-08-02 18:42:50 -04:00
|
|
|
def test_get_datasource_query(session_with_data: Session) -> None:
    """The DAO resolves ``DatasourceType.QUERY`` to the fixture's Query."""
    from superset.daos.datasource import DatasourceDAO
    from superset.models.sql_lab import Query

    fetched = DatasourceDAO.get_datasource(
        datasource_type=DatasourceType.QUERY, datasource_id=1
    )

    assert isinstance(fetched, Query)
    assert fetched.id == 1
|
|
|
|
|
|
|
|
|
2022-08-02 18:42:50 -04:00
|
|
|
def test_get_datasource_saved_query(session_with_data: Session) -> None:
    """The DAO resolves ``DatasourceType.SAVEDQUERY`` to a SavedQuery."""
    from superset.daos.datasource import DatasourceDAO
    from superset.models.sql_lab import SavedQuery

    fetched = DatasourceDAO.get_datasource(
        datasource_type=DatasourceType.SAVEDQUERY,
        datasource_id=1,
    )

    assert isinstance(fetched, SavedQuery)
    assert fetched.id == 1
|
|
|
|
|
|
|
|
|
2022-08-02 18:42:50 -04:00
|
|
|
def test_get_datasource_sl_table(session_with_data: Session) -> None:
    """The DAO resolves ``DatasourceType.SLTABLE`` to the SL Table model."""
    from superset.daos.datasource import DatasourceDAO
    from superset.tables.models import Table

    fetched = DatasourceDAO.get_datasource(
        datasource_type=DatasourceType.SLTABLE,
        datasource_id=1,
    )

    assert isinstance(fetched, Table)
    assert fetched.id == 1
|
|
|
|
|
|
|
|
|
2022-08-02 18:42:50 -04:00
|
|
|
def test_get_datasource_sl_dataset(session_with_data: Session) -> None:
    """The DAO resolves ``DatasourceType.DATASET`` to the SL Dataset model."""
    from superset.daos.datasource import DatasourceDAO
    from superset.datasets.models import Dataset

    fetched = DatasourceDAO.get_datasource(
        datasource_type=DatasourceType.DATASET,
        datasource_id=1,
    )

    assert isinstance(fetched, Dataset)
    assert fetched.id == 1
|
|
|
|
|
|
|
|
|
2022-08-02 18:42:50 -04:00
|
|
|
def test_get_datasource_w_str_param(session_with_data: Session) -> None:
    """Plain-string datasource types work the same as DatasourceType enums."""
    from superset.connectors.sqla.models import SqlaTable
    from superset.daos.datasource import DatasourceDAO
    from superset.tables.models import Table

    # "table" maps to the legacy SqlaTable model.
    by_table = DatasourceDAO.get_datasource(
        datasource_type="table",
        datasource_id=1,
    )
    assert isinstance(by_table, SqlaTable)

    # "sl_table" maps to the shadow-layer Table model.
    by_sl_table = DatasourceDAO.get_datasource(
        datasource_type="sl_table",
        datasource_id=1,
    )
    assert isinstance(by_sl_table, Table)
|
|
|
|
|
|
|
|
|
2022-08-02 18:42:50 -04:00
|
|
|
def test_get_all_datasources(session_with_data: Session) -> None:
    """Exactly one SqlaTable datasource exists in the fixture data."""
    from superset.connectors.sqla.models import SqlaTable

    datasources = SqlaTable.get_all_datasources()

    assert len(datasources) == 1
|
2022-09-09 13:54:39 -04:00
|
|
|
|
|
|
|
|
|
|
|
def test_not_found_datasource(session_with_data: Session) -> None:
    """Looking up a nonexistent datasource id raises DatasourceNotFound."""
    from superset.daos.datasource import DatasourceDAO
    from superset.daos.exceptions import DatasourceNotFound

    with pytest.raises(DatasourceNotFound):
        # id 500000 was never created by the fixture.
        DatasourceDAO.get_datasource(datasource_type="table", datasource_id=500000)
|