fix(datasets): add custom filter for virtual datasets based on sql attribute (#11452)

This commit is contained in:
ʈᵃᵢ 2020-10-29 13:11:33 -07:00 committed by GitHub
parent ed3dca420f
commit f918ca14aa
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
5 changed files with 113 additions and 9 deletions

View File

@ -55,7 +55,8 @@ export interface Filter {
| 'title_or_slug' | 'title_or_slug'
| 'name_or_description' | 'name_or_description'
| 'all_text' | 'all_text'
| 'chart_all_text'; | 'chart_all_text'
| 'dataset_is_null_or_empty';
input?: 'text' | 'textarea' | 'select' | 'checkbox' | 'search'; input?: 'text' | 'textarea' | 'select' | 'checkbox' | 'search';
unfilteredLabel?: string; unfilteredLabel?: string;
selects?: SelectOption[]; selects?: SelectOption[];

View File

@ -242,7 +242,7 @@ const DatasetList: FunctionComponent<DatasetListProps> = ({
size: 'lg', size: 'lg',
}, },
{ {
accessor: 'is_sqllab_view', accessor: 'sql',
hidden: true, hidden: true,
disableSortBy: true, disableSortBy: true,
}, },
@ -350,13 +350,13 @@ const DatasetList: FunctionComponent<DatasetListProps> = ({
}, },
{ {
Header: t('Type'), Header: t('Type'),
id: 'is_sqllab_view', id: 'sql',
input: 'select', input: 'select',
operator: 'eq', operator: 'dataset_is_null_or_empty',
unfilteredLabel: 'All', unfilteredLabel: 'All',
selects: [ selects: [
{ label: 'Virtual', value: true }, { label: 'Virtual', value: false },
{ label: 'Physical', value: false }, { label: 'Physical', value: true },
], ],
}, },
{ {

View File

@ -48,6 +48,7 @@ from superset.datasets.commands.export import ExportDatasetsCommand
from superset.datasets.commands.refresh import RefreshDatasetCommand from superset.datasets.commands.refresh import RefreshDatasetCommand
from superset.datasets.commands.update import UpdateDatasetCommand from superset.datasets.commands.update import UpdateDatasetCommand
from superset.datasets.dao import DatasetDAO from superset.datasets.dao import DatasetDAO
from superset.datasets.filters import DatasetIsNullOrEmptyFilter
from superset.datasets.schemas import ( from superset.datasets.schemas import (
DatasetPostSchema, DatasetPostSchema,
DatasetPutSchema, DatasetPutSchema,
@ -160,6 +161,7 @@ class DatasetRestApi(BaseSupersetModelRestApi):
"owners": RelatedFieldFilter("first_name", FilterRelatedOwners), "owners": RelatedFieldFilter("first_name", FilterRelatedOwners),
"database": "database_name", "database": "database_name",
} }
search_filters = {"sql": [DatasetIsNullOrEmptyFilter]}
filter_rel_fields = {"database": [["id", DatabaseFilter, lambda: []]]} filter_rel_fields = {"database": [["id", DatabaseFilter, lambda: []]]}
allowed_rel_fields = {"database", "owners"} allowed_rel_fields = {"database", "owners"}
allowed_distinct_fields = {"schema"} allowed_distinct_fields = {"schema"}

View File

@ -0,0 +1,35 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from flask_babel import lazy_gettext as _
from sqlalchemy import not_, or_
from sqlalchemy.orm.query import Query
from superset.connectors.sqla.models import SqlaTable
from superset.views.base import BaseFilter
class DatasetIsNullOrEmptyFilter(BaseFilter):  # pylint: disable=too-few-public-methods
    """Custom search filter on ``SqlaTable.sql``.

    ``value=True`` keeps physical datasets (``sql`` is NULL or the empty
    string); ``value=False`` keeps virtual datasets (``sql`` is populated).
    Registered under the ``dataset_is_null_or_empty`` operator name.
    """

    name = _("Null or Empty")
    arg_name = "dataset_is_null_or_empty"

    def apply(self, query: Query, value: bool) -> Query:
        # A dataset is "physical" when it has no backing SQL statement.
        is_physical = or_(SqlaTable.sql.is_(None), SqlaTable.sql == "")
        return query.filter(is_physical if value else not_(is_physical))

View File

@ -17,7 +17,7 @@
"""Unit tests for Superset""" """Unit tests for Superset"""
import json import json
from io import BytesIO from io import BytesIO
from typing import List from typing import List, Optional
from unittest.mock import patch from unittest.mock import patch
from zipfile import is_zipfile from zipfile import is_zipfile
@ -43,17 +43,26 @@ from tests.conftest import CTAS_SCHEMA_NAME
class TestDatasetApi(SupersetTestCase): class TestDatasetApi(SupersetTestCase):
fixture_tables_names = ("ab_permission", "ab_permission_view", "ab_view_menu") fixture_tables_names = ("ab_permission", "ab_permission_view", "ab_view_menu")
fixture_virtual_table_names = ("sql_virtual_dataset_1", "sql_virtual_dataset_2")
@staticmethod @staticmethod
def insert_dataset( def insert_dataset(
table_name: str, schema: str, owners: List[int], database: Database table_name: str,
schema: str,
owners: List[int],
database: Database,
sql: Optional[str] = None,
) -> SqlaTable: ) -> SqlaTable:
obj_owners = list() obj_owners = list()
for owner in owners: for owner in owners:
user = db.session.query(security_manager.user_model).get(owner) user = db.session.query(security_manager.user_model).get(owner)
obj_owners.append(user) obj_owners.append(user)
table = SqlaTable( table = SqlaTable(
table_name=table_name, schema=schema, owners=obj_owners, database=database table_name=table_name,
schema=schema,
owners=obj_owners,
database=database,
sql=sql,
) )
db.session.add(table) db.session.add(table)
db.session.commit() db.session.commit()
@ -72,6 +81,29 @@ class TestDatasetApi(SupersetTestCase):
.all() .all()
) )
@pytest.fixture()
def create_virtual_datasets(self):
    """Insert virtual (SQL-backed) datasets for a test, deleting them after.

    Yields the inserted ``SqlaTable`` rows; teardown removes them so the
    database is left unchanged.
    """
    with self.create_app().app_context():
        admin = self.get_user("admin")
        main_db = get_main_database()
        datasets = [
            self.insert_dataset(
                table_name,
                "",
                [admin.id],
                main_db,
                "SELECT * from ab_view_menu;",
            )
            for table_name in self.fixture_virtual_table_names
        ]

        yield datasets

        # rollback changes
        for dataset in datasets:
            db.session.delete(dataset)
        db.session.commit()
@pytest.fixture() @pytest.fixture()
def create_datasets(self): def create_datasets(self):
with self.create_app().app_context(): with self.create_app().app_context():
@ -1101,3 +1133,37 @@ class TestDatasetApi(SupersetTestCase):
uri = f"api/v1/dataset/{table.id}/related_objects" uri = f"api/v1/dataset/{table.id}/related_objects"
rv = self.client.get(uri) rv = self.client.get(uri)
assert rv.status_code == 404 assert rv.status_code == 404
@pytest.mark.usefixtures("create_datasets", "create_virtual_datasets")
def test_get_datasets_custom_filter_sql(self):
    """
    Dataset API: Test custom dataset_is_null_or_empty filter for sql
    """

    def query_table_names(filter_value: bool):
        # Issue a list request filtered by the custom operator and return
        # the table names present in the response.
        arguments = {
            "filters": [
                {"col": "sql", "opr": "dataset_is_null_or_empty", "value": filter_value}
            ]
        }
        self.login(username="admin")
        uri = f"api/v1/dataset/?q={prison.dumps(arguments)}"
        rv = self.client.get(uri)
        assert rv.status_code == 200
        payload = json.loads(rv.data.decode("utf-8"))
        return [ds["table_name"] for ds in payload["result"]]

    # value=False selects virtual datasets (sql is populated).
    virtual_results = query_table_names(False)
    for table_name in self.fixture_virtual_table_names:
        assert table_name in virtual_results

    # value=True selects physical datasets (sql is NULL/empty).
    physical_results = query_table_names(True)
    for table_name in self.fixture_tables_names:
        assert table_name in physical_results