chore: remove deprecated apis on superset, testconn, validate_sql_json, schemas_access_for_file_upload, extra_table_metadata (#24354)

This commit is contained in:
Daniel Vaz Gaspar 2023-06-13 11:03:19 +01:00 committed by GitHub
parent c04bd4c6a7
commit 6f145dfe36
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
7 changed files with 2 additions and 555 deletions

View File

@ -82,10 +82,8 @@
|can fetch datasource metadata on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|can override role permissions on Superset|:heavy_check_mark:|O|O|O|
|can created dashboards on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|can extra table metadata on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|can csrf token on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|can created slices on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|can testconn on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|can annotation json on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|can add slices on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|can fave dashboards on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
@ -96,7 +94,6 @@
|can warm up cache on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|can sqllab table viz on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
|can profile on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|can validate sql json on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|can available domains on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|can queries on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|can stop query on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
@ -202,7 +199,6 @@
|can edit on FilterSets|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|can this form get on ColumnarToDatabaseView|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|can this form post on ColumnarToDatabaseView|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|can schemas access for file upload on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|menu access on Upload a Columnar file|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|can export on Chart|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|can write on DashboardFilterStateRestApi|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|

View File

@ -34,6 +34,7 @@ assists people when migrating to a new version.
### Breaking Changes
- [24354](https://github.com/apache/superset/pull/24354): Removed deprecated APIs `/superset/testconn`, `/superset/validate_sql_json/`, `/superset/schemas_access_for_file_upload`, `/superset/extra_table_metadata`
- [24381](https://github.com/apache/superset/pull/24381): Removed deprecated API `/superset/available_domains/`
- [24359](https://github.com/apache/superset/pull/24359): Removed deprecated APIs `/superset/estimate_query_cost/..`, `/superset/results/..`, `/superset/sql_json/..`, `/superset/csv/..`
- [24345](https://github.com/apache/superset/pull/24345): Converts `ENABLE_BROAD_ACTIVITY_ACCESS` and `MENU_HIDE_USER_INFO` into feature flags and changes the value of `ENABLE_BROAD_ACTIVITY_ACCESS` to `False` as it's more secure.

View File

@ -18,8 +18,6 @@
from __future__ import annotations
import logging
import re
from contextlib import closing
from datetime import datetime
from typing import Any, Callable, cast
from urllib import parse
@ -36,7 +34,7 @@ from flask_appbuilder.security.decorators import (
from flask_appbuilder.security.sqla import models as ab_models
from flask_babel import gettext as __, lazy_gettext as _
from sqlalchemy import and_, or_
from sqlalchemy.exc import DBAPIError, NoSuchModuleError, SQLAlchemyError
from sqlalchemy.exc import SQLAlchemyError
from superset import (
app,
@ -66,15 +64,12 @@ from superset.dashboards.commands.importers.v0 import ImportDashboardsCommand
from superset.dashboards.dao import DashboardDAO
from superset.dashboards.permalink.commands.get import GetDashboardPermalinkCommand
from superset.dashboards.permalink.exceptions import DashboardPermalinkGetFailedError
from superset.databases.commands.exceptions import DatabaseInvalidError
from superset.databases.dao import DatabaseDAO
from superset.databases.utils import make_url_safe
from superset.datasets.commands.exceptions import DatasetNotFoundError
from superset.datasource.dao import DatasourceDAO
from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
from superset.exceptions import (
CacheLoadError,
CertificateException,
DatabaseNotFound,
SupersetCancelQueryException,
SupersetErrorException,
@ -93,9 +88,7 @@ from superset.models.dashboard import Dashboard
from superset.models.slice import Slice
from superset.models.sql_lab import Query, TabState
from superset.models.user_attributes import UserAttribute
from superset.security.analytics_db_safety import check_sqlalchemy_uri
from superset.sql_parse import ParsedQuery
from superset.sql_validators import get_validator_by_name
from superset.superset_typing import FlaskResponse
from superset.tasks.async_queries import load_explore_json_into_cache
from superset.utils import core as utils
@ -985,90 +978,6 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods
session.close()
return "SLICES ADDED"
@api
@has_access_api
@event_logger.log_this
@expose(
"/testconn",
methods=(
"GET",
"POST",
),
) # pylint: disable=no-self-use
@deprecated(new_target="/api/v1/database/test_connection/")
def testconn(self) -> FlaskResponse:
"""Test a SQLAlchemy connection described by the JSON request body.

Deprecated in favour of ``/api/v1/database/test_connection/``.

Reads ``name``, ``uri``, ``server_cert``, ``extra``, ``impersonate_user``
and ``encrypted_extra`` from the JSON payload, builds a transient
``Database`` instance and pings it. Returns ``'"OK"'`` on success, or a
JSON error response (mostly HTTP 400) describing the failure.
"""
db_name = request.json.get("name")
uri = request.json.get("uri")
try:
# Optionally reject URIs considered unsafe (e.g. local sqlite files).
if app.config["PREVENT_UNSAFE_DB_CONNECTIONS"]:
check_sqlalchemy_uri(make_url_safe(uri))
# if the database already exists in the database, only its safe
# (password-masked) URI would be shown in the UI and would be passed in the
# form data so if the database already exists and the form was submitted
# with the safe URI, we assume we should retrieve the decrypted URI to test
# the connection.
if db_name:
existing_database = (
db.session.query(Database)
.filter_by(database_name=db_name)
.one_or_none()
)
if existing_database and uri == existing_database.safe_sqlalchemy_uri():
uri = existing_database.sqlalchemy_uri_decrypted
# This is the database instance that will be tested. Note the extra fields
# are represented as JSON encoded strings in the model.
database = Database(
server_cert=request.json.get("server_cert"),
extra=json.dumps(request.json.get("extra", {})),
impersonate_user=request.json.get("impersonate_user"),
encrypted_extra=json.dumps(request.json.get("encrypted_extra", {})),
)
database.set_sqlalchemy_uri(uri)
database.db_engine_spec.mutate_db_for_connection_test(database)
# A successful ping ends the request; a failed ping is normalized into
# a DBAPIError so the handler below produces a uniform 400 response.
with database.get_sqla_engine_with_context() as engine:
with closing(engine.raw_connection()) as conn:
if engine.dialect.do_ping(conn):
return json_success('"OK"')
raise DBAPIError(None, None, None)
# Handler order matters: the most specific failures are reported first.
except CertificateException as ex:
logger.info("Certificate exception")
return json_error_response(ex.message)
except (NoSuchModuleError, ModuleNotFoundError):
logger.info("Invalid driver")
driver_name = make_url_safe(uri).drivername
return json_error_response(
_(
"Could not load database driver: %(driver_name)s",
driver_name=driver_name,
),
400,
)
except DatabaseInvalidError:
logger.info("Invalid URI")
return json_error_response(
_(
"Invalid connection string, a valid string usually follows:\n"
"'DRIVER://USER:PASSWORD@DB-HOST/DATABASE-NAME'"
)
)
except DBAPIError:
logger.warning("Connection failed")
return json_error_response(
_("Connection failed, please check your connection settings"), 400
)
except SupersetSecurityException as ex:
logger.warning("Stopped an unsafe database connection")
return json_error_response(_(str(ex)), 400)
except Exception as ex: # pylint: disable=broad-except
logger.warning("Unexpected error %s", type(ex).__name__)
return json_error_response(
_("Unexpected error occurred, please check your logs for details"), 400
)
@staticmethod
def get_user_activity_access_error(user_id: int) -> FlaskResponse | None:
try:
@ -1695,84 +1604,6 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods
return self.json_response("OK")
@has_access_api
@event_logger.log_this
@expose(
"/validate_sql_json/",
methods=(
"GET",
"POST",
),
)
@deprecated(new_target="/api/v1/database/<pk>/validate_sql/")
def validate_sql_json(
# pylint: disable=too-many-locals,no-self-use
self,
) -> FlaskResponse:
"""Validates that arbitrary sql is acceptable for the given database.
Returns a list of error/warning annotations as json.

Deprecated in favour of ``/api/v1/database/<pk>/validate_sql/``.
Reads ``sql``, ``database_id``, ``schema`` and ``templateParams`` from the
form data; each guard clause below short-circuits with a JSON 400 error.
"""
sql = request.form["sql"]
database_id = request.form["database_id"]
schema = request.form.get("schema") or None
template_params = json.loads(request.form.get("templateParams") or "{}")
# Validation of templated queries is explicitly unsupported.
if template_params is not None and len(template_params) > 0:
# TODO: factor the Database object out of template rendering
# or provide it as mydb so we can render template params
# without having to also persist a Query ORM object.
return json_error_response(
"SQL validation does not support template parameters", status=400
)
session = db.session()
mydb = session.query(Database).filter_by(id=database_id).one_or_none()
if not mydb:
return json_error_response(
f"Database with id {database_id} is missing.", status=400
)
# Resolve the validator configured for this database's engine, if any.
spec = mydb.db_engine_spec
validators_by_engine = app.config["SQL_VALIDATORS_BY_ENGINE"]
if not validators_by_engine or spec.engine not in validators_by_engine:
return json_error_response(
f"no SQL validator is configured for {spec.engine}", status=400
)
validator_name = validators_by_engine[spec.engine]
validator = get_validator_by_name(validator_name)
if not validator:
return json_error_response(
"No validator named {} found (configured for the {} engine)".format(
validator_name, spec.engine
)
)
try:
# Bound the validation run so a hung validator cannot stall the request.
timeout = config["SQLLAB_VALIDATION_TIMEOUT"]
timeout_msg = f"The query exceeded the {timeout} seconds timeout."
with utils.timeout(seconds=timeout, error_message=timeout_msg):
errors = validator.validate(sql, schema, mydb)
payload = json.dumps(
[err.to_dict() for err in errors],
default=utils.pessimistic_json_iso_dttm_ser,
ignore_nan=True,
encoding=None,
)
return json_success(payload)
except Exception as ex: # pylint: disable=broad-except
logger.exception(ex)
msg = _(
"%(validator)s was unable to check your query.\n"
"Please recheck your query.\n"
"Exception: %(ex)s",
validator=validator.name,
ex=ex,
)
# Return as a 400 if the database error message says we got a 4xx error
if re.search(r"([\W]|^)4\d{2}([\W]|$)", str(ex)):
return json_error_response(f"{msg}", status=400)
return json_error_response(f"{msg}")
@api
@handle_api_exception
@has_access
@ -2042,38 +1873,3 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods
@event_logger.log_this
def sqllab_history(self) -> FlaskResponse:
return super().render_app_template()
@api
@has_access_api
@event_logger.log_this
@expose("/schemas_access_for_file_upload")
# Fix: new_target was "api/v1/..." (relative); siblings use absolute paths.
@deprecated(new_target="/api/v1/database/{pk}/schemas_access_for_file_upload/")
def schemas_access_for_file_upload(self) -> FlaskResponse:
    """Return the schemas the current user may target for file uploads.

    Deprecated in favour of
    ``/api/v1/database/{pk}/schemas_access_for_file_upload/``.

    Query args:
        db_id: id of the database whose upload schemas are requested.

    Returns a JSON list of schema names, or a JSON error response when no
    ``db_id`` is supplied or the lookup fails.
    """
    if not request.args.get("db_id"):
        return json_error_response("No database is allowed for your file upload")

    # NOTE(review): a non-numeric db_id or unknown id raises out of int()/.one()
    # before the try block and surfaces as a 500 — confirm that is intended.
    db_id = int(request.args["db_id"])
    database = db.session.query(Database).filter_by(id=db_id).one()
    try:
        schemas_allowed = database.get_schema_access_for_file_upload()
        # Users with full access to the database see every allowed schema.
        if security_manager.can_access_database(database):
            return self.json_response(schemas_allowed)
        # the list schemas_allowed should not be empty here
        # and the list schemas_allowed_processed returned from security_manager
        # should not be empty either,
        # otherwise the database should have been filtered out
        # in CsvToDatabaseForm
        schemas_allowed_processed = security_manager.get_schemas_accessible_by_user(
            database, schemas_allowed, False
        )
        return self.json_response(schemas_allowed_processed)
    except Exception as ex:  # pylint: disable=broad-except
        logger.exception(ex)
        return json_error_response(
            "Failed to fetch schemas allowed for csv upload in this database! "
            "Please contact your Superset Admin!"
        )

View File

@ -398,33 +398,6 @@ class SupersetTestCase(TestCase):
db.session.delete(database)
db.session.commit()
def validate_sql(
    self,
    sql,
    client_id=None,
    username=None,
    raise_on_error=False,
    database_name="examples",
    template_params=None,
):
    """POST *sql* to the legacy /superset/validate_sql_json/ endpoint.

    Optionally re-authenticates as *username* first. Returns the parsed
    JSON response; raises when *raise_on_error* is set and the response
    carries an "error" key.
    """
    if username:
        self.logout()
        self.login(username=username)
    db_id = SupersetTestCase.get_database_by_name(database_name).id
    payload = {
        "database_id": db_id,
        "sql": sql,
        "client_id": client_id,
        "templateParams": template_params,
    }
    resp = self.get_json_resp(
        "/superset/validate_sql_json/",
        raise_on_error=False,
        data=payload,
    )
    if raise_on_error and "error" in resp:
        raise Exception("validate_sql failed")
    return resp
def get_dash_by_slug(self, dash_slug):
    """Return the first Dashboard with the given slug, or None."""
    session = db.session()
    query = session.query(Dashboard).filter_by(slug=dash_slug)
    return query.first()

View File

@ -342,98 +342,6 @@ class TestCore(SupersetTestCase):
assert self.get_resp("/healthcheck") == "OK"
assert self.get_resp("/ping") == "OK"
def test_testconn(self, username="admin"):
"""The legacy /superset/testconn endpoint accepts both the masked and
the decrypted SQLAlchemy URI of an existing database."""
# need to temporarily allow sqlite dbs, teardown will undo this
app.config["PREVENT_UNSAFE_DB_CONNECTIONS"] = False
self.login(username=username)
database = superset.utils.database.get_example_database()
# validate that the endpoint works with the password-masked sqlalchemy uri
data = json.dumps(
{
"uri": database.safe_sqlalchemy_uri(),
"name": "examples",
"impersonate_user": False,
}
)
response = self.client.post(
"/superset/testconn", data=data, content_type="application/json"
)
assert response.status_code == 200
assert response.headers["Content-Type"] == "application/json"
# validate that the endpoint works with the decrypted sqlalchemy uri
data = json.dumps(
{
"uri": database.sqlalchemy_uri_decrypted,
"name": "examples",
"impersonate_user": False,
}
)
response = self.client.post(
"/superset/testconn", data=data, content_type="application/json"
)
assert response.status_code == 200
assert response.headers["Content-Type"] == "application/json"
def test_testconn_failed_conn(self, username="admin"):
"""Unloadable drivers yield a 400 with a driver-specific error body."""
self.login(username=username)
# A URI whose scheme maps to no installed SQLAlchemy dialect.
data = json.dumps(
{"uri": "broken://url", "name": "examples", "impersonate_user": False}
)
response = self.client.post(
"/superset/testconn", data=data, content_type="application/json"
)
assert response.status_code == 400
assert response.headers["Content-Type"] == "application/json"
response_body = json.loads(response.data.decode("utf-8"))
expected_body = {"error": "Could not load database driver: broken"}
assert response_body == expected_body, "{} != {}".format(
response_body,
expected_body,
)
# A known dialect whose driver package is not installed.
data = json.dumps(
{
"uri": "mssql+pymssql://url",
"name": "examples",
"impersonate_user": False,
}
)
response = self.client.post(
"/superset/testconn", data=data, content_type="application/json"
)
assert response.status_code == 400
assert response.headers["Content-Type"] == "application/json"
response_body = json.loads(response.data.decode("utf-8"))
expected_body = {"error": "Could not load database driver: mssql+pymssql"}
assert response_body == expected_body, "{} != {}".format(
response_body,
expected_body,
)
def test_testconn_unsafe_uri(self, username="admin"):
"""With PREVENT_UNSAFE_DB_CONNECTIONS on, sqlite URIs are rejected (400)."""
self.login(username=username)
app.config["PREVENT_UNSAFE_DB_CONNECTIONS"] = True
response = self.client.post(
"/superset/testconn",
data=json.dumps(
{
"uri": "sqlite:///home/superset/unsafe.db",
"name": "unsafe",
"impersonate_user": False,
}
),
content_type="application/json",
)
self.assertEqual(400, response.status_code)
response_body = json.loads(response.data.decode("utf-8"))
expected_body = {
"error": "SQLiteDialect_pysqlite cannot be used as a data source for security reasons."
}
self.assertEqual(expected_body, response_body)
def test_custom_password_store(self):
database = superset.utils.database.get_example_database()
conn_pre = sqla.engine.url.make_url(database.sqlalchemy_uri_decrypted)
@ -551,15 +459,6 @@ class TestCore(SupersetTestCase):
assert "Charts" in self.get_resp("/chart/list/")
assert "Dashboards" in self.get_resp("/dashboard/list/")
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_extra_table_metadata(self):
    """Smoke-test the legacy extra_table_metadata endpoint on birth_names."""
    self.login()
    example_db = superset.utils.database.get_example_database()
    # presto/hive example data lives in "default"; other backends use "superset"
    if example_db.backend in {"presto", "hive"}:
        schema = "default"
    else:
        schema = "superset"
    self.get_json_resp(
        f"/superset/extra_table_metadata/{example_db.id}/birth_names/{schema}/"
    )
def test_templated_sql_json(self):
if superset.utils.database.get_example_database().backend == "presto":
# TODO: make it work for presto
@ -994,30 +893,6 @@ class TestCore(SupersetTestCase):
self.assertEqual(rv.status_code, 404)
self.assertEqual(data["error"], "Cached data not found")
# NOTE: @mock.patch decorators inject mocks bottom-up, so the last decorator
# (can_access_all_datasources) arrives as the first mock argument.
@mock.patch(
"superset.security.SupersetSecurityManager.get_schemas_accessible_by_user"
)
@mock.patch("superset.security.SupersetSecurityManager.can_access_database")
@mock.patch("superset.security.SupersetSecurityManager.can_access_all_datasources")
def test_schemas_access_for_csv_upload_endpoint(
self,
mock_can_access_all_datasources,
mock_can_access_database,
mock_schemas_accessible,
):
"""The legacy view returns the filtered schema list when the user lacks
broad database access."""
self.login(username="admin")
dbobj = self.create_fake_db()
mock_can_access_all_datasources.return_value = False
mock_can_access_database.return_value = False
mock_schemas_accessible.return_value = ["this_schema_is_allowed_too"]
data = self.get_json_resp(
url="/superset/schemas_access_for_file_upload?db_id={db_id}".format(
db_id=dbobj.id
)
)
assert data == ["this_schema_is_allowed_too"]
self.delete_fake_db()
def test_results_default_deserialization(self):
use_new_deserialization = False
data = [("a", 4, 4.0, "2019-08-18T16:39:16.660000")]

View File

@ -3617,44 +3617,3 @@ class TestDatabaseApi(SupersetTestCase):
return
self.assertEqual(rv.status_code, 422)
self.assertIn("Kaboom!", response["errors"][0]["message"])
@mock.patch(
    "superset.security.SupersetSecurityManager.get_schemas_accessible_by_user"
)
@mock.patch("superset.security.SupersetSecurityManager.can_access_database")
@mock.patch("superset.security.SupersetSecurityManager.can_access_all_datasources")
def test_schemas_access_for_csv_upload_not_found_endpoint(
    self,
    mock_can_access_all_datasources,
    mock_can_access_database,
    mock_schemas_accessible,
):
    """A non-numeric database pk yields a 404 from the REST endpoint."""
    self.login(username="gamma")
    self.create_fake_db()
    mock_can_access_database.return_value = False
    mock_schemas_accessible.return_value = ["this_schema_is_allowed_too"]
    # Fix: dropped the stray ``f`` prefix — the literal has no placeholders
    # (ruff F541). "120ff" is deliberately not a valid integer pk.
    rv = self.client.get("/api/v1/database/120ff/schemas_access_for_file_upload")
    self.assertEqual(rv.status_code, 404)
    self.delete_fake_db()
# NOTE: @mock.patch decorators inject mocks bottom-up, matching the
# parameter order below.
@mock.patch(
"superset.security.SupersetSecurityManager.get_schemas_accessible_by_user"
)
@mock.patch("superset.security.SupersetSecurityManager.can_access_database")
@mock.patch("superset.security.SupersetSecurityManager.can_access_all_datasources")
def test_schemas_access_for_csv_upload_endpoint(
self,
mock_can_access_all_datasources,
mock_can_access_database,
mock_schemas_accessible,
):
"""The REST endpoint wraps the filtered schema list in a "schemas" key."""
self.login(username="admin")
dbobj = self.create_fake_db()
mock_can_access_all_datasources.return_value = False
mock_can_access_database.return_value = False
mock_schemas_accessible.return_value = ["this_schema_is_allowed_too"]
data = self.get_json_resp(
url=f"/api/v1/database/{dbobj.id}/schemas_access_for_file_upload"
)
assert data == {"schemas": ["this_schema_is_allowed_too"]}
self.delete_fake_db()

View File

@ -19,12 +19,8 @@
import unittest
from unittest.mock import MagicMock, patch
import pytest
from pyhive.exc import DatabaseError
from superset import app
from superset.sql_validators import SQLValidationAnnotation
from superset.sql_validators.base import BaseSQLValidator
from superset.sql_validators.postgres import PostgreSQLValidator
from superset.sql_validators.presto_db import (
PrestoDBSQLValidator,
@ -34,139 +30,6 @@ from superset.utils.database import get_example_database
from .base_tests import SupersetTestCase
# Test fixture: force the Presto validator for every engine so the
# validation endpoint exercises it regardless of the example DB backend.
PRESTO_SQL_VALIDATORS_BY_ENGINE = {
"presto": "PrestoDBSQLValidator",
"sqlite": "PrestoDBSQLValidator",
"postgresql": "PrestoDBSQLValidator",
"mysql": "PrestoDBSQLValidator",
}
class TestSqlValidatorEndpoint(SupersetTestCase):
"""Testing for Sql Lab querytext validation endpoint"""
def tearDown(self):
self.logout()
# Clear the validator config entirely: no engine has a validator.
@patch.dict(
"superset.config.SQL_VALIDATORS_BY_ENGINE",
{},
clear=True,
)
def test_validate_sql_endpoint_noconfig(self):
"""Assert that validate_sql_json errors out when no validators are
configured for any db"""
self.login("admin")
resp = self.validate_sql(
"SELECT * FROM birth_names", client_id="1", raise_on_error=False
)
self.assertIn("error", resp)
self.assertIn("no SQL validator is configured", resp["error"])
# Patch the validator factory used by the view so a MagicMock validator
# is returned regardless of the configured validator name.
@patch("superset.views.core.get_validator_by_name")
@patch.dict(
"superset.config.SQL_VALIDATORS_BY_ENGINE",
PRESTO_SQL_VALIDATORS_BY_ENGINE,
clear=True,
)
def test_validate_sql_endpoint_mocked(self, get_validator_by_name):
"""Assert that, with a mocked validator, annotations make it back out
from the validate_sql_json endpoint as a list of json dictionaries"""
if get_example_database().backend == "hive":
pytest.skip("Hive validator is not implemented")
self.login("admin")
validator = MagicMock()
get_validator_by_name.return_value = validator
validator.validate.return_value = [
SQLValidationAnnotation(
message="I don't know what I expected, but it wasn't this",
line_number=4,
start_column=12,
end_column=42,
)
]
resp = self.validate_sql(
"SELECT * FROM somewhere_over_the_rainbow",
client_id="1",
raise_on_error=False,
)
self.assertEqual(1, len(resp))
self.assertIn("expected,", resp[0]["message"])
@patch("superset.views.core.get_validator_by_name")
@patch.dict(
"superset.config.SQL_VALIDATORS_BY_ENGINE",
PRESTO_SQL_VALIDATORS_BY_ENGINE,
clear=True,
)
def test_validate_sql_endpoint_mocked_params(self, get_validator_by_name):
"""Assert that, with a mocked validator, annotations make it back out
from the validate_sql_json endpoint as a list of json dictionaries"""
if get_example_database().backend == "hive":
pytest.skip("Hive validator is not implemented")
self.login("admin")
validator = MagicMock()
get_validator_by_name.return_value = validator
validator.validate.return_value = [
SQLValidationAnnotation(
message="This worked",
line_number=4,
start_column=12,
end_column=42,
)
]
# templateParams="null" parses to None and must not trigger the
# "template parameters unsupported" 400 path.
resp = self.validate_sql(
"SELECT * FROM somewhere_over_the_rainbow",
client_id="1",
raise_on_error=False,
template_params="null",
)
self.assertEqual(1, len(resp))
self.assertNotIn("error,", resp[0]["message"])
@patch("superset.views.core.get_validator_by_name")
@patch.dict(
"superset.config.SQL_VALIDATORS_BY_ENGINE",
PRESTO_SQL_VALIDATORS_BY_ENGINE,
clear=True,
)
def test_validate_sql_endpoint_failure(self, get_validator_by_name):
"""Assert that validate_sql_json errors out when the selected validator
raises an unexpected exception"""
self.login("admin")
validator = MagicMock()
get_validator_by_name.return_value = validator
validator.validate.side_effect = Exception("Kaboom!")
resp = self.validate_sql(
"SELECT * FROM birth_names", client_id="1", raise_on_error=False
)
# TODO(bkyryliuk): properly handle hive error
if get_example_database().backend == "hive":
assert resp["error"] == "no SQL validator is configured for hive"
else:
self.assertIn("error", resp)
self.assertIn("Kaboom!", resp["error"])
class TestBaseValidator(SupersetTestCase):
    """Checks the contract of the abstract base SQL validator."""

    def setUp(self):
        self.validator = BaseSQLValidator

    def test_validator_excepts(self):
        # The base class must refuse to validate anything.
        self.assertRaises(
            NotImplementedError, self.validator.validate, None, None, None
        )
class TestPrestoValidator(SupersetTestCase):
"""Testing for the prestodb sql validator"""
@ -236,22 +99,6 @@ class TestPrestoValidator(SupersetTestCase):
self.assertEqual(1, len(errors))
@patch.dict("superset.config.SQL_VALIDATORS_BY_ENGINE", {}, clear=True)
def test_validate_sql_endpoint(self):
    """With no validators configured the endpoint reports an error."""
    # Effectively an integration test: once a default validator exists
    # for sqlite this will fail, because the endpoint stops erroring out.
    self.login("admin")
    response = self.validate_sql(
        "SELECT * FROM birth_names", client_id="1", raise_on_error=False
    )
    self.assertIn("error", response)
    self.assertIn("no SQL validator is configured", response["error"])
class TestPostgreSQLValidator(SupersetTestCase):
def test_valid_syntax(self):