chore: remove deprecated apis estimate_query_cost, results, sql_json, csv (#24359)

Daniel Vaz Gaspar 2023-06-13 09:31:16 +01:00 committed by GitHub
parent ebca20b800
commit 93ba59d868
7 changed files with 4 additions and 493 deletions

View File

@@ -63,7 +63,6 @@
|can schemas access for csv upload on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|can user slices on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|can favstar on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|can estimate query cost on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|can import dashboards on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|can search queries on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
|can sqllab viz on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
@@ -73,7 +72,6 @@
|can publish on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|can csv on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
|can fave dashboards by username on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|can sql json on Superset|:heavy_check_mark:|O|O|:heavy_check_mark:|
|can slice on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|can sync druid source on Superset|:heavy_check_mark:|O|O|O|
|can explore on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
@@ -104,7 +102,6 @@
|can stop query on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
|can request access on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|can dashboard on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|can results on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|can post on TableSchemaView|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|can expanded on TableSchemaView|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|can delete on TableSchemaView|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|

View File

@@ -34,6 +34,7 @@ assists people when migrating to a new version.
### Breaking Changes
- [24359](https://github.com/apache/superset/pull/24359): Removed deprecated APIs `/superset/estimate_query_cost/..`, `/superset/results/..`, `/superset/sql_json/..`, `/superset/csv/..`
- [24345](https://github.com/apache/superset/pull/24345) Converts `ENABLE_BROAD_ACTIVITY_ACCESS` and `MENU_HIDE_USER_INFO` into feature flags and changes the value of `ENABLE_BROAD_ACTIVITY_ACCESS` to `False` as it's more secure.
- [24342](https://github.com/apache/superset/pull/24342): Removed deprecated API `/superset/tables/<int:db_id>/<schema>/...`
- [24335](https://github.com/apache/superset/pull/24335): Removed deprecated API `/superset/filter/<datasource_type>/<int:datasource_id>/<column>/`
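
The `@deprecated(new_target=...)` decorators in the removed code below point each old route at its `/api/v1/sqllab/` replacement. A minimal migration sketch, assuming a reachable Superset host and a valid bearer token (neither is part of this commit):

```python
# Hypothetical migration sketch: the removed /superset/sql_json/ call becomes
# a JSON POST to /api/v1/sqllab/execute/. Host, token, and database_id are
# assumptions for illustration only.
import requests

BASE = "http://localhost:8088"  # assumed Superset host
HEADERS = {"Authorization": "Bearer <access-token>"}  # assumed auth setup

# Removed route                                -> replacement (per @deprecated)
# POST /superset/sql_json/                     -> POST /api/v1/sqllab/execute/
# GET  /superset/results/<key>/                -> api/v1/sqllab/results/
# GET  /superset/csv/<client_id>               -> api/v1/sqllab/export/
# POST /superset/estimate_query_cost/<db_id>/  -> api/v1/sqllab/estimate/

resp = requests.post(
    f"{BASE}/api/v1/sqllab/execute/",
    json={"database_id": 1, "sql": "SELECT 1"},  # mirrors the old required fields
    headers=HEADERS,
    timeout=30,
)
resp.raise_for_status()
print(resp.json())
```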

View File

@@ -29,7 +29,6 @@ export const user: UserWithPermissionsAndRoles = {
],
sql_lab: [
['menu_access', 'SQL Lab'],
['can_sql_json', 'Superset'],
['can_search_queries', 'Superset'],
['can_csv', 'Superset'],
],

View File

@@ -235,7 +235,6 @@ class SupersetSecurityManager( # pylint: disable=too-many-public-methods
("can_execute_sql_query", "SQLLab"),
("can_estimate_query_cost", "SQL Lab"),
("can_export_csv", "SQLLab"),
("can_sql_json", "Superset"), # Deprecated permission remove on 3.0.0
("can_sqllab_history", "Superset"),
("can_sqllab_viz", "Superset"),
("can_sqllab_table_viz", "Superset"), # Deprecated permission remove on 3.0.0
@@ -713,6 +712,7 @@ class SupersetSecurityManager( # pylint: disable=too-many-public-methods
self.add_permission_view_menu("all_datasource_access", "all_datasource_access")
self.add_permission_view_menu("all_database_access", "all_database_access")
self.add_permission_view_menu("all_query_access", "all_query_access")
self.add_permission_view_menu("can_csv", "Superset")
self.add_permission_view_menu("can_share_dashboard", "Superset")
self.add_permission_view_menu("can_share_chart", "Superset")
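
Note the single addition in this file: `can_csv` is now registered explicitly, since the removed `csv` view no longer creates the permission as a side effect of being exposed. A hedged sketch of re-attaching it to a custom role after upgrading, using standard Flask-AppBuilder security-manager calls (the `Analyst` role name is an assumption):

```python
# Hypothetical sketch: ensure a custom role keeps the explicitly registered
# "can_csv" permission after the view-based registration goes away.
# Uses standard Flask-AppBuilder SecurityManager methods; "Analyst" is an
# assumed role name, not part of this commit.
from superset import security_manager

perm = security_manager.add_permission_view_menu("can_csv", "Superset")
role = security_manager.find_role("Analyst")
if role is not None and perm not in role.permissions:
    security_manager.add_permission_role(role, perm)
```

Run inside an application context (for example via `superset shell`).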

View File

@@ -21,11 +21,10 @@ import logging
import re
from contextlib import closing
from datetime import datetime
from typing import Any, Callable, cast, Optional
from typing import Any, Callable, cast
from urllib import parse
import backoff
import pandas as pd
import simplejson as json
from flask import abort, flash, g, redirect, render_template, request, Response
from flask_appbuilder import expose
@@ -46,8 +45,6 @@ from superset import (
db,
event_logger,
is_feature_enabled,
results_backend,
results_backend_use_msgpack,
security_manager,
sql_lab,
viz,
@@ -79,13 +76,11 @@ from superset.exceptions import (
CacheLoadError,
CertificateException,
DatabaseNotFound,
SerializationError,
SupersetCancelQueryException,
SupersetErrorException,
SupersetException,
SupersetGenericErrorException,
SupersetSecurityException,
SupersetTimeoutException,
)
from superset.explore.form_data.commands.create import CreateFormDataCommand
from superset.explore.form_data.commands.get import GetFormDataCommand
@@ -93,37 +88,17 @@ from superset.explore.form_data.commands.parameters import CommandParameters
from superset.explore.permalink.commands.get import GetExplorePermalinkCommand
from superset.explore.permalink.exceptions import ExplorePermalinkGetFailedError
from superset.extensions import async_query_manager, cache_manager
from superset.jinja_context import get_template_processor
from superset.models.core import Database, FavStar
from superset.models.dashboard import Dashboard
from superset.models.slice import Slice
from superset.models.sql_lab import Query, TabState
from superset.models.user_attributes import UserAttribute
from superset.queries.dao import QueryDAO
from superset.security.analytics_db_safety import check_sqlalchemy_uri
from superset.sql_lab import get_sql_results
from superset.sql_parse import ParsedQuery
from superset.sql_validators import get_validator_by_name
from superset.sqllab.command_status import SqlJsonExecutionStatus
from superset.sqllab.commands.execute import CommandResult, ExecuteSqlCommand
from superset.sqllab.exceptions import (
QueryIsForbiddenToAccessException,
SqlLabException,
)
from superset.sqllab.execution_context_convertor import ExecutionContextConvertor
from superset.sqllab.limiting_factor import LimitingFactor
from superset.sqllab.query_render import SqlQueryRenderImpl
from superset.sqllab.sql_json_executer import (
ASynchronousSqlJsonExecutor,
SqlJsonExecutor,
SynchronousSqlJsonExecutor,
)
from superset.sqllab.sqllab_execution_context import SqlJsonExecutionContext
from superset.sqllab.utils import apply_display_max_row_configuration_if_require
from superset.sqllab.validators import CanAccessQueryValidatorImpl
from superset.superset_typing import FlaskResponse
from superset.tasks.async_queries import load_explore_json_into_cache
from superset.utils import core as utils, csv
from superset.utils import core as utils
from superset.utils.async_query_manager import AsyncQueryTokenException
from superset.utils.cache import etag_cache
from superset.utils.core import DatasourceType, get_user_id, ReservedUrlParameters
@@ -144,9 +119,7 @@ from superset.views.base import (
validate_sqlatable,
)
from superset.views.log.dao import LogDAO
from superset.views.sql_lab.schemas import SqlJsonPayloadSchema
from superset.views.utils import (
_deserialize_results_payload,
bootstrap_user_data,
check_datasource_perms,
check_explore_cache_perms,
@@ -738,7 +711,6 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods
bootstrap_data = {
"can_add": slice_add_perm,
"can_download": slice_download_perm,
"datasource": sanitize_datasource_data(datasource_data),
"form_data": form_data,
"datasource_id": datasource_id,
@@ -1696,162 +1668,10 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods
)
return json_success(json.dumps(payload))
@has_access_api
@expose("/estimate_query_cost/<int:database_id>/", methods=("POST",))
@expose("/estimate_query_cost/<int:database_id>/<schema>/", methods=("POST",))
@event_logger.log_this
@deprecated(new_target="api/v1/sqllab/estimate/")
def estimate_query_cost( # pylint: disable=no-self-use
self, database_id: int, schema: str | None = None
) -> FlaskResponse:
mydb = db.session.query(Database).get(database_id)
sql = json.loads(request.form.get("sql", '""'))
if template_params := json.loads(request.form.get("templateParams") or "{}"):
template_processor = get_template_processor(mydb)
sql = template_processor.process_template(sql, **template_params)
timeout = SQLLAB_QUERY_COST_ESTIMATE_TIMEOUT
timeout_msg = f"The estimation exceeded the {timeout} seconds timeout."
try:
with utils.timeout(seconds=timeout, error_message=timeout_msg):
cost = mydb.db_engine_spec.estimate_query_cost(
mydb, schema, sql, utils.QuerySource.SQL_LAB
)
except SupersetTimeoutException as ex:
logger.exception(ex)
return json_errors_response([ex.error])
except Exception as ex: # pylint: disable=broad-except
return json_error_response(utils.error_msg_from_exception(ex))
spec = mydb.db_engine_spec
query_cost_formatters: dict[str, Any] = app.config[
"QUERY_COST_FORMATTERS_BY_ENGINE"
]
query_cost_formatter = query_cost_formatters.get(
spec.engine, spec.query_cost_formatter
)
cost = query_cost_formatter(cost)
return json_success(json.dumps(cost))
@expose("/theme/")
def theme(self) -> FlaskResponse:
return self.render_template("superset/theme.html")
@has_access_api
@expose("/results/<key>/")
@event_logger.log_this
@deprecated(new_target="api/v1/sqllab/results/")
def results(self, key: str) -> FlaskResponse:
return self.results_exec(key)
@staticmethod
def results_exec(key: str) -> FlaskResponse:
"""Serves a key off of the results backend
It is possible to pass the `rows` query argument to limit the number
of rows returned.
"""
if not results_backend:
raise SupersetErrorException(
SupersetError(
message=__("Results backend is not configured."),
error_type=SupersetErrorType.RESULTS_BACKEND_NOT_CONFIGURED_ERROR,
level=ErrorLevel.ERROR,
)
)
read_from_results_backend_start = now_as_float()
blob = results_backend.get(key)
stats_logger.timing(
"sqllab.query.results_backend_read",
now_as_float() - read_from_results_backend_start,
)
if not blob:
raise SupersetErrorException(
SupersetError(
message=__(
"Data could not be retrieved from the results backend. You "
"need to re-run the original query."
),
error_type=SupersetErrorType.RESULTS_BACKEND_ERROR,
level=ErrorLevel.ERROR,
),
status=410,
)
query = db.session.query(Query).filter_by(results_key=key).one_or_none()
if query is None:
raise SupersetErrorException(
SupersetError(
message=__(
"The query associated with these results could not be found. "
"You need to re-run the original query."
),
error_type=SupersetErrorType.RESULTS_BACKEND_ERROR,
level=ErrorLevel.ERROR,
),
status=404,
)
try:
query.raise_for_access()
except SupersetSecurityException as ex:
raise SupersetErrorException(
SupersetError(
message=__(
"You are not authorized to see this query. If you think this "
"is an error, please reach out to your administrator."
),
error_type=SupersetErrorType.QUERY_SECURITY_ACCESS_ERROR,
level=ErrorLevel.ERROR,
),
status=403,
) from ex
payload = utils.zlib_decompress(blob, decode=not results_backend_use_msgpack)
try:
obj = _deserialize_results_payload(
payload, query, cast(bool, results_backend_use_msgpack)
)
except SerializationError as ex:
raise SupersetErrorException(
SupersetError(
message=__(
"Data could not be deserialized from the results backend. The "
"storage format might have changed, rendering the old data "
"stake. You need to re-run the original query."
),
error_type=SupersetErrorType.RESULTS_BACKEND_ERROR,
level=ErrorLevel.ERROR,
),
status=404,
) from ex
if "rows" in request.args:
try:
rows = int(request.args["rows"])
except ValueError as ex:
raise SupersetErrorException(
SupersetError(
message=__(
"The provided `rows` argument is not a valid integer."
),
error_type=SupersetErrorType.INVALID_PAYLOAD_SCHEMA_ERROR,
level=ErrorLevel.ERROR,
),
status=400,
) from ex
obj = apply_display_max_row_configuration_if_require(obj, rows)
return json_success(
json.dumps(
obj, default=utils.json_iso_dttm_ser, ignore_nan=True, encoding=None
)
)
@has_access_api
@handle_api_exception
@expose("/stop_query/", methods=("POST",))
@@ -1968,155 +1788,6 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods
return json_error_response(f"{msg}", status=400)
return json_error_response(f"{msg}")
@has_access_api
@handle_api_exception
@event_logger.log_this
@expose("/sql_json/", methods=("POST",))
@deprecated(new_target="/api/v1/sqllab/execute/")
def sql_json(self) -> FlaskResponse:
if errors := SqlJsonPayloadSchema().validate(request.json):
return json_error_response(status=400, payload=errors)
try:
log_params = {
"user_agent": cast(Optional[str], request.headers.get("USER_AGENT"))
}
execution_context = SqlJsonExecutionContext(request.json)
command = self._create_sql_json_command(execution_context, log_params)
command_result: CommandResult = command.run()
return self._create_response_from_execution_context(command_result)
except SqlLabException as ex:
logger.error(ex.message)
self._set_http_status_into_Sql_lab_exception(ex)
payload = {"errors": [ex.to_dict()]}
return json_error_response(status=ex.status, payload=payload)
@staticmethod
def _create_sql_json_command(
execution_context: SqlJsonExecutionContext, log_params: dict[str, Any] | None
) -> ExecuteSqlCommand:
query_dao = QueryDAO()
sql_json_executor = Superset._create_sql_json_executor(
execution_context, query_dao
)
execution_context_convertor = ExecutionContextConvertor()
execution_context_convertor.set_max_row_in_display(
int(config.get("DISPLAY_MAX_ROW"))
)
return ExecuteSqlCommand(
execution_context,
query_dao,
DatabaseDAO(),
CanAccessQueryValidatorImpl(),
SqlQueryRenderImpl(get_template_processor),
sql_json_executor,
execution_context_convertor,
config["SQLLAB_CTAS_NO_LIMIT"],
log_params,
)
@staticmethod
def _create_sql_json_executor(
execution_context: SqlJsonExecutionContext, query_dao: QueryDAO
) -> SqlJsonExecutor:
sql_json_executor: SqlJsonExecutor
if execution_context.is_run_asynchronous():
sql_json_executor = ASynchronousSqlJsonExecutor(query_dao, get_sql_results)
else:
sql_json_executor = SynchronousSqlJsonExecutor(
query_dao,
get_sql_results,
config.get("SQLLAB_TIMEOUT"),
is_feature_enabled("SQLLAB_BACKEND_PERSISTENCE"),
)
return sql_json_executor
@staticmethod
def _set_http_status_into_Sql_lab_exception(ex: SqlLabException) -> None:
if isinstance(ex, QueryIsForbiddenToAccessException):
ex.status = 403
def _create_response_from_execution_context( # pylint: disable=invalid-name, no-self-use
self,
command_result: CommandResult,
) -> FlaskResponse:
status_code = 200
if command_result["status"] == SqlJsonExecutionStatus.QUERY_IS_RUNNING:
status_code = 202
return json_success(command_result["payload"], status_code)
@has_access
@event_logger.log_this
@expose("/csv/<client_id>")
@deprecated(new_target="api/v1/sqllab/export/")
def csv(self, client_id: str) -> FlaskResponse: # pylint: disable=no-self-use
"""Download the query results as csv."""
logger.info("Exporting CSV file [%s]", client_id)
query = db.session.query(Query).filter_by(client_id=client_id).one()
try:
query.raise_for_access()
except SupersetSecurityException as ex:
flash(ex.error.message)
return redirect("/")
blob = None
if results_backend and query.results_key:
logger.info("Fetching CSV from results backend [%s]", query.results_key)
blob = results_backend.get(query.results_key)
if blob:
logger.info("Decompressing")
payload = utils.zlib_decompress(
blob, decode=not results_backend_use_msgpack
)
obj = _deserialize_results_payload(
payload, query, cast(bool, results_backend_use_msgpack)
)
df = pd.DataFrame(
data=obj["data"],
dtype=object,
columns=[c["name"] for c in obj["columns"]],
)
logger.info("Using pandas to convert to CSV")
else:
logger.info("Running a query to turn into CSV")
if query.select_sql:
sql = query.select_sql
limit = None
else:
sql = query.executed_sql
limit = ParsedQuery(sql).limit
if limit is not None and query.limiting_factor in {
LimitingFactor.QUERY,
LimitingFactor.DROPDOWN,
LimitingFactor.QUERY_AND_DROPDOWN,
}:
# remove extra row from `increased_limit`
limit -= 1
df = query.database.get_df(sql, query.schema)[:limit]
csv_data = csv.df_to_escaped_csv(df, index=False, **config["CSV_EXPORT"])
quoted_csv_name = parse.quote(query.name)
response = CsvResponse(
csv_data, headers=generate_download_headers("csv", quoted_csv_name)
)
event_info = {
"event_type": "data_export",
"client_id": client_id,
"row_count": len(df.index),
"database": query.database.name,
"schema": query.schema,
"sql": query.sql,
"exported_format": "csv",
}
event_rep = repr(event_info)
logger.debug(
"CSV exported: %s", event_rep, extra={"superset_event": event_info}
)
return response
@api
@handle_api_exception
@has_access
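
The removed `csv` view above fetched cached results from the results backend (or re-ran the query, trimming the extra row added by the increased limit) and streamed a CSV; per its `@deprecated` target, exports now go through `api/v1/sqllab/export/`. A client-side sketch of the replacement download, with host, auth, and `client_id` assumed:

```python
# Hypothetical sketch of the download that replaces /superset/csv/<client_id>.
# Host, token, and client_id value are assumptions for illustration.
import requests

BASE = "http://localhost:8088"  # assumed Superset host
HEADERS = {"Authorization": "Bearer <access-token>"}  # assumed auth setup
client_id = "abc123def4"  # client_id of a finished SQL Lab query (assumed)

resp = requests.get(
    f"{BASE}/api/v1/sqllab/export/{client_id}/",
    headers=HEADERS,
    timeout=60,
)
resp.raise_for_status()
with open("results.csv", "wb") as f:
    f.write(resp.content)  # CSV payload, same data the old endpoint produced
```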

View File

@@ -16,11 +16,9 @@
# under the License.
# isort:skip_file
"""Unit tests for Superset"""
import csv
import datetime
import doctest
import html
import io
import json
import logging
from urllib.parse import quote
@@ -553,40 +551,6 @@ class TestCore(SupersetTestCase):
assert "Charts" in self.get_resp("/chart/list/")
assert "Dashboards" in self.get_resp("/dashboard/list/")
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_csv_endpoint(self):
self.login()
client_id = f"{random.getrandbits(64)}"[:10]
get_name_sql = """
SELECT name
FROM birth_names
LIMIT 1
"""
resp = self.run_sql(get_name_sql, client_id, raise_on_error=True)
name = resp["data"][0]["name"]
sql = f"""
SELECT name
FROM birth_names
WHERE name = '{name}'
LIMIT 1
"""
client_id = f"{random.getrandbits(64)}"[:10]
self.run_sql(sql, client_id, raise_on_error=True)
resp = self.get_resp(f"/superset/csv/{client_id}")
data = csv.reader(io.StringIO(resp))
expected_data = csv.reader(io.StringIO(f"name\n{name}\n"))
client_id = f"{random.getrandbits(64)}"[:10]
self.run_sql(sql, client_id, raise_on_error=True)
resp = self.get_resp(f"/superset/csv/{client_id}")
data = csv.reader(io.StringIO(resp))
expected_data = csv.reader(io.StringIO(f"name\n{name}\n"))
self.assertEqual(list(expected_data), list(data))
self.logout()
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_extra_table_metadata(self):
self.login()
@@ -596,52 +560,6 @@ class TestCore(SupersetTestCase):
f"/superset/extra_table_metadata/{example_db.id}/birth_names/{schema}/"
)
def test_required_params_in_sql_json(self):
self.login()
client_id = f"{random.getrandbits(64)}"[:10]
data = {"client_id": client_id}
rv = self.client.post(
"/superset/sql_json/",
json=data,
)
failed_resp = {
"sql": ["Missing data for required field."],
"database_id": ["Missing data for required field."],
}
resp_data = json.loads(rv.data.decode("utf-8"))
self.assertDictEqual(resp_data, failed_resp)
self.assertEqual(rv.status_code, 400)
data = {"sql": "SELECT 1", "client_id": client_id}
rv = self.client.post(
"/superset/sql_json/",
json=data,
)
failed_resp = {"database_id": ["Missing data for required field."]}
resp_data = json.loads(rv.data.decode("utf-8"))
self.assertDictEqual(resp_data, failed_resp)
self.assertEqual(rv.status_code, 400)
data = {"database_id": 1, "client_id": client_id}
rv = self.client.post(
"/superset/sql_json/",
json=data,
)
failed_resp = {"sql": ["Missing data for required field."]}
resp_data = json.loads(rv.data.decode("utf-8"))
self.assertDictEqual(resp_data, failed_resp)
self.assertEqual(rv.status_code, 400)
data = {"sql": "SELECT 1", "database_id": 1, "client_id": client_id}
rv = self.client.post(
"/superset/sql_json/",
json=data,
)
resp_data = json.loads(rv.data.decode("utf-8"))
self.assertEqual(resp_data.get("status"), "success")
self.assertEqual(rv.status_code, 200)
def test_templated_sql_json(self):
if superset.utils.database.get_example_database().backend == "presto":
# TODO: make it work for presto
@@ -651,32 +569,6 @@ class TestCore(SupersetTestCase):
data = self.run_sql(sql, "fdaklj3ws")
self.assertEqual(data["data"][0]["test"], "2")
@mock.patch(
"tests.integration_tests.superset_test_custom_template_processors.datetime"
)
@mock.patch("superset.views.core.get_sql_results")
def test_custom_templated_sql_json(self, sql_lab_mock, mock_dt) -> None:
"""Test sqllab receives macros expanded query."""
mock_dt.utcnow = mock.Mock(return_value=datetime.datetime(1970, 1, 1))
self.login()
sql = "SELECT '$DATE()' as test"
resp = {
"status": QueryStatus.SUCCESS,
"query": {"rows": 1},
"data": [{"test": "'1970-01-01'"}],
}
sql_lab_mock.return_value = resp
dbobj = self.create_fake_db_for_macros()
json_payload = dict(database_id=dbobj.id, sql=sql)
self.get_json_resp(
"/superset/sql_json/", raise_on_error=False, json_=json_payload
)
assert sql_lab_mock.called
self.assertEqual(sql_lab_mock.call_args[0][1], "SELECT '1970-01-01' as test")
self.delete_fake_db_for_macros()
def test_fetch_datasource_metadata(self):
self.login(username="admin")
url = "/superset/fetch_datasource_metadata?" "datasourceKey=1__table"
@@ -1126,54 +1018,6 @@ class TestCore(SupersetTestCase):
assert data == ["this_schema_is_allowed_too"]
self.delete_fake_db()
@mock.patch("superset.views.core.results_backend_use_msgpack", False)
def test_display_limit(self):
from superset.views import core
core.results_backend = mock.Mock()
self.login()
data = [{"col_0": i} for i in range(100)]
payload = {
"status": QueryStatus.SUCCESS,
"query": {"rows": 100},
"data": data,
}
# limit results to 1
expected_key = {"status": "success", "query": {"rows": 100}, "data": data}
limited_data = data[:1]
expected_limited = {
"status": "success",
"query": {"rows": 100},
"data": limited_data,
"displayLimitReached": True,
}
query_mock = mock.Mock()
query_mock.sql = "SELECT *"
query_mock.database = 1
query_mock.schema = "superset"
# do not apply msgpack serialization
use_msgpack = app.config["RESULTS_BACKEND_USE_MSGPACK"]
app.config["RESULTS_BACKEND_USE_MSGPACK"] = False
serialized_payload = sql_lab._serialize_payload(payload, False)
compressed = utils.zlib_compress(serialized_payload)
core.results_backend.get.return_value = compressed
with mock.patch("superset.views.core.db") as mock_superset_db:
mock_superset_db.session.query().filter_by().one_or_none.return_value = (
query_mock
)
# get all results
result_key = json.loads(self.get_resp("/superset/results/key/"))
result_limited = json.loads(self.get_resp("/superset/results/key/?rows=1"))
self.assertEqual(result_key, expected_key)
self.assertEqual(result_limited, expected_limited)
app.config["RESULTS_BACKEND_USE_MSGPACK"] = use_msgpack
def test_results_default_deserialization(self):
use_new_deserialization = False
data = [("a", 4, 4.0, "2019-08-18T16:39:16.660000")]
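
The deleted `test_required_params_in_sql_json` above exercised payload validation on the removed route. A sketch of the equivalent check against the replacement endpoint; only the route comes from the `@deprecated` target, and the assumption that the 400-on-missing-fields behaviour carries over is not verified by this commit:

```python
# Hypothetical port of the removed validation test to the v1 endpoint.
# Assumes the new schema still requires "sql" and "database_id" and
# rejects incomplete payloads with HTTP 400.
def test_required_params_in_sqllab_execute(self):
    self.login()
    rv = self.client.post("/api/v1/sqllab/execute/", json={})
    assert rv.status_code == 400  # both required fields missing

    rv = self.client.post(
        "/api/v1/sqllab/execute/",
        json={"database_id": 1, "sql": "SELECT 1"},
    )
    assert rv.status_code == 200
```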

View File

@@ -1500,7 +1500,6 @@ class TestRolePermission(SupersetTestCase):
self.assertIn(("can_csv", "Superset"), sql_lab_set)
self.assertIn(("can_read", "Database"), sql_lab_set)
self.assertIn(("can_read", "SavedQuery"), sql_lab_set)
self.assertIn(("can_sql_json", "Superset"), sql_lab_set)
self.assertIn(("can_sqllab_viz", "Superset"), sql_lab_set)
self.assertIn(("can_sqllab_table_viz", "Superset"), sql_lab_set)
self.assertIn(("can_sqllab", "Superset"), sql_lab_set)