2019-01-15 18:53:27 -05:00
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
2020-02-06 19:45:37 -05:00
# isort:skip_file
2016-11-01 23:48:31 -04:00
""" Unit tests for Sql Lab """
import json
2019-10-18 17:44:27 -04:00
from datetime import datetime , timedelta
2016-11-01 23:48:31 -04:00
2021-01-11 08:57:55 -05:00
import pytest
2021-06-24 11:02:49 -04:00
from celery . exceptions import SoftTimeLimitExceeded
2021-01-04 12:22:35 -05:00
from parameterized import parameterized
2021-01-11 08:57:55 -05:00
from random import random
from unittest import mock
from superset . extensions import db
2019-08-08 14:37:00 -04:00
import prison
2017-03-10 12:11:51 -05:00
2020-05-08 17:59:49 -04:00
from superset import db , security_manager
2019-11-19 13:48:42 -05:00
from superset . connectors . sqla . models import SqlaTable
2018-06-28 00:35:12 -04:00
from superset . db_engine_specs import BaseEngineSpec
2021-07-26 11:04:56 -04:00
from superset . db_engine_specs . hive import HiveEngineSpec
from superset . db_engine_specs . presto import PrestoEngineSpec
2021-06-23 10:58:20 -04:00
from superset . errors import ErrorLevel , SupersetError , SupersetErrorType
from superset . exceptions import SupersetErrorException
2021-01-11 08:57:55 -05:00
from superset . models . core import Database
2021-09-26 14:15:57 -04:00
from superset . models . sql_lab import Query , SavedQuery
2020-01-03 11:55:39 -05:00
from superset . result_set import SupersetResultSet
2021-09-26 14:15:57 -04:00
from superset . sqllab . limiting_factor import LimitingFactor
2021-04-12 16:18:17 -04:00
from superset . sql_lab import (
2021-07-26 11:04:56 -04:00
cancel_query ,
2021-04-12 16:18:17 -04:00
execute_sql_statements ,
2021-04-30 18:15:18 -04:00
execute_sql_statement ,
2021-04-12 16:18:17 -04:00
get_sql_results ,
SqlLabException ,
2021-10-20 16:43:14 -04:00
apply_limit_if_exists ,
2021-04-12 16:18:17 -04:00
)
2020-06-24 12:50:41 -04:00
from superset . sql_parse import CtasMethod
2020-08-06 15:07:22 -04:00
from superset . utils . core import (
2021-06-15 10:10:50 -04:00
backend ,
2020-08-06 15:07:22 -04:00
datetime_to_epoch ,
get_example_database ,
get_main_database ,
)
2019-10-18 17:44:27 -04:00
2016-11-10 02:08:22 -05:00
from . base_tests import SupersetTestCase
2020-08-27 12:49:18 -04:00
from . conftest import CTAS_SCHEMA_NAME
2021-07-01 11:03:07 -04:00
from tests . integration_tests . fixtures . birth_names_dashboard import (
load_birth_names_dashboard_with_slices ,
)
2016-11-01 23:48:31 -04:00
2019-09-08 13:18:09 -04:00
QUERY_1 = " SELECT * FROM birth_names LIMIT 1 "
QUERY_2 = " SELECT * FROM NO_TABLE "
QUERY_3 = " SELECT * FROM birth_names LIMIT 10 "
2016-11-01 23:48:31 -04:00
2020-06-29 18:36:06 -04:00
class TestSqlLab ( SupersetTestCase ) :
2016-11-01 23:48:31 -04:00
""" Testings for Sql Lab """
2016-11-17 14:58:33 -05:00
def run_some_queries ( self ) :
2017-04-04 23:15:19 -04:00
db . session . query ( Query ) . delete ( )
2016-11-17 14:58:33 -05:00
db . session . commit ( )
2019-09-08 13:18:09 -04:00
self . run_sql ( QUERY_1 , client_id = " client_id_1 " , user_name = " admin " )
self . run_sql ( QUERY_2 , client_id = " client_id_3 " , user_name = " admin " )
self . run_sql ( QUERY_3 , client_id = " client_id_2 " , user_name = " gamma_sqllab " )
2016-11-17 14:58:33 -05:00
self . logout ( )
2016-11-01 23:48:31 -04:00
def tearDown ( self ) :
2019-04-17 19:11:11 -04:00
self . logout ( )
2017-04-04 23:15:19 -04:00
db . session . query ( Query ) . delete ( )
2016-11-17 14:58:33 -05:00
db . session . commit ( )
2020-08-06 18:33:48 -04:00
db . session . close ( )
2016-11-01 23:48:31 -04:00
2021-01-11 08:57:55 -05:00
@pytest.mark.usefixtures ( " load_birth_names_dashboard_with_slices " )
2016-11-01 23:48:31 -04:00
def test_sql_json ( self ) :
2021-06-10 18:20:31 -04:00
examples_db = get_example_database ( )
engine_name = examples_db . db_engine_spec . engine_name
2019-06-25 16:34:48 -04:00
self . login ( " admin " )
2016-11-17 14:58:33 -05:00
2019-09-08 13:18:09 -04:00
data = self . run_sql ( " SELECT * FROM birth_names LIMIT 10 " , " 1 " )
2019-06-25 16:34:48 -04:00
self . assertLess ( 0 , len ( data [ " data " ] ) )
2016-11-01 23:48:31 -04:00
2019-06-25 16:34:48 -04:00
data = self . run_sql ( " SELECT * FROM unexistant_table " , " 2 " )
2021-06-15 10:10:50 -04:00
if backend ( ) == " presto " :
assert (
data [ " errors " ] [ 0 ] [ " error_type " ]
== SupersetErrorType . TABLE_DOES_NOT_EXIST_ERROR
)
assert data [ " errors " ] [ 0 ] [ " level " ] == ErrorLevel . ERROR
assert data [ " errors " ] [ 0 ] [ " extra " ] == {
" engine_name " : " Presto " ,
" issue_codes " : [
{
" code " : 1003 ,
" message " : " Issue 1003 - There is a syntax error in the SQL query. Perhaps there was a misspelling or a typo. " ,
} ,
{
" code " : 1005 ,
" message " : " Issue 1005 - The table was deleted or renamed in the database. " ,
} ,
] ,
}
else :
assert (
data [ " errors " ] [ 0 ] [ " error_type " ]
== SupersetErrorType . GENERIC_DB_ENGINE_ERROR
)
assert data [ " errors " ] [ 0 ] [ " level " ] == ErrorLevel . ERROR
assert data [ " errors " ] [ 0 ] [ " extra " ] == {
" issue_codes " : [
{
" code " : 1002 ,
" message " : " Issue 1002 - The database returned an unexpected error. " ,
}
] ,
" engine_name " : engine_name ,
}
2016-11-01 23:48:31 -04:00
2021-06-23 10:58:20 -04:00
@pytest.mark.usefixtures ( " load_birth_names_dashboard_with_slices " )
def test_sql_json_dml_disallowed ( self ) :
self . login ( " admin " )
data = self . run_sql ( " DELETE FROM birth_names " , " 1 " )
assert data == {
" errors " : [
{
" message " : " Only SELECT statements are allowed against this database. " ,
" error_type " : SupersetErrorType . DML_NOT_ALLOWED_ERROR ,
" level " : ErrorLevel . ERROR ,
" extra " : {
" issue_codes " : [
{
" code " : 1022 ,
" message " : " Issue 1022 - Database does not allow data manipulation. " ,
}
]
} ,
}
]
}
2021-01-11 08:57:55 -05:00
@pytest.mark.usefixtures ( " load_birth_names_dashboard_with_slices " )
2020-10-26 14:20:07 -04:00
def test_sql_json_to_saved_query_info ( self ) :
"""
SQLLab : Test SQLLab query execution info propagation to saved queries
"""
from freezegun import freeze_time
self . login ( " admin " )
sql_statement = " SELECT * FROM birth_names LIMIT 10 "
examples_db_id = get_example_database ( ) . id
saved_query = SavedQuery ( db_id = examples_db_id , sql = sql_statement )
db . session . add ( saved_query )
db . session . commit ( )
with freeze_time ( " 2020-01-01T00:00:00Z " ) :
self . run_sql ( sql_statement , " 1 " )
saved_query_ = (
db . session . query ( SavedQuery )
. filter (
SavedQuery . db_id == examples_db_id , SavedQuery . sql == sql_statement
)
. one_or_none ( )
)
assert saved_query_ . rows is not None
assert saved_query_ . last_run == datetime . now ( )
# Rollback changes
db . session . delete ( saved_query_ )
db . session . commit ( )
2020-06-24 12:50:41 -04:00
@parameterized.expand ( [ CtasMethod . TABLE , CtasMethod . VIEW ] )
2021-01-11 08:57:55 -05:00
@pytest.mark.usefixtures ( " load_birth_names_dashboard_with_slices " )
2020-06-24 12:50:41 -04:00
def test_sql_json_cta_dynamic_db ( self , ctas_method ) :
2020-08-06 15:07:22 -04:00
examples_db = get_example_database ( )
if examples_db . backend == " sqlite " :
2020-03-03 12:52:20 -05:00
# sqlite doesn't support database creation
return
2020-06-24 12:50:41 -04:00
with mock . patch (
2021-10-03 04:15:46 -04:00
" superset.sqllab.sqllab_execution_context.get_cta_schema_name " ,
2020-06-24 12:50:41 -04:00
lambda d , u , s , sql : f " { u . username } _database " ,
) :
2020-08-06 15:07:22 -04:00
old_allow_ctas = examples_db . allow_ctas
examples_db . allow_ctas = True # enable cta
2020-06-24 12:50:41 -04:00
self . login ( " admin " )
tmp_table_name = f " test_target_ { ctas_method . lower ( ) } "
self . run_sql (
" SELECT * FROM birth_names " ,
" 1 " ,
database_name = " examples " ,
tmp_table_name = tmp_table_name ,
select_as_cta = True ,
ctas_method = ctas_method ,
)
# assertions
db . session . commit ( )
2020-08-06 15:07:22 -04:00
examples_db = get_example_database ( )
engine = examples_db . get_sqla_engine ( )
data = engine . execute (
2020-06-24 12:50:41 -04:00
f " SELECT * FROM admin_database. { tmp_table_name } "
) . fetchall ( )
2021-01-11 08:57:55 -05:00
names_count = engine . execute ( f " SELECT COUNT(*) FROM birth_names " ) . first ( )
2020-06-24 12:50:41 -04:00
self . assertEqual (
2021-01-11 08:57:55 -05:00
names_count [ 0 ] , len ( data )
2020-06-24 12:50:41 -04:00
) # SQL_MAX_ROW not applied due to the SQLLAB_CTAS_NO_LIMIT set to True
# cleanup
2020-08-06 15:07:22 -04:00
engine . execute ( f " DROP { ctas_method } admin_database. { tmp_table_name } " )
examples_db . allow_ctas = old_allow_ctas
2020-06-24 12:50:41 -04:00
db . session . commit ( )
2020-03-03 12:52:20 -05:00
2021-01-11 08:57:55 -05:00
@pytest.mark.usefixtures ( " load_birth_names_dashboard_with_slices " )
2018-12-22 13:28:22 -05:00
def test_multi_sql ( self ) :
2019-06-25 16:34:48 -04:00
self . login ( " admin " )
2018-12-22 13:28:22 -05:00
multi_sql = """
2019-09-08 13:18:09 -04:00
SELECT * FROM birth_names LIMIT 1 ;
SELECT * FROM birth_names LIMIT 2 ;
2018-12-22 13:28:22 -05:00
"""
2019-06-25 16:34:48 -04:00
data = self . run_sql ( multi_sql , " 2234 " )
self . assertLess ( 0 , len ( data [ " data " ] ) )
2018-12-22 13:28:22 -05:00
2021-01-11 08:57:55 -05:00
@pytest.mark.usefixtures ( " load_birth_names_dashboard_with_slices " )
2018-08-03 18:33:33 -04:00
def test_explain ( self ) :
2019-06-25 16:34:48 -04:00
self . login ( " admin " )
2018-08-03 18:33:33 -04:00
2019-09-08 13:18:09 -04:00
data = self . run_sql ( " EXPLAIN SELECT * FROM birth_names " , " 1 " )
2019-06-25 16:34:48 -04:00
self . assertLess ( 0 , len ( data [ " data " ] ) )
2018-08-03 18:33:33 -04:00
2021-01-11 08:57:55 -05:00
@pytest.mark.usefixtures ( " load_birth_names_dashboard_with_slices " )
2016-11-01 23:48:31 -04:00
def test_sql_json_has_access ( self ) :
2019-09-08 13:18:09 -04:00
examples_db = get_example_database ( )
examples_db_permission_view = security_manager . add_permission_view_menu (
" database_access " , examples_db . perm
2016-11-01 23:48:31 -04:00
)
2020-05-08 17:59:49 -04:00
astronaut = security_manager . add_role ( " ExampleDBAccess " )
2019-09-08 13:18:09 -04:00
security_manager . add_permission_role ( astronaut , examples_db_permission_view )
2020-05-08 17:59:49 -04:00
# Gamma user, with sqllab and db permission
self . create_user_with_roles ( " Gagarin " , [ " ExampleDBAccess " , " Gamma " , " sql_lab " ] )
data = self . run_sql ( QUERY_1 , " 1 " , user_name = " Gagarin " )
2017-04-04 23:15:19 -04:00
db . session . query ( Query ) . delete ( )
2016-11-01 23:48:31 -04:00
db . session . commit ( )
2019-06-25 16:34:48 -04:00
self . assertLess ( 0 , len ( data [ " data " ] ) )
2016-11-01 23:48:31 -04:00
2020-05-08 17:59:49 -04:00
def test_sql_json_schema_access ( self ) :
examples_db = get_example_database ( )
db_backend = examples_db . backend
if db_backend == " sqlite " :
# sqlite doesn't support database creation
return
sqllab_test_db_schema_permission_view = security_manager . add_permission_view_menu (
2020-08-06 15:07:22 -04:00
" schema_access " , f " [ { examples_db . name } ].[ { CTAS_SCHEMA_NAME } ] "
2020-05-08 17:59:49 -04:00
)
schema_perm_role = security_manager . add_role ( " SchemaPermission " )
security_manager . add_permission_role (
schema_perm_role , sqllab_test_db_schema_permission_view
)
self . create_user_with_roles (
" SchemaUser " , [ " SchemaPermission " , " Gamma " , " sql_lab " ]
)
2020-08-06 15:07:22 -04:00
examples_db . get_sqla_engine ( ) . execute (
f " CREATE TABLE IF NOT EXISTS { CTAS_SCHEMA_NAME } .test_table AS SELECT 1 as c1, 2 as c2 "
2020-05-08 17:59:49 -04:00
)
data = self . run_sql (
2020-08-06 15:07:22 -04:00
f " SELECT * FROM { CTAS_SCHEMA_NAME } .test_table " , " 3 " , user_name = " SchemaUser "
2020-05-08 17:59:49 -04:00
)
self . assertEqual ( 1 , len ( data [ " data " ] ) )
data = self . run_sql (
2020-08-06 15:07:22 -04:00
f " SELECT * FROM { CTAS_SCHEMA_NAME } .test_table " ,
2020-05-08 17:59:49 -04:00
" 4 " ,
user_name = " SchemaUser " ,
2020-08-06 15:07:22 -04:00
schema = CTAS_SCHEMA_NAME ,
2020-05-08 17:59:49 -04:00
)
self . assertEqual ( 1 , len ( data [ " data " ] ) )
# postgres needs a schema as a part of the table name.
if db_backend == " mysql " :
data = self . run_sql (
" SELECT * FROM test_table " ,
" 5 " ,
user_name = " SchemaUser " ,
2020-08-06 15:07:22 -04:00
schema = CTAS_SCHEMA_NAME ,
2020-05-08 17:59:49 -04:00
)
self . assertEqual ( 1 , len ( data [ " data " ] ) )
db . session . query ( Query ) . delete ( )
2020-08-06 15:07:22 -04:00
get_example_database ( ) . get_sqla_engine ( ) . execute (
f " DROP TABLE IF EXISTS { CTAS_SCHEMA_NAME } .test_table "
)
2020-05-08 17:59:49 -04:00
db . session . commit ( )
2016-11-01 23:48:31 -04:00
def test_queries_endpoint ( self ) :
2016-11-17 14:58:33 -05:00
self . run_some_queries ( )
# Not logged in, should error out
2019-06-25 16:34:48 -04:00
resp = self . client . get ( " /superset/queries/0 " )
2016-12-15 08:38:34 -05:00
# Redirects to the login page
2021-02-23 04:50:22 -05:00
self . assertEqual ( 401 , resp . status_code )
2016-11-01 23:48:31 -04:00
2016-11-17 14:58:33 -05:00
# Admin sees queries
2019-06-25 16:34:48 -04:00
self . login ( " admin " )
data = self . get_json_resp ( " /superset/queries/0 " )
2019-10-21 10:49:12 -04:00
self . assertEqual ( 2 , len ( data ) )
2020-06-17 11:02:49 -04:00
data = self . get_json_resp ( " /superset/queries/0.0 " )
self . assertEqual ( 2 , len ( data ) )
2016-11-01 23:48:31 -04:00
2016-11-17 14:58:33 -05:00
# Run 2 more queries
2019-09-08 13:18:09 -04:00
self . run_sql ( " SELECT * FROM birth_names LIMIT 1 " , client_id = " client_id_4 " )
self . run_sql ( " SELECT * FROM birth_names LIMIT 2 " , client_id = " client_id_5 " )
2019-06-25 16:34:48 -04:00
self . login ( " admin " )
data = self . get_json_resp ( " /superset/queries/0 " )
2019-10-21 10:49:12 -04:00
self . assertEqual ( 4 , len ( data ) )
2016-11-01 23:48:31 -04:00
2016-11-17 14:58:33 -05:00
now = datetime . now ( ) + timedelta ( days = 1 )
2019-06-25 16:34:48 -04:00
query = (
db . session . query ( Query )
2019-09-08 13:18:09 -04:00
. filter_by ( sql = " SELECT * FROM birth_names LIMIT 1 " )
2019-06-25 16:34:48 -04:00
. first ( )
)
2016-11-17 14:58:33 -05:00
query . changed_on = now
2016-11-01 23:48:31 -04:00
db . session . commit ( )
2016-11-17 14:58:33 -05:00
data = self . get_json_resp (
2020-06-16 15:58:32 -04:00
" /superset/queries/ {} " . format ( float ( datetime_to_epoch ( now ) ) - 1000 )
2019-06-25 16:34:48 -04:00
)
2019-10-21 10:49:12 -04:00
self . assertEqual ( 1 , len ( data ) )
2016-11-01 23:48:31 -04:00
self . logout ( )
2019-06-25 16:34:48 -04:00
resp = self . client . get ( " /superset/queries/0 " )
2016-12-15 08:38:34 -05:00
# Redirects to the login page
2021-02-23 04:50:22 -05:00
self . assertEqual ( 401 , resp . status_code )
2016-11-01 23:48:31 -04:00
def test_search_query_on_db_id ( self ) :
2016-11-17 14:58:33 -05:00
self . run_some_queries ( )
2019-06-25 16:34:48 -04:00
self . login ( " admin " )
2019-09-08 13:18:09 -04:00
examples_dbid = get_example_database ( ) . id
2016-11-17 14:58:33 -05:00
# Test search queries on database Id
2019-09-08 13:18:09 -04:00
data = self . get_json_resp (
f " /superset/search_queries?database_id= { examples_dbid } "
)
2019-10-21 10:49:12 -04:00
self . assertEqual ( 3 , len ( data ) )
2019-06-25 16:34:48 -04:00
db_ids = [ k [ " dbId " ] for k in data ]
2019-10-21 10:49:12 -04:00
self . assertEqual ( [ examples_dbid for i in range ( 3 ) ] , db_ids )
2016-11-17 14:58:33 -05:00
2019-06-25 16:34:48 -04:00
resp = self . get_resp ( " /superset/search_queries?database_id=-1 " )
2016-11-17 14:58:33 -05:00
data = json . loads ( resp )
2019-10-21 10:49:12 -04:00
self . assertEqual ( 0 , len ( data ) )
2016-11-01 23:48:31 -04:00
def test_search_query_on_user ( self ) :
2016-11-17 14:58:33 -05:00
self . run_some_queries ( )
2019-06-25 16:34:48 -04:00
self . login ( " admin " )
2016-11-17 14:58:33 -05:00
# Test search queries on user Id
2019-06-25 16:34:48 -04:00
user_id = security_manager . find_user ( " admin " ) . id
data = self . get_json_resp ( " /superset/search_queries?user_id= {} " . format ( user_id ) )
2019-10-21 10:49:12 -04:00
self . assertEqual ( 2 , len ( data ) )
2019-06-25 16:34:48 -04:00
user_ids = { k [ " userId " ] for k in data }
2019-10-21 10:49:12 -04:00
self . assertEqual ( set ( [ user_id ] ) , user_ids )
2016-11-17 14:58:33 -05:00
2019-06-25 16:34:48 -04:00
user_id = security_manager . find_user ( " gamma_sqllab " ) . id
resp = self . get_resp ( " /superset/search_queries?user_id= {} " . format ( user_id ) )
2016-11-17 14:58:33 -05:00
data = json . loads ( resp )
2019-10-21 10:49:12 -04:00
self . assertEqual ( 1 , len ( data ) )
self . assertEqual ( data [ 0 ] [ " userId " ] , user_id )
2016-11-01 23:48:31 -04:00
2021-01-11 08:57:55 -05:00
@pytest.mark.usefixtures ( " load_birth_names_dashboard_with_slices " )
2016-11-01 23:48:31 -04:00
def test_search_query_on_status ( self ) :
2016-11-17 14:58:33 -05:00
self . run_some_queries ( )
2019-06-25 16:34:48 -04:00
self . login ( " admin " )
2016-11-17 14:58:33 -05:00
# Test search queries on status
2019-06-25 16:34:48 -04:00
resp = self . get_resp ( " /superset/search_queries?status=success " )
2016-11-17 14:58:33 -05:00
data = json . loads ( resp )
2019-10-21 10:49:12 -04:00
self . assertEqual ( 2 , len ( data ) )
2019-06-25 16:34:48 -04:00
states = [ k [ " state " ] for k in data ]
2019-10-21 10:49:12 -04:00
self . assertEqual ( [ " success " , " success " ] , states )
2016-11-17 14:58:33 -05:00
2019-06-25 16:34:48 -04:00
resp = self . get_resp ( " /superset/search_queries?status=failed " )
2016-11-17 14:58:33 -05:00
data = json . loads ( resp )
2019-10-21 10:49:12 -04:00
self . assertEqual ( 1 , len ( data ) )
self . assertEqual ( data [ 0 ] [ " state " ] , " failed " )
2016-11-01 23:48:31 -04:00
def test_search_query_on_text ( self ) :
2016-11-17 14:58:33 -05:00
self . run_some_queries ( )
2019-06-25 16:34:48 -04:00
self . login ( " admin " )
2019-09-08 13:18:09 -04:00
url = " /superset/search_queries?search_text=birth "
2016-12-01 18:21:18 -05:00
data = self . get_json_resp ( url )
2019-10-21 10:49:12 -04:00
self . assertEqual ( 2 , len ( data ) )
2019-09-08 13:18:09 -04:00
self . assertIn ( " birth " , data [ 0 ] [ " sql " ] )
2016-11-01 23:48:31 -04:00
def test_search_query_on_time ( self ) :
2016-11-17 14:58:33 -05:00
self . run_some_queries ( )
2019-06-25 16:34:48 -04:00
self . login ( " admin " )
2016-11-17 14:58:33 -05:00
first_query_time = (
2019-09-08 13:18:09 -04:00
db . session . query ( Query ) . filter_by ( sql = QUERY_1 ) . one ( )
2016-11-17 14:58:33 -05:00
) . start_time
second_query_time = (
2019-09-08 13:18:09 -04:00
db . session . query ( Query ) . filter_by ( sql = QUERY_3 ) . one ( )
2016-11-17 14:58:33 -05:00
) . start_time
# Test search queries on time filter
2019-06-25 16:34:48 -04:00
from_time = " from= {} " . format ( int ( first_query_time ) )
to_time = " to= {} " . format ( int ( second_query_time ) )
2016-11-17 14:58:33 -05:00
params = [ from_time , to_time ]
2019-06-25 16:34:48 -04:00
resp = self . get_resp ( " /superset/search_queries? " + " & " . join ( params ) )
2016-11-17 14:58:33 -05:00
data = json . loads ( resp )
2019-10-21 10:49:12 -04:00
self . assertEqual ( 2 , len ( data ) )
2016-11-01 23:48:31 -04:00
2020-02-05 03:58:12 -05:00
def test_search_query_only_owned ( self ) - > None :
2019-04-17 19:11:11 -04:00
"""
2020-02-05 03:58:12 -05:00
Test a search query with a user that does not have can_access_all_queries .
2019-04-17 19:11:11 -04:00
"""
2020-02-05 03:58:12 -05:00
# Test search_queries for Alpha user
2019-04-17 19:11:11 -04:00
self . run_some_queries ( )
2020-02-05 03:58:12 -05:00
self . login ( " gamma_sqllab " )
2019-04-17 19:11:11 -04:00
2020-02-05 03:58:12 -05:00
user_id = security_manager . find_user ( " gamma_sqllab " ) . id
2019-06-25 16:34:48 -04:00
data = self . get_json_resp ( " /superset/search_queries " )
2020-02-05 03:58:12 -05:00
self . assertEqual ( 1 , len ( data ) )
2019-06-25 16:34:48 -04:00
user_ids = { k [ " userId " ] for k in data }
2019-10-21 10:49:12 -04:00
self . assertEqual ( set ( [ user_id ] ) , user_ids )
2019-04-17 19:11:11 -04:00
2016-12-01 22:53:23 -05:00
def test_alias_duplicate ( self ) :
self . run_sql (
2019-09-08 13:18:09 -04:00
" SELECT name as col, gender as col FROM birth_names LIMIT 10 " ,
2019-06-25 16:34:48 -04:00
client_id = " 2e2df3 " ,
user_name = " admin " ,
raise_on_error = True ,
)
2016-12-01 22:53:23 -05:00
2020-01-03 11:55:39 -05:00
def test_ps_conversion_no_dict ( self ) :
2019-06-25 16:34:48 -04:00
cols = [ [ " string_col " , " string " ] , [ " int_col " , " int " ] , [ " float_col " , " float " ] ]
data = [ [ " a " , 4 , 4.0 ] ]
2020-01-03 11:55:39 -05:00
results = SupersetResultSet ( data , cols , BaseEngineSpec )
2018-03-19 14:43:04 -04:00
2020-01-03 11:55:39 -05:00
self . assertEqual ( len ( data ) , results . size )
self . assertEqual ( len ( cols ) , len ( results . columns ) )
2018-03-19 14:43:04 -04:00
2020-01-03 11:55:39 -05:00
def test_pa_conversion_tuple ( self ) :
2019-06-25 16:34:48 -04:00
cols = [ " string_col " , " int_col " , " list_col " , " float_col " ]
2019-09-08 13:18:09 -04:00
data = [ ( " Text " , 111 , [ 123 ] , 1.0 ) ]
2020-01-03 11:55:39 -05:00
results = SupersetResultSet ( data , cols , BaseEngineSpec )
2018-01-23 23:58:06 -05:00
2020-01-03 11:55:39 -05:00
self . assertEqual ( len ( data ) , results . size )
self . assertEqual ( len ( cols ) , len ( results . columns ) )
2018-01-23 23:58:06 -05:00
2020-01-03 11:55:39 -05:00
def test_pa_conversion_dict ( self ) :
2019-06-25 16:34:48 -04:00
cols = [ " string_col " , " dict_col " , " int_col " ]
data = [ [ " a " , { " c1 " : 1 , " c2 " : 2 , " c3 " : 3 } , 4 ] ]
2020-01-03 11:55:39 -05:00
results = SupersetResultSet ( data , cols , BaseEngineSpec )
2018-01-23 23:58:06 -05:00
2020-01-03 11:55:39 -05:00
self . assertEqual ( len ( data ) , results . size )
self . assertEqual ( len ( cols ) , len ( results . columns ) )
2018-01-23 23:58:06 -05:00
2018-04-04 16:38:37 -04:00
def test_sqllab_viz ( self ) :
2019-11-19 13:48:42 -05:00
self . login ( " admin " )
2019-09-08 13:18:09 -04:00
examples_dbid = get_example_database ( ) . id
2018-04-04 16:38:37 -04:00
payload = {
2019-06-25 16:34:48 -04:00
" chartType " : " dist_bar " ,
2019-11-20 10:47:06 -05:00
" datasourceName " : f " test_viz_flow_table_ { random ( ) } " ,
2019-06-25 16:34:48 -04:00
" schema " : " superset " ,
" columns " : [
2020-01-03 11:55:39 -05:00
{ " is_date " : False , " type " : " STRING " , " name " : f " viz_type_ { random ( ) } " } ,
{ " is_date " : False , " type " : " OBJECT " , " name " : f " ccount_ { random ( ) } " } ,
2019-06-25 16:34:48 -04:00
] ,
" sql " : """ \
2019-09-08 13:18:09 -04:00
SELECT *
FROM birth_names
LIMIT 10 """ ,
" dbId " : examples_dbid ,
2018-04-04 16:38:37 -04:00
}
2019-06-25 16:34:48 -04:00
data = { " data " : json . dumps ( payload ) }
resp = self . get_json_resp ( " /superset/sqllab_viz/ " , data = data )
self . assertIn ( " table_id " , resp )
2018-04-04 16:38:37 -04:00
2019-11-19 13:48:42 -05:00
# ensure owner is set correctly
table_id = resp [ " table_id " ]
table = db . session . query ( SqlaTable ) . filter_by ( id = table_id ) . one ( )
self . assertEqual ( [ owner . username for owner in table . owners ] , [ " admin " ] )
2020-09-29 07:33:07 -04:00
view_menu = security_manager . find_view_menu ( table . get_perm ( ) )
assert view_menu is not None
2020-10-16 14:10:39 -04:00
# Cleanup
db . session . delete ( table )
db . session . commit ( )
2020-09-29 07:33:07 -04:00
def test_sqllab_viz_bad_payload ( self ) :
self . login ( " admin " )
payload = {
" chartType " : " dist_bar " ,
" schema " : " superset " ,
" columns " : [
{ " is_date " : False , " type " : " STRING " , " name " : f " viz_type_ { random ( ) } " } ,
{ " is_date " : False , " type " : " OBJECT " , " name " : f " ccount_ { random ( ) } " } ,
] ,
" sql " : """ \
SELECT *
FROM birth_names
LIMIT 10 """ ,
}
data = { " data " : json . dumps ( payload ) }
url = " /superset/sqllab_viz/ "
response = self . client . post ( url , data = data , follow_redirects = True )
assert response . status_code == 400
2019-11-19 13:48:42 -05:00
2020-04-10 17:46:46 -04:00
def test_sqllab_table_viz ( self ) :
self . login ( " admin " )
2020-08-06 15:07:22 -04:00
examples_db = get_example_database ( )
examples_db . get_sqla_engine ( ) . execute (
" DROP TABLE IF EXISTS test_sqllab_table_viz "
)
examples_db . get_sqla_engine ( ) . execute (
" CREATE TABLE test_sqllab_table_viz AS SELECT 2 as col "
)
examples_dbid = examples_db . id
payload = {
" datasourceName " : " test_sqllab_table_viz " ,
" columns " : [ ] ,
" dbId " : examples_dbid ,
}
2020-04-10 17:46:46 -04:00
data = { " data " : json . dumps ( payload ) }
resp = self . get_json_resp ( " /superset/get_or_create_table/ " , data = data )
self . assertIn ( " table_id " , resp )
# ensure owner is set correctly
table_id = resp [ " table_id " ]
table = db . session . query ( SqlaTable ) . filter_by ( id = table_id ) . one ( )
self . assertEqual ( [ owner . username for owner in table . owners ] , [ " admin " ] )
db . session . delete ( table )
2020-08-06 15:07:22 -04:00
get_example_database ( ) . get_sqla_engine ( ) . execute (
" DROP TABLE test_sqllab_table_viz "
)
2020-04-10 17:46:46 -04:00
db . session . commit ( )
2021-01-11 08:57:55 -05:00
@pytest.mark.usefixtures ( " load_birth_names_dashboard_with_slices " )
2018-11-07 18:57:44 -05:00
def test_sql_limit ( self ) :
2019-06-25 16:34:48 -04:00
self . login ( " admin " )
2018-11-07 18:57:44 -05:00
test_limit = 1
2019-09-08 13:18:09 -04:00
data = self . run_sql ( " SELECT * FROM birth_names " , client_id = " sql_limit_1 " )
2019-06-25 16:34:48 -04:00
self . assertGreater ( len ( data [ " data " ] ) , test_limit )
2018-11-07 18:57:44 -05:00
data = self . run_sql (
2019-09-08 13:18:09 -04:00
" SELECT * FROM birth_names " , client_id = " sql_limit_2 " , query_limit = test_limit
2019-06-25 16:34:48 -04:00
)
2019-10-21 10:49:12 -04:00
self . assertEqual ( len ( data [ " data " ] ) , test_limit )
2021-04-30 18:15:18 -04:00
2018-11-07 18:57:44 -05:00
data = self . run_sql (
2019-09-08 13:18:09 -04:00
" SELECT * FROM birth_names LIMIT {} " . format ( test_limit ) ,
2019-06-25 16:34:48 -04:00
client_id = " sql_limit_3 " ,
query_limit = test_limit + 1 ,
)
2019-10-21 10:49:12 -04:00
self . assertEqual ( len ( data [ " data " ] ) , test_limit )
2021-04-30 18:15:18 -04:00
self . assertEqual ( data [ " query " ] [ " limitingFactor " ] , LimitingFactor . QUERY )
2018-11-07 18:57:44 -05:00
data = self . run_sql (
2019-09-08 13:18:09 -04:00
" SELECT * FROM birth_names LIMIT {} " . format ( test_limit + 1 ) ,
2019-06-25 16:34:48 -04:00
client_id = " sql_limit_4 " ,
query_limit = test_limit ,
)
2019-10-21 10:49:12 -04:00
self . assertEqual ( len ( data [ " data " ] ) , test_limit )
2021-04-30 18:15:18 -04:00
self . assertEqual ( data [ " query " ] [ " limitingFactor " ] , LimitingFactor . DROPDOWN )
data = self . run_sql (
" SELECT * FROM birth_names LIMIT {} " . format ( test_limit ) ,
client_id = " sql_limit_5 " ,
query_limit = test_limit ,
)
self . assertEqual ( len ( data [ " data " ] ) , test_limit )
self . assertEqual (
data [ " query " ] [ " limitingFactor " ] , LimitingFactor . QUERY_AND_DROPDOWN
)
data = self . run_sql (
" SELECT * FROM birth_names " , client_id = " sql_limit_6 " , query_limit = 10000 ,
)
self . assertEqual ( len ( data [ " data " ] ) , 1200 )
self . assertEqual ( data [ " query " ] [ " limitingFactor " ] , LimitingFactor . NOT_LIMITED )
data = self . run_sql (
" SELECT * FROM birth_names " , client_id = " sql_limit_7 " , query_limit = 1200 ,
)
self . assertEqual ( len ( data [ " data " ] ) , 1200 )
self . assertEqual ( data [ " query " ] [ " limitingFactor " ] , LimitingFactor . NOT_LIMITED )
2018-11-07 18:57:44 -05:00
2020-07-21 12:14:15 -04:00
def test_query_api_filter ( self ) - > None :
2019-04-17 19:11:11 -04:00
"""
2020-07-21 12:14:15 -04:00
Test query api without can_only_access_owned_queries perm added to
2019-04-17 19:11:11 -04:00
Admin and make sure all queries show up .
"""
self . run_some_queries ( )
2019-06-25 16:34:48 -04:00
self . login ( username = " admin " )
2019-04-17 19:11:11 -04:00
2020-07-21 12:14:15 -04:00
url = " /api/v1/query/ "
2019-04-17 19:11:11 -04:00
data = self . get_json_resp ( url )
2019-06-25 16:34:48 -04:00
admin = security_manager . find_user ( " admin " )
gamma_sqllab = security_manager . find_user ( " gamma_sqllab " )
2019-10-21 10:49:12 -04:00
self . assertEqual ( 3 , len ( data [ " result " ] ) )
2020-07-21 12:14:15 -04:00
user_queries = [ result . get ( " user " ) . get ( " username " ) for result in data [ " result " ] ]
2019-04-17 19:11:11 -04:00
assert admin . username in user_queries
assert gamma_sqllab . username in user_queries
2020-07-21 12:14:15 -04:00
def test_query_api_can_access_all_queries ( self ) - > None :
2019-04-17 19:11:11 -04:00
"""
2020-07-21 12:14:15 -04:00
Test query api with can_access_all_queries perm added to
2020-02-05 03:58:12 -05:00
gamma and make sure all queries show up .
2019-04-17 19:11:11 -04:00
"""
2020-08-06 18:33:48 -04:00
session = db . session
2019-04-17 19:11:11 -04:00
2020-02-05 03:58:12 -05:00
# Add all_query_access perm to Gamma user
all_queries_view = security_manager . find_permission_view_menu (
" all_query_access " , " all_query_access "
2019-04-17 19:11:11 -04:00
)
2020-02-05 03:58:12 -05:00
2019-04-17 19:11:11 -04:00
security_manager . add_permission_role (
2020-02-05 03:58:12 -05:00
security_manager . find_role ( " gamma_sqllab " ) , all_queries_view
2019-04-17 19:11:11 -04:00
)
2020-08-06 18:33:48 -04:00
session . commit ( )
2019-04-17 19:11:11 -04:00
# Test search_queries for Admin user
self . run_some_queries ( )
2020-02-05 03:58:12 -05:00
self . login ( " gamma_sqllab " )
2020-07-21 12:14:15 -04:00
url = " /api/v1/query/ "
2019-04-17 19:11:11 -04:00
data = self . get_json_resp ( url )
2020-02-05 03:58:12 -05:00
self . assertEqual ( 3 , len ( data [ " result " ] ) )
2019-04-17 19:11:11 -04:00
2020-02-05 03:58:12 -05:00
# Remove all_query_access from gamma sqllab
all_queries_view = security_manager . find_permission_view_menu (
" all_query_access " , " all_query_access "
2019-04-17 19:11:11 -04:00
)
security_manager . del_permission_role (
2020-02-05 03:58:12 -05:00
security_manager . find_role ( " gamma_sqllab " ) , all_queries_view
2019-04-17 19:11:11 -04:00
)
2020-08-06 18:33:48 -04:00
session . commit ( )
2019-04-17 19:11:11 -04:00
2020-07-21 12:14:15 -04:00
def test_query_admin_can_access_all_queries ( self ) - > None :
2020-02-05 03:58:12 -05:00
"""
2020-07-21 12:14:15 -04:00
Test query api with all_query_access perm added to
2020-02-05 03:58:12 -05:00
Admin and make sure only Admin queries show up . This is the default
"""
# Test search_queries for Admin user
self . run_some_queries ( )
self . login ( " admin " )
2020-07-21 12:14:15 -04:00
url = " /api/v1/query/ "
2020-02-05 03:58:12 -05:00
data = self . get_json_resp ( url )
self . assertEqual ( 3 , len ( data [ " result " ] ) )
2019-08-08 14:37:00 -04:00
def test_api_database(self):
    # Listing databases exposed in SQL Lab should include the example,
    # fake, and main databases (and nothing else).
    self.login("admin")
    self.create_fake_db()
    get_example_database()
    get_main_database()

    query_params = {
        "keys": [],
        "filters": [{"col": "expose_in_sqllab", "opr": "eq", "value": True}],
        "order_column": "database_name",
        "order_direction": "asc",
        "page": 0,
        "page_size": -1,
    }
    endpoint = f"api/v1/database/?q={prison.dumps(query_params)}"

    response = self.get_json_resp(endpoint)
    names = {entry.get("database_name") for entry in response["result"]}
    self.assertEqual(names, {"examples", "fake_db_100", "main"})

    self.delete_fake_db()
2020-12-15 21:47:40 -05:00
2021-01-11 08:57:55 -05:00
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
@mock.patch.dict(
    "superset.extensions.feature_flag_manager._feature_flags",
    {"ENABLE_TEMPLATE_PROCESSING": True},
    clear=True,
)
def test_sql_json_parameter_error(self):
    # With template processing enabled, a fully supplied parameter
    # renders and the query succeeds ...
    self.login("admin")
    resp = self.run_sql(
        "SELECT * FROM birth_names WHERE state = '{{ state }}' LIMIT 10",
        "1",
        template_params=json.dumps({"state": "CA"}),
    )
    assert resp["status"] == "success"

    # ... while referencing an undefined parameter ("stat") must raise a
    # MISSING_TEMPLATE_PARAMS_ERROR naming the offending parameter.
    resp = self.run_sql(
        "SELECT * FROM birth_names WHERE state = '{{ stat }}' LIMIT 10",
        "2",
        template_params=json.dumps({"state": "CA"}),
    )
    first_error = resp["errors"][0]
    assert first_error["error_type"] == "MISSING_TEMPLATE_PARAMS_ERROR"
    assert first_error["extra"] == {
        "issue_codes": [
            {
                "code": 1006,
                "message": "Issue 1006 - One or more parameters specified in the query are missing.",
            }
        ],
        "template_parameters": {"state": "CA"},
        "undefined_parameters": ["stat"],
    }
2021-01-04 12:22:35 -05:00
@mock.patch("superset.sql_lab.get_query")
@mock.patch("superset.sql_lab.execute_sql_statement")
def test_execute_sql_statements(self, mock_execute_sql_statement, mock_get_query):
    # A multi-statement script should be split and each statement
    # executed individually (sync path, no CTAS).
    sql = """
        -- comment
        SET @value = 42;
        SELECT @value AS foo;
        -- comment
    """
    session_mock = mock.MagicMock()
    query_mock = mock.MagicMock()
    query_mock.database.allow_run_async = False
    cursor_mock = mock.MagicMock()
    query_mock.database.get_sqla_engine.return_value.raw_connection.return_value.cursor.return_value = (
        cursor_mock
    )
    query_mock.database.db_engine_spec.run_multiple_statements_as_one = False
    mock_get_query.return_value = query_mock

    execute_sql_statements(
        query_id=1,
        rendered_query=sql,
        return_results=True,
        store_results=False,
        user_name="admin",
        session=session_mock,
        start_time=None,
        expand_data=False,
        log_params=None,
    )

    # Comments are stripped; each remaining statement is run in order.
    expected_calls = [
        mock.call(stmt, query_mock, "admin", session_mock, cursor_mock, None, False)
        for stmt in ("SET @value = 42", "SELECT @value AS foo")
    ]
    mock_execute_sql_statement.assert_has_calls(expected_calls)
2021-06-23 10:58:20 -04:00
@mock.patch("superset.sql_lab.results_backend", None)
@mock.patch("superset.sql_lab.get_query")
@mock.patch("superset.sql_lab.execute_sql_statement")
def test_execute_sql_statements_no_results_backend(
    self, mock_execute_sql_statement, mock_get_query
):
    # An async-enabled database without a configured results backend
    # must fail fast with RESULTS_BACKEND_NOT_CONFIGURED_ERROR.
    sql = """
        -- comment
        SET @value = 42;
        SELECT @value AS foo;
        -- comment
    """
    session_mock = mock.MagicMock()
    query_mock = mock.MagicMock()
    query_mock.database.allow_run_async = True
    cursor_mock = mock.MagicMock()
    query_mock.database.get_sqla_engine.return_value.raw_connection.return_value.cursor.return_value = (
        cursor_mock
    )
    query_mock.database.db_engine_spec.run_multiple_statements_as_one = False
    mock_get_query.return_value = query_mock

    expected_error = SupersetError(
        message="Results backend is not configured.",
        error_type=SupersetErrorType.RESULTS_BACKEND_NOT_CONFIGURED_ERROR,
        level=ErrorLevel.ERROR,
        extra={
            "issue_codes": [
                {
                    "code": 1021,
                    "message": (
                        "Issue 1021 - Results backend needed for asynchronous "
                        "queries is not configured."
                    ),
                }
            ]
        },
    )

    with pytest.raises(SupersetErrorException) as excinfo:
        execute_sql_statements(
            query_id=1,
            rendered_query=sql,
            return_results=True,
            store_results=False,
            user_name="admin",
            session=session_mock,
            start_time=None,
            expand_data=False,
            log_params=None,
        )
    assert excinfo.value.error == expected_error
2021-01-04 12:22:35 -05:00
@mock.patch("superset.sql_lab.get_query")
@mock.patch("superset.sql_lab.execute_sql_statement")
def test_execute_sql_statements_ctas(
    self, mock_execute_sql_statement, mock_get_query
):
    # CTAS/CVAS validation: only the final SELECT may receive the
    # apply_ctas flag; non-conforming scripts must be rejected.
    multi_statement_sql = """
        -- comment
        SET @value = 42;
        SELECT @value AS foo;
        -- comment
    """
    session_mock = mock.MagicMock()
    query_mock = mock.MagicMock()
    query_mock.database.allow_run_async = False
    cursor_mock = mock.MagicMock()
    query_mock.database.get_sqla_engine.return_value.raw_connection.return_value.cursor.return_value = (
        cursor_mock
    )
    query_mock.database.db_engine_spec.run_multiple_statements_as_one = False
    mock_get_query.return_value = query_mock

    # Flag the query as CTAS (create table as select).
    query_mock.select_as_cta = True
    query_mock.ctas_method = CtasMethod.TABLE

    def run(statements):
        # Single entry point for every scenario below; only the SQL varies.
        execute_sql_statements(
            query_id=1,
            rendered_query=statements,
            return_results=True,
            store_results=False,
            user_name="admin",
            session=session_mock,
            start_time=None,
            expand_data=False,
            log_params=None,
        )

    # Valid CTAS: apply_ctas is True only on the trailing SELECT.
    run(multi_statement_sql)
    mock_execute_sql_statement.assert_has_calls(
        [
            mock.call(
                "SET @value = 42",
                query_mock,
                "admin",
                session_mock,
                cursor_mock,
                None,
                False,
            ),
            mock.call(
                "SELECT @value AS foo",
                query_mock,
                "admin",
                session_mock,
                cursor_mock,
                None,
                True,  # apply_ctas
            ),
        ]
    )

    # Invalid CTAS: the last statement is not a SELECT.
    with pytest.raises(SupersetErrorException) as excinfo:
        run("DROP TABLE my_table")
    assert excinfo.value.error == SupersetError(
        message="CTAS (create table as select) can only be run with a query where the last statement is a SELECT. Please make sure your query has a SELECT as its last statement. Then, try running your query again.",
        error_type=SupersetErrorType.INVALID_CTAS_QUERY_ERROR,
        level=ErrorLevel.ERROR,
        extra={
            "issue_codes": [
                {
                    "code": 1023,
                    "message": "Issue 1023 - The CTAS (create table as select) doesn't have a SELECT statement at the end. Please make sure your query has a SELECT as its last statement. Then, try running your query again.",
                }
            ]
        },
    )

    # Invalid CVAS: a view may be created from a single SELECT only.
    query_mock.ctas_method = CtasMethod.VIEW
    with pytest.raises(SupersetErrorException) as excinfo:
        run(multi_statement_sql)
    assert excinfo.value.error == SupersetError(
        message="CVAS (create view as select) can only be run with a query with a single SELECT statement. Please make sure your query has only a SELECT statement. Then, try running your query again.",
        error_type=SupersetErrorType.INVALID_CVAS_QUERY_ERROR,
        level=ErrorLevel.ERROR,
        extra={
            "issue_codes": [
                {
                    "code": 1024,
                    "message": "Issue 1024 - CVAS (create view as select) query has more than one statement.",
                },
                {
                    "code": 1025,
                    "message": "Issue 1025 - CVAS (create view as select) query is not a SELECT statement.",
                },
            ]
        },
    )
2021-04-12 16:18:17 -04:00
2021-06-24 11:02:49 -04:00
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_sql_json_soft_timeout(self):
    """
    A SoftTimeLimitExceeded raised while the cursor is being handled
    must be converted into a SQLLAB_TIMEOUT_ERROR payload for the client.
    """
    examples_db = get_example_database()
    if examples_db.backend == "sqlite":
        # Fix: skip explicitly instead of silently returning, so the test
        # report shows this backend was never exercised rather than a
        # false "passed".
        pytest.skip("sqlite backend is not exercised by this test")

    self.login("admin")
    with mock.patch.object(
        examples_db.db_engine_spec, "handle_cursor"
    ) as handle_cursor:
        # Simulate celery's soft time limit firing mid-query.
        handle_cursor.side_effect = SoftTimeLimitExceeded()
        data = self.run_sql("SELECT * FROM birth_names LIMIT 1", "1")
        assert data == {
            "errors": [
                {
                    "message": (
                        "The query was killed after 21600 seconds. It might be too complex, "
                        "or the database might be under heavy load."
                    ),
                    "error_type": SupersetErrorType.SQLLAB_TIMEOUT_ERROR,
                    "level": ErrorLevel.ERROR,
                    "extra": {
                        "issue_codes": [
                            {
                                "code": 1026,
                                "message": "Issue 1026 - Query is too complex and takes too long to run.",
                            },
                            {
                                "code": 1027,
                                "message": "Issue 1027 - The database is currently running too many queries.",
                            },
                        ]
                    },
                }
            ]
        }
2021-07-26 11:04:56 -04:00
2021-10-20 16:43:14 -04:00
def test_apply_limit_if_exists_when_incremented_limit_is_none(self):
    # With no increased limit supplied, the SQL must come back unchanged.
    statements = """
        SET @value = 42;
        SELECT @value AS foo;
    """
    db = get_example_database()
    query_stub = mock.MagicMock()
    query_stub.limit = 300
    assert apply_limit_if_exists(db, None, query_stub, statements) == statements
def test_apply_limit_if_exists_when_increased_limit(self):
    # Supplying an increased limit must inject it into the rewritten SQL.
    statements = """
        SET @value = 42;
        SELECT @value AS foo;
    """
    db = get_example_database()
    query_stub = mock.MagicMock()
    query_stub.limit = 300
    rewritten = apply_limit_if_exists(db, 1000, query_stub, statements)
    assert "LIMIT 1000" in rewritten
2021-07-26 11:04:56 -04:00
@pytest.mark.parametrize("spec", [HiveEngineSpec, PrestoEngineSpec])
def test_cancel_query_implicit(spec: BaseEngineSpec) -> None:
    # Engine specs with implicit cancellation should report success.
    stub_query = mock.MagicMock()
    stub_query.database.db_engine_spec = spec
    assert cancel_query(stub_query)