fix: don't strip SQL comments in Explore - 2nd try (#28753)

This commit is contained in:
Maxime Beauchemin 2024-06-20 14:30:03 -07:00 committed by GitHub
parent 5e060cef7c
commit 514eda82fb
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
9 changed files with 109 additions and 34 deletions

View File

@@ -24,7 +24,7 @@ jobs:
mysql+mysqldb://superset:superset@127.0.0.1:13306/superset?charset=utf8mb4&binary_prefix=true mysql+mysqldb://superset:superset@127.0.0.1:13306/superset?charset=utf8mb4&binary_prefix=true
services: services:
mysql: mysql:
image: mysql:5.7 image: mysql:8.0
env: env:
MYSQL_ROOT_PASSWORD: root MYSQL_ROOT_PASSWORD: root
ports: ports:

View File

@@ -1450,7 +1450,7 @@ class SqlaTable(
if not self.is_virtual: if not self.is_virtual:
return self.get_sqla_table(), None return self.get_sqla_table(), None
from_sql = self.get_rendered_sql(template_processor) from_sql = self.get_rendered_sql(template_processor) + "\n"
parsed_query = ParsedQuery(from_sql, engine=self.db_engine_spec.engine) parsed_query = ParsedQuery(from_sql, engine=self.db_engine_spec.engine)
if not ( if not (
parsed_query.is_unknown() parsed_query.is_unknown()

View File

@@ -1133,9 +1133,8 @@ class BaseEngineSpec:  # pylint: disable=too-many-public-methods
cte = None cte = None
sql_remainder = None sql_remainder = None
sql = sql.strip(" \t\n;") sql = sql.strip(" \t\n;")
sql_statement = sqlparse.format(sql, strip_comments=True)
query_limit: int | None = sql_parse.extract_top_from_query( query_limit: int | None = sql_parse.extract_top_from_query(
sql_statement, cls.top_keywords sql, cls.top_keywords
) )
if not limit: if not limit:
final_limit = query_limit final_limit = query_limit
@@ -1144,7 +1143,7 @@ class BaseEngineSpec:  # pylint: disable=too-many-public-methods
else: else:
final_limit = limit final_limit = limit
if not cls.allows_cte_in_subquery: if not cls.allows_cte_in_subquery:
cte, sql_remainder = sql_parse.get_cte_remainder_query(sql_statement) cte, sql_remainder = sql_parse.get_cte_remainder_query(sql)
if cte: if cte:
str_statement = str(sql_remainder) str_statement = str(sql_remainder)
cte = cte + "\n" cte = cte + "\n"

View File

@@ -1070,8 +1070,7 @@ class ExploreMixin:  # pylint: disable=too-many-public-methods
""" """
Render sql with template engine (Jinja). Render sql with template engine (Jinja).
""" """
sql = self.sql.strip("\t\r\n; ")
sql = self.sql
if template_processor: if template_processor:
try: try:
sql = template_processor.process_template(sql) sql = template_processor.process_template(sql)
@@ -1083,13 +1082,12 @@ class ExploreMixin:  # pylint: disable=too-many-public-methods
) )
) from ex ) from ex
script = SQLScript(sql.strip("\t\r\n; "), engine=self.db_engine_spec.engine) script = SQLScript(sql, engine=self.db_engine_spec.engine)
if len(script.statements) > 1: if len(script.statements) > 1:
raise QueryObjectValidationError( raise QueryObjectValidationError(
_("Virtual dataset query cannot consist of multiple statements") _("Virtual dataset query cannot consist of multiple statements")
) )
sql = script.statements[0].format(comments=False)
if not sql: if not sql:
raise QueryObjectValidationError(_("Virtual dataset query cannot be empty")) raise QueryObjectValidationError(_("Virtual dataset query cannot be empty"))
return sql return sql
@@ -1106,7 +1104,7 @@ class ExploreMixin:  # pylint: disable=too-many-public-methods
CTE, the CTE is returned as the second value in the return tuple. CTE, the CTE is returned as the second value in the return tuple.
""" """
from_sql = self.get_rendered_sql(template_processor) from_sql = self.get_rendered_sql(template_processor) + "\n"
parsed_query = ParsedQuery(from_sql, engine=self.db_engine_spec.engine) parsed_query = ParsedQuery(from_sql, engine=self.db_engine_spec.engine)
if not ( if not (
parsed_query.is_unknown() parsed_query.is_unknown()

View File

@@ -19,6 +19,7 @@ from __future__ import annotations
import contextlib import contextlib
import functools import functools
import os import os
from textwrap import dedent
from typing import Any, Callable, TYPE_CHECKING from typing import Any, Callable, TYPE_CHECKING
from unittest.mock import patch from unittest.mock import patch
@@ -295,25 +296,67 @@ def virtual_dataset():
dataset = SqlaTable( dataset = SqlaTable(
table_name="virtual_dataset", table_name="virtual_dataset",
sql=( sql=(
"SELECT 0 as col1, 'a' as col2, 1.0 as col3, NULL as col4, '2000-01-01 00:00:00' as col5, 1 as col6 " dedent("""\
"UNION ALL " SELECT 0 as col1, 'a' as col2, 1.0 as col3, NULL as col4, '2000-01-01 00:00:00' as col5, 1 as col6
"SELECT 1, 'b', 1.1, NULL, '2000-01-02 00:00:00', NULL " UNION ALL
"UNION ALL " SELECT 1, 'b', 1.1, NULL, '2000-01-02 00:00:00', NULL
"SELECT 2 as col1, 'c' as col2, 1.2, NULL, '2000-01-03 00:00:00', 3 " UNION ALL
"UNION ALL " SELECT 2 as col1, 'c' as col2, 1.2, NULL, '2000-01-03 00:00:00', 3
"SELECT 3 as col1, 'd' as col2, 1.3, NULL, '2000-01-04 00:00:00', 4 " UNION ALL
"UNION ALL " SELECT 3 as col1, 'd' as col2, 1.3, NULL, '2000-01-04 00:00:00', 4
"SELECT 4 as col1, 'e' as col2, 1.4, NULL, '2000-01-05 00:00:00', 5 " UNION ALL
"UNION ALL " SELECT 4 as col1, 'e' as col2, 1.4, NULL, '2000-01-05 00:00:00', 5
"SELECT 5 as col1, 'f' as col2, 1.5, NULL, '2000-01-06 00:00:00', 6 " UNION ALL
"UNION ALL " SELECT 5 as col1, 'f' as col2, 1.5, NULL, '2000-01-06 00:00:00', 6
"SELECT 6 as col1, 'g' as col2, 1.6, NULL, '2000-01-07 00:00:00', 7 " UNION ALL
"UNION ALL " SELECT 6 as col1, 'g' as col2, 1.6, NULL, '2000-01-07 00:00:00', 7
"SELECT 7 as col1, 'h' as col2, 1.7, NULL, '2000-01-08 00:00:00', 8 " UNION ALL
"UNION ALL " SELECT 7 as col1, 'h' as col2, 1.7, NULL, '2000-01-08 00:00:00', 8
"SELECT 8 as col1, 'i' as col2, 1.8, NULL, '2000-01-09 00:00:00', 9 " UNION ALL
"UNION ALL " SELECT 8 as col1, 'i' as col2, 1.8, NULL, '2000-01-09 00:00:00', 9
"SELECT 9 as col1, 'j' as col2, 1.9, NULL, '2000-01-10 00:00:00', 10" UNION ALL
SELECT 9 as col1, 'j' as col2, 1.9, NULL, '2000-01-10 00:00:00', 10
""")
),
database=get_example_database(),
)
TableColumn(column_name="col1", type="INTEGER", table=dataset)
TableColumn(column_name="col2", type="VARCHAR(255)", table=dataset)
TableColumn(column_name="col3", type="DECIMAL(4,2)", table=dataset)
TableColumn(column_name="col4", type="VARCHAR(255)", table=dataset)
# Different database dialect datetime type is not consistent, so temporarily use varchar
TableColumn(column_name="col5", type="VARCHAR(255)", table=dataset)
TableColumn(column_name="col6", type="INTEGER", table=dataset)
SqlMetric(metric_name="count", expression="count(*)", table=dataset)
db.session.add(dataset)
db.session.commit()
yield dataset
db.session.delete(dataset)
db.session.commit()
@pytest.fixture
def virtual_dataset_with_comments():
from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn
dataset = SqlaTable(
table_name="virtual_dataset_with_comments",
sql=(
dedent("""\
--COMMENT
/*COMMENT*/
WITH cte as (--COMMENT
SELECT 2 as col1, /*COMMENT*/'j' as col2, 1.9, NULL, '2000-01-10 00:00:00', 10
)
SELECT 0 as col1, 'a' as col2, 1.0 as col3, NULL as col4, '2000-01-01 00:00:00' as col5, 1 as col6
\n /* COMMENT */ \n
UNION ALL/*COMMENT*/
SELECT 1 as col1, 'f' as col2, 1.5, NULL, '2000-01-06 00:00:00', 6 --COMMENT
UNION ALL--COMMENT
SELECT * FROM cte --COMMENT""")
), ),
database=get_example_database(), database=get_example_database(),
) )

View File

@@ -539,7 +539,9 @@ class TestCore(SupersetTestCase):
database=get_example_database(), database=get_example_database(),
) )
rendered_query = str(table.get_from_clause()[0]) rendered_query = str(table.get_from_clause()[0])
self.assertEqual(clean_query, rendered_query) assert "comment 1" in rendered_query
assert "comment 2" in rendered_query
assert "FROM tbl" in rendered_query
def test_slice_payload_no_datasource(self): def test_slice_payload_no_datasource(self):
form_data = { form_data = {

View File

@@ -538,10 +538,12 @@ def test_get_samples(test_client, login_as_admin, virtual_dataset):
assert "coltypes" in rv2.json["result"] assert "coltypes" in rv2.json["result"]
assert "data" in rv2.json["result"] assert "data" in rv2.json["result"]
eager_samples = virtual_dataset.database.get_df( sql = (
f"select * from ({virtual_dataset.sql}) as tbl" f"select * from ({virtual_dataset.sql}) as tbl "
f' limit {app.config["SAMPLES_ROW_LIMIT"]}' f'limit {app.config["SAMPLES_ROW_LIMIT"]}'
) )
eager_samples = virtual_dataset.database.get_df(sql)
# the col3 is Decimal # the col3 is Decimal
eager_samples["col3"] = eager_samples["col3"].apply(float) eager_samples["col3"] = eager_samples["col3"].apply(float)
eager_samples = eager_samples.to_dict(orient="records") eager_samples = eager_samples.to_dict(orient="records")

View File

@@ -1168,3 +1168,34 @@ OFFSET 0
re.search(r"WHERE\n col6 >= .*2001-10-01", sqls[1]) re.search(r"WHERE\n col6 >= .*2001-10-01", sqls[1])
and re.search(r"AND col6 < .*2002-10-01", sqls[1]) and re.search(r"AND col6 < .*2002-10-01", sqls[1])
) is not None ) is not None
def test_virtual_dataset_with_comments(app_context, virtual_dataset_with_comments):
qc = QueryContextFactory().create(
datasource={
"type": virtual_dataset_with_comments.type,
"id": virtual_dataset_with_comments.id,
},
queries=[
{
"columns": ["col1", "col2"],
"metrics": ["count"],
"post_processing": [
{
"operation": "pivot",
"options": {
"aggregates": {"count": {"operator": "mean"}},
"columns": ["col2"],
"index": ["col1"],
},
},
{"operation": "flatten"},
],
}
],
result_type=ChartDataResultType.FULL,
force=True,
)
query_object = qc.queries[0]
df = qc.get_df_payload(query_object)["df"]
assert len(df) == 3

View File

@@ -253,7 +253,7 @@ class TestDatabaseModel(SupersetTestCase):
query = table.database.compile_sqla_query(sqla_query.sqla_query) query = table.database.compile_sqla_query(sqla_query.sqla_query)
# assert virtual dataset # assert virtual dataset
assert "SELECT\n 'user_abc' AS user,\n 'xyz_P1D' AS time_grain" in query assert "SELECT 'user_abc' as user, 'xyz_P1D' as time_grain" in query
# assert dataset calculated column # assert dataset calculated column
assert "case when 'abc' = 'abc' then 'yes' else 'no' end" in query assert "case when 'abc' = 'abc' then 'yes' else 'no' end" in query
# assert adhoc column # assert adhoc column