feat: convert backend chart errors to the new error type (#9753)

This commit is contained in:
Erik Ritter 2020-05-12 18:15:53 -07:00 committed by GitHub
parent 0e7888c659
commit 83ec7365a7
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
15 changed files with 175 additions and 33 deletions

View File

@ -49,7 +49,8 @@ export default () =>
requests.map(async xhr => {
expect(xhr.status).to.eq(200);
const responseBody = await readResponseBlob(xhr.response.body);
expect(responseBody).to.have.property('error', null);
expect(responseBody).to.have.property('errors');
expect(responseBody.errors.length).to.eq(0);
const sliceId = responseBody.form_data.slice_id;
cy.get(`#chart-id-${sliceId}`).should('be.visible');
}),

View File

@ -16,6 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
import { ErrorTypeEnum } from 'src/components/ErrorMessage/types';
import getClientErrorObject from 'src/utils/getClientErrorObject';
describe('getClientErrorObject()', () => {
@ -43,6 +44,26 @@ describe('getClientErrorObject()', () => {
);
});
// Verifies that getClientErrorObject() maps the first entry of the new
// SIP-40 `errors` array onto the legacy `error` string field, so that
// older error renderers that only read `error` keep working.
it('Handles backwards compatibility between old error messages and the new SIP-40 errors format', () => {
  // A response body in the new SIP-40 shape: an `errors` array of
  // SupersetError objects rather than a single `error` string.
  const jsonError = {
    errors: [
      {
        errorType: ErrorTypeEnum.GENERIC_DB_ENGINE_ERROR,
        extra: { engine: 'presto' },
        level: 'error',
        message: 'presto error: test error',
      },
    ],
  };
  const jsonErrorString = JSON.stringify(jsonError);

  return getClientErrorObject(new Response(jsonErrorString)).then(
    errorObj => {
      // The legacy `error` field is populated from errors[0].message.
      expect(errorObj.error).toEqual(jsonError.errors[0].message);
    },
  );
});
it('Handles Response that can be parsed as text', () => {
const textError = 'Hello I am a text error';

View File

@ -142,6 +142,7 @@ class Chart extends React.PureComponent {
const { chartAlert, chartStackTrace, queryResponse } = this.props;
return (
<ErrorMessageWithStackTrace
error={queryResponse?.errors?.[0]}
message={chartAlert}
link={queryResponse ? queryResponse.link : null}
stackTrace={chartStackTrace}

View File

@ -17,23 +17,27 @@
* under the License.
*/
// TODO: Add more error types as we classify more errors
// Keep in sync with superset/views/errors.py
export const ErrorTypeEnum = {
// Generic errors created on the frontend
FRONTEND_CSRF_ERROR: 'FRONTEND_CSRF_ERROR',
FRONTEND_NETWORK_ERROR: 'FRONTEND_NETWORK_ERROR',
FRONTEND_TIMEOUT_ERROR: 'FRONTEND_TIMEOUT_ERROR',
GENERIC_DB_ENGINE_ERROR: 'GENERIC_DB_ENGINE_ERROR',
VIZ_GET_DF_ERROR: 'VIZ_GET_DF_ERROR',
} as const;
type ValueOf<T> = T[keyof T];
export type ErrorType = ValueOf<typeof ErrorTypeEnum>;
// Keep in sync with superset/views/errors.py
export type ErrorLevel = 'info' | 'warning' | 'error';
export type SupersetError = {
errorType: ErrorType;
extra: Record<string, any>;
extra: Record<string, any> | null;
level: ErrorLevel;
message: string;
};

View File

@ -18,12 +18,14 @@
*/
import { SupersetClientResponse } from '@superset-ui/connection';
import { t } from '@superset-ui/translation';
import { SupersetError } from 'src/components/ErrorMessage/types';
import COMMON_ERR_MESSAGES from './errorMessages';
// The response always contains an error attribute, can contain anything from the
// SupersetClientResponse object, and can contain a spread JSON blob
export type ClientErrorObject = {
error: string;
errors?: SupersetError[];
severity?: string;
message?: string;
stacktrace?: string;
@ -48,6 +50,12 @@ export default function getClientErrorObject(
.json()
.then(errorJson => {
let error = { ...responseObject, ...errorJson };
// Backwards compatibility for old error renderers with the new error object
if (error.errors && error.errors.length > 0) {
error.error = error.description = error.errors[0].message;
}
if (error.stack) {
error = {
...error,

View File

@ -1055,7 +1055,7 @@ class SqlaTable(Model, BaseDatasource):
query_str_ext = self.get_query_str_extended(query_obj)
sql = query_str_ext.sql
status = utils.QueryStatus.SUCCESS
error_message = None
errors = None
def mutator(df: pd.DataFrame) -> None:
"""
@ -1084,14 +1084,14 @@ class SqlaTable(Model, BaseDatasource):
status = utils.QueryStatus.FAILED
logger.exception(f"Query {sql} on schema {self.schema} failed")
db_engine_spec = self.database.db_engine_spec
error_message = db_engine_spec.extract_error_message(ex)
errors = db_engine_spec.extract_errors(ex)
return QueryResult(
status=status,
df=df,
duration=datetime.now() - qry_start_dttm,
query=sql,
error_message=error_message,
errors=errors,
)
def get_sqla_table_object(self) -> Table:

View File

@ -34,6 +34,7 @@ from typing import (
Union,
)
import dataclasses
import pandas as pd
import sqlparse
from flask import g
@ -51,6 +52,7 @@ from sqlalchemy.types import TypeEngine
from wtforms.form import Form
from superset import app, sql_parse
from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
from superset.models.sql_lab import Query
from superset.utils import core as utils
@ -568,6 +570,19 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
"""Extract error message for queries"""
return utils.error_msg_from_exception(ex)
@classmethod
def extract_errors(cls, ex: Exception) -> List[Dict[str, Any]]:
    """
    Convert an exception raised by the database driver into a list of
    serialized ``SupersetError`` dicts suitable for returning to a client.

    The default implementation wraps the engine-specific message from
    ``extract_error_message`` in a single generic error entry; engine specs
    may override this to produce richer, classified errors.

    :param ex: the exception raised while running a query
    :returns: a list with one ``dataclasses.asdict``-serialized error
    """
    generic_error = SupersetError(
        error_type=SupersetErrorType.GENERIC_DB_ENGINE_ERROR,
        message=cls.extract_error_message(ex),
        level=ErrorLevel.ERROR,
        extra={"engine": cls.engine},
    )
    return [dataclasses.asdict(generic_error)]
@classmethod
def adjust_database_uri(cls, uri: URL, selected_schema: Optional[str]) -> None:
"""

61
superset/errors.py Normal file
View File

@ -0,0 +1,61 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=too-few-public-methods
from enum import Enum
from typing import Any, Dict, Optional
from dataclasses import dataclass
class SupersetErrorType(str, Enum):
    """
    Types of errors that can exist within Superset.

    Keep in sync with superset-frontend/src/components/ErrorMessage/types.ts
    """

    # Generic errors created on the frontend (mirrors ErrorTypeEnum in types.ts)
    FRONTEND_CSRF_ERROR = "FRONTEND_CSRF_ERROR"
    FRONTEND_NETWORK_ERROR = "FRONTEND_NETWORK_ERROR"
    FRONTEND_TIMEOUT_ERROR = "FRONTEND_TIMEOUT_ERROR"
    # Catch-all for unclassified database errors; produced by
    # BaseEngineSpec.extract_errors as the default wrapper.
    GENERIC_DB_ENGINE_ERROR = "GENERIC_DB_ENGINE_ERROR"
    # Used by BaseViz when building the results DataFrame raises an exception.
    VIZ_GET_DF_ERROR = "VIZ_GET_DF_ERROR"
class ErrorLevel(str, Enum):
    """
    Levels of errors that can exist within Superset.

    Keep in sync with superset-frontend/src/components/ErrorMessage/types.ts
    """

    # Values mirror the frontend's ErrorLevel union type
    # ('info' | 'warning' | 'error').
    INFO = "info"
    WARNING = "warning"
    ERROR = "error"
@dataclass
class SupersetError:
    """
    An error that is returned to a client.

    Instances are serialized with ``dataclasses.asdict`` before being sent
    to the frontend, so the field names must stay in sync with the
    ``SupersetError`` type in
    superset-frontend/src/components/ErrorMessage/types.ts.
    """

    # Human-readable description of the error.
    message: str
    # Machine-readable classification of the error; see SupersetErrorType.
    error_type: SupersetErrorType
    # Severity of the error; see ErrorLevel.
    level: ErrorLevel
    # Optional free-form context, e.g. {"engine": "presto"}.
    extra: Optional[Dict[str, Any]] = None

View File

@ -19,7 +19,7 @@ import json
import logging
import re
from datetime import datetime
from typing import List, Optional
from typing import Any, Dict, List, Optional
# isort and pylint disagree, isort should win
# pylint: disable=ungrouped-imports
@ -374,13 +374,20 @@ class QueryResult: # pylint: disable=too-few-public-methods
"""Object returned by the query interface"""
def __init__( # pylint: disable=too-many-arguments
self, df, query, duration, status=QueryStatus.SUCCESS, error_message=None
self,
df,
query,
duration,
status=QueryStatus.SUCCESS,
error_message=None,
errors=None,
):
self.df: pd.DataFrame = df
self.query: str = query
self.duration: int = duration
self.status: str = status
self.error_message: Optional[str] = error_message
self.errors: List[Dict[str, Any]] = errors or []
class ExtraJSONMixin:

View File

@ -66,7 +66,7 @@ logger = logging.getLogger(__name__)
config = superset_app.config
def get_error_msg():
def get_error_msg() -> str:
if conf.get("SHOW_STACKTRACE"):
error_msg = traceback.format_exc()
else:
@ -78,7 +78,12 @@ def get_error_msg():
return error_msg
def json_error_response(msg=None, status=500, payload=None, link=None):
def json_error_response(
msg: Optional[str] = None,
status: int = 500,
payload: Optional[dict] = None,
link: Optional[str] = None,
) -> Response:
if not payload:
payload = {"error": "{}".format(msg)}
if link:
@ -91,11 +96,11 @@ def json_error_response(msg=None, status=500, payload=None, link=None):
)
def json_success(json_msg, status=200):
def json_success(json_msg: str, status: int = 200) -> Response:
return Response(json_msg, status=status, mimetype="application/json")
def data_payload_response(payload_json, has_error=False):
def data_payload_response(payload_json: str, has_error: bool = False) -> Response:
status = 400 if has_error else 200
return json_success(payload_json, status=status)

View File

@ -1723,7 +1723,7 @@ class Superset(BaseSupersetView):
):
payload = obj.get_payload()
error = payload["error"]
error = payload["errors"] or None
status = payload["status"]
except Exception as ex:
error = utils.error_msg_from_exception(ex)
@ -2310,14 +2310,14 @@ class Superset(BaseSupersetView):
query: Query,
expand_data: bool,
log_params: Optional[Dict[str, Any]] = None,
) -> str:
) -> Response:
"""
Send SQL JSON query to celery workers
:param session: SQLAlchemy session object
:param rendered_query: the rendered query to perform by workers
:param query: The query (SQLAlchemy) object
:return: String JSON response
:return: A Flask Response
"""
logger.info(f"Query {query.id}: Running query on a Celery worker")
# Ignore the celery future object and the request may time out.
@ -2361,13 +2361,13 @@ class Superset(BaseSupersetView):
query: Query,
expand_data: bool,
log_params: Optional[Dict[str, Any]] = None,
) -> str:
) -> Response:
"""
Execute SQL query (sql json)
:param rendered_query: The rendered query (included templates)
:param query: The query SQL (SQLAlchemy) object
:return: String JSON response
:return: A Flask Response
"""
try:
timeout = config["SQLLAB_TIMEOUT"]

View File

@ -39,7 +39,7 @@ class Datasource(BaseSupersetView):
def save(self) -> Response:
data = request.form.get("data")
if not isinstance(data, str):
return json_error_response("Request missing data field.", status="500")
return json_error_response("Request missing data field.", status=500)
datasource_dict = json.loads(data)
datasource_id = datasource_dict.get("id")
@ -66,7 +66,7 @@ class Datasource(BaseSupersetView):
]
if duplicates:
return json_error_response(
f"Duplicate column name(s): {','.join(duplicates)}", status="409"
f"Duplicate column name(s): {','.join(duplicates)}", status=409
)
orm_datasource.update_from_object(datasource_dict)
data = orm_datasource.data
@ -85,11 +85,11 @@ class Datasource(BaseSupersetView):
)
if not orm_datasource.data:
return json_error_response(
"Error fetching datasource data.", status="500"
"Error fetching datasource data.", status=500
)
return self.json_response(orm_datasource.data)
except NoResultFound:
return json_error_response("This datasource does not exist", status="400")
return json_error_response("This datasource does not exist", status=400)
@expose("/external_metadata/<datasource_type>/<datasource_id>/")
@has_access_api

View File

@ -33,6 +33,7 @@ from datetime import datetime, timedelta
from itertools import product
from typing import Any, Dict, List, Optional, Set, Tuple, TYPE_CHECKING
import dataclasses
import geohash
import numpy as np
import pandas as pd
@ -47,6 +48,7 @@ from pandas.tseries.frequencies import to_offset
from superset import app, cache, get_manifest_files, security_manager
from superset.constants import NULL_STRING
from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
from superset.exceptions import (
NullValueException,
QueryObjectValidationError,
@ -117,7 +119,7 @@ class BaseViz:
self.status: Optional[str] = None
self.error_msg = ""
self.results: Optional[QueryResult] = None
self.error_message: Optional[str] = None
self.errors: List[Dict[str, Any]] = []
self.force = force
self.from_dttm: Optional[datetime] = None
self.to_dttm: Optional[datetime] = None
@ -236,7 +238,7 @@ class BaseViz:
self.results = self.datasource.query(query_obj)
self.query = self.results.query
self.status = self.results.status
self.error_message = self.results.error_message
self.errors = self.results.errors
df = self.results.df
# Transform the timestamp we received from database to pandas supported
@ -460,8 +462,15 @@ class BaseViz:
is_loaded = True
except Exception as ex:
logger.exception(ex)
if not self.error_message:
self.error_message = "{}".format(ex)
error = dataclasses.asdict(
SupersetError(
message=str(ex),
level=ErrorLevel.ERROR,
error_type=SupersetErrorType.VIZ_GET_DF_ERROR,
)
)
self.errors.append(error)
self.status = utils.QueryStatus.FAILED
stacktrace = utils.get_stacktrace()
@ -492,7 +501,7 @@ class BaseViz:
"cached_dttm": self._any_cached_dttm,
"cache_timeout": self.cache_timeout,
"df": df,
"error": self.error_message,
"errors": self.errors,
"form_data": self.form_data,
"is_cached": self._any_cache_key is not None,
"query": self.query,
@ -512,6 +521,7 @@ class BaseViz:
has_error = (
payload.get("status") == utils.QueryStatus.FAILED
or payload.get("error") is not None
or len(payload.get("errors")) > 0
)
return self.json_dumps(payload), has_error

View File

@ -33,6 +33,7 @@ from datetime import datetime, timedelta
from itertools import product
from typing import Any, Dict, List, Optional, Set, Tuple, TYPE_CHECKING
import dataclasses
import geohash
import numpy as np
import pandas as pd
@ -47,6 +48,7 @@ from pandas.tseries.frequencies import to_offset
from superset import app, cache, get_manifest_files, security_manager
from superset.constants import NULL_STRING
from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
from superset.exceptions import (
NullValueException,
QueryObjectValidationError,
@ -161,7 +163,7 @@ class BaseViz:
self.status: Optional[str] = None
self.error_msg = ""
self.results: Optional[QueryResult] = None
self.error_message: Optional[str] = None
self.errors: List[Dict[str, Any]] = []
self.force = force
self.from_dttm: Optional[datetime] = None
self.to_dttm: Optional[datetime] = None
@ -279,7 +281,7 @@ class BaseViz:
self.results = self.datasource.query(query_obj)
self.query = self.results.query
self.status = self.results.status
self.error_message = self.results.error_message
self.errors = self.results.errors
df = self.results.df
# Transform the timestamp we received from database to pandas supported
@ -501,8 +503,14 @@ class BaseViz:
is_loaded = True
except Exception as ex:
logger.exception(ex)
if not self.error_message:
self.error_message = "{}".format(ex)
error = dataclasses.asdict(
SupersetError(
message=str(ex),
level=ErrorLevel.ERROR,
error_type=SupersetErrorType.VIZ_GET_DF_ERROR,
)
)
self.errors.append(error)
self.status = utils.QueryStatus.FAILED
stacktrace = utils.get_stacktrace()
@ -534,7 +542,7 @@ class BaseViz:
"cached_dttm": self._any_cached_dttm,
"cache_timeout": self.cache_timeout,
"df": df,
"error": self.error_message,
"errors": self.errors,
"form_data": self.form_data,
"is_cached": self._any_cache_key is not None,
"query": self.query,
@ -554,6 +562,7 @@ class BaseViz:
has_error = (
payload.get("status") == utils.QueryStatus.FAILED
or payload.get("error") is not None
or len(payload.get("errors")) > 0
)
return self.json_dumps(payload), has_error

View File

@ -933,7 +933,7 @@ class CoreTests(SupersetTestCase):
)
data = self.get_json_resp(json_endpoint, {"form_data": json.dumps(form_data)})
self.assertEqual(data["status"], utils.QueryStatus.SUCCESS)
self.assertEqual(data["error"], None)
self.assertEqual(data["errors"], [])
def test_slice_payload_invalid_query(self):
self.login(username="admin")