chore(pylint): Reenable raise-missing-from check (#16266)

Co-authored-by: John Bodley <john.bodley@airbnb.com>
This commit is contained in:
John Bodley, 2021-08-16 15:26:10 -07:00 (committed by GitHub)
parent 36bc7b0b80
commit be7065faf8
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
80 changed files with 216 additions and 211 deletions

View File

@@ -84,7 +84,6 @@ confidence=
disable= disable=
missing-docstring, missing-docstring,
too-many-lines, too-many-lines,
raise-missing-from,
duplicate-code, duplicate-code,
[REPORTS] [REPORTS]

View File

@@ -44,7 +44,7 @@ class BulkDeleteAnnotationCommand(BaseCommand):
return None return None
except DAODeleteFailedError as ex: except DAODeleteFailedError as ex:
logger.exception(ex.exception) logger.exception(ex.exception)
raise AnnotationBulkDeleteFailedError() raise AnnotationBulkDeleteFailedError() from ex
def validate(self) -> None: def validate(self) -> None:
# Validate/populate model exists # Validate/populate model exists

View File

@@ -48,7 +48,7 @@ class CreateAnnotationCommand(BaseCommand):
annotation = AnnotationDAO.create(self._properties) annotation = AnnotationDAO.create(self._properties)
except DAOCreateFailedError as ex: except DAOCreateFailedError as ex:
logger.exception(ex.exception) logger.exception(ex.exception)
raise AnnotationCreateFailedError() raise AnnotationCreateFailedError() from ex
return annotation return annotation
def validate(self) -> None: def validate(self) -> None:

View File

@@ -44,7 +44,7 @@ class DeleteAnnotationCommand(BaseCommand):
annotation = AnnotationDAO.delete(self._model) annotation = AnnotationDAO.delete(self._model)
except DAODeleteFailedError as ex: except DAODeleteFailedError as ex:
logger.exception(ex.exception) logger.exception(ex.exception)
raise AnnotationDeleteFailedError() raise AnnotationDeleteFailedError() from ex
return annotation return annotation
def validate(self) -> None: def validate(self) -> None:

View File

@@ -52,7 +52,7 @@ class UpdateAnnotationCommand(BaseCommand):
annotation = AnnotationDAO.update(self._model, self._properties) annotation = AnnotationDAO.update(self._model, self._properties)
except DAOUpdateFailedError as ex: except DAOUpdateFailedError as ex:
logger.exception(ex.exception) logger.exception(ex.exception)
raise AnnotationUpdateFailedError() raise AnnotationUpdateFailedError() from ex
return annotation return annotation
def validate(self) -> None: def validate(self) -> None:

View File

@@ -39,10 +39,10 @@ class AnnotationDAO(BaseDAO):
) )
if commit: if commit:
db.session.commit() db.session.commit()
except SQLAlchemyError: except SQLAlchemyError as ex:
if commit: if commit:
db.session.rollback() db.session.rollback()
raise DAODeleteFailedError() raise DAODeleteFailedError() from ex
@staticmethod @staticmethod
def validate_update_uniqueness( def validate_update_uniqueness(

View File

@@ -51,8 +51,8 @@ annotation_json_metadata = "JSON metadata"
def validate_json(value: Union[bytes, bytearray, str]) -> None: def validate_json(value: Union[bytes, bytearray, str]) -> None:
try: try:
utils.validate_json(value) utils.validate_json(value)
except SupersetException: except SupersetException as ex:
raise ValidationError("JSON not valid") raise ValidationError("JSON not valid") from ex
class AnnotationPostSchema(Schema): class AnnotationPostSchema(Schema):

View File

@@ -45,7 +45,7 @@ class BulkDeleteAnnotationLayerCommand(BaseCommand):
return None return None
except DAODeleteFailedError as ex: except DAODeleteFailedError as ex:
logger.exception(ex.exception) logger.exception(ex.exception)
raise AnnotationLayerBulkDeleteFailedError() raise AnnotationLayerBulkDeleteFailedError() from ex
def validate(self) -> None: def validate(self) -> None:
# Validate/populate model exists # Validate/populate model exists

View File

@@ -44,7 +44,7 @@ class CreateAnnotationLayerCommand(BaseCommand):
annotation_layer = AnnotationLayerDAO.create(self._properties) annotation_layer = AnnotationLayerDAO.create(self._properties)
except DAOCreateFailedError as ex: except DAOCreateFailedError as ex:
logger.exception(ex.exception) logger.exception(ex.exception)
raise AnnotationLayerCreateFailedError() raise AnnotationLayerCreateFailedError() from ex
return annotation_layer return annotation_layer
def validate(self) -> None: def validate(self) -> None:

View File

@@ -45,7 +45,7 @@ class DeleteAnnotationLayerCommand(BaseCommand):
annotation_layer = AnnotationLayerDAO.delete(self._model) annotation_layer = AnnotationLayerDAO.delete(self._model)
except DAODeleteFailedError as ex: except DAODeleteFailedError as ex:
logger.exception(ex.exception) logger.exception(ex.exception)
raise AnnotationLayerDeleteFailedError() raise AnnotationLayerDeleteFailedError() from ex
return annotation_layer return annotation_layer
def validate(self) -> None: def validate(self) -> None:

View File

@@ -48,7 +48,7 @@ class UpdateAnnotationLayerCommand(BaseCommand):
annotation_layer = AnnotationLayerDAO.update(self._model, self._properties) annotation_layer = AnnotationLayerDAO.update(self._model, self._properties)
except DAOUpdateFailedError as ex: except DAOUpdateFailedError as ex:
logger.exception(ex.exception) logger.exception(ex.exception)
raise AnnotationLayerUpdateFailedError() raise AnnotationLayerUpdateFailedError() from ex
return annotation_layer return annotation_layer
def validate(self) -> None: def validate(self) -> None:

View File

@@ -41,10 +41,10 @@ class AnnotationLayerDAO(BaseDAO):
).delete(synchronize_session="fetch") ).delete(synchronize_session="fetch")
if commit: if commit:
db.session.commit() db.session.commit()
except SQLAlchemyError: except SQLAlchemyError as ex:
if commit: if commit:
db.session.rollback() db.session.rollback()
raise DAODeleteFailedError() raise DAODeleteFailedError() from ex
@staticmethod @staticmethod
def has_annotations(model_id: Union[int, List[int]]) -> bool: def has_annotations(model_id: Union[int, List[int]]) -> bool:

View File

@@ -49,7 +49,7 @@ class BulkDeleteChartCommand(BaseCommand):
ChartDAO.bulk_delete(self._models) ChartDAO.bulk_delete(self._models)
except DeleteFailedError as ex: except DeleteFailedError as ex:
logger.exception(ex.exception) logger.exception(ex.exception)
raise ChartBulkDeleteFailedError() raise ChartBulkDeleteFailedError() from ex
def validate(self) -> None: def validate(self) -> None:
# Validate/populate model exists # Validate/populate model exists
@@ -67,5 +67,5 @@ class BulkDeleteChartCommand(BaseCommand):
for model in self._models: for model in self._models:
try: try:
check_ownership(model) check_ownership(model)
except SupersetSecurityException: except SupersetSecurityException as ex:
raise ChartForbiddenError() raise ChartForbiddenError() from ex

View File

@@ -49,7 +49,7 @@ class CreateChartCommand(CreateMixin, BaseCommand):
chart = ChartDAO.create(self._properties) chart = ChartDAO.create(self._properties)
except DAOCreateFailedError as ex: except DAOCreateFailedError as ex:
logger.exception(ex.exception) logger.exception(ex.exception)
raise ChartCreateFailedError() raise ChartCreateFailedError() from ex
return chart return chart
def validate(self) -> None: def validate(self) -> None:

View File

@@ -50,8 +50,8 @@ class ChartDataCommand(BaseCommand):
payload = self._query_context.get_payload( payload = self._query_context.get_payload(
cache_query_context=cache_query_context, force_cached=force_cached cache_query_context=cache_query_context, force_cached=force_cached
) )
except CacheLoadError as exc: except CacheLoadError as ex:
raise ChartDataCacheLoadError(exc.message) raise ChartDataCacheLoadError(ex.message) from ex
# TODO: QueryContext should support SIP-40 style errors # TODO: QueryContext should support SIP-40 style errors
for query in payload["queries"]: for query in payload["queries"]:
@@ -77,8 +77,8 @@ class ChartDataCommand(BaseCommand):
self._form_data = form_data self._form_data = form_data
try: try:
self._query_context = ChartDataQueryContextSchema().load(self._form_data) self._query_context = ChartDataQueryContextSchema().load(self._form_data)
except KeyError: except KeyError as ex:
raise ValidationError("Request is incorrect") raise ValidationError("Request is incorrect") from ex
except ValidationError as error: except ValidationError as error:
raise error raise error

View File

@@ -52,7 +52,7 @@ class DeleteChartCommand(BaseCommand):
chart = ChartDAO.delete(self._model) chart = ChartDAO.delete(self._model)
except DAODeleteFailedError as ex: except DAODeleteFailedError as ex:
logger.exception(ex.exception) logger.exception(ex.exception)
raise ChartDeleteFailedError() raise ChartDeleteFailedError() from ex
return chart return chart
def validate(self) -> None: def validate(self) -> None:
@@ -70,5 +70,5 @@ class DeleteChartCommand(BaseCommand):
# Check ownership # Check ownership
try: try:
check_ownership(self._model) check_ownership(self._model)
except SupersetSecurityException: except SupersetSecurityException as ex:
raise ChartForbiddenError() raise ChartForbiddenError() from ex

View File

@@ -64,7 +64,7 @@ class UpdateChartCommand(UpdateMixin, BaseCommand):
chart = ChartDAO.update(self._model, self._properties) chart = ChartDAO.update(self._model, self._properties)
except DAOUpdateFailedError as ex: except DAOUpdateFailedError as ex:
logger.exception(ex.exception) logger.exception(ex.exception)
raise ChartUpdateFailedError() raise ChartUpdateFailedError() from ex
return chart return chart
def validate(self) -> None: def validate(self) -> None:
@@ -89,8 +89,8 @@ class UpdateChartCommand(UpdateMixin, BaseCommand):
if not is_query_context_update(self._properties): if not is_query_context_update(self._properties):
try: try:
check_ownership(self._model) check_ownership(self._model)
except SupersetSecurityException: except SupersetSecurityException as ex:
raise ChartForbiddenError() raise ChartForbiddenError() from ex
# Validate/Populate datasource # Validate/Populate datasource
if datasource_id is not None: if datasource_id is not None:

View File

@@ -66,9 +66,9 @@ class ImportModelsCommand(BaseCommand):
try: try:
self._import(db.session, self._configs, self.overwrite) self._import(db.session, self._configs, self.overwrite)
db.session.commit() db.session.commit()
except Exception: except Exception as ex:
db.session.rollback() db.session.rollback()
raise self.import_error() raise self.import_error() from ex
# pylint: disable=too-many-branches # pylint: disable=too-many-branches
def validate(self) -> None: def validate(self) -> None:

View File

@@ -72,9 +72,9 @@ class ImportExamplesCommand(ImportModelsCommand):
try: try:
self._import(db.session, self._configs, self.overwrite, self.force_data) self._import(db.session, self._configs, self.overwrite, self.force_data)
db.session.commit() db.session.commit()
except Exception: except Exception as ex:
db.session.rollback() db.session.rollback()
raise self.import_error() raise self.import_error() from ex
@classmethod @classmethod
def _get_uuids(cls) -> Set[str]: def _get_uuids(cls) -> Set[str]:

View File

@@ -47,9 +47,9 @@ def load_yaml(file_name: str, content: str) -> Dict[str, Any]:
"""Try to load a YAML file""" """Try to load a YAML file"""
try: try:
return yaml.safe_load(content) return yaml.safe_load(content)
except yaml.parser.ParserError: except yaml.parser.ParserError as ex:
logger.exception("Invalid YAML in %s", file_name) logger.exception("Invalid YAML in %s", file_name)
raise ValidationError({file_name: "Not a valid YAML file"}) raise ValidationError({file_name: "Not a valid YAML file"}) from ex
def load_metadata(contents: Dict[str, str]) -> Dict[str, str]: def load_metadata(contents: Dict[str, str]) -> Dict[str, str]:
@@ -63,15 +63,15 @@ def load_metadata(contents: Dict[str, str]) -> Dict[str, str]:
metadata = load_yaml(METADATA_FILE_NAME, contents[METADATA_FILE_NAME]) metadata = load_yaml(METADATA_FILE_NAME, contents[METADATA_FILE_NAME])
try: try:
MetadataSchema().load(metadata) MetadataSchema().load(metadata)
except ValidationError as exc: except ValidationError as ex:
# if the version doesn't match raise an exception so that the # if the version doesn't match raise an exception so that the
# dispatcher can try a different command version # dispatcher can try a different command version
if "version" in exc.messages: if "version" in ex.messages:
raise IncorrectVersionError(exc.messages["version"][0]) raise IncorrectVersionError(ex.messages["version"][0]) from ex
# otherwise we raise the validation error # otherwise we raise the validation error
exc.messages = {METADATA_FILE_NAME: exc.messages} ex.messages = {METADATA_FILE_NAME: ex.messages}
raise exc raise ex
return metadata return metadata

View File

@@ -76,5 +76,5 @@ def get_datasource_by_id(datasource_id: int, datasource_type: str) -> BaseDataso
return ConnectorRegistry.get_datasource( return ConnectorRegistry.get_datasource(
datasource_type, datasource_id, db.session datasource_type, datasource_id, db.session
) )
except DatasetNotFoundError: except DatasetNotFoundError as ex:
raise DatasourceNotFoundValidationError() raise DatasourceNotFoundValidationError() from ex

View File

@@ -138,7 +138,7 @@ class QueryContext:
) )
query_object_clone.to_dttm = get_past_or_future(offset, outer_to_dttm) query_object_clone.to_dttm = get_past_or_future(offset, outer_to_dttm)
except ValueError as ex: except ValueError as ex:
raise QueryObjectValidationError(str(ex)) raise QueryObjectValidationError(str(ex)) from ex
# make sure subquery use main query where clause # make sure subquery use main query where clause
query_object_clone.inner_from_dttm = outer_from_dttm query_object_clone.inner_from_dttm = outer_from_dttm
query_object_clone.inner_to_dttm = outer_to_dttm query_object_clone.inner_to_dttm = outer_to_dttm
@@ -417,7 +417,7 @@ class QueryContext:
payload = viz_obj.get_payload() payload = viz_obj.get_payload()
return payload["data"] return payload["data"]
except SupersetException as ex: except SupersetException as ex:
raise QueryObjectValidationError(error_msg_from_exception(ex)) raise QueryObjectValidationError(error_msg_from_exception(ex)) from ex
def get_annotation_data(self, query_obj: QueryObject) -> Dict[str, Any]: def get_annotation_data(self, query_obj: QueryObject) -> Dict[str, Any]:
""" """

View File

@@ -127,7 +127,7 @@ class DruidColumnInlineView(CompactCRUDMixin, EnsureEnabledMixin, SupersetModelV
try: try:
dimension_spec = json.loads(item.dimension_spec_json) dimension_spec = json.loads(item.dimension_spec_json)
except ValueError as ex: except ValueError as ex:
raise ValueError("Invalid Dimension Spec JSON: " + str(ex)) raise ValueError("Invalid Dimension Spec JSON: " + str(ex)) from ex
if not isinstance(dimension_spec, dict): if not isinstance(dimension_spec, dict):
raise ValueError("Dimension Spec must be a JSON object") raise ValueError("Dimension Spec must be a JSON object")
if "outputName" not in dimension_spec: if "outputName" not in dimension_spec:

View File

@@ -719,7 +719,7 @@ class SqlaTable( # pylint: disable=too-many-public-methods,too-many-instance-at
"Error in jinja expression in fetch values predicate: %(msg)s", "Error in jinja expression in fetch values predicate: %(msg)s",
msg=ex.message, msg=ex.message,
) )
) ) from ex
def values_for_column(self, column_name: str, limit: int = 10000) -> List[Any]: def values_for_column(self, column_name: str, limit: int = 10000) -> List[Any]:
"""Runs query against sqla to retrieve some """Runs query against sqla to retrieve some
@@ -818,7 +818,7 @@ class SqlaTable( # pylint: disable=too-many-public-methods,too-many-instance-at
"Error while rendering virtual dataset query: %(msg)s", "Error while rendering virtual dataset query: %(msg)s",
msg=ex.message, msg=ex.message,
) )
) ) from ex
sql = sqlparse.format(sql.strip("\t\r\n; "), strip_comments=True) sql = sqlparse.format(sql.strip("\t\r\n; "), strip_comments=True)
if not sql: if not sql:
raise QueryObjectValidationError(_("Virtual dataset query cannot be empty")) raise QueryObjectValidationError(_("Virtual dataset query cannot be empty"))
@@ -929,7 +929,7 @@ class SqlaTable( # pylint: disable=too-many-public-methods,too-many-instance-at
except TemplateError as ex: except TemplateError as ex:
raise QueryObjectValidationError( raise QueryObjectValidationError(
_("Error in jinja expression in RLS filters: %(msg)s", msg=ex.message,) _("Error in jinja expression in RLS filters: %(msg)s", msg=ex.message,)
) ) from ex
def get_sqla_query( # pylint: disable=too-many-arguments,too-many-locals,too-many-branches,too-many-statements def get_sqla_query( # pylint: disable=too-many-arguments,too-many-locals,too-many-branches,too-many-statements
self, self,
@@ -1252,7 +1252,7 @@ class SqlaTable( # pylint: disable=too-many-public-methods,too-many-instance-at
"Error in jinja expression in WHERE clause: %(msg)s", "Error in jinja expression in WHERE clause: %(msg)s",
msg=ex.message, msg=ex.message,
) )
) ) from ex
where_clause_and += [sa.text("({})".format(where))] where_clause_and += [sa.text("({})".format(where))]
having = extras.get("having") having = extras.get("having")
if having: if having:
@@ -1264,7 +1264,7 @@ class SqlaTable( # pylint: disable=too-many-public-methods,too-many-instance-at
"Error in jinja expression in HAVING clause: %(msg)s", "Error in jinja expression in HAVING clause: %(msg)s",
msg=ex.message, msg=ex.message,
) )
) ) from ex
having_clause_and += [sa.text("({})".format(having))] having_clause_and += [sa.text("({})".format(having))]
if apply_fetch_values_predicate and self.fetch_values_predicate: if apply_fetch_values_predicate and self.fetch_values_predicate:
qry = qry.where(self.get_fetch_values_predicate()) qry = qry.where(self.get_fetch_values_predicate())

View File

@@ -110,6 +110,6 @@ def get_virtual_table_metadata(dataset: "SqlaTable") -> List[Dict[str, str]]:
result = db_engine_spec.fetch_data(cursor, limit=1) result = db_engine_spec.fetch_data(cursor, limit=1)
result_set = SupersetResultSet(result, cursor.description, db_engine_spec) result_set = SupersetResultSet(result, cursor.description, db_engine_spec)
cols = result_set.columns cols = result_set.columns
except Exception as exc: except Exception as ex:
raise SupersetGenericDBErrorException(message=str(exc)) raise SupersetGenericDBErrorException(message=str(ex)) from ex
return cols return cols

View File

@@ -44,7 +44,7 @@ class BulkDeleteCssTemplateCommand(BaseCommand):
return None return None
except DAODeleteFailedError as ex: except DAODeleteFailedError as ex:
logger.exception(ex.exception) logger.exception(ex.exception)
raise CssTemplateBulkDeleteFailedError() raise CssTemplateBulkDeleteFailedError() from ex
def validate(self) -> None: def validate(self) -> None:
# Validate/populate model exists # Validate/populate model exists

View File

@@ -39,7 +39,7 @@ class CssTemplateDAO(BaseDAO):
) )
if commit: if commit:
db.session.commit() db.session.commit()
except SQLAlchemyError: except SQLAlchemyError as ex:
if commit: if commit:
db.session.rollback() db.session.rollback()
raise DAODeleteFailedError() raise DAODeleteFailedError() from ex

View File

@@ -106,7 +106,7 @@ class BaseDAO:
db.session.commit() db.session.commit()
except SQLAlchemyError as ex: # pragma: no cover except SQLAlchemyError as ex: # pragma: no cover
db.session.rollback() db.session.rollback()
raise DAOCreateFailedError(exception=ex) raise DAOCreateFailedError(exception=ex) from ex
return model return model
@classmethod @classmethod
@@ -125,7 +125,7 @@ class BaseDAO:
db.session.commit() db.session.commit()
except SQLAlchemyError as ex: # pragma: no cover except SQLAlchemyError as ex: # pragma: no cover
db.session.rollback() db.session.rollback()
raise DAOUpdateFailedError(exception=ex) raise DAOUpdateFailedError(exception=ex) from ex
return model return model
@classmethod @classmethod
@@ -140,5 +140,5 @@ class BaseDAO:
db.session.commit() db.session.commit()
except SQLAlchemyError as ex: # pragma: no cover except SQLAlchemyError as ex: # pragma: no cover
db.session.rollback() db.session.rollback()
raise DAODeleteFailedError(exception=ex) raise DAODeleteFailedError(exception=ex) from ex
return model return model

View File

@@ -50,7 +50,7 @@ class BulkDeleteDashboardCommand(BaseCommand):
return None return None
except DeleteFailedError as ex: except DeleteFailedError as ex:
logger.exception(ex.exception) logger.exception(ex.exception)
raise DashboardBulkDeleteFailedError() raise DashboardBulkDeleteFailedError() from ex
def validate(self) -> None: def validate(self) -> None:
# Validate/populate model exists # Validate/populate model exists
@@ -68,5 +68,5 @@ class BulkDeleteDashboardCommand(BaseCommand):
for model in self._models: for model in self._models:
try: try:
check_ownership(model) check_ownership(model)
except SupersetSecurityException: except SupersetSecurityException as ex:
raise DashboardForbiddenError() raise DashboardForbiddenError() from ex

View File

@@ -46,7 +46,7 @@ class CreateDashboardCommand(CreateMixin, BaseCommand):
dashboard = DashboardDAO.update_charts_owners(dashboard, commit=True) dashboard = DashboardDAO.update_charts_owners(dashboard, commit=True)
except DAOCreateFailedError as ex: except DAOCreateFailedError as ex:
logger.exception(ex.exception) logger.exception(ex.exception)
raise DashboardCreateFailedError() raise DashboardCreateFailedError() from ex
return dashboard return dashboard
def validate(self) -> None: def validate(self) -> None:

View File

@@ -50,7 +50,7 @@ class DeleteDashboardCommand(BaseCommand):
dashboard = DashboardDAO.delete(self._model) dashboard = DashboardDAO.delete(self._model)
except DAODeleteFailedError as ex: except DAODeleteFailedError as ex:
logger.exception(ex.exception) logger.exception(ex.exception)
raise DashboardDeleteFailedError() raise DashboardDeleteFailedError() from ex
return dashboard return dashboard
def validate(self) -> None: def validate(self) -> None:
@@ -68,5 +68,5 @@ class DeleteDashboardCommand(BaseCommand):
# Check ownership # Check ownership
try: try:
check_ownership(self._model) check_ownership(self._model)
except SupersetSecurityException: except SupersetSecurityException as ex:
raise DashboardForbiddenError() raise DashboardForbiddenError() from ex

View File

@@ -53,7 +53,7 @@ class UpdateDashboardCommand(UpdateMixin, BaseCommand):
dashboard = DashboardDAO.update_charts_owners(dashboard, commit=True) dashboard = DashboardDAO.update_charts_owners(dashboard, commit=True)
except DAOUpdateFailedError as ex: except DAOUpdateFailedError as ex:
logger.exception(ex.exception) logger.exception(ex.exception)
raise DashboardUpdateFailedError() raise DashboardUpdateFailedError() from ex
return dashboard return dashboard
def validate(self) -> None: def validate(self) -> None:
@@ -69,8 +69,8 @@ class UpdateDashboardCommand(UpdateMixin, BaseCommand):
# Check ownership # Check ownership
try: try:
check_ownership(self._model) check_ownership(self._model)
except SupersetSecurityException: except SupersetSecurityException as ex:
raise DashboardForbiddenError() raise DashboardForbiddenError() from ex
# Validate slug uniqueness # Validate slug uniqueness
if not DashboardDAO.validate_update_slug_uniqueness(self._model_id, slug): if not DashboardDAO.validate_update_slug_uniqueness(self._model_id, slug):

View File

@@ -89,8 +89,8 @@ openapi_spec_methods_override = {
def validate_json(value: Union[bytes, bytearray, str]) -> None: def validate_json(value: Union[bytes, bytearray, str]) -> None:
try: try:
utils.validate_json(value) utils.validate_json(value)
except SupersetException: except SupersetException as ex:
raise ValidationError("JSON not valid") raise ValidationError("JSON not valid") from ex
def validate_json_metadata(value: Union[bytes, bytearray, str]) -> None: def validate_json_metadata(value: Union[bytes, bytearray, str]) -> None:
@@ -98,8 +98,8 @@ def validate_json_metadata(value: Union[bytes, bytearray, str]) -> None:
return return
try: try:
value_obj = json.loads(value) value_obj = json.loads(value)
except json.decoder.JSONDecodeError: except json.decoder.JSONDecodeError as ex:
raise ValidationError("JSON not valid") raise ValidationError("JSON not valid") from ex
errors = DashboardJSONMetadataSchema().validate(value_obj, partial=False) errors = DashboardJSONMetadataSchema().validate(value_obj, partial=False)
if errors: if errors:
raise ValidationError(errors) raise ValidationError(errors)

View File

@@ -1014,7 +1014,7 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
try: try:
payload = DatabaseValidateParametersSchema().load(request.json) payload = DatabaseValidateParametersSchema().load(request.json)
except ValidationError as error: except ValidationError as ex:
errors = [ errors = [
SupersetError( SupersetError(
message="\n".join(messages), message="\n".join(messages),
@@ -1022,9 +1022,9 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
level=ErrorLevel.ERROR, level=ErrorLevel.ERROR,
extra={"invalid": [attribute]}, extra={"invalid": [attribute]},
) )
for attribute, messages in error.messages.items() for attribute, messages in ex.messages.items()
] ]
raise InvalidParametersError(errors) raise InvalidParametersError(errors) from ex
command = ValidateDatabaseParametersCommand(g.user, payload) command = ValidateDatabaseParametersCommand(g.user, payload)
command.run() command.run()

View File

@@ -53,7 +53,7 @@ class CreateDatabaseCommand(BaseCommand):
action=f"db_creation_failed.{ex.__class__.__name__}", action=f"db_creation_failed.{ex.__class__.__name__}",
engine=self._properties.get("sqlalchemy_uri", "").split(":")[0], engine=self._properties.get("sqlalchemy_uri", "").split(":")[0],
) )
raise DatabaseConnectionFailedError() raise DatabaseConnectionFailedError() from ex
try: try:
database = DatabaseDAO.create(self._properties, commit=False) database = DatabaseDAO.create(self._properties, commit=False)
@@ -73,7 +73,7 @@ class CreateDatabaseCommand(BaseCommand):
action=f"db_creation_failed.{ex.__class__.__name__}", action=f"db_creation_failed.{ex.__class__.__name__}",
engine=database.db_engine_spec.__name__, engine=database.db_engine_spec.__name__,
) )
raise DatabaseCreateFailedError() raise DatabaseCreateFailedError() from ex
return database return database
def validate(self) -> None: def validate(self) -> None:

View File

@@ -48,7 +48,7 @@ class DeleteDatabaseCommand(BaseCommand):
database = DatabaseDAO.delete(self._model) database = DatabaseDAO.delete(self._model)
except DAODeleteFailedError as ex: except DAODeleteFailedError as ex:
logger.exception(ex.exception) logger.exception(ex.exception)
raise DatabaseDeleteFailedError() raise DatabaseDeleteFailedError() from ex
return database return database
def validate(self) -> None: def validate(self) -> None:

View File

@@ -95,7 +95,7 @@ class TestConnectionDatabaseCommand(BaseCommand):
message=_("Could not load database driver: {}").format( message=_("Could not load database driver: {}").format(
database.db_engine_spec.__name__ database.db_engine_spec.__name__
), ),
) ) from ex
except DBAPIError as ex: except DBAPIError as ex:
event_logger.log_with_context( event_logger.log_with_context(
action=f"test_connection_error.{ex.__class__.__name__}", action=f"test_connection_error.{ex.__class__.__name__}",
@@ -103,20 +103,20 @@ class TestConnectionDatabaseCommand(BaseCommand):
) )
# check for custom errors (wrong username, wrong password, etc) # check for custom errors (wrong username, wrong password, etc)
errors = database.db_engine_spec.extract_errors(ex, context) errors = database.db_engine_spec.extract_errors(ex, context)
raise DatabaseTestConnectionFailedError(errors) raise DatabaseTestConnectionFailedError(errors) from ex
except SupersetSecurityException as ex: except SupersetSecurityException as ex:
event_logger.log_with_context( event_logger.log_with_context(
action=f"test_connection_error.{ex.__class__.__name__}", action=f"test_connection_error.{ex.__class__.__name__}",
engine=database.db_engine_spec.__name__, engine=database.db_engine_spec.__name__,
) )
raise DatabaseSecurityUnsafeError(message=str(ex)) raise DatabaseSecurityUnsafeError(message=str(ex)) from ex
except Exception as ex: # pylint: disable=broad-except except Exception as ex: # pylint: disable=broad-except
event_logger.log_with_context( event_logger.log_with_context(
action=f"test_connection_error.{ex.__class__.__name__}", action=f"test_connection_error.{ex.__class__.__name__}",
engine=database.db_engine_spec.__name__, engine=database.db_engine_spec.__name__,
) )
errors = database.db_engine_spec.extract_errors(ex, context) errors = database.db_engine_spec.extract_errors(ex, context)
raise DatabaseTestConnectionUnexpectedError(errors) raise DatabaseTestConnectionUnexpectedError(errors) from ex
def validate(self) -> None: def validate(self) -> None:
database_name = self._properties.get("database_name") database_name = self._properties.get("database_name")

View File

@@ -56,7 +56,7 @@ class UpdateDatabaseCommand(BaseCommand):
schemas = database.get_all_schema_names() schemas = database.get_all_schema_names()
except Exception as ex: except Exception as ex:
db.session.rollback() db.session.rollback()
raise DatabaseConnectionFailedError() raise DatabaseConnectionFailedError() from ex
for schema in schemas: for schema in schemas:
security_manager.add_permission_view_menu( security_manager.add_permission_view_menu(
"schema_access", security_manager.get_schema_perm(database, schema) "schema_access", security_manager.get_schema_perm(database, schema)
@@ -65,7 +65,7 @@ 
except DAOUpdateFailedError as ex: except DAOUpdateFailedError as ex:
logger.exception(ex.exception) logger.exception(ex.exception)
raise DatabaseUpdateFailedError() raise DatabaseUpdateFailedError() from ex
return database return database
def validate(self) -> None: def validate(self) -> None:

View File

@@ -126,7 +126,7 @@ class ValidateDatabaseParametersCommand(BaseCommand):
"database": url.database, "database": url.database,
} }
errors = database.db_engine_spec.extract_errors(ex, context) errors = database.db_engine_spec.extract_errors(ex, context)
raise DatabaseTestConnectionFailedError(errors) raise DatabaseTestConnectionFailedError(errors) from ex
if not alive: if not alive:
raise DatabaseOfflineError( raise DatabaseOfflineError(

View File

@@ -142,7 +142,7 @@ def sqlalchemy_uri_validator(value: str) -> str:
""" """
try: try:
uri = make_url(value.strip()) uri = make_url(value.strip())
except (ArgumentError, AttributeError, ValueError): except (ArgumentError, AttributeError, ValueError) as ex:
raise ValidationError( raise ValidationError(
[ [
_( _(
@@ -150,12 +150,12 @@ def sqlalchemy_uri_validator(value: str) -> str:
"driver://user:password@database-host/database-name" "driver://user:password@database-host/database-name"
) )
] ]
) ) from ex
if current_app.config.get("PREVENT_UNSAFE_DB_CONNECTIONS", True): if current_app.config.get("PREVENT_UNSAFE_DB_CONNECTIONS", True):
try: try:
check_sqlalchemy_uri(uri) check_sqlalchemy_uri(uri)
except SupersetSecurityException as ex: except SupersetSecurityException as ex:
raise ValidationError([str(ex)]) raise ValidationError([str(ex)]) from ex
return value return value
@@ -166,8 +166,8 @@ def server_cert_validator(value: str) -> str:
if value: if value:
try: try:
parse_ssl_cert(value) parse_ssl_cert(value)
except CertificateException: except CertificateException as ex:
raise ValidationError([_("Invalid certificate")]) raise ValidationError([_("Invalid certificate")]) from ex
return value return value
@@ -181,7 +181,7 @@ def encrypted_extra_validator(value: str) -> str:
except json.JSONDecodeError as ex: except json.JSONDecodeError as ex:
raise ValidationError( raise ValidationError(
[_("Field cannot be decoded by JSON. %(msg)s", msg=str(ex))] [_("Field cannot be decoded by JSON. %(msg)s", msg=str(ex))]
) ) from ex
return value return value
@ -196,7 +196,7 @@ def extra_validator(value: str) -> str:
except json.JSONDecodeError as ex: except json.JSONDecodeError as ex:
raise ValidationError( raise ValidationError(
[_("Field cannot be decoded by JSON. %(msg)s", msg=str(ex))] [_("Field cannot be decoded by JSON. %(msg)s", msg=str(ex))]
) ) from ex
else: else:
metadata_signature = inspect.signature(MetaData) metadata_signature = inspect.signature(MetaData)
for key in extra_.get("metadata_params", {}): for key in extra_.get("metadata_params", {}):

View File

@ -51,7 +51,7 @@ class DeleteDatasetColumnCommand(BaseCommand):
return column return column
except DAODeleteFailedError as ex: except DAODeleteFailedError as ex:
logger.exception(ex.exception) logger.exception(ex.exception)
raise DatasetColumnDeleteFailedError() raise DatasetColumnDeleteFailedError() from ex
def validate(self) -> None: def validate(self) -> None:
# Validate/populate model exists # Validate/populate model exists
@ -61,5 +61,5 @@ class DeleteDatasetColumnCommand(BaseCommand):
# Check ownership # Check ownership
try: try:
check_ownership(self._model) check_ownership(self._model)
except SupersetSecurityException: except SupersetSecurityException as ex:
raise DatasetColumnForbiddenError() raise DatasetColumnForbiddenError() from ex

View File

@ -74,7 +74,7 @@ class BulkDeleteDatasetCommand(BaseCommand):
return None return None
except DeleteFailedError as ex: except DeleteFailedError as ex:
logger.exception(ex.exception) logger.exception(ex.exception)
raise DatasetBulkDeleteFailedError() raise DatasetBulkDeleteFailedError() from ex
def validate(self) -> None: def validate(self) -> None:
# Validate/populate model exists # Validate/populate model exists
@ -85,5 +85,5 @@ class BulkDeleteDatasetCommand(BaseCommand):
for model in self._models: for model in self._models:
try: try:
check_ownership(model) check_ownership(model)
except SupersetSecurityException: except SupersetSecurityException as ex:
raise DatasetForbiddenError() raise DatasetForbiddenError() from ex

View File

@ -62,7 +62,7 @@ class CreateDatasetCommand(CreateMixin, BaseCommand):
except (SQLAlchemyError, DAOCreateFailedError) as ex: except (SQLAlchemyError, DAOCreateFailedError) as ex:
logger.warning(ex, exc_info=True) logger.warning(ex, exc_info=True)
db.session.rollback() db.session.rollback()
raise DatasetCreateFailedError() raise DatasetCreateFailedError() from ex
return dataset return dataset
def validate(self) -> None: def validate(self) -> None:

View File

@ -75,7 +75,7 @@ class DeleteDatasetCommand(BaseCommand):
except (SQLAlchemyError, DAODeleteFailedError) as ex: except (SQLAlchemyError, DAODeleteFailedError) as ex:
logger.exception(ex) logger.exception(ex)
db.session.rollback() db.session.rollback()
raise DatasetDeleteFailedError() raise DatasetDeleteFailedError() from ex
return dataset return dataset
def validate(self) -> None: def validate(self) -> None:
@ -86,5 +86,5 @@ class DeleteDatasetCommand(BaseCommand):
# Check ownership # Check ownership
try: try:
check_ownership(self._model) check_ownership(self._model)
except SupersetSecurityException: except SupersetSecurityException as ex:
raise DatasetForbiddenError() raise DatasetForbiddenError() from ex

View File

@ -322,9 +322,11 @@ class ImportDatasetsCommand(BaseCommand):
for file_name, content in self.contents.items(): for file_name, content in self.contents.items():
try: try:
config = yaml.safe_load(content) config = yaml.safe_load(content)
except yaml.parser.ParserError: except yaml.parser.ParserError as ex:
logger.exception("Invalid YAML file") logger.exception("Invalid YAML file")
raise IncorrectVersionError(f"{file_name} is not a valid YAML file") raise IncorrectVersionError(
f"{file_name} is not a valid YAML file"
) from ex
# CLI export # CLI export
if isinstance(config, dict): if isinstance(config, dict):

View File

@ -48,7 +48,7 @@ class RefreshDatasetCommand(BaseCommand):
return self._model return self._model
except Exception as ex: except Exception as ex:
logger.exception(ex) logger.exception(ex)
raise DatasetRefreshFailedError() raise DatasetRefreshFailedError() from ex
raise DatasetRefreshFailedError() raise DatasetRefreshFailedError()
def validate(self) -> None: def validate(self) -> None:
@ -59,5 +59,5 @@ class RefreshDatasetCommand(BaseCommand):
# Check ownership # Check ownership
try: try:
check_ownership(self._model) check_ownership(self._model)
except SupersetSecurityException: except SupersetSecurityException as ex:
raise DatasetForbiddenError() raise DatasetForbiddenError() from ex

View File

@ -72,7 +72,7 @@ class UpdateDatasetCommand(UpdateMixin, BaseCommand):
return dataset return dataset
except DAOUpdateFailedError as ex: except DAOUpdateFailedError as ex:
logger.exception(ex.exception) logger.exception(ex.exception)
raise DatasetUpdateFailedError() raise DatasetUpdateFailedError() from ex
raise DatasetUpdateFailedError() raise DatasetUpdateFailedError()
def validate(self) -> None: def validate(self) -> None:
@ -85,8 +85,8 @@ class UpdateDatasetCommand(UpdateMixin, BaseCommand):
# Check ownership # Check ownership
try: try:
check_ownership(self._model) check_ownership(self._model)
except SupersetSecurityException: except SupersetSecurityException as ex:
raise DatasetForbiddenError() raise DatasetForbiddenError() from ex
database_id = self._properties.get("database", None) database_id = self._properties.get("database", None)
table_name = self._properties.get("table_name", None) table_name = self._properties.get("table_name", None)

View File

@ -51,7 +51,7 @@ class DeleteDatasetMetricCommand(BaseCommand):
return column return column
except DAODeleteFailedError as ex: except DAODeleteFailedError as ex:
logger.exception(ex.exception) logger.exception(ex.exception)
raise DatasetMetricDeleteFailedError() raise DatasetMetricDeleteFailedError() from ex
def validate(self) -> None: def validate(self) -> None:
# Validate/populate model exists # Validate/populate model exists
@ -61,5 +61,5 @@ class DeleteDatasetMetricCommand(BaseCommand):
# Check ownership # Check ownership
try: try:
check_ownership(self._model) check_ownership(self._model)
except SupersetSecurityException: except SupersetSecurityException as ex:
raise DatasetMetricForbiddenError() raise DatasetMetricForbiddenError() from ex

View File

@ -327,12 +327,12 @@ class BigQueryEngineSpec(BaseEngineSpec):
# pylint: disable=import-outside-toplevel # pylint: disable=import-outside-toplevel
import pandas_gbq import pandas_gbq
from google.oauth2 import service_account from google.oauth2 import service_account
except ImportError: except ImportError as ex:
raise Exception( raise Exception(
"Could not import libraries `pandas_gbq` or `google.oauth2`, which are " "Could not import libraries `pandas_gbq` or `google.oauth2`, which are "
"required to be installed in your environment in order " "required to be installed in your environment in order "
"to upload data to BigQuery" "to upload data to BigQuery"
) ) from ex
if not table.schema: if not table.schema:
raise Exception("The table schema must be defined") raise Exception("The table schema must be defined")

View File

@ -79,8 +79,8 @@ class DruidEngineSpec(BaseEngineSpec): # pylint: disable=abstract-method
""" """
try: try:
extra = json.loads(database.extra or "{}") extra = json.loads(database.extra or "{}")
except json.JSONDecodeError: except json.JSONDecodeError as ex:
raise SupersetException("Unable to parse database extras") raise SupersetException("Unable to parse database extras") from ex
if database.server_cert: if database.server_cert:
engine_params = extra.get("engine_params", {}) engine_params = extra.get("engine_params", {})

View File

@ -262,8 +262,8 @@ class PostgresEngineSpec(PostgresBaseEngineSpec, BasicParametersMixin):
""" """
try: try:
extra = json.loads(database.extra or "{}") extra = json.loads(database.extra or "{}")
except json.JSONDecodeError: except json.JSONDecodeError as ex:
raise SupersetException("Unable to parse database extras") raise SupersetException("Unable to parse database extras") from ex
if database.server_cert: if database.server_cert:
engine_params = extra.get("engine_params", {}) engine_params = extra.get("engine_params", {})

View File

@ -46,9 +46,9 @@ def fetch_logs(
logs = self._connection.client.GetLog(req).log logs = self._connection.client.GetLog(req).log
return logs return logs
# raised if Hive is used # raised if Hive is used
except (ttypes.TApplicationException, Thrift.TApplicationException): except (ttypes.TApplicationException, Thrift.TApplicationException) as ex:
if self._state == self._STATE_NONE: if self._state == self._STATE_NONE:
raise hive.ProgrammingError("No query yet") raise hive.ProgrammingError("No query yet") from ex
logs = [] logs = []
while True: while True:
req = ttypes.TFetchResultsReq( req = ttypes.TFetchResultsReq(

View File

@ -333,10 +333,10 @@ def safe_proxy(func: Callable[..., Any], *args: Any, **kwargs: Any) -> Any:
if value_type in COLLECTION_TYPES: if value_type in COLLECTION_TYPES:
try: try:
return_value = json.loads(json.dumps(return_value)) return_value = json.loads(json.dumps(return_value))
except TypeError: except TypeError as ex:
raise SupersetTemplateException( raise SupersetTemplateException(
_("Unsupported return value for method %(name)s", name=func.__name__,) _("Unsupported return value for method %(name)s", name=func.__name__,)
) ) from ex
return return_value return return_value
@ -357,10 +357,10 @@ def validate_context_types(context: Dict[str, Any]) -> Dict[str, Any]:
if arg_type in COLLECTION_TYPES: if arg_type in COLLECTION_TYPES:
try: try:
context[key] = json.loads(json.dumps(context[key])) context[key] = json.loads(json.dumps(context[key]))
except TypeError: except TypeError as ex:
raise SupersetTemplateException( raise SupersetTemplateException(
_("Unsupported template value for key %(key)s", key=key) _("Unsupported template value for key %(key)s", key=key)
) ) from ex
return context return context

View File

@ -106,8 +106,8 @@ def get_object_type(class_name: str) -> ObjectTypes:
} }
try: try:
return mapping[class_name.lower()] return mapping[class_name.lower()]
except KeyError: except KeyError as ex:
raise Exception("No mapping found for {0}".format(class_name)) raise Exception("No mapping found for {0}".format(class_name)) from ex
class ObjectUpdater: class ObjectUpdater:

View File

@ -44,7 +44,7 @@ class BulkDeleteSavedQueryCommand(BaseCommand):
return None return None
except DAODeleteFailedError as ex: except DAODeleteFailedError as ex:
logger.exception(ex.exception) logger.exception(ex.exception)
raise SavedQueryBulkDeleteFailedError() raise SavedQueryBulkDeleteFailedError() from ex
def validate(self) -> None: def validate(self) -> None:
# Validate/populate model exists # Validate/populate model exists

View File

@ -41,7 +41,7 @@ class SavedQueryDAO(BaseDAO):
) )
if commit: if commit:
db.session.commit() db.session.commit()
except SQLAlchemyError: except SQLAlchemyError as ex:
if commit: if commit:
db.session.rollback() db.session.rollback()
raise DAODeleteFailedError() raise DAODeleteFailedError() from ex

View File

@ -79,8 +79,8 @@ class AlertCommand(BaseCommand):
] ]
return OPERATOR_FUNCTIONS[operator](self._result, threshold) return OPERATOR_FUNCTIONS[operator](self._result, threshold)
except (KeyError, json.JSONDecodeError): except (KeyError, json.JSONDecodeError) as ex:
raise AlertValidatorConfigError() raise AlertValidatorConfigError() from ex
def _validate_not_null(self, rows: np.recarray) -> None: def _validate_not_null(self, rows: np.recarray) -> None:
self._validate_result(rows) self._validate_result(rows)
@ -115,8 +115,8 @@ class AlertCommand(BaseCommand):
# Check if it's float or if we can convert it # Check if it's float or if we can convert it
self._result = float(rows[0][1]) self._result = float(rows[0][1])
return return
except (AssertionError, TypeError, ValueError): except (AssertionError, TypeError, ValueError) as ex:
raise AlertQueryInvalidTypeError() raise AlertQueryInvalidTypeError() from ex
@property @property
def _is_validator_not_null(self) -> bool: def _is_validator_not_null(self) -> bool:
@ -157,9 +157,9 @@ class AlertCommand(BaseCommand):
return df return df
except SoftTimeLimitExceeded as ex: except SoftTimeLimitExceeded as ex:
logger.warning("A timeout occurred while executing the alert query: %s", ex) logger.warning("A timeout occurred while executing the alert query: %s", ex)
raise AlertQueryTimeout() raise AlertQueryTimeout() from ex
except Exception as ex: except Exception as ex:
raise AlertQueryError(message=str(ex)) raise AlertQueryError(message=str(ex)) from ex
def validate(self) -> None: def validate(self) -> None:
""" """

View File

@ -47,7 +47,7 @@ class BulkDeleteReportScheduleCommand(BaseCommand):
return None return None
except DAODeleteFailedError as ex: except DAODeleteFailedError as ex:
logger.exception(ex.exception) logger.exception(ex.exception)
raise ReportScheduleBulkDeleteFailedError() raise ReportScheduleBulkDeleteFailedError() from ex
def validate(self) -> None: def validate(self) -> None:
# Validate/populate model exists # Validate/populate model exists
@ -59,5 +59,5 @@ class BulkDeleteReportScheduleCommand(BaseCommand):
for model in self._models: for model in self._models:
try: try:
check_ownership(model) check_ownership(model)
except SupersetSecurityException: except SupersetSecurityException as ex:
raise ReportScheduleForbiddenError() raise ReportScheduleForbiddenError() from ex

View File

@ -51,7 +51,7 @@ class CreateReportScheduleCommand(CreateMixin, BaseReportScheduleCommand):
report_schedule = ReportScheduleDAO.create(self._properties) report_schedule = ReportScheduleDAO.create(self._properties)
except DAOCreateFailedError as ex: except DAOCreateFailedError as ex:
logger.exception(ex.exception) logger.exception(ex.exception)
raise ReportScheduleCreateFailedError() raise ReportScheduleCreateFailedError() from ex
return report_schedule return report_schedule
def validate(self) -> None: def validate(self) -> None:

View File

@ -47,7 +47,7 @@ class DeleteReportScheduleCommand(BaseCommand):
report_schedule = ReportScheduleDAO.delete(self._model) report_schedule = ReportScheduleDAO.delete(self._model)
except DAODeleteFailedError as ex: except DAODeleteFailedError as ex:
logger.exception(ex.exception) logger.exception(ex.exception)
raise ReportScheduleDeleteFailedError() raise ReportScheduleDeleteFailedError() from ex
return report_schedule return report_schedule
def validate(self) -> None: def validate(self) -> None:
@ -59,5 +59,5 @@ class DeleteReportScheduleCommand(BaseCommand):
# Check ownership # Check ownership
try: try:
check_ownership(self._model) check_ownership(self._model)
except SupersetSecurityException: except SupersetSecurityException as ex:
raise ReportScheduleForbiddenError() raise ReportScheduleForbiddenError() from ex

View File

@ -201,13 +201,13 @@ class BaseReportState:
user = self._get_user() user = self._get_user()
try: try:
image_data = screenshot.get_screenshot(user=user) image_data = screenshot.get_screenshot(user=user)
except SoftTimeLimitExceeded: except SoftTimeLimitExceeded as ex:
logger.warning("A timeout occurred while taking a screenshot.") logger.warning("A timeout occurred while taking a screenshot.")
raise ReportScheduleScreenshotTimeout() raise ReportScheduleScreenshotTimeout() from ex
except Exception as ex: except Exception as ex:
raise ReportScheduleScreenshotFailedError( raise ReportScheduleScreenshotFailedError(
f"Failed taking a screenshot {str(ex)}" f"Failed taking a screenshot {str(ex)}"
) ) from ex
if not image_data: if not image_data:
raise ReportScheduleScreenshotFailedError() raise ReportScheduleScreenshotFailedError()
return image_data return image_data
@ -239,10 +239,12 @@ class BaseReportState:
try: try:
logger.info("Getting chart from %s", url) logger.info("Getting chart from %s", url)
csv_data = get_chart_csv_data(url, auth_cookies) csv_data = get_chart_csv_data(url, auth_cookies)
except SoftTimeLimitExceeded: except SoftTimeLimitExceeded as ex:
raise ReportScheduleCsvTimeout() raise ReportScheduleCsvTimeout() from ex
except Exception as ex: except Exception as ex:
raise ReportScheduleCsvFailedError(f"Failed generating csv {str(ex)}") raise ReportScheduleCsvFailedError(
f"Failed generating csv {str(ex)}"
) from ex
if not csv_data: if not csv_data:
raise ReportScheduleCsvFailedError() raise ReportScheduleCsvFailedError()
return csv_data return csv_data
@ -581,7 +583,7 @@ class AsyncExecuteReportScheduleCommand(BaseCommand):
except CommandException as ex: except CommandException as ex:
raise ex raise ex
except Exception as ex: except Exception as ex:
raise ReportScheduleUnexpectedError(str(ex)) raise ReportScheduleUnexpectedError(str(ex)) from ex
def validate( # pylint: disable=arguments-differ def validate( # pylint: disable=arguments-differ
self, session: Session = None self, session: Session = None

View File

@ -55,7 +55,7 @@ class UpdateReportScheduleCommand(UpdateMixin, BaseReportScheduleCommand):
report_schedule = ReportScheduleDAO.update(self._model, self._properties) report_schedule = ReportScheduleDAO.update(self._model, self._properties)
except DAOUpdateFailedError as ex: except DAOUpdateFailedError as ex:
logger.exception(ex.exception) logger.exception(ex.exception)
raise ReportScheduleUpdateFailedError() raise ReportScheduleUpdateFailedError() from ex
return report_schedule return report_schedule
def validate(self) -> None: def validate(self) -> None:
@ -110,8 +110,8 @@ class UpdateReportScheduleCommand(UpdateMixin, BaseReportScheduleCommand):
# Check ownership # Check ownership
try: try:
check_ownership(self._model) check_ownership(self._model)
except SupersetSecurityException: except SupersetSecurityException as ex:
raise ReportScheduleForbiddenError() raise ReportScheduleForbiddenError() from ex
# Validate/Populate owner # Validate/Populate owner
if owner_ids is None: if owner_ids is None:

View File

@ -111,7 +111,7 @@ class ReportScheduleDAO(BaseDAO):
except SQLAlchemyError as ex: except SQLAlchemyError as ex:
if commit: if commit:
db.session.rollback() db.session.rollback()
raise DAODeleteFailedError(str(ex)) raise DAODeleteFailedError(str(ex)) from ex
@staticmethod @staticmethod
def validate_update_uniqueness( def validate_update_uniqueness(
@ -161,7 +161,7 @@ class ReportScheduleDAO(BaseDAO):
return model return model
except SQLAlchemyError as ex: except SQLAlchemyError as ex:
db.session.rollback() db.session.rollback()
raise DAOCreateFailedError(str(ex)) raise DAOCreateFailedError(str(ex)) from ex
@classmethod @classmethod
def update( def update(
@ -194,7 +194,7 @@ class ReportScheduleDAO(BaseDAO):
return model return model
except SQLAlchemyError as ex: except SQLAlchemyError as ex:
db.session.rollback() db.session.rollback()
raise DAOCreateFailedError(str(ex)) raise DAOCreateFailedError(str(ex)) from ex
@staticmethod @staticmethod
def find_active(session: Optional[Session] = None) -> List[ReportSchedule]: def find_active(session: Optional[Session] = None) -> List[ReportSchedule]:
@ -302,4 +302,4 @@ class ReportScheduleDAO(BaseDAO):
except SQLAlchemyError as ex: except SQLAlchemyError as ex:
if commit: if commit:
session.rollback() session.rollback()
raise DAODeleteFailedError(str(ex)) raise DAODeleteFailedError(str(ex)) from ex

View File

@ -133,4 +133,4 @@ class EmailNotification(BaseNotification): # pylint: disable=too-few-public-met
) )
logger.info("Report sent to email") logger.info("Report sent to email")
except Exception as ex: except Exception as ex:
raise NotificationError(ex) raise NotificationError(ex) from ex

View File

@ -152,4 +152,4 @@ Error: %(text)s
client.chat_postMessage(channel=channel, text=body) client.chat_postMessage(channel=channel, text=body)
logger.info("Report sent to slack") logger.info("Report sent to slack")
except SlackClientError as ex: except SlackClientError as ex:
raise NotificationError(ex) raise NotificationError(ex) from ex

View File

@ -149,8 +149,8 @@ def get_query(query_id: int, session: Session) -> Query:
"""attempts to get the query and retry if it cannot""" """attempts to get the query and retry if it cannot"""
try: try:
return session.query(Query).filter_by(id=query_id).one() return session.query(Query).filter_by(id=query_id).one()
except Exception: except Exception as ex:
raise SqlLabException("Failed at getting query") raise SqlLabException("Failed at getting query") from ex
@celery_app.task( @celery_app.task(
@ -291,17 +291,17 @@ def execute_sql_statement(
error_type=SupersetErrorType.SQLLAB_TIMEOUT_ERROR, error_type=SupersetErrorType.SQLLAB_TIMEOUT_ERROR,
level=ErrorLevel.ERROR, level=ErrorLevel.ERROR,
) )
) ) from ex
except Exception as ex: except Exception as ex:
# query is stopped in another thread/worker # query is stopped in another thread/worker
# stopping raises expected exceptions which we should skip # stopping raises expected exceptions which we should skip
session.refresh(query) session.refresh(query)
if query.status == QueryStatus.STOPPED: if query.status == QueryStatus.STOPPED:
raise SqlLabQueryStoppedException() raise SqlLabQueryStoppedException() from ex
logger.error("Query %d: %s", query.id, type(ex), exc_info=True) logger.error("Query %d: %s", query.id, type(ex), exc_info=True)
logger.debug("Query %d: %s", query.id, ex) logger.debug("Query %d: %s", query.id, ex)
raise SqlLabException(db_engine_spec.extract_error_message(ex)) raise SqlLabException(db_engine_spec.extract_error_message(ex)) from ex
logger.debug("Query %d: Fetching cursor description", query.id) logger.debug("Query %d: Fetching cursor description", query.id)
cursor_description = cursor.description cursor_description = cursor.description

View File

@ -165,9 +165,9 @@ class AsyncQueryManager:
try: try:
return self.parse_jwt(token) return self.parse_jwt(token)
except Exception as exc: except Exception as ex:
logger.warning(exc) logger.warning(ex)
raise AsyncQueryTokenException("Failed to parse token") raise AsyncQueryTokenException("Failed to parse token") from ex
def init_job(self, channel_id: str, user_id: Optional[str]) -> Dict[str, Any]: def init_job(self, channel_id: str, user_id: Optional[str]) -> Dict[str, Any]:
job_id = str(uuid.uuid4()) job_id = str(uuid.uuid4())

View File

@ -765,7 +765,7 @@ def validate_json(obj: Union[bytes, bytearray, str]) -> None:
json.loads(obj) json.loads(obj)
except Exception as ex: except Exception as ex:
logger.error("JSON is not valid %s", str(ex), exc_info=True) logger.error("JSON is not valid %s", str(ex), exc_info=True)
raise SupersetException("JSON is not valid") raise SupersetException("JSON is not valid") from ex
class SigalrmTimeout: class SigalrmTimeout:
@ -1424,8 +1424,8 @@ def parse_ssl_cert(certificate: str) -> _Certificate:
return x509.load_pem_x509_certificate( return x509.load_pem_x509_certificate(
certificate.encode("utf-8"), default_backend() certificate.encode("utf-8"), default_backend()
) )
except ValueError: except ValueError as ex:
raise CertificateException("Invalid certificate") raise CertificateException("Invalid certificate") from ex
def create_ssl_cert_file(certificate: str) -> str: def create_ssl_cert_file(certificate: str) -> str:

View File

@ -66,7 +66,7 @@ def parse_human_datetime(human_readable: str) -> datetime:
# 0 == not parsed at all # 0 == not parsed at all
if parsed_flags == 0: if parsed_flags == 0:
logger.debug(ex) logger.debug(ex)
raise TimeRangeParseFailError(human_readable) raise TimeRangeParseFailError(human_readable) from ex
# when time is not extracted, we 'reset to midnight' # when time is not extracted, we 'reset to midnight'
if parsed_flags & 2 == 0: if parsed_flags & 2 == 0:
parsed_dttm = parsed_dttm.replace(hour=0, minute=0, second=0) parsed_dttm = parsed_dttm.replace(hour=0, minute=0, second=0)
@ -476,8 +476,8 @@ def datetime_eval(datetime_expression: Optional[str] = None) -> Optional[datetim
if datetime_expression: if datetime_expression:
try: try:
return datetime_parser().parseString(datetime_expression)[0].eval() return datetime_parser().parseString(datetime_expression)[0].eval()
except ParseException as error: except ParseException as ex:
raise ValueError(error) raise ValueError(ex) from ex
return None return None

View File

@ -397,14 +397,14 @@ def rolling( # pylint: disable=too-many-arguments
) )
try: try:
df_rolling = getattr(df_rolling, rolling_type)(**rolling_type_options) df_rolling = getattr(df_rolling, rolling_type)(**rolling_type_options)
except TypeError: except TypeError as ex:
raise QueryObjectValidationError( raise QueryObjectValidationError(
_( _(
"Invalid options for %(rolling_type)s: %(options)s", "Invalid options for %(rolling_type)s: %(options)s",
rolling_type=rolling_type, rolling_type=rolling_type,
options=rolling_type_options, options=rolling_type_options,
) )
) ) from ex
df = _append_columns(df, df_rolling, columns) df = _append_columns(df, df_rolling, columns)
if min_periods: if min_periods:
df = df[min_periods:] df = df[min_periods:]
@ -569,8 +569,8 @@ def geohash_decode(
return _append_columns( return _append_columns(
df, lonlat_df, {"latitude": latitude, "longitude": longitude} df, lonlat_df, {"latitude": latitude, "longitude": longitude}
) )
except ValueError: except ValueError as ex:
raise QueryObjectValidationError(_("Invalid geohash string")) raise QueryObjectValidationError(_("Invalid geohash string")) from ex
def geohash_encode( def geohash_encode(
@ -592,8 +592,8 @@ def geohash_encode(
lambda row: geohash_lib.encode(row["latitude"], row["longitude"]), axis=1, lambda row: geohash_lib.encode(row["latitude"], row["longitude"]), axis=1,
) )
return _append_columns(df, encode_df, {"geohash": geohash}) return _append_columns(df, encode_df, {"geohash": geohash})
except ValueError: except ValueError as ex:
raise QueryObjectValidationError(_("Invalid longitude/latitude")) raise QueryObjectValidationError(_("Invalid longitude/latitude")) from ex
def geodetic_parse( def geodetic_parse(
@ -634,8 +634,8 @@ def geodetic_parse(
if altitude: if altitude:
columns["altitude"] = altitude columns["altitude"] = altitude
return _append_columns(df, geodetic_df, columns) return _append_columns(df, geodetic_df, columns)
except ValueError: except ValueError as ex:
raise QueryObjectValidationError(_("Invalid geodetic string")) raise QueryObjectValidationError(_("Invalid geodetic string")) from ex
@validate_column_args("columns") @validate_column_args("columns")
@ -720,8 +720,8 @@ def _prophet_fit_and_predict( # pylint: disable=too-many-arguments
prophet_logger = logging.getLogger("prophet.plot") prophet_logger = logging.getLogger("prophet.plot")
prophet_logger.setLevel(logging.CRITICAL) prophet_logger.setLevel(logging.CRITICAL)
prophet_logger.setLevel(logging.NOTSET) prophet_logger.setLevel(logging.NOTSET)
except ModuleNotFoundError: except ModuleNotFoundError as ex:
raise QueryObjectValidationError(_("`prophet` package not installed")) raise QueryObjectValidationError(_("`prophet` package not installed")) from ex
model = Prophet( model = Prophet(
interval_width=confidence_interval, interval_width=confidence_interval,
yearly_seasonality=yearly_seasonality, yearly_seasonality=yearly_seasonality,

View File

@ -50,5 +50,5 @@ def validate_json(value: Union[bytes, bytearray, str]) -> None:
""" """
try: try:
utils.validate_json(value) utils.validate_json(value)
except SupersetException: except SupersetException as ex:
raise ValidationError("JSON not valid") raise ValidationError("JSON not valid") from ex

View File

@ -254,7 +254,7 @@ def validate_sqlatable(table: models.SqlaTable) -> None:
"database connection, schema, and " "database connection, schema, and "
"table name, error: {}" "table name, error: {}"
).format(table.name, str(ex)) ).format(table.name, str(ex))
) ) from ex
def create_table_permissions(table: models.SqlaTable) -> None: def create_table_permissions(table: models.SqlaTable) -> None:
@ -501,7 +501,7 @@ def validate_json(form: Form, field: Field) -> None: # pylint: disable=unused-a
json.loads(field.data) json.loads(field.data)
except Exception as ex: except Exception as ex:
logger.exception(ex) logger.exception(ex)
raise Exception(_("json isn't valid")) raise Exception(_("json isn't valid")) from ex
class YamlExportMixin: # pylint: disable=too-few-public-methods class YamlExportMixin: # pylint: disable=too-few-public-methods

View File

@ -31,8 +31,8 @@ def validate_owner(value: int) -> None:
.filter_by(id=value) .filter_by(id=value)
.one() .one()
) )
except NoResultFound: except NoResultFound as ex:
raise ValidationError(f"User {value} does not exist") raise ValidationError(f"User {value} does not exist") from ex
class BaseSupersetSchema(Schema): class BaseSupersetSchema(Schema):

View File

@ -2040,14 +2040,14 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods
try: try:
table_name = data["datasourceName"] table_name = data["datasourceName"]
database_id = data["dbId"] database_id = data["dbId"]
except KeyError: except KeyError as ex:
raise SupersetGenericErrorException( raise SupersetGenericErrorException(
__( __(
"One or more required fields are missing in the request. Please try " "One or more required fields are missing in the request. Please try "
"again, and if the problem persists conctact your administrator." "again, and if the problem persists conctact your administrator."
), ),
status=400, status=400,
) ) from ex
database = db.session.query(Database).get(database_id) database = db.session.query(Database).get(database_id)
if not database: if not database:
raise SupersetErrorException( raise SupersetErrorException(
@ -2480,7 +2480,7 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods
query.error_message = message query.error_message = message
session.commit() session.commit()
raise SupersetErrorException(error) raise SupersetErrorException(error) from ex
# Update saved query with execution info from the query execution # Update saved query with execution info from the query execution
QueryDAO.update_saved_query_exec_info(query_id) QueryDAO.update_saved_query_exec_info(query_id)
@ -2549,7 +2549,9 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods
raise ex raise ex
except Exception as ex: # pylint: disable=broad-except except Exception as ex: # pylint: disable=broad-except
logger.exception("Query %i failed unexpectedly", query.id) logger.exception("Query %i failed unexpectedly", query.id)
raise SupersetGenericDBErrorException(utils.error_msg_from_exception(ex)) raise SupersetGenericDBErrorException(
utils.error_msg_from_exception(ex)
) from ex
if data.get("status") == QueryStatus.FAILED: if data.get("status") == QueryStatus.FAILED:
# new error payload with rich context # new error payload with rich context

View File

@ -242,7 +242,7 @@ class DatabaseMixin:
except Exception as ex: except Exception as ex:
raise Exception( raise Exception(
_("Extra field cannot be decoded by JSON. %(msg)s", msg=str(ex)) _("Extra field cannot be decoded by JSON. %(msg)s", msg=str(ex))
) ) from ex
# this will check whether 'metadata_params' is configured correctly # this will check whether 'metadata_params' is configured correctly
metadata_signature = inspect.signature(MetaData) metadata_signature = inspect.signature(MetaData)
@ -266,4 +266,4 @@ class DatabaseMixin:
except Exception as ex: except Exception as ex:
raise Exception( raise Exception(
_("Extra field cannot be decoded by JSON. %(msg)s", msg=str(ex)) _("Extra field cannot be decoded by JSON. %(msg)s", msg=str(ex))
) ) from ex

View File

@ -34,7 +34,7 @@ def sqlalchemy_uri_validator(
""" """
try: try:
make_url(uri.strip()) make_url(uri.strip())
except (ArgumentError, AttributeError): except (ArgumentError, AttributeError) as ex:
raise exception( raise exception(
[ [
_( _(
@ -45,7 +45,7 @@ def sqlalchemy_uri_validator(
"</p>" "</p>"
) )
] ]
) ) from ex
def schema_allows_csv_upload(database: Database, schema: Optional[str]) -> bool: def schema_allows_csv_upload(database: Database, schema: Optional[str]) -> bool:

View File

@ -65,7 +65,7 @@ def certificate_form_validator(_: _, field: StringField) -> None:
try: try:
utils.parse_ssl_cert(field.data) utils.parse_ssl_cert(field.data)
except CertificateException as ex: except CertificateException as ex:
raise ValidationError(ex.message) raise ValidationError(ex.message) from ex
def upload_stream_write(form_file_field: "FileStorage", path: str) -> None: def upload_stream_write(form_file_field: "FileStorage", path: str) -> None:

View File

@ -81,8 +81,8 @@ class Datasource(BaseSupersetView):
if app.config["OLD_API_CHECK_DATASET_OWNERSHIP"]: if app.config["OLD_API_CHECK_DATASET_OWNERSHIP"]:
try: try:
check_ownership(orm_datasource) check_ownership(orm_datasource)
except SupersetSecurityException: except SupersetSecurityException as ex:
raise DatasetForbiddenError() raise DatasetForbiddenError() from ex
datasource_dict["owners"] = ( datasource_dict["owners"] = (
db.session.query(orm_datasource.owner_class) db.session.query(orm_datasource.owner_class)
@ -175,6 +175,6 @@ class Datasource(BaseSupersetView):
table_name=params["table_name"], table_name=params["table_name"],
schema_name=params["schema_name"], schema_name=params["schema_name"],
) )
except (NoResultFound, NoSuchTableError): except (NoResultFound, NoSuchTableError) as ex:
raise DatasetNotFoundError raise DatasetNotFoundError() from ex
return self.json_response(external_metadata) return self.json_response(external_metadata)

View File

@ -134,8 +134,8 @@ class EmailScheduleView(
try: try:
recipients = get_email_address_list(item.recipients) recipients = get_email_address_list(item.recipients)
item.recipients = ", ".join(recipients) item.recipients = ", ".join(recipients)
except Exception: except Exception as ex:
raise SupersetException("Invalid email list") raise SupersetException("Invalid email list") from ex
item.user = item.user or g.user item.user = item.user or g.user
if not croniter.is_valid(item.crontab): if not croniter.is_valid(item.crontab):

View File

@ -521,7 +521,7 @@ def check_datasource_perms(
level=ErrorLevel.ERROR, level=ErrorLevel.ERROR,
message=str(ex), message=str(ex),
) )
) ) from ex
if datasource_type is None: if datasource_type is None:
raise SupersetSecurityException( raise SupersetSecurityException(
@ -539,14 +539,14 @@ def check_datasource_perms(
form_data=form_data, form_data=form_data,
force=False, force=False,
) )
except NoResultFound: except NoResultFound as ex:
raise SupersetSecurityException( raise SupersetSecurityException(
SupersetError( SupersetError(
error_type=SupersetErrorType.UNKNOWN_DATASOURCE_TYPE_ERROR, error_type=SupersetErrorType.UNKNOWN_DATASOURCE_TYPE_ERROR,
level=ErrorLevel.ERROR, level=ErrorLevel.ERROR,
message=_("Could not find viz object"), message=_("Could not find viz object"),
) )
) ) from ex
viz_obj.raise_for_access() viz_obj.raise_for_access()
@ -572,14 +572,14 @@ def check_slice_perms(_self: Any, slice_id: int) -> None:
form_data=form_data, form_data=form_data,
force=False, force=False,
) )
except NoResultFound: except NoResultFound as ex:
raise SupersetSecurityException( raise SupersetSecurityException(
SupersetError( SupersetError(
error_type=SupersetErrorType.UNKNOWN_DATASOURCE_TYPE_ERROR, error_type=SupersetErrorType.UNKNOWN_DATASOURCE_TYPE_ERROR,
level=ErrorLevel.ERROR, level=ErrorLevel.ERROR,
message="Could not find viz object", message="Could not find viz object",
) )
) ) from ex
viz_obj.raise_for_access() viz_obj.raise_for_access()
@ -597,8 +597,8 @@ def _deserialize_results_payload(
with stats_timing("sqllab.query.results_backend_pa_deserialize", stats_logger): with stats_timing("sqllab.query.results_backend_pa_deserialize", stats_logger):
try: try:
pa_table = pa.deserialize(ds_payload["data"]) pa_table = pa.deserialize(ds_payload["data"])
except pa.ArrowSerializationError: except pa.ArrowSerializationError as ex:
raise SerializationError("Unable to deserialize table") raise SerializationError("Unable to deserialize table") from ex
df = result_set.SupersetResultSet.convert_table_to_df(pa_table) df = result_set.SupersetResultSet.convert_table_to_df(pa_table)
ds_payload["data"] = dataframe.df_to_records(df) or [] ds_payload["data"] = dataframe.df_to_records(df) or []