pylint: accept specific 2 character names by default (#9460)

* lint: accept 2 letter names by default

* Address review comments

* Remove e and d from good-names
Ville Brofeldt 2020-04-08 20:32:26 +03:00 committed by GitHub
parent 4485800e21
commit 980dd2fd41
72 changed files with 421 additions and 431 deletions
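
For context, the rename that dominates this diff follows from pylint's invalid-name check (C0103): with the default name regexes, identifiers shorter than three characters are rejected unless they appear on the good-names allow-list, and this commit drops e and d from that list while keeping ex. A minimal sketch of the resulting exception-handler convention (the function and names below are illustrative, not taken from the Superset codebase):

import logging

logger = logging.getLogger(__name__)


def parse_port(value: str) -> int:
    """Parse a TCP port, logging and re-raising on bad input."""
    try:
        return int(value)
    except ValueError as ex:  # "ex" is allow-listed; "e" would now trip C0103
        logger.exception(ex)
        raise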

View File

@@ -115,10 +115,10 @@ evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / stateme
 [BASIC]
 # Good variable names which should always be accepted, separated by a comma
-good-names=i,j,k,ex,Run,_,d,e,v,o,l,x,ts,f
+good-names=_,df,ex,f,i,id,j,k,l,o,pk,Run,ts,v,x
 # Bad variable names which should always be refused, separated by a comma
-bad-names=foo,bar,baz,toto,tutu,tata,d,fd
+bad-names=fd,foo,bar,baz,toto,tutu,tata
 # Colon-delimited sets of names that determine each other's naming style when
 # the name regexes allow several styles.
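
For illustration, here is how a few hypothetical module-level names fare under the new lists (entries in good-names are always accepted regardless of the naming regexes; entries in bad-names are always refused):

df = {"a": 1}  # accepted: "df" is in good-names
pk = 42        # accepted: "pk" is in good-names
d = {}         # flagged invalid-name: "d" was removed from good-names
e = None       # flagged invalid-name: "e" was removed from good-names
fd = None      # flagged: "fd" is in bad-names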

View File

@@ -170,11 +170,11 @@ class ChartRestApi(BaseSupersetModelRestApi):
         try:
             new_model = CreateChartCommand(g.user, item.data).run()
             return self.response(201, id=new_model.id, result=item.data)
-        except ChartInvalidError as e:
-            return self.response_422(message=e.normalized_messages())
-        except ChartCreateFailedError as e:
-            logger.error(f"Error creating model {self.__class__.__name__}: {e}")
-            return self.response_422(message=str(e))
+        except ChartInvalidError as ex:
+            return self.response_422(message=ex.normalized_messages())
+        except ChartCreateFailedError as ex:
+            logger.error(f"Error creating model {self.__class__.__name__}: {ex}")
+            return self.response_422(message=str(ex))
     @expose("/<pk>", methods=["PUT"])
     @protect()
@@ -237,11 +237,11 @@ class ChartRestApi(BaseSupersetModelRestApi):
             return self.response_404()
         except ChartForbiddenError:
             return self.response_403()
-        except ChartInvalidError as e:
-            return self.response_422(message=e.normalized_messages())
-        except ChartUpdateFailedError as e:
-            logger.error(f"Error updating model {self.__class__.__name__}: {e}")
-            return self.response_422(message=str(e))
+        except ChartInvalidError as ex:
+            return self.response_422(message=ex.normalized_messages())
+        except ChartUpdateFailedError as ex:
+            logger.error(f"Error updating model {self.__class__.__name__}: {ex}")
+            return self.response_422(message=str(ex))
     @expose("/<pk>", methods=["DELETE"])
     @protect()
@@ -285,9 +285,9 @@ class ChartRestApi(BaseSupersetModelRestApi):
             return self.response_404()
         except ChartForbiddenError:
             return self.response_403()
-        except ChartDeleteFailedError as e:
-            logger.error(f"Error deleting model {self.__class__.__name__}: {e}")
-            return self.response_422(message=str(e))
+        except ChartDeleteFailedError as ex:
+            logger.error(f"Error deleting model {self.__class__.__name__}: {ex}")
+            return self.response_422(message=str(ex))
     @expose("/", methods=["DELETE"])
     @protect()
@@ -346,5 +346,5 @@ class ChartRestApi(BaseSupersetModelRestApi):
             return self.response_404()
         except ChartForbiddenError:
             return self.response_403()
-        except ChartBulkDeleteFailedError as e:
-            return self.response_422(message=str(e))
+        except ChartBulkDeleteFailedError as ex:
+            return self.response_422(message=str(ex))

View File

@@ -44,8 +44,8 @@ class BulkDeleteChartCommand(BaseCommand):
         self.validate()
         try:
             ChartDAO.bulk_delete(self._models)
-        except DeleteFailedError as e:
-            logger.exception(e.exception)
+        except DeleteFailedError as ex:
+            logger.exception(ex.exception)
             raise ChartBulkDeleteFailedError()
     def validate(self) -> None:

View File

@@ -44,8 +44,8 @@ class CreateChartCommand(BaseCommand):
         self.validate()
         try:
             chart = ChartDAO.create(self._properties)
-        except DAOCreateFailedError as e:
-            logger.exception(e.exception)
+        except DAOCreateFailedError as ex:
+            logger.exception(ex.exception)
             raise ChartCreateFailedError()
         return chart
@@ -60,8 +60,8 @@ class CreateChartCommand(BaseCommand):
         try:
             datasource = get_datasource_by_id(datasource_id, datasource_type)
             self._properties["datasource_name"] = datasource.name
-        except ValidationError as e:
-            exceptions.append(e)
+        except ValidationError as ex:
+            exceptions.append(ex)
         # Validate/Populate dashboards
         dashboards = DashboardDAO.find_by_ids(dashboard_ids)
@@ -72,8 +72,8 @@ class CreateChartCommand(BaseCommand):
         try:
             owners = populate_owners(self._actor, owner_ids)
             self._properties["owners"] = owners
-        except ValidationError as e:
-            exceptions.append(e)
+        except ValidationError as ex:
+            exceptions.append(ex)
         if exceptions:
             exception = ChartInvalidError()
             exception.add_list(exceptions)

View File

@@ -45,8 +45,8 @@ class DeleteChartCommand(BaseCommand):
         self.validate()
         try:
             chart = ChartDAO.delete(self._model)
-        except DAODeleteFailedError as e:
-            logger.exception(e.exception)
+        except DAODeleteFailedError as ex:
+            logger.exception(ex.exception)
             raise ChartDeleteFailedError()
         return chart

View File

@@ -52,8 +52,8 @@ class UpdateChartCommand(BaseCommand):
         self.validate()
         try:
             chart = ChartDAO.update(self._model, self._properties)
-        except DAOUpdateFailedError as e:
-            logger.exception(e.exception)
+        except DAOUpdateFailedError as ex:
+            logger.exception(ex.exception)
             raise ChartUpdateFailedError()
         return chart
@@ -84,8 +84,8 @@ class UpdateChartCommand(BaseCommand):
         try:
             datasource = get_datasource_by_id(datasource_id, datasource_type)
             self._properties["datasource_name"] = datasource.name
-        except ValidationError as e:
-            exceptions.append(e)
+        except ValidationError as ex:
+            exceptions.append(ex)
         # Validate/Populate dashboards
         dashboards = DashboardDAO.find_by_ids(dashboard_ids)
@@ -97,8 +97,8 @@ class UpdateChartCommand(BaseCommand):
         try:
             owners = populate_owners(self._actor, owner_ids)
             self._properties["owners"] = owners
-        except ValidationError as e:
-            exceptions.append(e)
+        except ValidationError as ex:
+            exceptions.append(ex)
         if exceptions:
             exception = ChartInvalidError()
             exception.add_list(exceptions)

View File

@@ -47,7 +47,7 @@ class ChartDAO(BaseDAO):
             )
             if commit:
                 db.session.commit()
-        except SQLAlchemyError as e:
+        except SQLAlchemyError as ex:
             if commit:
                 db.session.rollback()
-            raise e
+            raise ex

View File

@@ -197,9 +197,9 @@ def refresh_druid(datasource, merge):
     for cluster in session.query(DruidCluster).all():
         try:
             cluster.refresh_datasources(datasource_name=datasource, merge_flag=merge)
-        except Exception as e:  # pylint: disable=broad-except
-            print("Error while processing cluster '{}'\n{}".format(cluster, str(e)))
-            logger.exception(e)
+        except Exception as ex:  # pylint: disable=broad-except
+            print("Error while processing cluster '{}'\n{}".format(cluster, str(ex)))
+            logger.exception(ex)
         cluster.metadata_last_refreshed = datetime.now()
         print("Refreshed metadata from cluster " "[" + cluster.cluster_name + "]")
     session.commit()
@@ -245,9 +245,9 @@ def import_dashboards(path, recursive, username):
         try:
             with file_.open() as data_stream:
                 dashboard_import_export.import_dashboards(db.session, data_stream)
-        except Exception as e:  # pylint: disable=broad-except
+        except Exception as ex:  # pylint: disable=broad-except
             logger.error("Error when importing dashboard from file %s", file_)
-            logger.error(e)
+            logger.error(ex)
 @superset.command()
@@ -317,9 +317,9 @@ def import_datasources(path, sync, recursive):
                 dict_import_export.import_from_dict(
                     db.session, yaml.safe_load(data_stream), sync=sync_array
                 )
-        except Exception as e:  # pylint: disable=broad-except
+        except Exception as ex:  # pylint: disable=broad-except
             logger.error("Error when importing datasources from file %s", file_)
-            logger.error(e)
+            logger.error(ex)
 @superset.command()
@@ -397,8 +397,8 @@ def update_datasources_cache():
             database.get_all_view_names_in_database(
                 force=True, cache=True, cache_timeout=24 * 60 * 60
             )
-        except Exception as e:  # pylint: disable=broad-except
-            print("{}".format(str(e)))
+        except Exception as ex:  # pylint: disable=broad-except
+            print("{}".format(str(ex)))
 @superset.command()

View File

@@ -113,7 +113,7 @@ class QueryContext:
         }
     @staticmethod
-    def df_metrics_to_num(  # pylint: disable=invalid-name,no-self-use
+    def df_metrics_to_num(  # pylint: disable=no-self-use
         df: pd.DataFrame, query_object: QueryObject
     ) -> None:
         """Converting metrics to numeric when pandas.read_sql cannot"""
@@ -122,9 +122,7 @@ class QueryContext:
                 df[col] = pd.to_numeric(df[col], errors="coerce")
     @staticmethod
-    def get_data(  # pylint: disable=invalid-name,no-self-use
-        df: pd.DataFrame,
-    ) -> List[Dict]:
+    def get_data(df: pd.DataFrame,) -> List[Dict]:  # pylint: disable=no-self-use
         return df.to_dict(orient="records")
     def get_single_payload(self, query_obj: QueryObject) -> Dict[str, Any]:
@@ -197,10 +195,10 @@ class QueryContext:
                 status = utils.QueryStatus.SUCCESS
                 is_loaded = True
                 stats_logger.incr("loaded_from_cache")
-            except Exception as e:  # pylint: disable=broad-except
-                logger.exception(e)
+            except Exception as ex:  # pylint: disable=broad-except
+                logger.exception(ex)
                 logger.error(
-                    "Error reading cache: %s", utils.error_msg_from_exception(e)
+                    "Error reading cache: %s", utils.error_msg_from_exception(ex)
                 )
             logger.info("Serving from cache")
@@ -216,10 +214,10 @@ class QueryContext:
                 if not self.force:
                     stats_logger.incr("loaded_from_source_without_force")
                 is_loaded = True
-            except Exception as e:  # pylint: disable=broad-except
-                logger.exception(e)
+            except Exception as ex:  # pylint: disable=broad-except
+                logger.exception(ex)
                 if not error_message:
-                    error_message = "{}".format(e)
+                    error_message = "{}".format(ex)
                 status = utils.QueryStatus.FAILED
                 stacktrace = utils.get_stacktrace()
@@ -234,11 +232,11 @@ class QueryContext:
                 stats_logger.incr("set_cache_key")
                 cache.set(cache_key, cache_binary, timeout=self.cache_timeout)
-            except Exception as e:  # pylint: disable=broad-except
+            except Exception as ex:  # pylint: disable=broad-except
                 # cache.set call can fail if the backend is down or if
                 # the key is too large or whatever other reasons
                 logger.warning("Could not cache key %s", cache_key)
-                logger.exception(e)
+                logger.exception(ex)
                 cache.delete(cache_key)
         return {
             "cache_key": cache_key,

View File

@@ -76,7 +76,7 @@ class BaseDatasource(
     # ---------------------------------------------------------------
     # Columns
-    id = Column(Integer, primary_key=True)  # pylint: disable=invalid-name
+    id = Column(Integer, primary_key=True)
     description = Column(Text)
     default_endpoint = Column(Text)
     is_featured = Column(Boolean, default=False)  # TODO deprecating
@@ -453,7 +453,7 @@ class BaseColumn(AuditMixinNullable, ImportMixin):
     __tablename__: Optional[str] = None  # {connector_name}_column
-    id = Column(Integer, primary_key=True)  # pylint: disable=invalid-name
+    id = Column(Integer, primary_key=True)
     column_name = Column(String(255), nullable=False)
     verbose_name = Column(String(1024))
     is_active = Column(Boolean, default=True)
@@ -526,7 +526,7 @@ class BaseMetric(AuditMixinNullable, ImportMixin):
     __tablename__: Optional[str] = None  # {connector_name}_metric
-    id = Column(Integer, primary_key=True)  # pylint: disable=invalid-name
+    id = Column(Integer, primary_key=True)
     metric_name = Column(String(255), nullable=False)
     verbose_name = Column(String(1024))
     metric_type = Column(String(32))

View File

@@ -657,9 +657,9 @@ class DruidDatasource(Model, BaseDatasource):
                 merge=self.merge_flag,
                 analysisTypes=[],
             )
-        except Exception as e:
+        except Exception as ex:
             logger.warning("Failed first attempt to get latest segment")
-            logger.exception(e)
+            logger.exception(ex)
         if not segment_metadata:
             # if no segments in the past 7 days, look at all segments
             lbound = datetime(1901, 1, 1).isoformat()[:10]
@@ -674,9 +674,9 @@ class DruidDatasource(Model, BaseDatasource):
                     merge=self.merge_flag,
                     analysisTypes=[],
                 )
-            except Exception as e:
+            except Exception as ex:
                 logger.warning("Failed 2nd attempt to get latest segment")
-                logger.exception(e)
+                logger.exception(ex)
         if segment_metadata:
             return segment_metadata[-1]["columns"]

View File

@@ -112,8 +112,8 @@ class DruidColumnInlineView(CompactCRUDMixin, SupersetModelView):
         if col.dimension_spec_json:
             try:
                 dimension_spec = json.loads(col.dimension_spec_json)
-            except ValueError as e:
-                raise ValueError("Invalid Dimension Spec JSON: " + str(e))
+            except ValueError as ex:
+                raise ValueError("Invalid Dimension Spec JSON: " + str(ex))
             if not isinstance(dimension_spec, dict):
                 raise ValueError("Dimension Spec must be a JSON object")
             if "outputName" not in dimension_spec:
@@ -374,15 +374,15 @@ class Druid(BaseSupersetView):
             valid_cluster = True
             try:
                 cluster.refresh_datasources(refresh_all=refresh_all)
-            except Exception as e:
+            except Exception as ex:
                 valid_cluster = False
                 flash(
                     "Error while processing cluster '{}'\n{}".format(
-                        cluster_name, utils.error_msg_from_exception(e)
+                        cluster_name, utils.error_msg_from_exception(ex)
                     ),
                     "danger",
                 )
-                logger.exception(e)
+                logger.exception(ex)
                 pass
             if valid_cluster:
                 cluster.metadata_last_refreshed = datetime.now()

View File

@@ -96,11 +96,11 @@ class AnnotationDatasource(BaseDatasource):
         status = utils.QueryStatus.SUCCESS
         try:
             df = pd.read_sql_query(qry.statement, db.engine)
-        except Exception as e:
+        except Exception as ex:
             df = pd.DataFrame()
             status = utils.QueryStatus.FAILED
-            logger.exception(e)
-            error_message = utils.error_msg_from_exception(e)
+            logger.exception(ex)
+            error_message = utils.error_msg_from_exception(ex)
         return QueryResult(
             status=status, df=df, duration=0, query="", error_message=error_message
         )
@@ -1055,12 +1055,12 @@ class SqlaTable(Model, BaseDatasource):
         try:
             df = self.database.get_df(sql, self.schema, mutator)
-        except Exception as e:
+        except Exception as ex:
             df = pd.DataFrame()
             status = utils.QueryStatus.FAILED
             logger.exception(f"Query {sql} on schema {self.schema} failed")
             db_engine_spec = self.database.db_engine_spec
-            error_message = db_engine_spec.extract_error_message(e)
+            error_message = db_engine_spec.extract_error_message(ex)
         return QueryResult(
             status=status,
@@ -1077,8 +1077,8 @@ class SqlaTable(Model, BaseDatasource):
         """Fetches the metadata for the table and merges it in"""
         try:
             table = self.get_sqla_table_object()
-        except Exception as e:
-            logger.exception(e)
+        except Exception as ex:
+            logger.exception(ex)
             raise Exception(
                 _(
                     "Table [{}] doesn't seem to exist in the specified database, "
@@ -1102,10 +1102,10 @@ class SqlaTable(Model, BaseDatasource):
                 datatype = db_engine_spec.column_datatype_to_string(
                     col.type, db_dialect
                 )
-            except Exception as e:
+            except Exception as ex:
                 datatype = "UNKNOWN"
                 logger.error("Unrecognized data type in {}.{}".format(table, col.name))
-                logger.exception(e)
+                logger.exception(ex)
             dbcol = dbcols.get(col.name, None)
             if not dbcol:
                 dbcol = TableColumn(column_name=col.name, type=datatype, table=self)
@@ -1254,7 +1254,7 @@ class RowLevelSecurityFilter(Model, AuditMixinNullable):
     """
     __tablename__ = "row_level_security_filters"
-    id = Column(Integer, primary_key=True)  # pylint: disable=invalid-name
+    id = Column(Integer, primary_key=True)
     roles = relationship(
         security_manager.role_model,
         secondary=RLSFilterRoles,

View File

@@ -387,7 +387,7 @@ class TableModelView(DatasourceModelView, DeleteMixin, YamlExportMixin):
         # Fail before adding if the table can't be found
         try:
             table.get_sqla_table_object()
-        except Exception as e:
+        except Exception as ex:
             logger.exception(f"Got an error in pre_add for {table.name}")
             raise Exception(
                 _(
@@ -395,7 +395,7 @@ class TableModelView(DatasourceModelView, DeleteMixin, YamlExportMixin):
                     "please double check your "
                     "database connection, schema, and "
                     "table name, error: {}"
-                ).format(table.name, str(e))
+                ).format(table.name, str(ex))
             )
     def post_add(self, table, flash_message=True):

View File

@@ -89,9 +89,9 @@ class BaseDAO:
             db.session.add(model)
             if commit:
                 db.session.commit()
-        except SQLAlchemyError as e:  # pragma: no cover
+        except SQLAlchemyError as ex:  # pragma: no cover
             db.session.rollback()
-            raise DAOCreateFailedError(exception=e)
+            raise DAOCreateFailedError(exception=ex)
         return model
     @classmethod
@@ -106,9 +106,9 @@ class BaseDAO:
             db.session.merge(model)
             if commit:
                 db.session.commit()
-        except SQLAlchemyError as e:  # pragma: no cover
+        except SQLAlchemyError as ex:  # pragma: no cover
            db.session.rollback()
-            raise DAOUpdateFailedError(exception=e)
+            raise DAOUpdateFailedError(exception=ex)
         return model
     @classmethod
@@ -121,7 +121,7 @@ class BaseDAO:
             db.session.delete(model)
             if commit:
                 db.session.commit()
-        except SQLAlchemyError as e:  # pragma: no cover
+        except SQLAlchemyError as ex:  # pragma: no cover
             db.session.rollback()
-            raise DAODeleteFailedError(exception=e)
+            raise DAODeleteFailedError(exception=ex)
         return model
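
These DAO methods all follow the same guard: mutate the session, commit when asked, roll back and re-raise a wrapped error on failure. A generic, self-contained sketch of that pattern (the session and error types below are stand-ins, not Superset's actual classes):

class CreateFailedError(Exception):
    """Stand-in for a DAO error that wraps the original exception."""
    def __init__(self, exception):
        self.exception = exception
        super().__init__(str(exception))


def create(session, model, commit=True):
    try:
        session.add(model)    # stage the new row
        if commit:
            session.commit()  # flush it to the database
    except Exception as ex:
        session.rollback()    # leave the session usable for the caller
        raise CreateFailedError(exception=ex)
    return model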

View File

@@ -168,11 +168,11 @@ class DashboardRestApi(BaseSupersetModelRestApi):
         try:
             new_model = CreateDashboardCommand(g.user, item.data).run()
             return self.response(201, id=new_model.id, result=item.data)
-        except DashboardInvalidError as e:
-            return self.response_422(message=e.normalized_messages())
-        except DashboardCreateFailedError as e:
-            logger.error(f"Error creating model {self.__class__.__name__}: {e}")
-            return self.response_422(message=str(e))
+        except DashboardInvalidError as ex:
+            return self.response_422(message=ex.normalized_messages())
+        except DashboardCreateFailedError as ex:
+            logger.error(f"Error creating model {self.__class__.__name__}: {ex}")
+            return self.response_422(message=str(ex))
     @expose("/<pk>", methods=["PUT"])
     @protect()
@@ -235,11 +235,11 @@ class DashboardRestApi(BaseSupersetModelRestApi):
             return self.response_404()
         except DashboardForbiddenError:
             return self.response_403()
-        except DashboardInvalidError as e:
-            return self.response_422(message=e.normalized_messages())
-        except DashboardUpdateFailedError as e:
-            logger.error(f"Error updating model {self.__class__.__name__}: {e}")
-            return self.response_422(message=str(e))
+        except DashboardInvalidError as ex:
+            return self.response_422(message=ex.normalized_messages())
+        except DashboardUpdateFailedError as ex:
+            logger.error(f"Error updating model {self.__class__.__name__}: {ex}")
+            return self.response_422(message=str(ex))
     @expose("/<pk>", methods=["DELETE"])
     @protect()
@@ -283,9 +283,9 @@ class DashboardRestApi(BaseSupersetModelRestApi):
             return self.response_404()
         except DashboardForbiddenError:
             return self.response_403()
-        except DashboardDeleteFailedError as e:
-            logger.error(f"Error deleting model {self.__class__.__name__}: {e}")
-            return self.response_422(message=str(e))
+        except DashboardDeleteFailedError as ex:
+            logger.error(f"Error deleting model {self.__class__.__name__}: {ex}")
+            return self.response_422(message=str(ex))
     @expose("/", methods=["DELETE"])
     @protect()
@@ -344,8 +344,8 @@ class DashboardRestApi(BaseSupersetModelRestApi):
             return self.response_404()
         except DashboardForbiddenError:
             return self.response_403()
-        except DashboardBulkDeleteFailedError as e:
-            return self.response_422(message=str(e))
+        except DashboardBulkDeleteFailedError as ex:
+            return self.response_422(message=str(ex))
     @expose("/export/", methods=["GET"])
     @protect()

View File

@@ -45,8 +45,8 @@ class BulkDeleteDashboardCommand(BaseCommand):
         try:
             DashboardDAO.bulk_delete(self._models)
             return None
-        except DeleteFailedError as e:
-            logger.exception(e.exception)
+        except DeleteFailedError as ex:
+            logger.exception(ex.exception)
             raise DashboardBulkDeleteFailedError()
     def validate(self) -> None:

View File

@@ -43,8 +43,8 @@ class CreateDashboardCommand(BaseCommand):
         self.validate()
         try:
             dashboard = DashboardDAO.create(self._properties)
-        except DAOCreateFailedError as e:
-            logger.exception(e.exception)
+        except DAOCreateFailedError as ex:
+            logger.exception(ex.exception)
             raise DashboardCreateFailedError()
         return dashboard
@@ -60,8 +60,8 @@ class CreateDashboardCommand(BaseCommand):
         try:
             owners = populate_owners(self._actor, owner_ids)
             self._properties["owners"] = owners
-        except ValidationError as e:
-            exceptions.append(e)
+        except ValidationError as ex:
+            exceptions.append(ex)
         if exceptions:
             exception = DashboardInvalidError()
             exception.add_list(exceptions)

View File

@@ -45,8 +45,8 @@ class DeleteDashboardCommand(BaseCommand):
         self.validate()
         try:
             dashboard = DashboardDAO.delete(self._model)
-        except DAODeleteFailedError as e:
-            logger.exception(e.exception)
+        except DAODeleteFailedError as ex:
+            logger.exception(ex.exception)
             raise DashboardDeleteFailedError()
         return dashboard

View File

@@ -50,8 +50,8 @@ class UpdateDashboardCommand(BaseCommand):
         self.validate()
         try:
             dashboard = DashboardDAO.update(self._model, self._properties)
-        except DAOUpdateFailedError as e:
-            logger.exception(e.exception)
+        except DAOUpdateFailedError as ex:
+            logger.exception(ex.exception)
             raise DashboardUpdateFailedError()
         return dashboard
@@ -80,8 +80,8 @@ class UpdateDashboardCommand(BaseCommand):
         try:
             owners = populate_owners(self._actor, owner_ids)
             self._properties["owners"] = owners
-        except ValidationError as e:
-            exceptions.append(e)
+        except ValidationError as ex:
+            exceptions.append(ex)
         if exceptions:
             exception = DashboardInvalidError()
             exception.add_list(exceptions)

View File

@@ -63,7 +63,7 @@ class DashboardDAO(BaseDAO):
             )
             if commit:
                 db.session.commit()
-        except SQLAlchemyError as e:
+        except SQLAlchemyError as ex:
             if commit:
                 db.session.rollback()
-            raise e
+            raise ex

View File

@@ -26,10 +26,10 @@ from superset.utils.core import JS_MAX_INTEGER
 def df_to_records(dframe: pd.DataFrame) -> List[Dict[str, Any]]:
     data: List[Dict[str, Any]] = dframe.to_dict(orient="records")
     # TODO: refactor this
-    for d in data:
-        for k, v in list(d.items()):
+    for row in data:
+        for key, value in list(row.items()):
             # if an int is too big for JavaScript to handle
             # convert it to a string
-            if isinstance(v, int) and abs(v) > JS_MAX_INTEGER:
-                d[k] = str(v)
+            if isinstance(value, int) and abs(value) > JS_MAX_INTEGER:
+                row[key] = str(value)
     return data
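
For context on what the loop above guards against: JavaScript can only represent integers exactly up to Number.MAX_SAFE_INTEGER, so larger values are stringified before being shipped to the frontend. A standalone sketch of the same logic, assuming JS_MAX_INTEGER equals 2**53 - 1 (JavaScript's Number.MAX_SAFE_INTEGER):

JS_MAX_INTEGER = 2**53 - 1  # assumption: Number.MAX_SAFE_INTEGER

data = [{"id": 2**60, "count": 3}]
for row in data:
    for key, value in list(row.items()):
        # stringify ints the JS runtime could not represent exactly
        if isinstance(value, int) and abs(value) > JS_MAX_INTEGER:
            row[key] = str(value)

print(data)  # [{'id': '1152921504606846976', 'count': 3}]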

View File

@@ -171,11 +171,11 @@ class DatasetRestApi(BaseSupersetModelRestApi):
         try:
             new_model = CreateDatasetCommand(g.user, item.data).run()
             return self.response(201, id=new_model.id, result=item.data)
-        except DatasetInvalidError as e:
-            return self.response_422(message=e.normalized_messages())
-        except DatasetCreateFailedError as e:
-            logger.error(f"Error creating model {self.__class__.__name__}: {e}")
-            return self.response_422(message=str(e))
+        except DatasetInvalidError as ex:
+            return self.response_422(message=ex.normalized_messages())
+        except DatasetCreateFailedError as ex:
+            logger.error(f"Error creating model {self.__class__.__name__}: {ex}")
+            return self.response_422(message=str(ex))
     @expose("/<pk>", methods=["PUT"])
     @protect()
@@ -238,11 +238,11 @@ class DatasetRestApi(BaseSupersetModelRestApi):
             return self.response_404()
         except DatasetForbiddenError:
             return self.response_403()
-        except DatasetInvalidError as e:
-            return self.response_422(message=e.normalized_messages())
-        except DatasetUpdateFailedError as e:
-            logger.error(f"Error updating model {self.__class__.__name__}: {e}")
-            return self.response_422(message=str(e))
+        except DatasetInvalidError as ex:
+            return self.response_422(message=ex.normalized_messages())
+        except DatasetUpdateFailedError as ex:
+            logger.error(f"Error updating model {self.__class__.__name__}: {ex}")
+            return self.response_422(message=str(ex))
     @expose("/<pk>", methods=["DELETE"])
     @protect()
@@ -286,9 +286,9 @@ class DatasetRestApi(BaseSupersetModelRestApi):
             return self.response_404()
         except DatasetForbiddenError:
             return self.response_403()
-        except DatasetDeleteFailedError as e:
-            logger.error(f"Error deleting model {self.__class__.__name__}: {e}")
-            return self.response_422(message=str(e))
+        except DatasetDeleteFailedError as ex:
+            logger.error(f"Error deleting model {self.__class__.__name__}: {ex}")
+            return self.response_422(message=str(ex))
     @expose("/export/", methods=["GET"])
     @protect()
@@ -345,7 +345,7 @@ class DatasetRestApi(BaseSupersetModelRestApi):
     @expose("/<pk>/refresh", methods=["PUT"])
     @protect()
     @safe
-    def refresh(self, pk: int) -> Response:  # pylint: disable=invalid-name
+    def refresh(self, pk: int) -> Response:
         """Refresh a Dataset
         ---
         put:
@@ -384,6 +384,6 @@ class DatasetRestApi(BaseSupersetModelRestApi):
             return self.response_404()
         except DatasetForbiddenError:
             return self.response_403()
-        except DatasetRefreshFailedError as e:
-            logger.error(f"Error refreshing dataset {self.__class__.__name__}: {e}")
-            return self.response_422(message=str(e))
+        except DatasetRefreshFailedError as ex:
+            logger.error(f"Error refreshing dataset {self.__class__.__name__}: {ex}")
+            return self.response_422(message=str(ex))

View File

@@ -60,8 +60,8 @@ class CreateDatasetCommand(BaseCommand):
                 "schema_access", dataset.schema_perm
             )
             db.session.commit()
-        except (SQLAlchemyError, DAOCreateFailedError) as e:
-            logger.exception(e)
+        except (SQLAlchemyError, DAOCreateFailedError) as ex:
+            logger.exception(ex)
             db.session.rollback()
             raise DatasetCreateFailedError()
         return dataset
@@ -92,8 +92,8 @@ class CreateDatasetCommand(BaseCommand):
         try:
             owners = populate_owners(self._actor, owner_ids)
             self._properties["owners"] = owners
-        except ValidationError as e:
-            exceptions.append(e)
+        except ValidationError as ex:
+            exceptions.append(ex)
         if exceptions:
             exception = DatasetInvalidError()
             exception.add_list(exceptions)

View File

@@ -51,8 +51,8 @@ class DeleteDatasetCommand(BaseCommand):
                 "datasource_access", dataset.get_perm()
             )
             db.session.commit()
-        except (SQLAlchemyError, DAODeleteFailedError) as e:
-            logger.exception(e)
+        except (SQLAlchemyError, DAODeleteFailedError) as ex:
+            logger.exception(ex)
             db.session.rollback()
             raise DatasetDeleteFailedError()
         return dataset

View File

@@ -46,8 +46,8 @@ class RefreshDatasetCommand(BaseCommand):
             try:
                 self._model.fetch_metadata()
                 return self._model
-            except Exception as e:
-                logger.exception(e)
+            except Exception as ex:
+                logger.exception(ex)
                 raise DatasetRefreshFailedError()
         raise DatasetRefreshFailedError()

View File

@@ -60,8 +60,8 @@ class UpdateDatasetCommand(BaseCommand):
             try:
                 dataset = DatasetDAO.update(self._model, self._properties)
                 return dataset
-            except DAOUpdateFailedError as e:
-                logger.exception(e.exception)
+            except DAOUpdateFailedError as ex:
+                logger.exception(ex.exception)
                 raise DatasetUpdateFailedError()
         raise DatasetUpdateFailedError()
@@ -92,8 +92,8 @@ class UpdateDatasetCommand(BaseCommand):
         try:
             owners = populate_owners(self._actor, owner_ids)
             self._properties["owners"] = owners
-        except ValidationError as e:
-            exceptions.append(e)
+        except ValidationError as ex:
+            exceptions.append(ex)
         # Validate columns
         columns = self._properties.get("columns")

View File

@@ -45,8 +45,8 @@ class DatasetDAO(BaseDAO):
     def get_database_by_id(database_id: int) -> Optional[Database]:
         try:
             return db.session.query(Database).filter_by(id=database_id).one_or_none()
-        except SQLAlchemyError as e:  # pragma: no cover
-            logger.error(f"Could not get database by id: {e}")
+        except SQLAlchemyError as ex:  # pragma: no cover
+            logger.error(f"Could not get database by id: {ex}")
             return None
     @staticmethod
@@ -54,8 +54,8 @@ class DatasetDAO(BaseDAO):
         try:
             database.get_table(table_name, schema=schema)
             return True
-        except SQLAlchemyError as e:  # pragma: no cover
-            logger.error(f"Got an error {e} validating table: {table_name}")
+        except SQLAlchemyError as ex:  # pragma: no cover
+            logger.error(f"Got an error {ex} validating table: {table_name}")
             return False
     @staticmethod

View File

@@ -39,7 +39,7 @@ def validate_python_date_format(value: str) -> None:
 class DatasetColumnsPutSchema(Schema):
-    id = fields.Integer()  # pylint: disable=invalid-name
+    id = fields.Integer()
     column_name = fields.String(required=True, validate=Length(1, 255))
     type = fields.String(validate=Length(1, 32))
     verbose_name = fields.String(allow_none=True, Length=(1, 1024))
@@ -55,7 +55,7 @@ class DatasetColumnsPutSchema(Schema):
 class DatasetMetricsPutSchema(Schema):
-    id = fields.Integer()  # pylint: disable=invalid-name
+    id = fields.Integer()
     expression = fields.String(required=True)
     description = fields.String(allow_none=True)
     metric_name = fields.String(required=True, validate=Length(1, 255))

View File

@@ -441,9 +441,7 @@ class BaseEngineSpec:  # pylint: disable=too-many-public-methods
         return df
     @classmethod
-    def df_to_sql(  # pylint: disable=invalid-name
-        cls, df: pd.DataFrame, **kwargs: Any
-    ) -> None:
+    def df_to_sql(cls, df: pd.DataFrame, **kwargs: Any) -> None:
         """ Upload data from a Pandas DataFrame to a database. For
         regular engines this calls the DataFrame.to_sql() method. Can be
         overridden for engines that don't work well with to_sql(), e.g.
@@ -562,13 +560,13 @@ class BaseEngineSpec:  # pylint: disable=too-many-public-methods
         pass
     @classmethod
-    def extract_error_message(cls, e: Exception) -> str:
-        return f"{cls.engine} error: {cls._extract_error_message(e)}"
+    def extract_error_message(cls, ex: Exception) -> str:
+        return f"{cls.engine} error: {cls._extract_error_message(ex)}"
     @classmethod
-    def _extract_error_message(cls, e: Exception) -> Optional[str]:
+    def _extract_error_message(cls, ex: Exception) -> Optional[str]:
         """Extract error message for queries"""
-        return utils.error_msg_from_exception(e)
+        return utils.error_msg_from_exception(ex)
     @classmethod
     def adjust_database_uri(cls, uri: URL, selected_schema: Optional[str]) -> None:
@@ -977,7 +975,7 @@ class BaseEngineSpec:  # pylint: disable=too-many-public-methods
         if database.extra:
             try:
                 extra = json.loads(database.extra)
-            except json.JSONDecodeError as e:
-                logger.error(e)
-                raise e
+            except json.JSONDecodeError as ex:
+                logger.error(ex)
+                raise ex
         return extra

View File

@@ -64,9 +64,9 @@ class DruidEngineSpec(BaseEngineSpec):  # pylint: disable=abstract-method
         """
         try:
             extra = json.loads(database.extra or "{}")
-        except json.JSONDecodeError as e:
-            logger.error(e)
-            raise e
+        except json.JSONDecodeError as ex:
+            logger.error(ex)
+            raise ex
         if database.server_cert:
             engine_params = extra.get("engine_params", {})

View File

@@ -203,8 +203,8 @@ class HiveEngineSpec(PrestoEngineSpec):
             uri.database = parse.quote(selected_schema, safe="")
     @classmethod
-    def _extract_error_message(cls, e: Exception) -> str:
-        msg = str(e)
+    def _extract_error_message(cls, ex: Exception) -> str:
+        msg = str(ex)
         match = re.search(r'errorMessage="(.*?)(?<!\\)"', msg)
         if match:
             msg = match.group(1)

View File

@@ -86,12 +86,12 @@ class MySQLEngineSpec(BaseEngineSpec):
         return "from_unixtime({col})"
     @classmethod
-    def _extract_error_message(cls, e: Exception) -> str:
+    def _extract_error_message(cls, ex: Exception) -> str:
         """Extract error message for queries"""
-        message = str(e)
+        message = str(ex)
         try:
-            if isinstance(e.args, tuple) and len(e.args) > 1:
-                message = e.args[1]
+            if isinstance(ex.args, tuple) and len(ex.args) > 1:
+                message = ex.args[1]
         except Exception:  # pylint: disable=broad-except
             pass
         return message
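
The MySQL override relies on DB-API errors carrying an (errno, message) tuple in args, falling back to str(ex) otherwise. A hedged sketch of that behavior using a stand-in exception class:

class FakeMySQLError(Exception):
    """Stand-in for a DB-API error whose args are (errno, message)."""


ex = FakeMySQLError(1045, "Access denied for user 'x'@'localhost'")
message = str(ex)
if isinstance(ex.args, tuple) and len(ex.args) > 1:
    message = ex.args[1]  # prefer the human-readable half of the tuple
print(message)  # Access denied for user 'x'@'localhost'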

View File

@@ -762,22 +762,22 @@ class PrestoEngineSpec(BaseEngineSpec):
             polled = cursor.poll()
     @classmethod
-    def _extract_error_message(cls, e: Exception) -> Optional[str]:
+    def _extract_error_message(cls, ex: Exception) -> Optional[str]:
         if (
-            hasattr(e, "orig")
-            and type(e.orig).__name__ == "DatabaseError"  # type: ignore
-            and isinstance(e.orig[0], dict)  # type: ignore
+            hasattr(ex, "orig")
+            and type(ex.orig).__name__ == "DatabaseError"  # type: ignore
+            and isinstance(ex.orig[0], dict)  # type: ignore
         ):
-            error_dict = e.orig[0]  # type: ignore
+            error_dict = ex.orig[0]  # type: ignore
             return "{} at {}: {}".format(
                 error_dict.get("errorName"),
                 error_dict.get("errorLocation"),
                 error_dict.get("message"),
             )
-        if type(e).__name__ == "DatabaseError" and hasattr(e, "args") and e.args:
-            error_dict = e.args[0]
+        if type(ex).__name__ == "DatabaseError" and hasattr(ex, "args") and ex.args:
+            error_dict = ex.args[0]
             return error_dict.get("message")
-        return utils.error_msg_from_exception(e)
+        return utils.error_msg_from_exception(ex)
     @classmethod
     def _partition_query(  # pylint: disable=too-many-arguments,too-many-locals
@@ -863,9 +863,7 @@ class PrestoEngineSpec(BaseEngineSpec):
         return query
     @classmethod
-    def _latest_partition_from_df(  # pylint: disable=invalid-name
-        cls, df: pd.DataFrame
-    ) -> Optional[List[str]]:
+    def _latest_partition_from_df(cls, df: pd.DataFrame) -> Optional[List[str]]:
         if not df.empty:
             return df.to_records(index=False)[0].item()
         return None

View File

@@ -62,9 +62,9 @@ def merge_slice(slc: Slice) -> None:
 def get_slice_json(defaults: Dict[Any, Any], **kwargs: Any) -> str:
-    d = defaults.copy()
-    d.update(kwargs)
-    return json.dumps(d, indent=4, sort_keys=True)
+    defaults_copy = defaults.copy()
+    defaults_copy.update(kwargs)
+    return json.dumps(defaults_copy, indent=4, sort_keys=True)
 def get_example_data(

View File

@@ -88,13 +88,13 @@ def upgrade():
            batch_op.create_unique_constraint(
                "uq_dashboard_slice", ["dashboard_id", "slice_id"]
            )
-    except Exception as e:
-        logging.exception(e)
+    except Exception as ex:
+        logging.exception(ex)
 def downgrade():
     try:
         with op.batch_alter_table("dashboard_slices") as batch_op:
             batch_op.drop_constraint("uq_dashboard_slice", type_="unique")
-    except Exception as e:
-        logging.exception(e)
+    except Exception as ex:
+        logging.exception(ex)

View File

@@ -101,8 +101,8 @@ def upgrade():
                 dashboard.json_metadata = None
             session.merge(dashboard)
-        except Exception as e:
-            logging.exception(f"dashboard {dashboard.id} has error: {e}")
+        except Exception as ex:
+            logging.exception(f"dashboard {dashboard.id} has error: {ex}")
     session.commit()
     session.close()

View File

@@ -58,8 +58,8 @@ def upgrade():
             batch_op.drop_constraint(slices_ibfk_2, type_="foreignkey")
             batch_op.drop_column("druid_datasource_id")
             batch_op.drop_column("table_id")
-    except Exception as e:
-        logging.warning(str(e))
+    except Exception as ex:
+        logging.warning(str(ex))
     # fixed issue: https://github.com/airbnb/superset/issues/466
     try:
@@ -67,27 +67,27 @@ def upgrade():
             batch_op.create_foreign_key(
                 None, "datasources", ["datasource_name"], ["datasource_name"]
             )
-    except Exception as e:
-        logging.warning(str(e))
+    except Exception as ex:
+        logging.warning(str(ex))
     try:
         with op.batch_alter_table("query") as batch_op:
             batch_op.create_unique_constraint("client_id", ["client_id"])
-    except Exception as e:
-        logging.warning(str(e))
+    except Exception as ex:
+        logging.warning(str(ex))
     try:
         with op.batch_alter_table("query") as batch_op:
             batch_op.drop_column("name")
-    except Exception as e:
-        logging.warning(str(e))
+    except Exception as ex:
+        logging.warning(str(ex))
 def downgrade():
     try:
         with op.batch_alter_table("tables") as batch_op:
             batch_op.create_index("table_name", ["table_name"], unique=True)
-    except Exception as e:
-        logging.warning(str(e))
+    except Exception as ex:
+        logging.warning(str(ex))
     try:
         with op.batch_alter_table("slices") as batch_op:
@@ -111,8 +111,8 @@ def downgrade():
                 "slices_ibfk_1", "datasources", ["druid_datasource_id"], ["id"]
             )
            batch_op.create_foreign_key("slices_ibfk_2", "tables", ["table_id"], ["id"])
-    except Exception as e:
-        logging.warning(str(e))
+    except Exception as ex:
+        logging.warning(str(ex))
     try:
         fk_columns = generic_find_constraint_name(
@@ -123,12 +123,12 @@ def downgrade():
         )
         with op.batch_alter_table("columns") as batch_op:
             batch_op.drop_constraint(fk_columns, type_="foreignkey")
-    except Exception as e:
-        logging.warning(str(e))
+    except Exception as ex:
+        logging.warning(str(ex))
     op.add_column("query", sa.Column("name", sa.String(length=256), nullable=True))
     try:
         with op.batch_alter_table("query") as batch_op:
             batch_op.drop_constraint("client_id", type_="unique")
-    except Exception as e:
-        logging.warning(str(e))
+    except Exception as ex:
+        logging.warning(str(ex))

View File

@@ -120,14 +120,14 @@ def upgrade():
                 or "uq_datasources_datasource_name",
                 type_="unique",
             )
-    except Exception as e:
+    except Exception as ex:
         logging.warning(
             "Constraint drop failed, you may want to do this "
             "manually on your database. For context, this is a known "
             "issue around undeterministic contraint names on Postgres "
            "and perhaps more databases through SQLAlchemy."
        )
-        logging.exception(e)
+        logging.exception(ex)
 def downgrade():

View File

@@ -39,6 +39,6 @@ def upgrade():
 def downgrade():
     try:
         op.drop_column("dbs", "allow_dml")
-    except Exception as e:
-        logging.exception(e)
+    except Exception as ex:
+        logging.exception(ex)
         pass

View File

@@ -81,8 +81,8 @@ def upgrade():
                 layout, indent=None, separators=(",", ":"), sort_keys=True
             )
             session.merge(dashboard)
-        except Exception as e:
-            logging.exception(e)
+        except Exception as ex:
+            logging.exception(ex)
     session.commit()
     session.close()
@@ -111,8 +111,8 @@ def downgrade():
                 layout, indent=None, separators=(",", ":"), sort_keys=True
             )
             session.merge(dashboard)
-        except Exception as e:
-            logging.exception(e)
+        except Exception as ex:
+            logging.exception(ex)
     session.commit()
     session.close()

View File

@@ -85,8 +85,8 @@ def upgrade():
             params.pop("resample_fillmethod", None)
             params.pop("resample_how", None)
             slc.params = json.dumps(params, sort_keys=True)
-        except Exception as e:
-            logging.exception(e)
+        except Exception as ex:
+            logging.exception(ex)
     session.commit()
     session.close()
@@ -110,8 +110,8 @@ def downgrade():
                 del params["resample_method"]
             slc.params = json.dumps(params, sort_keys=True)
-        except Exception as e:
-            logging.exception(e)
+        except Exception as ex:
+            logging.exception(ex)
     session.commit()
     session.close()

View File

@@ -39,5 +39,5 @@ def upgrade():
 def downgrade():
     try:
         op.drop_column("tables", "params")
-    except Exception as e:
-        logging.warning(str(e))
+    except Exception as ex:
+        logging.warning(str(ex))

View File

@@ -62,8 +62,8 @@ def upgrade():
             session.merge(slc)
             session.commit()
             print("Upgraded ({}/{}): {}".format(i, slice_len, slc.slice_name))
-        except Exception as e:
-            print(slc.slice_name + " error: " + str(e))
+        except Exception as ex:
+            print(slc.slice_name + " error: " + str(ex))
     session.close()

View File

@@ -60,8 +60,8 @@ def upgrade():
             session.merge(slc)
             session.commit()
             print("Upgraded ({}/{}): {}".format(i, slice_len, slc.slice_name))
-        except Exception as e:
-            print(slc.slice_name + " error: " + str(e))
+        except Exception as ex:
+            print(slc.slice_name + " error: " + str(ex))
     session.close()

View File

@@ -43,7 +43,7 @@ def upgrade():
     try:
         op.create_unique_constraint(None, "dbs", ["verbose_name"])
         op.create_unique_constraint(None, "clusters", ["verbose_name"])
-    except Exception as e:
+    except Exception:
         logging.info("Constraint not created, expected when using sqlite")
@@ -51,5 +51,5 @@ def downgrade():
     try:
         op.drop_column("dbs", "verbose_name")
         op.drop_column("clusters", "verbose_name")
-    except Exception as e:
-        logging.exception(e)
+    except Exception as ex:
+        logging.exception(ex)

View File

@@ -37,5 +37,5 @@ def upgrade():
 def downgrade():
     try:
         op.drop_column("tables", "template_params")
-    except Exception as e:
-        logging.warning(str(e))
+    except Exception as ex:
+        logging.warning(str(ex))

View File

@@ -78,7 +78,7 @@ def upgrade():
     for slc in filter_box_slices.all():
         try:
             upgrade_slice(slc)
-        except Exception as e:
-            logging.exception(e)
+        except Exception as ex:
+            logging.exception(ex)
     session.commit()
@@ -100,8 +100,8 @@ def downgrade():
             params["metric"] = flts[0].get("metric")
             params["groupby"] = [o.get("column") for o in flts]
             slc.params = json.dumps(params, sort_keys=True)
-        except Exception as e:
-            logging.exception(e)
+        except Exception as ex:
+            logging.exception(ex)
     session.commit()
     session.close()

View File

@@ -27,7 +27,7 @@ class AnnotationLayer(Model, AuditMixinNullable):
     """A logical namespace for a set of annotations"""
     __tablename__ = "annotation_layer"
-    id = Column(Integer, primary_key=True)  # pylint: disable=invalid-name
+    id = Column(Integer, primary_key=True)
     name = Column(String(250))
     descr = Column(Text)
@@ -40,7 +40,7 @@ class Annotation(Model, AuditMixinNullable):
     """Time-related annotation"""
     __tablename__ = "annotation"
-    id = Column(Integer, primary_key=True)  # pylint: disable=invalid-name
+    id = Column(Integer, primary_key=True)
     start_dttm = Column(DateTime)
     end_dttm = Column(DateTime)
     layer_id = Column(Integer, ForeignKey("annotation_layer.id"), nullable=False)

View File

@@ -73,7 +73,7 @@ class Url(Model, AuditMixinNullable):
     """Used for the short url feature"""
     __tablename__ = "url"
-    id = Column(Integer, primary_key=True)  # pylint: disable=invalid-name
+    id = Column(Integer, primary_key=True)
     url = Column(Text)
@@ -82,7 +82,7 @@ class KeyValue(Model):  # pylint: disable=too-few-public-methods
     """Used for any type of key-value store"""
     __tablename__ = "keyvalue"
-    id = Column(Integer, primary_key=True)  # pylint: disable=invalid-name
+    id = Column(Integer, primary_key=True)
     value = Column(Text, nullable=False)
@@ -91,7 +91,7 @@ class CssTemplate(Model, AuditMixinNullable):
     """CSS templates for dashboards"""
     __tablename__ = "css_templates"
-    id = Column(Integer, primary_key=True)  # pylint: disable=invalid-name
+    id = Column(Integer, primary_key=True)
     template_name = Column(String(250))
     css = Column(Text, default="")
@@ -106,7 +106,7 @@ class Database(
     type = "table"
     __table_args__ = (UniqueConstraint("database_name"),)
-    id = Column(Integer, primary_key=True)  # pylint: disable=invalid-name
+    id = Column(Integer, primary_key=True)
     verbose_name = Column(String(250), unique=True)
     # short unique name, used in permissions
     database_name = Column(String(250), unique=True, nullable=False)
@@ -481,8 +481,8 @@ class Database(
             return [
                 utils.DatasourceName(table=table, schema=schema) for table in tables
             ]
-        except Exception as e:  # pylint: disable=broad-except
-            logger.exception(e)
+        except Exception as ex:  # pylint: disable=broad-except
+            logger.exception(ex)
     @cache_util.memoized_func(
         key=lambda *args, **kwargs: f"db:{{}}:schema:{kwargs.get('schema')}:view_list",  # type: ignore
@@ -511,8 +511,8 @@ class Database(
                 database=self, inspector=self.inspector, schema=schema
             )
             return [utils.DatasourceName(table=view, schema=schema) for view in views]
-        except Exception as e:  # pylint: disable=broad-except
-            logger.exception(e)
+        except Exception as ex:  # pylint: disable=broad-except
+            logger.exception(ex)
     @cache_util.memoized_func(
         key=lambda *args, **kwargs: "db:{}:schema_list", attribute_in_key="id"
@@ -564,9 +564,9 @@ class Database(
         if self.encrypted_extra:
             try:
                 encrypted_extra = json.loads(self.encrypted_extra)
-            except json.JSONDecodeError as e:
-                logger.error(e)
-                raise e
+            except json.JSONDecodeError as ex:
+                logger.error(ex)
+                raise ex
         return encrypted_extra
     def get_table(self, table_name: str, schema: Optional[str] = None) -> Table:
@@ -645,7 +645,7 @@ class Log(Model):  # pylint: disable=too-few-public-methods
     __tablename__ = "logs"
-    id = Column(Integer, primary_key=True)  # pylint: disable=invalid-name
+    id = Column(Integer, primary_key=True)
     action = Column(String(512))
     user_id = Column(Integer, ForeignKey("ab_user.id"))
     dashboard_id = Column(Integer)
@@ -662,7 +662,7 @@ class Log(Model):  # pylint: disable=too-few-public-methods
 class FavStar(Model):  # pylint: disable=too-few-public-methods
     __tablename__ = "favstar"
-    id = Column(Integer, primary_key=True)  # pylint: disable=invalid-name
+    id = Column(Integer, primary_key=True)
     user_id = Column(Integer, ForeignKey("ab_user.id"))
     class_name = Column(String(50))
     obj_id = Column(Integer)
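With `id` now in good-names, the inline disables on primary-key columns above become redundant. A minimal sketch (a hypothetical model, not one from this commit) of the resulting pattern; `id` here is a class attribute, so the builtin `id()` is only shadowed inside the class body, which is the usual ORM convention:

    from flask_appbuilder import Model
    from sqlalchemy import Column, Integer, String

    class Widget(Model):
        __tablename__ = "widget"
        id = Column(Integer, primary_key=True)  # no pylint disable needed anymore
        name = Column(String(250))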


@@ -119,7 +119,7 @@ class Dashboard(  # pylint: disable=too-many-instance-attributes
     """The dashboard object!"""
     __tablename__ = "dashboards"
-    id = Column(Integer, primary_key=True)  # pylint: disable=invalid-name
+    id = Column(Integer, primary_key=True)
     dashboard_title = Column(String(500))
     position_json = Column(utils.MediumText())
     description = Column(Text)


@@ -37,7 +37,7 @@ class DatasourceAccessRequest(Model, AuditMixinNullable):
     """ORM model for the access requests for datasources and dbs."""
     __tablename__ = "access_request"
-    id = Column(Integer, primary_key=True)  # pylint: disable=invalid-name
+    id = Column(Integer, primary_key=True)
     datasource_id = Column(Integer)
     datasource_type = Column(String(200))


@@ -166,14 +166,14 @@ class ImportMixin:
         try:
             obj_query = session.query(cls).filter(and_(*filters))
             obj = obj_query.one_or_none()
-        except MultipleResultsFound as e:
+        except MultipleResultsFound as ex:
             logger.error(
                 "Error importing %s \n %s \n %s",
                 cls.__name__,
                 str(obj_query),
                 yaml.safe_dump(dict_rep),
             )
-            raise e
+            raise ex
         if not obj:
             is_new_obj = True
@@ -274,14 +274,14 @@ class ImportMixin:
         return new_obj
     def alter_params(self, **kwargs):
-        d = self.params_dict
-        d.update(kwargs)
-        self.params = json.dumps(d)
+        params = self.params_dict
+        params.update(kwargs)
+        self.params = json.dumps(params)
     def remove_params(self, param_to_remove: str) -> None:
-        d = self.params_dict
-        d.pop(param_to_remove, None)
-        self.params = json.dumps(d)
+        params = self.params_dict
+        params.pop(param_to_remove, None)
+        self.params = json.dumps(params)
     def reset_ownership(self):
         """ object will belong to the user the current user """
@@ -376,7 +376,7 @@ class QueryResult:  # pylint: disable=too-few-public-methods
     def __init__(  # pylint: disable=too-many-arguments
         self, df, query, duration, status=QueryStatus.SUCCESS, error_message=None
     ):
-        self.df: pd.DataFrame = df  # pylint: disable=invalid-name
+        self.df: pd.DataFrame = df
        self.query: str = query
         self.duration: int = duration
         self.status: str = status
@@ -395,8 +395,8 @@ class ExtraJSONMixin:
         except Exception:  # pylint: disable=broad-except
             return {}
-    def set_extra_json(self, d):
-        self.extra_json = json.dumps(d)
+    def set_extra_json(self, extras):
+        self.extra_json = json.dumps(extras)
     def set_extra_json_key(self, key, value):
         extra = self.extra
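The `d` to `params` rename in alter_params/remove_params is behavior-preserving. A self-contained sketch of the round trip, simplified by leaving out the SQLAlchemy base class:

    import json

    class ParamsMixin:
        params = "{}"

        @property
        def params_dict(self):
            return json.loads(self.params)

        def alter_params(self, **kwargs):
            params = self.params_dict
            params.update(kwargs)
            self.params = json.dumps(params)

        def remove_params(self, param_to_remove):
            params = self.params_dict
            params.pop(param_to_remove, None)
            self.params = json.dumps(params)

    obj = ParamsMixin()
    obj.alter_params(limit=100)
    obj.remove_params("limit")
    assert obj.params_dict == {}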


@@ -50,7 +50,7 @@ class EmailSchedule:
     __tablename__ = "email_schedules"
-    id = Column(Integer, primary_key=True)  # pylint: disable=invalid-name
+    id = Column(Integer, primary_key=True)
     active = Column(Boolean, default=True, index=True)
     crontab = Column(String(50))


@@ -55,7 +55,7 @@ class Slice(
     """A slice is essentially a report or a view on data"""
     __tablename__ = "slices"
-    id = Column(Integer, primary_key=True)  # pylint: disable=invalid-name
+    id = Column(Integer, primary_key=True)
     slice_name = Column(String(250))
     datasource_id = Column(Integer)
     datasource_type = Column(String(200))
@@ -135,9 +135,9 @@ class Slice(
     @property  # type: ignore
     @utils.memoized
     def viz(self) -> BaseViz:
-        d = json.loads(self.params)
+        form_data = json.loads(self.params)
         viz_class = viz_types[self.viz_type]
-        return viz_class(datasource=self.datasource, form_data=d)
+        return viz_class(datasource=self.datasource, form_data=form_data)
     @property
     def description_markeddown(self) -> str:
@@ -146,14 +146,14 @@ class Slice(
     @property
     def data(self) -> Dict[str, Any]:
         """Data used to render slice in templates"""
-        d: Dict[str, Any] = {}
+        data: Dict[str, Any] = {}
         self.token = ""
         try:
-            d = self.viz.data
-            self.token = d.get("token")  # type: ignore
-        except Exception as e:  # pylint: disable=broad-except
-            logger.exception(e)
-            d["error"] = str(e)
+            data = self.viz.data
+            self.token = data.get("token")  # type: ignore
+        except Exception as ex:  # pylint: disable=broad-except
+            logger.exception(ex)
+            data["error"] = str(ex)
         return {
             "cache_timeout": self.cache_timeout,
             "datasource": self.datasource_name,
@@ -178,9 +178,9 @@ class Slice(
         form_data: Dict[str, Any] = {}
         try:
             form_data = json.loads(self.params)
-        except Exception as e:  # pylint: disable=broad-except
+        except Exception as ex:  # pylint: disable=broad-except
             logger.error("Malformed json in slice's params")
-            logger.exception(e)
+            logger.exception(ex)
         form_data.update(
             {
                 "slice_id": self.id,


@@ -48,7 +48,7 @@ class Query(Model, ExtraJSONMixin):
     table may represent multiple SQL statements executed sequentially"""
     __tablename__ = "query"
-    id = Column(Integer, primary_key=True)  # pylint: disable=invalid-name
+    id = Column(Integer, primary_key=True)
     client_id = Column(String(11), unique=True, nullable=False)
     database_id = Column(Integer, ForeignKey("dbs.id"), nullable=False)
@@ -150,7 +150,7 @@ class SavedQuery(Model, AuditMixinNullable, ExtraJSONMixin):
     """ORM model for SQL query"""
     __tablename__ = "saved_query"
-    id = Column(Integer, primary_key=True)  # pylint: disable=invalid-name
+    id = Column(Integer, primary_key=True)
     user_id = Column(Integer, ForeignKey("ab_user.id"), nullable=True)
     db_id = Column(Integer, ForeignKey("dbs.id"), nullable=True)
     schema = Column(String(128))
@@ -195,9 +195,7 @@ class TabState(Model, AuditMixinNullable, ExtraJSONMixin):
     __tablename__ = "tab_state"
     # basic info
-    id = Column(  # pylint: disable=invalid-name
-        Integer, primary_key=True, autoincrement=True
-    )
+    id = Column(Integer, primary_key=True, autoincrement=True)
     user_id = Column(Integer, ForeignKey("ab_user.id"))
     label = Column(String(256))
     active = Column(Boolean, default=False)
@@ -248,9 +246,7 @@ class TableSchema(Model, AuditMixinNullable, ExtraJSONMixin):
     __tablename__ = "table_schema"
-    id = Column(  # pylint: disable=invalid-name
-        Integer, primary_key=True, autoincrement=True
-    )
+    id = Column(Integer, primary_key=True, autoincrement=True)
     tab_state_id = Column(Integer, ForeignKey("tab_state.id", ondelete="CASCADE"))
     database_id = Column(Integer, ForeignKey("dbs.id"), nullable=False)


@@ -62,7 +62,7 @@ class Tag(Model, AuditMixinNullable):
     """A tag attached to an object (query, chart or dashboard)."""
     __tablename__ = "tag"
-    id = Column(Integer, primary_key=True)  # pylint: disable=invalid-name
+    id = Column(Integer, primary_key=True)
     name = Column(String(250), unique=True)
     type = Column(Enum(TagTypes))
@@ -72,7 +72,7 @@ class TaggedObject(Model, AuditMixinNullable):
     """An association between an object and a tag."""
     __tablename__ = "tagged_object"
-    id = Column(Integer, primary_key=True)  # pylint: disable=invalid-name
+    id = Column(Integer, primary_key=True)
     tag_id = Column(Integer, ForeignKey("tag.id"))
     object_id = Column(Integer)
     object_type = Column(Enum(ObjectTypes))


@@ -34,7 +34,7 @@ class UserAttribute(Model, AuditMixinNullable):
     """
     __tablename__ = "user_attribute"
-    id = Column(Integer, primary_key=True)  # pylint: disable=invalid-name
+    id = Column(Integer, primary_key=True)
     user_id = Column(Integer, ForeignKey("ab_user.id"))
     user = relationship(
         security_manager.user_model, backref="extra_attributes", foreign_keys=[user_id]


@@ -138,8 +138,8 @@ class SupersetResultSet:
                     pa_data[i] = pa.Array.from_pandas(
                         series, type=pa.timestamp("ns", tz=tz)
                     )
-                except Exception as e:
-                    logger.exception(e)
+                except Exception as ex:
+                    logger.exception(ex)
         self.table = pa.Table.from_arrays(pa_data, names=column_names)
         self._type_dict: Dict[str, Any] = {}
@@ -150,8 +150,8 @@ class SupersetResultSet:
                 for i, col in enumerate(column_names)
                 if deduped_cursor_desc
             }
-        except Exception as e:
-            logger.exception(e)
+        except Exception as ex:
+            logger.exception(ex)
     @staticmethod
     def convert_pa_dtype(pa_dtype: pa.DataType) -> Optional[str]:
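For context on the pyarrow call that the try/except above guards, a small standalone sketch of the conversion:

    import pandas as pd
    import pyarrow as pa

    # Convert a pandas datetime series into an Arrow timestamp array;
    # this is the call the hunk wraps (a tz-aware type can be requested too).
    series = pd.Series(pd.to_datetime(["2020-01-01", "2020-01-02"]))
    arr = pa.Array.from_pandas(series, type=pa.timestamp("ns"))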


@@ -135,9 +135,9 @@ def session_scope(nullpool):
     try:
         yield session
         session.commit()
-    except Exception as e:
+    except Exception as ex:
         session.rollback()
-        logger.exception(e)
+        logger.exception(ex)
         raise
     finally:
         session.close()
@@ -175,12 +175,12 @@ def get_sql_results(  # pylint: disable=too-many-arguments
             expand_data=expand_data,
             log_params=log_params,
         )
-    except Exception as e:  # pylint: disable=broad-except
+    except Exception as ex:  # pylint: disable=broad-except
         logger.error("Query %d", query_id)
-        logger.debug("Query %d: %s", query_id, e)
+        logger.debug("Query %d: %s", query_id, ex)
         stats_logger.incr("error_sqllab_unhandled")
         query = get_query(query_id, session)
-        return handle_query_error(str(e), query, session)
+        return handle_query_error(str(ex), query, session)
 # pylint: disable=too-many-arguments
@@ -253,17 +253,17 @@ def execute_sql_statement(sql_statement, query, user_name, session, cursor, log_
         )
         data = db_engine_spec.fetch_data(cursor, query.limit)
-    except SoftTimeLimitExceeded as e:
+    except SoftTimeLimitExceeded as ex:
         logger.error("Query %d: Time limit exceeded", query.id)
-        logger.debug("Query %d: %s", query.id, e)
+        logger.debug("Query %d: %s", query.id, ex)
         raise SqlLabTimeoutException(
             "SQL Lab timeout. This environment's policy is to kill queries "
             "after {} seconds.".format(SQLLAB_TIMEOUT)
         )
-    except Exception as e:
-        logger.error("Query %d: %s", query.id, type(e))
-        logger.debug("Query %d: %s", query.id, e)
-        raise SqlLabException(db_engine_spec.extract_error_message(e))
+    except Exception as ex:
+        logger.error("Query %d: %s", query.id, type(ex))
+        logger.debug("Query %d: %s", query.id, ex)
+        raise SqlLabException(db_engine_spec.extract_error_message(ex))
     logger.debug("Query %d: Fetching cursor description", query.id)
     cursor_description = cursor.description
@@ -378,8 +378,8 @@ def execute_sql_statements(
             result_set = execute_sql_statement(
                 statement, query, user_name, session, cursor, log_params
             )
-        except Exception as e:  # pylint: disable=broad-except
-            msg = str(e)
+        except Exception as ex:  # pylint: disable=broad-except
+            msg = str(ex)
             if statement_count > 1:
                 msg = f"[Statement {i+1} out of {statement_count}] " + msg
             payload = handle_query_error(msg, query, session, payload)
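Aside from the rename, the only logic in the last hunk is the error-message prefix for multi-statement scripts; isolated, it reads:

    # Standalone illustration of the prefixing above; the message text
    # stands in for str(ex).
    statements = ["SELECT 1", "SELEC oops", "SELECT 3"]
    statement_count = len(statements)
    for i, statement in enumerate(statements):
        msg = "syntax error"
        if statement_count > 1:
            msg = f"[Statement {i+1} out of {statement_count}] " + msg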


@@ -136,9 +136,9 @@ class PrestoDBSQLValidator(BaseSQLValidator):
                 start_column=start_column,
                 end_column=end_column,
             )
-        except Exception as e:
-            logger.exception(f"Unexpected error running validation query: {e}")
-            raise e
+        except Exception as ex:
+            logger.exception(f"Unexpected error running validation query: {ex}")
+            raise ex
     @classmethod
     def validate(


@@ -263,8 +263,8 @@ def parse_human_datetime(s):
             if parsed_flags & 2 == 0:
                 parsed_dttm = parsed_dttm.replace(hour=0, minute=0, second=0)
             dttm = dttm_from_timetuple(parsed_dttm.utctimetuple())
-        except Exception as e:
-            logger.exception(e)
+        except Exception as ex:
+            logger.exception(ex)
             raise ValueError("Couldn't parse date string [{}]".format(s))
     return dttm
@@ -565,8 +565,8 @@ def validate_json(obj: Union[bytes, bytearray, str]) -> None:
     if obj:
         try:
             json.loads(obj)
-        except Exception as e:
-            logger.error(f"JSON is not valid {e}")
+        except Exception as ex:
+            logger.error(f"JSON is not valid {ex}")
             raise SupersetException("JSON is not valid")
@@ -597,16 +597,16 @@ class timeout:
         try:
             signal.signal(signal.SIGALRM, self.handle_timeout)
            signal.alarm(self.seconds)
-        except ValueError as e:
+        except ValueError as ex:
             logger.warning("timeout can't be used in the current context")
-            logger.exception(e)
+            logger.exception(ex)
     def __exit__(self, type, value, traceback):
         try:
             signal.alarm(0)
-        except ValueError as e:
+        except ValueError as ex:
             logger.warning("timeout can't be used in the current context")
-            logger.exception(e)
+            logger.exception(ex)
 def pessimistic_connection_handling(some_engine):
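The timeout hunks touch a signal-based context manager; a self-contained sketch of that pattern follows (assumes CPython on Unix in the main thread, since signal.signal raises ValueError elsewhere, which is exactly what the warning in the diff guards against):

    import signal

    class timeout:
        def __init__(self, seconds: int = 1):
            self.seconds = seconds

        def handle_timeout(self, signum, frame):
            raise TimeoutError("operation timed out")

        def __enter__(self):
            try:
                signal.signal(signal.SIGALRM, self.handle_timeout)
                signal.alarm(self.seconds)
            except ValueError as ex:  # raised outside the main thread
                print("timeout can't be used in the current context", ex)

        def __exit__(self, exc_type, value, traceback):
            try:
                signal.alarm(0)  # cancel any pending alarm
            except ValueError as ex:
                print("timeout can't be used in the current context", ex)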


@@ -37,8 +37,8 @@ def stats_timing(stats_key, stats_logger):
     start_ts = now_as_float()
     try:
         yield start_ts
-    except Exception as e:
-        raise e
+    except Exception as ex:
+        raise ex
     finally:
         stats_logger.timing(stats_key, now_as_float() - start_ts)
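stats_timing is small enough to reproduce whole; a runnable sketch with a stand-in clock (the real now_as_float and the stats_logger interface are assumptions based on the surrounding code):

    import time
    from contextlib import contextmanager

    def now_as_float() -> float:
        return time.time() * 1000.0  # stand-in: milliseconds since epoch

    @contextmanager
    def stats_timing(stats_key, stats_logger):
        start_ts = now_as_float()
        try:
            yield start_ts
        except Exception as ex:
            raise ex
        finally:
            stats_logger.timing(stats_key, now_as_float() - start_ts)

Note that the except/raise pair is a no-op apart from re-binding the name; a bare `raise`, or no except clause at all, would behave the same since `finally` runs either way.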


@@ -37,19 +37,19 @@ class AbstractEventLogger(ABC):
         user_id = None
         if g.user:
             user_id = g.user.get_id()
-        d = request.form.to_dict() or {}
+        form_data = request.form.to_dict() or {}
         # request parameters can overwrite post body
         request_params = request.args.to_dict()
-        d.update(request_params)
-        d.update(kwargs)
-        slice_id = d.get("slice_id")
-        dashboard_id = d.get("dashboard_id")
+        form_data.update(request_params)
+        form_data.update(kwargs)
+        slice_id = form_data.get("slice_id")
+        dashboard_id = form_data.get("dashboard_id")
         try:
             slice_id = int(
-                slice_id or json.loads(d.get("form_data")).get("slice_id")
+                slice_id or json.loads(form_data.get("form_data")).get("slice_id")
             )
         except (ValueError, TypeError):
             slice_id = 0
@@ -61,10 +61,10 @@ class AbstractEventLogger(ABC):
         # bulk insert
         try:
-            explode_by = d.get("explode")
-            records = json.loads(d.get(explode_by))
+            explode_by = form_data.get("explode")
+            records = json.loads(form_data.get(explode_by))
         except Exception:  # pylint: disable=broad-except
-            records = [d]
+            records = [form_data]
         referrer = request.referrer[:1000] if request.referrer else None
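The event-logger hunk only renames the merged payload dict from `d` to `form_data`; the merge order is unchanged and worth spelling out, since later updates win:

    # Standalone illustration of the precedence above (no Flask needed).
    form_data = {"slice_id": "1", "explode": "events"}  # request.form
    form_data.update({"slice_id": "2"})                 # request.args wins over the body
    form_data.update({"dashboard_id": "7"})             # explicit kwargs win over both
    assert form_data == {"slice_id": "2", "explode": "events", "dashboard_id": "7"}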


@@ -107,8 +107,8 @@ def api(f):
     def wraps(self, *args, **kwargs):
         try:
             return f(self, *args, **kwargs)
-        except Exception as e:  # pylint: disable=broad-except
-            logger.exception(e)
+        except Exception as ex:  # pylint: disable=broad-except
+            logger.exception(ex)
             return json_error_response(get_error_msg())
     return functools.update_wrapper(wraps, f)
@@ -124,22 +124,24 @@ def handle_api_exception(f):
     def wraps(self, *args, **kwargs):
         try:
             return f(self, *args, **kwargs)
-        except SupersetSecurityException as e:
-            logger.exception(e)
+        except SupersetSecurityException as ex:
+            logger.exception(ex)
             return json_error_response(
-                utils.error_msg_from_exception(e), status=e.status, link=e.link
+                utils.error_msg_from_exception(ex), status=ex.status, link=ex.link
             )
-        except SupersetException as e:
-            logger.exception(e)
+        except SupersetException as ex:
+            logger.exception(ex)
             return json_error_response(
-                utils.error_msg_from_exception(e), status=e.status
+                utils.error_msg_from_exception(ex), status=ex.status
             )
-        except HTTPException as e:
-            logger.exception(e)
-            return json_error_response(utils.error_msg_from_exception(e), status=e.code)
-        except Exception as e:  # pylint: disable=broad-except
-            logger.exception(e)
-            return json_error_response(utils.error_msg_from_exception(e))
+        except HTTPException as ex:
+            logger.exception(ex)
+            return json_error_response(
+                utils.error_msg_from_exception(ex), status=ex.code
+            )
+        except Exception as ex:  # pylint: disable=broad-except
+            logger.exception(ex)
+            return json_error_response(utils.error_msg_from_exception(ex))
     return functools.update_wrapper(wraps, f)
@@ -176,8 +178,8 @@ def menu_data():
             or f"/profile/{g.user.username}/"
         )
     # when user object has no username
-    except NameError as e:
-        logger.exception(e)
+    except NameError as ex:
+        logger.exception(ex)
     if logo_target_path.startswith("/"):
         root_path = f"/superset{logo_target_path}"
@@ -261,8 +263,8 @@ class ListWidgetWithCheckboxes(ListWidget):  # pylint: disable=too-few-public-me
 def validate_json(_form, field):
     try:
         json.loads(field.data)
-    except Exception as e:
-        logger.exception(e)
+    except Exception as ex:
+        logger.exception(ex)
         raise Exception(_("json isn't valid"))
@@ -303,8 +305,8 @@ class DeleteMixin:  # pylint: disable=too-few-public-methods
             abort(404)
         try:
             self.pre_delete(item)
-        except Exception as e:  # pylint: disable=broad-except
-            flash(str(e), "danger")
+        except Exception as ex:  # pylint: disable=broad-except
+            flash(str(ex), "danger")
         else:
             view_menu = security_manager.find_view_menu(item.get_perm())
             pvs = (
@@ -338,8 +340,8 @@ class DeleteMixin:  # pylint: disable=too-few-public-methods
         for item in items:
             try:
                 self.pre_delete(item)
-            except Exception as e:  # pylint: disable=broad-except
-                flash(str(e), "danger")
+            except Exception as ex:  # pylint: disable=broad-except
+                flash(str(ex), "danger")
             else:
                 self._delete(item.id)
         self.update_redirect()
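handle_api_exception layers its handlers from most to least specific so each response carries the best status code available. A compact sketch of the same shape, with the Superset exception types replaced by stand-ins:

    import functools
    import logging

    logger = logging.getLogger(__name__)

    def handle_api_exception(f):
        def wraps(self, *args, **kwargs):
            try:
                return f(self, *args, **kwargs)
            except ValueError as ex:   # stand-in for the Superset-specific errors
                logger.exception(ex)
                return {"error": str(ex)}, 422
            except Exception as ex:    # pylint: disable=broad-except
                logger.exception(ex)
                return {"error": "unexpected error"}, 500
        return functools.update_wrapper(wraps, f)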


@@ -44,7 +44,7 @@ def check_ownership_and_item_exists(f):
     A Decorator that checks if an object exists and is owned by the current user
     """
-    def wraps(self, pk):  # pylint: disable=invalid-name
+    def wraps(self, pk):
         item = self.datamodel.get(
             pk, self._base_filters  # pylint: disable=protected-access
         )
@@ -52,8 +52,8 @@ def check_ownership_and_item_exists(f):
             return self.response_404()
         try:
             check_ownership(item)
-        except SupersetSecurityException as e:
-            return self.response(403, message=str(e))
+        except SupersetSecurityException as ex:
+            return self.response(403, message=str(ex))
         return f(self, item)
     return functools.update_wrapper(wraps, f)
@@ -290,9 +290,9 @@ class BaseOwnedModelRestApi(BaseSupersetModelRestApi):
             return self.response(
                 200, result=self.edit_model_schema.dump(item.data, many=False).data
             )
-        except SQLAlchemyError as e:
-            logger.error(f"Error updating model {self.__class__.__name__}: {e}")
-            return self.response_422(message=str(e))
+        except SQLAlchemyError as ex:
+            logger.error(f"Error updating model {self.__class__.__name__}: {ex}")
+            return self.response_422(message=str(ex))
     @expose("/", methods=["POST"])
     @protect()
@@ -342,9 +342,9 @@ class BaseOwnedModelRestApi(BaseSupersetModelRestApi):
                 result=self.add_model_schema.dump(item.data, many=False).data,
                 id=item.data.id,
            )
-        except SQLAlchemyError as e:
-            logger.error(f"Error creating model {self.__class__.__name__}: {e}")
-            return self.response_422(message=str(e))
+        except SQLAlchemyError as ex:
+            logger.error(f"Error creating model {self.__class__.__name__}: {ex}")
+            return self.response_422(message=str(ex))
     @expose("/<pk>", methods=["DELETE"])
     @protect()
@@ -383,6 +383,6 @@ class BaseOwnedModelRestApi(BaseSupersetModelRestApi):
         try:
             self.datamodel.delete(item, raise_exception=True)
             return self.response(200, message="OK")
-        except SQLAlchemyError as e:
-            logger.error(f"Error deleting model {self.__class__.__name__}: {e}")
-            return self.response_422(message=str(e))
+        except SQLAlchemyError as ex:
+            logger.error(f"Error deleting model {self.__class__.__name__}: {ex}")
+            return self.response_422(message=str(ex))


@@ -185,8 +185,8 @@ def check_datasource_perms(
         datasource_id, datasource_type = get_datasource_info(
             datasource_id, datasource_type, form_data
         )
-    except SupersetException as e:
-        raise SupersetSecurityException(str(e))
+    except SupersetException as ex:
+        raise SupersetSecurityException(str(ex))
     if datasource_type is None:
         raise SupersetSecurityException("Could not determine datasource type")
@@ -317,8 +317,8 @@ class KV(BaseSupersetView):
             obj = models.KeyValue(value=value)
             db.session.add(obj)
             db.session.commit()
-        except Exception as e:
-            return json_error_response(e)
+        except Exception as ex:
+            return json_error_response(ex)
         return Response(json.dumps({"id": obj.id}), status=200)
     @event_logger.log_this
@@ -329,8 +329,8 @@ class KV(BaseSupersetView):
            kv = db.session.query(models.KeyValue).filter_by(id=key_id).scalar()
             if not kv:
                 return Response(status=404, content_type="text/plain")
-        except Exception as e:
-            return json_error_response(e)
+        except Exception as ex:
+            return json_error_response(ex)
         return Response(kv.value, status=200, content_type="text/plain")
@@ -600,9 +600,9 @@ class Superset(BaseSupersetView):
             query_obj = viz_obj.query_obj()
             if query_obj:
                 query = viz_obj.datasource.get_query_str(query_obj)
-        except Exception as e:
-            logger.exception(e)
-            return json_error_response(e)
+        except Exception as ex:
+            logger.exception(ex)
+            return json_error_response(ex)
         if not query:
             query = "No query."
@@ -706,8 +706,8 @@ class Superset(BaseSupersetView):
             datasource_id, datasource_type = get_datasource_info(
                 datasource_id, datasource_type, form_data
             )
-        except SupersetException as e:
-            return json_error_response(utils.error_msg_from_exception(e))
+        except SupersetException as ex:
+            return json_error_response(utils.error_msg_from_exception(ex))
         viz_obj = get_viz(
             datasource_type=datasource_type,
@@ -729,19 +729,19 @@ class Superset(BaseSupersetView):
         if request.method == "POST" and f:
             try:
                 dashboard_import_export.import_dashboards(db.session, f.stream)
-            except DatabaseNotFound as e:
-                logger.exception(e)
+            except DatabaseNotFound as ex:
+                logger.exception(ex)
                 flash(
                     _(
                         "Cannot import dashboard: %(db_error)s.\n"
                         "Make sure to create the database before "
                         "importing the dashboard.",
-                        db_error=e,
+                        db_error=ex,
                     ),
                     "danger",
                 )
-            except Exception as e:
-                logger.exception(e)
+            except Exception as ex:
+                logger.exception(ex)
                 flash(
                     _(
                         "An unknown error occurred. "
@@ -1371,11 +1371,11 @@ class Superset(BaseSupersetView):
             with closing(engine.connect()) as conn:
                 conn.scalar(select([1]))
                 return json_success('"OK"')
-        except CertificateException as e:
-            logger.info(e.message)
-            return json_error_response(e.message)
-        except NoSuchModuleError as e:
-            logger.info("Invalid driver %s", e)
+        except CertificateException as ex:
+            logger.info(ex.message)
+            return json_error_response(ex.message)
+        except NoSuchModuleError as ex:
+            logger.info("Invalid driver %s", ex)
             driver_name = make_url(uri).drivername
             return json_error_response(
                 _(
@@ -1384,24 +1384,24 @@ class Superset(BaseSupersetView):
                 ),
                 400,
             )
-        except ArgumentError as e:
-            logger.info("Invalid URI %s", e)
+        except ArgumentError as ex:
+            logger.info("Invalid URI %s", ex)
             return json_error_response(
                 _(
                     "Invalid connection string, a valid string usually follows:\n"
                     "'DRIVER://USER:PASSWORD@DB-HOST/DATABASE-NAME'"
                 )
             )
-        except OperationalError as e:
-            logger.warning("Connection failed %s", e)
+        except OperationalError as ex:
+            logger.warning("Connection failed %s", ex)
             return json_error_response(
                 _("Connection failed, please check your connection settings."), 400
             )
-        except DBSecurityException as e:
-            logger.warning("Stopped an unsafe database connection. %s", e)
-            return json_error_response(_(str(e)), 400)
-        except Exception as e:
-            logger.error("Unexpected error %s", e)
+        except DBSecurityException as ex:
+            logger.warning("Stopped an unsafe database connection. %s", ex)
+            return json_error_response(_(str(ex)), 400)
+        except Exception as ex:
+            logger.error("Unexpected error %s", ex)
             return json_error_response(
                 _("Unexpected error occurred, please check your logs for details"), 400
             )
@@ -1706,9 +1706,9 @@ class Superset(BaseSupersetView):
                     force=True,
                 )
                 obj.get_json()
-            except Exception as e:
+            except Exception as ex:
                 logger.exception("Failed to warm up cache")
-                return json_error_response(utils.error_msg_from_exception(e))
+                return json_error_response(utils.error_msg_from_exception(ex))
         return json_success(
             json.dumps(
                 [{"slice_id": slc.id, "slice_name": slc.slice_name} for slc in slices]
@@ -1950,9 +1950,9 @@ class Superset(BaseSupersetView):
             return json_error_response(err_msg)
         try:
             DruidDatasource.sync_to_db_from_config(druid_config, user, cluster)
-        except Exception as e:
-            logger.exception(utils.error_msg_from_exception(e))
-            return json_error_response(utils.error_msg_from_exception(e))
+        except Exception as ex:
+            logger.exception(utils.error_msg_from_exception(ex))
+            return json_error_response(utils.error_msg_from_exception(ex))
         return Response(status=201)
     @has_access
@@ -2064,11 +2064,11 @@ class Superset(BaseSupersetView):
             cost = mydb.db_engine_spec.estimate_query_cost(
                 mydb, schema, sql, utils.QuerySource.SQL_LAB
             )
-        except SupersetTimeoutException as e:
-            logger.exception(e)
+        except SupersetTimeoutException as ex:
+            logger.exception(ex)
             return json_error_response(timeout_msg)
-        except Exception as e:
-            return json_error_response(str(e))
+        except Exception as ex:
+            return json_error_response(str(ex))
         spec = mydb.db_engine_spec
         query_cost_formatters = get_feature_flags().get(
@@ -2226,15 +2226,15 @@ class Superset(BaseSupersetView):
                 encoding=None,
             )
             return json_success(payload)
-        except Exception as e:
-            logger.exception(e)
+        except Exception as ex:
+            logger.exception(ex)
             msg = _(
                 f"{validator.name} was unable to check your query.\n"
                 "Please recheck your query.\n"
-                f"Exception: {e}"
+                f"Exception: {ex}"
             )
             # Return as a 400 if the database error message says we got a 4xx error
-            if re.search(r"([\W]|^)4\d{2}([\W]|$)", str(e)):
+            if re.search(r"([\W]|^)4\d{2}([\W]|$)", str(ex)):
                 return json_error_response(f"{msg}", status=400)
             else:
                 return json_error_response(f"{msg}")
@@ -2268,8 +2268,8 @@ class Superset(BaseSupersetView):
                 expand_data=expand_data,
                 log_params=log_params,
             )
-        except Exception as e:
-            logger.exception(f"Query {query.id}: {e}")
+        except Exception as ex:
+            logger.exception(f"Query {query.id}: {ex}")
             msg = _(
                 "Failed to start remote query on a worker. "
                 "Tell your administrator to verify the availability of "
@@ -2330,8 +2330,8 @@ class Superset(BaseSupersetView):
                 ignore_nan=True,
                 encoding=None,
             )
-        except Exception as e:
-            logger.exception(f"Query {query.id}: {e}")
+        except Exception as ex:
+            logger.exception(f"Query {query.id}: {ex}")
             return json_error_response(f"{{e}}")
         if data.get("status") == QueryStatus.FAILED:
             return json_error_response(payload=data)
@@ -2414,8 +2414,8 @@ class Superset(BaseSupersetView):
             session.flush()
             query_id = query.id
             session.commit()  # shouldn't be necessary
-        except SQLAlchemyError as e:
-            logger.error(f"Errors saving query details {e}")
+        except SQLAlchemyError as ex:
+            logger.error(f"Errors saving query details {ex}")
             session.rollback()
             raise Exception(_("Query record was not created as expected."))
         if not query_id:
@@ -2440,8 +2440,8 @@ class Superset(BaseSupersetView):
             rendered_query = template_processor.process_template(
                 query.sql, **template_params
            )
-        except Exception as e:
-            error_msg = utils.error_msg_from_exception(e)
+        except Exception as ex:
+            error_msg = utils.error_msg_from_exception(ex)
             return json_error_response(
                 f"Query {query_id}: Template rendering failed: {error_msg}"
             )
@@ -2799,8 +2799,8 @@ class Superset(BaseSupersetView):
                 database, schemas_allowed, False
            )
             return self.json_response(schemas_allowed_processed)
-        except Exception as e:
-            logger.exception(e)
+        except Exception as ex:
+            logger.exception(ex)
             return json_error_response(
                 "Failed to fetch schemas allowed for csv upload in this database! "
                 "Please contact your Superset Admin!"


@@ -270,9 +270,9 @@ class DatabaseRestApi(DatabaseMixin, BaseSupersetModelRestApi):
         self.incr_stats("init", self.table_metadata.__name__)
         try:
             table_info: Dict = get_table_metadata(database, table_name, schema_name)
-        except SQLAlchemyError as e:
+        except SQLAlchemyError as ex:
             self.incr_stats("error", self.table_metadata.__name__)
-            return self.response_422(error_msg_from_exception(e))
+            return self.response_422(error_msg_from_exception(ex))
         self.incr_stats("success", self.table_metadata.__name__)
         return self.response(200, **table_info)


@@ -32,9 +32,7 @@ def check_datasource_access(f):
     A Decorator that checks if a user has datasource access
     """
-    def wraps(
-        self, pk: int, table_name: str, schema_name: Optional[str] = None
-    ):  # pylint: disable=invalid-name
+    def wraps(self, pk: int, table_name: str, schema_name: Optional[str] = None):
         schema_name_parsed = parse_js_uri_path_item(schema_name, eval_undefined=True)
         table_name_parsed = parse_js_uri_path_item(table_name)
         if not table_name_parsed:


@@ -234,9 +234,9 @@ class DatabaseMixin:
         # this will check whether json.loads(extra) can succeed
         try:
             extra = database.get_extra()
-        except Exception as e:
+        except Exception as ex:
             raise Exception(
-                _("Extra field cannot be decoded by JSON. %{msg}s", msg=str(e))
+                _("Extra field cannot be decoded by JSON. %{msg}s", msg=str(ex))
             )
         # this will check whether 'metadata_params' is configured correctly
@@ -256,7 +256,7 @@ class DatabaseMixin:
         # this will check whether json.loads(secure_extra) can succeed
         try:
             database.get_encrypted_extra()
-        except Exception as e:
+        except Exception as ex:
             raise Exception(
-                _("Extra field cannot be decoded by JSON. %{msg}s", msg=str(e))
+                _("Extra field cannot be decoded by JSON. %{msg}s", msg=str(ex))
             )


@@ -158,7 +158,7 @@ class CsvToDatabaseView(SimpleFormView):
             table.fetch_metadata()
             db.session.add(table)
             db.session.commit()
-        except Exception as e:  # pylint: disable=broad-except
+        except Exception as ex:  # pylint: disable=broad-except
             db.session.rollback()
             try:
                 os.remove(path)
@@ -171,7 +171,7 @@ class CsvToDatabaseView(SimpleFormView):
                 filename=csv_filename,
                 table_name=form.name.data,
                 db_name=database.database_name,
-                error_msg=str(e),
+                error_msg=str(ex),
             )
             flash(message, "danger")


@@ -431,10 +431,10 @@ class BaseViz:
                 self.status = utils.QueryStatus.SUCCESS
                 is_loaded = True
                 stats_logger.incr("loaded_from_cache")
-            except Exception as e:
-                logger.exception(e)
+            except Exception as ex:
+                logger.exception(ex)
                 logger.error(
-                    "Error reading cache: " + utils.error_msg_from_exception(e)
+                    "Error reading cache: " + utils.error_msg_from_exception(ex)
                 )
             logger.info("Serving from cache")
@@ -446,10 +446,10 @@ class BaseViz:
                 if not self.force:
                     stats_logger.incr("loaded_from_source_without_force")
                 is_loaded = True
-            except Exception as e:
-                logger.exception(e)
+            except Exception as ex:
+                logger.exception(ex)
                 if not self.error_message:
-                    self.error_message = "{}".format(e)
+                    self.error_message = "{}".format(ex)
                 self.status = utils.QueryStatus.FAILED
                 stacktrace = utils.get_stacktrace()
@@ -469,11 +469,11 @@ class BaseViz:
                 stats_logger.incr("set_cache_key")
                 cache.set(cache_key, cache_value, timeout=self.cache_timeout)
-            except Exception as e:
+            except Exception as ex:
                 # cache.set call can fail if the backend is down or if
                 # the key is too large or whatever other reasons
                 logger.warning("Could not cache key {}".format(cache_key))
-                logger.exception(e)
+                logger.exception(ex)
                 cache.delete(cache_key)
         return {
             "cache_key": self._any_cache_key,