pylint: accept specific 2-character names by default (#9460)

* lint: accept 2-letter names by default

* Address review comments

* Remove e and d from good-names
Ville Brofeldt 2020-04-08 20:32:26 +03:00 committed by GitHub
parent 4485800e21
commit 980dd2fd41
72 changed files with 421 additions and 431 deletions

View File

@ -115,10 +115,10 @@ evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
[BASIC]
# Good variable names which should always be accepted, separated by a comma
good-names=i,j,k,ex,Run,_,d,e,v,o,l,x,ts,f
good-names=_,df,ex,f,i,id,j,k,l,o,pk,Run,ts,v,x
# Bad variable names which should always be refused, separated by a comma
bad-names=foo,bar,baz,toto,tutu,tata,d,fd
bad-names=fd,foo,bar,baz,toto,tutu,tata
# Colon-delimited sets of names that determine each other's naming style when
# the name regexes allow several styles.
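A minimal sketch of the effect of the new lists (hypothetical module; the assignments are illustrative): pylint's invalid-name checker accepts any identifier whitelisted in good-names regardless of the naming regexes, and always rejects those in bad-names.

# names_demo.py -- hypothetical module linted against the config above
df = None  # accepted: "df" is whitelisted in good-names
pk = 42    # accepted: "pk" is whitelisted in good-names
d = {}     # flagged invalid-name (C0103): "d" was removed from good-names
fd = None  # flagged blacklisted-name (C0102): "fd" is in bad-names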

View File

@ -170,11 +170,11 @@ class ChartRestApi(BaseSupersetModelRestApi):
try:
new_model = CreateChartCommand(g.user, item.data).run()
return self.response(201, id=new_model.id, result=item.data)
except ChartInvalidError as e:
return self.response_422(message=e.normalized_messages())
except ChartCreateFailedError as e:
logger.error(f"Error creating model {self.__class__.__name__}: {e}")
return self.response_422(message=str(e))
except ChartInvalidError as ex:
return self.response_422(message=ex.normalized_messages())
except ChartCreateFailedError as ex:
logger.error(f"Error creating model {self.__class__.__name__}: {ex}")
return self.response_422(message=str(ex))
@expose("/<pk>", methods=["PUT"])
@protect()
@ -237,11 +237,11 @@ class ChartRestApi(BaseSupersetModelRestApi):
return self.response_404()
except ChartForbiddenError:
return self.response_403()
except ChartInvalidError as e:
return self.response_422(message=e.normalized_messages())
except ChartUpdateFailedError as e:
logger.error(f"Error updating model {self.__class__.__name__}: {e}")
return self.response_422(message=str(e))
except ChartInvalidError as ex:
return self.response_422(message=ex.normalized_messages())
except ChartUpdateFailedError as ex:
logger.error(f"Error updating model {self.__class__.__name__}: {ex}")
return self.response_422(message=str(ex))
@expose("/<pk>", methods=["DELETE"])
@protect()
@ -285,9 +285,9 @@ class ChartRestApi(BaseSupersetModelRestApi):
return self.response_404()
except ChartForbiddenError:
return self.response_403()
except ChartDeleteFailedError as e:
logger.error(f"Error deleting model {self.__class__.__name__}: {e}")
return self.response_422(message=str(e))
except ChartDeleteFailedError as ex:
logger.error(f"Error deleting model {self.__class__.__name__}: {ex}")
return self.response_422(message=str(ex))
@expose("/", methods=["DELETE"])
@protect()
@ -346,5 +346,5 @@ class ChartRestApi(BaseSupersetModelRestApi):
return self.response_404()
except ChartForbiddenError:
return self.response_403()
except ChartBulkDeleteFailedError as e:
return self.response_422(message=str(e))
except ChartBulkDeleteFailedError as ex:
return self.response_422(message=str(ex))

View File

@ -44,8 +44,8 @@ class BulkDeleteChartCommand(BaseCommand):
self.validate()
try:
ChartDAO.bulk_delete(self._models)
except DeleteFailedError as e:
logger.exception(e.exception)
except DeleteFailedError as ex:
logger.exception(ex.exception)
raise ChartBulkDeleteFailedError()
def validate(self) -> None:

View File

@ -44,8 +44,8 @@ class CreateChartCommand(BaseCommand):
self.validate()
try:
chart = ChartDAO.create(self._properties)
except DAOCreateFailedError as e:
logger.exception(e.exception)
except DAOCreateFailedError as ex:
logger.exception(ex.exception)
raise ChartCreateFailedError()
return chart
@ -60,8 +60,8 @@ class CreateChartCommand(BaseCommand):
try:
datasource = get_datasource_by_id(datasource_id, datasource_type)
self._properties["datasource_name"] = datasource.name
except ValidationError as e:
exceptions.append(e)
except ValidationError as ex:
exceptions.append(ex)
# Validate/Populate dashboards
dashboards = DashboardDAO.find_by_ids(dashboard_ids)
@ -72,8 +72,8 @@ class CreateChartCommand(BaseCommand):
try:
owners = populate_owners(self._actor, owner_ids)
self._properties["owners"] = owners
except ValidationError as e:
exceptions.append(e)
except ValidationError as ex:
exceptions.append(ex)
if exceptions:
exception = ChartInvalidError()
exception.add_list(exceptions)

View File

@ -45,8 +45,8 @@ class DeleteChartCommand(BaseCommand):
self.validate()
try:
chart = ChartDAO.delete(self._model)
except DAODeleteFailedError as e:
logger.exception(e.exception)
except DAODeleteFailedError as ex:
logger.exception(ex.exception)
raise ChartDeleteFailedError()
return chart

View File

@ -52,8 +52,8 @@ class UpdateChartCommand(BaseCommand):
self.validate()
try:
chart = ChartDAO.update(self._model, self._properties)
except DAOUpdateFailedError as e:
logger.exception(e.exception)
except DAOUpdateFailedError as ex:
logger.exception(ex.exception)
raise ChartUpdateFailedError()
return chart
@ -84,8 +84,8 @@ class UpdateChartCommand(BaseCommand):
try:
datasource = get_datasource_by_id(datasource_id, datasource_type)
self._properties["datasource_name"] = datasource.name
except ValidationError as e:
exceptions.append(e)
except ValidationError as ex:
exceptions.append(ex)
# Validate/Populate dashboards
dashboards = DashboardDAO.find_by_ids(dashboard_ids)
@ -97,8 +97,8 @@ class UpdateChartCommand(BaseCommand):
try:
owners = populate_owners(self._actor, owner_ids)
self._properties["owners"] = owners
except ValidationError as e:
exceptions.append(e)
except ValidationError as ex:
exceptions.append(ex)
if exceptions:
exception = ChartInvalidError()
exception.add_list(exceptions)

View File

@ -47,7 +47,7 @@ class ChartDAO(BaseDAO):
)
if commit:
db.session.commit()
except SQLAlchemyError as e:
except SQLAlchemyError as ex:
if commit:
db.session.rollback()
raise e
raise ex

View File

@ -197,9 +197,9 @@ def refresh_druid(datasource, merge):
for cluster in session.query(DruidCluster).all():
try:
cluster.refresh_datasources(datasource_name=datasource, merge_flag=merge)
except Exception as e: # pylint: disable=broad-except
print("Error while processing cluster '{}'\n{}".format(cluster, str(e)))
logger.exception(e)
except Exception as ex: # pylint: disable=broad-except
print("Error while processing cluster '{}'\n{}".format(cluster, str(ex)))
logger.exception(ex)
cluster.metadata_last_refreshed = datetime.now()
print("Refreshed metadata from cluster " "[" + cluster.cluster_name + "]")
session.commit()
@ -245,9 +245,9 @@ def import_dashboards(path, recursive, username):
try:
with file_.open() as data_stream:
dashboard_import_export.import_dashboards(db.session, data_stream)
except Exception as e: # pylint: disable=broad-except
except Exception as ex: # pylint: disable=broad-except
logger.error("Error when importing dashboard from file %s", file_)
logger.error(e)
logger.error(ex)
@superset.command()
@ -317,9 +317,9 @@ def import_datasources(path, sync, recursive):
dict_import_export.import_from_dict(
db.session, yaml.safe_load(data_stream), sync=sync_array
)
except Exception as e: # pylint: disable=broad-except
except Exception as ex: # pylint: disable=broad-except
logger.error("Error when importing datasources from file %s", file_)
logger.error(e)
logger.error(ex)
@superset.command()
@ -397,8 +397,8 @@ def update_datasources_cache():
database.get_all_view_names_in_database(
force=True, cache=True, cache_timeout=24 * 60 * 60
)
except Exception as e: # pylint: disable=broad-except
print("{}".format(str(e)))
except Exception as ex: # pylint: disable=broad-except
print("{}".format(str(ex)))
@superset.command()

View File

@ -113,7 +113,7 @@ class QueryContext:
}
@staticmethod
def df_metrics_to_num( # pylint: disable=invalid-name,no-self-use
def df_metrics_to_num( # pylint: disable=no-self-use
df: pd.DataFrame, query_object: QueryObject
) -> None:
"""Converting metrics to numeric when pandas.read_sql cannot"""
@ -122,9 +122,7 @@ class QueryContext:
df[col] = pd.to_numeric(df[col], errors="coerce")
@staticmethod
def get_data( # pylint: disable=invalid-name,no-self-use
df: pd.DataFrame,
) -> List[Dict]:
def get_data(df: pd.DataFrame,) -> List[Dict]: # pylint: disable=no-self-use
return df.to_dict(orient="records")
def get_single_payload(self, query_obj: QueryObject) -> Dict[str, Any]:
@ -197,10 +195,10 @@ class QueryContext:
status = utils.QueryStatus.SUCCESS
is_loaded = True
stats_logger.incr("loaded_from_cache")
except Exception as e: # pylint: disable=broad-except
logger.exception(e)
except Exception as ex: # pylint: disable=broad-except
logger.exception(ex)
logger.error(
"Error reading cache: %s", utils.error_msg_from_exception(e)
"Error reading cache: %s", utils.error_msg_from_exception(ex)
)
logger.info("Serving from cache")
@ -216,10 +214,10 @@ class QueryContext:
if not self.force:
stats_logger.incr("loaded_from_source_without_force")
is_loaded = True
except Exception as e: # pylint: disable=broad-except
logger.exception(e)
except Exception as ex: # pylint: disable=broad-except
logger.exception(ex)
if not error_message:
error_message = "{}".format(e)
error_message = "{}".format(ex)
status = utils.QueryStatus.FAILED
stacktrace = utils.get_stacktrace()
@ -234,11 +232,11 @@ class QueryContext:
stats_logger.incr("set_cache_key")
cache.set(cache_key, cache_binary, timeout=self.cache_timeout)
except Exception as e: # pylint: disable=broad-except
except Exception as ex: # pylint: disable=broad-except
# cache.set call can fail if the backend is down or if
# the key is too large or whatever other reasons
logger.warning("Could not cache key %s", cache_key)
logger.exception(e)
logger.exception(ex)
cache.delete(cache_key)
return {
"cache_key": cache_key,

View File

@ -76,7 +76,7 @@ class BaseDatasource(
# ---------------------------------------------------------------
# Columns
id = Column(Integer, primary_key=True) # pylint: disable=invalid-name
id = Column(Integer, primary_key=True)
description = Column(Text)
default_endpoint = Column(Text)
is_featured = Column(Boolean, default=False) # TODO deprecating
@ -453,7 +453,7 @@ class BaseColumn(AuditMixinNullable, ImportMixin):
__tablename__: Optional[str] = None # {connector_name}_column
id = Column(Integer, primary_key=True) # pylint: disable=invalid-name
id = Column(Integer, primary_key=True)
column_name = Column(String(255), nullable=False)
verbose_name = Column(String(1024))
is_active = Column(Boolean, default=True)
@ -526,7 +526,7 @@ class BaseMetric(AuditMixinNullable, ImportMixin):
__tablename__: Optional[str] = None # {connector_name}_metric
id = Column(Integer, primary_key=True) # pylint: disable=invalid-name
id = Column(Integer, primary_key=True)
metric_name = Column(String(255), nullable=False)
verbose_name = Column(String(1024))
metric_type = Column(String(32))

View File

@ -657,9 +657,9 @@ class DruidDatasource(Model, BaseDatasource):
merge=self.merge_flag,
analysisTypes=[],
)
except Exception as e:
except Exception as ex:
logger.warning("Failed first attempt to get latest segment")
logger.exception(e)
logger.exception(ex)
if not segment_metadata:
# if no segments in the past 7 days, look at all segments
lbound = datetime(1901, 1, 1).isoformat()[:10]
@ -674,9 +674,9 @@ class DruidDatasource(Model, BaseDatasource):
merge=self.merge_flag,
analysisTypes=[],
)
except Exception as e:
except Exception as ex:
logger.warning("Failed 2nd attempt to get latest segment")
logger.exception(e)
logger.exception(ex)
if segment_metadata:
return segment_metadata[-1]["columns"]

View File

@ -112,8 +112,8 @@ class DruidColumnInlineView(CompactCRUDMixin, SupersetModelView):
if col.dimension_spec_json:
try:
dimension_spec = json.loads(col.dimension_spec_json)
except ValueError as e:
raise ValueError("Invalid Dimension Spec JSON: " + str(e))
except ValueError as ex:
raise ValueError("Invalid Dimension Spec JSON: " + str(ex))
if not isinstance(dimension_spec, dict):
raise ValueError("Dimension Spec must be a JSON object")
if "outputName" not in dimension_spec:
@ -374,15 +374,15 @@ class Druid(BaseSupersetView):
valid_cluster = True
try:
cluster.refresh_datasources(refresh_all=refresh_all)
except Exception as e:
except Exception as ex:
valid_cluster = False
flash(
"Error while processing cluster '{}'\n{}".format(
cluster_name, utils.error_msg_from_exception(e)
cluster_name, utils.error_msg_from_exception(ex)
),
"danger",
)
logger.exception(e)
logger.exception(ex)
pass
if valid_cluster:
cluster.metadata_last_refreshed = datetime.now()

View File

@ -96,11 +96,11 @@ class AnnotationDatasource(BaseDatasource):
status = utils.QueryStatus.SUCCESS
try:
df = pd.read_sql_query(qry.statement, db.engine)
except Exception as e:
except Exception as ex:
df = pd.DataFrame()
status = utils.QueryStatus.FAILED
logger.exception(e)
error_message = utils.error_msg_from_exception(e)
logger.exception(ex)
error_message = utils.error_msg_from_exception(ex)
return QueryResult(
status=status, df=df, duration=0, query="", error_message=error_message
)
@ -1055,12 +1055,12 @@ class SqlaTable(Model, BaseDatasource):
try:
df = self.database.get_df(sql, self.schema, mutator)
except Exception as e:
except Exception as ex:
df = pd.DataFrame()
status = utils.QueryStatus.FAILED
logger.exception(f"Query {sql} on schema {self.schema} failed")
db_engine_spec = self.database.db_engine_spec
error_message = db_engine_spec.extract_error_message(e)
error_message = db_engine_spec.extract_error_message(ex)
return QueryResult(
status=status,
@ -1077,8 +1077,8 @@ class SqlaTable(Model, BaseDatasource):
"""Fetches the metadata for the table and merges it in"""
try:
table = self.get_sqla_table_object()
except Exception as e:
logger.exception(e)
except Exception as ex:
logger.exception(ex)
raise Exception(
_(
"Table [{}] doesn't seem to exist in the specified database, "
@ -1102,10 +1102,10 @@ class SqlaTable(Model, BaseDatasource):
datatype = db_engine_spec.column_datatype_to_string(
col.type, db_dialect
)
except Exception as e:
except Exception as ex:
datatype = "UNKNOWN"
logger.error("Unrecognized data type in {}.{}".format(table, col.name))
logger.exception(e)
logger.exception(ex)
dbcol = dbcols.get(col.name, None)
if not dbcol:
dbcol = TableColumn(column_name=col.name, type=datatype, table=self)
@ -1254,7 +1254,7 @@ class RowLevelSecurityFilter(Model, AuditMixinNullable):
"""
__tablename__ = "row_level_security_filters"
id = Column(Integer, primary_key=True) # pylint: disable=invalid-name
id = Column(Integer, primary_key=True)
roles = relationship(
security_manager.role_model,
secondary=RLSFilterRoles,

View File

@ -387,7 +387,7 @@ class TableModelView(DatasourceModelView, DeleteMixin, YamlExportMixin):
# Fail before adding if the table can't be found
try:
table.get_sqla_table_object()
except Exception as e:
except Exception as ex:
logger.exception(f"Got an error in pre_add for {table.name}")
raise Exception(
_(
@ -395,7 +395,7 @@ class TableModelView(DatasourceModelView, DeleteMixin, YamlExportMixin):
"please double check your "
"database connection, schema, and "
"table name, error: {}"
).format(table.name, str(e))
).format(table.name, str(ex))
)
def post_add(self, table, flash_message=True):

View File

@ -89,9 +89,9 @@ class BaseDAO:
db.session.add(model)
if commit:
db.session.commit()
except SQLAlchemyError as e: # pragma: no cover
except SQLAlchemyError as ex: # pragma: no cover
db.session.rollback()
raise DAOCreateFailedError(exception=e)
raise DAOCreateFailedError(exception=ex)
return model
@classmethod
@ -106,9 +106,9 @@ class BaseDAO:
db.session.merge(model)
if commit:
db.session.commit()
except SQLAlchemyError as e: # pragma: no cover
except SQLAlchemyError as ex: # pragma: no cover
db.session.rollback()
raise DAOUpdateFailedError(exception=e)
raise DAOUpdateFailedError(exception=ex)
return model
@classmethod
@ -121,7 +121,7 @@ class BaseDAO:
db.session.delete(model)
if commit:
db.session.commit()
except SQLAlchemyError as e: # pragma: no cover
except SQLAlchemyError as ex: # pragma: no cover
db.session.rollback()
raise DAODeleteFailedError(exception=e)
raise DAODeleteFailedError(exception=ex)
return model

View File

@ -168,11 +168,11 @@ class DashboardRestApi(BaseSupersetModelRestApi):
try:
new_model = CreateDashboardCommand(g.user, item.data).run()
return self.response(201, id=new_model.id, result=item.data)
except DashboardInvalidError as e:
return self.response_422(message=e.normalized_messages())
except DashboardCreateFailedError as e:
logger.error(f"Error creating model {self.__class__.__name__}: {e}")
return self.response_422(message=str(e))
except DashboardInvalidError as ex:
return self.response_422(message=ex.normalized_messages())
except DashboardCreateFailedError as ex:
logger.error(f"Error creating model {self.__class__.__name__}: {ex}")
return self.response_422(message=str(ex))
@expose("/<pk>", methods=["PUT"])
@protect()
@ -235,11 +235,11 @@ class DashboardRestApi(BaseSupersetModelRestApi):
return self.response_404()
except DashboardForbiddenError:
return self.response_403()
except DashboardInvalidError as e:
return self.response_422(message=e.normalized_messages())
except DashboardUpdateFailedError as e:
logger.error(f"Error updating model {self.__class__.__name__}: {e}")
return self.response_422(message=str(e))
except DashboardInvalidError as ex:
return self.response_422(message=ex.normalized_messages())
except DashboardUpdateFailedError as ex:
logger.error(f"Error updating model {self.__class__.__name__}: {ex}")
return self.response_422(message=str(ex))
@expose("/<pk>", methods=["DELETE"])
@protect()
@ -283,9 +283,9 @@ class DashboardRestApi(BaseSupersetModelRestApi):
return self.response_404()
except DashboardForbiddenError:
return self.response_403()
except DashboardDeleteFailedError as e:
logger.error(f"Error deleting model {self.__class__.__name__}: {e}")
return self.response_422(message=str(e))
except DashboardDeleteFailedError as ex:
logger.error(f"Error deleting model {self.__class__.__name__}: {ex}")
return self.response_422(message=str(ex))
@expose("/", methods=["DELETE"])
@protect()
@ -344,8 +344,8 @@ class DashboardRestApi(BaseSupersetModelRestApi):
return self.response_404()
except DashboardForbiddenError:
return self.response_403()
except DashboardBulkDeleteFailedError as e:
return self.response_422(message=str(e))
except DashboardBulkDeleteFailedError as ex:
return self.response_422(message=str(ex))
@expose("/export/", methods=["GET"])
@protect()

View File

@ -45,8 +45,8 @@ class BulkDeleteDashboardCommand(BaseCommand):
try:
DashboardDAO.bulk_delete(self._models)
return None
except DeleteFailedError as e:
logger.exception(e.exception)
except DeleteFailedError as ex:
logger.exception(ex.exception)
raise DashboardBulkDeleteFailedError()
def validate(self) -> None:

View File

@ -43,8 +43,8 @@ class CreateDashboardCommand(BaseCommand):
self.validate()
try:
dashboard = DashboardDAO.create(self._properties)
except DAOCreateFailedError as e:
logger.exception(e.exception)
except DAOCreateFailedError as ex:
logger.exception(ex.exception)
raise DashboardCreateFailedError()
return dashboard
@ -60,8 +60,8 @@ class CreateDashboardCommand(BaseCommand):
try:
owners = populate_owners(self._actor, owner_ids)
self._properties["owners"] = owners
except ValidationError as e:
exceptions.append(e)
except ValidationError as ex:
exceptions.append(ex)
if exceptions:
exception = DashboardInvalidError()
exception.add_list(exceptions)

View File

@ -45,8 +45,8 @@ class DeleteDashboardCommand(BaseCommand):
self.validate()
try:
dashboard = DashboardDAO.delete(self._model)
except DAODeleteFailedError as e:
logger.exception(e.exception)
except DAODeleteFailedError as ex:
logger.exception(ex.exception)
raise DashboardDeleteFailedError()
return dashboard

View File

@ -50,8 +50,8 @@ class UpdateDashboardCommand(BaseCommand):
self.validate()
try:
dashboard = DashboardDAO.update(self._model, self._properties)
except DAOUpdateFailedError as e:
logger.exception(e.exception)
except DAOUpdateFailedError as ex:
logger.exception(ex.exception)
raise DashboardUpdateFailedError()
return dashboard
@ -80,8 +80,8 @@ class UpdateDashboardCommand(BaseCommand):
try:
owners = populate_owners(self._actor, owner_ids)
self._properties["owners"] = owners
except ValidationError as e:
exceptions.append(e)
except ValidationError as ex:
exceptions.append(ex)
if exceptions:
exception = DashboardInvalidError()
exception.add_list(exceptions)

View File

@ -63,7 +63,7 @@ class DashboardDAO(BaseDAO):
)
if commit:
db.session.commit()
except SQLAlchemyError as e:
except SQLAlchemyError as ex:
if commit:
db.session.rollback()
raise e
raise ex

View File

@ -26,10 +26,10 @@ from superset.utils.core import JS_MAX_INTEGER
def df_to_records(dframe: pd.DataFrame) -> List[Dict[str, Any]]:
data: List[Dict[str, Any]] = dframe.to_dict(orient="records")
# TODO: refactor this
for d in data:
for k, v in list(d.items()):
for row in data:
for key, value in list(row.items()):
# if an int is too big for JavaScript to handle
# convert it to a string
if isinstance(v, int) and abs(v) > JS_MAX_INTEGER:
d[k] = str(v)
if isinstance(value, int) and abs(value) > JS_MAX_INTEGER:
row[key] = str(value)
return data
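A usage sketch of df_to_records (the import path is assumed; sample values are illustrative): integers whose absolute value exceeds JS_MAX_INTEGER (2**53 - 1, the largest integer JavaScript represents exactly) come back as strings so clients don't silently lose precision.

import pandas as pd

from superset.dataframe import df_to_records  # assumed import path

# object dtype keeps native Python ints, matching the isinstance(value, int) check
frame = pd.DataFrame({"id": [1, 2 ** 60]}, dtype="object")
print(df_to_records(frame))
# [{'id': 1}, {'id': '1152921504606846976'}]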

View File

@ -171,11 +171,11 @@ class DatasetRestApi(BaseSupersetModelRestApi):
try:
new_model = CreateDatasetCommand(g.user, item.data).run()
return self.response(201, id=new_model.id, result=item.data)
except DatasetInvalidError as e:
return self.response_422(message=e.normalized_messages())
except DatasetCreateFailedError as e:
logger.error(f"Error creating model {self.__class__.__name__}: {e}")
return self.response_422(message=str(e))
except DatasetInvalidError as ex:
return self.response_422(message=ex.normalized_messages())
except DatasetCreateFailedError as ex:
logger.error(f"Error creating model {self.__class__.__name__}: {ex}")
return self.response_422(message=str(ex))
@expose("/<pk>", methods=["PUT"])
@protect()
@ -238,11 +238,11 @@ class DatasetRestApi(BaseSupersetModelRestApi):
return self.response_404()
except DatasetForbiddenError:
return self.response_403()
except DatasetInvalidError as e:
return self.response_422(message=e.normalized_messages())
except DatasetUpdateFailedError as e:
logger.error(f"Error updating model {self.__class__.__name__}: {e}")
return self.response_422(message=str(e))
except DatasetInvalidError as ex:
return self.response_422(message=ex.normalized_messages())
except DatasetUpdateFailedError as ex:
logger.error(f"Error updating model {self.__class__.__name__}: {ex}")
return self.response_422(message=str(ex))
@expose("/<pk>", methods=["DELETE"])
@protect()
@ -286,9 +286,9 @@ class DatasetRestApi(BaseSupersetModelRestApi):
return self.response_404()
except DatasetForbiddenError:
return self.response_403()
except DatasetDeleteFailedError as e:
logger.error(f"Error deleting model {self.__class__.__name__}: {e}")
return self.response_422(message=str(e))
except DatasetDeleteFailedError as ex:
logger.error(f"Error deleting model {self.__class__.__name__}: {ex}")
return self.response_422(message=str(ex))
@expose("/export/", methods=["GET"])
@protect()
@ -345,7 +345,7 @@ class DatasetRestApi(BaseSupersetModelRestApi):
@expose("/<pk>/refresh", methods=["PUT"])
@protect()
@safe
def refresh(self, pk: int) -> Response: # pylint: disable=invalid-name
def refresh(self, pk: int) -> Response:
"""Refresh a Dataset
---
put:
@ -384,6 +384,6 @@ class DatasetRestApi(BaseSupersetModelRestApi):
return self.response_404()
except DatasetForbiddenError:
return self.response_403()
except DatasetRefreshFailedError as e:
logger.error(f"Error refreshing dataset {self.__class__.__name__}: {e}")
return self.response_422(message=str(e))
except DatasetRefreshFailedError as ex:
logger.error(f"Error refreshing dataset {self.__class__.__name__}: {ex}")
return self.response_422(message=str(ex))

View File

@ -60,8 +60,8 @@ class CreateDatasetCommand(BaseCommand):
"schema_access", dataset.schema_perm
)
db.session.commit()
except (SQLAlchemyError, DAOCreateFailedError) as e:
logger.exception(e)
except (SQLAlchemyError, DAOCreateFailedError) as ex:
logger.exception(ex)
db.session.rollback()
raise DatasetCreateFailedError()
return dataset
@ -92,8 +92,8 @@ class CreateDatasetCommand(BaseCommand):
try:
owners = populate_owners(self._actor, owner_ids)
self._properties["owners"] = owners
except ValidationError as e:
exceptions.append(e)
except ValidationError as ex:
exceptions.append(ex)
if exceptions:
exception = DatasetInvalidError()
exception.add_list(exceptions)

View File

@ -51,8 +51,8 @@ class DeleteDatasetCommand(BaseCommand):
"datasource_access", dataset.get_perm()
)
db.session.commit()
except (SQLAlchemyError, DAODeleteFailedError) as e:
logger.exception(e)
except (SQLAlchemyError, DAODeleteFailedError) as ex:
logger.exception(ex)
db.session.rollback()
raise DatasetDeleteFailedError()
return dataset

View File

@ -46,8 +46,8 @@ class RefreshDatasetCommand(BaseCommand):
try:
self._model.fetch_metadata()
return self._model
except Exception as e:
logger.exception(e)
except Exception as ex:
logger.exception(ex)
raise DatasetRefreshFailedError()
raise DatasetRefreshFailedError()

View File

@ -60,8 +60,8 @@ class UpdateDatasetCommand(BaseCommand):
try:
dataset = DatasetDAO.update(self._model, self._properties)
return dataset
except DAOUpdateFailedError as e:
logger.exception(e.exception)
except DAOUpdateFailedError as ex:
logger.exception(ex.exception)
raise DatasetUpdateFailedError()
raise DatasetUpdateFailedError()
@ -92,8 +92,8 @@ class UpdateDatasetCommand(BaseCommand):
try:
owners = populate_owners(self._actor, owner_ids)
self._properties["owners"] = owners
except ValidationError as e:
exceptions.append(e)
except ValidationError as ex:
exceptions.append(ex)
# Validate columns
columns = self._properties.get("columns")

View File

@ -45,8 +45,8 @@ class DatasetDAO(BaseDAO):
def get_database_by_id(database_id: int) -> Optional[Database]:
try:
return db.session.query(Database).filter_by(id=database_id).one_or_none()
except SQLAlchemyError as e: # pragma: no cover
logger.error(f"Could not get database by id: {e}")
except SQLAlchemyError as ex: # pragma: no cover
logger.error(f"Could not get database by id: {ex}")
return None
@staticmethod
@ -54,8 +54,8 @@ class DatasetDAO(BaseDAO):
try:
database.get_table(table_name, schema=schema)
return True
except SQLAlchemyError as e: # pragma: no cover
logger.error(f"Got an error {e} validating table: {table_name}")
except SQLAlchemyError as ex: # pragma: no cover
logger.error(f"Got an error {ex} validating table: {table_name}")
return False
@staticmethod

View File

@ -39,7 +39,7 @@ def validate_python_date_format(value: str) -> None:
class DatasetColumnsPutSchema(Schema):
id = fields.Integer() # pylint: disable=invalid-name
id = fields.Integer()
column_name = fields.String(required=True, validate=Length(1, 255))
type = fields.String(validate=Length(1, 32))
verbose_name = fields.String(allow_none=True, Length=(1, 1024))
@ -55,7 +55,7 @@ class DatasetColumnsPutSchema(Schema):
class DatasetMetricsPutSchema(Schema):
id = fields.Integer() # pylint: disable=invalid-name
id = fields.Integer()
expression = fields.String(required=True)
description = fields.String(allow_none=True)
metric_name = fields.String(required=True, validate=Length(1, 255))

View File

@ -441,9 +441,7 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
return df
@classmethod
def df_to_sql( # pylint: disable=invalid-name
cls, df: pd.DataFrame, **kwargs: Any
) -> None:
def df_to_sql(cls, df: pd.DataFrame, **kwargs: Any) -> None:
""" Upload data from a Pandas DataFrame to a database. For
regular engines this calls the DataFrame.to_sql() method. Can be
overridden for engines that don't work well with to_sql(), e.g.
@ -562,13 +560,13 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
pass
@classmethod
def extract_error_message(cls, e: Exception) -> str:
return f"{cls.engine} error: {cls._extract_error_message(e)}"
def extract_error_message(cls, ex: Exception) -> str:
return f"{cls.engine} error: {cls._extract_error_message(ex)}"
@classmethod
def _extract_error_message(cls, e: Exception) -> Optional[str]:
def _extract_error_message(cls, ex: Exception) -> Optional[str]:
"""Extract error message for queries"""
return utils.error_msg_from_exception(e)
return utils.error_msg_from_exception(ex)
@classmethod
def adjust_database_uri(cls, uri: URL, selected_schema: Optional[str]) -> None:
@ -977,7 +975,7 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
if database.extra:
try:
extra = json.loads(database.extra)
except json.JSONDecodeError as e:
logger.error(e)
raise e
except json.JSONDecodeError as ex:
logger.error(ex)
raise ex
return extra

View File

@ -64,9 +64,9 @@ class DruidEngineSpec(BaseEngineSpec): # pylint: disable=abstract-method
"""
try:
extra = json.loads(database.extra or "{}")
except json.JSONDecodeError as e:
logger.error(e)
raise e
except json.JSONDecodeError as ex:
logger.error(ex)
raise ex
if database.server_cert:
engine_params = extra.get("engine_params", {})

View File

@ -203,8 +203,8 @@ class HiveEngineSpec(PrestoEngineSpec):
uri.database = parse.quote(selected_schema, safe="")
@classmethod
def _extract_error_message(cls, e: Exception) -> str:
msg = str(e)
def _extract_error_message(cls, ex: Exception) -> str:
msg = str(ex)
match = re.search(r'errorMessage="(.*?)(?<!\\)"', msg)
if match:
msg = match.group(1)

View File

@ -86,12 +86,12 @@ class MySQLEngineSpec(BaseEngineSpec):
return "from_unixtime({col})"
@classmethod
def _extract_error_message(cls, e: Exception) -> str:
def _extract_error_message(cls, ex: Exception) -> str:
"""Extract error message for queries"""
message = str(e)
message = str(ex)
try:
if isinstance(e.args, tuple) and len(e.args) > 1:
message = e.args[1]
if isinstance(ex.args, tuple) and len(ex.args) > 1:
message = ex.args[1]
except Exception: # pylint: disable=broad-except
pass
return message

View File

@ -762,22 +762,22 @@ class PrestoEngineSpec(BaseEngineSpec):
polled = cursor.poll()
@classmethod
def _extract_error_message(cls, e: Exception) -> Optional[str]:
def _extract_error_message(cls, ex: Exception) -> Optional[str]:
if (
hasattr(e, "orig")
and type(e.orig).__name__ == "DatabaseError" # type: ignore
and isinstance(e.orig[0], dict) # type: ignore
hasattr(ex, "orig")
and type(ex.orig).__name__ == "DatabaseError" # type: ignore
and isinstance(ex.orig[0], dict) # type: ignore
):
error_dict = e.orig[0] # type: ignore
error_dict = ex.orig[0] # type: ignore
return "{} at {}: {}".format(
error_dict.get("errorName"),
error_dict.get("errorLocation"),
error_dict.get("message"),
)
if type(e).__name__ == "DatabaseError" and hasattr(e, "args") and e.args:
error_dict = e.args[0]
if type(ex).__name__ == "DatabaseError" and hasattr(ex, "args") and ex.args:
error_dict = ex.args[0]
return error_dict.get("message")
return utils.error_msg_from_exception(e)
return utils.error_msg_from_exception(ex)
@classmethod
def _partition_query( # pylint: disable=too-many-arguments,too-many-locals
@ -863,9 +863,7 @@ class PrestoEngineSpec(BaseEngineSpec):
return query
@classmethod
def _latest_partition_from_df( # pylint: disable=invalid-name
cls, df: pd.DataFrame
) -> Optional[List[str]]:
def _latest_partition_from_df(cls, df: pd.DataFrame) -> Optional[List[str]]:
if not df.empty:
return df.to_records(index=False)[0].item()
return None

View File

@ -62,9 +62,9 @@ def merge_slice(slc: Slice) -> None:
def get_slice_json(defaults: Dict[Any, Any], **kwargs: Any) -> str:
d = defaults.copy()
d.update(kwargs)
return json.dumps(d, indent=4, sort_keys=True)
defaults_copy = defaults.copy()
defaults_copy.update(kwargs)
return json.dumps(defaults_copy, indent=4, sort_keys=True)
def get_example_data(

View File

@ -88,13 +88,13 @@ def upgrade():
batch_op.create_unique_constraint(
"uq_dashboard_slice", ["dashboard_id", "slice_id"]
)
except Exception as e:
logging.exception(e)
except Exception as ex:
logging.exception(ex)
def downgrade():
try:
with op.batch_alter_table("dashboard_slices") as batch_op:
batch_op.drop_constraint("uq_dashboard_slice", type_="unique")
except Exception as e:
logging.exception(e)
except Exception as ex:
logging.exception(ex)

View File

@ -101,8 +101,8 @@ def upgrade():
dashboard.json_metadata = None
session.merge(dashboard)
except Exception as e:
logging.exception(f"dashboard {dashboard.id} has error: {e}")
except Exception as ex:
logging.exception(f"dashboard {dashboard.id} has error: {ex}")
session.commit()
session.close()

View File

@ -58,8 +58,8 @@ def upgrade():
batch_op.drop_constraint(slices_ibfk_2, type_="foreignkey")
batch_op.drop_column("druid_datasource_id")
batch_op.drop_column("table_id")
except Exception as e:
logging.warning(str(e))
except Exception as ex:
logging.warning(str(ex))
# fixed issue: https://github.com/airbnb/superset/issues/466
try:
@ -67,27 +67,27 @@ def upgrade():
batch_op.create_foreign_key(
None, "datasources", ["datasource_name"], ["datasource_name"]
)
except Exception as e:
logging.warning(str(e))
except Exception as ex:
logging.warning(str(ex))
try:
with op.batch_alter_table("query") as batch_op:
batch_op.create_unique_constraint("client_id", ["client_id"])
except Exception as e:
logging.warning(str(e))
except Exception as ex:
logging.warning(str(ex))
try:
with op.batch_alter_table("query") as batch_op:
batch_op.drop_column("name")
except Exception as e:
logging.warning(str(e))
except Exception as ex:
logging.warning(str(ex))
def downgrade():
try:
with op.batch_alter_table("tables") as batch_op:
batch_op.create_index("table_name", ["table_name"], unique=True)
except Exception as e:
logging.warning(str(e))
except Exception as ex:
logging.warning(str(ex))
try:
with op.batch_alter_table("slices") as batch_op:
@ -111,8 +111,8 @@ def downgrade():
"slices_ibfk_1", "datasources", ["druid_datasource_id"], ["id"]
)
batch_op.create_foreign_key("slices_ibfk_2", "tables", ["table_id"], ["id"])
except Exception as e:
logging.warning(str(e))
except Exception as ex:
logging.warning(str(ex))
try:
fk_columns = generic_find_constraint_name(
@ -123,12 +123,12 @@ def downgrade():
)
with op.batch_alter_table("columns") as batch_op:
batch_op.drop_constraint(fk_columns, type_="foreignkey")
except Exception as e:
logging.warning(str(e))
except Exception as ex:
logging.warning(str(ex))
op.add_column("query", sa.Column("name", sa.String(length=256), nullable=True))
try:
with op.batch_alter_table("query") as batch_op:
batch_op.drop_constraint("client_id", type_="unique")
except Exception as e:
logging.warning(str(e))
except Exception as ex:
logging.warning(str(ex))

View File

@ -120,14 +120,14 @@ def upgrade():
or "uq_datasources_datasource_name",
type_="unique",
)
except Exception as e:
except Exception as ex:
logging.warning(
"Constraint drop failed, you may want to do this "
"manually on your database. For context, this is a known "
"issue around undeterministic contraint names on Postgres "
"and perhaps more databases through SQLAlchemy."
)
logging.exception(e)
logging.exception(ex)
def downgrade():

View File

@ -39,6 +39,6 @@ def upgrade():
def downgrade():
try:
op.drop_column("dbs", "allow_dml")
except Exception as e:
logging.exception(e)
except Exception as ex:
logging.exception(ex)
pass

View File

@ -81,8 +81,8 @@ def upgrade():
layout, indent=None, separators=(",", ":"), sort_keys=True
)
session.merge(dashboard)
except Exception as e:
logging.exception(e)
except Exception as ex:
logging.exception(ex)
session.commit()
session.close()
@ -111,8 +111,8 @@ def downgrade():
layout, indent=None, separators=(",", ":"), sort_keys=True
)
session.merge(dashboard)
except Exception as e:
logging.exception(e)
except Exception as ex:
logging.exception(ex)
session.commit()
session.close()

View File

@ -85,8 +85,8 @@ def upgrade():
params.pop("resample_fillmethod", None)
params.pop("resample_how", None)
slc.params = json.dumps(params, sort_keys=True)
except Exception as e:
logging.exception(e)
except Exception as ex:
logging.exception(ex)
session.commit()
session.close()
@ -110,8 +110,8 @@ def downgrade():
del params["resample_method"]
slc.params = json.dumps(params, sort_keys=True)
except Exception as e:
logging.exception(e)
except Exception as ex:
logging.exception(ex)
session.commit()
session.close()

View File

@ -39,5 +39,5 @@ def upgrade():
def downgrade():
try:
op.drop_column("tables", "params")
except Exception as e:
logging.warning(str(e))
except Exception as ex:
logging.warning(str(ex))

View File

@ -62,8 +62,8 @@ def upgrade():
session.merge(slc)
session.commit()
print("Upgraded ({}/{}): {}".format(i, slice_len, slc.slice_name))
except Exception as e:
print(slc.slice_name + " error: " + str(e))
except Exception as ex:
print(slc.slice_name + " error: " + str(ex))
session.close()

View File

@ -60,8 +60,8 @@ def upgrade():
session.merge(slc)
session.commit()
print("Upgraded ({}/{}): {}".format(i, slice_len, slc.slice_name))
except Exception as e:
print(slc.slice_name + " error: " + str(e))
except Exception as ex:
print(slc.slice_name + " error: " + str(ex))
session.close()

View File

@ -43,7 +43,7 @@ def upgrade():
try:
op.create_unique_constraint(None, "dbs", ["verbose_name"])
op.create_unique_constraint(None, "clusters", ["verbose_name"])
except Exception as e:
except Exception:
logging.info("Constraint not created, expected when using sqlite")
@ -51,5 +51,5 @@ def downgrade():
try:
op.drop_column("dbs", "verbose_name")
op.drop_column("clusters", "verbose_name")
except Exception as e:
logging.exception(e)
except Exception as ex:
logging.exception(ex)

View File

@ -37,5 +37,5 @@ def upgrade():
def downgrade():
try:
op.drop_column("tables", "template_params")
except Exception as e:
logging.warning(str(e))
except Exception as ex:
logging.warning(str(ex))

View File

@ -78,7 +78,7 @@ def upgrade():
for slc in filter_box_slices.all():
try:
upgrade_slice(slc)
except Exception as e:
logging.exception(e)
except Exception as ex:
logging.exception(ex)
session.commit()
@ -100,8 +100,8 @@ def downgrade():
params["metric"] = flts[0].get("metric")
params["groupby"] = [o.get("column") for o in flts]
slc.params = json.dumps(params, sort_keys=True)
except Exception as e:
logging.exception(e)
except Exception as ex:
logging.exception(ex)
session.commit()
session.close()

View File

@ -27,7 +27,7 @@ class AnnotationLayer(Model, AuditMixinNullable):
"""A logical namespace for a set of annotations"""
__tablename__ = "annotation_layer"
id = Column(Integer, primary_key=True) # pylint: disable=invalid-name
id = Column(Integer, primary_key=True)
name = Column(String(250))
descr = Column(Text)
@ -40,7 +40,7 @@ class Annotation(Model, AuditMixinNullable):
"""Time-related annotation"""
__tablename__ = "annotation"
id = Column(Integer, primary_key=True) # pylint: disable=invalid-name
id = Column(Integer, primary_key=True)
start_dttm = Column(DateTime)
end_dttm = Column(DateTime)
layer_id = Column(Integer, ForeignKey("annotation_layer.id"), nullable=False)

View File

@ -73,7 +73,7 @@ class Url(Model, AuditMixinNullable):
"""Used for the short url feature"""
__tablename__ = "url"
id = Column(Integer, primary_key=True) # pylint: disable=invalid-name
id = Column(Integer, primary_key=True)
url = Column(Text)
@ -82,7 +82,7 @@ class KeyValue(Model): # pylint: disable=too-few-public-methods
"""Used for any type of key-value store"""
__tablename__ = "keyvalue"
id = Column(Integer, primary_key=True) # pylint: disable=invalid-name
id = Column(Integer, primary_key=True)
value = Column(Text, nullable=False)
@ -91,7 +91,7 @@ class CssTemplate(Model, AuditMixinNullable):
"""CSS templates for dashboards"""
__tablename__ = "css_templates"
id = Column(Integer, primary_key=True) # pylint: disable=invalid-name
id = Column(Integer, primary_key=True)
template_name = Column(String(250))
css = Column(Text, default="")
@ -106,7 +106,7 @@ class Database(
type = "table"
__table_args__ = (UniqueConstraint("database_name"),)
id = Column(Integer, primary_key=True) # pylint: disable=invalid-name
id = Column(Integer, primary_key=True)
verbose_name = Column(String(250), unique=True)
# short unique name, used in permissions
database_name = Column(String(250), unique=True, nullable=False)
@ -481,8 +481,8 @@ class Database(
return [
utils.DatasourceName(table=table, schema=schema) for table in tables
]
except Exception as e: # pylint: disable=broad-except
logger.exception(e)
except Exception as ex: # pylint: disable=broad-except
logger.exception(ex)
@cache_util.memoized_func(
key=lambda *args, **kwargs: f"db:{{}}:schema:{kwargs.get('schema')}:view_list", # type: ignore
@ -511,8 +511,8 @@ class Database(
database=self, inspector=self.inspector, schema=schema
)
return [utils.DatasourceName(table=view, schema=schema) for view in views]
except Exception as e: # pylint: disable=broad-except
logger.exception(e)
except Exception as ex: # pylint: disable=broad-except
logger.exception(ex)
@cache_util.memoized_func(
key=lambda *args, **kwargs: "db:{}:schema_list", attribute_in_key="id"
@ -564,9 +564,9 @@ class Database(
if self.encrypted_extra:
try:
encrypted_extra = json.loads(self.encrypted_extra)
except json.JSONDecodeError as e:
logger.error(e)
raise e
except json.JSONDecodeError as ex:
logger.error(ex)
raise ex
return encrypted_extra
def get_table(self, table_name: str, schema: Optional[str] = None) -> Table:
@ -645,7 +645,7 @@ class Log(Model): # pylint: disable=too-few-public-methods
__tablename__ = "logs"
id = Column(Integer, primary_key=True) # pylint: disable=invalid-name
id = Column(Integer, primary_key=True)
action = Column(String(512))
user_id = Column(Integer, ForeignKey("ab_user.id"))
dashboard_id = Column(Integer)
@ -662,7 +662,7 @@ class Log(Model): # pylint: disable=too-few-public-methods
class FavStar(Model): # pylint: disable=too-few-public-methods
__tablename__ = "favstar"
id = Column(Integer, primary_key=True) # pylint: disable=invalid-name
id = Column(Integer, primary_key=True)
user_id = Column(Integer, ForeignKey("ab_user.id"))
class_name = Column(String(50))
obj_id = Column(Integer)

View File

@ -119,7 +119,7 @@ class Dashboard( # pylint: disable=too-many-instance-attributes
"""The dashboard object!"""
__tablename__ = "dashboards"
id = Column(Integer, primary_key=True) # pylint: disable=invalid-name
id = Column(Integer, primary_key=True)
dashboard_title = Column(String(500))
position_json = Column(utils.MediumText())
description = Column(Text)

View File

@ -37,7 +37,7 @@ class DatasourceAccessRequest(Model, AuditMixinNullable):
"""ORM model for the access requests for datasources and dbs."""
__tablename__ = "access_request"
id = Column(Integer, primary_key=True) # pylint: disable=invalid-name
id = Column(Integer, primary_key=True)
datasource_id = Column(Integer)
datasource_type = Column(String(200))

View File

@ -166,14 +166,14 @@ class ImportMixin:
try:
obj_query = session.query(cls).filter(and_(*filters))
obj = obj_query.one_or_none()
except MultipleResultsFound as e:
except MultipleResultsFound as ex:
logger.error(
"Error importing %s \n %s \n %s",
cls.__name__,
str(obj_query),
yaml.safe_dump(dict_rep),
)
raise e
raise ex
if not obj:
is_new_obj = True
@ -274,14 +274,14 @@ class ImportMixin:
return new_obj
def alter_params(self, **kwargs):
d = self.params_dict
d.update(kwargs)
self.params = json.dumps(d)
params = self.params_dict
params.update(kwargs)
self.params = json.dumps(params)
def remove_params(self, param_to_remove: str) -> None:
d = self.params_dict
d.pop(param_to_remove, None)
self.params = json.dumps(d)
params = self.params_dict
params.pop(param_to_remove, None)
self.params = json.dumps(params)
def reset_ownership(self):
""" object will belong to the user the current user """
@ -376,7 +376,7 @@ class QueryResult: # pylint: disable=too-few-public-methods
def __init__( # pylint: disable=too-many-arguments
self, df, query, duration, status=QueryStatus.SUCCESS, error_message=None
):
self.df: pd.DataFrame = df # pylint: disable=invalid-name
self.df: pd.DataFrame = df
self.query: str = query
self.duration: int = duration
self.status: str = status
@ -395,8 +395,8 @@ class ExtraJSONMixin:
except Exception: # pylint: disable=broad-except
return {}
def set_extra_json(self, d):
self.extra_json = json.dumps(d)
def set_extra_json(self, extras):
self.extra_json = json.dumps(extras)
def set_extra_json_key(self, key, value):
extra = self.extra

View File

@ -50,7 +50,7 @@ class EmailSchedule:
__tablename__ = "email_schedules"
id = Column(Integer, primary_key=True) # pylint: disable=invalid-name
id = Column(Integer, primary_key=True)
active = Column(Boolean, default=True, index=True)
crontab = Column(String(50))

View File

@ -55,7 +55,7 @@ class Slice(
"""A slice is essentially a report or a view on data"""
__tablename__ = "slices"
id = Column(Integer, primary_key=True) # pylint: disable=invalid-name
id = Column(Integer, primary_key=True)
slice_name = Column(String(250))
datasource_id = Column(Integer)
datasource_type = Column(String(200))
@ -135,9 +135,9 @@ class Slice(
@property # type: ignore
@utils.memoized
def viz(self) -> BaseViz:
d = json.loads(self.params)
form_data = json.loads(self.params)
viz_class = viz_types[self.viz_type]
return viz_class(datasource=self.datasource, form_data=d)
return viz_class(datasource=self.datasource, form_data=form_data)
@property
def description_markeddown(self) -> str:
@ -146,14 +146,14 @@ class Slice(
@property
def data(self) -> Dict[str, Any]:
"""Data used to render slice in templates"""
d: Dict[str, Any] = {}
data: Dict[str, Any] = {}
self.token = ""
try:
d = self.viz.data
self.token = d.get("token") # type: ignore
except Exception as e: # pylint: disable=broad-except
logger.exception(e)
d["error"] = str(e)
data = self.viz.data
self.token = data.get("token") # type: ignore
except Exception as ex: # pylint: disable=broad-except
logger.exception(ex)
data["error"] = str(ex)
return {
"cache_timeout": self.cache_timeout,
"datasource": self.datasource_name,
@ -178,9 +178,9 @@ class Slice(
form_data: Dict[str, Any] = {}
try:
form_data = json.loads(self.params)
except Exception as e: # pylint: disable=broad-except
except Exception as ex: # pylint: disable=broad-except
logger.error("Malformed json in slice's params")
logger.exception(e)
logger.exception(ex)
form_data.update(
{
"slice_id": self.id,

View File

@ -48,7 +48,7 @@ class Query(Model, ExtraJSONMixin):
table may represent multiple SQL statements executed sequentially"""
__tablename__ = "query"
id = Column(Integer, primary_key=True) # pylint: disable=invalid-name
id = Column(Integer, primary_key=True)
client_id = Column(String(11), unique=True, nullable=False)
database_id = Column(Integer, ForeignKey("dbs.id"), nullable=False)
@ -150,7 +150,7 @@ class SavedQuery(Model, AuditMixinNullable, ExtraJSONMixin):
"""ORM model for SQL query"""
__tablename__ = "saved_query"
id = Column(Integer, primary_key=True) # pylint: disable=invalid-name
id = Column(Integer, primary_key=True)
user_id = Column(Integer, ForeignKey("ab_user.id"), nullable=True)
db_id = Column(Integer, ForeignKey("dbs.id"), nullable=True)
schema = Column(String(128))
@ -195,9 +195,7 @@ class TabState(Model, AuditMixinNullable, ExtraJSONMixin):
__tablename__ = "tab_state"
# basic info
id = Column( # pylint: disable=invalid-name
Integer, primary_key=True, autoincrement=True
)
id = Column(Integer, primary_key=True, autoincrement=True)
user_id = Column(Integer, ForeignKey("ab_user.id"))
label = Column(String(256))
active = Column(Boolean, default=False)
@ -248,9 +246,7 @@ class TableSchema(Model, AuditMixinNullable, ExtraJSONMixin):
__tablename__ = "table_schema"
id = Column( # pylint: disable=invalid-name
Integer, primary_key=True, autoincrement=True
)
id = Column(Integer, primary_key=True, autoincrement=True)
tab_state_id = Column(Integer, ForeignKey("tab_state.id", ondelete="CASCADE"))
database_id = Column(Integer, ForeignKey("dbs.id"), nullable=False)

View File

@ -62,7 +62,7 @@ class Tag(Model, AuditMixinNullable):
"""A tag attached to an object (query, chart or dashboard)."""
__tablename__ = "tag"
id = Column(Integer, primary_key=True) # pylint: disable=invalid-name
id = Column(Integer, primary_key=True)
name = Column(String(250), unique=True)
type = Column(Enum(TagTypes))
@ -72,7 +72,7 @@ class TaggedObject(Model, AuditMixinNullable):
"""An association between an object and a tag."""
__tablename__ = "tagged_object"
id = Column(Integer, primary_key=True) # pylint: disable=invalid-name
id = Column(Integer, primary_key=True)
tag_id = Column(Integer, ForeignKey("tag.id"))
object_id = Column(Integer)
object_type = Column(Enum(ObjectTypes))

View File

@ -34,7 +34,7 @@ class UserAttribute(Model, AuditMixinNullable):
"""
__tablename__ = "user_attribute"
id = Column(Integer, primary_key=True) # pylint: disable=invalid-name
id = Column(Integer, primary_key=True)
user_id = Column(Integer, ForeignKey("ab_user.id"))
user = relationship(
security_manager.user_model, backref="extra_attributes", foreign_keys=[user_id]

View File

@ -138,8 +138,8 @@ class SupersetResultSet:
pa_data[i] = pa.Array.from_pandas(
series, type=pa.timestamp("ns", tz=tz)
)
except Exception as e:
logger.exception(e)
except Exception as ex:
logger.exception(ex)
self.table = pa.Table.from_arrays(pa_data, names=column_names)
self._type_dict: Dict[str, Any] = {}
@ -150,8 +150,8 @@ class SupersetResultSet:
for i, col in enumerate(column_names)
if deduped_cursor_desc
}
except Exception as e:
logger.exception(e)
except Exception as ex:
logger.exception(ex)
@staticmethod
def convert_pa_dtype(pa_dtype: pa.DataType) -> Optional[str]:

View File

@ -135,9 +135,9 @@ def session_scope(nullpool):
try:
yield session
session.commit()
except Exception as e:
except Exception as ex:
session.rollback()
logger.exception(e)
logger.exception(ex)
raise
finally:
session.close()
@ -175,12 +175,12 @@ def get_sql_results( # pylint: disable=too-many-arguments
expand_data=expand_data,
log_params=log_params,
)
except Exception as e: # pylint: disable=broad-except
except Exception as ex: # pylint: disable=broad-except
logger.error("Query %d", query_id)
logger.debug("Query %d: %s", query_id, e)
logger.debug("Query %d: %s", query_id, ex)
stats_logger.incr("error_sqllab_unhandled")
query = get_query(query_id, session)
return handle_query_error(str(e), query, session)
return handle_query_error(str(ex), query, session)
# pylint: disable=too-many-arguments
@ -253,17 +253,17 @@ def execute_sql_statement(sql_statement, query, user_name, session, cursor, log_
)
data = db_engine_spec.fetch_data(cursor, query.limit)
except SoftTimeLimitExceeded as e:
except SoftTimeLimitExceeded as ex:
logger.error("Query %d: Time limit exceeded", query.id)
logger.debug("Query %d: %s", query.id, e)
logger.debug("Query %d: %s", query.id, ex)
raise SqlLabTimeoutException(
"SQL Lab timeout. This environment's policy is to kill queries "
"after {} seconds.".format(SQLLAB_TIMEOUT)
)
except Exception as e:
logger.error("Query %d: %s", query.id, type(e))
logger.debug("Query %d: %s", query.id, e)
raise SqlLabException(db_engine_spec.extract_error_message(e))
except Exception as ex:
logger.error("Query %d: %s", query.id, type(ex))
logger.debug("Query %d: %s", query.id, ex)
raise SqlLabException(db_engine_spec.extract_error_message(ex))
logger.debug("Query %d: Fetching cursor description", query.id)
cursor_description = cursor.description
@ -378,8 +378,8 @@ def execute_sql_statements(
result_set = execute_sql_statement(
statement, query, user_name, session, cursor, log_params
)
except Exception as e: # pylint: disable=broad-except
msg = str(e)
except Exception as ex: # pylint: disable=broad-except
msg = str(ex)
if statement_count > 1:
msg = f"[Statement {i+1} out of {statement_count}] " + msg
payload = handle_query_error(msg, query, session, payload)

View File

@ -136,9 +136,9 @@ class PrestoDBSQLValidator(BaseSQLValidator):
start_column=start_column,
end_column=end_column,
)
except Exception as e:
logger.exception(f"Unexpected error running validation query: {e}")
raise e
except Exception as ex:
logger.exception(f"Unexpected error running validation query: {ex}")
raise ex
@classmethod
def validate(

View File

@ -263,8 +263,8 @@ def parse_human_datetime(s):
if parsed_flags & 2 == 0:
parsed_dttm = parsed_dttm.replace(hour=0, minute=0, second=0)
dttm = dttm_from_timetuple(parsed_dttm.utctimetuple())
except Exception as e:
logger.exception(e)
except Exception as ex:
logger.exception(ex)
raise ValueError("Couldn't parse date string [{}]".format(s))
return dttm
@ -565,8 +565,8 @@ def validate_json(obj: Union[bytes, bytearray, str]) -> None:
if obj:
try:
json.loads(obj)
except Exception as e:
logger.error(f"JSON is not valid {e}")
except Exception as ex:
logger.error(f"JSON is not valid {ex}")
raise SupersetException("JSON is not valid")
@ -597,16 +597,16 @@ class timeout:
try:
signal.signal(signal.SIGALRM, self.handle_timeout)
signal.alarm(self.seconds)
except ValueError as e:
except ValueError as ex:
logger.warning("timeout can't be used in the current context")
logger.exception(e)
logger.exception(ex)
def __exit__(self, type, value, traceback):
try:
signal.alarm(0)
except ValueError as e:
except ValueError as ex:
logger.warning("timeout can't be used in the current context")
logger.exception(e)
logger.exception(ex)
def pessimistic_connection_handling(some_engine):

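A usage sketch of the timeout context manager above (hypothetical workload; the constructor is assumed to take a seconds argument, given the signal.alarm(self.seconds) call on entry):

with timeout(seconds=30):
    run_long_query()  # hypothetical call; SIGALRM interrupts it after 30 seconds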
View File

@ -37,8 +37,8 @@ def stats_timing(stats_key, stats_logger):
start_ts = now_as_float()
try:
yield start_ts
except Exception as e:
raise e
except Exception as ex:
raise ex
finally:
stats_logger.timing(stats_key, now_as_float() - start_ts)
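A usage sketch of stats_timing (the metric key and workload are illustrative): the context manager yields the start timestamp, re-raises any exception unchanged, and emits the elapsed time from the finally block either way.

# hypothetical caller; stats_logger is whatever stats backend is configured
with stats_timing("sqllab.query.time_executing_query", stats_logger):
    cursor.execute(sql)  # timing is recorded even if this raises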

View File

@ -37,19 +37,19 @@ class AbstractEventLogger(ABC):
user_id = None
if g.user:
user_id = g.user.get_id()
d = request.form.to_dict() or {}
form_data = request.form.to_dict() or {}
# request parameters can overwrite post body
request_params = request.args.to_dict()
d.update(request_params)
d.update(kwargs)
form_data.update(request_params)
form_data.update(kwargs)
slice_id = d.get("slice_id")
dashboard_id = d.get("dashboard_id")
slice_id = form_data.get("slice_id")
dashboard_id = form_data.get("dashboard_id")
try:
slice_id = int(
slice_id or json.loads(d.get("form_data")).get("slice_id")
slice_id or json.loads(form_data.get("form_data")).get("slice_id")
)
except (ValueError, TypeError):
slice_id = 0
@ -61,10 +61,10 @@ class AbstractEventLogger(ABC):
# bulk insert
try:
explode_by = d.get("explode")
records = json.loads(d.get(explode_by))
explode_by = form_data.get("explode")
records = json.loads(form_data.get(explode_by))
except Exception: # pylint: disable=broad-except
records = [d]
records = [form_data]
referrer = request.referrer[:1000] if request.referrer else None

View File

@ -107,8 +107,8 @@ def api(f):
def wraps(self, *args, **kwargs):
try:
return f(self, *args, **kwargs)
except Exception as e: # pylint: disable=broad-except
logger.exception(e)
except Exception as ex: # pylint: disable=broad-except
logger.exception(ex)
return json_error_response(get_error_msg())
return functools.update_wrapper(wraps, f)
@ -124,22 +124,24 @@ def handle_api_exception(f):
def wraps(self, *args, **kwargs):
try:
return f(self, *args, **kwargs)
except SupersetSecurityException as e:
logger.exception(e)
except SupersetSecurityException as ex:
logger.exception(ex)
return json_error_response(
utils.error_msg_from_exception(e), status=e.status, link=e.link
utils.error_msg_from_exception(ex), status=ex.status, link=ex.link
)
except SupersetException as e:
logger.exception(e)
except SupersetException as ex:
logger.exception(ex)
return json_error_response(
utils.error_msg_from_exception(e), status=e.status
utils.error_msg_from_exception(ex), status=ex.status
)
except HTTPException as e:
logger.exception(e)
return json_error_response(utils.error_msg_from_exception(e), status=e.code)
except Exception as e: # pylint: disable=broad-except
logger.exception(e)
return json_error_response(utils.error_msg_from_exception(e))
except HTTPException as ex:
logger.exception(ex)
return json_error_response(
utils.error_msg_from_exception(ex), status=ex.code
)
except Exception as ex: # pylint: disable=broad-except
logger.exception(ex)
return json_error_response(utils.error_msg_from_exception(ex))
return functools.update_wrapper(wraps, f)
@ -176,8 +178,8 @@ def menu_data():
or f"/profile/{g.user.username}/"
)
# when user object has no username
except NameError as e:
logger.exception(e)
except NameError as ex:
logger.exception(ex)
if logo_target_path.startswith("/"):
root_path = f"/superset{logo_target_path}"
@ -261,8 +263,8 @@ class ListWidgetWithCheckboxes(ListWidget): # pylint: disable=too-few-public-me
def validate_json(_form, field):
try:
json.loads(field.data)
except Exception as e:
logger.exception(e)
except Exception as ex:
logger.exception(ex)
raise Exception(_("json isn't valid"))
@ -303,8 +305,8 @@ class DeleteMixin: # pylint: disable=too-few-public-methods
abort(404)
try:
self.pre_delete(item)
except Exception as e: # pylint: disable=broad-except
flash(str(e), "danger")
except Exception as ex: # pylint: disable=broad-except
flash(str(ex), "danger")
else:
view_menu = security_manager.find_view_menu(item.get_perm())
pvs = (
@ -338,8 +340,8 @@ class DeleteMixin: # pylint: disable=too-few-public-methods
for item in items:
try:
self.pre_delete(item)
except Exception as e: # pylint: disable=broad-except
flash(str(e), "danger")
except Exception as ex: # pylint: disable=broad-except
flash(str(ex), "danger")
else:
self._delete(item.id)
self.update_redirect()

View File

@ -44,7 +44,7 @@ def check_ownership_and_item_exists(f):
A Decorator that checks if an object exists and is owned by the current user
"""
def wraps(self, pk): # pylint: disable=invalid-name
def wraps(self, pk):
item = self.datamodel.get(
pk, self._base_filters # pylint: disable=protected-access
)
@ -52,8 +52,8 @@ def check_ownership_and_item_exists(f):
return self.response_404()
try:
check_ownership(item)
except SupersetSecurityException as e:
return self.response(403, message=str(e))
except SupersetSecurityException as ex:
return self.response(403, message=str(ex))
return f(self, item)
return functools.update_wrapper(wraps, f)
@ -290,9 +290,9 @@ class BaseOwnedModelRestApi(BaseSupersetModelRestApi):
return self.response(
200, result=self.edit_model_schema.dump(item.data, many=False).data
)
except SQLAlchemyError as e:
logger.error(f"Error updating model {self.__class__.__name__}: {e}")
return self.response_422(message=str(e))
except SQLAlchemyError as ex:
logger.error(f"Error updating model {self.__class__.__name__}: {ex}")
return self.response_422(message=str(ex))
@expose("/", methods=["POST"])
@protect()
@ -342,9 +342,9 @@ class BaseOwnedModelRestApi(BaseSupersetModelRestApi):
result=self.add_model_schema.dump(item.data, many=False).data,
id=item.data.id,
)
except SQLAlchemyError as e:
logger.error(f"Error creating model {self.__class__.__name__}: {e}")
return self.response_422(message=str(e))
except SQLAlchemyError as ex:
logger.error(f"Error creating model {self.__class__.__name__}: {ex}")
return self.response_422(message=str(ex))
@expose("/<pk>", methods=["DELETE"])
@protect()
@ -383,6 +383,6 @@ class BaseOwnedModelRestApi(BaseSupersetModelRestApi):
try:
self.datamodel.delete(item, raise_exception=True)
return self.response(200, message="OK")
except SQLAlchemyError as e:
logger.error(f"Error deleting model {self.__class__.__name__}: {e}")
return self.response_422(message=str(e))
except SQLAlchemyError as ex:
logger.error(f"Error deleting model {self.__class__.__name__}: {ex}")
return self.response_422(message=str(ex))

View File

@ -185,8 +185,8 @@ def check_datasource_perms(
datasource_id, datasource_type = get_datasource_info(
datasource_id, datasource_type, form_data
)
except SupersetException as e:
raise SupersetSecurityException(str(e))
except SupersetException as ex:
raise SupersetSecurityException(str(ex))
if datasource_type is None:
raise SupersetSecurityException("Could not determine datasource type")
@ -317,8 +317,8 @@ class KV(BaseSupersetView):
obj = models.KeyValue(value=value)
db.session.add(obj)
db.session.commit()
except Exception as e:
return json_error_response(e)
except Exception as ex:
return json_error_response(ex)
return Response(json.dumps({"id": obj.id}), status=200)
@event_logger.log_this
@ -329,8 +329,8 @@ class KV(BaseSupersetView):
kv = db.session.query(models.KeyValue).filter_by(id=key_id).scalar()
if not kv:
return Response(status=404, content_type="text/plain")
except Exception as e:
return json_error_response(e)
except Exception as ex:
return json_error_response(ex)
return Response(kv.value, status=200, content_type="text/plain")
@ -600,9 +600,9 @@ class Superset(BaseSupersetView):
query_obj = viz_obj.query_obj()
if query_obj:
query = viz_obj.datasource.get_query_str(query_obj)
except Exception as e:
logger.exception(e)
return json_error_response(e)
except Exception as ex:
logger.exception(ex)
return json_error_response(ex)
if not query:
query = "No query."
@ -706,8 +706,8 @@ class Superset(BaseSupersetView):
datasource_id, datasource_type = get_datasource_info(
datasource_id, datasource_type, form_data
)
except SupersetException as e:
return json_error_response(utils.error_msg_from_exception(e))
except SupersetException as ex:
return json_error_response(utils.error_msg_from_exception(ex))
viz_obj = get_viz(
datasource_type=datasource_type,
@ -729,19 +729,19 @@ class Superset(BaseSupersetView):
if request.method == "POST" and f:
try:
dashboard_import_export.import_dashboards(db.session, f.stream)
except DatabaseNotFound as e:
logger.exception(e)
except DatabaseNotFound as ex:
logger.exception(ex)
flash(
_(
"Cannot import dashboard: %(db_error)s.\n"
"Make sure to create the database before "
"importing the dashboard.",
db_error=e,
db_error=ex,
),
"danger",
)
except Exception as e:
logger.exception(e)
except Exception as ex:
logger.exception(ex)
flash(
_(
"An unknown error occurred. "
@ -1371,11 +1371,11 @@ class Superset(BaseSupersetView):
with closing(engine.connect()) as conn:
conn.scalar(select([1]))
return json_success('"OK"')
except CertificateException as e:
logger.info(e.message)
return json_error_response(e.message)
except NoSuchModuleError as e:
logger.info("Invalid driver %s", e)
except CertificateException as ex:
logger.info(ex.message)
return json_error_response(ex.message)
except NoSuchModuleError as ex:
logger.info("Invalid driver %s", ex)
driver_name = make_url(uri).drivername
return json_error_response(
_(
@ -1384,24 +1384,24 @@ class Superset(BaseSupersetView):
),
400,
)
except ArgumentError as e:
logger.info("Invalid URI %s", e)
except ArgumentError as ex:
logger.info("Invalid URI %s", ex)
return json_error_response(
_(
"Invalid connection string, a valid string usually follows:\n"
"'DRIVER://USER:PASSWORD@DB-HOST/DATABASE-NAME'"
)
)
except OperationalError as e:
logger.warning("Connection failed %s", e)
except OperationalError as ex:
logger.warning("Connection failed %s", ex)
return json_error_response(
_("Connection failed, please check your connection settings."), 400
)
except DBSecurityException as e:
logger.warning("Stopped an unsafe database connection. %s", e)
return json_error_response(_(str(e)), 400)
except Exception as e:
logger.error("Unexpected error %s", e)
except DBSecurityException as ex:
logger.warning("Stopped an unsafe database connection. %s", ex)
return json_error_response(_(str(ex)), 400)
except Exception as ex:
logger.error("Unexpected error %s", ex)
return json_error_response(
_("Unexpected error occurred, please check your logs for details"), 400
)
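
The connection test above boils down to opening a connection and running SELECT 1, with each failure mode mapped to a distinct error message. A minimal sketch against an in-memory SQLite database, in the SQLAlchemy 1.x style matching select([1]) in the hunk:

from contextlib import closing

from sqlalchemy import create_engine, select

engine = create_engine("sqlite://")
with closing(engine.connect()) as conn:
    # An OperationalError here would mean the database is unreachable.
    assert conn.scalar(select([1])) == 1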
@ -1706,9 +1706,9 @@ class Superset(BaseSupersetView):
force=True,
)
obj.get_json()
except Exception as e:
except Exception as ex:
logger.exception("Failed to warm up cache")
return json_error_response(utils.error_msg_from_exception(e))
return json_error_response(utils.error_msg_from_exception(ex))
return json_success(
json.dumps(
[{"slice_id": slc.id, "slice_name": slc.slice_name} for slc in slices]
@ -1950,9 +1950,9 @@ class Superset(BaseSupersetView):
return json_error_response(err_msg)
try:
DruidDatasource.sync_to_db_from_config(druid_config, user, cluster)
except Exception as e:
logger.exception(utils.error_msg_from_exception(e))
return json_error_response(utils.error_msg_from_exception(e))
except Exception as ex:
logger.exception(utils.error_msg_from_exception(ex))
return json_error_response(utils.error_msg_from_exception(ex))
return Response(status=201)
@has_access
@ -2064,11 +2064,11 @@ class Superset(BaseSupersetView):
cost = mydb.db_engine_spec.estimate_query_cost(
mydb, schema, sql, utils.QuerySource.SQL_LAB
)
except SupersetTimeoutException as e:
logger.exception(e)
except SupersetTimeoutException as ex:
logger.exception(ex)
return json_error_response(timeout_msg)
except Exception as e:
return json_error_response(str(e))
except Exception as ex:
return json_error_response(str(ex))
spec = mydb.db_engine_spec
query_cost_formatters = get_feature_flags().get(
@ -2226,15 +2226,15 @@ class Superset(BaseSupersetView):
encoding=None,
)
return json_success(payload)
except Exception as e:
logger.exception(e)
except Exception as ex:
logger.exception(ex)
msg = _(
f"{validator.name} was unable to check your query.\n"
"Please recheck your query.\n"
f"Exception: {e}"
f"Exception: {ex}"
)
# Return as a 400 if the database error message says we got a 4xx error
if re.search(r"([\W]|^)4\d{2}([\W]|$)", str(e)):
if re.search(r"([\W]|^)4\d{2}([\W]|$)", str(ex)):
return json_error_response(f"{msg}", status=400)
else:
return json_error_response(f"{msg}")
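
The regex above is a heuristic for spotting an embedded 4xx status code in a database error message; standalone, it behaves like this:

import re

def looks_like_4xx(message: str) -> bool:
    # True when a standalone 4xx status code appears in the message;
    # the \W/anchor guards keep digits inside a longer number from matching.
    return bool(re.search(r"([\W]|^)4\d{2}([\W]|$)", message))

assert looks_like_4xx("Error 404: relation not found")
assert not looks_like_4xx("query took 1400ms")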
@ -2268,8 +2268,8 @@ class Superset(BaseSupersetView):
expand_data=expand_data,
log_params=log_params,
)
except Exception as e:
logger.exception(f"Query {query.id}: {e}")
except Exception as ex:
logger.exception(f"Query {query.id}: {ex}")
msg = _(
"Failed to start remote query on a worker. "
"Tell your administrator to verify the availability of "
@ -2330,8 +2330,8 @@ class Superset(BaseSupersetView):
ignore_nan=True,
encoding=None,
)
except Exception as e:
logger.exception(f"Query {query.id}: {e}")
except Exception as ex:
logger.exception(f"Query {query.id}: {ex}")
return json_error_response(f"{{e}}")
if data.get("status") == QueryStatus.FAILED:
return json_error_response(payload=data)
@ -2414,8 +2414,8 @@ class Superset(BaseSupersetView):
session.flush()
query_id = query.id
session.commit() # shouldn't be necessary
except SQLAlchemyError as e:
logger.error(f"Errors saving query details {e}")
except SQLAlchemyError as ex:
logger.error(f"Errors saving query details {ex}")
session.rollback()
raise Exception(_("Query record was not created as expected."))
if not query_id:
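
The save path above follows the usual flush/commit/rollback discipline. A condensed sketch of the same pattern, with the session and query model assumed:

import logging

from sqlalchemy.exc import SQLAlchemyError

logger = logging.getLogger(__name__)

def save_query(session, query):
    # Flush to obtain the autogenerated id, then commit; on failure log,
    # roll the session back, and surface a user-facing error instead.
    try:
        session.add(query)
        session.flush()
        query_id = query.id
        session.commit()
        return query_id
    except SQLAlchemyError as ex:
        logger.error(f"Errors saving query details {ex}")
        session.rollback()
        raise Exception("Query record was not created as expected.")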
@ -2440,8 +2440,8 @@ class Superset(BaseSupersetView):
rendered_query = template_processor.process_template(
query.sql, **template_params
)
except Exception as e:
error_msg = utils.error_msg_from_exception(e)
except Exception as ex:
error_msg = utils.error_msg_from_exception(ex)
return json_error_response(
f"Query {query_id}: Template rendering failed: {error_msg}"
)
@ -2799,8 +2799,8 @@ class Superset(BaseSupersetView):
database, schemas_allowed, False
)
return self.json_response(schemas_allowed_processed)
except Exception as e:
logger.exception(e)
except Exception as ex:
logger.exception(ex)
return json_error_response(
"Failed to fetch schemas allowed for csv upload in this database! "
"Please contact your Superset Admin!"

View File

@ -270,9 +270,9 @@ class DatabaseRestApi(DatabaseMixin, BaseSupersetModelRestApi):
self.incr_stats("init", self.table_metadata.__name__)
try:
table_info: Dict = get_table_metadata(database, table_name, schema_name)
except SQLAlchemyError as e:
except SQLAlchemyError as ex:
self.incr_stats("error", self.table_metadata.__name__)
return self.response_422(error_msg_from_exception(e))
return self.response_422(error_msg_from_exception(ex))
self.incr_stats("success", self.table_metadata.__name__)
return self.response(200, **table_info)

View File

@ -32,9 +32,7 @@ def check_datasource_access(f):
A Decorator that checks if a user has datasource access
"""
def wraps(
self, pk: int, table_name: str, schema_name: Optional[str] = None
): # pylint: disable=invalid-name
def wraps(self, pk: int, table_name: str, schema_name: Optional[str] = None):
schema_name_parsed = parse_js_uri_path_item(schema_name, eval_undefined=True)
table_name_parsed = parse_js_uri_path_item(table_name)
if not table_name_parsed:

View File

@ -234,9 +234,9 @@ class DatabaseMixin:
# this will check whether json.loads(extra) can succeed
try:
extra = database.get_extra()
except Exception as e:
except Exception as ex:
raise Exception(
_("Extra field cannot be decoded by JSON. %{msg}s", msg=str(e))
_("Extra field cannot be decoded by JSON. %{msg}s", msg=str(ex))
)
# this will check whether 'metadata_params' is configured correctly
@ -256,7 +256,7 @@ class DatabaseMixin:
# this will check whether json.loads(secure_extra) can succeed
try:
database.get_encrypted_extra()
except Exception as e:
except Exception as ex:
raise Exception(
_("Extra field cannot be decoded by JSON. %{msg}s", msg=str(e))
_("Extra field cannot be decoded by JSON. %{msg}s", msg=str(ex))
)
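
Both checks above reduce to "does json.loads() accept this field". A standalone version, with the error message format assumed:

import json

def validate_extra_json(raw: str) -> dict:
    # Re-raise any parse failure with a message the UI can show directly.
    try:
        return json.loads(raw)
    except Exception as ex:  # pylint: disable=broad-except
        raise Exception(f"Extra field cannot be decoded by JSON. {ex}")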

View File

@ -158,7 +158,7 @@ class CsvToDatabaseView(SimpleFormView):
table.fetch_metadata()
db.session.add(table)
db.session.commit()
except Exception as e: # pylint: disable=broad-except
except Exception as ex: # pylint: disable=broad-except
db.session.rollback()
try:
os.remove(path)
@ -171,7 +171,7 @@ class CsvToDatabaseView(SimpleFormView):
filename=csv_filename,
table_name=form.name.data,
db_name=database.database_name,
error_msg=str(e),
error_msg=str(ex),
)
flash(message, "danger")

View File

@ -431,10 +431,10 @@ class BaseViz:
self.status = utils.QueryStatus.SUCCESS
is_loaded = True
stats_logger.incr("loaded_from_cache")
except Exception as e:
logger.exception(e)
except Exception as ex:
logger.exception(ex)
logger.error(
"Error reading cache: " + utils.error_msg_from_exception(e)
"Error reading cache: " + utils.error_msg_from_exception(ex)
)
logger.info("Serving from cache")
@ -446,10 +446,10 @@ class BaseViz:
if not self.force:
stats_logger.incr("loaded_from_source_without_force")
is_loaded = True
except Exception as e:
logger.exception(e)
except Exception as ex:
logger.exception(ex)
if not self.error_message:
self.error_message = "{}".format(e)
self.error_message = "{}".format(ex)
self.status = utils.QueryStatus.FAILED
stacktrace = utils.get_stacktrace()
@ -469,11 +469,11 @@ class BaseViz:
stats_logger.incr("set_cache_key")
cache.set(cache_key, cache_value, timeout=self.cache_timeout)
except Exception as e:
except Exception as ex:
# cache.set call can fail if the backend is down or if
# the key is too large or whatever other reasons
logger.warning("Could not cache key {}".format(cache_key))
logger.exception(e)
logger.exception(ex)
cache.delete(cache_key)
return {
"cache_key": self._any_cache_key,