Re-enable pylint for superset/utils folder (#8766)

* re-enable pylint for superset/utils/import_datasource.py

* re-enable pylint for superset/utils/cache.py

* re-enable pylint for superset/utils/log.py
This commit is contained in:
Will Barrett 2019-12-06 14:34:58 -08:00 committed by Maxime Beauchemin
parent 60892ae45d
commit ed3944ebef
3 changed files with 27 additions and 29 deletions

View File

@@ -14,16 +14,12 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=C,R,W
from typing import Optional
from flask import Flask, request
from flask_caching import Cache
from flask import request
from superset.extensions import cache_manager
def view_cache_key(*unused_args, **unused_kwargs) -> str:
def view_cache_key(*_, **__) -> str:
args_hash = hash(frozenset(request.args.items()))
return "view/{}/{}".format(request.path, args_hash)

View File

@@ -14,7 +14,6 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=C,R,W
import logging
from sqlalchemy.orm.session import make_transient
@@ -30,7 +29,7 @@ def import_datasource(
superset instances. Audit metadata isn't copies over.
"""
make_transient(i_datasource)
logging.info("Started import of the datasource: {}".format(i_datasource.to_json()))
logging.info("Started import of the datasource: %s", i_datasource.to_json())
i_datasource.id = None
i_datasource.database_id = lookup_database(i_datasource).id
@@ -47,25 +46,25 @@ def import_datasource(
session.add(datasource)
session.flush()
for m in i_datasource.metrics:
new_m = m.copy()
for metric in i_datasource.metrics:
new_m = metric.copy()
new_m.table_id = datasource.id
logging.info(
"Importing metric {} from the datasource: {}".format(
new_m.to_json(), i_datasource.full_name
)
"Importing metric %s from the datasource: %s",
new_m.to_json(),
i_datasource.full_name,
)
imported_m = i_datasource.metric_class.import_obj(new_m)
if imported_m.metric_name not in [m.metric_name for m in datasource.metrics]:
datasource.metrics.append(imported_m)
for c in i_datasource.columns:
new_c = c.copy()
for column in i_datasource.columns:
new_c = column.copy()
new_c.table_id = datasource.id
logging.info(
"Importing column {} from the datasource: {}".format(
new_c.to_json(), i_datasource.full_name
)
"Importing column %s from the datasource: %s",
new_c.to_json(),
i_datasource.full_name,
)
imported_c = i_datasource.column_class.import_obj(new_c)
if imported_c.column_name not in [c.column_name for c in datasource.columns]:

View File

@@ -14,7 +14,6 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=C,R,W
import functools
import inspect
import json
@@ -64,7 +63,7 @@ class AbstractEventLogger(ABC):
try:
explode_by = d.get("explode")
records = json.loads(d.get(explode_by))
except Exception:
except Exception: # pylint: disable=broad-except
records = [d]
referrer = request.referrer[:1000] if request.referrer else None
@@ -89,8 +88,9 @@ class AbstractEventLogger(ABC):
def get_event_logger_from_cfg_value(cfg_value: object) -> AbstractEventLogger:
"""
This function implements the deprecation of assignment of class objects to EVENT_LOGGER
configuration, and validates type of configured loggers.
This function implements the deprecation of assignment
of class objects to EVENT_LOGGER configuration, and validates
type of configured loggers.
The motivation for this method is to gracefully deprecate the ability to configure
EVENT_LOGGER with a class type, in favor of preconfigured instances which may have
@@ -105,10 +105,12 @@ def get_event_logger_from_cfg_value(cfg_value: object) -> AbstractEventLogger:
logging.warning(
textwrap.dedent(
"""
In superset private config, EVENT_LOGGER has been assigned a class object. In order to
accomodate pre-configured instances without a default constructor, assignment of a class
is deprecated and may no longer work at some point in the future. Please assign an object
instance of a type that implements superset.utils.log.AbstractEventLogger.
In superset private config, EVENT_LOGGER has been assigned a class
object. In order to accomodate pre-configured instances without a
default constructor, assignment of a class is deprecated and may no
longer work at some point in the future. Please assign an object
instance of a type that implements
superset.utils.log.AbstractEventLogger.
"""
)
)
@@ -119,7 +121,8 @@ def get_event_logger_from_cfg_value(cfg_value: object) -> AbstractEventLogger:
# Verify that we have a valid logger impl
if not isinstance(result, AbstractEventLogger):
raise TypeError(
"EVENT_LOGGER must be configured with a concrete instance of superset.utils.log.AbstractEventLogger."
"EVENT_LOGGER must be configured with a concrete instance"
"of superset.utils.log.AbstractEventLogger."
)
logging.info(f"Configured event logger of type {type(result)}")
@@ -127,7 +130,7 @@ def get_event_logger_from_cfg_value(cfg_value: object) -> AbstractEventLogger:
class DBEventLogger(AbstractEventLogger):
def log(self, user_id, action, *args, **kwargs):
def log(self, user_id, action, *args, **kwargs): # pylint: disable=too-many-locals
from superset.models.core import Log
records = kwargs.get("records", list())
@@ -140,7 +143,7 @@ class DBEventLogger(AbstractEventLogger):
for record in records:
try:
json_string = json.dumps(record)
except Exception:
except Exception: # pylint: disable=broad-except
json_string = None
log = Log(
action=action,