Re-enable pylint for superset/utils folder (#8766)

* re-enable pylint for superset/utils/import_datasource.py

* re-enable pylint for superset/utils/cache.py

* re-enable pylint for superset/utils/log.py
This commit is contained in:
Will Barrett 2019-12-06 14:34:58 -08:00 committed by Maxime Beauchemin
parent 60892ae45d
commit ed3944ebef
3 changed files with 27 additions and 29 deletions

View File

@@ -14,16 +14,12 @@
# KIND, either express or implied. See the License for the # KIND, either express or implied. See the License for the
# specific language governing permissions and limitations # specific language governing permissions and limitations
# under the License. # under the License.
# pylint: disable=C,R,W from flask import request
from typing import Optional
from flask import Flask, request
from flask_caching import Cache
from superset.extensions import cache_manager from superset.extensions import cache_manager
def view_cache_key(*unused_args, **unused_kwargs) -> str: def view_cache_key(*_, **__) -> str:
args_hash = hash(frozenset(request.args.items())) args_hash = hash(frozenset(request.args.items()))
return "view/{}/{}".format(request.path, args_hash) return "view/{}/{}".format(request.path, args_hash)

View File

@@ -14,7 +14,6 @@
# KIND, either express or implied. See the License for the # KIND, either express or implied. See the License for the
# specific language governing permissions and limitations # specific language governing permissions and limitations
# under the License. # under the License.
# pylint: disable=C,R,W
import logging import logging
from sqlalchemy.orm.session import make_transient from sqlalchemy.orm.session import make_transient
@@ -30,7 +29,7 @@ def import_datasource(
superset instances. Audit metadata isn't copied over. superset instances. Audit metadata isn't copied over.
""" """
make_transient(i_datasource) make_transient(i_datasource)
logging.info("Started import of the datasource: {}".format(i_datasource.to_json())) logging.info("Started import of the datasource: %s", i_datasource.to_json())
i_datasource.id = None i_datasource.id = None
i_datasource.database_id = lookup_database(i_datasource).id i_datasource.database_id = lookup_database(i_datasource).id
@@ -47,25 +46,25 @@ def import_datasource(
session.add(datasource) session.add(datasource)
session.flush() session.flush()
for m in i_datasource.metrics: for metric in i_datasource.metrics:
new_m = m.copy() new_m = metric.copy()
new_m.table_id = datasource.id new_m.table_id = datasource.id
logging.info( logging.info(
"Importing metric {} from the datasource: {}".format( "Importing metric %s from the datasource: %s",
new_m.to_json(), i_datasource.full_name new_m.to_json(),
) i_datasource.full_name,
) )
imported_m = i_datasource.metric_class.import_obj(new_m) imported_m = i_datasource.metric_class.import_obj(new_m)
if imported_m.metric_name not in [m.metric_name for m in datasource.metrics]: if imported_m.metric_name not in [m.metric_name for m in datasource.metrics]:
datasource.metrics.append(imported_m) datasource.metrics.append(imported_m)
for c in i_datasource.columns: for column in i_datasource.columns:
new_c = c.copy() new_c = column.copy()
new_c.table_id = datasource.id new_c.table_id = datasource.id
logging.info( logging.info(
"Importing column {} from the datasource: {}".format( "Importing column %s from the datasource: %s",
new_c.to_json(), i_datasource.full_name new_c.to_json(),
) i_datasource.full_name,
) )
imported_c = i_datasource.column_class.import_obj(new_c) imported_c = i_datasource.column_class.import_obj(new_c)
if imported_c.column_name not in [c.column_name for c in datasource.columns]: if imported_c.column_name not in [c.column_name for c in datasource.columns]:

View File

@@ -14,7 +14,6 @@
# KIND, either express or implied. See the License for the # KIND, either express or implied. See the License for the
# specific language governing permissions and limitations # specific language governing permissions and limitations
# under the License. # under the License.
# pylint: disable=C,R,W
import functools import functools
import inspect import inspect
import json import json
@@ -64,7 +63,7 @@ class AbstractEventLogger(ABC):
try: try:
explode_by = d.get("explode") explode_by = d.get("explode")
records = json.loads(d.get(explode_by)) records = json.loads(d.get(explode_by))
except Exception: except Exception: # pylint: disable=broad-except
records = [d] records = [d]
referrer = request.referrer[:1000] if request.referrer else None referrer = request.referrer[:1000] if request.referrer else None
@@ -89,8 +88,9 @@
def get_event_logger_from_cfg_value(cfg_value: object) -> AbstractEventLogger: def get_event_logger_from_cfg_value(cfg_value: object) -> AbstractEventLogger:
""" """
This function implements the deprecation of assignment of class objects to EVENT_LOGGER This function implements the deprecation of assignment
configuration, and validates type of configured loggers. of class objects to EVENT_LOGGER configuration, and validates
type of configured loggers.
The motivation for this method is to gracefully deprecate the ability to configure The motivation for this method is to gracefully deprecate the ability to configure
EVENT_LOGGER with a class type, in favor of preconfigured instances which may have EVENT_LOGGER with a class type, in favor of preconfigured instances which may have
@@ -105,10 +105,12 @@ def get_event_logger_from_cfg_value(cfg_value: object) -> AbstractEventLogger:
logging.warning( logging.warning(
textwrap.dedent( textwrap.dedent(
""" """
In superset private config, EVENT_LOGGER has been assigned a class object. In order to In superset private config, EVENT_LOGGER has been assigned a class
accommodate pre-configured instances without a default constructor, assignment of a class object. In order to accommodate pre-configured instances without a
is deprecated and may no longer work at some point in the future. Please assign an object default constructor, assignment of a class is deprecated and may no
instance of a type that implements superset.utils.log.AbstractEventLogger. longer work at some point in the future. Please assign an object
instance of a type that implements
superset.utils.log.AbstractEventLogger.
""" """
) )
) )
@@ -119,7 +121,8 @@ def get_event_logger_from_cfg_value(cfg_value: object) -> AbstractEventLogger:
# Verify that we have a valid logger impl # Verify that we have a valid logger impl
if not isinstance(result, AbstractEventLogger): if not isinstance(result, AbstractEventLogger):
raise TypeError( raise TypeError(
"EVENT_LOGGER must be configured with a concrete instance of superset.utils.log.AbstractEventLogger." "EVENT_LOGGER must be configured with a concrete instance"
"of superset.utils.log.AbstractEventLogger."
) )
logging.info(f"Configured event logger of type {type(result)}") logging.info(f"Configured event logger of type {type(result)}")
@@ -127,7 +130,7 @@
class DBEventLogger(AbstractEventLogger): class DBEventLogger(AbstractEventLogger):
def log(self, user_id, action, *args, **kwargs): def log(self, user_id, action, *args, **kwargs): # pylint: disable=too-many-locals
from superset.models.core import Log from superset.models.core import Log
records = kwargs.get("records", list()) records = kwargs.get("records", list())
@@ -140,7 +143,7 @@ class DBEventLogger(AbstractEventLogger):
for record in records: for record in records:
try: try:
json_string = json.dumps(record) json_string = json.dumps(record)
except Exception: except Exception: # pylint: disable=broad-except
json_string = None json_string = None
log = Log( log = Log(
action=action, action=action,