chore(pylint): Reenable import-outside-toplevel check (#16263)

Co-authored-by: John Bodley <john.bodley@airbnb.com>
Authored by John Bodley on 2021-08-16 10:20:13 -07:00; committed by GitHub
parent 36abc51f90
commit 0df15bf207
25 changed files with 83 additions and 51 deletions
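The pattern repeated across these files: the repo-wide exemption in .pylintrc is dropped, and each deliberately function-local import instead carries a # pylint: disable=import-outside-toplevel pragma scoped to the function that needs it (typically to break a circular import through the Flask app, or to defer an optional dependency). A minimal, self-contained sketch of the idiom — the function and module here are illustrative, not from the commit:

    def mean_of(values):
        """Compute a mean, importing the dependency lazily on first call."""
        # 'statistics' is cheap, but stands in for a heavy or circular
        # import; the pragma silences the re-enabled check only within
        # this function's scope.
        # pylint: disable=import-outside-toplevel
        import statistics

        return statistics.mean(values)

    print(mean_of([1, 2, 3]))  # 2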

View File

@@ -84,7 +84,6 @@ confidence=
 disable=
     missing-docstring,
     too-many-lines,
-    import-outside-toplevel,
     raise-missing-from,
     too-few-public-methods,
     duplicate-code,
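Deleting the entry from the disable list is what re-enables the check globally; the rest of the commit adds the scoped pragmas wherever a local import is intentional. To run just this check in isolation, a standard pylint invocation along these lines should work (path assumed):

    pylint --disable=all --enable=import-outside-toplevel superset/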

View File

@@ -30,7 +30,7 @@ combine_as_imports = true
 include_trailing_comma = true
 line_length = 88
 known_first_party = superset
-known_third_party =alembic,apispec,backoff,bleach,cachelib,celery,click,colorama,cron_descriptor,croniter,cryptography,dateutil,deprecation,flask,flask_appbuilder,flask_babel,flask_caching,flask_compress,flask_jwt_extended,flask_login,flask_migrate,flask_sqlalchemy,flask_talisman,flask_testing,flask_wtf,freezegun,geohash,geopy,graphlib,holidays,humanize,isodate,jinja2,jwt,markdown,markupsafe,marshmallow,marshmallow_enum,msgpack,numpy,pandas,parameterized,parsedatetime,pgsanity,pkg_resources,polyline,prison,progress,pyarrow,pyhive,pyparsing,pytest,pytest_mock,pytz,redis,requests,selenium,setuptools,simplejson,slack,sqlalchemy,sqlalchemy_utils,sqlparse,tabulate,typing_extensions,werkzeug,wtforms,wtforms_json,yaml
+known_third_party =alembic,apispec,backoff,bleach,cachelib,celery,click,colorama,cron_descriptor,croniter,cryptography,dateutil,deprecation,flask,flask_appbuilder,flask_babel,flask_caching,flask_compress,flask_jwt_extended,flask_login,flask_migrate,flask_sqlalchemy,flask_talisman,flask_testing,flask_wtf,freezegun,geohash,geopy,graphlib,holidays,humanize,isodate,jinja2,jwt,markdown,markupsafe,marshmallow,marshmallow_enum,msgpack,numpy,pandas,parameterized,parsedatetime,pgsanity,pkg_resources,polyline,prison,progress,pyarrow,pyhive,pyparsing,pytest,pytest_mock,pytz,redis,requests,selenium,setuptools,simplejson,slack,sqlalchemy,sqlalchemy_utils,sqlparse,tabulate,typing_extensions,urllib3,werkzeug,wtforms,wtforms_json,yaml
 multi_line_output = 3
 order_by_type = false

View File

@@ -17,6 +17,7 @@
 # under the License.
 import json
 import logging
+import os
 import sys
 from datetime import datetime, timedelta
 from pathlib import Path
@@ -26,11 +27,14 @@ from zipfile import is_zipfile, ZipFile
 import click
 import yaml
+from apispec import APISpec
+from apispec.ext.marshmallow import MarshmallowPlugin
 from celery.utils.abstract import CallableTask
 from colorama import Fore, Style
 from flask import current_app, g
 from flask.cli import FlaskGroup, with_appcontext
 from flask_appbuilder import Model
+from flask_appbuilder.api import BaseApi
 
 from superset import app, appbuilder, config, security_manager
 from superset.app import create_app
@@ -120,6 +124,7 @@ def load_examples_run(
     examples_db = utils.get_example_database()
     print(f"Loading examples metadata and related data into {examples_db}")
 
+    # pylint: disable=import-outside-toplevel
     from superset import examples
 
     examples.load_css_templates()
@@ -202,6 +207,7 @@ def load_examples(
 )
 def import_directory(directory: str, overwrite: bool, force: bool) -> None:
     """Imports configs from a given directory"""
+    # pylint: disable=import-outside-toplevel
     from superset.examples.utils import load_configs_from_directory
 
     load_configs_from_directory(
@@ -242,9 +248,11 @@ def set_database_uri(database_name: str, uri: str, skip_create: bool) -> None:
 )
 def refresh_druid(datasource: str, merge: bool) -> None:
     """Refresh druid datasources"""
-    session = db.session()
+    # pylint: disable=import-outside-toplevel
     from superset.connectors.druid.models import DruidCluster
 
+    session = db.session()
     for cluster in session.query(DruidCluster).all():
         try:
             cluster.refresh_datasources(datasource_name=datasource, merge_flag=merge)
@@ -265,6 +273,7 @@ if feature_flags.get("VERSIONED_EXPORT"):
     )
     def export_dashboards(dashboard_file: Optional[str] = None) -> None:
         """Export dashboards to ZIP file"""
+        # pylint: disable=import-outside-toplevel
         from superset.dashboards.commands.export import ExportDashboardsCommand
         from superset.models.dashboard import Dashboard
@@ -296,6 +305,7 @@ if feature_flags.get("VERSIONED_EXPORT"):
     )
     def export_datasources(datasource_file: Optional[str] = None) -> None:
         """Export datasources to ZIP file"""
+        # pylint: disable=import-outside-toplevel
         from superset.connectors.sqla.models import SqlaTable
         from superset.datasets.commands.export import ExportDatasetsCommand
@@ -330,6 +340,7 @@ if feature_flags.get("VERSIONED_EXPORT"):
     )
     def import_dashboards(path: str, username: Optional[str]) -> None:
         """Import dashboards from ZIP file"""
+        # pylint: disable=import-outside-toplevel
         from superset.commands.importers.v1.utils import get_contents_from_bundle
         from superset.dashboards.commands.importers.dispatcher import (
             ImportDashboardsCommand,
@@ -358,6 +369,7 @@ if feature_flags.get("VERSIONED_EXPORT"):
     )
     def import_datasources(path: str) -> None:
         """Import datasources from ZIP file"""
+        # pylint: disable=import-outside-toplevel
         from superset.commands.importers.v1.utils import get_contents_from_bundle
         from superset.datasets.commands.importers.dispatcher import (
             ImportDatasetsCommand,
@@ -396,6 +408,7 @@ else:
         dashboard_file: Optional[str], print_stdout: bool = False
     ) -> None:
         """Export dashboards to JSON"""
+        # pylint: disable=import-outside-toplevel
         from superset.utils import dashboard_import_export
 
         data = dashboard_import_export.export_dashboards(db.session)
@@ -443,6 +456,7 @@ else:
         include_defaults: bool = False,
     ) -> None:
         """Export datasources to YAML"""
+        # pylint: disable=import-outside-toplevel
         from superset.utils import dict_import_export
 
         data = dict_import_export.export_to_dict(
@@ -481,6 +495,7 @@ else:
     )
     def import_dashboards(path: str, recursive: bool, username: str) -> None:
         """Import dashboards from JSON file"""
+        # pylint: disable=import-outside-toplevel
         from superset.dashboards.commands.importers.v0 import ImportDashboardsCommand
 
         path_object = Path(path)
@@ -528,6 +543,7 @@ else:
     )
     def import_datasources(path: str, sync: str, recursive: bool) -> None:
         """Import datasources from YAML"""
+        # pylint: disable=import-outside-toplevel
         from superset.datasets.commands.importers.v0 import ImportDatasetsCommand
 
         sync_array = sync.split(",")
@@ -564,6 +580,7 @@ else:
     )
     def export_datasource_schema(back_references: bool) -> None:
         """Export datasource YAML schema to stdout"""
+        # pylint: disable=import-outside-toplevel
         from superset.utils import dict_import_export
 
         data = dict_import_export.export_schema_to_dict(back_references=back_references)
@@ -574,6 +591,7 @@
 @with_appcontext
 def update_datasources_cache() -> None:
     """Refresh sqllab datasources cache"""
+    # pylint: disable=import-outside-toplevel
     from superset.models.core import Database
 
     for database in db.session.query(Database).all():
@@ -678,6 +696,7 @@ def compute_thumbnails(
     model_id: int,
 ) -> None:
     """Compute thumbnails"""
+    # pylint: disable=import-outside-toplevel
     from superset.models.dashboard import Dashboard
     from superset.models.slice import Slice
     from superset.tasks.thumbnails import (
@@ -790,6 +809,7 @@ def sync_tags() -> None:
     # pylint: disable=no-member
     metadata = Model.metadata
 
+    # pylint: disable=import-outside-toplevel
     from superset.common.tags import add_favorites, add_owners, add_types
 
     add_types(db.engine, metadata)
@@ -802,6 +822,7 @@ def sync_tags() -> None:
 def alert() -> None:
     """Run the alert scheduler loop"""
     # this command is just for testing purposes
+    # pylint: disable=import-outside-toplevel
     from superset.models.schedules import ScheduleType
     from superset.tasks.schedules import schedule_window
@@ -820,13 +841,8 @@ def alert() -> None:
 @with_appcontext
 def update_api_docs() -> None:
     """Regenerate the openapi.json file in docs"""
-    from apispec import APISpec
-    from apispec.ext.marshmallow import MarshmallowPlugin
-    from flask_appbuilder.api import BaseApi
-    from os import path
-
-    superset_dir = path.abspath(path.dirname(__file__))
-    openapi_json = path.join(
+    superset_dir = os.path.abspath(os.path.dirname(__file__))
+    openapi_json = os.path.join(
         superset_dir, "..", "docs", "src", "resources", "openapi.json"
     )
     api_version = "v1"

View File

@@ -1648,6 +1648,7 @@ class SqlaTable(  # pylint: disable=too-many-public-methods,too-many-instance-attributes
         :raises Exception: If the target table is not unique
         """
+        # pylint: disable=import-outside-toplevel
         from superset.datasets.commands.exceptions import get_dataset_exist_error_msg
         from superset.datasets.dao import DatasetDAO

View File

@@ -275,6 +275,7 @@ def decode_dashboards(  # pylint: disable=too-many-return-statements
     Function to be passed into json.loads obj_hook parameter
     Recreates the dashboard object from a json representation.
     """
+    # pylint: disable=import-outside-toplevel
     from superset.connectors.druid.models import (
         DruidCluster,
         DruidColumn,

View File

@@ -324,6 +324,7 @@ class BigQueryEngineSpec(BaseEngineSpec):
         """
         try:
+            # pylint: disable=import-outside-toplevel
             import pandas_gbq
             from google.oauth2 import service_account
         except ImportError:
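Here the pragma sits inside the try block because the guarded imports are the point: pandas_gbq and google.oauth2 are optional extras, and the except branch reports their absence. A generic, runnable sketch of that optional-dependency idiom (the fast/stdlib pair is illustrative, not from the commit):

    def get_loads():
        try:
            # Optional speedup; fall back to the stdlib if it is absent.
            # pylint: disable=import-outside-toplevel
            import ujson as json
        except ImportError:
            import json  # pylint: disable=import-outside-toplevel
        return json.loads

    print(get_loads()('{"ok": true}'))  # {'ok': True}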

View File

@@ -17,6 +17,8 @@
 from datetime import datetime
 from typing import Dict, Optional, Type
 
+from urllib3.exceptions import NewConnectionError
+
 from superset.db_engine_specs.base import BaseEngineSpec
 from superset.db_engine_specs.exceptions import SupersetDBAPIDatabaseError
 from superset.utils import core as utils
@@ -48,8 +50,6 @@ class ClickHouseEngineSpec(BaseEngineSpec):  # pylint: disable=abstract-method
     @classmethod
     def get_dbapi_exception_mapping(cls) -> Dict[Type[Exception], Type[Exception]]:
-        from urllib3.exceptions import NewConnectionError
-
         return {NewConnectionError: SupersetDBAPIDatabaseError}
 
     @classmethod

View File

@@ -49,7 +49,8 @@ class ElasticSearchEngineSpec(BaseEngineSpec):  # pylint: disable=abstract-method
     @classmethod
     def get_dbapi_exception_mapping(cls) -> Dict[Type[Exception], Type[Exception]]:
-        import es.exceptions as es_exceptions  # pylint: disable=import-error
+        # pylint: disable=import-error,import-outside-toplevel
+        import es.exceptions as es_exceptions
 
         return {
             es_exceptions.DatabaseError: SupersetDBAPIDatabaseError,

View File

@@ -62,8 +62,8 @@ def upload_to_s3(filename: str, upload_prefix: str, table: Table) -> str:
     :returns: The S3 location of the table
     """
     # Optional dependency
-    import boto3  # pylint: disable=import-error
+    # pylint: disable=import-outside-toplevel
+    import boto3
 
     bucket_path = current_app.config["CSV_TO_HIVE_UPLOAD_S3_BUCKET"]
@@ -128,6 +128,7 @@ class HiveEngineSpec(PrestoEngineSpec):
     @classmethod
     def patch(cls) -> None:
+        # pylint: disable=import-outside-toplevel
         from pyhive import hive
         from TCLIService import (
             constants as patched_constants,
@@ -152,6 +153,7 @@
     def fetch_data(
         cls, cursor: Any, limit: Optional[int] = None
     ) -> List[Tuple[Any, ...]]:
+        # pylint: disable=import-outside-toplevel
         import pyhive
         from TCLIService import ttypes
@@ -314,6 +316,7 @@
         cls, cursor: Any, query: Query, session: Session
     ) -> None:
         """Updates progress information"""
+        # pylint: disable=import-outside-toplevel
         from pyhive import hive
 
         unfinished_states = (

View File

@@ -171,6 +171,7 @@ class MySQLEngineSpec(BaseEngineSpec, BasicParametersMixin):
     def get_datatype(cls, type_code: Any) -> Optional[str]:
         if not cls.type_code_map:
             # only import and store if needed at least once
+            # pylint: disable=import-outside-toplevel
             import MySQLdb
 
             ft = MySQLdb.constants.FIELD_TYPE

View File

@@ -925,6 +925,7 @@ class PrestoEngineSpec(BaseEngineSpec):  # pylint: disable=too-many-public-methods
         :param schema: Schema name
         :param table: Table (view) name
         """
+        # pylint: disable=import-outside-toplevel
         from pyhive.exc import DatabaseError
 
         engine = cls.get_engine(database, schema)

View File

@@ -35,6 +35,7 @@ def fetch_logs(
     .. note::
         This is not a part of DB-API.
     """
+    # pylint: disable=import-outside-toplevel
     from pyhive import hive
     from TCLIService import ttypes
     from thrift import Thrift

View File

@@ -26,6 +26,7 @@ from flask import Flask, redirect
 from flask_appbuilder import expose, IndexView
 from flask_babel import gettext as __, lazy_gettext as _
 from flask_compress import Compress
+from werkzeug.middleware.proxy_fix import ProxyFix
 
 from superset.connectors.connector_registry import ConnectorRegistry
 from superset.extensions import (
@@ -111,9 +112,7 @@ class SupersetAppInitializer:
         # models which in turn try to import
         # the global Flask app
         #
-        # pylint: disable=too-many-locals
-        # pylint: disable=too-many-statements
-        # pylint: disable=too-many-branches
+        # pylint: disable=import-outside-toplevel,too-many-branches,too-many-locals,too-many-statements
         from superset.annotation_layers.api import AnnotationLayerRestApi
         from superset.annotation_layers.annotations.api import AnnotationRestApi
         from superset.async_events.api import AsyncEventsRestApi
@@ -623,6 +622,7 @@ class SupersetAppInitializer:
         # Doing local imports here as model importing causes a reference to
         # app.config to be invoked and we need the current_app to have been setup
         #
+        # pylint: disable=import-outside-toplevel
         from superset.utils.url_map_converters import (
             ObjectTypeConverter,
             RegexConverter,
@@ -633,13 +633,12 @@ class SupersetAppInitializer:
     def configure_middlewares(self) -> None:
         if self.config["ENABLE_CORS"]:
+            # pylint: disable=import-outside-toplevel
             from flask_cors import CORS
 
             CORS(self.superset_app, **self.config["CORS_OPTIONS"])
 
         if self.config["ENABLE_PROXY_FIX"]:
-            from werkzeug.middleware.proxy_fix import ProxyFix
-
             self.superset_app.wsgi_app = ProxyFix(  # type: ignore
                 self.superset_app.wsgi_app, **self.config["PROXY_FIX_CONFIG"]
             )
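This file shows both directions of the cleanup: ProxyFix moves to the module top because werkzeug is an unconditional dependency, while flask_cors keeps its function-local import (now with a pragma) since it is only needed when ENABLE_CORS is set. A sketch of that dividing line — the config keys come from the diff, but the helper itself is illustrative:

    from werkzeug.middleware.proxy_fix import ProxyFix  # hard dependency: top level

    def configure_middlewares(app, config):
        if config.get("ENABLE_CORS"):
            # Optional extra, imported only when the feature is enabled.
            # pylint: disable=import-outside-toplevel
            from flask_cors import CORS

            CORS(app, **config.get("CORS_OPTIONS", {}))
        if config.get("ENABLE_PROXY_FIX"):
            app.wsgi_app = ProxyFix(app.wsgi_app, **config.get("PROXY_FIX_CONFIG", {}))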

View File

@@ -169,6 +169,7 @@ class ExtraCache:
         :returns: The URL parameters
         """
+        # pylint: disable=import-outside-toplevel
         from superset.views.utils import get_form_data
 
         if request.args.get(param):
@@ -284,6 +285,7 @@ class ExtraCache:
             only apply to the inner query
         :return: returns a list of filters
         """
+        # pylint: disable=import-outside-toplevel
         from superset.utils.core import FilterOperator
         from superset.views.utils import get_form_data
@@ -496,6 +498,7 @@ class PrestoTemplateProcessor(JinjaTemplateProcessor):
         :return: the latest partition array
         """
+        # pylint: disable=import-outside-toplevel
         from superset.db_engine_specs.presto import PrestoEngineSpec
 
         table_name, schema = self._schema_table(table_name, self._schema)
@@ -506,6 +509,7 @@ class PrestoTemplateProcessor(JinjaTemplateProcessor):
     def latest_sub_partition(self, table_name: str, **kwargs: Any) -> Any:
         table_name, schema = self._schema_table(table_name, self._schema)
+        # pylint: disable=import-outside-toplevel
         from superset.db_engine_specs.presto import PrestoEngineSpec
 
         return cast(

View File

@@ -14,6 +14,7 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
+import json
 import logging
 from datetime import datetime
 from typing import Any, Dict, List, Optional
@@ -138,7 +139,6 @@ class ReportScheduleDAO(BaseDAO):
         create a report schedule and nested recipients
         :raises: DAOCreateFailedError
         """
-        import json
 
         try:
             model = ReportSchedule()
@@ -171,7 +171,6 @@ class ReportScheduleDAO(BaseDAO):
         create a report schedule and nested recipients
         :raises: DAOCreateFailedError
         """
-        import json
 
         try:
             for key, value in properties.items():

View File

@@ -364,9 +364,7 @@ class SupersetSecurityManager(  # pylint: disable=too-many-public-methods
         :returns: The access URL
         """
-        from superset import conf
-
-        return conf.get("PERMISSION_INSTRUCTIONS_LINK")
+        return current_app.config.get("PERMISSION_INSTRUCTIONS_LINK")
 
     def get_datasource_access_error_object(  # pylint: disable=invalid-name
         self, datasource: "BaseDatasource"
@@ -428,9 +426,7 @@ class SupersetSecurityManager(  # pylint: disable=too-many-public-methods
         :returns: The access URL
         """
-        from superset import conf
-
-        return conf.get("PERMISSION_INSTRUCTIONS_LINK")
+        return current_app.config.get("PERMISSION_INSTRUCTIONS_LINK")
 
     def get_user_datasources(self) -> List["BaseDatasource"]:
         """
"""
@@ -528,6 +524,7 @@ class SupersetSecurityManager(  # pylint: disable=too-many-public-methods
         :returns: The list of accessible SQL schemas
         """
+        # pylint: disable=import-outside-toplevel
         from superset.connectors.sqla.models import SqlaTable
 
         if hierarchical and self.can_access_database(database):
@@ -629,6 +626,7 @@ class SupersetSecurityManager(  # pylint: disable=too-many-public-methods
         Creates missing FAB permissions for datasources, schemas and metrics.
         """
+        # pylint: disable=import-outside-toplevel
         from superset.models import core as models
 
         logger.info("Fetching a set of all perms to lookup which ones are missing")
@@ -678,8 +676,6 @@ class SupersetSecurityManager(  # pylint: disable=too-many-public-methods
         Initialize the Superset application with security roles and such.
         """
-        from superset import conf
-
         logger.info("Syncing role definition")
 
         self.create_custom_permissions()
@@ -692,9 +688,13 @@ class SupersetSecurityManager(  # pylint: disable=too-many-public-methods
         self.set_role("sql_lab", self._is_sql_lab_pvm)
 
         # Configure public role
-        if conf["PUBLIC_ROLE_LIKE"]:
-            self.copy_role(conf["PUBLIC_ROLE_LIKE"], self.auth_role_public, merge=True)
-        if conf.get("PUBLIC_ROLE_LIKE_GAMMA", False):
+        if current_app.config["PUBLIC_ROLE_LIKE"]:
+            self.copy_role(
+                current_app.config["PUBLIC_ROLE_LIKE"],
+                self.auth_role_public,
+                merge=True,
+            )
+        if current_app.config.get("PUBLIC_ROLE_LIKE_GAMMA", False):
             logger.warning(
                 "The config `PUBLIC_ROLE_LIKE_GAMMA` is deprecated and will be removed "
                 "in Superset 1.0. Please use `PUBLIC_ROLE_LIKE` instead."
@@ -996,7 +996,9 @@ class SupersetSecurityManager(  # pylint: disable=too-many-public-methods
         :raises SupersetSecurityException: If the user cannot access the resource
         """
+        # pylint: disable=import-outside-toplevel
         from superset.connectors.sqla.models import SqlaTable
+        from superset.extensions import feature_flag_manager
         from superset.sql_parse import Table
 
         if database and table or query:
@@ -1046,8 +1048,6 @@ class SupersetSecurityManager(  # pylint: disable=too-many-public-methods
             assert datasource
 
-            from superset.extensions import feature_flag_manager
-
             if not (
                 self.can_access_schema(datasource)
                 or self.can_access("datasource_access", datasource.perm or "")
@@ -1087,6 +1087,7 @@ class SupersetSecurityManager(  # pylint: disable=too-many-public-methods
         :returns: A list of filters
         """
         if hasattr(g, "user") and hasattr(g.user, "id"):
+            # pylint: disable=import-outside-toplevel
             from superset.connectors.sqla.models import (
                 RLSFilterRoles,
                 RLSFilterTables,
@@ -1168,6 +1169,7 @@ class SupersetSecurityManager(  # pylint: disable=too-many-public-methods
         :param dashboard: Dashboard the user wants access to
         :raises DashboardAccessDeniedError: If the user cannot access the resource
         """
+        # pylint: disable=import-outside-toplevel
         from superset.dashboards.commands.exceptions import DashboardAccessDeniedError
         from superset.views.base import get_user_roles, is_user_admin
         from superset.views.utils import is_owner
@@ -1189,6 +1191,7 @@ class SupersetSecurityManager(  # pylint: disable=too-many-public-methods
     @staticmethod
     def can_access_based_on_dashboard(datasource: "BaseDatasource") -> bool:
+        # pylint: disable=import-outside-toplevel
         from superset import db
         from superset.dashboards.filters import DashboardAccessFilter
         from superset.models.slice import Slice

View File

@@ -65,6 +65,7 @@ class PrestoDBSQLValidator(BaseSQLValidator):
         # engine spec's handle_cursor implementation since we don't record
         # these EXPLAIN queries done in validation as proper Query objects
         # in the superset ORM.
+        # pylint: disable=import-outside-toplevel
         from pyhive.exc import DatabaseError
 
         try:

View File

@@ -50,6 +50,7 @@ def ensure_user_is_set(user_id: Optional[int]) -> None:
 def load_chart_data_into_cache(
     job_metadata: Dict[str, Any], form_data: Dict[str, Any],
 ) -> None:
+    # pylint: disable=import-outside-toplevel
     from superset.charts.commands.data import ChartDataCommand
 
     try:

View File

@@ -1123,9 +1123,7 @@ def merge_extra_form_data(form_data: Dict[str, Any]) -> None:
     )
     if append_filters:
         adhoc_filters.extend(
-            simple_filter_to_adhoc(
-                {"isExtra": True, **fltr}  # type: ignore
-            )
+            simple_filter_to_adhoc({"isExtra": True, **fltr})  # type: ignore
             for fltr in append_filters
             if fltr
         )
@@ -1239,6 +1237,7 @@ def user_label(user: User) -> Optional[str]:
 def get_or_create_db(
     database_name: str, sqlalchemy_uri: str, always_create: Optional[bool] = True
 ) -> "Database":
+    # pylint: disable=import-outside-toplevel
     from superset import db
     from superset.models import core as models
@@ -1266,16 +1265,15 @@
 def get_example_database() -> "Database":
-    from superset import conf
-
-    db_uri = conf.get("SQLALCHEMY_EXAMPLES_URI") or conf.get("SQLALCHEMY_DATABASE_URI")
+    db_uri = (
+        current_app.config.get("SQLALCHEMY_EXAMPLES_URI")
+        or current_app.config["SQLALCHEMY_DATABASE_URI"]
+    )
     return get_or_create_db("examples", db_uri)
 
 
 def get_main_database() -> "Database":
-    from superset import conf
-
-    db_uri = conf.get("SQLALCHEMY_DATABASE_URI")
+    db_uri = current_app.config["SQLALCHEMY_DATABASE_URI"]
     return get_or_create_db("main", db_uri)
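Alongside the pragma work, these helpers stop importing the module-level conf proxy from superset and read current_app.config directly, which removes the local import entirely and defers the config lookup to app-context time. A minimal sketch of the access pattern (standard Flask API; the app set up here is hypothetical):

    from flask import Flask, current_app

    app = Flask(__name__)
    app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite://"

    def get_main_uri() -> str:
        # current_app only resolves inside an app context;
        # outside one this raises RuntimeError.
        return current_app.config["SQLALCHEMY_DATABASE_URI"]

    with app.app_context():
        print(get_main_uri())  # sqlite://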

View File

@@ -91,6 +91,7 @@ def check_dashboard_access(
     def decorator(f: Callable[..., Any]) -> Callable[..., Any]:
         @wraps(f)
         def wrapper(self: Any, *args: Any, **kwargs: Any) -> Any:
+            # pylint: disable=import-outside-toplevel
             from superset.models.dashboard import Dashboard
 
             dashboard = Dashboard.get(str(kwargs["dashboard_id_or_slug"]))

View File

@@ -125,6 +125,7 @@ class AbstractEventLogger(ABC):
         log_to_statsd: bool = True,
         **payload_override: Optional[Dict[str, Any]],
     ) -> None:
+        # pylint: disable=import-outside-toplevel
         from superset.views.core import get_form_data
 
         referrer = request.referrer[:1000] if request and request.referrer else None
@@ -321,6 +322,7 @@ class DBEventLogger(AbstractEventLogger):
         *args: Any,
         **kwargs: Any,
     ) -> None:
+        # pylint: disable=import-outside-toplevel
         from superset.models.core import Log
 
         records = kwargs.get("records", [])

View File

@@ -179,6 +179,7 @@ def add_data(
     :param str table_name: name of table, will be created if it doesn't exist
     :param bool append: if the table already exists, append data or replace?
     """
+    # pylint: disable=import-outside-toplevel
     from superset.utils.core import get_example_database
 
     database = get_example_database()

View File

@@ -714,11 +714,11 @@ def _prophet_fit_and_predict(  # pylint: disable=too-many-arguments
     Fit a prophet model and return a DataFrame with predicted results.
     """
     try:
+        # pylint: disable=import-error,import-outside-toplevel
+        from prophet import Prophet
+
         prophet_logger = logging.getLogger("prophet.plot")
         prophet_logger.setLevel(logging.CRITICAL)
-        from prophet import Prophet  # pylint: disable=import-error
-
         prophet_logger.setLevel(logging.NOTSET)
     except ModuleNotFoundError:
         raise QueryObjectValidationError(_("`prophet` package not installed"))

View File

@@ -21,7 +21,7 @@ from unittest import mock
 from werkzeug.wrappers import Request, Response
 
 try:
-    # pylint: disable=import-error
+    # pylint: disable=import-error,import-outside-toplevel
     from pyinstrument import Profiler
 except ModuleNotFoundError:
     Profiler = None

View File

@@ -21,6 +21,7 @@ from time import sleep
 from typing import Any, Dict, Optional, Tuple, TYPE_CHECKING
 
 from flask import current_app
+from requests.models import PreparedRequest
 from selenium.common.exceptions import (
     StaleElementReferenceException,
     TimeoutException,
@@ -103,9 +104,6 @@
     def get_screenshot(
         self, url: str, element_name: str, user: "User",
     ) -> Optional[bytes]:
-        from requests.models import PreparedRequest
-
         params = {"standalone": DashboardStandaloneMode.REPORT.value}
         req = PreparedRequest()
         req.prepare_url(url, params)