chore: set up ruff as a new linter/formatter (#28158)

Maxime Beauchemin 2024-04-24 17:19:53 -07:00 committed by GitHub
parent e8a678b75a
commit 2d63722150
579 changed files with 2508 additions and 2542 deletions

View File

@ -19,25 +19,6 @@ repos:
rev: v0.2.2
hooks:
- id: auto-walrus
- repo: https://github.com/asottile/pyupgrade
rev: v3.4.0
hooks:
- id: pyupgrade
exclude: scripts/change_detector.py
args:
- --py39-plus
- repo: https://github.com/hadialqattan/pycln
rev: v2.1.2
hooks:
- id: pycln
args:
- --disable-all-dunder-policy
- --exclude=superset/config.py
- --extend-exclude=tests/integration_tests/superset_test_config.*.py
- repo: https://github.com/PyCQA/isort
rev: 5.12.0
hooks:
- id: isort
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.3.0
hooks:
@ -73,11 +54,6 @@ repos:
- id: end-of-file-fixer
- id: trailing-whitespace
args: ["--markdown-linebreak-ext=md"]
- repo: https://github.com/psf/black
rev: 23.1.0
hooks:
- id: black
language_version: python3
- repo: https://github.com/pre-commit/mirrors-prettier
rev: v3.1.0 # Use the sha or tag you want to point at
hooks:
@ -95,3 +71,9 @@ repos:
hooks:
- id: helm-docs
files: helm
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.4.0
hooks:
- id: ruff
args: [ --fix ]
- id: ruff-format

View File

@ -77,6 +77,7 @@ disable=
cyclic-import, # re-enable once this no longer raises false positives
missing-docstring,
duplicate-code,
line-too-long,
unspecified-encoding,
too-many-instance-attributes # re-enable once this no longer raises false positives
@ -171,7 +172,7 @@ max-nested-blocks=5
[FORMAT]
# Maximum number of characters on a single line.
max-line-length=90
max-line-length=100
# Regexp for a line that is allowed to be longer than the limit.
ignore-long-lines=^\s*(# )?<?https?://\S+>?$

View File

@ -94,10 +94,10 @@ class GitChangeLog:
if not pull_request:
pull_request = github_repo.get_pull(pr_number)
self._github_prs[pr_number] = pull_request
except BadCredentialsException as ex:
except BadCredentialsException:
print(
f"Bad credentials to github provided"
f" use access_token parameter or set GITHUB_TOKEN"
"Bad credentials to github provided"
" use access_token parameter or set GITHUB_TOKEN"
)
sys.exit(1)
@ -167,8 +167,8 @@ class GitChangeLog:
def _get_changelog_version_head(self) -> str:
if not len(self._logs):
print(
f"No changes found between revisions. "
f"Make sure your branch is up to date."
"No changes found between revisions. "
"Make sure your branch is up to date."
)
sys.exit(1)
return f"### {self._version} ({self._logs[0].time})"

View File

@ -191,6 +191,7 @@ development = [
"pytest-cov",
"pytest-mock",
"python-ldap>=3.4.4",
"ruff",
"sqloxide",
"statsd",
"tox",
@ -267,7 +268,6 @@ usedevelop = true
allowlist_externals =
npm
pkill
{toxinidir}/superset-frontend/cypress_build.sh
[testenv:cypress]
setenv =
@ -393,3 +393,81 @@ envlist =
pylint
skipsdist = true
"""
[tool.ruff]
# Exclude a variety of commonly ignored directories.
exclude = [
".bzr",
".direnv",
".eggs",
".git",
".git-rewrite",
".hg",
".ipynb_checkpoints",
".mypy_cache",
".nox",
".pants.d",
".pyenv",
".pytest_cache",
".pytype",
".ruff_cache",
".svn",
".tox",
".venv",
".vscode",
"__pypackages__",
"_build",
"buck-out",
"build",
"dist",
"node_modules",
"site-packages",
"venv",
]
# Same as Black.
line-length = 88
indent-width = 4
# Assume Python 3.10
target-version = "py310"
[tool.ruff.lint]
# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default.
# Unlike Flake8, Ruff doesn't enable pycodestyle warnings (`W`) or
# McCabe complexity (`C901`) by default.
select = ["E4", "E7", "E9", "F"]
ignore = []
# Allow fixes for all enabled rules (when `--fix` is provided).
fixable = ["ALL"]
unfixable = []
# Allow unused variables when underscore-prefixed.
dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
[tool.ruff.format]
# Like Black, use double quotes for strings.
quote-style = "double"
# Like Black, indent with spaces, rather than tabs.
indent-style = "space"
# Like Black, respect magic trailing commas.
skip-magic-trailing-comma = false
# Like Black, automatically detect the appropriate line ending.
line-ending = "auto"
# Enable auto-formatting of code examples in docstrings. Markdown,
# reStructuredText code/literal blocks and doctests are all supported.
#
# This is currently disabled by default, but it is planned for this
# to be opt-out in the future.
docstring-code-format = false
# Set the line length limit used when formatting code snippets in
# docstrings.
#
# This only has an effect when the `docstring-code-format` setting is
# enabled.
docstring-code-line-length = "dynamic"
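
For orientation, here is a minimal hypothetical Python snippet (not from the Superset codebase) illustrating what the rule groups selected above catch, and how the `dummy-variable-rgx` exemption behaves:

# Hypothetical module illustrating the selected rule groups (E4/E7/E9/F).
import json  # F401 (group F): imported but unused; `ruff --fix` removes the line


def first_truthy(l):  # E741 (group E7): ambiguous variable name `l` -- the
    # reason for the many `# noqa: E741` markers added in this commit
    _tmp = "scratch"  # unused, but exempt: matches dummy-variable-rgx above
    return next((x for x in l if x), None)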

View File

@ -10,6 +10,8 @@
# via
# -r requirements/base.in
# -r requirements/development.in
appnope==0.1.4
# via ipython
astroid==3.1.0
# via pylint
asttokens==2.2.1
@ -239,6 +241,8 @@ rfc3339-validator==0.1.4
# via openapi-schema-validator
rfc3986==2.0.0
# via tableschema
ruff==0.4.0
# via apache-superset
s3transfer==0.6.1
# via boto3
sqlalchemy-bigquery==1.10.0

View File

@ -82,7 +82,7 @@ def is_latest_release(release: str) -> bool:
return "SKIP_TAG::false" in output
def make_docker_tag(l: list[str]) -> str:
def make_docker_tag(l: list[str]) -> str: # noqa: E741
return f"{REPO}:" + "-".join([o for o in l if o])
@ -140,7 +140,7 @@ def get_docker_command(
build_context_ref: str,
force_latest: bool = False,
) -> str:
tag = ""
tag = "" # noqa: F841
build_target = ""
py_ver = BASE_PY_IMAGE
docker_context = "."
@ -284,7 +284,7 @@ def main(
script = script + docker_build_command
if verbose:
run_cmd("cat Dockerfile")
stdout = run_cmd(script)
stdout = run_cmd(script) # noqa: F841
else:
print("Dry Run - Docker Build Command:")
print(docker_build_command)

View File

@ -32,6 +32,7 @@ Example:
# cancel all jobs of a PR, including the latest runs
./cancel_github_workflows.py 1024 --include-last
"""
import os
from collections.abc import Iterable, Iterator
from typing import Any, Literal, Optional, Union

View File

@ -21,7 +21,6 @@ import os
import re
import subprocess
from typing import List
from urllib.error import HTTPError, URLError
from urllib.request import Request, urlopen
# Define patterns for each group of files you're interested in
@ -102,12 +101,12 @@ def main(event_type: str, sha: str, repo: str) -> None:
if event_type == "pull_request":
pr_number = os.getenv("GITHUB_REF", "").split("/")[-2]
files = fetch_changed_files_pr(repo, pr_number)
print(f"PR files:")
print("PR files:")
print_files(files)
elif event_type == "push":
files = fetch_changed_files_push(repo, sha)
print(f"Files touched since previous commit:")
print("Files touched since previous commit:")
print_files(files)
else:
raise ValueError("Unsupported event type")

View File

@ -19,6 +19,7 @@ This module contains utilities to auto-generate an
Entity-Relationship Diagram (ERD) from SQLAlchemy
and onto a plantuml file.
"""
import json
import os
from collections import defaultdict
@ -27,9 +28,8 @@ from typing import Any, Optional
import click
import jinja2
from flask.cli import FlaskGroup, with_appcontext
from superset import app, db
from superset import db
GROUPINGS: dict[str, Iterable[str]] = {
"Core": [

View File

@ -19,19 +19,19 @@
from flask import current_app, Flask
from werkzeug.local import LocalProxy
from superset.app import create_app
from superset.app import create_app # noqa: F401
from superset.extensions import (
appbuilder,
appbuilder, # noqa: F401
cache_manager,
db,
event_logger,
db, # noqa: F401
event_logger, # noqa: F401
feature_flag_manager,
manifest_processor,
results_backend_manager,
security_manager,
talisman,
security_manager, # noqa: F401
talisman, # noqa: F401
)
from superset.security import SupersetSecurityManager
from superset.security import SupersetSecurityManager # noqa: F401
# All of the fields located here should be considered legacy. The correct way
# to declare "global" dependencies is to define it in extensions.py,

View File

@ -88,9 +88,9 @@ def port_translation_func(req: AdvancedDataTypeRequest) -> AdvancedDataTypeRespo
else port_conversion_dict[string_value]
)
except (KeyError, ValueError):
resp[
"error_message"
] = f"'{string_value}' does not appear to be a port name or number"
resp["error_message"] = (
f"'{string_value}' does not appear to be a port name or number"
)
break
else:
resp["display_value"] = ", ".join(

View File

@ -17,6 +17,7 @@
"""
Schemas for advanced data types
"""
from marshmallow import fields, Schema
advanced_data_type_convert_schema = {

View File

@ -24,9 +24,7 @@ from superset.models.annotations import AnnotationLayer
from superset.views.base import BaseFilter
class AnnotationLayerAllTextFilter(
BaseFilter
): # pylint: disable=too-few-public-methods
class AnnotationLayerAllTextFilter(BaseFilter): # pylint: disable=too-few-public-methods
name = _("All Text")
arg_name = "annotation_layer_all_text"

View File

@ -109,10 +109,8 @@ class CacheRestApi(BaseSupersetModelRestApi):
)
try:
delete_stmt = (
CacheKey.__table__.delete().where( # pylint: disable=no-member
CacheKey.cache_key.in_(cache_keys)
)
delete_stmt = CacheKey.__table__.delete().where( # pylint: disable=no-member
CacheKey.cache_key.in_(cache_keys)
)
db.session.execute(delete_stmt)
db.session.commit()

View File

@ -130,9 +130,7 @@ class ChartCreatedByMeFilter(BaseFilter): # pylint: disable=too-few-public-meth
)
class ChartOwnedCreatedFavoredByMeFilter(
BaseFilter
): # pylint: disable=too-few-public-methods
class ChartOwnedCreatedFavoredByMeFilter(BaseFilter): # pylint: disable=too-few-public-methods
"""
Custom filter for the GET chart that filters all charts the user
owns, created, changed or favored.

View File

@ -23,6 +23,7 @@ tables, metrics, and datasets were also introduced.
These models are not fully implemented, and shouldn't be used yet.
"""
import sqlalchemy as sa
from flask_appbuilder import Model

View File

@ -78,9 +78,10 @@ class ExportChartsCommand(ExportModelsCommand):
def _export(
model: Slice, export_related: bool = True
) -> Iterator[tuple[str, Callable[[], str]]]:
yield ExportChartsCommand._file_name(
model
), lambda: ExportChartsCommand._file_content(model)
yield (
ExportChartsCommand._file_name(model),
lambda: ExportChartsCommand._file_content(model),
)
if model.table and export_related:
yield from ExportDatasetsCommand([model.table.id]).run()

View File

@ -18,7 +18,7 @@
from typing import Any
from marshmallow import Schema
from sqlalchemy.orm import Session
from sqlalchemy.orm import Session # noqa: F401
from superset.charts.schemas import ImportV1ChartSchema
from superset.commands.chart.exceptions import ChartImportError
@ -33,7 +33,6 @@ from superset.datasets.schemas import ImportV1DatasetSchema
class ImportChartsCommand(ImportModelsCommand):
"""Import charts"""
dao = ChartDAO

View File

@ -154,9 +154,10 @@ class ExportDashboardsCommand(ExportModelsCommand):
def _export(
model: Dashboard, export_related: bool = True
) -> Iterator[tuple[str, Callable[[], str]]]:
yield ExportDashboardsCommand._file_name(
model
), lambda: ExportDashboardsCommand._file_content(model)
yield (
ExportDashboardsCommand._file_name(model),
lambda: ExportDashboardsCommand._file_content(model),
)
if export_related:
chart_ids = [chart.id for chart in model.slices]

View File

@ -18,7 +18,7 @@
from typing import Any
from marshmallow import Schema
from sqlalchemy.orm import Session
from sqlalchemy.orm import Session # noqa: F401
from sqlalchemy.sql import select
from superset import db
@ -43,7 +43,6 @@ from superset.models.dashboard import Dashboard, dashboard_slices
class ImportDashboardsCommand(ImportModelsCommand):
"""Import dashboards"""
dao = DashboardDAO
@ -115,7 +114,6 @@ class ImportDashboardsCommand(ImportModelsCommand):
# update datasource id, type, and name
dataset_dict = dataset_info[config["dataset_uuid"]]
config.update(dataset_dict)
# pylint: disable=line-too-long
dataset_uid = f"{dataset_dict['datasource_id']}__{dataset_dict['datasource_type']}"
config["params"].update({"datasource": dataset_uid})
if "query_context" in config:

View File

@ -19,7 +19,6 @@ from typing import Any, Optional
from flask import current_app
from flask_appbuilder.models.sqla import Model
from flask_babel import gettext as _
from marshmallow import ValidationError
from superset import is_feature_enabled

View File

@ -106,9 +106,10 @@ class ExportDatabasesCommand(ExportModelsCommand):
def _export(
model: Database, export_related: bool = True
) -> Iterator[tuple[str, Callable[[], str]]]:
yield ExportDatabasesCommand._file_name(
model
), lambda: ExportDatabasesCommand._file_content(model)
yield (
ExportDatabasesCommand._file_name(model),
lambda: ExportDatabasesCommand._file_content(model),
)
if export_related:
db_file_name = get_filename(model.database_name, model.id, skip_id=True)
@ -127,6 +128,9 @@ class ExportDatabasesCommand(ExportModelsCommand):
payload["version"] = EXPORT_VERSION
payload["database_uuid"] = str(model.uuid)
yield file_path, functools.partial( # type: ignore
yaml.safe_dump, payload, sort_keys=False
yield (
file_path,
functools.partial( # type: ignore
yaml.safe_dump, payload, sort_keys=False
),
)

View File

@ -18,7 +18,7 @@
from typing import Any
from marshmallow import Schema
from sqlalchemy.orm import Session
from sqlalchemy.orm import Session # noqa: F401
from superset.commands.database.exceptions import DatabaseImportError
from superset.commands.database.importers.v1.utils import import_database
@ -30,7 +30,6 @@ from superset.datasets.schemas import ImportV1DatasetSchema
class ImportDatabasesCommand(ImportModelsCommand):
"""Import databases"""
dao = DatabaseDAO

View File

@ -18,7 +18,6 @@ import logging
from typing import Any, Optional
from flask_appbuilder.models.sqla import Model
from flask_babel import gettext as _
from marshmallow import ValidationError
from superset import is_feature_enabled
@ -69,11 +68,11 @@ class UpdateDatabaseCommand(BaseCommand):
old_database_name = self._model.database_name
# unmask ``encrypted_extra``
self._properties[
"encrypted_extra"
] = self._model.db_engine_spec.unmask_encrypted_extra(
self._model.encrypted_extra,
self._properties.pop("masked_encrypted_extra", "{}"),
self._properties["encrypted_extra"] = (
self._model.db_engine_spec.unmask_encrypted_extra(
self._model.encrypted_extra,
self._properties.pop("masked_encrypted_extra", "{}"),
)
)
try:

View File

@ -61,8 +61,7 @@ class BaseDataReader:
self._options = options
@abstractmethod
def file_to_dataframe(self, file: Any) -> pd.DataFrame:
...
def file_to_dataframe(self, file: Any) -> pd.DataFrame: ...
def read(
self, file: Any, database: Database, table_name: str, schema_name: Optional[str]

View File

@ -85,9 +85,10 @@ class ExportDatasetsCommand(ExportModelsCommand):
def _export(
model: SqlaTable, export_related: bool = True
) -> Iterator[tuple[str, Callable[[], str]]]:
yield ExportDatasetsCommand._file_name(
model
), lambda: ExportDatasetsCommand._file_content(model)
yield (
ExportDatasetsCommand._file_name(model),
lambda: ExportDatasetsCommand._file_content(model),
)
# include database as well
if export_related:

View File

@ -18,7 +18,7 @@
from typing import Any
from marshmallow import Schema
from sqlalchemy.orm import Session
from sqlalchemy.orm import Session # noqa: F401
from superset.commands.database.importers.v1.utils import import_database
from superset.commands.dataset.exceptions import DatasetImportError
@ -30,7 +30,6 @@ from superset.datasets.schemas import ImportV1DatasetSchema
class ImportDatasetsCommand(ImportModelsCommand):
"""Import datasets"""
dao = DatasetDAO

View File

@ -87,9 +87,9 @@ class GetExploreCommand(BaseCommand, ABC):
"Form data not found in cache, reverting to chart metadata."
)
elif self._datasource_id:
initial_form_data[
"datasource"
] = f"{self._datasource_id}__{self._datasource_type}"
initial_form_data["datasource"] = (
f"{self._datasource_id}__{self._datasource_type}"
)
if self._form_data_key:
message = _(
"Form data not found in cache, reverting to dataset metadata."

View File

@ -16,9 +16,9 @@
# under the License.
from typing import Any, Optional
from marshmallow import Schema, validate
from marshmallow import Schema, validate # noqa: F401
from marshmallow.exceptions import ValidationError
from sqlalchemy.orm import Session
from sqlalchemy.orm import Session # noqa: F401
from superset import db
from superset.commands.base import BaseCommand
@ -26,12 +26,12 @@ from superset.commands.exceptions import CommandException, CommandInvalidError
from superset.commands.importers.v1.utils import (
load_configs,
load_metadata,
load_yaml,
METADATA_FILE_NAME,
load_yaml, # noqa: F401
METADATA_FILE_NAME, # noqa: F401
validate_metadata_type,
)
from superset.daos.base import BaseDAO
from superset.models.core import Database
from superset.models.core import Database # noqa: F401
class ImportModelsCommand(BaseCommand):
@ -81,7 +81,7 @@ class ImportModelsCommand(BaseCommand):
db.session.rollback()
raise self.import_error() from ex
def validate(self) -> None:
def validate(self) -> None: # noqa: F811
exceptions: list[ValidationError] = []
# verify that the metadata file is present and valid

View File

@ -112,7 +112,6 @@ class ImportAssetsCommand(BaseCommand):
if file_name.startswith("charts/"):
dataset_dict = dataset_info[config["dataset_uuid"]]
config.update(dataset_dict)
# pylint: disable=line-too-long
dataset_uid = f"{dataset_dict['datasource_id']}__{dataset_dict['datasource_type']}"
config["params"].update({"datasource": dataset_uid})
if "query_context" in config:

View File

@ -46,7 +46,6 @@ from superset.utils.database import get_example_database
class ImportExamplesCommand(ImportModelsCommand):
"""Import examples"""
dao = BaseDAO

View File

@ -173,16 +173,16 @@ def load_configs(
# populate ssh_tunnel_passwords from the request or from existing DBs
if file_name in ssh_tunnel_priv_key_passwords:
config["ssh_tunnel"][
"private_key_password"
] = ssh_tunnel_priv_key_passwords[file_name]
config["ssh_tunnel"]["private_key_password"] = (
ssh_tunnel_priv_key_passwords[file_name]
)
elif (
prefix == "databases"
and config["uuid"] in db_ssh_tunnel_priv_key_passws
):
config["ssh_tunnel"][
"private_key_password"
] = db_ssh_tunnel_priv_key_passws[config["uuid"]]
config["ssh_tunnel"]["private_key_password"] = (
db_ssh_tunnel_priv_key_passws[config["uuid"]]
)
schema.load(config)
configs[file_name] = config

View File

@ -69,9 +69,10 @@ class ExportSavedQueriesCommand(ExportModelsCommand):
def _export(
model: SavedQuery, export_related: bool = True
) -> Iterator[tuple[str, Callable[[], str]]]:
yield ExportSavedQueriesCommand._file_name(
model
), lambda: ExportSavedQueriesCommand._file_content(model)
yield (
ExportSavedQueriesCommand._file_name(model),
lambda: ExportSavedQueriesCommand._file_content(model),
)
if export_related: # TODO: Maybe we can use database export command here?
# include database as well

View File

@ -18,13 +18,13 @@
from typing import Any
from marshmallow import Schema
from sqlalchemy.orm import Session
from sqlalchemy.orm import Session # noqa: F401
from superset.commands.database.importers.v1.utils import import_database
from superset.commands.importers.v1 import ImportModelsCommand
from superset.commands.query.exceptions import SavedQueryImportError
from superset.commands.query.importers.v1.utils import import_saved_query
from superset.connectors.sqla.models import SqlaTable
from superset.connectors.sqla.models import SqlaTable # noqa: F401
from superset.daos.query import SavedQueryDAO
from superset.databases.schemas import ImportV1DatabaseSchema
from superset.queries.saved_queries.schemas import ImportV1SavedQuerySchema

View File

@ -41,5 +41,4 @@ class CreateTemporaryCacheCommand(BaseCommand, ABC):
pass
@abstractmethod
def create(self, cmd_params: CommandParameters) -> str:
...
def create(self, cmd_params: CommandParameters) -> str: ...

View File

@ -41,5 +41,4 @@ class DeleteTemporaryCacheCommand(BaseCommand, ABC):
pass
@abstractmethod
def delete(self, cmd_params: CommandParameters) -> bool:
...
def delete(self, cmd_params: CommandParameters) -> bool: ...

View File

@ -42,5 +42,4 @@ class GetTemporaryCacheCommand(BaseCommand, ABC):
pass
@abstractmethod
def get(self, cmd_params: CommandParameters) -> Optional[str]:
...
def get(self, cmd_params: CommandParameters) -> Optional[str]: ...

View File

@ -45,5 +45,4 @@ class UpdateTemporaryCacheCommand(BaseCommand, ABC):
pass
@abstractmethod
def update(self, cmd_params: CommandParameters) -> Optional[str]:
...
def update(self, cmd_params: CommandParameters) -> Optional[str]: ...
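
The four hunks above all show the same formatter behavior: abstract-method bodies consisting solely of `...` are collapsed onto the `def` line, in keeping with Black's current stable style, which ruff-format mirrors. A minimal sketch with a hypothetical stand-in class:

from abc import ABC, abstractmethod
from typing import Optional


class TemporaryCacheCommand(ABC):  # hypothetical stand-in for illustration
    @abstractmethod
    def update(self, key: str) -> Optional[str]: ...  # ellipsis body inlined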

View File

@ -190,7 +190,8 @@ class QueryObject: # pylint: disable=too-many-instance-attributes
return isinstance(metric, str) or is_adhoc_metric(metric)
self.metrics = metrics and [
x if is_str_or_adhoc(x) else x["label"] for x in metrics # type: ignore
x if is_str_or_adhoc(x) else x["label"] # type: ignore
for x in metrics
]
def _set_post_processing(

View File

@ -563,9 +563,9 @@ IS_FEATURE_ENABLED_FUNC: Callable[[str, bool | None], bool] | None = None
#
# Takes as a parameter the common bootstrap payload before transformations.
# Returns a dict containing data that should be added or overridden to the payload.
COMMON_BOOTSTRAP_OVERRIDES_FUNC: Callable[
[dict[str, Any]], dict[str, Any]
] = lambda data: {} # default: empty dict
COMMON_BOOTSTRAP_OVERRIDES_FUNC: Callable[[dict[str, Any]], dict[str, Any]] = ( # noqa: E731
lambda data: {}
) # default: empty dict
# EXTRA_CATEGORICAL_COLOR_SCHEMES is used for adding custom categorical color schemes
# example code for "My custom warm to hot" color scheme
@ -640,8 +640,8 @@ THUMBNAIL_EXECUTE_AS = [ExecutorType.CURRENT_USER, ExecutorType.SELENIUM]
# `THUMBNAIL_EXECUTE_AS`; the executor is only equal to the currently logged in
# user if the executor type is equal to `ExecutorType.CURRENT_USER`)
# and return the final digest string:
THUMBNAIL_DASHBOARD_DIGEST_FUNC: None | (
Callable[[Dashboard, ExecutorType, str], str]
THUMBNAIL_DASHBOARD_DIGEST_FUNC: (
None | (Callable[[Dashboard, ExecutorType, str], str])
) = None
THUMBNAIL_CHART_DIGEST_FUNC: Callable[[Slice, ExecutorType, str], str] | None = None
@ -1035,8 +1035,8 @@ SQLLAB_CTAS_NO_LIMIT = False
# else:
# return f'tmp_{schema}'
# Function accepts database object, user object, schema name and sql that will be run.
SQLLAB_CTAS_SCHEMA_NAME_FUNC: None | (
Callable[[Database, models.User, str, str], str]
SQLLAB_CTAS_SCHEMA_NAME_FUNC: (
None | (Callable[[Database, models.User, str, str], str])
) = None
# If enabled, it can be used to store the results of long-running queries
@ -1080,7 +1080,7 @@ UPLOADED_CSV_HIVE_NAMESPACE: str | None = None
# db configuration and a result of this function.
# mypy doesn't catch that if case ensures list content being always str
ALLOWED_USER_CSV_SCHEMA_FUNC: Callable[[Database, models.User], list[str]] = (
ALLOWED_USER_CSV_SCHEMA_FUNC: Callable[[Database, models.User], list[str]] = ( # noqa: E731
lambda database, user: [UPLOADED_CSV_HIVE_NAMESPACE]
if UPLOADED_CSV_HIVE_NAMESPACE
else []
@ -1170,7 +1170,7 @@ BLUEPRINTS: list[Blueprint] = []
# lambda url, query: url if is_fresh(query) else None
# )
# pylint: disable-next=unnecessary-lambda-assignment
TRACKING_URL_TRANSFORMER = lambda url: url
TRACKING_URL_TRANSFORMER = lambda url: url # noqa: E731
# customize the polling time of each engine
@ -1572,7 +1572,7 @@ SSL_CERT_PATH: str | None = None
# conventions and such. You can find examples in the tests.
# pylint: disable-next=unnecessary-lambda-assignment
SQLA_TABLE_MUTATOR = lambda table: table
SQLA_TABLE_MUTATOR = lambda table: table # noqa: E731
# Global async query config options.
@ -1593,9 +1593,9 @@ GLOBAL_ASYNC_QUERIES_REDIS_STREAM_LIMIT_FIREHOSE = 1000000
GLOBAL_ASYNC_QUERIES_REGISTER_REQUEST_HANDLERS = True
GLOBAL_ASYNC_QUERIES_JWT_COOKIE_NAME = "async-token"
GLOBAL_ASYNC_QUERIES_JWT_COOKIE_SECURE = False
GLOBAL_ASYNC_QUERIES_JWT_COOKIE_SAMESITE: None | (
Literal["None", "Lax", "Strict"]
) = None
GLOBAL_ASYNC_QUERIES_JWT_COOKIE_SAMESITE: None | (Literal["None", "Lax", "Strict"]) = (
None
)
GLOBAL_ASYNC_QUERIES_JWT_COOKIE_DOMAIN = None
GLOBAL_ASYNC_QUERIES_JWT_SECRET = "test-secret-change-me"
GLOBAL_ASYNC_QUERIES_TRANSPORT: Literal["polling", "ws"] = "polling"
@ -1657,9 +1657,9 @@ ADVANCED_DATA_TYPES: dict[str, AdvancedDataType] = {
# "Xyz",
# [{"col": 'created_by', "opr": 'rel_o_m', "value": 10}],
# )
WELCOME_PAGE_LAST_TAB: (
Literal["examples", "all"] | tuple[str, list[dict[str, Any]]]
) = "all"
WELCOME_PAGE_LAST_TAB: Literal["examples", "all"] | tuple[str, list[dict[str, Any]]] = (
"all"
)
# Max allowed size for a zipped file
ZIPPED_FILE_MAX_SIZE = 100 * 1024 * 1024 # 100MB
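
The `# noqa: E731` markers above suppress ruff's rule against assigning a lambda to a name (E731, part of the selected `E7` group); `config.py` keeps the lambdas for backwards compatibility. A hypothetical sketch of what the rule would otherwise prefer:

# E731 flags lambda assignment; kept with a suppression in config.py:
TRACKING_URL_TRANSFORMER = lambda url: url  # noqa: E731


# The equivalent def form that E731 recommends instead:
def tracking_url_transformer(url: str) -> str:
    return url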

View File

@ -14,4 +14,4 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from . import models, views
from . import models, views # noqa: F401

View File

@ -175,9 +175,7 @@ class DatasourceKind(StrEnum):
PHYSICAL = "physical"
class BaseDatasource(
AuditMixinNullable, ImportExportMixin
): # pylint: disable=too-many-public-methods
class BaseDatasource(AuditMixinNullable, ImportExportMixin): # pylint: disable=too-many-public-methods
"""A common interface to objects that are queryable
(tables and datasources)"""
@ -669,7 +667,8 @@ class BaseDatasource(
)
def get_extra_cache_keys(
self, query_obj: QueryObjectDict # pylint: disable=unused-argument
self,
query_obj: QueryObjectDict, # pylint: disable=unused-argument
) -> list[Hashable]:
"""If a datasource needs to provide additional keys for calculation of
cache keys, those can be provided via this method
@ -757,7 +756,6 @@ class AnnotationDatasource(BaseDatasource):
class TableColumn(AuditMixinNullable, ImportExportMixin, CertificationMixin, Model):
"""ORM object for table columns, each table can have multiple columns"""
__tablename__ = "table_columns"
@ -971,7 +969,6 @@ class TableColumn(AuditMixinNullable, ImportExportMixin, CertificationMixin, Mod
class SqlMetric(AuditMixinNullable, ImportExportMixin, CertificationMixin, Model):
"""ORM object for metrics, each table can have multiple metrics"""
__tablename__ = "sql_metrics"
@ -1289,7 +1286,7 @@ class SqlaTable(
@property
def dttm_cols(self) -> list[str]:
l = [c.column_name for c in self.columns if c.is_dttm]
l = [c.column_name for c in self.columns if c.is_dttm] # noqa: E741
if self.main_dttm_col and self.main_dttm_col not in l:
l.append(self.main_dttm_col)
return l

View File

@ -15,6 +15,7 @@
# specific language governing permissions and limitations
# under the License.
"""Views used by the SqlAlchemy connector"""
import logging
import re

View File

@ -28,7 +28,7 @@ from superset.models.slice import Slice
from superset.utils.core import get_user_id
if TYPE_CHECKING:
from superset.connectors.sqla.models import BaseDatasource
pass
logger = logging.getLogger(__name__)

View File

@ -245,7 +245,7 @@ class DatasetDAO(BaseDAO[SqlaTable]):
[
{**properties, "table_id": model.id}
for properties in property_columns
if not "id" in properties
if "id" not in properties
],
)
@ -297,7 +297,7 @@ class DatasetDAO(BaseDAO[SqlaTable]):
[
{**properties, "table_id": model.id}
for properties in property_metrics
if not "id" in properties
if "id" not in properties
],
)

View File

@ -102,7 +102,7 @@ logger = logging.getLogger(__name__)
def with_dashboard(
f: Callable[[BaseSupersetModelRestApi, Dashboard], Response]
f: Callable[[BaseSupersetModelRestApi, Dashboard], Response],
) -> Callable[[BaseSupersetModelRestApi, str], Response]:
"""
A decorator that looks up the dashboard by id or slug and passes it to the api.
@ -1261,7 +1261,9 @@ class DashboardRestApi(BaseSupersetModelRestApi):
@permission_name("set_embedded")
@statsd_metrics
@event_logger.log_this_with_context(
action=lambda self, *args, **kwargs: f"{self.__class__.__name__}.delete_embedded",
action=lambda self,
*args,
**kwargs: f"{self.__class__.__name__}.delete_embedded",
log_to_statsd=False,
)
@with_dashboard

View File

@ -1692,9 +1692,9 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
and getattr(engine_spec, "default_driver") in drivers
):
payload["parameters"] = engine_spec.parameters_json_schema()
payload[
"sqlalchemy_uri_placeholder"
] = engine_spec.sqlalchemy_uri_placeholder
payload["sqlalchemy_uri_placeholder"] = (
engine_spec.sqlalchemy_uri_placeholder
)
available_databases.append(payload)

View File

@ -14,8 +14,8 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
""" Superset utilities for pandas.DataFrame.
"""
"""Superset utilities for pandas.DataFrame."""
import logging
from typing import Any

View File

@ -24,7 +24,6 @@ dataset, new models for columns, metrics, and tables were also introduced.
These models are not fully implemented, and shouldn't be used yet.
"""
import sqlalchemy as sa
from flask_appbuilder import Model
from sqlalchemy.orm import backref, relationship

View File

@ -27,6 +27,7 @@ at all. The classes here will use a common interface to specify all this.
The general idea is to use static classes and an inheritance scheme.
"""
import inspect
import logging
import pkgutil

View File

@ -130,9 +130,7 @@ builtin_time_grains: dict[str | None, str] = {
}
class TimestampExpression(
ColumnClause
): # pylint: disable=abstract-method, too-many-ancestors
class TimestampExpression(ColumnClause): # pylint: disable=abstract-method, too-many-ancestors
def __init__(self, expr: str, col: ColumnClause, **kwargs: Any) -> None:
"""Sqlalchemy class that can be used to render native column elements respecting
engine-specific quoting rules as part of a string-based expression.
@ -575,7 +573,8 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
@classmethod
def get_allows_alias_in_select(
cls, database: Database # pylint: disable=unused-argument
cls,
database: Database, # pylint: disable=unused-argument
) -> bool:
"""
Method for dynamic `allows_alias_in_select`.
@ -1035,7 +1034,7 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
return indexes
@classmethod
def get_extra_table_metadata( # pylint: disable=unused-argument
def get_extra_table_metadata(
cls,
database: Database,
table: Table,

View File

@ -389,9 +389,9 @@ class BigQueryEngineSpec(BaseEngineSpec): # pylint: disable=too-many-public-met
# Add credentials if they are set on the SQLAlchemy dialect.
if creds := engine.dialect.credentials_info:
to_gbq_kwargs[
"credentials"
] = service_account.Credentials.from_service_account_info(creds)
to_gbq_kwargs["credentials"] = (
service_account.Credentials.from_service_account_info(creds)
)
# Only pass through supported kwargs.
supported_kwarg_keys = {"if_exists"}

View File

@ -23,7 +23,7 @@ from typing import Any, TYPE_CHECKING, TypedDict
from apispec import APISpec
from apispec.ext.marshmallow import MarshmallowPlugin
from flask_babel import gettext as __, lazy_gettext as _
from flask_babel import gettext as __
from marshmallow import fields, Schema
from sqlalchemy import types
from sqlalchemy.engine.reflection import Inspector

View File

@ -50,8 +50,6 @@ from superset.superset_typing import ResultSetColumnType
if TYPE_CHECKING:
# prevent circular imports
from pyhive.hive import Cursor
from TCLIService.ttypes import TFetchOrientation
from superset.models.core import Database
@ -392,7 +390,7 @@ class HiveEngineSpec(PrestoEngineSpec):
# Wait for job id before logging things out
# this allows for prefixing all log lines and becoming
# searchable in something like Kibana
for l in log_lines[last_log_line:]:
for l in log_lines[last_log_line:]: # noqa: E741
logger.info("Query %s: [%s] %s", str(query_id), str(job_id), l)
last_log_line = len(log_lines)
if needs_commit:

View File

@ -19,7 +19,7 @@ import contextlib
import re
import threading
from re import Pattern
from typing import Any, Callable, List, NamedTuple, Optional
from typing import Any, Callable, NamedTuple, Optional
from flask_babel import gettext as __
from sqlalchemy.engine.reflection import Inspector
@ -98,7 +98,7 @@ def _wkt_to_geo_json(geo_as_wkt: str) -> Any:
def _point_list_to_wkt(
points, # type: List[pyocient._STPoint]
points, # type: list[pyocient._STPoint]
) -> str:
"""
Converts the list of pyocient._STPoint elements to a WKT LineString.
@ -204,7 +204,7 @@ try:
TypeCodes.ST_LINESTRING: _linestring_to_geo_json,
TypeCodes.ST_POLYGON: _polygon_to_geo_json,
}
except ImportError as e:
except ImportError:
_sanitized_ocient_type_codes = {}

View File

@ -447,14 +447,14 @@ class PrestoBaseEngineSpec(BaseEngineSpec, metaclass=ABCMeta):
limit_clause = f"LIMIT {limit}" if limit else ""
order_by_clause = ""
if order_by:
l = []
l = [] # noqa: E741
for field, desc in order_by:
l.append(field + " DESC" if desc else "")
order_by_clause = "ORDER BY " + ", ".join(l)
where_clause = ""
if filters:
l = []
l = [] # noqa: E741
for field, value in filters.items():
l.append(f"{field} = '{value}'")
where_clause = "WHERE " + " AND ".join(l)

View File

@ -15,6 +15,7 @@
# specific language governing permissions and limitations
# under the License.
"""This module contains data related to countries and is used for geo mapping"""
# pylint: disable=too-many-lines
from typing import Any, Optional

View File

@ -14,7 +14,6 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=unused-import
from .bart_lines import load_bart_lines
from .big_data import load_big_data
from .birth_names import load_birth_names
@ -33,3 +32,24 @@ from .supported_charts_dashboard import load_supported_charts_dashboard
from .tabbed_dashboard import load_tabbed_dashboard
from .utils import load_examples_from_configs
from .world_bank import load_world_bank_health_n_pop
__all__ = [
"load_bart_lines",
"load_big_data",
"load_birth_names",
"load_country_map_data",
"load_css_templates",
"load_deck_dash",
"load_energy",
"load_flights",
"load_long_lat_data",
"load_misc_dashboard",
"load_multiformat_time_series",
"load_paris_iris_geojson",
"load_random_time_series_data",
"load_sf_population_polygons",
"load_supported_charts_dashboard",
"load_tabbed_dashboard",
"load_examples_from_configs",
"load_world_bank_health_n_pop",
]
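
The explicit `__all__` list replaces the old `# pylint: disable=unused-import` pragma: under ruff's F401, names imported in an `__init__.py` count as intentional re-exports only when listed in `__all__` or re-imported with a redundant alias. A hypothetical sketch of the alias form, which has the same effect:

# In a package __init__.py, a redundant alias marks an intentional
# re-export that F401 will not flag (hypothetical file):
from .bart_lines import load_bart_lines as load_bart_lines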

View File

@ -15,6 +15,7 @@
# specific language governing permissions and limitations
# under the License.
"""Loads datasets, dashboards and slices in a new superset instance"""
import textwrap
import pandas as pd

View File

@ -42,9 +42,7 @@ def load_flights(only_metadata: bool = False, force: bool = False) -> None:
pdf[ # pylint: disable=unsupported-assignment-operation,useless-suppression
"ds"
] = (
pdf.YEAR.map(str) + "-0" + pdf.MONTH.map(str) + "-0" + pdf.DAY.map(str)
)
] = pdf.YEAR.map(str) + "-0" + pdf.MONTH.map(str) + "-0" + pdf.DAY.map(str)
pdf.ds = pd.to_datetime(pdf.ds)
pdf.drop(columns=["DAY", "MONTH", "YEAR"])
pdf = pdf.join(airports, on="ORIGIN_AIRPORT", rsuffix="_ORIG")

View File

@ -15,6 +15,7 @@
# specific language governing permissions and limitations
# under the License.
"""Loads datasets, dashboards and slices in a new superset instance"""
import json
import os
from typing import Any

View File

@ -15,6 +15,7 @@
# specific language governing permissions and limitations
# under the License.
"""Loads datasets, dashboards and slices in a new superset instance"""
import json
import textwrap

View File

@ -15,6 +15,7 @@
# specific language governing permissions and limitations
# under the License.
"""Loads datasets, dashboards and slices in a new superset instance"""
import json
import os

View File

@ -72,7 +72,6 @@ from superset import db, feature_flag_manager, security_manager, sql_parse
# pylint: disable=abstract-method
class SupersetAPSWDialect(APSWDialect):
"""
A SQLAlchemy dialect for an internal Superset engine.
@ -187,7 +186,6 @@ class FallbackField(Field[Any, str]):
# pylint: disable=too-many-instance-attributes
class SupersetShillelaghAdapter(Adapter):
"""
A Shillelagh adapter for Superset tables.

View File

@ -15,6 +15,7 @@
# specific language governing permissions and limitations
# under the License.
"""Contains the logic to create cohesive forms on the explore view"""
import json
from typing import Any, Optional

View File

@ -15,6 +15,7 @@
# specific language governing permissions and limitations
# under the License.
"""Defines the templating context for SQL Lab"""
import json
import re
from datetime import datetime

View File

@ -59,12 +59,10 @@ class SharedKey(StrEnum):
class KeyValueCodec(ABC):
@abstractmethod
def encode(self, value: Any) -> bytes:
...
def encode(self, value: Any) -> bytes: ...
@abstractmethod
def decode(self, value: bytes) -> Any:
...
def decode(self, value: bytes) -> Any: ...
class JsonKeyValueCodec(KeyValueCodec):

View File

@ -15,6 +15,7 @@
# specific language governing permissions and limitations
# under the License.
"""Code related with dealing with legacy / change management"""
from typing import Any

View File

@ -15,7 +15,7 @@
# specific language governing permissions and limitations
# under the License.
from alembic.operations import BatchOperations, Operations
from alembic.operations import Operations
naming_convention = {
"fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",

View File

@ -14,4 +14,4 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from .processors import *
from .processors import * # noqa: F403

View File

@ -24,7 +24,7 @@ from sqlalchemy import and_, Column, Integer, String, Text
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import Session
from superset import conf, is_feature_enabled
from superset import conf
from superset.constants import TimeGrain
from superset.migrations.shared.utils import paginated_update, try_load_json

View File

@ -23,10 +23,9 @@ from typing import Any, Callable, Optional, Union
from uuid import uuid4
from alembic import op
from sqlalchemy import engine_from_config, inspect
from sqlalchemy import inspect
from sqlalchemy.dialects.mysql.base import MySQLDialect
from sqlalchemy.dialects.postgresql.base import PGDialect
from sqlalchemy.engine import reflection
from sqlalchemy.exc import NoSuchTableError
from sqlalchemy.orm import Query, Session
@ -106,7 +105,7 @@ def paginated_update(
result = session.execute(query)
if print_page_progress is None or print_page_progress is True:
print_page_progress = lambda processed, total: print(
print_page_progress = lambda processed, total: print( # noqa: E731
f" {processed}/{total}", end="\r"
)

View File

@ -26,8 +26,8 @@ Create Date: 2015-09-21 17:30:38.442998
revision = "4e6a06bad7a8"
down_revision = None
import sqlalchemy as sa
from alembic import op
import sqlalchemy as sa # noqa: E402
from alembic import op # noqa: E402
def upgrade():

View File

@ -26,8 +26,8 @@ Create Date: 2015-10-05 10:32:15.850753
revision = "5a7bad26f2a7"
down_revision = "4e6a06bad7a8"
import sqlalchemy as sa
from alembic import op
import sqlalchemy as sa # noqa: E402
from alembic import op # noqa: E402
def upgrade():

View File

@ -26,8 +26,8 @@ Create Date: 2015-10-05 22:11:00.537054
revision = "1e2841a4128"
down_revision = "5a7bad26f2a7"
import sqlalchemy as sa
from alembic import op
import sqlalchemy as sa # noqa: E402
from alembic import op # noqa: E402
def upgrade():

View File

@ -26,8 +26,8 @@ Create Date: 2015-10-19 20:54:00.565633
revision = "2929af7925ed"
down_revision = "1e2841a4128"
import sqlalchemy as sa
from alembic import op
import sqlalchemy as sa # noqa: E402
from alembic import op # noqa: E402
def upgrade():

View File

@ -26,15 +26,15 @@ Create Date: 2015-12-04 09:42:16.973264
revision = "1a48a5411020"
down_revision = "289ce07647b"
import sqlalchemy as sa
from alembic import op
import sqlalchemy as sa # noqa: E402
from alembic import op # noqa: E402
def upgrade():
op.add_column("dashboards", sa.Column("slug", sa.String(length=255), nullable=True))
try:
op.create_unique_constraint("idx_unique_slug", "dashboards", ["slug"])
except:
except: # noqa: E722
pass

View File

@ -26,8 +26,8 @@ Create Date: 2015-12-04 11:16:58.226984
revision = "315b3f4da9b0"
down_revision = "1a48a5411020"
import sqlalchemy as sa
from alembic import op
import sqlalchemy as sa # noqa: E402
from alembic import op # noqa: E402
def upgrade():

View File

@ -26,8 +26,8 @@ Create Date: 2015-12-13 08:38:43.704145
revision = "55179c7f25c7"
down_revision = "315b3f4da9b0"
import sqlalchemy as sa
from alembic import op
import sqlalchemy as sa # noqa: E402
from alembic import op # noqa: E402
def upgrade():

View File

@ -26,8 +26,8 @@ Create Date: 2015-12-14 13:37:17.374852
revision = "12d55656cbca"
down_revision = "55179c7f25c7"
import sqlalchemy as sa
from alembic import op
import sqlalchemy as sa # noqa: E402
from alembic import op # noqa: E402
def upgrade():

View File

@ -26,8 +26,8 @@ Create Date: 2015-12-15 17:02:45.128709
revision = "2591d77e9831"
down_revision = "12d55656cbca"
import sqlalchemy as sa
from alembic import op
import sqlalchemy as sa # noqa: E402
from alembic import op # noqa: E402
def upgrade():

View File

@ -21,12 +21,13 @@ Revises: 2591d77e9831
Create Date: 2016-01-13 20:24:45.256437
"""
# revision identifiers, used by Alembic.
revision = "8e80a26a31db"
down_revision = "2591d77e9831"
import sqlalchemy as sa
from alembic import op
import sqlalchemy as sa # noqa: E402
from alembic import op # noqa: E402
def upgrade():

View File

@ -26,8 +26,8 @@ Create Date: 2016-01-17 22:00:23.640788
revision = "7dbf98566af7"
down_revision = "8e80a26a31db"
import sqlalchemy as sa
from alembic import op
import sqlalchemy as sa # noqa: E402
from alembic import op # noqa: E402
def upgrade():

View File

@ -26,8 +26,8 @@ Create Date: 2016-01-18 23:43:16.073483
revision = "43df8de3a5f4"
down_revision = "7dbf98566af7"
import sqlalchemy as sa
from alembic import op
import sqlalchemy as sa # noqa: E402
from alembic import op # noqa: E402
def upgrade():

View File

@ -26,8 +26,8 @@ Create Date: 2016-02-03 17:41:10.944019
revision = "d827694c7555"
down_revision = "43df8de3a5f4"
import sqlalchemy as sa
from alembic import op
import sqlalchemy as sa # noqa: E402
from alembic import op # noqa: E402
def upgrade():

View File

@ -21,6 +21,7 @@ Revises: d827694c7555
Create Date: 2016-02-10 08:47:28.950891
"""
import sqlalchemy as sa
from alembic import op

View File

@ -26,8 +26,8 @@ Create Date: 2016-03-13 09:56:58.329512
revision = "a2d606a761d9"
down_revision = "18e88e1cc004"
import sqlalchemy as sa
from alembic import op
import sqlalchemy as sa # noqa: E402
from alembic import op # noqa: E402
def upgrade():

View File

@ -21,6 +21,7 @@ Revises: 430039611635
Create Date: 2016-03-13 21:30:24.833107
"""
import sqlalchemy as sa
from alembic import op

View File

@ -21,12 +21,13 @@ Revises: 18e88e1cc004
Create Date: 2016-03-17 08:40:03.186534
"""
# revision identifiers, used by Alembic.
revision = "836c0bf75904"
down_revision = "18e88e1cc004"
import sqlalchemy as sa
from alembic import op
import sqlalchemy as sa # noqa: E402
from alembic import op # noqa: E402
def upgrade():

View File

@ -26,8 +26,8 @@ Create Date: 2016-03-24 14:13:44.817723
revision = "763d4b211ec9"
down_revision = "d2424a248d63"
import sqlalchemy as sa
from alembic import op
import sqlalchemy as sa # noqa: E402
from alembic import op # noqa: E402
def upgrade():
@ -96,7 +96,7 @@ def upgrade():
op.alter_column("url", "created_on", existing_type=sa.DATETIME(), nullable=True)
op.create_foreign_key(None, "metrics", "ab_user", ["changed_by_fk"], ["id"])
op.create_foreign_key(None, "metrics", "ab_user", ["created_by_fk"], ["id"])
except:
except: # noqa: E722
pass
@ -174,5 +174,5 @@ def downgrade():
op.alter_column(
"columns", "changed_on", existing_type=sa.DATETIME(), nullable=False
)
except:
except: # noqa: E722
pass

View File

@ -21,12 +21,13 @@ Revises: d2424a248d63
Create Date: 2016-03-25 14:35:44.642576
"""
# revision identifiers, used by Alembic.
revision = "1d2ddd543133"
down_revision = "d2424a248d63"
import sqlalchemy as sa
from alembic import op
import sqlalchemy as sa # noqa: E402
from alembic import op # noqa: E402
def upgrade():

View File

@ -21,12 +21,13 @@ Revises: fee7b758c130
Create Date: 2016-04-03 15:23:20.280841
"""
# revision identifiers, used by Alembic.
revision = "867bf4f117f9"
down_revision = "fee7b758c130"
import sqlalchemy as sa
from alembic import op
import sqlalchemy as sa # noqa: E402
from alembic import op # noqa: E402
def upgrade():

View File

@ -26,8 +26,8 @@ Create Date: 2016-04-11 22:41:06.185955
revision = "bb51420eaf83"
down_revision = "867bf4f117f9"
import sqlalchemy as sa
from alembic import op
import sqlalchemy as sa # noqa: E402
from alembic import op # noqa: E402
def upgrade():

View File

@ -21,6 +21,7 @@ Revises: bb51420eaf83
Create Date: 2016-04-15 08:31:26.249591
"""
from alembic import op
# revision identifiers, used by Alembic.

View File

@ -21,12 +21,13 @@ Revises: b4456560d4f3
Create Date: 2016-04-15 17:58:33.842012
"""
# revision identifiers, used by Alembic.
revision = "4fa88fe24e94"
down_revision = "b4456560d4f3"
import sqlalchemy as sa
from alembic import op
import sqlalchemy as sa # noqa: E402
from alembic import op # noqa: E402
def upgrade():

View File

@ -21,6 +21,7 @@ Revises: 4fa88fe24e94
Create Date: 2016-04-25 08:54:04.303859
"""
import sqlalchemy as sa
from alembic import op
from sqlalchemy import Column, ForeignKey, Integer, String

View File

@ -26,8 +26,8 @@ Create Date: 2016-05-01 12:21:18.331191
revision = "f0fbf6129e13"
down_revision = "c3a8f8611885"
import sqlalchemy as sa
from alembic import op
import sqlalchemy as sa # noqa: E402
from alembic import op # noqa: E402
def upgrade():

View File

@ -21,6 +21,7 @@ Revises: f0fbf6129e13
Create Date: 2016-05-11 17:28:32.407340
"""
import sqlalchemy as sa
from alembic import op

View File

@ -21,6 +21,7 @@ Revises: 956a063c52b3
Create Date: 2016-05-27 15:03:32.980343
"""
import logging
from alembic import op
@ -59,7 +60,7 @@ def upgrade():
["datasource_name"],
["datasource_name"],
)
except:
except: # noqa: E722
logging.warning("Could not find or drop constraint on `columns`")

View File

@ -26,12 +26,12 @@ Create Date: 2016-06-07 12:33:25.756640
revision = "d8bc074f7aad"
down_revision = "1226819ee0e3"
import sqlalchemy as sa
from alembic import op
from sqlalchemy import Boolean, Column, Integer
from sqlalchemy.ext.declarative import declarative_base
import sqlalchemy as sa # noqa: E402
from alembic import op # noqa: E402
from sqlalchemy import Boolean, Column, Integer # noqa: E402
from sqlalchemy.ext.declarative import declarative_base # noqa: E402
from superset import db
from superset import db # noqa: E402
Base = declarative_base()

Some files were not shown because too many files have changed in this diff.