[ci] Deprecate flake8 (#8409)

* [ci] Deprecate flake8

* Addressing @villebro's comments
John Bodley 2019-10-18 14:44:27 -07:00 committed by Maxime Beauchemin
parent a19990185d
commit 9fc37ea9f1
234 changed files with 702 additions and 647 deletions

View File

@ -16,11 +16,21 @@
#
repos:
- repo: https://github.com/ambv/black
rev: stable
rev: 19.3b0
hooks:
- id: black
language_version: python3
- repo: https://github.com/asottile/seed-isort-config
rev: v1.9.3
hooks:
- id: seed-isort-config
- repo: https://github.com/pre-commit/mirrors-isort
rev: v4.3.21
hooks:
- id: isort
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v2.2.3
hooks:
@ -30,8 +40,3 @@ repos:
- id: check-added-large-files
- id: check-yaml
- id: debug-statements
- repo: https://gitlab.com/pycqa/flake8
rev: 3.7.1
hooks:
- id: flake8

View File

@ -75,7 +75,10 @@ jobs:
env: TOXENV=black
- language: python
python: 3.6
env: TOXENV=flake8
env: TOXENV=isort
- language: python
python: 3.6
env: TOXENV=mypy
- language: python
python: 3.6
env: TOXENV=py36-sqlite

View File

@ -479,6 +479,24 @@ The Python code is auto-formatted using [Black](https://github.com/python/black)
is configured as a pre-commit hook. There are also numerous [editor integrations](https://black.readthedocs.io/en/stable/editor_integration.html).
## Conventions
### Python
Parameters in `config.py` (which are accessible via the Flask app.config dictionary) are assumed to always be defined and thus should be accessed directly via,
```python
blueprints = app.config["BLUEPRINTS"]
```
rather than,
```python
blueprints = app.config.get("BLUEPRINTS")
```
or similar, as the latter will cause typing issues. The former is of type `List[Callable]` whereas the latter is of type `Optional[List[Callable]]`.
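As a minimal sketch of the difference (using a plain annotated dict as a stand-in for `app.config`), direct indexing preserves the declared value type while `.get` widens it to an `Optional`:
```python
from typing import Callable, Dict, List

config: Dict[str, List[Callable]] = {"BLUEPRINTS": []}

blueprints = config["BLUEPRINTS"]  # mypy infers List[Callable]
maybe = config.get("BLUEPRINTS")  # mypy infers Optional[List[Callable]]

for bp in maybe:  # error: Item "None" of "Optional[List[Callable]]" has no attribute "__iter__"
    bp()
```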
## Testing
### Python Testing

View File

@ -16,11 +16,9 @@
#
black==19.3b0
coverage==4.5.3
flake8-import-order==0.18.1
flake8-mypy==17.8.0
flake8==3.7.7
flask-cors==3.0.7
ipdb==0.12
isort==4.3.21
mypy==0.670
nose==1.3.7
pip-tools==3.7.0

View File

@ -39,3 +39,15 @@ detailed-errors = 1
with-coverage = 1
nocapture = 1
cover-package = superset
[isort]
combine_as_imports = true
include_trailing_comma = true
line_length = 88
known_first_party = superset
known_third_party =alembic,backoff,bleach,celery,click,colorama,contextlib2,croniter,dateutil,flask,flask_appbuilder,flask_babel,flask_caching,flask_compress,flask_login,flask_migrate,flask_sqlalchemy,flask_talisman,flask_wtf,geohash,geopy,humanize,isodate,jinja2,markdown,marshmallow,msgpack,numpy,pandas,parsedatetime,pathlib2,polyline,prison,psycopg2,pyarrow,pyhive,pytz,retry,selenium,setuptools,simplejson,sphinx_rtd_theme,sqlalchemy,sqlalchemy_utils,sqlparse,werkzeug,wtforms,wtforms_json,yaml
multi_line_output = 3
order_by_type = false
[mypy]
ignore_missing_imports = true
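For reference, `multi_line_output = 3` with `include_trailing_comma = true` makes isort wrap long imports in the vertical-hanging-indent style that Black expects; a sketch of the resulting shape (the module name is taken from the known_third_party list above):
```python
from flask_appbuilder import (
    AppBuilder,
    IndexView,
    SQLA,
)
```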

View File

@ -16,11 +16,13 @@
# under the License.
# pylint: disable=C,R,W
"""Package's main module!"""
from copy import deepcopy
import json
import logging
import os
from copy import deepcopy
from typing import Any, Dict
import wtforms_json
from flask import Flask, redirect
from flask_appbuilder import AppBuilder, IndexView, SQLA
from flask_appbuilder.baseviews import expose
@ -28,7 +30,6 @@ from flask_compress import Compress
from flask_migrate import Migrate
from flask_talisman import Talisman
from flask_wtf.csrf import CSRFProtect
import wtforms_json
from superset import config
from superset.connectors.connector_registry import ConnectorRegistry
@ -45,14 +46,14 @@ if not os.path.exists(config.DATA_DIR):
os.makedirs(config.DATA_DIR)
app = Flask(__name__)
app.config.from_object(CONFIG_MODULE)
app.config.from_object(CONFIG_MODULE) # type: ignore
conf = app.config
#################################################################
# Handling manifest file logic at app start
#################################################################
MANIFEST_FILE = APP_DIR + "/static/assets/dist/manifest.json"
manifest = {}
manifest: Dict[Any, Any] = {}
def parse_manifest_json():
@ -103,7 +104,7 @@ def get_manifest():
#################################################################
for bp in conf.get("BLUEPRINTS"):
for bp in conf["BLUEPRINTS"]:
try:
print("Registering blueprint: '{}'".format(bp.name))
app.register_blueprint(bp)
@ -129,7 +130,7 @@ tables_cache = setup_cache(app, conf.get("TABLE_NAMES_CACHE_CONFIG"))
migrate = Migrate(app, db, directory=APP_DIR + "/migrations")
app.config.get("LOGGING_CONFIGURATOR").configure_logging(app.config, app.debug)
app.config["LOGGING_CONFIGURATOR"].configure_logging(app.config, app.debug)
if app.config.get("ENABLE_CORS"):
from flask_cors import CORS
@ -139,7 +140,9 @@ if app.config.get("ENABLE_CORS"):
if app.config.get("ENABLE_PROXY_FIX"):
from werkzeug.middleware.proxy_fix import ProxyFix
app.wsgi_app = ProxyFix(app.wsgi_app, **app.config.get("PROXY_FIX_CONFIG"))
app.wsgi_app = ProxyFix( # type: ignore
app.wsgi_app, **app.config.get("PROXY_FIX_CONFIG")
)
if app.config.get("ENABLE_CHUNK_ENCODING"):
@ -154,16 +157,16 @@ if app.config.get("ENABLE_CHUNK_ENCODING"):
environ["wsgi.input_terminated"] = True
return self.app(environ, start_response)
app.wsgi_app = ChunkedEncodingFix(app.wsgi_app)
app.wsgi_app = ChunkedEncodingFix(app.wsgi_app) # type: ignore
if app.config.get("UPLOAD_FOLDER"):
if app.config["UPLOAD_FOLDER"]:
try:
os.makedirs(app.config.get("UPLOAD_FOLDER"))
os.makedirs(app.config["UPLOAD_FOLDER"])
except OSError:
pass
for middleware in app.config.get("ADDITIONAL_MIDDLEWARE"):
app.wsgi_app = middleware(app.wsgi_app)
for middleware in app.config["ADDITIONAL_MIDDLEWARE"]:
app.wsgi_app = middleware(app.wsgi_app) # type: ignore
class MyIndexView(IndexView):
@ -233,9 +236,9 @@ flask_app_mutator = app.config.get("FLASK_APP_MUTATOR")
if flask_app_mutator:
flask_app_mutator(app)
from superset import views # noqa
from superset import views # noqa isort:skip
# Registering sources
module_datasource_map = app.config.get("DEFAULT_MODULE_DS_MAP")
module_datasource_map.update(app.config.get("ADDITIONAL_MODULE_DS_MAP"))
module_datasource_map = app.config["DEFAULT_MODULE_DS_MAP"]
module_datasource_map.update(app.config["ADDITIONAL_MODULE_DS_MAP"])
ConnectorRegistry.register_sources(module_datasource_map)
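The `manifest: Dict[Any, Any] = {}` change above reflects a general mypy rule: empty-container module globals need an explicit annotation; a minimal sketch:
```python
from typing import Any, Dict

manifest = {}  # mypy: Need type annotation for "manifest"
manifest_typed: Dict[Any, Any] = {}  # annotation makes the intended type explicit
manifest_typed["main.js"] = "main.1234.js"  # illustrative entry
```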

View File

@ -16,17 +16,17 @@
# specific language governing permissions and limitations
# under the License.
# pylint: disable=C,R,W
from datetime import datetime
import logging
from datetime import datetime
from subprocess import Popen
from sys import stdout
import click
import yaml
from colorama import Fore, Style
from flask import g
from flask_appbuilder import Model
from pathlib2 import Path
import yaml
from superset import app, appbuilder, db, examples, security_manager
from superset.common.tags import add_favorites, add_owners, add_types

View File

@ -15,21 +15,21 @@
# specific language governing permissions and limitations
# under the License.
# pylint: disable=C,R,W
from datetime import datetime, timedelta
import logging
import pickle as pkl
from datetime import datetime, timedelta
from typing import Any, Dict, List, Optional
import numpy as np
import pandas as pd
from superset import app, cache
from superset import db
from superset import app, cache, db
from superset.connectors.base.models import BaseDatasource
from superset.connectors.connector_registry import ConnectorRegistry
from superset.stats_logger import BaseStatsLogger
from superset.utils import core as utils
from superset.utils.core import DTTM_ALIAS
from .query_object import QueryObject
config = app.config
@ -59,8 +59,10 @@ class QueryContext:
force: bool = False,
custom_cache_timeout: Optional[int] = None,
) -> None:
self.datasource = ConnectorRegistry.get_datasource(
datasource.get("type"), int(datasource.get("id")), db.session # noqa: T400
self.datasource = ConnectorRegistry.get_datasource( # type: ignore
datasource.get("type"), # type: ignore
int(datasource.get("id")), # type: ignore
db.session,
)
self.queries = list(map(lambda query_obj: QueryObject(**query_obj), queries))

View File

@ -15,8 +15,8 @@
# specific language governing permissions and limitations
# under the License.
# pylint: disable=R
from datetime import datetime, timedelta
import hashlib
from datetime import datetime, timedelta
from typing import Any, Dict, List, Optional, Union
import simplejson as json
@ -24,7 +24,6 @@ import simplejson as json
from superset import app
from superset.utils import core as utils
# TODO: Type Metrics dictionary with TypedDict when it becomes a vanilla python type
# https://github.com/python/mypy/issues/5288
@ -39,7 +38,7 @@ class QueryObject:
from_dttm: datetime
to_dttm: datetime
is_timeseries: bool
time_shift: timedelta
time_shift: Optional[timedelta]
groupby: List[str]
metrics: List[Union[Dict, str]]
row_limit: int
@ -61,7 +60,7 @@ class QueryObject:
time_shift: Optional[str] = None,
is_timeseries: bool = False,
timeseries_limit: int = 0,
row_limit: int = app.config.get("ROW_LIMIT"),
row_limit: int = app.config["ROW_LIMIT"],
timeseries_limit_metric: Optional[Dict] = None,
order_desc: bool = True,
extras: Optional[Dict] = None,
@ -79,13 +78,15 @@ class QueryObject:
)
self.is_timeseries = is_timeseries
self.time_range = time_range
self.time_shift = utils.parse_human_timedelta(time_shift)
self.time_shift = (
utils.parse_human_timedelta(time_shift) if time_shift else None
)
self.groupby = groupby or []
# Temporary solution for backward compatibility issue
# due to the new format of non-ad-hoc metric.
self.metrics = [
metric if "expressionType" in metric else metric["label"] # noqa: T484
metric if "expressionType" in metric else metric["label"] # type: ignore
for metric in metrics
]
self.row_limit = row_limit
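The `time_shift` change above narrows the attribute to `Optional[timedelta]` and only invokes the parser when a value is present; a behavior sketch with a simplified stand-in for `utils.parse_human_timedelta`:
```python
from datetime import timedelta
from typing import Optional

def parse_human_timedelta(value: str) -> timedelta:
    # simplified stand-in for superset.utils.core.parse_human_timedelta
    amount, unit = value.split()
    return timedelta(**{unit: int(amount)})

def to_time_shift(time_shift: Optional[str]) -> Optional[timedelta]:
    # Guarding the call keeps None out of the parser and gives the
    # attribute the Optional[timedelta] type declared on the class.
    return parse_human_timedelta(time_shift) if time_shift else None

print(to_time_shift("1 days"))  # 1 day, 0:00:00
print(to_time_shift(None))      # None
```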

View File

@ -21,13 +21,14 @@ All configuration in this file can be overridden by providing a superset_config
in your PYTHONPATH as there is a ``from superset_config import *``
at the end of this file.
"""
from collections import OrderedDict
import imp
import importlib.util
import json
import logging
import os
import sys
from collections import OrderedDict
from typing import Any, Callable, Dict, List
from celery.schedules import crontab
from dateutil import tz
@ -93,7 +94,7 @@ SQLALCHEMY_TRACK_MODIFICATIONS = False
# ---------------------------------------------------------
# Your App secret key
SECRET_KEY = "\2\1thisismyscretkey\1\2\e\y\y\h" # noqa
SECRET_KEY = "\2\1thisismyscretkey\1\2\e\y\y\h"
# The SQLAlchemy connection string.
SQLALCHEMY_DATABASE_URI = "sqlite:///" + os.path.join(DATA_DIR, "superset.db")
@ -262,12 +263,12 @@ IMG_UPLOAD_URL = "/static/uploads/"
# IMG_SIZE = (300, 200, True)
CACHE_DEFAULT_TIMEOUT = 60 * 60 * 24
CACHE_CONFIG = {"CACHE_TYPE": "null"}
CACHE_CONFIG: Dict[str, Any] = {"CACHE_TYPE": "null"}
TABLE_NAMES_CACHE_CONFIG = {"CACHE_TYPE": "null"}
# CORS Options
ENABLE_CORS = False
CORS_OPTIONS = {}
CORS_OPTIONS: Dict[Any, Any] = {}
# Chrome allows up to 6 open connections per domain at a time. When there are more
# than 6 slices in dashboard, a lot of time fetch requests are queued up and wait for
@ -292,13 +293,13 @@ CSV_EXPORT = {"encoding": "utf-8"}
# time grains in superset/db_engine_specs.builtin_time_grains).
# For example: to disable 1 second time grain:
# TIME_GRAIN_BLACKLIST = ['PT1S']
TIME_GRAIN_BLACKLIST = []
TIME_GRAIN_BLACKLIST: List[str] = []
# Additional time grains to be supported using similar definitions as in
# superset/db_engine_specs.builtin_time_grains.
# For example: To add a new 2 second time grain:
# TIME_GRAIN_ADDONS = {'PT2S': '2 second'}
TIME_GRAIN_ADDONS = {}
TIME_GRAIN_ADDONS: Dict[str, str] = {}
# Implementation of additional time grains per engine.
# For example: To implement 2 second time grain on clickhouse engine:
@ -307,7 +308,7 @@ TIME_GRAIN_ADDONS = {}
# 'PT2S': 'toDateTime(intDiv(toUInt32(toDateTime({col})), 2)*2)'
# }
# }
TIME_GRAIN_ADDON_FUNCTIONS = {}
TIME_GRAIN_ADDON_FUNCTIONS: Dict[str, Dict[str, str]] = {}
# ---------------------------------------------------
# List of viz_types not allowed in your environment
@ -315,13 +316,13 @@ TIME_GRAIN_ADDON_FUNCTIONS = {}
# VIZ_TYPE_BLACKLIST = ['pivot_table', 'treemap']
# ---------------------------------------------------
VIZ_TYPE_BLACKLIST = []
VIZ_TYPE_BLACKLIST: List[str] = []
# ---------------------------------------------------
# List of data sources not to be refreshed in druid cluster
# ---------------------------------------------------
DRUID_DATA_SOURCE_BLACKLIST = []
DRUID_DATA_SOURCE_BLACKLIST: List[str] = []
# --------------------------------------------------
# Modules, datasources and middleware to be registered
@ -332,8 +333,8 @@ DEFAULT_MODULE_DS_MAP = OrderedDict(
("superset.connectors.druid.models", ["DruidDatasource"]),
]
)
ADDITIONAL_MODULE_DS_MAP = {}
ADDITIONAL_MIDDLEWARE = []
ADDITIONAL_MODULE_DS_MAP: Dict[str, List[str]] = {}
ADDITIONAL_MIDDLEWARE: List[Callable] = []
# 1) https://docs.python-guide.org/writing/logging/
# 2) https://docs.python.org/2/library/logging.config.html
@ -441,8 +442,8 @@ CELERY_CONFIG = CeleryConfig
# within the app
# OVERRIDE_HTTP_HEADERS: sets override values for HTTP headers. These values will
# override anything set within the app
DEFAULT_HTTP_HEADERS = {}
OVERRIDE_HTTP_HEADERS = {}
DEFAULT_HTTP_HEADERS: Dict[str, Any] = {}
OVERRIDE_HTTP_HEADERS: Dict[str, Any] = {}
# The db id here results in selecting this one as a default in SQL Lab
DEFAULT_DB_ID = None
@ -492,7 +493,7 @@ UPLOADED_CSV_HIVE_NAMESPACE = None
# SQL Lab. The existing context gets updated with this dictionary,
# meaning values for existing keys get overwritten by the content of this
# dictionary.
JINJA_CONTEXT_ADDONS = {}
JINJA_CONTEXT_ADDONS: Dict[str, Callable] = {}
# Roles that are controlled by the API / Superset and should not be changed
# by humans.
@ -521,7 +522,7 @@ SMTP_PASSWORD = "superset"
SMTP_MAIL_FROM = "superset@superset.com"
if not CACHE_DEFAULT_TIMEOUT:
CACHE_DEFAULT_TIMEOUT = CACHE_CONFIG.get("CACHE_DEFAULT_TIMEOUT")
CACHE_DEFAULT_TIMEOUT = CACHE_CONFIG.get("CACHE_DEFAULT_TIMEOUT") # type: ignore
# Whether to bump the logging level to ERROR on the flask_appbuilder package
# Set to False if/when debugging FAB related issues like
@ -541,12 +542,12 @@ PERMISSION_INSTRUCTIONS_LINK = ""
# Integrate external Blueprints to the app by passing them to your
# configuration. These blueprints will get integrated in the app
BLUEPRINTS = []
BLUEPRINTS: List[Callable] = []
# Provide a callable that receives a tracking_url and returns another
# URL. This is used to translate internal Hadoop job tracker URL
# into a proxied one
TRACKING_URL_TRANSFORMER = lambda x: x # noqa: E731
TRACKING_URL_TRANSFORMER = lambda x: x
# Interval between consecutive polls when using Hive Engine
HIVE_POLL_INTERVAL = 5
@ -629,7 +630,7 @@ EMAIL_REPORTS_WEBDRIVER = "firefox"
WEBDRIVER_WINDOW = {"dashboard": (1600, 2000), "slice": (3000, 1200)}
# Any config options to be passed as-is to the webdriver
WEBDRIVER_CONFIGURATION = {}
WEBDRIVER_CONFIGURATION: Dict[Any, Any] = {}
# The base URL to query for accessing the user interface
WEBDRIVER_BASEURL = "http://0.0.0.0:8080/"
@ -697,8 +698,8 @@ if CONFIG_PATH_ENV_VAR in os.environ:
raise
elif importlib.util.find_spec("superset_config"):
try:
from superset_config import * # noqa pylint: disable=import-error
import superset_config # noqa pylint: disable=import-error
from superset_config import * # pylint: disable=import-error
import superset_config # pylint: disable=import-error
print(f"Loaded your LOCAL configuration at [{superset_config.__file__}]")
except Exception:
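As the hunk's docstring notes, any of these defaults can be overridden by a `superset_config` module on the PYTHONPATH, via the star-import at the end of the file; a minimal sketch of such an override (key names are real config entries from this diff):
```python
# superset_config.py -- place anywhere on PYTHONPATH
ROW_LIMIT = 10000
CACHE_CONFIG = {"CACHE_TYPE": "simple", "CACHE_DEFAULT_TIMEOUT": 300}
ENABLE_PROXY_FIX = True
```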

View File

@ -16,7 +16,7 @@
# under the License.
# pylint: disable=C,R,W
import json
from typing import Any, Dict, List, Optional
from typing import Any, Dict, List, Optional, Type
from flask_appbuilder.security.sqla.models import User
from sqlalchemy import and_, Boolean, Column, Integer, String, Text
@ -35,15 +35,17 @@ class BaseDatasource(AuditMixinNullable, ImportMixin):
# ---------------------------------------------------------------
# class attributes to define when deriving BaseDatasource
# ---------------------------------------------------------------
__tablename__ = None # {connector_name}_datasource
type = None # datasource type, str to be defined when deriving this class
baselink = None # url portion pointing to ModelView endpoint
column_class = None # link to derivative of BaseColumn
metric_class = None # link to derivative of BaseMetric
__tablename__: Optional[str] = None # {connector_name}_datasource
type: Optional[ # datasource type, str to be defined when deriving this class
str
] = None
baselink: Optional[str] = None # url portion pointing to ModelView endpoint
column_class: Optional[Type] = None # link to derivative of BaseColumn
metric_class: Optional[Type] = None # link to derivative of BaseMetric
owner_class = None
# Used to do code highlighting when displaying the query in the UI
query_language = None
query_language: Optional[str] = None
name = None # can be a Column or a property pointing to one
@ -341,7 +343,7 @@ class BaseDatasource(AuditMixinNullable, ImportMixin):
class BaseColumn(AuditMixinNullable, ImportMixin):
"""Interface for column"""
__tablename__ = None # {connector_name}_column
__tablename__: Optional[str] = None # {connector_name}_column
id = Column(Integer, primary_key=True)
column_name = Column(String(255), nullable=False)
@ -411,7 +413,7 @@ class BaseMetric(AuditMixinNullable, ImportMixin):
"""Interface for Metrics"""
__tablename__ = None # {connector_name}_metric
__tablename__: Optional[str] = None # {connector_name}_metric
id = Column(Integer, primary_key=True)
metric_name = Column(String(255), nullable=False)
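The pattern above, `None` defaults on class attributes that subclasses must override, is why the new annotations have to be `Optional`; a condensed sketch (the subclass here is hypothetical):
```python
from typing import Optional, Type

class BaseDatasource:
    # Deriving classes overwrite these, so at the base level the only
    # honest annotation for a None default is Optional.
    __tablename__: Optional[str] = None
    baselink: Optional[str] = None
    column_class: Optional[Type] = None

class MyDatasource(BaseDatasource):  # hypothetical connector
    __tablename__ = "my_datasource"
    baselink = "mydatasourcemodelview"
```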

View File

@ -14,5 +14,4 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from . import models # noqa
from . import views # noqa
from . import models, views

View File

@ -16,46 +16,24 @@
# under the License.
# pylint: disable=C,R,W
# pylint: disable=invalid-unary-operand-type
import json
import logging
import re
from collections import OrderedDict
from copy import deepcopy
from datetime import datetime, timedelta
from distutils.version import LooseVersion
import json
import logging
from multiprocessing.pool import ThreadPool
import re
from typing import Dict, Iterable, List, Optional, Set, Tuple, Union
import pandas as pd
import sqlalchemy as sa
from dateutil.parser import parse as dparse
from flask import escape, Markup
from flask_appbuilder import Model
from flask_appbuilder.models.decorators import renders
from flask_appbuilder.security.sqla.models import User
from flask_babel import lazy_gettext as _
import pandas as pd
try:
from pydruid.client import PyDruid
from pydruid.utils.aggregators import count
from pydruid.utils.dimensions import (
MapLookupExtraction,
RegexExtraction,
RegisteredLookupExtraction,
)
from pydruid.utils.filters import Dimension, Filter
from pydruid.utils.having import Aggregation, Having
from pydruid.utils.postaggregator import (
Const,
Field,
HyperUniqueCardinality,
Postaggregator,
Quantile,
Quantiles,
)
import requests
except ImportError:
pass
import sqlalchemy as sa
from sqlalchemy import (
Boolean,
Column,
@ -77,17 +55,37 @@ from superset.models.core import Database
from superset.models.helpers import AuditMixinNullable, ImportMixin, QueryResult
from superset.utils import core as utils, import_datasource
try:
from pydruid.client import PyDruid
from pydruid.utils.aggregators import count
from pydruid.utils.dimensions import (
MapLookupExtraction,
RegexExtraction,
RegisteredLookupExtraction,
)
from pydruid.utils.filters import Dimension, Filter
from pydruid.utils.having import Aggregation, Having
from pydruid.utils.postaggregator import (
Const,
Field,
HyperUniqueCardinality,
Postaggregator,
Quantile,
Quantiles,
)
import requests
except ImportError:
pass
try:
from superset.utils.core import DimSelector, DTTM_ALIAS, flasher
except ImportError:
pass
DRUID_TZ = conf.get("DRUID_TZ")
POST_AGG_TYPE = "postagg"
metadata = Model.metadata # pylint: disable=no-member
try:
# Postaggregator might not have been imported.
class JavascriptPostAggregator(Postaggregator):
@ -111,7 +109,6 @@ try:
except NameError:
pass
# Function wrapper because bound methods cannot
# be passed to processes
def _fetch_metadata_for(datasource):
@ -137,14 +134,14 @@ class DruidCluster(Model, AuditMixinNullable, ImportMixin):
broker_user = Column(String(255))
broker_pass = Column(EncryptedType(String(255), conf.get("SECRET_KEY")))
export_fields = (
export_fields = [
"cluster_name",
"broker_host",
"broker_port",
"broker_endpoint",
"cache_timeout",
"broker_user",
)
]
update_from_object_fields = export_fields
export_children = ["datasources"]
@ -188,7 +185,7 @@ class DruidCluster(Model, AuditMixinNullable, ImportMixin):
auth = requests.auth.HTTPBasicAuth(self.broker_user, self.broker_pass)
return json.loads(requests.get(endpoint, auth=auth).text)["version"]
@property # noqa: T484
@property # type: ignore
@utils.memoized
def druid_version(self) -> str:
return self.get_druid_version()
@ -310,7 +307,7 @@ class DruidColumn(Model, BaseColumn):
)
dimension_spec_json = Column(Text)
export_fields = (
export_fields = [
"datasource_id",
"column_name",
"is_active",
@ -320,7 +317,7 @@ class DruidColumn(Model, BaseColumn):
"description",
"dimension_spec_json",
"verbose_name",
)
]
update_from_object_fields = export_fields
export_parent = "datasource"
@ -332,9 +329,10 @@ class DruidColumn(Model, BaseColumn):
return self.dimension_spec_json
@property
def dimension_spec(self) -> Optional[Dict]: # noqa: T484
def dimension_spec(self) -> Optional[Dict]:
if self.dimension_spec_json:
return json.loads(self.dimension_spec_json)
return None
def get_metrics(self) -> Dict[str, "DruidMetric"]:
metrics = {
@ -397,7 +395,7 @@ class DruidMetric(Model, BaseMetric):
)
json = Column(Text, nullable=False)
export_fields = (
export_fields = [
"metric_name",
"verbose_name",
"metric_type",
@ -406,7 +404,7 @@ class DruidMetric(Model, BaseMetric):
"description",
"d3format",
"warning_text",
)
]
update_from_object_fields = export_fields
export_parent = "datasource"
@ -490,7 +488,7 @@ class DruidDatasource(Model, BaseDatasource):
owner_class, secondary=druiddatasource_user, backref="druiddatasources"
)
export_fields = (
export_fields = [
"datasource_name",
"is_hidden",
"description",
@ -500,7 +498,7 @@ class DruidDatasource(Model, BaseDatasource):
"cache_timeout",
"params",
"filter_select_enabled",
)
]
update_from_object_fields = export_fields
export_parent = "cluster"
@ -519,7 +517,7 @@ class DruidDatasource(Model, BaseDatasource):
return [c.column_name for c in self.columns if c.is_num]
@property
def name(self) -> str:
def name(self) -> str: # type: ignore
return self.datasource_name
@property
@ -825,7 +823,7 @@ class DruidDatasource(Model, BaseDatasource):
granularity["period"] = period_name
else:
granularity["type"] = "duration"
granularity["duration"] = (
granularity["duration"] = ( # type: ignore
utils.parse_human_timedelta(period_name).total_seconds() * 1000
)
return granularity
@ -928,7 +926,7 @@ class DruidDatasource(Model, BaseDatasource):
metrics: List[Union[Dict, str]],
metrics_dict: Dict[str, DruidMetric],
druid_version=None,
) -> Tuple[OrderedDict, OrderedDict]: # noqa: T484
) -> Tuple[OrderedDict, OrderedDict]:
# Separate metrics into those that are aggregations
# and those that are post aggregations
saved_agg_names = set()
@ -937,21 +935,21 @@ class DruidDatasource(Model, BaseDatasource):
for metric in metrics:
if utils.is_adhoc_metric(metric):
adhoc_agg_configs.append(metric)
elif metrics_dict[metric].metric_type != POST_AGG_TYPE: # noqa: T484
elif metrics_dict[metric].metric_type != POST_AGG_TYPE: # type: ignore
saved_agg_names.add(metric)
else:
postagg_names.append(metric)
# Create the post aggregations, maintain order since postaggs
# may depend on previous ones
post_aggs = OrderedDict() # noqa: T484
post_aggs: "OrderedDict[str, Postaggregator]" = OrderedDict()
visited_postaggs = set()
for postagg_name in postagg_names:
postagg = metrics_dict[postagg_name] # noqa: T484
postagg = metrics_dict[postagg_name] # type: ignore
visited_postaggs.add(postagg_name)
DruidDatasource.resolve_postagg(
postagg, post_aggs, saved_agg_names, visited_postaggs, metrics_dict
)
aggs = DruidDatasource.get_aggregations( # noqa: T484
aggs = DruidDatasource.get_aggregations( # type: ignore
metrics_dict, saved_agg_names, adhoc_agg_configs
)
return aggs, post_aggs
@ -1042,7 +1040,7 @@ class DruidDatasource(Model, BaseDatasource):
@staticmethod
def get_aggregations(
metrics_dict: Dict, saved_metrics: Iterable[str], adhoc_metrics: List[Dict] = []
metrics_dict: Dict, saved_metrics: Set[str], adhoc_metrics: List[Dict] = []
) -> OrderedDict:
"""
Returns a dictionary of aggregation metric names to aggregation json objects
@ -1130,14 +1128,14 @@ class DruidDatasource(Model, BaseDatasource):
):
metric["column"]["type"] = "DOUBLE"
def run_query( # noqa / druid
def run_query( # druid
self,
groupby,
metrics,
granularity,
from_dttm,
to_dttm,
filter=None, # noqa
filter=None,
is_timeseries=True,
timeseries_limit=None,
timeseries_limit_metric=None,
@ -1145,7 +1143,7 @@ class DruidDatasource(Model, BaseDatasource):
inner_from_dttm=None,
inner_to_dttm=None,
orderby=None,
extras=None, # noqa
extras=None,
columns=None,
phase=2,
client=None,
@ -1224,7 +1222,7 @@ class DruidDatasource(Model, BaseDatasource):
del qry["dimensions"]
client.timeseries(**qry)
elif not having_filters and len(groupby) == 1 and order_desc:
dim = list(qry.get("dimensions"))[0] # noqa: T484
dim = list(qry["dimensions"])[0]
logging.info("Running two-phase topn query for dimension [{}]".format(dim))
pre_qry = deepcopy(qry)
if timeseries_limit_metric:
@ -1443,7 +1441,7 @@ class DruidDatasource(Model, BaseDatasource):
return (col, extraction_fn)
@classmethod
def get_filters(cls, raw_filters, num_cols, columns_dict) -> Filter: # noqa: T484
def get_filters(cls, raw_filters, num_cols, columns_dict) -> Filter:
"""Given Superset filter data structure, returns pydruid Filter(s)"""
filters = None
for flt in raw_filters:
@ -1561,9 +1559,9 @@ class DruidDatasource(Model, BaseDatasource):
alphaNumeric=is_numeric_col,
)
elif op == "IS NULL":
cond = Dimension(col) == None # NOQA
cond = Dimension(col) is None
elif op == "IS NOT NULL":
cond = Dimension(col) != None # NOQA
cond = Dimension(col) is not None
if filters:
filters = Filter(type="and", fields=[cond, filters])
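One `# type: ignore` in this file is worth calling out: stacking a caching decorator beneath `@property` (as on `druid_version` above) is something mypy rejects as a decorated property; a sketch with a hypothetical `memoized` standing in for `utils.memoized`:
```python
import functools

def memoized(func):
    # hypothetical stand-in for superset.utils.memoized
    return functools.lru_cache(maxsize=None)(func)

class Cluster:
    @property  # type: ignore  # mypy: "Decorated property not supported"
    @memoized
    def druid_version(self) -> str:
        return self.get_druid_version()

    def get_druid_version(self) -> str:
        return "0.15.1"  # illustrative value
```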

View File

@ -15,17 +15,16 @@
# specific language governing permissions and limitations
# under the License.
# pylint: disable=C,R,W
from datetime import datetime
import json
import logging
from datetime import datetime
from flask import flash, Markup, redirect
from flask_appbuilder import CompactCRUDMixin, expose
from flask_appbuilder.fieldwidgets import Select2Widget
from flask_appbuilder.models.sqla.interface import SQLAInterface
from flask_appbuilder.security.decorators import has_access
from flask_babel import gettext as __
from flask_babel import lazy_gettext as _
from flask_babel import gettext as __, lazy_gettext as _
from wtforms.ext.sqlalchemy.fields import QuerySelectField
from superset import appbuilder, db, security_manager
@ -42,10 +41,11 @@ from superset.views.base import (
validate_json,
YamlExportMixin,
)
from . import models
class DruidColumnInlineView(CompactCRUDMixin, SupersetModelView): # noqa
class DruidColumnInlineView(CompactCRUDMixin, SupersetModelView):
datamodel = SQLAInterface(models.DruidColumn)
list_title = _("Columns")
@ -134,7 +134,7 @@ class DruidColumnInlineView(CompactCRUDMixin, SupersetModelView): # noqa
appbuilder.add_view_no_menu(DruidColumnInlineView)
class DruidMetricInlineView(CompactCRUDMixin, SupersetModelView): # noqa
class DruidMetricInlineView(CompactCRUDMixin, SupersetModelView):
datamodel = SQLAInterface(models.DruidMetric)
list_title = _("Metrics")
@ -189,7 +189,7 @@ class DruidMetricInlineView(CompactCRUDMixin, SupersetModelView): # noqa
appbuilder.add_view_no_menu(DruidMetricInlineView)
class DruidClusterModelView(SupersetModelView, DeleteMixin, YamlExportMixin): # noqa
class DruidClusterModelView(SupersetModelView, DeleteMixin, YamlExportMixin):
datamodel = SQLAInterface(models.DruidCluster)
list_title = _("Druid Clusters")
@ -268,9 +268,7 @@ appbuilder.add_view(
)
class DruidDatasourceModelView(
DatasourceModelView, DeleteMixin, YamlExportMixin
): # noqa
class DruidDatasourceModelView(DatasourceModelView, DeleteMixin, YamlExportMixin):
datamodel = SQLAInterface(models.DruidDatasource)
list_title = _("Druid Datasources")

View File

@ -14,5 +14,4 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from . import models # noqa
from . import views # noqa
from . import models, views

View File

@ -15,17 +15,18 @@
# specific language governing permissions and limitations
# under the License.
# pylint: disable=C,R,W
from collections import OrderedDict
from datetime import datetime
import logging
import re
from collections import OrderedDict
from datetime import datetime
from typing import Any, Dict, List, NamedTuple, Optional, Union
import pandas as pd
import sqlalchemy as sa
import sqlparse
from flask import escape, Markup
from flask_appbuilder import Model
from flask_babel import lazy_gettext as _
import pandas as pd
import sqlalchemy as sa
from sqlalchemy import (
and_,
asc,
@ -47,7 +48,6 @@ from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy.schema import UniqueConstraint
from sqlalchemy.sql import column, ColumnElement, literal_column, table, text
from sqlalchemy.sql.expression import Label, Select, TextAsFrom
import sqlparse
from superset import app, db, security_manager
from superset.connectors.base.models import BaseColumn, BaseDatasource, BaseMetric
@ -126,7 +126,7 @@ class TableColumn(Model, BaseColumn):
expression = Column(Text)
python_date_format = Column(String(255))
export_fields = (
export_fields = [
"table_id",
"column_name",
"verbose_name",
@ -138,7 +138,7 @@ class TableColumn(Model, BaseColumn):
"expression",
"description",
"python_date_format",
)
]
update_from_object_fields = [s for s in export_fields if s not in ("table_id",)]
export_parent = "table"
@ -162,7 +162,7 @@ class TableColumn(Model, BaseColumn):
self, start_dttm: DateTime, end_dttm: DateTime
) -> ColumnElement:
col = self.get_sqla_col(label="__time")
l = [] # noqa: E741
l = []
if start_dttm:
l.append(col >= text(self.dttm_sql_literal(start_dttm)))
if end_dttm:
@ -238,7 +238,7 @@ class SqlMetric(Model, BaseMetric):
)
expression = Column(Text, nullable=False)
export_fields = (
export_fields = [
"metric_name",
"verbose_name",
"metric_type",
@ -247,7 +247,7 @@ class SqlMetric(Model, BaseMetric):
"description",
"d3format",
"warning_text",
)
]
update_from_object_fields = list(
[s for s in export_fields if s not in ("table_id",)]
)
@ -325,7 +325,7 @@ class SqlaTable(Model, BaseDatasource):
baselink = "tablemodelview"
export_fields = (
export_fields = [
"table_name",
"main_dttm_col",
"description",
@ -339,7 +339,7 @@ class SqlaTable(Model, BaseDatasource):
"template_params",
"filter_select_enabled",
"fetch_values_predicate",
)
]
update_from_object_fields = [
f for f in export_fields if f not in ("table_name", "database_id")
]
@ -427,7 +427,7 @@ class SqlaTable(Model, BaseDatasource):
return ("[{obj.database}].[{obj.table_name}]" "(id:{obj.id})").format(obj=self)
@property
def name(self) -> str:
def name(self) -> str: # type: ignore
if not self.schema:
return self.table_name
return "{}.{}".format(self.schema, self.table_name)
@ -440,7 +440,7 @@ class SqlaTable(Model, BaseDatasource):
@property
def dttm_cols(self) -> List:
l = [c.column_name for c in self.columns if c.is_dttm] # noqa: E741
l = [c.column_name for c in self.columns if c.is_dttm]
if self.main_dttm_col and self.main_dttm_col not in l:
l.append(self.main_dttm_col)
return l
@ -618,7 +618,7 @@ class SqlaTable(Model, BaseDatasource):
granularity,
from_dttm,
to_dttm,
filter=None, # noqa
filter=None,
is_timeseries=True,
timeseries_limit=15,
timeseries_limit_metric=None,
@ -759,7 +759,7 @@ class SqlaTable(Model, BaseDatasource):
if op in ("in", "not in"):
cond = col_obj.get_sqla_col().in_(eq)
if "<NULL>" in eq:
cond = or_(cond, col_obj.get_sqla_col() == None) # noqa
cond = or_(cond, col_obj.get_sqla_col() == None)
if op == "not in":
cond = ~cond
where_clause_and.append(cond)
@ -781,9 +781,9 @@ class SqlaTable(Model, BaseDatasource):
elif op == "LIKE":
where_clause_and.append(col_obj.get_sqla_col().like(eq))
elif op == "IS NULL":
where_clause_and.append(col_obj.get_sqla_col() == None) # noqa
where_clause_and.append(col_obj.get_sqla_col() == None)
elif op == "IS NOT NULL":
where_clause_and.append(col_obj.get_sqla_col() != None) # noqa
where_clause_and.append(col_obj.get_sqla_col() != None)
if extras:
where = extras.get("where")
if where:
@ -976,7 +976,7 @@ class SqlaTable(Model, BaseDatasource):
).format(self.table_name)
)
M = SqlMetric # noqa
M = SqlMetric
metrics = []
any_date_col = None
db_engine_spec = self.database.db_engine_spec

View File

@ -25,8 +25,7 @@ from flask_appbuilder.actions import action
from flask_appbuilder.fieldwidgets import Select2Widget
from flask_appbuilder.models.sqla.interface import SQLAInterface
from flask_appbuilder.security.decorators import has_access
from flask_babel import gettext as __
from flask_babel import lazy_gettext as _
from flask_babel import gettext as __, lazy_gettext as _
from wtforms.ext.sqlalchemy.fields import QuerySelectField
from wtforms.validators import Regexp
@ -41,12 +40,13 @@ from superset.views.base import (
SupersetModelView,
YamlExportMixin,
)
from . import models
logger = logging.getLogger(__name__)
class TableColumnInlineView(CompactCRUDMixin, SupersetModelView): # noqa
class TableColumnInlineView(CompactCRUDMixin, SupersetModelView):
datamodel = SQLAInterface(models.TableColumn)
list_title = _("Columns")
@ -162,7 +162,7 @@ class TableColumnInlineView(CompactCRUDMixin, SupersetModelView): # noqa
appbuilder.add_view_no_menu(TableColumnInlineView)
class SqlMetricInlineView(CompactCRUDMixin, SupersetModelView): # noqa
class SqlMetricInlineView(CompactCRUDMixin, SupersetModelView):
datamodel = SQLAInterface(models.SqlMetric)
list_title = _("Metrics")
@ -224,7 +224,7 @@ class SqlMetricInlineView(CompactCRUDMixin, SupersetModelView): # noqa
appbuilder.add_view_no_menu(SqlMetricInlineView)
class TableModelView(DatasourceModelView, DeleteMixin, YamlExportMixin): # noqa
class TableModelView(DatasourceModelView, DeleteMixin, YamlExportMixin):
datamodel = SQLAInterface(models.SqlaTable)
list_title = _("Tables")

View File

@ -22,8 +22,8 @@ TODO(bkyryliuk): add support for the conventions like: *_dim or dim_*
TODO(bkyryliuk): recognize integer encoded enums.
"""
from datetime import date, datetime
import logging
from datetime import date, datetime
import numpy as np
import pandas as pd

View File

@ -28,10 +28,10 @@ at all. The classes here will use a common interface to specify all this.
The general idea is to use static classes and an inheritance scheme.
"""
from importlib import import_module
import inspect
from pathlib import Path
import pkgutil
from importlib import import_module
from pathlib import Path
from typing import Dict, Type
from superset.db_engine_specs.base import BaseEngineSpec

View File

@ -15,16 +15,17 @@
# specific language governing permissions and limitations
# under the License.
# pylint: disable=unused-argument
from contextlib import closing
from datetime import datetime
import hashlib
import os
import re
from contextlib import closing
from datetime import datetime
from typing import Any, Dict, List, NamedTuple, Optional, Tuple, TYPE_CHECKING, Union
import pandas as pd
import sqlparse
from flask import g
from flask_babel import lazy_gettext as _
import pandas as pd
from sqlalchemy import column, DateTime, select
from sqlalchemy.engine import create_engine
from sqlalchemy.engine.base import Engine
@ -34,7 +35,6 @@ from sqlalchemy.ext.compiler import compiles
from sqlalchemy.sql import quoted_name, text
from sqlalchemy.sql.expression import ColumnClause, ColumnElement, Select, TextAsFrom
from sqlalchemy.types import TypeEngine
import sqlparse
from werkzeug.utils import secure_filename
from superset import app, db, sql_parse
@ -339,7 +339,7 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
return sql
@classmethod
def get_limit_from_sql(cls, sql: str) -> int:
def get_limit_from_sql(cls, sql: str) -> Optional[int]:
"""
Extract limit from SQL query

View File

@ -14,9 +14,9 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from datetime import datetime
import hashlib
import re
from datetime import datetime
from typing import Any, Dict, List, Tuple
import pandas as pd
@ -68,7 +68,7 @@ class BigQueryEngineSpec(BaseEngineSpec):
def fetch_data(cls, cursor, limit: int) -> List[Tuple]:
data = super(BigQueryEngineSpec, cls).fetch_data(cursor, limit)
if data and type(data[0]).__name__ == "Row":
data = [r.values() for r in data]
data = [r.values() for r in data] # type: ignore
return data
@staticmethod

View File

@ -44,5 +44,5 @@ class ExasolEngineSpec(BaseEngineSpec):
data = super().fetch_data(cursor, limit)
# Lists of `pyodbc.Row` need to be unpacked further
if data and type(data[0]).__name__ == "Row":
data = [[value for value in row] for row in data]
data = [tuple(row) for row in data]
return data
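Both this hunk and the MSSQL one below replace element-by-element list builds with `tuple(row)`; a behavior sketch using a namedtuple as a stand-in for the driver's `Row` type:
```python
from collections import namedtuple

Row = namedtuple("Row", ["name", "value"])  # stand-in for pyodbc.Row

data = [Row("a", 1), Row("b", 2)]
if data and type(data[0]).__name__ == "Row":
    data = [tuple(row) for row in data]  # unpack Row objects to plain tuples

print(data)  # [('a', 1), ('b', 2)]
```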

View File

@ -14,11 +14,11 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from datetime import datetime
import logging
import os
import re
import time
from datetime import datetime
from typing import Any, Dict, List, Optional, Tuple
from urllib import parse
@ -415,7 +415,7 @@ class HiveEngineSpec(PrestoEngineSpec):
return configuration
@staticmethod
def execute(
def execute( # type: ignore
cursor, query: str, async_: bool = False
): # pylint: disable=arguments-differ
kwargs = {"async": async_}

View File

@ -14,8 +14,8 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from datetime import datetime
import re
from datetime import datetime
from typing import List, Optional, Tuple
from sqlalchemy.engine.interfaces import Dialect
@ -26,7 +26,6 @@ from superset.db_engine_specs.base import BaseEngineSpec, LimitMethod
class MssqlEngineSpec(BaseEngineSpec):
engine = "mssql"
epoch_to_dttm = "dateadd(S, {col}, '1970-01-01')"
limit_method = LimitMethod.WRAP_SQL
max_column_name_length = 128
@ -46,6 +45,10 @@ class MssqlEngineSpec(BaseEngineSpec):
"P1Y": "DATEADD(year, DATEDIFF(year, 0, {col}), 0)",
}
@classmethod
def epoch_to_dttm(cls):
return "dateadd(S, {col}, '1970-01-01')"
@classmethod
def convert_dttm(cls, target_type: str, dttm: datetime) -> str:
return "CONVERT(DATETIME, '{}', 126)".format(dttm.isoformat())
@ -54,7 +57,7 @@ class MssqlEngineSpec(BaseEngineSpec):
def fetch_data(cls, cursor, limit: int) -> List[Tuple]:
data = super().fetch_data(cursor, limit)
if data and type(data[0]).__name__ == "Row":
data = [[elem for elem in r] for r in data]
data = [tuple(row) for row in data]
return data
column_types = [

View File

@ -14,14 +14,14 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from collections import defaultdict, deque
from contextlib import closing
from datetime import datetime
from distutils.version import StrictVersion
import logging
import re
import textwrap
import time
from collections import defaultdict, deque
from contextlib import closing
from datetime import datetime
from distutils.version import StrictVersion
from typing import Any, cast, Dict, List, Optional, Tuple, TYPE_CHECKING
from urllib import parse
@ -787,14 +787,14 @@ class PrestoEngineSpec(BaseEngineSpec):
limit_clause = "LIMIT {}".format(limit) if limit else ""
order_by_clause = ""
if order_by:
l = [] # noqa: E741
l = []
for field, desc in order_by:
l.append(field + " DESC" if desc else "")
order_by_clause = "ORDER BY " + ", ".join(l)
where_clause = ""
if filters:
l = [] # noqa: E741
l = []
for field, value in filters.items():
l.append(f"{field} = '{value}'")
where_clause = "WHERE " + " AND ".join(l)
@ -824,7 +824,7 @@ class PrestoEngineSpec(BaseEngineSpec):
def where_latest_partition( # pylint: disable=too-many-arguments
cls,
table_name: str,
schema: str,
schema: Optional[str],
database,
query: Select,
columns: Optional[List] = None,
@ -856,7 +856,7 @@ class PrestoEngineSpec(BaseEngineSpec):
@classmethod
def latest_partition(
cls, table_name: str, schema: str, database, show_first: bool = False
cls, table_name: str, schema: Optional[str], database, show_first: bool = False
):
"""Returns col name and the latest (max) partition value for a table

View File

@ -15,7 +15,7 @@
# specific language governing permissions and limitations
# under the License.
from datetime import datetime
from typing import List, TYPE_CHECKING
from typing import List, Optional, TYPE_CHECKING
from sqlalchemy.engine.reflection import Inspector
@ -83,7 +83,7 @@ class SqliteEngineSpec(BaseEngineSpec):
@classmethod
def get_table_names(
cls, database: "Database", inspector: Inspector, schema: str
cls, database: "Database", inspector: Inspector, schema: Optional[str]
) -> List[str]:
"""Need to disregard the schema for Sqlite"""
return sorted(inspector.get_table_names())

View File

@ -27,8 +27,8 @@ def fetch_logs(self, max_rows=1024, orientation=None):
.. note::
This is not a part of DB-API.
"""
from pyhive import hive # noqa
from TCLIService import ttypes # noqa
from pyhive import hive
from TCLIService import ttypes
from thrift import Thrift # pylint: disable=import-error
orientation = orientation or ttypes.TFetchOrientation.FETCH_NEXT

View File

@ -14,20 +14,20 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from .bart_lines import load_bart_lines # noqa
from .birth_names import load_birth_names # noqa
from .country_map import load_country_map_data # noqa
from .css_templates import load_css_templates # noqa
from .deck import load_deck_dash # noqa
from .energy import load_energy # noqa
from .flights import load_flights # noqa
from .long_lat import load_long_lat_data # noqa
from .misc_dashboard import load_misc_dashboard # noqa
from .multi_line import load_multi_line # noqa
from .multiformat_time_series import load_multiformat_time_series # noqa
from .paris import load_paris_iris_geojson # noqa
from .random_time_series import load_random_time_series_data # noqa
from .sf_population_polygons import load_sf_population_polygons # noqa
from .tabbed_dashboard import load_tabbed_dashboard # noqa
from .unicode_test_data import load_unicode_test_data # noqa
from .world_bank import load_world_bank_health_n_pop # noqa
from .bart_lines import load_bart_lines
from .birth_names import load_birth_names
from .country_map import load_country_map_data
from .css_templates import load_css_templates
from .deck import load_deck_dash
from .energy import load_energy
from .flights import load_flights
from .long_lat import load_long_lat_data
from .misc_dashboard import load_misc_dashboard
from .multi_line import load_multi_line
from .multiformat_time_series import load_multiformat_time_series
from .paris import load_paris_iris_geojson
from .random_time_series import load_random_time_series_data
from .sf_population_polygons import load_sf_population_polygons
from .tabbed_dashboard import load_tabbed_dashboard
from .unicode_test_data import load_unicode_test_data
from .world_bank import load_world_bank_health_n_pop

View File

@ -22,6 +22,7 @@ from sqlalchemy import String, Text
from superset import db
from superset.utils.core import get_example_database
from .helpers import get_example_data, TBL

View File

@ -24,6 +24,7 @@ from sqlalchemy.sql import column
from superset import db, security_manager
from superset.connectors.sqla.models import SqlMetric, TableColumn
from superset.utils.core import get_example_database
from .helpers import (
config,
Dash,

View File

@ -15,7 +15,9 @@
# specific language governing permissions and limitations
# under the License.
"""This module contains data related to countries and is used for geo mapping"""
countries = [
from typing import Any, Dict, List
countries: List[Dict[str, Any]] = [
{
"name": "Angola",
"area": 1246700,
@ -2488,7 +2490,7 @@ countries = [
},
]
all_lookups = {}
all_lookups: Dict[str, Dict[str, Dict[str, Any]]] = {}
lookups = ["cioc", "cca2", "cca3", "name"]
for lookup in lookups:
all_lookups[lookup] = {}

View File

@ -23,6 +23,7 @@ from sqlalchemy.sql import column
from superset import db
from superset.connectors.sqla.models import SqlMetric
from superset.utils import core as utils
from .helpers import (
get_example_data,
get_slice_json,

View File

@ -18,6 +18,7 @@
import json
from superset import db
from .helpers import Dash, get_slice_json, merge_slice, Slice, TBL, update_slice_ids
COLOR_RED = {"r": 205, "g": 0, "b": 3, "a": 0.82}

View File

@ -25,6 +25,7 @@ from sqlalchemy.sql import column
from superset import db
from superset.connectors.sqla.models import SqlMetric
from superset.utils import core as utils
from .helpers import get_example_data, merge_slice, misc_dash_slices, Slice, TBL

View File

@ -19,6 +19,7 @@ from sqlalchemy import DateTime
from superset import db
from superset.utils import core as utils
from .helpers import get_example_data, TBL

View File

@ -16,11 +16,12 @@
# under the License.
"""Loads datasets, dashboards and slices in a new superset instance"""
# pylint: disable=C,R,W
from io import BytesIO
import json
import os
from urllib import request
import zlib
from io import BytesIO
from typing import Set
from urllib import request
from superset import app, db
from superset.connectors.connector_registry import ConnectorRegistry
@ -37,9 +38,9 @@ TBL = ConnectorRegistry.sources["table"]
config = app.config
EXAMPLES_FOLDER = os.path.join(config.get("BASE_DIR"), "examples")
EXAMPLES_FOLDER = os.path.join(config["BASE_DIR"], "examples")
misc_dash_slices = set() # slices assembled in a 'Misc Chart' dashboard
misc_dash_slices: Set[str] = set() # slices assembled in a 'Misc Chart' dashboard
def update_slice_ids(layout_dict, slices):

View File

@ -23,6 +23,7 @@ from sqlalchemy import DateTime, Float, String
from superset import db
from superset.utils import core as utils
from .helpers import (
get_example_data,
get_slice_json,

View File

@ -17,8 +17,8 @@
import json
import textwrap
from superset import db
from .helpers import Dash, misc_dash_slices, Slice, update_slice_ids
DASH_SLUG = "misc_charts"

View File

@ -17,6 +17,7 @@
import json
from superset import db
from .birth_names import load_birth_names
from .helpers import merge_slice, misc_dash_slices, Slice
from .world_bank import load_world_bank_health_n_pop

View File

@ -20,6 +20,7 @@ from sqlalchemy import BigInteger, Date, DateTime, String
from superset import db
from superset.utils.core import get_example_database
from .helpers import (
config,
get_example_data,

View File

@ -21,6 +21,7 @@ from sqlalchemy import String, Text
from superset import db
from superset.utils import core as utils
from .helpers import get_example_data, TBL

View File

@ -20,6 +20,7 @@ from sqlalchemy import DateTime
from superset import db
from superset.utils import core as utils
from .helpers import config, get_example_data, get_slice_json, merge_slice, Slice, TBL

View File

@ -21,6 +21,7 @@ from sqlalchemy import BigInteger, Float, Text
from superset import db
from superset.utils import core as utils
from .helpers import get_example_data, TBL

View File

@ -20,6 +20,7 @@ import json
import textwrap
from superset import db
from .helpers import Dash, Slice, update_slice_ids

View File

@ -23,6 +23,7 @@ from sqlalchemy import Date, Float, String
from superset import db
from superset.utils import core as utils
from .helpers import (
config,
Dash,

View File

@ -27,6 +27,7 @@ from sqlalchemy.sql import column
from superset import db
from superset.connectors.sqla.models import SqlMetric
from superset.utils import core as utils
from .helpers import (
config,
Dash,

View File

@ -16,13 +16,13 @@
# under the License.
# pylint: disable=C,R,W
"""Defines the templating context for SQL Lab"""
from datetime import datetime, timedelta
import inspect
import json
import random
import time
from typing import Any, List, Optional, Tuple
import uuid
from datetime import datetime, timedelta
from typing import Any, List, Optional, Tuple
from dateutil.relativedelta import relativedelta
from flask import g, request

View File

@ -19,6 +19,7 @@ import logging
from logging.config import fileConfig
from alembic import context
from flask import current_app
from flask_appbuilder import Base
from sqlalchemy import engine_from_config, pool
@ -31,10 +32,6 @@ config = context.config
fileConfig(config.config_file_name)
logger = logging.getLogger("alembic.env")
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
from flask import current_app
config.set_main_option(
"sqlalchemy.url", current_app.config.get("SQLALCHEMY_DATABASE_URI")

View File

@ -21,8 +21,8 @@ Revises: 55e910a74826
Create Date: 2018-11-05 08:42:56.181012
"""
from alembic import op
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "0b1f1ab473c0"
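This hunk and the dozens of migration-file hunks that follow are the same mechanical isort change: under isort's defaults, plain `import` statements sort ahead of `from ...` imports within a section, so the two lines swap in each file:
```python
import sqlalchemy as sa  # straight imports first within the section
from alembic import op   # then "from" imports
```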

View File

@ -26,8 +26,8 @@ Create Date: 2018-08-06 14:38:18.965248
revision = "0c5070e96b57"
down_revision = "7fcdcde0761c"
from alembic import op
import sqlalchemy as sa
from alembic import op
def upgrade():

View File

@ -21,9 +21,8 @@ Revises: def97f26fdfb
Create Date: 2019-09-08 21:50:58.200229
"""
from alembic import op
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "11c737c17cc6"

View File

@ -21,10 +21,12 @@ Revises: 956a063c52b3
Create Date: 2016-05-27 15:03:32.980343
"""
import logging
from alembic import op
from superset import db
from superset.utils.core import generic_find_constraint_name
import logging
# revision identifiers, used by Alembic.
revision = "1226819ee0e3"

View File

@ -26,8 +26,8 @@ Create Date: 2016-12-06 17:40:40.389652
revision = "1296d28ec131"
down_revision = "6414e83d82b7"
from alembic import op
import sqlalchemy as sa
from alembic import op
def upgrade():

View File

@ -26,8 +26,8 @@ Create Date: 2015-12-14 13:37:17.374852
revision = "12d55656cbca"
down_revision = "55179c7f25c7"
from alembic import op
import sqlalchemy as sa
from alembic import op
def upgrade():

View File

@ -21,8 +21,8 @@ Revises: f231d82b9b26
Create Date: 2018-04-03 08:19:34.098789
"""
from alembic import op
import sqlalchemy as sa
from alembic import op
from sqlalchemy.ext.declarative import declarative_base
from superset import db

View File

@ -26,8 +26,8 @@ Create Date: 2019-01-18 14:56:26.307684
revision = "18dc26817ad2"
down_revision = ("8b70aa3d0f87", "a33a03f16c4a")
from alembic import op
import sqlalchemy as sa
from alembic import op
def upgrade():

View File

@ -21,8 +21,8 @@ Revises: 430039611635
Create Date: 2016-03-13 21:30:24.833107
"""
from alembic import op
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "18e88e1cc004"

View File

@ -26,8 +26,8 @@ Create Date: 2017-09-15 15:09:40.495345
revision = "19a814813610"
down_revision = "ca69c70ec99b"
from alembic import op
import sqlalchemy as sa
from alembic import op
def upgrade():

View File

@ -23,8 +23,8 @@ Create Date: 2018-08-13 11:30:07.101702
"""
from alembic import op
import sqlalchemy as sa
from alembic import op
from superset.utils.core import MediumText

View File

@ -26,8 +26,8 @@ Create Date: 2015-12-04 09:42:16.973264
revision = "1a48a5411020"
down_revision = "289ce07647b"
from alembic import op
import sqlalchemy as sa
from alembic import op
def upgrade():

View File

@ -25,8 +25,8 @@ Create Date: 2016-03-25 14:35:44.642576
revision = "1d2ddd543133"
down_revision = "d2424a248d63"
from alembic import op
import sqlalchemy as sa
from alembic import op
def upgrade():

View File

@ -21,8 +21,8 @@ Revises: 3dda56f1c4c6
Create Date: 2018-07-16 18:04:07.764659
"""
from alembic import op
import sqlalchemy as sa
from alembic import op
from sqlalchemy.sql import expression
# revision identifiers, used by Alembic.

View File

@ -26,8 +26,8 @@ Create Date: 2015-10-05 22:11:00.537054
revision = "1e2841a4128"
down_revision = "5a7bad26f2a7"
from alembic import op
import sqlalchemy as sa
from alembic import op
def upgrade():

View File

@ -22,9 +22,10 @@ Create Date: 2019-09-19 13:40:25.293907
"""
import re
from alembic import op
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, Integer, String, Text
from sqlalchemy.ext.declarative import declarative_base
from superset import db
from superset.utils.core import MediumText

View File

@ -26,8 +26,8 @@ Create Date: 2015-12-15 17:02:45.128709
revision = "2591d77e9831"
down_revision = "12d55656cbca"
from alembic import op
import sqlalchemy as sa
from alembic import op
def upgrade():

View File

@ -27,12 +27,13 @@ revision = "27ae655e4247"
down_revision = "d8bc074f7aad"
from alembic import op
from superset import db
from sqlalchemy.ext.declarative import declarative_base
from flask_appbuilder.models.mixins import AuditMixin
from sqlalchemy.orm import relationship
from flask_appbuilder import Model
from sqlalchemy import Column, Integer, ForeignKey, Table
from flask_appbuilder.models.mixins import AuditMixin
from sqlalchemy import Column, ForeignKey, Integer, Table
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from superset import db
Base = declarative_base()

View File

@ -22,8 +22,8 @@ Create Date: 2015-11-21 11:18:00.650587
"""
from alembic import op
import sqlalchemy as sa
from alembic import op
from sqlalchemy_utils import EncryptedType
# revision identifiers, used by Alembic.

View File

@ -26,8 +26,8 @@ Create Date: 2015-10-19 20:54:00.565633
revision = "2929af7925ed"
down_revision = "1e2841a4128"
from alembic import op
import sqlalchemy as sa
from alembic import op
def upgrade():

View File

@ -21,8 +21,8 @@ Revises: a6c18f869a4e
Create Date: 2017-03-29 15:04:35.734190
"""
from alembic import op
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "2fcdcb35e487"

View File

@ -28,8 +28,8 @@ down_revision = "f231d82b9b26"
from datetime import date
from alembic import op
import sqlalchemy as sa
from alembic import op
def upgrade():

View File

@ -26,8 +26,8 @@ Create Date: 2015-12-04 11:16:58.226984
revision = "315b3f4da9b0"
down_revision = "1a48a5411020"
from alembic import op
import sqlalchemy as sa
from alembic import op
def upgrade():

View File

@ -14,12 +14,6 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from alembic import op
import sqlalchemy as sa
from superset import db
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, Integer, String
"""update slice model
Revision ID: 33d996bcc382
@ -27,6 +21,12 @@ Revises: 41f6a59a61f2
Create Date: 2016-09-07 23:50:59.366779
"""
import sqlalchemy as sa
from alembic import op
from sqlalchemy import Column, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from superset import db
# revision identifiers, used by Alembic.
revision = "33d996bcc382"

View File

@ -24,12 +24,14 @@ Revises: 5e4a03ef0bf0
Create Date: 2016-09-22 10:21:33.618976
"""
import logging
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import mysql
from superset import db
from superset.utils.core import generic_find_constraint_name
import logging
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = "3b626e2a6783"

View File

@ -26,8 +26,8 @@ Create Date: 2016-08-18 14:06:28.784699
revision = "3c3ffe173e4f"
down_revision = "ad82a75afd82"
from alembic import op
import sqlalchemy as sa
from alembic import op
def upgrade():

View File

@ -27,10 +27,10 @@ Create Date: 2018-07-05 15:19:14.609299
import datetime
import json
from alembic import op
import isodate
from sqlalchemy.ext.declarative import declarative_base
from alembic import op
from sqlalchemy import Column, Integer, String, Text
from sqlalchemy.ext.declarative import declarative_base
from superset import db
from superset.utils.core import parse_human_timedelta

View File

@ -22,6 +22,9 @@ Create Date: 2018-12-15 12:34:47.228756
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
from superset import db
from superset.utils.core import generic_find_fk_constraint_name
@ -29,9 +32,6 @@ from superset.utils.core import generic_find_fk_constraint_name
revision = "3e1b21cd94a4"
down_revision = "6c7537a6004a"
from alembic import op
import sqlalchemy as sa
sqlatable_user = sa.Table(
"sqlatable_user",

View File

@ -21,8 +21,8 @@ Revises: 3c3ffe173e4f
Create Date: 2016-08-31 10:26:37.969107
"""
from alembic import op
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "41f6a59a61f2"

View File

@ -21,8 +21,8 @@ Revises: d827694c7555
Create Date: 2016-02-10 08:47:28.950891
"""
from alembic import op
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "430039611635"

View File

@ -26,8 +26,8 @@ Create Date: 2016-01-18 23:43:16.073483
revision = "43df8de3a5f4"
down_revision = "7dbf98566af7"
from alembic import op
import sqlalchemy as sa
from alembic import op
def upgrade():

View File

@ -27,10 +27,11 @@ revision = "4451805bbaa1"
down_revision = "bddc498dd179"
from alembic import op
import json
from sqlalchemy.ext.declarative import declarative_base
from alembic import op
from sqlalchemy import Column, create_engine, ForeignKey, Integer, String, Text
from sqlalchemy.ext.declarative import declarative_base
from superset import db

View File

@ -26,8 +26,8 @@ Create Date: 2016-09-12 23:33:14.789632
revision = "4500485bde7d"
down_revision = "41f6a59a61f2"
from alembic import op
import sqlalchemy as sa
from alembic import op
def upgrade():

View File

@ -26,8 +26,8 @@ Create Date: 2019-02-16 17:44:44.493427
revision = "45e7da7cfeba"
down_revision = ("e553e78e90c5", "c82ee8a39623")
from alembic import op
import sqlalchemy as sa
from alembic import op
def upgrade():

View File

@ -26,8 +26,8 @@ Create Date: 2018-07-23 11:20:54.929246
revision = "46ba6aaaac97"
down_revision = ("705732c70154", "e3970889f38e")
from alembic import op
import sqlalchemy as sa
from alembic import op
def upgrade():

View File

@ -21,8 +21,8 @@ Revises: 4ce8df208545
Create Date: 2018-11-26 00:01:04.781119
"""
from alembic import op
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "46f444d8b9b7"

View File

@ -26,8 +26,8 @@ Create Date: 2017-09-21 18:37:30.844196
revision = "472d2f73dfd4"
down_revision = ("19a814813610", "a9c47e2c1547")
from alembic import op
import sqlalchemy as sa
from alembic import op
def upgrade():

View File

@ -24,8 +24,8 @@ Create Date: 2017-10-03 14:37:01.376578
import logging
from alembic import op
import sqlalchemy as sa
from alembic import op
from superset.utils.core import (
generic_find_fk_constraint_name,

View File

@ -26,8 +26,8 @@ Create Date: 2015-09-21 17:30:38.442998
revision = "4e6a06bad7a8"
down_revision = None
from alembic import op
import sqlalchemy as sa
from alembic import op
def upgrade():

View File

@ -25,8 +25,8 @@ Create Date: 2016-04-15 17:58:33.842012
revision = "4fa88fe24e94"
down_revision = "b4456560d4f3"
from alembic import op
import sqlalchemy as sa
from alembic import op
def upgrade():

View File

@ -26,8 +26,8 @@ Create Date: 2016-12-13 16:19:02.239322
revision = "525c854f0005"
down_revision = "e46f2d27a08e"
from alembic import op
import sqlalchemy as sa
from alembic import op
def upgrade():

View File

@ -26,8 +26,8 @@ Create Date: 2015-12-13 08:38:43.704145
revision = "55179c7f25c7"
down_revision = "315b3f4da9b0"
from alembic import op
import sqlalchemy as sa
from alembic import op
def upgrade():

View File

@ -26,8 +26,8 @@ Create Date: 2018-08-29 14:35:20.407743
revision = "55e910a74826"
down_revision = "1a1d627ebd8e"
from alembic import op
import sqlalchemy as sa
from alembic import op
def upgrade():

View File

@ -26,8 +26,8 @@ Create Date: 2015-10-05 10:32:15.850753
revision = "5a7bad26f2a7"
down_revision = "4e6a06bad7a8"
from alembic import op
import sqlalchemy as sa
from alembic import op
def upgrade():

View File

@ -25,8 +25,8 @@ Create Date: 2018-04-12 16:00:47.639218
revision = "5ccf602336a0"
down_revision = ("130915240929", "c9495751e314")
from alembic import op
import sqlalchemy as sa
from alembic import op
def upgrade():

View File

@ -21,8 +21,8 @@ Revises: 41f6a59a61f2
Create Date: 2016-09-09 17:39:57.846309
"""
from alembic import op
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "5e4a03ef0bf0"

View File

@ -25,8 +25,8 @@ Create Date: 2016-12-19 09:57:05.814013
revision = "6414e83d82b7"
down_revision = ("525c854f0005", "f1f2d4af5b90")
from alembic import op
import sqlalchemy as sa
from alembic import op
def upgrade():

View File

@ -24,13 +24,13 @@ Create Date: 2016-09-15 08:48:27.284752
import logging
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "65903709c321"
down_revision = "4500485bde7d"
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column("dbs", sa.Column("allow_dml", sa.Boolean(), nullable=True))

View File

@ -24,8 +24,8 @@ Create Date: 2017-12-08 08:19:21.148775
import json
from alembic import op
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, Integer, String, Text
from sqlalchemy.ext.declarative import declarative_base
from superset import db

View File

@ -26,8 +26,8 @@ Create Date: 2018-05-15 20:28:51.977572
revision = "6c7537a6004a"
down_revision = "a61b40f9f57f"
from alembic import op
import sqlalchemy as sa
from alembic import op
def upgrade():

View File

@ -25,8 +25,8 @@ Create Date: 2018-07-22 21:51:19.235558
revision = "705732c70154"
down_revision = ("4451805bbaa1", "1d9e835a84f9")
from alembic import op
import sqlalchemy as sa
from alembic import op
def upgrade():

View File

@ -26,8 +26,8 @@ Create Date: 2017-03-03 09:15:56.800930
revision = "732f1c06bcbf"
down_revision = "d6db5a5cdb5d"
from alembic import op
import sqlalchemy as sa
from alembic import op
def upgrade():

Some files were not shown because too many files have changed in this diff.