chore: split CLI into multiple files (#18082)

* chore: split CLI into multiple files

* Update tests

* Who fixes the fixtures?

* Add subcommands dynamically

* Rebase
Beto Dealmeida 2022-01-19 11:27:16 -08:00 committed by GitHub
parent 810cfc13db
commit 9e2bc72fb9
13 changed files with 1172 additions and 947 deletions

setup.py

@@ -62,7 +62,7 @@ setup(
packages=find_packages(),
include_package_data=True,
zip_safe=False,
entry_points={"console_scripts": ["superset=superset.cli:superset"]},
entry_points={"console_scripts": ["superset=superset.cli.main:superset"]},
install_requires=[
"backoff>=1.8.0",
"bleach>=3.0.2, <4.0.0",

902
superset/cli.py Deleted file

@@ -1,902 +0,0 @@
#!/usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import json
import logging
import os
import sys
from datetime import datetime, timedelta
from pathlib import Path
from subprocess import Popen
from typing import Any, Dict, List, Optional, Type, Union
from zipfile import is_zipfile, ZipFile
import click
import yaml
from apispec import APISpec
from apispec.ext.marshmallow import MarshmallowPlugin
from celery.utils.abstract import CallableTask
from colorama import Fore, Style
from flask import current_app, g
from flask.cli import FlaskGroup, with_appcontext
from flask_appbuilder import Model
from flask_appbuilder.api import BaseApi
from flask_appbuilder.api.manager import resolver
import superset.utils.database as database_utils
from superset import app, appbuilder, config, security_manager
from superset.extensions import celery_app, db
from superset.utils.celery import session_scope
from superset.utils.encrypt import SecretsMigrator
from superset.utils.urls import get_url_path
logger = logging.getLogger(__name__)
feature_flags = config.DEFAULT_FEATURE_FLAGS.copy()
feature_flags.update(config.FEATURE_FLAGS)
feature_flags_func = config.GET_FEATURE_FLAGS_FUNC
if feature_flags_func:
# pylint: disable=not-callable
try:
feature_flags = feature_flags_func(feature_flags)
except Exception: # pylint: disable=broad-except
# bypass any feature flags that depend on context
# that's not available
pass
def normalize_token(token_name: str) -> str:
"""
As of click>=7, underscores in function names are replaced by dashes.
To avoid renaming all CLI functions, e.g. load_examples to
load-examples, this function normalizes user-supplied tokens by
converting underscores to dashes.
:param token_name: token name possibly containing underscores
:return: token name with underscores replaced by dashes
"""
return token_name.replace("_", "-")
@click.group(
cls=FlaskGroup, context_settings={"token_normalize_func": normalize_token},
)
@with_appcontext
def superset() -> None:
"""This is a management script for the Superset application."""
@app.shell_context_processor
def make_shell_context() -> Dict[str, Any]:
return dict(app=app, db=db)
@superset.command()
@with_appcontext
def init() -> None:
"""Inits the Superset application"""
appbuilder.add_permissions(update_perms=True)
security_manager.sync_role_definitions()
@superset.command()
@with_appcontext
@click.option("--verbose", "-v", is_flag=True, help="Show extra information")
def version(verbose: bool) -> None:
"""Prints the current version number"""
print(Fore.BLUE + "-=" * 15)
print(
Fore.YELLOW
+ "Superset "
+ Fore.CYAN
+ "{version}".format(version=app.config["VERSION_STRING"])
)
print(Fore.BLUE + "-=" * 15)
if verbose:
print("[DB] : " + "{}".format(db.engine))
print(Style.RESET_ALL)
def load_examples_run(
load_test_data: bool = False,
load_big_data: bool = False,
only_metadata: bool = False,
force: bool = False,
) -> None:
if only_metadata:
print("Loading examples metadata")
else:
examples_db = database_utils.get_example_database()
print(f"Loading examples metadata and related data into {examples_db}")
# pylint: disable=import-outside-toplevel
import superset.examples.data_loading as examples
examples.load_css_templates()
if load_test_data:
print("Loading energy related dataset")
examples.load_energy(only_metadata, force)
print("Loading [World Bank's Health Nutrition and Population Stats]")
examples.load_world_bank_health_n_pop(only_metadata, force)
print("Loading [Birth names]")
examples.load_birth_names(only_metadata, force)
if load_test_data:
print("Loading [Tabbed dashboard]")
examples.load_tabbed_dashboard(only_metadata)
if not load_test_data:
print("Loading [Random long/lat data]")
examples.load_long_lat_data(only_metadata, force)
print("Loading [Country Map data]")
examples.load_country_map_data(only_metadata, force)
print("Loading [San Francisco population polygons]")
examples.load_sf_population_polygons(only_metadata, force)
print("Loading [Flights data]")
examples.load_flights(only_metadata, force)
print("Loading [BART lines]")
examples.load_bart_lines(only_metadata, force)
print("Loading [Multi Line]")
examples.load_multi_line(only_metadata)
print("Loading [Misc Charts] dashboard")
examples.load_misc_dashboard()
print("Loading DECK.gl demo")
examples.load_deck_dash()
if load_big_data:
print("Loading big synthetic data for tests")
examples.load_big_data()
# load examples that are stored as YAML config files
examples.load_examples_from_configs(force, load_test_data)
@with_appcontext
@superset.command()
@click.option("--load-test-data", "-t", is_flag=True, help="Load additional test data")
@click.option("--load-big-data", "-b", is_flag=True, help="Load additional big data")
@click.option(
"--only-metadata", "-m", is_flag=True, help="Only load metadata, skip actual data",
)
@click.option(
"--force", "-f", is_flag=True, help="Force load data even if table already exists",
)
def load_examples(
load_test_data: bool,
load_big_data: bool,
only_metadata: bool = False,
force: bool = False,
) -> None:
"""Loads a set of Slices and Dashboards and a supporting dataset"""
load_examples_run(load_test_data, load_big_data, only_metadata, force)
@with_appcontext
@superset.command()
@click.argument("directory")
@click.option(
"--overwrite", "-o", is_flag=True, help="Overwriting existing metadata definitions",
)
@click.option(
"--force", "-f", is_flag=True, help="Force load data even if table already exists",
)
def import_directory(directory: str, overwrite: bool, force: bool) -> None:
"""Imports configs from a given directory"""
# pylint: disable=import-outside-toplevel
from superset.examples.utils import load_configs_from_directory
load_configs_from_directory(
root=Path(directory), overwrite=overwrite, force_data=force,
)
@with_appcontext
@superset.command()
@click.option("--database_name", "-d", help="Database name to change")
@click.option("--uri", "-u", help="Database URI to change")
@click.option(
"--skip_create",
"-s",
is_flag=True,
default=False,
help="Create the DB if it doesn't exist",
)
def set_database_uri(database_name: str, uri: str, skip_create: bool) -> None:
"""Updates a database connection URI"""
database_utils.get_or_create_db(database_name, uri, not skip_create)
@superset.command()
@with_appcontext
@click.option(
"--datasource",
"-d",
help="Specify which datasource name to load, if "
"omitted, all datasources will be refreshed",
)
@click.option(
"--merge",
"-m",
is_flag=True,
default=False,
help="Specify using 'merge' property during operation. " "Default value is False.",
)
def refresh_druid(datasource: str, merge: bool) -> None:
"""Refresh druid datasources"""
# pylint: disable=import-outside-toplevel
from superset.connectors.druid.models import DruidCluster
session = db.session()
for cluster in session.query(DruidCluster).all():
try:
cluster.refresh_datasources(datasource_name=datasource, merge_flag=merge)
except Exception as ex: # pylint: disable=broad-except
print("Error while processing cluster '{}'\n{}".format(cluster, str(ex)))
logger.exception(ex)
cluster.metadata_last_refreshed = datetime.now()
print("Refreshed metadata from cluster " "[" + cluster.cluster_name + "]")
session.commit()
if feature_flags.get("VERSIONED_EXPORT"):
@superset.command()
@with_appcontext
@click.option(
"--dashboard-file", "-f", help="Specify the the file to export to",
)
def export_dashboards(dashboard_file: Optional[str] = None) -> None:
"""Export dashboards to ZIP file"""
# pylint: disable=import-outside-toplevel
from superset.dashboards.commands.export import ExportDashboardsCommand
from superset.models.dashboard import Dashboard
g.user = security_manager.find_user(username="admin")
dashboard_ids = [id_ for (id_,) in db.session.query(Dashboard.id).all()]
timestamp = datetime.now().strftime("%Y%m%dT%H%M%S")
root = f"dashboard_export_{timestamp}"
dashboard_file = dashboard_file or f"{root}.zip"
try:
with ZipFile(dashboard_file, "w") as bundle:
for file_name, file_content in ExportDashboardsCommand(
dashboard_ids
).run():
with bundle.open(f"{root}/{file_name}", "w") as fp:
fp.write(file_content.encode())
except Exception: # pylint: disable=broad-except
logger.exception(
"There was an error when exporting the dashboards, please check "
"the exception traceback in the log"
)
sys.exit(1)
@superset.command()
@with_appcontext
@click.option(
"--datasource-file", "-f", help="Specify the the file to export to",
)
def export_datasources(datasource_file: Optional[str] = None) -> None:
"""Export datasources to ZIP file"""
# pylint: disable=import-outside-toplevel
from superset.connectors.sqla.models import SqlaTable
from superset.datasets.commands.export import ExportDatasetsCommand
g.user = security_manager.find_user(username="admin")
dataset_ids = [id_ for (id_,) in db.session.query(SqlaTable.id).all()]
timestamp = datetime.now().strftime("%Y%m%dT%H%M%S")
root = f"dataset_export_{timestamp}"
datasource_file = datasource_file or f"{root}.zip"
try:
with ZipFile(datasource_file, "w") as bundle:
for file_name, file_content in ExportDatasetsCommand(dataset_ids).run():
with bundle.open(f"{root}/{file_name}", "w") as fp:
fp.write(file_content.encode())
except Exception: # pylint: disable=broad-except
logger.exception(
"There was an error when exporting the datasets, please check "
"the exception traceback in the log"
)
sys.exit(1)
@superset.command()
@with_appcontext
@click.option(
"--path", "-p", help="Path to a single ZIP file",
)
@click.option(
"--username",
"-u",
default=None,
help="Specify the user name to assign dashboards to",
)
def import_dashboards(path: str, username: Optional[str]) -> None:
"""Import dashboards from ZIP file"""
# pylint: disable=import-outside-toplevel
from superset.commands.importers.v1.utils import get_contents_from_bundle
from superset.dashboards.commands.importers.dispatcher import (
ImportDashboardsCommand,
)
if username is not None:
g.user = security_manager.find_user(username=username)
if is_zipfile(path):
with ZipFile(path) as bundle:
contents = get_contents_from_bundle(bundle)
else:
with open(path) as file:
contents = {path: file.read()}
try:
ImportDashboardsCommand(contents, overwrite=True).run()
except Exception: # pylint: disable=broad-except
logger.exception(
"There was an error when importing the dashboards(s), please check "
"the exception traceback in the log"
)
sys.exit(1)
@superset.command()
@with_appcontext
@click.option(
"--path", "-p", help="Path to a single ZIP file",
)
def import_datasources(path: str) -> None:
"""Import datasources from ZIP file"""
# pylint: disable=import-outside-toplevel
from superset.commands.importers.v1.utils import get_contents_from_bundle
from superset.datasets.commands.importers.dispatcher import (
ImportDatasetsCommand,
)
if is_zipfile(path):
with ZipFile(path) as bundle:
contents = get_contents_from_bundle(bundle)
else:
with open(path) as file:
contents = {path: file.read()}
try:
ImportDatasetsCommand(contents, overwrite=True).run()
except Exception: # pylint: disable=broad-except
logger.exception(
"There was an error when importing the dataset(s), please check the "
"exception traceback in the log"
)
sys.exit(1)
else:
@superset.command()
@with_appcontext
@click.option(
"--dashboard-file",
"-f",
default=None,
help="Specify the the file to export to",
)
@click.option(
"--print_stdout",
"-p",
is_flag=True,
default=False,
help="Print JSON to stdout",
)
def export_dashboards(
dashboard_file: Optional[str], print_stdout: bool = False
) -> None:
"""Export dashboards to JSON"""
# pylint: disable=import-outside-toplevel
from superset.utils import dashboard_import_export
data = dashboard_import_export.export_dashboards(db.session)
if print_stdout or not dashboard_file:
print(data)
if dashboard_file:
logger.info("Exporting dashboards to %s", dashboard_file)
with open(dashboard_file, "w") as data_stream:
data_stream.write(data)
@superset.command()
@with_appcontext
@click.option(
"--datasource-file",
"-f",
default=None,
help="Specify the the file to export to",
)
@click.option(
"--print_stdout",
"-p",
is_flag=True,
default=False,
help="Print YAML to stdout",
)
@click.option(
"--back-references",
"-b",
is_flag=True,
default=False,
help="Include parent back references",
)
@click.option(
"--include-defaults",
"-d",
is_flag=True,
default=False,
help="Include fields containing defaults",
)
def export_datasources(
datasource_file: Optional[str],
print_stdout: bool = False,
back_references: bool = False,
include_defaults: bool = False,
) -> None:
"""Export datasources to YAML"""
# pylint: disable=import-outside-toplevel
from superset.utils import dict_import_export
data = dict_import_export.export_to_dict(
session=db.session,
recursive=True,
back_references=back_references,
include_defaults=include_defaults,
)
if print_stdout or not datasource_file:
yaml.safe_dump(data, sys.stdout, default_flow_style=False)
if datasource_file:
logger.info("Exporting datasources to %s", datasource_file)
with open(datasource_file, "w") as data_stream:
yaml.safe_dump(data, data_stream, default_flow_style=False)
@superset.command()
@with_appcontext
@click.option(
"--path",
"-p",
help="Path to a single JSON file or path containing multiple JSON "
"files to import (*.json)",
)
@click.option(
"--recursive",
"-r",
is_flag=True,
default=False,
help="recursively search the path for json files",
)
@click.option(
"--username",
"-u",
default=None,
help="Specify the user name to assign dashboards to",
)
def import_dashboards(path: str, recursive: bool, username: str) -> None:
"""Import dashboards from JSON file"""
# pylint: disable=import-outside-toplevel
from superset.dashboards.commands.importers.v0 import ImportDashboardsCommand
path_object = Path(path)
files: List[Path] = []
if path_object.is_file():
files.append(path_object)
elif path_object.exists() and not recursive:
files.extend(path_object.glob("*.json"))
elif path_object.exists() and recursive:
files.extend(path_object.rglob("*.json"))
if username is not None:
g.user = security_manager.find_user(username=username)
contents = {}
for path_ in files:
with open(path_) as file:
contents[path_.name] = file.read()
try:
ImportDashboardsCommand(contents).run()
except Exception: # pylint: disable=broad-except
logger.exception("Error when importing dashboard")
sys.exit(1)
@superset.command()
@with_appcontext
@click.option(
"--path",
"-p",
help="Path to a single YAML file or path containing multiple YAML "
"files to import (*.yaml or *.yml)",
)
@click.option(
"--sync",
"-s",
"sync",
default="",
help="comma seperated list of element types to synchronize "
'e.g. "metrics,columns" deletes metrics and columns in the DB '
"that are not specified in the YAML file",
)
@click.option(
"--recursive",
"-r",
is_flag=True,
default=False,
help="recursively search the path for yaml files",
)
def import_datasources(path: str, sync: str, recursive: bool) -> None:
"""Import datasources from YAML"""
# pylint: disable=import-outside-toplevel
from superset.datasets.commands.importers.v0 import ImportDatasetsCommand
sync_array = sync.split(",")
sync_columns = "columns" in sync_array
sync_metrics = "metrics" in sync_array
path_object = Path(path)
files: List[Path] = []
if path_object.is_file():
files.append(path_object)
elif path_object.exists() and not recursive:
files.extend(path_object.glob("*.yaml"))
files.extend(path_object.glob("*.yml"))
elif path_object.exists() and recursive:
files.extend(path_object.rglob("*.yaml"))
files.extend(path_object.rglob("*.yml"))
contents = {}
for path_ in files:
with open(path_) as file:
contents[path_.name] = file.read()
try:
ImportDatasetsCommand(contents, sync_columns, sync_metrics).run()
except Exception: # pylint: disable=broad-except
logger.exception("Error when importing dataset")
sys.exit(1)
@superset.command()
@with_appcontext
@click.option(
"--back-references",
"-b",
is_flag=True,
default=False,
help="Include parent back references",
)
def export_datasource_schema(back_references: bool) -> None:
"""Export datasource YAML schema to stdout"""
# pylint: disable=import-outside-toplevel
from superset.utils import dict_import_export
data = dict_import_export.export_schema_to_dict(back_references=back_references)
yaml.safe_dump(data, sys.stdout, default_flow_style=False)
@superset.command()
@with_appcontext
def update_datasources_cache() -> None:
"""Refresh sqllab datasources cache"""
# pylint: disable=import-outside-toplevel
from superset.models.core import Database
for database in db.session.query(Database).all():
if database.allow_multi_schema_metadata_fetch:
print("Fetching {} datasources ...".format(database.name))
try:
database.get_all_table_names_in_database(
force=True, cache=True, cache_timeout=24 * 60 * 60
)
database.get_all_view_names_in_database(
force=True, cache=True, cache_timeout=24 * 60 * 60
)
except Exception as ex: # pylint: disable=broad-except
print("{}".format(str(ex)))
@superset.command()
@with_appcontext
@click.option(
"--workers", "-w", type=int, help="Number of celery server workers to fire up",
)
def worker(workers: int) -> None:
"""Starts a Superset worker for async SQL query execution."""
logger.info(
"The 'superset worker' command is deprecated. Please use the 'celery "
"worker' command instead."
)
if workers:
celery_app.conf.update(CELERYD_CONCURRENCY=workers)
elif app.config["SUPERSET_CELERY_WORKERS"]:
celery_app.conf.update(
CELERYD_CONCURRENCY=app.config["SUPERSET_CELERY_WORKERS"]
)
local_worker = celery_app.Worker(optimization="fair")
local_worker.start()
@superset.command()
@with_appcontext
@click.option(
"-p", "--port", default="5555", help="Port on which to start the Flower process",
)
@click.option(
"-a", "--address", default="localhost", help="Address on which to run the service",
)
def flower(port: int, address: str) -> None:
"""Runs a Celery Flower web server
Celery Flower is a UI to monitor the Celery operation on a given
broker"""
broker_url = celery_app.conf.BROKER_URL
cmd = (
"celery flower "
f"--broker={broker_url} "
f"--port={port} "
f"--address={address} "
)
logger.info(
"The 'superset flower' command is deprecated. Please use the 'celery "
"flower' command instead."
)
print(Fore.GREEN + "Starting a Celery Flower instance")
print(Fore.BLUE + "-=" * 40)
print(Fore.YELLOW + cmd)
print(Fore.BLUE + "-=" * 40)
Popen(cmd, shell=True).wait() # pylint: disable=consider-using-with
@superset.command()
@with_appcontext
@click.option(
"--asynchronous",
"-a",
is_flag=True,
default=False,
help="Trigger commands to run remotely on a worker",
)
@click.option(
"--dashboards_only",
"-d",
is_flag=True,
default=False,
help="Only process dashboards",
)
@click.option(
"--charts_only", "-c", is_flag=True, default=False, help="Only process charts",
)
@click.option(
"--force",
"-f",
is_flag=True,
default=False,
help="Force refresh, even if previously cached",
)
@click.option("--model_id", "-i", multiple=True)
def compute_thumbnails(
asynchronous: bool,
dashboards_only: bool,
charts_only: bool,
force: bool,
model_id: int,
) -> None:
"""Compute thumbnails"""
# pylint: disable=import-outside-toplevel
from superset.models.dashboard import Dashboard
from superset.models.slice import Slice
from superset.tasks.thumbnails import (
cache_chart_thumbnail,
cache_dashboard_thumbnail,
)
def compute_generic_thumbnail(
friendly_type: str,
model_cls: Union[Type[Dashboard], Type[Slice]],
model_id: int,
compute_func: CallableTask,
) -> None:
query = db.session.query(model_cls)
if model_id:
query = query.filter(model_cls.id.in_(model_id))
dashboards = query.all()
count = len(dashboards)
for i, model in enumerate(dashboards):
if asynchronous:
func = compute_func.delay
action = "Triggering"
else:
func = compute_func
action = "Processing"
msg = f'{action} {friendly_type} "{model}" ({i+1}/{count})'
click.secho(msg, fg="green")
if friendly_type == "chart":
url = get_url_path(
"Superset.slice", slice_id=model.id, standalone="true"
)
else:
url = get_url_path("Superset.dashboard", dashboard_id_or_slug=model.id)
func(url, model.digest, force=force)
if not charts_only:
compute_generic_thumbnail(
"dashboard", Dashboard, model_id, cache_dashboard_thumbnail
)
if not dashboards_only:
compute_generic_thumbnail("chart", Slice, model_id, cache_chart_thumbnail)
@superset.command()
@with_appcontext
def load_test_users() -> None:
"""
Loads admin, alpha, and gamma users for testing purposes
Syncs permissions for those users/roles
"""
print(Fore.GREEN + "Loading a set of users for unit tests")
load_test_users_run()
def load_test_users_run() -> None:
"""
Loads admin, alpha, and gamma users for testing purposes
Syncs permissions for those users/roles
"""
if app.config["TESTING"]:
sm = security_manager
examples_db = database_utils.get_example_database()
examples_pv = sm.add_permission_view_menu("database_access", examples_db.perm)
sm.sync_role_definitions()
gamma_sqllab_role = sm.add_role("gamma_sqllab")
sm.add_permission_role(gamma_sqllab_role, examples_pv)
gamma_no_csv_role = sm.add_role("gamma_no_csv")
sm.add_permission_role(gamma_no_csv_role, examples_pv)
for role in ["Gamma", "sql_lab"]:
for perm in sm.find_role(role).permissions:
sm.add_permission_role(gamma_sqllab_role, perm)
if str(perm) != "can csv on Superset":
sm.add_permission_role(gamma_no_csv_role, perm)
users = (
("admin", "Admin"),
("gamma", "Gamma"),
("gamma2", "Gamma"),
("gamma_sqllab", "gamma_sqllab"),
("alpha", "Alpha"),
("gamma_no_csv", "gamma_no_csv"),
)
for username, role in users:
user = sm.find_user(username)
if not user:
sm.add_user(
username,
username,
"user",
username + "@fab.org",
sm.find_role(role),
password="general",
)
sm.get_session.commit()
@superset.command()
@with_appcontext
def sync_tags() -> None:
"""Rebuilds special tags (owner, type, favorited by)."""
# pylint: disable=no-member
metadata = Model.metadata
# pylint: disable=import-outside-toplevel
from superset.common.tags import add_favorites, add_owners, add_types
add_types(db.engine, metadata)
add_owners(db.engine, metadata)
add_favorites(db.engine, metadata)
@superset.command()
@with_appcontext
def alert() -> None:
"""Run the alert scheduler loop"""
# this command is just for testing purposes
# pylint: disable=import-outside-toplevel
from superset.models.schedules import ScheduleType
from superset.tasks.schedules import schedule_window
click.secho("Processing one alert loop", fg="green")
with session_scope(nullpool=True) as session:
schedule_window(
report_type=ScheduleType.alert,
start_at=datetime.now() - timedelta(1000),
stop_at=datetime.now(),
resolution=6000,
session=session,
)
@superset.command()
@with_appcontext
def update_api_docs() -> None:
"""Regenerate the openapi.json file in docs"""
superset_dir = os.path.abspath(os.path.dirname(__file__))
openapi_json = os.path.join(
superset_dir, "..", "docs", "src", "resources", "openapi.json"
)
api_version = "v1"
version_found = False
api_spec = APISpec(
title=current_app.appbuilder.app_name,
version=api_version,
openapi_version="3.0.2",
info=dict(description=current_app.appbuilder.app_name),
plugins=[MarshmallowPlugin(schema_name_resolver=resolver)],
servers=[{"url": "http://localhost:8088"}],
)
for base_api in current_app.appbuilder.baseviews:
if isinstance(base_api, BaseApi) and base_api.version == api_version:
base_api.add_api_spec(api_spec)
version_found = True
if version_found:
click.secho("Generating openapi.json", fg="green")
with open(openapi_json, "w") as outfile:
json.dump(api_spec.to_dict(), outfile, sort_keys=True, indent=2)
else:
click.secho("API version not found", err=True)
@superset.command()
@with_appcontext
@click.option(
"--previous_secret_key",
"-a",
required=False,
help="An optional previous secret key, if PREVIOUS_SECRET_KEY "
"is not set on the config",
)
def re_encrypt_secrets(previous_secret_key: Optional[str] = None) -> None:
previous_secret_key = previous_secret_key or current_app.config.get(
"PREVIOUS_SECRET_KEY"
)
if previous_secret_key is None:
click.secho("A previous secret key must be provided", err=True)
sys.exit(1)
secrets_migrator = SecretsMigrator(previous_secret_key=previous_secret_key)
try:
secrets_migrator.run()
except ValueError as exc:
click.secho(
f"An error occurred, "
f"probably an invalid previoud secret key was provided. Error:[{exc}]",
err=True,
)
sys.exit(1)

16
superset/cli/__init__.py Normal file

@@ -0,0 +1,16 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

80
superset/cli/celery.py Executable file

@@ -0,0 +1,80 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
from subprocess import Popen
import click
from colorama import Fore
from flask.cli import with_appcontext
from superset import app
from superset.extensions import celery_app
logger = logging.getLogger(__name__)
@click.command()
@with_appcontext
@click.option(
"--workers", "-w", type=int, help="Number of celery server workers to fire up",
)
def worker(workers: int) -> None:
"""Starts a Superset worker for async SQL query execution."""
logger.info(
"The 'superset worker' command is deprecated. Please use the 'celery "
"worker' command instead."
)
if workers:
celery_app.conf.update(CELERYD_CONCURRENCY=workers)
elif app.config["SUPERSET_CELERY_WORKERS"]:
celery_app.conf.update(
CELERYD_CONCURRENCY=app.config["SUPERSET_CELERY_WORKERS"]
)
local_worker = celery_app.Worker(optimization="fair")
local_worker.start()
@click.command()
@with_appcontext
@click.option(
"-p", "--port", default="5555", help="Port on which to start the Flower process",
)
@click.option(
"-a", "--address", default="localhost", help="Address on which to run the service",
)
def flower(port: int, address: str) -> None:
"""Runs a Celery Flower web server
Celery Flower is a UI to monitor the Celery operation on a given
broker"""
broker_url = celery_app.conf.BROKER_URL
cmd = (
"celery flower "
f"--broker={broker_url} "
f"--port={port} "
f"--address={address} "
)
logger.info(
"The 'superset flower' command is deprecated. Please use the 'celery "
"flower' command instead."
)
print(Fore.GREEN + "Starting a Celery Flower instance")
print(Fore.BLUE + "-=" * 40)
print(Fore.YELLOW + cmd)
print(Fore.BLUE + "-=" * 40)
Popen(cmd, shell=True).wait() # pylint: disable=consider-using-with

108
superset/cli/examples.py Executable file

@@ -0,0 +1,108 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
import click
from flask.cli import with_appcontext
import superset.utils.database as database_utils
logger = logging.getLogger(__name__)
def load_examples_run(
load_test_data: bool = False,
load_big_data: bool = False,
only_metadata: bool = False,
force: bool = False,
) -> None:
if only_metadata:
print("Loading examples metadata")
else:
examples_db = database_utils.get_example_database()
print(f"Loading examples metadata and related data into {examples_db}")
# pylint: disable=import-outside-toplevel
import superset.examples.data_loading as examples
examples.load_css_templates()
if load_test_data:
print("Loading energy related dataset")
examples.load_energy(only_metadata, force)
print("Loading [World Bank's Health Nutrition and Population Stats]")
examples.load_world_bank_health_n_pop(only_metadata, force)
print("Loading [Birth names]")
examples.load_birth_names(only_metadata, force)
if load_test_data:
print("Loading [Tabbed dashboard]")
examples.load_tabbed_dashboard(only_metadata)
if not load_test_data:
print("Loading [Random long/lat data]")
examples.load_long_lat_data(only_metadata, force)
print("Loading [Country Map data]")
examples.load_country_map_data(only_metadata, force)
print("Loading [San Francisco population polygons]")
examples.load_sf_population_polygons(only_metadata, force)
print("Loading [Flights data]")
examples.load_flights(only_metadata, force)
print("Loading [BART lines]")
examples.load_bart_lines(only_metadata, force)
print("Loading [Multi Line]")
examples.load_multi_line(only_metadata)
print("Loading [Misc Charts] dashboard")
examples.load_misc_dashboard()
print("Loading DECK.gl demo")
examples.load_deck_dash()
if load_big_data:
print("Loading big synthetic data for tests")
examples.load_big_data()
# load examples that are stored as YAML config files
examples.load_examples_from_configs(force, load_test_data)
@click.command()
@with_appcontext
@click.option("--load-test-data", "-t", is_flag=True, help="Load additional test data")
@click.option("--load-big-data", "-b", is_flag=True, help="Load additional big data")
@click.option(
"--only-metadata", "-m", is_flag=True, help="Only load metadata, skip actual data",
)
@click.option(
"--force", "-f", is_flag=True, help="Force load data even if table already exists",
)
def load_examples(
load_test_data: bool,
load_big_data: bool,
only_metadata: bool = False,
force: bool = False,
) -> None:
"""Loads a set of Slices and Dashboards and a supporting dataset"""
load_examples_run(load_test_data, load_big_data, only_metadata, force)

381
superset/cli/importexport.py Executable file

@@ -0,0 +1,381 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
import sys
from datetime import datetime
from pathlib import Path
from typing import List, Optional
from zipfile import is_zipfile, ZipFile
import click
import yaml
from flask import g
from flask.cli import with_appcontext
from superset import security_manager
from superset.cli.lib import feature_flags
from superset.extensions import db
logger = logging.getLogger(__name__)
@click.command()
@click.argument("directory")
@click.option(
"--overwrite", "-o", is_flag=True, help="Overwriting existing metadata definitions",
)
@click.option(
"--force", "-f", is_flag=True, help="Force load data even if table already exists",
)
def import_directory(directory: str, overwrite: bool, force: bool) -> None:
"""Imports configs from a given directory"""
# pylint: disable=import-outside-toplevel
from superset.examples.utils import load_configs_from_directory
load_configs_from_directory(
root=Path(directory), overwrite=overwrite, force_data=force,
)
if feature_flags.get("VERSIONED_EXPORT"):
@click.command()
@with_appcontext
@click.option(
"--dashboard-file", "-f", help="Specify the the file to export to",
)
def export_dashboards(dashboard_file: Optional[str] = None) -> None:
"""Export dashboards to ZIP file"""
# pylint: disable=import-outside-toplevel
from superset.dashboards.commands.export import ExportDashboardsCommand
from superset.models.dashboard import Dashboard
g.user = security_manager.find_user(username="admin")
dashboard_ids = [id_ for (id_,) in db.session.query(Dashboard.id).all()]
timestamp = datetime.now().strftime("%Y%m%dT%H%M%S")
root = f"dashboard_export_{timestamp}"
dashboard_file = dashboard_file or f"{root}.zip"
try:
with ZipFile(dashboard_file, "w") as bundle:
for file_name, file_content in ExportDashboardsCommand(
dashboard_ids
).run():
with bundle.open(f"{root}/{file_name}", "w") as fp:
fp.write(file_content.encode())
except Exception: # pylint: disable=broad-except
logger.exception(
"There was an error when exporting the dashboards, please check "
"the exception traceback in the log"
)
sys.exit(1)
@click.command()
@with_appcontext
@click.option(
"--datasource-file", "-f", help="Specify the the file to export to",
)
def export_datasources(datasource_file: Optional[str] = None) -> None:
"""Export datasources to ZIP file"""
# pylint: disable=import-outside-toplevel
from superset.connectors.sqla.models import SqlaTable
from superset.datasets.commands.export import ExportDatasetsCommand
g.user = security_manager.find_user(username="admin")
dataset_ids = [id_ for (id_,) in db.session.query(SqlaTable.id).all()]
timestamp = datetime.now().strftime("%Y%m%dT%H%M%S")
root = f"dataset_export_{timestamp}"
datasource_file = datasource_file or f"{root}.zip"
try:
with ZipFile(datasource_file, "w") as bundle:
for file_name, file_content in ExportDatasetsCommand(dataset_ids).run():
with bundle.open(f"{root}/{file_name}", "w") as fp:
fp.write(file_content.encode())
except Exception: # pylint: disable=broad-except
logger.exception(
"There was an error when exporting the datasets, please check "
"the exception traceback in the log"
)
sys.exit(1)
@click.command()
@with_appcontext
@click.option(
"--path", "-p", help="Path to a single ZIP file",
)
@click.option(
"--username",
"-u",
default=None,
help="Specify the user name to assign dashboards to",
)
def import_dashboards(path: str, username: Optional[str]) -> None:
"""Import dashboards from ZIP file"""
# pylint: disable=import-outside-toplevel
from superset.commands.importers.v1.utils import get_contents_from_bundle
from superset.dashboards.commands.importers.dispatcher import (
ImportDashboardsCommand,
)
if username is not None:
g.user = security_manager.find_user(username=username)
if is_zipfile(path):
with ZipFile(path) as bundle:
contents = get_contents_from_bundle(bundle)
else:
with open(path) as file:
contents = {path: file.read()}
try:
ImportDashboardsCommand(contents, overwrite=True).run()
except Exception: # pylint: disable=broad-except
logger.exception(
"There was an error when importing the dashboards(s), please check "
"the exception traceback in the log"
)
sys.exit(1)
@click.command()
@with_appcontext
@click.option(
"--path", "-p", help="Path to a single ZIP file",
)
def import_datasources(path: str) -> None:
"""Import datasources from ZIP file"""
# pylint: disable=import-outside-toplevel
from superset.commands.importers.v1.utils import get_contents_from_bundle
from superset.datasets.commands.importers.dispatcher import (
ImportDatasetsCommand,
)
if is_zipfile(path):
with ZipFile(path) as bundle:
contents = get_contents_from_bundle(bundle)
else:
with open(path) as file:
contents = {path: file.read()}
try:
ImportDatasetsCommand(contents, overwrite=True).run()
except Exception: # pylint: disable=broad-except
logger.exception(
"There was an error when importing the dataset(s), please check the "
"exception traceback in the log"
)
sys.exit(1)
else:
@click.command()
@with_appcontext
@click.option(
"--dashboard-file",
"-f",
default=None,
help="Specify the the file to export to",
)
@click.option(
"--print_stdout",
"-p",
is_flag=True,
default=False,
help="Print JSON to stdout",
)
def export_dashboards(
dashboard_file: Optional[str], print_stdout: bool = False
) -> None:
"""Export dashboards to JSON"""
# pylint: disable=import-outside-toplevel
from superset.utils import dashboard_import_export
data = dashboard_import_export.export_dashboards(db.session)
if print_stdout or not dashboard_file:
print(data)
if dashboard_file:
logger.info("Exporting dashboards to %s", dashboard_file)
with open(dashboard_file, "w") as data_stream:
data_stream.write(data)
@click.command()
@with_appcontext
@click.option(
"--datasource-file",
"-f",
default=None,
help="Specify the the file to export to",
)
@click.option(
"--print_stdout",
"-p",
is_flag=True,
default=False,
help="Print YAML to stdout",
)
@click.option(
"--back-references",
"-b",
is_flag=True,
default=False,
help="Include parent back references",
)
@click.option(
"--include-defaults",
"-d",
is_flag=True,
default=False,
help="Include fields containing defaults",
)
def export_datasources(
datasource_file: Optional[str],
print_stdout: bool = False,
back_references: bool = False,
include_defaults: bool = False,
) -> None:
"""Export datasources to YAML"""
# pylint: disable=import-outside-toplevel
from superset.utils import dict_import_export
data = dict_import_export.export_to_dict(
session=db.session,
recursive=True,
back_references=back_references,
include_defaults=include_defaults,
)
if print_stdout or not datasource_file:
yaml.safe_dump(data, sys.stdout, default_flow_style=False)
if datasource_file:
logger.info("Exporting datasources to %s", datasource_file)
with open(datasource_file, "w") as data_stream:
yaml.safe_dump(data, data_stream, default_flow_style=False)
@click.command()
@with_appcontext
@click.option(
"--path",
"-p",
help="Path to a single JSON file or path containing multiple JSON "
"files to import (*.json)",
)
@click.option(
"--recursive",
"-r",
is_flag=True,
default=False,
help="recursively search the path for json files",
)
@click.option(
"--username",
"-u",
default=None,
help="Specify the user name to assign dashboards to",
)
def import_dashboards(path: str, recursive: bool, username: str) -> None:
"""Import dashboards from JSON file"""
# pylint: disable=import-outside-toplevel
from superset.dashboards.commands.importers.v0 import ImportDashboardsCommand
path_object = Path(path)
files: List[Path] = []
if path_object.is_file():
files.append(path_object)
elif path_object.exists() and not recursive:
files.extend(path_object.glob("*.json"))
elif path_object.exists() and recursive:
files.extend(path_object.rglob("*.json"))
if username is not None:
g.user = security_manager.find_user(username=username)
contents = {}
for path_ in files:
with open(path_) as file:
contents[path_.name] = file.read()
try:
ImportDashboardsCommand(contents).run()
except Exception: # pylint: disable=broad-except
logger.exception("Error when importing dashboard")
sys.exit(1)
@click.command()
@with_appcontext
@click.option(
"--path",
"-p",
help="Path to a single YAML file or path containing multiple YAML "
"files to import (*.yaml or *.yml)",
)
@click.option(
"--sync",
"-s",
"sync",
default="",
help="comma seperated list of element types to synchronize "
'e.g. "metrics,columns" deletes metrics and columns in the DB '
"that are not specified in the YAML file",
)
@click.option(
"--recursive",
"-r",
is_flag=True,
default=False,
help="recursively search the path for yaml files",
)
def import_datasources(path: str, sync: str, recursive: bool) -> None:
"""Import datasources from YAML"""
# pylint: disable=import-outside-toplevel
from superset.datasets.commands.importers.v0 import ImportDatasetsCommand
sync_array = sync.split(",")
sync_columns = "columns" in sync_array
sync_metrics = "metrics" in sync_array
path_object = Path(path)
files: List[Path] = []
if path_object.is_file():
files.append(path_object)
elif path_object.exists() and not recursive:
files.extend(path_object.glob("*.yaml"))
files.extend(path_object.glob("*.yml"))
elif path_object.exists() and recursive:
files.extend(path_object.rglob("*.yaml"))
files.extend(path_object.rglob("*.yml"))
contents = {}
for path_ in files:
with open(path_) as file:
contents[path_.name] = file.read()
try:
ImportDatasetsCommand(contents, sync_columns, sync_metrics).run()
except Exception: # pylint: disable=broad-except
logger.exception("Error when importing dataset")
sys.exit(1)
@click.command()
@with_appcontext
@click.option(
"--back-references",
"-b",
is_flag=True,
default=False,
help="Include parent back references",
)
def export_datasource_schema(back_references: bool) -> None:
"""Export datasource YAML schema to stdout"""
# pylint: disable=import-outside-toplevel
from superset.utils import dict_import_export
data = dict_import_export.export_schema_to_dict(back_references=back_references)
yaml.safe_dump(data, sys.stdout, default_flow_style=False)
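
Note that export_dashboards, export_datasources, import_dashboards, and import_datasources each have two implementations in this module, and which one exists is decided at import time by the VERSIONED_EXPORT feature flag. This is why the integration tests further down reload the module before invoking the commands; a minimal sketch of that pattern (assuming a configured Flask app):

import importlib

import superset.cli.importexport as importexport
from superset import app

# Re-evaluate the module so the export_dashboards that matches the
# current VERSIONED_EXPORT flag is the one that gets defined.
importlib.reload(importexport)
runner = app.test_cli_runner()
result = runner.invoke(importexport.export_dashboards, ("-f", "dashboards.json"))
assert result.exit_code == 0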

48
superset/cli/lib.py Executable file

@@ -0,0 +1,48 @@
#!/usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
from superset import config
logger = logging.getLogger(__name__)
feature_flags = config.DEFAULT_FEATURE_FLAGS.copy()
feature_flags.update(config.FEATURE_FLAGS)
feature_flags_func = config.GET_FEATURE_FLAGS_FUNC
if feature_flags_func:
# pylint: disable=not-callable
try:
feature_flags = feature_flags_func(feature_flags)
except Exception: # pylint: disable=broad-except
# bypass any feature flags that depend on context
# that's not available
pass
def normalize_token(token_name: str) -> str:
"""
As of click>=7, underscores in function names are replaced by dashes.
To avoid renaming all CLI functions, e.g. load_examples to
load-examples, this function normalizes user-supplied tokens by
converting underscores to dashes.
:param token_name: token name possibly containing underscores
:return: token name with underscores replaced by dashes
"""
return token_name.replace("_", "-")
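
Because normalize_token is passed to click as token_normalize_func (see superset/cli/main.py below), user input is normalized before command lookup, so both superset load_examples and superset load-examples resolve to the same command. A self-contained illustration with a toy group (not part of the commit):

import click
from click.testing import CliRunner

from superset.cli.lib import normalize_token

@click.group(context_settings={"token_normalize_func": normalize_token})
def demo() -> None:
    """Toy group used only to demonstrate token normalization."""

@demo.command()
def load_examples() -> None:
    # click>=7 registers this command under the name "load-examples"
    click.echo("loaded")

runner = CliRunner()
# The typed token "load_examples" is normalized to "load-examples"
# before lookup, so both spellings invoke the same command.
assert runner.invoke(demo, ["load-examples"]).output == "loaded\n"
assert runner.invoke(demo, ["load_examples"]).output == "loaded\n"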

79
superset/cli/main.py Executable file

@@ -0,0 +1,79 @@
#!/usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import importlib
import logging
import pkgutil
from typing import Any, Dict
import click
from colorama import Fore, Style
from flask.cli import FlaskGroup, with_appcontext
from superset import app, appbuilder, cli, security_manager
from superset.cli.lib import normalize_token
from superset.extensions import db
logger = logging.getLogger(__name__)
@click.group(
cls=FlaskGroup, context_settings={"token_normalize_func": normalize_token},
)
@with_appcontext
def superset() -> None:
"""This is a management script for the Superset application."""
@app.shell_context_processor
def make_shell_context() -> Dict[str, Any]:
return dict(app=app, db=db)
# add sub-commands
for load, module_name, is_pkg in pkgutil.walk_packages(
cli.__path__, cli.__name__ + "." # type: ignore
):
module = importlib.import_module(module_name)
for attribute in module.__dict__.values():
if isinstance(attribute, click.core.Command):
superset.add_command(attribute)
@superset.command()
@with_appcontext
def init() -> None:
"""Inits the Superset application"""
appbuilder.add_permissions(update_perms=True)
security_manager.sync_role_definitions()
@superset.command()
@with_appcontext
@click.option("--verbose", "-v", is_flag=True, help="Show extra information")
def version(verbose: bool) -> None:
"""Prints the current version number"""
print(Fore.BLUE + "-=" * 15)
print(
Fore.YELLOW
+ "Superset "
+ Fore.CYAN
+ "{version}".format(version=app.config["VERSION_STRING"])
)
print(Fore.BLUE + "-=" * 15)
if verbose:
print("[DB] : " + "{}".format(db.engine))
print(Style.RESET_ALL)
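
The pkgutil/importlib loop near the top of this file is what makes the split work: any click.core.Command defined at module level in a module under superset.cli is added to the superset group automatically, so new subcommands need no registration step in main.py. A hypothetical module sketch (not part of this commit) showing what that implies:

# superset/cli/hello.py -- hypothetical module, shown only to illustrate
# the dynamic discovery performed in superset/cli/main.py
import click
from flask.cli import with_appcontext

@click.command()
@with_appcontext
def hello() -> None:
    """Module-level click commands under superset.cli are auto-registered."""
    click.echo("hello from a dynamically discovered subcommand")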

110
superset/cli/test.py Executable file

@@ -0,0 +1,110 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
from datetime import datetime, timedelta
import click
from colorama import Fore
from flask.cli import with_appcontext
import superset.utils.database as database_utils
from superset import app, security_manager
from superset.utils.celery import session_scope
logger = logging.getLogger(__name__)
@click.command()
@with_appcontext
def load_test_users() -> None:
"""
Loads admin, alpha, and gamma users for testing purposes
Syncs permissions for those users/roles
"""
print(Fore.GREEN + "Loading a set of users for unit tests")
load_test_users_run()
def load_test_users_run() -> None:
"""
Loads admin, alpha, and gamma users for testing purposes
Syncs permissions for those users/roles
"""
if app.config["TESTING"]:
sm = security_manager
examples_db = database_utils.get_example_database()
examples_pv = sm.add_permission_view_menu("database_access", examples_db.perm)
sm.sync_role_definitions()
gamma_sqllab_role = sm.add_role("gamma_sqllab")
sm.add_permission_role(gamma_sqllab_role, examples_pv)
gamma_no_csv_role = sm.add_role("gamma_no_csv")
sm.add_permission_role(gamma_no_csv_role, examples_pv)
for role in ["Gamma", "sql_lab"]:
for perm in sm.find_role(role).permissions:
sm.add_permission_role(gamma_sqllab_role, perm)
if str(perm) != "can csv on Superset":
sm.add_permission_role(gamma_no_csv_role, perm)
users = (
("admin", "Admin"),
("gamma", "Gamma"),
("gamma2", "Gamma"),
("gamma_sqllab", "gamma_sqllab"),
("alpha", "Alpha"),
("gamma_no_csv", "gamma_no_csv"),
)
for username, role in users:
user = sm.find_user(username)
if not user:
sm.add_user(
username,
username,
"user",
username + "@fab.org",
sm.find_role(role),
password="general",
)
sm.get_session.commit()
@click.command()
@with_appcontext
def alert() -> None:
"""Run the alert scheduler loop"""
# this command is just for testing purposes
# pylint: disable=import-outside-toplevel
from superset.models.schedules import ScheduleType
from superset.tasks.schedules import schedule_window
click.secho("Processing one alert loop", fg="green")
with session_scope(nullpool=True) as session:
schedule_window(
report_type=ScheduleType.alert,
start_at=datetime.now() - timedelta(1000),
stop_at=datetime.now(),
resolution=6000,
session=session,
)

106
superset/cli/thumbnails.py Executable file

@@ -0,0 +1,106 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
from typing import Type, Union
import click
from celery.utils.abstract import CallableTask
from flask.cli import with_appcontext
from superset.extensions import db
from superset.utils.urls import get_url_path
logger = logging.getLogger(__name__)
@click.command()
@with_appcontext
@click.option(
"--asynchronous",
"-a",
is_flag=True,
default=False,
help="Trigger commands to run remotely on a worker",
)
@click.option(
"--dashboards_only",
"-d",
is_flag=True,
default=False,
help="Only process dashboards",
)
@click.option(
"--charts_only", "-c", is_flag=True, default=False, help="Only process charts",
)
@click.option(
"--force",
"-f",
is_flag=True,
default=False,
help="Force refresh, even if previously cached",
)
@click.option("--model_id", "-i", multiple=True)
def compute_thumbnails(
asynchronous: bool,
dashboards_only: bool,
charts_only: bool,
force: bool,
model_id: int,
) -> None:
"""Compute thumbnails"""
# pylint: disable=import-outside-toplevel
from superset.models.dashboard import Dashboard
from superset.models.slice import Slice
from superset.tasks.thumbnails import (
cache_chart_thumbnail,
cache_dashboard_thumbnail,
)
def compute_generic_thumbnail(
friendly_type: str,
model_cls: Union[Type[Dashboard], Type[Slice]],
model_id: int,
compute_func: CallableTask,
) -> None:
query = db.session.query(model_cls)
if model_id:
query = query.filter(model_cls.id.in_(model_id))
dashboards = query.all()
count = len(dashboards)
for i, model in enumerate(dashboards):
if asynchronous:
func = compute_func.delay
action = "Triggering"
else:
func = compute_func
action = "Processing"
msg = f'{action} {friendly_type} "{model}" ({i+1}/{count})'
click.secho(msg, fg="green")
if friendly_type == "chart":
url = get_url_path(
"Superset.slice", slice_id=model.id, standalone="true"
)
else:
url = get_url_path("Superset.dashboard", dashboard_id_or_slug=model.id)
func(url, model.digest, force=force)
if not charts_only:
compute_generic_thumbnail(
"dashboard", Dashboard, model_id, cache_dashboard_thumbnail
)
if not dashboards_only:
compute_generic_thumbnail("chart", Slice, model_id, cache_chart_thumbnail)

181
superset/cli/update.py Executable file

@@ -0,0 +1,181 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import json
import logging
import os
import sys
from datetime import datetime
from typing import Optional
import click
from apispec import APISpec
from apispec.ext.marshmallow import MarshmallowPlugin
from flask import current_app
from flask.cli import with_appcontext
from flask_appbuilder import Model
from flask_appbuilder.api import BaseApi
from flask_appbuilder.api.manager import resolver
import superset.utils.database as database_utils
from superset.extensions import db
from superset.utils.encrypt import SecretsMigrator
logger = logging.getLogger(__name__)
@click.command()
@with_appcontext
@click.option("--database_name", "-d", help="Database name to change")
@click.option("--uri", "-u", help="Database URI to change")
@click.option(
"--skip_create",
"-s",
is_flag=True,
default=False,
help="Create the DB if it doesn't exist",
)
def set_database_uri(database_name: str, uri: str, skip_create: bool) -> None:
"""Updates a database connection URI"""
database_utils.get_or_create_db(database_name, uri, not skip_create)
@click.command()
@with_appcontext
@click.option(
"--datasource",
"-d",
help="Specify which datasource name to load, if "
"omitted, all datasources will be refreshed",
)
@click.option(
"--merge",
"-m",
is_flag=True,
default=False,
help="Specify using 'merge' property during operation. " "Default value is False.",
)
def refresh_druid(datasource: str, merge: bool) -> None:
"""Refresh druid datasources"""
# pylint: disable=import-outside-toplevel
from superset.connectors.druid.models import DruidCluster
session = db.session()
for cluster in session.query(DruidCluster).all():
try:
cluster.refresh_datasources(datasource_name=datasource, merge_flag=merge)
except Exception as ex: # pylint: disable=broad-except
print("Error while processing cluster '{}'\n{}".format(cluster, str(ex)))
logger.exception(ex)
cluster.metadata_last_refreshed = datetime.now()
print("Refreshed metadata from cluster " "[" + cluster.cluster_name + "]")
session.commit()
@click.command()
@with_appcontext
def update_datasources_cache() -> None:
"""Refresh sqllab datasources cache"""
# pylint: disable=import-outside-toplevel
from superset.models.core import Database
for database in db.session.query(Database).all():
if database.allow_multi_schema_metadata_fetch:
print("Fetching {} datasources ...".format(database.name))
try:
database.get_all_table_names_in_database(
force=True, cache=True, cache_timeout=24 * 60 * 60
)
database.get_all_view_names_in_database(
force=True, cache=True, cache_timeout=24 * 60 * 60
)
except Exception as ex: # pylint: disable=broad-except
print("{}".format(str(ex)))


@click.command()
@with_appcontext
def sync_tags() -> None:
    """Rebuilds special tags (owner, type, favorited by)."""
    # pylint: disable=no-member
    metadata = Model.metadata

    # pylint: disable=import-outside-toplevel
    from superset.common.tags import add_favorites, add_owners, add_types

    add_types(db.engine, metadata)
    add_owners(db.engine, metadata)
    add_favorites(db.engine, metadata)


@click.command()
@with_appcontext
def update_api_docs() -> None:
    """Regenerate the openapi.json file in docs"""
    superset_dir = os.path.abspath(os.path.dirname(__file__))
    # this module lives in superset/cli/, so the repo-level docs/
    # directory is two levels up
    openapi_json = os.path.join(
        superset_dir, "..", "..", "docs", "src", "resources", "openapi.json"
    )
    api_version = "v1"
    version_found = False
    api_spec = APISpec(
        title=current_app.appbuilder.app_name,
        version=api_version,
        openapi_version="3.0.2",
        info=dict(description=current_app.appbuilder.app_name),
        plugins=[MarshmallowPlugin(schema_name_resolver=resolver)],
        servers=[{"url": "http://localhost:8088"}],
    )
    for base_api in current_app.appbuilder.baseviews:
        if isinstance(base_api, BaseApi) and base_api.version == api_version:
            base_api.add_api_spec(api_spec)
            version_found = True
    if version_found:
        click.secho("Generating openapi.json", fg="green")
        with open(openapi_json, "w") as outfile:
            json.dump(api_spec.to_dict(), outfile, sort_keys=True, indent=2)
    else:
        click.secho("API version not found", err=True)


@click.command()
@with_appcontext
@click.option(
    "--previous_secret_key",
    "-a",
    required=False,
    help="An optional previous secret key, if PREVIOUS_SECRET_KEY "
    "is not set on the config",
)
def re_encrypt_secrets(previous_secret_key: Optional[str] = None) -> None:
    """Re-encrypt secrets with the current SECRET_KEY"""
    previous_secret_key = previous_secret_key or current_app.config.get(
        "PREVIOUS_SECRET_KEY"
    )
    if previous_secret_key is None:
        click.secho("A previous secret key must be provided", err=True)
        sys.exit(1)
    secrets_migrator = SecretsMigrator(previous_secret_key=previous_secret_key)
    try:
        secrets_migrator.run()
    except ValueError as exc:
        click.secho(
            "An error occurred, probably an invalid previous secret key was "
            f"provided. Error: [{exc}]",
            err=True,
        )
        sys.exit(1)
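
Note: the commands above are declared with bare @click.command() decorators, so nothing in this file attaches them to a group; the main superset group has to discover and register them. A minimal sketch of how such dynamic registration can work, assuming a register_cli_commands helper and a pkgutil walk over the package (both names are illustrative, not the exact code from this commit):

# Illustrative sketch: discover click commands in superset.cli.* submodules
# and attach them to the main CLI group at startup.
import importlib
import pkgutil

import click

import superset.cli


def register_cli_commands(group: click.Group) -> None:
    for _, module_name, _ in pkgutil.iter_modules(superset.cli.__path__):
        module = importlib.import_module(f"superset.cli.{module_name}")
        for attribute in module.__dict__.values():
            if isinstance(attribute, click.Command):
                group.add_command(attribute)

With this pattern, dropping a new module into superset/cli/ is enough for its commands to show up as superset subcommands.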

View File

@ -26,7 +26,7 @@ import pytest
import yaml
from freezegun import freeze_time

import superset.cli
import superset.cli.importexport
from superset import app
from tests.integration_tests.fixtures.birth_names_dashboard import (
    load_birth_names_dashboard_with_slices,
@ -53,14 +53,16 @@ def test_export_dashboards_original(app_context, fs):
    Test that a JSON file is exported.
    """
    # pylint: disable=reimported, redefined-outer-name
    import superset.cli  # noqa: F811
    import superset.cli.importexport  # noqa: F811
    # reload to define export_dashboards correctly based on the
    # feature flags
    importlib.reload(superset.cli)
    importlib.reload(superset.cli.importexport)
    runner = app.test_cli_runner()
    response = runner.invoke(superset.cli.export_dashboards, ("-f", "dashboards.json"))
    response = runner.invoke(
        superset.cli.importexport.export_dashboards, ("-f", "dashboards.json")
    )
    assert response.exit_code == 0
    assert Path("dashboards.json").exists()
@ -77,15 +79,15 @@ def test_export_datasources_original(app_context, fs):
    Test that a YAML file is exported.
    """
    # pylint: disable=reimported, redefined-outer-name
    import superset.cli  # noqa: F811
    import superset.cli.importexport  # noqa: F811
    # reload to define export_dashboards correctly based on the
    # feature flags
    importlib.reload(superset.cli)
    importlib.reload(superset.cli.importexport)
    runner = app.test_cli_runner()
    response = runner.invoke(
        superset.cli.export_datasources, ("-f", "datasources.yaml")
        superset.cli.importexport.export_datasources, ("-f", "datasources.yaml")
    )
    assert response.exit_code == 0
@ -99,22 +101,22 @@ def test_export_datasources_original(app_context, fs):
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
@mock.patch.dict(
    "superset.config.DEFAULT_FEATURE_FLAGS", {"VERSIONED_EXPORT": True}, clear=True
    "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": True}, clear=True
)
def test_export_dashboards_versioned_export(app_context, fs):
    """
    Test that a ZIP file is exported.
    """
    # pylint: disable=reimported, redefined-outer-name
    import superset.cli  # noqa: F811
    import superset.cli.importexport  # noqa: F811
    # reload to define export_dashboards correctly based on the
    # feature flags
    importlib.reload(superset.cli)
    importlib.reload(superset.cli.importexport)
    runner = app.test_cli_runner()
    with freeze_time("2021-01-01T00:00:00Z"):
        response = runner.invoke(superset.cli.export_dashboards, ())
        response = runner.invoke(superset.cli.importexport.export_dashboards, ())
    assert response.exit_code == 0
    assert Path("dashboard_export_20210101T000000.zip").exists()
@ -123,7 +125,7 @@ def test_export_dashboards_versioned_export(app_context, fs):
@mock.patch.dict(
"superset.config.DEFAULT_FEATURE_FLAGS", {"VERSIONED_EXPORT": True}, clear=True
"superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": True}, clear=True
)
@mock.patch(
"superset.dashboards.commands.export.ExportDashboardsCommand.run",
@ -138,37 +140,37 @@ def test_failing_export_dashboards_versioned_export(
    caplog.set_level(logging.DEBUG)
    # pylint: disable=reimported, redefined-outer-name
    import superset.cli  # noqa: F811
    import superset.cli.importexport  # noqa: F811
    # reload to define export_dashboards correctly based on the
    # feature flags
    importlib.reload(superset.cli)
    importlib.reload(superset.cli.importexport)
    runner = app.test_cli_runner()
    with freeze_time("2021-01-01T00:00:00Z"):
        response = runner.invoke(superset.cli.export_dashboards, ())
        response = runner.invoke(superset.cli.importexport.export_dashboards, ())
    assert_cli_fails_properly(response, caplog)


@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
@mock.patch.dict(
    "superset.config.DEFAULT_FEATURE_FLAGS", {"VERSIONED_EXPORT": True}, clear=True
    "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": True}, clear=True
)
def test_export_datasources_versioned_export(app_context, fs):
    """
    Test that a ZIP file is exported.
    """
    # pylint: disable=reimported, redefined-outer-name
    import superset.cli  # noqa: F811
    import superset.cli.importexport  # noqa: F811
    # reload to define export_dashboards correctly based on the
    # feature flags
    importlib.reload(superset.cli)
    importlib.reload(superset.cli.importexport)
    runner = app.test_cli_runner()
    with freeze_time("2021-01-01T00:00:00Z"):
        response = runner.invoke(superset.cli.export_datasources, ())
        response = runner.invoke(superset.cli.importexport.export_datasources, ())
    assert response.exit_code == 0
    assert Path("dataset_export_20210101T000000.zip").exists()
@ -177,7 +179,7 @@ def test_export_datasources_versioned_export(app_context, fs):
@mock.patch.dict(
"superset.config.DEFAULT_FEATURE_FLAGS", {"VERSIONED_EXPORT": True}, clear=True
"superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": True}, clear=True
)
@mock.patch(
"superset.dashboards.commands.export.ExportDatasetsCommand.run",
@ -190,21 +192,21 @@ def test_failing_export_datasources_versioned_export(
    Test that failing to export a ZIP file is done elegantly.
    """
    # pylint: disable=reimported, redefined-outer-name
    import superset.cli  # noqa: F811
    import superset.cli.importexport  # noqa: F811
    # reload to define export_dashboards correctly based on the
    # feature flags
    importlib.reload(superset.cli)
    importlib.reload(superset.cli.importexport)
    runner = app.test_cli_runner()
    with freeze_time("2021-01-01T00:00:00Z"):
        response = runner.invoke(superset.cli.export_datasources, ())
        response = runner.invoke(superset.cli.importexport.export_datasources, ())
    assert_cli_fails_properly(response, caplog)


@mock.patch.dict(
    "superset.config.DEFAULT_FEATURE_FLAGS", {"VERSIONED_EXPORT": True}, clear=True
    "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": True}, clear=True
)
@mock.patch("superset.dashboards.commands.importers.dispatcher.ImportDashboardsCommand")
def test_import_dashboards_versioned_export(import_dashboards_command, app_context, fs):
@ -212,18 +214,20 @@ def test_import_dashboards_versioned_export(import_dashboards_command, app_context, fs):
    Test that both ZIP and JSON can be imported.
    """
    # pylint: disable=reimported, redefined-outer-name
    import superset.cli  # noqa: F811
    import superset.cli.importexport  # noqa: F811
    # reload to define export_dashboards correctly based on the
    # feature flags
    importlib.reload(superset.cli)
    importlib.reload(superset.cli.importexport)
    # write JSON file
    with open("dashboards.json", "w") as fp:
        fp.write('{"hello": "world"}')
    runner = app.test_cli_runner()
    response = runner.invoke(superset.cli.import_dashboards, ("-p", "dashboards.json"))
    response = runner.invoke(
        superset.cli.importexport.import_dashboards, ("-p", "dashboards.json")
    )
    assert response.exit_code == 0
    expected_contents = {"dashboards.json": '{"hello": "world"}'}
@ -235,7 +239,9 @@ def test_import_dashboards_versioned_export(import_dashboards_command, app_context, fs):
        fp.write(b"hello: world")
    runner = app.test_cli_runner()
    response = runner.invoke(superset.cli.import_dashboards, ("-p", "dashboards.zip"))
    response = runner.invoke(
        superset.cli.importexport.import_dashboards, ("-p", "dashboards.zip")
    )
    assert response.exit_code == 0
    expected_contents = {"dashboard.yaml": "hello: world"}
@ -243,7 +249,7 @@ def test_import_dashboards_versioned_export(import_dashboards_command, app_context, fs):
@mock.patch.dict(
"superset.config.DEFAULT_FEATURE_FLAGS", {"VERSIONED_EXPORT": True}, clear=True
"superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": True}, clear=True
)
@mock.patch(
"superset.dashboards.commands.importers.dispatcher.ImportDashboardsCommand.run",
@ -256,18 +262,20 @@ def test_failing_import_dashboards_versioned_export(
    Test that failing to import either ZIP or JSON is done elegantly.
    """
    # pylint: disable=reimported, redefined-outer-name
    import superset.cli  # noqa: F811
    import superset.cli.importexport  # noqa: F811
    # reload to define export_dashboards correctly based on the
    # feature flags
    importlib.reload(superset.cli)
    importlib.reload(superset.cli.importexport)
    # write JSON file
    with open("dashboards.json", "w") as fp:
        fp.write('{"hello": "world"}')
    runner = app.test_cli_runner()
    response = runner.invoke(superset.cli.import_dashboards, ("-p", "dashboards.json"))
    response = runner.invoke(
        superset.cli.importexport.import_dashboards, ("-p", "dashboards.json")
    )
    assert_cli_fails_properly(response, caplog)
@ -277,13 +285,15 @@ def test_failing_import_dashboards_versioned_export(
        fp.write(b"hello: world")
    runner = app.test_cli_runner()
    response = runner.invoke(superset.cli.import_dashboards, ("-p", "dashboards.zip"))
    response = runner.invoke(
        superset.cli.importexport.import_dashboards, ("-p", "dashboards.zip")
    )
    assert_cli_fails_properly(response, caplog)


@mock.patch.dict(
"superset.config.DEFAULT_FEATURE_FLAGS", {"VERSIONED_EXPORT": True}, clear=True
"superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": True}, clear=True
)
@mock.patch("superset.datasets.commands.importers.dispatcher.ImportDatasetsCommand")
def test_import_datasets_versioned_export(import_datasets_command, app_context, fs):
@ -291,18 +301,20 @@ def test_import_datasets_versioned_export(import_datasets_command, app_context, fs):
    Test that both ZIP and YAML can be imported.
    """
    # pylint: disable=reimported, redefined-outer-name
    import superset.cli  # noqa: F811
    import superset.cli.importexport  # noqa: F811
    # reload to define export_datasets correctly based on the
    # feature flags
    importlib.reload(superset.cli)
    importlib.reload(superset.cli.importexport)
    # write YAML file
    with open("datasets.yaml", "w") as fp:
        fp.write("hello: world")
    runner = app.test_cli_runner()
    response = runner.invoke(superset.cli.import_datasources, ("-p", "datasets.yaml"))
    response = runner.invoke(
        superset.cli.importexport.import_datasources, ("-p", "datasets.yaml")
    )
    assert response.exit_code == 0
    expected_contents = {"datasets.yaml": "hello: world"}
@ -314,7 +326,9 @@ def test_import_datasets_versioned_export(import_datasets_command, app_context, fs):
        fp.write(b"hello: world")
    runner = app.test_cli_runner()
    response = runner.invoke(superset.cli.import_datasources, ("-p", "datasets.zip"))
    response = runner.invoke(
        superset.cli.importexport.import_datasources, ("-p", "datasets.zip")
    )
    assert response.exit_code == 0
    expected_contents = {"dataset.yaml": "hello: world"}
@ -322,7 +336,7 @@ def test_import_datasets_versioned_export(import_datasets_command, app_context, fs):
@mock.patch.dict(
"superset.config.DEFAULT_FEATURE_FLAGS", {"VERSIONED_EXPORT": True}, clear=True
"superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": True}, clear=True
)
@mock.patch(
"superset.datasets.commands.importers.dispatcher.ImportDatasetsCommand.run",
@ -335,18 +349,20 @@ def test_failing_import_datasets_versioned_export(
    Test that failing to import either ZIP or YAML is done elegantly.
    """
    # pylint: disable=reimported, redefined-outer-name
    import superset.cli  # noqa: F811
    import superset.cli.importexport  # noqa: F811
    # reload to define export_datasets correctly based on the
    # feature flags
    importlib.reload(superset.cli)
    importlib.reload(superset.cli.importexport)
    # write YAML file
    with open("datasets.yaml", "w") as fp:
        fp.write("hello: world")
    runner = app.test_cli_runner()
    response = runner.invoke(superset.cli.import_datasources, ("-p", "datasets.yaml"))
    response = runner.invoke(
        superset.cli.importexport.import_datasources, ("-p", "datasets.yaml")
    )
    assert_cli_fails_properly(response, caplog)
@ -356,6 +372,8 @@ def test_failing_import_datasets_versioned_export(
        fp.write(b"hello: world")
    runner = app.test_cli_runner()
    response = runner.invoke(superset.cli.import_datasources, ("-p", "datasets.zip"))
    response = runner.invoke(
        superset.cli.importexport.import_datasources, ("-p", "datasets.zip")
    )
    assert_cli_fails_properly(response, caplog)
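
The patch-and-reload dance repeated in the tests above exists because superset.cli.lib takes a module-level snapshot of the feature flags, and superset.cli.importexport decides at import time which export/import commands to define based on that snapshot. Condensed, the pattern is (flag and command taken from the tests above):

# Condensed sketch of the patch-and-reload pattern used in these tests.
import importlib
from unittest import mock

import superset.cli.importexport
from superset import app

with mock.patch.dict(
    "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": True}, clear=True
):
    # the commands are defined at import time based on the flags,
    # so the module must be reloaded after patching
    importlib.reload(superset.cli.importexport)
    runner = app.test_cli_runner()
    response = runner.invoke(superset.cli.importexport.export_dashboards, ())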

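assert_cli_fails_properly is a helper defined elsewhere in this test module and is unchanged by this commit. A plausible sketch of what such a helper verifies, assuming the intent is a clean non-zero exit with the error logged (the exact assertions are an assumption, not the real implementation):

def assert_cli_fails_properly(response, caplog) -> None:
    # the command must not exit successfully
    assert response.exit_code != 0
    # and the failure should be logged instead of raising a raw traceback
    assert caplog.records and caplog.records[-1].levelname == "ERROR"
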
View File

@ -50,7 +50,7 @@ def setup_sample_data() -> Any:
    with app.app_context():
        setup_presto_if_needed()
        from superset.cli import load_test_users_run
        from superset.cli.test import load_test_users_run
        load_test_users_run()
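
The same runner pattern extends to the commands split out into superset/cli/update.py; for example, a smoke test of set_database_uri could look like this (illustrative sketch with a throwaway in-memory SQLite URI):

# Illustrative: invoke a command object from superset/cli/update.py directly,
# mirroring how the tests above invoke the importexport commands.
import superset.cli.update
from superset import app

runner = app.test_cli_runner()
response = runner.invoke(
    superset.cli.update.set_database_uri, ("-d", "examples", "-u", "sqlite://")
)
assert response.exit_code == 0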