mirror of https://github.com/apache/superset.git

refactor: Removes the deprecated VERSIONED_EXPORT feature flag (#26347)

This commit is contained in:
parent 649ff4dd61
commit f63e66be01
@@ -88,4 +88,3 @@ These feature flags currently default to True and **will be removed in a future
 - ENABLE_JAVASCRIPT_CONTROLS
 - GENERIC_CHART_AXES
 - KV_STORE
-- VERSIONED_EXPORT

@@ -28,6 +28,7 @@ assists people when migrating to a new version.

 ### Breaking Changes

+- [26347](https://github.com/apache/superset/issues/26347): Removes the deprecated `VERSIONED_EXPORT` feature flag. The previous value of the feature flag was `True`, so the feature is now permanently enabled.
 - [26328](https://github.com/apache/superset/issues/26328): Removes the deprecated Filter Box code and its associated dependencies `react-select` and `array-move`. It also removes the `DeprecatedSelect` and `AsyncSelect` components that were exclusively used by filter boxes. Existing filter boxes will be automatically migrated to native filters.
 - [26330](https://github.com/apache/superset/issues/26330): Removes the deprecated `DASHBOARD_FILTERS_EXPERIMENTAL` feature flag. The previous value of the feature flag was `False`, so the feature is now permanently removed.
 - [26344](https://github.com/apache/superset/issues/26344): Removes the deprecated `ENABLE_EXPLORE_JSON_CSRF_PROTECTION` feature flag. The previous value of the feature flag was `False`, so the feature is now permanently removed.

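For operators upgrading past this commit, a leftover `VERSIONED_EXPORT` entry in a deployment's `FEATURE_FLAGS` is no longer read by anything and can simply be deleted. A minimal sketch of a hypothetical `superset_config.py` (the other flag shown is illustrative only, not part of this commit):

```python
# superset_config.py -- hypothetical operator config shown for illustration.
# Versioned export/import is now always on, so the entry below is dead weight.
FEATURE_FLAGS = {
    # "VERSIONED_EXPORT": True,  # obsolete after this commit; delete this line
    "DASHBOARD_VIRTUALIZATION": True,  # unrelated flags are unaffected
}
```
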
@@ -57,7 +57,6 @@ export enum FeatureFlag {
   TAGGING_SYSTEM = 'TAGGING_SYSTEM',
   THUMBNAILS = 'THUMBNAILS',
   USE_ANALAGOUS_COLORS = 'USE_ANALAGOUS_COLORS',
-  VERSIONED_EXPORT = 'VERSIONED_EXPORT',
 }
 export type ScheduleQueriesProps = {
   JSONSCHEMA: {

@@ -67,8 +67,7 @@ export default function ChartCard({
   const history = useHistory();
   const canEdit = hasPerm('can_write');
   const canDelete = hasPerm('can_write');
-  const canExport =
-    hasPerm('can_export') && isFeatureEnabled(FeatureFlag.VERSIONED_EXPORT);
+  const canExport = hasPerm('can_export');
   const theme = useTheme();

   const menu = (

@@ -234,8 +234,7 @@ function ChartList(props: ChartListProps) {
   const canCreate = hasPerm('can_write');
   const canEdit = hasPerm('can_write');
   const canDelete = hasPerm('can_write');
-  const canExport =
-    hasPerm('can_export') && isFeatureEnabled(FeatureFlag.VERSIONED_EXPORT);
+  const canExport = hasPerm('can_export');
   const initialSort = [{ id: 'changed_on_delta_humanized', desc: true }];
   const handleBulkChartExport = (chartsToExport: Chart[]) => {
     const ids = chartsToExport.map(({ id }) => id);

@@ -777,21 +776,19 @@ function ChartList(props: ChartListProps) {
       },
     });

-    if (isFeatureEnabled(FeatureFlag.VERSIONED_EXPORT)) {
-      subMenuButtons.push({
-        name: (
-          <Tooltip
-            id="import-tooltip"
-            title={t('Import charts')}
-            placement="bottomRight"
-          >
-            <Icons.Import data-test="import-button" />
-          </Tooltip>
-        ),
-        buttonStyle: 'link',
-        onClick: openChartImportModal,
-      });
-    }
+    subMenuButtons.push({
+      name: (
+        <Tooltip
+          id="import-tooltip"
+          title={t('Import charts')}
+          placement="bottomRight"
+        >
+          <Icons.Import data-test="import-button" />
+        </Tooltip>
+      ),
+      buttonStyle: 'link',
+      onClick: openChartImportModal,
+    });
   }

   return (

@@ -183,8 +183,7 @@ function DashboardList(props: DashboardListProps) {
   const canCreate = hasPerm('can_write');
   const canEdit = hasPerm('can_write');
   const canDelete = hasPerm('can_write');
-  const canExport =
-    hasPerm('can_export') && isFeatureEnabled(FeatureFlag.VERSIONED_EXPORT);
+  const canExport = hasPerm('can_export');

   const initialSort = [{ id: 'changed_on_delta_humanized', desc: true }];

@@ -659,21 +658,19 @@ function DashboardList(props: DashboardListProps) {
       },
     });

-    if (isFeatureEnabled(FeatureFlag.VERSIONED_EXPORT)) {
-      subMenuButtons.push({
-        name: (
-          <Tooltip
-            id="import-tooltip"
-            title={t('Import dashboards')}
-            placement="bottomRight"
-          >
-            <Icons.Import data-test="import-button" />
-          </Tooltip>
-        ),
-        buttonStyle: 'link',
-        onClick: openDashboardImportModal,
-      });
-    }
+    subMenuButtons.push({
+      name: (
+        <Tooltip
+          id="import-tooltip"
+          title={t('Import dashboards')}
+          placement="bottomRight"
+        >
+          <Icons.Import data-test="import-button" />
+        </Tooltip>
+      ),
+      buttonStyle: 'link',
+      onClick: openDashboardImportModal,
+    });
   }
   return (
     <>

@@ -17,8 +17,6 @@
  * under the License.
  */
 import {
-  isFeatureEnabled,
-  FeatureFlag,
   getExtensionsRegistry,
   styled,
   SupersetClient,

@@ -216,8 +214,7 @@ function DatabaseList({
   const canCreate = hasPerm('can_write');
   const canEdit = hasPerm('can_write');
   const canDelete = hasPerm('can_write');
-  const canExport =
-    hasPerm('can_export') && isFeatureEnabled(FeatureFlag.VERSIONED_EXPORT);
+  const canExport = hasPerm('can_export');

   const { canUploadCSV, canUploadColumnar, canUploadExcel } = uploadUserPerms(
     roles,

@@ -17,8 +17,6 @@
  * under the License.
  */
 import {
-  isFeatureEnabled,
-  FeatureFlag,
   getExtensionsRegistry,
   styled,
   SupersetClient,

@@ -207,8 +205,7 @@ const DatasetList: FunctionComponent<DatasetListProps> = ({
   const canDelete = hasPerm('can_write');
   const canCreate = hasPerm('can_write');
   const canDuplicate = hasPerm('can_duplicate');
-  const canExport =
-    hasPerm('can_export') && isFeatureEnabled(FeatureFlag.VERSIONED_EXPORT);
+  const canExport = hasPerm('can_export');

   const initialSort = SORT_BY;

@@ -654,21 +651,19 @@ const DatasetList: FunctionComponent<DatasetListProps> = ({
       buttonStyle: 'primary',
     });

-    if (isFeatureEnabled(FeatureFlag.VERSIONED_EXPORT)) {
-      buttonArr.push({
-        name: (
-          <Tooltip
-            id="import-tooltip"
-            title={t('Import datasets')}
-            placement="bottomRight"
-          >
-            <Icons.Import data-test="import-button" />
-          </Tooltip>
-        ),
-        buttonStyle: 'link',
-        onClick: openDatasetImportModal,
-      });
-    }
+    buttonArr.push({
+      name: (
+        <Tooltip
+          id="import-tooltip"
+          title={t('Import datasets')}
+          placement="bottomRight"
+        >
+          <Icons.Import data-test="import-button" />
+        </Tooltip>
+      ),
+      buttonStyle: 'link',
+      onClick: openDatasetImportModal,
+    });
   }

   menuData.buttons = buttonArr;

@@ -159,8 +159,7 @@ function SavedQueryList({
   const canCreate = hasPerm('can_write');
   const canEdit = hasPerm('can_write');
   const canDelete = hasPerm('can_write');
-  const canExport =
-    hasPerm('can_export') && isFeatureEnabled(FeatureFlag.VERSIONED_EXPORT);
+  const canExport = hasPerm('can_export');

   const handleSavedQueryPreview = useCallback(
     (id: number) => {

@@ -204,7 +203,7 @@ function SavedQueryList({
     buttonStyle: 'primary',
   });

-  if (canCreate && isFeatureEnabled(FeatureFlag.VERSIONED_EXPORT)) {
+  if (canCreate) {
     subMenuButtons.push({
       name: (
         <Tooltip

@@ -27,7 +27,6 @@ from flask import g
 from flask.cli import with_appcontext

 from superset import security_manager
-from superset.cli.lib import feature_flags
 from superset.extensions import db

 logger = logging.getLogger(__name__)

@@ -60,338 +59,338 @@ def import_directory(directory: str, overwrite: bool, force: bool) -> None:
     )


-if feature_flags.get("VERSIONED_EXPORT"):
-
-    @click.command()
-    @with_appcontext
-    @click.option(
-        "--dashboard-file",
-        "-f",
-        help="Specify the file to export to",
-    )
-    def export_dashboards(dashboard_file: Optional[str] = None) -> None:
-        """Export dashboards to ZIP file"""
-        # pylint: disable=import-outside-toplevel
-        from superset.commands.dashboard.export import ExportDashboardsCommand
-        from superset.models.dashboard import Dashboard
-
-        g.user = security_manager.find_user(username="admin")
-
-        dashboard_ids = [id_ for (id_,) in db.session.query(Dashboard.id).all()]
-        timestamp = datetime.now().strftime("%Y%m%dT%H%M%S")
-        root = f"dashboard_export_{timestamp}"
-        dashboard_file = dashboard_file or f"{root}.zip"
-
-        try:
-            with ZipFile(dashboard_file, "w") as bundle:
-                for file_name, file_content in ExportDashboardsCommand(
-                    dashboard_ids
-                ).run():
-                    with bundle.open(f"{root}/{file_name}", "w") as fp:
-                        fp.write(file_content.encode())
-        except Exception:  # pylint: disable=broad-except
-            logger.exception(
-                "There was an error when exporting the dashboards, please check "
-                "the exception traceback in the log"
-            )
-            sys.exit(1)
-
-    @click.command()
-    @with_appcontext
-    @click.option(
-        "--datasource-file",
-        "-f",
-        help="Specify the file to export to",
-    )
-    def export_datasources(datasource_file: Optional[str] = None) -> None:
-        """Export datasources to ZIP file"""
-        # pylint: disable=import-outside-toplevel
-        from superset.commands.dataset.export import ExportDatasetsCommand
-        from superset.connectors.sqla.models import SqlaTable
-
-        g.user = security_manager.find_user(username="admin")
-
-        dataset_ids = [id_ for (id_,) in db.session.query(SqlaTable.id).all()]
-        timestamp = datetime.now().strftime("%Y%m%dT%H%M%S")
-        root = f"dataset_export_{timestamp}"
-        datasource_file = datasource_file or f"{root}.zip"
-
-        try:
-            with ZipFile(datasource_file, "w") as bundle:
-                for file_name, file_content in ExportDatasetsCommand(dataset_ids).run():
-                    with bundle.open(f"{root}/{file_name}", "w") as fp:
-                        fp.write(file_content.encode())
-        except Exception:  # pylint: disable=broad-except
-            logger.exception(
-                "There was an error when exporting the datasets, please check "
-                "the exception traceback in the log"
-            )
-            sys.exit(1)
-
-    @click.command()
-    @with_appcontext
-    @click.option(
-        "--path",
-        "-p",
-        help="Path to a single ZIP file",
-    )
-    @click.option(
-        "--username",
-        "-u",
-        default=None,
-        help="Specify the user name to assign dashboards to",
-    )
-    def import_dashboards(path: str, username: Optional[str]) -> None:
-        """Import dashboards from ZIP file"""
-        # pylint: disable=import-outside-toplevel
-        from superset.commands.dashboard.importers.dispatcher import (
-            ImportDashboardsCommand,
-        )
-        from superset.commands.importers.v1.utils import get_contents_from_bundle
-
-        if username is not None:
-            g.user = security_manager.find_user(username=username)
-        if is_zipfile(path):
-            with ZipFile(path) as bundle:
-                contents = get_contents_from_bundle(bundle)
-        else:
-            with open(path) as file:
-                contents = {path: file.read()}
-        try:
-            ImportDashboardsCommand(contents, overwrite=True).run()
-        except Exception:  # pylint: disable=broad-except
-            logger.exception(
-                "There was an error when importing the dashboards(s), please check "
-                "the exception traceback in the log"
-            )
-            sys.exit(1)
-
-    @click.command()
-    @with_appcontext
-    @click.option(
-        "--path",
-        "-p",
-        help="Path to a single ZIP file",
-    )
-    def import_datasources(path: str) -> None:
-        """Import datasources from ZIP file"""
-        # pylint: disable=import-outside-toplevel
-        from superset.commands.dataset.importers.dispatcher import ImportDatasetsCommand
-        from superset.commands.importers.v1.utils import get_contents_from_bundle
-
-        if is_zipfile(path):
-            with ZipFile(path) as bundle:
-                contents = get_contents_from_bundle(bundle)
-        else:
-            with open(path) as file:
-                contents = {path: file.read()}
-        try:
-            ImportDatasetsCommand(contents, overwrite=True).run()
-        except Exception:  # pylint: disable=broad-except
-            logger.exception(
-                "There was an error when importing the dataset(s), please check the "
-                "exception traceback in the log"
-            )
-            sys.exit(1)
-
-else:
-
-    @click.command()
-    @with_appcontext
-    @click.option(
-        "--dashboard-file",
-        "-f",
-        default=None,
-        help="Specify the file to export to",
-    )
-    @click.option(
-        "--print_stdout",
-        "-p",
-        is_flag=True,
-        default=False,
-        help="Print JSON to stdout",
-    )
-    def export_dashboards(
-        dashboard_file: Optional[str], print_stdout: bool = False
-    ) -> None:
-        """Export dashboards to JSON"""
-        # pylint: disable=import-outside-toplevel
-        from superset.utils import dashboard_import_export
-
-        data = dashboard_import_export.export_dashboards(db.session)
-        if print_stdout or not dashboard_file:
-            print(data)
-        if dashboard_file:
-            logger.info("Exporting dashboards to %s", dashboard_file)
-            with open(dashboard_file, "w") as data_stream:
-                data_stream.write(data)
-
-    @click.command()
-    @with_appcontext
-    @click.option(
-        "--datasource-file",
-        "-f",
-        default=None,
-        help="Specify the file to export to",
-    )
-    @click.option(
-        "--print_stdout",
-        "-p",
-        is_flag=True,
-        default=False,
-        help="Print YAML to stdout",
-    )
-    @click.option(
-        "--back-references",
-        "-b",
-        is_flag=True,
-        default=False,
-        help="Include parent back references",
-    )
-    @click.option(
-        "--include-defaults",
-        "-d",
-        is_flag=True,
-        default=False,
-        help="Include fields containing defaults",
-    )
-    def export_datasources(
-        datasource_file: Optional[str],
-        print_stdout: bool = False,
-        back_references: bool = False,
-        include_defaults: bool = False,
-    ) -> None:
-        """Export datasources to YAML"""
-        # pylint: disable=import-outside-toplevel
-        from superset.utils import dict_import_export
-
-        data = dict_import_export.export_to_dict(
-            session=db.session,
-            recursive=True,
-            back_references=back_references,
-            include_defaults=include_defaults,
-        )
-        if print_stdout or not datasource_file:
-            yaml.safe_dump(data, sys.stdout, default_flow_style=False)
-        if datasource_file:
-            logger.info("Exporting datasources to %s", datasource_file)
-            with open(datasource_file, "w") as data_stream:
-                yaml.safe_dump(data, data_stream, default_flow_style=False)
-
-    @click.command()
-    @with_appcontext
-    @click.option(
-        "--path",
-        "-p",
-        help="Path to a single JSON file or path containing multiple JSON "
-        "files to import (*.json)",
-    )
-    @click.option(
-        "--recursive",
-        "-r",
-        is_flag=True,
-        default=False,
-        help="recursively search the path for json files",
-    )
-    @click.option(
-        "--username",
-        "-u",
-        default=None,
-        help="Specify the user name to assign dashboards to",
-    )
-    def import_dashboards(path: str, recursive: bool, username: str) -> None:
-        """Import dashboards from JSON file"""
-        # pylint: disable=import-outside-toplevel
-        from superset.commands.dashboard.importers.v0 import ImportDashboardsCommand
-
-        path_object = Path(path)
-        files: list[Path] = []
-        if path_object.is_file():
-            files.append(path_object)
-        elif path_object.exists() and not recursive:
-            files.extend(path_object.glob("*.json"))
-        elif path_object.exists() and recursive:
-            files.extend(path_object.rglob("*.json"))
-        if username is not None:
-            g.user = security_manager.find_user(username=username)
-        contents = {}
-        for path_ in files:
-            with open(path_) as file:
-                contents[path_.name] = file.read()
-        try:
-            ImportDashboardsCommand(contents).run()
-        except Exception:  # pylint: disable=broad-except
-            logger.exception("Error when importing dashboard")
-            sys.exit(1)
-
-    @click.command()
-    @with_appcontext
-    @click.option(
-        "--path",
-        "-p",
-        help="Path to a single YAML file or path containing multiple YAML "
-        "files to import (*.yaml or *.yml)",
-    )
-    @click.option(
-        "--sync",
-        "-s",
-        "sync",
-        default="",
-        help="comma separated list of element types to synchronize "
-        'e.g. "metrics,columns" deletes metrics and columns in the DB '
-        "that are not specified in the YAML file",
-    )
-    @click.option(
-        "--recursive",
-        "-r",
-        is_flag=True,
-        default=False,
-        help="recursively search the path for yaml files",
-    )
-    def import_datasources(path: str, sync: str, recursive: bool) -> None:
-        """Import datasources from YAML"""
-        # pylint: disable=import-outside-toplevel
-        from superset.commands.dataset.importers.v0 import ImportDatasetsCommand
-
-        sync_array = sync.split(",")
-        sync_columns = "columns" in sync_array
-        sync_metrics = "metrics" in sync_array
-
-        path_object = Path(path)
-        files: list[Path] = []
-        if path_object.is_file():
-            files.append(path_object)
-        elif path_object.exists() and not recursive:
-            files.extend(path_object.glob("*.yaml"))
-            files.extend(path_object.glob("*.yml"))
-        elif path_object.exists() and recursive:
-            files.extend(path_object.rglob("*.yaml"))
-            files.extend(path_object.rglob("*.yml"))
-        contents = {}
-        for path_ in files:
-            with open(path_) as file:
-                contents[path_.name] = file.read()
-        try:
-            ImportDatasetsCommand(
-                contents, sync_columns=sync_columns, sync_metrics=sync_metrics
-            ).run()
-        except Exception:  # pylint: disable=broad-except
-            logger.exception("Error when importing dataset")
-            sys.exit(1)
-
-    @click.command()
-    @with_appcontext
-    @click.option(
-        "--back-references",
-        "-b",
-        is_flag=True,
-        default=False,
-        help="Include parent back references",
-    )
-    def export_datasource_schema(back_references: bool) -> None:
-        """Export datasource YAML schema to stdout"""
-        # pylint: disable=import-outside-toplevel
-        from superset.utils import dict_import_export
-
-        data = dict_import_export.export_schema_to_dict(back_references=back_references)
-        yaml.safe_dump(data, sys.stdout, default_flow_style=False)
+@click.command()
+@with_appcontext
+@click.option(
+    "--dashboard-file",
+    "-f",
+    help="Specify the file to export to",
+)
+def export_dashboards(dashboard_file: Optional[str] = None) -> None:
+    """Export dashboards to ZIP file"""
+    # pylint: disable=import-outside-toplevel
+    from superset.commands.dashboard.export import ExportDashboardsCommand
+    from superset.models.dashboard import Dashboard
+
+    g.user = security_manager.find_user(username="admin")
+
+    dashboard_ids = [id_ for (id_,) in db.session.query(Dashboard.id).all()]
+    timestamp = datetime.now().strftime("%Y%m%dT%H%M%S")
+    root = f"dashboard_export_{timestamp}"
+    dashboard_file = dashboard_file or f"{root}.zip"
+
+    try:
+        with ZipFile(dashboard_file, "w") as bundle:
+            for file_name, file_content in ExportDashboardsCommand(dashboard_ids).run():
+                with bundle.open(f"{root}/{file_name}", "w") as fp:
+                    fp.write(file_content.encode())
+    except Exception:  # pylint: disable=broad-except
+        logger.exception(
+            "There was an error when exporting the dashboards, please check "
+            "the exception traceback in the log"
+        )
+        sys.exit(1)
+
+
+@click.command()
+@with_appcontext
+@click.option(
+    "--datasource-file",
+    "-f",
+    help="Specify the file to export to",
+)
+def export_datasources(datasource_file: Optional[str] = None) -> None:
+    """Export datasources to ZIP file"""
+    # pylint: disable=import-outside-toplevel
+    from superset.commands.dataset.export import ExportDatasetsCommand
+    from superset.connectors.sqla.models import SqlaTable
+
+    g.user = security_manager.find_user(username="admin")
+
+    dataset_ids = [id_ for (id_,) in db.session.query(SqlaTable.id).all()]
+    timestamp = datetime.now().strftime("%Y%m%dT%H%M%S")
+    root = f"dataset_export_{timestamp}"
+    datasource_file = datasource_file or f"{root}.zip"
+
+    try:
+        with ZipFile(datasource_file, "w") as bundle:
+            for file_name, file_content in ExportDatasetsCommand(dataset_ids).run():
+                with bundle.open(f"{root}/{file_name}", "w") as fp:
+                    fp.write(file_content.encode())
+    except Exception:  # pylint: disable=broad-except
+        logger.exception(
+            "There was an error when exporting the datasets, please check "
+            "the exception traceback in the log"
+        )
+        sys.exit(1)
+
+
+@click.command()
+@with_appcontext
+@click.option(
+    "--path",
+    "-p",
+    help="Path to a single ZIP file",
+)
+@click.option(
+    "--username",
+    "-u",
+    default=None,
+    help="Specify the user name to assign dashboards to",
+)
+def import_dashboards(path: str, username: Optional[str]) -> None:
+    """Import dashboards from ZIP file"""
+    # pylint: disable=import-outside-toplevel
+    from superset.commands.dashboard.importers.dispatcher import ImportDashboardsCommand
+    from superset.commands.importers.v1.utils import get_contents_from_bundle
+
+    if username is not None:
+        g.user = security_manager.find_user(username=username)
+    if is_zipfile(path):
+        with ZipFile(path) as bundle:
+            contents = get_contents_from_bundle(bundle)
+    else:
+        with open(path) as file:
+            contents = {path: file.read()}
+    try:
+        ImportDashboardsCommand(contents, overwrite=True).run()
+    except Exception:  # pylint: disable=broad-except
+        logger.exception(
+            "There was an error when importing the dashboards(s), please check "
+            "the exception traceback in the log"
+        )
+        sys.exit(1)
+
+
+@click.command()
+@with_appcontext
+@click.option(
+    "--path",
+    "-p",
+    help="Path to a single ZIP file",
+)
+def import_datasources(path: str) -> None:
+    """Import datasources from ZIP file"""
+    # pylint: disable=import-outside-toplevel
+    from superset.commands.dataset.importers.dispatcher import ImportDatasetsCommand
+    from superset.commands.importers.v1.utils import get_contents_from_bundle
+
+    if is_zipfile(path):
+        with ZipFile(path) as bundle:
+            contents = get_contents_from_bundle(bundle)
+    else:
+        with open(path) as file:
+            contents = {path: file.read()}
+    try:
+        ImportDatasetsCommand(contents, overwrite=True).run()
+    except Exception:  # pylint: disable=broad-except
+        logger.exception(
+            "There was an error when importing the dataset(s), please check the "
+            "exception traceback in the log"
+        )
+        sys.exit(1)
+
+
+@click.command()
+@with_appcontext
+@click.option(
+    "--dashboard-file",
+    "-f",
+    default=None,
+    help="Specify the file to export to",
+)
+@click.option(
+    "--print_stdout",
+    "-p",
+    is_flag=True,
+    default=False,
+    help="Print JSON to stdout",
+)
+def legacy_export_dashboards(
+    dashboard_file: Optional[str], print_stdout: bool = False
+) -> None:
+    """Export dashboards to JSON"""
+    # pylint: disable=import-outside-toplevel
+    from superset.utils import dashboard_import_export
+
+    data = dashboard_import_export.export_dashboards(db.session)
+    if print_stdout or not dashboard_file:
+        print(data)
+    if dashboard_file:
+        logger.info("Exporting dashboards to %s", dashboard_file)
+        with open(dashboard_file, "w") as data_stream:
+            data_stream.write(data)
+
+
+@click.command()
+@with_appcontext
+@click.option(
+    "--datasource-file",
+    "-f",
+    default=None,
+    help="Specify the file to export to",
+)
+@click.option(
+    "--print_stdout",
+    "-p",
+    is_flag=True,
+    default=False,
+    help="Print YAML to stdout",
+)
+@click.option(
+    "--back-references",
+    "-b",
+    is_flag=True,
+    default=False,
+    help="Include parent back references",
+)
+@click.option(
+    "--include-defaults",
+    "-d",
+    is_flag=True,
+    default=False,
+    help="Include fields containing defaults",
+)
+def legacy_export_datasources(
+    datasource_file: Optional[str],
+    print_stdout: bool = False,
+    back_references: bool = False,
+    include_defaults: bool = False,
+) -> None:
+    """Export datasources to YAML"""
+    # pylint: disable=import-outside-toplevel
+    from superset.utils import dict_import_export
+
+    data = dict_import_export.export_to_dict(
+        session=db.session,
+        recursive=True,
+        back_references=back_references,
+        include_defaults=include_defaults,
+    )
+    if print_stdout or not datasource_file:
+        yaml.safe_dump(data, sys.stdout, default_flow_style=False)
+    if datasource_file:
+        logger.info("Exporting datasources to %s", datasource_file)
+        with open(datasource_file, "w") as data_stream:
+            yaml.safe_dump(data, data_stream, default_flow_style=False)
+
+
+@click.command()
+@with_appcontext
+@click.option(
+    "--path",
+    "-p",
+    help="Path to a single JSON file or path containing multiple JSON "
+    "files to import (*.json)",
+)
+@click.option(
+    "--recursive",
+    "-r",
+    is_flag=True,
+    default=False,
+    help="recursively search the path for json files",
+)
+@click.option(
+    "--username",
+    "-u",
+    default=None,
+    help="Specify the user name to assign dashboards to",
+)
+def legacy_import_dashboards(path: str, recursive: bool, username: str) -> None:
+    """Import dashboards from JSON file"""
+    # pylint: disable=import-outside-toplevel
+    from superset.commands.dashboard.importers.v0 import ImportDashboardsCommand
+
+    path_object = Path(path)
+    files: list[Path] = []
+    if path_object.is_file():
+        files.append(path_object)
+    elif path_object.exists() and not recursive:
+        files.extend(path_object.glob("*.json"))
+    elif path_object.exists() and recursive:
+        files.extend(path_object.rglob("*.json"))
+    if username is not None:
+        g.user = security_manager.find_user(username=username)
+    contents = {}
+    for path_ in files:
+        with open(path_) as file:
+            contents[path_.name] = file.read()
+    try:
+        ImportDashboardsCommand(contents).run()
+    except Exception:  # pylint: disable=broad-except
+        logger.exception("Error when importing dashboard")
+        sys.exit(1)
+
+
+@click.command()
+@with_appcontext
+@click.option(
+    "--path",
+    "-p",
+    help="Path to a single YAML file or path containing multiple YAML "
+    "files to import (*.yaml or *.yml)",
+)
+@click.option(
+    "--sync",
+    "-s",
+    "sync",
+    default="",
+    help="comma separated list of element types to synchronize "
+    'e.g. "metrics,columns" deletes metrics and columns in the DB '
+    "that are not specified in the YAML file",
+)
+@click.option(
+    "--recursive",
+    "-r",
+    is_flag=True,
+    default=False,
+    help="recursively search the path for yaml files",
+)
+def legacy_import_datasources(path: str, sync: str, recursive: bool) -> None:
+    """Import datasources from YAML"""
+    # pylint: disable=import-outside-toplevel
+    from superset.commands.dataset.importers.v0 import ImportDatasetsCommand
+
+    sync_array = sync.split(",")
+    sync_columns = "columns" in sync_array
+    sync_metrics = "metrics" in sync_array
+
+    path_object = Path(path)
+    files: list[Path] = []
+    if path_object.is_file():
+        files.append(path_object)
+    elif path_object.exists() and not recursive:
+        files.extend(path_object.glob("*.yaml"))
+        files.extend(path_object.glob("*.yml"))
+    elif path_object.exists() and recursive:
+        files.extend(path_object.rglob("*.yaml"))
+        files.extend(path_object.rglob("*.yml"))
+    contents = {}
+    for path_ in files:
+        with open(path_) as file:
+            contents[path_.name] = file.read()
+    try:
+        ImportDatasetsCommand(
+            contents, sync_columns=sync_columns, sync_metrics=sync_metrics
+        ).run()
+    except Exception:  # pylint: disable=broad-except
+        logger.exception("Error when importing dataset")
+        sys.exit(1)
+
+
+@click.command()
+@with_appcontext
+@click.option(
+    "--back-references",
+    "-b",
+    is_flag=True,
+    default=False,
+    help="Include parent back references",
+)
+def legacy_export_datasource_schema(back_references: bool) -> None:
+    """Export datasource YAML schema to stdout"""
+    # pylint: disable=import-outside-toplevel
+    from superset.utils import dict_import_export
+
+    data = dict_import_export.export_schema_to_dict(back_references=back_references)
+    yaml.safe_dump(data, sys.stdout, default_flow_style=False)

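Because the commands above are now defined unconditionally, exercising them needs no feature-flag patching or `importlib.reload()` dance. A minimal sketch modeled on the tests further down in this commit (the `create_app` factory from `superset.app` is an assumption about the standard setup):

```python
import superset.cli.importexport
from superset.app import create_app  # assumed: the standard Superset app factory

app = create_app()
runner = app.test_cli_runner()
# No VERSIONED_EXPORT setup required; the command is always the ZIP variant.
response = runner.invoke(
    superset.cli.importexport.export_dashboards, ("-f", "dashboards.zip")
)
assert response.exit_code == 0
```
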
@@ -432,7 +432,6 @@ DEFAULT_FEATURE_FLAGS: dict[str, bool] = {
     "DASHBOARD_CROSS_FILTERS": True,  # deprecated
     "DASHBOARD_VIRTUALIZATION": True,
     "GLOBAL_ASYNC_QUERIES": False,
-    "VERSIONED_EXPORT": True,  # deprecated
     "EMBEDDED_SUPERSET": False,
     # Enables Alerts and reports new implementation
     "ALERT_REPORTS": False,

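One side effect worth noting for custom builds: Superset's feature-flag manager reports `False` for flags it does not know, so third-party code that still guards on the removed flag now takes the disabled branch even though the behavior itself is permanently on. A sketch of the stale-guard pattern to delete (hypothetical plugin code, not from this commit):

```python
from superset.extensions import feature_flag_manager

# After this commit the flag no longer exists, so this check returns False --
# the opposite of the old default of True. Guards like this should be removed.
if feature_flag_manager.is_feature_enabled("VERSIONED_EXPORT"):
    ...  # effectively dead branch in custom code
```
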
@@ -23,7 +23,7 @@ from io import BytesIO
 from typing import Any, Callable, cast, Optional
 from zipfile import is_zipfile, ZipFile

-from flask import make_response, redirect, request, Response, send_file, url_for
+from flask import redirect, request, Response, send_file, url_for
 from flask_appbuilder import permission_name
 from flask_appbuilder.api import expose, protect, rison, safe
 from flask_appbuilder.hooks import before_request

@@ -85,7 +85,6 @@ from superset.tasks.thumbnails import cache_dashboard_thumbnail
 from superset.tasks.utils import get_current_user
 from superset.utils.screenshots import DashboardScreenshot
 from superset.utils.urls import get_url_path
-from superset.views.base import generate_download_headers
 from superset.views.base_api import (
     BaseSupersetModelRestApi,
     RelatedFieldFilter,

@@ -714,7 +713,7 @@ class DashboardRestApi(BaseSupersetModelRestApi):
         action=lambda self, *args, **kwargs: f"{self.__class__.__name__}.export",
         log_to_statsd=False,
     )
-    def export(self, **kwargs: Any) -> Response:  # pylint: disable=too-many-locals
+    def export(self, **kwargs: Any) -> Response:
         """Download multiple dashboards as YAML files.
         ---
         get:

@@ -745,50 +744,32 @@ class DashboardRestApi(BaseSupersetModelRestApi):
               $ref: '#/components/responses/500'
         """
         requested_ids = kwargs["rison"]
-        token = request.args.get("token")
-
-        if is_feature_enabled("VERSIONED_EXPORT"):
-            timestamp = datetime.now().strftime("%Y%m%dT%H%M%S")
-            root = f"dashboard_export_{timestamp}"
-            filename = f"{root}.zip"
-
-            buf = BytesIO()
-            with ZipFile(buf, "w") as bundle:
-                try:
-                    for file_name, file_content in ExportDashboardsCommand(
-                        requested_ids
-                    ).run():
-                        with bundle.open(f"{root}/{file_name}", "w") as fp:
-                            fp.write(file_content.encode())
-                except DashboardNotFoundError:
-                    return self.response_404()
-            buf.seek(0)
-
-            response = send_file(
-                buf,
-                mimetype="application/zip",
-                as_attachment=True,
-                download_name=filename,
-            )
-            if token:
-                response.set_cookie(token, "done", max_age=600)
-            return response
-
-        query = self.datamodel.session.query(Dashboard).filter(
-            Dashboard.id.in_(requested_ids)
-        )
-        query = self._base_filters.apply_all(query)
-        ids = {item.id for item in query.all()}
-        if not ids:
-            return self.response_404()
-        export = Dashboard.export_dashboards(ids)
-        resp = make_response(export, 200)
-        resp.headers["Content-Disposition"] = generate_download_headers("json")[
-            "Content-Disposition"
-        ]
-        if token:
-            resp.set_cookie(token, "done", max_age=600)
-        return resp
+        timestamp = datetime.now().strftime("%Y%m%dT%H%M%S")
+        root = f"dashboard_export_{timestamp}"
+        filename = f"{root}.zip"
+
+        buf = BytesIO()
+        with ZipFile(buf, "w") as bundle:
+            try:
+                for file_name, file_content in ExportDashboardsCommand(
+                    requested_ids
+                ).run():
+                    with bundle.open(f"{root}/{file_name}", "w") as fp:
+                        fp.write(file_content.encode())
+            except DashboardNotFoundError:
+                return self.response_404()
+        buf.seek(0)
+
+        response = send_file(
+            buf,
+            mimetype="application/zip",
+            as_attachment=True,
+            download_name=filename,
+        )
+        if token := request.args.get("token"):
+            response.set_cookie(token, "done", max_age=600)
+        return response

     @expose("/<pk>/thumbnail/<digest>/", methods=("GET",))
     @protect()

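With the legacy branch gone, `GET /api/v1/dashboard/export/` always streams a ZIP attachment rather than sometimes returning the old JSON payload. A hypothetical client sketch; the host, port, and auth header are assumptions, and `q` is the rison-encoded list of dashboard ids:

```python
import requests  # assumed third-party dependency for this sketch

resp = requests.get(
    "http://localhost:8088/api/v1/dashboard/export/",
    params={"q": "!(1,2)"},  # rison-encoded list of dashboard ids
    headers={"Authorization": "Bearer <access-token>"},  # placeholder auth
)
resp.raise_for_status()
# The response is now unconditionally a ZIP bundle.
assert resp.headers["Content-Type"] == "application/zip"
with open("dashboard_export.zip", "wb") as fp:
    fp.write(resp.content)
```
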
@@ -22,14 +22,13 @@ from io import BytesIO
 from typing import Any
 from zipfile import is_zipfile, ZipFile

 import yaml
 from flask import request, Response, send_file
 from flask_appbuilder.api import expose, protect, rison, safe
 from flask_appbuilder.models.sqla.interface import SQLAInterface
 from flask_babel import ngettext
 from marshmallow import ValidationError

-from superset import event_logger, is_feature_enabled
+from superset import event_logger
 from superset.commands.dataset.create import CreateDatasetCommand
 from superset.commands.dataset.delete import DeleteDatasetCommand
 from superset.commands.dataset.duplicate import DuplicateDatasetCommand

@@ -68,7 +67,7 @@ from superset.datasets.schemas import (
     openapi_spec_methods_override,
 )
 from superset.utils.core import parse_boolean_string
-from superset.views.base import DatasourceFilter, generate_download_headers
+from superset.views.base import DatasourceFilter
 from superset.views.base_api import (
     BaseSupersetModelRestApi,
     RelatedFieldFilter,

@@ -489,7 +488,7 @@ class DatasetRestApi(BaseSupersetModelRestApi):
         action=lambda self, *args, **kwargs: f"{self.__class__.__name__}.export",
         log_to_statsd=False,
     )
-    def export(self, **kwargs: Any) -> Response:  # pylint: disable=too-many-locals
+    def export(self, **kwargs: Any) -> Response:
         """Download multiple datasets as YAML files.
         ---
         get:

@@ -519,49 +518,31 @@ class DatasetRestApi(BaseSupersetModelRestApi):
         """
         requested_ids = kwargs["rison"]
-
-        if is_feature_enabled("VERSIONED_EXPORT"):
-            token = request.args.get("token")
-            timestamp = datetime.now().strftime("%Y%m%dT%H%M%S")
-            root = f"dataset_export_{timestamp}"
-            filename = f"{root}.zip"
-
-            buf = BytesIO()
-            with ZipFile(buf, "w") as bundle:
-                try:
-                    for file_name, file_content in ExportDatasetsCommand(
-                        requested_ids
-                    ).run():
-                        with bundle.open(f"{root}/{file_name}", "w") as fp:
-                            fp.write(file_content.encode())
-                except DatasetNotFoundError:
-                    return self.response_404()
-            buf.seek(0)
-
-            response = send_file(
-                buf,
-                mimetype="application/zip",
-                as_attachment=True,
-                download_name=filename,
-            )
-            if token:
-                response.set_cookie(token, "done", max_age=600)
-            return response
-
-        query = self.datamodel.session.query(SqlaTable).filter(
-            SqlaTable.id.in_(requested_ids)
-        )
-        query = self._base_filters.apply_all(query)
-        items = query.all()
-        ids = [item.id for item in items]
-        if len(ids) != len(requested_ids):
-            return self.response_404()
-
-        data = [t.export_to_dict() for t in items]
-        return Response(
-            yaml.safe_dump(data),
-            headers=generate_download_headers("yaml"),
-            mimetype="application/text",
-        )
+        timestamp = datetime.now().strftime("%Y%m%dT%H%M%S")
+        root = f"dataset_export_{timestamp}"
+        filename = f"{root}.zip"
+
+        buf = BytesIO()
+        with ZipFile(buf, "w") as bundle:
+            try:
+                for file_name, file_content in ExportDatasetsCommand(
+                    requested_ids
+                ).run():
+                    with bundle.open(f"{root}/{file_name}", "w") as fp:
+                        fp.write(file_content.encode())
+            except DatasetNotFoundError:
+                return self.response_404()
+        buf.seek(0)
+
+        response = send_file(
+            buf,
+            mimetype="application/zip",
+            as_attachment=True,
+            download_name=filename,
+        )
+        if token := request.args.get("token"):
+            response.set_cookie(token, "done", max_age=600)
+        return response

     @expose("/duplicate", methods=("POST",))
     @protect()

@@ -118,7 +118,6 @@ class SupersetAppInitializer:  # pylint: disable=too-many-public-methods
         # the global Flask app
         #
         # pylint: disable=import-outside-toplevel,too-many-locals,too-many-statements
-        from superset import security_manager
         from superset.advanced_data_type.api import AdvancedDataTypeRestApi
         from superset.annotation_layers.annotations.api import AnnotationRestApi
         from superset.annotation_layers.api import AnnotationLayerRestApi

@@ -327,20 +326,6 @@ class SupersetAppInitializer:  # pylint: disable=too-many-public-methods
         #
         # Add links
         #
-        appbuilder.add_link(
-            "Import Dashboards",
-            label=__("Import Dashboards"),
-            href="/superset/import_dashboards/",
-            icon="fa-cloud-upload",
-            category="Manage",
-            category_label=__("Manage"),
-            category_icon="fa-wrench",
-            cond=lambda: (
-                security_manager.can_access("can_import_dashboards", "Superset")
-                and not feature_flag_manager.is_feature_enabled("VERSIONED_EXPORT")
-            ),
-        )
-
         appbuilder.add_link(
             "SQL Editor",
             label=__("SQL Lab"),

@@ -47,7 +47,6 @@ from superset import (
 from superset.async_events.async_query_manager import AsyncQueryTokenException
 from superset.commands.chart.exceptions import ChartNotFoundError
 from superset.commands.chart.warm_up_cache import ChartWarmUpCacheCommand
-from superset.commands.dashboard.importers.v0 import ImportDashboardsCommand
 from superset.commands.dashboard.permalink.get import GetDashboardPermalinkCommand
 from superset.commands.dataset.exceptions import DatasetNotFoundError
 from superset.commands.explore.form_data.create import CreateFormDataCommand

@@ -61,7 +60,6 @@ from superset.daos.datasource import DatasourceDAO
 from superset.dashboards.permalink.exceptions import DashboardPermalinkGetFailedError
 from superset.exceptions import (
     CacheLoadError,
-    DatabaseNotFound,
     SupersetException,
     SupersetSecurityException,
 )

@@ -345,55 +343,6 @@ class Superset(BaseSupersetView):  # pylint: disable=too-many-public-methods
         except SupersetException as ex:
             return json_error_response(utils.error_msg_from_exception(ex), 400)

-    @has_access
-    @event_logger.log_this
-    @expose(
-        "/import_dashboards/",
-        methods=(
-            "GET",
-            "POST",
-        ),
-    )
-    def import_dashboards(self) -> FlaskResponse:
-        """Overrides the dashboards using json instances from the file."""
-        import_file = request.files.get("file")
-        if request.method == "POST" and import_file:
-            success = False
-            database_id = request.form.get("db_id")
-            try:
-                ImportDashboardsCommand(
-                    {import_file.filename: import_file.read()}, database_id
-                ).run()
-                success = True
-            except DatabaseNotFound as ex:
-                logger.exception(ex)
-                flash(
-                    _(
-                        "Cannot import dashboard: %(db_error)s.\n"
-                        "Make sure to create the database before "
-                        "importing the dashboard.",
-                        db_error=ex,
-                    ),
-                    "danger",
-                )
-            except Exception as ex:  # pylint: disable=broad-except
-                logger.exception(ex)
-                flash(
-                    _(
-                        "An unknown error occurred. "
-                        "Please contact your Superset administrator"
-                    ),
-                    "danger",
-                )
-            if success:
-                flash("Dashboard(s) have been imported", "success")
-                return redirect("/dashboard/list/")
-
-            databases = db.session.query(Database).all()
-            return self.render_template(
-                "superset/import_dashboards.html", databases=databases
-            )
-
     @staticmethod
     def get_redirect_url() -> str:
         """Assembles the redirect URL to the new endpoint. It also replaces

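The `/superset/import_dashboards/` form removed above was the last JSON-based import surface; its replacement is the v1 REST import, which takes a ZIP bundle as multipart form data. A hypothetical client sketch (the `formData` field name follows the existing v1 import APIs; host and auth are assumptions):

```python
import requests  # illustrative client, not part of this commit

with open("dashboard_export.zip", "rb") as fp:
    resp = requests.post(
        "http://localhost:8088/api/v1/dashboard/import/",
        files={"formData": ("dashboard_export.zip", fp, "application/zip")},
        headers={"Authorization": "Bearer <access-token>"},  # placeholder auth
    )
resp.raise_for_status()
```
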
@@ -16,7 +16,6 @@
 # under the License.

 import importlib
-import json
 import logging
 from pathlib import Path
 from unittest import mock

@@ -49,69 +48,7 @@ def assert_cli_fails_properly(response, caplog):
     assert caplog.records[-1].levelname == "ERROR"


-@mock.patch.dict(
-    "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": False}, clear=True
-)
-@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
-def test_export_dashboards_original(app_context, fs):
-    """
-    Test that a JSON file is exported.
-    """
-    # pylint: disable=reimported, redefined-outer-name
-    import superset.cli.importexport  # noqa: F811
-
-    # reload to define export_dashboards correctly based on the
-    # feature flags
-    importlib.reload(superset.cli.importexport)
-
-    runner = app.test_cli_runner()
-    response = runner.invoke(
-        superset.cli.importexport.export_dashboards, ("-f", "dashboards.json")
-    )
-
-    assert response.exit_code == 0
-    assert Path("dashboards.json").exists()
-
-    # check that file is valid JSON
-    with open("dashboards.json") as fp:
-        contents = fp.read()
-    json.loads(contents)
-
-
-@mock.patch.dict(
-    "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": False}, clear=True
-)
-@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
-def test_export_datasources_original(app_context, fs):
-    """
-    Test that a YAML file is exported.
-    """
-    # pylint: disable=reimported, redefined-outer-name
-    import superset.cli.importexport  # noqa: F811
-
-    # reload to define export_dashboards correctly based on the
-    # feature flags
-    importlib.reload(superset.cli.importexport)
-
-    runner = app.test_cli_runner()
-    response = runner.invoke(
-        superset.cli.importexport.export_datasources, ("-f", "datasources.yaml")
-    )
-
-    assert response.exit_code == 0
-
-    assert Path("datasources.yaml").exists()
-
-    # check that file is valid JSON
-    with open("datasources.yaml") as fp:
-        contents = fp.read()
-    yaml.safe_load(contents)
-
-
 @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
-@mock.patch.dict(
-    "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": True}, clear=True
-)
 def test_export_dashboards_versioned_export(app_context, fs):
     """
     Test that a ZIP file is exported.

@@ -133,9 +70,6 @@ def test_export_dashboards_versioned_export(app_context, fs):
     assert is_zipfile("dashboard_export_20210101T000000.zip")


-@mock.patch.dict(
-    "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": True}, clear=True
-)
 @mock.patch(
     "superset.commands.dashboard.export.ExportDashboardsCommand.run",
     side_effect=Exception(),

@@ -163,9 +97,6 @@ def test_failing_export_dashboards_versioned_export(


 @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
-@mock.patch.dict(
-    "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": True}, clear=True
-)
 def test_export_datasources_versioned_export(app_context, fs):
     """
     Test that a ZIP file is exported.

@@ -187,9 +118,6 @@ def test_export_datasources_versioned_export(app_context, fs):
     assert is_zipfile("dataset_export_20210101T000000.zip")


-@mock.patch.dict(
-    "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": True}, clear=True
-)
 @mock.patch(
     "superset.commands.dashboard.export.ExportDatasetsCommand.run",
     side_effect=Exception(),

@@ -214,9 +142,6 @@ def test_failing_export_datasources_versioned_export(
     assert_cli_fails_properly(response, caplog)


-@mock.patch.dict(
-    "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": True}, clear=True
-)
 @mock.patch("superset.commands.dashboard.importers.dispatcher.ImportDashboardsCommand")
 def test_import_dashboards_versioned_export(import_dashboards_command, app_context, fs):
     """

@@ -257,9 +182,6 @@ def test_import_dashboards_versioned_export(import_dashboards_command, app_conte
     import_dashboards_command.assert_called_with(expected_contents, overwrite=True)


-@mock.patch.dict(
-    "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": True}, clear=True
-)
 @mock.patch(
     "superset.commands.dashboard.importers.dispatcher.ImportDashboardsCommand.run",
     side_effect=Exception(),

@@ -301,9 +223,6 @@ def test_failing_import_dashboards_versioned_export(
     assert_cli_fails_properly(response, caplog)


-@mock.patch.dict(
-    "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": True}, clear=True
-)
 @mock.patch("superset.commands.dataset.importers.dispatcher.ImportDatasetsCommand")
 def test_import_datasets_versioned_export(import_datasets_command, app_context, fs):
     """

@@ -344,120 +263,6 @@ def test_import_datasets_versioned_export(import_datasets_command, app_context,
     import_datasets_command.assert_called_with(expected_contents, overwrite=True)


-@mock.patch.dict(
-    "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": False}, clear=True
-)
-@mock.patch("superset.commands.dataset.importers.v0.ImportDatasetsCommand")
-def test_import_datasets_sync_argument_columns_metrics(
-    import_datasets_command, app_context, fs
-):
-    """
-    Test that the --sync command line argument syncs dataset in superset
-    with YAML file. Using both columns and metrics with the --sync flag
-    """
-    # pylint: disable=reimported, redefined-outer-name
-    import superset.cli.importexport  # noqa: F811
-
-    # reload to define export_datasets correctly based on the
-    # feature flags
-    importlib.reload(superset.cli.importexport)
-
-    # write YAML file
-    with open("dataset.yaml", "w") as fp:
-        fp.write("hello: world")
-
-    runner = app.test_cli_runner()
-    response = runner.invoke(
-        superset.cli.importexport.import_datasources,
-        ["-p", "dataset.yaml", "-s", "metrics,columns"],
-    )
-
-    assert response.exit_code == 0
-    expected_contents = {"dataset.yaml": "hello: world"}
-    import_datasets_command.assert_called_with(
-        expected_contents,
-        sync_columns=True,
-        sync_metrics=True,
-    )
-
-
-@mock.patch.dict(
-    "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": False}, clear=True
-)
-@mock.patch("superset.commands.dataset.importers.v0.ImportDatasetsCommand")
-def test_import_datasets_sync_argument_columns(
-    import_datasets_command, app_context, fs
-):
-    """
-    Test that the --sync command line argument syncs dataset in superset
-    with YAML file. Using only columns with the --sync flag
-    """
-    # pylint: disable=reimported, redefined-outer-name
-    import superset.cli.importexport  # noqa: F811
-
-    # reload to define export_datasets correctly based on the
-    # feature flags
-    importlib.reload(superset.cli.importexport)
-
-    # write YAML file
-    with open("dataset.yaml", "w") as fp:
-        fp.write("hello: world")
-
-    runner = app.test_cli_runner()
-    response = runner.invoke(
-        superset.cli.importexport.import_datasources,
-        ["-p", "dataset.yaml", "-s", "columns"],
-    )
-
-    assert response.exit_code == 0
-    expected_contents = {"dataset.yaml": "hello: world"}
-    import_datasets_command.assert_called_with(
-        expected_contents,
-        sync_columns=True,
-        sync_metrics=False,
-    )
-
-
-@mock.patch.dict(
-    "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": False}, clear=True
-)
-@mock.patch("superset.commands.dataset.importers.v0.ImportDatasetsCommand")
-def test_import_datasets_sync_argument_metrics(
-    import_datasets_command, app_context, fs
-):
-    """
-    Test that the --sync command line argument syncs dataset in superset
-    with YAML file. Using only metrics with the --sync flag
-    """
-    # pylint: disable=reimported, redefined-outer-name
-    import superset.cli.importexport  # noqa: F811
-
-    # reload to define export_datasets correctly based on the
-    # feature flags
-    importlib.reload(superset.cli.importexport)
-
-    # write YAML file
-    with open("dataset.yaml", "w") as fp:
-        fp.write("hello: world")
-
-    runner = app.test_cli_runner()
-    response = runner.invoke(
-        superset.cli.importexport.import_datasources,
-        ["-p", "dataset.yaml", "-s", "metrics"],
-    )
-
-    assert response.exit_code == 0
-    expected_contents = {"dataset.yaml": "hello: world"}
-    import_datasets_command.assert_called_with(
-        expected_contents,
-        sync_columns=False,
-        sync_metrics=True,
-    )
-
-
-@mock.patch.dict(
-    "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": True}, clear=True
-)
 @mock.patch(
     "superset.commands.dataset.importers.dispatcher.ImportDatasetsCommand.run",
     side_effect=Exception(),

@@ -36,7 +36,6 @@ from superset.models.core import FavStar, FavStarClassName
 from superset.reports.models import ReportSchedule, ReportScheduleType
 from superset.models.slice import Slice
 from superset.utils.core import backend, override_user
-from superset.views.base import generate_download_headers

 from tests.integration_tests.conftest import with_feature_flags
 from tests.integration_tests.base_api_tests import ApiOwnersTestCaseMixin

@@ -1652,11 +1651,6 @@ class TestDashboardApi(ApiOwnersTestCaseMixin, InsertChartMixin, SupersetTestCas
         db.session.delete(user_alpha2)
         db.session.commit()

-    @patch.dict(
-        "superset.extensions.feature_flag_manager._feature_flags",
-        {"VERSIONED_EXPORT": False},
-        clear=True,
-    )
     @pytest.mark.usefixtures(
         "load_world_bank_dashboard_with_slices",
         "load_birth_names_dashboard_with_slices",

@@ -1671,8 +1665,8 @@ class TestDashboardApi(ApiOwnersTestCaseMixin, InsertChartMixin, SupersetTestCas
         uri = f"api/v1/dashboard/export/?q={prison.dumps(dashboards_ids)}"

         rv = self.get_assert_metric(uri, "export")
-        headers = generate_download_headers("json")["Content-Disposition"]
-
+        headers = f"attachment; filename=dashboard_export_20220101T000000.zip"
+
         assert rv.status_code == 200
         assert rv.headers["Content-Disposition"] == headers

|
@ -1348,7 +1348,6 @@ class TestRolePermission(SupersetTestCase):
|
|||
self.assert_can_all("CssTemplate", perm_set)
|
||||
self.assert_can_all("Dataset", perm_set)
|
||||
self.assert_can_read("Database", perm_set)
|
||||
self.assertIn(("can_import_dashboards", "Superset"), perm_set)
|
||||
self.assertIn(("can_this_form_post", "CsvToDatabaseView"), perm_set)
|
||||
self.assertIn(("can_this_form_get", "CsvToDatabaseView"), perm_set)
|
||||
self.assert_can_menu("Manage", perm_set)
|
||||
|
|