2020-10-16 14:10:39 -04:00
|
|
|
# Licensed to the Apache Software Foundation (ASF) under one
|
|
|
|
# or more contributor license agreements. See the NOTICE file
|
|
|
|
# distributed with this work for additional information
|
|
|
|
# regarding copyright ownership. The ASF licenses this file
|
|
|
|
# to you under the Apache License, Version 2.0 (the
|
|
|
|
# "License"); you may not use this file except in compliance
|
|
|
|
# with the License. You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing,
|
|
|
|
# software distributed under the License is distributed on an
|
|
|
|
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
|
|
|
# KIND, either express or implied. See the License for the
|
|
|
|
# specific language governing permissions and limitations
|
|
|
|
# under the License.
|
2021-05-12 21:32:10 -04:00
|
|
|
from unittest import mock, skip
|
2020-10-16 14:10:39 -04:00
|
|
|
from unittest.mock import patch
|
|
|
|
|
2020-11-16 20:11:20 -05:00
|
|
|
import pytest
|
2020-10-16 14:10:39 -04:00
|
|
|
import yaml
|
2022-01-12 16:01:34 -05:00
|
|
|
from func_timeout import FunctionTimedOut
|
2021-03-09 08:17:13 -05:00
|
|
|
from sqlalchemy.exc import DBAPIError
|
2020-10-16 14:10:39 -04:00
|
|
|
|
2021-03-09 08:17:13 -05:00
|
|
|
from superset import db, event_logger, security_manager
|
2020-11-16 20:11:20 -05:00
|
|
|
from superset.commands.exceptions import CommandInvalidError
|
|
|
|
from superset.commands.importers.exceptions import IncorrectVersionError
|
|
|
|
from superset.connectors.sqla.models import SqlaTable
|
2022-03-08 20:31:19 -05:00
|
|
|
from superset.databases.commands.create import CreateDatabaseCommand
|
2021-03-09 08:17:13 -05:00
|
|
|
from superset.databases.commands.exceptions import (
|
2022-03-08 20:31:19 -05:00
|
|
|
DatabaseInvalidError,
|
2021-03-09 08:17:13 -05:00
|
|
|
DatabaseNotFoundError,
|
|
|
|
DatabaseSecurityUnsafeError,
|
2023-02-01 07:45:57 -05:00
|
|
|
DatabaseTablesUnexpectedError,
|
2021-03-09 08:17:13 -05:00
|
|
|
DatabaseTestConnectionDriverError,
|
|
|
|
DatabaseTestConnectionUnexpectedError,
|
|
|
|
)
|
2020-10-16 14:10:39 -04:00
|
|
|
from superset.databases.commands.export import ExportDatabasesCommand
|
2020-11-16 20:11:20 -05:00
|
|
|
from superset.databases.commands.importers.v1 import ImportDatabasesCommand
|
2023-02-01 07:45:57 -05:00
|
|
|
from superset.databases.commands.tables import TablesDatabaseCommand
|
2021-03-09 08:17:13 -05:00
|
|
|
from superset.databases.commands.test_connection import TestConnectionDatabaseCommand
|
2021-05-12 21:32:10 -04:00
|
|
|
from superset.databases.commands.validate import ValidateDatabaseParametersCommand
|
2021-03-09 08:17:13 -05:00
|
|
|
from superset.databases.schemas import DatabaseTestConnectionSchema
|
2023-02-24 12:36:21 -05:00
|
|
|
from superset.databases.ssh_tunnel.models import SSHTunnel
|
2021-05-12 21:32:10 -04:00
|
|
|
from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
|
2022-01-12 16:01:34 -05:00
|
|
|
from superset.exceptions import (
|
|
|
|
SupersetErrorsException,
|
2023-02-01 07:45:57 -05:00
|
|
|
SupersetException,
|
2022-01-12 16:01:34 -05:00
|
|
|
SupersetSecurityException,
|
|
|
|
SupersetTimeoutException,
|
|
|
|
)
|
2020-11-16 20:11:20 -05:00
|
|
|
from superset.models.core import Database
|
2022-01-16 01:32:50 -05:00
|
|
|
from superset.utils.core import backend
|
|
|
|
from superset.utils.database import get_example_database
|
2021-07-01 11:03:07 -04:00
|
|
|
from tests.integration_tests.base_tests import SupersetTestCase
|
|
|
|
from tests.integration_tests.fixtures.birth_names_dashboard import (
|
|
|
|
load_birth_names_dashboard_with_slices,
|
2021-12-16 19:11:47 -05:00
|
|
|
load_birth_names_data,
|
2021-07-01 11:03:07 -04:00
|
|
|
)
|
|
|
|
from tests.integration_tests.fixtures.energy_dashboard import (
|
2021-12-16 19:11:47 -05:00
|
|
|
load_energy_table_data,
|
2021-07-01 11:03:07 -04:00
|
|
|
load_energy_table_with_slice,
|
|
|
|
)
|
|
|
|
from tests.integration_tests.fixtures.importexport import (
|
2020-11-16 20:11:20 -05:00
|
|
|
database_config,
|
|
|
|
database_metadata_config,
|
2023-02-24 12:36:21 -05:00
|
|
|
database_with_ssh_tunnel_config_mix_credentials,
|
|
|
|
database_with_ssh_tunnel_config_no_credentials,
|
|
|
|
database_with_ssh_tunnel_config_password,
|
|
|
|
database_with_ssh_tunnel_config_private_key,
|
|
|
|
database_with_ssh_tunnel_config_private_pass_only,
|
2020-11-16 20:11:20 -05:00
|
|
|
dataset_config,
|
|
|
|
dataset_metadata_config,
|
|
|
|
)
|
2020-10-16 14:10:39 -04:00
|
|
|
|
|
|
|
|
2022-03-08 20:31:19 -05:00
|
|
|
class TestCreateDatabaseCommand(SupersetTestCase):
    """Tests for ``CreateDatabaseCommand`` validation-error reporting and
    event logging."""

    @mock.patch(
        "superset.databases.commands.test_connection.event_logger.log_with_context"
    )
    @mock.patch("superset.utils.core.g")
    def test_create_duplicate_error(self, mock_g, mock_logger):
        """Creating a database whose name already exists must raise
        ``DatabaseInvalidError`` and log every validation error class name."""
        example_db = get_example_database()
        mock_g.user = security_manager.find_user("admin")
        command = CreateDatabaseCommand(
            {"database_name": example_db.database_name},
        )
        with pytest.raises(DatabaseInvalidError) as excinfo:
            command.run()
        assert str(excinfo.value) == "Database parameters are invalid."
        # The logged action is a dotted string listing the class names of
        # every error that was raised during validation.
        expected_action = (
            "db_connection_failed."
            "DatabaseInvalidError."
            "DatabaseExistsValidationError."
            "DatabaseRequiredFieldValidationError"
        )
        mock_logger.assert_called_with(action=expected_action)

    @mock.patch(
        "superset.databases.commands.test_connection.event_logger.log_with_context"
    )
    @mock.patch("superset.utils.core.g")
    def test_multiple_error_logging(self, mock_g, mock_logger):
        """An empty payload must raise ``DatabaseInvalidError`` and log a
        de-duplicated set of error class names."""
        mock_g.user = security_manager.find_user("admin")
        command = CreateDatabaseCommand({})
        with pytest.raises(DatabaseInvalidError) as excinfo:
            command.run()
        assert str(excinfo.value) == "Database parameters are invalid."
        # The logged action must contain each error class name only once,
        # even when the same error was raised for multiple fields.
        expected_action = (
            "db_connection_failed."
            "DatabaseInvalidError."
            "DatabaseRequiredFieldValidationError"
        )
        mock_logger.assert_called_with(action=expected_action)
|
|
|
|
|
|
|
|
|
2020-10-16 14:10:39 -04:00
|
|
|
class TestExportDatabasesCommand(SupersetTestCase):
|
2021-05-12 21:32:10 -04:00
|
|
|
@skip("Flaky")
|
2020-10-16 14:10:39 -04:00
|
|
|
@patch("superset.security.manager.g")
|
2021-01-11 08:57:55 -05:00
|
|
|
@pytest.mark.usefixtures(
|
|
|
|
"load_birth_names_dashboard_with_slices", "load_energy_table_with_slice"
|
|
|
|
)
|
2020-10-16 14:10:39 -04:00
|
|
|
def test_export_database_command(self, mock_g):
|
|
|
|
mock_g.user = security_manager.find_user("admin")
|
|
|
|
|
|
|
|
example_db = get_example_database()
|
2021-01-11 08:57:55 -05:00
|
|
|
db_uuid = example_db.uuid
|
|
|
|
|
2020-10-30 14:52:11 -04:00
|
|
|
command = ExportDatabasesCommand([example_db.id])
|
2020-10-16 14:10:39 -04:00
|
|
|
contents = dict(command.run())
|
|
|
|
|
|
|
|
# TODO: this list shouldn't depend on the order in which unit tests are run
|
|
|
|
# or on the backend; for now use a stable subset
|
2020-10-22 13:32:08 -04:00
|
|
|
core_files = {
|
2020-10-30 14:52:11 -04:00
|
|
|
"metadata.yaml",
|
2020-10-16 14:10:39 -04:00
|
|
|
"databases/examples.yaml",
|
2020-10-22 13:32:08 -04:00
|
|
|
"datasets/examples/energy_usage.yaml",
|
|
|
|
"datasets/examples/birth_names.yaml",
|
2020-10-16 14:10:39 -04:00
|
|
|
}
|
|
|
|
expected_extra = {
|
|
|
|
"engine_params": {},
|
|
|
|
"metadata_cache_timeout": {},
|
|
|
|
"metadata_params": {},
|
2021-10-25 06:53:06 -04:00
|
|
|
"schemas_allowed_for_file_upload": [],
|
2020-10-16 14:10:39 -04:00
|
|
|
}
|
|
|
|
if backend() == "presto":
|
2021-04-01 21:10:17 -04:00
|
|
|
expected_extra = {
|
|
|
|
**expected_extra,
|
|
|
|
"engine_params": {"connect_args": {"poll_interval": 0.1}},
|
|
|
|
}
|
2020-10-22 13:32:08 -04:00
|
|
|
assert core_files.issubset(set(contents.keys()))
|
2020-10-16 14:10:39 -04:00
|
|
|
|
2021-01-11 08:57:55 -05:00
|
|
|
if example_db.backend == "postgresql":
|
|
|
|
ds_type = "TIMESTAMP WITHOUT TIME ZONE"
|
|
|
|
elif example_db.backend == "hive":
|
|
|
|
ds_type = "TIMESTAMP"
|
|
|
|
elif example_db.backend == "presto":
|
|
|
|
ds_type = "VARCHAR(255)"
|
|
|
|
else:
|
|
|
|
ds_type = "DATETIME"
|
|
|
|
if example_db.backend == "mysql":
|
|
|
|
big_int_type = "BIGINT(20)"
|
|
|
|
else:
|
|
|
|
big_int_type = "BIGINT"
|
2020-10-16 14:10:39 -04:00
|
|
|
metadata = yaml.safe_load(contents["databases/examples.yaml"])
|
|
|
|
assert metadata == (
|
|
|
|
{
|
2021-11-03 14:25:30 -04:00
|
|
|
"allow_csv_upload": True,
|
2020-10-16 14:10:39 -04:00
|
|
|
"allow_ctas": True,
|
|
|
|
"allow_cvas": True,
|
2023-01-24 22:16:46 -05:00
|
|
|
"allow_dml": True,
|
2020-10-16 14:10:39 -04:00
|
|
|
"allow_run_async": False,
|
|
|
|
"cache_timeout": None,
|
|
|
|
"database_name": "examples",
|
|
|
|
"expose_in_sqllab": True,
|
|
|
|
"extra": expected_extra,
|
|
|
|
"sqlalchemy_uri": example_db.sqlalchemy_uri,
|
|
|
|
"uuid": str(example_db.uuid),
|
|
|
|
"version": "1.0.0",
|
|
|
|
}
|
|
|
|
)
|
|
|
|
|
2020-10-22 13:32:08 -04:00
|
|
|
metadata = yaml.safe_load(contents["datasets/examples/birth_names.yaml"])
|
2020-10-16 14:10:39 -04:00
|
|
|
metadata.pop("uuid")
|
2021-01-11 08:57:55 -05:00
|
|
|
|
|
|
|
metadata["columns"].sort(key=lambda x: x["column_name"])
|
|
|
|
expected_metadata = {
|
|
|
|
"cache_timeout": None,
|
|
|
|
"columns": [
|
2020-10-16 14:10:39 -04:00
|
|
|
{
|
2021-01-11 08:57:55 -05:00
|
|
|
"column_name": "ds",
|
2020-10-16 14:10:39 -04:00
|
|
|
"description": None,
|
2021-01-11 08:57:55 -05:00
|
|
|
"expression": None,
|
|
|
|
"filterable": True,
|
|
|
|
"groupby": True,
|
|
|
|
"is_active": True,
|
|
|
|
"is_dttm": True,
|
|
|
|
"python_date_format": None,
|
|
|
|
"type": ds_type,
|
feat(business-types): initial implementation of SIP-78 (#18794)
* add BUSINESS_TYPE_ADDONS to config with example callback
* Removing uneeded whitespace
* [Work in progress] Modifying cidr function to allow for single ip and adding port outline
* Added test REST endpoint, added some more ports
I've thrown in a test.py script as well that will try to connect to the
business_type endpoint.
* Moving code from config.py into the business api
Very simple api is exposed that will allow someone to call a checkport
endpoint and get back a response.
* Removing commented out bits.
* Adding function dict back to the config
* Moving business_type endpoint to charts
* Adding schema for get endpoint
* Removing imports, updating docstring, fixing typo
Just some small changes as described in the title. I've updated the
test.py as well so it functions with the endpoint changes.
* Adding translation dict
* Fixing ops
* Adding check for list
* Modifying changes to add quotes where needed
Also changed BusinessTypeResponse to resp.
* Adding in some code to call the filter config
If a column starts with "cidr_" it will call the code in config.py to
try to translate the filter. Nothing is changed in the JSON being
executed, just some information is dumped to console.
* Porting Ryan's changes
* Adding migration script (as per Ryan's PR)
* Fixing typo
* Prettier fixes
* [CLDN-1043] Adding rough version of filter changes for business types
* fix down migration
* Fixing bugs after merge
* adding functionality to appy filters in back end
* Fixing linting issues
* fix down revision
* Changing conversion callback to handle multiple values at once
* Adding string representation of values
* Code cleanup plus fixing debouce to only be called once for each entry
* Removing non-needed logging
* Changing operator list to use sting values
* Using text value operators
* Removing clear operator call
* Moving business type endpoints
* fix down revision
* Adding port functions
* update migration
* fix bad rebase and add ff
* implement validator
* dont add invalid values to response
* [CLDN-1205] Added a new exception type for a business type translation error. Added the error message in the display_value field within the business type response. Modified the IP and Port business types to populate the error message field in the response if an error occurs
* [CLDN-1205] Added meaningful error message for port translation errors
* Removing status field from businesstype Response and adding in error message
* [CLDN-1205] Added check to make sure the port business type is within the valid range of ports, if it is not, it will populate the error message
* [CLDN-1205] Fixed the if statement that checks to see if the string_value is in the valid range of port numbers. It did not corrently verify this before now.
* [CLDN-1205] Fixed an error where it was trying to use string_value in <= statements. I just casted string_value to an integer if it is numeric, which allows <= operators to be used on it
* [CLDN-1207] Added unit tests for the cidr_func and port_translation_func functions which are located in /superset/config.py
* [CLDN-1207] removed the assertRaises line as it does not work with the cidr_func and port_translation_func functions
* [CLDN-1207] Added the skeleton of the test_cidr_translate_filter_func unit test, still need to update what the expected response from the function will be.
* [CLDN-1207] Added the remainder of the back-end unit tests for the business types
* [CLDN-1207] Fixed the syntax error which caused the test_cidr_translate_filter_func_NOT_IN_double unit test to fail
* [CLDN-1207] Removed the logging that was added for debugging purposes
* [CLDN-1207] Formatted the commands_tests.py file to make it nicer to look at/read through
* [CLDN-1207] Fixed the code so that it conformed to the pylint requirements (i.e., pylint no longer complains about the code in commands_tests.py)
* [CLDN-1207] Modified some of the docstrings so they made better use of the 100 character per line, line limit
* [CLDN-1207] Added the beginnings of the unit tests for the
business types API
* [CLDN-1207] Added a comment to the top of the commands_tests.py file explaining how to run the unit tests. This prevents the next person who tries to run them from having to waste time trying the different forms of testing that Superset supports (e.g., pytest, tox, etc.)
* [CLDN-1207] Added a grammar fix to the comments describing how to run the unit tests
* [CLDN-1207] Modified the description of the business_type API endpoints as they did not represent what the API was actually doing
* [CLDN-1207] Added further instructions on how to run the unit tests that are within the business_type/api_tests.py file
* add request validation
* disable request if business type missing
* [CLDN-1207] Unit tests for the business type API are now working, however, they need to be modified to make use of @mock as we don't want to have to run the server to be able to run the unit tests
* Removing businesss types deffinitons from config
* Adding select to only show valid business types
* Fixed Enzyme tests
* Added scalfolding for selecting filter dropdown
* Adding intigration tests
* fix revision
* fix typos and unnecessary requests
* break out useBusinessTypes
* Added front-end RTL unit tests for the business type API endpoint
* Fixed error from unit tests
* Added a unit test to ensure the operator list is updated after a business type API response is received
* Removing elect compoenet for business types
* Adding feature flag and allowing saving when no business type present
* fixing useEffect hooks
* Adding feature flag to model
* Changing behavior such that an empty string returns a default response
* add form validation
* Modified comments in unit test as command to run test has changed
* Modified comments in unit test as filename to run test has changed
* Modified the api_tests.py file to conform to the linting requirements
* Changed the name of one of the tests to reflect what the test is actually testing
* Added cypress back to the package.json
* Added informative comments
* Updated comments in files as well as removed imports which were not being used
* Changes made by npm run prettier
* Fixed spelling mistakes
* Updated models.py to remove placeholder comments used in development
* Added feature flag mocking in unit test
* Fixing open api failure
* Fixing business types to pass unit tests
* Reverting unsafe connections back to false
* Removing print statement
* Adding business tpye to export test
* setting default feature flag to false for business type
* Reverting pre commit
* Reverting pre commit and running pre commit
* Reverting pre commit and running pre commit
* Fixing formatting
* Adding license
* Fixing Linting
* Protecting api enpoints
* updating model
* Fixing code path when business type exists
* Linting
* Linting
* Fixing linting
* Fixing spelling
* Fixing schemas
* Fixing app import
* fixing item render
* Added RTL test to make sure business type operator list is updated after API response
* Fixing linting
* fix migration
* Changing unit tests
* Fixing import and DB migration after rebase
* Renaming to advanced types
* Fixing Linting
* More renaming
* Removing uneeded change
* Fixing linting and test errors
* Removing unused imports
* linting
* Adding more detailed name for migration
* Moving files to plugins
* more renaming
* Fixing schema name
* Disabling feature flag that should not be enabled by default
* Adding extra cehck
* NameChange
* formatting
* Fixing equals check
* Moveing all advanced type classes and types to one file, and converting tests to functional
* Adding advanced type to test and fix linitng
Co-authored-by: Ville Brofeldt <ville.v.brofeldt@gmail.com>
Co-authored-by: Dan Parent <daniel.parent@cse-cst.gc.ca>
Co-authored-by: GITHUB_USERNAME <EMAIL>
Co-authored-by: cccs-Dustin <96579982+cccs-Dustin@users.noreply.github.com>
2022-05-16 10:58:21 -04:00
|
|
|
"advanced_data_type": None,
|
2021-01-11 08:57:55 -05:00
|
|
|
"verbose_name": None,
|
2020-10-16 14:10:39 -04:00
|
|
|
},
|
|
|
|
{
|
2021-01-11 08:57:55 -05:00
|
|
|
"column_name": "gender",
|
2020-10-16 14:10:39 -04:00
|
|
|
"description": None,
|
2021-01-11 08:57:55 -05:00
|
|
|
"expression": None,
|
|
|
|
"filterable": True,
|
|
|
|
"groupby": True,
|
|
|
|
"is_active": True,
|
|
|
|
"is_dttm": False,
|
2020-10-16 14:10:39 -04:00
|
|
|
"python_date_format": None,
|
2021-01-11 08:57:55 -05:00
|
|
|
"type": "STRING" if example_db.backend == "hive" else "VARCHAR(16)",
|
feat(business-types): initial implementation of SIP-78 (#18794)
* add BUSINESS_TYPE_ADDONS to config with example callback
* Removing uneeded whitespace
* [Work in progress] Modifying cidr function to allow for single ip and adding port outline
* Added test REST endpoint, added some more ports
I've thrown in a test.py script as well that will try to connect to the
business_type endpoint.
* Moving code from config.py into the business api
Very simple api is exposed that will allow someone to call a checkport
endpoint and get back a response.
* Removing commented out bits.
* Adding fucntion dict back to the config
* Moving business_type endpoint to charts
* Adding schema for get endpoint
* Removing imports, updating docstring, fixing typo
Just some small changes as described in the title. I've updated the
test.py as well so it functions with the endpoint changes.
* Adding translation dict
* Fixing ops
* Adding check for list
* Modifying changes to add quotes where needed
Also changed BusinessTypeResponse to resp.
* Adding in some code to call the filter config
If a column starts with "cidr_" it will call the code in config.py to
try to translate the filter. Nothing is changed in the JSON being
executed, just some information is dumped to console.
* Porting Ryan's changes
* Adding migration script (as per Ryan's PR)
* Fixing typo
* Prettier fixes
* [CLDN-1043] Adding rough version of filter changes for business types
* fix down migration
* Fixing bugs after merge
* adding functionality to appy filters in back end
* Fixing linting issues
* fix down revision
* Changing conversion callback to handle multiple values at once
* Adding string representation of values
* Code cleanup plus fixing debouce to only be called once for each entry
* Removing non needed logginh
* Changing operator list to use sting values
* Using text value operators
* Removing clear operator call
* Moving business type endpoints
* fix down revision
* Adding port functions
* update migration
* fix bad rebase and add ff
* implement validator
* dont add invalid values to response
* [CLDN-1205] Added a new exception type for a business type translation error. Added the error message in the display_value field within the business type response. Modified the IP and Port business types to populate the error message field in the response if an error occurs
* [CLDN-1205] Added meaningful error message for port translation errors
* Removing status field from businesstype Response and adding in error message
* [CLDN-1205] Added check to make sure the port business type is within the valid range of ports, if it is not, it will populate the error message
* [CLDN-1205] Fixed the if statement that checks to see if the string_value is in the valid range of port numbers. It did not corrently verify this before now.
* [CLDN-1205] Fixed an error where it was trying to use string_value in <= statements. I just casted string_value to an integer if it is numeric, which allows <= operators to be used on it
* [CLDN-1207] Added unit tests for the cidr_func and port_translation_func functions which are located in /superset/config.py
* [CLDN-1207] removed the assertRaises line as it does not work with the cidr_func and port_translation_func functions
* [CLDN-1207] Added the skeleton of the test_cidr_translate_filter_func unit test, still need to update what the expected response from the function will be.
* [CLDN-1207] Added the remainder of the back-end unit tests for the business types
* [CLDN-1207] Fixed the syntax error which caused the test_cidr_translate_filter_func_NOT_IN_double unit test to fail
* [CLDN-1207] Removed the logging that was added for debugging purposes
* [CLDN-1207] Formatted the commands_tests.py file to make it nicer to look at/read through
* [CLDN-1207] Fixed the code so that it conformed to the pylint requirements (i.e., pylint no longer complains about the code in commands_tests.py)
* [CLDN-1207] Modified some of the docstrings so they made better use of the 100 character per line, line limit
* [CLDN-1207] Added the beginnings of the unit tests for the
business types API
* [CLDN-1207] Added a comment to the top of the commands_tests.py file explaining how to run the unit tests. This prevents the next person who tries to run them from having to waste time trying the different forms of testing that Superset supports (e.g., pytest, tox, etc.)
* [CLDN-1207] Added a grammar fix to the comments describing how to run the unit tests
* [CLDN-1207] Modified the description of the business_type API endpoints as they did not represent what the API was actually doing
* [CLDN-1207] Added further instructions on how to run the unit tests that are within the business_type/api_tests.py file
* add request validation
* disable request if business type missing
* [CLDN-1207] Unit tests for the business type API are now working, however, they need to be modified to make use of @mock as we don't want to have to run the server to be able to run the unit tests
* Removing businesss types deffinitons from config
* Adding select to only show valid business types
* Fixed Enzyme tests
* Added scalfolding for selecting filter dropdown
* Adding intigration tests
* fix revision
* fix typos and unnecessary requests
* break out useBusinessTypes
* Added front-end RTL unit tests for the business type API endpoint
* Fixed error from unit tests
* Added a unit test to ensure the operator list is updated after a business type API response is received
* Removing elect compoenet for business types
* Adding feature flag and allowing saving when no business type present
* fixing useEffect hooks
* Adding feature flag to model
* Changing behavior such that an empty string returns a default response
* add form validation
* Modified comments in unit test as command to run test has changed
* Modified comments in unit test as filename to run test has changed
* Modified the api_tests.py file to conform to the linting requirements
* Changed the name of one of the tests to reflect what the test is actually testing
* Added cypress back to the package.json
* Added informative comments
* Updated comments in files as well as removed imports which were not being used
* Changes made by npm run prettier
* Fixed spelling mistakes
* Updated models.py to remove placeholder comments used in development
* Added feature flag mocking in unit test
* Fixing open api failure
* Fixing business types to pass unit tests
* Reverting unsafe connections back to false
* Removing print statement
* Adding business tpye to export test
* setting default feature flag to false for business type
* Reverting pre commit
* Reverting pre commit and running pre commit
* Reverting pre commit and running pre commit
* Fixing formatting
* Adding license
* Fixing Linting
* Protecting api enpoints
* updating model
* Fixing code path when business type exists
* Linting
* Linting
* Fixing linting
* Fixing spelling
* Fixing schemas
* Fixing app import
* fixing item render
* Added RTL test to make sure business type operator list is updated after API response
* Fixing linting
* fix migration
* Changing unit tests
* Fixing import and DB migration after rebase
* Renaming to advanced types
* Fixing Linting
* More renaming
* Removing uneeded change
* Fixing linting and test errors
* Removing unused imports
* linting
* Adding more detailed name for migration
* Moving files to plugins
* more renaming
* Fixing schema name
* Disabling feature flag that should not be enabled by default
* Adding extra cehck
* NameChange
* formatting
* Fixing equals check
* Moveing all advanced type classes and types to one file, and converting tests to functional
* Adding advanced type to test and fix linitng
Co-authored-by: Ville Brofeldt <ville.v.brofeldt@gmail.com>
Co-authored-by: Dan Parent <daniel.parent@cse-cst.gc.ca>
Co-authored-by: GITHUB_USERNAME <EMAIL>
Co-authored-by: cccs-Dustin <96579982+cccs-Dustin@users.noreply.github.com>
2022-05-16 10:58:21 -04:00
|
|
|
"advanced_data_type": None,
|
2021-01-11 08:57:55 -05:00
|
|
|
"verbose_name": None,
|
2020-10-16 14:10:39 -04:00
|
|
|
},
|
|
|
|
{
|
2021-01-11 08:57:55 -05:00
|
|
|
"column_name": "name",
|
2020-10-16 14:10:39 -04:00
|
|
|
"description": None,
|
2021-01-11 08:57:55 -05:00
|
|
|
"expression": None,
|
|
|
|
"filterable": True,
|
|
|
|
"groupby": True,
|
|
|
|
"is_active": True,
|
|
|
|
"is_dttm": False,
|
2020-10-16 14:10:39 -04:00
|
|
|
"python_date_format": None,
|
2021-01-11 08:57:55 -05:00
|
|
|
"type": "STRING"
|
|
|
|
if example_db.backend == "hive"
|
|
|
|
else "VARCHAR(255)",
|
feat(business-types): initial implementation of SIP-78 (#18794)
* add BUSINESS_TYPE_ADDONS to config with example callback
* Removing uneeded whitespace
* [Work in progress] Modifying cidr function to allow for single ip and adding port outline
* Added test REST endpoint, added some more ports
I've thrown in a test.py script as well that will try to connect to the
business_type endpoint.
* Moving code from config.py into the business api
Very simple api is exposed that will allow someone to call a checkport
endpoint and get back a response.
* Removing commented out bits.
* Adding fucntion dict back to the config
* Moving business_type endpoint to charts
* Adding schema for get endpoint
* Removing imports, updating docstring, fixing typo
Just some small changes as described in the title. I've updated the
test.py as well so it functions with the endpoint changes.
* Adding translation dict
* Fixing ops
* Adding check for list
* Modifying changes to add quotes where needed
Also changed BusinessTypeResponse to resp.
* Adding in some code to call the filter config
If a column starts with "cidr_" it will call the code in config.py to
try to translate the filter. Nothing is changed in the JSON being
executed, just some information is dumped to console.
* Porting Ryan's changes
* Adding migration script (as per Ryan's PR)
* Fixing typo
* Prettier fixes
* [CLDN-1043] Adding rough version of filter changes for business types
* fix down migration
* Fixing bugs after merge
* adding functionality to appy filters in back end
* Fixing linting issues
* fix down revision
* Changing conversion callback to handle multiple values at once
* Adding string representation of values
* Code cleanup plus fixing debouce to only be called once for each entry
* Removing non needed logginh
* Changing operator list to use sting values
* Using text value operators
* Removing clear operator call
* Moving business type endpoints
* fix down revision
* Adding port functions
* update migration
* fix bad rebase and add ff
* implement validator
* dont add invalid values to response
* [CLDN-1205] Added a new exception type for a business type translation error. Added the error message in the display_value field within the business type response. Modified the IP and Port business types to populate the error message field in the response if an error occurs
* [CLDN-1205] Added meaningful error message for port translation errors
* Removing status field from businesstype Response and adding in error message
* [CLDN-1205] Added check to make sure the port business type is within the valid range of ports, if it is not, it will populate the error message
* [CLDN-1205] Fixed the if statement that checks to see if the string_value is in the valid range of port numbers. It did not corrently verify this before now.
* [CLDN-1205] Fixed an error where it was trying to use string_value in <= statements. I just casted string_value to an integer if it is numeric, which allows <= operators to be used on it
* [CLDN-1207] Added unit tests for the cidr_func and port_translation_func functions which are located in /superset/config.py
* [CLDN-1207] removed the assertRaises line as it does not work with the cidr_func and port_translation_func functions
* [CLDN-1207] Added the skeleton of the test_cidr_translate_filter_func unit test, still need to update what the expected response from the function will be.
* [CLDN-1207] Added the remainder of the back-end unit tests for the business types
* [CLDN-1207] Fixed the syntax error which caused the test_cidr_translate_filter_func_NOT_IN_double unit test to fail
* [CLDN-1207] Removed the logging that was added for debugging purposes
* [CLDN-1207] Formatted the commands_tests.py file to make it nicer to look at/read through
* [CLDN-1207] Fixed the code so that it conformed to the pylint requirements (i.e., pylint no longer complains about the code in commands_tests.py)
* [CLDN-1207] Modified some of the docstrings so they made better use of the 100 character per line, line limit
* [CLDN-1207] Added the beginnings of the unit tests for the
business types API
* [CLDN-1207] Added a comment to the top of the commands_tests.py file explaining how to run the unit tests. This prevents the next person who tries to run them from having to waste time trying the different forms of testing that Superset supports (e.g., pytest, tox, etc.)
* [CLDN-1207] Added a grammar fix to the comments describing how to run the unit tests
* [CLDN-1207] Modified the description of the business_type API endpoints as they did not represent what the API was actually doing
* [CLDN-1207] Added further instructions on how to run the unit tests that are within the business_type/api_tests.py file
* add request validation
* disable request if business type missing
* [CLDN-1207] Unit tests for the business type API are now working, however, they need to be modified to make use of @mock as we don't want to have to run the server to be able to run the unit tests
* Removing businesss types deffinitons from config
* Adding select to only show valid business types
* Fixed Enzyme tests
* Added scalfolding for selecting filter dropdown
* Adding intigration tests
* fix revision
* fix typos and unnecessary requests
* break out useBusinessTypes
* Added front-end RTL unit tests for the business type API endpoint
* Fixed error from unit tests
* Added a unit test to ensure the operator list is updated after a business type API response is received
* Removing elect compoenet for business types
* Adding feature flag and allowing saving when no business type present
* fixing useEffect hooks
* Adding feature flag to model
* Changing behavior such that an empty string returns a default response
* add form validation
* Modified comments in unit test as command to run test has changed
* Modified comments in unit test as filename to run test has changed
* Modified the api_tests.py file to conform to the linting requirements
* Changed the name of one of the tests to reflect what the test is actually testing
* Added cypress back to the package.json
* Added informative comments
* Updated comments in files as well as removed imports which were not being used
* Changes made by npm run prettier
* Fixed spelling mistakes
* Updated models.py to remove placeholder comments used in development
* Added feature flag mocking in unit test
* Fixing open api failure
* Fixing business types to pass unit tests
* Reverting unsafe connections back to false
* Removing print statement
* Adding business tpye to export test
* setting default feature flag to false for business type
* Reverting pre commit
* Reverting pre commit and running pre commit
* Reverting pre commit and running pre commit
* Fixing formatting
* Adding license
* Fixing Linting
* Protecting api enpoints
* updating model
* Fixing code path when business type exists
* Linting
* Linting
* Fixing linting
* Fixing spelling
* Fixing schemas
* Fixing app import
* fixing item render
* Added RTL test to make sure business type operator list is updated after API response
* Fixing linting
* fix migration
* Changing unit tests
* Fixing import and DB migration after rebase
* Renaming to advanced types
* Fixing Linting
* More renaming
* Removing uneeded change
* Fixing linting and test errors
* Removing unused imports
* linting
* Adding more detailed name for migration
* Moving files to plugins
* more renaming
* Fixing schema name
* Disabling feature flag that should not be enabled by default
* Adding extra cehck
* NameChange
* formatting
* Fixing equals check
* Moveing all advanced type classes and types to one file, and converting tests to functional
* Adding advanced type to test and fix linitng
Co-authored-by: Ville Brofeldt <ville.v.brofeldt@gmail.com>
Co-authored-by: Dan Parent <daniel.parent@cse-cst.gc.ca>
Co-authored-by: GITHUB_USERNAME <EMAIL>
Co-authored-by: cccs-Dustin <96579982+cccs-Dustin@users.noreply.github.com>
2022-05-16 10:58:21 -04:00
|
|
|
"advanced_data_type": None,
|
2021-01-11 08:57:55 -05:00
|
|
|
"verbose_name": None,
|
2020-10-16 14:10:39 -04:00
|
|
|
},
|
|
|
|
{
|
2021-01-11 08:57:55 -05:00
|
|
|
"column_name": "num",
|
2020-10-16 14:10:39 -04:00
|
|
|
"description": None,
|
2021-01-11 08:57:55 -05:00
|
|
|
"expression": None,
|
|
|
|
"filterable": True,
|
|
|
|
"groupby": True,
|
|
|
|
"is_active": True,
|
|
|
|
"is_dttm": False,
|
2020-10-16 14:10:39 -04:00
|
|
|
"python_date_format": None,
|
2021-01-11 08:57:55 -05:00
|
|
|
"type": big_int_type,
|
feat(business-types): initial implementation of SIP-78 (#18794)
* add BUSINESS_TYPE_ADDONS to config with example callback
* Removing uneeded whitespace
* [Work in progress] Modifying cidr function to allow for single ip and adding port outline
* Added test REST endpoint, added some more ports
I've thrown in a test.py script as well that will try to connect to the
business_type endpoint.
* Moving code from config.py into the business api
Very simple api is exposed that will allow someone to call a checkport
endpoint and get back a response.
* Removing commented out bits.
* Adding fucntion dict back to the config
* Moving business_type endpoint to charts
* Adding schema for get endpoint
* Removing imports, updating docstring, fixing typo
Just some small changes as described in the title. I've updated the
test.py as well so it functions with the endpoint changes.
* Adding translation dict
* Fixing ops
* Adding check for list
* Modifying changes to add quotes where needed
Also changed BusinessTypeResponse to resp.
* Adding in some code to call the filter config
If a column starts with "cidr_" it will call the code in config.py to
try to translate the filter. Nothing is changed in the JSON being
executed, just some information is dumped to console.
* Porting Ryan's changes
* Adding migration script (as per Ryan's PR)
* Fixing typo
* Prettier fixes
* [CLDN-1043] Adding rough version of filter changes for business types
* fix down migration
* Fixing bugs after merge
* adding functionality to appy filters in back end
* Fixing linting issues
* fix down revision
* Changing conversion callback to handle multiple values at once
* Adding string representation of values
* Code cleanup plus fixing debouce to only be called once for each entry
* Removing non needed logginh
* Changing operator list to use sting values
* Using text value operators
* Removing clear operator call
* Moving business type endpoints
* fix down revision
* Adding port functions
* update migration
* fix bad rebase and add ff
* implement validator
* dont add invalid values to response
* [CLDN-1205] Added a new exception type for a business type translation error. Added the error message in the display_value field within the business type response. Modified the IP and Port business types to populate the error message field in the response if an error occurs
* [CLDN-1205] Added meaningful error message for port translation errors
* Removing status field from businesstype Response and adding in error message
* [CLDN-1205] Added check to make sure the port business type is within the valid range of ports, if it is not, it will populate the error message
* [CLDN-1205] Fixed the if statement that checks to see if the string_value is in the valid range of port numbers. It did not corrently verify this before now.
* [CLDN-1205] Fixed an error where it was trying to use string_value in <= statements. I just casted string_value to an integer if it is numeric, which allows <= operators to be used on it
* [CLDN-1207] Added unit tests for the cidr_func and port_translation_func functions which are located in /superset/config.py
* [CLDN-1207] removed the assertRaises line as it does not work with the cidr_func and port_translation_func functions
* [CLDN-1207] Added the skeleton of the test_cidr_translate_filter_func unit test, still need to update what the expected response from the function will be.
* [CLDN-1207] Added the remainder of the back-end unit tests for the business types
* [CLDN-1207] Fixed the syntax error which caused the test_cidr_translate_filter_func_NOT_IN_double unit test to fail
* [CLDN-1207] Removed the logging that was added for debugging purposes
* [CLDN-1207] Formatted the commands_tests.py file to make it nicer to look at/read through
* [CLDN-1207] Fixed the code so that it conformed to the pylint requirements (i.e., pylint no longer complains about the code in commands_tests.py)
* [CLDN-1207] Modified some of the docstrings so they made better use of the 100 character per line, line limit
* [CLDN-1207] Added the beginnings of the unit tests for the
business types API
* [CLDN-1207] Added a comment to the top of the commands_tests.py file explaining how to run the unit tests. This prevents the next person who tries to run them from having to waste time trying the different forms of testing that Superset supports (e.g., pytest, tox, etc.)
* [CLDN-1207] Added a grammar fix to the comments describing how to run the unit tests
* [CLDN-1207] Modified the description of the business_type API endpoints as they did not represent what the API was actually doing
* [CLDN-1207] Added further instructions on how to run the unit tests that are within the business_type/api_tests.py file
* add request validation
* disable request if business type missing
* [CLDN-1207] Unit tests for the business type API are now working, however, they need to be modified to make use of @mock as we don't want to have to run the server to be able to run the unit tests
* Removing businesss types deffinitons from config
* Adding select to only show valid business types
* Fixed Enzyme tests
* Added scalfolding for selecting filter dropdown
* Adding intigration tests
* fix revision
* fix typos and unnecessary requests
* break out useBusinessTypes
* Added front-end RTL unit tests for the business type API endpoint
* Fixed error from unit tests
* Added a unit test to ensure the operator list is updated after a business type API response is received
* Removing elect compoenet for business types
* Adding feature flag and allowing saving when no business type present
* fixing useEffect hooks
* Adding feature flag to model
* Changing behavior such that an empty string returns a default response
* add form validation
* Modified comments in unit test as command to run test has changed
* Modified comments in unit test as filename to run test has changed
* Modified the api_tests.py file to conform to the linting requirements
* Changed the name of one of the tests to reflect what the test is actually testing
* Added cypress back to the package.json
* Added informative comments
* Updated comments in files as well as removed imports which were not being used
* Changes made by npm run prettier
* Fixed spelling mistakes
* Updated models.py to remove placeholder comments used in development
* Added feature flag mocking in unit test
* Fixing open api failure
* Fixing business types to pass unit tests
* Reverting unsafe connections back to false
* Removing print statement
* Adding business tpye to export test
* setting default feature flag to false for business type
* Reverting pre commit
* Reverting pre commit and running pre commit
* Reverting pre commit and running pre commit
* Fixing formatting
* Adding license
* Fixing Linting
* Protecting api enpoints
* updating model
* Fixing code path when business type exists
* Linting
* Linting
* Fixing linting
* Fixing spelling
* Fixing schemas
* Fixing app import
* fixing item render
* Added RTL test to make sure business type operator list is updated after API response
* Fixing linting
* fix migration
* Changing unit tests
* Fixing import and DB migration after rebase
* Renaming to advanced types
* Fixing Linting
* More renaming
* Removing uneeded change
* Fixing linting and test errors
* Removing unused imports
* linting
* Adding more detailed name for migration
* Moving files to plugins
* more renaming
* Fixing schema name
* Disabling feature flag that should not be enabled by default
* Adding extra cehck
* NameChange
* formatting
* Fixing equals check
* Moveing all advanced type classes and types to one file, and converting tests to functional
* Adding advanced type to test and fix linitng
Co-authored-by: Ville Brofeldt <ville.v.brofeldt@gmail.com>
Co-authored-by: Dan Parent <daniel.parent@cse-cst.gc.ca>
Co-authored-by: GITHUB_USERNAME <EMAIL>
Co-authored-by: cccs-Dustin <96579982+cccs-Dustin@users.noreply.github.com>
2022-05-16 10:58:21 -04:00
|
|
|
"advanced_data_type": None,
|
2021-01-11 08:57:55 -05:00
|
|
|
"verbose_name": None,
|
2020-10-16 14:10:39 -04:00
|
|
|
},
|
|
|
|
{
|
2021-01-11 08:57:55 -05:00
|
|
|
"column_name": "num_california",
|
2020-10-16 14:10:39 -04:00
|
|
|
"description": None,
|
2021-01-11 08:57:55 -05:00
|
|
|
"expression": "CASE WHEN state = 'CA' THEN num ELSE 0 END",
|
|
|
|
"filterable": True,
|
|
|
|
"groupby": True,
|
|
|
|
"is_active": True,
|
|
|
|
"is_dttm": False,
|
2020-10-16 14:10:39 -04:00
|
|
|
"python_date_format": None,
|
2021-01-11 08:57:55 -05:00
|
|
|
"type": None,
|
feat(business-types): initial implementation of SIP-78 (#18794)
* add BUSINESS_TYPE_ADDONS to config with example callback
* Removing uneeded whitespace
* [Work in progress] Modifying cidr function to allow for single ip and adding port outline
* Added test REST endpoint, added some more ports
I've thrown in a test.py script as well that will try to connect to the
business_type endpoint.
* Moving code from config.py into the business api
Very simple api is exposed that will allow someone to call a checkport
endpoint and get back a response.
* Removing commented out bits.
* Adding fucntion dict back to the config
* Moving business_type endpoint to charts
* Adding schema for get endpoint
* Removing imports, updating docstring, fixing typo
Just some small changes as described in the title. I've updated the
test.py as well so it functions with the endpoint changes.
* Adding translation dict
* Fixing ops
* Adding check for list
* Modifying changes to add quotes where needed
Also changed BusinessTypeResponse to resp.
* Adding in some code to call the filter config
If a column starts with "cidr_" it will call the code in config.py to
try to translate the filter. Nothing is changed in the JSON being
executed, just some information is dumped to console.
* Porting Ryan's changes
* Adding migration script (as per Ryan's PR)
* Fixing typo
* Prettier fixes
* [CLDN-1043] Adding rough version of filter changes for business types
* fix down migration
* Fixing bugs after merge
* adding functionality to appy filters in back end
* Fixing linting issues
* fix down revision
* Changing conversion callback to handle multiple values at once
* Adding string representation of values
* Code cleanup plus fixing debouce to only be called once for each entry
* Removing non needed logginh
* Changing operator list to use sting values
* Using text value operators
* Removing clear operator call
* Moving business type endpoints
* fix down revision
* Adding port functions
* update migration
* fix bad rebase and add ff
* implement validator
* dont add invalid values to response
* [CLDN-1205] Added a new exception type for a business type translation error. Added the error message in the display_value field within the business type response. Modified the IP and Port business types to populate the error message field in the response if an error occurs
* [CLDN-1205] Added meaningful error message for port translation errors
* Removing status field from businesstype Response and adding in error message
* [CLDN-1205] Added check to make sure the port business type is within the valid range of ports, if it is not, it will populate the error message
* [CLDN-1205] Fixed the if statement that checks to see if the string_value is in the valid range of port numbers. It did not corrently verify this before now.
* [CLDN-1205] Fixed an error where it was trying to use string_value in <= statements. I just casted string_value to an integer if it is numeric, which allows <= operators to be used on it
* [CLDN-1207] Added unit tests for the cidr_func and port_translation_func functions which are located in /superset/config.py
* [CLDN-1207] removed the assertRaises line as it does not work with the cidr_func and port_translation_func functions
* [CLDN-1207] Added the skeleton of the test_cidr_translate_filter_func unit test, still need to update what the expected response from the function will be.
* [CLDN-1207] Added the remainder of the back-end unit tests for the business types
* [CLDN-1207] Fixed the syntax error which caused the test_cidr_translate_filter_func_NOT_IN_double unit test to fail
* [CLDN-1207] Removed the logging that was added for debugging purposes
* [CLDN-1207] Formatted the commands_tests.py file to make it nicer to look at/read through
* [CLDN-1207] Fixed the code so that it conformed to the pylint requirements (i.e., pylint no longer complains about the code in commands_tests.py)
* [CLDN-1207] Modified some of the docstrings so they made better use of the 100 character per line, line limit
* [CLDN-1207] Added the beginnings of the unit tests for the
business types API
* [CLDN-1207] Added a comment to the top of the commands_tests.py file explaining how to run the unit tests. This prevents the next person who tries to run them from having to waste time trying the different forms of testing that Superset supports (e.g., pytest, tox, etc.)
* [CLDN-1207] Added a grammar fix to the comments describing how to run the unit tests
* [CLDN-1207] Modified the description of the business_type API endpoints as they did not represent what the API was actually doing
* [CLDN-1207] Added further instructions on how to run the unit tests that are within the business_type/api_tests.py file
* add request validation
* disable request if business type missing
* [CLDN-1207] Unit tests for the business type API are now working, however, they need to be modified to make use of @mock as we don't want to have to run the server to be able to run the unit tests
* Removing businesss types deffinitons from config
* Adding select to only show valid business types
* Fixed Enzyme tests
* Added scalfolding for selecting filter dropdown
* Adding intigration tests
* fix revision
* fix typos and unnecessary requests
* break out useBusinessTypes
* Added front-end RTL unit tests for the business type API endpoint
* Fixed error from unit tests
* Added a unit test to ensure the operator list is updated after a business type API response is received
* Removing elect compoenet for business types
* Adding feature flag and allowing saving when no business type present
* fixing useEffect hooks
* Adding feature flag to model
* Changing behavior such that an empty string returns a default response
* add form validation
* Modified comments in unit test as command to run test has changed
* Modified comments in unit test as filename to run test has changed
* Modified the api_tests.py file to conform to the linting requirements
* Changed the name of one of the tests to reflect what the test is actually testing
* Added cypress back to the package.json
* Added informative comments
* Updated comments in files as well as removed imports which were not being used
* Changes made by npm run prettier
* Fixed spelling mistakes
* Updated models.py to remove placeholder comments used in development
* Added feature flag mocking in unit test
* Fixing open api failure
* Fixing business types to pass unit tests
* Reverting unsafe connections back to false
* Removing print statement
* Adding business tpye to export test
* setting default feature flag to false for business type
* Reverting pre commit
* Reverting pre commit and running pre commit
* Reverting pre commit and running pre commit
* Fixing formatting
* Adding license
* Fixing Linting
* Protecting api enpoints
* updating model
* Fixing code path when business type exists
* Linting
* Linting
* Fixing linting
* Fixing spelling
* Fixing schemas
* Fixing app import
* fixing item render
* Added RTL test to make sure business type operator list is updated after API response
* Fixing linting
* fix migration
* Changing unit tests
* Fixing import and DB migration after rebase
* Renaming to advanced types
* Fixing Linting
* More renaming
* Removing uneeded change
* Fixing linting and test errors
* Removing unused imports
* linting
* Adding more detailed name for migration
* Moving files to plugins
* more renaming
* Fixing schema name
* Disabling feature flag that should not be enabled by default
* Adding extra cehck
* NameChange
* formatting
* Fixing equals check
* Moveing all advanced type classes and types to one file, and converting tests to functional
* Adding advanced type to test and fix linitng
Co-authored-by: Ville Brofeldt <ville.v.brofeldt@gmail.com>
Co-authored-by: Dan Parent <daniel.parent@cse-cst.gc.ca>
Co-authored-by: GITHUB_USERNAME <EMAIL>
Co-authored-by: cccs-Dustin <96579982+cccs-Dustin@users.noreply.github.com>
2022-05-16 10:58:21 -04:00
|
|
|
"advanced_data_type": None,
|
2021-01-11 08:57:55 -05:00
|
|
|
"verbose_name": None,
|
2020-10-16 14:10:39 -04:00
|
|
|
},
|
|
|
|
{
|
|
|
|
"column_name": "state",
|
|
|
|
"description": None,
|
2021-01-11 08:57:55 -05:00
|
|
|
"expression": None,
|
|
|
|
"filterable": True,
|
|
|
|
"groupby": True,
|
|
|
|
"is_active": True,
|
|
|
|
"is_dttm": False,
|
2020-10-16 14:10:39 -04:00
|
|
|
"python_date_format": None,
|
2021-01-11 08:57:55 -05:00
|
|
|
"type": "STRING" if example_db.backend == "hive" else "VARCHAR(10)",
|
feat(business-types): initial implementation of SIP-78 (#18794)
* add BUSINESS_TYPE_ADDONS to config with example callback
* Removing uneeded whitespace
* [Work in progress] Modifying cidr function to allow for single ip and adding port outline
* Added test REST endpoint, added some more ports
I've thrown in a test.py script as well that will try to connect to the
business_type endpoint.
* Moving code from config.py into the business api
Very simple api is exposed that will allow someone to call a checkport
endpoint and get back a response.
* Removing commented out bits.
* Adding fucntion dict back to the config
* Moving business_type endpoint to charts
* Adding schema for get endpoint
* Removing imports, updating docstring, fixing typo
Just some small changes as described in the title. I've updated the
test.py as well so it functions with the endpoint changes.
* Adding translation dict
* Fixing ops
* Adding check for list
* Modifying changes to add quotes where needed
Also changed BusinessTypeResponse to resp.
* Adding in some code to call the filter config
If a column starts with "cidr_" it will call the code in config.py to
try to translate the filter. Nothing is changed in the JSON being
executed, just some information is dumped to console.
* Porting Ryan's changes
* Adding migration script (as per Ryan's PR)
* Fixing typo
* Prettier fixes
* [CLDN-1043] Adding rough version of filter changes for business types
* fix down migration
* Fixing bugs after merge
* adding functionality to appy filters in back end
* Fixing linting issues
* fix down revision
* Changing conversion callback to handle multiple values at once
* Adding string representation of values
* Code cleanup plus fixing debouce to only be called once for each entry
* Removing non needed logginh
* Changing operator list to use sting values
* Using text value operators
* Removing clear operator call
* Moving business type endpoints
* fix down revision
* Adding port functions
* update migration
* fix bad rebase and add ff
* implement validator
* dont add invalid values to response
* [CLDN-1205] Added a new exception type for a business type translation error. Added the error message in the display_value field within the business type response. Modified the IP and Port business types to populate the error message field in the response if an error occurs
* [CLDN-1205] Added meaningful error message for port translation errors
* Removing status field from businesstype Response and adding in error message
* [CLDN-1205] Added check to make sure the port business type is within the valid range of ports, if it is not, it will populate the error message
* [CLDN-1205] Fixed the if statement that checks to see if the string_value is in the valid range of port numbers. It did not corrently verify this before now.
* [CLDN-1205] Fixed an error where it was trying to use string_value in <= statements. I just casted string_value to an integer if it is numeric, which allows <= operators to be used on it
* [CLDN-1207] Added unit tests for the cidr_func and port_translation_func functions which are located in /superset/config.py
* [CLDN-1207] removed the assertRaises line as it does not work with the cidr_func and port_translation_func functions
* [CLDN-1207] Added the skeleton of the test_cidr_translate_filter_func unit test, still need to update what the expected response from the function will be.
* [CLDN-1207] Added the remainder of the back-end unit tests for the business types
* [CLDN-1207] Fixed the syntax error which caused the test_cidr_translate_filter_func_NOT_IN_double unit test to fail
* [CLDN-1207] Removed the logging that was added for debugging purposes
* [CLDN-1207] Formatted the commands_tests.py file to make it nicer to look at/read through
* [CLDN-1207] Fixed the code so that it conformed to the pylint requirements (i.e., pylint no longer complains about the code in commands_tests.py)
* [CLDN-1207] Modified some of the docstrings so they made better use of the 100 character per line, line limit
* [CLDN-1207] Added the beginnings of the unit tests for the
business types API
* [CLDN-1207] Added a comment to the top of the commands_tests.py file explaining how to run the unit tests. This prevents the next person who tries to run them from having to waste time trying the different forms of testing that Superset supports (e.g., pytest, tox, etc.)
* [CLDN-1207] Added a grammar fix to the comments describing how to run the unit tests
* [CLDN-1207] Modified the description of the business_type API endpoints as they did not represent what the API was actually doing
* [CLDN-1207] Added further instructions on how to run the unit tests that are within the business_type/api_tests.py file
* add request validation
* disable request if business type missing
* [CLDN-1207] Unit tests for the business type API are now working, however, they need to be modified to make use of @mock as we don't want to have to run the server to be able to run the unit tests
* Removing businesss types deffinitons from config
* Adding select to only show valid business types
* Fixed Enzyme tests
* Added scalfolding for selecting filter dropdown
* Adding intigration tests
* fix revision
* fix typos and unnecessary requests
* break out useBusinessTypes
* Added front-end RTL unit tests for the business type API endpoint
* Fixed error from unit tests
* Added a unit test to ensure the operator list is updated after a business type API response is received
* Removing elect compoenet for business types
* Adding feature flag and allowing saving when no business type present
* fixing useEffect hooks
* Adding feature flag to model
* Changing behavior such that an empty string returns a default response
* add form validation
* Modified comments in unit test as command to run test has changed
* Modified comments in unit test as filename to run test has changed
* Modified the api_tests.py file to conform to the linting requirements
* Changed the name of one of the tests to reflect what the test is actually testing
* Added cypress back to the package.json
* Added informative comments
* Updated comments in files as well as removed imports which were not being used
* Changes made by npm run prettier
* Fixed spelling mistakes
* Updated models.py to remove placeholder comments used in development
* Added feature flag mocking in unit test
* Fixing open api failure
* Fixing business types to pass unit tests
* Reverting unsafe connections back to false
* Removing print statement
* Adding business tpye to export test
* setting default feature flag to false for business type
* Reverting pre commit
* Reverting pre commit and running pre commit
* Reverting pre commit and running pre commit
* Fixing formatting
* Adding license
* Fixing Linting
* Protecting api enpoints
* updating model
* Fixing code path when business type exists
* Linting
* Linting
* Fixing linting
* Fixing spelling
* Fixing schemas
* Fixing app import
* fixing item render
* Added RTL test to make sure business type operator list is updated after API response
* Fixing linting
* fix migration
* Changing unit tests
* Fixing import and DB migration after rebase
* Renaming to advanced types
* Fixing Linting
* More renaming
* Removing uneeded change
* Fixing linting and test errors
* Removing unused imports
* linting
* Adding more detailed name for migration
* Moving files to plugins
* more renaming
* Fixing schema name
* Disabling feature flag that should not be enabled by default
* Adding extra cehck
* NameChange
* formatting
* Fixing equals check
* Moveing all advanced type classes and types to one file, and converting tests to functional
* Adding advanced type to test and fix linitng
Co-authored-by: Ville Brofeldt <ville.v.brofeldt@gmail.com>
Co-authored-by: Dan Parent <daniel.parent@cse-cst.gc.ca>
Co-authored-by: GITHUB_USERNAME <EMAIL>
Co-authored-by: cccs-Dustin <96579982+cccs-Dustin@users.noreply.github.com>
2022-05-16 10:58:21 -04:00
|
|
|
"advanced_data_type": None,
|
2021-01-11 08:57:55 -05:00
|
|
|
"verbose_name": None,
|
2020-10-16 14:10:39 -04:00
|
|
|
},
|
|
|
|
{
|
2021-01-08 17:13:20 -05:00
|
|
|
"column_name": "num_boys",
|
2020-10-16 14:10:39 -04:00
|
|
|
"description": None,
|
2021-01-11 08:57:55 -05:00
|
|
|
"expression": None,
|
|
|
|
"filterable": True,
|
|
|
|
"groupby": True,
|
|
|
|
"is_active": True,
|
|
|
|
"is_dttm": False,
|
2020-10-16 14:10:39 -04:00
|
|
|
"python_date_format": None,
|
2021-01-11 08:57:55 -05:00
|
|
|
"type": big_int_type,
|
feat(business-types): initial implementation of SIP-78 (#18794)
* add BUSINESS_TYPE_ADDONS to config with example callback
* Removing uneeded whitespace
* [Work in progress] Modifying cidr function to allow for single ip and adding port outline
* Added test REST endpoint, added some more ports
I've thrown in a test.py script as well that will try to connect to the
business_type endpoint.
* Moving code from config.py into the business api
Very simple api is exposed that will allow someone to call a checkport
endpoint and get back a response.
* Removing commented out bits.
* Adding fucntion dict back to the config
* Moving business_type endpoint to charts
* Adding schema for get endpoint
* Removing imports, updating docstring, fixing typo
Just some small changes as described in the title. I've updated the
test.py as well so it functions with the endpoint changes.
* Adding translation dict
* Fixing ops
* Adding check for list
* Modifying changes to add quotes where needed
Also changed BusinessTypeResponse to resp.
* Adding in some code to call the filter config
If a column starts with "cidr_" it will call the code in config.py to
try to translate the filter. Nothing is changed in the JSON being
executed, just some information is dumped to console.
* Porting Ryan's changes
* Adding migration script (as per Ryan's PR)
* Fixing typo
* Prettier fixes
* [CLDN-1043] Adding rough version of filter changes for business types
* fix down migration
* Fixing bugs after merge
* adding functionality to appy filters in back end
* Fixing linting issues
* fix down revision
* Changing conversion callback to handle multiple values at once
* Adding string representation of values
* Code cleanup plus fixing debouce to only be called once for each entry
* Removing non needed logginh
* Changing operator list to use sting values
* Using text value operators
* Removing clear operator call
* Moving business type endpoints
* fix down revision
* Adding port functions
* update migration
* fix bad rebase and add ff
* implement validator
* dont add invalid values to response
* [CLDN-1205] Added a new exception type for a business type translation error. Added the error message in the display_value field within the business type response. Modified the IP and Port business types to populate the error message field in the response if an error occurs
* [CLDN-1205] Added meaningful error message for port translation errors
* Removing status field from businesstype Response and adding in error message
* [CLDN-1205] Added check to make sure the port business type is within the valid range of ports, if it is not, it will populate the error message
* [CLDN-1205] Fixed the if statement that checks to see if the string_value is in the valid range of port numbers. It did not corrently verify this before now.
* [CLDN-1205] Fixed an error where it was trying to use string_value in <= statements. I just casted string_value to an integer if it is numeric, which allows <= operators to be used on it
* [CLDN-1207] Added unit tests for the cidr_func and port_translation_func functions which are located in /superset/config.py
* [CLDN-1207] removed the assertRaises line as it does not work with the cidr_func and port_translation_func functions
* [CLDN-1207] Added the skeleton of the test_cidr_translate_filter_func unit test, still need to update what the expected response from the function will be.
* [CLDN-1207] Added the remainder of the back-end unit tests for the business types
* [CLDN-1207] Fixed the syntax error which caused the test_cidr_translate_filter_func_NOT_IN_double unit test to fail
* [CLDN-1207] Removed the logging that was added for debugging purposes
* [CLDN-1207] Formatted the commands_tests.py file to make it nicer to look at/read through
* [CLDN-1207] Fixed the code so that it conformed to the pylint requirements (i.e., pylint no longer complains about the code in commands_tests.py)
* [CLDN-1207] Modified some of the docstrings so they made better use of the 100 character per line, line limit
* [CLDN-1207] Added the beginnings of the unit tests for the
business types API
* [CLDN-1207] Added a comment to the top of the commands_tests.py file explaining how to run the unit tests. This prevents the next person who tries to run them from having to waste time trying the different forms of testing that Superset supports (e.g., pytest, tox, etc.)
* [CLDN-1207] Added a grammar fix to the comments describing how to run the unit tests
* [CLDN-1207] Modified the description of the business_type API endpoints as they did not represent what the API was actually doing
* [CLDN-1207] Added further instructions on how to run the unit tests that are within the business_type/api_tests.py file
* add request validation
* disable request if business type missing
* [CLDN-1207] Unit tests for the business type API are now working, however, they need to be modified to make use of @mock as we don't want to have to run the server to be able to run the unit tests
* Removing businesss types deffinitons from config
* Adding select to only show valid business types
* Fixed Enzyme tests
* Added scalfolding for selecting filter dropdown
* Adding intigration tests
* fix revision
* fix typos and unnecessary requests
* break out useBusinessTypes
* Added front-end RTL unit tests for the business type API endpoint
* Fixed error from unit tests
* Added a unit test to ensure the operator list is updated after a business type API response is received
* Removing elect compoenet for business types
* Adding feature flag and allowing saving when no business type present
* fixing useEffect hooks
* Adding feature flag to model
* Changing behavior such that an empty string returns a default response
* add form validation
* Modified comments in unit test as command to run test has changed
* Modified comments in unit test as filename to run test has changed
* Modified the api_tests.py file to conform to the linting requirements
* Changed the name of one of the tests to reflect what the test is actually testing
* Added cypress back to the package.json
* Added informative comments
* Updated comments in files as well as removed imports which were not being used
* Changes made by npm run prettier
* Fixed spelling mistakes
* Updated models.py to remove placeholder comments used in development
* Added feature flag mocking in unit test
* Fixing open api failure
* Fixing business types to pass unit tests
* Reverting unsafe connections back to false
* Removing print statement
* Adding business tpye to export test
* setting default feature flag to false for business type
* Reverting pre commit
* Reverting pre commit and running pre commit
* Reverting pre commit and running pre commit
* Fixing formatting
* Adding license
* Fixing Linting
* Protecting api enpoints
* updating model
* Fixing code path when business type exists
* Linting
* Linting
* Fixing linting
* Fixing spelling
* Fixing schemas
* Fixing app import
* fixing item render
* Added RTL test to make sure business type operator list is updated after API response
* Fixing linting
* fix migration
* Changing unit tests
* Fixing import and DB migration after rebase
* Renaming to advanced types
* Fixing Linting
* More renaming
* Removing uneeded change
* Fixing linting and test errors
* Removing unused imports
* linting
* Adding more detailed name for migration
* Moving files to plugins
* more renaming
* Fixing schema name
* Disabling feature flag that should not be enabled by default
* Adding extra cehck
* NameChange
* formatting
* Fixing equals check
* Moveing all advanced type classes and types to one file, and converting tests to functional
* Adding advanced type to test and fix linitng
Co-authored-by: Ville Brofeldt <ville.v.brofeldt@gmail.com>
Co-authored-by: Dan Parent <daniel.parent@cse-cst.gc.ca>
Co-authored-by: GITHUB_USERNAME <EMAIL>
Co-authored-by: cccs-Dustin <96579982+cccs-Dustin@users.noreply.github.com>
2022-05-16 10:58:21 -04:00
|
|
|
"advanced_data_type": None,
|
2021-01-11 08:57:55 -05:00
|
|
|
"verbose_name": None,
|
2020-10-16 14:10:39 -04:00
|
|
|
},
|
|
|
|
{
|
2021-01-11 08:57:55 -05:00
|
|
|
"column_name": "num_girls",
|
2020-10-16 14:10:39 -04:00
|
|
|
"description": None,
|
2021-01-11 08:57:55 -05:00
|
|
|
"expression": None,
|
|
|
|
"filterable": True,
|
|
|
|
"groupby": True,
|
|
|
|
"is_active": True,
|
|
|
|
"is_dttm": False,
|
2020-10-16 14:10:39 -04:00
|
|
|
"python_date_format": None,
|
2021-01-11 08:57:55 -05:00
|
|
|
"type": big_int_type,
|
feat(business-types): initial implementation of SIP-78 (#18794)
* add BUSINESS_TYPE_ADDONS to config with example callback
* Removing uneeded whitespace
* [Work in progress] Modifying cidr function to allow for single ip and adding port outline
* Added test REST endpoint, added some more ports
I've thrown in a test.py script as well that will try to connect to the
business_type endpoint.
* Moving code from config.py into the business api
Very simple api is exposed that will allow someone to call a checkport
endpoint and get back a response.
* Removing commented out bits.
* Adding fucntion dict back to the config
* Moving business_type endpoint to charts
* Adding schema for get endpoint
* Removing imports, updating docstring, fixing typo
Just some small changes as described in the title. I've updated the
test.py as well so it functions with the endpoint changes.
* Adding translation dict
* Fixing ops
* Adding check for list
* Modifying changes to add quotes where needed
Also changed BusinessTypeResponse to resp.
* Adding in some code to call the filter config
If a column starts with "cidr_" it will call the code in config.py to
try to translate the filter. Nothing is changed in the JSON being
executed, just some information is dumped to console.
* Porting Ryan's changes
* Adding migration script (as per Ryan's PR)
* Fixing typo
* Prettier fixes
* [CLDN-1043] Adding rough version of filter changes for business types
* fix down migration
* Fixing bugs after merge
* adding functionality to appy filters in back end
* Fixing linting issues
* fix down revision
* Changing conversion callback to handle multiple values at once
* Adding string representation of values
* Code cleanup plus fixing debouce to only be called once for each entry
* Removing non needed logginh
* Changing operator list to use sting values
* Using text value operators
* Removing clear operator call
* Moving business type endpoints
* fix down revision
* Adding port functions
* update migration
* fix bad rebase and add ff
* implement validator
* dont add invalid values to response
* [CLDN-1205] Added a new exception type for a business type translation error. Added the error message in the display_value field within the business type response. Modified the IP and Port business types to populate the error message field in the response if an error occurs
* [CLDN-1205] Added meaningful error message for port translation errors
* Removing status field from businesstype Response and adding in error message
* [CLDN-1205] Added check to make sure the port business type is within the valid range of ports, if it is not, it will populate the error message
* [CLDN-1205] Fixed the if statement that checks to see if the string_value is in the valid range of port numbers. It did not corrently verify this before now.
* [CLDN-1205] Fixed an error where it was trying to use string_value in <= statements. I just casted string_value to an integer if it is numeric, which allows <= operators to be used on it
* [CLDN-1207] Added unit tests for the cidr_func and port_translation_func functions which are located in /superset/config.py
* [CLDN-1207] removed the assertRaises line as it does not work with the cidr_func and port_translation_func functions
* [CLDN-1207] Added the skeleton of the test_cidr_translate_filter_func unit test, still need to update what the expected response from the function will be.
* [CLDN-1207] Added the remainder of the back-end unit tests for the business types
* [CLDN-1207] Fixed the syntax error which caused the test_cidr_translate_filter_func_NOT_IN_double unit test to fail
* [CLDN-1207] Removed the logging that was added for debugging purposes
* [CLDN-1207] Formatted the commands_tests.py file to make it nicer to look at/read through
* [CLDN-1207] Fixed the code so that it conformed to the pylint requirements (i.e., pylint no longer complains about the code in commands_tests.py)
* [CLDN-1207] Modified some of the docstrings so they made better use of the 100 character per line, line limit
* [CLDN-1207] Added the beginnings of the unit tests for the
business types API
* [CLDN-1207] Added a comment to the top of the commands_tests.py file explaining how to run the unit tests. This prevents the next person who tries to run them from having to waste time trying the different forms of testing that Superset supports (e.g., pytest, tox, etc.)
* [CLDN-1207] Added a grammar fix to the comments describing how to run the unit tests
* [CLDN-1207] Modified the description of the business_type API endpoints as they did not represent what the API was actually doing
* [CLDN-1207] Added further instructions on how to run the unit tests that are within the business_type/api_tests.py file
* add request validation
* disable request if business type missing
* [CLDN-1207] Unit tests for the business type API are now working, however, they need to be modified to make use of @mock as we don't want to have to run the server to be able to run the unit tests
* Removing businesss types deffinitons from config
* Adding select to only show valid business types
* Fixed Enzyme tests
* Added scalfolding for selecting filter dropdown
* Adding intigration tests
* fix revision
* fix typos and unnecessary requests
* break out useBusinessTypes
* Added front-end RTL unit tests for the business type API endpoint
* Fixed error from unit tests
* Added a unit test to ensure the operator list is updated after a business type API response is received
* Removing elect compoenet for business types
* Adding feature flag and allowing saving when no business type present
* fixing useEffect hooks
* Adding feature flag to model
* Changing behavior such that an empty string returns a default response
* add form validation
* Modified comments in unit test as command to run test has changed
* Modified comments in unit test as filename to run test has changed
* Modified the api_tests.py file to conform to the linting requirements
* Changed the name of one of the tests to reflect what the test is actually testing
* Added cypress back to the package.json
* Added informative comments
* Updated comments in files as well as removed imports which were not being used
* Changes made by npm run prettier
* Fixed spelling mistakes
* Updated models.py to remove placeholder comments used in development
* Added feature flag mocking in unit test
* Fixing open api failure
* Fixing business types to pass unit tests
* Reverting unsafe connections back to false
* Removing print statement
* Adding business tpye to export test
* setting default feature flag to false for business type
* Reverting pre commit
* Reverting pre commit and running pre commit
* Reverting pre commit and running pre commit
* Fixing formatting
* Adding license
* Fixing Linting
* Protecting api enpoints
* updating model
* Fixing code path when business type exists
* Linting
* Linting
* Fixing linting
* Fixing spelling
* Fixing schemas
* Fixing app import
* fixing item render
* Added RTL test to make sure business type operator list is updated after API response
* Fixing linting
* fix migration
* Changing unit tests
* Fixing import and DB migration after rebase
* Renaming to advanced types
* Fixing Linting
* More renaming
* Removing uneeded change
* Fixing linting and test errors
* Removing unused imports
* linting
* Adding more detailed name for migration
* Moving files to plugins
* more renaming
* Fixing schema name
* Disabling feature flag that should not be enabled by default
* Adding extra cehck
* NameChange
* formatting
* Fixing equals check
* Moveing all advanced type classes and types to one file, and converting tests to functional
* Adding advanced type to test and fix linitng
Co-authored-by: Ville Brofeldt <ville.v.brofeldt@gmail.com>
Co-authored-by: Dan Parent <daniel.parent@cse-cst.gc.ca>
Co-authored-by: GITHUB_USERNAME <EMAIL>
Co-authored-by: cccs-Dustin <96579982+cccs-Dustin@users.noreply.github.com>
2022-05-16 10:58:21 -04:00
|
|
|
"advanced_data_type": None,
|
2021-01-11 08:57:55 -05:00
|
|
|
"verbose_name": None,
|
2020-10-16 14:10:39 -04:00
|
|
|
},
|
2021-01-11 08:57:55 -05:00
|
|
|
],
|
|
|
|
"database_uuid": str(db_uuid),
|
|
|
|
"default_endpoint": None,
|
|
|
|
"description": "",
|
|
|
|
"extra": None,
|
2021-03-09 10:27:46 -05:00
|
|
|
"fetch_values_predicate": "123 = 123",
|
2021-01-11 08:57:55 -05:00
|
|
|
"filter_select_enabled": True,
|
|
|
|
"main_dttm_col": "ds",
|
|
|
|
"metrics": [
|
2020-10-16 14:10:39 -04:00
|
|
|
{
|
2021-01-11 08:57:55 -05:00
|
|
|
"d3format": None,
|
2020-10-16 14:10:39 -04:00
|
|
|
"description": None,
|
2021-01-11 08:57:55 -05:00
|
|
|
"expression": "COUNT(*)",
|
|
|
|
"extra": None,
|
|
|
|
"metric_name": "count",
|
|
|
|
"metric_type": "count",
|
|
|
|
"verbose_name": "COUNT(*)",
|
|
|
|
"warning_text": None,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"d3format": None,
|
|
|
|
"description": None,
|
|
|
|
"expression": "SUM(num)",
|
|
|
|
"extra": None,
|
|
|
|
"metric_name": "sum__num",
|
|
|
|
"metric_type": None,
|
|
|
|
"verbose_name": None,
|
|
|
|
"warning_text": None,
|
2020-10-16 14:10:39 -04:00
|
|
|
},
|
|
|
|
],
|
2021-01-11 08:57:55 -05:00
|
|
|
"offset": 0,
|
|
|
|
"params": None,
|
|
|
|
"schema": None,
|
|
|
|
"sql": None,
|
|
|
|
"table_name": "birth_names",
|
|
|
|
"template_params": None,
|
2020-10-16 14:10:39 -04:00
|
|
|
"version": "1.0.0",
|
|
|
|
}
|
2021-01-11 08:57:55 -05:00
|
|
|
expected_metadata["columns"].sort(key=lambda x: x["column_name"])
|
|
|
|
assert metadata == expected_metadata
|
2020-10-16 14:10:39 -04:00
|
|
|
|
|
|
|
@patch("superset.security.manager.g")
|
|
|
|
def test_export_database_command_no_access(self, mock_g):
|
|
|
|
"""Test that users can't export databases they don't have access to"""
|
|
|
|
mock_g.user = security_manager.find_user("gamma")
|
|
|
|
|
|
|
|
example_db = get_example_database()
|
2020-10-30 14:52:11 -04:00
|
|
|
command = ExportDatabasesCommand([example_db.id])
|
2020-10-16 14:10:39 -04:00
|
|
|
contents = command.run()
|
|
|
|
with self.assertRaises(DatabaseNotFoundError):
|
|
|
|
next(contents)
|
|
|
|
|
|
|
|
@patch("superset.security.manager.g")
|
|
|
|
def test_export_database_command_invalid_database(self, mock_g):
|
|
|
|
"""Test that an error is raised when exporting an invalid database"""
|
|
|
|
mock_g.user = security_manager.find_user("admin")
|
2020-10-30 14:52:11 -04:00
|
|
|
command = ExportDatabasesCommand([-1])
|
2020-10-16 14:10:39 -04:00
|
|
|
contents = command.run()
|
|
|
|
with self.assertRaises(DatabaseNotFoundError):
|
|
|
|
next(contents)
|
|
|
|
|
|
|
|
@patch("superset.security.manager.g")
|
|
|
|
def test_export_database_command_key_order(self, mock_g):
|
|
|
|
"""Test that they keys in the YAML have the same order as export_fields"""
|
|
|
|
mock_g.user = security_manager.find_user("admin")
|
|
|
|
|
|
|
|
example_db = get_example_database()
|
2020-10-30 14:52:11 -04:00
|
|
|
command = ExportDatabasesCommand([example_db.id])
|
2020-10-16 14:10:39 -04:00
|
|
|
contents = dict(command.run())
|
|
|
|
|
|
|
|
metadata = yaml.safe_load(contents["databases/examples.yaml"])
|
|
|
|
assert list(metadata.keys()) == [
|
|
|
|
"database_name",
|
|
|
|
"sqlalchemy_uri",
|
|
|
|
"cache_timeout",
|
|
|
|
"expose_in_sqllab",
|
|
|
|
"allow_run_async",
|
|
|
|
"allow_ctas",
|
|
|
|
"allow_cvas",
|
2023-01-24 22:16:46 -05:00
|
|
|
"allow_dml",
|
2021-11-03 14:25:30 -04:00
|
|
|
"allow_csv_upload",
|
2020-10-16 14:10:39 -04:00
|
|
|
"extra",
|
|
|
|
"uuid",
|
|
|
|
"version",
|
|
|
|
]
|
2020-11-16 20:11:20 -05:00
|
|
|
|
2022-03-16 19:03:06 -04:00
|
|
|
@patch("superset.security.manager.g")
|
|
|
|
@pytest.mark.usefixtures(
|
|
|
|
"load_birth_names_dashboard_with_slices", "load_energy_table_with_slice"
|
|
|
|
)
|
|
|
|
def test_export_database_command_no_related(self, mock_g):
|
|
|
|
"""
|
|
|
|
Test that only databases are exported when export_related=False.
|
|
|
|
"""
|
|
|
|
mock_g.user = security_manager.find_user("admin")
|
|
|
|
|
|
|
|
example_db = get_example_database()
|
|
|
|
db_uuid = example_db.uuid
|
|
|
|
|
|
|
|
command = ExportDatabasesCommand([example_db.id], export_related=False)
|
|
|
|
contents = dict(command.run())
|
|
|
|
prefixes = {path.split("/")[0] for path in contents}
|
|
|
|
assert "metadata.yaml" in prefixes
|
|
|
|
assert "databases" in prefixes
|
|
|
|
assert "datasets" not in prefixes
|
|
|
|
|
2020-11-23 19:28:26 -05:00
|
|
|
|
|
|
|
class TestImportDatabasesCommand(SupersetTestCase):
|
2020-11-16 20:11:20 -05:00
|
|
|
def test_import_v1_database(self):
|
|
|
|
"""Test that a database can be imported"""
|
|
|
|
contents = {
|
|
|
|
"metadata.yaml": yaml.safe_dump(database_metadata_config),
|
|
|
|
"databases/imported_database.yaml": yaml.safe_dump(database_config),
|
|
|
|
}
|
|
|
|
command = ImportDatabasesCommand(contents)
|
|
|
|
command.run()
|
|
|
|
|
|
|
|
database = (
|
|
|
|
db.session.query(Database).filter_by(uuid=database_config["uuid"]).one()
|
|
|
|
)
|
2021-10-25 06:53:06 -04:00
|
|
|
assert database.allow_file_upload
|
2020-11-16 20:11:20 -05:00
|
|
|
assert database.allow_ctas
|
|
|
|
assert database.allow_cvas
|
2023-01-24 22:16:46 -05:00
|
|
|
assert database.allow_dml
|
2020-11-16 20:11:20 -05:00
|
|
|
assert not database.allow_run_async
|
|
|
|
assert database.cache_timeout is None
|
|
|
|
assert database.database_name == "imported_database"
|
|
|
|
assert database.expose_in_sqllab
|
|
|
|
assert database.extra == "{}"
|
|
|
|
assert database.sqlalchemy_uri == "sqlite:///test.db"
|
|
|
|
|
|
|
|
db.session.delete(database)
|
|
|
|
db.session.commit()
|
|
|
|
|
2021-11-03 14:25:30 -04:00
|
|
|
def test_import_v1_database_broken_csv_fields(self):
|
|
|
|
"""
|
|
|
|
Test that a database can be imported with broken schema.
|
|
|
|
|
|
|
|
https://github.com/apache/superset/pull/16756 renamed some fields, changing
|
|
|
|
the V1 schema. This test ensures that we can import databases that were
|
|
|
|
exported with the broken schema.
|
|
|
|
"""
|
|
|
|
broken_config = database_config.copy()
|
|
|
|
broken_config["allow_file_upload"] = broken_config.pop("allow_csv_upload")
|
|
|
|
broken_config["extra"] = {"schemas_allowed_for_file_upload": ["upload"]}
|
|
|
|
|
|
|
|
contents = {
|
|
|
|
"metadata.yaml": yaml.safe_dump(database_metadata_config),
|
|
|
|
"databases/imported_database.yaml": yaml.safe_dump(broken_config),
|
|
|
|
}
|
|
|
|
command = ImportDatabasesCommand(contents)
|
|
|
|
command.run()
|
|
|
|
|
|
|
|
database = (
|
|
|
|
db.session.query(Database).filter_by(uuid=database_config["uuid"]).one()
|
|
|
|
)
|
|
|
|
assert database.allow_file_upload
|
|
|
|
assert database.allow_ctas
|
|
|
|
assert database.allow_cvas
|
2023-01-24 22:16:46 -05:00
|
|
|
assert database.allow_dml
|
2021-11-03 14:25:30 -04:00
|
|
|
assert not database.allow_run_async
|
|
|
|
assert database.cache_timeout is None
|
|
|
|
assert database.database_name == "imported_database"
|
|
|
|
assert database.expose_in_sqllab
|
|
|
|
assert database.extra == '{"schemas_allowed_for_file_upload": ["upload"]}'
|
|
|
|
assert database.sqlalchemy_uri == "sqlite:///test.db"
|
|
|
|
|
|
|
|
db.session.delete(database)
|
|
|
|
db.session.commit()
|
|
|
|
|
2020-11-16 20:11:20 -05:00
|
|
|
def test_import_v1_database_multiple(self):
|
|
|
|
"""Test that a database can be imported multiple times"""
|
|
|
|
num_databases = db.session.query(Database).count()
|
|
|
|
|
|
|
|
contents = {
|
|
|
|
"databases/imported_database.yaml": yaml.safe_dump(database_config),
|
|
|
|
"metadata.yaml": yaml.safe_dump(database_metadata_config),
|
|
|
|
}
|
2020-12-10 17:50:10 -05:00
|
|
|
command = ImportDatabasesCommand(contents, overwrite=True)
|
2020-11-16 20:11:20 -05:00
|
|
|
|
|
|
|
# import twice
|
|
|
|
command.run()
|
|
|
|
command.run()
|
|
|
|
|
|
|
|
database = (
|
|
|
|
db.session.query(Database).filter_by(uuid=database_config["uuid"]).one()
|
|
|
|
)
|
2021-10-25 06:53:06 -04:00
|
|
|
assert database.allow_file_upload
|
2020-11-16 20:11:20 -05:00
|
|
|
|
2021-10-25 06:53:06 -04:00
|
|
|
# update allow_file_upload to False
|
2020-11-16 20:11:20 -05:00
|
|
|
new_config = database_config.copy()
|
2021-11-03 14:25:30 -04:00
|
|
|
new_config["allow_csv_upload"] = False
|
2020-11-16 20:11:20 -05:00
|
|
|
contents = {
|
|
|
|
"databases/imported_database.yaml": yaml.safe_dump(new_config),
|
|
|
|
"metadata.yaml": yaml.safe_dump(database_metadata_config),
|
|
|
|
}
|
2020-12-10 17:50:10 -05:00
|
|
|
command = ImportDatabasesCommand(contents, overwrite=True)
|
2020-11-16 20:11:20 -05:00
|
|
|
command.run()
|
|
|
|
|
|
|
|
database = (
|
|
|
|
db.session.query(Database).filter_by(uuid=database_config["uuid"]).one()
|
|
|
|
)
|
2021-10-25 06:53:06 -04:00
|
|
|
assert not database.allow_file_upload
|
2020-11-16 20:11:20 -05:00
|
|
|
|
|
|
|
# test that only one database was created
|
|
|
|
new_num_databases = db.session.query(Database).count()
|
|
|
|
assert new_num_databases == num_databases + 1
|
|
|
|
|
|
|
|
db.session.delete(database)
|
|
|
|
db.session.commit()
|
|
|
|
|
|
|
|
def test_import_v1_database_with_dataset(self):
|
|
|
|
"""Test that a database can be imported with datasets"""
|
|
|
|
contents = {
|
|
|
|
"databases/imported_database.yaml": yaml.safe_dump(database_config),
|
|
|
|
"datasets/imported_dataset.yaml": yaml.safe_dump(dataset_config),
|
|
|
|
"metadata.yaml": yaml.safe_dump(database_metadata_config),
|
|
|
|
}
|
|
|
|
command = ImportDatabasesCommand(contents)
|
|
|
|
command.run()
|
|
|
|
|
|
|
|
database = (
|
|
|
|
db.session.query(Database).filter_by(uuid=database_config["uuid"]).one()
|
|
|
|
)
|
|
|
|
assert len(database.tables) == 1
|
|
|
|
assert str(database.tables[0].uuid) == "10808100-158b-42c4-842e-f32b99d88dfb"
|
|
|
|
|
|
|
|
db.session.delete(database.tables[0])
|
|
|
|
db.session.delete(database)
|
|
|
|
db.session.commit()
|
|
|
|
|
|
|
|
def test_import_v1_database_with_dataset_multiple(self):
|
|
|
|
"""Test that a database can be imported multiple times w/o changing datasets"""
|
|
|
|
contents = {
|
|
|
|
"databases/imported_database.yaml": yaml.safe_dump(database_config),
|
|
|
|
"datasets/imported_dataset.yaml": yaml.safe_dump(dataset_config),
|
|
|
|
"metadata.yaml": yaml.safe_dump(database_metadata_config),
|
|
|
|
}
|
|
|
|
command = ImportDatabasesCommand(contents)
|
|
|
|
command.run()
|
|
|
|
|
|
|
|
dataset = (
|
|
|
|
db.session.query(SqlaTable).filter_by(uuid=dataset_config["uuid"]).one()
|
|
|
|
)
|
|
|
|
assert dataset.offset == 66
|
|
|
|
|
|
|
|
new_config = dataset_config.copy()
|
|
|
|
new_config["offset"] = 67
|
|
|
|
contents = {
|
|
|
|
"databases/imported_database.yaml": yaml.safe_dump(database_config),
|
|
|
|
"datasets/imported_dataset.yaml": yaml.safe_dump(new_config),
|
|
|
|
"metadata.yaml": yaml.safe_dump(database_metadata_config),
|
|
|
|
}
|
2020-12-10 17:50:10 -05:00
|
|
|
command = ImportDatabasesCommand(contents, overwrite=True)
|
2020-11-16 20:11:20 -05:00
|
|
|
command.run()
|
|
|
|
|
|
|
|
# the underlying dataset should not be modified by the second import, since
|
|
|
|
# we're importing a database, not a dataset
|
|
|
|
dataset = (
|
|
|
|
db.session.query(SqlaTable).filter_by(uuid=dataset_config["uuid"]).one()
|
|
|
|
)
|
|
|
|
assert dataset.offset == 66
|
|
|
|
|
|
|
|
db.session.delete(dataset)
|
|
|
|
db.session.delete(dataset.database)
|
|
|
|
db.session.commit()
|
|
|
|
|
|
|
|
def test_import_v1_database_validation(self):
|
|
|
|
"""Test different validations applied when importing a database"""
|
|
|
|
# metadata.yaml must be present
|
|
|
|
contents = {
|
|
|
|
"databases/imported_database.yaml": yaml.safe_dump(database_config),
|
|
|
|
}
|
|
|
|
command = ImportDatabasesCommand(contents)
|
|
|
|
with pytest.raises(IncorrectVersionError) as excinfo:
|
|
|
|
command.run()
|
|
|
|
assert str(excinfo.value) == "Missing metadata.yaml"
|
|
|
|
|
|
|
|
# version should be 1.0.0
|
|
|
|
contents["metadata.yaml"] = yaml.safe_dump(
|
|
|
|
{
|
|
|
|
"version": "2.0.0",
|
|
|
|
"type": "Database",
|
|
|
|
"timestamp": "2020-11-04T21:27:44.423819+00:00",
|
|
|
|
}
|
|
|
|
)
|
|
|
|
command = ImportDatabasesCommand(contents)
|
|
|
|
with pytest.raises(IncorrectVersionError) as excinfo:
|
|
|
|
command.run()
|
|
|
|
assert str(excinfo.value) == "Must be equal to 1.0.0."
|
|
|
|
|
|
|
|
# type should be Database
|
|
|
|
contents["metadata.yaml"] = yaml.safe_dump(dataset_metadata_config)
|
|
|
|
command = ImportDatabasesCommand(contents)
|
|
|
|
with pytest.raises(CommandInvalidError) as excinfo:
|
|
|
|
command.run()
|
|
|
|
assert str(excinfo.value) == "Error importing database"
|
|
|
|
assert excinfo.value.normalized_messages() == {
|
2020-11-17 17:49:33 -05:00
|
|
|
"metadata.yaml": {"type": ["Must be equal to Database."]}
|
2020-11-16 20:11:20 -05:00
|
|
|
}
|
|
|
|
|
|
|
|
# must also validate datasets
|
|
|
|
broken_config = dataset_config.copy()
|
|
|
|
del broken_config["table_name"]
|
|
|
|
contents["metadata.yaml"] = yaml.safe_dump(database_metadata_config)
|
|
|
|
contents["datasets/imported_dataset.yaml"] = yaml.safe_dump(broken_config)
|
|
|
|
command = ImportDatabasesCommand(contents)
|
|
|
|
with pytest.raises(CommandInvalidError) as excinfo:
|
|
|
|
command.run()
|
|
|
|
assert str(excinfo.value) == "Error importing database"
|
|
|
|
assert excinfo.value.normalized_messages() == {
|
|
|
|
"datasets/imported_dataset.yaml": {
|
|
|
|
"table_name": ["Missing data for required field."],
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-12-07 14:22:45 -05:00
|
|
|
def test_import_v1_database_masked_password(self):
|
|
|
|
"""Test that database imports with masked passwords are rejected"""
|
|
|
|
masked_database_config = database_config.copy()
|
|
|
|
masked_database_config[
|
|
|
|
"sqlalchemy_uri"
|
|
|
|
] = "postgresql://username:XXXXXXXXXX@host:12345/db"
|
|
|
|
contents = {
|
|
|
|
"metadata.yaml": yaml.safe_dump(database_metadata_config),
|
|
|
|
"databases/imported_database.yaml": yaml.safe_dump(masked_database_config),
|
|
|
|
}
|
|
|
|
command = ImportDatabasesCommand(contents)
|
|
|
|
with pytest.raises(CommandInvalidError) as excinfo:
|
|
|
|
command.run()
|
|
|
|
assert str(excinfo.value) == "Error importing database"
|
|
|
|
assert excinfo.value.normalized_messages() == {
|
|
|
|
"databases/imported_database.yaml": {
|
|
|
|
"_schema": ["Must provide a password for the database"]
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-02-24 12:36:21 -05:00
|
|
|
@mock.patch("superset.databases.schemas.is_feature_enabled")
|
|
|
|
def test_import_v1_database_masked_ssh_tunnel_password(
|
|
|
|
self, mock_schema_is_feature_enabled
|
|
|
|
):
|
|
|
|
"""Test that database imports with masked ssh_tunnel passwords are rejected"""
|
|
|
|
mock_schema_is_feature_enabled.return_value = True
|
|
|
|
masked_database_config = database_with_ssh_tunnel_config_password.copy()
|
|
|
|
contents = {
|
|
|
|
"metadata.yaml": yaml.safe_dump(database_metadata_config),
|
|
|
|
"databases/imported_database.yaml": yaml.safe_dump(masked_database_config),
|
|
|
|
}
|
|
|
|
command = ImportDatabasesCommand(contents)
|
|
|
|
with pytest.raises(CommandInvalidError) as excinfo:
|
|
|
|
command.run()
|
|
|
|
assert str(excinfo.value) == "Error importing database"
|
|
|
|
assert excinfo.value.normalized_messages() == {
|
|
|
|
"databases/imported_database.yaml": {
|
|
|
|
"_schema": ["Must provide a password for the ssh tunnel"]
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
@mock.patch("superset.databases.schemas.is_feature_enabled")
|
|
|
|
def test_import_v1_database_masked_ssh_tunnel_private_key_and_password(
|
|
|
|
self, mock_schema_is_feature_enabled
|
|
|
|
):
|
|
|
|
"""Test that database imports with masked ssh_tunnel private_key and private_key_password are rejected"""
|
|
|
|
mock_schema_is_feature_enabled.return_value = True
|
|
|
|
masked_database_config = database_with_ssh_tunnel_config_private_key.copy()
|
|
|
|
contents = {
|
|
|
|
"metadata.yaml": yaml.safe_dump(database_metadata_config),
|
|
|
|
"databases/imported_database.yaml": yaml.safe_dump(masked_database_config),
|
|
|
|
}
|
|
|
|
command = ImportDatabasesCommand(contents)
|
|
|
|
with pytest.raises(CommandInvalidError) as excinfo:
|
|
|
|
command.run()
|
|
|
|
assert str(excinfo.value) == "Error importing database"
|
|
|
|
assert excinfo.value.normalized_messages() == {
|
|
|
|
"databases/imported_database.yaml": {
|
|
|
|
"_schema": [
|
|
|
|
"Must provide a private key for the ssh tunnel",
|
|
|
|
"Must provide a private key password for the ssh tunnel",
|
|
|
|
]
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
@mock.patch("superset.databases.schemas.is_feature_enabled")
|
|
|
|
def test_import_v1_database_with_ssh_tunnel_password(
|
|
|
|
self, mock_schema_is_feature_enabled
|
|
|
|
):
|
|
|
|
"""Test that a database with ssh_tunnel password can be imported"""
|
|
|
|
mock_schema_is_feature_enabled.return_value = True
|
|
|
|
masked_database_config = database_with_ssh_tunnel_config_password.copy()
|
|
|
|
masked_database_config["ssh_tunnel"]["password"] = "TEST"
|
|
|
|
contents = {
|
|
|
|
"metadata.yaml": yaml.safe_dump(database_metadata_config),
|
|
|
|
"databases/imported_database.yaml": yaml.safe_dump(masked_database_config),
|
|
|
|
}
|
|
|
|
command = ImportDatabasesCommand(contents)
|
|
|
|
command.run()
|
|
|
|
|
|
|
|
database = (
|
|
|
|
db.session.query(Database).filter_by(uuid=database_config["uuid"]).one()
|
|
|
|
)
|
|
|
|
assert database.allow_file_upload
|
|
|
|
assert database.allow_ctas
|
|
|
|
assert database.allow_cvas
|
|
|
|
assert database.allow_dml
|
|
|
|
assert not database.allow_run_async
|
|
|
|
assert database.cache_timeout is None
|
|
|
|
assert database.database_name == "imported_database"
|
|
|
|
assert database.expose_in_sqllab
|
|
|
|
assert database.extra == "{}"
|
|
|
|
assert database.sqlalchemy_uri == "sqlite:///test.db"
|
|
|
|
|
|
|
|
model_ssh_tunnel = (
|
|
|
|
db.session.query(SSHTunnel)
|
|
|
|
.filter(SSHTunnel.database_id == database.id)
|
|
|
|
.one()
|
|
|
|
)
|
|
|
|
self.assertEqual(model_ssh_tunnel.password, "TEST")
|
|
|
|
|
|
|
|
db.session.delete(database)
|
|
|
|
db.session.commit()
|
|
|
|
|
|
|
|
@mock.patch("superset.databases.schemas.is_feature_enabled")
def test_import_v1_database_with_ssh_tunnel_private_key_and_password(
    self, mock_schema_is_feature_enabled
):
    """Test that a database with ssh_tunnel private_key and private_key_password can be imported"""
    from copy import deepcopy

    mock_schema_is_feature_enabled.return_value = True
    # Deep-copy the shared fixture: ``dict.copy()`` is shallow, so the two
    # nested "ssh_tunnel" mutations below would leak into the module-level
    # config and pollute other tests that reuse it.
    masked_database_config = deepcopy(database_with_ssh_tunnel_config_private_key)
    masked_database_config["ssh_tunnel"]["private_key"] = "TestPrivateKey"
    masked_database_config["ssh_tunnel"]["private_key_password"] = "TEST"
    contents = {
        "metadata.yaml": yaml.safe_dump(database_metadata_config),
        "databases/imported_database.yaml": yaml.safe_dump(masked_database_config),
    }
    command = ImportDatabasesCommand(contents)
    command.run()

    database = (
        db.session.query(Database).filter_by(uuid=database_config["uuid"]).one()
    )
    assert database.allow_file_upload
    assert database.allow_ctas
    assert database.allow_cvas
    assert database.allow_dml
    assert not database.allow_run_async
    assert database.cache_timeout is None
    assert database.database_name == "imported_database"
    assert database.expose_in_sqllab
    assert database.extra == "{}"
    assert database.sqlalchemy_uri == "sqlite:///test.db"

    model_ssh_tunnel = (
        db.session.query(SSHTunnel)
        .filter(SSHTunnel.database_id == database.id)
        .one()
    )
    # Plain asserts for consistency with the pytest-style checks above
    # (the original mixed ``self.assertEqual`` into an assert-based test).
    assert model_ssh_tunnel.private_key == "TestPrivateKey"
    assert model_ssh_tunnel.private_key_password == "TEST"

    db.session.delete(database)
    db.session.commit()
|
|
|
|
|
|
|
|
@mock.patch("superset.databases.schemas.is_feature_enabled")
def test_import_v1_database_masked_ssh_tunnel_no_credentials(
    self, mock_schema_is_feature_enabled
):
    """Test that databases with ssh_tunnels that have no credentials are rejected"""
    mock_schema_is_feature_enabled.return_value = True
    # Fixture ships an ssh_tunnel section with no credential fields at all.
    config = database_with_ssh_tunnel_config_no_credentials.copy()
    contents = {
        "metadata.yaml": yaml.safe_dump(database_metadata_config),
        "databases/imported_database.yaml": yaml.safe_dump(config),
    }
    with pytest.raises(CommandInvalidError) as excinfo:
        ImportDatabasesCommand(contents).run()
    assert str(excinfo.value) == "Must provide credentials for the SSH Tunnel"
|
|
|
|
|
|
|
|
@mock.patch("superset.databases.schemas.is_feature_enabled")
def test_import_v1_database_masked_ssh_tunnel_multiple_credentials(
    self, mock_schema_is_feature_enabled
):
    """Test that databases with ssh_tunnels that have multiple credentials are rejected"""
    mock_schema_is_feature_enabled.return_value = True
    # Fixture mixes password- and key-based credentials in one tunnel config.
    config = database_with_ssh_tunnel_config_mix_credentials.copy()
    contents = {
        "metadata.yaml": yaml.safe_dump(database_metadata_config),
        "databases/imported_database.yaml": yaml.safe_dump(config),
    }
    with pytest.raises(CommandInvalidError) as excinfo:
        ImportDatabasesCommand(contents).run()
    expected = "Cannot have multiple credentials for the SSH Tunnel"
    assert str(excinfo.value) == expected
|
|
|
|
|
|
|
|
@mock.patch("superset.databases.schemas.is_feature_enabled")
def test_import_v1_database_masked_ssh_tunnel_only_priv_key_psswd(
    self, mock_schema_is_feature_enabled
):
    """Test that a database whose ssh_tunnel has only a private key password
    (and no private key) is rejected with per-file schema errors.

    NOTE(review): the original docstring said "multiple credentials are
    rejected" — copy-pasted from the previous test; this test actually
    covers the private-key-password-only case.
    """
    mock_schema_is_feature_enabled.return_value = True
    # Fixture provides private_key_password without the private key itself.
    masked_database_config = (
        database_with_ssh_tunnel_config_private_pass_only.copy()
    )
    contents = {
        "metadata.yaml": yaml.safe_dump(database_metadata_config),
        "databases/imported_database.yaml": yaml.safe_dump(masked_database_config),
    }
    command = ImportDatabasesCommand(contents)
    with pytest.raises(CommandInvalidError) as excinfo:
        command.run()
    # Top-level error plus the field-level validation messages for the file.
    assert str(excinfo.value) == "Error importing database"
    assert excinfo.value.normalized_messages() == {
        "databases/imported_database.yaml": {
            "_schema": [
                "Must provide a private key for the ssh tunnel",
                "Must provide a private key password for the ssh tunnel",
            ]
        }
    }
|
|
|
|
|
2020-11-16 20:11:20 -05:00
|
|
|
@patch("superset.databases.commands.importers.v1.import_dataset")
def test_import_v1_rollback(self, mock_import_dataset):
    """Test that on an exception everything is rolled back"""
    count_before = db.session.query(Database).count()

    # Make the dataset import blow up after the database itself has already
    # been imported, so the command's rollback path is exercised.
    mock_import_dataset.side_effect = Exception("A wild exception appears!")

    contents = {
        "databases/imported_database.yaml": yaml.safe_dump(database_config),
        "datasets/imported_dataset.yaml": yaml.safe_dump(dataset_config),
        "metadata.yaml": yaml.safe_dump(database_metadata_config),
    }
    with pytest.raises(Exception) as excinfo:
        ImportDatabasesCommand(contents).run()
    assert str(excinfo.value) == "Import database failed for an unknown reason"

    # verify that the database was not added
    assert db.session.query(Database).count() == count_before
|
2021-03-09 08:17:13 -05:00
|
|
|
|
|
|
|
|
|
|
|
class TestTestConnectionDatabaseCommand(SupersetTestCase):
    @mock.patch("superset.databases.dao.Database._get_sqla_engine")
    @mock.patch(
        "superset.databases.commands.test_connection.event_logger.log_with_context"
    )
    @mock.patch("superset.utils.core.g")
    def test_connection_db_exception(
        self, mock_g, mock_event_logger, mock_get_sqla_engine
    ):
        """Test to make sure event_logger is called when an exception is raised"""
        database = get_example_database()
        mock_g.user = security_manager.find_user("admin")
        mock_get_sqla_engine.side_effect = Exception("An error has occurred!")
        payload = {"sqlalchemy_uri": database.sqlalchemy_uri_decrypted}
        command = TestConnectionDatabaseCommand(payload)

        with pytest.raises(DatabaseTestConnectionUnexpectedError) as excinfo:
            command.run()
        assert str(excinfo.value) == (
            "Unexpected error occurred, please check your logs for details"
        )
        mock_event_logger.assert_called()

    @mock.patch("superset.databases.dao.Database._get_sqla_engine")
    @mock.patch(
        "superset.databases.commands.test_connection.event_logger.log_with_context"
    )
    @mock.patch("superset.utils.core.g")
    def test_connection_do_ping_exception(
        self, mock_g, mock_event_logger, mock_get_sqla_engine
    ):
        """Test to make sure do_ping exceptions gets captured"""
        database = get_example_database()
        mock_g.user = security_manager.find_user("admin")
        # Fail inside the dialect's ping rather than at engine creation.
        mock_get_sqla_engine.return_value.dialect.do_ping.side_effect = Exception(
            "An error has occurred!"
        )
        payload = {"sqlalchemy_uri": database.sqlalchemy_uri_decrypted}
        command = TestConnectionDatabaseCommand(payload)

        with pytest.raises(SupersetErrorsException) as excinfo:
            command.run()
        first_error = excinfo.value.errors[0]
        assert first_error.error_type == SupersetErrorType.GENERIC_DB_ENGINE_ERROR

    @mock.patch("superset.databases.commands.test_connection.func_timeout")
    @mock.patch(
        "superset.databases.commands.test_connection.event_logger.log_with_context"
    )
    @mock.patch("superset.utils.core.g")
    def test_connection_do_ping_timeout(
        self, mock_g, mock_event_logger, mock_func_timeout
    ):
        """Test to make sure do_ping exceptions gets captured"""
        database = get_example_database()
        mock_g.user = security_manager.find_user("admin")
        # Simulate the ping exceeding the configured timeout.
        mock_func_timeout.side_effect = FunctionTimedOut("Time out")
        payload = {"sqlalchemy_uri": database.sqlalchemy_uri_decrypted}
        command = TestConnectionDatabaseCommand(payload)

        with pytest.raises(SupersetTimeoutException) as excinfo:
            command.run()
        assert excinfo.value.status == 408
        timeout_error = excinfo.value.error
        assert (
            timeout_error.error_type
            == SupersetErrorType.CONNECTION_DATABASE_TIMEOUT
        )

    @mock.patch("superset.databases.dao.Database._get_sqla_engine")
    @mock.patch(
        "superset.databases.commands.test_connection.event_logger.log_with_context"
    )
    @mock.patch("superset.utils.core.g")
    def test_connection_superset_security_connection(
        self, mock_g, mock_event_logger, mock_get_sqla_engine
    ):
        """Test to make sure event_logger is called when security
        connection exc is raised"""
        database = get_example_database()
        mock_g.user = security_manager.find_user("admin")
        mock_get_sqla_engine.side_effect = SupersetSecurityException(
            SupersetError(error_type=500, message="test", level="info")
        )
        payload = {"sqlalchemy_uri": database.sqlalchemy_uri_decrypted}
        command = TestConnectionDatabaseCommand(payload)

        with pytest.raises(DatabaseSecurityUnsafeError) as excinfo:
            command.run()
        assert str(excinfo.value) == "Stopped an unsafe database connection"

        mock_event_logger.assert_called()

    @mock.patch("superset.databases.dao.Database._get_sqla_engine")
    @mock.patch(
        "superset.databases.commands.test_connection.event_logger.log_with_context"
    )
    @mock.patch("superset.utils.core.g")
    def test_connection_db_api_exc(
        self, mock_g, mock_event_logger, mock_get_sqla_engine
    ):
        """Test to make sure event_logger is called when DBAPIError is raised"""
        database = get_example_database()
        mock_g.user = security_manager.find_user("admin")
        mock_get_sqla_engine.side_effect = DBAPIError(
            statement="error", params={}, orig={}
        )
        payload = {"sqlalchemy_uri": database.sqlalchemy_uri_decrypted}
        command = TestConnectionDatabaseCommand(payload)

        with pytest.raises(SupersetErrorsException) as excinfo:
            command.run()
        assert str(excinfo.value) == (
            "Connection failed, please check your connection settings"
        )

        mock_event_logger.assert_called()
|
2021-05-12 21:32:10 -04:00
|
|
|
|
|
|
|
|
|
|
|
@mock.patch("superset.db_engine_specs.base.is_hostname_valid")
@mock.patch("superset.db_engine_specs.base.is_port_open")
@mock.patch("superset.databases.commands.validate.DatabaseDAO")
def test_validate(DatabaseDAO, is_port_open, is_hostname_valid, app_context):
    """
    Test parameter validation.
    """
    # Pretend the host resolves and the port is reachable.
    is_hostname_valid.return_value = True
    is_port_open.return_value = True

    properties = {
        "engine": "postgresql",
        "parameters": {
            "host": "localhost",
            "port": 5432,
            "username": "superset",
            "password": "superset",
            "database": "test",
            "query": {},
        },
    }
    # A fully-specified payload must validate without raising.
    ValidateDatabaseParametersCommand(properties).run()
|
|
|
|
|
|
|
|
|
|
|
|
@mock.patch("superset.db_engine_specs.base.is_hostname_valid")
@mock.patch("superset.db_engine_specs.base.is_port_open")
def test_validate_partial(is_port_open, is_hostname_valid, app_context):
    """
    Test parameter validation when only some parameters are present.
    """
    is_hostname_valid.return_value = True
    is_port_open.return_value = True

    properties = {
        "engine": "postgresql",
        "parameters": {
            "host": "localhost",
            "port": 5432,
            "username": "",
            "password": "superset",
            "database": "test",
            "query": {},
        },
    }
    with pytest.raises(SupersetErrorsException) as excinfo:
        ValidateDatabaseParametersCommand(properties).run()

    # The blank username must be reported as a missing parameter.
    expected_errors = [
        SupersetError(
            message="One or more parameters are missing: username",
            error_type=SupersetErrorType.CONNECTION_MISSING_PARAMETERS_ERROR,
            level=ErrorLevel.WARNING,
            extra={
                "missing": ["username"],
                "issue_codes": [
                    {
                        "code": 1018,
                        "message": "Issue 1018 - One or more parameters needed to configure a database are missing.",
                    }
                ],
            },
        )
    ]
    assert excinfo.value.errors == expected_errors
|
|
|
|
|
|
|
|
|
|
|
|
@mock.patch("superset.db_engine_specs.base.is_hostname_valid")
def test_validate_partial_invalid_hostname(is_hostname_valid, app_context):
    """
    Test parameter validation when only some parameters are present.
    """
    # The host does not resolve; port/username/database are also missing.
    is_hostname_valid.return_value = False

    properties = {
        "engine": "postgresql",
        "parameters": {
            "host": "localhost",
            "port": None,
            "username": "",
            "password": "",
            "database": "",
            "query": {},
        },
    }
    with pytest.raises(SupersetErrorsException) as excinfo:
        ValidateDatabaseParametersCommand(properties).run()

    # Both the missing-parameters and the unresolvable-hostname errors
    # are expected, in this order.
    expected_errors = [
        SupersetError(
            message="One or more parameters are missing: database, port, username",
            error_type=SupersetErrorType.CONNECTION_MISSING_PARAMETERS_ERROR,
            level=ErrorLevel.WARNING,
            extra={
                "missing": ["database", "port", "username"],
                "issue_codes": [
                    {
                        "code": 1018,
                        "message": "Issue 1018 - One or more parameters needed to configure a database are missing.",
                    }
                ],
            },
        ),
        SupersetError(
            message="The hostname provided can't be resolved.",
            error_type=SupersetErrorType.CONNECTION_INVALID_HOSTNAME_ERROR,
            level=ErrorLevel.ERROR,
            extra={
                "invalid": ["host"],
                "issue_codes": [
                    {
                        "code": 1007,
                        "message": "Issue 1007 - The hostname provided can't be resolved.",
                    }
                ],
            },
        ),
    ]
    assert excinfo.value.errors == expected_errors
|
2023-02-01 07:45:57 -05:00
|
|
|
|
|
|
|
|
|
|
|
class TestTablesDatabaseCommand(SupersetTestCase):
|
|
|
|
@mock.patch("superset.databases.dao.DatabaseDAO.find_by_id")
def test_database_tables_list_with_unknown_database(self, mock_find_by_id):
    """An unknown database id must raise DatabaseNotFoundError."""
    mock_find_by_id.return_value = None

    with pytest.raises(DatabaseNotFoundError) as excinfo:
        TablesDatabaseCommand(1, "test", False).run()
    assert str(excinfo.value) == "Database not found."
|
|
|
|
|
|
|
|
@mock.patch("superset.databases.dao.DatabaseDAO.find_by_id")
@mock.patch("superset.security.manager.SupersetSecurityManager.can_access_database")
@mock.patch("superset.utils.core.g")
def test_database_tables_superset_exception(
    self, mock_g, mock_can_access_database, mock_find_by_id
):
    """A SupersetException from the security check propagates unchanged."""
    database = get_example_database()
    # This scenario is not exercised against MySQL.
    if database.backend == "mysql":
        return

    mock_find_by_id.return_value = database
    mock_can_access_database.side_effect = SupersetException("Test Error")
    mock_g.user = security_manager.find_user("admin")

    with pytest.raises(SupersetException) as excinfo:
        TablesDatabaseCommand(database.id, "main", False).run()
    assert str(excinfo.value) == "Test Error"
|
|
|
|
|
|
|
|
@mock.patch("superset.databases.dao.DatabaseDAO.find_by_id")
@mock.patch("superset.security.manager.SupersetSecurityManager.can_access_database")
@mock.patch("superset.utils.core.g")
def test_database_tables_exception(
    self, mock_g, mock_can_access_database, mock_find_by_id
):
    """A generic exception is wrapped in DatabaseTablesUnexpectedError."""
    database = get_example_database()
    mock_find_by_id.return_value = database
    mock_can_access_database.side_effect = Exception("Test Error")
    mock_g.user = security_manager.find_user("admin")

    with pytest.raises(DatabaseTablesUnexpectedError) as excinfo:
        TablesDatabaseCommand(database.id, "main", False).run()
    expected = "Unexpected error occurred, please check your logs for details"
    assert str(excinfo.value) == expected
|
|
|
|
|
|
|
|
@mock.patch("superset.databases.dao.DatabaseDAO.find_by_id")
@mock.patch("superset.security.manager.SupersetSecurityManager.can_access_database")
@mock.patch("superset.utils.core.g")
def test_database_tables_list_tables(
    self, mock_g, mock_can_access_database, mock_find_by_id
):
    """Listing tables returns a non-empty result with a matching count."""
    database = get_example_database()
    mock_find_by_id.return_value = database
    mock_can_access_database.return_value = True
    mock_g.user = security_manager.find_user("admin")

    schema_name = self.default_schema_backend_map[database.backend]
    # Skip for the backends this scenario is not exercised against.
    if database.backend in ("postgresql", "mysql"):
        return

    payload = TablesDatabaseCommand(database.id, schema_name, False).run()

    assert payload["count"] > 0
    assert len(payload["result"]) > 0
    assert len(payload["result"]) == payload["count"]
|