diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index 25a814f751..5f465d2648 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -8,5 +8,5 @@ contact_links: url: https://github.com/apache/superset/discussions/new?category=q-a-help about: Open a community Q&A thread on GitHub Discussions - name: Slack - url: bit.ly/join-superset-slack - about: Join the Superset Community on Slack for other discussions/assistance + url: https://bit.ly/join-superset-slack + about: Join the Superset Community on Slack for other discussions and assistance diff --git a/.github/ISSUE_TEMPLATE/sip.md b/.github/ISSUE_TEMPLATE/sip.md index 8261b0f881..d0ca3ef1d9 100644 --- a/.github/ISSUE_TEMPLATE/sip.md +++ b/.github/ISSUE_TEMPLATE/sip.md @@ -1,13 +1,13 @@ --- name: SIP -about: "Superset Improvement Proposal. See https://github.com/apache/superset/issues/5602 for details. The purpose of a Superset Improvement Proposal (SIP) is to introduce any major change into Apache Superset, such as a major new feature, subsystem, or piece of functionality, or any change that impacts the public interfaces of the project" +about: "Superset Improvement Proposal. See SIP-0 (https://github.com/apache/superset/issues/5602) for details. A SIP introduces any major change into Apache Superset's code or process." labels: sip title: "[SIP] Your Title Here (do not add SIP number)" assignees: "apache/superset-committers" --- *Please make sure you are familiar with the SIP process documented* -(here)[https://github.com/apache/superset/issues/5602]. The SIP will be numbered by a committer upon acceptance. +[here](https://github.com/apache/superset/issues/5602). The SIP will be numbered by a committer upon acceptance. ## [SIP] Proposal for ... diff --git a/.github/SECURITY.md b/.github/SECURITY.md index f35b9c48f0..086ff8c0ca 100644 --- a/.github/SECURITY.md +++ b/.github/SECURITY.md @@ -12,8 +12,8 @@ Apache Software Foundation takes a rigorous standpoint in annihilating the secur in its software projects. Apache Superset is highly sensitive and forthcoming to issues pertaining to its features and functionality. If you have any concern or believe you have found a vulnerability in Apache Superset, -please get in touch with the Apache Security Team privately at -e-mail address [security@apache.org](mailto:security@apache.org). +please get in touch with the Apache Superset Security Team privately at +e-mail address [security@superset.apache.org](mailto:security@superset.apache.org). More details can be found on the ASF website at [ASF vulnerability reporting process](https://apache.org/security/#reporting-a-vulnerability) diff --git a/CHANGELOG.md b/CHANGELOG.md index 170824f6f2..ecd236cb0d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -19,8 +19,10 @@ under the License. ## Change Log +- [3.0.2](#302-mon-nov-20-073838-2023--0500) - [3.0.1](#301-tue-oct-13-103221-2023--0700) - [3.0.0](#300-thu-aug-24-133627-2023--0600) +- [2.1.2](#212-wed-oct-18-165930-2023--0700) - [2.1.1](#211-sun-apr-23-154421-2023-0100) - [2.1.0](#210-thu-mar-16-211305-2023--0700) - [2.0.1](#201-fri-nov-4-103402-2022--0400) @@ -32,6 +34,59 @@ under the License. 
 - [1.4.2](#142-sat-mar-19-000806-2022-0200)
 - [1.4.1](#141)
 
+### 3.0.2 (Mon Nov 20 07:38:38 2023 -0500)
+
+**Fixes**
+
+- [#26037](https://github.com/apache/superset/pull/26037) fix: update FAB to 4.3.10, Azure user info fix (@dpgaspar)
+- [#25901](https://github.com/apache/superset/pull/25901) fix(native filters): rendering performance improvement by reducing overrendering (@justinpark)
+- [#25985](https://github.com/apache/superset/pull/25985) fix(explore): redundant force param (@justinpark)
+- [#25993](https://github.com/apache/superset/pull/25993) fix: Make Select component fire onChange listener when a selection is pasted in (@jfrag1)
+- [#25997](https://github.com/apache/superset/pull/25997) fix(rls): Update text from tables to datasets in RLS modal (@yousoph)
+- [#25703](https://github.com/apache/superset/pull/25703) fix(helm): Restart all related deployments when bootstrap script changed (@josedev-union)
+- [#25973](https://github.com/apache/superset/pull/25973) fix: naming denomalized to denormalized in helpers.py (@hughhhh)
+- [#25919](https://github.com/apache/superset/pull/25919) fix: always denorm column value before querying values (@hughhhh)
+- [#25947](https://github.com/apache/superset/pull/25947) fix: update flask-caching to avoid breaking redis cache, solves #25339 (@ggbaro)
+- [#25903](https://github.com/apache/superset/pull/25903) fix(sqllab): invalid sanitization on comparison symbol (@justinpark)
+- [#25857](https://github.com/apache/superset/pull/25857) fix(table): Double percenting ad-hoc percentage metrics (@john-bodley)
+- [#25872](https://github.com/apache/superset/pull/25872) fix(trino): allow impersonate_user flag to be imported (@FGrobelny)
+- [#25897](https://github.com/apache/superset/pull/25897) fix: trino cursor (@betodealmeida)
+- [#25898](https://github.com/apache/superset/pull/25898) fix: database version field (@betodealmeida)
+- [#25877](https://github.com/apache/superset/pull/25877) fix: Saving Mixed Chart with dashboard filter applied breaks adhoc_filter_b (@kgabryje)
+- [#25842](https://github.com/apache/superset/pull/25842) fix(charts): Time grain is None when dataset uses Jinja (@Antonio-RiveroMartnez)
+- [#25843](https://github.com/apache/superset/pull/25843) fix: remove `update_charts_owners` (@betodealmeida)
+- [#25707](https://github.com/apache/superset/pull/25707) fix(table chart): Show Cell Bars correctly #25625 (@SA-Ark)
+- [#25429](https://github.com/apache/superset/pull/25429) fix: the temporal x-axis results in a none time_range.
(@mapledan) +- [#25853](https://github.com/apache/superset/pull/25853) fix: Fires onChange when clearing all values of single select (@michael-s-molina) +- [#25814](https://github.com/apache/superset/pull/25814) fix(sqllab): infinite fetching status after results are landed (@justinpark) +- [#25768](https://github.com/apache/superset/pull/25768) fix(SQL field in edit dataset modal): display full sql query (@rtexelm) +- [#25804](https://github.com/apache/superset/pull/25804) fix: Resolve issue #24195 (@john-bodley) +- [#25801](https://github.com/apache/superset/pull/25801) fix: Revert "fix: Apply normalization to all dttm columns (#25147)" (@john-bodley) +- [#25779](https://github.com/apache/superset/pull/25779) fix: DB-specific quoting in Jinja macro (@betodealmeida) +- [#25640](https://github.com/apache/superset/pull/25640) fix: allow for backward compatible errors (@eschutho) +- [#25741](https://github.com/apache/superset/pull/25741) fix(sqllab): slow pop datasource query (@justinpark) +- [#25756](https://github.com/apache/superset/pull/25756) fix: dataset update uniqueness (@betodealmeida) +- [#25753](https://github.com/apache/superset/pull/25753) fix: Revert "fix(Charts): Set max row limit + removed the option to use an empty row limit value" (@geido) +- [#25732](https://github.com/apache/superset/pull/25732) fix(horizontal filter label): show full tooltip with ellipsis (@rtexelm) +- [#25712](https://github.com/apache/superset/pull/25712) fix: bump to FAB 4.3.9 remove CSP exception (@dpgaspar) +- [#24709](https://github.com/apache/superset/pull/24709) fix(chore): dashboard requests to database equal the number of slices it has (@Always-prog) +- [#25679](https://github.com/apache/superset/pull/25679) fix: remove unnecessary redirect (@Khrol) +- [#25680](https://github.com/apache/superset/pull/25680) fix(sqllab): reinstate "Force trino client async execution" (@giftig) +- [#25657](https://github.com/apache/superset/pull/25657) fix(dremio): Fixes issue with Dremio SQL generation for Charts with Series Limit (@OskarNS) +- [#23638](https://github.com/apache/superset/pull/23638) fix: warning of nth-child (@justinpark) +- [#25658](https://github.com/apache/superset/pull/25658) fix: improve upload ZIP file validation (@dpgaspar) +- [#25495](https://github.com/apache/superset/pull/25495) fix(header navlinks): link navlinks to path prefix (@fisjac) +- [#25112](https://github.com/apache/superset/pull/25112) fix: permalink save/overwrites in explore (@hughhhh) +- [#25493](https://github.com/apache/superset/pull/25493) fix(import): Make sure query context is overwritten for overwriting imports (@jfrag1) +- [#25553](https://github.com/apache/superset/pull/25553) fix: avoid 500 errors with SQLLAB_BACKEND_PERSISTENCE (@Khrol) +- [#25626](https://github.com/apache/superset/pull/25626) fix(sqllab): template validation error within comments (@justinpark) +- [#25523](https://github.com/apache/superset/pull/25523) fix(sqllab): Mistitled for new tab after rename (@justinpark) + +**Others** + +- [#25995](https://github.com/apache/superset/pull/25995) chore: Optimize fetching samples logic (@john-bodley) +- [#23619](https://github.com/apache/superset/pull/23619) chore(colors): Updating Airbnb brand colors (@john-bodley) + ### 3.0.1 (Tue Oct 13 10:32:21 2023 -0700) **Database Migrations** @@ -849,6 +904,24 @@ under the License. 
- [#23158](https://github.com/apache/superset/pull/23158) chore: Bump cryptography to 39.0.1 (@EugeneTorap) - [#23108](https://github.com/apache/superset/pull/23108) chore: Remove yarn.lock from the root folder (@EugeneTorap) +### 2.1.2 (Wed Oct 18 16:59:30 2023 -0700) +**Database Migrations** + +**Features** + +**Fixes** +- [#25150](https://github.com/apache/superset/pull/25150) fix: Chart series limit doesn't work for some databases (@KSPT-taylorjohn) +- [#25014](https://github.com/apache/superset/pull/25014) fix: CTE queries with non-SELECT statements (@dpgaspar) +- [#24849](https://github.com/apache/superset/pull/24849) fix: validation errors appearing after ssh tunnel switch (@hughhhh) +- [#24196](https://github.com/apache/superset/pull/24196) fix: SSH Tunnel creation with dynamic form (@hughhhh) +- [#24821](https://github.com/apache/superset/pull/24821) fix: Allow chart import to update the dataset an existing chart points to (@jfrag1) +- [#24317](https://github.com/apache/superset/pull/24317) fix: update order of build for testing a release (@eschutho) + +**Others** +- [#24826](https://github.com/apache/superset/pull/24826) chore: remove CssTemplate and Annotation access from gamma role (@lilykuang) +- [#23680](https://github.com/apache/superset/pull/23680) chore: bump wtforms and add missing flask-limiter (@dpgaspar) +- [#24758](https://github.com/apache/superset/pull/24758) chore(view_api): return application/json as content-type for api/v1/form_data endpoint (@zephyring) + ### 2.1.1 (Sun Apr 23 15:44:21 2023 +0100) **Database Migrations** diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index d9e480ee95..a955f123db 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -180,6 +180,51 @@ See [Translating](#translating) for more details. There is a dedicated [`apache-superset` tag](https://stackoverflow.com/questions/tagged/apache-superset) on [StackOverflow](https://stackoverflow.com/). Please use it when asking questions. +## Types of Contributors + +Following the project governance model of the Apache Software Foundation (ASF), Apache Superset has a specific set of contributor roles: + +### PMC Member + +A Project Management Committee (PMC) member is a person who has been elected by the PMC to help manage the project. PMC members are responsible for the overall health of the project, including community development, release management, and project governance. PMC members are also responsible for the technical direction of the project. + +For more information about Apache Project PMCs, please refer to https://www.apache.org/foundation/governance/pmcs.html + +### Committer + +A committer is a person who has been elected by the PMC to have write access (commit access) to the code repository. They can modify the code, documentation, and website and accept contributions from others. + +The official list of committers and PMC members can be found [here](https://projects.apache.org/committee.html?superset). + +### Contributor + +A contributor is a person who has contributed to the project in any way, including but not limited to code, tests, documentation, issues, and discussions. + +> You can also review the Superset project's guidelines for PMC member promotion here: https://github.com/apache/superset/wiki/Guidelines-for-promoting-Superset-Committers-to-the-Superset-PMC + +### Security Team + +The security team is a selected subset of PMC members, committers and non-committers who are responsible for handling security issues. 
+
+New members of the security team are selected by the PMC members in a vote. You can request to be added to the team by sending a message to private@superset.apache.org. Requests are evaluated on a case-by-case basis: the team is deliberately kept small and focused on solving security issues, so membership is limited to actively security-focused contributors.
+
+This security team must follow the [ASF vulnerability handling process](https://apache.org/security/committers.html#asf-project-security-for-committers).
+
+Each new security issue is tracked as a JIRA ticket on the [ASF's JIRA Superset security project](https://issues.apache.org/jira/secure/RapidBoard.jspa?rapidView=588&projectKey=SUPERSETSEC).
+
+Security team members must:
+
+- Have an [ICLA](https://www.apache.org/licenses/contributor-agreements.html) signed with the Apache Software Foundation.
+- Not reveal information about pending and unfixed security issues to anyone (including their employers) unless specifically authorized by the security team, e.g., if the team agrees that diagnosing and solving an issue requires the involvement of external experts.
+
+A release manager, the contributor overseeing the release of a specific version of Apache Superset, is by default a member of the security team. However, they are not expected to be active in assessing, discussing, and fixing security issues.
+
+Security team members should also follow these general expectations:
+
+- Actively participate in assessing, discussing, fixing, and releasing security issues in Superset.
+- Avoid discussing security fixes in public forums. Pull request (PR) descriptions should not contain any information about security issues. The corresponding JIRA ticket should contain a link to the PR.
+- Security team members who contribute to a fix may be listed as remediation developers in the CVE report, along with their job affiliation (if they choose to include it).
+
 ## Pull Request Guidelines
 
 A philosophy we would like to strongly encourage is
@@ -424,7 +469,7 @@ Commits to `master` trigger a rebuild and redeploy of the documentation site. Su
 
 Make sure your machine meets the [OS dependencies](https://superset.apache.org/docs/installation/installing-superset-from-scratch#os-dependencies) before following these steps. You also need to install MySQL or [MariaDB](https://mariadb.com/downloads).
 
-Ensure that you are using Python version 3.8, 3.9, 3.10 or 3.11, then proceed with:
+Ensure that you are using Python version 3.9, 3.10 or 3.11, then proceed with:
 
 ```bash
 # Create a virtual environment and activate it (recommended)
@@ -610,6 +655,31 @@ Then put this:
 
 export NODE_OPTIONS=--no-experimental-fetch
 ```
+
+If, while using the above commands, you encounter an error related to the limit of file watchers:
+
+```bash
+Error: ENOSPC: System limit for number of file watchers reached
+```
+
+The error is thrown because the number of files monitored by the system has reached its limit.
+You can address this error by increasing the number of inotify watchers.
+
+The current value of max watches can be checked with:
+
+```bash
+cat /proc/sys/fs/inotify/max_user_watches
+```
+
+Edit the file /etc/sysctl.conf to increase this value.
+The value should be chosen based on the system's available memory [(see this StackOverflow answer for more context)](https://stackoverflow.com/questions/535768/what-is-a-reasonable-amount-of-inotify-watches-with-linux).
+
+Open the file in an editor and add a line at the bottom specifying the new max watches value:
+
+```bash
+fs.inotify.max_user_watches=524288
+```
+
+Save the file and exit the editor.
+To confirm that the change succeeded, run the following command to load the updated value of `max_user_watches` from sysctl.conf:
+
+```bash
+sudo sysctl -p
+```
 
 #### Webpack dev server
 
 The dev server by default starts at `http://localhost:9000` and proxies the backend requests to `http://localhost:8088`.
diff --git a/Dockerfile b/Dockerfile
index b9714d6c69..fc3e667037 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -61,9 +61,7 @@ ENV LANG=C.UTF-8 \
     SUPERSET_HOME="/app/superset_home" \
     SUPERSET_PORT=8088
 
-RUN --mount=target=/var/lib/apt/lists,type=cache \
-    --mount=target=/var/cache/apt,type=cache \
-    mkdir -p ${PYTHONPATH} superset/static superset-frontend apache_superset.egg-info requirements \
+RUN mkdir -p ${PYTHONPATH} superset/static superset-frontend apache_superset.egg-info requirements \
     && useradd --user-group -d ${SUPERSET_HOME} -m --no-log-init --shell /bin/bash superset \
     && apt-get update -qq && apt-get install -yqq --no-install-recommends \
       build-essential \
@@ -75,7 +73,8 @@ RUN --mount=target=/var/lib/apt/lists,type=cache \
       libecpg-dev \
       libldap2-dev \
     && touch superset/static/version_info.json \
-    && chown -R superset:superset ./*
+    && chown -R superset:superset ./* \
+    && rm -rf /var/lib/apt/lists/*
 
 COPY --chown=superset:superset setup.py MANIFEST.in README.md ./
 # setup.py uses the version information in package.json
@@ -112,9 +111,8 @@ ARG GECKODRIVER_VERSION=v0.33.0 \
 
 USER root
 
-RUN --mount=target=/var/lib/apt/lists,type=cache \
-    --mount=target=/var/cache/apt,type=cache \
-    apt-get install -yqq --no-install-recommends \
+RUN apt-get update -qq \
+    && apt-get install -yqq --no-install-recommends \
       libnss3 \
       libdbus-glib-1-2 \
       libgtk-3-0 \
@@ -127,7 +125,7 @@ RUN --mount=target=/var/lib/apt/lists,type=cache \
     # Install Firefox
     && wget -q https://download-installer.cdn.mozilla.net/pub/firefox/releases/${FIREFOX_VERSION}/linux-x86_64/en-US/firefox-${FIREFOX_VERSION}.tar.bz2 -O - | tar xfj - -C /opt \
     && ln -s /opt/firefox/firefox /usr/local/bin/firefox \
-    && apt-get autoremove -yqq --purge wget && rm -rf /var/[log,tmp]/* /tmp/*
+    && apt-get autoremove -yqq --purge wget && rm -rf /var/[log,tmp]/* /tmp/* /var/lib/apt/lists/*
 
 # Cache everything for dev purposes...
 RUN --mount=type=bind,target=./requirements/base.txt,src=./requirements/base.txt \
     --mount=type=bind,target=./requirements/docker.txt,src=./requirements/docker.txt \
diff --git a/README.md b/README.md
index 757c0fb503..3588d99419 100644
--- a/README.md
+++ b/README.md
@@ -130,6 +130,7 @@ Here are some of the major database solutions that are supported:
   <img src="superset-frontend/src/assets/images/yugabyte.png" alt="yugabyte" border="0" width="200" height="80"/>
   <img src="superset-frontend/src/assets/images/databend.png" alt="databend" border="0" width="200" height="80"/>
   <img src="superset-frontend/src/assets/images/starrocks.png" alt="starrocks" border="0" width="200" height="80"/>
+  <img src="superset-frontend/src/assets/images/doris.png" alt="doris" border="0" width="200" height="80"/>
 </p>
 
 **A more comprehensive list of supported databases** along with the configuration instructions can be found [here](https://superset.apache.org/docs/databases/installing-database-drivers).
diff --git a/RELEASING/README.md b/RELEASING/README.md
index 8b23dafbf1..b007a89170 100644
--- a/RELEASING/README.md
+++ b/RELEASING/README.md
@@ -30,6 +30,7 @@ partaking in the process should join the channel.
 
 ## Release notes for recent releases
 
+- [3.1](release-notes-3-1/README.md)
 - [2.0](release-notes-2-0/README.md)
 - [1.5](release-notes-1-5/README.md)
 - [1.4](release-notes-1-4/README.md)
diff --git a/RELEASING/email_templates/announce.j2 b/RELEASING/email_templates/announce.j2
index 4eb89701be..5e2318f792 100644
--- a/RELEASING/email_templates/announce.j2
+++ b/RELEASING/email_templates/announce.j2
@@ -35,6 +35,12 @@ The PyPI package:
 
 https://pypi.org/project/apache-superset/
 
+The Change Log for the release:
+https://github.com/apache/{{ project_module }}/blob/{{ version }}/CHANGELOG.md
+
+The Updating instructions for the release:
+https://github.com/apache/{{ project_module }}/blob/{{ version }}/UPDATING.md
+
 If you have any usage questions or have problems when upgrading
 or find any issues with enhancements included in this release,
 please don't hesitate to let us know by sending feedback to this
 mailing
diff --git a/RELEASING/release-notes-3-1/README.md b/RELEASING/release-notes-3-1/README.md
new file mode 100644
index 0000000000..97635139b1
--- /dev/null
+++ b/RELEASING/release-notes-3-1/README.md
@@ -0,0 +1,166 @@
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied. See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
+# Release Notes for Superset 3.1.0
+
+Superset 3.1.0 brings a range of new features and quality-of-life improvements. This is a minor release, meaning it includes no breaking changes, ensuring a seamless transition for our users. Here are some of the highlights.
+
+### Waterfall chart
+
+The new [Waterfall chart](https://github.com/apache/superset/pull/25557) visualization provides a visual representation of how a value changes over time or across different categories. It is very helpful for showing the cumulative effect of positive and negative changes from a starting value. Superset's Waterfall chart supports breakdowns, which can be used to analyze the contribution of different dimensions or factors to a specific metric. By breaking down the data into various categories or dimensions, you can identify the individual components that contribute to the overall variation or change in the metric.
+
+The chart example below displays the total sales grouped by year and broken down by product line.
+
+![Waterfall](media/waterfall_chart.png)
+
+### Bubble Chart ECharts version
+
+The new ECharts [Bubble chart](https://github.com/apache/superset/pull/22107) offers feature parity with the previous NVD3 version, which is slated for removal in the next major release. This work is part of the [ECharts migration effort](https://github.com/apache/superset/issues/10418) to increase the consistency and quality of our plugins. We'll add a migration to the new plugin soon, which you'll be able to execute using the new CLI command.
+
+![Bubble](media/bubble_chart.png)
+
+### Improved Dataset selectors
+
+The [dataset selectors](https://github.com/apache/superset/pull/25569) have been improved to also display the database and schema names, which helps users locate the correct dataset, particularly when multiple tables or datasets share the same name and need disambiguation.
+
+![Dataset](media/dataset_selector.png)
+
+### SQL Lab improvements
+
+SQL Lab received many user experience and performance improvements in this release. We’ll continue to improve the capabilities of SQL Lab with feedback from the community.
+
+Now users can [automatically format](https://github.com/apache/superset/pull/25344) their SQL queries using the `Ctrl+Shift+F` shortcut or the Format SQL menu option available in the SQL configuration panel. Another improvement is that the results panel now shows the [executed query](https://github.com/apache/superset/pull/24787), which is very helpful when your SQL Lab editor has multiple queries.
+
+![SQL Formatting](media/sql_formatting.png)
+
+In the SQL panel configurations, there's a menu option to show the [keyboard shortcuts](https://github.com/apache/superset/pull/25542) a user has access to.
+
+![Keyboard Shortcuts](media/keyboard_shortcuts.png)
+
+SQL Lab has launched a non-blocking persistence mode, as outlined in [SIP-93](https://github.com/apache/superset/issues/21385). This enhancement ensures that your SQL editor content is preserved even if your internet or service goes offline. Moreover, it improves user interaction by saving changes in a non-blocking way, similar to how Google Docs does.
+
+Finally, the [SQL Lab module was moved to the Single Page Application](https://github.com/apache/superset/pull/25151) context. This means that both navigation and loading times of that module are significantly faster than in previous versions (particularly when navigating to and from this page from other pages in Superset). This also reduces the number of requests to the server and pays down some of our technical debt. Try it out! The difference is quite impressive!
+
+### Country Map improvements
+
+The Country Map visualization received some improvements in this release. The community added [France's regions](https://github.com/apache/superset/pull/25676) in addition to its departments, and also many [Central Asian countries](https://github.com/apache/superset/pull/24870).
+
+<table>
+  <tr>
+    <td width="33%">France's regions</td>
+    <td width="33%">Kazakhstan</td>
+    <td width="33%">Kyrgyzstan</td>
+  </tr>
+  <tr>
+    <td width="33%"><img src="media/france.png" width="100%"/></td>
+    <td width="33%"><img src="media/kazakhstan.png" width="100%"></td>
+    <td width="33%"><img src="media/kyrgyzstan.png" width="100%"></td>
+  </tr>
+  <tr>
+    <td width="33%">Tajikistan</td>
+    <td width="33%">Turkmenistan</td>
+    <td width="33%">Uzbekistan</td>
+  </tr>
+  <tr>
+    <td width="33%"><img src="media/tajikistan.png" width="100%"/></td>
+    <td width="33%"><img src="media/turkmenistan.png" width="100%"></td>
+    <td width="33%"><img src="media/uzbekistan.png" width="100%"></td>
+  </tr>
+</table>
+
+### Deck.gl ContourLayer
+
+We [added](https://github.com/apache/superset/pull/24154) the Deck.gl [ContourLayer](https://deck.gl/docs/api-reference/aggregation-layers/contour-layer), which aggregates data into isolines or isobands for a given threshold and cell size. By expanding the range of available [Deck.gl](https://deck.gl/) visualization layers, users will have more options to choose from when creating their visualizations. This will allow them to tailor their visualizations to their specific needs and explore their data in different ways.
+
+![Contour](media/contour.png)
+
+### New Databases
+
+Superset has added support for two new databases:
+
+- [Databend](https://databend.rs/), an open-source, elastic, and workload-aware cloud data warehouse built in Rust. You can see the PR [here](https://github.com/apache/superset/pull/23308), and the updated documentation [here](https://superset.apache.org/docs/databases/databend).
+- [Apache Doris](https://doris.apache.org/), which is based on the MySQL protocol and introduces the concept of Multi Catalog. You can see the PR [here](https://github.com/apache/superset/pull/24714/) and the updated documentation [here](https://superset.apache.org/docs/databases/doris).
+
+<table>
+  <tr>
+    <td width="50%"><img src="media/databend.png" width="100%"/></td>
+    <td width="50%"><img src="media/doris.png" width="100%"></td>
+  </tr>
+</table>
+
+### CLI command to execute viz migrations
+
+A new [CLI command](https://github.com/apache/superset/pull/25304) called viz-migrations was added to allow users to migrate charts of a specific type. This command is particularly helpful for migrating visualizations to their latest version while disabling their legacy versions via the `VIZ_TYPE_DENYLIST` configuration. The main advantage of this command is that you can migrate your visualizations without needing to wait for a major release, where we generally remove the legacy plugins.
+
+Currently, you can use the command to migrate Area, Bubble, Line, and Sunburst chart types, but we'll add more as the ECharts migrations continue. Note that migrations for deprecated charts may be forced in upcoming major versions when the code is removed. Running migrations earlier will allow you to de-risk future upgrades while improving user experience.
+
+```bash
+Usage: superset viz-migrations [OPTIONS] COMMAND [ARGS]...
+
+  Migrates a viz from one type to another.
+
+Commands:
+  downgrade  Downgrades a viz to the previous version.
+  upgrade    Upgrade a viz to the latest version.
+```
+
+Note: When migrating dashboards from one Superset instance to another (using import/export features or the Superset CLI), or restoring a backup of prior charts and dashboards, Superset will apply the existing migrations that are used during version upgrades. This will ensure that your charts and dashboards are using the latest and greatest chart plugins that Superset officially supports.
+
+### Database engine spec improvements
+
+Many database engine improvements were added in this release. Some highlights:
+
+- [feat: improve SQLite DB engine spec](https://github.com/apache/superset/pull/24909)
+- [feat: add MotherDuck DB engine spec](https://github.com/apache/superset/pull/24934)
+- [feat: Add week time grain for Elasticsearch datasets](https://github.com/apache/superset/pull/25683)
+- [feat: method for dynamic allows_alias_in_select](https://github.com/apache/superset/pull/25882)
+
+We even added a new [CLI command](https://github.com/apache/superset/pull/24918) to test DB engine specs, SQLAlchemy dialects, and database connections.
+
+```bash
+Usage: superset test-db [OPTIONS] SQLALCHEMY_URI
+
+  Run a series of tests against an analytical database.
+
+  This command tests:
+    1. The Superset DB engine spec.
+    2. The SQLAlchemy dialect.
+    3. The database connectivity and performance.
+
+  It's useful for people developing DB engine specs and/or SQLAlchemy
+  dialects, and also to test new versions of DB API 2.0 drivers.
+
+Options:
+  -c, --connect-args TEXT  Connect args as JSON or YAML
+  --help                   Show this message and exit.
+```
+
+### Playwright as an alternative to Selenium
+
+Per [SIP-98](https://github.com/apache/superset/issues/24948), we [introduced Playwright](https://github.com/apache/superset/pull/25247) for rendering charts in Superset reports. [Playwright](https://playwright.dev/) is an open-source library for automating web browsers, similar to Selenium but with better support for modern browser features and improved performance. By using Playwright, we aim to provide a more stable and accurate chart rendering experience in Superset reports, especially for [Deck.gl](https://deck.gl/) charts.
+
+Since configuring Playwright requires installing additional dependencies, the new flow is gated behind a feature flag called `PLAYWRIGHT_REPORTS_AND_THUMBNAILS` to prevent breaking changes in existing deployments. Users who don't enable the feature flag will be unaffected by the changes.
+
+### Pandas upgraded to v2
+
+We [upgraded Pandas to v2](https://github.com/apache/superset/pull/24705) and [added performance dependencies](https://github.com/apache/superset/pull/24768) to provide speed improvements, especially when working with large data sets. For the full list of changes, check the [Pandas 2.0.0 Release Notes](https://pandas.pydata.org/docs/dev/whatsnew/v2.0.0.html).
+
+### Tags
+
+Tags have evolved a lot since 3.0, with many PRs further improving the feature. During this phase, the community also made [great suggestions](https://github.com/apache/superset/discussions/25918) to make sure the feature is scalable, adheres to our security model, and offers a consistent design. We're still working on this feedback, and new improvements will follow. For that reason, we're keeping the feature as beta behind the `TAGGING_SYSTEM` feature flag.
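+
+The beta capabilities mentioned above are opt-in via Superset's `FEATURE_FLAGS` configuration. A minimal sketch (the flag names are the ones discussed in this document; where your `superset_config.py` lives depends on your deployment):
+
+```python
+# superset_config.py -- a sketch of opting into the beta features discussed above.
+# Both flags are off by default, so existing deployments are unaffected.
+FEATURE_FLAGS = {
+    "PLAYWRIGHT_REPORTS_AND_THUMBNAILS": True,  # Playwright-based report rendering (SIP-98)
+    "TAGGING_SYSTEM": True,  # tagging, still in beta for 3.1
+}
+```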
diff --git a/RELEASING/release-notes-3-1/media/bubble_chart.png b/RELEASING/release-notes-3-1/media/bubble_chart.png new file mode 100644 index 0000000000..505913ed2c Binary files /dev/null and b/RELEASING/release-notes-3-1/media/bubble_chart.png differ diff --git a/RELEASING/release-notes-3-1/media/contour.png b/RELEASING/release-notes-3-1/media/contour.png new file mode 100644 index 0000000000..16a16d7b24 Binary files /dev/null and b/RELEASING/release-notes-3-1/media/contour.png differ diff --git a/RELEASING/release-notes-3-1/media/databend.png b/RELEASING/release-notes-3-1/media/databend.png new file mode 100644 index 0000000000..60ae9ea8e2 Binary files /dev/null and b/RELEASING/release-notes-3-1/media/databend.png differ diff --git a/RELEASING/release-notes-3-1/media/dataset_selector.png b/RELEASING/release-notes-3-1/media/dataset_selector.png new file mode 100644 index 0000000000..d18c3315be Binary files /dev/null and b/RELEASING/release-notes-3-1/media/dataset_selector.png differ diff --git a/RELEASING/release-notes-3-1/media/doris.png b/RELEASING/release-notes-3-1/media/doris.png new file mode 100644 index 0000000000..f3d2fc40dc Binary files /dev/null and b/RELEASING/release-notes-3-1/media/doris.png differ diff --git a/RELEASING/release-notes-3-1/media/france.png b/RELEASING/release-notes-3-1/media/france.png new file mode 100644 index 0000000000..8deed333a3 Binary files /dev/null and b/RELEASING/release-notes-3-1/media/france.png differ diff --git a/RELEASING/release-notes-3-1/media/kazakhstan.png b/RELEASING/release-notes-3-1/media/kazakhstan.png new file mode 100644 index 0000000000..a73c3efa88 Binary files /dev/null and b/RELEASING/release-notes-3-1/media/kazakhstan.png differ diff --git a/RELEASING/release-notes-3-1/media/keyboard_shortcuts.png b/RELEASING/release-notes-3-1/media/keyboard_shortcuts.png new file mode 100644 index 0000000000..60f147d11e Binary files /dev/null and b/RELEASING/release-notes-3-1/media/keyboard_shortcuts.png differ diff --git a/RELEASING/release-notes-3-1/media/kyrgyzstan.png b/RELEASING/release-notes-3-1/media/kyrgyzstan.png new file mode 100644 index 0000000000..13a791c3ef Binary files /dev/null and b/RELEASING/release-notes-3-1/media/kyrgyzstan.png differ diff --git a/RELEASING/release-notes-3-1/media/sql_formatting.png b/RELEASING/release-notes-3-1/media/sql_formatting.png new file mode 100644 index 0000000000..a4a4e57fca Binary files /dev/null and b/RELEASING/release-notes-3-1/media/sql_formatting.png differ diff --git a/RELEASING/release-notes-3-1/media/tajikistan.png b/RELEASING/release-notes-3-1/media/tajikistan.png new file mode 100644 index 0000000000..0114ef9068 Binary files /dev/null and b/RELEASING/release-notes-3-1/media/tajikistan.png differ diff --git a/RELEASING/release-notes-3-1/media/turkmenistan.png b/RELEASING/release-notes-3-1/media/turkmenistan.png new file mode 100644 index 0000000000..b4999d880f Binary files /dev/null and b/RELEASING/release-notes-3-1/media/turkmenistan.png differ diff --git a/RELEASING/release-notes-3-1/media/uzbekistan.png b/RELEASING/release-notes-3-1/media/uzbekistan.png new file mode 100644 index 0000000000..d1c1230eeb Binary files /dev/null and b/RELEASING/release-notes-3-1/media/uzbekistan.png differ diff --git a/RELEASING/release-notes-3-1/media/waterfall_chart.png b/RELEASING/release-notes-3-1/media/waterfall_chart.png new file mode 100644 index 0000000000..0fd61e5175 Binary files /dev/null and b/RELEASING/release-notes-3-1/media/waterfall_chart.png differ diff --git a/RESOURCES/INTHEWILD.md 
b/RESOURCES/INTHEWILD.md index 155cbe83b4..51951fbcc1 100644 --- a/RESOURCES/INTHEWILD.md +++ b/RESOURCES/INTHEWILD.md @@ -111,6 +111,7 @@ Join our growing community! - [Steamroot](https://streamroot.io/) - [TechAudit](https://www.techaudit.info) [@ETselikov] - [Tenable](https://www.tenable.com) [@dflionis] +- [Tentacle](https://public.tentaclecmi.com) [@jdclarke5] - [timbr.ai](https://timbr.ai/) [@semantiDan] - [Tobii](http://www.tobii.com/) [@dwa] - [Tooploox](https://www.tooploox.com/) [@jakubczaplicki] @@ -175,8 +176,10 @@ Join our growing community! - [Automattic](https://automattic.com/) [@Khrol, @Usiel] - [Dropbox](https://www.dropbox.com/) [@bkyryliuk] - [Grassroot](https://www.grassrootinstitute.org/) +- [Increff](https://www.increff.com/) [@ishansinghania] - [komoot](https://www.komoot.com/) [@christophlingg] - [Let's Roam](https://www.letsroam.com/) +- [Onebeat](https://1beat.com/) [@GuyAttia] - [Twitter](https://twitter.com/) - [VLMedia](https://www.vlmedia.com.tr/) [@ibotheperfect] - [Yahoo!](https://yahoo.com/) diff --git a/UPDATING.md b/UPDATING.md index 542938c35b..b5c48924b7 100644 --- a/UPDATING.md +++ b/UPDATING.md @@ -24,6 +24,7 @@ assists people when migrating to a new version. ## Next +- [26034](https://github.com/apache/superset/issues/26034): Fixes a problem where numeric x-axes were being treated as categorical values. As a consequence of that, the way labels are displayed might change given that ECharts has a different treatment for numerical and categorical values. To revert to the old behavior, users need to manually convert numerical columns to text so that they are treated as categories. Check https://github.com/apache/superset/issues/26159 for more details. - [24657](https://github.com/apache/superset/pull/24657): Bumps the cryptography package to augment the OpenSSL security vulnerability. ### Breaking Changes diff --git a/docs/docs/api.mdx b/docs/docs/api.mdx index fb3572eb37..3e9b95f763 100644 --- a/docs/docs/api.mdx +++ b/docs/docs/api.mdx @@ -1,7 +1,7 @@ --- title: API hide_title: true -sidebar_position: 9 +sidebar_position: 10 --- import { Buffer } from 'buffer'; diff --git a/docs/docs/contributing/_category_.json b/docs/docs/contributing/_category_.json index ca96e44a2d..6e98583254 100644 --- a/docs/docs/contributing/_category_.json +++ b/docs/docs/contributing/_category_.json @@ -1,4 +1,4 @@ { "label": "Contributing", - "position": 6 + "position": 7 } diff --git a/docs/docs/databases/_category_.json b/docs/docs/databases/_category_.json index de1c640183..7c9f05f856 100644 --- a/docs/docs/databases/_category_.json +++ b/docs/docs/databases/_category_.json @@ -1,4 +1,4 @@ { "label": "Connecting to Databases", - "position": 3 + "position": 5 } diff --git a/docs/docs/databases/docker-add-drivers.mdx b/docs/docs/databases/docker-add-drivers.mdx index 03a971a979..cb7c550c07 100644 --- a/docs/docs/databases/docker-add-drivers.mdx +++ b/docs/docs/databases/docker-add-drivers.mdx @@ -7,87 +7,57 @@ version: 1 ## Adding New Database Drivers in Docker -Superset requires a Python database driver to be installed for each additional type of database you -want to connect to. When setting up Superset locally via `docker compose`, the drivers and packages -contained in -[requirements.txt](https://github.com/apache/superset/blob/master/requirements.txt) and -[requirements-dev.txt](https://github.com/apache/superset/blob/master/requirements-dev.txt) -will be installed automatically. 
+Superset requires a Python database driver to be installed for each additional type of database you want to connect to.
 
-In this section, we'll walk through how to install the MySQL connector library. The connector
-library installation process is the same for all additional libraries and we'll end this section
-with the recommended connector library for each database.
+In this example, we'll walk through how to install the MySQL connector library. The connector library installation process is the same for all additional libraries.
 
 ### 1. Determine the driver you need
 
-To figure out how to install the [database driver](/docs/databases/installing-database-drivers) of your choice.
+Consult the [list of database drivers](/docs/databases/installing-database-drivers) and find the PyPI package needed to connect to your database. In this example, we're connecting to a MySQL database, so we'll need the `mysqlclient` connector library.
 
-In the example, we'll walk through the process of installing a MySQL driver in Superset.
+### 2. Install the driver in the container
 
-### 2. Install MySQL Driver
+We need to get the `mysqlclient` library installed into the Superset Docker container (it doesn't matter if it's installed on the host machine). We could enter the running container with `docker exec -it <container_name> bash` and run `pip install mysqlclient` there, but that change wouldn't persist once the container is recreated.
 
-As we are currently running inside of a Docker container via `docker compose`, we cannot simply run
-`pip install mysqlclient` on our local shell and expect the drivers to be installed within the
-Docker containers for superset.
+To address this, the Superset `docker compose` deployment uses the convention of a `requirements-local.txt` file. All packages listed in this file will be installed into the container from PyPI at runtime. This file is ignored by Git for the purposes of local development.
 
-In order to address this, the Superset `docker compose` setup comes with a mechanism for you to
-install packages locally, which will be ignored by Git for the purposes of local development. Please
-follow these steps:
-
-Create `requirements-local.txt`
+Create the file `requirements-local.txt` in the `docker` subdirectory, which sits alongside your `docker-compose.yml` or `docker-compose-non-dev.yml` file:
 
 ```
-# From the repo root...
+# Run from the repo root:
 touch ./docker/requirements-local.txt
 ```
 
-Add the driver selected in step above:
+Add the driver identified in the step above. You can use a text editor or do it from the command line, like so:
 
 ```
 echo "mysqlclient" >> ./docker/requirements-local.txt
 ```
 
-Rebuild your local image with the new driver baked in:
+**If you are running a stock (non-customized) Superset image**, you are done. Launch Superset with `docker compose -f docker-compose-non-dev.yml up` and the driver should be present.
+
+You can check its presence by entering the running container with `docker exec -it <container_name> bash` and running `pip freeze`. The PyPI package should be present in the printed list.
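+
+For example, a one-shot check from the host might look like this (the container name here is an assumption — run `docker ps` to see yours):
+
+```
+docker exec -it superset_app bash -c "pip freeze | grep mysqlclient"
+```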
+
+**If you're running a customized Docker image**, rebuild your local image with the new driver baked in:
 
 ```
 docker compose build --force-rm
 ```
 
-After the rebuild of the Docker images is complete (which may take a few minutes) you can relaunch using the following command:
-
-```
-docker compose up
-```
-
-The other option is to start Superset via Docker Compose is using the recipe in `docker-compose-non-dev.yml`, which will use pre-built frontend assets and skip the building of front-end assets:
-
-```
-docker compose -f docker-compose-non-dev.yml pull
-docker compose -f docker-compose-non-dev.yml up
-```
+After the rebuild of the Docker images is complete, relaunch Superset by running `docker compose up`.
 
 ### 3. Connect to MySQL
 
-Now that you've got a MySQL driver installed locally, you should be able to test it out.
+Now that you've got a MySQL driver installed in your container, you should be able to connect to your database via the Superset web UI.
 
-We can now create a Datasource in Superset that can be used to connect to a MySQL instance. Assuming
-your MySQL instance is running locally and can be accessed via localhost, use the following
-connection string in “SQL Alchemy URI”, by going to Sources > Databases > + icon (to add a new
-datasource) in Superset.
+As an admin user, go to Settings -> Data: Database Connections and click the +DATABASE button. From there, follow the steps on the [Using Database Connection UI page](https://superset.apache.org/docs/databases/db-connection-ui).
 
-For Docker running in Linux:
+Consult the page for your specific database type in the Superset documentation to determine the connection string and any other parameters you need to input. For instance, on the [MySQL page](https://superset.apache.org/docs/databases/mysql), we see that the connection string to a local MySQL database differs depending on whether the setup is running on Linux or Mac.
 
-```
-mysql://mysqluser:mysqluserpassword@localhost/example?charset=utf8
-```
+Click the “Test Connection” button, which should result in a popup message saying, "Connection looks good!".
 
-For Docker running in OSX:
+### 4. Troubleshooting
 
-```
-mysql://mysqluser:mysqluserpassword@docker.for.mac.host.internal/example?charset=utf8
-```
+If the test fails, review your Docker logs for error messages. Superset uses SQLAlchemy to connect to databases; to troubleshoot the connection string for your database, you might start Python in the Superset application container or host environment and try to connect directly to the desired database and fetch data. This takes Superset out of the equation and helps isolate the problem.
 
-Then click “Test Connection”, which should give you an “OK” message. If not, please look at your
-terminal for error messages, and reach out for help.
-
-You can repeat this process for every database you want superset to be able to connect to.
+Repeat this process for each different type of database you want Superset to be able to connect to.
diff --git a/docs/docs/databases/doris.mdx b/docs/docs/databases/doris.mdx
new file mode 100644
index 0000000000..62c16afeb3
--- /dev/null
+++ b/docs/docs/databases/doris.mdx
@@ -0,0 +1,26 @@
+---
+title: Apache Doris
+hide_title: true
+sidebar_position: 5
+version: 1
+---
+
+## Doris
+
+The [sqlalchemy-doris](https://pypi.org/project/pydoris/) library is the recommended way to connect to Apache Doris through SQLAlchemy.
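+
+You can install it from PyPI (shown here for a bare-metal setup; for Docker-based setups, use the `requirements-local.txt` mechanism described in the [Docker driver instructions](/docs/databases/docker-add-drivers)):
+
+```
+pip install pydoris
+```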
+ +You'll need the following setting values to form the connection string: + +- **User**: User Name +- **Password**: Password +- **Host**: Doris FE Host +- **Port**: Doris FE port +- **Catalog**: Catalog Name +- **Database**: Database Name + + +Here's what the connection string looks like: + +``` +doris://<User>:<Password>@<Host>:<Port>/<Catalog>.<Database> +``` diff --git a/docs/docs/databases/installing-database-drivers.mdx b/docs/docs/databases/installing-database-drivers.mdx index b4be939c3b..f11b4ec5eb 100644 --- a/docs/docs/databases/installing-database-drivers.mdx +++ b/docs/docs/databases/installing-database-drivers.mdx @@ -22,47 +22,48 @@ as well as the packages needed to connect to the databases you want to access th Some of the recommended packages are shown below. Please refer to [setup.py](https://github.com/apache/superset/blob/master/setup.py) for the versions that are compatible with Superset. -| Database | PyPI package | Connection String | -| --------------------------------------------------------- | ---------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------- | -| [Amazon Athena](/docs/databases/athena) | `pip install pyathena[pandas]` , `pip install PyAthenaJDBC` | `awsathena+rest://{aws_access_key_id}:{aws_secret_access_key}@athena.{region_name}.amazonaws.com/{ ` | -| [Amazon DynamoDB](/docs/databases/dynamodb) | `pip install pydynamodb` | `dynamodb://{access_key_id}:{secret_access_key}@dynamodb.{region_name}.amazonaws.com?connector=superset` | -| [Amazon Redshift](/docs/databases/redshift) | `pip install sqlalchemy-redshift` | ` redshift+psycopg2://<userName>:<DBPassword>@<AWS End Point>:5439/<Database Name>` | -| [Apache Drill](/docs/databases/drill) | `pip install sqlalchemy-drill` | `drill+sadrill:// For JDBC drill+jdbc://` | -| [Apache Druid](/docs/databases/druid) | `pip install pydruid` | `druid://<User>:<password>@<Host>:<Port-default-9088>/druid/v2/sql` | -| [Apache Hive](/docs/databases/hive) | `pip install pyhive` | `hive://hive@{hostname}:{port}/{database}` | -| [Apache Impala](/docs/databases/impala) | `pip install impyla` | `impala://{hostname}:{port}/{database}` | -| [Apache Kylin](/docs/databases/kylin) | `pip install kylinpy` | `kylin://<username>:<password>@<hostname>:<port>/<project>?<param1>=<value1>&<param2>=<value2>` | -| [Apache Pinot](/docs/databases/pinot) | `pip install pinotdb` | `pinot://BROKER:5436/query?server=http://CONTROLLER:5983/` | -| [Apache Solr](/docs/databases/solr) | `pip install sqlalchemy-solr` | `solr://{username}:{password}@{hostname}:{port}/{server_path}/{collection}` | -| [Apache Spark SQL](/docs/databases/spark-sql) | `pip install pyhive` | `hive://hive@{hostname}:{port}/{database}` | -| [Ascend.io](/docs/databases/ascend) | `pip install impyla` | `ascend://{username}:{password}@{hostname}:{port}/{database}?auth_mechanism=PLAIN;use_ssl=true` | -| [Azure MS SQL](/docs/databases/sql-server) | `pip install pymssql` | `mssql+pymssql://UserName@presetSQL:TestPassword@presetSQL.database.windows.net:1433/TestSchema` | -| [Big Query](/docs/databases/bigquery) | `pip install sqlalchemy-bigquery` | `bigquery://{project_id}` | -| [ClickHouse](/docs/databases/clickhouse) | `pip install clickhouse-connect` | `clickhousedb://{username}:{password}@{hostname}:{port}/{database}` | -| [CockroachDB](/docs/databases/cockroachdb) | `pip install cockroachdb` | 
`cockroachdb://root@{hostname}:{port}/{database}?sslmode=disable` | -| [Dremio](/docs/databases/dremio) | `pip install sqlalchemy_dremio` | `dremio://user:pwd@host:31010/` | -| [Elasticsearch](/docs/databases/elasticsearch) | `pip install elasticsearch-dbapi` | `elasticsearch+http://{user}:{password}@{host}:9200/` | -| [Exasol](/docs/databases/exasol) | `pip install sqlalchemy-exasol` | `exa+pyodbc://{username}:{password}@{hostname}:{port}/my_schema?CONNECTIONLCALL=en_US.UTF-8&driver=EXAODBC` | -| [Google Sheets](/docs/databases/google-sheets) | `pip install shillelagh[gsheetsapi]` | `gsheets://` | -| [Firebolt](/docs/databases/firebolt) | `pip install firebolt-sqlalchemy` | `firebolt://{username}:{password}@{database} or firebolt://{username}:{password}@{database}/{engine_name}` | -| [Hologres](/docs/databases/hologres) | `pip install psycopg2` | `postgresql+psycopg2://<UserName>:<DBPassword>@<Database Host>/<Database Name>` | -| [IBM Db2](/docs/databases/ibm-db2) | `pip install ibm_db_sa` | `db2+ibm_db://` | -| [IBM Netezza Performance Server](/docs/databases/netezza) | `pip install nzalchemy` | `netezza+nzpy://<UserName>:<DBPassword>@<Database Host>/<Database Name>` | -| [MySQL](/docs/databases/mysql) | `pip install mysqlclient` | `mysql://<UserName>:<DBPassword>@<Database Host>/<Database Name>` | -| [Oracle](/docs/databases/oracle) | `pip install cx_Oracle` | `oracle://` | -| [PostgreSQL](/docs/databases/postgres) | `pip install psycopg2` | `postgresql://<UserName>:<DBPassword>@<Database Host>/<Database Name>` | -| [Presto](/docs/databases/presto) | `pip install pyhive` | `presto://` | -| [Rockset](/docs/databases/rockset) | `pip install rockset-sqlalchemy` | `rockset://<api_key>:@<api_server>` | -| [SAP Hana](/docs/databases/hana) | `pip install hdbcli sqlalchemy-hana or pip install apache-superset[hana]` | `hana://{username}:{password}@{host}:{port}` | -| [StarRocks](/docs/databases/starrocks) | `pip install starrocks` | `starrocks://<User>:<Password>@<Host>:<Port>/<Catalog>.<Database>` | -| [Snowflake](/docs/databases/snowflake) | `pip install snowflake-sqlalchemy` | `snowflake://{user}:{password}@{account}.{region}/{database}?role={role}&warehouse={warehouse}` | -| SQLite | No additional library needed | `sqlite://path/to/file.db?check_same_thread=false` | -| [SQL Server](/docs/databases/sql-server) | `pip install pymssql` | `mssql+pymssql://` | -| [Teradata](/docs/databases/teradata) | `pip install teradatasqlalchemy` | `teradatasql://{user}:{password}@{host}` | -| [TimescaleDB](/docs/databases/timescaledb) | `pip install psycopg2` | `postgresql://<UserName>:<DBPassword>@<Database Host>:<Port>/<Database Name>` | -| [Trino](/docs/databases/trino) | `pip install trino` | `trino://{username}:{password}@{hostname}:{port}/{catalog}` | -| [Vertica](/docs/databases/vertica) | `pip install sqlalchemy-vertica-python` | `vertica+vertica_python://<UserName>:<DBPassword>@<Database Host>/<Database Name>` | -| [YugabyteDB](/docs/databases/yugabytedb) | `pip install psycopg2` | `postgresql://<UserName>:<DBPassword>@<Database Host>/<Database Name>` | +| Database | PyPI package | Connection String | +| --------------------------------------------------------- | ---------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------ | +| [Amazon Athena](/docs/databases/athena) | `pip install pyathena[pandas]` , `pip install PyAthenaJDBC` 
| `awsathena+rest://{aws_access_key_id}:{aws_secret_access_key}@athena.{region_name}.amazonaws.com/{schema_name}?s3_staging_dir={s3_staging_dir}&... ` | +| [Apache Doris](/docs/databases/doris) | `pip install pydoris` | `doris://<User>:<Password>@<Host>:<Port>/<Catalog>.<Database>` | +| [Amazon DynamoDB](/docs/databases/dynamodb) | `pip install pydynamodb` | `dynamodb://{access_key_id}:{secret_access_key}@dynamodb.{region_name}.amazonaws.com?connector=superset` | +| [Amazon Redshift](/docs/databases/redshift) | `pip install sqlalchemy-redshift` | ` redshift+psycopg2://<userName>:<DBPassword>@<AWS End Point>:5439/<Database Name>` | +| [Apache Drill](/docs/databases/drill) | `pip install sqlalchemy-drill` | `drill+sadrill:// For JDBC drill+jdbc://` | +| [Apache Druid](/docs/databases/druid) | `pip install pydruid` | `druid://<User>:<password>@<Host>:<Port-default-9088>/druid/v2/sql` | +| [Apache Hive](/docs/databases/hive) | `pip install pyhive` | `hive://hive@{hostname}:{port}/{database}` | +| [Apache Impala](/docs/databases/impala) | `pip install impyla` | `impala://{hostname}:{port}/{database}` | +| [Apache Kylin](/docs/databases/kylin) | `pip install kylinpy` | `kylin://<username>:<password>@<hostname>:<port>/<project>?<param1>=<value1>&<param2>=<value2>` | +| [Apache Pinot](/docs/databases/pinot) | `pip install pinotdb` | `pinot://BROKER:5436/query?server=http://CONTROLLER:5983/` | +| [Apache Solr](/docs/databases/solr) | `pip install sqlalchemy-solr` | `solr://{username}:{password}@{hostname}:{port}/{server_path}/{collection}` | +| [Apache Spark SQL](/docs/databases/spark-sql) | `pip install pyhive` | `hive://hive@{hostname}:{port}/{database}` | +| [Ascend.io](/docs/databases/ascend) | `pip install impyla` | `ascend://{username}:{password}@{hostname}:{port}/{database}?auth_mechanism=PLAIN;use_ssl=true` | +| [Azure MS SQL](/docs/databases/sql-server) | `pip install pymssql` | `mssql+pymssql://UserName@presetSQL:TestPassword@presetSQL.database.windows.net:1433/TestSchema` | +| [Big Query](/docs/databases/bigquery) | `pip install sqlalchemy-bigquery` | `bigquery://{project_id}` | +| [ClickHouse](/docs/databases/clickhouse) | `pip install clickhouse-connect` | `clickhousedb://{username}:{password}@{hostname}:{port}/{database}` | +| [CockroachDB](/docs/databases/cockroachdb) | `pip install cockroachdb` | `cockroachdb://root@{hostname}:{port}/{database}?sslmode=disable` | +| [Dremio](/docs/databases/dremio) | `pip install sqlalchemy_dremio` | `dremio://user:pwd@host:31010/` | +| [Elasticsearch](/docs/databases/elasticsearch) | `pip install elasticsearch-dbapi` | `elasticsearch+http://{user}:{password}@{host}:9200/` | +| [Exasol](/docs/databases/exasol) | `pip install sqlalchemy-exasol` | `exa+pyodbc://{username}:{password}@{hostname}:{port}/my_schema?CONNECTIONLCALL=en_US.UTF-8&driver=EXAODBC` | +| [Google Sheets](/docs/databases/google-sheets) | `pip install shillelagh[gsheetsapi]` | `gsheets://` | +| [Firebolt](/docs/databases/firebolt) | `pip install firebolt-sqlalchemy` | `firebolt://{username}:{password}@{database} or firebolt://{username}:{password}@{database}/{engine_name}` | +| [Hologres](/docs/databases/hologres) | `pip install psycopg2` | `postgresql+psycopg2://<UserName>:<DBPassword>@<Database Host>/<Database Name>` | +| [IBM Db2](/docs/databases/ibm-db2) | `pip install ibm_db_sa` | `db2+ibm_db://` | +| [IBM Netezza Performance Server](/docs/databases/netezza) | `pip install nzalchemy` | `netezza+nzpy://<UserName>:<DBPassword>@<Database Host>/<Database Name>` | +| 
[MySQL](/docs/databases/mysql) | `pip install mysqlclient` | `mysql://<UserName>:<DBPassword>@<Database Host>/<Database Name>` | +| [Oracle](/docs/databases/oracle) | `pip install cx_Oracle` | `oracle://` | +| [PostgreSQL](/docs/databases/postgres) | `pip install psycopg2` | `postgresql://<UserName>:<DBPassword>@<Database Host>/<Database Name>` | +| [Presto](/docs/databases/presto) | `pip install pyhive` | `presto://` | +| [Rockset](/docs/databases/rockset) | `pip install rockset-sqlalchemy` | `rockset://<api_key>:@<api_server>` | +| [SAP Hana](/docs/databases/hana) | `pip install hdbcli sqlalchemy-hana or pip install apache-superset[hana]` | `hana://{username}:{password}@{host}:{port}` | +| [StarRocks](/docs/databases/starrocks) | `pip install starrocks` | `starrocks://<User>:<Password>@<Host>:<Port>/<Catalog>.<Database>` | +| [Snowflake](/docs/databases/snowflake) | `pip install snowflake-sqlalchemy` | `snowflake://{user}:{password}@{account}.{region}/{database}?role={role}&warehouse={warehouse}` | +| SQLite | No additional library needed | `sqlite://path/to/file.db?check_same_thread=false` | +| [SQL Server](/docs/databases/sql-server) | `pip install pymssql` | `mssql+pymssql://` | +| [Teradata](/docs/databases/teradata) | `pip install teradatasqlalchemy` | `teradatasql://{user}:{password}@{host}` | +| [TimescaleDB](/docs/databases/timescaledb) | `pip install psycopg2` | `postgresql://<UserName>:<DBPassword>@<Database Host>:<Port>/<Database Name>` | +| [Trino](/docs/databases/trino) | `pip install trino` | `trino://{username}:{password}@{hostname}:{port}/{catalog}` | +| [Vertica](/docs/databases/vertica) | `pip install sqlalchemy-vertica-python` | `vertica+vertica_python://<UserName>:<DBPassword>@<Database Host>/<Database Name>` | +| [YugabyteDB](/docs/databases/yugabytedb) | `pip install psycopg2` | `postgresql://<UserName>:<DBPassword>@<Database Host>/<Database Name>` | --- Note that many other databases are supported, the main criteria being the existence of a functional diff --git a/docs/docs/databases/pinot.mdx b/docs/docs/databases/pinot.mdx index 8d5b8c2062..e6add897ba 100644 --- a/docs/docs/databases/pinot.mdx +++ b/docs/docs/databases/pinot.mdx @@ -14,3 +14,9 @@ The expected connection string is formatted as follows: ``` pinot+http://<pinot-broker-host>:<pinot-broker-port>/query?controller=http://<pinot-controller-host>:<pinot-controller-port>/`` ``` + +The expected connection string using username and password is formatted as follows: + +``` +pinot://<username>:<password>@<pinot-broker-host>:<pinot-broker-port>/query/sql?controller=http://<pinot-controller-host>:<pinot-controller-port>/verify_ssl=true`` +``` diff --git a/docs/docs/databases/trino.mdx b/docs/docs/databases/trino.mdx index 4d6bfcf343..1328924967 100644 --- a/docs/docs/databases/trino.mdx +++ b/docs/docs/databases/trino.mdx @@ -56,6 +56,8 @@ In `Secure Extra` field, config as following example: All fields in `auth_params` are passed directly to the [`KerberosAuthentication`](https://github.com/trinodb/trino-python-client/blob/0.306.0/trino/auth.py#L40) class. +NOTE: Kerberos authentication requires installing the [`trino-python-client`](https://github.com/trinodb/trino-python-client) locally with either the `all` or `kerberos` optional features, i.e., installing `trino[all]` or `trino[kerberos]` respectively. + #### 3. 
+ +#### 3. Certificate Authentication In the `Secure Extra` field, use the following example configuration: ```json diff --git a/docs/docs/frequently-asked-questions.mdx b/docs/docs/frequently-asked-questions.mdx index df4ee7a442..11682136d7 100644 --- a/docs/docs/frequently-asked-questions.mdx +++ b/docs/docs/frequently-asked-questions.mdx @@ -1,7 +1,7 @@ --- title: Frequently Asked Questions hide_title: true -sidebar_position: 7 +sidebar_position: 8 --- ## Frequently Asked Questions diff --git a/docs/docs/installation/_category_.json b/docs/docs/installation/_category_.json index 0a1b013d88..096f63b1ef 100644 --- a/docs/docs/installation/_category_.json +++ b/docs/docs/installation/_category_.json @@ -1,4 +1,4 @@ { "label": "Installation and Configuration", - "position": 2 + "position": 3 } diff --git a/docs/docs/intro.mdx b/docs/docs/intro.mdx index 0f0315fc05..cd93aa6be6 100644 --- a/docs/docs/intro.mdx +++ b/docs/docs/intro.mdx @@ -15,7 +15,7 @@ Here are a **few different ways you can get started with Superset**: - Install Superset [from scratch](https://superset.apache.org/docs/installation/installing-superset-from-scratch/) - Deploy Superset locally with one command - [using Docker Compose](installation/installing-superset-using-docker-compose) + [using Docker Compose](https://superset.apache.org/docs/installation/installing-superset-using-docker-compose) - Deploy Superset [with Kubernetes](https://superset.apache.org/docs/installation/running-on-kubernetes) - Run a [Docker image](https://hub.docker.com/r/apache/superset) from Docker Hub - Download Superset [from PyPI here](https://pypi.org/project/apache-superset/) diff --git a/docs/docs/miscellaneous/_category_.json b/docs/docs/miscellaneous/_category_.json index f6f2299e95..16bf78ed5c 100644 --- a/docs/docs/miscellaneous/_category_.json +++ b/docs/docs/miscellaneous/_category_.json @@ -1,4 +1,4 @@ { "label": "Miscellaneous", - "position": 5 + "position": 6 } diff --git a/docs/docs/quickstart.mdx b/docs/docs/quickstart.mdx new file mode 100644 index 0000000000..115f04929c --- /dev/null +++ b/docs/docs/quickstart.mdx @@ -0,0 +1,86 @@ +--- +title: Quickstart +hide_title: false +sidebar_position: 2 +--- + +**Ready to give Apache Superset a try?** This quickstart will help you run Superset on your local machine in +**5 simple steps**. It assumes that you have [Docker](https://www.docker.com) installed. + +### 1. Get Superset +To get started, set the `SUPERSET_VERSION` environment variable to the latest Superset version. +[Click here](https://github.com/apache/superset/releases) to check the latest version. + +``` +$ export SUPERSET_VERSION=<latest_version> +``` + +Pull the Superset image from Docker Hub: + +``` +$ docker pull apache/superset:$SUPERSET_VERSION +``` + +### 2. Start Superset :::tip Some configuration is mandatory for Superset to start. In particular, Superset will not start without +a user-specified value of `SECRET_KEY` in a Superset configuration file or `SUPERSET_SECRET_KEY` as an environment variable. +Please see [Configuring Superset](https://superset.apache.org/docs/installation/configuring-superset/) for more details. +::: +``` +$ docker run -d -p 8080:8088 \ + -e "SUPERSET_SECRET_KEY=$(openssl rand -base64 42)" \ + -e "TALISMAN_ENABLED=False" \ + --name superset apache/superset:$SUPERSET_VERSION +``` + +### 3. Create an account +``` +$ docker exec -it superset superset fab create-admin \ + --username admin \ + --firstname Admin \ + --lastname Admin \ + --email admin@localhost \ + --password admin +``` + +### 4.
Configure Superset ``` $ docker exec -it superset superset db upgrade && + docker exec -it superset superset load_examples && + docker exec -it superset superset init ``` :::tip +This step can take some time. While you wait, feel free to join the official Slack channel to check for new releases, +ask questions, and engage with the community. +[Click here to join.](https://apache-superset.slack.com/join/shared_invite/zt-26ol9ge4y-kzUnSo9inRepOay0ufBTsA#/shared-invite/email) +::: + +### 5. Start using Superset +After configuring your fresh instance, head over to [http://localhost:8080](http://localhost:8080) and +log in with the admin account you just created: +``` +username: admin +password: admin +``` + +#### 🎉 Congratulations! Superset is now up and running on your machine! 🎉 + +### Wrapping Up +Once you're done with Superset, you can stop and remove it just like any other container: +``` +$ docker container rm -f superset +``` +:::tip +You can use the same container more than once, as Superset will persist data locally. However, make sure to stop the +container properly (e.g., `docker stop superset`) before shutting down, to avoid data corruption or loss. +::: + +## What's next? + +From this point on, you can move on to: +- [Create your first Dashboard](https://superset.apache.org/docs/creating-charts-dashboards/creating-your-first-dashboard) +- [Connect to a Database](https://superset.apache.org/docs/databases/installing-database-drivers) +- [Configure Superset](https://superset.apache.org/docs/installation/configuring-superset/) + +Or just explore our Documentation! diff --git a/docs/docs/security/_category_.json b/docs/docs/security/_category_.json index 7d24a44873..d88d7e53bb 100644 --- a/docs/docs/security/_category_.json +++ b/docs/docs/security/_category_.json @@ -1,4 +1,4 @@ { "label": "Security", - "position": 10 + "position": 9 } diff --git a/docs/docs/security/cves.mdx b/docs/docs/security/cves.mdx index 9577650537..ea6ac0b65b 100644 --- a/docs/docs/security/cves.mdx +++ b/docs/docs/security/cves.mdx @@ -1,9 +1,27 @@ --- -title: CVEs by release +title: CVEs fixed by release hide_title: true sidebar_position: 2 --- +#### Version 3.0.0 + +| CVE | Title | Affected | +|:---------------|:------------------------------------------------------------------------|---------:| +| CVE-2023-42502 | Open Redirect Vulnerability | < 3.0.0 | +| CVE-2023-42504 | Lack of rate limiting allows for possible denial of service | < 3.0.0 | +| CVE-2023-42505 | Sensitive information disclosure on db connection details | < 3.0.0 | + + +#### Version 2.1.2 + +| CVE | Title | Affected | +|:---------------|:------------------------------------------------------------------------|---------:| +| CVE-2023-40610 | Privilege escalation with default examples database | < 2.1.2 | +| CVE-2023-42501 | Unnecessary read permissions within the Gamma role | < 2.1.2 | +| CVE-2023-43701 | Stored XSS on API endpoint | < 2.1.2 | + + #### Version 2.1.1 | CVE | Title | Affected | diff --git a/docs/src/resources/data.js b/docs/src/resources/data.js index a07be55267..42cf835a49 100644 --- a/docs/src/resources/data.js +++ b/docs/src/resources/data.js @@ -117,4 +117,9 @@ export const Databases = [ href: 'https://www.microsoft.com/en-us/sql-server', imgName: 'msql.png', }, + { + title: 'Apache Doris', + href: 'https://doris.apache.org/', + imgName: 'doris.png', + }, ]; diff --git a/docs/src/styles/main.less b/docs/src/styles/main.less index 80dee90eca..d10047fdea 100644 --- a/docs/src/styles/main.less +++
b/docs/src/styles/main.less @@ -117,6 +117,7 @@ a > span > svg { font-size: 14px; font-weight: 400; background-color: #fff; + transition: all 0.5s; .get-started-button { border-radius: 10px; diff --git a/docs/static/img/databases/doris.png b/docs/static/img/databases/doris.png new file mode 100644 index 0000000000..4d88f2a36c Binary files /dev/null and b/docs/static/img/databases/doris.png differ diff --git a/helm/superset/Chart.yaml b/helm/superset/Chart.yaml index 60e2510eb9..cbca942569 100644 --- a/helm/superset/Chart.yaml +++ b/helm/superset/Chart.yaml @@ -29,7 +29,7 @@ maintainers: - name: craig-rueda email: craig@craigrueda.com url: https://github.com/craig-rueda -version: 0.10.14 +version: 0.11.2 dependencies: - name: postgresql version: 12.1.6 diff --git a/helm/superset/README.md b/helm/superset/README.md index d32ee985fe..1eaf4928c1 100644 --- a/helm/superset/README.md +++ b/helm/superset/README.md @@ -23,7 +23,7 @@ NOTE: This file is generated by helm-docs: https://github.com/norwoodj/helm-docs # superset -![Version: 0.10.14](https://img.shields.io/badge/Version-0.10.14-informational?style=flat-square) +![Version: 0.11.2](https://img.shields.io/badge/Version-0.11.2-informational?style=flat-square) Apache Superset is a modern, enterprise-ready business intelligence web application @@ -40,6 +40,19 @@ helm repo add superset http://apache.github.io/superset/ helm install my-superset superset/superset ``` +Make sure you set your own `SECRET_KEY` to something unique and secret. This secret key is used by Flask to +securely sign the session cookie and to encrypt sensitive data in Superset's metadata database. +It should be a long, random string. + +On Helm, this can be set via `extraSecretEnv.SUPERSET_SECRET_KEY` or `configOverrides.secrets`, for example:
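+ +For example, a minimal sketch (any sufficiently long random value works; `openssl` is just one way to generate it): + +``` +helm install my-superset superset/superset \ +  --set extraSecretEnv.SUPERSET_SECRET_KEY="$(openssl rand -base64 42)" +```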
+ ## Requirements | Repository | Name | Version | @@ -124,6 +130,7 @@ helm install my-superset superset/superset | supersetCeleryBeat.containerSecurityContext | object | `{}` | | | supersetCeleryBeat.deploymentAnnotations | object | `{}` | Annotations to be added to supersetCeleryBeat deployment | | supersetCeleryBeat.enabled | bool | `false` | This is only required if you intend to use alerts and reports | +| supersetCeleryBeat.extraContainers | list | `[]` | Launch additional containers into supersetCeleryBeat pods | | supersetCeleryBeat.forceReload | bool | `false` | If true, forces deployment to reload on each upgrade | | supersetCeleryBeat.initContainers | list | a container waiting for postgres | List of init containers | | supersetCeleryBeat.podAnnotations | object | `{}` | Annotations to be added to supersetCeleryBeat pods | @@ -136,6 +143,7 @@ helm install my-superset superset/superset | supersetCeleryFlower.containerSecurityContext | object | `{}` | | | supersetCeleryFlower.deploymentAnnotations | object | `{}` | Annotations to be added to supersetCeleryFlower deployment | | supersetCeleryFlower.enabled | bool | `false` | Enables a Celery flower deployment (management UI to monitor celery jobs) WARNING: on superset 1.x, this requires a Superset image that has `flower<1.0.0` installed (which is NOT the case of the default images) flower>=1.0.0 requires Celery 5+ which Superset 1.5 does not support | +| supersetCeleryFlower.extraContainers | list | `[]` | Launch additional containers into supersetCeleryFlower pods | | supersetCeleryFlower.initContainers | list | a container waiting for postgres and redis | List of init containers | | supersetCeleryFlower.livenessProbe.failureThreshold | int | `3` | | | supersetCeleryFlower.livenessProbe.httpGet.path | string | `"/api/workers"` | | @@ -223,6 +231,7 @@ helm install my-superset superset/superset | supersetWebsockets.containerSecurityContext | object | `{}` | | | supersetWebsockets.deploymentAnnotations | object | `{}` | | | supersetWebsockets.enabled | bool | `false` | This is only required if you intend to use `GLOBAL_ASYNC_QUERIES` in `ws` mode see https://github.com/apache/superset/blob/master/CONTRIBUTING.md#async-chart-queries | +| supersetWebsockets.extraContainers | list | `[]` | Launch additional containers into supersetWebsockets pods | | supersetWebsockets.image.pullPolicy | string | `"IfNotPresent"` | | | supersetWebsockets.image.repository | string | `"oneacrefund/superset-websocket"` | There is no official image (yet), this one is community-supported | | supersetWebsockets.image.tag | string | `"latest"` | | diff --git a/helm/superset/README.md.gotmpl b/helm/superset/README.md.gotmpl index c17a7e31a7..facb955e31 100644 --- a/helm/superset/README.md.gotmpl +++ b/helm/superset/README.md.gotmpl @@ -39,6 +39,12 @@ helm repo add superset http://apache.github.io/superset/ helm install my-superset superset/superset ``` +Make sure you set your own `SECRET_KEY` to something unique and secret. This secret key is used by Flask to +securely sign the session cookie and to encrypt sensitive data in Superset's metadata database. +It should be a long, random string. + +On Helm, this can be set via `extraSecretEnv.SUPERSET_SECRET_KEY` or `configOverrides.secrets`. + {{ template "chart.requirementsSection" . }} {{ template "chart.valuesSection" . }} diff --git a/helm/superset/templates/_helpers.tpl b/helm/superset/templates/_helpers.tpl index 40b769054e..26d68ce603 100644 --- a/helm/superset/templates/_helpers.tpl +++ b/helm/superset/templates/_helpers.tpl @@ -82,7 +82,6 @@ DATA_CACHE_CONFIG = CACHE_CONFIG SQLALCHEMY_DATABASE_URI = f"postgresql+psycopg2://{env('DB_USER')}:{env('DB_PASS')}@{env('DB_HOST')}:{env('DB_PORT')}/{env('DB_NAME')}" SQLALCHEMY_TRACK_MODIFICATIONS = True -SECRET_KEY = env('SECRET_KEY', 'thisISaSECRET_1234') class CeleryConfig: imports = ("superset.sql_lab", ) diff --git a/helm/superset/templates/deployment-beat.yaml b/helm/superset/templates/deployment-beat.yaml index 43754efb06..30d1eff61a 100644 --- a/helm/superset/templates/deployment-beat.yaml +++ b/helm/superset/templates/deployment-beat.yaml @@ -42,6 +42,7 @@ spec: metadata: annotations: checksum/superset_config.py: {{ include "superset-config" . | sha256sum }} + checksum/superset_bootstrap.sh: {{ tpl .Values.bootstrapScript . | sha256sum }} checksum/connections: {{ .Values.supersetNode.connections | toYaml | sha256sum }} checksum/extraConfigs: {{ .Values.extraConfigs | toYaml | sha256sum }} checksum/extraSecrets: {{ .Values.extraSecrets | toYaml | sha256sum }} @@ -119,6 +120,9 @@ spec: {{- else }} {{- toYaml .Values.resources | nindent 12 }} {{- end }} + {{- if .Values.supersetCeleryBeat.extraContainers }} + {{- toYaml .Values.supersetCeleryBeat.extraContainers | nindent 8 }} + {{- end }} {{- with .Values.nodeSelector }} nodeSelector: {{- toYaml .
| nindent 8 }} {{- end }} diff --git a/helm/superset/templates/deployment-flower.yaml b/helm/superset/templates/deployment-flower.yaml index 2213ffa353..e4b05a17e9 100644 --- a/helm/superset/templates/deployment-flower.yaml +++ b/helm/superset/templates/deployment-flower.yaml @@ -115,6 +115,9 @@ spec: {{- else }} {{- toYaml .Values.resources | nindent 12 }} {{- end }} + {{- if .Values.supersetCeleryFlower.extraContainers }} + {{- toYaml .Values.supersetCeleryFlower.extraContainers | nindent 8 }} + {{- end }} {{- with .Values.nodeSelector }} nodeSelector: {{- toYaml . | nindent 8 }} {{- end }} diff --git a/helm/superset/templates/deployment-worker.yaml b/helm/superset/templates/deployment-worker.yaml index d84e7e9561..2710ff40fe 100644 --- a/helm/superset/templates/deployment-worker.yaml +++ b/helm/superset/templates/deployment-worker.yaml @@ -48,6 +48,7 @@ spec: metadata: annotations: checksum/superset_config.py: {{ include "superset-config" . | sha256sum }} + checksum/superset_bootstrap.sh: {{ tpl .Values.bootstrapScript . | sha256sum }} checksum/connections: {{ .Values.supersetNode.connections | toYaml | sha256sum }} checksum/extraConfigs: {{ .Values.extraConfigs | toYaml | sha256sum }} checksum/extraSecrets: {{ .Values.extraSecrets | toYaml | sha256sum }} diff --git a/helm/superset/templates/deployment-ws.yaml b/helm/superset/templates/deployment-ws.yaml index 6bc9faac67..7612900b07 100644 --- a/helm/superset/templates/deployment-ws.yaml +++ b/helm/superset/templates/deployment-ws.yaml @@ -114,6 +114,9 @@ spec: {{- if .Values.supersetWebsockets.livenessProbe }} livenessProbe: {{- .Values.supersetWebsockets.livenessProbe | toYaml | nindent 12 }} {{- end }} + {{- if .Values.supersetWebsockets.extraContainers }} + {{- toYaml .Values.supersetWebsockets.extraContainers | nindent 8 }} + {{- end }} {{- with .Values.nodeSelector }} nodeSelector: {{- toYaml . | nindent 8 }} {{- end }} diff --git a/helm/superset/templates/init-job.yaml b/helm/superset/templates/init-job.yaml index 5b39d20e10..43839c0d95 100644 --- a/helm/superset/templates/init-job.yaml +++ b/helm/superset/templates/init-job.yaml @@ -63,7 +63,7 @@ spec: name: {{ tpl .Values.envFromSecret . }} {{- range .Values.envFromSecrets }} - secretRef: - name: {{ tpl . $ }} + name: {{ tpl . $ | quote }} {{- end }} imagePullPolicy: {{ .Values.image.pullPolicy }} {{- if .Values.init.containerSecurityContext }} diff --git a/helm/superset/values.yaml b/helm/superset/values.yaml index 67f685bf18..26d4547420 100644 --- a/helm/superset/values.yaml +++ b/helm/superset/values.yaml @@ -93,6 +93,8 @@ extraSecretEnv: {} # # Google API Keys: https://console.cloud.google.com/apis/credentials # GOOGLE_KEY: ... # GOOGLE_SECRET: ... + # # Generate your own secret key for encryption. 
Use openssl rand -base64 42 to generate a good key + # SUPERSET_SECRET_KEY: 'CHANGE_ME_TO_A_COMPLEX_RANDOM_SECRET' # -- Extra files to mount on `/app/pythonpath` extraConfigs: {} @@ -441,6 +443,8 @@ supersetCeleryBeat: - /bin/sh - -c - dockerize -wait "tcp://$DB_HOST:$DB_PORT" -wait "tcp://$REDIS_HOST:$REDIS_PORT" -timeout 120s + # -- Launch additional containers into supersetCeleryBeat pods + extraContainers: [] # -- Annotations to be added to supersetCeleryBeat deployment deploymentAnnotations: {} # -- Affinity to be added to supersetCeleryBeat deployment @@ -522,6 +526,8 @@ supersetCeleryFlower: - /bin/sh - -c - dockerize -wait "tcp://$DB_HOST:$DB_PORT" -wait "tcp://$REDIS_HOST:$REDIS_PORT" -timeout 120s + # -- Launch additional containers into supersetCeleryFlower pods + extraContainers: [] # -- Annotations to be added to supersetCeleryFlower deployment deploymentAnnotations: {} # -- Affinity to be added to supersetCeleryFlower deployment @@ -588,6 +594,8 @@ supersetWebsockets: http: nil command: [] resources: {} + # -- Launch additional containers into supersetWebsockets pods + extraContainers: [] deploymentAnnotations: {} # -- Affinity to be added to supersetWebsockets deployment affinity: {} diff --git a/pytest.ini b/pytest.ini index fdb50114d8..3fec965e72 100644 --- a/pytest.ini +++ b/pytest.ini @@ -17,4 +17,4 @@ [pytest] testpaths = tests -python_files = *_test.py test_*.py *_tests.py +python_files = *_test.py test_*.py *_tests.py *viz/utils.py diff --git a/requirements/base.txt b/requirements/base.txt index d056b403c3..e8b1b43f91 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -18,7 +18,10 @@ apsw==3.42.0.1 async-timeout==4.0.2 # via redis attrs==23.1.0 - # via jsonschema + # via + # cattrs + # jsonschema + # requests-cache babel==2.9.1 # via flask-babel backoff==1.11.1 @@ -31,10 +34,12 @@ bottleneck==1.3.7 # via pandas brotli==1.0.9 # via flask-compress -cachelib==0.6.0 +cachelib==0.9.0 # via # flask-caching # flask-session +cattrs==23.2.1 + # via requests-cache celery==5.2.2 # via apache-superset certifi==2023.7.22 @@ -85,6 +90,8 @@ dnspython==2.1.0 # via email-validator email-validator==1.1.3 # via flask-appbuilder +exceptiongroup==1.1.1 + # via cattrs flask==2.2.5 # via # apache-superset @@ -99,11 +106,11 @@ flask==2.2.5 # flask-session # flask-sqlalchemy # flask-wtf -flask-appbuilder==4.3.9 +flask-appbuilder==4.3.10 # via apache-superset flask-babel==1.0.0 # via flask-appbuilder -flask-caching==1.11.1 +flask-caching==2.1.0 # via apache-superset flask-compress==1.13 # via apache-superset @@ -136,7 +143,9 @@ geographiclib==1.52 geopy==2.2.0 # via apache-superset greenlet==2.0.2 - # via shillelagh + # via + # shillelagh + # sqlalchemy gunicorn==21.2.0 # via apache-superset hashids==1.3.1 @@ -152,7 +161,10 @@ idna==3.2 # email-validator # requests importlib-metadata==6.6.0 - # via apache-superset + # via + # apache-superset + # flask + # shillelagh importlib-resources==5.12.0 # via limits isodate==0.6.0 @@ -232,6 +244,8 @@ parsedatetime==2.6 # via apache-superset pgsanity==0.2.9 # via apache-superset +platformdirs==3.8.1 + # via requests-cache polyline==2.0.0 # via apache-superset prison==0.2.1 @@ -285,12 +299,16 @@ pyyaml==6.0.1 redis==4.5.4 # via apache-superset requests==2.31.0 + # via + # requests-cache + # shillelagh +requests-cache==1.1.1 # via shillelagh rich==13.3.4 # via flask-limiter selenium==3.141.0 # via apache-superset -shillelagh==1.2.6 +shillelagh==1.2.10 # via apache-superset shortid==0.1.2 # via apache-superset @@ -303,6 +321,7 @@ six==1.16.0 
# paramiko # prison # python-dateutil + # url-normalize # wtforms-json slack-sdk==3.21.3 # via apache-superset @@ -328,14 +347,18 @@ tabulate==0.8.9 typing-extensions==4.4.0 # via # apache-superset + # cattrs # flask-limiter # limits # shillelagh tzdata==2023.3 # via pandas +url-normalize==1.4.3 + # via requests-cache urllib3==1.26.6 # via # requests + # requests-cache # selenium vine==5.0.0 # via @@ -363,7 +386,9 @@ wtforms-json==0.3.5 xlsxwriter==3.0.7 # via apache-superset zipp==3.15.0 - # via importlib-metadata + # via + # importlib-metadata + # importlib-resources # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/requirements/development.txt b/requirements/development.txt index 04962ae537..a73e3a70c5 100644 --- a/requirements/development.txt +++ b/requirements/development.txt @@ -74,8 +74,6 @@ pickleshare==0.7.5 # via ipython pillow==9.5.0 # via apache-superset -platformdirs==3.8.1 - # via pylint progress==1.6 # via -r requirements/development.in psycopg2-binary==2.9.6 diff --git a/requirements/testing.txt b/requirements/testing.txt index 00fe734540..c1f6e55d12 100644 --- a/requirements/testing.txt +++ b/requirements/testing.txt @@ -26,8 +26,6 @@ docker==6.1.1 # via -r requirements/testing.in ephem==4.1.4 # via lunarcalendar -exceptiongroup==1.1.1 - # via pytest flask-testing==0.8.1 # via -r requirements/testing.in fonttools==4.39.4 @@ -123,8 +121,6 @@ pyee==9.0.4 # via playwright pyfakefs==5.2.2 # via -r requirements/testing.in -pyhive[presto]==0.7.0 - # via apache-superset pytest==7.3.1 # via # -r requirements/testing.in @@ -144,8 +140,6 @@ rsa==4.9 # via google-auth setuptools-git==1.2 # via prophet -shillelagh[gsheetsapi]==1.2.6 - # via apache-superset sqlalchemy-bigquery==1.6.1 # via apache-superset statsd==4.0.1 diff --git a/scripts/docker_build_push.sh b/scripts/docker_build_push.sh index 80d08c47e3..8ae82faaeb 100755 --- a/scripts/docker_build_push.sh +++ b/scripts/docker_build_push.sh @@ -85,6 +85,7 @@ else DEV_TAG="${REPO_NAME}:${LATEST_TAG}-dev" fi +for BUILD_PLATFORM in $ARCHITECTURE_FOR_BUILD; do # # Build the dev image # @@ -96,7 +97,7 @@ docker buildx build --target dev \ -t "${REPO_NAME}:${SHA}-dev" \ -t "${REPO_NAME}:${REFSPEC}-dev" \ -t "${DEV_TAG}" \ - --platform linux/amd64 \ + --platform ${BUILD_PLATFORM} \ --label "sha=${SHA}" \ --label "built_at=$(date)" \ --label "target=dev" \ @@ -113,7 +114,7 @@ docker buildx build --target lean \ -t "${REPO_NAME}:${SHA}" \ -t "${REPO_NAME}:${REFSPEC}" \ -t "${REPO_NAME}:${LATEST_TAG}" \ - --platform linux/amd64 \ + --platform ${BUILD_PLATFORM} \ --label "sha=${SHA}" \ --label "built_at=$(date)" \ --label "target=lean" \ @@ -130,7 +131,7 @@ docker buildx build --target lean \ -t "${REPO_NAME}:${SHA}-py310" \ -t "${REPO_NAME}:${REFSPEC}-py310" \ -t "${REPO_NAME}:${LATEST_TAG}-py310" \ - --platform linux/amd64 \ + --platform ${BUILD_PLATFORM} \ --build-arg PY_VER="3.10-slim-bookworm"\ --label "sha=${SHA}" \ --label "built_at=$(date)" \ @@ -148,7 +149,7 @@ docker buildx build --target lean \ -t "${REPO_NAME}:${SHA}-py39" \ -t "${REPO_NAME}:${REFSPEC}-py39" \ -t "${REPO_NAME}:${LATEST_TAG}-py39" \ - --platform linux/amd64 \ + --platform ${BUILD_PLATFORM} \ --build-arg PY_VER="3.9-slim-bullseye"\ --label "sha=${SHA}" \ --label "built_at=$(date)" \ @@ -156,8 +157,6 @@ docker buildx build --target lean \ --label "build_actor=${GITHUB_ACTOR}" \ . 
- -for BUILD_PLATFORM in $ARCHITECTURE_FOR_BUILD; do # # Build the "websocket" image # diff --git a/setup.py b/setup.py index 5173ad6dea..c3cc887bbf 100644 --- a/setup.py +++ b/setup.py @@ -83,8 +83,8 @@ setup( "cryptography>=41.0.2, <41.1.0", "deprecation>=2.1.0, <2.2.0", "flask>=2.2.5, <3.0.0", - "flask-appbuilder>=4.3.9, <5.0.0", - "flask-caching>=1.11.1, <2.0", + "flask-appbuilder>=4.3.10, <5.0.0", + "flask-caching>=2.1.0, <3", "flask-compress>=1.13, <2.0", "flask-talisman>=1.0.0, <2.0", "flask-login>=0.6.0, < 1.0", @@ -118,7 +118,7 @@ setup( "PyJWT>=2.4.0, <3.0", "redis>=4.5.4, <5.0", "selenium>=3.141.0, <4.10.0", - "shillelagh>=1.2.6,<2.0", + "shillelagh>=1.2.10, <2.0", "shortid", "sshtunnel>=0.4.0, <0.5", "simplejson>=3.15.0", @@ -146,6 +146,7 @@ setup( "cockroachdb": ["cockroachdb>=0.3.5, <0.4"], "cors": ["flask-cors>=2.0.0"], "crate": ["crate[sqlalchemy]>=0.26.0, <0.27"], + "databend": ["databend-sqlalchemy>=0.3.2, <1.0"], "databricks": [ "databricks-sql-connector>=2.0.2, <3", "sqlalchemy-databricks>=0.2.0", @@ -162,7 +163,7 @@ setup( "excel": ["xlrd>=1.2.0, <1.3"], "firebird": ["sqlalchemy-firebird>=0.7.0, <0.8"], "firebolt": ["firebolt-sqlalchemy>=0.0.1"], - "gsheets": ["shillelagh[gsheetsapi]>=1.2.6, <2"], + "gsheets": ["shillelagh[gsheetsapi]>=1.2.10, <2"], "hana": ["hdbcli==2.4.162", "sqlalchemy_hana==0.4.0"], "hive": [ "pyhive[hive]>=0.6.5;python_version<'3.11'", @@ -191,7 +192,7 @@ setup( "redshift": ["sqlalchemy-redshift>=0.8.1, < 0.9"], "rockset": ["rockset-sqlalchemy>=0.0.1, <1.0.0"], "shillelagh": [ - "shillelagh[datasetteapi,gsheetsapi,socrata,weatherapi]>=1.2.6,<2" + "shillelagh[datasetteapi,gsheetsapi,socrata,weatherapi]>=1.2.10, <2" ], "snowflake": ["snowflake-sqlalchemy>=1.2.4, <2"], "spark": [ @@ -201,10 +202,11 @@ setup( "thrift>=0.14.1, <1.0.0", ], "teradata": ["teradatasql>=16.20.0.23"], - "thumbnails": ["Pillow>=9.5.0, <10.0.0"], + "thumbnails": ["Pillow>=10.0.1, <11"], "vertica": ["sqlalchemy-vertica-python>=0.5.9, < 0.6"], "netezza": ["nzalchemy>=11.0.2"], "starrocks": ["starrocks>=1.0.0"], + "doris": ["pydoris>=1.0.0, <2.0.0"], }, python_requires="~=3.9", author="Apache Software Foundation", diff --git a/superset-embedded-sdk/package-lock.json b/superset-embedded-sdk/package-lock.json index febe3b9ea5..0112ed3a38 100644 --- a/superset-embedded-sdk/package-lock.json +++ b/superset-embedded-sdk/package-lock.json @@ -18,7 +18,7 @@ "@babel/preset-env": "^7.16.11", "@babel/preset-typescript": "^7.16.7", "@types/jest": "^27.4.1", - "axios": "^0.25.0", + "axios": "^1.6.0", "babel-loader": "^8.2.3", "jest": "^27.5.1", "typescript": "^4.5.5", @@ -2977,12 +2977,28 @@ "dev": true }, "node_modules/axios": { - "version": "0.25.0", - "resolved": "https://registry.npmjs.org/axios/-/axios-0.25.0.tgz", - "integrity": "sha512-cD8FOb0tRH3uuEe6+evtAbgJtfxr7ly3fQjYcMcuPlgkwVS9xboaVIpcDV+cYQe+yGykgwZCs1pzjntcGa6l5g==", + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.6.0.tgz", + "integrity": "sha512-EZ1DYihju9pwVB+jg67ogm+Tmqc6JmhamRN6I4Zt8DfZu5lbcQGw3ozH9lFejSJgs/ibaef3A9PMXPLeefFGJg==", "dev": true, "dependencies": { - "follow-redirects": "^1.14.7" + "follow-redirects": "^1.15.0", + "form-data": "^4.0.0", + "proxy-from-env": "^1.1.0" + } + }, + "node_modules/axios/node_modules/form-data": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", + "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "dev": true, + "dependencies": { + "asynckit": 
"^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" } }, "node_modules/babel-jest": { @@ -4087,9 +4103,9 @@ } }, "node_modules/follow-redirects": { - "version": "1.14.8", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.14.8.tgz", - "integrity": "sha512-1x0S9UVJHsQprFcEC/qnNzBLcIxsjAV905f/UkQxbclCsoTWlacCNOpQa/anodLl2uaEKFhfWOvM2Qg77+15zA==", + "version": "1.15.3", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.3.tgz", + "integrity": "sha512-1VzOtuEM8pC9SFU1E+8KfTjZyMztRsgEfwQl44z8A25uy13jSzTj6dyK2Df52iV0vgHCfBwLhDWevLn95w5v6Q==", "dev": true, "funding": [ { @@ -7021,6 +7037,12 @@ "node": ">= 6" } }, + "node_modules/proxy-from-env": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", + "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==", + "dev": true + }, "node_modules/psl": { "version": "1.8.0", "resolved": "https://registry.npmjs.org/psl/-/psl-1.8.0.tgz", @@ -10431,12 +10453,27 @@ "dev": true }, "axios": { - "version": "0.25.0", - "resolved": "https://registry.npmjs.org/axios/-/axios-0.25.0.tgz", - "integrity": "sha512-cD8FOb0tRH3uuEe6+evtAbgJtfxr7ly3fQjYcMcuPlgkwVS9xboaVIpcDV+cYQe+yGykgwZCs1pzjntcGa6l5g==", + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.6.0.tgz", + "integrity": "sha512-EZ1DYihju9pwVB+jg67ogm+Tmqc6JmhamRN6I4Zt8DfZu5lbcQGw3ozH9lFejSJgs/ibaef3A9PMXPLeefFGJg==", "dev": true, "requires": { - "follow-redirects": "^1.14.7" + "follow-redirects": "^1.15.0", + "form-data": "^4.0.0", + "proxy-from-env": "^1.1.0" + }, + "dependencies": { + "form-data": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", + "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "dev": true, + "requires": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + } + } } }, "babel-jest": { @@ -11279,9 +11316,9 @@ } }, "follow-redirects": { - "version": "1.14.8", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.14.8.tgz", - "integrity": "sha512-1x0S9UVJHsQprFcEC/qnNzBLcIxsjAV905f/UkQxbclCsoTWlacCNOpQa/anodLl2uaEKFhfWOvM2Qg77+15zA==", + "version": "1.15.3", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.3.tgz", + "integrity": "sha512-1VzOtuEM8pC9SFU1E+8KfTjZyMztRsgEfwQl44z8A25uy13jSzTj6dyK2Df52iV0vgHCfBwLhDWevLn95w5v6Q==", "dev": true }, "form-data": { @@ -13464,6 +13501,12 @@ "sisteransi": "^1.0.5" } }, + "proxy-from-env": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", + "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==", + "dev": true + }, "psl": { "version": "1.8.0", "resolved": "https://registry.npmjs.org/psl/-/psl-1.8.0.tgz", diff --git a/superset-embedded-sdk/package.json b/superset-embedded-sdk/package.json index dfe1801ac9..55ed198598 100644 --- a/superset-embedded-sdk/package.json +++ b/superset-embedded-sdk/package.json @@ -42,7 +42,7 @@ "@babel/preset-env": "^7.16.11", "@babel/preset-typescript": "^7.16.7", "@types/jest": "^27.4.1", - "axios": "^0.25.0", + "axios": "^1.6.0", "babel-loader": "^8.2.3", "jest": "^27.5.1", "typescript": "^4.5.5", diff --git 
a/superset-frontend/cypress-base/cypress/e2e/alerts_and_reports/alerts.test.ts b/superset-frontend/cypress-base/cypress/e2e/alerts_and_reports/alerts.test.ts index a695541cee..b677507a46 100644 --- a/superset-frontend/cypress-base/cypress/e2e/alerts_and_reports/alerts.test.ts +++ b/superset-frontend/cypress-base/cypress/e2e/alerts_and_reports/alerts.test.ts @@ -29,10 +29,9 @@ describe('Alert list view', () => { cy.getBySel('sort-header').eq(2).contains('Name'); cy.getBySel('sort-header').eq(3).contains('Schedule'); cy.getBySel('sort-header').eq(4).contains('Notification method'); - cy.getBySel('sort-header').eq(5).contains('Created by'); - cy.getBySel('sort-header').eq(6).contains('Owners'); - cy.getBySel('sort-header').eq(7).contains('Modified'); - cy.getBySel('sort-header').eq(8).contains('Active'); + cy.getBySel('sort-header').eq(5).contains('Owners'); + cy.getBySel('sort-header').eq(6).contains('Last modified'); + cy.getBySel('sort-header').eq(7).contains('Active'); // TODO Cypress won't recognize the Actions column // cy.getBySel('sort-header').eq(9).contains('Actions'); }); diff --git a/superset-frontend/cypress-base/cypress/e2e/alerts_and_reports/reports.test.ts b/superset-frontend/cypress-base/cypress/e2e/alerts_and_reports/reports.test.ts index e267d76f6f..a227fa03d7 100644 --- a/superset-frontend/cypress-base/cypress/e2e/alerts_and_reports/reports.test.ts +++ b/superset-frontend/cypress-base/cypress/e2e/alerts_and_reports/reports.test.ts @@ -29,10 +29,9 @@ describe('Report list view', () => { cy.getBySel('sort-header').eq(2).contains('Name'); cy.getBySel('sort-header').eq(3).contains('Schedule'); cy.getBySel('sort-header').eq(4).contains('Notification method'); - cy.getBySel('sort-header').eq(5).contains('Created by'); - cy.getBySel('sort-header').eq(6).contains('Owners'); - cy.getBySel('sort-header').eq(7).contains('Modified'); - cy.getBySel('sort-header').eq(8).contains('Active'); + cy.getBySel('sort-header').eq(5).contains('Owners'); + cy.getBySel('sort-header').eq(6).contains('Last modified'); + cy.getBySel('sort-header').eq(7).contains('Active'); // TODO Cypress won't recognize the Actions column // cy.getBySel('sort-header').eq(9).contains('Actions'); }); diff --git a/superset-frontend/cypress-base/cypress/e2e/chart_list/filter.test.ts b/superset-frontend/cypress-base/cypress/e2e/chart_list/filter.test.ts index acd11669be..00b09e2fb8 100644 --- a/superset-frontend/cypress-base/cypress/e2e/chart_list/filter.test.ts +++ b/superset-frontend/cypress-base/cypress/e2e/chart_list/filter.test.ts @@ -35,14 +35,14 @@ describe('Charts filters', () => { setFilter('Owner', 'admin user'); }); - it('should allow filtering by "Created by" correctly', () => { - setFilter('Created by', 'alpha user'); - setFilter('Created by', 'admin user'); + it('should allow filtering by "Modified by" correctly', () => { + setFilter('Modified by', 'alpha user'); + setFilter('Modified by', 'admin user'); }); - it('should allow filtering by "Chart type" correctly', () => { - setFilter('Chart type', 'Area Chart (legacy)'); - setFilter('Chart type', 'Bubble Chart'); + it('should allow filtering by "Type" correctly', () => { + setFilter('Type', 'Area Chart (legacy)'); + setFilter('Type', 'Bubble Chart'); }); it('should allow filtering by "Dataset" correctly', () => { @@ -51,7 +51,7 @@ describe('Charts filters', () => { }); it('should allow filtering by "Dashboards" correctly', () => { - setFilter('Dashboards', 'Unicode Test'); - setFilter('Dashboards', 'Tabbed Dashboard'); + setFilter('Dashboard', 'Unicode 
Test'); + setFilter('Dashboard', 'Tabbed Dashboard'); }); }); diff --git a/superset-frontend/cypress-base/cypress/e2e/chart_list/list.test.ts b/superset-frontend/cypress-base/cypress/e2e/chart_list/list.test.ts index 6664281abe..44f348edc5 100644 --- a/superset-frontend/cypress-base/cypress/e2e/chart_list/list.test.ts +++ b/superset-frontend/cypress-base/cypress/e2e/chart_list/list.test.ts @@ -109,14 +109,12 @@ describe('Charts list', () => { it('should load rows in list mode', () => { cy.getBySel('listview-table').should('be.visible'); - cy.getBySel('sort-header').eq(1).contains('Chart'); - cy.getBySel('sort-header').eq(2).contains('Visualization type'); + cy.getBySel('sort-header').eq(1).contains('Name'); + cy.getBySel('sort-header').eq(2).contains('Type'); cy.getBySel('sort-header').eq(3).contains('Dataset'); - // cy.getBySel('sort-header').eq(4).contains('Dashboards added to'); - cy.getBySel('sort-header').eq(4).contains('Modified by'); + cy.getBySel('sort-header').eq(4).contains('Owners'); cy.getBySel('sort-header').eq(5).contains('Last modified'); - cy.getBySel('sort-header').eq(6).contains('Created by'); - cy.getBySel('sort-header').eq(7).contains('Actions'); + cy.getBySel('sort-header').eq(6).contains('Actions'); }); it('should sort correctly in list mode', () => { diff --git a/superset-frontend/cypress-base/cypress/e2e/dashboard/editmode.test.ts b/superset-frontend/cypress-base/cypress/e2e/dashboard/editmode.test.ts index 812ad945da..62bab84d1b 100644 --- a/superset-frontend/cypress-base/cypress/e2e/dashboard/editmode.test.ts +++ b/superset-frontend/cypress-base/cypress/e2e/dashboard/editmode.test.ts @@ -515,7 +515,7 @@ describe('Dashboard edit', () => { // label Anthony cy.get('[data-test-chart-name="Trends"] .line .nv-legend-symbol') .eq(2) - .should('have.css', 'fill', 'rgb(0, 122, 135)'); + .should('have.css', 'fill', 'rgb(244, 176, 42)'); // open main tab and nested tab openTab(0, 0); @@ -526,7 +526,7 @@ describe('Dashboard edit', () => { '[data-test-chart-name="Top 10 California Names Timeseries"] .line .nv-legend-symbol', ) .first() - .should('have.css', 'fill', 'rgb(0, 122, 135)'); + .should('have.css', 'fill', 'rgb(244, 176, 42)'); }); it('should apply the color scheme across main tabs', () => { @@ -557,7 +557,7 @@ describe('Dashboard edit', () => { cy.get('[data-test-chart-name="Trends"] .line .nv-legend-symbol') .first() - .should('have.css', 'fill', 'rgb(204, 0, 134)'); + .should('have.css', 'fill', 'rgb(156, 52, 152)'); // change scheme now that charts are rendered across the main tabs editDashboard(); diff --git a/superset-frontend/cypress-base/cypress/e2e/dashboard/nativeFilters.test.ts b/superset-frontend/cypress-base/cypress/e2e/dashboard/nativeFilters.test.ts index e8457ba94b..7683d7f878 100644 --- a/superset-frontend/cypress-base/cypress/e2e/dashboard/nativeFilters.test.ts +++ b/superset-frontend/cypress-base/cypress/e2e/dashboard/nativeFilters.test.ts @@ -113,7 +113,7 @@ function prepareDashboardFilters( }, type: 'NATIVE_FILTER', description: '', - chartsInScope: [6], + chartsInScope: [5], tabsInScope: [], }); }); @@ -150,7 +150,7 @@ function prepareDashboardFilters( meta: { width: 4, height: 50, - chartId: 6, + chartId: 5, sliceName: 'Most Populated Countries', }, }, @@ -414,7 +414,7 @@ describe('Native filters', () => { cy.createSampleDashboards([0]); }); - it('Verify that default value is respected after revisit', () => { + it.only('Verify that default value is respected after revisit', () => { prepareDashboardFilters([ { name: 'country_name', column: 
'country_name', datasetId: 2 }, ]); diff --git a/superset-frontend/cypress-base/cypress/e2e/dashboard/tabs.test.ts b/superset-frontend/cypress-base/cypress/e2e/dashboard/tabs.test.ts index 6fc89c1446..ba442e600a 100644 --- a/superset-frontend/cypress-base/cypress/e2e/dashboard/tabs.test.ts +++ b/superset-frontend/cypress-base/cypress/e2e/dashboard/tabs.test.ts @@ -25,7 +25,6 @@ import { TABBED_DASHBOARD } from 'cypress/utils/urls'; import { expandFilterOnLeftPanel } from './utils'; const TREEMAP = { name: 'Treemap', viz: 'treemap_v2' }; -const FILTER_BOX = { name: 'Region Filter', viz: 'filter_box' }; const LINE_CHART = { name: 'Growth Rate', viz: 'line' }; const BOX_PLOT = { name: 'Box plot', viz: 'box_plot' }; const BIG_NUMBER = { name: 'Number of Girls', viz: 'big_number_total' }; @@ -41,7 +40,6 @@ function topLevelTabs() { function resetTabs() { topLevelTabs(); cy.get('@top-level-tabs').first().click(); - waitForChartLoad(FILTER_BOX); waitForChartLoad(TREEMAP); waitForChartLoad(BIG_NUMBER); waitForChartLoad(TABLE); @@ -96,7 +94,6 @@ describe('Dashboard tabs', () => { it.skip('should send new queries when tab becomes visible', () => { // landing in first tab - waitForChartLoad(FILTER_BOX); waitForChartLoad(TREEMAP); getChartAliasBySpec(TREEMAP).then(treemapAlias => { diff --git a/superset-frontend/cypress-base/cypress/e2e/dashboard/utils.ts b/superset-frontend/cypress-base/cypress/e2e/dashboard/utils.ts index ca539039cf..c63df51d10 100644 --- a/superset-frontend/cypress-base/cypress/e2e/dashboard/utils.ts +++ b/superset-frontend/cypress-base/cypress/e2e/dashboard/utils.ts @@ -23,7 +23,6 @@ import { ChartSpec, waitForChartLoad } from 'cypress/utils'; export const WORLD_HEALTH_CHARTS = [ { name: '% Rural', viz: 'world_map' }, { name: 'Most Populated Countries', viz: 'table' }, - { name: 'Region Filter', viz: 'filter_box' }, { name: "World's Population", viz: 'big_number' }, { name: 'Growth Rate', viz: 'line' }, { name: 'Rural Breakdown', viz: 'sunburst' }, diff --git a/superset-frontend/cypress-base/cypress/e2e/dashboard_list/filter.test.ts b/superset-frontend/cypress-base/cypress/e2e/dashboard_list/filter.test.ts index 4654b3b5c2..854ea541c7 100644 --- a/superset-frontend/cypress-base/cypress/e2e/dashboard_list/filter.test.ts +++ b/superset-frontend/cypress-base/cypress/e2e/dashboard_list/filter.test.ts @@ -35,9 +35,9 @@ describe('Dashboards filters', () => { setFilter('Owner', 'admin user'); }); - it('should allow filtering by "Created by" correctly', () => { - setFilter('Created by', 'alpha user'); - setFilter('Created by', 'admin user'); + it('should allow filtering by "Modified by" correctly', () => { + setFilter('Modified by', 'alpha user'); + setFilter('Modified by', 'admin user'); }); it('should allow filtering by "Status" correctly', () => { diff --git a/superset-frontend/cypress-base/cypress/e2e/dashboard_list/list.test.ts b/superset-frontend/cypress-base/cypress/e2e/dashboard_list/list.test.ts index 9bc6eed224..7dfb7cd673 100644 --- a/superset-frontend/cypress-base/cypress/e2e/dashboard_list/list.test.ts +++ b/superset-frontend/cypress-base/cypress/e2e/dashboard_list/list.test.ts @@ -54,13 +54,11 @@ describe('Dashboards list', () => { it('should load rows in list mode', () => { cy.getBySel('listview-table').should('be.visible'); - cy.getBySel('sort-header').eq(1).contains('Title'); - cy.getBySel('sort-header').eq(2).contains('Modified by'); - cy.getBySel('sort-header').eq(3).contains('Status'); - cy.getBySel('sort-header').eq(4).contains('Modified'); - 
cy.getBySel('sort-header').eq(5).contains('Created by'); - cy.getBySel('sort-header').eq(6).contains('Owners'); - cy.getBySel('sort-header').eq(7).contains('Actions'); + cy.getBySel('sort-header').eq(1).contains('Name'); + cy.getBySel('sort-header').eq(2).contains('Status'); + cy.getBySel('sort-header').eq(3).contains('Owners'); + cy.getBySel('sort-header').eq(4).contains('Last modified'); + cy.getBySel('sort-header').eq(5).contains('Actions'); }); it('should sort correctly in list mode', () => { diff --git a/superset-frontend/cypress-base/cypress/e2e/explore/visualizations/dist_bar.test.js b/superset-frontend/cypress-base/cypress/e2e/explore/visualizations/dist_bar.test.js index 770e1e1c04..591ba31776 100644 --- a/superset-frontend/cypress-base/cypress/e2e/explore/visualizations/dist_bar.test.js +++ b/superset-frontend/cypress-base/cypress/e2e/explore/visualizations/dist_bar.test.js @@ -89,6 +89,6 @@ describe('Visualization > Distribution bar chart', () => { ).should('exist'); cy.get('.dist_bar .nv-legend .nv-legend-symbol') .first() - .should('have.css', 'fill', 'rgb(255, 90, 95)'); + .should('have.css', 'fill', 'rgb(41, 105, 107)'); }); }); diff --git a/superset-frontend/cypress-base/cypress/e2e/explore/visualizations/line.test.ts b/superset-frontend/cypress-base/cypress/e2e/explore/visualizations/line.test.ts index 5cc398c7f3..8499db5946 100644 --- a/superset-frontend/cypress-base/cypress/e2e/explore/visualizations/line.test.ts +++ b/superset-frontend/cypress-base/cypress/e2e/explore/visualizations/line.test.ts @@ -85,7 +85,7 @@ describe('Visualization > Line', () => { ).should('exist'); cy.get('.line .nv-legend .nv-legend-symbol') .first() - .should('have.css', 'fill', 'rgb(255, 90, 95)'); + .should('have.css', 'fill', 'rgb(41, 105, 107)'); }); it('should work with adhoc metric', () => { diff --git a/superset-frontend/cypress-base/cypress/support/e2e.ts b/superset-frontend/cypress-base/cypress/support/e2e.ts index 6642e0120c..cccc7b2005 100644 --- a/superset-frontend/cypress-base/cypress/support/e2e.ts +++ b/superset-frontend/cypress-base/cypress/support/e2e.ts @@ -18,7 +18,7 @@ */ import '@cypress/code-coverage/support'; import '@applitools/eyes-cypress/commands'; -import failOnConsoleError, { Config } from 'cypress-fail-on-console-error'; +import failOnConsoleError from 'cypress-fail-on-console-error'; require('cy-verify-downloads').addCustomCommand(); diff --git a/superset-frontend/lerna.json b/superset-frontend/lerna.json index 3a16712db2..07bef7fcfd 100644 --- a/superset-frontend/lerna.json +++ b/superset-frontend/lerna.json @@ -1,7 +1,7 @@ { "lerna": "3.2.1", "npmClient": "npm", - "packages": ["packages/*", "plugins/*"], + "packages": ["packages/*", "plugins/*", "src/setup/*"], "useWorkspaces": true, "version": "0.18.25", "ignoreChanges": [ diff --git a/superset-frontend/package-lock.json b/superset-frontend/package-lock.json index 0a79bfe8de..c4e771b259 100644 --- a/superset-frontend/package-lock.json +++ b/superset-frontend/package-lock.json @@ -10,7 +10,8 @@ "license": "Apache-2.0", "workspaces": [ "packages/*", - "plugins/*" + "plugins/*", + "src/setup/*" ], "dependencies": { "@ant-design/icons": "^5.0.1", @@ -259,7 +260,7 @@ "less-loader": "^10.2.0", "mini-css-extract-plugin": "^2.7.6", "mock-socket": "^9.0.3", - "node-fetch": "^2.6.1", + "node-fetch": "^2.6.7", "prettier": "^2.4.1", "prettier-plugin-packagejson": "^2.2.15", "process": "^0.11.10", @@ -47771,9 +47772,9 @@ "dev": true }, "node_modules/nx/node_modules/axios": { - "version": "1.4.0", - "resolved": 
"https://registry.npmjs.org/axios/-/axios-1.4.0.tgz", - "integrity": "sha512-S4XCWMEmzvo64T9GfvQDOXgYRDJ/wsSZc7Jvdgx5u1sd0JwsuPLqb3SYmusag+edF6ziyMensPVqLTSc1PiSEA==", + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.6.1.tgz", + "integrity": "sha512-vfBmhDpKafglh0EldBEbVuoe7DyAavGSLWhuSm5ZSEKQnHhBf0xAAwybbNH1IkrJNGnS/VG4I5yxig1pCEXE4g==", "dev": true, "dependencies": { "follow-redirects": "^1.15.0", @@ -101307,9 +101308,9 @@ "dev": true }, "axios": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/axios/-/axios-1.4.0.tgz", - "integrity": "sha512-S4XCWMEmzvo64T9GfvQDOXgYRDJ/wsSZc7Jvdgx5u1sd0JwsuPLqb3SYmusag+edF6ziyMensPVqLTSc1PiSEA==", + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.6.1.tgz", + "integrity": "sha512-vfBmhDpKafglh0EldBEbVuoe7DyAavGSLWhuSm5ZSEKQnHhBf0xAAwybbNH1IkrJNGnS/VG4I5yxig1pCEXE4g==", "dev": true, "requires": { "follow-redirects": "^1.15.0", diff --git a/superset-frontend/package.json b/superset-frontend/package.json index a0f5bdd8c0..3b6310fa71 100644 --- a/superset-frontend/package.json +++ b/superset-frontend/package.json @@ -33,7 +33,8 @@ }, "workspaces": [ "packages/*", - "plugins/*" + "plugins/*", + "src/setup/*" ], "scripts": { "_lint": "eslint --ignore-path=.eslintignore --ext .js,.jsx,.ts,tsx .", @@ -324,7 +325,7 @@ "less-loader": "^10.2.0", "mini-css-extract-plugin": "^2.7.6", "mock-socket": "^9.0.3", - "node-fetch": "^2.6.1", + "node-fetch": "^2.6.7", "prettier": "^2.4.1", "prettier-plugin-packagejson": "^2.2.15", "process": "^0.11.10", diff --git a/superset-frontend/packages/superset-ui-core/src/chart/models/ChartMetadata.ts b/superset-frontend/packages/superset-ui-core/src/chart/models/ChartMetadata.ts index 34f373f0f4..dcb1de62a5 100644 --- a/superset-frontend/packages/superset-ui-core/src/chart/models/ChartMetadata.ts +++ b/superset-frontend/packages/superset-ui-core/src/chart/models/ChartMetadata.ts @@ -36,7 +36,6 @@ export interface ChartMetadataConfig { description?: string; datasourceCount?: number; enableNoResults?: boolean; - show?: boolean; supportedAnnotationTypes?: string[]; thumbnail: string; useLegacyApi?: boolean; @@ -64,8 +63,6 @@ export default class ChartMetadata { description: string; - show: boolean; - supportedAnnotationTypes: string[]; thumbnail: string; @@ -100,7 +97,6 @@ export default class ChartMetadata { canBeAnnotationTypes = [], credits = [], description = '', - show = true, supportedAnnotationTypes = [], thumbnail, useLegacyApi = false, @@ -120,7 +116,6 @@ export default class ChartMetadata { this.name = name; this.credits = credits; this.description = description; - this.show = show; this.canBeAnnotationTypes = canBeAnnotationTypes; this.canBeAnnotationTypesLookup = canBeAnnotationTypes.reduce( (prev: LookupTable, type: string) => { diff --git a/superset-frontend/packages/superset-ui-core/src/chart/types/Base.ts b/superset-frontend/packages/superset-ui-core/src/chart/types/Base.ts index 1c4d278f6c..b3884a8488 100644 --- a/superset-frontend/packages/superset-ui-core/src/chart/types/Base.ts +++ b/superset-frontend/packages/superset-ui-core/src/chart/types/Base.ts @@ -58,7 +58,6 @@ export enum AppSection { export type FilterState = { value?: any; [key: string]: any }; export type DataMask = { - __cache?: FilterState; extraFormData?: ExtraFormData; filterState?: FilterState; ownState?: JsonObject; diff --git a/superset-frontend/packages/superset-ui-core/src/color/colorSchemes/categorical/airbnb.ts 
b/superset-frontend/packages/superset-ui-core/src/color/colorSchemes/categorical/airbnb.ts index 462065b84f..a126f502a9 100644 --- a/superset-frontend/packages/superset-ui-core/src/color/colorSchemes/categorical/airbnb.ts +++ b/superset-frontend/packages/superset-ui-core/src/color/colorSchemes/categorical/airbnb.ts @@ -24,27 +24,19 @@ const schemes = [ id: 'bnbColors', label: 'Airbnb Colors', colors: [ - '#ff5a5f', // rausch - '#7b0051', // hackb - '#007A87', // kazan - '#00d1c1', // babu - '#8ce071', // lima - '#ffb400', // beach - '#b4a76c', // barol - '#ff8083', - '#cc0086', - '#00a1b3', - '#00ffeb', - '#bbedab', - '#ffd266', - '#cbc29a', - '#ff3339', - '#ff1ab1', - '#005c66', - '#00b3a5', - '#55d12e', - '#b37e00', - '#988b4e', + '#29696B', + '#5BCACE', + '#F4B02A', + '#F1826A', + '#792EB2', + '#C96EC6', + '#921E50', + '#B27700', + '#9C3498', + '#9C3498', + '#E4679D', + '#C32F0E', + '#9D63CA', ], }, ].map(s => new CategoricalScheme(s)); diff --git a/superset-frontend/packages/superset-ui-core/src/components/SafeMarkdown.tsx b/superset-frontend/packages/superset-ui-core/src/components/SafeMarkdown.tsx index b0826ce2ed..2b36802d4b 100644 --- a/superset-frontend/packages/superset-ui-core/src/components/SafeMarkdown.tsx +++ b/superset-frontend/packages/superset-ui-core/src/components/SafeMarkdown.tsx @@ -67,6 +67,7 @@ function SafeMarkdown({ rehypePlugins={rehypePlugins} remarkPlugins={[remarkGfm]} skipHtml={false} + transformLinkUri={null} > {source} </ReactMarkdown> diff --git a/superset-frontend/packages/superset-ui-core/src/ui-overrides/types.ts b/superset-frontend/packages/superset-ui-core/src/ui-overrides/types.ts index 0e7e0c9783..27646442de 100644 --- a/superset-frontend/packages/superset-ui-core/src/ui-overrides/types.ts +++ b/superset-frontend/packages/superset-ui-core/src/ui-overrides/types.ts @@ -127,6 +127,14 @@ export interface SQLResultTableExtentionProps { expandedColumns?: string[]; } +/** + * Interface for extensions to Slice Header + */ +export interface SliceHeaderExtension { + sliceId: number; + dashboardId: number; +} + export type Extensions = Partial<{ 'alertsreports.header.icon': React.ComponentType; 'embedded.documentation.configuration_details': React.ComponentType<ConfigDetailsProps>; @@ -147,4 +155,5 @@ export type Extensions = Partial<{ 'dataset.delete.related': React.ComponentType<DatasetDeleteRelatedExtensionProps>; 'sqleditor.extension.form': React.ComponentType<SQLFormExtensionProps>; 'sqleditor.extension.resultTable': React.ComponentType<SQLResultTableExtentionProps>; + 'dashboard.slice.header': React.ComponentType<SliceHeaderExtension>; }>; diff --git a/superset-frontend/packages/superset-ui-core/src/utils/html.test.tsx b/superset-frontend/packages/superset-ui-core/src/utils/html.test.tsx index 8fd06cb6f8..9b950e4246 100644 --- a/superset-frontend/packages/superset-ui-core/src/utils/html.test.tsx +++ b/superset-frontend/packages/superset-ui-core/src/utils/html.test.tsx @@ -44,6 +44,9 @@ describe('isProbablyHTML', () => { const plainText = 'Just a plain text'; const isHTML = isProbablyHTML(plainText); expect(isHTML).toBe(false); + + const trickyText = 'a <= 10 and b > 10'; + expect(isProbablyHTML(trickyText)).toBe(false); }); }); diff --git a/superset-frontend/packages/superset-ui-core/src/utils/html.tsx b/superset-frontend/packages/superset-ui-core/src/utils/html.tsx index 3215eb9b9d..fffd43bda8 100644 --- a/superset-frontend/packages/superset-ui-core/src/utils/html.tsx +++ b/superset-frontend/packages/superset-ui-core/src/utils/html.tsx @@ -28,7 +28,9 @@ 
export function sanitizeHtml(htmlString: string) { } export function isProbablyHTML(text: string) { - return /<[^>]+>/.test(text); + return Array.from( + new DOMParser().parseFromString(text, 'text/html').body.childNodes, + ).some(({ nodeType }) => nodeType === 1); } export function sanitizeHtmlIfNeeded(htmlString: string) { diff --git a/superset-frontend/packages/superset-ui-core/src/validator/index.ts b/superset-frontend/packages/superset-ui-core/src/validator/index.ts index 532efcc959..169675b682 100644 --- a/superset-frontend/packages/superset-ui-core/src/validator/index.ts +++ b/superset-frontend/packages/superset-ui-core/src/validator/index.ts @@ -22,3 +22,4 @@ export { default as legacyValidateNumber } from './legacyValidateNumber'; export { default as validateInteger } from './validateInteger'; export { default as validateNumber } from './validateNumber'; export { default as validateNonEmpty } from './validateNonEmpty'; +export { default as validateMapboxStylesUrl } from './validateMapboxStylesUrl'; diff --git a/superset-frontend/packages/superset-ui-core/src/validator/validateMapboxStylesUrl.ts b/superset-frontend/packages/superset-ui-core/src/validator/validateMapboxStylesUrl.ts new file mode 100644 index 0000000000..bfbbaa7168 --- /dev/null +++ b/superset-frontend/packages/superset-ui-core/src/validator/validateMapboxStylesUrl.ts @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { t } from '../translation'; + +/** + * Validate a [Mapbox styles URL](https://docs.mapbox.com/help/glossary/style-url/) + * @param v + */ +export default function validateMapboxStylesUrl(v: unknown) { + if ( + typeof v === 'string' && + v.trim().length > 0 && + v.trim().startsWith('mapbox://styles/') + ) { + return false; + } + + return t('is expected to be a Mapbox URL'); +} diff --git a/superset-frontend/packages/superset-ui-core/test/validator/validateMapboxStylesUrl.test.ts b/superset-frontend/packages/superset-ui-core/test/validator/validateMapboxStylesUrl.test.ts new file mode 100644 index 0000000000..dbd5822666 --- /dev/null +++ b/superset-frontend/packages/superset-ui-core/test/validator/validateMapboxStylesUrl.test.ts @@ -0,0 +1,47 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import { validateMapboxStylesUrl } from '@superset-ui/core'; +import './setup'; + +describe('validateMapboxStylesUrl', () => { + it('should validate mapbox style URLs', () => { + expect( + validateMapboxStylesUrl('mapbox://styles/mapbox/streets-v9'), + ).toEqual(false); + expect( + validateMapboxStylesUrl( + 'mapbox://styles/foobar/clp2dr5r4008a01pcg4ad45m8', + ), + ).toEqual(false); + }); + + [ + 123, + ['mapbox://styles/mapbox/streets-v9'], + { url: 'mapbox://styles/mapbox/streets-v9' }, + 'https://superset.apache.org/', + 'mapbox://tileset/mapbox/streets-v9', + ].forEach(value => { + it(`should not validate ${value}`, () => { + expect(validateMapboxStylesUrl(value)).toEqual( + 'is expected to be a Mapbox URL', + ); + }); + }); +}); diff --git a/superset-frontend/packages/superset-ui-demo/storybook/stories/plugins/legacy-plugin-chart-map-box/Stories.tsx b/superset-frontend/packages/superset-ui-demo/storybook/stories/plugins/legacy-plugin-chart-map-box/Stories.tsx index 6cdca623a1..dd95ffada5 100644 --- a/superset-frontend/packages/superset-ui-demo/storybook/stories/plugins/legacy-plugin-chart-map-box/Stories.tsx +++ b/superset-frontend/packages/superset-ui-demo/storybook/stories/plugins/legacy-plugin-chart-map-box/Stories.tsx @@ -42,7 +42,7 @@ export const Basic = () => { allColumnsY: 'LAT', clusteringRadius: '60', globalOpacity: 1, - mapboxColor: 'rgb(0, 122, 135)', + mapboxColor: 'rgb(244, 176, 42)', mapboxLabel: [], mapboxStyle: 'mapbox://styles/mapbox/light-v9', pandasAggfunc: 'sum', diff --git a/superset-frontend/plugins/legacy-plugin-chart-map-box/src/controlPanel.ts b/superset-frontend/plugins/legacy-plugin-chart-map-box/src/controlPanel.ts index 1dc75d96ef..e0b6524609 100644 --- a/superset-frontend/plugins/legacy-plugin-chart-map-box/src/controlPanel.ts +++ b/superset-frontend/plugins/legacy-plugin-chart-map-box/src/controlPanel.ts @@ -16,7 +16,12 @@ * specific language governing permissions and limitations * under the License. */ -import { FeatureFlag, isFeatureEnabled, t } from '@superset-ui/core'; +import { + FeatureFlag, + isFeatureEnabled, + t, + validateMapboxStylesUrl, +} from '@superset-ui/core'; import { columnChoices, ControlPanelConfig, @@ -224,6 +229,8 @@ const config: ControlPanelConfig = { label: t('Map Style'), clearable: false, renderTrigger: true, + freeForm: true, + validators: [validateMapboxStylesUrl], choices: [ ['mapbox://styles/mapbox/streets-v9', t('Streets')], ['mapbox://styles/mapbox/dark-v9', t('Dark')], @@ -236,7 +243,10 @@ const config: ControlPanelConfig = { ['mapbox://styles/mapbox/outdoors-v9', t('Outdoors')], ], default: 'mapbox://styles/mapbox/light-v9', - description: t('Base layer map style'), + description: t( + 'Base layer map style. 
See Mapbox documentation: %s', + 'https://docs.mapbox.com/help/glossary/style-url/', + ), }, }, ], diff --git a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/Multi/controlPanel.ts b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/Multi/controlPanel.ts index 8571fe23d0..8f4df671c3 100644 --- a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/Multi/controlPanel.ts +++ b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/Multi/controlPanel.ts @@ -27,7 +27,8 @@ export default { label: t('Map'), expanded: true, controlSetRows: [ - [mapboxStyle, viewport], + [mapboxStyle], + [viewport], [ { name: 'deck_slices', diff --git a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Arc/controlPanel.ts b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Arc/controlPanel.ts index 3794ef38da..664f389a0b 100644 --- a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Arc/controlPanel.ts +++ b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Arc/controlPanel.ts @@ -76,10 +76,7 @@ const config: ControlPanelConfig = { }, { label: t('Map'), - controlSetRows: [ - [mapboxStyle, viewport], - [autozoom, null], - ], + controlSetRows: [[mapboxStyle], [autozoom, viewport]], }, { label: t('Arc'), diff --git a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Contour/controlPanel.ts b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Contour/controlPanel.ts index 238029aada..407cab3162 100644 --- a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Contour/controlPanel.ts +++ b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Contour/controlPanel.ts @@ -52,8 +52,8 @@ const config: ControlPanelConfig = { label: t('Map'), expanded: true, controlSetRows: [ - [mapboxStyle, viewport], - [autozoom], + [mapboxStyle], + [autozoom, viewport], [ { name: 'cellSize', diff --git a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Grid/controlPanel.ts b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Grid/controlPanel.ts index 9b8e33d739..fa9a03a8f3 100644 --- a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Grid/controlPanel.ts +++ b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Grid/controlPanel.ts @@ -53,7 +53,8 @@ const config: ControlPanelConfig = { { label: t('Map'), controlSetRows: [ - [mapboxStyle, viewport], + [mapboxStyle], + [viewport], ['color_scheme'], [autozoom], [gridSize], diff --git a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Heatmap/controlPanel.ts b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Heatmap/controlPanel.ts index 6fa41c2e21..fd343eed16 100644 --- a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Heatmap/controlPanel.ts +++ b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Heatmap/controlPanel.ts @@ -99,7 +99,8 @@ const config: ControlPanelConfig = { { label: t('Map'), controlSetRows: [ - [mapboxStyle, viewport], + [mapboxStyle], + [viewport], ['linear_color_scheme'], [autozoom], [ diff --git a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Hex/controlPanel.ts b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Hex/controlPanel.ts index 2f9293c521..8865ed0052 100644 --- a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Hex/controlPanel.ts +++ b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Hex/controlPanel.ts @@ -53,8 +53,8 @@ const config: ControlPanelConfig = { { 
label: t('Map'), controlSetRows: [ - [mapboxStyle, viewport], - ['color_scheme'], + [mapboxStyle], + ['color_scheme', viewport], [autozoom], [gridSize], [extruded], diff --git a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Path/controlPanel.ts b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Path/controlPanel.ts index 80691efa6b..b0403b3596 100644 --- a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Path/controlPanel.ts +++ b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Path/controlPanel.ts @@ -67,7 +67,8 @@ const config: ControlPanelConfig = { label: t('Map'), expanded: true, controlSetRows: [ - [mapboxStyle, viewport], + [mapboxStyle], + [viewport], ['color_picker'], [lineWidth], [ diff --git a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Scatter/controlPanel.ts b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Scatter/controlPanel.ts index ef3d45a956..9afeb1b415 100644 --- a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Scatter/controlPanel.ts +++ b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Scatter/controlPanel.ts @@ -62,10 +62,7 @@ const config: ControlPanelConfig = { { label: t('Map'), expanded: true, - controlSetRows: [ - [mapboxStyle, viewport], - [autozoom, null], - ], + controlSetRows: [[mapboxStyle], [autozoom, viewport]], }, { label: t('Point Size'), diff --git a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Screengrid/controlPanel.ts b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Screengrid/controlPanel.ts index caf052581c..82aeda1745 100644 --- a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Screengrid/controlPanel.ts +++ b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Screengrid/controlPanel.ts @@ -52,10 +52,7 @@ const config: ControlPanelConfig = { }, { label: t('Map'), - controlSetRows: [ - [mapboxStyle, viewport], - [autozoom, null], - ], + controlSetRows: [[mapboxStyle], [autozoom, viewport]], }, { label: t('Grid'), diff --git a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/utilities/Shared_DeckGL.jsx b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/utilities/Shared_DeckGL.jsx index 9a123e91c3..5b307efd90 100644 --- a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/utilities/Shared_DeckGL.jsx +++ b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/utilities/Shared_DeckGL.jsx @@ -25,6 +25,7 @@ import { isFeatureEnabled, t, validateNonEmpty, + validateMapboxStylesUrl, } from '@superset-ui/core'; import { D3_FORMAT_OPTIONS, sharedControls } from '@superset-ui/chart-controls'; import { columnChoices, PRIMARY_COLOR } from './controls'; @@ -370,6 +371,8 @@ export const mapboxStyle = { label: t('Map Style'), clearable: false, renderTrigger: true, + freeForm: true, + validators: [validateMapboxStylesUrl], choices: [ ['mapbox://styles/mapbox/streets-v9', t('Streets')], ['mapbox://styles/mapbox/dark-v9', t('Dark')], @@ -379,7 +382,10 @@ export const mapboxStyle = { ['mapbox://styles/mapbox/outdoors-v9', t('Outdoors')], ], default: 'mapbox://styles/mapbox/light-v9', - description: t('Base layer map style'), + description: t( + 'Base layer map style. 
See Mapbox documentation: %s', + 'https://docs.mapbox.com/help/glossary/style-url/', + ), }, }; diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Bubble/constants.ts b/superset-frontend/plugins/plugin-chart-echarts/src/Bubble/constants.ts index 0f9bc0f305..89b03d5e90 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Bubble/constants.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Bubble/constants.ts @@ -17,6 +17,7 @@ * under the License. */ import { DEFAULT_LEGEND_FORM_DATA } from '../constants'; +import { defaultXAxis } from '../defaults'; import { EchartsBubbleFormData } from './types'; export const DEFAULT_FORM_DATA: Partial<EchartsBubbleFormData> = { @@ -26,9 +27,10 @@ export const DEFAULT_FORM_DATA: Partial<EchartsBubbleFormData> = { logYAxis: false, xAxisTitleMargin: 30, yAxisTitleMargin: 30, + truncateXAxis: false, truncateYAxis: false, yAxisBounds: [null, null], - xAxisLabelRotation: 0, + xAxisLabelRotation: defaultXAxis.xAxisLabelRotation, opacity: 0.6, }; diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Bubble/controlPanel.tsx b/superset-frontend/plugins/plugin-chart-echarts/src/Bubble/controlPanel.tsx index 53fba5de2b..521ae98130 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Bubble/controlPanel.tsx +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Bubble/controlPanel.tsx @@ -26,10 +26,15 @@ import { } from '@superset-ui/chart-controls'; import { DEFAULT_FORM_DATA } from './constants'; -import { legendSection } from '../controls'; +import { + legendSection, + truncateXAxis, + xAxisBounds, + xAxisLabelRotation, +} from '../controls'; +import { defaultYAxis } from '../defaults'; -const { logAxis, truncateYAxis, yAxisBounds, xAxisLabelRotation, opacity } = - DEFAULT_FORM_DATA; +const { logAxis, truncateYAxis, yAxisBounds, opacity } = DEFAULT_FORM_DATA; const config: ControlPanelConfig = { controlPanelSections: [ @@ -127,26 +132,7 @@ const config: ControlPanelConfig = { }, }, ], - [ - { - name: 'xAxisLabelRotation', - config: { - type: 'SelectControl', - freeForm: true, - clearable: false, - label: t('Rotate x axis label'), - choices: [ - [0, '0°'], - [45, '45°'], - ], - default: xAxisLabelRotation, - renderTrigger: true, - description: t( - 'Input field supports custom rotation. e.g. 30 for 30°', - ), - }, - }, - ], + [xAxisLabelRotation], [ { name: 'x_axis_title_margin', @@ -211,7 +197,7 @@ const config: ControlPanelConfig = { [0, '0°'], [45, '45°'], ], - default: xAxisLabelRotation, + default: defaultYAxis.yAxisLabelRotation, renderTrigger: true, description: t( 'Input field supports custom rotation. e.g. 
30 for 30°', @@ -246,6 +232,8 @@ const config: ControlPanelConfig = { }, }, ], + [truncateXAxis], + [xAxisBounds], [ { name: 'truncateYAxis', diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Bubble/transformProps.ts b/superset-frontend/plugins/plugin-chart-echarts/src/Bubble/transformProps.ts index 7962bc2c36..01d9ed3c53 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Bubble/transformProps.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Bubble/transformProps.ts @@ -28,9 +28,9 @@ import { import { EchartsBubbleChartProps, EchartsBubbleFormData } from './types'; import { DEFAULT_FORM_DATA, MINIMUM_BUBBLE_SIZE } from './constants'; import { defaultGrid } from '../defaults'; -import { getLegendProps } from '../utils/series'; +import { getLegendProps, getMinAndMaxFromBounds } from '../utils/series'; import { Refs } from '../types'; -import { parseYAxisBound } from '../utils/controls'; +import { parseAxisBound } from '../utils/controls'; import { getDefaultTooltip } from '../utils/tooltip'; import { getPadding } from '../Timeseries/transformers'; import { convertInteger } from '../utils/convertInteger'; @@ -84,6 +84,7 @@ export default function transformProps(chartProps: EchartsBubbleChartProps) { series: bubbleSeries, xAxisLabel: bubbleXAxisTitle, yAxisLabel: bubbleYAxisTitle, + xAxisBounds, xAxisFormat, yAxisFormat, yAxisBounds, @@ -91,6 +92,7 @@ export default function transformProps(chartProps: EchartsBubbleChartProps) { logYAxis, xAxisTitleMargin, yAxisTitleMargin, + truncateXAxis, truncateYAxis, xAxisLabelRotation, yAxisLabelRotation, @@ -104,7 +106,7 @@ export default function transformProps(chartProps: EchartsBubbleChartProps) { const colorFn = CategoricalColorNamespace.getScale(colorScheme as string); - const legends: string[] = []; + const legends = new Set<string>(); const series: ScatterSeriesOption[] = []; const xAxisLabel: string = getMetricLabel(x); @@ -114,9 +116,8 @@ export default function transformProps(chartProps: EchartsBubbleChartProps) { const refs: Refs = {}; data.forEach(datum => { - const name = - ((bubbleSeries ? datum[bubbleSeries] : datum[entity]) as string) || - NULL_STRING; + const dataName = bubbleSeries ? datum[bubbleSeries] : datum[entity]; + const name = dataName ? String(dataName) : NULL_STRING; const bubbleSeriesValue = bubbleSeries ? datum[bubbleSeries] : null; series.push({ @@ -133,7 +134,7 @@ export default function transformProps(chartProps: EchartsBubbleChartProps) { type: 'scatter', itemStyle: { color: colorFn(name), opacity }, }); - legends.push(name); + legends.add(name); }); normalizeSymbolSize(series, maxBubbleSize); @@ -142,7 +143,8 @@ export default function transformProps(chartProps: EchartsBubbleChartProps) { const yAxisFormatter = getNumberFormatter(yAxisFormat); const tooltipSizeFormatter = getNumberFormatter(tooltipSizeFormat); - const [min, max] = yAxisBounds.map(parseYAxisBound); + const [xAxisMin, xAxisMax] = xAxisBounds.map(parseAxisBound); + const [yAxisMin, yAxisMax] = yAxisBounds.map(parseAxisBound); const padding = getPadding( showLegend, @@ -156,6 +158,7 @@ export default function transformProps(chartProps: EchartsBubbleChartProps) { convertInteger(xAxisTitleMargin), ); + const xAxisType = logXAxis ? AxisType.log : AxisType.value; const echartOptions: EChartsCoreOption = { series, xAxis: { @@ -173,7 +176,8 @@ export default function transformProps(chartProps: EchartsBubbleChartProps) { fontWight: 'bolder', }, nameGap: convertInteger(xAxisTitleMargin), - type: logXAxis ? 
AxisType.log : AxisType.value, + type: xAxisType, + ...getMinAndMaxFromBounds(xAxisType, truncateXAxis, xAxisMin, xAxisMax), }, yAxis: { axisLabel: { formatter: yAxisFormatter }, @@ -190,13 +194,13 @@ export default function transformProps(chartProps: EchartsBubbleChartProps) { fontWight: 'bolder', }, nameGap: convertInteger(yAxisTitleMargin), - min, - max, + min: yAxisMin, + max: yAxisMax, type: logYAxis ? AxisType.log : AxisType.value, }, legend: { ...getLegendProps(legendType, legendOrientation, showLegend, theme), - data: legends, + data: Array.from(legends), }, tooltip: { show: !inContextMenu, diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/MixedTimeseries/controlPanel.tsx b/superset-frontend/plugins/plugin-chart-echarts/src/MixedTimeseries/controlPanel.tsx index c9f9027a3e..f54b3d01dc 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/MixedTimeseries/controlPanel.tsx +++ b/superset-frontend/plugins/plugin-chart-echarts/src/MixedTimeseries/controlPanel.tsx @@ -32,7 +32,11 @@ import { import { DEFAULT_FORM_DATA } from './types'; import { EchartsTimeseriesSeriesType } from '../Timeseries/types'; -import { legendSection, richTooltipSection } from '../controls'; +import { + legendSection, + richTooltipSection, + xAxisLabelRotation, +} from '../controls'; const { area, @@ -49,7 +53,6 @@ const { truncateYAxis, yAxisBounds, zoomable, - xAxisLabelRotation, yAxisIndex, } = DEFAULT_FORM_DATA; @@ -314,26 +317,7 @@ const config: ControlPanelConfig = { ...legendSection, [<ControlSubSectionHeader>{t('X Axis')}</ControlSubSectionHeader>], ['x_axis_time_format'], - [ - { - name: 'xAxisLabelRotation', - config: { - type: 'SelectControl', - freeForm: true, - clearable: false, - label: t('Rotate x axis label'), - choices: [ - [0, '0°'], - [45, '45°'], - ], - default: xAxisLabelRotation, - renderTrigger: true, - description: t( - 'Input field supports custom rotation. e.g. 
30 for 30°', - ), - }, - }, - ], + [xAxisLabelRotation], ...richTooltipSection, // eslint-disable-next-line react/jsx-key [<ControlSubSectionHeader>{t('Y Axis')}</ControlSubSectionHeader>], diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/MixedTimeseries/transformProps.ts b/superset-frontend/plugins/plugin-chart-echarts/src/MixedTimeseries/transformProps.ts index 47411e2477..8bc01582af 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/MixedTimeseries/transformProps.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/MixedTimeseries/transformProps.ts @@ -53,7 +53,7 @@ import { ForecastSeriesEnum, Refs, } from '../types'; -import { parseYAxisBound } from '../utils/controls'; +import { parseAxisBound } from '../utils/controls'; import { getOverMaxHiddenFormatter, dedupSeries, @@ -345,9 +345,9 @@ export default function transformProps( }); // yAxisBounds need to be parsed to replace incompatible values with undefined - let [min, max] = (yAxisBounds || []).map(parseYAxisBound); + let [min, max] = (yAxisBounds || []).map(parseAxisBound); let [minSecondary, maxSecondary] = (yAxisBoundsSecondary || []).map( - parseYAxisBound, + parseAxisBound, ); const array = ensureIsArray(chartProps.rawFormData?.time_compare); diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Area/controlPanel.tsx b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Area/controlPanel.tsx index 8515139548..1ca2805b2f 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Area/controlPanel.tsx +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Area/controlPanel.tsx @@ -37,6 +37,9 @@ import { richTooltipSection, seriesOrderSection, percentageThresholdControl, + xAxisLabelRotation, + truncateXAxis, + xAxisBounds, } from '../../controls'; import { AreaChartStackControlOptions } from '../../constants'; @@ -51,7 +54,6 @@ const { truncateYAxis, yAxisBounds, zoomable, - xAxisLabelRotation, } = DEFAULT_FORM_DATA; const config: ControlPanelConfig = { controlPanelSections: [ @@ -191,26 +193,7 @@ const config: ControlPanelConfig = { }, }, ], - [ - { - name: 'xAxisLabelRotation', - config: { - type: 'SelectControl', - freeForm: true, - clearable: false, - label: t('Rotate x axis label'), - choices: [ - [0, '0°'], - [45, '45°'], - ], - default: xAxisLabelRotation, - renderTrigger: true, - description: t( - 'Input field supports custom rotation. e.g. 
30 for 30°', - ), - }, - }, - ], + [xAxisLabelRotation], ...richTooltipSection, // eslint-disable-next-line react/jsx-key [<ControlSubSectionHeader>{t('Y Axis')}</ControlSubSectionHeader>], @@ -240,6 +223,8 @@ const config: ControlPanelConfig = { }, }, ], + [truncateXAxis], + [xAxisBounds], [ { name: 'truncateYAxis', diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Regular/Bar/controlPanel.tsx b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Regular/Bar/controlPanel.tsx index 47fe550ad7..1fecf64e6a 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Regular/Bar/controlPanel.tsx +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Regular/Bar/controlPanel.tsx @@ -35,6 +35,9 @@ import { richTooltipSection, seriesOrderSection, showValueSection, + truncateXAxis, + xAxisBounds, + xAxisLabelRotation, } from '../../../controls'; import { OrientationType } from '../../types'; @@ -49,7 +52,6 @@ const { truncateYAxis, yAxisBounds, zoomable, - xAxisLabelRotation, orientation, } = DEFAULT_FORM_DATA; @@ -163,21 +165,9 @@ function createAxisControl(axis: 'x' | 'y'): ControlSetRow[] { ], [ { - name: 'xAxisLabelRotation', + name: xAxisLabelRotation.name, config: { - type: 'SelectControl', - freeForm: true, - clearable: false, - label: t('Rotate axis label'), - choices: [ - [0, '0°'], - [45, '45°'], - ], - default: xAxisLabelRotation, - renderTrigger: true, - description: t( - 'Input field supports custom rotation. e.g. 30 for 30°', - ), + ...xAxisLabelRotation.config, visibility: ({ controls }: ControlPanelsContainerProps) => isXAxis ? isVertical(controls) : isHorizontal(controls), }, @@ -223,6 +213,8 @@ function createAxisControl(axis: 'x' | 'y'): ControlSetRow[] { }, }, ], + [truncateXAxis], + [xAxisBounds], [ { name: 'truncateYAxis', diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Regular/Line/controlPanel.tsx b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Regular/Line/controlPanel.tsx index 637a5fbc57..488dac0738 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Regular/Line/controlPanel.tsx +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Regular/Line/controlPanel.tsx @@ -38,6 +38,9 @@ import { richTooltipSection, seriesOrderSection, showValueSection, + truncateXAxis, + xAxisBounds, + xAxisLabelRotation, } from '../../../controls'; const { @@ -52,7 +55,6 @@ const { truncateYAxis, yAxisBounds, zoomable, - xAxisLabelRotation, } = DEFAULT_FORM_DATA; const config: ControlPanelConfig = { controlPanelSections: [ @@ -179,26 +181,7 @@ const config: ControlPanelConfig = { }, }, ], - [ - { - name: 'xAxisLabelRotation', - config: { - type: 'SelectControl', - freeForm: true, - clearable: false, - label: t('Rotate x axis label'), - choices: [ - [0, '0°'], - [45, '45°'], - ], - default: xAxisLabelRotation, - renderTrigger: true, - description: t( - 'Input field supports custom rotation. e.g. 
30 for 30°', - ), - }, - }, - ], + [xAxisLabelRotation], ...richTooltipSection, // eslint-disable-next-line react/jsx-key [<ControlSubSectionHeader>{t('Y Axis')}</ControlSubSectionHeader>], @@ -228,6 +211,8 @@ const config: ControlPanelConfig = { }, }, ], + [truncateXAxis], + [xAxisBounds], [ { name: 'truncateYAxis', diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Regular/Scatter/controlPanel.tsx b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Regular/Scatter/controlPanel.tsx index ffcee71792..436000c62d 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Regular/Scatter/controlPanel.tsx +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Regular/Scatter/controlPanel.tsx @@ -37,6 +37,9 @@ import { richTooltipSection, seriesOrderSection, showValueSection, + truncateXAxis, + xAxisBounds, + xAxisLabelRotation, } from '../../../controls'; const { @@ -48,7 +51,6 @@ const { truncateYAxis, yAxisBounds, zoomable, - xAxisLabelRotation, } = DEFAULT_FORM_DATA; const config: ControlPanelConfig = { controlPanelSections: [ @@ -122,26 +124,7 @@ const config: ControlPanelConfig = { }, }, ], - [ - { - name: 'xAxisLabelRotation', - config: { - type: 'SelectControl', - freeForm: true, - clearable: false, - label: t('Rotate x axis label'), - choices: [ - [0, '0°'], - [45, '45°'], - ], - default: xAxisLabelRotation, - renderTrigger: true, - description: t( - 'Input field supports custom rotation. e.g. 30 for 30°', - ), - }, - }, - ], + [xAxisLabelRotation], // eslint-disable-next-line react/jsx-key ...richTooltipSection, // eslint-disable-next-line react/jsx-key @@ -172,6 +155,8 @@ const config: ControlPanelConfig = { }, }, ], + [truncateXAxis], + [xAxisBounds], [ { name: 'truncateYAxis', diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Regular/SmoothLine/controlPanel.tsx b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Regular/SmoothLine/controlPanel.tsx index cb7164e0ab..2e05ab8202 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Regular/SmoothLine/controlPanel.tsx +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Regular/SmoothLine/controlPanel.tsx @@ -37,6 +37,9 @@ import { richTooltipSection, seriesOrderSection, showValueSectionWithoutStack, + truncateXAxis, + xAxisBounds, + xAxisLabelRotation, } from '../../../controls'; const { @@ -48,7 +51,6 @@ const { truncateYAxis, yAxisBounds, zoomable, - xAxisLabelRotation, } = DEFAULT_FORM_DATA; const config: ControlPanelConfig = { controlPanelSections: [ @@ -121,26 +123,7 @@ const config: ControlPanelConfig = { }, }, ], - [ - { - name: 'xAxisLabelRotation', - config: { - type: 'SelectControl', - freeForm: true, - clearable: false, - label: t('Rotate x axis label'), - choices: [ - [0, '0°'], - [45, '45°'], - ], - default: xAxisLabelRotation, - renderTrigger: true, - description: t( - 'Input field supports custom rotation. e.g. 
30 for 30°', - ), - }, - }, - ], + [xAxisLabelRotation], // eslint-disable-next-line react/jsx-key ...richTooltipSection, // eslint-disable-next-line react/jsx-key @@ -172,6 +155,8 @@ const config: ControlPanelConfig = { }, }, ], + [truncateXAxis], + [xAxisBounds], [ { name: 'truncateYAxis', diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Step/controlPanel.tsx b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Step/controlPanel.tsx index 1921e698c2..311b85cac0 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Step/controlPanel.tsx +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Step/controlPanel.tsx @@ -35,6 +35,9 @@ import { richTooltipSection, seriesOrderSection, showValueSection, + truncateXAxis, + xAxisBounds, + xAxisLabelRotation, } from '../../controls'; const { @@ -48,7 +51,6 @@ const { truncateYAxis, yAxisBounds, zoomable, - xAxisLabelRotation, } = DEFAULT_FORM_DATA; const config: ControlPanelConfig = { controlPanelSections: [ @@ -173,26 +175,7 @@ const config: ControlPanelConfig = { }, }, ], - [ - { - name: 'xAxisLabelRotation', - config: { - type: 'SelectControl', - freeForm: true, - clearable: false, - label: t('Rotate x axis label'), - choices: [ - [0, '0°'], - [45, '45°'], - ], - default: xAxisLabelRotation, - renderTrigger: true, - description: t( - 'Input field supports custom rotation. e.g. 30 for 30°', - ), - }, - }, - ], + [xAxisLabelRotation], ...richTooltipSection, // eslint-disable-next-line react/jsx-key [<ControlSubSectionHeader>{t('Y Axis')}</ControlSubSectionHeader>], @@ -222,6 +205,8 @@ const config: ControlPanelConfig = { }, }, ], + [truncateXAxis], + [xAxisBounds], [ { name: 'truncateYAxis', diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/constants.ts b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/constants.ts index 17629c0996..215996ab12 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/constants.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/constants.ts @@ -30,6 +30,7 @@ import { DEFAULT_LEGEND_FORM_DATA, DEFAULT_TITLE_FORM_DATA, } from '../constants'; +import { defaultXAxis } from '../defaults'; // @ts-ignore export const DEFAULT_FORM_DATA: EchartsTimeseriesFormData = { @@ -57,11 +58,12 @@ export const DEFAULT_FORM_DATA: EchartsTimeseriesFormData = { seriesType: EchartsTimeseriesSeriesType.Line, stack: false, tooltipTimeFormat: 'smart_date', + truncateXAxis: true, truncateYAxis: false, yAxisBounds: [null, null], zoomable: false, richTooltip: true, - xAxisLabelRotation: 0, + xAxisLabelRotation: defaultXAxis.xAxisLabelRotation, groupby: [], showValue: false, onlyTotal: false, diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/transformProps.ts b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/transformProps.ts index d44ae93580..e42ac183b6 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/transformProps.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/transformProps.ts @@ -20,9 +20,13 @@ import { invert } from 'lodash'; import { AnnotationLayer, + AxisType, + buildCustomFormatters, CategoricalColorNamespace, + CurrencyFormatter, ensureIsArray, GenericDataType, + getCustomFormatter, getMetricLabel, getNumberFormatter, getXAxisLabel, @@ -34,9 +38,6 @@ import { isTimeseriesAnnotationLayer, t, TimeseriesChartDataResponseResult, - buildCustomFormatters, - getCustomFormatter, - CurrencyFormatter, } from 
'@superset-ui/core'; import { extractExtraMetrics, @@ -48,12 +49,12 @@ import { ZRLineType } from 'echarts/types/src/util/types'; import { EchartsTimeseriesChartProps, EchartsTimeseriesFormData, - TimeseriesChartTransformedProps, OrientationType, + TimeseriesChartTransformedProps, } from './types'; import { DEFAULT_FORM_DATA } from './constants'; import { ForecastSeriesEnum, ForecastValue, Refs } from '../types'; -import { parseYAxisBound } from '../utils/controls'; +import { parseAxisBound } from '../utils/controls'; import { calculateLowerLogTick, dedupSeries, @@ -63,6 +64,7 @@ import { getAxisType, getColtypesMapping, getLegendProps, + getMinAndMaxFromBounds, } from '../utils/series'; import { extractAnnotationLabels, @@ -88,8 +90,8 @@ import { } from './transformers'; import { StackControlsValue, - TIMESERIES_CONSTANTS, TIMEGRAIN_TO_TIMESTAMP, + TIMESERIES_CONSTANTS, } from '../constants'; import { getDefaultTooltip } from '../utils/tooltip'; import { @@ -160,8 +162,10 @@ export default function transformProps( stack, tooltipTimeFormat, tooltipSortByMetric, + truncateXAxis, truncateYAxis, xAxis: xAxisOrig, + xAxisBounds, xAxisLabelRotation, xAxisSortSeries, xAxisSortSeriesAscending, @@ -387,15 +391,20 @@ export default function transformProps( } }); - // yAxisBounds need to be parsed to replace incompatible values with undefined - let [min, max] = (yAxisBounds || []).map(parseYAxisBound); + // axis bounds need to be parsed to replace incompatible values with undefined + const [xAxisMin, xAxisMax] = (xAxisBounds || []).map(parseAxisBound); + let [yAxisMin, yAxisMax] = (yAxisBounds || []).map(parseAxisBound); // default to 0-100% range when doing row-level contribution chart if ((contributionMode === 'row' || isAreaExpand) && stack) { - if (min === undefined) min = 0; - if (max === undefined) max = 1; - } else if (logAxis && min === undefined && minPositiveValue !== undefined) { - min = calculateLowerLogTick(minPositiveValue); + if (yAxisMin === undefined) yAxisMin = 0; + if (yAxisMax === undefined) yAxisMax = 1; + } else if ( + logAxis && + yAxisMin === undefined && + minPositiveValue !== undefined + ) { + yAxisMin = calculateLowerLogTick(minPositiveValue); } const tooltipFormatter = @@ -448,15 +457,17 @@ export default function transformProps( rotate: xAxisLabelRotation, }, minInterval: - xAxisType === 'time' && timeGrainSqla + xAxisType === AxisType.time && timeGrainSqla ? TIMEGRAIN_TO_TIMESTAMP[timeGrainSqla] : 0, + ...getMinAndMaxFromBounds(xAxisType, truncateXAxis, xAxisMin, xAxisMax), }; + let yAxis: any = { ...defaultYAxis, - type: logAxis ? 'log' : 'value', - min, - max, + type: logAxis ? 
AxisType.log : AxisType.value, + min: yAxisMin, + max: yAxisMax, minorTick: { show: true }, minorSplitLine: { show: minorSplitLine }, axisLabel: { diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/types.ts b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/types.ts index 1873086d99..65da981e49 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/types.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/types.ts @@ -75,10 +75,12 @@ export type EchartsTimeseriesFormData = QueryFormData & { stack: StackType; timeCompare?: string[]; tooltipTimeFormat?: string; + truncateXAxis: boolean; truncateYAxis: boolean; yAxisFormat?: string; xAxisTimeFormat?: string; timeGrainSqla?: TimeGranularity; + xAxisBounds: [number | undefined | null, number | undefined | null]; yAxisBounds: [number | undefined | null, number | undefined | null]; zoomable: boolean; richTooltip: boolean; diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Waterfall/buildQuery.ts b/superset-frontend/plugins/plugin-chart-echarts/src/Waterfall/buildQuery.ts index e47effb3c2..deb3571938 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Waterfall/buildQuery.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Waterfall/buildQuery.ts @@ -19,15 +19,14 @@ import { buildQueryContext, ensureIsArray, - getXAxisColumn, - isXAxisSet, QueryFormData, } from '@superset-ui/core'; export default function buildQuery(formData: QueryFormData) { + const { x_axis, granularity_sqla, groupby } = formData; const columns = [ - ...(isXAxisSet(formData) ? ensureIsArray(getXAxisColumn(formData)) : []), - ...ensureIsArray(formData.groupby), + ...ensureIsArray(x_axis || granularity_sqla), + ...ensureIsArray(groupby), ]; return buildQueryContext(formData, baseQueryObject => [ { diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Waterfall/controlPanel.tsx b/superset-frontend/plugins/plugin-chart-echarts/src/Waterfall/controlPanel.tsx index 7a71dd4fcb..d07e5175e6 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Waterfall/controlPanel.tsx +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Waterfall/controlPanel.tsx @@ -17,25 +17,27 @@ * under the License. */ import React from 'react'; -import { t } from '@superset-ui/core'; +import { hasGenericChartAxes, t } from '@superset-ui/core'; import { ControlPanelConfig, ControlSubSectionHeader, D3_TIME_FORMAT_DOCS, DEFAULT_TIME_FORMAT, formatSelectOptions, + sections, sharedControls, } from '@superset-ui/chart-controls'; import { showValueControl } from '../controls'; const config: ControlPanelConfig = { controlPanelSections: [ + sections.genericTime, { label: t('Query'), expanded: true, controlSetRows: [ - ['x_axis'], - ['time_grain_sqla'], + [hasGenericChartAxes ? 'x_axis' : null], + [hasGenericChartAxes ? 
'time_grain_sqla' : null], ['groupby'], ['metric'], ['adhoc_filters'], diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Waterfall/index.ts b/superset-frontend/plugins/plugin-chart-echarts/src/Waterfall/index.ts index c0d7a11067..b8c66fabb1 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Waterfall/index.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Waterfall/index.ts @@ -61,7 +61,7 @@ export default class EchartsWaterfallChartPlugin extends ChartPlugin< { url: example3 }, ], name: t('Waterfall Chart'), - tags: [t('Categorical'), t('Comparison'), t('ECharts')], + tags: [t('Categorical'), t('Comparison'), t('ECharts'), t('Popular')], thumbnail, }), transformProps, diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Waterfall/transformProps.ts b/superset-frontend/plugins/plugin-chart-echarts/src/Waterfall/transformProps.ts index 7b5faed1b2..84fbbf6cb9 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Waterfall/transformProps.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Waterfall/transformProps.ts @@ -185,6 +185,7 @@ export default function transformProps( const { setDataMask = () => {}, onContextMenu, onLegendStateChanged } = hooks; const { currencyFormat, + granularitySqla = '', groupby, increaseColor, decreaseColor, @@ -213,7 +214,10 @@ export default function transformProps( const breakdownName = isAdhocColumn(breakdownColumn) ? breakdownColumn.label! : breakdownColumn; - const xAxisName = isAdhocColumn(xAxis) ? xAxis.label! : xAxis; + const xAxisColumn = xAxis || granularitySqla; + const xAxisName = isAdhocColumn(xAxisColumn) + ? xAxisColumn.label! + : xAxisColumn; const metricLabel = getMetricLabel(metric); const transformedData = transformer({ diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/controls.tsx b/superset-frontend/plugins/plugin-chart-echarts/src/controls.tsx index 8f311e47e5..13fe754d99 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/controls.tsx +++ b/superset-frontend/plugins/plugin-chart-echarts/src/controls.tsx @@ -29,6 +29,7 @@ import { } from '@superset-ui/chart-controls'; import { DEFAULT_LEGEND_FORM_DATA, StackControlOptions } from './constants'; import { DEFAULT_FORM_DATA } from './Timeseries/constants'; +import { defaultXAxis } from './defaults'; const { legendMargin, legendOrientation, legendType, showLegend } = DEFAULT_LEGEND_FORM_DATA; @@ -243,8 +244,57 @@ const sortSeriesAscending: ControlSetItem = { }, }; +export const xAxisLabelRotation = { + name: 'xAxisLabelRotation', + config: { + type: 'SelectControl', + freeForm: true, + clearable: false, + label: t('Rotate x axis label'), + choices: [ + [0, '0°'], + [45, '45°'], + [90, '90°'], + ], + default: defaultXAxis.xAxisLabelRotation, + renderTrigger: true, + description: t('Input field supports custom rotation. e.g. 30 for 30°'), + }, +}; + export const seriesOrderSection: ControlSetRow[] = [ [<ControlSubSectionHeader>{t('Series Order')}</ControlSubSectionHeader>], [sortSeriesType], [sortSeriesAscending], ]; + +export const truncateXAxis: ControlSetItem = { + name: 'truncateXAxis', + config: { + type: 'CheckboxControl', + label: t('Truncate X Axis'), + default: DEFAULT_FORM_DATA.truncateXAxis, + renderTrigger: true, + description: t( + 'Truncate X Axis. Can be overridden by specifying a min or max bound. 
Only applicable for numerical X axis.',
+    ),
+  },
+};
+
+export const xAxisBounds: ControlSetItem = {
+  name: 'xAxisBounds',
+  config: {
+    type: 'BoundsControl',
+    label: t('X Axis Bounds'),
+    renderTrigger: true,
+    default: DEFAULT_FORM_DATA.xAxisBounds,
+    description: t(
+      'Bounds for numerical X axis. Not applicable for temporal or categorical axes. ' +
+        'When left empty, the bounds are dynamically defined based on the min/max of the data. ' +
+        "Note that this feature will only expand the axis range. It won't " +
+        "narrow the data's extent.",
+    ),
+    visibility: ({ controls }: ControlPanelsContainerProps) =>
+      Boolean(controls?.truncateXAxis?.value),
+  },
+};
diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/defaults.ts b/superset-frontend/plugins/plugin-chart-echarts/src/defaults.ts
index c5ada14932..be37d6fcbf 100644
--- a/superset-frontend/plugins/plugin-chart-echarts/src/defaults.ts
+++ b/superset-frontend/plugins/plugin-chart-echarts/src/defaults.ts
@@ -24,6 +24,11 @@ export const defaultGrid = {
 
 export const defaultYAxis = {
   scale: true,
+  yAxisLabelRotation: 0,
+};
+
+export const defaultXAxis = {
+  xAxisLabelRotation: 0,
 };
 
 export const defaultLegendPadding = {
diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/utils/controls.ts b/superset-frontend/plugins/plugin-chart-echarts/src/utils/controls.ts
index 27f8fb1447..67a5414112 100644
--- a/superset-frontend/plugins/plugin-chart-echarts/src/utils/controls.ts
+++ b/superset-frontend/plugins/plugin-chart-echarts/src/utils/controls.ts
@@ -20,7 +20,7 @@ import { validateNumber } from '@superset-ui/core';
 
 // eslint-disable-next-line import/prefer-default-export
-export function parseYAxisBound(
+export function parseAxisBound(
   bound?: string | number | null,
 ): number | undefined {
   if (bound === undefined || bound === null || Number.isNaN(Number(bound))) {
diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/utils/series.ts b/superset-frontend/plugins/plugin-chart-echarts/src/utils/series.ts
index 663548f25d..aa353f66d1 100644
--- a/superset-frontend/plugins/plugin-chart-echarts/src/utils/series.ts
+++ b/superset-frontend/plugins/plugin-chart-echarts/src/utils/series.ts
@@ -25,12 +25,12 @@ import {
   DTTM_ALIAS,
   ensureIsArray,
   GenericDataType,
+  LegendState,
+  normalizeTimestamp,
   NumberFormats,
   NumberFormatter,
-  TimeFormatter,
   SupersetTheme,
-  normalizeTimestamp,
-  LegendState,
+  TimeFormatter,
   ValueFormatter,
 } from '@superset-ui/core';
 import { SortSeriesType } from '@superset-ui/chart-controls';
@@ -512,6 +512,9 @@ export function getAxisType(dataType?: GenericDataType): AxisType {
   if (dataType === GenericDataType.TEMPORAL) {
     return AxisType.time;
   }
+  if (dataType === GenericDataType.NUMERIC) {
+    return AxisType.value;
+  }
   return AxisType.category;
 }
 
@@ -540,3 +543,17 @@ export function calculateLowerLogTick(minPositiveValue: number) {
   const logBase10 = Math.floor(Math.log10(minPositiveValue));
   return Math.pow(10, logBase10);
 }
+
+export function getMinAndMaxFromBounds(
+  axisType: AxisType,
+  truncateAxis: boolean,
+  min?: number,
+  max?: number,
+): { min: number | 'dataMin'; max: number | 'dataMax' } | {} {
+  return truncateAxis && axisType === AxisType.value
+    ? {
+        min: min === undefined ? 'dataMin' : min,
+        max: max === undefined ?
'dataMax' : max, + } + : {}; +} diff --git a/superset-frontend/plugins/plugin-chart-echarts/test/Bubble/transformProps.test.ts b/superset-frontend/plugins/plugin-chart-echarts/test/Bubble/transformProps.test.ts index 2bb4ae0fc6..1a92a43257 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/test/Bubble/transformProps.test.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/test/Bubble/transformProps.test.ts @@ -48,6 +48,7 @@ describe('Bubble transformProps', () => { expressionType: 'simple', label: 'SUM(sales)', }, + xAxisBounds: [null, null], yAxisBounds: [null, null], }; const chartProps = new ChartProps({ diff --git a/superset-frontend/plugins/plugin-chart-echarts/test/utils/controls.test.ts b/superset-frontend/plugins/plugin-chart-echarts/test/utils/controls.test.ts index 60ced57739..cb0faac595 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/test/utils/controls.test.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/test/utils/controls.test.ts @@ -16,22 +16,22 @@ * specific language governing permissions and limitations * under the License. */ -import { parseYAxisBound } from '../../src/utils/controls'; +import { parseAxisBound } from '../../src/utils/controls'; describe('parseYAxisBound', () => { it('should return undefined for invalid values', () => { - expect(parseYAxisBound(null)).toBeUndefined(); - expect(parseYAxisBound(undefined)).toBeUndefined(); - expect(parseYAxisBound(NaN)).toBeUndefined(); - expect(parseYAxisBound('abc')).toBeUndefined(); + expect(parseAxisBound(null)).toBeUndefined(); + expect(parseAxisBound(undefined)).toBeUndefined(); + expect(parseAxisBound(NaN)).toBeUndefined(); + expect(parseAxisBound('abc')).toBeUndefined(); }); it('should return numeric value for valid values', () => { - expect(parseYAxisBound(0)).toEqual(0); - expect(parseYAxisBound('0')).toEqual(0); - expect(parseYAxisBound(1)).toEqual(1); - expect(parseYAxisBound('1')).toEqual(1); - expect(parseYAxisBound(10.1)).toEqual(10.1); - expect(parseYAxisBound('10.1')).toEqual(10.1); + expect(parseAxisBound(0)).toEqual(0); + expect(parseAxisBound('0')).toEqual(0); + expect(parseAxisBound(1)).toEqual(1); + expect(parseAxisBound('1')).toEqual(1); + expect(parseAxisBound(10.1)).toEqual(10.1); + expect(parseAxisBound('10.1')).toEqual(10.1); }); }); diff --git a/superset-frontend/plugins/plugin-chart-echarts/test/utils/series.test.ts b/superset-frontend/plugins/plugin-chart-echarts/test/utils/series.test.ts index 75faee93e5..b445dceabb 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/test/utils/series.test.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/test/utils/series.test.ts @@ -18,6 +18,7 @@ */ import { SortSeriesType } from '@superset-ui/chart-controls'; import { + AxisType, DataRecord, GenericDataType, getNumberFormatter, @@ -31,9 +32,11 @@ import { extractSeries, extractShowValueIndexes, formatSeriesName, + getAxisType, getChartPadding, getLegendProps, getOverMaxHiddenFormatter, + getMinAndMaxFromBounds, sanitizeHtml, sortAndFilterSeries, sortRows, @@ -870,3 +873,37 @@ test('calculateLowerLogTick', () => { expect(calculateLowerLogTick(2)).toEqual(1); expect(calculateLowerLogTick(0.005)).toEqual(0.001); }); + +test('getAxisType', () => { + expect(getAxisType(GenericDataType.TEMPORAL)).toEqual(AxisType.time); + expect(getAxisType(GenericDataType.NUMERIC)).toEqual(AxisType.value); + expect(getAxisType(GenericDataType.BOOLEAN)).toEqual(AxisType.category); + expect(getAxisType(GenericDataType.STRING)).toEqual(AxisType.category); +}); + 
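+// Editorial sketch, not part of the original change: getMinAndMaxFromBounds
+// is expected to ignore bounds entirely on non-numeric axes, since the
+// truncation guard only matches AxisType.value.
+test('getMinAndMaxFromBounds ignores bounds on non-value axes', () => {
+  expect(getMinAndMaxFromBounds(AxisType.time, true, 10, 100)).toEqual({});
+  expect(getMinAndMaxFromBounds(AxisType.category, true, 10, 100)).toEqual({});
+});
+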
+test('getMinAndMaxFromBounds returns empty object when not truncating', () => { + expect(getMinAndMaxFromBounds(AxisType.value, false, 10, 100)).toEqual({}); +}); + +test('getMinAndMaxFromBounds returns automatic bounds when truncating', () => { + expect( + getMinAndMaxFromBounds(AxisType.value, true, undefined, undefined), + ).toEqual({ + min: 'dataMin', + max: 'dataMax', + }); +}); + +test('getMinAndMaxFromBounds returns automatic upper bound when truncating', () => { + expect(getMinAndMaxFromBounds(AxisType.value, true, 10, undefined)).toEqual({ + min: 10, + max: 'dataMax', + }); +}); + +test('getMinAndMaxFromBounds returns automatic lower bound when truncating', () => { + expect(getMinAndMaxFromBounds(AxisType.value, true, undefined, 100)).toEqual({ + min: 'dataMin', + max: 100, + }); +}); diff --git a/superset-frontend/src/SqlLab/actions/sqlLab.js b/superset-frontend/src/SqlLab/actions/sqlLab.js index 44b4307a19..567d3383d7 100644 --- a/superset-frontend/src/SqlLab/actions/sqlLab.js +++ b/superset-frontend/src/SqlLab/actions/sqlLab.js @@ -99,6 +99,8 @@ export const CREATE_DATASOURCE_STARTED = 'CREATE_DATASOURCE_STARTED'; export const CREATE_DATASOURCE_SUCCESS = 'CREATE_DATASOURCE_SUCCESS'; export const CREATE_DATASOURCE_FAILED = 'CREATE_DATASOURCE_FAILED'; +export const SET_EDITOR_TAB_LAST_UPDATE = 'SET_EDITOR_TAB_LAST_UPDATE'; + export const addInfoToast = addInfoToastAction; export const addSuccessToast = addSuccessToastAction; export const addDangerToast = addDangerToastAction; @@ -160,6 +162,10 @@ export function updateQueryEditor(alterations) { return { type: UPDATE_QUERY_EDITOR, alterations }; } +export function setEditorTabLastUpdate(timestamp) { + return { type: SET_EDITOR_TAB_LAST_UPDATE, timestamp }; +} + export function scheduleQuery(query) { return dispatch => SupersetClient.post({ @@ -237,44 +243,11 @@ export function startQuery(query) { } export function querySuccess(query, results) { - return function (dispatch) { - const sqlEditorId = results?.query?.sqlEditorId; - const sync = - sqlEditorId && - !query.isDataPreview && - isFeatureEnabled(FeatureFlag.SQLLAB_BACKEND_PERSISTENCE) - ? SupersetClient.put({ - endpoint: encodeURI(`/tabstateview/${sqlEditorId}`), - postPayload: { latest_query_id: query.id }, - }) - : Promise.resolve(); - - return sync - .then(() => dispatch({ type: QUERY_SUCCESS, query, results })) - .catch(() => - dispatch( - addDangerToast( - t( - 'An error occurred while storing the latest query id in the backend. ' + - 'Please contact your administrator if this problem persists.', - ), - ), - ), - ); - }; + return { type: QUERY_SUCCESS, query, results }; } export function queryFailed(query, msg, link, errors) { return function (dispatch) { - const sync = - !query.isDataPreview && - isFeatureEnabled(FeatureFlag.SQLLAB_BACKEND_PERSISTENCE) - ? SupersetClient.put({ - endpoint: encodeURI(`/tabstateview/${query.sqlEditorId}`), - postPayload: { latest_query_id: query.id }, - }) - : Promise.resolve(); - const eventData = { has_err: true, start_offset: query.startDttm, @@ -295,22 +268,7 @@ export function queryFailed(query, msg, link, errors) { }); }); - return ( - sync - .catch(() => - dispatch( - addDangerToast( - t( - 'An error occurred while storing the latest query id in the backend. 
' + - 'Please contact your administrator if this problem persists.', - ), - ), - ), - ) - // We should always show the error message, even if we couldn't sync the - // state to the backend - .then(() => dispatch({ type: QUERY_FAILED, query, msg, link, errors })) - ); + dispatch({ type: QUERY_FAILED, query, msg, link, errors }); }; } @@ -557,14 +515,15 @@ export function addQueryEditor(queryEditor) { ? SupersetClient.post({ endpoint: '/tabstateview/', postPayload: { queryEditor }, - }) - : Promise.resolve({ json: { id: shortid.generate() } }); + }).then(({ json }) => ({ ...json, loaded: true })) + : Promise.resolve({ id: shortid.generate() }); return sync - .then(({ json }) => { + .then(({ id, loaded }) => { const newQueryEditor = { ...queryEditor, - id: json.id.toString(), + id: id.toString(), + loaded, }; return dispatch({ type: ADD_QUERY_EDITOR, @@ -736,11 +695,6 @@ export function switchQueryEditor(queryEditor, displayLimit) { schema: json.schema, queryLimit: json.query_limit, remoteId: json.saved_query?.id, - validationResult: { - id: null, - errors: [], - completed: false, - }, hideLeftBar: json.hide_left_bar, }; dispatch(loadQueryEditor(loadedQueryEditor)); @@ -770,31 +724,10 @@ export function setActiveSouthPaneTab(tabId) { export function toggleLeftBar(queryEditor) { const hideLeftBar = !queryEditor.hideLeftBar; - return function (dispatch) { - const sync = isFeatureEnabled(FeatureFlag.SQLLAB_BACKEND_PERSISTENCE) - ? SupersetClient.put({ - endpoint: encodeURI(`/tabstateview/${queryEditor.id}`), - postPayload: { hide_left_bar: hideLeftBar }, - }) - : Promise.resolve(); - - return sync - .then(() => - dispatch({ - type: QUERY_EDITOR_TOGGLE_LEFT_BAR, - queryEditor, - hideLeftBar, - }), - ) - .catch(() => - dispatch( - addDangerToast( - t( - 'An error occurred while hiding the left bar. Please contact your administrator.', - ), - ), - ), - ); + return { + type: QUERY_EDITOR_TOGGLE_LEFT_BAR, + queryEditor, + hideLeftBar, }; } @@ -856,110 +789,26 @@ export function removeQuery(query) { } export function queryEditorSetDb(queryEditor, dbId) { - return function (dispatch) { - const sync = isFeatureEnabled(FeatureFlag.SQLLAB_BACKEND_PERSISTENCE) - ? SupersetClient.put({ - endpoint: encodeURI(`/tabstateview/${queryEditor.id}`), - postPayload: { database_id: dbId }, - }) - : Promise.resolve(); - - return sync - .then(() => dispatch({ type: QUERY_EDITOR_SETDB, queryEditor, dbId })) - .catch(() => - dispatch( - addDangerToast( - t( - 'An error occurred while setting the tab database ID. Please contact your administrator.', - ), - ), - ), - ); - }; + return { type: QUERY_EDITOR_SETDB, queryEditor, dbId }; } export function queryEditorSetSchema(queryEditor, schema) { - return function (dispatch) { - const sync = - isFeatureEnabled(FeatureFlag.SQLLAB_BACKEND_PERSISTENCE) && - typeof queryEditor === 'object' - ? SupersetClient.put({ - endpoint: encodeURI(`/tabstateview/${queryEditor.id}`), - postPayload: { schema }, - }) - : Promise.resolve(); - - return sync - .then(() => - dispatch({ - type: QUERY_EDITOR_SET_SCHEMA, - queryEditor: queryEditor || {}, - schema, - }), - ) - .catch(() => - dispatch( - addDangerToast( - t( - 'An error occurred while setting the tab schema. Please contact your administrator.', - ), - ), - ), - ); + return { + type: QUERY_EDITOR_SET_SCHEMA, + queryEditor: queryEditor || {}, + schema, }; } export function queryEditorSetAutorun(queryEditor, autorun) { - return function (dispatch) { - const sync = isFeatureEnabled(FeatureFlag.SQLLAB_BACKEND_PERSISTENCE) - ? 
SupersetClient.put({ - endpoint: encodeURI(`/tabstateview/${queryEditor.id}`), - postPayload: { autorun }, - }) - : Promise.resolve(); - - return sync - .then(() => - dispatch({ type: QUERY_EDITOR_SET_AUTORUN, queryEditor, autorun }), - ) - .catch(() => - dispatch( - addDangerToast( - t( - 'An error occurred while setting the tab autorun. Please contact your administrator.', - ), - ), - ), - ); - }; + return { type: QUERY_EDITOR_SET_AUTORUN, queryEditor, autorun }; } export function queryEditorSetTitle(queryEditor, name, id) { - return function (dispatch) { - const sync = isFeatureEnabled(FeatureFlag.SQLLAB_BACKEND_PERSISTENCE) - ? SupersetClient.put({ - endpoint: encodeURI(`/tabstateview/${id}`), - postPayload: { label: name }, - }) - : Promise.resolve(); - - return sync - .then(() => - dispatch({ - type: QUERY_EDITOR_SET_TITLE, - queryEditor: { ...queryEditor, id }, - name, - }), - ) - .catch(() => - dispatch( - addDangerToast( - t( - 'An error occurred while setting the tab name. Please contact your administrator.', - ), - ), - ), - ); + return { + type: QUERY_EDITOR_SET_TITLE, + queryEditor: { ...queryEditor, id }, + name, }; } @@ -1029,32 +878,19 @@ export function updateSavedQuery(query, clientId) { .then(() => dispatch(updateQueryEditor(query))); } -export function queryEditorSetSql(queryEditor, sql) { - return { type: QUERY_EDITOR_SET_SQL, queryEditor, sql }; +export function queryEditorSetSql(queryEditor, sql, queryId) { + return { type: QUERY_EDITOR_SET_SQL, queryEditor, sql, queryId }; } -export function formatQuery(queryEditor) { - return function (dispatch, getState) { - const { sql } = getUpToDateQuery(getState(), queryEditor); - return SupersetClient.post({ - endpoint: `/api/v1/sqllab/format_sql/`, - body: JSON.stringify({ sql }), - headers: { 'Content-Type': 'application/json' }, - }).then(({ json }) => { - dispatch(queryEditorSetSql(queryEditor, json.result)); - }); - }; -} - -export function queryEditorSetAndSaveSql(targetQueryEditor, sql) { +export function queryEditorSetAndSaveSql(targetQueryEditor, sql, queryId) { return function (dispatch, getState) { const queryEditor = getUpToDateQuery(getState(), targetQueryEditor); // saved query and set tab state use this action - dispatch(queryEditorSetSql(queryEditor, sql)); + dispatch(queryEditorSetSql(queryEditor, sql, queryId)); if (isFeatureEnabled(FeatureFlag.SQLLAB_BACKEND_PERSISTENCE)) { return SupersetClient.put({ endpoint: encodeURI(`/tabstateview/${queryEditor.id}`), - postPayload: { sql, latest_query_id: queryEditor.latestQueryId }, + postPayload: { sql, latest_query_id: queryId }, }).catch(() => dispatch( addDangerToast( @@ -1071,59 +907,32 @@ export function queryEditorSetAndSaveSql(targetQueryEditor, sql) { }; } -export function queryEditorSetQueryLimit(queryEditor, queryLimit) { - return function (dispatch) { - const sync = isFeatureEnabled(FeatureFlag.SQLLAB_BACKEND_PERSISTENCE) - ? 
SupersetClient.put({ - endpoint: encodeURI(`/tabstateview/${queryEditor.id}`), - postPayload: { query_limit: queryLimit }, - }) - : Promise.resolve(); +export function formatQuery(queryEditor) { + return function (dispatch, getState) { + const { sql } = getUpToDateQuery(getState(), queryEditor); + return SupersetClient.post({ + endpoint: `/api/v1/sqllab/format_sql/`, + body: JSON.stringify({ sql }), + headers: { 'Content-Type': 'application/json' }, + }).then(({ json }) => { + dispatch(queryEditorSetSql(queryEditor, json.result)); + }); + }; +} - return sync - .then(() => - dispatch({ - type: QUERY_EDITOR_SET_QUERY_LIMIT, - queryEditor, - queryLimit, - }), - ) - .catch(() => - dispatch( - addDangerToast( - t( - 'An error occurred while setting the tab name. Please contact your administrator.', - ), - ), - ), - ); +export function queryEditorSetQueryLimit(queryEditor, queryLimit) { + return { + type: QUERY_EDITOR_SET_QUERY_LIMIT, + queryEditor, + queryLimit, }; } export function queryEditorSetTemplateParams(queryEditor, templateParams) { - return function (dispatch) { - dispatch({ - type: QUERY_EDITOR_SET_TEMPLATE_PARAMS, - queryEditor, - templateParams, - }); - const sync = isFeatureEnabled(FeatureFlag.SQLLAB_BACKEND_PERSISTENCE) - ? SupersetClient.put({ - endpoint: encodeURI(`/tabstateview/${queryEditor.id}`), - postPayload: { template_params: templateParams }, - }) - : Promise.resolve(); - - return sync.catch(() => - dispatch( - addDangerToast( - t( - 'An error occurred while setting the tab template parameters. ' + - 'Please contact your administrator.', - ), - ), - ), - ); + return { + type: QUERY_EDITOR_SET_TEMPLATE_PARAMS, + queryEditor, + templateParams, }; } diff --git a/superset-frontend/src/SqlLab/actions/sqlLab.test.js b/superset-frontend/src/SqlLab/actions/sqlLab.test.js index dbf4e8a5c5..175ea06ec3 100644 --- a/superset-frontend/src/SqlLab/actions/sqlLab.test.js +++ b/superset-frontend/src/SqlLab/actions/sqlLab.test.js @@ -32,7 +32,6 @@ import { initialState, queryId, } from 'src/SqlLab/fixtures'; -import { QueryState } from '@superset-ui/core'; const middlewares = [thunk]; const mockStore = configureMockStore(middlewares); @@ -531,88 +530,6 @@ describe('async actions', () => { afterEach(fetchMock.resetHistory); - describe('querySuccess', () => { - it('updates the tab state in the backend', () => { - expect.assertions(2); - - const store = mockStore({}); - const results = { query: { sqlEditorId: 'abcd' } }; - const expectedActions = [ - { - type: actions.QUERY_SUCCESS, - query, - results, - }, - ]; - return store.dispatch(actions.querySuccess(query, results)).then(() => { - expect(store.getActions()).toEqual(expectedActions); - expect(fetchMock.calls(updateTabStateEndpoint)).toHaveLength(1); - }); - }); - }); - - describe('fetchQueryResults', () => { - it('updates the tab state in the backend', () => { - expect.assertions(2); - - const results = { - data: mockBigNumber, - query: { sqlEditorId: 'abcd' }, - status: QueryState.SUCCESS, - query_id: 'efgh', - }; - fetchMock.get(fetchQueryEndpoint, JSON.stringify(results), { - overwriteRoutes: true, - }); - const store = mockStore({}); - const expectedActions = [ - { - type: actions.REQUEST_QUERY_RESULTS, - query, - }, - // missing below - { - type: actions.QUERY_SUCCESS, - query, - results, - }, - ]; - return store.dispatch(actions.fetchQueryResults(query)).then(() => { - expect(store.getActions()).toEqual(expectedActions); - expect(fetchMock.calls(updateTabStateEndpoint)).toHaveLength(1); - }); - }); - - it("doesn't update the tab 
state in the backend on stoppped query", () => { - expect.assertions(2); - - const results = { - status: QueryState.STOPPED, - query_id: 'efgh', - }; - fetchMock.get(fetchQueryEndpoint, JSON.stringify(results), { - overwriteRoutes: true, - }); - const store = mockStore({}); - const expectedActions = [ - { - type: actions.REQUEST_QUERY_RESULTS, - query, - }, - // missing below - { - type: actions.QUERY_SUCCESS, - query, - results, - }, - ]; - return store.dispatch(actions.fetchQueryResults(query)).then(() => { - expect(store.getActions()).toEqual(expectedActions); - expect(fetchMock.calls(updateTabStateEndpoint)).toHaveLength(0); - }); - }); - }); - describe('addQueryEditor', () => { it('updates the tab state in the backend', () => { expect.assertions(2); @@ -621,7 +538,7 @@ describe('async actions', () => { const expectedActions = [ { type: actions.ADD_QUERY_EDITOR, - queryEditor: { ...queryEditor, id: '1' }, + queryEditor: { ...queryEditor, id: '1', loaded: true }, }, ]; return store.dispatch(actions.addQueryEditor(queryEditor)).then(() => { @@ -673,7 +590,7 @@ describe('async actions', () => { describe('queryEditorSetDb', () => { it('updates the tab state in the backend', () => { - expect.assertions(2); + expect.assertions(1); const dbId = 42; const store = mockStore({}); @@ -684,18 +601,14 @@ describe('async actions', () => { dbId, }, ]; - return store - .dispatch(actions.queryEditorSetDb(queryEditor, dbId)) - .then(() => { - expect(store.getActions()).toEqual(expectedActions); - expect(fetchMock.calls(updateTabStateEndpoint)).toHaveLength(1); - }); + store.dispatch(actions.queryEditorSetDb(queryEditor, dbId)); + expect(store.getActions()).toEqual(expectedActions); }); }); describe('queryEditorSetSchema', () => { it('updates the tab state in the backend', () => { - expect.assertions(2); + expect.assertions(1); const schema = 'schema'; const store = mockStore({}); @@ -706,18 +619,14 @@ describe('async actions', () => { schema, }, ]; - return store - .dispatch(actions.queryEditorSetSchema(queryEditor, schema)) - .then(() => { - expect(store.getActions()).toEqual(expectedActions); - expect(fetchMock.calls(updateTabStateEndpoint)).toHaveLength(1); - }); + store.dispatch(actions.queryEditorSetSchema(queryEditor, schema)); + expect(store.getActions()).toEqual(expectedActions); }); }); describe('queryEditorSetAutorun', () => { it('updates the tab state in the backend', () => { - expect.assertions(2); + expect.assertions(1); const autorun = true; const store = mockStore({}); @@ -728,18 +637,14 @@ describe('async actions', () => { autorun, }, ]; - return store - .dispatch(actions.queryEditorSetAutorun(queryEditor, autorun)) - .then(() => { - expect(store.getActions()).toEqual(expectedActions); - expect(fetchMock.calls(updateTabStateEndpoint)).toHaveLength(1); - }); + store.dispatch(actions.queryEditorSetAutorun(queryEditor, autorun)); + expect(store.getActions()).toEqual(expectedActions); }); }); describe('queryEditorSetTitle', () => { it('updates the tab state in the backend', () => { - expect.assertions(2); + expect.assertions(1); const name = 'name'; const store = mockStore({}); @@ -750,14 +655,10 @@ describe('async actions', () => { name, }, ]; - return store - .dispatch( - actions.queryEditorSetTitle(queryEditor, name, queryEditor.id), - ) - .then(() => { - expect(store.getActions()).toEqual(expectedActions); - expect(fetchMock.calls(updateTabStateEndpoint)).toHaveLength(1); - }); + store.dispatch( + actions.queryEditorSetTitle(queryEditor, name, queryEditor.id), + ); + 
expect(store.getActions()).toEqual(expectedActions); }); }); @@ -803,7 +704,7 @@ describe('async actions', () => { describe('queryEditorSetQueryLimit', () => { it('updates the tab state in the backend', () => { - expect.assertions(2); + expect.assertions(1); const queryLimit = 10; const store = mockStore({}); @@ -814,18 +715,16 @@ queryLimit, }, ]; - return store - .dispatch(actions.queryEditorSetQueryLimit(queryEditor, queryLimit)) - .then(() => { - expect(store.getActions()).toEqual(expectedActions); - expect(fetchMock.calls(updateTabStateEndpoint)).toHaveLength(1); - }); + store.dispatch( + actions.queryEditorSetQueryLimit(queryEditor, queryLimit), + ); + expect(store.getActions()).toEqual(expectedActions); }); }); describe('queryEditorSetTemplateParams', () => { it('updates the tab state in the backend', () => { - expect.assertions(2); + expect.assertions(1); const templateParams = '{"foo": "bar"}'; const store = mockStore({}); @@ -836,14 +735,11 @@ templateParams, }, ]; - return store - .dispatch( - actions.queryEditorSetTemplateParams(queryEditor, templateParams), - ) - .then(() => { - expect(store.getActions()).toEqual(expectedActions); - expect(fetchMock.calls(updateTabStateEndpoint)).toHaveLength(1); - }); + store.dispatch( + actions.queryEditorSetTemplateParams(queryEditor, templateParams), + ); + + expect(store.getActions()).toEqual(expectedActions); }); }); diff --git a/superset-frontend/src/SqlLab/components/EditorAutoSync/EditorAutoSync.test.tsx b/superset-frontend/src/SqlLab/components/EditorAutoSync/EditorAutoSync.test.tsx new file mode 100644 index 0000000000..52e1d44b24 --- /dev/null +++ b/superset-frontend/src/SqlLab/components/EditorAutoSync/EditorAutoSync.test.tsx @@ -0,0 +1,137 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License.
+ */ +import React from 'react'; +import fetchMock from 'fetch-mock'; +import { render, waitFor } from 'spec/helpers/testing-library'; +import ToastContainer from 'src/components/MessageToasts/ToastContainer'; +import { initialState, defaultQueryEditor } from 'src/SqlLab/fixtures'; +import { logging } from '@superset-ui/core'; +import EditorAutoSync from '.'; + +jest.mock('@superset-ui/core', () => ({ + ...jest.requireActual('@superset-ui/core'), + logging: { + warn: jest.fn(), + }, +})); + +const editorTabLastUpdatedAt = Date.now(); +const unsavedSqlLabState = { + ...initialState.sqlLab, + unsavedQueryEditor: { + id: defaultQueryEditor.id, + name: 'updated tab name', + updatedAt: editorTabLastUpdatedAt + 100, + }, + editorTabLastUpdatedAt, +}; +beforeAll(() => { + jest.useFakeTimers(); +}); + +afterAll(() => { + jest.useRealTimers(); +}); + +test('sync the unsaved editor tab state when there are new changes since the last update', async () => { + const updateEditorTabState = `glob:*/tabstateview/${defaultQueryEditor.id}`; + fetchMock.put(updateEditorTabState, 200); + expect(fetchMock.calls(updateEditorTabState)).toHaveLength(0); + render(<EditorAutoSync />, { + useRedux: true, + initialState: { + ...initialState, + sqlLab: unsavedSqlLabState, + }, + }); + await waitFor(() => jest.runAllTimers()); + expect(fetchMock.calls(updateEditorTabState)).toHaveLength(1); + fetchMock.restore(); +}); + +test('skip syncing the unsaved editor tab state when the updates are already synced', async () => { + const updateEditorTabState = `glob:*/tabstateview/${defaultQueryEditor.id}`; + fetchMock.put(updateEditorTabState, 200); + expect(fetchMock.calls(updateEditorTabState)).toHaveLength(0); + render(<EditorAutoSync />, { + useRedux: true, + initialState: { + ...initialState, + sqlLab: { + ...initialState.sqlLab, + unsavedQueryEditor: { + id: defaultQueryEditor.id, + name: 'updated tab name', + updatedAt: editorTabLastUpdatedAt - 100, + }, + editorTabLastUpdatedAt, + }, + }, + }); + await waitFor(() => jest.runAllTimers()); + expect(fetchMock.calls(updateEditorTabState)).toHaveLength(0); + fetchMock.restore(); +}); + +test('renders an error toast when the sync failed', async () => { + const updateEditorTabState = `glob:*/tabstateview/${defaultQueryEditor.id}`; + fetchMock.put(updateEditorTabState, { + throws: new Error('errorMessage'), + }); + expect(fetchMock.calls(updateEditorTabState)).toHaveLength(0); + render( + <> + <EditorAutoSync /> + <ToastContainer /> + </>, + { + useRedux: true, + initialState: { + ...initialState, + sqlLab: unsavedSqlLabState, + }, + }, + ); + await waitFor(() => jest.runAllTimers()); + + expect(logging.warn).toHaveBeenCalledTimes(1); + expect(logging.warn).toHaveBeenCalledWith( + 'An error occurred while saving your editor state.', + expect.anything(), + ); + fetchMock.restore(); +}); diff --git a/superset-frontend/src/SqlLab/components/EditorAutoSync/index.tsx b/superset-frontend/src/SqlLab/components/EditorAutoSync/index.tsx new file mode 100644 index 0000000000..51399753e9 --- /dev/null +++ b/superset-frontend/src/SqlLab/components/EditorAutoSync/index.tsx @@ -0,0 +1,106 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import React, { useRef, useEffect } from 'react'; +import { useDispatch, useSelector } from 'react-redux'; +import { logging } from '@superset-ui/core'; +import { + SqlLabRootState, + QueryEditor, + UnsavedQueryEditor, +} from 'src/SqlLab/types'; +import { useUpdateSqlEditorTabMutation } from 'src/hooks/apiResources/sqlEditorTabs'; +import { useDebounceValue } from 'src/hooks/useDebounceValue'; +import { setEditorTabLastUpdate } from 'src/SqlLab/actions/sqlLab'; + +const INTERVAL = 5000; + +function hasUnsavedChanges( + queryEditor: QueryEditor, + lastSavedTimestamp: number, +) { + return ( + queryEditor.inLocalStorage || + (queryEditor.updatedAt && queryEditor.updatedAt > lastSavedTimestamp) + ); +} + +export function filterUnsavedQueryEditorList( + queryEditors: QueryEditor[], + unsavedQueryEditor: UnsavedQueryEditor, + lastSavedTimestamp: number, +) { + return queryEditors + .map(queryEditor => ({ + ...queryEditor, + ...(unsavedQueryEditor.id === queryEditor.id && unsavedQueryEditor), + })) + .filter(queryEditor => hasUnsavedChanges(queryEditor, lastSavedTimestamp)); +} + +const EditorAutoSync: React.FC = () => { + const queryEditors = useSelector<SqlLabRootState, QueryEditor[]>( + state => state.sqlLab.queryEditors, + ); + const unsavedQueryEditor = useSelector<SqlLabRootState, UnsavedQueryEditor>( + state => state.sqlLab.unsavedQueryEditor, + ); + const editorTabLastUpdatedAt = useSelector<SqlLabRootState, number>( + state => state.sqlLab.editorTabLastUpdatedAt, + ); + const dispatch = useDispatch(); + const lastSavedTimestampRef = useRef<number>(editorTabLastUpdatedAt); + const [updateSqlEditor, { error }] = useUpdateSqlEditorTabMutation(); + + const debouncedUnsavedQueryEditor = useDebounceValue( + unsavedQueryEditor, + INTERVAL, + ); + + useEffect(() => { + const unsaved = filterUnsavedQueryEditorList( + queryEditors, + debouncedUnsavedQueryEditor, + lastSavedTimestampRef.current, + ); + + Promise.all( + unsaved + // TODO: Migrate migrateQueryEditorFromLocalStorage + // in TabbedSqlEditors logic by addSqlEditor mutation later + .filter(({ inLocalStorage }) => !inLocalStorage) + .map(queryEditor => updateSqlEditor({ queryEditor })), + ).then(resolvers => { + if (!resolvers.some(result => 'error' in result)) { + lastSavedTimestampRef.current = Date.now(); + dispatch(setEditorTabLastUpdate(lastSavedTimestampRef.current)); + } + }); + }, [debouncedUnsavedQueryEditor, dispatch, queryEditors, updateSqlEditor]); + + useEffect(() => { + if (error) { + logging.warn('An error occurred while saving your editor state.', error); + } + }, [dispatch, error]); + + return null; +}; + +export default EditorAutoSync; diff --git a/superset-frontend/src/SqlLab/components/QueryAutoRefresh/index.tsx b/superset-frontend/src/SqlLab/components/QueryAutoRefresh/index.tsx index f4808f52fd..a2ffcf85cb 100644 --- a/superset-frontend/src/SqlLab/components/QueryAutoRefresh/index.tsx +++ b/superset-frontend/src/SqlLab/components/QueryAutoRefresh/index.tsx @@ -45,7 +45,7 @@ export interface QueryAutoRefreshProps { // returns true if the Query.state matches one of the specifc values 
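
// EditorAutoSync above keys every save decision off two timestamps. A
// self-contained sketch of its dirty check, under the same assumptions the
// component makes: each editor carries an updatedAt, and an editor that still
// lives only in localStorage has never been saved, so it is always dirty.
type EditorLike = { id: string; updatedAt?: number; inLocalStorage?: boolean };

function needsSync(editor: EditorLike, lastSavedAt: number): boolean {
  return Boolean(
    editor.inLocalStorage ||
      (editor.updatedAt && editor.updatedAt > lastSavedAt),
  );
}

// with lastSavedAt = 1000:
// needsSync({ id: 'a', updatedAt: 1500 }, 1000) === true  -> edited after last save
// needsSync({ id: 'b', updatedAt: 500 }, 1000)  === false -> already persisted
// needsSync({ id: 'c', inLocalStorage: true }, 1000) === true -> never persisted
// The debounce (useDebounceValue with the 5000ms INTERVAL above) only batches
// how often this check runs; it does not change its outcome.
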
indicating the query is still processing on server export const isQueryRunning = (q: Query): boolean => - runningQueryStateList.includes(q?.state); + runningQueryStateList.includes(q?.state) && !q?.resultsKey; // returns true if at least one query is running and within the max age to poll timeframe export const shouldCheckForQueries = (queryList: QueryDictionary): boolean => { diff --git a/superset-frontend/src/SqlLab/components/QueryHistory/QueryHistory.test.tsx b/superset-frontend/src/SqlLab/components/QueryHistory/QueryHistory.test.tsx index 6fd84a0d2a..ad1881b5d9 100644 --- a/superset-frontend/src/SqlLab/components/QueryHistory/QueryHistory.test.tsx +++ b/superset-frontend/src/SqlLab/components/QueryHistory/QueryHistory.test.tsx @@ -19,9 +19,10 @@ import React from 'react'; import { render, screen } from 'spec/helpers/testing-library'; import QueryHistory from 'src/SqlLab/components/QueryHistory'; +import { initialState } from 'src/SqlLab/fixtures'; const mockedProps = { - queries: [], + queryEditorId: 123, displayLimit: 1000, latestQueryId: 'yhMUZCGb', }; @@ -32,7 +33,7 @@ const setup = (overrides = {}) => ( describe('QueryHistory', () => { it('Renders an empty state for query history', () => { - render(setup()); + render(setup(), { useRedux: true, initialState }); const emptyStateText = screen.getByText( /run a query to display query history/i, diff --git a/superset-frontend/src/SqlLab/components/QueryHistory/index.tsx b/superset-frontend/src/SqlLab/components/QueryHistory/index.tsx index cab1160144..311a125d55 100644 --- a/superset-frontend/src/SqlLab/components/QueryHistory/index.tsx +++ b/superset-frontend/src/SqlLab/components/QueryHistory/index.tsx @@ -16,13 +16,15 @@ * specific language governing permissions and limitations * under the License. */ -import React from 'react'; +import React, { useMemo } from 'react'; +import { shallowEqual, useSelector } from 'react-redux'; import { EmptyStateMedium } from 'src/components/EmptyState'; -import { t, styled, QueryResponse } from '@superset-ui/core'; +import { t, styled } from '@superset-ui/core'; import QueryTable from 'src/SqlLab/components/QueryTable'; +import { SqlLabRootState } from 'src/SqlLab/types'; interface QueryHistoryProps { - queries: QueryResponse[]; + queryEditorId: string | number; displayLimit: number; latestQueryId: string | undefined; } @@ -39,11 +41,23 @@ const StyledEmptyStateWrapper = styled.div` `; const QueryHistory = ({ - queries, + queryEditorId, displayLimit, latestQueryId, -}: QueryHistoryProps) => - queries.length > 0 ? ( +}: QueryHistoryProps) => { + const queries = useSelector( + ({ sqlLab: { queries } }: SqlLabRootState) => queries, + shallowEqual, + ); + const editorQueries = useMemo( + () => + Object.values(queries).filter( + ({ sqlEditorId }) => String(sqlEditorId) === String(queryEditorId), + ), + [queries, queryEditorId], + ); + + return editorQueries.length > 0 ? 
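
// QueryHistory above stops taking `queries` as a prop and derives the
// per-editor list from the store itself. A reduced sketch of that selection,
// assuming the same store shape (queries keyed by id, each carrying its
// sqlEditorId); String() guards against string/number id mismatches:
import { useMemo } from 'react';
import { shallowEqual, useSelector } from 'react-redux';

type QueryLike = { id: string; sqlEditorId: string | number };
type RootState = { sqlLab: { queries: Record<string, QueryLike> } };

function useEditorQueries(queryEditorId: string | number): QueryLike[] {
  const queries = useSelector(
    (state: RootState) => state.sqlLab.queries,
    shallowEqual,
  );
  return useMemo(
    () =>
      Object.values(queries).filter(
        ({ sqlEditorId }) => String(sqlEditorId) === String(queryEditorId),
      ),
    [queries, queryEditorId],
  );
}
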
( <QueryTable columns={[ 'state', @@ -55,7 +69,7 @@ const QueryHistory = ({ 'results', 'actions', ]} - queries={queries} + queries={editorQueries} displayLimit={displayLimit} latestQueryId={latestQueryId} /> @@ -67,5 +81,6 @@ const QueryHistory = ({ /> </StyledEmptyStateWrapper> ); +}; export default QueryHistory; diff --git a/superset-frontend/src/SqlLab/components/QueryTable/index.tsx b/superset-frontend/src/SqlLab/components/QueryTable/index.tsx index 6ddae08e68..3282a939ef 100644 --- a/superset-frontend/src/SqlLab/components/QueryTable/index.tsx +++ b/superset-frontend/src/SqlLab/components/QueryTable/index.tsx @@ -25,7 +25,7 @@ import { t, useTheme, QueryResponse } from '@superset-ui/core'; import { useDispatch, useSelector } from 'react-redux'; import { - queryEditorSetAndSaveSql, + queryEditorSetSql, cloneQueryToNewTab, fetchQueryResults, clearQueryResults, @@ -109,7 +109,9 @@ const QueryTable = ({ const data = useMemo(() => { const restoreSql = (query: QueryResponse) => { - dispatch(queryEditorSetAndSaveSql({ id: query.sqlEditorId }, query.sql)); + dispatch( + queryEditorSetSql({ id: query.sqlEditorId }, query.sql, query.id), + ); }; const openQueryInNewTab = (query: QueryResponse) => { @@ -249,8 +251,7 @@ const QueryTable = ({ modalBody={ <ResultSet showSql - user={user} - query={query} + queryId={query.id} height={400} displayLimit={displayLimit} defaultQueryLimit={1000} diff --git a/superset-frontend/src/SqlLab/components/ResultSet/ResultSet.test.tsx b/superset-frontend/src/SqlLab/components/ResultSet/ResultSet.test.tsx index d823c586f7..e5844fed5c 100644 --- a/superset-frontend/src/SqlLab/components/ResultSet/ResultSet.test.tsx +++ b/superset-frontend/src/SqlLab/components/ResultSet/ResultSet.test.tsx @@ -37,65 +37,91 @@ import { const mockedProps = { cache: true, - query: queries[0], + queryId: queries[0].id, height: 140, database: { allows_virtual_table_explore: true }, - user, + displayLimit: 1000, defaultQueryLimit: 1000, }; -const stoppedQueryProps = { ...mockedProps, query: stoppedQuery }; -const runningQueryProps = { ...mockedProps, query: runningQuery }; -const fetchingQueryProps = { - ...mockedProps, - query: { - dbId: 1, - cached: false, - ctas: false, - id: 'ryhHUZCGb', - progress: 100, - state: 'fetching', - startDttm: Date.now() - 500, - }, -}; -const cachedQueryProps = { ...mockedProps, query: cachedQuery }; -const failedQueryWithErrorMessageProps = { - ...mockedProps, - query: failedQueryWithErrorMessage, -}; -const failedQueryWithErrorsProps = { - ...mockedProps, - query: failedQueryWithErrors, -}; -const newProps = { - query: { - cached: false, - resultsKey: 'new key', - results: { - data: [{ a: 1 }], +const stoppedQueryState = { + ...initialState, + sqlLab: { + ...initialState.sqlLab, + queries: { + [stoppedQuery.id]: stoppedQuery, }, }, }; +const runningQueryState = { + ...initialState, + sqlLab: { + ...initialState.sqlLab, + queries: { + [runningQuery.id]: runningQuery, + }, + }, +}; +const fetchingQueryState = { + ...initialState, + sqlLab: { + ...initialState.sqlLab, + queries: { + [mockedProps.queryId]: { + dbId: 1, + cached: false, + ctas: false, + id: 'ryhHUZCGb', + progress: 100, + state: 'fetching', + startDttm: Date.now() - 500, + }, + }, + }, +}; +const cachedQueryState = { + ...initialState, + sqlLab: { + ...initialState.sqlLab, + queries: { + [cachedQuery.id]: cachedQuery, + }, + }, +}; +const failedQueryWithErrorMessageState = { + ...initialState, + sqlLab: { + ...initialState.sqlLab, + queries: { + [failedQueryWithErrorMessage.id]: 
failedQueryWithErrorMessage, + }, + }, +}; +const failedQueryWithErrorsState = { + ...initialState, + sqlLab: { + ...initialState.sqlLab, + queries: { + [failedQueryWithErrors.id]: failedQueryWithErrors, + }, + }, +}; + +const newProps = { + displayLimit: 1001, +}; const asyncQueryProps = { ...mockedProps, database: { allow_run_async: true }, }; -const asyncRefetchDataPreviewProps = { - ...asyncQueryProps, - query: { - state: 'success', - results: undefined, - isDataPreview: true, - }, -}; -const asyncRefetchResultsTableProps = { - ...asyncQueryProps, - query: { - state: 'success', - results: undefined, - resultsKey: 'async results key', - }, -}; + +const reRunQueryEndpoint = 'glob:*/api/v1/sqllab/execute/'; fetchMock.get('glob:*/api/v1/dataset/?*', { result: [] }); +fetchMock.post(reRunQueryEndpoint, { result: [] }); +fetchMock.get('glob:*/api/v1/sqllab/results/*', { result: [] }); + +beforeEach(() => { + fetchMock.resetHistory(); +}); const middlewares = [thunk]; const mockStore = configureStore(middlewares); @@ -107,25 +133,47 @@ const setup = (props?: any, store?: Store) => describe('ResultSet', () => { test('renders a Table', async () => { - const { getByTestId } = setup(mockedProps, mockStore(initialState)); + const { getByTestId } = setup( + mockedProps, + mockStore({ + ...initialState, + user, + sqlLab: { + ...initialState.sqlLab, + queries: { + [queries[0].id]: queries[0], + }, + }, + }), + ); const table = getByTestId('table-container'); expect(table).toBeInTheDocument(); }); test('should render success query', async () => { + const query = queries[0]; const { queryAllByText, getByTestId } = setup( mockedProps, - mockStore(initialState), + mockStore({ + ...initialState, + user, + sqlLab: { + ...initialState.sqlLab, + queries: { + [query.id]: query, + }, + }, + }), ); const table = getByTestId('table-container'); expect(table).toBeInTheDocument(); const firstColumn = queryAllByText( - mockedProps.query.results?.columns[0].column_name ?? '', + query.results?.columns[0].column_name ?? '', )[0]; const secondColumn = queryAllByText( - mockedProps.query.results?.columns[1].column_name ?? '', + query.results?.columns[1].column_name ?? 
'', )[0]; expect(firstColumn).toBeInTheDocument(); expect(secondColumn).toBeInTheDocument(); @@ -135,12 +183,24 @@ describe('ResultSet', () => { }); test('should render empty results', async () => { - const props = { - ...mockedProps, - query: { ...mockedProps.query, results: { data: [] } }, + const query = { + ...queries[0], + results: { data: [] }, }; await waitFor(() => { - setup(props, mockStore(initialState)); + setup( + mockedProps, + mockStore({ + ...initialState, + user, + sqlLab: { + ...initialState.sqlLab, + queries: { + [query.id]: query, + }, + }, + }), + ); }); const alert = screen.getByRole('alert'); @@ -149,42 +209,70 @@ describe('ResultSet', () => { }); test('should call reRunQuery if timed out', async () => { - const store = mockStore(initialState); - const propsWithError = { - ...mockedProps, - query: { ...queries[0], errorMessage: 'Your session timed out' }, + const query = { + ...queries[0], + errorMessage: 'Your session timed out', }; + const store = mockStore({ + ...initialState, + user, + sqlLab: { + ...initialState.sqlLab, + queries: { + [query.id]: query, + }, + }, + }); - setup(propsWithError, store); + expect(fetchMock.calls(reRunQueryEndpoint)).toHaveLength(0); + setup(mockedProps, store); expect(store.getActions()).toHaveLength(1); expect(store.getActions()[0].query.errorMessage).toEqual( 'Your session timed out', ); expect(store.getActions()[0].type).toEqual('START_QUERY'); + await waitFor(() => + expect(fetchMock.calls(reRunQueryEndpoint)).toHaveLength(1), + ); }); test('should not call reRunQuery if no error', async () => { - const store = mockStore(initialState); + const query = queries[0]; + const store = mockStore({ + ...initialState, + user, + sqlLab: { + ...initialState.sqlLab, + queries: { + [query.id]: query, + }, + }, + }); setup(mockedProps, store); expect(store.getActions()).toEqual([]); + expect(fetchMock.calls(reRunQueryEndpoint)).toHaveLength(0); }); test('should render cached query', async () => { - const store = mockStore(initialState); - const { rerender } = setup(cachedQueryProps, store); + const store = mockStore(cachedQueryState); + const { rerender } = setup( + { ...mockedProps, queryId: cachedQuery.id }, + store, + ); // @ts-ignore - rerender(<ResultSet {...newProps} />); - expect(store.getActions()).toHaveLength(2); - expect(store.getActions()[0].query.results).toEqual( - cachedQueryProps.query.results, - ); + rerender(<ResultSet {...mockedProps} {...newProps} />); + expect(store.getActions()).toHaveLength(1); + expect(store.getActions()[0].query.results).toEqual(cachedQuery.results); expect(store.getActions()[0].type).toEqual('CLEAR_QUERY_RESULTS'); }); test('should render stopped query', async () => { await waitFor(() => { - setup(stoppedQueryProps, mockStore(initialState)); + setup( + { ...mockedProps, queryId: stoppedQuery.id }, + mockStore(stoppedQueryState), + ); }); const alert = screen.getByRole('alert'); @@ -192,15 +280,18 @@ describe('ResultSet', () => { }); test('should render running/pending/fetching query', async () => { - const { getByTestId } = setup(runningQueryProps, mockStore(initialState)); + const { getByTestId } = setup( + { ...mockedProps, queryId: runningQuery.id }, + mockStore(runningQueryState), + ); const progressBar = getByTestId('progress-bar'); expect(progressBar).toBeInTheDocument(); }); test('should render fetching w/ 100 progress query', async () => { const { getByRole, getByText } = setup( - fetchingQueryProps, - mockStore(initialState), + mockedProps, + mockStore(fetchingQueryState), ); const loading = 
getByRole('status'); expect(loading).toBeInTheDocument(); @@ -209,7 +300,10 @@ describe('ResultSet', () => { test('should render a failed query with an error message', async () => { await waitFor(() => { - setup(failedQueryWithErrorMessageProps, mockStore(initialState)); + setup( + { ...mockedProps, queryId: failedQueryWithErrorMessage.id }, + mockStore(failedQueryWithErrorMessageState), + ); }); expect(screen.getByText('Database error')).toBeInTheDocument(); @@ -218,44 +312,129 @@ describe('ResultSet', () => { test('should render a failed query with an errors object', async () => { await waitFor(() => { - setup(failedQueryWithErrorsProps, mockStore(initialState)); + setup( + { ...mockedProps, queryId: failedQueryWithErrors.id }, + mockStore(failedQueryWithErrorsState), + ); }); expect(screen.getByText('Database error')).toBeInTheDocument(); }); test('renders if there is no limit in query.results but has queryLimit', async () => { + const query = { + ...queries[0], + }; + await waitFor(() => { + setup( + mockedProps, + mockStore({ + ...initialState, + user, + sqlLab: { + ...initialState.sqlLab, + queries: { + [query.id]: query, + }, + }, + }), + ); + }); const { getByRole } = setup(mockedProps, mockStore(initialState)); expect(getByRole('table')).toBeInTheDocument(); }); test('renders if there is a limit in query.results but not queryLimit', async () => { - const props = { ...mockedProps, query: queryWithNoQueryLimit }; - const { getByRole } = setup(props, mockStore(initialState)); + const props = { ...mockedProps, queryId: queryWithNoQueryLimit.id }; + const { getByRole } = setup( + props, + mockStore({ + ...initialState, + user, + sqlLab: { + ...initialState.sqlLab, + queries: { + [queryWithNoQueryLimit.id]: queryWithNoQueryLimit, + }, + }, + }), + ); expect(getByRole('table')).toBeInTheDocument(); }); test('Async queries - renders "Fetch data preview" button when data preview has no results', () => { - setup(asyncRefetchDataPreviewProps, mockStore(initialState)); + const asyncRefetchDataPreviewQuery = { + ...queries[0], + state: 'success', + results: undefined, + isDataPreview: true, + }; + setup( + { ...asyncQueryProps, queryId: asyncRefetchDataPreviewQuery.id }, + mockStore({ + ...initialState, + user, + sqlLab: { + ...initialState.sqlLab, + queries: { + [asyncRefetchDataPreviewQuery.id]: asyncRefetchDataPreviewQuery, + }, + }, + }), + ); expect( screen.getByRole('button', { name: /fetch data preview/i, }), ).toBeVisible(); - expect(screen.queryByRole('grid')).toBe(null); + expect(screen.queryByRole('table')).toBe(null); }); test('Async queries - renders "Refetch results" button when a query has no results', () => { - setup(asyncRefetchResultsTableProps, mockStore(initialState)); + const asyncRefetchResultsTableQuery = { + ...queries[0], + state: 'success', + results: undefined, + resultsKey: 'async results key', + }; + + setup( + { ...asyncQueryProps, queryId: asyncRefetchResultsTableQuery.id }, + mockStore({ + ...initialState, + user, + sqlLab: { + ...initialState.sqlLab, + queries: { + [asyncRefetchResultsTableQuery.id]: asyncRefetchResultsTableQuery, + }, + }, + }), + ); expect( screen.getByRole('button', { name: /refetch results/i, }), ).toBeVisible(); - expect(screen.queryByRole('grid')).toBe(null); + expect(screen.queryByRole('table')).toBe(null); }); test('Async queries - renders on the first call', () => { - setup(asyncQueryProps, mockStore(initialState)); + const query = { + ...queries[0], + }; + setup( + { ...asyncQueryProps, queryId: query.id }, + mockStore({ + 
...initialState, + user, + sqlLab: { + ...initialState.sqlLab, + queries: { + [query.id]: query, + }, + }, + }), + ); expect(screen.getByRole('table')).toBeVisible(); expect( screen.queryByRole('button', { diff --git a/superset-frontend/src/SqlLab/components/ResultSet/index.tsx b/superset-frontend/src/SqlLab/components/ResultSet/index.tsx index 35eac78044..69e4508434 100644 --- a/superset-frontend/src/SqlLab/components/ResultSet/index.tsx +++ b/superset-frontend/src/SqlLab/components/ResultSet/index.tsx @@ -17,13 +17,14 @@ * under the License. */ import React, { useCallback, useEffect, useState } from 'react'; -import { useDispatch } from 'react-redux'; +import { shallowEqual, useDispatch, useSelector } from 'react-redux'; +import { useHistory } from 'react-router-dom'; +import pick from 'lodash/pick'; import ButtonGroup from 'src/components/ButtonGroup'; import Alert from 'src/components/Alert'; import Button from 'src/components/Button'; import shortid from 'shortid'; import { - QueryResponse, QueryState, styled, t, @@ -40,8 +41,7 @@ import { ISimpleColumn, SaveDatasetModal, } from 'src/SqlLab/components/SaveDatasetModal'; -import { UserWithPermissionsAndRoles } from 'src/types/bootstrapTypes'; -import { EXPLORE_CHART_DEFAULT } from 'src/SqlLab/types'; +import { EXPLORE_CHART_DEFAULT, SqlLabRootState } from 'src/SqlLab/types'; import { mountExploreUrl } from 'src/explore/exploreUtils'; import { postFormData } from 'src/explore/exploreUtils/formData'; import ProgressBar from 'src/components/ProgressBar'; @@ -81,12 +81,11 @@ export interface ResultSetProps { database?: Record<string, any>; displayLimit: number; height: number; - query: QueryResponse; + queryId: string; search?: boolean; showSql?: boolean; showSqlInline?: boolean; visualize?: boolean; - user: UserWithPermissionsAndRoles; defaultQueryLimit: number; } @@ -144,14 +143,44 @@ const ResultSet = ({ database = {}, displayLimit, height, - query, + queryId, search = true, showSql = false, showSqlInline = false, visualize = true, - user, defaultQueryLimit, }: ResultSetProps) => { + const user = useSelector(({ user }: SqlLabRootState) => user, shallowEqual); + const query = useSelector( + ({ sqlLab: { queries } }: SqlLabRootState) => + pick(queries[queryId], [ + 'id', + 'errorMessage', + 'cached', + 'results', + 'resultsKey', + 'dbId', + 'tab', + 'sql', + 'templateParams', + 'schema', + 'rows', + 'queryLimit', + 'limitingFactor', + 'trackingUrl', + 'state', + 'errors', + 'link', + 'ctas', + 'ctas_method', + 'tempSchema', + 'tempTable', + 'isDataPreview', + 'progress', + 'extra', + ]), + shallowEqual, + ); const ResultTable = extensionsRegistry.get('sqleditor.extension.resultTable') ?? 
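
// The selector above subscribes ResultSet to a narrowed projection of its own
// query: lodash `pick` keeps only the fields the component renders, and
// `shallowEqual` compares them one level deep, so unrelated changes to the
// queries map no longer re-render the component. The pattern in isolation,
// with the field list shortened for illustration:
import pick from 'lodash/pick';
import { shallowEqual, useSelector } from 'react-redux';

type AnyQuery = Record<string, unknown>;
type RootState = { sqlLab: { queries: Record<string, AnyQuery> } };

const useQueryFields = (queryId: string) =>
  useSelector(
    (state: RootState) =>
      pick(state.sqlLab.queries[queryId], [
        'id',
        'state',
        'results',
        'progress',
      ]),
    shallowEqual,
  );
// A new queries object that leaves these fields untouched yields a
// shallow-equal projection, so React skips the re-render.
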
FilterableTable; @@ -161,6 +190,7 @@ const ResultSet = ({ const [showSaveDatasetModal, setShowSaveDatasetModal] = useState(false); const [alertIsOpen, setAlertIsOpen] = useState(false); + const history = useHistory(); const dispatch = useDispatch(); const reRunQueryIfSessionTimeoutErrorOnMount = useCallback(() => { @@ -177,8 +207,8 @@ const ResultSet = ({ reRunQueryIfSessionTimeoutErrorOnMount(); }, [reRunQueryIfSessionTimeoutErrorOnMount]); - const fetchResults = (query: QueryResponse) => { - dispatch(fetchQueryResults(query, displayLimit)); + const fetchResults = (q: typeof query) => { + dispatch(fetchQueryResults(q, displayLimit)); }; const prevQuery = usePrevious(query); @@ -215,9 +245,11 @@ const ResultSet = ({ setSearchText(event.target.value); }; - const createExploreResultsOnClick = async () => { + const createExploreResultsOnClick = async (clickEvent: React.MouseEvent) => { const { results } = query; + const openInNewWindow = clickEvent.metaKey; + if (results?.query_id) { const key = await postFormData(results.query_id, 'query', { ...EXPLORE_CHART_DEFAULT, @@ -229,7 +261,11 @@ const ResultSet = ({ const url = mountExploreUrl(null, { [URL_PARAMS.formDataKey.name]: key, }); - window.open(url, '_blank', 'noreferrer'); + if (openInNewWindow) { + window.open(url, '_blank', 'noreferrer'); + } else { + history.push(url); + } } else { addDangerToast(t('Unable to create chart without a query id.')); } @@ -471,7 +507,7 @@ const ResultSet = ({ <ResultlessStyles> <ErrorMessageWithStackTrace title={t('Database error')} - error={query?.errors?.[0]} + error={query?.extra?.errors?.[0] || query?.errors?.[0]} subtitle={<MonospaceDiv>{query.errorMessage}</MonospaceDiv>} copyText={query.errorMessage || undefined} link={query.link} @@ -654,4 +690,4 @@ const ResultSet = ({ ); }; -export default ResultSet; +export default React.memo(ResultSet); diff --git a/superset-frontend/src/SqlLab/components/SouthPane/Results.test.tsx b/superset-frontend/src/SqlLab/components/SouthPane/Results.test.tsx new file mode 100644 index 0000000000..c70c039fe5 --- /dev/null +++ b/superset-frontend/src/SqlLab/components/SouthPane/Results.test.tsx @@ -0,0 +1,135 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
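
// createExploreResultsOnClick above now distinguishes meta-click (cmd on
// macOS) from a plain click: a new window for the former, in-app navigation
// for the latter. The branch in isolation; useHistory matches the
// react-router-dom v5 import used in this file, and `url` stands in for the
// result of the explore URL builder:
import { MouseEvent } from 'react';
import { useHistory } from 'react-router-dom';

function useNavigateToUrl() {
  const history = useHistory();
  return (url: string, clickEvent: MouseEvent) => {
    if (clickEvent.metaKey) {
      // detached window; noreferrer also severs window.opener
      window.open(url, '_blank', 'noreferrer');
    } else {
      // client-side route change keeps SQL Lab's redux state alive
      history.push(url);
    }
  };
}
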
+ */ +import React from 'react'; +import { render } from 'spec/helpers/testing-library'; +import { initialState, table, defaultQueryEditor } from 'src/SqlLab/fixtures'; +import { denormalizeTimestamp } from '@superset-ui/core'; +import { LOCALSTORAGE_MAX_QUERY_AGE_MS } from 'src/SqlLab/constants'; +import Results from './Results'; + +const mockedProps = { + queryEditorId: defaultQueryEditor.id, + latestQueryId: 'LCly_kkIN', + height: 1, + displayLimit: 1, + defaultQueryLimit: 100, +}; + +const mockedEmptyProps = { + queryEditorId: 'random_id', + latestQueryId: 'empty_query_id', + height: 100, + displayLimit: 100, + defaultQueryLimit: 100, +}; + +const mockedExpiredProps = { + ...mockedEmptyProps, + latestQueryId: 'expired_query_id', +}; + +const latestQueryProgressMsg = 'LATEST QUERY MESSAGE - LCly_kkIN'; +const expireDateTime = Date.now() - LOCALSTORAGE_MAX_QUERY_AGE_MS - 1; + +const mockState = { + ...initialState, + sqlLab: { + ...initialState, + offline: false, + tables: [ + { + ...table, + dataPreviewQueryId: '2g2_iRFMl', + queryEditorId: defaultQueryEditor.id, + }, + ], + databases: {}, + queries: { + LCly_kkIN: { + cached: false, + changed_on: denormalizeTimestamp(new Date().toISOString()), + db: 'main', + dbId: 1, + id: 'LCly_kkIN', + startDttm: Date.now(), + sqlEditorId: defaultQueryEditor.id, + extra: { progress: latestQueryProgressMsg }, + sql: 'select * from table1', + }, + lXJa7F9_r: { + cached: false, + changed_on: denormalizeTimestamp(new Date(1559238500401).toISOString()), + db: 'main', + dbId: 1, + id: 'lXJa7F9_r', + startDttm: 1559238500401, + sqlEditorId: defaultQueryEditor.id, + sql: 'select * from table2', + }, + '2g2_iRFMl': { + cached: false, + changed_on: denormalizeTimestamp(new Date(1559238506925).toISOString()), + db: 'main', + dbId: 1, + id: '2g2_iRFMl', + startDttm: 1559238506925, + sqlEditorId: defaultQueryEditor.id, + sql: 'select * from table3', + }, + expired_query_id: { + cached: false, + changed_on: denormalizeTimestamp( + new Date(expireDateTime).toISOString(), + ), + db: 'main', + dbId: 1, + id: 'expired_query_id', + startDttm: expireDateTime, + sqlEditorId: defaultQueryEditor.id, + sql: 'select * from table4', + }, + }, + }, +}; + +test('Renders an empty state for results', async () => { + const { getByText } = render(<Results {...mockedEmptyProps} />, { + useRedux: true, + initialState: mockState, + }); + const emptyStateText = getByText(/run a query to display results/i); + expect(emptyStateText).toBeVisible(); +}); + +test('Renders an empty state for expired results', async () => { + const { getByText } = render(<Results {...mockedExpiredProps} />, { + useRedux: true, + initialState: mockState, + }); + const emptyStateText = getByText(/run a query to display results/i); + expect(emptyStateText).toBeVisible(); +}); + +test('should pass latest query down to ResultSet component', async () => { + const { getByText } = render(<Results {...mockedProps} />, { + useRedux: true, + initialState: mockState, + }); + expect(getByText(latestQueryProgressMsg)).toBeVisible(); +}); diff --git a/superset-frontend/src/SqlLab/components/SouthPane/Results.tsx b/superset-frontend/src/SqlLab/components/SouthPane/Results.tsx new file mode 100644 index 0000000000..4e1b6219ae --- /dev/null +++ b/superset-frontend/src/SqlLab/components/SouthPane/Results.tsx @@ -0,0 +1,106 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import React from 'react'; +import { shallowEqual, useSelector } from 'react-redux'; +import Alert from 'src/components/Alert'; +import { EmptyStateMedium } from 'src/components/EmptyState'; +import { FeatureFlag, styled, t, isFeatureEnabled } from '@superset-ui/core'; + +import { SqlLabRootState } from 'src/SqlLab/types'; +import ResultSet from '../ResultSet'; +import { LOCALSTORAGE_MAX_QUERY_AGE_MS } from '../../constants'; + +const EXTRA_HEIGHT_RESULTS = 8; // we need extra height in RESULTS tab. because the height from props was calculated based on PREVIEW tab. + +type Props = { + latestQueryId: string; + height: number; + displayLimit: number; + defaultQueryLimit: number; +}; + +const StyledEmptyStateWrapper = styled.div` + height: 100%; + .ant-empty-image img { + margin-right: 28px; + } + + p { + margin-right: 28px; + } +`; + +const Results: React.FC<Props> = ({ + latestQueryId, + height, + displayLimit, + defaultQueryLimit, +}) => { + const databases = useSelector( + ({ sqlLab: { databases } }: SqlLabRootState) => databases, + shallowEqual, + ); + const latestQuery = useSelector( + ({ sqlLab: { queries } }: SqlLabRootState) => queries[latestQueryId || ''], + shallowEqual, + ); + + if ( + !latestQuery || + Date.now() - latestQuery.startDttm > LOCALSTORAGE_MAX_QUERY_AGE_MS + ) { + return ( + <StyledEmptyStateWrapper> + <EmptyStateMedium + title={t('Run a query to display results')} + image="document.svg" + /> + </StyledEmptyStateWrapper> + ); + } + + if ( + isFeatureEnabled(FeatureFlag.SQLLAB_BACKEND_PERSISTENCE) && + latestQuery.state === 'success' && + !latestQuery.resultsKey && + !latestQuery.results + ) { + return ( + <Alert + type="warning" + message={t('No stored results found, you need to re-run your query')} + /> + ); + } + + return ( + <ResultSet + search + queryId={latestQuery.id} + height={height + EXTRA_HEIGHT_RESULTS} + database={databases[latestQuery.dbId]} + displayLimit={displayLimit} + defaultQueryLimit={defaultQueryLimit} + showSql + showSqlInline + /> + ); +}; + +export default Results; diff --git a/superset-frontend/src/SqlLab/components/SouthPane/SouthPane.test.tsx b/superset-frontend/src/SqlLab/components/SouthPane/SouthPane.test.tsx index 80a102ff21..c978a4ca32 100644 --- a/superset-frontend/src/SqlLab/components/SouthPane/SouthPane.test.tsx +++ b/superset-frontend/src/SqlLab/components/SouthPane/SouthPane.test.tsx @@ -17,15 +17,12 @@ * under the License. 
*/ import React from 'react'; -import configureStore from 'redux-mock-store'; -import thunk from 'redux-thunk'; -import { render, screen, waitFor } from 'spec/helpers/testing-library'; -import SouthPane, { SouthPaneProps } from 'src/SqlLab/components/SouthPane'; +import { render } from 'spec/helpers/testing-library'; +import SouthPane from 'src/SqlLab/components/SouthPane'; import '@testing-library/jest-dom/extend-expect'; import { STATUS_OPTIONS } from 'src/SqlLab/constants'; import { initialState, table, defaultQueryEditor } from 'src/SqlLab/fixtures'; import { denormalizeTimestamp } from '@superset-ui/core'; -import { Store } from 'redux'; const mockedProps = { queryEditorId: defaultQueryEditor.id, @@ -37,29 +34,32 @@ const mockedProps = { const mockedEmptyProps = { queryEditorId: 'random_id', - latestQueryId: '', + latestQueryId: 'empty_query_id', height: 100, displayLimit: 100, defaultQueryLimit: 100, }; -jest.mock('src/SqlLab/components/SqlEditorLeftBar', () => jest.fn()); - const latestQueryProgressMsg = 'LATEST QUERY MESSAGE - LCly_kkIN'; -const middlewares = [thunk]; -const mockStore = configureStore(middlewares); -const store = mockStore({ +const mockState = { ...initialState, sqlLab: { - ...initialState, + ...initialState.sqlLab, offline: false, tables: [ { ...table, + name: 'table3', dataPreviewQueryId: '2g2_iRFMl', queryEditorId: defaultQueryEditor.id, }, + { + ...table, + name: 'table4', + dataPreviewQueryId: 'erWdqEWPm', + queryEditorId: defaultQueryEditor.id, + }, ], databases: {}, queries: { @@ -72,6 +72,7 @@ const store = mockStore({ startDttm: Date.now(), sqlEditorId: defaultQueryEditor.id, extra: { progress: latestQueryProgressMsg }, + sql: 'select * from table1', }, lXJa7F9_r: { cached: false, @@ -81,6 +82,7 @@ const store = mockStore({ id: 'lXJa7F9_r', startDttm: 1559238500401, sqlEditorId: defaultQueryEditor.id, + sql: 'select * from table2', }, '2g2_iRFMl': { cached: false, @@ -90,6 +92,7 @@ const store = mockStore({ id: '2g2_iRFMl', startDttm: 1559238506925, sqlEditorId: defaultQueryEditor.id, + sql: 'select * from table3', }, erWdqEWPm: { cached: false, @@ -99,44 +102,38 @@ const store = mockStore({ id: 'erWdqEWPm', startDttm: 1559238516395, sqlEditorId: defaultQueryEditor.id, + sql: 'select * from table4', }, }, }, -}); -const setup = (props: SouthPaneProps, store: Store) => - render(<SouthPane {...props} />, { +}; + +test('should render offline when the state is offline', async () => { + const { getByText } = render(<SouthPane {...mockedEmptyProps} />, { useRedux: true, - ...(store && { store }), + initialState: { + ...initialState, + sqlLab: { + ...initialState.sqlLab, + offline: true, + }, + }, }); -describe('SouthPane', () => { - const renderAndWait = (props: SouthPaneProps, store: Store) => - waitFor(async () => setup(props, store)); + expect(getByText(STATUS_OPTIONS.offline)).toBeVisible(); +}); - it('Renders an empty state for results', async () => { - await renderAndWait(mockedEmptyProps, store); - const emptyStateText = screen.getByText(/run a query to display results/i); - expect(emptyStateText).toBeVisible(); +test('should render tabs for table preview queries', () => { + const { getAllByRole } = render(<SouthPane {...mockedProps} />, { + useRedux: true, + initialState: mockState, }); - it('should render offline when the state is offline', async () => { - await renderAndWait( - mockedEmptyProps, - mockStore({ - ...initialState, - sqlLab: { - ...initialState.sqlLab, - offline: true, - }, - }), - ); - - 
expect(screen.getByText(STATUS_OPTIONS.offline)).toBeVisible(); - }); - - it('should pass latest query down to ResultSet component', async () => { - await renderAndWait(mockedProps, store); - - expect(screen.getByText(latestQueryProgressMsg)).toBeVisible(); + const tabs = getAllByRole('tab'); + expect(tabs).toHaveLength(mockState.sqlLab.tables.length + 2); + expect(tabs[0]).toHaveTextContent('Results'); + expect(tabs[1]).toHaveTextContent('Query history'); + mockState.sqlLab.tables.forEach(({ name }, index) => { + expect(tabs[index + 2]).toHaveTextContent(`Preview: \`${name}\``); }); }); diff --git a/superset-frontend/src/SqlLab/components/SouthPane/index.tsx b/superset-frontend/src/SqlLab/components/SouthPane/index.tsx index 38a20f9f6d..0bbce99b1c 100644 --- a/superset-frontend/src/SqlLab/components/SouthPane/index.tsx +++ b/superset-frontend/src/SqlLab/components/SouthPane/index.tsx @@ -19,10 +19,8 @@ import React, { createRef, useMemo } from 'react'; import { shallowEqual, useDispatch, useSelector } from 'react-redux'; import shortid from 'shortid'; -import Alert from 'src/components/Alert'; import Tabs from 'src/components/Tabs'; -import { EmptyStateMedium } from 'src/components/EmptyState'; -import { FeatureFlag, styled, t, isFeatureEnabled } from '@superset-ui/core'; +import { styled, t } from '@superset-ui/core'; import { setActiveSouthPaneTab } from 'src/SqlLab/actions/sqlLab'; @@ -33,11 +31,11 @@ import ResultSet from '../ResultSet'; import { STATUS_OPTIONS, STATE_TYPE_MAP, - LOCALSTORAGE_MAX_QUERY_AGE_MS, STATUS_OPTIONS_LOCALIZED, } from '../../constants'; +import Results from './Results'; -const TAB_HEIGHT = 140; +const TAB_HEIGHT = 130; /* editorQueries are queries executed by users passed from SqlEditor component @@ -85,18 +83,6 @@ const StyledPane = styled.div<StyledPaneProps>` } `; -const EXTRA_HEIGHT_RESULTS = 24; // we need extra height in RESULTS tab. because the height from props was calculated based on PREVIEW tab. 
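
// Further down in this hunk SouthPane subscribes to the query ids alone and
// keeps them in a Set, so filtering the data-preview tables becomes an O(1)
// membership test per table and the component stops re-rendering on
// unrelated query updates. The idea condensed into a plain function (in the
// component the Set is rebuilt inside useMemo only when the id list changes):
type TableLike = {
  name: string;
  queryEditorId: string;
  dataPreviewQueryId?: string;
};

function previewTables(
  tables: TableLike[],
  queryIds: string[],
  editorId: string,
): TableLike[] {
  const querySet = new Set(queryIds);
  return tables.filter(
    ({ dataPreviewQueryId, queryEditorId }) =>
      dataPreviewQueryId !== undefined &&
      queryEditorId === editorId &&
      querySet.has(dataPreviewQueryId),
  );
}
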
-const StyledEmptyStateWrapper = styled.div` - height: 100%; - .ant-empty-image img { - margin-right: 28px; - } - - p { - margin-right: 28px; - } -`; - const SouthPane = ({ queryEditorId, latestQueryId, @@ -105,128 +91,43 @@ const SouthPane = ({ defaultQueryLimit, }: SouthPaneProps) => { const dispatch = useDispatch(); - const user = useSelector(({ user }: SqlLabRootState) => user, shallowEqual); - const { databases, offline, queries, tables } = useSelector( - ({ sqlLab: { databases, offline, queries, tables } }: SqlLabRootState) => ({ - databases, + const { offline, tables } = useSelector( + ({ sqlLab: { offline, tables } }: SqlLabRootState) => ({ offline, - queries, tables, }), shallowEqual, ); - const editorQueries = useMemo( - () => - Object.values(queries).filter( - ({ sqlEditorId }) => sqlEditorId === queryEditorId, - ), - [queries, queryEditorId], + const queries = useSelector( + ({ sqlLab: { queries } }: SqlLabRootState) => Object.keys(queries), + shallowEqual, ); - const dataPreviewQueries = useMemo( - () => - tables - .filter( - ({ dataPreviewQueryId, queryEditorId: qeId }) => - dataPreviewQueryId && - queryEditorId === qeId && - queries[dataPreviewQueryId], - ) - .map(({ name, dataPreviewQueryId }) => ({ - ...queries[dataPreviewQueryId || ''], - tableName: name, - })), - [queries, queryEditorId, tables], - ); - const latestQuery = useMemo( - () => editorQueries.find(({ id }) => id === latestQueryId), - [editorQueries, latestQueryId], - ); - const activeSouthPaneTab = useSelector<SqlLabRootState, string>( state => state.sqlLab.activeSouthPaneTab as string, ) ?? 'Results'; + + const querySet = useMemo(() => new Set(queries), [queries]); + const dataPreviewQueries = useMemo( + () => + tables.filter( + ({ dataPreviewQueryId, queryEditorId: qeId }) => + dataPreviewQueryId && + queryEditorId === qeId && + querySet.has(dataPreviewQueryId), + ), + [queryEditorId, tables, querySet], + ); const innerTabContentHeight = height - TAB_HEIGHT; const southPaneRef = createRef<HTMLDivElement>(); const switchTab = (id: string) => { dispatch(setActiveSouthPaneTab(id)); }; - const renderOfflineStatus = () => ( + + return offline ? 
( <Label className="m-r-3" type={STATE_TYPE_MAP[STATUS_OPTIONS.offline]}> {STATUS_OPTIONS_LOCALIZED.offline} </Label> - ); - - const renderResults = () => { - let results; - if (latestQuery) { - if (latestQuery?.extra?.errors) { - latestQuery.errors = latestQuery.extra.errors; - } - if ( - isFeatureEnabled(FeatureFlag.SQLLAB_BACKEND_PERSISTENCE) && - latestQuery.state === 'success' && - !latestQuery.resultsKey && - !latestQuery.results - ) { - results = ( - <Alert - type="warning" - message={t( - 'No stored results found, you need to re-run your query', - )} - /> - ); - return results; - } - if (Date.now() - latestQuery.startDttm <= LOCALSTORAGE_MAX_QUERY_AGE_MS) { - results = ( - <ResultSet - search - query={latestQuery} - user={user} - height={innerTabContentHeight + EXTRA_HEIGHT_RESULTS} - database={databases[latestQuery.dbId]} - displayLimit={displayLimit} - defaultQueryLimit={defaultQueryLimit} - showSql - showSqlInline - /> - ); - } - } else { - results = ( - <StyledEmptyStateWrapper> - <EmptyStateMedium - title={t('Run a query to display results')} - image="document.svg" - /> - </StyledEmptyStateWrapper> - ); - } - return results; - }; - - const renderDataPreviewTabs = () => - dataPreviewQueries.map(query => ( - <Tabs.TabPane - tab={t('Preview: `%s`', decodeURIComponent(query.tableName))} - key={query.id} - > - <ResultSet - query={query} - visualize={false} - csv={false} - cache - user={user} - height={innerTabContentHeight} - displayLimit={displayLimit} - defaultQueryLimit={defaultQueryLimit} - /> - </Tabs.TabPane> - )); - return offline ? ( - renderOfflineStatus() ) : ( <StyledPane data-test="south-pane" @@ -243,16 +144,41 @@ const SouthPane = ({ animated={false} > <Tabs.TabPane tab={t('Results')} key="Results"> - {renderResults()} + {latestQueryId && ( + <Results + height={innerTabContentHeight} + latestQueryId={latestQueryId} + displayLimit={displayLimit} + defaultQueryLimit={defaultQueryLimit} + /> + )} </Tabs.TabPane> <Tabs.TabPane tab={t('Query history')} key="History"> <QueryHistory - queries={editorQueries} + queryEditorId={queryEditorId} displayLimit={displayLimit} latestQueryId={latestQueryId} /> </Tabs.TabPane> - {renderDataPreviewTabs()} + {dataPreviewQueries.map( + ({ name, dataPreviewQueryId }) => + dataPreviewQueryId && ( + <Tabs.TabPane + tab={t('Preview: `%s`', decodeURIComponent(name))} + key={dataPreviewQueryId} + > + <ResultSet + queryId={dataPreviewQueryId} + visualize={false} + csv={false} + cache + height={innerTabContentHeight} + displayLimit={displayLimit} + defaultQueryLimit={defaultQueryLimit} + /> + </Tabs.TabPane> + ), + )} </Tabs> </StyledPane> ); diff --git a/superset-frontend/src/SqlLab/components/SqlEditor/SqlEditor.test.tsx b/superset-frontend/src/SqlLab/components/SqlEditor/SqlEditor.test.tsx index 63f67170d0..6a25492ce5 100644 --- a/superset-frontend/src/SqlLab/components/SqlEditor/SqlEditor.test.tsx +++ b/superset-frontend/src/SqlLab/components/SqlEditor/SqlEditor.test.tsx @@ -145,8 +145,8 @@ describe('SqlEditor', () => { (SqlEditorLeftBar as jest.Mock).mockImplementation(() => ( <div data-test="mock-sql-editor-left-bar" /> )); - (ResultSet as jest.Mock).mockClear(); - (ResultSet as jest.Mock).mockImplementation(() => ( + (ResultSet as unknown as jest.Mock).mockClear(); + (ResultSet as unknown as jest.Mock).mockImplementation(() => ( <div data-test="mock-result-set" /> )); }); @@ -182,7 +182,8 @@ describe('SqlEditor', () => { const editor = await findByTestId('react-ace'); const sql = 'select *'; const renderCount = (SqlEditorLeftBar as 
jest.Mock).mock.calls.length; - const renderCountForSouthPane = (ResultSet as jest.Mock).mock.calls.length; + const renderCountForSouthPane = (ResultSet as unknown as jest.Mock).mock + .calls.length; expect(SqlEditorLeftBar).toHaveBeenCalledTimes(renderCount); expect(ResultSet).toHaveBeenCalledTimes(renderCountForSouthPane); fireEvent.change(editor, { target: { value: sql } }); diff --git a/superset-frontend/src/SqlLab/components/SqlEditor/index.tsx b/superset-frontend/src/SqlLab/components/SqlEditor/index.tsx index 609cb917b6..73941fbc79 100644 --- a/superset-frontend/src/SqlLab/components/SqlEditor/index.tsx +++ b/superset-frontend/src/SqlLab/components/SqlEditor/index.tsx @@ -557,10 +557,9 @@ const SqlEditor: React.FC<Props> = ({ [setQueryEditorAndSaveSql], ); - const onSqlChanged = (sql: string) => { + const onSqlChanged = useEffectEvent((sql: string) => { dispatch(queryEditorSetSql(queryEditor, sql)); - setQueryEditorAndSaveSqlWithDebounce(sql); - }; + }); // Return the heights for the ace editor and the south pane as an object // given the height of the sql editor, north pane percent and south pane percent. @@ -785,7 +784,7 @@ const SqlEditor: React.FC<Props> = ({ )} <AceEditorWrapper autocomplete={autocompleteEnabled} - onBlur={setQueryEditorAndSaveSql} + onBlur={onSqlChanged} onChange={onSqlChanged} queryEditorId={queryEditor.id} height={`${aceEditorHeight}px`} diff --git a/superset-frontend/src/SqlLab/fixtures.ts b/superset-frontend/src/SqlLab/fixtures.ts index 4f6ad9ceb5..c578aac3fc 100644 --- a/superset-frontend/src/SqlLab/fixtures.ts +++ b/superset-frontend/src/SqlLab/fixtures.ts @@ -25,6 +25,7 @@ import { QueryResponse, QueryState, } from '@superset-ui/core'; +import { LatestQueryEditorVersion } from 'src/SqlLab/types'; import { ISaveableDatasource } from 'src/SqlLab/components/SaveDatasetModal'; export const mockedActions = sinon.stub({ ...actions }); @@ -181,6 +182,7 @@ export const table = { }; export const defaultQueryEditor = { + version: LatestQueryEditorVersion, id: 'dfsadfs', autorun: false, dbId: undefined, diff --git a/superset-frontend/src/SqlLab/middlewares/persistSqlLabStateEnhancer.js b/superset-frontend/src/SqlLab/middlewares/persistSqlLabStateEnhancer.js index 4e32095e28..d1bec5e0c1 100644 --- a/superset-frontend/src/SqlLab/middlewares/persistSqlLabStateEnhancer.js +++ b/superset-frontend/src/SqlLab/middlewares/persistSqlLabStateEnhancer.js @@ -18,6 +18,9 @@ */ // TODO: requires redux-localstorage > 1.0 for typescript support import persistState from 'redux-localstorage'; +import { pickBy } from 'lodash'; +import { isFeatureEnabled, FeatureFlag } from '@superset-ui/core'; +import { filterUnsavedQueryEditorList } from 'src/SqlLab/components/EditorAutoSync'; import { emptyTablePersistData, emptyQueryResults, @@ -38,6 +41,39 @@ const sqlLabPersistStateConfig = { slicer: paths => state => { const subset = {}; paths.forEach(path => { + if (isFeatureEnabled(FeatureFlag.SQLLAB_BACKEND_PERSISTENCE)) { + const { + queryEditors, + editorTabLastUpdatedAt, + unsavedQueryEditor, + tables, + queries, + tabHistory, + } = state.sqlLab; + const unsavedQueryEditors = filterUnsavedQueryEditorList( + queryEditors, + unsavedQueryEditor, + editorTabLastUpdatedAt, + ); + if (unsavedQueryEditors.length > 0) { + const hasFinishedMigrationFromLocalStorage = + unsavedQueryEditors.every( + ({ inLocalStorage }) => !inLocalStorage, + ); + subset.sqlLab = { + queryEditors: unsavedQueryEditors, + ...(!hasFinishedMigrationFromLocalStorage && { + tabHistory, + tables: tables.filter(table => 
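
// The slicer above persists to localStorage only what the backend does not
// already have: the unsaved editors always, and tabHistory/tables/queries
// only while the one-time migration out of localStorage is still pending.
// The queries filter in isolation, using the same lodash pickBy call:
import { pickBy } from 'lodash';

type PersistedQuery = { inLocalStorage?: boolean; isDataPreview?: boolean };

function queriesToPersist(
  queries: Record<string, PersistedQuery>,
): Record<string, PersistedQuery> {
  // keep queries created while local-only, but never table previews
  return pickBy(queries, q => Boolean(q.inLocalStorage) && !q.isDataPreview);
}
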
table.inLocalStorage), + queries: pickBy( + queries, + query => query.inLocalStorage && !query.isDataPreview, + ), + }), + }; + } + return; + } // this line is used to remove old data from browser localStorage. // we used to persist all redux state into localStorage, but // it caused configurations passed from server-side got override. diff --git a/superset-frontend/src/SqlLab/reducers/getInitialState.test.ts b/superset-frontend/src/SqlLab/reducers/getInitialState.test.ts index aca11e2cca..1dd3220fcc 100644 --- a/superset-frontend/src/SqlLab/reducers/getInitialState.test.ts +++ b/superset-frontend/src/SqlLab/reducers/getInitialState.test.ts @@ -54,6 +54,10 @@ const apiDataWithTabState = { }, }; describe('getInitialState', () => { + afterEach(() => { + localStorage.clear(); + }); + it('should output the user that is passed in', () => { expect(getInitialState(apiData).user?.userId).toEqual(1); }); @@ -134,10 +138,6 @@ describe('getInitialState', () => { }); describe('dedupe tables schema', () => { - afterEach(() => { - localStorage.clear(); - }); - it('should dedupe the table schema', () => { localStorage.setItem( 'redux', @@ -245,4 +245,109 @@ describe('getInitialState', () => { ); }); }); + + describe('restore unsaved changes for PERSISTENCE mode', () => { + const lastUpdatedTime = Date.now(); + const expectedValue = 'updated editor value'; + beforeEach(() => { + localStorage.setItem( + 'redux', + JSON.stringify({ + sqlLab: { + queryEditors: [ + { + // restore cached value since updates are after server update time + id: '1', + name: expectedValue, + updatedAt: lastUpdatedTime + 100, + }, + { + // no update required given that last updated time comes before server update time + id: '2', + name: expectedValue, + updatedAt: lastUpdatedTime - 100, + }, + { + // no update required given that there's no updatedAt + id: '3', + name: expectedValue, + }, + ], + }, + }), + ); + }); + + it('restore unsaved changes for PERSISTENCE mode', () => { + const apiDataWithLocalStorage = { + ...apiData, + active_tab: { + ...apiDataWithTabState.active_tab, + id: 1, + label: 'persisted tab', + table_schemas: [], + extra_json: { + updatedAt: lastUpdatedTime, + }, + }, + tab_state_ids: [{ id: 1, label: '' }], + }; + expect( + getInitialState(apiDataWithLocalStorage).sqlLab.queryEditors[0], + ).toEqual( + expect.objectContaining({ + id: '1', + name: expectedValue, + }), + ); + }); + + it('skip unsaved changes for expired data', () => { + const apiDataWithLocalStorage = { + ...apiData, + active_tab: { + ...apiDataWithTabState.active_tab, + id: 2, + label: 'persisted tab', + table_schemas: [], + extra_json: { + updatedAt: lastUpdatedTime, + }, + }, + tab_state_ids: [{ id: 2, label: '' }], + }; + expect( + getInitialState(apiDataWithLocalStorage).sqlLab.queryEditors[1], + ).toEqual( + expect.objectContaining({ + id: '2', + name: apiDataWithLocalStorage.active_tab.label, + }), + ); + }); + + it('skip unsaved changes for legacy cache data', () => { + const apiDataWithLocalStorage = { + ...apiData, + active_tab: { + ...apiDataWithTabState.active_tab, + id: 3, + label: 'persisted tab', + table_schemas: [], + extra_json: { + updatedAt: lastUpdatedTime, + }, + }, + tab_state_ids: [{ id: 3, label: '' }], + }; + expect( + getInitialState(apiDataWithLocalStorage).sqlLab.queryEditors[2], + ).toEqual( + expect.objectContaining({ + id: '3', + name: apiDataWithLocalStorage.active_tab.label, + }), + ); + }); + }); }); diff --git a/superset-frontend/src/SqlLab/reducers/getInitialState.ts 
b/superset-frontend/src/SqlLab/reducers/getInitialState.ts index e2aa1d4688..8d72a313b2 100644 --- a/superset-frontend/src/SqlLab/reducers/getInitialState.ts +++ b/superset-frontend/src/SqlLab/reducers/getInitialState.ts @@ -20,11 +20,13 @@ import { t } from '@superset-ui/core'; import getToastsFromPyFlashMessages from 'src/components/MessageToasts/getToastsFromPyFlashMessages'; import type { BootstrapData } from 'src/types/bootstrapTypes'; import type { InitialState } from 'src/hooks/apiResources/sqlLab'; -import type { +import { QueryEditor, UnsavedQueryEditor, SqlLabRootState, Table, + LatestQueryEditorVersion, + QueryEditorVersion, } from 'src/SqlLab/types'; export function dedupeTabHistory(tabHistory: string[]) { @@ -53,6 +55,7 @@ export default function getInitialState({ */ let queryEditors: Record<string, QueryEditor> = {}; const defaultQueryEditor = { + version: LatestQueryEditorVersion, loaded: true, name: t('Untitled query'), sql: 'SELECT *\nFROM\nWHERE', @@ -73,6 +76,7 @@ export default function getInitialState({ let queryEditor: QueryEditor; if (activeTab && activeTab.id === id) { queryEditor = { + version: activeTab.extra_json?.version ?? QueryEditorVersion.v1, id: id.toString(), loaded: true, name: activeTab.label, @@ -88,6 +92,7 @@ export default function getInitialState({ schema: activeTab.schema, queryLimit: activeTab.query_limit, hideLeftBar: activeTab.hide_left_bar, + updatedAt: activeTab.extra_json?.updatedAt, }; } else { // dummy state, actual state will be loaded on tab switch @@ -103,11 +108,12 @@ export default function getInitialState({ [queryEditor.id]: queryEditor, }; }); - const tabHistory = activeTab ? [activeTab.id.toString()] : []; let tables = {} as Record<string, Table>; - const editorTabLastUpdatedAt = Date.now(); + let editorTabLastUpdatedAt = Date.now(); if (activeTab) { + editorTabLastUpdatedAt = + activeTab.extra_json?.updatedAt || editorTabLastUpdatedAt; activeTab.table_schemas .filter(tableSchema => tableSchema.description !== null) .forEach(tableSchema => { @@ -153,37 +159,57 @@ export default function getInitialState({ // add query editors and tables to state with a special flag so they can // be migrated if the `SQLLAB_BACKEND_PERSISTENCE` feature flag is on sqlLab.queryEditors.forEach(qe => { + const hasConflictFromBackend = Boolean(queryEditors[qe.id]); + const unsavedUpdatedAt = queryEditors[qe.id]?.updatedAt; + const hasUnsavedUpdateSinceLastSave = + qe.updatedAt && + (!unsavedUpdatedAt || qe.updatedAt > unsavedUpdatedAt); + const cachedQueryEditor: UnsavedQueryEditor = + !hasConflictFromBackend || hasUnsavedUpdateSinceLastSave ? 
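
// The merge rule this hunk is building: trust the locally cached editor only
// when the backend has no copy of the tab at all, or when the local edit is
// strictly newer than the backend's updatedAt. The rule as a standalone
// sketch (names are illustrative, not the ones in getInitialState.ts):
type CachedEditor = { id: string; name?: string; updatedAt?: number };

function resolveCachedEditor(
  local: CachedEditor,
  backend: CachedEditor | undefined,
): CachedEditor | Record<string, never> {
  const hasConflictFromBackend = Boolean(backend);
  const hasUnsavedUpdateSinceLastSave = Boolean(
    local.updatedAt &&
      (!backend?.updatedAt || local.updatedAt > backend.updatedAt),
  );
  return !hasConflictFromBackend || hasUnsavedUpdateSinceLastSave
    ? local
    : {};
}
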
qe : {}; queryEditors = { ...queryEditors, [qe.id]: { ...queryEditors[qe.id], - ...qe, - name: qe.title || qe.name, - ...(unsavedQueryEditor.id === qe.id && unsavedQueryEditor), - inLocalStorage: true, + ...cachedQueryEditor, + name: + cachedQueryEditor.title || + cachedQueryEditor.name || + queryEditors[qe.id]?.name, + ...(cachedQueryEditor.id && + unsavedQueryEditor.id === qe.id && + unsavedQueryEditor), + inLocalStorage: !hasConflictFromBackend, loaded: true, }, }; }); const expandedTables = new Set(); - tables = sqlLab.tables.reduce((merged, table) => { - const expanded = !expandedTables.has(table.queryEditorId); - if (expanded) { - expandedTables.add(table.queryEditorId); - } - return { - ...merged, - [table.id]: { - ...tables[table.id], - ...table, - expanded, - }, - }; - }, tables); - Object.values(sqlLab.queries).forEach(query => { - queries[query.id] = { ...query, inLocalStorage: true }; - }); - tabHistory.push(...sqlLab.tabHistory); + + if (sqlLab.tables) { + tables = sqlLab.tables.reduce((merged, table) => { + const expanded = !expandedTables.has(table.queryEditorId); + if (expanded) { + expandedTables.add(table.queryEditorId); + } + return { + ...merged, + [table.id]: { + ...tables[table.id], + ...table, + expanded, + inLocalStorage: true, + }, + }; + }, tables); + } + if (sqlLab.queries) { + Object.values(sqlLab.queries).forEach(query => { + queries[query.id] = { ...query, inLocalStorage: true }; + }); + } + if (sqlLab.tabHistory) { + tabHistory.push(...sqlLab.tabHistory); + } } } } catch (error) { diff --git a/superset-frontend/src/SqlLab/reducers/sqlLab.js b/superset-frontend/src/SqlLab/reducers/sqlLab.js index 278109564f..ce9eed9b9d 100644 --- a/superset-frontend/src/SqlLab/reducers/sqlLab.js +++ b/superset-frontend/src/SqlLab/reducers/sqlLab.js @@ -29,7 +29,7 @@ import { extendArr, } from '../../reduxUtils'; -function alterUnsavedQueryEditorState(state, updatedState, id) { +function alterUnsavedQueryEditorState(state, updatedState, id, silent = false) { if (state.tabHistory[state.tabHistory.length - 1] !== id) { const { queryEditors } = alterInArr( state, @@ -45,6 +45,7 @@ function alterUnsavedQueryEditorState(state, updatedState, id) { unsavedQueryEditor: { ...(state.unsavedQueryEditor.id === id && state.unsavedQueryEditor), ...(id ? 
{ id, ...updatedState } : state.unsavedQueryEditor), + ...(!silent && { updatedAt: new Date().getTime() }), }, }; } @@ -64,7 +65,10 @@ export default function sqlLabReducer(state = {}, action) { ...mergeUnsavedState, tabHistory: [...state.tabHistory, action.queryEditor.id], }; - return addToArr(newState, 'queryEditors', action.queryEditor); + return addToArr(newState, 'queryEditors', { + ...action.queryEditor, + updatedAt: new Date().getTime(), + }); }, [actions.QUERY_EDITOR_SAVED]() { const { query, result, clientId } = action; @@ -308,6 +312,7 @@ export default function sqlLabReducer(state = {}, action) { latestQueryId: action.query.id, }, action.query.sqlEditorId, + action.query.isDataPreview, ), }; }, @@ -340,7 +345,7 @@ export default function sqlLabReducer(state = {}, action) { return state; } const alts = { - endDttm: now(), + endDttm: action?.results?.query?.endDttm || now(), progress: 100, results: action.results, rows: action?.results?.query?.rows || 0, @@ -378,14 +383,12 @@ export default function sqlLabReducer(state = {}, action) { qeIds.indexOf(action.queryEditor?.id) > -1 && state.tabHistory[state.tabHistory.length - 1] !== action.queryEditor.id ) { - const mergeUnsavedState = alterInArr( - state, - 'queryEditors', - state.unsavedQueryEditor, - { + const mergeUnsavedState = { + ...alterInArr(state, 'queryEditors', state.unsavedQueryEditor, { ...state.unsavedQueryEditor, - }, - ); + }), + unsavedQueryEditor: {}, + }; return { ...(action.queryEditor.id === state.unsavedQueryEditor.id ? alterInArr( @@ -522,12 +525,20 @@ export default function sqlLabReducer(state = {}, action) { }; }, [actions.QUERY_EDITOR_SET_SQL]() { + const { unsavedQueryEditor } = state; + if ( + unsavedQueryEditor?.id === action.queryEditor.id && + unsavedQueryEditor.sql === action.sql + ) { + return state; + } return { ...state, ...alterUnsavedQueryEditorState( state, { sql: action.sql, + ...(action.queryId && { latestQueryId: action.queryId }), }, action.queryEditor.id, ), @@ -566,6 +577,7 @@ export default function sqlLabReducer(state = {}, action) { selectedText: action.sql, }, action.queryEditor.id, + true, ), }; }, @@ -662,7 +674,14 @@ export default function sqlLabReducer(state = {}, action) { if (!change) { newQueries = state.queries; } - return { ...state, queries: newQueries, queriesLastUpdate }; + return { + ...state, + queries: newQueries, + queriesLastUpdate: + queriesLastUpdate > state.queriesLastUpdate + ? 
queriesLastUpdate + : Date.now(), + }; }, [actions.CLEAR_INACTIVE_QUERIES]() { const { queries } = state; @@ -689,7 +708,11 @@ export default function sqlLabReducer(state = {}, action) { }, ]), ); - return { ...state, queries: cleanedQueries }; + return { + ...state, + queries: cleanedQueries, + queriesLastUpdate: Date.now(), + }; }, [actions.SET_USER_OFFLINE]() { return { ...state, offline: action.offline }; @@ -708,6 +731,9 @@ export default function sqlLabReducer(state = {}, action) { [actions.CREATE_DATASOURCE_FAILED]() { return { ...state, isDatasourceLoading: false, errorMessage: action.err }; }, + [actions.SET_EDITOR_TAB_LAST_UPDATE]() { + return { ...state, editorTabLastUpdatedAt: action.timestamp }; + }, }; if (action.type in actionHandlers) { return actionHandlers[action.type](); diff --git a/superset-frontend/src/SqlLab/reducers/sqlLab.test.js b/superset-frontend/src/SqlLab/reducers/sqlLab.test.js index e1a234734b..5a70f10bb3 100644 --- a/superset-frontend/src/SqlLab/reducers/sqlLab.test.js +++ b/superset-frontend/src/SqlLab/reducers/sqlLab.test.js @@ -20,6 +20,7 @@ import { QueryState } from '@superset-ui/core'; import sqlLabReducer from 'src/SqlLab/reducers/sqlLab'; import * as actions from 'src/SqlLab/actions/sqlLab'; import { table, initialState as mockState } from '../fixtures'; +import { QUERY_UPDATE_FREQ } from '../components/QueryAutoRefresh'; const initialState = mockState.sqlLab; @@ -404,6 +405,7 @@ describe('sqlLabReducer', () => { }; }); it('updates queries that have already been completed', () => { + const current = Date.now(); newState = sqlLabReducer( { ...newState, @@ -418,9 +420,10 @@ describe('sqlLabReducer', () => { }, }, }, - actions.clearInactiveQueries(Date.now()), + actions.clearInactiveQueries(QUERY_UPDATE_FREQ), ); expect(newState.queries.abcd.state).toBe(QueryState.SUCCESS); + expect(newState.queriesLastUpdate).toBeGreaterThanOrEqual(current); }); }); }); diff --git a/superset-frontend/src/SqlLab/types.ts b/superset-frontend/src/SqlLab/types.ts index 5ecd69293c..6eb42718f0 100644 --- a/superset-frontend/src/SqlLab/types.ts +++ b/superset-frontend/src/SqlLab/types.ts @@ -29,7 +29,14 @@ export type QueryDictionary = { [id: string]: QueryResponse; }; +export enum QueryEditorVersion { + v1 = 1, +} + +export const LatestQueryEditorVersion = QueryEditorVersion.v1; + export interface QueryEditor { + version: QueryEditorVersion; id: string; dbId?: number; name: string; @@ -48,6 +55,7 @@ export interface QueryEditor { inLocalStorage?: boolean; northPercent?: number; southPercent?: number; + updatedAt?: number; } export type toastState = { @@ -86,7 +94,7 @@ export type SqlLabRootState = { errorMessage: string | null; unsavedQueryEditor: UnsavedQueryEditor; queryCostEstimates?: Record<string, QueryCostEstimate>; - editorTabLastUpdatedAt?: number; + editorTabLastUpdatedAt: number; }; localStorageUsageInKilobytes: number; messageToasts: toastState[]; diff --git a/superset-frontend/src/SqlLab/utils/emptyQueryResults.test.js b/superset-frontend/src/SqlLab/utils/emptyQueryResults.test.js index 9984e1efca..f08fccbef7 100644 --- a/superset-frontend/src/SqlLab/utils/emptyQueryResults.test.js +++ b/superset-frontend/src/SqlLab/utils/emptyQueryResults.test.js @@ -83,10 +83,11 @@ describe('reduxStateToLocalStorageHelper', () => { }); it('should only return selected keys for query editor', () => { - const queryEditors = [defaultQueryEditor]; - expect(Object.keys(queryEditors[0])).toContain('schema'); + const queryEditors = [{ ...defaultQueryEditor, dummy: 'value' }]; + 
expect(Object.keys(queryEditors[0])).toContain('dummy'); const clearedQueryEditors = clearQueryEditors(queryEditors); - expect(Object.keys(clearedQueryEditors)[0]).not.toContain('schema'); + expect(Object.keys(clearedQueryEditors[0])).toContain('version'); + expect(Object.keys(clearedQueryEditors[0])).not.toContain('dummy'); }); }); diff --git a/superset-frontend/src/SqlLab/utils/reduxStateToLocalStorageHelper.js b/superset-frontend/src/SqlLab/utils/reduxStateToLocalStorageHelper.js index 281f08bcb3..f82711362d 100644 --- a/superset-frontend/src/SqlLab/utils/reduxStateToLocalStorageHelper.js +++ b/superset-frontend/src/SqlLab/utils/reduxStateToLocalStorageHelper.js @@ -26,6 +26,7 @@ import { } from '../constants'; const PERSISTENT_QUERY_EDITOR_KEYS = new Set([ + 'version', 'remoteId', 'autorun', 'dbId', diff --git a/superset-frontend/src/assets/images/doris.png b/superset-frontend/src/assets/images/doris.png new file mode 100644 index 0000000000..4d88f2a36c Binary files /dev/null and b/superset-frontend/src/assets/images/doris.png differ diff --git a/superset-frontend/src/components/AuditInfo/ModifiedInfo.test.tsx b/superset-frontend/src/components/AuditInfo/ModifiedInfo.test.tsx new file mode 100644 index 0000000000..af9d6913d8 --- /dev/null +++ b/superset-frontend/src/components/AuditInfo/ModifiedInfo.test.tsx @@ -0,0 +1,42 @@ +import React from 'react'; +import { render, screen, waitFor } from 'spec/helpers/testing-library'; +import '@testing-library/jest-dom'; +import userEvent from '@testing-library/user-event'; + +import { ModifiedInfo } from '.'; + +const TEST_DATE = '2023-11-20'; +const USER = { + id: 1, + first_name: 'Foo', + last_name: 'Bar', +}; + +test('should render a tooltip when user is provided', async () => { + render(<ModifiedInfo user={USER} date={TEST_DATE} />); + + const dateElement = screen.getByTestId('audit-info-date'); + expect(dateElement).toBeInTheDocument(); + expect(screen.getByText(TEST_DATE)).toBeInTheDocument(); + expect(screen.queryByText('Modified by: Foo Bar')).not.toBeInTheDocument(); + userEvent.hover(dateElement); + const tooltip = await screen.findByRole('tooltip'); + expect(tooltip).toBeInTheDocument(); + expect(screen.getByText('Modified by: Foo Bar')).toBeInTheDocument(); +}); + +test('should render only the date if username is not provided', async () => { + render(<ModifiedInfo date={TEST_DATE} />); + + const dateElement = screen.getByTestId('audit-info-date'); + expect(dateElement).toBeInTheDocument(); + expect(screen.getByText(TEST_DATE)).toBeInTheDocument(); + userEvent.hover(dateElement); + await waitFor( + () => { + const tooltip = screen.queryByRole('tooltip'); + expect(tooltip).not.toBeInTheDocument(); + }, + { timeout: 1000 }, + ); +}); diff --git a/superset-frontend/src/components/AuditInfo/index.tsx b/superset-frontend/src/components/AuditInfo/index.tsx new file mode 100644 index 0000000000..24223a1554 --- /dev/null +++ b/superset-frontend/src/components/AuditInfo/index.tsx @@ -0,0 +1,30 @@ +import React from 'react'; + +import Owner from 'src/types/Owner'; +import { Tooltip } from 'src/components/Tooltip'; +import getOwnerName from 'src/utils/getOwnerName'; +import { t } from '@superset-ui/core'; + +export type ModifiedInfoProps = { + user?: Owner; + date: string; +}; + +export const ModifiedInfo = ({ user, date }: ModifiedInfoProps) => { + const dateSpan = ( + <span className="no-wrap" data-test="audit-info-date"> + {date} + </span> + ); + + if (user) { + const userName = getOwnerName(user); + const title = t('Modified by: %s', 
userName); + return ( + <Tooltip title={title} placement="bottom"> + {dateSpan} + </Tooltip> + ); + } + return dateSpan; +}; diff --git a/superset-frontend/src/components/Chart/Chart.jsx b/superset-frontend/src/components/Chart/Chart.jsx index af90ae6b0a..da9a81516f 100644 --- a/superset-frontend/src/components/Chart/Chart.jsx +++ b/superset-frontend/src/components/Chart/Chart.jsx @@ -169,7 +169,7 @@ class Chart extends React.PureComponent { // Create chart with POST request this.props.actions.postChartFormData( this.props.formData, - this.props.force || getUrlParam(URL_PARAMS.force), // allow override via url params force=true + Boolean(this.props.force || getUrlParam(URL_PARAMS.force)), // allow override via url params force=true this.props.timeout, this.props.chartId, this.props.dashboardId, diff --git a/superset-frontend/src/components/Chart/chartAction.js b/superset-frontend/src/components/Chart/chartAction.js index 9e5dc0eddd..8cd3785ae5 100644 --- a/superset-frontend/src/components/Chart/chartAction.js +++ b/superset-frontend/src/components/Chart/chartAction.js @@ -183,7 +183,7 @@ const v1ChartDataRequest = async ( const qs = {}; if (sliceId !== undefined) qs.form_data = `{"slice_id":${sliceId}}`; if (dashboardId !== undefined) qs.dashboard_id = dashboardId; - if (force !== false) qs.force = force; + if (force) qs.force = force; const allowDomainSharding = // eslint-disable-next-line camelcase @@ -269,9 +269,12 @@ export function runAnnotationQuery({ return Promise.resolve(); } - const granularity = fd.time_grain_sqla || fd.granularity; - fd.time_grain_sqla = granularity; - fd.granularity = granularity; + // In the original formData the `granularity` attribute represents the time grain (e.g. + // `P1D`), but in the request payload it corresponds to the name of the column where + // the time grain should be applied (e.g. `Date`), so we need to move things around.
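+ // For example (illustrative values only): a formData of + // { time_grain_sqla: 'P1D', granularity_sqla: 'Date' } should yield a payload + // containing { time_grain_sqla: 'P1D', granularity: 'Date' }.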
+ fd.time_grain_sqla = fd.time_grain_sqla || fd.granularity; + fd.granularity = fd.granularity_sqla; + const overridesKeys = Object.keys(annotation.overrides); if (overridesKeys.includes('since') || overridesKeys.includes('until')) { annotation.overrides = { diff --git a/superset-frontend/src/components/Chart/chartActions.test.js b/superset-frontend/src/components/Chart/chartActions.test.js index 65b008de62..b3a6fed9f5 100644 --- a/superset-frontend/src/components/Chart/chartActions.test.js +++ b/superset-frontend/src/components/Chart/chartActions.test.js @@ -21,6 +21,7 @@ import fetchMock from 'fetch-mock'; import sinon from 'sinon'; import * as chartlib from '@superset-ui/core'; +import { SupersetClient } from '@superset-ui/core'; import { LOG_EVENT } from 'src/logger/actions'; import * as exploreUtils from 'src/explore/exploreUtils'; import * as actions from 'src/components/Chart/chartAction'; @@ -51,7 +52,7 @@ describe('chart actions', () => { .callsFake(() => MOCK_URL); getChartDataUriStub = sinon .stub(exploreUtils, 'getChartDataUri') - .callsFake(() => URI(MOCK_URL)); + .callsFake(({ qs }) => URI(MOCK_URL).query(qs)); fakeMetadata = { useLegacyApi: true }; metadataRegistryStub = sinon .stub(chartlib, 'getChartMetadataRegistry') @@ -81,7 +82,7 @@ describe('chart actions', () => { }); it('should query with the built query', async () => { - const actionThunk = actions.postChartFormData({}); + const actionThunk = actions.postChartFormData({}, null); await actionThunk(dispatch); expect(fetchMock.calls(MOCK_URL)).toHaveLength(1); @@ -233,4 +234,70 @@ describe('chart actions', () => { expect(json.result[0].value.toString()).toEqual(expectedBigNumber); }); }); + + describe('runAnnotationQuery', () => { + const mockDispatch = jest.fn(); + const mockGetState = () => ({ + charts: { + chartKey: { + latestQueryFormData: { + time_grain_sqla: 'P1D', + granularity_sqla: 'Date', + }, + }, + }, + }); + + beforeEach(() => { + jest.clearAllMocks(); + }); + + it('should dispatch annotationQueryStarted and annotationQuerySuccess on successful query', async () => { + const annotation = { + name: 'Holidays', + annotationType: 'EVENT', + sourceType: 'NATIVE', + color: null, + opacity: '', + style: 'solid', + width: 1, + showMarkers: false, + hideLine: false, + value: 1, + overrides: { + time_range: null, + }, + show: true, + showLabel: false, + titleColumn: '', + descriptionColumns: [], + timeColumn: '', + intervalEndColumn: '', + }; + const key = undefined; + + const postSpy = jest.spyOn(SupersetClient, 'post'); + postSpy.mockImplementation(() => + Promise.resolve({ json: { result: [] } }), + ); + const buildV1ChartDataPayloadSpy = jest.spyOn( + exploreUtils, + 'buildV1ChartDataPayload', + ); + + const queryFunc = actions.runAnnotationQuery({ annotation, key }); + await queryFunc(mockDispatch, mockGetState); + + expect(buildV1ChartDataPayloadSpy).toHaveBeenCalledWith({ + formData: { + granularity: 'Date', + granularity_sqla: 'Date', + time_grain_sqla: 'P1D', + }, + force: false, + resultFormat: 'json', + resultType: 'full', + }); + }); + }); }); diff --git a/superset-frontend/src/components/DatabaseSelector/DatabaseSelector.test.tsx b/superset-frontend/src/components/DatabaseSelector/DatabaseSelector.test.tsx index 7635361d89..874d22ea6b 100644 --- a/superset-frontend/src/components/DatabaseSelector/DatabaseSelector.test.tsx +++ b/superset-frontend/src/components/DatabaseSelector/DatabaseSelector.test.tsx @@ -290,7 +290,13 @@ test('Sends the correct db when changing the database', async () => { test('Sends 
the correct schema when changing the schema', async () => { const props = createProps(); - render(<DatabaseSelector {...props} />, { useRedux: true, store }); + const { rerender } = render(<DatabaseSelector {...props} db={null} />, { + useRedux: true, + store, + }); + await waitFor(() => expect(fetchMock.calls(databaseApiRoute).length).toBe(1)); + rerender(<DatabaseSelector {...props} />); + expect(props.onSchemaChange).toBeCalledTimes(0); const select = screen.getByRole('combobox', { name: 'Select schema or type to search schemas', }); @@ -301,4 +307,5 @@ test('Sends the correct schema when changing the schema', async () => { await waitFor(() => expect(props.onSchemaChange).toHaveBeenCalledWith('information_schema'), ); + expect(props.onSchemaChange).toBeCalledTimes(1); }); diff --git a/superset-frontend/src/components/DatabaseSelector/index.tsx b/superset-frontend/src/components/DatabaseSelector/index.tsx index d17489a9c2..7b4afd9af0 100644 --- a/superset-frontend/src/components/DatabaseSelector/index.tsx +++ b/superset-frontend/src/components/DatabaseSelector/index.tsx @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -import React, { ReactNode, useState, useMemo, useEffect } from 'react'; +import React, { ReactNode, useState, useMemo, useEffect, useRef } from 'react'; import { styled, SupersetClient, t } from '@superset-ui/core'; import rison from 'rison'; import { AsyncSelect, Select } from 'src/components'; @@ -133,6 +133,8 @@ export default function DatabaseSelector({ const [currentSchema, setCurrentSchema] = useState<SchemaOption | undefined>( schema ? { label: schema, value: schema, title: schema } : undefined, ); + const schemaRef = useRef(schema); + schemaRef.current = schema; const { addSuccessToast } = useToasts(); const loadDatabases = useMemo( @@ -215,7 +217,7 @@ export default function DatabaseSelector({ function changeSchema(schema: SchemaOption | undefined) { setCurrentSchema(schema); - if (onSchemaChange) { + if (onSchemaChange && schema?.value !== schemaRef.current) { onSchemaChange(schema?.value); } } @@ -229,7 +231,9 @@ export default function DatabaseSelector({ onSuccess: (schemas, isFetched) => { if (schemas.length === 1) { changeSchema(schemas[0]); - } else if (!schemas.find(schemaOption => schema === schemaOption.value)) { + } else if ( + !schemas.find(schemaOption => schemaRef.current === schemaOption.value) + ) { changeSchema(undefined); } diff --git a/superset-frontend/src/components/Datasource/DatasourceEditor.jsx b/superset-frontend/src/components/Datasource/DatasourceEditor.jsx index 86b5c22777..751001297a 100644 --- a/superset-frontend/src/components/Datasource/DatasourceEditor.jsx +++ b/superset-frontend/src/components/Datasource/DatasourceEditor.jsx @@ -1114,7 +1114,7 @@ class DatasourceEditor extends React.PureComponent { <div css={{ width: 'calc(100% - 34px)', marginTop: -16 }}> <Field fieldKey="table_name" - label={t('Dataset name')} + label={t('Name')} control={ <TextControl controlId="table_name" diff --git a/superset-frontend/src/components/DynamicEditableTitle/index.tsx b/superset-frontend/src/components/DynamicEditableTitle/index.tsx index 86205bebc2..670962de5f 100644 --- a/superset-frontend/src/components/DynamicEditableTitle/index.tsx +++ b/superset-frontend/src/components/DynamicEditableTitle/index.tsx @@ -113,10 +113,7 @@ export const DynamicEditableTitle = ({ // then we can measure the width of that span to resize the input element useLayoutEffect(() => { if (sizerRef?.current) { - 
sizerRef.current.innerHTML = (currentTitle || placeholder).replace( - /\s/g, - ' ', - ); + sizerRef.current.textContent = currentTitle || placeholder; } }, [currentTitle, placeholder, sizerRef]); diff --git a/superset-frontend/src/components/Select/AsyncSelect.test.tsx b/superset-frontend/src/components/Select/AsyncSelect.test.tsx index c1442a6b70..0bb24b474a 100644 --- a/superset-frontend/src/components/Select/AsyncSelect.test.tsx +++ b/superset-frontend/src/components/Select/AsyncSelect.test.tsx @@ -868,6 +868,20 @@ test('fires onChange when clearing the selection in multiple mode', async () => expect(onChange).toHaveBeenCalledTimes(1); }); +test('fires onChange when pasting a selection', async () => { + const onChange = jest.fn(); + render(<AsyncSelect {...defaultProps} onChange={onChange} />); + await open(); + const input = getElementByClassName('.ant-select-selection-search-input'); + const paste = createEvent.paste(input, { + clipboardData: { + getData: () => OPTIONS[0].label, + }, + }); + fireEvent(input, paste); + expect(onChange).toHaveBeenCalledTimes(1); +}); + test('does not duplicate options when using numeric values', async () => { render( <AsyncSelect diff --git a/superset-frontend/src/components/Select/AsyncSelect.tsx b/superset-frontend/src/components/Select/AsyncSelect.tsx index 20de7bb591..d102af7483 100644 --- a/superset-frontend/src/components/Select/AsyncSelect.tsx +++ b/superset-frontend/src/components/Select/AsyncSelect.tsx @@ -554,6 +554,7 @@ const AsyncSelect = forwardRef( ...values, ]); } + fireOnChange(); }; const shouldRenderChildrenOptions = useMemo( diff --git a/superset-frontend/src/components/Select/Select.test.tsx b/superset-frontend/src/components/Select/Select.test.tsx index a6b8307582..2910353295 100644 --- a/superset-frontend/src/components/Select/Select.test.tsx +++ b/superset-frontend/src/components/Select/Select.test.tsx @@ -985,6 +985,20 @@ test('fires onChange when clearing the selection in multiple mode', async () => expect(onChange).toHaveBeenCalledTimes(1); }); +test('fires onChange when pasting a selection', async () => { + const onChange = jest.fn(); + render(<Select {...defaultProps} onChange={onChange} />); + await open(); + const input = getElementByClassName('.ant-select-selection-search-input'); + const paste = createEvent.paste(input, { + clipboardData: { + getData: () => OPTIONS[0].label, + }, + }); + fireEvent(input, paste); + expect(onChange).toHaveBeenCalledTimes(1); +}); + test('does not duplicate options when using numeric values', async () => { render( <Select diff --git a/superset-frontend/src/components/Select/Select.tsx b/superset-frontend/src/components/Select/Select.tsx index 6ccc1e1715..1e3bc73758 100644 --- a/superset-frontend/src/components/Select/Select.tsx +++ b/superset-frontend/src/components/Select/Select.tsx @@ -571,6 +571,7 @@ const Select = forwardRef( ]); } } + fireOnChange(); }; return ( diff --git a/superset-frontend/src/dashboard/actions/dashboardState.js b/superset-frontend/src/dashboard/actions/dashboardState.js index dcf1020e6d..b461275d8c 100644 --- a/superset-frontend/src/dashboard/actions/dashboardState.js +++ b/superset-frontend/src/dashboard/actions/dashboardState.js @@ -611,9 +611,14 @@ export function setDirectPathToChild(path) { return { type: SET_DIRECT_PATH, path }; } +export const SET_ACTIVE_TAB = 'SET_ACTIVE_TAB'; +export function setActiveTab(tabId, prevTabId) { + return { type: SET_ACTIVE_TAB, tabId, prevTabId }; +} + export const SET_ACTIVE_TABS = 'SET_ACTIVE_TABS'; -export function 
setActiveTabs(tabId, prevTabId) { - return { type: SET_ACTIVE_TABS, tabId, prevTabId }; +export function setActiveTabs(activeTabs) { + return { type: SET_ACTIVE_TABS, activeTabs }; } export const SET_FOCUSED_FILTER_FIELD = 'SET_FOCUSED_FILTER_FIELD'; diff --git a/superset-frontend/src/dashboard/components/Dashboard.jsx b/superset-frontend/src/dashboard/components/Dashboard.jsx index 827f0f455d..6e909f3b15 100644 --- a/superset-frontend/src/dashboard/components/Dashboard.jsx +++ b/superset-frontend/src/dashboard/components/Dashboard.jsx @@ -25,9 +25,8 @@ import Loading from 'src/components/Loading'; import getBootstrapData from 'src/utils/getBootstrapData'; import getChartIdsFromLayout from '../util/getChartIdsFromLayout'; import getLayoutComponentFromChartId from '../util/getLayoutComponentFromChartId'; -import DashboardBuilder from './DashboardBuilder/DashboardBuilder'; + import { - chartPropShape, slicePropShape, dashboardInfoPropShape, dashboardStatePropShape, @@ -53,7 +52,6 @@ const propTypes = { }).isRequired, dashboardInfo: dashboardInfoPropShape.isRequired, dashboardState: dashboardStatePropShape.isRequired, - charts: PropTypes.objectOf(chartPropShape).isRequired, slices: PropTypes.objectOf(slicePropShape).isRequired, activeFilters: PropTypes.object.isRequired, chartConfiguration: PropTypes.object, @@ -213,11 +211,6 @@ class Dashboard extends React.PureComponent { } } - // return charts in array - getAllCharts() { - return Object.values(this.props.charts); - } - applyFilters() { const { appliedFilters } = this; const { activeFilters, ownDataCharts } = this.props; @@ -288,11 +281,7 @@ class Dashboard extends React.PureComponent { if (this.context.loading) { return <Loading />; } - return ( - <> - <DashboardBuilder /> - </> - ); + return this.props.children; } } diff --git a/superset-frontend/src/dashboard/components/Dashboard.test.jsx b/superset-frontend/src/dashboard/components/Dashboard.test.jsx index 56a696f913..a66eab37e3 100644 --- a/superset-frontend/src/dashboard/components/Dashboard.test.jsx +++ b/superset-frontend/src/dashboard/components/Dashboard.test.jsx @@ -21,7 +21,6 @@ import { shallow } from 'enzyme'; import sinon from 'sinon'; import Dashboard from 'src/dashboard/components/Dashboard'; -import DashboardBuilder from 'src/dashboard/components/DashboardBuilder/DashboardBuilder'; import { CHART_TYPE } from 'src/dashboard/util/componentTypes'; import newComponentFactory from 'src/dashboard/util/newComponentFactory'; @@ -63,8 +62,14 @@ describe('Dashboard', () => { loadStats: {}, }; + const ChildrenComponent = () => <div>Test</div>; + function setup(overrideProps) { - const wrapper = shallow(<Dashboard {...props} {...overrideProps} />); + const wrapper = shallow( + <Dashboard {...props} {...overrideProps}> + <ChildrenComponent /> + </Dashboard>, + ); return wrapper; } @@ -76,9 +81,9 @@ describe('Dashboard', () => { '3_country_name': { values: ['USA'], scope: [] }, }; - it('should render a DashboardBuilder', () => { + it('should render the children component', () => { const wrapper = setup(); - expect(wrapper.find(DashboardBuilder)).toExist(); + expect(wrapper.find(ChildrenComponent)).toExist(); }); describe('UNSAFE_componentWillReceiveProps', () => { diff --git a/superset-frontend/src/dashboard/components/DashboardBuilder/DashboardBuilder.test.tsx b/superset-frontend/src/dashboard/components/DashboardBuilder/DashboardBuilder.test.tsx index 7c3dd23392..02a3a49971 100644 --- a/superset-frontend/src/dashboard/components/DashboardBuilder/DashboardBuilder.test.tsx +++ 
b/superset-frontend/src/dashboard/components/DashboardBuilder/DashboardBuilder.test.tsx @@ -25,7 +25,7 @@ import DashboardBuilder from 'src/dashboard/components/DashboardBuilder/Dashboar import useStoredSidebarWidth from 'src/components/ResizableSidebar/useStoredSidebarWidth'; import { fetchFaveStar, - setActiveTabs, + setActiveTab, setDirectPathToChild, } from 'src/dashboard/actions/dashboardState'; import { @@ -41,7 +41,7 @@ fetchMock.get('glob:*/csstemplateasyncmodelview/api/read', {}); jest.mock('src/dashboard/actions/dashboardState', () => ({ ...jest.requireActual('src/dashboard/actions/dashboardState'), fetchFaveStar: jest.fn(), - setActiveTabs: jest.fn(), + setActiveTab: jest.fn(), setDirectPathToChild: jest.fn(), })); jest.mock('src/components/ResizableSidebar/useStoredSidebarWidth'); @@ -90,7 +90,7 @@ describe('DashboardBuilder', () => { favStarStub = (fetchFaveStar as jest.Mock).mockReturnValue({ type: 'mock-action', }); - activeTabsStub = (setActiveTabs as jest.Mock).mockReturnValue({ + activeTabsStub = (setActiveTab as jest.Mock).mockReturnValue({ type: 'mock-action', }); (useStoredSidebarWidth as jest.Mock).mockImplementation(() => [ diff --git a/superset-frontend/src/dashboard/components/FiltersBadge/index.tsx b/superset-frontend/src/dashboard/components/FiltersBadge/index.tsx index cb5d261a1b..6dba29c661 100644 --- a/superset-frontend/src/dashboard/components/FiltersBadge/index.tsx +++ b/superset-frontend/src/dashboard/components/FiltersBadge/index.tsx @@ -59,7 +59,7 @@ const StyledFilterCount = styled.div` vertical-align: middle; color: ${theme.colors.grayscale.base}; &:hover { - color: ${theme.colors.grayscale.light1} + color: ${theme.colors.grayscale.light1}; } } diff --git a/superset-frontend/src/dashboard/components/PropertiesModal/index.tsx b/superset-frontend/src/dashboard/components/PropertiesModal/index.tsx index 92d34a4faa..3a1421e380 100644 --- a/superset-frontend/src/dashboard/components/PropertiesModal/index.tsx +++ b/superset-frontend/src/dashboard/components/PropertiesModal/index.tsx @@ -681,7 +681,7 @@ const PropertiesModal = ({ </Row> <Row gutter={16}> <Col xs={24} md={12}> - <FormItem label={t('Title')} name="title"> + <FormItem label={t('Name')} name="title"> <Input data-test="dashboard-title-input" type="text" diff --git a/superset-frontend/src/dashboard/components/SliceHeader/SliceHeader.test.tsx b/superset-frontend/src/dashboard/components/SliceHeader/SliceHeader.test.tsx index e16cab8daa..f452e22ac8 100644 --- a/superset-frontend/src/dashboard/components/SliceHeader/SliceHeader.test.tsx +++ b/superset-frontend/src/dashboard/components/SliceHeader/SliceHeader.test.tsx @@ -19,6 +19,7 @@ import React from 'react'; import { Router } from 'react-router-dom'; import { createMemoryHistory } from 'history'; +import { getExtensionsRegistry } from '@superset-ui/core'; import { render, screen } from 'spec/helpers/testing-library'; import userEvent from '@testing-library/user-event'; import SliceHeader from '.'; @@ -472,3 +473,15 @@ test('Correct actions to "SliceHeaderControls"', () => { userEvent.click(screen.getByTestId('handleToggleFullSize')); expect(props.handleToggleFullSize).toBeCalledTimes(1); }); + +test('Add extension to SliceHeader', () => { + const extensionsRegistry = getExtensionsRegistry(); + extensionsRegistry.set('dashboard.slice.header', () => ( + <div>This is an extension</div> + )); + + const props = createProps(); + render(<SliceHeader {...props} />, { useRedux: true, useRouter: true }); + + expect(screen.getByText('This is an 
extension')).toBeInTheDocument(); +}); diff --git a/superset-frontend/src/dashboard/components/SliceHeader/index.tsx b/superset-frontend/src/dashboard/components/SliceHeader/index.tsx index c9cb74a8af..ea4f3b63ba 100644 --- a/superset-frontend/src/dashboard/components/SliceHeader/index.tsx +++ b/superset-frontend/src/dashboard/components/SliceHeader/index.tsx @@ -24,7 +24,7 @@ import React, { useRef, useState, } from 'react'; -import { css, styled, t } from '@superset-ui/core'; +import { css, getExtensionsRegistry, styled, t } from '@superset-ui/core'; import { useUiConfig } from 'src/components/UiConfigContext'; import { Tooltip } from 'src/components/Tooltip'; import { useSelector } from 'react-redux'; @@ -38,6 +38,8 @@ import { RootState } from 'src/dashboard/types'; import { getSliceHeaderTooltip } from 'src/dashboard/util/getSliceHeaderTooltip'; import { DashboardPageIdContext } from 'src/dashboard/containers/DashboardPage'; +const extensionsRegistry = getExtensionsRegistry(); + type SliceHeaderProps = SliceHeaderControlsProps & { innerRef?: string; updateSliceName?: (arg0: string) => void; @@ -161,6 +163,7 @@ const SliceHeader: FC<SliceHeaderProps> = ({ width, height, }) => { + const SliceHeaderExtension = extensionsRegistry.get('dashboard.slice.header'); const uiConfig = useUiConfig(); const dashboardPageId = useContext(DashboardPageIdContext); const [headerTooltip, setHeaderTooltip] = useState<ReactNode | null>(null); @@ -239,6 +242,12 @@ const SliceHeader: FC<SliceHeaderProps> = ({ <div className="header-controls"> {!editMode && ( <> + {SliceHeaderExtension && ( + <SliceHeaderExtension + sliceId={slice.slice_id} + dashboardId={dashboardId} + /> + )} {crossFilterValue && ( <Tooltip placement="top" diff --git a/superset-frontend/src/dashboard/components/SyncDashboardState/SyncDashboardState.test.tsx b/superset-frontend/src/dashboard/components/SyncDashboardState/SyncDashboardState.test.tsx new file mode 100644 index 0000000000..1565a43e19 --- /dev/null +++ b/superset-frontend/src/dashboard/components/SyncDashboardState/SyncDashboardState.test.tsx @@ -0,0 +1,34 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +import React from 'react'; +import { render } from 'spec/helpers/testing-library'; +import { getItem, LocalStorageKeys } from 'src/utils/localStorageHelpers'; +import SyncDashboardState from '.'; + +test('stores the dashboard info in local storage', () => { + const testDashboardPageId = 'dashboardPageId'; + render(<SyncDashboardState dashboardPageId={testDashboardPageId} />, { + useRedux: true, + }); + expect(getItem(LocalStorageKeys.dashboard__explore_context, {})).toEqual({ + [testDashboardPageId]: expect.objectContaining({ + dashboardPageId: testDashboardPageId, + }), + }); +}); diff --git a/superset-frontend/src/dashboard/components/SyncDashboardState/index.tsx b/superset-frontend/src/dashboard/components/SyncDashboardState/index.tsx new file mode 100644 index 0000000000..b25d243292 --- /dev/null +++ b/superset-frontend/src/dashboard/components/SyncDashboardState/index.tsx @@ -0,0 +1,103 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import React, { useEffect } from 'react'; +import pick from 'lodash/pick'; +import { shallowEqual, useSelector } from 'react-redux'; +import { DashboardContextForExplore } from 'src/types/DashboardContextForExplore'; +import { + getItem, + LocalStorageKeys, + setItem, +} from 'src/utils/localStorageHelpers'; +import { RootState } from 'src/dashboard/types'; +import { getActiveFilters } from 'src/dashboard/util/activeDashboardFilters'; + +type Props = { dashboardPageId: string }; + +const EMPTY_OBJECT = {}; + +export const getDashboardContextLocalStorage = () => { + const dashboardsContexts = getItem( + LocalStorageKeys.dashboard__explore_context, + {}, + ); + // A new dashboard tab id is generated on each dashboard page opening. + // We mark ids as redundant when the user leaves the dashboard, because they won't be reused.
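+ // Illustrative shape of one stored entry (not exhaustive): + // { [dashboardPageId]: { dashboardId, dataMask, nativeFilters, ..., isRedundant?: true } }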
+ // Then we remove redundant dashboard contexts from local storage in order not to clutter it. + return Object.fromEntries( + Object.entries(dashboardsContexts).filter( + ([, value]) => !value.isRedundant, + ), + ); +}; + +const updateDashboardTabLocalStorage = ( + dashboardPageId: string, + dashboardContext: DashboardContextForExplore, +) => { + const dashboardsContexts = getDashboardContextLocalStorage(); + setItem(LocalStorageKeys.dashboard__explore_context, { + ...dashboardsContexts, + [dashboardPageId]: dashboardContext, + }); +}; + +const SyncDashboardState: React.FC<Props> = ({ dashboardPageId }) => { + const dashboardContextForExplore = useSelector< + RootState, + DashboardContextForExplore + >( + ({ dashboardInfo, dashboardState, nativeFilters, dataMask }) => ({ + labelColors: dashboardInfo.metadata?.label_colors || EMPTY_OBJECT, + sharedLabelColors: + dashboardInfo.metadata?.shared_label_colors || EMPTY_OBJECT, + colorScheme: dashboardState?.colorScheme, + chartConfiguration: + dashboardInfo.metadata?.chart_configuration || EMPTY_OBJECT, + nativeFilters: Object.entries(nativeFilters.filters).reduce( + (acc, [key, filterValue]) => ({ + ...acc, + [key]: pick(filterValue, ['chartsInScope']), + }), + {}, + ), + dataMask, + dashboardId: dashboardInfo.id, + filterBoxFilters: getActiveFilters(), + dashboardPageId, + }), + shallowEqual, + ); + + useEffect(() => { + updateDashboardTabLocalStorage(dashboardPageId, dashboardContextForExplore); + return () => { + // mark the tab id as redundant when the dashboard unmounts, covering the case where + // the user opens Explore in the same tab + updateDashboardTabLocalStorage(dashboardPageId, { + ...dashboardContextForExplore, + isRedundant: true, + }); + }; + }, [dashboardContextForExplore, dashboardPageId]); + + return null; +}; + +export default SyncDashboardState; diff --git a/superset-frontend/src/dashboard/components/gridComponents/Tabs.jsx b/superset-frontend/src/dashboard/components/gridComponents/Tabs.jsx index 7d9a46b75d..67f4b3c598 100644 --- a/superset-frontend/src/dashboard/components/gridComponents/Tabs.jsx +++ b/superset-frontend/src/dashboard/components/gridComponents/Tabs.jsx @@ -51,7 +51,7 @@ const propTypes = { // actions (from DashboardComponent.jsx) logEvent: PropTypes.func.isRequired, - setActiveTabs: PropTypes.func, + setActiveTab: PropTypes.func, // grid related availableColumnCount: PropTypes.number, @@ -75,7 +75,7 @@ const defaultProps = { columnWidth: 0, activeTabs: [], directPathToChild: [], - setActiveTabs() {}, + setActiveTab() {}, onResizeStart() {}, onResize() {}, onResizeStop() {}, @@ -125,12 +125,12 @@ export class Tabs extends React.PureComponent { } componentDidMount() { - this.props.setActiveTabs(this.state.activeKey); + this.props.setActiveTab(this.state.activeKey); } componentDidUpdate(prevProps, prevState) { if (prevState.activeKey !== this.state.activeKey) { - this.props.setActiveTabs(this.state.activeKey, prevState.activeKey); + this.props.setActiveTab(this.state.activeKey, prevState.activeKey); } } diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/FilterControls/FilterControl.tsx b/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/FilterControls/FilterControl.tsx index 37739e5370..96f51f5359 100644 --- a/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/FilterControls/FilterControl.tsx +++ b/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/FilterControls/FilterControl.tsx @@ -49,7 +49,6 @@ const
HorizontalFilterControlTitle = styled(VerticalFilterControlTitle)` font-weight: ${({ theme }) => theme.typography.weights.normal}; color: ${({ theme }) => theme.colors.grayscale.base}; - max-width: ${({ theme }) => theme.gridUnit * 15}px; ${truncationCSS}; `; diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/FilterControls/FilterValue.tsx b/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/FilterControls/FilterValue.tsx index 5235edcdc3..f44a1a1df6 100644 --- a/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/FilterControls/FilterValue.tsx +++ b/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/FilterControls/FilterValue.tsx @@ -52,6 +52,7 @@ import { onFiltersRefreshSuccess, setDirectPathToChild, } from 'src/dashboard/actions/dashboardState'; +import { RESPONSIVE_WIDTH } from 'src/filters/components/common'; import { FAST_DEBOUNCE } from 'src/constants'; import { dispatchHoverAction, dispatchFocusAction } from './utils'; import { FilterControlProps } from './types'; @@ -322,7 +323,7 @@ const FilterValue: React.FC<FilterControlProps> = ({ ) : ( <SuperChart height={HEIGHT} - width="100%" + width={RESPONSIVE_WIDTH} showOverflow={showOverflow} formData={formData} displaySettings={displaySettings} diff --git a/superset-frontend/src/dashboard/containers/Dashboard.ts b/superset-frontend/src/dashboard/containers/Dashboard.ts index 50e42fef24..5f9b29b95d 100644 --- a/superset-frontend/src/dashboard/containers/Dashboard.ts +++ b/superset-frontend/src/dashboard/containers/Dashboard.ts @@ -39,7 +39,6 @@ function mapStateToProps(state: RootState) { const { datasources, sliceEntities, - charts, dataMask, dashboardInfo, dashboardState, @@ -54,7 +53,6 @@ function mapStateToProps(state: RootState) { userId: dashboardInfo.userId, dashboardInfo, dashboardState, - charts, datasources, // filters prop: a map structure for all the active filter_box's values and scope in this dashboard, // for each filter field. 
map key is [chartId_column] diff --git a/superset-frontend/src/dashboard/containers/DashboardComponent.jsx b/superset-frontend/src/dashboard/containers/DashboardComponent.jsx index 08b7ed9f82..68478adb07 100644 --- a/superset-frontend/src/dashboard/containers/DashboardComponent.jsx +++ b/superset-frontend/src/dashboard/containers/DashboardComponent.jsx @@ -35,7 +35,7 @@ import { } from 'src/dashboard/actions/dashboardLayout'; import { setDirectPathToChild, - setActiveTabs, + setActiveTab, setFullSizeChartId, } from 'src/dashboard/actions/dashboardState'; @@ -109,7 +109,7 @@ function mapDispatchToProps(dispatch) { handleComponentDrop, setDirectPathToChild, setFullSizeChartId, - setActiveTabs, + setActiveTab, logEvent, }, dispatch, diff --git a/superset-frontend/src/dashboard/containers/DashboardPage.tsx b/superset-frontend/src/dashboard/containers/DashboardPage.tsx index aef0fb3b6e..7dd618850e 100644 --- a/superset-frontend/src/dashboard/containers/DashboardPage.tsx +++ b/superset-frontend/src/dashboard/containers/DashboardPage.tsx @@ -28,7 +28,6 @@ import { t, useTheme, } from '@superset-ui/core'; -import pick from 'lodash/pick'; import { useDispatch, useSelector } from 'react-redux'; import { useToasts } from 'src/components/MessageToasts/withToasts'; import Loading from 'src/components/Loading'; @@ -42,11 +41,7 @@ import { setDatasources } from 'src/dashboard/actions/datasources'; import injectCustomCss from 'src/dashboard/util/injectCustomCss'; import setupPlugins from 'src/setup/setupPlugins'; -import { - getItem, - LocalStorageKeys, - setItem, -} from 'src/utils/localStorageHelpers'; +import { LocalStorageKeys, setItem } from 'src/utils/localStorageHelpers'; import { URL_PARAMS } from 'src/constants'; import { getUrlParam } from 'src/utils/urlUtils'; import { getFilterSets } from 'src/dashboard/actions/nativeFilters'; @@ -55,25 +50,28 @@ import { getFilterValue, getPermalinkValue, } from 'src/dashboard/components/nativeFilters/FilterBar/keyValue'; -import { DashboardContextForExplore } from 'src/types/DashboardContextForExplore'; +import DashboardContainer from 'src/dashboard/containers/Dashboard'; + import shortid from 'shortid'; import { RootState } from '../types'; -import { getActiveFilters } from '../util/activeDashboardFilters'; import { chartContextMenuStyles, filterCardPopoverStyle, headerStyles, } from '../styles'; +import SyncDashboardState, { + getDashboardContextLocalStorage, +} from '../components/SyncDashboardState'; export const DashboardPageIdContext = React.createContext(''); setupPlugins(); -const DashboardContainer = React.lazy( +const DashboardBuilder = React.lazy( () => import( /* webpackChunkName: "DashboardContainer" */ /* webpackPreload: true */ - 'src/dashboard/containers/Dashboard' + 'src/dashboard/components/DashboardBuilder/DashboardBuilder' ), ); @@ -83,74 +81,15 @@ type PageProps = { idOrSlug: string; }; -const getDashboardContextLocalStorage = () => { - const dashboardsContexts = getItem( - LocalStorageKeys.dashboard__explore_context, - {}, - ); - // A new dashboard tab id is generated on each dashboard page opening. - // We mark ids as redundant when user leaves the dashboard, because they won't be reused. 
- // Then we remove redundant dashboard contexts from local storage in order not to clutter it - return Object.fromEntries( - Object.entries(dashboardsContexts).filter( - ([, value]) => !value.isRedundant, - ), - ); -}; - -const updateDashboardTabLocalStorage = ( - dashboardPageId: string, - dashboardContext: DashboardContextForExplore, -) => { - const dashboardsContexts = getDashboardContextLocalStorage(); - setItem(LocalStorageKeys.dashboard__explore_context, { - ...dashboardsContexts, - [dashboardPageId]: dashboardContext, - }); -}; - -const useSyncDashboardStateWithLocalStorage = () => { - const dashboardPageId = useMemo(() => shortid.generate(), []); - const dashboardContextForExplore = useSelector< - RootState, - DashboardContextForExplore - >(({ dashboardInfo, dashboardState, nativeFilters, dataMask }) => ({ - labelColors: dashboardInfo.metadata?.label_colors || {}, - sharedLabelColors: dashboardInfo.metadata?.shared_label_colors || {}, - colorScheme: dashboardState?.colorScheme, - chartConfiguration: dashboardInfo.metadata?.chart_configuration || {}, - nativeFilters: Object.entries(nativeFilters.filters).reduce( - (acc, [key, filterValue]) => ({ - ...acc, - [key]: pick(filterValue, ['chartsInScope']), - }), - {}, - ), - dataMask, - dashboardId: dashboardInfo.id, - filterBoxFilters: getActiveFilters(), - dashboardPageId, - })); - - useEffect(() => { - updateDashboardTabLocalStorage(dashboardPageId, dashboardContextForExplore); - return () => { - // mark tab id as redundant when dashboard unmounts - case when user opens - // Explore in the same tab - updateDashboardTabLocalStorage(dashboardPageId, { - ...dashboardContextForExplore, - isRedundant: true, - }); - }; - }, [dashboardContextForExplore, dashboardPageId]); - return dashboardPageId; -}; - export const DashboardPage: FC<PageProps> = ({ idOrSlug }: PageProps) => { const theme = useTheme(); const dispatch = useDispatch(); const history = useHistory(); - const dashboardPageId = useSyncDashboardStateWithLocalStorage(); + const dashboardPageId = useMemo(() => shortid.generate(), []); + const hasDashboardInfoInitiated = useSelector<RootState, Boolean>( + ({ dashboardInfo }) => + dashboardInfo && Object.keys(dashboardInfo).length > 0, + ); const { addDangerToast } = useToasts(); const { result: dashboard, error: dashboardApiError } = useDashboard(idOrSlug); @@ -284,7 +223,7 @@ export const DashboardPage: FC<PageProps> = ({ idOrSlug }: PageProps) => { }, [addDangerToast, datasets, datasetsApiError, dispatch]); if (error) throw error; // caught in error boundary - if (!readyToRender || !isDashboardHydrated.current) return <Loading />; + if (!readyToRender || !hasDashboardInfoInitiated) return <Loading />; return ( <> @@ -295,8 +234,11 @@ export const DashboardPage: FC<PageProps> = ({ idOrSlug }: PageProps) => { chartContextMenuStyles(theme), ]} /> + <SyncDashboardState dashboardPageId={dashboardPageId} /> <DashboardPageIdContext.Provider value={dashboardPageId}> - <DashboardContainer /> + <DashboardContainer> + <DashboardBuilder /> + </DashboardContainer> </DashboardPageIdContext.Provider> </> ); diff --git a/superset-frontend/src/dashboard/reducers/dashboardState.js b/superset-frontend/src/dashboard/reducers/dashboardState.js index 5d81cd8ac1..015cb9822c 100644 --- a/superset-frontend/src/dashboard/reducers/dashboardState.js +++ b/superset-frontend/src/dashboard/reducers/dashboardState.js @@ -37,6 +37,7 @@ import { SET_DIRECT_PATH, SET_FOCUSED_FILTER_FIELD, UNSET_FOCUSED_FILTER_FIELD, + SET_ACTIVE_TAB, SET_ACTIVE_TABS, 
SET_FULL_SIZE_CHART_ID, ON_FILTERS_REFRESH, @@ -179,7 +180,7 @@ export default function dashboardStateReducer(state = {}, action) { directPathLastUpdated: Date.now(), }; }, - [SET_ACTIVE_TABS]() { + [SET_ACTIVE_TAB]() { const newActiveTabs = new Set(state.activeTabs); newActiveTabs.delete(action.prevTabId); newActiveTabs.add(action.tabId); @@ -188,6 +189,12 @@ export default function dashboardStateReducer(state = {}, action) { activeTabs: Array.from(newActiveTabs), }; }, + [SET_ACTIVE_TABS]() { + return { + ...state, + activeTabs: action.activeTabs, + }; + }, [SET_OVERRIDE_CONFIRM]() { return { ...state, diff --git a/superset-frontend/src/dashboard/reducers/dashboardState.test.ts b/superset-frontend/src/dashboard/reducers/dashboardState.test.ts index 274b26733c..3a8adc6cbb 100644 --- a/superset-frontend/src/dashboard/reducers/dashboardState.test.ts +++ b/superset-frontend/src/dashboard/reducers/dashboardState.test.ts @@ -18,21 +18,33 @@ */ import dashboardStateReducer from './dashboardState'; -import { setActiveTabs } from '../actions/dashboardState'; +import { setActiveTab, setActiveTabs } from '../actions/dashboardState'; describe('DashboardState reducer', () => { - it('SET_ACTIVE_TABS', () => { + it('SET_ACTIVE_TAB', () => { expect( - dashboardStateReducer({ activeTabs: [] }, setActiveTabs('tab1')), + dashboardStateReducer({ activeTabs: [] }, setActiveTab('tab1')), ).toEqual({ activeTabs: ['tab1'] }); expect( - dashboardStateReducer({ activeTabs: ['tab1'] }, setActiveTabs('tab1')), + dashboardStateReducer({ activeTabs: ['tab1'] }, setActiveTab('tab1')), ).toEqual({ activeTabs: ['tab1'] }); expect( dashboardStateReducer( { activeTabs: ['tab1'] }, - setActiveTabs('tab2', 'tab1'), + setActiveTab('tab2', 'tab1'), ), ).toEqual({ activeTabs: ['tab2'] }); }); + + it('SET_ACTIVE_TABS', () => { + expect( + dashboardStateReducer({ activeTabs: [] }, setActiveTabs(['tab1'])), + ).toEqual({ activeTabs: ['tab1'] }); + expect( + dashboardStateReducer( + { activeTabs: ['tab1', 'tab2'] }, + setActiveTabs(['tab3', 'tab4']), + ), + ).toEqual({ activeTabs: ['tab3', 'tab4'] }); + }); }); diff --git a/superset-frontend/src/dataMask/reducer.ts b/superset-frontend/src/dataMask/reducer.ts index 6e9a5fae54..f2163a54a4 100644 --- a/superset-frontend/src/dataMask/reducer.ts +++ b/superset-frontend/src/dataMask/reducer.ts @@ -56,7 +56,6 @@ export function getInitialDataMask( } return { ...otherProps, - __cache: {}, extraFormData: {}, filterState: {}, ownState: {}, diff --git a/superset-frontend/src/explore/components/controls/DndColumnSelectControl/ColumnSelectPopover.test.tsx b/superset-frontend/src/explore/components/controls/DndColumnSelectControl/ColumnSelectPopover.test.tsx new file mode 100644 index 0000000000..e7ff7cd9a7 --- /dev/null +++ b/superset-frontend/src/explore/components/controls/DndColumnSelectControl/ColumnSelectPopover.test.tsx @@ -0,0 +1,77 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import React from 'react'; +import { render, fireEvent } from '@testing-library/react'; +import '@testing-library/jest-dom/extend-expect'; +import { Provider } from 'react-redux'; +import configureMockStore from 'redux-mock-store'; +import thunk from 'redux-thunk'; +import { supersetTheme, ThemeProvider } from '@superset-ui/core'; +import ColumnSelectPopover from 'src/explore/components/controls/DndColumnSelectControl/ColumnSelectPopover'; + +const middlewares = [thunk]; +const mockStore = configureMockStore(middlewares); + +describe('ColumnSelectPopover - onTabChange function', () => { + it('updates adhocColumn when switching to sqlExpression tab with custom label', () => { + const mockColumns = [{ column_name: 'year' }]; + const mockOnClose = jest.fn(); + const mockOnChange = jest.fn(); + const mockGetCurrentTab = jest.fn(); + const mockSetDatasetModal = jest.fn(); + const mockSetLabel = jest.fn(); + + const store = mockStore({ explore: { datasource: { type: 'table' } } }); + + const { container, getByText } = render( + <Provider store={store}> + <ThemeProvider theme={supersetTheme}> + <ColumnSelectPopover + columns={mockColumns} + editedColumn={mockColumns[0]} + getCurrentTab={mockGetCurrentTab} + hasCustomLabel + isTemporal + label="Custom Label" + onChange={mockOnChange} + onClose={mockOnClose} + setDatasetModal={mockSetDatasetModal} + setLabel={mockSetLabel} + /> + </ThemeProvider> + </Provider>, + ); + + const sqlExpressionTab = container.querySelector( + '#adhoc-metric-edit-tabs-tab-sqlExpression', + ); + expect(sqlExpressionTab).not.toBeNull(); + fireEvent.click(sqlExpressionTab!); + expect(mockGetCurrentTab).toHaveBeenCalledWith('sqlExpression'); + + const saveButton = getByText('Save'); + fireEvent.click(saveButton); + expect(mockOnChange).toHaveBeenCalledWith({ + label: 'Custom Label', + sqlExpression: 'year', + expressionType: 'SQL', + }); + }); +}); diff --git a/superset-frontend/src/explore/components/controls/DndColumnSelectControl/ColumnSelectPopover.tsx b/superset-frontend/src/explore/components/controls/DndColumnSelectControl/ColumnSelectPopover.tsx index 4806e5394a..96abf36484 100644 --- a/superset-frontend/src/explore/components/controls/DndColumnSelectControl/ColumnSelectPopover.tsx +++ b/superset-frontend/src/explore/components/controls/DndColumnSelectControl/ColumnSelectPopover.tsx @@ -68,6 +68,7 @@ interface ColumnSelectPopoverProps { editedColumn?: ColumnMeta | AdhocColumn; onChange: (column: ColumnMeta | AdhocColumn) => void; onClose: () => void; + hasCustomLabel: boolean; setLabel: (title: string) => void; getCurrentTab: (tab: string) => void; label: string; @@ -93,13 +94,14 @@ const getInitialColumnValues = ( const ColumnSelectPopover = ({ columns, editedColumn, + getCurrentTab, + hasCustomLabel, + isTemporal, + label, onChange, onClose, setDatasetModal, setLabel, - getCurrentTab, - label, - isTemporal, }: ColumnSelectPopoverProps) => { const datasourceType = useSelector<ExplorePageState, string | undefined>( state => state.explore.datasource.type, @@ -117,6 +119,7 @@ const ColumnSelectPopover = ({ const [selectedSimpleColumn, 
setSelectedSimpleColumn] = useState< ColumnMeta | undefined >(initialSimpleColumn); + const [selectedTab, setSelectedTab] = useState<string | null>(null); const [resizeButton, width, height] = useResizeButton( POPOVER_INITIAL_WIDTH, @@ -188,7 +191,34 @@ const ColumnSelectPopover = ({ useEffect(() => { getCurrentTab(defaultActiveTabKey); - }, [defaultActiveTabKey, getCurrentTab]); + setSelectedTab(defaultActiveTabKey); + }, [defaultActiveTabKey, getCurrentTab, setSelectedTab]); + + useEffect(() => { + /* if the adhoc column is not set (because it was never edited) but the + * sqlExpression tab is selected and the label has changed, then we need to set the + * adhoc column manually */ + if ( + adhocColumn === undefined && + selectedTab === 'sqlExpression' && + hasCustomLabel + ) { + const sqlExpression = + selectedSimpleColumn?.column_name || + selectedCalculatedColumn?.expression || + ''; + setAdhocColumn({ label, sqlExpression, expressionType: 'SQL' }); + } + }, [ + adhocColumn, + defaultActiveTabKey, + hasCustomLabel, + getCurrentTab, + label, + selectedCalculatedColumn, + selectedSimpleColumn, + selectedTab, + ]); const onSave = useCallback(() => { if (adhocColumn && adhocColumn.label !== label) { @@ -225,6 +255,7 @@ const ColumnSelectPopover = ({ const onTabChange = useCallback( tab => { getCurrentTab(tab); + setSelectedTab(tab); // @ts-ignore sqlEditorRef.current?.editor.focus(); }, diff --git a/superset-frontend/src/explore/components/controls/DndColumnSelectControl/ColumnSelectPopoverTrigger.tsx b/superset-frontend/src/explore/components/controls/DndColumnSelectControl/ColumnSelectPopoverTrigger.tsx index 4340317f04..341d91e616 100644 --- a/superset-frontend/src/explore/components/controls/DndColumnSelectControl/ColumnSelectPopoverTrigger.tsx +++ b/superset-frontend/src/explore/components/controls/DndColumnSelectControl/ColumnSelectPopoverTrigger.tsx @@ -103,6 +103,7 @@ const ColumnSelectPopoverTrigger = ({ setDatasetModal={setDatasetModal} onClose={handleClosePopover} onChange={onColumnEdit} + hasCustomLabel={hasCustomLabel} label={popoverLabel} setLabel={setPopoverLabel} getCurrentTab={getCurrentTab} @@ -114,6 +115,7 @@ const ColumnSelectPopoverTrigger = ({ columns, editedColumn, getCurrentTab, + hasCustomLabel, handleClosePopover, isTemporal, onColumnEdit, @@ -121,10 +123,13 @@ const ColumnSelectPopoverTrigger = ({ ], ); - const onLabelChange = useCallback((e: any) => { - setPopoverLabel(e.target.value); - setHasCustomLabel(true); - }, []); + const onLabelChange = useCallback( + (e: any) => { + setPopoverLabel(e.target.value); + setHasCustomLabel(true); + }, + [setPopoverLabel, setHasCustomLabel], + ); const popoverTitle = useMemo( () => ( diff --git a/superset-frontend/src/features/annotations/AnnotationModal.tsx b/superset-frontend/src/features/annotations/AnnotationModal.tsx index a5c5aa9c31..dd1107dfba 100644 --- a/superset-frontend/src/features/annotations/AnnotationModal.tsx +++ b/superset-frontend/src/features/annotations/AnnotationModal.tsx @@ -287,7 +287,7 @@ const AnnotationModal: FunctionComponent<AnnotationModalProps> = ({ </StyledAnnotationTitle> <AnnotationContainer> <div className="control-label"> - {t('Annotation name')} + {t('Name')} <span className="required">*</span> </div> <input diff --git a/superset-frontend/src/features/cssTemplates/CssTemplateModal.tsx b/superset-frontend/src/features/cssTemplates/CssTemplateModal.tsx index 73bbfe7555..bd3c5b13a6 100644 --- a/superset-frontend/src/features/cssTemplates/CssTemplateModal.tsx +++ 
b/superset-frontend/src/features/cssTemplates/CssTemplateModal.tsx @@ -105,6 +105,9 @@ const CssTemplateModal: FunctionComponent<CssTemplateModalProps> = ({ const update_id = currentCssTemplate.id; delete currentCssTemplate.id; delete currentCssTemplate.created_by; + delete currentCssTemplate.changed_by; + delete currentCssTemplate.changed_on_delta_humanized; + updateResource(update_id, currentCssTemplate).then(response => { if (!response) { return; @@ -235,7 +238,7 @@ const CssTemplateModal: FunctionComponent<CssTemplateModalProps> = ({ </StyledCssTemplateTitle> <TemplateContainer> <div className="control-label"> - {t('CSS template name')} + {t('Name')} <span className="required">*</span> </div> <input diff --git a/superset-frontend/src/features/cssTemplates/types.ts b/superset-frontend/src/features/cssTemplates/types.ts index 1bb5b2e659..5e7e1af97a 100644 --- a/superset-frontend/src/features/cssTemplates/types.ts +++ b/superset-frontend/src/features/cssTemplates/types.ts @@ -1,3 +1,5 @@ +import Owner from 'src/types/Owner'; + /** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file @@ -16,17 +18,12 @@ * specific language governing permissions and limitations * under the License. */ -type CreatedByUser = { - id: number; - first_name: string; - last_name: string; -}; - export type TemplateObject = { id?: number; changed_on_delta_humanized?: string; created_on?: string; - created_by?: CreatedByUser; + changed_by?: Owner; + created_by?: Owner; css?: string; template_name: string; }; diff --git a/superset-frontend/src/features/databases/DatabaseModal/ExtraOptions.tsx b/superset-frontend/src/features/databases/DatabaseModal/ExtraOptions.tsx index 55c3875f98..45706da586 100644 --- a/superset-frontend/src/features/databases/DatabaseModal/ExtraOptions.tsx +++ b/superset-frontend/src/features/databases/DatabaseModal/ExtraOptions.tsx @@ -202,7 +202,7 @@ const ExtraOptions = ({ /> </div> </StyledInputContainer> - <StyledInputContainer> + <StyledInputContainer css={no_margin_bottom}> <div className="input-container"> <IndeterminateCheckbox id="disable_data_preview" @@ -220,6 +220,22 @@ const ExtraOptions = ({ /> </div> </StyledInputContainer> + <StyledInputContainer> + <div className="input-container"> + <IndeterminateCheckbox + id="expand_rows" + indeterminate={false} + checked={!!extraJson?.schema_options?.expand_rows} + onChange={onExtraInputChange} + labelText={t('Enable row expansion in schemas')} + /> + <InfoTooltip + tooltip={t( + 'For Trino, describe full schemas of nested ROW types, expanding them with dotted paths', + )} + /> + </div> + </StyledInputContainer> </StyledExpandableForm> </StyledInputContainer> </Collapse.Panel> diff --git a/superset-frontend/src/features/databases/DatabaseModal/index.test.tsx b/superset-frontend/src/features/databases/DatabaseModal/index.test.tsx index bcd9fbe694..ba443e0099 100644 --- a/superset-frontend/src/features/databases/DatabaseModal/index.test.tsx +++ b/superset-frontend/src/features/databases/DatabaseModal/index.test.tsx @@ -674,7 +674,7 @@ describe('DatabaseModal', () => { const exposeInSQLLabCheckbox = screen.getByRole('checkbox', { name: /expose database in sql lab/i, }); - // This is both the checkbox and it's respective SVG + // This is both the checkbox and its respective SVG // const exposeInSQLLabCheckboxSVG = checkboxOffSVGs[0].parentElement; const exposeInSQLLabText = screen.getByText( /expose database in sql lab/i, @@ -721,6 +721,13 @@ describe('DatabaseModal', () => { 
/Disable SQL Lab data preview queries/i, ); + const enableRowExpansionCheckbox = screen.getByRole('checkbox', { + name: /enable row expansion in schemas/i, + }); + const enableRowExpansionText = screen.getByText( + /enable row expansion in schemas/i, + ); + // ---------- Assertions ---------- const visibleComponents = [ closeButton, @@ -737,6 +744,7 @@ describe('DatabaseModal', () => { checkboxOffSVGs[2], checkboxOffSVGs[3], checkboxOffSVGs[4], + checkboxOffSVGs[5], tooltipIcons[0], tooltipIcons[1], tooltipIcons[2], @@ -744,6 +752,7 @@ describe('DatabaseModal', () => { tooltipIcons[4], tooltipIcons[5], tooltipIcons[6], + tooltipIcons[7], exposeInSQLLabText, allowCTASText, allowCVASText, @@ -754,6 +763,7 @@ describe('DatabaseModal', () => { enableQueryCostEstimationText, allowDbExplorationText, disableSQLLabDataPreviewQueriesText, + enableRowExpansionText, ]; // These components exist in the DOM but are not visible const invisibleComponents = [ @@ -764,6 +774,7 @@ describe('DatabaseModal', () => { enableQueryCostEstimationCheckbox, allowDbExplorationCheckbox, disableSQLLabDataPreviewQueriesCheckbox, + enableRowExpansionCheckbox, ]; visibleComponents.forEach(component => { expect(component).toBeVisible(); @@ -771,8 +782,8 @@ describe('DatabaseModal', () => { invisibleComponents.forEach(component => { expect(component).not.toBeVisible(); }); - expect(checkboxOffSVGs).toHaveLength(5); - expect(tooltipIcons).toHaveLength(7); + expect(checkboxOffSVGs).toHaveLength(6); + expect(tooltipIcons).toHaveLength(8); }); test('renders the "Advanced" - PERFORMANCE tab correctly', async () => { diff --git a/superset-frontend/src/features/databases/DatabaseModal/index.tsx b/superset-frontend/src/features/databases/DatabaseModal/index.tsx index 0c1ac56369..18c93f2bf4 100644 --- a/superset-frontend/src/features/databases/DatabaseModal/index.tsx +++ b/superset-frontend/src/features/databases/DatabaseModal/index.tsx @@ -307,6 +307,18 @@ export function dbReducer( }), }; } + if (action.payload.name === 'expand_rows') { + return { + ...trimmedState, + extra: JSON.stringify({ + ...extraJson, + schema_options: { + ...extraJson?.schema_options, + [action.payload.name]: !!action.payload.value, + }, + }), + }; + } return { ...trimmedState, extra: JSON.stringify({ diff --git a/superset-frontend/src/features/databases/types.ts b/superset-frontend/src/features/databases/types.ts index e138a91436..1d616fa13c 100644 --- a/superset-frontend/src/features/databases/types.ts +++ b/superset-frontend/src/features/databases/types.ts @@ -226,5 +226,8 @@ export interface ExtraJson { table_cache_timeout?: number; // in Performance }; // No field, holds schema and table timeout schemas_allowed_for_file_upload?: string[]; // in Security + schema_options?: { + expand_rows?: boolean; + }; version?: string; } diff --git a/superset-frontend/src/features/home/Menu.tsx b/superset-frontend/src/features/home/Menu.tsx index 56a2fd611e..67b72fc515 100644 --- a/superset-frontend/src/features/home/Menu.tsx +++ b/superset-frontend/src/features/home/Menu.tsx @@ -24,7 +24,7 @@ import { getUrlParam } from 'src/utils/urlUtils'; import { Row, Col, Grid } from 'src/components'; import { MainNav as DropdownMenu, MenuMode } from 'src/components/Menu'; import { Tooltip } from 'src/components/Tooltip'; -import { Link, useLocation } from 'react-router-dom'; +import { NavLink, useLocation } from 'react-router-dom'; import { GenericLink } from 'src/components/GenericLink/GenericLink'; import Icons from 'src/components/Icons'; import { useUiConfig } from 
'src/components/UiConfigContext'; @@ -154,6 +154,29 @@ const globalStyles = (theme: SupersetTheme) => css` margin-left: ${theme.gridUnit * 1.75}px; } } + .ant-menu-item-selected { + background-color: transparent; + &:not(.ant-menu-item-active) { + color: inherit; + border-bottom-color: transparent; + & > a { + color: inherit; + } + } + } + .ant-menu-horizontal > .ant-menu-item:has(> .is-active) { + color: ${theme.colors.primary.base}; + border-bottom-color: ${theme.colors.primary.base}; + & > a { + color: ${theme.colors.primary.base}; + } + } + .ant-menu-vertical > .ant-menu-item:has(> .is-active) { + background-color: ${theme.colors.primary.light5}; + & > a { + color: ${theme.colors.primary.base}; + } + } `; const { SubMenu } = DropdownMenu; @@ -226,9 +249,9 @@ export function Menu({ if (url && isFrontendRoute) { return ( <DropdownMenu.Item key={label} role="presentation"> - <Link role="button" to={url}> + <NavLink role="button" to={url} activeClassName="is-active"> {label} - </Link> + </NavLink> </DropdownMenu.Item> ); } @@ -253,7 +276,13 @@ export function Menu({ return ( <DropdownMenu.Item key={`${child.label}`}> {child.isFrontendRoute ? ( - <Link to={child.url || ''}>{child.label}</Link> + <NavLink + to={child.url || ''} + exact + activeClassName="is-active" + > + {child.label} + </NavLink> ) : ( <a href={child.url}>{child.label}</a> )} diff --git a/superset-frontend/src/features/rls/RowLevelSecurityModal.tsx b/superset-frontend/src/features/rls/RowLevelSecurityModal.tsx index d7e7af7126..d14d48d0e5 100644 --- a/superset-frontend/src/features/rls/RowLevelSecurityModal.tsx +++ b/superset-frontend/src/features/rls/RowLevelSecurityModal.tsx @@ -385,10 +385,10 @@ function RowLevelSecurityModal(props: RowLevelSecurityModalProps) { <StyledInputContainer> <div className="control-label"> - {t('Tables')} <span className="required">*</span> + {t('Datasets')} <span className="required">*</span> <InfoTooltip tooltip={t( - 'These are the tables this filter will be applied to.', + 'These are the datasets this filter will be applied to.', )} /> </div> diff --git a/superset-frontend/src/features/tags/TagModal.test.tsx b/superset-frontend/src/features/tags/TagModal.test.tsx index 5f4fd4e2b9..99b7a3365e 100644 --- a/superset-frontend/src/features/tags/TagModal.test.tsx +++ b/superset-frontend/src/features/tags/TagModal.test.tsx @@ -56,10 +56,12 @@ test('renders correctly in edit mode', () => { changed_on_delta_humanized: '', created_on_delta_humanized: '', created_by: { + id: 1, first_name: 'joe', last_name: 'smith', }, changed_by: { + id: 2, first_name: 'tom', last_name: 'brown', }, diff --git a/superset-frontend/src/features/tags/TagModal.tsx b/superset-frontend/src/features/tags/TagModal.tsx index 4339d69130..5057c8441d 100644 --- a/superset-frontend/src/features/tags/TagModal.tsx +++ b/superset-frontend/src/features/tags/TagModal.tsx @@ -26,7 +26,7 @@ import { Input } from 'antd'; import { Divider } from 'src/components'; import Button from 'src/components/Button'; import { Tag } from 'src/views/CRUD/types'; -import { fetchObjects } from 'src/features/tags/tags'; +import { fetchObjectsByTagIds } from 'src/features/tags/tags'; const StyledModalBody = styled.div` .ant-select-dropdown { @@ -88,6 +88,14 @@ const TagModal: React.FC<TagModalProps> = ({ setSavedQueriesToTag([]); }; + const clearTagForm = () => { + setTagName(''); + setDescription(''); + setDashboardsToTag([]); + setChartsToTag([]); + setSavedQueriesToTag([]); + }; + useEffect(() => { const resourceMap: { [key: string]: 
TaggableResourceOption[] } = { [TaggableResources.Dashboard]: [], @@ -107,8 +115,8 @@ const TagModal: React.FC<TagModalProps> = ({ }; clearResources(); if (isEditMode) { - fetchObjects( - { tags: editTag.name, types: null }, + fetchObjectsByTagIds( + { tagIds: [editTag.id], types: null }, (data: Tag[]) => { data.forEach(updateResourceOptions); setDashboardsToTag(resourceMap[TaggableResources.Dashboard]); @@ -225,7 +233,9 @@ const TagModal: React.FC<TagModalProps> = ({ }) .then(({ json = {} }) => { refreshData(); + clearTagForm(); addSuccessToast(t('Tag updated')); + onHide(); }) .catch(err => { addDangerToast(err.message || 'Error Updating Tag'); @@ -241,24 +251,19 @@ const TagModal: React.FC<TagModalProps> = ({ }) .then(({ json = {} }) => { refreshData(); + clearTagForm(); addSuccessToast(t('Tag created')); + onHide(); }) .catch(err => addDangerToast(err.message || 'Error Creating Tag')); } - onHide(); }; return ( <Modal title={modalTitle} onHide={() => { - if (clearOnHide) { - setTagName(''); - setDescription(''); - setDashboardsToTag([]); - setChartsToTag([]); - setSavedQueriesToTag([]); - } + if (clearOnHide) clearTagForm(); onHide(); }} show={show} diff --git a/superset-frontend/src/features/tags/tags.ts b/superset-frontend/src/features/tags/tags.ts index 45c4e88fc5..db172681cb 100644 --- a/superset-frontend/src/features/tags/tags.ts +++ b/superset-frontend/src/features/tags/tags.ts @@ -194,3 +194,20 @@ export function fetchObjects( .then(({ json }) => callback(json.result)) .catch(response => error(response)); } + +export function fetchObjectsByTagIds( + { + tagIds = [], + types, + }: { tagIds: number[] | undefined; types: string | null }, + callback: (json: JsonObject) => void, + error: (response: Response) => void, +) { + let url = `/api/v1/tag/get_objects/?tagIds=${tagIds}`; + if (types) { + url += `&types=${types}`; + } + SupersetClient.get({ endpoint: url }) + .then(({ json }) => callback(json.result)) + .catch(response => error(response)); +} diff --git a/superset-frontend/src/filters/components/Select/SelectFilterPlugin.test.tsx b/superset-frontend/src/filters/components/Select/SelectFilterPlugin.test.tsx index c035f81c01..99e6259871 100644 --- a/superset-frontend/src/filters/components/Select/SelectFilterPlugin.test.tsx +++ b/superset-frontend/src/filters/components/Select/SelectFilterPlugin.test.tsx @@ -91,15 +91,6 @@ describe('SelectFilterPlugin', () => { test('Add multiple values with first render', async () => { getWrapper(); expect(setDataMask).toHaveBeenCalledWith({ - extraFormData: {}, - filterState: { - value: ['boy'], - }, - }); - expect(setDataMask).toHaveBeenCalledWith({ - __cache: { - value: ['boy'], - }, extraFormData: { filters: [ { @@ -118,9 +109,6 @@ describe('SelectFilterPlugin', () => { userEvent.click(screen.getByTitle('girl')); expect(await screen.findByTitle(/girl/i)).toBeInTheDocument(); expect(setDataMask).toHaveBeenCalledWith({ - __cache: { - value: ['boy'], - }, extraFormData: { filters: [ { @@ -146,9 +134,6 @@ describe('SelectFilterPlugin', () => { }), ); expect(setDataMask).toHaveBeenCalledWith({ - __cache: { - value: ['boy'], - }, extraFormData: { adhoc_filters: [ { @@ -174,9 +159,6 @@ describe('SelectFilterPlugin', () => { }), ); expect(setDataMask).toHaveBeenCalledWith({ - __cache: { - value: ['boy'], - }, extraFormData: {}, filterState: { label: undefined, @@ -191,9 +173,6 @@ describe('SelectFilterPlugin', () => { expect(await screen.findByTitle('girl')).toBeInTheDocument(); userEvent.click(screen.getByTitle('girl')); 
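// Selecting a second value should dispatch a data mask that carries only
// extraFormData and filterState; the reducer change below drops the
// transient __cache copy of the previous filter state.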
expect(setDataMask).toHaveBeenCalledWith({ - __cache: { - value: ['boy'], - }, extraFormData: { filters: [ { @@ -216,9 +195,6 @@ describe('SelectFilterPlugin', () => { expect(await screen.findByRole('combobox')).toBeInTheDocument(); userEvent.click(screen.getByTitle(NULL_STRING)); expect(setDataMask).toHaveBeenLastCalledWith({ - __cache: { - value: ['boy'], - }, extraFormData: { filters: [ { diff --git a/superset-frontend/src/filters/components/Select/SelectFilterPlugin.tsx b/superset-frontend/src/filters/components/Select/SelectFilterPlugin.tsx index 7d8ab55fb5..a4b9f5b05e 100644 --- a/superset-frontend/src/filters/components/Select/SelectFilterPlugin.tsx +++ b/superset-frontend/src/filters/components/Select/SelectFilterPlugin.tsx @@ -37,7 +37,6 @@ import { Select } from 'src/components'; import { SLOW_DEBOUNCE } from 'src/constants'; import { hasOption, propertyComparator } from 'src/components/Select/utils'; import { FilterBarOrientation } from 'src/dashboard/types'; -import { uniqWith, isEqual } from 'lodash'; import { PluginFilterSelectProps, SelectValue } from './types'; import { FilterPluginStyle, StatusMessage, StyledFormItem } from '../common'; import { getDataRecordFormatter, getSelectExtraFormData } from '../../utils'; @@ -46,15 +45,11 @@ type DataMaskAction = | { type: 'ownState'; ownState: JsonObject } | { type: 'filterState'; - __cache: JsonObject; extraFormData: ExtraFormData; filterState: { value: SelectValue; label?: string }; }; -function reducer( - draft: DataMask & { __cache?: JsonObject }, - action: DataMaskAction, -) { +function reducer(draft: DataMask, action: DataMaskAction) { switch (action.type) { case 'ownState': draft.ownState = { @@ -63,10 +58,18 @@ function reducer( }; return draft; case 'filterState': - draft.extraFormData = action.extraFormData; - // eslint-disable-next-line no-underscore-dangle - draft.__cache = action.__cache; - draft.filterState = { ...draft.filterState, ...action.filterState }; + if ( + JSON.stringify(draft.extraFormData) !== + JSON.stringify(action.extraFormData) + ) { + draft.extraFormData = action.extraFormData; + } + if ( + JSON.stringify(draft.filterState) !== JSON.stringify(action.filterState) + ) { + draft.filterState = { ...draft.filterState, ...action.filterState }; + } + return draft; default: return draft; @@ -130,7 +133,6 @@ export default function PluginFilterSelect(props: PluginFilterSelectProps) { const suffix = inverseSelection && values?.length ? 
t(' (excluded)') : ''; dispatchDataMask({ type: 'filterState', - __cache: filterState, extraFormData: getSelectExtraFormData( col, values, @@ -219,16 +221,13 @@ export default function PluginFilterSelect(props: PluginFilterSelectProps) { }, [filterState.validateMessage, filterState.validateStatus]); const uniqueOptions = useMemo(() => { - const allOptions = [...data]; - return uniqWith(allOptions, isEqual).map(row => { - const [value] = groupby.map(col => row[col]); - return { - label: labelFormatter(value, datatype), - value, - isNewOption: false, - }; - }); - }, [data, datatype, groupby, labelFormatter]); + const allOptions = new Set([...data.map(el => el[col])]); + return [...allOptions].map((value: string) => ({ + label: labelFormatter(value, datatype), + value, + isNewOption: false, + })); + }, [data, datatype, col, labelFormatter]); const options = useMemo(() => { if (search && !multiSelect && !hasOption(search, uniqueOptions, true)) { diff --git a/superset-frontend/src/filters/components/common.ts b/superset-frontend/src/filters/components/common.ts index af1fe9c791..cb6d7f22f1 100644 --- a/superset-frontend/src/filters/components/common.ts +++ b/superset-frontend/src/filters/components/common.ts @@ -20,9 +20,11 @@ import { styled } from '@superset-ui/core'; import { PluginFilterStylesProps } from './types'; import FormItem from '../../components/Form/FormItem'; +export const RESPONSIVE_WIDTH = 0; + export const FilterPluginStyle = styled.div<PluginFilterStylesProps>` min-height: ${({ height }) => height}px; - width: ${({ width }) => width}px; + width: ${({ width }) => (width === RESPONSIVE_WIDTH ? '100%' : `${width}px`)}; `; export const StyledFormItem = styled(FormItem)` diff --git a/superset-frontend/src/hooks/apiResources/dashboards.ts b/superset-frontend/src/hooks/apiResources/dashboards.ts index b21cc668c0..61896ba130 100644 --- a/superset-frontend/src/hooks/apiResources/dashboards.ts +++ b/superset-frontend/src/hooks/apiResources/dashboards.ts @@ -31,6 +31,7 @@ export const useDashboard = (idOrSlug: string | number) => (dashboard.json_metadata && JSON.parse(dashboard.json_metadata)) || {}, position_data: dashboard.position_json && JSON.parse(dashboard.position_json), + owners: dashboard.owners || [], }), ); diff --git a/superset-frontend/src/hooks/apiResources/sqlEditorTabs.test.ts b/superset-frontend/src/hooks/apiResources/sqlEditorTabs.test.ts new file mode 100644 index 0000000000..d0f2230f13 --- /dev/null +++ b/superset-frontend/src/hooks/apiResources/sqlEditorTabs.test.ts @@ -0,0 +1,99 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +import fetchMock from 'fetch-mock'; +import { act, renderHook } from '@testing-library/react-hooks'; +import { + createWrapper, + defaultStore as store, +} from 'spec/helpers/testing-library'; +import { api } from 'src/hooks/apiResources/queryApi'; +import { LatestQueryEditorVersion } from 'src/SqlLab/types'; +import { useUpdateSqlEditorTabMutation } from './sqlEditorTabs'; + +const expectedQueryEditor = { + version: LatestQueryEditorVersion, + id: '123', + dbId: 456, + name: 'tab 1', + sql: 'SELECT * from example_table', + schema: 'my_schema', + templateParams: '{"a": 1, "v": "str"}', + queryLimit: 1000, + remoteId: null, + autorun: false, + hideLeftBar: false, + updatedAt: Date.now(), +}; + +afterEach(() => { + fetchMock.reset(); + act(() => { + store.dispatch(api.util.resetApiState()); + }); +}); + +test('puts api request with formData', async () => { + const tabStateMutationApiRoute = `glob:*/tabstateview/${expectedQueryEditor.id}`; + fetchMock.put(tabStateMutationApiRoute, 200); + const { result, waitFor } = renderHook( + () => useUpdateSqlEditorTabMutation(), + { + wrapper: createWrapper({ + useRedux: true, + store, + }), + }, + ); + act(() => { + result.current[0]({ + queryEditor: expectedQueryEditor, + }); + }); + await waitFor(() => + expect(fetchMock.calls(tabStateMutationApiRoute).length).toBe(1), + ); + const formData = fetchMock.calls(tabStateMutationApiRoute)[0][1] + ?.body as FormData; + expect(formData.get('database_id')).toBe(`${expectedQueryEditor.dbId}`); + expect(formData.get('schema')).toBe( + JSON.stringify(`${expectedQueryEditor.schema}`), + ); + expect(formData.get('sql')).toBe( + JSON.stringify(`${expectedQueryEditor.sql}`), + ); + expect(formData.get('label')).toBe( + JSON.stringify(`${expectedQueryEditor.name}`), + ); + expect(formData.get('query_limit')).toBe(`${expectedQueryEditor.queryLimit}`); + expect(formData.has('latest_query_id')).toBe(false); + expect(formData.get('template_params')).toBe( + JSON.stringify(`${expectedQueryEditor.templateParams}`), + ); + expect(formData.get('hide_left_bar')).toBe( + `${expectedQueryEditor.hideLeftBar}`, + ); + expect(formData.get('extra_json')).toBe( + JSON.stringify( + JSON.stringify({ + updatedAt: expectedQueryEditor.updatedAt, + version: LatestQueryEditorVersion, + }), + ), + ); +}); diff --git a/superset-frontend/src/hooks/apiResources/sqlEditorTabs.ts b/superset-frontend/src/hooks/apiResources/sqlEditorTabs.ts new file mode 100644 index 0000000000..71e0cf2936 --- /dev/null +++ b/superset-frontend/src/hooks/apiResources/sqlEditorTabs.ts @@ -0,0 +1,70 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +import { pickBy } from 'lodash'; +import { QueryEditor, LatestQueryEditorVersion } from 'src/SqlLab/types'; +import { api, JsonResponse } from './queryApi'; + +export type EditorMutationParams = { + queryEditor: QueryEditor; + extra?: Record<string, any>; +}; + +const sqlEditorApi = api.injectEndpoints({ + endpoints: builder => ({ + updateSqlEditorTab: builder.mutation<JsonResponse, EditorMutationParams>({ + query: ({ + queryEditor: { + version = LatestQueryEditorVersion, + id, + dbId, + schema, + queryLimit, + sql, + name, + latestQueryId, + hideLeftBar, + templateParams, + autorun, + updatedAt, + }, + extra, + }) => ({ + method: 'PUT', + endpoint: encodeURI(`/tabstateview/${id}`), + postPayload: pickBy( + { + database_id: dbId, + schema, + sql, + label: name, + query_limit: queryLimit, + latest_query_id: latestQueryId, + template_params: templateParams, + hide_left_bar: hideLeftBar, + autorun, + extra_json: JSON.stringify({ updatedAt, version, ...extra }), + }, + value => value !== undefined, + ), + }), + }), + }), +}); + +export const { useUpdateSqlEditorTabMutation } = sqlEditorApi; diff --git a/superset-frontend/src/hooks/apiResources/sqlLab.ts b/superset-frontend/src/hooks/apiResources/sqlLab.ts index 123db414e2..16e8ffde6c 100644 --- a/superset-frontend/src/hooks/apiResources/sqlLab.ts +++ b/superset-frontend/src/hooks/apiResources/sqlLab.ts @@ -50,7 +50,7 @@ export type InitialState = { template_params: string | null; hide_left_bar?: boolean; saved_query: { id: number } | null; - extra_json?: object; + extra_json?: Record<string, any>; }; databases: object[]; queries: Record< diff --git a/superset-frontend/src/hooks/useDebounceValue.ts b/superset-frontend/src/hooks/useDebounceValue.ts index 711b2dbd5a..862c837707 100644 --- a/superset-frontend/src/hooks/useDebounceValue.ts +++ b/superset-frontend/src/hooks/useDebounceValue.ts @@ -19,8 +19,8 @@ import { useState, useEffect } from 'react'; import { FAST_DEBOUNCE } from 'src/constants'; -export function useDebounceValue(value: string, delay = FAST_DEBOUNCE) { - const [debouncedValue, setDebouncedValue] = useState(value); +export function useDebounceValue<T>(value: T, delay = FAST_DEBOUNCE) { + const [debouncedValue, setDebouncedValue] = useState<T>(value); useEffect(() => { const handler: NodeJS.Timeout = setTimeout(() => { diff --git a/superset-frontend/src/pages/AlertReportList/index.tsx b/superset-frontend/src/pages/AlertReportList/index.tsx index b0cd0a4622..c6d14d186f 100644 --- a/superset-frontend/src/pages/AlertReportList/index.tsx +++ b/superset-frontend/src/pages/AlertReportList/index.tsx @@ -53,6 +53,8 @@ import { isUserAdmin } from 'src/dashboard/util/permissionUtils'; import Owner from 'src/types/Owner'; import AlertReportModal from 'src/features/alerts/AlertReportModal'; import { AlertObject, AlertState } from 'src/features/alerts/types'; +import { ModifiedInfo } from 'src/components/AuditInfo'; +import { QueryObjectColumns } from 'src/views/CRUD/types'; const extensionsRegistry = getExtensionsRegistry(); @@ -303,18 +305,6 @@ function AlertList({ disableSortBy: true, size: 'xl', }, - { - Cell: ({ - row: { - original: { created_by }, - }, - }: any) => - created_by ? 
`${created_by.first_name} ${created_by.last_name}` : '', - Header: t('Created by'), - id: 'created_by', - disableSortBy: true, - size: 'xl', - }, { Cell: ({ row: { @@ -329,10 +319,13 @@ { Cell: ({ row: { - original: { changed_on_delta_humanized: changedOn }, + original: { + changed_on_delta_humanized: changedOn, + changed_by: changedBy, + }, }, - }: any) => <span className="no-wrap">{changedOn}</span>, - Header: t('Modified'), + }: any) => <ModifiedInfo date={changedOn} user={changedBy} />, + Header: t('Last modified'), accessor: 'changed_on_delta_humanized', size: 'xl', }, @@ -407,6 +400,10 @@ disableSortBy: true, size: 'xl', }, + { + accessor: QueryObjectColumns.changed_by, + hidden: true, + }, ], [canDelete, canEdit, isReportEnabled, toggleActive], ); @@ -448,6 +445,13 @@ const filters: Filters = useMemo( () => [ + { + Header: t('Name'), + key: 'search', + id: 'name', + input: 'search', + operator: FilterOperator.contains, + }, { Header: t('Owner'), key: 'owner', @@ -465,23 +469,6 @@ ), paginate: true, }, - { - Header: t('Created by'), - key: 'created_by', - id: 'created_by', - input: 'select', - operator: FilterOperator.relationOneMany, - unfilteredLabel: 'All', - fetchSelects: createFetchRelated( - 'report', - 'created_by', - createErrorHandler(errMsg => - t('An error occurred while fetching created by values: %s', errMsg), - ), - user, - ), - paginate: true, - }, { Header: t('Status'), key: 'status', @@ -504,11 +491,24 @@ ], }, { - Header: t('Search'), - key: 'search', - id: 'name', - input: 'search', - operator: FilterOperator.contains, + Header: t('Modified by'), + key: 'changed_by', + id: 'changed_by', + input: 'select', + operator: FilterOperator.relationOneMany, + unfilteredLabel: t('All'), + fetchSelects: createFetchRelated( + 'report', + 'changed_by', + createErrorHandler(errMsg => + t( + 'An error occurred while fetching modified by values: %s', + errMsg, + ), + ), + user, + ), + paginate: true, }, ], [], diff --git a/superset-frontend/src/pages/AllEntities/index.tsx b/superset-frontend/src/pages/AllEntities/index.tsx index ca815795d6..b94cab846d 100644 --- a/superset-frontend/src/pages/AllEntities/index.tsx +++ b/superset-frontend/src/pages/AllEntities/index.tsx @@ -33,8 +33,9 @@ import { PageHeaderWithActions } from 'src/components/PageHeaderWithActions'; import { Tag } from 'src/views/CRUD/types'; import TagModal from 'src/features/tags/TagModal'; import withToasts, { useToasts } from 'src/components/MessageToasts/withToasts'; -import { fetchObjects, fetchSingleTag } from 'src/features/tags/tags'; +import { fetchObjectsByTagIds, fetchSingleTag } from 'src/features/tags/tags'; import Loading from 'src/components/Loading'; +import getOwnerName from 'src/utils/getOwnerName'; interface TaggedObject { id: number; @@ -132,7 +133,7 @@ function AllEntities() { const owner: Owner = { type: MetadataType.OWNER, - createdBy: `${tag?.created_by.first_name} ${tag?.created_by.last_name}`, + createdBy: getOwnerName(tag?.created_by), createdOn: tag?.created_on_delta_humanized || '', }; items.push(owner); @@ -140,14 +141,18 @@ const lastModified: LastModified = { type: MetadataType.LAST_MODIFIED, value: tag?.changed_on_delta_humanized || '', - modifiedBy: `${tag?.changed_by.first_name} ${tag?.changed_by.last_name}`, + modifiedBy: getOwnerName(tag?.changed_by), }; items.push(lastModified); const fetchTaggedObjects = () => { setLoading(true); - fetchObjects(
- { tags: tag?.name || '', types: null }, + if (!tag) { + addDangerToast(t('Error: tag object is not referenced')); + return; + } + fetchObjectsByTagIds( + { tagIds: [tag.id], types: null }, (data: TaggedObject[]) => { const objects = { dashboard: [], chart: [], query: [] }; data.forEach(function (object) { diff --git a/superset-frontend/src/pages/AnnotationLayerList/index.tsx b/superset-frontend/src/pages/AnnotationLayerList/index.tsx index fc909538c0..fff5743b5a 100644 --- a/superset-frontend/src/pages/AnnotationLayerList/index.tsx +++ b/superset-frontend/src/pages/AnnotationLayerList/index.tsx @@ -21,7 +21,6 @@ import React, { useMemo, useState } from 'react'; import rison from 'rison'; import { t, SupersetClient } from '@superset-ui/core'; import { Link, useHistory } from 'react-router-dom'; -import moment from 'moment'; import { useListViewResource } from 'src/views/CRUD/hooks'; import { createFetchRelated, createErrorHandler } from 'src/views/CRUD/utils'; import withToasts from 'src/components/MessageToasts/withToasts'; @@ -36,9 +35,10 @@ import DeleteModal from 'src/components/DeleteModal'; import ConfirmStatusChange from 'src/components/ConfirmStatusChange'; import AnnotationLayerModal from 'src/features/annotationLayers/AnnotationLayerModal'; import { AnnotationLayerObject } from 'src/features/annotationLayers/types'; +import { ModifiedInfo } from 'src/components/AuditInfo'; +import { QueryObjectColumns } from 'src/views/CRUD/types'; const PAGE_SIZE = 25; -const MOMENT_FORMAT = 'MMM DD, YYYY'; interface AnnotationLayersListProps { addDangerToast: (msg: string) => void; @@ -156,65 +156,16 @@ { Cell: ({ row: { - original: { changed_on: changedOn }, + original: { + changed_on_delta_humanized: changedOn, + changed_by: changedBy, + }, }, - }: any) => { - const date = new Date(changedOn); - const utc = new Date( - Date.UTC( - date.getFullYear(), - date.getMonth(), - date.getDate(), - date.getHours(), - date.getMinutes(), - date.getSeconds(), - date.getMilliseconds(), - ), - ); - - return moment(utc).format(MOMENT_FORMAT); - }, + }: any) => <ModifiedInfo date={changedOn} user={changedBy} />, Header: t('Last modified'), accessor: 'changed_on', size: 'xl', }, - { - Cell: ({ - row: { - original: { created_on: createdOn }, - }, - }: any) => { - const date = new Date(createdOn); - const utc = new Date( - Date.UTC( - date.getFullYear(), - date.getMonth(), - date.getDate(), - date.getHours(), - date.getMinutes(), - date.getSeconds(), - date.getMilliseconds(), - ), - ); - - return moment(utc).format(MOMENT_FORMAT); - }, - Header: t('Created on'), - accessor: 'created_on', - size: 'xl', - }, - { - accessor: 'created_by', - disableSortBy: true, - Header: t('Created by'), - Cell: ({ - row: { - original: { created_by: createdBy }, - }, - }: any) => - createdBy ?
`${createdBy.first_name} ${createdBy.last_name}` : '', - size: 'xl', - }, { Cell: ({ row: { original } }: any) => { const handleEdit = () => handleAnnotationLayerEdit(original); @@ -249,6 +200,10 @@ function AnnotationLayersList({ hidden: !canEdit && !canDelete, size: 'xl', }, + { + accessor: QueryObjectColumns.changed_by, + hidden: true, + }, ], [canDelete, canCreate], ); @@ -280,15 +235,22 @@ function AnnotationLayersList({ const filters: Filters = useMemo( () => [ { - Header: t('Created by'), - key: 'created_by', - id: 'created_by', + Header: t('Name'), + key: 'search', + id: 'name', + input: 'search', + operator: FilterOperator.contains, + }, + { + Header: t('Changed by'), + key: 'changed_by', + id: 'changed_by', input: 'select', operator: FilterOperator.relationOneMany, unfilteredLabel: t('All'), fetchSelects: createFetchRelated( 'annotation_layer', - 'created_by', + 'changed_by', createErrorHandler(errMsg => t( 'An error occurred while fetching dataset datasource values: %s', @@ -299,13 +261,6 @@ function AnnotationLayersList({ ), paginate: true, }, - { - Header: t('Search'), - key: 'search', - id: 'name', - input: 'search', - operator: FilterOperator.contains, - }, ], [], ); diff --git a/superset-frontend/src/pages/AnnotationList/index.tsx b/superset-frontend/src/pages/AnnotationList/index.tsx index 980a18ba72..e04b48080f 100644 --- a/superset-frontend/src/pages/AnnotationList/index.tsx +++ b/superset-frontend/src/pages/AnnotationList/index.tsx @@ -154,7 +154,7 @@ function AnnotationList({ () => [ { accessor: 'short_descr', - Header: t('Label'), + Header: t('Name'), }, { accessor: 'long_descr', diff --git a/superset-frontend/src/pages/ChartList/index.tsx b/superset-frontend/src/pages/ChartList/index.tsx index d13113158e..5ed967d7c1 100644 --- a/superset-frontend/src/pages/ChartList/index.tsx +++ b/superset-frontend/src/pages/ChartList/index.tsx @@ -29,7 +29,6 @@ import { import React, { useState, useMemo, useCallback } from 'react'; import rison from 'rison'; import { uniqBy } from 'lodash'; -import moment from 'moment'; import { useSelector } from 'react-redux'; import { createErrorHandler, @@ -69,11 +68,13 @@ import setupPlugins from 'src/setup/setupPlugins'; import InfoTooltip from 'src/components/InfoTooltip'; import CertifiedBadge from 'src/components/CertifiedBadge'; import { GenericLink } from 'src/components/GenericLink/GenericLink'; -import Owner from 'src/types/Owner'; import { loadTags } from 'src/components/Tags/utils'; +import FacePile from 'src/components/FacePile'; import ChartCard from 'src/features/charts/ChartCard'; import { UserWithPermissionsAndRoles } from 'src/types/bootstrapTypes'; import { findPermission } from 'src/utils/findPermission'; +import { ModifiedInfo } from 'src/components/AuditInfo'; +import { QueryObjectColumns } from 'src/views/CRUD/types'; const FlexRowContainer = styled.div` align-items: center; @@ -245,10 +246,6 @@ function ChartList(props: ChartListProps) { }); setPreparingExport(true); }; - const changedByName = (lastSavedBy: Owner) => - lastSavedBy?.first_name - ? 
`${lastSavedBy?.first_name} ${lastSavedBy?.last_name}` - : null; function handleBulkChartDelete(chartsToDelete: Chart[]) { SupersetClient.delete({ @@ -366,7 +363,7 @@ function ChartList(props: ChartListProps) { )} </FlexRowContainer> ), - Header: t('Chart'), + Header: t('Name'), accessor: 'slice_name', }, { @@ -375,7 +372,7 @@ function ChartList(props: ChartListProps) { original: { viz_type: vizType }, }, }: any) => registry.get(vizType)?.name || vizType, - Header: t('Visualization type'), + Header: t('Type'), accessor: 'viz_type', size: 'xxl', }, @@ -438,44 +435,27 @@ function ChartList(props: ChartListProps) { { Cell: ({ row: { - original: { last_saved_by: lastSavedBy }, + original: { owners = [] }, }, - }: any) => <>{changedByName(lastSavedBy)}</>, - Header: t('Modified by'), - accessor: 'last_saved_by.first_name', + }: any) => <FacePile users={owners} />, + Header: t('Owners'), + accessor: 'owners', + disableSortBy: true, size: 'xl', }, { Cell: ({ row: { - original: { last_saved_at: lastSavedAt }, + original: { + changed_on_delta_humanized: changedOn, + changed_by: changedBy, + }, }, - }: any) => ( - <span className="no-wrap"> - {lastSavedAt ? moment.utc(lastSavedAt).fromNow() : null} - </span> - ), + }: any) => <ModifiedInfo date={changedOn} user={changedBy} />, Header: t('Last modified'), accessor: 'last_saved_at', size: 'xl', }, - { - accessor: 'owners', - hidden: true, - disableSortBy: true, - }, - { - Cell: ({ - row: { - original: { created_by: createdBy }, - }, - }: any) => - createdBy ? `${createdBy.first_name} ${createdBy.last_name}` : '', - Header: t('Created by'), - accessor: 'created_by', - disableSortBy: true, - size: 'xl', - }, { Cell: ({ row: { original } }: any) => { const handleDelete = () => @@ -563,6 +543,10 @@ function ChartList(props: ChartListProps) { disableSortBy: true, hidden: !canEdit && !canDelete, }, + { + accessor: QueryObjectColumns.changed_by, + hidden: true, + }, ], [ userId, @@ -597,58 +581,14 @@ function ChartList(props: ChartListProps) { const filters: Filters = useMemo(() => { const filters_list = [ { - Header: t('Search'), + Header: t('Name'), key: 'search', id: 'slice_name', input: 'search', operator: FilterOperator.chartAllText, }, { - Header: t('Owner'), - key: 'owner', - id: 'owners', - input: 'select', - operator: FilterOperator.relationManyMany, - unfilteredLabel: t('All'), - fetchSelects: createFetchRelated( - 'chart', - 'owners', - createErrorHandler(errMsg => - addDangerToast( - t( - 'An error occurred while fetching chart owners values: %s', - errMsg, - ), - ), - ), - props.user, - ), - paginate: true, - }, - { - Header: t('Created by'), - key: 'created_by', - id: 'created_by', - input: 'select', - operator: FilterOperator.relationOneMany, - unfilteredLabel: t('All'), - fetchSelects: createFetchRelated( - 'chart', - 'created_by', - createErrorHandler(errMsg => - addDangerToast( - t( - 'An error occurred while fetching chart created by values: %s', - errMsg, - ), - ), - ), - props.user, - ), - paginate: true, - }, - { - Header: t('Chart type'), + Header: t('Type'), key: 'viz_type', id: 'viz_type', input: 'select', @@ -683,8 +623,43 @@ function ChartList(props: ChartListProps) { fetchSelects: createFetchDatasets, paginate: true, }, + ...(isFeatureEnabled(FeatureFlag.TAGGING_SYSTEM) && canReadTag + ? 
[ + { + Header: t('Tag'), + key: 'tags', + id: 'tags', + input: 'select', + operator: FilterOperator.chartTags, + unfilteredLabel: t('All'), + fetchSelects: loadTags, + }, + ] : []), { - Header: t('Dashboards'), + Header: t('Owner'), + key: 'owner', + id: 'owners', + input: 'select', + operator: FilterOperator.relationManyMany, + unfilteredLabel: t('All'), + fetchSelects: createFetchRelated( + 'chart', + 'owners', + createErrorHandler(errMsg => + addDangerToast( + t( + 'An error occurred while fetching chart owners values: %s', + errMsg, + ), + ), + ), + props.user, + ), + paginate: true, + }, + { + Header: t('Dashboard'), key: 'dashboards', id: 'dashboards', input: 'select', @@ -707,18 +682,27 @@ { label: t('No'), value: false }, ], }, - ] as Filters; - if (isFeatureEnabled(FeatureFlag.TAGGING_SYSTEM) && canReadTag) { - filters_list.push({ - Header: t('Tags'), - key: 'tags', - id: 'tags', + { + Header: t('Modified by'), + key: 'changed_by', + id: 'changed_by', input: 'select', - operator: FilterOperator.chartTags, + operator: FilterOperator.relationOneMany, unfilteredLabel: t('All'), - fetchSelects: loadTags, - }); - } + fetchSelects: createFetchRelated( + 'chart', + 'changed_by', + createErrorHandler(errMsg => + t( + 'An error occurred while fetching chart modified by values: %s', + errMsg, + ), + ), + props.user, + ), + paginate: true, + }, + ] as Filters; return filters_list; }, [addDangerToast, favoritesFilter, props.user]); diff --git a/superset-frontend/src/pages/CssTemplateList/index.tsx b/superset-frontend/src/pages/CssTemplateList/index.tsx index f777f8e743..b77217b22f 100644 --- a/superset-frontend/src/pages/CssTemplateList/index.tsx +++ b/superset-frontend/src/pages/CssTemplateList/index.tsx @@ -21,13 +21,11 @@ import React, { useMemo, useState } from 'react'; import { t, SupersetClient } from '@superset-ui/core'; import rison from 'rison'; -import moment from 'moment'; import { useListViewResource } from 'src/views/CRUD/hooks'; -import { createFetchRelated, createErrorHandler } from 'src/views/CRUD/utils'; +import { createErrorHandler, createFetchRelated } from 'src/views/CRUD/utils'; import withToasts from 'src/components/MessageToasts/withToasts'; import SubMenu, { SubMenuProps } from 'src/features/home/SubMenu'; import DeleteModal from 'src/components/DeleteModal'; -import { Tooltip } from 'src/components/Tooltip'; import ConfirmStatusChange from 'src/components/ConfirmStatusChange'; import ActionsBar, { ActionProps } from 'src/components/ListView/ActionsBar'; import ListView, { @@ -37,6 +35,8 @@ } from 'src/components/ListView'; import CssTemplateModal from 'src/features/cssTemplates/CssTemplateModal'; import { TemplateObject } from 'src/features/cssTemplates/types'; +import { ModifiedInfo } from 'src/components/AuditInfo'; +import { QueryObjectColumns } from 'src/views/CRUD/types'; const PAGE_SIZE = 25; @@ -138,66 +138,12 @@ changed_by: changedBy, }, }, - }: any) => { - let name = 'null'; - - if (changedBy) { - name = `${changedBy.first_name} ${changedBy.last_name}`; - } - - return ( - <Tooltip - id="allow-run-async-header-tooltip" - title={t('Last modified by %s', name)} - placement="right" - > - <span>{changedOn}</span> - </Tooltip> - ); - }, + }: any) => <ModifiedInfo date={changedOn} user={changedBy} />, Header: t('Last modified'), accessor: 'changed_on_delta_humanized', size: 'xl', disableSortBy: true, }, - { - Cell: ({ - row: { - original: { created_on: createdOn }, - }, -
}: any) => { - const date = new Date(createdOn); - const utc = new Date( - Date.UTC( - date.getFullYear(), - date.getMonth(), - date.getDate(), - date.getHours(), - date.getMinutes(), - date.getSeconds(), - date.getMilliseconds(), - ), - ); - - return moment(utc).fromNow(); - }, - Header: t('Created on'), - accessor: 'created_on', - size: 'xl', - disableSortBy: true, - }, - { - accessor: 'created_by', - disableSortBy: true, - Header: t('Created by'), - Cell: ({ - row: { - original: { created_by: createdBy }, - }, - }: any) => - createdBy ? `${createdBy.first_name} ${createdBy.last_name}` : '', - size: 'xl', - }, { Cell: ({ row: { original } }: any) => { const handleEdit = () => handleCssTemplateEdit(original); @@ -232,6 +178,10 @@ function CssTemplatesList({ hidden: !canEdit && !canDelete, size: 'xl', }, + { + accessor: QueryObjectColumns.changed_by, + hidden: true, + }, ], [canDelete, canCreate], ); @@ -270,15 +220,22 @@ function CssTemplatesList({ const filters: Filters = useMemo( () => [ { - Header: t('Created by'), - key: 'created_by', - id: 'created_by', + Header: t('Name'), + key: 'search', + id: 'template_name', + input: 'search', + operator: FilterOperator.contains, + }, + { + Header: t('Modified by'), + key: 'changed_by', + id: 'changed_by', input: 'select', operator: FilterOperator.relationOneMany, unfilteredLabel: t('All'), fetchSelects: createFetchRelated( 'css_template', - 'created_by', + 'changed_by', createErrorHandler(errMsg => t( 'An error occurred while fetching dataset datasource values: %s', @@ -289,13 +246,6 @@ function CssTemplatesList({ ), paginate: true, }, - { - Header: t('Search'), - key: 'search', - id: 'template_name', - input: 'search', - operator: FilterOperator.contains, - }, ], [], ); diff --git a/superset-frontend/src/pages/DashboardList/index.tsx b/superset-frontend/src/pages/DashboardList/index.tsx index 6542d85129..e82b701859 100644 --- a/superset-frontend/src/pages/DashboardList/index.tsx +++ b/superset-frontend/src/pages/DashboardList/index.tsx @@ -57,13 +57,17 @@ import { Tooltip } from 'src/components/Tooltip'; import ImportModelsModal from 'src/components/ImportModal/index'; import Dashboard from 'src/dashboard/containers/Dashboard'; -import { Dashboard as CRUDDashboard } from 'src/views/CRUD/types'; +import { + Dashboard as CRUDDashboard, + QueryObjectColumns, +} from 'src/views/CRUD/types'; import CertifiedBadge from 'src/components/CertifiedBadge'; import { loadTags } from 'src/components/Tags/utils'; import DashboardCard from 'src/features/dashboards/DashboardCard'; import { DashboardStatus } from 'src/features/dashboards/types'; import { UserWithPermissionsAndRoles } from 'src/types/bootstrapTypes'; import { findPermission } from 'src/utils/findPermission'; +import { ModifiedInfo } from 'src/components/AuditInfo'; const PAGE_SIZE = 25; const PASSWORDS_NEEDED_MESSAGE = t( @@ -108,11 +112,7 @@ const Actions = styled.div` `; function DashboardList(props: DashboardListProps) { - const { - addDangerToast, - addSuccessToast, - user: { userId }, - } = props; + const { addDangerToast, addSuccessToast, user } = props; const { roles } = useSelector<any, UserWithPermissionsAndRoles>( state => state.user, @@ -178,7 +178,7 @@ function DashboardList(props: DashboardListProps) { }; // TODO: Fix usage of localStorage keying on the user id - const userKey = dangerouslyGetItemDoNotUse(userId?.toString(), null); + const userKey = dangerouslyGetItemDoNotUse(user?.userId?.toString(), null); const canCreate = hasPerm('can_write'); const canEdit = 
hasPerm('can_write'); @@ -274,7 +274,7 @@ function DashboardList(props: DashboardListProps) { original: { id }, }, }: any) => - userId && ( + user?.userId && ( <FaveStar itemId={id} saveFaveStar={saveFavoriteStatus} @@ -285,7 +285,7 @@ function DashboardList(props: DashboardListProps) { id: 'id', disableSortBy: true, size: 'xs', - hidden: !userId, + hidden: !user?.userId, }, { Cell: ({ @@ -310,9 +310,20 @@ function DashboardList(props: DashboardListProps) { {dashboardTitle} </Link> ), - Header: t('Title'), + Header: t('Name'), accessor: 'dashboard_title', }, + { + Cell: ({ + row: { + original: { status }, + }, + }: any) => + status === DashboardStatus.PUBLISHED ? t('Published') : t('Draft'), + Header: t('Status'), + accessor: 'published', + size: 'xl', + }, { Cell: ({ row: { @@ -338,49 +349,6 @@ function DashboardList(props: DashboardListProps) { disableSortBy: true, hidden: !isFeatureEnabled(FeatureFlag.TAGGING_SYSTEM), }, - { - Cell: ({ - row: { - original: { changed_by_name: changedByName }, - }, - }: any) => <>{changedByName}</>, - Header: t('Modified by'), - accessor: 'changed_by.first_name', - size: 'xl', - }, - { - Cell: ({ - row: { - original: { status }, - }, - }: any) => - status === DashboardStatus.PUBLISHED ? t('Published') : t('Draft'), - Header: t('Status'), - accessor: 'published', - size: 'xl', - }, - { - Cell: ({ - row: { - original: { changed_on_delta_humanized: changedOn }, - }, - }: any) => <span className="no-wrap">{changedOn}</span>, - Header: t('Modified'), - accessor: 'changed_on_delta_humanized', - size: 'xl', - }, - { - Cell: ({ - row: { - original: { created_by: createdBy }, - }, - }: any) => - createdBy ? `${createdBy.first_name} ${createdBy.last_name}` : '', - Header: t('Created by'), - accessor: 'created_by', - disableSortBy: true, - size: 'xl', - }, { Cell: ({ row: { @@ -392,6 +360,19 @@ function DashboardList(props: DashboardListProps) { disableSortBy: true, size: 'xl', }, + { + Cell: ({ + row: { + original: { + changed_on_delta_humanized: changedOn, + changed_by: changedBy, + }, + }, + }: any) => <ModifiedInfo date={changedOn} user={changedBy} />, + Header: t('Last modified'), + accessor: 'changed_on_delta_humanized', + size: 'xl', + }, { Cell: ({ row: { original } }: any) => { const handleDelete = () => @@ -475,9 +456,13 @@ function DashboardList(props: DashboardListProps) { hidden: !canEdit && !canDelete && !canExport, disableSortBy: true, }, + { + accessor: QueryObjectColumns.changed_by, + hidden: true, + }, ], [ - userId, + user?.userId, canEdit, canDelete, canExport, @@ -509,12 +494,37 @@ function DashboardList(props: DashboardListProps) { const filters: Filters = useMemo(() => { const filters_list = [ { - Header: t('Search'), + Header: t('Name'), key: 'search', id: 'dashboard_title', input: 'search', operator: FilterOperator.titleOrSlug, }, + { + Header: t('Status'), + key: 'published', + id: 'published', + input: 'select', + operator: FilterOperator.equals, + unfilteredLabel: t('Any'), + selects: [ + { label: t('Published'), value: true }, + { label: t('Draft'), value: false }, + ], + }, + ...(isFeatureEnabled(FeatureFlag.TAGGING_SYSTEM) && canReadTag + ? 
[ + { + Header: t('Tag'), + key: 'tags', + id: 'tags', + input: 'select', + operator: FilterOperator.dashboardTags, + unfilteredLabel: t('All'), + fetchSelects: loadTags, + }, + ] : []), { Header: t('Owner'), key: 'owner', @@ -537,41 +547,7 @@ ), paginate: true, }, - { - Header: t('Created by'), - key: 'created_by', - id: 'created_by', - input: 'select', - operator: FilterOperator.relationOneMany, - unfilteredLabel: t('All'), - fetchSelects: createFetchRelated( - 'dashboard', - 'created_by', - createErrorHandler(errMsg => - addDangerToast( - t( - 'An error occurred while fetching dashboard created by values: %s', - errMsg, - ), - ), - ), - props.user, - ), - paginate: true, - }, - { - Header: t('Status'), - key: 'published', - id: 'published', - input: 'select', - operator: FilterOperator.equals, - unfilteredLabel: t('Any'), - selects: [ - { label: t('Published'), value: true }, - { label: t('Draft'), value: false }, - ], - }, - ...(userId ? [favoritesFilter] : []), + ...(user?.userId ? [favoritesFilter] : []), { Header: t('Certified'), key: 'certified', @@ -585,18 +561,27 @@ { label: t('No'), value: false }, ], }, - ] as Filters; - if (isFeatureEnabled(FeatureFlag.TAGGING_SYSTEM) && canReadTag) { - filters_list.push({ - Header: t('Tags'), - key: 'tags', - id: 'tags', + { + Header: t('Modified by'), + key: 'changed_by', + id: 'changed_by', input: 'select', - operator: FilterOperator.dashboardTags, + operator: FilterOperator.relationOneMany, unfilteredLabel: t('All'), - fetchSelects: loadTags, - }); - } + fetchSelects: createFetchRelated( + 'dashboard', + 'changed_by', + createErrorHandler(errMsg => + t( + 'An error occurred while fetching dashboard modified by values: %s', + errMsg, + ), + ), + user, + ), + paginate: true, + }, + ] as Filters; return filters_list; }, [addDangerToast, favoritesFilter, props.user]); @@ -632,7 +617,7 @@ ?
userKey.thumbnails : isFeatureEnabled(FeatureFlag.THUMBNAILS) } - userId={userId} + userId={user?.userId} loading={loading} openDashboardEditModal={openDashboardEditModal} saveFavoriteStatus={saveFavoriteStatus} @@ -646,7 +631,7 @@ function DashboardList(props: DashboardListProps) { favoriteStatus, hasPerm, loading, - userId, + user?.userId, saveFavoriteStatus, userKey, ], @@ -743,7 +728,7 @@ function DashboardList(props: DashboardListProps) { addSuccessToast, addDangerToast, undefined, - userId, + user?.userId, ); setDashboardToDelete(null); }} diff --git a/superset-frontend/src/pages/DatabaseList/DatabaseList.test.jsx b/superset-frontend/src/pages/DatabaseList/DatabaseList.test.jsx index fd989b50d2..b1bfb245d3 100644 --- a/superset-frontend/src/pages/DatabaseList/DatabaseList.test.jsx +++ b/superset-frontend/src/pages/DatabaseList/DatabaseList.test.jsx @@ -218,7 +218,7 @@ describe('Admin DatabaseList', () => { await waitForComponentToPaint(wrapper); expect(fetchMock.lastCall()[0]).toMatchInlineSnapshot( - `"http://localhost/api/v1/database/?q=(filters:!((col:expose_in_sqllab,opr:eq,value:!t),(col:allow_run_async,opr:eq,value:!f),(col:database_name,opr:ct,value:fooo)),order_column:changed_on_delta_humanized,order_direction:desc,page:0,page_size:25)"`, + `"http://localhost/api/v1/database/?q=(filters:!((col:database_name,opr:ct,value:fooo),(col:expose_in_sqllab,opr:eq,value:!t),(col:allow_run_async,opr:eq,value:!f)),order_column:changed_on_delta_humanized,order_direction:desc,page:0,page_size:25)"`, ); }); diff --git a/superset-frontend/src/pages/DatabaseList/index.tsx b/superset-frontend/src/pages/DatabaseList/index.tsx index d2308bd117..8c98392aca 100644 --- a/superset-frontend/src/pages/DatabaseList/index.tsx +++ b/superset-frontend/src/pages/DatabaseList/index.tsx @@ -32,7 +32,11 @@ import { LocalStorageKeys, setItem } from 'src/utils/localStorageHelpers'; import Loading from 'src/components/Loading'; import { useListViewResource } from 'src/views/CRUD/hooks'; -import { createErrorHandler, uploadUserPerms } from 'src/views/CRUD/utils'; +import { + createErrorHandler, + createFetchRelated, + uploadUserPerms, +} from 'src/views/CRUD/utils'; import withToasts from 'src/components/MessageToasts/withToasts'; import SubMenu, { SubMenuProps } from 'src/features/home/SubMenu'; import DeleteModal from 'src/components/DeleteModal'; @@ -48,6 +52,8 @@ import { UserWithPermissionsAndRoles } from 'src/types/bootstrapTypes'; import type { MenuObjectProps } from 'src/types/bootstrapTypes'; import DatabaseModal from 'src/features/databases/DatabaseModal'; import { DatabaseObject } from 'src/features/databases/types'; +import { ModifiedInfo } from 'src/components/AuditInfo'; +import { QueryObjectColumns } from 'src/views/CRUD/types'; const extensionsRegistry = getExtensionsRegistry(); const DatabaseDeleteRelatedExtension = extensionsRegistry.get( @@ -67,6 +73,11 @@ interface DatabaseDeleteObject extends DatabaseObject { interface DatabaseListProps { addDangerToast: (msg: string) => void; addSuccessToast: (msg: string) => void; + user: { + userId: string | number; + firstName: string; + lastName: string; + }; } const IconCheck = styled(Icons.Check)` @@ -90,7 +101,11 @@ function BooleanDisplay({ value }: { value: Boolean }) { return value ? 
<IconCheck /> : <IconCancelX />; } -function DatabaseList({ addDangerToast, addSuccessToast }: DatabaseListProps) { +function DatabaseList({ + addDangerToast, + addSuccessToast, + user, +}: DatabaseListProps) { const { state: { loading, @@ -105,7 +120,7 @@ t('database'), addDangerToast, ); - const user = useSelector<any, UserWithPermissionsAndRoles>( + const fullUser = useSelector<any, UserWithPermissionsAndRoles>( state => state.user, ); const showDatabaseModal = getUrlParam(URL_PARAMS.showDatabaseModal); @@ -123,11 +138,11 @@ null, ); const [allowUploads, setAllowUploads] = useState<boolean>(false); - const isAdmin = isUserAdmin(user); + const isAdmin = isUserAdmin(fullUser); const showUploads = allowUploads || isAdmin; const [preparingExport, setPreparingExport] = useState<boolean>(false); - const { roles } = user; + const { roles } = fullUser; const { CSV_EXTENSIONS, COLUMNAR_EXTENSIONS, @@ -313,7 +328,7 @@ () => [ { accessor: 'database_name', - Header: t('Database'), + Header: t('Name'), }, { accessor: 'backend', @@ -380,23 +395,14 @@ size: 'md', }, { - accessor: 'created_by', - disableSortBy: true, - Header: t('Created by'), Cell: ({ row: { - original: { created_by: createdBy }, + original: { + changed_by: changedBy, + changed_on_delta_humanized: changedOn, + }, }, - }: any) => - createdBy ? `${createdBy.first_name} ${createdBy.last_name}` : '', - size: 'xl', - }, - { - Cell: ({ - row: { - original: { changed_on_delta_humanized: changedOn }, - }, - }: any) => changedOn, + }: any) => <ModifiedInfo date={changedOn} user={changedBy} />, Header: t('Last modified'), accessor: 'changed_on_delta_humanized', size: 'xl', @@ -470,12 +476,23 @@ hidden: !canEdit && !canDelete, disableSortBy: true, }, + { + accessor: QueryObjectColumns.changed_by, + hidden: true, + }, ], [canDelete, canEdit, canExport], ); const filters: Filters = useMemo( () => [ + { + Header: t('Name'), + key: 'search', + id: 'database_name', + input: 'search', + operator: FilterOperator.contains, + }, { Header: t('Expose in SQL Lab'), key: 'expose_in_sql_lab', @@ -509,11 +526,24 @@ ], }, { - Header: t('Search'), - key: 'search', - id: 'database_name', - input: 'search', - operator: FilterOperator.contains, + Header: t('Modified by'), + key: 'changed_by', + id: 'changed_by', + input: 'select', + operator: FilterOperator.relationOneMany, + unfilteredLabel: t('All'), + fetchSelects: createFetchRelated( + 'database', + 'changed_by', + createErrorHandler(errMsg => + t( + 'An error occurred while fetching database modified by values: %s', + errMsg, + ), + ), + user, + ), + paginate: true, }, ], [], diff --git a/superset-frontend/src/pages/DatasetList/DatasetList.test.tsx b/superset-frontend/src/pages/DatasetList/DatasetList.test.tsx index 916dd0615b..c316001bb4 100644 --- a/superset-frontend/src/pages/DatasetList/DatasetList.test.tsx +++ b/superset-frontend/src/pages/DatasetList/DatasetList.test.tsx @@ -285,56 +285,41 @@ describe('RTL', () => { }); describe('Prevent unsafe URLs', () => { + const columnCount = 8; + const exploreUrlIndex = 1; + const getTdIndex =
(rowNumber: number): number => + rowNumber * columnCount + exploreUrlIndex; + const mockedProps = {}; let wrapper: any; it('Check prevent unsafe is on renders relative links', async () => { - const tdColumnsNumber = 9; useSelectorMock.mockReturnValue(true); wrapper = await mountAndWait(mockedProps); const tdElements = wrapper.find(ListView).find('td'); - expect( - tdElements - .at(0 * tdColumnsNumber + 1) - .find('a') - .prop('href'), - ).toBe('/https://www.google.com?0'); - expect( - tdElements - .at(1 * tdColumnsNumber + 1) - .find('a') - .prop('href'), - ).toBe('/https://www.google.com?1'); - expect( - tdElements - .at(2 * tdColumnsNumber + 1) - .find('a') - .prop('href'), - ).toBe('/https://www.google.com?2'); + expect(tdElements.at(getTdIndex(0)).find('a').prop('href')).toBe( + '/https://www.google.com?0', + ); + expect(tdElements.at(getTdIndex(1)).find('a').prop('href')).toBe( + '/https://www.google.com?1', + ); + expect(tdElements.at(getTdIndex(2)).find('a').prop('href')).toBe( + '/https://www.google.com?2', + ); }); it('Check prevent unsafe is off renders absolute links', async () => { - const tdColumnsNumber = 9; useSelectorMock.mockReturnValue(false); wrapper = await mountAndWait(mockedProps); const tdElements = wrapper.find(ListView).find('td'); - expect( - tdElements - .at(0 * tdColumnsNumber + 1) - .find('a') - .prop('href'), - ).toBe('https://www.google.com?0'); - expect( - tdElements - .at(1 * tdColumnsNumber + 1) - .find('a') - .prop('href'), - ).toBe('https://www.google.com?1'); - expect( - tdElements - .at(2 * tdColumnsNumber + 1) - .find('a') - .prop('href'), - ).toBe('https://www.google.com?2'); + expect(tdElements.at(getTdIndex(0)).find('a').prop('href')).toBe( + 'https://www.google.com?0', + ); + expect(tdElements.at(getTdIndex(1)).find('a').prop('href')).toBe( + 'https://www.google.com?1', + ); + expect(tdElements.at(getTdIndex(2)).find('a').prop('href')).toBe( + 'https://www.google.com?2', + ); }); }); diff --git a/superset-frontend/src/pages/DatasetList/index.tsx b/superset-frontend/src/pages/DatasetList/index.tsx index d86d7a7b0f..8a39cb0463 100644 --- a/superset-frontend/src/pages/DatasetList/index.tsx +++ b/superset-frontend/src/pages/DatasetList/index.tsx @@ -70,6 +70,8 @@ import { } from 'src/features/datasets/constants'; import DuplicateDatasetModal from 'src/features/datasets/DuplicateDatasetModal'; import { useSelector } from 'react-redux'; +import { ModifiedInfo } from 'src/components/AuditInfo'; +import { QueryObjectColumns } from 'src/views/CRUD/types'; const extensionsRegistry = getExtensionsRegistry(); const DatasetDeleteRelatedExtension = extensionsRegistry.get( @@ -380,26 +382,6 @@ const DatasetList: FunctionComponent<DatasetListProps> = ({ accessor: 'schema', size: 'lg', }, - { - Cell: ({ - row: { - original: { changed_on_delta_humanized: changedOn }, - }, - }: any) => <span className="no-wrap">{changedOn}</span>, - Header: t('Modified'), - accessor: 'changed_on_delta_humanized', - size: 'xl', - }, - { - Cell: ({ - row: { - original: { changed_by_name: changedByName }, - }, - }: any) => changedByName, - Header: t('Modified by'), - accessor: 'changed_by.first_name', - size: 'xl', - }, { accessor: 'database', disableSortBy: true, @@ -416,6 +398,19 @@ const DatasetList: FunctionComponent<DatasetListProps> = ({ disableSortBy: true, size: 'lg', }, + { + Cell: ({ + row: { + original: { + changed_on_delta_humanized: changedOn, + changed_by: changedBy, + }, + }, + }: any) => <ModifiedInfo date={changedOn} user={changedBy} />, + Header: t('Last modified'), + 
accessor: 'changed_on_delta_humanized', + size: 'xl', + }, { accessor: 'sql', hidden: true, @@ -515,6 +510,10 @@ const DatasetList: FunctionComponent<DatasetListProps> = ({ hidden: !canEdit && !canDelete && !canDuplicate, disableSortBy: true, }, + { + accessor: QueryObjectColumns.changed_by, + hidden: true, + }, ], [canEdit, canDelete, canExport, openDatasetEditModal, canDuplicate, user], ); @@ -522,31 +521,23 @@ const DatasetList: FunctionComponent<DatasetListProps> = ({ const filterTypes: Filters = useMemo( () => [ { - Header: t('Search'), + Header: t('Name'), key: 'search', id: 'table_name', input: 'search', operator: FilterOperator.contains, }, { - Header: t('Owner'), - key: 'owner', - id: 'owners', + Header: t('Type'), + key: 'sql', + id: 'sql', input: 'select', - operator: FilterOperator.relationManyMany, + operator: FilterOperator.datasetIsNullOrEmpty, unfilteredLabel: 'All', - fetchSelects: createFetchRelated( - 'dataset', - 'owners', - createErrorHandler(errMsg => - t( - 'An error occurred while fetching dataset owner values: %s', - errMsg, - ), - ), - user, - ), - paginate: true, + selects: [ + { label: t('Virtual'), value: false }, + { label: t('Physical'), value: true }, + ], }, { Header: t('Database'), @@ -581,16 +572,24 @@ const DatasetList: FunctionComponent<DatasetListProps> = ({ paginate: true, }, { - Header: t('Type'), - key: 'sql', - id: 'sql', + Header: t('Owner'), + key: 'owner', + id: 'owners', input: 'select', - operator: FilterOperator.datasetIsNullOrEmpty, + operator: FilterOperator.relationManyMany, unfilteredLabel: 'All', - selects: [ - { label: t('Virtual'), value: false }, - { label: t('Physical'), value: true }, - ], + fetchSelects: createFetchRelated( + 'dataset', + 'owners', + createErrorHandler(errMsg => + t( + 'An error occurred while fetching dataset owner values: %s', + errMsg, + ), + ), + user, + ), + paginate: true, }, { Header: t('Certified'), @@ -605,6 +604,26 @@ const DatasetList: FunctionComponent<DatasetListProps> = ({ { label: t('No'), value: false }, ], }, + { + Header: t('Modified by'), + key: 'changed_by', + id: 'changed_by', + input: 'select', + operator: FilterOperator.relationOneMany, + unfilteredLabel: t('All'), + fetchSelects: createFetchRelated( + 'dataset', + 'changed_by', + createErrorHandler(errMsg => + t( + 'An error occurred while fetching dataset modified by values: %s', + errMsg, + ), + ), + user, + ), + paginate: true, + }, ], [user], ); diff --git a/superset-frontend/src/pages/QueryHistoryList/index.tsx b/superset-frontend/src/pages/QueryHistoryList/index.tsx index 63e916e399..94b646d9e4 100644 --- a/superset-frontend/src/pages/QueryHistoryList/index.tsx +++ b/superset-frontend/src/pages/QueryHistoryList/index.tsx @@ -34,6 +34,7 @@ import { } from 'src/views/CRUD/utils'; import withToasts from 'src/components/MessageToasts/withToasts'; import { useListViewResource } from 'src/views/CRUD/hooks'; +import Label from 'src/components/Label'; import SubMenu, { SubMenuProps } from 'src/features/home/SubMenu'; import Popover from 'src/components/Popover'; import { commonMenuData } from 'src/features/home/commonMenuData'; @@ -52,6 +53,7 @@ import { QueryObject, QueryObjectColumns } from 'src/views/CRUD/types'; import Icons from 'src/components/Icons'; import QueryPreviewModal from 'src/features/queries/QueryPreviewModal'; import { addSuccessToast } from 'src/components/MessageToasts/actions'; +import getOwnerName from 'src/utils/getOwnerName'; const PAGE_SIZE = 25; const SQL_PREVIEW_MAX_LINES = 4; @@ -88,6 +90,11 @@ const 
StyledPopoverItem = styled.div` color: ${({ theme }) => theme.colors.grayscale.dark2}; `; +const TimerLabel = styled(Label)` + text-align: left; + font-family: ${({ theme }) => theme.typography.families.monospace}; +`; + function QueryList({ addDangerToast }: QueryListProps) { const { state: { loading, resourceCount: queryCount, resourceCollection: queries }, @@ -204,7 +211,7 @@ function QueryList({ addDangerToast }: QueryListProps) { size: 'xl', Cell: ({ row: { - original: { start_time, end_time }, + original: { start_time }, }, }: any) => { const startMoment = moment.utc(start_time).local(); @@ -218,19 +225,25 @@ function QueryList({ addDangerToast }: QueryListProps) { {formattedStartTimeData[1]} </> ); - - return end_time ? ( - <Tooltip - title={t( - 'Duration: %s', - moment(moment.utc(end_time - start_time)).format(TIME_WITH_MS), - )} - placement="bottom" - > - <span>{formattedStartTime}</span> - </Tooltip> - ) : ( - formattedStartTime + return formattedStartTime; + }, + }, + { + Header: t('Duration'), + size: 'xl', + Cell: ({ + row: { + original: { status, start_time, end_time }, + }, + }: any) => { + const timerType = status === QueryState.FAILED ? 'danger' : status; + const timerTime = end_time + ? moment(moment.utc(end_time - start_time)).format(TIME_WITH_MS) + : '00:00:00.000'; + return ( + <TimerLabel type={timerType} role="timer"> + {timerTime} + </TimerLabel> ); }, }, @@ -299,7 +312,7 @@ function QueryList({ addDangerToast }: QueryListProps) { row: { original: { user }, }, - }: any) => (user ? `${user.first_name} ${user.last_name}` : ''), + }: any) => getOwnerName(user), }, { accessor: QueryObjectColumns.user, diff --git a/superset-frontend/src/pages/RowLevelSecurityList/RowLevelSecurityList.test.tsx b/superset-frontend/src/pages/RowLevelSecurityList/RowLevelSecurityList.test.tsx index a4621ed10e..6721f73add 100644 --- a/superset-frontend/src/pages/RowLevelSecurityList/RowLevelSecurityList.test.tsx +++ b/superset-frontend/src/pages/RowLevelSecurityList/RowLevelSecurityList.test.tsx @@ -187,8 +187,8 @@ describe('RuleList RTL', () => { const searchFilters = screen.queryAllByTestId('filters-search'); expect(searchFilters).toHaveLength(2); - const typeFilter = await screen.findByTestId('filters-select'); - expect(typeFilter).toBeInTheDocument(); + const typeFilter = screen.queryAllByTestId('filters-select'); + expect(typeFilter).toHaveLength(2); }); it('renders correct list columns', async () => { @@ -201,7 +201,7 @@ describe('RuleList RTL', () => { const fitlerTypeColumn = await within(table).findByText('Filter Type'); const groupKeyColumn = await within(table).findByText('Group Key'); const clauseColumn = await within(table).findByText('Clause'); - const modifiedColumn = await within(table).findByText('Modified'); + const modifiedColumn = await within(table).findByText('Last modified'); const actionsColumn = await within(table).findByText('Actions'); expect(nameColumn).toBeInTheDocument(); diff --git a/superset-frontend/src/pages/RowLevelSecurityList/index.tsx b/superset-frontend/src/pages/RowLevelSecurityList/index.tsx index 3c1e3b8aae..bef42284d0 100644 --- a/superset-frontend/src/pages/RowLevelSecurityList/index.tsx +++ b/superset-frontend/src/pages/RowLevelSecurityList/index.tsx @@ -33,7 +33,9 @@ import rison from 'rison'; import { useListViewResource } from 'src/views/CRUD/hooks'; import RowLevelSecurityModal from 'src/features/rls/RowLevelSecurityModal'; import { RLSObject } from 'src/features/rls/types'; -import { createErrorHandler } from 'src/views/CRUD/utils'; +import { 
createErrorHandler, createFetchRelated } from 'src/views/CRUD/utils'; +import { ModifiedInfo } from 'src/components/AuditInfo'; +import { QueryObjectColumns } from 'src/views/CRUD/types'; const Actions = styled.div` color: ${({ theme }) => theme.colors.grayscale.base}; @@ -43,7 +45,7 @@ interface RLSProps { addDangerToast: (msg: string) => void; addSuccessToast: (msg: string) => void; user: { - userId?: string | number; + userId: string | number; firstName: string; lastName: string; }; @@ -146,10 +148,13 @@ function RowLevelSecurityList(props: RLSProps) { { Cell: ({ row: { - original: { changed_on_delta_humanized: changedOn }, + original: { + changed_on_delta_humanized: changedOn, + changed_by: changedBy, + }, }, - }: any) => <span className="no-wrap">{changedOn}</span>, - Header: t('Modified'), + }: any) => <ModifiedInfo date={changedOn} user={changedBy} />, + Header: t('Last modified'), accessor: 'changed_on_delta_humanized', size: 'xl', }, @@ -218,6 +223,10 @@ function RowLevelSecurityList(props: RLSProps) { hidden: !canEdit && !canWrite && !canExport, disableSortBy: true, }, + { + accessor: QueryObjectColumns.changed_by, + hidden: true, + }, ], [ user.userId, @@ -270,6 +279,26 @@ function RowLevelSecurityList(props: RLSProps) { input: 'search', operator: FilterOperator.startsWith, }, + { + Header: t('Modified by'), + key: 'changed_by', + id: 'changed_by', + input: 'select', + operator: FilterOperator.relationOneMany, + unfilteredLabel: t('All'), + fetchSelects: createFetchRelated( + 'rowlevelsecurity', + 'changed_by', + createErrorHandler(errMsg => + t( + 'An error occurred while fetching row level security modified by values: %s', + errMsg, + ), + ), + user, + ), + paginate: true, + }, ], [user], ); diff --git a/superset-frontend/src/pages/SavedQueryList/index.tsx b/superset-frontend/src/pages/SavedQueryList/index.tsx index 3ee62c2ce6..d48ffef8c9 100644 --- a/superset-frontend/src/pages/SavedQueryList/index.tsx +++ b/superset-frontend/src/pages/SavedQueryList/index.tsx @@ -18,20 +18,19 @@ */ import { - isFeatureEnabled, FeatureFlag, + isFeatureEnabled, styled, SupersetClient, t, } from '@superset-ui/core'; -import React, { useState, useMemo, useCallback } from 'react'; +import React, { useCallback, useMemo, useState } from 'react'; import { Link, useHistory } from 'react-router-dom'; import rison from 'rison'; -import moment from 'moment'; import { - createFetchRelated, - createFetchDistinct, createErrorHandler, + createFetchDistinct, + createFetchRelated, } from 'src/views/CRUD/utils'; import { useSelector } from 'react-redux'; import Popover from 'src/components/Popover'; @@ -39,11 +38,11 @@ import withToasts from 'src/components/MessageToasts/withToasts'; import { useListViewResource } from 'src/views/CRUD/hooks'; import ConfirmStatusChange from 'src/components/ConfirmStatusChange'; import handleResourceExport from 'src/utils/export'; -import SubMenu, { SubMenuProps, ButtonProps } from 'src/features/home/SubMenu'; +import SubMenu, { ButtonProps, SubMenuProps } from 'src/features/home/SubMenu'; import ListView, { - ListViewProps, - Filters, FilterOperator, + Filters, + ListViewProps, } from 'src/components/ListView'; import Loading from 'src/components/Loading'; import DeleteModal from 'src/components/DeleteModal'; @@ -51,15 +50,14 @@ import ActionsBar, { ActionProps } from 'src/components/ListView/ActionsBar'; import { TagsList } from 'src/components/Tags'; import { Tooltip } from 'src/components/Tooltip'; import { commonMenuData } from 'src/features/home/commonMenuData'; -import { 
SavedQueryObject } from 'src/views/CRUD/types'; +import { QueryObjectColumns, SavedQueryObject } from 'src/views/CRUD/types'; import copyTextToClipboard from 'src/utils/copy'; import Tag from 'src/types/TagType'; import ImportModelsModal from 'src/components/ImportModal/index'; +import { ModifiedInfo } from 'src/components/AuditInfo'; +import { loadTags } from 'src/components/Tags/utils'; import Icons from 'src/components/Icons'; -import { - BootstrapUser, - UserWithPermissionsAndRoles, -} from 'src/types/bootstrapTypes'; +import { UserWithPermissionsAndRoles } from 'src/types/bootstrapTypes'; import SavedQueryPreviewModal from 'src/features/queries/SavedQueryPreviewModal'; import { findPermission } from 'src/utils/findPermission'; @@ -80,7 +78,11 @@ const CONFIRM_OVERWRITE_MESSAGE = t( interface SavedQueryListProps { addDangerToast: (msg: string) => void; addSuccessToast: (msg: string) => void; - user: BootstrapUser; + user: { + userId: string | number; + firstName: string; + lastName: string; + }; } const StyledTableLabel = styled.div` @@ -99,6 +101,7 @@ const StyledPopoverItem = styled.div` function SavedQueryList({ addDangerToast, addSuccessToast, + user, }: SavedQueryListProps) { const { state: { @@ -348,41 +351,6 @@ function SavedQueryList({ size: 'xl', disableSortBy: true, }, - { - Cell: ({ - row: { - original: { created_on: createdOn }, - }, - }: any) => { - const date = new Date(createdOn); - const utc = new Date( - Date.UTC( - date.getFullYear(), - date.getMonth(), - date.getDate(), - date.getHours(), - date.getMinutes(), - date.getSeconds(), - date.getMilliseconds(), - ), - ); - - return moment(utc).fromNow(); - }, - Header: t('Created on'), - accessor: 'created_on', - size: 'xl', - }, - { - Cell: ({ - row: { - original: { changed_on_delta_humanized: changedOn }, - }, - }: any) => changedOn, - Header: t('Modified'), - accessor: 'changed_on_delta_humanized', - size: 'xl', - }, { Cell: ({ row: { @@ -397,6 +365,19 @@ function SavedQueryList({ disableSortBy: true, hidden: !isFeatureEnabled(FeatureFlag.TAGGING_SYSTEM), }, + { + Cell: ({ + row: { + original: { + changed_by: changedBy, + changed_on_delta_humanized: changedOn, + }, + }, + }: any) => <ModifiedInfo user={changedBy} date={changedOn} />, + Header: t('Last modified'), + accessor: 'changed_on_delta_humanized', + size: 'xl', + }, { Cell: ({ row: { original } }: any) => { const handlePreview = () => { @@ -452,12 +433,23 @@ function SavedQueryList({ id: 'actions', disableSortBy: true, }, + { + accessor: QueryObjectColumns.changed_by, + hidden: true, + }, ], [canDelete, canEdit, canExport, copyQueryLink, handleSavedQueryPreview], ); const filters: Filters = useMemo( () => [ + { + Header: t('Name'), + id: 'label', + key: 'search', + input: 'search', + operator: FilterOperator.allText, + }, { Header: t('Database'), key: 'database', @@ -497,28 +489,42 @@ function SavedQueryList({ ), paginate: true, }, - + ...((isFeatureEnabled(FeatureFlag.TAGGING_SYSTEM) && canReadTag + ? 
[ + { + Header: t('Tag'), + id: 'tags', + key: 'tags', + input: 'select', + operator: FilterOperator.savedQueryTags, + fetchSelects: loadTags, + }, + ] : []) as Filters), { - Header: t('Search'), - id: 'label', - key: 'search', - input: 'search', - operator: FilterOperator.allText, + Header: t('Modified by'), + key: 'changed_by', + id: 'changed_by', + input: 'select', + operator: FilterOperator.relationOneMany, + unfilteredLabel: t('All'), + fetchSelects: createFetchRelated( + 'saved_query', + 'changed_by', + createErrorHandler(errMsg => + t( + 'An error occurred while fetching saved query modified by values: %s', + errMsg, + ), + ), + user, + ), + paginate: true, }, ], [addDangerToast], ); - if (isFeatureEnabled(FeatureFlag.TAGGING_SYSTEM) && canReadTag) { - filters.push({ - Header: t('Tags'), - id: 'tags', - key: 'tags', - input: 'search', - operator: FilterOperator.savedQueryTags, - }); - } - return ( <> <SubMenu {...menuData} /> diff --git a/superset-frontend/src/pages/SqlLab/index.tsx b/superset-frontend/src/pages/SqlLab/index.tsx index e9f84f1b1d..3f19b54c29 100644 --- a/superset-frontend/src/pages/SqlLab/index.tsx +++ b/superset-frontend/src/pages/SqlLab/index.tsx @@ -18,7 +18,7 @@ */ import React, { useEffect } from 'react'; import { useDispatch, useSelector } from 'react-redux'; -import { css } from '@superset-ui/core'; +import { css, isFeatureEnabled, FeatureFlag } from '@superset-ui/core'; import { useSqlLabInitialState } from 'src/hooks/apiResources/sqlLab'; import type { InitialState } from 'src/hooks/apiResources/sqlLab'; import { resetState } from 'src/SqlLab/actions/sqlLab'; @@ -27,16 +27,17 @@ import type { SqlLabRootState } from 'src/SqlLab/types'; import { SqlLabGlobalStyles } from 'src/SqlLab//SqlLabGlobalStyles'; import App from 'src/SqlLab/components/App'; import Loading from 'src/components/Loading'; +import EditorAutoSync from 'src/SqlLab/components/EditorAutoSync'; import useEffectEvent from 'src/hooks/useEffectEvent'; import { LocationProvider } from './LocationContext'; export default function SqlLab() { - const editorTabLastUpdatedAt = useSelector<SqlLabRootState, number>( - state => state.sqlLab.editorTabLastUpdatedAt || 0, + const lastInitializedAt = useSelector<SqlLabRootState, number>( + state => state.sqlLab.queriesLastUpdate || 0, ); const { data, isLoading, isError, error, fulfilledTimeStamp } = useSqlLabInitialState(); - const shouldInitialize = editorTabLastUpdatedAt <= (fulfilledTimeStamp || 0); + const shouldInitialize = lastInitializedAt <= (fulfilledTimeStamp || 0); const dispatch = useDispatch(); const initBootstrapData = useEffectEvent( @@ -72,6 +73,9 @@ export default function SqlLab() { > <SqlLabGlobalStyles /> <App /> + {isFeatureEnabled(FeatureFlag.SQLLAB_BACKEND_PERSISTENCE) && ( + <EditorAutoSync /> + )} </div> </LocationProvider> ); diff --git a/superset-frontend/src/pages/Tags/index.tsx b/superset-frontend/src/pages/Tags/index.tsx index a66d7c7b61..d395ce7cde 100644 --- a/superset-frontend/src/pages/Tags/index.tsx +++ b/superset-frontend/src/pages/Tags/index.tsx @@ -19,9 +19,9 @@ import React, { useMemo, useState } from 'react'; import { isFeatureEnabled, FeatureFlag, t } from '@superset-ui/core'; import { - createFetchRelated, - createErrorHandler, Actions, + createErrorHandler, + createFetchRelated, } from 'src/views/CRUD/utils'; import { useListViewResource, useFavoriteStatus } from 'src/views/CRUD/hooks'; import ConfirmStatusChange from 'src/components/ConfirmStatusChange'; @@ -35,13 +35,13 @@ import { dangerouslyGetItemDoNotUse } from 
'src/utils/localStorageHelpers'; import withToasts from 'src/components/MessageToasts/withToasts'; import Icons from 'src/components/Icons'; import { Tooltip } from 'src/components/Tooltip'; -import FacePile from 'src/components/FacePile'; import { Link } from 'react-router-dom'; import { deleteTags } from 'src/features/tags/tags'; import { Tag as AntdTag } from 'antd'; -import { Tag } from 'src/views/CRUD/types'; +import { QueryObjectColumns, Tag } from 'src/views/CRUD/types'; import TagModal from 'src/features/tags/TagModal'; import FaveStar from 'src/components/FaveStar'; +import { ModifiedInfo } from 'src/components/AuditInfo'; const PAGE_SIZE = 25; @@ -56,11 +56,8 @@ interface TagListProps { } function TagList(props: TagListProps) { - const { - addDangerToast, - addSuccessToast, - user: { userId }, - } = props; + const { addDangerToast, addSuccessToast, user } = props; + const { userId } = user; const { state: { @@ -162,24 +159,16 @@ function TagList(props: TagListProps) { { Cell: ({ row: { - original: { changed_on_delta_humanized: changedOn }, + original: { + changed_on_delta_humanized: changedOn, + changed_by: changedBy, + }, }, - }: any) => <span className="no-wrap">{changedOn}</span>, - Header: t('Modified'), + }: any) => <ModifiedInfo date={changedOn} user={changedBy} />, + Header: t('Last modified'), accessor: 'changed_on_delta_humanized', size: 'xl', }, - { - Cell: ({ - row: { - original: { created_by: createdBy }, - }, - }: any) => (createdBy ? <FacePile users={[createdBy]} /> : ''), - Header: t('Created by'), - accessor: 'created_by', - disableSortBy: true, - size: 'xl', - }, { Cell: ({ row: { original } }: any) => { const handleEdit = () => handleTagEdit(original); @@ -238,6 +227,10 @@ function TagList(props: TagListProps) { hidden: !canDelete, disableSortBy: true, }, + { + accessor: QueryObjectColumns.changed_by, + hidden: true, + }, ], [userId, canDelete, refreshData, addSuccessToast, addDangerToast], ); @@ -245,32 +238,31 @@ function TagList(props: TagListProps) { const filters: Filters = useMemo(() => { const filters_list = [ { - Header: t('Created by'), - id: 'created_by', + Header: t('Name'), + id: 'name', + input: 'search', + operator: FilterOperator.contains, + }, + { + Header: t('Modified by'), + key: 'changed_by', + id: 'changed_by', input: 'select', operator: FilterOperator.relationOneMany, unfilteredLabel: t('All'), fetchSelects: createFetchRelated( 'tag', - 'created_by', + 'changed_by', createErrorHandler(errMsg => - addDangerToast( - t( - 'An error occurred while fetching tag created by values: %s', - errMsg, - ), + t( + 'An error occurred while fetching tag modified by values: %s', + errMsg, ), ), - props.user, + user, ), paginate: true, }, - { - Header: t('Search'), - id: 'name', - input: 'search', - operator: FilterOperator.contains, - }, ] as Filters; return filters_list; }, [addDangerToast, props.user]); diff --git a/superset-frontend/src/setup/setupClient.ts b/superset-frontend/src/setup/setupClient.ts index 80ce6b54bb..c6f2399436 100644 --- a/superset-frontend/src/setup/setupClient.ts +++ b/superset-frontend/src/setup/setupClient.ts @@ -18,13 +18,18 @@ */ import { SupersetClient, logging, ClientConfig } from '@superset-ui/core'; import parseCookie from 'src/utils/parseCookie'; +import getBootstrapData from 'src/utils/getBootstrapData'; + +const bootstrapData = getBootstrapData(); function getDefaultConfiguration(): ClientConfig { const csrfNode = document.querySelector<HTMLInputElement>('#csrf_token'); const csrfToken = csrfNode?.value; // when using 
flask-jwt-extended csrf is set in cookies - const cookieCSRFToken = parseCookie().csrf_access_token || ''; + const jwtAccessCsrfCookieName = + bootstrapData.common.conf.JWT_ACCESS_CSRF_COOKIE_NAME; + const cookieCSRFToken = parseCookie()[jwtAccessCsrfCookieName] || ''; return { protocol: ['http:', 'https:'].includes(window?.location?.protocol) diff --git a/superset-frontend/src/utils/getOwnerName.test.ts b/superset-frontend/src/utils/getOwnerName.test.ts new file mode 100644 index 0000000000..a4a25e57b2 --- /dev/null +++ b/superset-frontend/src/utils/getOwnerName.test.ts @@ -0,0 +1,29 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import getOwnerName from './getOwnerName'; + +test('render owner name correctly', () => { + expect(getOwnerName({ id: 1, first_name: 'Foo', last_name: 'Bar' })).toEqual( + 'Foo Bar', + ); +}); + +test('return empty string for undefined owner', () => { + expect(getOwnerName(undefined)).toEqual(''); +}); diff --git a/superset-frontend/src/utils/getOwnerName.ts b/superset-frontend/src/utils/getOwnerName.ts new file mode 100644 index 0000000000..2534c45f2c --- /dev/null +++ b/superset-frontend/src/utils/getOwnerName.ts @@ -0,0 +1,26 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +import Owner from 'src/types/Owner'; + +export default function getOwnerName(owner?: Owner): string { + if (!owner) { + return ''; + } + return `${owner.first_name} ${owner.last_name}`; +} diff --git a/superset-frontend/src/views/CRUD/types.ts b/superset-frontend/src/views/CRUD/types.ts index 5a53b57696..2fff111b47 100644 --- a/superset-frontend/src/views/CRUD/types.ts +++ b/superset-frontend/src/views/CRUD/types.ts @@ -112,6 +112,7 @@ export interface QueryObject { export enum QueryObjectColumns { id = 'id', changed_on = 'changed_on', + changed_by = 'changed_by', database = 'database', database_name = 'database.database_name', schema = 'schema', @@ -138,17 +139,11 @@ export type ImportResourceName = export interface Tag { changed_on_delta_humanized: string; - changed_by: { - first_name: string; - last_name: string; - }; + changed_by: Owner; created_on_delta_humanized: string; name: string; id: number; - created_by: { - first_name: string; - last_name: string; - }; + created_by: Owner; description: string; type: string; } diff --git a/superset-frontend/src/views/store.ts b/superset-frontend/src/views/store.ts index 55df81c588..a9c3a9eb13 100644 --- a/superset-frontend/src/views/store.ts +++ b/superset-frontend/src/views/store.ts @@ -38,7 +38,6 @@ import logger from 'src/middleware/loggerMiddleware'; import saveModal from 'src/explore/reducers/saveModalReducer'; import explore from 'src/explore/reducers/exploreReducer'; import exploreDatasources from 'src/explore/reducers/datasourcesReducer'; -import { FeatureFlag, isFeatureEnabled } from '@superset-ui/core'; import { persistSqlLabStateEnhancer } from 'src/SqlLab/middlewares/persistSqlLabStateEnhancer'; import sqlLabReducer from 'src/SqlLab/reducers/sqlLab'; @@ -167,9 +166,7 @@ export function setupStore({ }, middleware: getMiddleware, devTools: process.env.WEBPACK_MODE === 'development' && !disableDebugger, - ...(!isFeatureEnabled(FeatureFlag.SQLLAB_BACKEND_PERSISTENCE) && { - enhancers: [persistSqlLabStateEnhancer as StoreEnhancer], - }), + enhancers: [persistSqlLabStateEnhancer as StoreEnhancer], ...overrides, }); } diff --git a/superset-websocket/package-lock.json b/superset-websocket/package-lock.json index 88443ef398..a0f4d1f4ea 100644 --- a/superset-websocket/package-lock.json +++ b/superset-websocket/package-lock.json @@ -9,7 +9,7 @@ "version": "0.0.1", "license": "Apache-2.0", "dependencies": { - "@types/lodash": "^4.14.201", + "@types/lodash": "^4.14.202", "cookie": "^0.6.0", "hot-shots": "^10.0.0", "ioredis": "^4.28.0", @@ -20,17 +20,17 @@ "ws": "^8.14.2" }, "devDependencies": { - "@types/cookie": "^0.5.4", + "@types/cookie": "^0.6.0", "@types/ioredis": "^4.27.8", "@types/jest": "^27.0.2", - "@types/jsonwebtoken": "^9.0.4", - "@types/node": "^20.8.10", - "@types/uuid": "^9.0.6", - "@types/ws": "^8.5.9", + "@types/jsonwebtoken": "^9.0.5", + "@types/node": "^20.10.4", + "@types/uuid": "^9.0.7", + "@types/ws": "^8.5.10", "@typescript-eslint/eslint-plugin": "^5.61.0", "@typescript-eslint/parser": "^5.62.0", - "eslint": "^8.53.0", - "eslint-config-prettier": "^9.0.0", + "eslint": "^8.55.0", + "eslint-config-prettier": "^9.1.0", "jest": "^27.3.1", "prettier": "^3.0.3", "ts-jest": "^27.0.7", @@ -773,9 +773,9 @@ } }, "node_modules/@eslint/eslintrc": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.3.tgz", - "integrity": "sha512-yZzuIG+jnVu6hNSzFEN07e8BxF3uAzYtQb6uDkaYZLo6oYZDCq454c5kB8zxnzfCYyP4MIuyBn10L0DqwujTmA==", + "version": "2.1.4", + "resolved": 
"https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.4.tgz", + "integrity": "sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ==", "dev": true, "dependencies": { "ajv": "^6.12.4", @@ -829,9 +829,9 @@ } }, "node_modules/@eslint/js": { - "version": "8.53.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.53.0.tgz", - "integrity": "sha512-Kn7K8dx/5U6+cT1yEhpX1w4PCSg0M+XyRILPgvwcEBjerFWCwQj5sbr3/VmxqV0JGHCBCzyd6LxypEuehypY1w==", + "version": "8.55.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.55.0.tgz", + "integrity": "sha512-qQfo2mxH5yVom1kacMtZZJFVdW+E70mqHMJvVg6WTLo+VBuQJ4TojZlfWBjK0ve5BdEeNAVxOsl/nvNMpJOaJA==", "dev": true, "engines": { "node": "^12.22.0 || ^14.17.0 || >=16.0.0" @@ -1351,9 +1351,9 @@ } }, "node_modules/@types/cookie": { - "version": "0.5.4", - "resolved": "https://registry.npmjs.org/@types/cookie/-/cookie-0.5.4.tgz", - "integrity": "sha512-7z/eR6O859gyWIAjuvBWFzNURmf2oPBmJlfVWkwehU5nzIyjwBsTh7WMmEEV4JFnHuQ3ex4oyTvfKzcyJVDBNA==", + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/@types/cookie/-/cookie-0.6.0.tgz", + "integrity": "sha512-4Kh9a6B2bQciAhf7FSuMRRkUWecJgJu9nPnx3yzpsfXX/c50REIqpHY4C82bXP90qrLtXtkDxTZosYO3UpOwlA==", "dev": true }, "node_modules/@types/graceful-fs": { @@ -1415,23 +1415,23 @@ "dev": true }, "node_modules/@types/jsonwebtoken": { - "version": "9.0.4", - "resolved": "https://registry.npmjs.org/@types/jsonwebtoken/-/jsonwebtoken-9.0.4.tgz", - "integrity": "sha512-8UYapdmR0QlxgvJmyE8lP7guxD0UGVMfknsdtCFZh4ovShdBl3iOI4zdvqBHrB/IS+xUj3PSx73Qkey1fhWz+g==", + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/@types/jsonwebtoken/-/jsonwebtoken-9.0.5.tgz", + "integrity": "sha512-VRLSGzik+Unrup6BsouBeHsf4d1hOEgYWTm/7Nmw1sXoN1+tRly/Gy/po3yeahnP4jfnQWWAhQAqcNfH7ngOkA==", "dev": true, "dependencies": { "@types/node": "*" } }, "node_modules/@types/lodash": { - "version": "4.14.201", - "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.201.tgz", - "integrity": "sha512-y9euML0cim1JrykNxADLfaG0FgD1g/yTHwUs/Jg9ZIU7WKj2/4IW9Lbb1WZbvck78W/lfGXFfe+u2EGfIJXdLQ==" + "version": "4.14.202", + "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.202.tgz", + "integrity": "sha512-OvlIYQK9tNneDlS0VN54LLd5uiPCBOp7gS5Z0f1mjoJYBrtStzgmJBxONW3U6OZqdtNzZPmn9BS/7WI7BFFcFQ==" }, "node_modules/@types/node": { - "version": "20.8.10", - "resolved": "https://registry.npmjs.org/@types/node/-/node-20.8.10.tgz", - "integrity": "sha512-TlgT8JntpcbmKUFzjhsyhGfP2fsiz1Mv56im6enJ905xG1DAYesxJaeSbGqQmAw8OWPdhyJGhGSQGKRNJ45u9w==", + "version": "20.10.4", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.10.4.tgz", + "integrity": "sha512-D08YG6rr8X90YB56tSIuBaddy/UXAA9RKJoFvrsnogAum/0pmjkgi4+2nx96A330FmioegBWmEYQ+syqCFaveg==", "dev": true, "dependencies": { "undici-types": "~5.26.4" @@ -1456,15 +1456,15 @@ "dev": true }, "node_modules/@types/uuid": { - "version": "9.0.6", - "resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-9.0.6.tgz", - "integrity": "sha512-BT2Krtx4xaO6iwzwMFUYvWBWkV2pr37zD68Vmp1CDV196MzczBRxuEpD6Pr395HAgebC/co7hOphs53r8V7jew==", + "version": "9.0.7", + "resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-9.0.7.tgz", + "integrity": "sha512-WUtIVRUZ9i5dYXefDEAI7sh9/O7jGvHg7Df/5O/gtH3Yabe5odI3UWopVR1qbPXQtvOxWu3mM4XxlYeZtMWF4g==", "dev": true }, "node_modules/@types/ws": { - "version": "8.5.9", - "resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.5.9.tgz", - "integrity": 
"sha512-jbdrY0a8lxfdTp/+r7Z4CkycbOFN8WX+IOchLJr3juT/xzbJ8URyTVSJ/hvNdadTgM1mnedb47n+Y31GsFnQlg==", + "version": "8.5.10", + "resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.5.10.tgz", + "integrity": "sha512-vmQSUcfalpIq0R9q7uTo2lXs6eGIpt9wtnLdMv9LVpIjCA/+ufZRozlVoVelIYixx1ugCBKDhn89vnsEGOCx9A==", "dev": true, "dependencies": { "@types/node": "*" @@ -2604,15 +2604,15 @@ } }, "node_modules/eslint": { - "version": "8.53.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.53.0.tgz", - "integrity": "sha512-N4VuiPjXDUa4xVeV/GC/RV3hQW9Nw+Y463lkWaKKXKYMvmRiRDAtfpuPFLN+E1/6ZhyR8J2ig+eVREnYgUsiag==", + "version": "8.55.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.55.0.tgz", + "integrity": "sha512-iyUUAM0PCKj5QpwGfmCAG9XXbZCWsqP/eWAWrG/W0umvjuLRBECwSFdt+rCntju0xEH7teIABPwXpahftIaTdA==", "dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.6.1", - "@eslint/eslintrc": "^2.1.3", - "@eslint/js": "8.53.0", + "@eslint/eslintrc": "^2.1.4", + "@eslint/js": "8.55.0", "@humanwhocodes/config-array": "^0.11.13", "@humanwhocodes/module-importer": "^1.0.1", "@nodelib/fs.walk": "^1.2.8", @@ -2659,9 +2659,9 @@ } }, "node_modules/eslint-config-prettier": { - "version": "9.0.0", - "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-9.0.0.tgz", - "integrity": "sha512-IcJsTkJae2S35pRsRAwoCE+925rJJStOdkKnLVgtE+tEpqU0EVVM7OqrwxqgptKdX29NUwC82I5pXsGFIgSevw==", + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-9.1.0.tgz", + "integrity": "sha512-NSWl5BFQWEPi1j4TjVNItzYV7dZXZ+wP6I6ZhrBGpChQhZRUaElihE9uRRkcbRnNb76UMKDF3r+WTmNcGPKsqw==", "dev": true, "bin": { "eslint-config-prettier": "bin/cli.js" @@ -6738,9 +6738,9 @@ "dev": true }, "@eslint/eslintrc": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.3.tgz", - "integrity": "sha512-yZzuIG+jnVu6hNSzFEN07e8BxF3uAzYtQb6uDkaYZLo6oYZDCq454c5kB8zxnzfCYyP4MIuyBn10L0DqwujTmA==", + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.4.tgz", + "integrity": "sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ==", "dev": true, "requires": { "ajv": "^6.12.4", @@ -6781,9 +6781,9 @@ } }, "@eslint/js": { - "version": "8.53.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.53.0.tgz", - "integrity": "sha512-Kn7K8dx/5U6+cT1yEhpX1w4PCSg0M+XyRILPgvwcEBjerFWCwQj5sbr3/VmxqV0JGHCBCzyd6LxypEuehypY1w==", + "version": "8.55.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.55.0.tgz", + "integrity": "sha512-qQfo2mxH5yVom1kacMtZZJFVdW+E70mqHMJvVg6WTLo+VBuQJ4TojZlfWBjK0ve5BdEeNAVxOsl/nvNMpJOaJA==", "dev": true }, "@humanwhocodes/config-array": { @@ -7205,9 +7205,9 @@ } }, "@types/cookie": { - "version": "0.5.4", - "resolved": "https://registry.npmjs.org/@types/cookie/-/cookie-0.5.4.tgz", - "integrity": "sha512-7z/eR6O859gyWIAjuvBWFzNURmf2oPBmJlfVWkwehU5nzIyjwBsTh7WMmEEV4JFnHuQ3ex4oyTvfKzcyJVDBNA==", + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/@types/cookie/-/cookie-0.6.0.tgz", + "integrity": "sha512-4Kh9a6B2bQciAhf7FSuMRRkUWecJgJu9nPnx3yzpsfXX/c50REIqpHY4C82bXP90qrLtXtkDxTZosYO3UpOwlA==", "dev": true }, "@types/graceful-fs": { @@ -7269,23 +7269,23 @@ "dev": true }, "@types/jsonwebtoken": { - "version": "9.0.4", - "resolved": "https://registry.npmjs.org/@types/jsonwebtoken/-/jsonwebtoken-9.0.4.tgz", - "integrity": 
"sha512-8UYapdmR0QlxgvJmyE8lP7guxD0UGVMfknsdtCFZh4ovShdBl3iOI4zdvqBHrB/IS+xUj3PSx73Qkey1fhWz+g==", + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/@types/jsonwebtoken/-/jsonwebtoken-9.0.5.tgz", + "integrity": "sha512-VRLSGzik+Unrup6BsouBeHsf4d1hOEgYWTm/7Nmw1sXoN1+tRly/Gy/po3yeahnP4jfnQWWAhQAqcNfH7ngOkA==", "dev": true, "requires": { "@types/node": "*" } }, "@types/lodash": { - "version": "4.14.201", - "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.201.tgz", - "integrity": "sha512-y9euML0cim1JrykNxADLfaG0FgD1g/yTHwUs/Jg9ZIU7WKj2/4IW9Lbb1WZbvck78W/lfGXFfe+u2EGfIJXdLQ==" + "version": "4.14.202", + "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.202.tgz", + "integrity": "sha512-OvlIYQK9tNneDlS0VN54LLd5uiPCBOp7gS5Z0f1mjoJYBrtStzgmJBxONW3U6OZqdtNzZPmn9BS/7WI7BFFcFQ==" }, "@types/node": { - "version": "20.8.10", - "resolved": "https://registry.npmjs.org/@types/node/-/node-20.8.10.tgz", - "integrity": "sha512-TlgT8JntpcbmKUFzjhsyhGfP2fsiz1Mv56im6enJ905xG1DAYesxJaeSbGqQmAw8OWPdhyJGhGSQGKRNJ45u9w==", + "version": "20.10.4", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.10.4.tgz", + "integrity": "sha512-D08YG6rr8X90YB56tSIuBaddy/UXAA9RKJoFvrsnogAum/0pmjkgi4+2nx96A330FmioegBWmEYQ+syqCFaveg==", "dev": true, "requires": { "undici-types": "~5.26.4" @@ -7310,15 +7310,15 @@ "dev": true }, "@types/uuid": { - "version": "9.0.6", - "resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-9.0.6.tgz", - "integrity": "sha512-BT2Krtx4xaO6iwzwMFUYvWBWkV2pr37zD68Vmp1CDV196MzczBRxuEpD6Pr395HAgebC/co7hOphs53r8V7jew==", + "version": "9.0.7", + "resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-9.0.7.tgz", + "integrity": "sha512-WUtIVRUZ9i5dYXefDEAI7sh9/O7jGvHg7Df/5O/gtH3Yabe5odI3UWopVR1qbPXQtvOxWu3mM4XxlYeZtMWF4g==", "dev": true }, "@types/ws": { - "version": "8.5.9", - "resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.5.9.tgz", - "integrity": "sha512-jbdrY0a8lxfdTp/+r7Z4CkycbOFN8WX+IOchLJr3juT/xzbJ8URyTVSJ/hvNdadTgM1mnedb47n+Y31GsFnQlg==", + "version": "8.5.10", + "resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.5.10.tgz", + "integrity": "sha512-vmQSUcfalpIq0R9q7uTo2lXs6eGIpt9wtnLdMv9LVpIjCA/+ufZRozlVoVelIYixx1ugCBKDhn89vnsEGOCx9A==", "dev": true, "requires": { "@types/node": "*" @@ -8162,15 +8162,15 @@ } }, "eslint": { - "version": "8.53.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.53.0.tgz", - "integrity": "sha512-N4VuiPjXDUa4xVeV/GC/RV3hQW9Nw+Y463lkWaKKXKYMvmRiRDAtfpuPFLN+E1/6ZhyR8J2ig+eVREnYgUsiag==", + "version": "8.55.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.55.0.tgz", + "integrity": "sha512-iyUUAM0PCKj5QpwGfmCAG9XXbZCWsqP/eWAWrG/W0umvjuLRBECwSFdt+rCntju0xEH7teIABPwXpahftIaTdA==", "dev": true, "requires": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.6.1", - "@eslint/eslintrc": "^2.1.3", - "@eslint/js": "8.53.0", + "@eslint/eslintrc": "^2.1.4", + "@eslint/js": "8.55.0", "@humanwhocodes/config-array": "^0.11.13", "@humanwhocodes/module-importer": "^1.0.1", "@nodelib/fs.walk": "^1.2.8", @@ -8341,9 +8341,9 @@ } }, "eslint-config-prettier": { - "version": "9.0.0", - "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-9.0.0.tgz", - "integrity": "sha512-IcJsTkJae2S35pRsRAwoCE+925rJJStOdkKnLVgtE+tEpqU0EVVM7OqrwxqgptKdX29NUwC82I5pXsGFIgSevw==", + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-9.1.0.tgz", + "integrity": 
"sha512-NSWl5BFQWEPi1j4TjVNItzYV7dZXZ+wP6I6ZhrBGpChQhZRUaElihE9uRRkcbRnNb76UMKDF3r+WTmNcGPKsqw==", "dev": true, "requires": {} }, diff --git a/superset-websocket/package.json b/superset-websocket/package.json index 1de571d61a..0d1433fb45 100644 --- a/superset-websocket/package.json +++ b/superset-websocket/package.json @@ -16,7 +16,7 @@ }, "license": "Apache-2.0", "dependencies": { - "@types/lodash": "^4.14.201", + "@types/lodash": "^4.14.202", "cookie": "^0.6.0", "hot-shots": "^10.0.0", "ioredis": "^4.28.0", @@ -27,17 +27,17 @@ "ws": "^8.14.2" }, "devDependencies": { - "@types/cookie": "^0.5.4", + "@types/cookie": "^0.6.0", "@types/ioredis": "^4.27.8", "@types/jest": "^27.0.2", - "@types/jsonwebtoken": "^9.0.4", - "@types/node": "^20.8.10", - "@types/uuid": "^9.0.6", - "@types/ws": "^8.5.9", + "@types/jsonwebtoken": "^9.0.5", + "@types/node": "^20.10.4", + "@types/uuid": "^9.0.7", + "@types/ws": "^8.5.10", "@typescript-eslint/eslint-plugin": "^5.61.0", "@typescript-eslint/parser": "^5.62.0", - "eslint": "^8.53.0", - "eslint-config-prettier": "^9.0.0", + "eslint": "^8.55.0", + "eslint-config-prettier": "^9.1.0", "jest": "^27.3.1", "prettier": "^3.0.3", "ts-jest": "^27.0.7", diff --git a/superset/annotation_layers/annotations/api.py b/superset/annotation_layers/annotations/api.py index 4c95b3c105..0be6efbfa9 100644 --- a/superset/annotation_layers/annotations/api.py +++ b/superset/annotation_layers/annotations/api.py @@ -24,22 +24,6 @@ from flask_appbuilder.models.sqla.interface import SQLAInterface from flask_babel import ngettext from marshmallow import ValidationError -from superset.annotation_layers.annotations.commands.create import ( - CreateAnnotationCommand, -) -from superset.annotation_layers.annotations.commands.delete import ( - DeleteAnnotationCommand, -) -from superset.annotation_layers.annotations.commands.exceptions import ( - AnnotationCreateFailedError, - AnnotationDeleteFailedError, - AnnotationInvalidError, - AnnotationNotFoundError, - AnnotationUpdateFailedError, -) -from superset.annotation_layers.annotations.commands.update import ( - UpdateAnnotationCommand, -) from superset.annotation_layers.annotations.filters import AnnotationAllTextFilter from superset.annotation_layers.annotations.schemas import ( AnnotationPostSchema, @@ -47,7 +31,17 @@ from superset.annotation_layers.annotations.schemas import ( get_delete_ids_schema, openapi_spec_methods_override, ) -from superset.annotation_layers.commands.exceptions import AnnotationLayerNotFoundError +from superset.commands.annotation_layer.annotation.create import CreateAnnotationCommand +from superset.commands.annotation_layer.annotation.delete import DeleteAnnotationCommand +from superset.commands.annotation_layer.annotation.exceptions import ( + AnnotationCreateFailedError, + AnnotationDeleteFailedError, + AnnotationInvalidError, + AnnotationNotFoundError, + AnnotationUpdateFailedError, +) +from superset.commands.annotation_layer.annotation.update import UpdateAnnotationCommand +from superset.commands.annotation_layer.exceptions import AnnotationLayerNotFoundError from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP, RouteMethod from superset.models.annotations import Annotation from superset.views.base_api import ( diff --git a/superset/annotation_layers/api.py b/superset/annotation_layers/api.py index b7a3b301bc..5606e944ef 100644 --- a/superset/annotation_layers/api.py +++ b/superset/annotation_layers/api.py @@ -23,17 +23,6 @@ from flask_appbuilder.models.sqla.interface import SQLAInterface from 
flask_babel import ngettext from marshmallow import ValidationError -from superset.annotation_layers.commands.create import CreateAnnotationLayerCommand -from superset.annotation_layers.commands.delete import DeleteAnnotationLayerCommand -from superset.annotation_layers.commands.exceptions import ( - AnnotationLayerCreateFailedError, - AnnotationLayerDeleteFailedError, - AnnotationLayerDeleteIntegrityError, - AnnotationLayerInvalidError, - AnnotationLayerNotFoundError, - AnnotationLayerUpdateFailedError, -) -from superset.annotation_layers.commands.update import UpdateAnnotationLayerCommand from superset.annotation_layers.filters import AnnotationLayerAllTextFilter from superset.annotation_layers.schemas import ( AnnotationLayerPostSchema, @@ -41,6 +30,17 @@ from superset.annotation_layers.schemas import ( get_delete_ids_schema, openapi_spec_methods_override, ) +from superset.commands.annotation_layer.create import CreateAnnotationLayerCommand +from superset.commands.annotation_layer.delete import DeleteAnnotationLayerCommand +from superset.commands.annotation_layer.exceptions import ( + AnnotationLayerCreateFailedError, + AnnotationLayerDeleteFailedError, + AnnotationLayerDeleteIntegrityError, + AnnotationLayerInvalidError, + AnnotationLayerNotFoundError, + AnnotationLayerUpdateFailedError, +) +from superset.commands.annotation_layer.update import UpdateAnnotationLayerCommand from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP, RouteMethod from superset.extensions import event_logger from superset.models.annotations import AnnotationLayer @@ -99,7 +99,7 @@ class AnnotationLayerRestApi(BaseSupersetModelRestApi): ] search_filters = {"name": [AnnotationLayerAllTextFilter]} - allowed_rel_fields = {"created_by"} + allowed_rel_fields = {"created_by", "changed_by"} apispec_parameter_schemas = { "get_delete_ids_schema": get_delete_ids_schema, diff --git a/superset/charts/api.py b/superset/charts/api.py index 768d330291..191f09c66e 100644 --- a/superset/charts/api.py +++ b/superset/charts/api.py @@ -32,21 +32,6 @@ from werkzeug.wrappers import Response as WerkzeugResponse from werkzeug.wsgi import FileWrapper from superset import app, is_feature_enabled, thumbnail_cache -from superset.charts.commands.create import CreateChartCommand -from superset.charts.commands.delete import DeleteChartCommand -from superset.charts.commands.exceptions import ( - ChartCreateFailedError, - ChartDeleteFailedError, - ChartForbiddenError, - ChartInvalidError, - ChartNotFoundError, - ChartUpdateFailedError, - DashboardsForbiddenError, -) -from superset.charts.commands.export import ExportChartsCommand -from superset.charts.commands.importers.dispatcher import ImportChartsCommand -from superset.charts.commands.update import UpdateChartCommand -from superset.charts.commands.warm_up_cache import ChartWarmUpCacheCommand from superset.charts.filters import ( ChartAllTextFilter, ChartCertifiedFilter, @@ -69,6 +54,21 @@ from superset.charts.schemas import ( screenshot_query_schema, thumbnail_query_schema, ) +from superset.commands.chart.create import CreateChartCommand +from superset.commands.chart.delete import DeleteChartCommand +from superset.commands.chart.exceptions import ( + ChartCreateFailedError, + ChartDeleteFailedError, + ChartForbiddenError, + ChartInvalidError, + ChartNotFoundError, + ChartUpdateFailedError, + DashboardsForbiddenError, +) +from superset.commands.chart.export import ExportChartsCommand +from superset.commands.chart.importers.dispatcher import ImportChartsCommand +from 
superset.commands.chart.update import UpdateChartCommand +from superset.commands.chart.warm_up_cache import ChartWarmUpCacheCommand from superset.commands.exceptions import CommandException from superset.commands.importers.exceptions import ( IncorrectFormatError, @@ -273,7 +273,7 @@ class ChartRestApi(BaseSupersetModelRestApi): "created_by": RelatedFieldFilter("first_name", FilterRelatedOwners), } - allowed_rel_fields = {"owners", "created_by"} + allowed_rel_fields = {"owners", "created_by", "changed_by"} @expose("/", methods=("POST",)) @protect() diff --git a/superset/charts/data/api.py b/superset/charts/data/api.py index c8ed840c7c..a62e6a2407 100644 --- a/superset/charts/data/api.py +++ b/superset/charts/data/api.py @@ -30,19 +30,19 @@ from marshmallow import ValidationError from superset import is_feature_enabled, security_manager from superset.async_events.async_query_manager import AsyncQueryTokenException from superset.charts.api import ChartRestApi -from superset.charts.commands.exceptions import ( - ChartDataCacheLoadError, - ChartDataQueryFailedError, -) -from superset.charts.data.commands.create_async_job_command import ( - CreateAsyncChartDataJobCommand, -) -from superset.charts.data.commands.get_data_command import ChartDataCommand from superset.charts.data.query_context_cache_loader import QueryContextCacheLoader from superset.charts.post_processing import apply_post_process from superset.charts.schemas import ChartDataQueryContextSchema +from superset.commands.chart.data.create_async_job_command import ( + CreateAsyncChartDataJobCommand, +) +from superset.commands.chart.data.get_data_command import ChartDataCommand +from superset.commands.chart.exceptions import ( + ChartDataCacheLoadError, + ChartDataQueryFailedError, +) from superset.common.chart_data import ChartDataResultFormat, ChartDataResultType -from superset.connectors.base.models import BaseDatasource +from superset.connectors.sqla.models import BaseDatasource from superset.daos.exceptions import DatasourceNotFound from superset.exceptions import QueryObjectValidationError from superset.extensions import event_logger diff --git a/superset/charts/data/query_context_cache_loader.py b/superset/charts/data/query_context_cache_loader.py index 97fa733a3e..1bdabd33f4 100644 --- a/superset/charts/data/query_context_cache_loader.py +++ b/superset/charts/data/query_context_cache_loader.py @@ -17,7 +17,7 @@ from typing import Any from superset import cache -from superset.charts.commands.exceptions import ChartDataCacheLoadError +from superset.commands.chart.exceptions import ChartDataCacheLoadError class QueryContextCacheLoader: # pylint: disable=too-few-public-methods diff --git a/superset/charts/post_processing.py b/superset/charts/post_processing.py index 939714642f..ebcae32f8f 100644 --- a/superset/charts/post_processing.py +++ b/superset/charts/post_processing.py @@ -40,7 +40,7 @@ from superset.utils.core import ( ) if TYPE_CHECKING: - from superset.connectors.base.models import BaseDatasource + from superset.connectors.sqla.models import BaseDatasource from superset.models.sql_lab import Query diff --git a/superset/charts/schemas.py b/superset/charts/schemas.py index 0ad68ceb49..48e0cbb318 100644 --- a/superset/charts/schemas.py +++ b/superset/charts/schemas.py @@ -27,7 +27,7 @@ from marshmallow.validate import Length, Range from superset import app from superset.common.chart_data import ChartDataResultFormat, ChartDataResultType from superset.db_engine_specs.base import builtin_time_grains -from superset.tags.models 
import TagTypes +from superset.tags.models import TagType from superset.utils import pandas_postprocessing, schema as utils from superset.utils.core import ( AnnotationType, @@ -146,7 +146,7 @@ openapi_spec_methods_override = { class TagSchema(Schema): id = fields.Int() name = fields.String() - type = fields.Enum(TagTypes, by_value=True) + type = fields.Enum(TagType, by_value=True) class ChartEntityResponseSchema(Schema): diff --git a/superset/cli/importexport.py b/superset/cli/importexport.py index 5dde06d01a..0d76e535e8 100755 --- a/superset/cli/importexport.py +++ b/superset/cli/importexport.py @@ -72,7 +72,7 @@ if feature_flags.get("VERSIONED_EXPORT"): def export_dashboards(dashboard_file: Optional[str] = None) -> None: """Export dashboards to ZIP file""" # pylint: disable=import-outside-toplevel - from superset.dashboards.commands.export import ExportDashboardsCommand + from superset.commands.dashboard.export import ExportDashboardsCommand from superset.models.dashboard import Dashboard g.user = security_manager.find_user(username="admin") @@ -106,8 +106,8 @@ if feature_flags.get("VERSIONED_EXPORT"): def export_datasources(datasource_file: Optional[str] = None) -> None: """Export datasources to ZIP file""" # pylint: disable=import-outside-toplevel + from superset.commands.dataset.export import ExportDatasetsCommand from superset.connectors.sqla.models import SqlaTable - from superset.datasets.commands.export import ExportDatasetsCommand g.user = security_manager.find_user(username="admin") @@ -144,10 +144,10 @@ if feature_flags.get("VERSIONED_EXPORT"): def import_dashboards(path: str, username: Optional[str]) -> None: """Import dashboards from ZIP file""" # pylint: disable=import-outside-toplevel - from superset.commands.importers.v1.utils import get_contents_from_bundle - from superset.dashboards.commands.importers.dispatcher import ( + from superset.commands.dashboard.importers.dispatcher import ( ImportDashboardsCommand, ) + from superset.commands.importers.v1.utils import get_contents_from_bundle if username is not None: g.user = security_manager.find_user(username=username) @@ -176,10 +176,8 @@ if feature_flags.get("VERSIONED_EXPORT"): def import_datasources(path: str) -> None: """Import datasources from ZIP file""" # pylint: disable=import-outside-toplevel + from superset.commands.dataset.importers.dispatcher import ImportDatasetsCommand from superset.commands.importers.v1.utils import get_contents_from_bundle - from superset.datasets.commands.importers.dispatcher import ( - ImportDatasetsCommand, - ) if is_zipfile(path): with ZipFile(path) as bundle: @@ -304,7 +302,7 @@ else: def import_dashboards(path: str, recursive: bool, username: str) -> None: """Import dashboards from JSON file""" # pylint: disable=import-outside-toplevel - from superset.dashboards.commands.importers.v0 import ImportDashboardsCommand + from superset.commands.dashboard.importers.v0 import ImportDashboardsCommand path_object = Path(path) files: list[Path] = [] @@ -353,7 +351,7 @@ else: def import_datasources(path: str, sync: str, recursive: bool) -> None: """Import datasources from YAML""" # pylint: disable=import-outside-toplevel - from superset.datasets.commands.importers.v0 import ImportDatasetsCommand + from superset.commands.dataset.importers.v0 import ImportDatasetsCommand sync_array = sync.split(",") sync_columns = "columns" in sync_array diff --git a/superset/cli/viz_migrations.py b/superset/cli/viz_migrations.py index 9e69135aea..f24dd8f444 100644 --- a/superset/cli/viz_migrations.py +++ 
b/superset/cli/viz_migrations.py @@ -24,11 +24,13 @@ from superset import db class VizType(str, Enum): - TREEMAP = "treemap" - DUAL_LINE = "dual_line" AREA = "area" + BUBBLE = "bubble" + DUAL_LINE = "dual_line" + LINE = "line" PIVOT_TABLE = "pivot_table" SUNBURST = "sunburst" + TREEMAP = "treemap" @click.group() @@ -75,18 +77,22 @@ def migrate(viz_type: VizType, is_downgrade: bool = False) -> None: # pylint: disable=import-outside-toplevel from superset.migrations.shared.migrate_viz.processors import ( MigrateAreaChart, + MigrateBubbleChart, MigrateDualLine, + MigrateLineChart, MigratePivotTable, MigrateSunburst, MigrateTreeMap, ) migrations = { - VizType.TREEMAP: MigrateTreeMap, - VizType.DUAL_LINE: MigrateDualLine, VizType.AREA: MigrateAreaChart, + VizType.BUBBLE: MigrateBubbleChart, + VizType.DUAL_LINE: MigrateDualLine, + VizType.LINE: MigrateLineChart, VizType.PIVOT_TABLE: MigratePivotTable, VizType.SUNBURST: MigrateSunburst, + VizType.TREEMAP: MigrateTreeMap, } if is_downgrade: migrations[viz_type].downgrade(db.session) diff --git a/superset/annotation_layers/annotations/commands/__init__.py b/superset/commands/annotation_layer/__init__.py similarity index 100% rename from superset/annotation_layers/annotations/commands/__init__.py rename to superset/commands/annotation_layer/__init__.py diff --git a/superset/annotation_layers/commands/__init__.py b/superset/commands/annotation_layer/annotation/__init__.py similarity index 100% rename from superset/annotation_layers/commands/__init__.py rename to superset/commands/annotation_layer/annotation/__init__.py diff --git a/superset/annotation_layers/annotations/commands/create.py b/superset/commands/annotation_layer/annotation/create.py similarity index 92% rename from superset/annotation_layers/annotations/commands/create.py rename to superset/commands/annotation_layer/annotation/create.py index 25317762da..feed6162ca 100644 --- a/superset/annotation_layers/annotations/commands/create.py +++ b/superset/commands/annotation_layer/annotation/create.py @@ -21,15 +21,15 @@ from typing import Any, Optional from flask_appbuilder.models.sqla import Model from marshmallow import ValidationError -from superset.annotation_layers.annotations.commands.exceptions import ( +from superset.commands.annotation_layer.annotation.exceptions import ( AnnotationCreateFailedError, AnnotationDatesValidationError, AnnotationInvalidError, AnnotationUniquenessValidationError, ) -from superset.annotation_layers.commands.exceptions import AnnotationLayerNotFoundError +from superset.commands.annotation_layer.exceptions import AnnotationLayerNotFoundError from superset.commands.base import BaseCommand -from superset.daos.annotation import AnnotationDAO, AnnotationLayerDAO +from superset.daos.annotation_layer import AnnotationDAO, AnnotationLayerDAO from superset.daos.exceptions import DAOCreateFailedError logger = logging.getLogger(__name__) diff --git a/superset/annotation_layers/annotations/commands/delete.py b/superset/commands/annotation_layer/annotation/delete.py similarity index 93% rename from superset/annotation_layers/annotations/commands/delete.py rename to superset/commands/annotation_layer/annotation/delete.py index 2850f8cb96..3f48ae2ceb 100644 --- a/superset/annotation_layers/annotations/commands/delete.py +++ b/superset/commands/annotation_layer/annotation/delete.py @@ -17,12 +17,12 @@ import logging from typing import Optional -from superset.annotation_layers.annotations.commands.exceptions import ( +from 
diff --git a/superset/annotation_layers/annotations/commands/__init__.py b/superset/commands/annotation_layer/__init__.py
similarity index 100%
rename from superset/annotation_layers/annotations/commands/__init__.py
rename to superset/commands/annotation_layer/__init__.py
diff --git a/superset/annotation_layers/commands/__init__.py b/superset/commands/annotation_layer/annotation/__init__.py
similarity index 100%
rename from superset/annotation_layers/commands/__init__.py
rename to superset/commands/annotation_layer/annotation/__init__.py
diff --git a/superset/annotation_layers/annotations/commands/create.py b/superset/commands/annotation_layer/annotation/create.py
similarity index 92%
rename from superset/annotation_layers/annotations/commands/create.py
rename to superset/commands/annotation_layer/annotation/create.py
index 25317762da..feed6162ca 100644
--- a/superset/annotation_layers/annotations/commands/create.py
+++ b/superset/commands/annotation_layer/annotation/create.py
@@ -21,15 +21,15 @@ from typing import Any, Optional
 from flask_appbuilder.models.sqla import Model
 from marshmallow import ValidationError

-from superset.annotation_layers.annotations.commands.exceptions import (
+from superset.commands.annotation_layer.annotation.exceptions import (
     AnnotationCreateFailedError,
     AnnotationDatesValidationError,
     AnnotationInvalidError,
     AnnotationUniquenessValidationError,
 )
-from superset.annotation_layers.commands.exceptions import AnnotationLayerNotFoundError
+from superset.commands.annotation_layer.exceptions import AnnotationLayerNotFoundError
 from superset.commands.base import BaseCommand
-from superset.daos.annotation import AnnotationDAO, AnnotationLayerDAO
+from superset.daos.annotation_layer import AnnotationDAO, AnnotationLayerDAO
 from superset.daos.exceptions import DAOCreateFailedError

 logger = logging.getLogger(__name__)
diff --git a/superset/annotation_layers/annotations/commands/delete.py b/superset/commands/annotation_layer/annotation/delete.py
similarity index 93%
rename from superset/annotation_layers/annotations/commands/delete.py
rename to superset/commands/annotation_layer/annotation/delete.py
index 2850f8cb96..3f48ae2ceb 100644
--- a/superset/annotation_layers/annotations/commands/delete.py
+++ b/superset/commands/annotation_layer/annotation/delete.py
@@ -17,12 +17,12 @@
 import logging
 from typing import Optional

-from superset.annotation_layers.annotations.commands.exceptions import (
+from superset.commands.annotation_layer.annotation.exceptions import (
     AnnotationDeleteFailedError,
     AnnotationNotFoundError,
 )
 from superset.commands.base import BaseCommand
-from superset.daos.annotation import AnnotationDAO
+from superset.daos.annotation_layer import AnnotationDAO
 from superset.daos.exceptions import DAODeleteFailedError
 from superset.models.annotations import Annotation
diff --git a/superset/annotation_layers/annotations/commands/exceptions.py b/superset/commands/annotation_layer/annotation/exceptions.py
similarity index 100%
rename from superset/annotation_layers/annotations/commands/exceptions.py
rename to superset/commands/annotation_layer/annotation/exceptions.py
diff --git a/superset/annotation_layers/annotations/commands/update.py b/superset/commands/annotation_layer/annotation/update.py
similarity index 93%
rename from superset/annotation_layers/annotations/commands/update.py
rename to superset/commands/annotation_layer/annotation/update.py
index 76287d24a9..9ba07fdcd6 100644
--- a/superset/annotation_layers/annotations/commands/update.py
+++ b/superset/commands/annotation_layer/annotation/update.py
@@ -21,16 +21,16 @@ from typing import Any, Optional
 from flask_appbuilder.models.sqla import Model
 from marshmallow import ValidationError

-from superset.annotation_layers.annotations.commands.exceptions import (
+from superset.commands.annotation_layer.annotation.exceptions import (
     AnnotationDatesValidationError,
     AnnotationInvalidError,
     AnnotationNotFoundError,
     AnnotationUniquenessValidationError,
     AnnotationUpdateFailedError,
 )
-from superset.annotation_layers.commands.exceptions import AnnotationLayerNotFoundError
+from superset.commands.annotation_layer.exceptions import AnnotationLayerNotFoundError
 from superset.commands.base import BaseCommand
-from superset.daos.annotation import AnnotationDAO, AnnotationLayerDAO
+from superset.daos.annotation_layer import AnnotationDAO, AnnotationLayerDAO
 from superset.daos.exceptions import DAOUpdateFailedError
 from superset.models.annotations import Annotation
diff --git a/superset/annotation_layers/commands/create.py b/superset/commands/annotation_layer/create.py
similarity index 94%
rename from superset/annotation_layers/commands/create.py
rename to superset/commands/annotation_layer/create.py
index 39ce752d2a..6b87ad5703 100644
--- a/superset/annotation_layers/commands/create.py
+++ b/superset/commands/annotation_layer/create.py
@@ -20,13 +20,13 @@ from typing import Any
 from flask_appbuilder.models.sqla import Model
 from marshmallow import ValidationError

-from superset.annotation_layers.commands.exceptions import (
+from superset.commands.annotation_layer.exceptions import (
     AnnotationLayerCreateFailedError,
     AnnotationLayerInvalidError,
     AnnotationLayerNameUniquenessValidationError,
 )
 from superset.commands.base import BaseCommand
-from superset.daos.annotation import AnnotationLayerDAO
+from superset.daos.annotation_layer import AnnotationLayerDAO
 from superset.daos.exceptions import DAOCreateFailedError

 logger = logging.getLogger(__name__)
diff --git a/superset/annotation_layers/commands/delete.py b/superset/commands/annotation_layer/delete.py
similarity index 94%
rename from superset/annotation_layers/commands/delete.py
rename to superset/commands/annotation_layer/delete.py
index 41c727054b..a75ee42b77 100644
--- a/superset/annotation_layers/commands/delete.py
+++ b/superset/commands/annotation_layer/delete.py
@@ -17,13 +17,13 @@
 import logging
 from typing import Optional

-from superset.annotation_layers.commands.exceptions import (
+from superset.commands.annotation_layer.exceptions import (
     AnnotationLayerDeleteFailedError,
     AnnotationLayerDeleteIntegrityError,
     AnnotationLayerNotFoundError,
 )
 from superset.commands.base import BaseCommand
-from superset.daos.annotation import AnnotationLayerDAO
+from superset.daos.annotation_layer import AnnotationLayerDAO
 from superset.daos.exceptions import DAODeleteFailedError
 from superset.models.annotations import AnnotationLayer
diff --git a/superset/annotation_layers/commands/exceptions.py b/superset/commands/annotation_layer/exceptions.py
similarity index 100%
rename from superset/annotation_layers/commands/exceptions.py
rename to superset/commands/annotation_layer/exceptions.py
diff --git a/superset/annotation_layers/commands/update.py b/superset/commands/annotation_layer/update.py
similarity index 95%
rename from superset/annotation_layers/commands/update.py
rename to superset/commands/annotation_layer/update.py
index e7f6963e82..d15440882b 100644
--- a/superset/annotation_layers/commands/update.py
+++ b/superset/commands/annotation_layer/update.py
@@ -20,14 +20,14 @@ from typing import Any, Optional
 from flask_appbuilder.models.sqla import Model
 from marshmallow import ValidationError

-from superset.annotation_layers.commands.exceptions import (
+from superset.commands.annotation_layer.exceptions import (
     AnnotationLayerInvalidError,
     AnnotationLayerNameUniquenessValidationError,
     AnnotationLayerNotFoundError,
     AnnotationLayerUpdateFailedError,
 )
 from superset.commands.base import BaseCommand
-from superset.daos.annotation import AnnotationLayerDAO
+from superset.daos.annotation_layer import AnnotationLayerDAO
 from superset.daos.exceptions import DAOUpdateFailedError
 from superset.models.annotations import AnnotationLayer
diff --git a/superset/charts/commands/__init__.py b/superset/commands/chart/__init__.py
similarity index 100%
rename from superset/charts/commands/__init__.py
rename to superset/commands/chart/__init__.py
diff --git a/superset/charts/commands/create.py b/superset/commands/chart/create.py
similarity index 98%
rename from superset/charts/commands/create.py
rename to superset/commands/chart/create.py
index 876073e335..2b251029c3 100644
--- a/superset/charts/commands/create.py
+++ b/superset/commands/chart/create.py
@@ -23,13 +23,13 @@ from flask_appbuilder.models.sqla import Model
 from marshmallow import ValidationError

 from superset import security_manager
-from superset.charts.commands.exceptions import (
+from superset.commands.base import BaseCommand, CreateMixin
+from superset.commands.chart.exceptions import (
     ChartCreateFailedError,
     ChartInvalidError,
     DashboardsForbiddenError,
     DashboardsNotFoundValidationError,
 )
-from superset.commands.base import BaseCommand, CreateMixin
 from superset.commands.utils import get_datasource_by_id
 from superset.daos.chart import ChartDAO
 from superset.daos.dashboard import DashboardDAO
diff --git a/superset/charts/commands/importers/__init__.py b/superset/commands/chart/data/__init__.py
similarity index 100%
rename from superset/charts/commands/importers/__init__.py
rename to superset/commands/chart/data/__init__.py
diff --git a/superset/charts/data/commands/create_async_job_command.py b/superset/commands/chart/data/create_async_job_command.py
similarity index 100%
rename from superset/charts/data/commands/create_async_job_command.py
rename to superset/commands/chart/data/create_async_job_command.py
diff --git a/superset/charts/data/commands/get_data_command.py b/superset/commands/chart/data/get_data_command.py
similarity index 97%
rename from superset/charts/data/commands/get_data_command.py
rename to superset/commands/chart/data/get_data_command.py
index c791ace9de..971c343cba 100644
--- a/superset/charts/data/commands/get_data_command.py
+++ b/superset/commands/chart/data/get_data_command.py
@@ -19,11 +19,11 @@ from typing import Any
 from flask_babel import gettext as _

-from superset.charts.commands.exceptions import (
+from superset.commands.base import BaseCommand
+from superset.commands.chart.exceptions import (
     ChartDataCacheLoadError,
     ChartDataQueryFailedError,
 )
-from superset.commands.base import BaseCommand
 from superset.common.query_context import QueryContext
 from superset.exceptions import CacheLoadError
diff --git a/superset/charts/commands/delete.py b/superset/commands/chart/delete.py
similarity index 98%
rename from superset/charts/commands/delete.py
rename to superset/commands/chart/delete.py
index a31d22be3e..ee635f04af 100644
--- a/superset/charts/commands/delete.py
+++ b/superset/commands/chart/delete.py
@@ -20,13 +20,13 @@ from typing import Optional
 from flask_babel import lazy_gettext as _

 from superset import security_manager
-from superset.charts.commands.exceptions import (
+from superset.commands.base import BaseCommand
+from superset.commands.chart.exceptions import (
     ChartDeleteFailedError,
     ChartDeleteFailedReportsExistError,
     ChartForbiddenError,
     ChartNotFoundError,
 )
-from superset.commands.base import BaseCommand
 from superset.daos.chart import ChartDAO
 from superset.daos.exceptions import DAODeleteFailedError
 from superset.daos.report import ReportScheduleDAO
diff --git a/superset/charts/commands/exceptions.py b/superset/commands/chart/exceptions.py
similarity index 100%
rename from superset/charts/commands/exceptions.py
rename to superset/commands/chart/exceptions.py
diff --git a/superset/charts/commands/export.py b/superset/commands/chart/export.py
similarity index 95%
rename from superset/charts/commands/export.py
rename to superset/commands/chart/export.py
index c942aa96c9..fcb721c703 100644
--- a/superset/charts/commands/export.py
+++ b/superset/commands/chart/export.py
@@ -22,9 +22,9 @@ from collections.abc import Iterator

 import yaml

-from superset.charts.commands.exceptions import ChartNotFoundError
+from superset.commands.chart.exceptions import ChartNotFoundError
 from superset.daos.chart import ChartDAO
-from superset.datasets.commands.export import ExportDatasetsCommand
+from superset.commands.dataset.export import ExportDatasetsCommand
 from superset.commands.export.models import ExportModelsCommand
 from superset.models.slice import Slice
 from superset.utils.dict_import_export import EXPORT_VERSION
diff --git a/superset/charts/data/commands/__init__.py b/superset/commands/chart/importers/__init__.py
similarity index 100%
rename from superset/charts/data/commands/__init__.py
rename to superset/commands/chart/importers/__init__.py
diff --git a/superset/charts/commands/importers/dispatcher.py b/superset/commands/chart/importers/dispatcher.py
similarity index 98%
rename from superset/charts/commands/importers/dispatcher.py
rename to superset/commands/chart/importers/dispatcher.py
index fb5007a50c..6d2d31ccf4 100644
--- a/superset/charts/commands/importers/dispatcher.py
+++ b/superset/commands/chart/importers/dispatcher.py
@@ -20,8 +20,8 @@ from typing import Any
 from marshmallow.exceptions import ValidationError

-from superset.charts.commands.importers import v1
 from superset.commands.base import BaseCommand
+from superset.commands.chart.importers import v1
 from superset.commands.exceptions import CommandInvalidError
 from superset.commands.importers.exceptions import IncorrectVersionError
diff --git a/superset/charts/commands/importers/v1/__init__.py b/superset/commands/chart/importers/v1/__init__.py
similarity index 93%
rename from superset/charts/commands/importers/v1/__init__.py
rename to superset/commands/chart/importers/v1/__init__.py
index 043018fa3b..783f300c07 100644
--- a/superset/charts/commands/importers/v1/__init__.py
+++ b/superset/commands/chart/importers/v1/__init__.py
@@ -20,15 +20,15 @@ from typing import Any
 from marshmallow import Schema
 from sqlalchemy.orm import Session

-from superset.charts.commands.exceptions import ChartImportError
-from superset.charts.commands.importers.v1.utils import import_chart
 from superset.charts.schemas import ImportV1ChartSchema
+from superset.commands.chart.exceptions import ChartImportError
+from superset.commands.chart.importers.v1.utils import import_chart
+from superset.commands.database.importers.v1.utils import import_database
+from superset.commands.dataset.importers.v1.utils import import_dataset
 from superset.commands.importers.v1 import ImportModelsCommand
 from superset.connectors.sqla.models import SqlaTable
 from superset.daos.chart import ChartDAO
-from superset.databases.commands.importers.v1.utils import import_database
 from superset.databases.schemas import ImportV1DatabaseSchema
-from superset.datasets.commands.importers.v1.utils import import_dataset
 from superset.datasets.schemas import ImportV1DatasetSchema
diff --git a/superset/charts/commands/importers/v1/utils.py b/superset/commands/chart/importers/v1/utils.py
similarity index 98%
rename from superset/charts/commands/importers/v1/utils.py
rename to superset/commands/chart/importers/v1/utils.py
index 3ef0a2ed78..d27b631f97 100644
--- a/superset/charts/commands/importers/v1/utils.py
+++ b/superset/commands/chart/importers/v1/utils.py
@@ -75,7 +75,6 @@ def migrate_chart(config: dict[str, Any]) -> dict[str, Any]:
         if isclass(class_)
         and issubclass(class_, MigrateViz)
         and hasattr(class_, "source_viz_type")
-        and class_ != processors.MigrateAreaChart  # incomplete
     }

     output = copy.deepcopy(config)
diff --git a/superset/charts/commands/update.py b/superset/commands/chart/update.py
similarity index 98%
rename from superset/charts/commands/update.py
rename to superset/commands/chart/update.py
index 32fd49e7cd..40b36ebcc5 100644
--- a/superset/charts/commands/update.py
+++ b/superset/commands/chart/update.py
@@ -23,7 +23,8 @@ from flask_appbuilder.models.sqla import Model
 from marshmallow import ValidationError

 from superset import security_manager
-from superset.charts.commands.exceptions import (
+from superset.commands.base import BaseCommand, UpdateMixin
+from superset.commands.chart.exceptions import (
     ChartForbiddenError,
     ChartInvalidError,
     ChartNotFoundError,
@@ -31,7 +32,6 @@ from superset.charts.commands.exceptions import (
     DashboardsNotFoundValidationError,
     DatasourceTypeUpdateRequiredValidationError,
 )
-from superset.commands.base import BaseCommand, UpdateMixin
 from superset.commands.utils import get_datasource_by_id
 from superset.daos.chart import ChartDAO
 from superset.daos.dashboard import DashboardDAO
diff --git a/superset/charts/commands/warm_up_cache.py b/superset/commands/chart/warm_up_cache.py
similarity index 96%
rename from superset/charts/commands/warm_up_cache.py
rename to superset/commands/chart/warm_up_cache.py
index a684ee5e77..2e5c0ac3a3 100644
--- a/superset/charts/commands/warm_up_cache.py
+++ b/superset/commands/chart/warm_up_cache.py
@@ -21,12 +21,12 @@ from typing import Any, Optional, Union
 import simplejson as json
 from flask import g

-from superset.charts.commands.exceptions import (
+from superset.commands.base import BaseCommand
+from superset.commands.chart.data.get_data_command import ChartDataCommand
+from superset.commands.chart.exceptions import (
     ChartInvalidError,
     WarmUpCacheChartNotFoundError,
 )
-from superset.charts.data.commands.get_data_command import ChartDataCommand
-from superset.commands.base import BaseCommand
 from superset.extensions import db
 from superset.models.slice import Slice
 from superset.utils.core import error_msg_from_exception
diff --git a/superset/connectors/base/__init__.py b/superset/commands/css/__init__.py
similarity index 100%
rename from superset/connectors/base/__init__.py
rename to superset/commands/css/__init__.py
diff --git a/superset/css_templates/commands/delete.py b/superset/commands/css/delete.py
similarity index 97%
rename from superset/css_templates/commands/delete.py
rename to superset/commands/css/delete.py
index 123658cb45..b8362f6b46 100644
--- a/superset/css_templates/commands/delete.py
+++ b/superset/commands/css/delete.py
@@ -18,7 +18,7 @@ import logging
 from typing import Optional

 from superset.commands.base import BaseCommand
-from superset.css_templates.commands.exceptions import (
+from superset.commands.css.exceptions import (
     CssTemplateDeleteFailedError,
     CssTemplateNotFoundError,
 )
diff --git a/superset/css_templates/commands/exceptions.py b/superset/commands/css/exceptions.py
similarity index 100%
rename from superset/css_templates/commands/exceptions.py
rename to superset/commands/css/exceptions.py
diff --git a/superset/css_templates/commands/__init__.py b/superset/commands/dashboard/__init__.py
similarity index 100%
rename from superset/css_templates/commands/__init__.py
rename to superset/commands/dashboard/__init__.py
diff --git a/superset/dashboards/commands/create.py b/superset/commands/dashboard/create.py
similarity index 98%
rename from superset/dashboards/commands/create.py
rename to superset/commands/dashboard/create.py
index 4b5cd5fb04..1745391238 100644
--- a/superset/dashboards/commands/create.py
+++ b/superset/commands/dashboard/create.py
@@ -21,14 +21,14 @@ from flask_appbuilder.models.sqla import Model
 from marshmallow import ValidationError

 from superset.commands.base import BaseCommand, CreateMixin
-from superset.commands.utils import populate_roles
-from superset.daos.dashboard import DashboardDAO
-from superset.daos.exceptions import DAOCreateFailedError
-from superset.dashboards.commands.exceptions import (
+from superset.commands.dashboard.exceptions import (
     DashboardCreateFailedError,
     DashboardInvalidError,
     DashboardSlugExistsValidationError,
 )
+from superset.commands.utils import populate_roles
+from superset.daos.dashboard import DashboardDAO
+from superset.daos.exceptions import DAOCreateFailedError

 logger = logging.getLogger(__name__)
diff --git a/superset/dashboards/commands/delete.py b/superset/commands/dashboard/delete.py
similarity index 98%
rename from superset/dashboards/commands/delete.py
rename to superset/commands/dashboard/delete.py
index 7111758bb8..13ffcb443c 100644
--- a/superset/dashboards/commands/delete.py
+++ b/superset/commands/dashboard/delete.py
@@ -21,15 +21,15 @@ from flask_babel import lazy_gettext as _

 from superset import security_manager
 from superset.commands.base import BaseCommand
-from superset.daos.dashboard import DashboardDAO
-from superset.daos.exceptions import DAODeleteFailedError
-from superset.daos.report import ReportScheduleDAO
-from superset.dashboards.commands.exceptions import (
+from superset.commands.dashboard.exceptions import (
     DashboardDeleteFailedError,
     DashboardDeleteFailedReportsExistError,
     DashboardForbiddenError,
     DashboardNotFoundError,
 )
+from superset.daos.dashboard import DashboardDAO
+from superset.daos.exceptions import DAODeleteFailedError
+from superset.daos.report import ReportScheduleDAO
 from superset.exceptions import SupersetSecurityException
 from superset.models.dashboard import Dashboard
diff --git a/superset/dashboards/commands/__init__.py b/superset/commands/dashboard/embedded/__init__.py
similarity index 100%
rename from superset/dashboards/commands/__init__.py
rename to superset/commands/dashboard/embedded/__init__.py
diff --git a/superset/embedded_dashboard/commands/exceptions.py b/superset/commands/dashboard/embedded/exceptions.py
similarity index 100%
rename from superset/embedded_dashboard/commands/exceptions.py
rename to superset/commands/dashboard/embedded/exceptions.py
diff --git a/superset/dashboards/commands/exceptions.py b/superset/commands/dashboard/exceptions.py
similarity index 100%
rename from superset/dashboards/commands/exceptions.py
rename to superset/commands/dashboard/exceptions.py
diff --git a/superset/dashboards/commands/export.py b/superset/commands/dashboard/export.py
similarity index 95%
rename from superset/dashboards/commands/export.py
rename to superset/commands/dashboard/export.py
index 4e25e5c1fc..fd06c60fa0 100644
--- a/superset/dashboards/commands/export.py
+++ b/superset/commands/dashboard/export.py
@@ -25,12 +25,12 @@ from collections.abc import Iterator

 import yaml

-from superset.charts.commands.export import ExportChartsCommand
-from superset.dashboards.commands.exceptions import DashboardNotFoundError
-from superset.dashboards.commands.importers.v1.utils import find_chart_uuids
+from superset.commands.chart.export import ExportChartsCommand
+from superset.commands.dashboard.exceptions import DashboardNotFoundError
+from superset.commands.dashboard.importers.v1.utils import find_chart_uuids
 from superset.daos.dashboard import DashboardDAO
 from superset.commands.export.models import ExportModelsCommand
-from superset.datasets.commands.export import ExportDatasetsCommand
+from superset.commands.dataset.export import ExportDatasetsCommand
 from superset.daos.dataset import DatasetDAO
 from superset.models.dashboard import Dashboard
 from superset.models.slice import Slice
diff --git a/superset/dashboards/commands/importers/__init__.py b/superset/commands/dashboard/filter_set/__init__.py
similarity index 100%
rename from superset/dashboards/commands/importers/__init__.py
rename to superset/commands/dashboard/filter_set/__init__.py
diff --git a/superset/dashboards/filter_sets/commands/base.py b/superset/commands/dashboard/filter_set/base.py
similarity index 96%
rename from superset/dashboards/filter_sets/commands/base.py
rename to superset/commands/dashboard/filter_set/base.py
index 8c53e8a818..24abe2509a 100644
--- a/superset/dashboards/filter_sets/commands/base.py
+++ b/superset/commands/dashboard/filter_set/base.py
@@ -20,13 +20,13 @@ from typing import cast, Optional
 from flask_appbuilder.models.sqla import Model

 from superset import security_manager
-from superset.common.not_authorized_object import NotAuthorizedException
-from superset.daos.dashboard import DashboardDAO
-from superset.dashboards.commands.exceptions import DashboardNotFoundError
-from superset.dashboards.filter_sets.commands.exceptions import (
+from superset.commands.dashboard.exceptions import DashboardNotFoundError
+from superset.commands.dashboard.filter_set.exceptions import (
     FilterSetForbiddenError,
     FilterSetNotFoundError,
 )
+from superset.common.not_authorized_object import NotAuthorizedException
+from superset.daos.dashboard import DashboardDAO
 from superset.dashboards.filter_sets.consts import USER_OWNER_TYPE
 from superset.models.dashboard import Dashboard
 from superset.models.filter_set import FilterSet
diff --git a/superset/dashboards/filter_sets/commands/create.py b/superset/commands/dashboard/filter_set/create.py
similarity index 95%
rename from superset/dashboards/filter_sets/commands/create.py
rename to superset/commands/dashboard/filter_set/create.py
index d254e86d3c..49edb3172e 100644
--- a/superset/dashboards/filter_sets/commands/create.py
+++ b/superset/commands/dashboard/filter_set/create.py
@@ -20,13 +20,13 @@ from typing import Any
 from flask_appbuilder.models.sqla import Model

 from superset import security_manager
-from superset.daos.dashboard import FilterSetDAO
-from superset.dashboards.filter_sets.commands.base import BaseFilterSetCommand
-from superset.dashboards.filter_sets.commands.exceptions import (
+from superset.commands.dashboard.filter_set.base import BaseFilterSetCommand
+from superset.commands.dashboard.filter_set.exceptions import (
     DashboardIdInconsistencyError,
     FilterSetCreateFailedError,
     UserIsNotDashboardOwnerError,
 )
+from superset.daos.dashboard import FilterSetDAO
 from superset.dashboards.filter_sets.consts import (
     DASHBOARD_ID_FIELD,
     DASHBOARD_OWNER_TYPE,
diff --git a/superset/dashboards/filter_sets/commands/delete.py b/superset/commands/dashboard/filter_set/delete.py
similarity index 90%
rename from superset/dashboards/filter_sets/commands/delete.py
rename to superset/commands/dashboard/filter_set/delete.py
index edde4b9b45..ce2bf6fce4 100644
--- a/superset/dashboards/filter_sets/commands/delete.py
+++ b/superset/commands/dashboard/filter_set/delete.py
@@ -16,14 +16,14 @@
 # under the License.
 import logging

-from superset.daos.dashboard import FilterSetDAO
-from superset.daos.exceptions import DAODeleteFailedError
-from superset.dashboards.filter_sets.commands.base import BaseFilterSetCommand
-from superset.dashboards.filter_sets.commands.exceptions import (
+from superset.commands.dashboard.filter_set.base import BaseFilterSetCommand
+from superset.commands.dashboard.filter_set.exceptions import (
     FilterSetDeleteFailedError,
     FilterSetForbiddenError,
     FilterSetNotFoundError,
 )
+from superset.daos.dashboard import FilterSetDAO
+from superset.daos.exceptions import DAODeleteFailedError

 logger = logging.getLogger(__name__)
@@ -38,7 +38,7 @@ class DeleteFilterSetCommand(BaseFilterSetCommand):
         assert self._filter_set

         try:
-            FilterSetDAO.delete(self._filter_set)
+            FilterSetDAO.delete([self._filter_set])
         except DAODeleteFailedError as err:
             raise FilterSetDeleteFailedError(str(self._filter_set_id), "") from err
diff --git a/superset/dashboards/filter_sets/commands/exceptions.py b/superset/commands/dashboard/filter_set/exceptions.py
similarity index 100%
rename from superset/dashboards/filter_sets/commands/exceptions.py
rename to superset/commands/dashboard/filter_set/exceptions.py
diff --git a/superset/dashboards/filter_sets/commands/update.py b/superset/commands/dashboard/filter_set/update.py
similarity index 91%
rename from superset/dashboards/filter_sets/commands/update.py
rename to superset/commands/dashboard/filter_set/update.py
index a63c8d46f2..5ce9f1fea6 100644
--- a/superset/dashboards/filter_sets/commands/update.py
+++ b/superset/commands/dashboard/filter_set/update.py
@@ -19,12 +19,10 @@ from typing import Any

 from flask_appbuilder.models.sqla import Model

+from superset.commands.dashboard.filter_set.base import BaseFilterSetCommand
+from superset.commands.dashboard.filter_set.exceptions import FilterSetUpdateFailedError
 from superset.daos.dashboard import FilterSetDAO
 from superset.daos.exceptions import DAOUpdateFailedError
-from superset.dashboards.filter_sets.commands.base import BaseFilterSetCommand
-from superset.dashboards.filter_sets.commands.exceptions import (
-    FilterSetUpdateFailedError,
-)
 from superset.dashboards.filter_sets.consts import OWNER_ID_FIELD, OWNER_TYPE_FIELD

 logger = logging.getLogger(__name__)
diff --git a/superset/dashboards/filter_sets/commands/__init__.py b/superset/commands/dashboard/filter_state/__init__.py
similarity index 100%
rename from superset/dashboards/filter_sets/commands/__init__.py
rename to superset/commands/dashboard/filter_state/__init__.py
diff --git a/superset/dashboards/filter_state/commands/create.py b/superset/commands/dashboard/filter_state/create.py
similarity index 87%
rename from superset/dashboards/filter_state/commands/create.py
rename to superset/commands/dashboard/filter_state/create.py
index 48b5e4f5c2..1f105ac5c2 100644
--- a/superset/dashboards/filter_state/commands/create.py
+++ b/superset/commands/dashboard/filter_state/create.py
@@ -18,12 +18,12 @@ from typing import cast

 from flask import session

-from superset.dashboards.filter_state.commands.utils import check_access
+from superset.commands.dashboard.filter_state.utils import check_access
+from superset.commands.temporary_cache.create import CreateTemporaryCacheCommand
+from superset.commands.temporary_cache.entry import Entry
+from superset.commands.temporary_cache.parameters import CommandParameters
 from superset.extensions import cache_manager
 from superset.key_value.utils import random_key
-from superset.temporary_cache.commands.create import CreateTemporaryCacheCommand
-from superset.temporary_cache.commands.entry import Entry
-from superset.temporary_cache.commands.parameters import CommandParameters
 from superset.temporary_cache.utils import cache_key
 from superset.utils.core import get_user_id
diff --git a/superset/dashboards/filter_state/commands/delete.py b/superset/commands/dashboard/filter_state/delete.py
similarity index 84%
rename from superset/dashboards/filter_state/commands/delete.py
rename to superset/commands/dashboard/filter_state/delete.py
index 6086388a8c..8be7f44d98 100644
--- a/superset/dashboards/filter_state/commands/delete.py
+++ b/superset/commands/dashboard/filter_state/delete.py
@@ -16,12 +16,12 @@
 # under the License.
 from flask import session

-from superset.dashboards.filter_state.commands.utils import check_access
+from superset.commands.dashboard.filter_state.utils import check_access
+from superset.commands.temporary_cache.delete import DeleteTemporaryCacheCommand
+from superset.commands.temporary_cache.entry import Entry
+from superset.commands.temporary_cache.exceptions import TemporaryCacheAccessDeniedError
+from superset.commands.temporary_cache.parameters import CommandParameters
 from superset.extensions import cache_manager
-from superset.temporary_cache.commands.delete import DeleteTemporaryCacheCommand
-from superset.temporary_cache.commands.entry import Entry
-from superset.temporary_cache.commands.exceptions import TemporaryCacheAccessDeniedError
-from superset.temporary_cache.commands.parameters import CommandParameters
 from superset.temporary_cache.utils import cache_key
 from superset.utils.core import get_user_id
diff --git a/superset/dashboards/filter_state/commands/get.py b/superset/commands/dashboard/filter_state/get.py
similarity index 89%
rename from superset/dashboards/filter_state/commands/get.py
rename to superset/commands/dashboard/filter_state/get.py
index ca7ffa9879..29104b5ee2 100644
--- a/superset/dashboards/filter_state/commands/get.py
+++ b/superset/commands/dashboard/filter_state/get.py
@@ -18,10 +18,10 @@ from typing import Optional

 from flask import current_app as app

-from superset.dashboards.filter_state.commands.utils import check_access
+from superset.commands.dashboard.filter_state.utils import check_access
+from superset.commands.temporary_cache.get import GetTemporaryCacheCommand
+from superset.commands.temporary_cache.parameters import CommandParameters
 from superset.extensions import cache_manager
-from superset.temporary_cache.commands.get import GetTemporaryCacheCommand
-from superset.temporary_cache.commands.parameters import CommandParameters
 from superset.temporary_cache.utils import cache_key
diff --git a/superset/dashboards/filter_state/commands/update.py b/superset/commands/dashboard/filter_state/update.py
similarity index 87%
rename from superset/dashboards/filter_state/commands/update.py
rename to superset/commands/dashboard/filter_state/update.py
index c1dc529ccf..80b8c26ede 100644
--- a/superset/dashboards/filter_state/commands/update.py
+++ b/superset/commands/dashboard/filter_state/update.py
@@ -18,13 +18,13 @@ from typing import cast, Optional

 from flask import session

-from superset.dashboards.filter_state.commands.utils import check_access
+from superset.commands.dashboard.filter_state.utils import check_access
+from superset.commands.temporary_cache.entry import Entry
+from superset.commands.temporary_cache.exceptions import TemporaryCacheAccessDeniedError
+from superset.commands.temporary_cache.parameters import CommandParameters
+from superset.commands.temporary_cache.update import UpdateTemporaryCacheCommand
 from superset.extensions import cache_manager
 from superset.key_value.utils import random_key
-from superset.temporary_cache.commands.entry import Entry
-from superset.temporary_cache.commands.exceptions import TemporaryCacheAccessDeniedError
-from superset.temporary_cache.commands.parameters import CommandParameters
-from superset.temporary_cache.commands.update import UpdateTemporaryCacheCommand
 from superset.temporary_cache.utils import cache_key
 from superset.utils.core import get_user_id
diff --git a/superset/dashboards/filter_state/commands/utils.py b/superset/commands/dashboard/filter_state/utils.py
similarity index 91%
rename from superset/dashboards/filter_state/commands/utils.py
rename to superset/commands/dashboard/filter_state/utils.py
index 7e52518249..14f7eb7893 100644
--- a/superset/dashboards/filter_state/commands/utils.py
+++ b/superset/commands/dashboard/filter_state/utils.py
@@ -15,15 +15,15 @@
 # specific language governing permissions and limitations
 # under the License.

-from superset.daos.dashboard import DashboardDAO
-from superset.dashboards.commands.exceptions import (
+from superset.commands.dashboard.exceptions import (
     DashboardAccessDeniedError,
     DashboardNotFoundError,
 )
-from superset.temporary_cache.commands.exceptions import (
+from superset.commands.temporary_cache.exceptions import (
     TemporaryCacheAccessDeniedError,
     TemporaryCacheResourceNotFoundError,
 )
+from superset.daos.dashboard import DashboardDAO


 def check_access(resource_id: int) -> None:
diff --git a/superset/dashboards/filter_state/commands/__init__.py b/superset/commands/dashboard/importers/__init__.py
similarity index 100%
rename from superset/dashboards/filter_state/commands/__init__.py
rename to superset/commands/dashboard/importers/__init__.py
diff --git a/superset/dashboards/commands/importers/dispatcher.py b/superset/commands/dashboard/importers/dispatcher.py
similarity index 97%
rename from superset/dashboards/commands/importers/dispatcher.py
rename to superset/commands/dashboard/importers/dispatcher.py
index d5323b4fe4..061558cce9 100644
--- a/superset/dashboards/commands/importers/dispatcher.py
+++ b/superset/commands/dashboard/importers/dispatcher.py
@@ -21,9 +21,9 @@ from typing import Any
 from marshmallow.exceptions import ValidationError

 from superset.commands.base import BaseCommand
+from superset.commands.dashboard.importers import v0, v1
 from superset.commands.exceptions import CommandInvalidError
 from superset.commands.importers.exceptions import IncorrectVersionError
-from superset.dashboards.commands.importers import v0, v1

 logger = logging.getLogger(__name__)
diff --git a/superset/dashboards/commands/importers/v0.py b/superset/commands/dashboard/importers/v0.py
similarity index 99%
rename from superset/dashboards/commands/importers/v0.py
rename to superset/commands/dashboard/importers/v0.py
index 012dbbc5c9..4c2a18e5cc 100644
--- a/superset/dashboards/commands/importers/v0.py
+++ b/superset/commands/dashboard/importers/v0.py
@@ -26,8 +26,8 @@ from sqlalchemy.orm import make_transient, Session

 from superset import db
 from superset.commands.base import BaseCommand
+from superset.commands.dataset.importers.v0 import import_dataset
 from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn
-from superset.datasets.commands.importers.v0 import import_dataset
 from superset.exceptions import DashboardImportException
 from superset.models.dashboard import Dashboard
 from superset.models.slice import Slice
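The dispatcher modules moved above (for dashboards here, and analogously for charts, databases, and datasets) import the versioned importers (v0, v1) plus IncorrectVersionError, which suggests they try each version against an uploaded bundle until one accepts it. A rough, self-contained sketch of that assumed control flow; the names below are local stand-ins, not Superset's actual implementation:

class IncorrectVersionError(Exception):
    """Local stand-in for superset.commands.importers.exceptions.IncorrectVersionError."""


def dispatch(contents: dict, importers: list) -> None:
    # Try each versioned importer in turn; IncorrectVersionError is taken to
    # mean "this bundle is not mine", so we fall through to the next version.
    for importer in importers:
        try:
            importer(contents).run()
            return
        except IncorrectVersionError:
            continue
    raise ValueError("no importer accepted the bundle")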
diff --git a/superset/dashboards/commands/importers/v1/__init__.py b/superset/commands/dashboard/importers/v1/__init__.py
similarity index 94%
rename from superset/dashboards/commands/importers/v1/__init__.py
rename to superset/commands/dashboard/importers/v1/__init__.py
index 30e63da4e4..2717650e9e 100644
--- a/superset/dashboards/commands/importers/v1/__init__.py
+++ b/superset/commands/dashboard/importers/v1/__init__.py
@@ -21,21 +21,21 @@ from marshmallow import Schema
 from sqlalchemy.orm import Session
 from sqlalchemy.sql import select

-from superset.charts.commands.importers.v1.utils import import_chart
 from superset.charts.schemas import ImportV1ChartSchema
-from superset.commands.importers.v1 import ImportModelsCommand
-from superset.daos.dashboard import DashboardDAO
-from superset.dashboards.commands.exceptions import DashboardImportError
-from superset.dashboards.commands.importers.v1.utils import (
+from superset.commands.chart.importers.v1.utils import import_chart
+from superset.commands.dashboard.exceptions import DashboardImportError
+from superset.commands.dashboard.importers.v1.utils import (
     find_chart_uuids,
     find_native_filter_datasets,
     import_dashboard,
     update_id_refs,
 )
+from superset.commands.database.importers.v1.utils import import_database
+from superset.commands.dataset.importers.v1.utils import import_dataset
+from superset.commands.importers.v1 import ImportModelsCommand
+from superset.daos.dashboard import DashboardDAO
 from superset.dashboards.schemas import ImportV1DashboardSchema
-from superset.databases.commands.importers.v1.utils import import_database
 from superset.databases.schemas import ImportV1DatabaseSchema
-from superset.datasets.commands.importers.v1.utils import import_dataset
 from superset.datasets.schemas import ImportV1DatasetSchema
 from superset.models.dashboard import dashboard_slices
diff --git a/superset/dashboards/commands/importers/v1/utils.py b/superset/commands/dashboard/importers/v1/utils.py
similarity index 100%
rename from superset/dashboards/commands/importers/v1/utils.py
rename to superset/commands/dashboard/importers/v1/utils.py
diff --git a/superset/dashboards/permalink/commands/__init__.py b/superset/commands/dashboard/permalink/__init__.py
similarity index 100%
rename from superset/dashboards/permalink/commands/__init__.py
rename to superset/commands/dashboard/permalink/__init__.py
diff --git a/superset/dashboards/permalink/commands/base.py b/superset/commands/dashboard/permalink/base.py
similarity index 100%
rename from superset/dashboards/permalink/commands/base.py
rename to superset/commands/dashboard/permalink/base.py
diff --git a/superset/dashboards/permalink/commands/create.py b/superset/commands/dashboard/permalink/create.py
similarity index 94%
rename from superset/dashboards/permalink/commands/create.py
rename to superset/commands/dashboard/permalink/create.py
index 320003ff3d..3387d432d5 100644
--- a/superset/dashboards/permalink/commands/create.py
+++ b/superset/commands/dashboard/permalink/create.py
@@ -18,11 +18,11 @@ import logging

 from sqlalchemy.exc import SQLAlchemyError

+from superset.commands.dashboard.permalink.base import BaseDashboardPermalinkCommand
+from superset.commands.key_value.upsert import UpsertKeyValueCommand
 from superset.daos.dashboard import DashboardDAO
-from superset.dashboards.permalink.commands.base import BaseDashboardPermalinkCommand
 from superset.dashboards.permalink.exceptions import DashboardPermalinkCreateFailedError
 from superset.dashboards.permalink.types import DashboardPermalinkState
-from superset.key_value.commands.upsert import UpsertKeyValueCommand
 from superset.key_value.exceptions import KeyValueCodecEncodeException
 from superset.key_value.utils import encode_permalink_key, get_deterministic_uuid
 from superset.utils.core import get_user_id
diff --git a/superset/dashboards/permalink/commands/get.py b/superset/commands/dashboard/permalink/get.py
similarity index 91%
rename from superset/dashboards/permalink/commands/get.py
rename to superset/commands/dashboard/permalink/get.py
index 6b32a459a5..32efa68881 100644
--- a/superset/dashboards/permalink/commands/get.py
+++ b/superset/commands/dashboard/permalink/get.py
@@ -19,12 +19,12 @@ from typing import Optional

 from sqlalchemy.exc import SQLAlchemyError

+from superset.commands.dashboard.exceptions import DashboardNotFoundError
+from superset.commands.dashboard.permalink.base import BaseDashboardPermalinkCommand
+from superset.commands.key_value.get import GetKeyValueCommand
 from superset.daos.dashboard import DashboardDAO
-from superset.dashboards.commands.exceptions import DashboardNotFoundError
-from superset.dashboards.permalink.commands.base import BaseDashboardPermalinkCommand
 from superset.dashboards.permalink.exceptions import DashboardPermalinkGetFailedError
 from superset.dashboards.permalink.types import DashboardPermalinkValue
-from superset.key_value.commands.get import GetKeyValueCommand
 from superset.key_value.exceptions import (
     KeyValueCodecDecodeException,
     KeyValueGetFailedError,
diff --git a/superset/dashboards/commands/update.py b/superset/commands/dashboard/update.py
similarity index 98%
rename from superset/dashboards/commands/update.py
rename to superset/commands/dashboard/update.py
index f9975c0dd2..22dcad4b2c 100644
--- a/superset/dashboards/commands/update.py
+++ b/superset/commands/dashboard/update.py
@@ -23,16 +23,16 @@ from marshmallow import ValidationError

 from superset import security_manager
 from superset.commands.base import BaseCommand, UpdateMixin
-from superset.commands.utils import populate_roles
-from superset.daos.dashboard import DashboardDAO
-from superset.daos.exceptions import DAOUpdateFailedError
-from superset.dashboards.commands.exceptions import (
+from superset.commands.dashboard.exceptions import (
     DashboardForbiddenError,
     DashboardInvalidError,
     DashboardNotFoundError,
     DashboardSlugExistsValidationError,
     DashboardUpdateFailedError,
 )
+from superset.commands.utils import populate_roles
+from superset.daos.dashboard import DashboardDAO
+from superset.daos.exceptions import DAOUpdateFailedError
 from superset.exceptions import SupersetSecurityException
 from superset.extensions import db
 from superset.models.dashboard import Dashboard
diff --git a/superset/databases/commands/__init__.py b/superset/commands/database/__init__.py
similarity index 100%
rename from superset/databases/commands/__init__.py
rename to superset/commands/database/__init__.py
diff --git a/superset/databases/commands/create.py b/superset/commands/database/create.py
similarity index 95%
rename from superset/databases/commands/create.py
rename to superset/commands/database/create.py
index d3dfe59e5e..a012e9b2a5 100644
--- a/superset/databases/commands/create.py
+++ b/superset/commands/database/create.py
@@ -23,22 +23,22 @@ from marshmallow import ValidationError

 from superset import is_feature_enabled
 from superset.commands.base import BaseCommand
-from superset.daos.database import DatabaseDAO
-from superset.daos.exceptions import DAOCreateFailedError
-from superset.databases.commands.exceptions import (
+from superset.commands.database.exceptions import (
     DatabaseConnectionFailedError,
     DatabaseCreateFailedError,
     DatabaseExistsValidationError,
     DatabaseInvalidError,
     DatabaseRequiredFieldValidationError,
 )
-from superset.databases.commands.test_connection import TestConnectionDatabaseCommand
-from superset.databases.ssh_tunnel.commands.create import CreateSSHTunnelCommand
-from superset.databases.ssh_tunnel.commands.exceptions import (
+from superset.commands.database.ssh_tunnel.create import CreateSSHTunnelCommand
+from superset.commands.database.ssh_tunnel.exceptions import (
     SSHTunnelCreateFailedError,
     SSHTunnelingNotEnabledError,
     SSHTunnelInvalidError,
 )
+from superset.commands.database.test_connection import TestConnectionDatabaseCommand
+from superset.daos.database import DatabaseDAO
+from superset.daos.exceptions import DAOCreateFailedError
 from superset.exceptions import SupersetErrorsException
 from superset.extensions import db, event_logger, security_manager
diff --git a/superset/databases/commands/delete.py b/superset/commands/database/delete.py
similarity index 96%
rename from superset/databases/commands/delete.py
rename to superset/commands/database/delete.py
index 254380a906..2db408c76e 100644
--- a/superset/databases/commands/delete.py
+++ b/superset/commands/database/delete.py
@@ -20,15 +20,15 @@ from typing import Optional

 from flask_babel import lazy_gettext as _

 from superset.commands.base import BaseCommand
-from superset.daos.database import DatabaseDAO
-from superset.daos.exceptions import DAODeleteFailedError
-from superset.daos.report import ReportScheduleDAO
-from superset.databases.commands.exceptions import (
+from superset.commands.database.exceptions import (
     DatabaseDeleteDatasetsExistFailedError,
     DatabaseDeleteFailedError,
     DatabaseDeleteFailedReportsExistError,
     DatabaseNotFoundError,
 )
+from superset.daos.database import DatabaseDAO
+from superset.daos.exceptions import DAODeleteFailedError
+from superset.daos.report import ReportScheduleDAO
 from superset.models.core import Database

 logger = logging.getLogger(__name__)
@@ -44,7 +44,7 @@ class DeleteDatabaseCommand(BaseCommand):
         assert self._model

         try:
-            DatabaseDAO.delete(self._model)
+            DatabaseDAO.delete([self._model])
         except DAODeleteFailedError as ex:
             logger.exception(ex.exception)
             raise DatabaseDeleteFailedError() from ex
diff --git a/superset/databases/commands/exceptions.py b/superset/commands/database/exceptions.py
similarity index 100%
rename from superset/databases/commands/exceptions.py
rename to superset/commands/database/exceptions.py
diff --git a/superset/databases/commands/export.py b/superset/commands/database/export.py
similarity index 98%
rename from superset/databases/commands/export.py
rename to superset/commands/database/export.py
index 71dc55a026..82c22ea801 100644
--- a/superset/databases/commands/export.py
+++ b/superset/commands/database/export.py
@@ -23,7 +23,7 @@ from collections.abc import Iterator

 import yaml

-from superset.databases.commands.exceptions import DatabaseNotFoundError
+from superset.commands.database.exceptions import DatabaseNotFoundError
 from superset.daos.database import DatabaseDAO
 from superset.commands.export.models import ExportModelsCommand
 from superset.models.core import Database
diff --git a/superset/databases/commands/importers/__init__.py b/superset/commands/database/importers/__init__.py
similarity index 100%
rename from superset/databases/commands/importers/__init__.py
rename to superset/commands/database/importers/__init__.py
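Besides the module moves, several hunks in this patch change DAO delete call sites from a single model to a list — FilterSetDAO.delete([self._filter_set]) earlier, DatabaseDAO.delete([self._model]) here, and the SSHTunnelDAO and DatasetColumnDAO call sites below — which suggests the shared DAO delete method now takes a list of models so single and bulk deletion share one code path. A hedged sketch of a call site under that assumption; delete_my_database is illustrative and not part of the patch:

from superset.daos.database import DatabaseDAO
from superset.models.core import Database


def delete_my_database(model: Database) -> None:
    # Note the list wrapper: after this patch, pass [model] rather than model.
    DatabaseDAO.delete([model])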
diff --git a/superset/databases/commands/importers/dispatcher.py b/superset/commands/database/importers/dispatcher.py
similarity index 97%
rename from superset/databases/commands/importers/dispatcher.py
rename to superset/commands/database/importers/dispatcher.py
index 70031b09e4..bdf487a758 100644
--- a/superset/databases/commands/importers/dispatcher.py
+++ b/superset/commands/database/importers/dispatcher.py
@@ -21,9 +21,9 @@ from typing import Any
 from marshmallow.exceptions import ValidationError

 from superset.commands.base import BaseCommand
+from superset.commands.database.importers import v1
 from superset.commands.exceptions import CommandInvalidError
 from superset.commands.importers.exceptions import IncorrectVersionError
-from superset.databases.commands.importers import v1

 logger = logging.getLogger(__name__)
diff --git a/superset/databases/commands/importers/v1/__init__.py b/superset/commands/database/importers/v1/__init__.py
similarity index 91%
rename from superset/databases/commands/importers/v1/__init__.py
rename to superset/commands/database/importers/v1/__init__.py
index 585c2d54ca..73b1bca531 100644
--- a/superset/databases/commands/importers/v1/__init__.py
+++ b/superset/commands/database/importers/v1/__init__.py
@@ -20,12 +20,12 @@ from typing import Any
 from marshmallow import Schema
 from sqlalchemy.orm import Session

+from superset.commands.database.exceptions import DatabaseImportError
+from superset.commands.database.importers.v1.utils import import_database
+from superset.commands.dataset.importers.v1.utils import import_dataset
 from superset.commands.importers.v1 import ImportModelsCommand
 from superset.daos.database import DatabaseDAO
-from superset.databases.commands.exceptions import DatabaseImportError
-from superset.databases.commands.importers.v1.utils import import_database
 from superset.databases.schemas import ImportV1DatabaseSchema
-from superset.datasets.commands.importers.v1.utils import import_dataset
 from superset.datasets.schemas import ImportV1DatasetSchema
diff --git a/superset/databases/commands/importers/v1/utils.py b/superset/commands/database/importers/v1/utils.py
similarity index 100%
rename from superset/databases/commands/importers/v1/utils.py
rename to superset/commands/database/importers/v1/utils.py
diff --git a/superset/databases/ssh_tunnel/commands/__init__.py b/superset/commands/database/ssh_tunnel/__init__.py
similarity index 100%
rename from superset/databases/ssh_tunnel/commands/__init__.py
rename to superset/commands/database/ssh_tunnel/__init__.py
diff --git a/superset/databases/ssh_tunnel/commands/create.py b/superset/commands/database/ssh_tunnel/create.py
similarity index 98%
rename from superset/databases/ssh_tunnel/commands/create.py
rename to superset/commands/database/ssh_tunnel/create.py
index 36f33e46f9..07209f010b 100644
--- a/superset/databases/ssh_tunnel/commands/create.py
+++ b/superset/commands/database/ssh_tunnel/create.py
@@ -21,13 +21,13 @@ from flask_appbuilder.models.sqla import Model
 from marshmallow import ValidationError

 from superset.commands.base import BaseCommand
-from superset.daos.database import SSHTunnelDAO
-from superset.daos.exceptions import DAOCreateFailedError
-from superset.databases.ssh_tunnel.commands.exceptions import (
+from superset.commands.database.ssh_tunnel.exceptions import (
     SSHTunnelCreateFailedError,
     SSHTunnelInvalidError,
     SSHTunnelRequiredFieldValidationError,
 )
+from superset.daos.database import SSHTunnelDAO
+from superset.daos.exceptions import DAOCreateFailedError
 from superset.extensions import db, event_logger

 logger = logging.getLogger(__name__)
diff --git a/superset/databases/ssh_tunnel/commands/delete.py b/superset/commands/database/ssh_tunnel/delete.py
similarity index 94%
rename from superset/databases/ssh_tunnel/commands/delete.py
rename to superset/commands/database/ssh_tunnel/delete.py
index 04d6e68338..b8919e6d7b 100644
--- a/superset/databases/ssh_tunnel/commands/delete.py
+++ b/superset/commands/database/ssh_tunnel/delete.py
@@ -19,13 +19,13 @@ from typing import Optional

 from superset import is_feature_enabled
 from superset.commands.base import BaseCommand
-from superset.daos.database import SSHTunnelDAO
-from superset.daos.exceptions import DAODeleteFailedError
-from superset.databases.ssh_tunnel.commands.exceptions import (
+from superset.commands.database.ssh_tunnel.exceptions import (
     SSHTunnelDeleteFailedError,
     SSHTunnelingNotEnabledError,
     SSHTunnelNotFoundError,
 )
+from superset.daos.database import SSHTunnelDAO
+from superset.daos.exceptions import DAODeleteFailedError
 from superset.databases.ssh_tunnel.models import SSHTunnel

 logger = logging.getLogger(__name__)
@@ -43,7 +43,7 @@ class DeleteSSHTunnelCommand(BaseCommand):
         assert self._model

         try:
-            SSHTunnelDAO.delete(self._model)
+            SSHTunnelDAO.delete([self._model])
         except DAODeleteFailedError as ex:
             raise SSHTunnelDeleteFailedError() from ex
diff --git a/superset/databases/ssh_tunnel/commands/exceptions.py b/superset/commands/database/ssh_tunnel/exceptions.py
similarity index 100%
rename from superset/databases/ssh_tunnel/commands/exceptions.py
rename to superset/commands/database/ssh_tunnel/exceptions.py
diff --git a/superset/databases/ssh_tunnel/commands/update.py b/superset/commands/database/ssh_tunnel/update.py
similarity index 97%
rename from superset/databases/ssh_tunnel/commands/update.py
rename to superset/commands/database/ssh_tunnel/update.py
index 4e4edcb664..ae7ee78afe 100644
--- a/superset/databases/ssh_tunnel/commands/update.py
+++ b/superset/commands/database/ssh_tunnel/update.py
@@ -20,14 +20,14 @@ from typing import Any, Optional

 from flask_appbuilder.models.sqla import Model

 from superset.commands.base import BaseCommand
-from superset.daos.database import SSHTunnelDAO
-from superset.daos.exceptions import DAOUpdateFailedError
-from superset.databases.ssh_tunnel.commands.exceptions import (
+from superset.commands.database.ssh_tunnel.exceptions import (
     SSHTunnelInvalidError,
     SSHTunnelNotFoundError,
     SSHTunnelRequiredFieldValidationError,
     SSHTunnelUpdateFailedError,
 )
+from superset.daos.database import SSHTunnelDAO
+from superset.daos.exceptions import DAOUpdateFailedError
 from superset.databases.ssh_tunnel.models import SSHTunnel

 logger = logging.getLogger(__name__)
diff --git a/superset/databases/commands/tables.py b/superset/commands/database/tables.py
similarity index 98%
rename from superset/databases/commands/tables.py
rename to superset/commands/database/tables.py
index 6232470ece..fa98bcbc7e 100644
--- a/superset/databases/commands/tables.py
+++ b/superset/commands/database/tables.py
@@ -20,12 +20,12 @@ from typing import Any, cast
 from sqlalchemy.orm import lazyload, load_only

 from superset.commands.base import BaseCommand
-from superset.connectors.sqla.models import SqlaTable
-from superset.daos.database import DatabaseDAO
-from superset.databases.commands.exceptions import (
+from superset.commands.database.exceptions import (
     DatabaseNotFoundError,
     DatabaseTablesUnexpectedError,
 )
+from superset.connectors.sqla.models import SqlaTable
+from superset.daos.database import DatabaseDAO
 from superset.exceptions import SupersetException
 from superset.extensions import db, security_manager
 from superset.models.core import Database
diff --git a/superset/databases/commands/test_connection.py b/superset/commands/database/test_connection.py
similarity index 98%
rename from superset/databases/commands/test_connection.py
rename to superset/commands/database/test_connection.py
index 49c5340dd2..0ffdf3ddd9 100644
--- a/superset/databases/commands/test_connection.py
+++ b/superset/commands/database/test_connection.py
@@ -27,15 +27,13 @@ from sqlalchemy.exc import DBAPIError, NoSuchModuleError

 from superset import is_feature_enabled
 from superset.commands.base import BaseCommand
-from superset.daos.database import DatabaseDAO, SSHTunnelDAO
-from superset.databases.commands.exceptions import (
+from superset.commands.database.exceptions import (
     DatabaseSecurityUnsafeError,
     DatabaseTestConnectionDriverError,
     DatabaseTestConnectionUnexpectedError,
 )
-from superset.databases.ssh_tunnel.commands.exceptions import (
-    SSHTunnelingNotEnabledError,
-)
+from superset.commands.database.ssh_tunnel.exceptions import SSHTunnelingNotEnabledError
+from superset.daos.database import DatabaseDAO, SSHTunnelDAO
 from superset.databases.ssh_tunnel.models import SSHTunnel
 from superset.databases.utils import make_url_safe
 from superset.errors import ErrorLevel, SupersetErrorType
diff --git a/superset/databases/commands/update.py b/superset/commands/database/update.py
similarity index 96%
rename from superset/databases/commands/update.py
rename to superset/commands/database/update.py
index d8d86c6d2d..039d731d72 100644
--- a/superset/databases/commands/update.py
+++ b/superset/commands/database/update.py
@@ -22,23 +22,23 @@ from marshmallow import ValidationError

 from superset import is_feature_enabled
 from superset.commands.base import BaseCommand
-from superset.daos.database import DatabaseDAO
-from superset.daos.exceptions import DAOCreateFailedError, DAOUpdateFailedError
-from superset.databases.commands.exceptions import (
+from superset.commands.database.exceptions import (
     DatabaseConnectionFailedError,
     DatabaseExistsValidationError,
     DatabaseInvalidError,
     DatabaseNotFoundError,
     DatabaseUpdateFailedError,
 )
-from superset.databases.ssh_tunnel.commands.create import CreateSSHTunnelCommand
-from superset.databases.ssh_tunnel.commands.exceptions import (
+from superset.commands.database.ssh_tunnel.create import CreateSSHTunnelCommand
+from superset.commands.database.ssh_tunnel.exceptions import (
     SSHTunnelCreateFailedError,
     SSHTunnelingNotEnabledError,
     SSHTunnelInvalidError,
     SSHTunnelUpdateFailedError,
 )
-from superset.databases.ssh_tunnel.commands.update import UpdateSSHTunnelCommand
+from superset.commands.database.ssh_tunnel.update import UpdateSSHTunnelCommand
+from superset.daos.database import DatabaseDAO
+from superset.daos.exceptions import DAOCreateFailedError, DAOUpdateFailedError
 from superset.extensions import db, security_manager
 from superset.models.core import Database
 from superset.utils.core import DatasourceType
diff --git a/superset/databases/commands/validate.py b/superset/commands/database/validate.py
similarity index 98%
rename from superset/databases/commands/validate.py
rename to superset/commands/database/validate.py
index 6ea412b490..83bbc4e90a 100644
--- a/superset/databases/commands/validate.py
+++ b/superset/commands/database/validate.py
@@ -21,13 +21,13 @@ from typing import Any, Optional
 from flask_babel import gettext as __

 from superset.commands.base import BaseCommand
-from superset.daos.database import DatabaseDAO
-from superset.databases.commands.exceptions import (
+from superset.commands.database.exceptions import (
     DatabaseOfflineError,
     DatabaseTestConnectionFailedError,
     InvalidEngineError,
     InvalidParametersError,
 )
+from superset.daos.database import DatabaseDAO
 from superset.databases.utils import make_url_safe
 from superset.db_engine_specs import get_engine_spec
 from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
diff --git a/superset/databases/commands/validate_sql.py b/superset/commands/database/validate_sql.py
similarity index 98%
rename from superset/databases/commands/validate_sql.py
rename to superset/commands/database/validate_sql.py
index 6fc0c3a398..9a00526bfa 100644
--- a/superset/databases/commands/validate_sql.py
+++ b/superset/commands/database/validate_sql.py
@@ -22,8 +22,7 @@ from flask import current_app
 from flask_babel import gettext as __

 from superset.commands.base import BaseCommand
-from superset.daos.database import DatabaseDAO
-from superset.databases.commands.exceptions import (
+from superset.commands.database.exceptions import (
     DatabaseNotFoundError,
     NoValidatorConfigFoundError,
     NoValidatorFoundError,
@@ -31,6 +30,7 @@ from superset.databases.commands.exceptions import (
     ValidatorSQLError,
     ValidatorSQLUnexpectedError,
 )
+from superset.daos.database import DatabaseDAO
 from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
 from superset.models.core import Database
 from superset.sql_validators import get_validator_by_name
diff --git a/superset/datasets/columns/commands/__init__.py b/superset/commands/dataset/__init__.py
similarity index 100%
rename from superset/datasets/columns/commands/__init__.py
rename to superset/commands/dataset/__init__.py
diff --git a/superset/datasets/commands/__init__.py b/superset/commands/dataset/columns/__init__.py
similarity index 100%
rename from superset/datasets/commands/__init__.py
rename to superset/commands/dataset/columns/__init__.py
diff --git a/superset/datasets/columns/commands/delete.py b/superset/commands/dataset/columns/delete.py
similarity index 95%
rename from superset/datasets/columns/commands/delete.py
rename to superset/commands/dataset/columns/delete.py
index 23b0d93b6a..4739c2520f 100644
--- a/superset/datasets/columns/commands/delete.py
+++ b/superset/commands/dataset/columns/delete.py
@@ -19,14 +19,14 @@ from typing import Optional

 from superset import security_manager
 from superset.commands.base import BaseCommand
-from superset.connectors.sqla.models import TableColumn
-from superset.daos.dataset import DatasetColumnDAO, DatasetDAO
-from superset.daos.exceptions import DAODeleteFailedError
-from superset.datasets.columns.commands.exceptions import (
+from superset.commands.dataset.columns.exceptions import (
     DatasetColumnDeleteFailedError,
     DatasetColumnForbiddenError,
     DatasetColumnNotFoundError,
 )
+from superset.connectors.sqla.models import TableColumn
+from superset.daos.dataset import DatasetColumnDAO, DatasetDAO
+from superset.daos.exceptions import DAODeleteFailedError
 from superset.exceptions import SupersetSecurityException

 logger = logging.getLogger(__name__)
@@ -43,7 +43,7 @@ class DeleteDatasetColumnCommand(BaseCommand):
         assert self._model

         try:
-            DatasetColumnDAO.delete(self._model)
+            DatasetColumnDAO.delete([self._model])
         except DAODeleteFailedError as ex:
             logger.exception(ex.exception)
             raise DatasetColumnDeleteFailedError() from ex
DatasetColumnDeleteFailedError() from ex diff --git a/superset/datasets/columns/commands/exceptions.py b/superset/commands/dataset/columns/exceptions.py similarity index 100% rename from superset/datasets/columns/commands/exceptions.py rename to superset/commands/dataset/columns/exceptions.py diff --git a/superset/datasets/commands/create.py b/superset/commands/dataset/create.py similarity index 98% rename from superset/datasets/commands/create.py rename to superset/commands/dataset/create.py index 8f486b0c9a..1c354e835f 100644 --- a/superset/datasets/commands/create.py +++ b/superset/commands/dataset/create.py @@ -22,15 +22,15 @@ from marshmallow import ValidationError from sqlalchemy.exc import SQLAlchemyError from superset.commands.base import BaseCommand, CreateMixin -from superset.daos.dataset import DatasetDAO -from superset.daos.exceptions import DAOCreateFailedError -from superset.datasets.commands.exceptions import ( +from superset.commands.dataset.exceptions import ( DatabaseNotFoundValidationError, DatasetCreateFailedError, DatasetExistsValidationError, DatasetInvalidError, TableNotFoundValidationError, ) +from superset.daos.dataset import DatasetDAO +from superset.daos.exceptions import DAOCreateFailedError from superset.extensions import db logger = logging.getLogger(__name__) diff --git a/superset/datasets/commands/delete.py b/superset/commands/dataset/delete.py similarity index 97% rename from superset/datasets/commands/delete.py rename to superset/commands/dataset/delete.py index 478267d01d..4b7e61ab4c 100644 --- a/superset/datasets/commands/delete.py +++ b/superset/commands/dataset/delete.py @@ -19,14 +19,14 @@ from typing import Optional from superset import security_manager from superset.commands.base import BaseCommand -from superset.connectors.sqla.models import SqlaTable -from superset.daos.dataset import DatasetDAO -from superset.daos.exceptions import DAODeleteFailedError -from superset.datasets.commands.exceptions import ( +from superset.commands.dataset.exceptions import ( DatasetDeleteFailedError, DatasetForbiddenError, DatasetNotFoundError, ) +from superset.connectors.sqla.models import SqlaTable +from superset.daos.dataset import DatasetDAO +from superset.daos.exceptions import DAODeleteFailedError from superset.exceptions import SupersetSecurityException logger = logging.getLogger(__name__) diff --git a/superset/datasets/commands/duplicate.py b/superset/commands/dataset/duplicate.py similarity index 99% rename from superset/datasets/commands/duplicate.py rename to superset/commands/dataset/duplicate.py index 12ae96e0ae..0ae47c35bc 100644 --- a/superset/datasets/commands/duplicate.py +++ b/superset/commands/dataset/duplicate.py @@ -23,16 +23,16 @@ from marshmallow import ValidationError from sqlalchemy.exc import SQLAlchemyError from superset.commands.base import BaseCommand, CreateMixin -from superset.commands.exceptions import DatasourceTypeInvalidError -from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn -from superset.daos.dataset import DatasetDAO -from superset.daos.exceptions import DAOCreateFailedError -from superset.datasets.commands.exceptions import ( +from superset.commands.dataset.exceptions import ( DatasetDuplicateFailedError, DatasetExistsValidationError, DatasetInvalidError, DatasetNotFoundError, ) +from superset.commands.exceptions import DatasourceTypeInvalidError +from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn +from superset.daos.dataset import DatasetDAO +from superset.daos.exceptions 
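[Editor's note] One behavioral change rides along with the column-command rename above: `DatasetColumnDAO.delete` is now invoked with a list, `delete([self._model])`, rather than a bare model, which suggests the DAO delete API has become bulk-oriented. A minimal runnable sketch of that calling convention, using hypothetical stand-ins (`ExampleDAO`, `FakeSession`) rather than Superset's real `BaseDAO`:

```python
# Hypothetical sketch of a bulk-oriented DAO delete; FakeSession and
# ExampleDAO are stand-ins, not Superset's actual classes.
class FakeSession:
    def __init__(self) -> None:
        self.deleted: list[object] = []

    def delete(self, obj: object) -> None:
        self.deleted.append(obj)

    def commit(self) -> None:
        pass


session = FakeSession()


class ExampleDAO:
    @classmethod
    def delete(cls, items: list[object], commit: bool = True) -> None:
        # One signature serves both cases: single-item call sites
        # simply wrap the model in a one-element list.
        for item in items:
            session.delete(item)
        if commit:
            session.commit()


model = object()
ExampleDAO.delete([model])  # new style: always a list
assert session.deleted == [model]
```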
import DAOCreateFailedError from superset.errors import ErrorLevel, SupersetError, SupersetErrorType from superset.exceptions import SupersetErrorException from superset.extensions import db diff --git a/superset/datasets/commands/exceptions.py b/superset/commands/dataset/exceptions.py similarity index 100% rename from superset/datasets/commands/exceptions.py rename to superset/commands/dataset/exceptions.py diff --git a/superset/datasets/commands/export.py b/superset/commands/dataset/export.py similarity index 98% rename from superset/datasets/commands/export.py rename to superset/commands/dataset/export.py index 3922652322..afecdd2fea 100644 --- a/superset/datasets/commands/export.py +++ b/superset/commands/dataset/export.py @@ -25,7 +25,7 @@ import yaml from superset.commands.export.models import ExportModelsCommand from superset.connectors.sqla.models import SqlaTable from superset.daos.database import DatabaseDAO -from superset.datasets.commands.exceptions import DatasetNotFoundError +from superset.commands.dataset.exceptions import DatasetNotFoundError from superset.daos.dataset import DatasetDAO from superset.utils.dict_import_export import EXPORT_VERSION from superset.utils.file import get_filename diff --git a/superset/datasets/commands/importers/__init__.py b/superset/commands/dataset/importers/__init__.py similarity index 100% rename from superset/datasets/commands/importers/__init__.py rename to superset/commands/dataset/importers/__init__.py diff --git a/superset/datasets/commands/importers/dispatcher.py b/superset/commands/dataset/importers/dispatcher.py similarity index 97% rename from superset/datasets/commands/importers/dispatcher.py rename to superset/commands/dataset/importers/dispatcher.py index 6be8635da2..9138d4f971 100644 --- a/superset/datasets/commands/importers/dispatcher.py +++ b/superset/commands/dataset/importers/dispatcher.py @@ -21,9 +21,9 @@ from typing import Any from marshmallow.exceptions import ValidationError from superset.commands.base import BaseCommand +from superset.commands.dataset.importers import v0, v1 from superset.commands.exceptions import CommandInvalidError from superset.commands.importers.exceptions import IncorrectVersionError -from superset.datasets.commands.importers import v0, v1 logger = logging.getLogger(__name__) diff --git a/superset/datasets/commands/importers/v0.py b/superset/commands/dataset/importers/v0.py similarity index 90% rename from superset/datasets/commands/importers/v0.py rename to superset/commands/dataset/importers/v0.py index a34d9be1ac..d389a17651 100644 --- a/superset/datasets/commands/importers/v0.py +++ b/superset/commands/dataset/importers/v0.py @@ -25,11 +25,15 @@ from sqlalchemy.orm.session import make_transient from superset import db from superset.commands.base import BaseCommand +from superset.commands.database.exceptions import DatabaseNotFoundError +from superset.commands.dataset.exceptions import DatasetInvalidError from superset.commands.importers.exceptions import IncorrectVersionError -from superset.connectors.base.models import BaseColumn, BaseDatasource, BaseMetric -from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn -from superset.databases.commands.exceptions import DatabaseNotFoundError -from superset.datasets.commands.exceptions import DatasetInvalidError +from superset.connectors.sqla.models import ( + BaseDatasource, + SqlaTable, + SqlMetric, + TableColumn, +) from superset.models.core import Database from superset.utils.dict_import_export import DATABASES_KEY @@ 
-102,14 +106,8 @@ def lookup_sqla_metric(session: Session, metric: SqlMetric) -> SqlMetric: ) -def import_metric(session: Session, metric: BaseMetric) -> BaseMetric: - if isinstance(metric, SqlMetric): - lookup_metric = lookup_sqla_metric - else: - raise Exception( # pylint: disable=broad-exception-raised - f"Invalid metric type: {metric}" - ) - return import_simple_obj(session, metric, lookup_metric) +def import_metric(session: Session, metric: SqlMetric) -> SqlMetric: + return import_simple_obj(session, metric, lookup_sqla_metric) def lookup_sqla_column(session: Session, column: TableColumn) -> TableColumn: @@ -123,14 +121,8 @@ def lookup_sqla_column(session: Session, column: TableColumn) -> TableColumn: ) -def import_column(session: Session, column: BaseColumn) -> BaseColumn: - if isinstance(column, TableColumn): - lookup_column = lookup_sqla_column - else: - raise Exception( # pylint: disable=broad-exception-raised - f"Invalid column type: {column}" - ) - return import_simple_obj(session, column, lookup_column) +def import_column(session: Session, column: TableColumn) -> TableColumn: + return import_simple_obj(session, column, lookup_sqla_column) def import_datasource( # pylint: disable=too-many-arguments diff --git a/superset/datasets/commands/importers/v1/__init__.py b/superset/commands/dataset/importers/v1/__init__.py similarity index 92% rename from superset/datasets/commands/importers/v1/__init__.py rename to superset/commands/dataset/importers/v1/__init__.py index f46c137b7e..600a39bf48 100644 --- a/superset/datasets/commands/importers/v1/__init__.py +++ b/superset/commands/dataset/importers/v1/__init__.py @@ -20,12 +20,12 @@ from typing import Any from marshmallow import Schema from sqlalchemy.orm import Session +from superset.commands.database.importers.v1.utils import import_database +from superset.commands.dataset.exceptions import DatasetImportError +from superset.commands.dataset.importers.v1.utils import import_dataset from superset.commands.importers.v1 import ImportModelsCommand from superset.daos.dataset import DatasetDAO -from superset.databases.commands.importers.v1.utils import import_database from superset.databases.schemas import ImportV1DatabaseSchema -from superset.datasets.commands.exceptions import DatasetImportError -from superset.datasets.commands.importers.v1.utils import import_dataset from superset.datasets.schemas import ImportV1DatasetSchema diff --git a/superset/datasets/commands/importers/v1/utils.py b/superset/commands/dataset/importers/v1/utils.py similarity index 99% rename from superset/datasets/commands/importers/v1/utils.py rename to superset/commands/dataset/importers/v1/utils.py index c45f7a5655..c145cc50f9 100644 --- a/superset/datasets/commands/importers/v1/utils.py +++ b/superset/commands/dataset/importers/v1/utils.py @@ -29,9 +29,9 @@ from sqlalchemy.orm.exc import MultipleResultsFound from sqlalchemy.sql.visitors import VisitableType from superset import security_manager +from superset.commands.dataset.exceptions import DatasetForbiddenDataURI from superset.commands.exceptions import ImportFailedError from superset.connectors.sqla.models import SqlaTable -from superset.datasets.commands.exceptions import DatasetForbiddenDataURI from superset.models.core import Database logger = logging.getLogger(__name__) diff --git a/superset/datasets/metrics/commands/__init__.py b/superset/commands/dataset/metrics/__init__.py similarity index 100% rename from superset/datasets/metrics/commands/__init__.py rename to 
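[Editor's note] The `importers/v0` hunk above is more than a move: with `BaseColumn` and `BaseMetric` no longer imported (only the SQLA models remain, and `BaseDatasource` now comes from `superset.connectors.sqla.models`), the `isinstance` dispatch in `import_metric` and `import_column` collapses to a direct call. A sketch of that simplification with hypothetical stand-in types:

```python
# Hypothetical stand-ins; not the real Superset models or helpers.
class SqlMetric:
    pass


def lookup_sqla_metric(session: object, metric: SqlMetric) -> SqlMetric:
    # Stand-in for the real lookup that matches an existing row.
    return metric


def import_simple_obj(session: object, obj: SqlMetric, lookup) -> SqlMetric:
    return lookup(session, obj)


# Before: dispatch on the abstract metric type, failing on unknowns.
def import_metric_old(session: object, metric: object) -> object:
    if isinstance(metric, SqlMetric):
        return import_simple_obj(session, metric, lookup_sqla_metric)
    raise TypeError(f"Invalid metric type: {metric}")


# After: SqlMetric is the only metric type left, so no dispatch.
def import_metric(session: object, metric: SqlMetric) -> SqlMetric:
    return import_simple_obj(session, metric, lookup_sqla_metric)


assert isinstance(import_metric(None, SqlMetric()), SqlMetric)
```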
superset/commands/dataset/metrics/__init__.py diff --git a/superset/datasets/metrics/commands/delete.py b/superset/commands/dataset/metrics/delete.py similarity index 95% rename from superset/datasets/metrics/commands/delete.py rename to superset/commands/dataset/metrics/delete.py index 8f27e98a3d..b48668852c 100644 --- a/superset/datasets/metrics/commands/delete.py +++ b/superset/commands/dataset/metrics/delete.py @@ -19,14 +19,14 @@ from typing import Optional from superset import security_manager from superset.commands.base import BaseCommand -from superset.connectors.sqla.models import SqlMetric -from superset.daos.dataset import DatasetDAO, DatasetMetricDAO -from superset.daos.exceptions import DAODeleteFailedError -from superset.datasets.metrics.commands.exceptions import ( +from superset.commands.dataset.metrics.exceptions import ( DatasetMetricDeleteFailedError, DatasetMetricForbiddenError, DatasetMetricNotFoundError, ) +from superset.connectors.sqla.models import SqlMetric +from superset.daos.dataset import DatasetDAO, DatasetMetricDAO +from superset.daos.exceptions import DAODeleteFailedError from superset.exceptions import SupersetSecurityException logger = logging.getLogger(__name__) @@ -43,7 +43,7 @@ class DeleteDatasetMetricCommand(BaseCommand): assert self._model try: - DatasetMetricDAO.delete(self._model) + DatasetMetricDAO.delete([self._model]) except DAODeleteFailedError as ex: logger.exception(ex.exception) raise DatasetMetricDeleteFailedError() from ex diff --git a/superset/datasets/metrics/commands/exceptions.py b/superset/commands/dataset/metrics/exceptions.py similarity index 100% rename from superset/datasets/metrics/commands/exceptions.py rename to superset/commands/dataset/metrics/exceptions.py diff --git a/superset/datasets/commands/refresh.py b/superset/commands/dataset/refresh.py similarity index 97% rename from superset/datasets/commands/refresh.py rename to superset/commands/dataset/refresh.py index a25609636d..5976956d7c 100644 --- a/superset/datasets/commands/refresh.py +++ b/superset/commands/dataset/refresh.py @@ -21,13 +21,13 @@ from flask_appbuilder.models.sqla import Model from superset import security_manager from superset.commands.base import BaseCommand -from superset.connectors.sqla.models import SqlaTable -from superset.daos.dataset import DatasetDAO -from superset.datasets.commands.exceptions import ( +from superset.commands.dataset.exceptions import ( DatasetForbiddenError, DatasetNotFoundError, DatasetRefreshFailedError, ) +from superset.connectors.sqla.models import SqlaTable +from superset.daos.dataset import DatasetDAO from superset.exceptions import SupersetSecurityException logger = logging.getLogger(__name__) diff --git a/superset/datasets/commands/update.py b/superset/commands/dataset/update.py similarity index 99% rename from superset/datasets/commands/update.py rename to superset/commands/dataset/update.py index 8dcc4dfd5f..8a72c24fd5 100644 --- a/superset/datasets/commands/update.py +++ b/superset/commands/dataset/update.py @@ -23,10 +23,7 @@ from marshmallow import ValidationError from superset import security_manager from superset.commands.base import BaseCommand, UpdateMixin -from superset.connectors.sqla.models import SqlaTable -from superset.daos.dataset import DatasetDAO -from superset.daos.exceptions import DAOUpdateFailedError -from superset.datasets.commands.exceptions import ( +from superset.commands.dataset.exceptions import ( DatabaseChangeValidationError, DatasetColumnNotFoundValidationError, 
DatasetColumnsDuplicateValidationError, @@ -40,6 +37,9 @@ from superset.datasets.commands.exceptions import ( DatasetNotFoundError, DatasetUpdateFailedError, ) +from superset.connectors.sqla.models import SqlaTable +from superset.daos.dataset import DatasetDAO +from superset.daos.exceptions import DAOUpdateFailedError from superset.exceptions import SupersetSecurityException logger = logging.getLogger(__name__) diff --git a/superset/datasets/commands/warm_up_cache.py b/superset/commands/dataset/warm_up_cache.py similarity index 89% rename from superset/datasets/commands/warm_up_cache.py rename to superset/commands/dataset/warm_up_cache.py index 64becc9cd6..97b00c4772 100644 --- a/superset/datasets/commands/warm_up_cache.py +++ b/superset/commands/dataset/warm_up_cache.py @@ -18,10 +18,10 @@ from typing import Any, Optional -from superset.charts.commands.warm_up_cache import ChartWarmUpCacheCommand from superset.commands.base import BaseCommand +from superset.commands.chart.warm_up_cache import ChartWarmUpCacheCommand +from superset.commands.dataset.exceptions import WarmUpCacheTableNotFoundError from superset.connectors.sqla.models import SqlaTable -from superset.datasets.commands.exceptions import WarmUpCacheTableNotFoundError from superset.extensions import db from superset.models.core import Database from superset.models.slice import Slice @@ -45,7 +45,9 @@ class DatasetWarmUpCacheCommand(BaseCommand): self.validate() return [ ChartWarmUpCacheCommand( - chart, self._dashboard_id, self._extra_filters + chart, + self._dashboard_id, + self._extra_filters, ).run() for chart in self._charts ] diff --git a/superset/embedded_dashboard/commands/__init__.py b/superset/commands/explore/__init__.py similarity index 100% rename from superset/embedded_dashboard/commands/__init__.py rename to superset/commands/explore/__init__.py diff --git a/superset/explore/commands/__init__.py b/superset/commands/explore/form_data/__init__.py similarity index 100% rename from superset/explore/commands/__init__.py rename to superset/commands/explore/form_data/__init__.py diff --git a/superset/explore/form_data/commands/create.py b/superset/commands/explore/form_data/create.py similarity index 91% rename from superset/explore/form_data/commands/create.py rename to superset/commands/explore/form_data/create.py index df0250f2ff..e85f840133 100644 --- a/superset/explore/form_data/commands/create.py +++ b/superset/commands/explore/form_data/create.py @@ -20,12 +20,12 @@ from flask import session from sqlalchemy.exc import SQLAlchemyError from superset.commands.base import BaseCommand -from superset.explore.form_data.commands.parameters import CommandParameters -from superset.explore.form_data.commands.state import TemporaryExploreState -from superset.explore.form_data.commands.utils import check_access +from superset.commands.explore.form_data.parameters import CommandParameters +from superset.commands.explore.form_data.state import TemporaryExploreState +from superset.commands.explore.form_data.utils import check_access +from superset.commands.temporary_cache.exceptions import TemporaryCacheCreateFailedError from superset.extensions import cache_manager from superset.key_value.utils import random_key -from superset.temporary_cache.commands.exceptions import TemporaryCacheCreateFailedError from superset.temporary_cache.utils import cache_key from superset.utils.core import DatasourceType, get_user_id from superset.utils.schema import validate_json diff --git a/superset/explore/form_data/commands/delete.py 
b/superset/commands/explore/form_data/delete.py similarity index 91% rename from superset/explore/form_data/commands/delete.py rename to superset/commands/explore/form_data/delete.py index bce13b719a..d998f132d6 100644 --- a/superset/explore/form_data/commands/delete.py +++ b/superset/commands/explore/form_data/delete.py @@ -22,14 +22,14 @@ from flask import session from sqlalchemy.exc import SQLAlchemyError from superset.commands.base import BaseCommand -from superset.explore.form_data.commands.parameters import CommandParameters -from superset.explore.form_data.commands.state import TemporaryExploreState -from superset.explore.form_data.commands.utils import check_access -from superset.extensions import cache_manager -from superset.temporary_cache.commands.exceptions import ( +from superset.commands.explore.form_data.parameters import CommandParameters +from superset.commands.explore.form_data.state import TemporaryExploreState +from superset.commands.explore.form_data.utils import check_access +from superset.commands.temporary_cache.exceptions import ( TemporaryCacheAccessDeniedError, TemporaryCacheDeleteFailedError, ) +from superset.extensions import cache_manager from superset.temporary_cache.utils import cache_key from superset.utils.core import DatasourceType, get_user_id diff --git a/superset/explore/form_data/commands/get.py b/superset/commands/explore/form_data/get.py similarity index 89% rename from superset/explore/form_data/commands/get.py rename to superset/commands/explore/form_data/get.py index 53fd6ea6a9..0153888d4e 100644 --- a/superset/explore/form_data/commands/get.py +++ b/superset/commands/explore/form_data/get.py @@ -22,11 +22,11 @@ from flask import current_app as app from sqlalchemy.exc import SQLAlchemyError from superset.commands.base import BaseCommand -from superset.explore.form_data.commands.parameters import CommandParameters -from superset.explore.form_data.commands.state import TemporaryExploreState -from superset.explore.form_data.commands.utils import check_access +from superset.commands.explore.form_data.parameters import CommandParameters +from superset.commands.explore.form_data.state import TemporaryExploreState +from superset.commands.explore.form_data.utils import check_access +from superset.commands.temporary_cache.exceptions import TemporaryCacheGetFailedError from superset.extensions import cache_manager -from superset.temporary_cache.commands.exceptions import TemporaryCacheGetFailedError from superset.utils.core import DatasourceType logger = logging.getLogger(__name__) diff --git a/superset/explore/form_data/commands/parameters.py b/superset/commands/explore/form_data/parameters.py similarity index 100% rename from superset/explore/form_data/commands/parameters.py rename to superset/commands/explore/form_data/parameters.py diff --git a/superset/explore/form_data/commands/state.py b/superset/commands/explore/form_data/state.py similarity index 100% rename from superset/explore/form_data/commands/state.py rename to superset/commands/explore/form_data/state.py diff --git a/superset/explore/form_data/commands/update.py b/superset/commands/explore/form_data/update.py similarity index 93% rename from superset/explore/form_data/commands/update.py rename to superset/commands/explore/form_data/update.py index ace57350c4..fbb6ee0719 100644 --- a/superset/explore/form_data/commands/update.py +++ b/superset/commands/explore/form_data/update.py @@ -22,15 +22,15 @@ from flask import session from sqlalchemy.exc import SQLAlchemyError from 
superset.commands.base import BaseCommand -from superset.explore.form_data.commands.parameters import CommandParameters -from superset.explore.form_data.commands.state import TemporaryExploreState -from superset.explore.form_data.commands.utils import check_access -from superset.extensions import cache_manager -from superset.key_value.utils import random_key -from superset.temporary_cache.commands.exceptions import ( +from superset.commands.explore.form_data.parameters import CommandParameters +from superset.commands.explore.form_data.state import TemporaryExploreState +from superset.commands.explore.form_data.utils import check_access +from superset.commands.temporary_cache.exceptions import ( TemporaryCacheAccessDeniedError, TemporaryCacheUpdateFailedError, ) +from superset.extensions import cache_manager +from superset.key_value.utils import random_key from superset.temporary_cache.utils import cache_key from superset.utils.core import DatasourceType, get_user_id from superset.utils.schema import validate_json diff --git a/superset/explore/form_data/commands/utils.py b/superset/commands/explore/form_data/utils.py similarity index 90% rename from superset/explore/form_data/commands/utils.py rename to superset/commands/explore/form_data/utils.py index e4a843dc62..45b46fb8b3 100644 --- a/superset/explore/form_data/commands/utils.py +++ b/superset/commands/explore/form_data/utils.py @@ -16,19 +16,19 @@ # under the License. from typing import Optional -from superset.charts.commands.exceptions import ( +from superset.commands.chart.exceptions import ( ChartAccessDeniedError, ChartNotFoundError, ) -from superset.datasets.commands.exceptions import ( +from superset.commands.dataset.exceptions import ( DatasetAccessDeniedError, DatasetNotFoundError, ) -from superset.explore.utils import check_access as explore_check_access -from superset.temporary_cache.commands.exceptions import ( +from superset.commands.temporary_cache.exceptions import ( TemporaryCacheAccessDeniedError, TemporaryCacheResourceNotFoundError, ) +from superset.explore.utils import check_access as explore_check_access from superset.utils.core import DatasourceType diff --git a/superset/explore/commands/get.py b/superset/commands/explore/get.py similarity index 94% rename from superset/explore/commands/get.py rename to superset/commands/explore/get.py index d348b16251..bb8f5a85e9 100644 --- a/superset/explore/commands/get.py +++ b/superset/commands/explore/get.py @@ -26,18 +26,17 @@ from sqlalchemy.exc import SQLAlchemyError from superset import db from superset.commands.base import BaseCommand -from superset.connectors.base.models import BaseDatasource -from superset.connectors.sqla.models import SqlaTable +from superset.commands.explore.form_data.get import GetFormDataCommand +from superset.commands.explore.form_data.parameters import ( + CommandParameters as FormDataCommandParameters, +) +from superset.commands.explore.parameters import CommandParameters +from superset.commands.explore.permalink.get import GetExplorePermalinkCommand +from superset.connectors.sqla.models import BaseDatasource, SqlaTable from superset.daos.datasource import DatasourceDAO from superset.daos.exceptions import DatasourceNotFound from superset.exceptions import SupersetException -from superset.explore.commands.parameters import CommandParameters from superset.explore.exceptions import WrongEndpointError -from superset.explore.form_data.commands.get import GetFormDataCommand -from superset.explore.form_data.commands.parameters import ( - 
CommandParameters as FormDataCommandParameters, -) -from superset.explore.permalink.commands.get import GetExplorePermalinkCommand from superset.explore.permalink.exceptions import ExplorePermalinkGetFailedError from superset.utils import core as utils from superset.views.utils import ( diff --git a/superset/explore/commands/parameters.py b/superset/commands/explore/parameters.py similarity index 100% rename from superset/explore/commands/parameters.py rename to superset/commands/explore/parameters.py diff --git a/superset/explore/form_data/commands/__init__.py b/superset/commands/explore/permalink/__init__.py similarity index 100% rename from superset/explore/form_data/commands/__init__.py rename to superset/commands/explore/permalink/__init__.py diff --git a/superset/explore/permalink/commands/base.py b/superset/commands/explore/permalink/base.py similarity index 100% rename from superset/explore/permalink/commands/base.py rename to superset/commands/explore/permalink/base.py diff --git a/superset/explore/permalink/commands/create.py b/superset/commands/explore/permalink/create.py similarity index 95% rename from superset/explore/permalink/commands/create.py rename to superset/commands/explore/permalink/create.py index 97a8bcbf09..befb1d5a47 100644 --- a/superset/explore/permalink/commands/create.py +++ b/superset/commands/explore/permalink/create.py @@ -19,10 +19,10 @@ from typing import Any, Optional from sqlalchemy.exc import SQLAlchemyError -from superset.explore.permalink.commands.base import BaseExplorePermalinkCommand +from superset.commands.explore.permalink.base import BaseExplorePermalinkCommand +from superset.commands.key_value.create import CreateKeyValueCommand from superset.explore.permalink.exceptions import ExplorePermalinkCreateFailedError from superset.explore.utils import check_access as check_chart_access -from superset.key_value.commands.create import CreateKeyValueCommand from superset.key_value.exceptions import KeyValueCodecEncodeException from superset.key_value.utils import encode_permalink_key from superset.utils.core import DatasourceType diff --git a/superset/explore/permalink/commands/get.py b/superset/commands/explore/permalink/get.py similarity index 93% rename from superset/explore/permalink/commands/get.py rename to superset/commands/explore/permalink/get.py index 1aa093b380..4c01db1cca 100644 --- a/superset/explore/permalink/commands/get.py +++ b/superset/commands/explore/permalink/get.py @@ -19,12 +19,12 @@ from typing import Optional from sqlalchemy.exc import SQLAlchemyError -from superset.datasets.commands.exceptions import DatasetNotFoundError -from superset.explore.permalink.commands.base import BaseExplorePermalinkCommand +from superset.commands.dataset.exceptions import DatasetNotFoundError +from superset.commands.explore.permalink.base import BaseExplorePermalinkCommand +from superset.commands.key_value.get import GetKeyValueCommand from superset.explore.permalink.exceptions import ExplorePermalinkGetFailedError from superset.explore.permalink.types import ExplorePermalinkValue from superset.explore.utils import check_access as check_chart_access -from superset.key_value.commands.get import GetKeyValueCommand from superset.key_value.exceptions import ( KeyValueCodecDecodeException, KeyValueGetFailedError, diff --git a/superset/commands/export/assets.py b/superset/commands/export/assets.py index 1bd2cf6d61..61d805acaf 100644 --- a/superset/commands/export/assets.py +++ b/superset/commands/export/assets.py @@ -20,12 +20,12 @@ from datetime import 
datetime, timezone import yaml -from superset.charts.commands.export import ExportChartsCommand from superset.commands.base import BaseCommand -from superset.dashboards.commands.export import ExportDashboardsCommand -from superset.databases.commands.export import ExportDatabasesCommand -from superset.datasets.commands.export import ExportDatasetsCommand -from superset.queries.saved_queries.commands.export import ExportSavedQueriesCommand +from superset.commands.chart.export import ExportChartsCommand +from superset.commands.dashboard.export import ExportDashboardsCommand +from superset.commands.database.export import ExportDatabasesCommand +from superset.commands.dataset.export import ExportDatasetsCommand +from superset.commands.query.export import ExportSavedQueriesCommand from superset.utils.dict_import_export import EXPORT_VERSION METADATA_FILE_NAME = "metadata.yaml" diff --git a/superset/commands/importers/v1/assets.py b/superset/commands/importers/v1/assets.py index 4c8971315c..b6bc29e0fa 100644 --- a/superset/commands/importers/v1/assets.py +++ b/superset/commands/importers/v1/assets.py @@ -22,29 +22,27 @@ from sqlalchemy.orm import Session from sqlalchemy.sql import delete, insert from superset import db -from superset.charts.commands.importers.v1.utils import import_chart from superset.charts.schemas import ImportV1ChartSchema from superset.commands.base import BaseCommand +from superset.commands.chart.importers.v1.utils import import_chart +from superset.commands.dashboard.importers.v1.utils import ( + find_chart_uuids, + import_dashboard, + update_id_refs, +) +from superset.commands.database.importers.v1.utils import import_database +from superset.commands.dataset.importers.v1.utils import import_dataset from superset.commands.exceptions import CommandInvalidError, ImportFailedError from superset.commands.importers.v1.utils import ( load_configs, load_metadata, validate_metadata_type, ) -from superset.dashboards.commands.importers.v1.utils import ( - find_chart_uuids, - import_dashboard, - update_id_refs, -) +from superset.commands.query.importers.v1.utils import import_saved_query from superset.dashboards.schemas import ImportV1DashboardSchema -from superset.databases.commands.importers.v1.utils import import_database from superset.databases.schemas import ImportV1DatabaseSchema -from superset.datasets.commands.importers.v1.utils import import_dataset from superset.datasets.schemas import ImportV1DatasetSchema from superset.models.dashboard import dashboard_slices -from superset.queries.saved_queries.commands.importers.v1.utils import ( - import_saved_query, -) from superset.queries.saved_queries.schemas import ImportV1SavedQuerySchema diff --git a/superset/commands/importers/v1/examples.py b/superset/commands/importers/v1/examples.py index 737be25f8a..94194921ac 100644 --- a/superset/commands/importers/v1/examples.py +++ b/superset/commands/importers/v1/examples.py @@ -22,24 +22,24 @@ from sqlalchemy.orm.exc import MultipleResultsFound from sqlalchemy.sql import select from superset import db -from superset.charts.commands.importers.v1 import ImportChartsCommand -from superset.charts.commands.importers.v1.utils import import_chart from superset.charts.schemas import ImportV1ChartSchema -from superset.commands.exceptions import CommandException -from superset.commands.importers.v1 import ImportModelsCommand -from superset.daos.base import BaseDAO -from superset.dashboards.commands.importers.v1 import ImportDashboardsCommand -from superset.dashboards.commands.importers.v1.utils 
import ( +from superset.commands.chart.importers.v1 import ImportChartsCommand +from superset.commands.chart.importers.v1.utils import import_chart +from superset.commands.dashboard.importers.v1 import ImportDashboardsCommand +from superset.commands.dashboard.importers.v1.utils import ( find_chart_uuids, import_dashboard, update_id_refs, ) +from superset.commands.database.importers.v1 import ImportDatabasesCommand +from superset.commands.database.importers.v1.utils import import_database +from superset.commands.dataset.importers.v1 import ImportDatasetsCommand +from superset.commands.dataset.importers.v1.utils import import_dataset +from superset.commands.exceptions import CommandException +from superset.commands.importers.v1 import ImportModelsCommand +from superset.daos.base import BaseDAO from superset.dashboards.schemas import ImportV1DashboardSchema -from superset.databases.commands.importers.v1 import ImportDatabasesCommand -from superset.databases.commands.importers.v1.utils import import_database from superset.databases.schemas import ImportV1DatabaseSchema -from superset.datasets.commands.importers.v1 import ImportDatasetsCommand -from superset.datasets.commands.importers.v1.utils import import_dataset from superset.datasets.schemas import ImportV1DatasetSchema from superset.models.dashboard import dashboard_slices from superset.utils.core import get_example_default_schema diff --git a/superset/explore/permalink/commands/__init__.py b/superset/commands/key_value/__init__.py similarity index 100% rename from superset/explore/permalink/commands/__init__.py rename to superset/commands/key_value/__init__.py diff --git a/superset/key_value/commands/create.py b/superset/commands/key_value/create.py similarity index 100% rename from superset/key_value/commands/create.py rename to superset/commands/key_value/create.py diff --git a/superset/key_value/commands/delete.py b/superset/commands/key_value/delete.py similarity index 92% rename from superset/key_value/commands/delete.py rename to superset/commands/key_value/delete.py index b3cf84be07..8b9095c09c 100644 --- a/superset/key_value/commands/delete.py +++ b/superset/commands/key_value/delete.py @@ -57,13 +57,7 @@ class DeleteKeyValueCommand(BaseCommand): def delete(self) -> bool: filter_ = get_filter(self.resource, self.key) - entry = ( - db.session.query(KeyValueEntry) - .filter_by(**filter_) - .autoflush(False) - .first() - ) - if entry: + if entry := db.session.query(KeyValueEntry).filter_by(**filter_).first(): db.session.delete(entry) db.session.commit() return True diff --git a/superset/key_value/commands/delete_expired.py b/superset/commands/key_value/delete_expired.py similarity index 100% rename from superset/key_value/commands/delete_expired.py rename to superset/commands/key_value/delete_expired.py diff --git a/superset/key_value/commands/get.py b/superset/commands/key_value/get.py similarity index 93% rename from superset/key_value/commands/get.py rename to superset/commands/key_value/get.py index 9d659f3bc7..8a7a250f1c 100644 --- a/superset/key_value/commands/get.py +++ b/superset/commands/key_value/get.py @@ -66,12 +66,7 @@ class GetKeyValueCommand(BaseCommand): def get(self) -> Optional[Any]: filter_ = get_filter(self.resource, self.key) - entry = ( - db.session.query(KeyValueEntry) - .filter_by(**filter_) - .autoflush(False) - .first() - ) + entry = db.session.query(KeyValueEntry).filter_by(**filter_).first() if entry and (entry.expires_on is None or entry.expires_on > datetime.now()): return self.codec.decode(entry.value) 
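[Editor's note] Beyond the renames, the key-value commands above drop the `.autoflush(False)` query modifier and, in `delete()`, fold the fetch-and-check into a single assignment expression. A dict-based analogy of that walrus-operator pattern (plain Python, not the real SQLAlchemy query):

```python
# Dict-based analogy for the bind-and-test pattern now used in
# DeleteKeyValueCommand.delete; the store is a stand-in for the table.
store: dict[str, bytes] = {"resource:key": b"payload"}


def delete(key: str) -> bool:
    # The walrus operator binds `entry` and tests it in one expression,
    # replacing the previous assign-then-`if entry:` two-step.
    if entry := store.pop(key, None):
        print(f"deleted {len(entry)} bytes")
        return True
    return False


assert delete("resource:key") is True
assert delete("resource:key") is False
```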
return None diff --git a/superset/key_value/commands/update.py b/superset/commands/key_value/update.py similarity index 94% rename from superset/key_value/commands/update.py rename to superset/commands/key_value/update.py index becd6d9ca8..ca940adf60 100644 --- a/superset/key_value/commands/update.py +++ b/superset/commands/key_value/update.py @@ -77,17 +77,13 @@ class UpdateKeyValueCommand(BaseCommand): def update(self) -> Optional[Key]: filter_ = get_filter(self.resource, self.key) entry: KeyValueEntry = ( - db.session.query(KeyValueEntry) - .filter_by(**filter_) - .autoflush(False) - .first() + db.session.query(KeyValueEntry).filter_by(**filter_).first() ) if entry: entry.value = self.codec.encode(self.value) entry.expires_on = self.expires_on entry.changed_on = datetime.now() entry.changed_by_fk = get_user_id() - db.session.merge(entry) db.session.commit() return Key(id=entry.id, uuid=entry.uuid) diff --git a/superset/key_value/commands/upsert.py b/superset/commands/key_value/upsert.py similarity index 93% rename from superset/key_value/commands/upsert.py rename to superset/commands/key_value/upsert.py index c5668f1161..84f02cb9cd 100644 --- a/superset/key_value/commands/upsert.py +++ b/superset/commands/key_value/upsert.py @@ -24,7 +24,7 @@ from sqlalchemy.exc import SQLAlchemyError from superset import db from superset.commands.base import BaseCommand -from superset.key_value.commands.create import CreateKeyValueCommand +from superset.commands.key_value.create import CreateKeyValueCommand from superset.key_value.exceptions import ( KeyValueCreateFailedError, KeyValueUpsertFailedError, @@ -81,17 +81,13 @@ class UpsertKeyValueCommand(BaseCommand): def upsert(self) -> Key: filter_ = get_filter(self.resource, self.key) entry: KeyValueEntry = ( - db.session.query(KeyValueEntry) - .filter_by(**filter_) - .autoflush(False) - .first() + db.session.query(KeyValueEntry).filter_by(**filter_).first() ) if entry: entry.value = self.codec.encode(self.value) entry.expires_on = self.expires_on entry.changed_on = datetime.now() entry.changed_by_fk = get_user_id() - db.session.merge(entry) db.session.commit() return Key(entry.id, entry.uuid) diff --git a/superset/key_value/commands/__init__.py b/superset/commands/query/__init__.py similarity index 100% rename from superset/key_value/commands/__init__.py rename to superset/commands/query/__init__.py diff --git a/superset/queries/saved_queries/commands/delete.py b/superset/commands/query/delete.py similarity index 96% rename from superset/queries/saved_queries/commands/delete.py rename to superset/commands/query/delete.py index 40b73658e0..978f30c5c4 100644 --- a/superset/queries/saved_queries/commands/delete.py +++ b/superset/commands/query/delete.py @@ -18,13 +18,13 @@ import logging from typing import Optional from superset.commands.base import BaseCommand -from superset.daos.exceptions import DAODeleteFailedError -from superset.daos.query import SavedQueryDAO -from superset.models.dashboard import Dashboard -from superset.queries.saved_queries.commands.exceptions import ( +from superset.commands.query.exceptions import ( SavedQueryDeleteFailedError, SavedQueryNotFoundError, ) +from superset.daos.exceptions import DAODeleteFailedError +from superset.daos.query import SavedQueryDAO +from superset.models.dashboard import Dashboard logger = logging.getLogger(__name__) diff --git a/superset/queries/saved_queries/commands/exceptions.py b/superset/commands/query/exceptions.py similarity index 100% rename from 
superset/queries/saved_queries/commands/exceptions.py rename to superset/commands/query/exceptions.py diff --git a/superset/queries/saved_queries/commands/export.py b/superset/commands/query/export.py similarity index 97% rename from superset/queries/saved_queries/commands/export.py rename to superset/commands/query/export.py index 1b85cda796..a8fa8acbf0 100644 --- a/superset/queries/saved_queries/commands/export.py +++ b/superset/commands/query/export.py @@ -25,7 +25,7 @@ from werkzeug.utils import secure_filename from superset.commands.export.models import ExportModelsCommand from superset.models.sql_lab import SavedQuery -from superset.queries.saved_queries.commands.exceptions import SavedQueryNotFoundError +from superset.commands.query.exceptions import SavedQueryNotFoundError from superset.daos.query import SavedQueryDAO from superset.utils.dict_import_export import EXPORT_VERSION diff --git a/superset/queries/saved_queries/commands/__init__.py b/superset/commands/query/importers/__init__.py similarity index 100% rename from superset/queries/saved_queries/commands/__init__.py rename to superset/commands/query/importers/__init__.py diff --git a/superset/queries/saved_queries/commands/importers/dispatcher.py b/superset/commands/query/importers/dispatcher.py similarity index 97% rename from superset/queries/saved_queries/commands/importers/dispatcher.py rename to superset/commands/query/importers/dispatcher.py index c2208f0e2a..438ea8351f 100644 --- a/superset/queries/saved_queries/commands/importers/dispatcher.py +++ b/superset/commands/query/importers/dispatcher.py @@ -23,7 +23,7 @@ from marshmallow.exceptions import ValidationError from superset.commands.base import BaseCommand from superset.commands.exceptions import CommandInvalidError from superset.commands.importers.exceptions import IncorrectVersionError -from superset.queries.saved_queries.commands.importers import v1 +from superset.commands.query.importers import v1 logger = logging.getLogger(__name__) diff --git a/superset/queries/saved_queries/commands/importers/v1/__init__.py b/superset/commands/query/importers/v1/__init__.py similarity index 91% rename from superset/queries/saved_queries/commands/importers/v1/__init__.py rename to superset/commands/query/importers/v1/__init__.py index c8a159c7f5..fa1f21b6fc 100644 --- a/superset/queries/saved_queries/commands/importers/v1/__init__.py +++ b/superset/commands/query/importers/v1/__init__.py @@ -20,15 +20,13 @@ from typing import Any from marshmallow import Schema from sqlalchemy.orm import Session +from superset.commands.database.importers.v1.utils import import_database from superset.commands.importers.v1 import ImportModelsCommand +from superset.commands.query.exceptions import SavedQueryImportError +from superset.commands.query.importers.v1.utils import import_saved_query from superset.connectors.sqla.models import SqlaTable from superset.daos.query import SavedQueryDAO -from superset.databases.commands.importers.v1.utils import import_database from superset.databases.schemas import ImportV1DatabaseSchema -from superset.queries.saved_queries.commands.exceptions import SavedQueryImportError -from superset.queries.saved_queries.commands.importers.v1.utils import ( - import_saved_query, -) from superset.queries.saved_queries.schemas import ImportV1SavedQuerySchema diff --git a/superset/queries/saved_queries/commands/importers/v1/utils.py b/superset/commands/query/importers/v1/utils.py similarity index 100% rename from 
superset/queries/saved_queries/commands/importers/v1/utils.py rename to superset/commands/query/importers/v1/utils.py diff --git a/superset/queries/saved_queries/commands/importers/__init__.py b/superset/commands/report/__init__.py similarity index 100% rename from superset/queries/saved_queries/commands/importers/__init__.py rename to superset/commands/report/__init__.py diff --git a/superset/reports/commands/alert.py b/superset/commands/report/alert.py similarity index 99% rename from superset/reports/commands/alert.py rename to superset/commands/report/alert.py index 2c36d3589c..68013a2c00 100644 --- a/superset/reports/commands/alert.py +++ b/superset/commands/report/alert.py @@ -29,7 +29,7 @@ from flask_babel import lazy_gettext as _ from superset import app, jinja_context, security_manager from superset.commands.base import BaseCommand -from superset.reports.commands.exceptions import ( +from superset.commands.report.exceptions import ( AlertQueryError, AlertQueryInvalidTypeError, AlertQueryMultipleColumnsError, diff --git a/superset/reports/commands/base.py b/superset/commands/report/base.py similarity index 98% rename from superset/reports/commands/base.py rename to superset/commands/report/base.py index da871ef17c..3b2f280816 100644 --- a/superset/reports/commands/base.py +++ b/superset/commands/report/base.py @@ -20,9 +20,7 @@ from typing import Any from marshmallow import ValidationError from superset.commands.base import BaseCommand -from superset.daos.chart import ChartDAO -from superset.daos.dashboard import DashboardDAO -from superset.reports.commands.exceptions import ( +from superset.commands.report.exceptions import ( ChartNotFoundValidationError, ChartNotSavedValidationError, DashboardNotFoundValidationError, @@ -30,6 +28,8 @@ from superset.reports.commands.exceptions import ( ReportScheduleEitherChartOrDashboardError, ReportScheduleOnlyChartOrDashboardError, ) +from superset.daos.chart import ChartDAO +from superset.daos.dashboard import DashboardDAO from superset.reports.models import ReportCreationMethod logger = logging.getLogger(__name__) diff --git a/superset/reports/commands/create.py b/superset/commands/report/create.py similarity index 97% rename from superset/reports/commands/create.py rename to superset/commands/report/create.py index 177e01c33b..aa9bfefc6e 100644 --- a/superset/reports/commands/create.py +++ b/superset/commands/report/create.py @@ -22,11 +22,8 @@ from flask_babel import gettext as _ from marshmallow import ValidationError from superset.commands.base import CreateMixin -from superset.daos.database import DatabaseDAO -from superset.daos.exceptions import DAOCreateFailedError -from superset.daos.report import ReportScheduleDAO -from superset.reports.commands.base import BaseReportScheduleCommand -from superset.reports.commands.exceptions import ( +from superset.commands.report.base import BaseReportScheduleCommand +from superset.commands.report.exceptions import ( DatabaseNotFoundValidationError, ReportScheduleAlertRequiredDatabaseValidationError, ReportScheduleCreateFailedError, @@ -35,6 +32,9 @@ from superset.reports.commands.exceptions import ( ReportScheduleNameUniquenessValidationError, ReportScheduleRequiredTypeValidationError, ) +from superset.daos.database import DatabaseDAO +from superset.daos.exceptions import DAOCreateFailedError +from superset.daos.report import ReportScheduleDAO from superset.reports.models import ( ReportCreationMethod, ReportSchedule, diff --git a/superset/reports/commands/delete.py 
b/superset/commands/report/delete.py similarity index 97% rename from superset/reports/commands/delete.py rename to superset/commands/report/delete.py index 2cdac17c4d..87ea4b99dd 100644 --- a/superset/reports/commands/delete.py +++ b/superset/commands/report/delete.py @@ -19,14 +19,14 @@ from typing import Optional from superset import security_manager from superset.commands.base import BaseCommand -from superset.daos.exceptions import DAODeleteFailedError -from superset.daos.report import ReportScheduleDAO -from superset.exceptions import SupersetSecurityException -from superset.reports.commands.exceptions import ( +from superset.commands.report.exceptions import ( ReportScheduleDeleteFailedError, ReportScheduleForbiddenError, ReportScheduleNotFoundError, ) +from superset.daos.exceptions import DAODeleteFailedError +from superset.daos.report import ReportScheduleDAO +from superset.exceptions import SupersetSecurityException from superset.reports.models import ReportSchedule logger = logging.getLogger(__name__) diff --git a/superset/reports/commands/exceptions.py b/superset/commands/report/exceptions.py similarity index 100% rename from superset/reports/commands/exceptions.py rename to superset/commands/report/exceptions.py diff --git a/superset/reports/commands/execute.py b/superset/commands/report/execute.py similarity index 99% rename from superset/reports/commands/execute.py rename to superset/commands/report/execute.py index 301bac4531..d4b53e30dd 100644 --- a/superset/reports/commands/execute.py +++ b/superset/commands/report/execute.py @@ -26,20 +26,10 @@ from sqlalchemy.orm import Session from superset import app, security_manager from superset.commands.base import BaseCommand +from superset.commands.dashboard.permalink.create import CreateDashboardPermalinkCommand from superset.commands.exceptions import CommandException -from superset.common.chart_data import ChartDataResultFormat, ChartDataResultType -from superset.daos.report import ( - REPORT_SCHEDULE_ERROR_NOTIFICATION_MARKER, - ReportScheduleDAO, -) -from superset.dashboards.permalink.commands.create import ( - CreateDashboardPermalinkCommand, -) -from superset.errors import ErrorLevel, SupersetError, SupersetErrorType -from superset.exceptions import SupersetErrorsException, SupersetException -from superset.extensions import feature_flag_manager, machine_auth_provider_factory -from superset.reports.commands.alert import AlertCommand -from superset.reports.commands.exceptions import ( +from superset.commands.report.alert import AlertCommand +from superset.commands.report.exceptions import ( ReportScheduleAlertGracePeriodError, ReportScheduleClientErrorsException, ReportScheduleCsvFailedError, @@ -56,6 +46,14 @@ from superset.reports.commands.exceptions import ( ReportScheduleUnexpectedError, ReportScheduleWorkingTimeoutError, ) +from superset.common.chart_data import ChartDataResultFormat, ChartDataResultType +from superset.daos.report import ( + REPORT_SCHEDULE_ERROR_NOTIFICATION_MARKER, + ReportScheduleDAO, +) +from superset.errors import ErrorLevel, SupersetError, SupersetErrorType +from superset.exceptions import SupersetErrorsException, SupersetException +from superset.extensions import feature_flag_manager, machine_auth_provider_factory from superset.reports.models import ( ReportDataFormat, ReportExecutionLog, @@ -123,8 +121,6 @@ class BaseReportState: self._report_schedule.last_state = state self._report_schedule.last_eval_dttm = datetime.utcnow() - - self._session.merge(self._report_schedule) self._session.commit() 
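[Editor's note] The deleted `self._session.merge(self._report_schedule)` here, like the `db.session.merge(entry)` removals in the key-value update/upsert hunks earlier, was redundant: an object loaded through the same session is already tracked, and its pending attribute changes are flushed on commit. `merge()` matters only for detached instances. A small sketch of that behavior, assuming SQLAlchemy 1.4+:

```python
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class Schedule(Base):
    __tablename__ = "schedule"
    id = Column(Integer, primary_key=True)
    last_state = Column(String)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(Schedule(id=1, last_state="pending"))
    session.commit()

    # Loaded by THIS session: already tracked, so no merge() is needed.
    sched = session.get(Schedule, 1)
    sched.last_state = "success"
    session.commit()  # the UPDATE is flushed automatically

    assert session.get(Schedule, 1).last_state == "success"
```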
def create_log(self, error_message: Optional[str] = None) -> None: diff --git a/superset/reports/commands/log_prune.py b/superset/commands/report/log_prune.py similarity index 96% rename from superset/reports/commands/log_prune.py rename to superset/commands/report/log_prune.py index 09d9995414..3a9883c9f1 100644 --- a/superset/reports/commands/log_prune.py +++ b/superset/commands/report/log_prune.py @@ -18,9 +18,9 @@ import logging from datetime import datetime, timedelta from superset.commands.base import BaseCommand +from superset.commands.report.exceptions import ReportSchedulePruneLogError from superset.daos.exceptions import DAODeleteFailedError from superset.daos.report import ReportScheduleDAO -from superset.reports.commands.exceptions import ReportSchedulePruneLogError from superset.reports.models import ReportSchedule from superset.utils.celery import session_scope diff --git a/superset/reports/commands/update.py b/superset/commands/report/update.py similarity index 97% rename from superset/reports/commands/update.py rename to superset/commands/report/update.py index 7c3351e5ec..a33ba6b59a 100644 --- a/superset/reports/commands/update.py +++ b/superset/commands/report/update.py @@ -23,12 +23,8 @@ from marshmallow import ValidationError from superset import security_manager from superset.commands.base import UpdateMixin -from superset.daos.database import DatabaseDAO -from superset.daos.exceptions import DAOUpdateFailedError -from superset.daos.report import ReportScheduleDAO -from superset.exceptions import SupersetSecurityException -from superset.reports.commands.base import BaseReportScheduleCommand -from superset.reports.commands.exceptions import ( +from superset.commands.report.base import BaseReportScheduleCommand +from superset.commands.report.exceptions import ( DatabaseNotFoundValidationError, ReportScheduleForbiddenError, ReportScheduleInvalidError, @@ -36,6 +32,10 @@ from superset.reports.commands.exceptions import ( ReportScheduleNotFoundError, ReportScheduleUpdateFailedError, ) +from superset.daos.database import DatabaseDAO +from superset.daos.exceptions import DAOUpdateFailedError +from superset.daos.report import ReportScheduleDAO +from superset.exceptions import SupersetSecurityException from superset.reports.models import ReportSchedule, ReportScheduleType, ReportState logger = logging.getLogger(__name__) diff --git a/superset/reports/commands/__init__.py b/superset/commands/security/__init__.py similarity index 100% rename from superset/reports/commands/__init__.py rename to superset/commands/security/__init__.py diff --git a/superset/row_level_security/commands/create.py b/superset/commands/security/create.py similarity index 100% rename from superset/row_level_security/commands/create.py rename to superset/commands/security/create.py diff --git a/superset/row_level_security/commands/delete.py b/superset/commands/security/delete.py similarity index 96% rename from superset/row_level_security/commands/delete.py rename to superset/commands/security/delete.py index d669f7d90f..2c19c5f89b 100644 --- a/superset/row_level_security/commands/delete.py +++ b/superset/commands/security/delete.py @@ -18,13 +18,13 @@ import logging from superset.commands.base import BaseCommand -from superset.daos.exceptions import DAODeleteFailedError -from superset.daos.security import RLSDAO -from superset.reports.models import ReportSchedule -from superset.row_level_security.commands.exceptions import ( +from superset.commands.security.exceptions import ( RLSRuleNotFoundError, 
     RuleDeleteFailedError,
 )
+from superset.daos.exceptions import DAODeleteFailedError
+from superset.daos.security import RLSDAO
+from superset.reports.models import ReportSchedule

 logger = logging.getLogger(__name__)
diff --git a/superset/row_level_security/commands/exceptions.py b/superset/commands/security/exceptions.py
similarity index 100%
rename from superset/row_level_security/commands/exceptions.py
rename to superset/commands/security/exceptions.py
diff --git a/superset/row_level_security/commands/update.py b/superset/commands/security/update.py
similarity index 96%
rename from superset/row_level_security/commands/update.py
rename to superset/commands/security/update.py
index bc5ef368ba..f3a6cea607 100644
--- a/superset/row_level_security/commands/update.py
+++ b/superset/commands/security/update.py
@@ -21,12 +21,12 @@ from typing import Any, Optional

 from superset.commands.base import BaseCommand
 from superset.commands.exceptions import DatasourceNotFoundValidationError
+from superset.commands.security.exceptions import RLSRuleNotFoundError
 from superset.commands.utils import populate_roles
 from superset.connectors.sqla.models import RowLevelSecurityFilter, SqlaTable
 from superset.daos.exceptions import DAOUpdateFailedError
 from superset.daos.security import RLSDAO
 from superset.extensions import db
-from superset.row_level_security.commands.exceptions import RLSRuleNotFoundError

 logger = logging.getLogger(__name__)
diff --git a/superset/row_level_security/commands/__init__.py b/superset/commands/sql_lab/__init__.py
similarity index 100%
rename from superset/row_level_security/commands/__init__.py
rename to superset/commands/sql_lab/__init__.py
diff --git a/superset/sqllab/commands/estimate.py b/superset/commands/sql_lab/estimate.py
similarity index 100%
rename from superset/sqllab/commands/estimate.py
rename to superset/commands/sql_lab/estimate.py
diff --git a/superset/sqllab/commands/execute.py b/superset/commands/sql_lab/execute.py
similarity index 100%
rename from superset/sqllab/commands/execute.py
rename to superset/commands/sql_lab/execute.py
diff --git a/superset/sqllab/commands/export.py b/superset/commands/sql_lab/export.py
similarity index 100%
rename from superset/sqllab/commands/export.py
rename to superset/commands/sql_lab/export.py
diff --git a/superset/sqllab/commands/results.py b/superset/commands/sql_lab/results.py
similarity index 100%
rename from superset/sqllab/commands/results.py
rename to superset/commands/sql_lab/results.py
diff --git a/superset/sqllab/commands/__init__.py b/superset/commands/tag/__init__.py
similarity index 100%
rename from superset/sqllab/commands/__init__.py
rename to superset/commands/tag/__init__.py
diff --git a/superset/tags/commands/create.py b/superset/commands/tag/create.py
similarity index 92%
rename from superset/tags/commands/create.py
rename to superset/commands/tag/create.py
index cd3bcc176b..ea23b8d59d 100644
--- a/superset/tags/commands/create.py
+++ b/superset/commands/tag/create.py
@@ -19,18 +19,18 @@ from typing import Any

 from superset import db, security_manager
 from superset.commands.base import BaseCommand, CreateMixin
+from superset.commands.tag.exceptions import TagCreateFailedError, TagInvalidError
+from superset.commands.tag.utils import to_object_model, to_object_type
 from superset.daos.exceptions import DAOCreateFailedError
 from superset.daos.tag import TagDAO
 from superset.exceptions import SupersetSecurityException
-from superset.tags.commands.exceptions import TagCreateFailedError, TagInvalidError
-from superset.tags.commands.utils import to_object_model, to_object_type
-from superset.tags.models import ObjectTypes, TagTypes
+from superset.tags.models import ObjectType, TagType

 logger = logging.getLogger(__name__)


 class CreateCustomTagCommand(CreateMixin, BaseCommand):
-    def __init__(self, object_type: ObjectTypes, object_id: int, tags: list[str]):
+    def __init__(self, object_type: ObjectType, object_id: int, tags: list[str]):
         self._object_type = object_type
         self._object_id = object_id
         self._tags = tags
@@ -76,7 +76,7 @@ class CreateCustomTagWithRelationshipsCommand(CreateMixin, BaseCommand):
         try:
             tag_name = self._properties["name"]
-            tag = TagDAO.get_by_name(tag_name.strip(), TagTypes.custom)
+            tag = TagDAO.get_by_name(tag_name.strip(), TagType.custom)
             TagDAO.create_tag_relationship(
                 objects_to_tag=self._properties.get("objects_to_tag", []),
                 tag=tag,
diff --git a/superset/tags/commands/delete.py b/superset/commands/tag/delete.py
similarity index 94%
rename from superset/tags/commands/delete.py
rename to superset/commands/tag/delete.py
index 4b92e40ff5..c4f2239009 100644
--- a/superset/tags/commands/delete.py
+++ b/superset/commands/tag/delete.py
@@ -17,24 +17,24 @@ import logging

 from superset.commands.base import BaseCommand
-from superset.daos.exceptions import DAODeleteFailedError
-from superset.daos.tag import TagDAO
-from superset.tags.commands.exceptions import (
+from superset.commands.tag.exceptions import (
     TagDeleteFailedError,
     TaggedObjectDeleteFailedError,
     TaggedObjectNotFoundError,
     TagInvalidError,
     TagNotFoundError,
 )
-from superset.tags.commands.utils import to_object_type
-from superset.tags.models import ObjectTypes
+from superset.commands.tag.utils import to_object_type
+from superset.daos.exceptions import DAODeleteFailedError
+from superset.daos.tag import TagDAO
+from superset.tags.models import ObjectType
 from superset.views.base import DeleteMixin

 logger = logging.getLogger(__name__)


 class DeleteTaggedObjectCommand(DeleteMixin, BaseCommand):
-    def __init__(self, object_type: ObjectTypes, object_id: int, tag: str):
+    def __init__(self, object_type: ObjectType, object_id: int, tag: str):
         self._object_type = object_type
         self._object_id = object_id
         self._tag = tag
diff --git a/superset/tags/commands/exceptions.py b/superset/commands/tag/exceptions.py
similarity index 100%
rename from superset/tags/commands/exceptions.py
rename to superset/commands/tag/exceptions.py
diff --git a/superset/tags/commands/update.py b/superset/commands/tag/update.py
similarity index 94%
rename from superset/tags/commands/update.py
rename to superset/commands/tag/update.py
index 182376438b..431bf93c4d 100644
--- a/superset/tags/commands/update.py
+++ b/superset/commands/tag/update.py
@@ -21,9 +21,9 @@ from flask_appbuilder.models.sqla import Model

 from superset import db
 from superset.commands.base import BaseCommand, UpdateMixin
+from superset.commands.tag.exceptions import TagInvalidError, TagNotFoundError
+from superset.commands.tag.utils import to_object_type
 from superset.daos.tag import TagDAO
-from superset.tags.commands.exceptions import TagInvalidError, TagNotFoundError
-from superset.tags.commands.utils import to_object_type
 from superset.tags.models import Tag

 logger = logging.getLogger(__name__)
diff --git a/superset/tags/commands/utils.py b/superset/commands/tag/utils.py
similarity index 79%
rename from superset/tags/commands/utils.py
rename to superset/commands/tag/utils.py
index 028465d83a..c3929cc41b 100644
--- a/superset/tags/commands/utils.py
+++ b/superset/commands/tag/utils.py
@@ -23,25 +23,25 @@ from superset.daos.query import SavedQueryDAO
 from superset.models.dashboard import Dashboard
 from superset.models.slice import Slice
 from superset.models.sql_lab import SavedQuery
-from superset.tags.models import ObjectTypes
+from superset.tags.models import ObjectType


-def to_object_type(object_type: Union[ObjectTypes, int, str]) -> Optional[ObjectTypes]:
-    if isinstance(object_type, ObjectTypes):
+def to_object_type(object_type: Union[ObjectType, int, str]) -> Optional[ObjectType]:
+    if isinstance(object_type, ObjectType):
         return object_type
-    for type_ in ObjectTypes:
+    for type_ in ObjectType:
         if object_type in [type_.value, type_.name]:
             return type_
     return None


 def to_object_model(
-    object_type: ObjectTypes, object_id: int
+    object_type: ObjectType, object_id: int
 ) -> Optional[Union[Dashboard, SavedQuery, Slice]]:
-    if ObjectTypes.dashboard == object_type:
+    if ObjectType.dashboard == object_type:
         return DashboardDAO.find_by_id(object_id)
-    if ObjectTypes.query == object_type:
+    if ObjectType.query == object_type:
         return SavedQueryDAO.find_by_id(object_id)
-    if ObjectTypes.chart == object_type:
+    if ObjectType.chart == object_type:
         return ChartDAO.find_by_id(object_id)
     return None
diff --git a/superset/tags/commands/__init__.py b/superset/commands/temporary_cache/__init__.py
similarity index 100%
rename from superset/tags/commands/__init__.py
rename to superset/commands/temporary_cache/__init__.py
diff --git a/superset/temporary_cache/commands/create.py b/superset/commands/temporary_cache/create.py
similarity index 92%
rename from superset/temporary_cache/commands/create.py
rename to superset/commands/temporary_cache/create.py
index af3b5350f6..e43d48e54c 100644
--- a/superset/temporary_cache/commands/create.py
+++ b/superset/commands/temporary_cache/create.py
@@ -20,8 +20,8 @@ from abc import ABC, abstractmethod
 from sqlalchemy.exc import SQLAlchemyError

 from superset.commands.base import BaseCommand
-from superset.temporary_cache.commands.exceptions import TemporaryCacheCreateFailedError
-from superset.temporary_cache.commands.parameters import CommandParameters
+from superset.commands.temporary_cache.exceptions import TemporaryCacheCreateFailedError
+from superset.commands.temporary_cache.parameters import CommandParameters

 logger = logging.getLogger(__name__)
diff --git a/superset/temporary_cache/commands/delete.py b/superset/commands/temporary_cache/delete.py
similarity index 92%
rename from superset/temporary_cache/commands/delete.py
rename to superset/commands/temporary_cache/delete.py
index 1281c8debf..d35b184d87 100644
--- a/superset/temporary_cache/commands/delete.py
+++ b/superset/commands/temporary_cache/delete.py
@@ -20,8 +20,8 @@ from abc import ABC, abstractmethod
 from sqlalchemy.exc import SQLAlchemyError

 from superset.commands.base import BaseCommand
-from superset.temporary_cache.commands.exceptions import TemporaryCacheDeleteFailedError
-from superset.temporary_cache.commands.parameters import CommandParameters
+from superset.commands.temporary_cache.exceptions import TemporaryCacheDeleteFailedError
+from superset.commands.temporary_cache.parameters import CommandParameters

 logger = logging.getLogger(__name__)
diff --git a/superset/temporary_cache/commands/entry.py b/superset/commands/temporary_cache/entry.py
similarity index 100%
rename from superset/temporary_cache/commands/entry.py
rename to superset/commands/temporary_cache/entry.py
diff --git a/superset/temporary_cache/commands/exceptions.py b/superset/commands/temporary_cache/exceptions.py
similarity index 100%
rename from superset/temporary_cache/commands/exceptions.py
rename to superset/commands/temporary_cache/exceptions.py
diff --git a/superset/temporary_cache/commands/get.py b/superset/commands/temporary_cache/get.py
similarity index 92%
rename from superset/temporary_cache/commands/get.py
rename to superset/commands/temporary_cache/get.py
index 8c220b9c04..fa16977a8e 100644
--- a/superset/temporary_cache/commands/get.py
+++ b/superset/commands/temporary_cache/get.py
@@ -21,8 +21,8 @@ from typing import Optional
 from sqlalchemy.exc import SQLAlchemyError

 from superset.commands.base import BaseCommand
-from superset.temporary_cache.commands.exceptions import TemporaryCacheGetFailedError
-from superset.temporary_cache.commands.parameters import CommandParameters
+from superset.commands.temporary_cache.exceptions import TemporaryCacheGetFailedError
+from superset.commands.temporary_cache.parameters import CommandParameters

 logger = logging.getLogger(__name__)
diff --git a/superset/temporary_cache/commands/parameters.py b/superset/commands/temporary_cache/parameters.py
similarity index 100%
rename from superset/temporary_cache/commands/parameters.py
rename to superset/commands/temporary_cache/parameters.py
diff --git a/superset/temporary_cache/commands/update.py b/superset/commands/temporary_cache/update.py
similarity index 92%
rename from superset/temporary_cache/commands/update.py
rename to superset/commands/temporary_cache/update.py
index 92af8c14f2..90b1c3d48f 100644
--- a/superset/temporary_cache/commands/update.py
+++ b/superset/commands/temporary_cache/update.py
@@ -21,8 +21,8 @@ from typing import Optional
 from sqlalchemy.exc import SQLAlchemyError

 from superset.commands.base import BaseCommand
-from superset.temporary_cache.commands.exceptions import TemporaryCacheUpdateFailedError
-from superset.temporary_cache.commands.parameters import CommandParameters
+from superset.commands.temporary_cache.exceptions import TemporaryCacheUpdateFailedError
+from superset.commands.temporary_cache.parameters import CommandParameters

 logger = logging.getLogger(__name__)
diff --git a/superset/commands/utils.py b/superset/commands/utils.py
index 02b6b5f383..8cfeab3c11 100644
--- a/superset/commands/utils.py
+++ b/superset/commands/utils.py
@@ -33,7 +33,7 @@ from superset.extensions import db
 from superset.utils.core import DatasourceType, get_user_id

 if TYPE_CHECKING:
-    from superset.connectors.base.models import BaseDatasource
+    from superset.connectors.sqla.models import BaseDatasource


 def populate_owners(
diff --git a/superset/common/query_actions.py b/superset/common/query_actions.py
index 22c778b77b..d73a99d027 100644
--- a/superset/common/query_actions.py
+++ b/superset/common/query_actions.py
@@ -24,7 +24,7 @@ from flask_babel import _
 from superset import app
 from superset.common.chart_data import ChartDataResultType
 from superset.common.db_query_status import QueryStatus
-from superset.connectors.base.models import BaseDatasource
+from superset.connectors.sqla.models import BaseDatasource
 from superset.exceptions import QueryObjectValidationError
 from superset.utils.core import (
     extract_column_dtype,
diff --git a/superset/common/query_context.py b/superset/common/query_context.py
index 1a8d3c518b..4f517cd905 100644
--- a/superset/common/query_context.py
+++ b/superset/common/query_context.py
@@ -30,7 +30,7 @@ from superset.common.query_object import QueryObject
 from superset.models.slice import Slice

 if TYPE_CHECKING:
-    from superset.connectors.base.models import BaseDatasource
+    from superset.connectors.sqla.models import BaseDatasource
     from superset.models.helpers import QueryResult
diff --git a/superset/common/query_context_factory.py b/superset/common/query_context_factory.py
index d6510ccd9a..708907d4a9 100644
--- a/superset/common/query_context_factory.py
+++ b/superset/common/query_context_factory.py
@@ -29,7 +29,7 @@ from superset.models.slice import Slice
 from superset.utils.core import DatasourceDict, DatasourceType, is_adhoc_column

 if TYPE_CHECKING:
-    from superset.connectors.base.models import BaseDatasource
+    from superset.connectors.sqla.models import BaseDatasource

 config = app.config
diff --git a/superset/common/query_context_processor.py b/superset/common/query_context_processor.py
index 5a0468b671..5b1414d53b 100644
--- a/superset/common/query_context_processor.py
+++ b/superset/common/query_context_processor.py
@@ -36,9 +36,9 @@ from superset.common.utils.time_range_utils import (
     get_since_until_from_query_object,
     get_since_until_from_time_range,
 )
-from superset.connectors.base.models import BaseDatasource
+from superset.connectors.sqla.models import BaseDatasource
 from superset.constants import CacheRegion, TimeGrain
-from superset.daos.annotation import AnnotationLayerDAO
+from superset.daos.annotation_layer import AnnotationLayerDAO
 from superset.daos.chart import ChartDAO
 from superset.exceptions import (
     InvalidPostProcessingError,
@@ -682,7 +682,7 @@ class QueryContextProcessor:
         annotation_layer: dict[str, Any], force: bool
     ) -> dict[str, Any]:
         # pylint: disable=import-outside-toplevel
-        from superset.charts.data.commands.get_data_command import ChartDataCommand
+        from superset.commands.chart.data.get_data_command import ChartDataCommand

         if not (chart := ChartDAO.find_by_id(annotation_layer["value"])):
             raise QueryObjectValidationError(_("The chart does not exist"))
diff --git a/superset/common/query_object.py b/superset/common/query_object.py
index 1e826761ec..989df5775b 100644
--- a/superset/common/query_object.py
+++ b/superset/common/query_object.py
@@ -49,7 +49,7 @@ from superset.utils.core import (
 from superset.utils.hashing import md5_sha_from_dict

 if TYPE_CHECKING:
-    from superset.connectors.base.models import BaseDatasource
+    from superset.connectors.sqla.models import BaseDatasource

 logger = logging.getLogger(__name__)
diff --git a/superset/common/query_object_factory.py b/superset/common/query_object_factory.py
index d993eca279..d2aa140dfe 100644
--- a/superset/common/query_object_factory.py
+++ b/superset/common/query_object_factory.py
@@ -35,7 +35,7 @@ from superset.utils.core import (
 if TYPE_CHECKING:
     from sqlalchemy.orm import sessionmaker

-    from superset.connectors.base.models import BaseDatasource
+    from superset.connectors.sqla.models import BaseDatasource
     from superset.daos.datasource import DatasourceDAO
diff --git a/superset/common/tags.py b/superset/common/tags.py
index c7b06bdd4b..ce5c5ab195 100644
--- a/superset/common/tags.py
+++ b/superset/common/tags.py
@@ -22,7 +22,7 @@ from sqlalchemy.exc import IntegrityError
 from sqlalchemy.sql import and_, func, join, literal, select

 from superset.extensions import db
-from superset.tags.models import ObjectTypes, TagTypes
+from superset.tags.models import ObjectType, TagType


 def add_types_to_charts(
@@ -35,7 +35,7 @@
         [
             tag.c.id.label("tag_id"),
             slices.c.id.label("object_id"),
-            literal(ObjectTypes.chart.name).label("object_type"),
+            literal(ObjectType.chart.name).label("object_type"),
         ]
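Note on the rename that runs through the hunks above and below: the plural enums ObjectTypes and TagTypes become the singular ObjectType and TagType, while the members themselves (chart, dashboard, query, dataset; custom, type, owner) are untouched. A minimal caller-side sketch, with the surrounding assignments invented purely for illustration:

    from superset.tags.models import ObjectType, TagType

    # only the import and the enum class names change at call sites;
    # member names and values stay the same after the rename
    object_type = ObjectType.chart
    tag_type = TagType.custom
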
) .select_from( @@ -67,7 +67,7 @@ def add_types_to_dashboards( [ tag.c.id.label("tag_id"), dashboard_table.c.id.label("object_id"), - literal(ObjectTypes.dashboard.name).label("object_type"), + literal(ObjectType.dashboard.name).label("object_type"), ] ) .select_from( @@ -99,7 +99,7 @@ def add_types_to_saved_queries( [ tag.c.id.label("tag_id"), saved_query.c.id.label("object_id"), - literal(ObjectTypes.query.name).label("object_type"), + literal(ObjectType.query.name).label("object_type"), ] ) .select_from( @@ -131,7 +131,7 @@ def add_types_to_datasets( [ tag.c.id.label("tag_id"), tables.c.id.label("object_id"), - literal(ObjectTypes.dataset.name).label("object_type"), + literal(ObjectType.dataset.name).label("object_type"), ] ) .select_from( @@ -221,9 +221,9 @@ def add_types(metadata: MetaData) -> None: # add a tag for each object type insert = tag.insert() - for type_ in ObjectTypes.__members__: + for type_ in ObjectType.__members__: with contextlib.suppress(IntegrityError): # already exists - db.session.execute(insert, name=f"type:{type_}", type=TagTypes.type) + db.session.execute(insert, name=f"type:{type_}", type=TagType.type) add_types_to_charts(metadata, tag, tagged_object, columns) add_types_to_dashboards(metadata, tag, tagged_object, columns) @@ -241,7 +241,7 @@ def add_owners_to_charts( [ tag.c.id.label("tag_id"), slices.c.id.label("object_id"), - literal(ObjectTypes.chart.name).label("object_type"), + literal(ObjectType.chart.name).label("object_type"), ] ) .select_from( @@ -277,7 +277,7 @@ def add_owners_to_dashboards( [ tag.c.id.label("tag_id"), dashboard_table.c.id.label("object_id"), - literal(ObjectTypes.dashboard.name).label("object_type"), + literal(ObjectType.dashboard.name).label("object_type"), ] ) .select_from( @@ -313,7 +313,7 @@ def add_owners_to_saved_queries( [ tag.c.id.label("tag_id"), saved_query.c.id.label("object_id"), - literal(ObjectTypes.query.name).label("object_type"), + literal(ObjectType.query.name).label("object_type"), ] ) .select_from( @@ -349,7 +349,7 @@ def add_owners_to_datasets( [ tag.c.id.label("tag_id"), tables.c.id.label("object_id"), - literal(ObjectTypes.dataset.name).label("object_type"), + literal(ObjectType.dataset.name).label("object_type"), ] ) .select_from( @@ -444,7 +444,7 @@ def add_owners(metadata: MetaData) -> None: insert = tag.insert() for (id_,) in db.session.execute(ids): with contextlib.suppress(IntegrityError): # already exists - db.session.execute(insert, name=f"owner:{id_}", type=TagTypes.owner) + db.session.execute(insert, name=f"owner:{id_}", type=TagType.owner) add_owners_to_charts(metadata, tag, tagged_object, columns) add_owners_to_dashboards(metadata, tag, tagged_object, columns) add_owners_to_saved_queries(metadata, tag, tagged_object, columns) @@ -482,7 +482,7 @@ def add_favorites(metadata: MetaData) -> None: insert = tag.insert() for (id_,) in db.session.execute(ids): with contextlib.suppress(IntegrityError): # already exists - db.session.execute(insert, name=f"favorited_by:{id_}", type=TagTypes.type) + db.session.execute(insert, name=f"favorited_by:{id_}", type=TagType.type) favstars = ( select( [ diff --git a/superset/config.py b/superset/config.py index 401dfd2f3d..98f87e6f02 100644 --- a/superset/config.py +++ b/superset/config.py @@ -1442,6 +1442,7 @@ TALISMAN_CONFIG = { }, "content_security_policy_nonce_in": ["script-src"], "force_https": False, + "session_cookie_secure": False, } # React requires `eval` to work correctly in dev mode TALISMAN_DEV_CONFIG = { @@ -1463,6 +1464,7 @@ TALISMAN_DEV_CONFIG = { }, 
"content_security_policy_nonce_in": ["script-src"], "force_https": False, + "session_cookie_secure": False, } # diff --git a/superset/connectors/base/models.py b/superset/connectors/base/models.py deleted file mode 100644 index d5386c7a66..0000000000 --- a/superset/connectors/base/models.py +++ /dev/null @@ -1,776 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -from __future__ import annotations - -import builtins -import json -import logging -from collections.abc import Hashable -from datetime import datetime -from json.decoder import JSONDecodeError -from typing import Any, TYPE_CHECKING - -from flask_appbuilder.security.sqla.models import User -from flask_babel import gettext as __ -from sqlalchemy import and_, Boolean, Column, Integer, String, Text -from sqlalchemy.ext.declarative import declared_attr -from sqlalchemy.orm import foreign, Query, relationship, RelationshipProperty, Session -from sqlalchemy.sql import literal_column - -from superset import security_manager -from superset.constants import EMPTY_STRING, NULL_STRING -from superset.datasets.commands.exceptions import DatasetNotFoundError -from superset.models.helpers import AuditMixinNullable, ImportExportMixin, QueryResult -from superset.models.slice import Slice -from superset.superset_typing import ( - FilterValue, - FilterValues, - QueryObjectDict, - ResultSetColumnType, -) -from superset.utils import core as utils -from superset.utils.backports import StrEnum -from superset.utils.core import GenericDataType, MediumText - -if TYPE_CHECKING: - from superset.db_engine_specs.base import BaseEngineSpec - -logger = logging.getLogger(__name__) - -METRIC_FORM_DATA_PARAMS = [ - "metric", - "metric_2", - "metrics", - "metrics_b", - "percent_metrics", - "secondary_metric", - "size", - "timeseries_limit_metric", - "x", - "y", -] - -COLUMN_FORM_DATA_PARAMS = [ - "all_columns", - "all_columns_x", - "columns", - "entity", - "groupby", - "order_by_cols", - "series", -] - - -class DatasourceKind(StrEnum): - VIRTUAL = "virtual" - PHYSICAL = "physical" - - -class BaseDatasource( - AuditMixinNullable, ImportExportMixin -): # pylint: disable=too-many-public-methods - """A common interface to objects that are queryable - (tables and datasources)""" - - # --------------------------------------------------------------- - # class attributes to define when deriving BaseDatasource - # --------------------------------------------------------------- - __tablename__: str | None = None # {connector_name}_datasource - baselink: str | None = None # url portion pointing to ModelView endpoint - - @property - def column_class(self) -> type[BaseColumn]: - # link to derivative of BaseColumn - raise NotImplementedError() - - @property - def metric_class(self) -> type[BaseMetric]: - # link to derivative of BaseMetric - 
raise NotImplementedError() - - owner_class: User | None = None - - # Used to do code highlighting when displaying the query in the UI - query_language: str | None = None - - # Only some datasources support Row Level Security - is_rls_supported: bool = False - - @property - def name(self) -> str: - # can be a Column or a property pointing to one - raise NotImplementedError() - - # --------------------------------------------------------------- - - # Columns - id = Column(Integer, primary_key=True) - description = Column(Text) - default_endpoint = Column(Text) - is_featured = Column(Boolean, default=False) # TODO deprecating - filter_select_enabled = Column(Boolean, default=True) - offset = Column(Integer, default=0) - cache_timeout = Column(Integer) - params = Column(String(1000)) - perm = Column(String(1000)) - schema_perm = Column(String(1000)) - is_managed_externally = Column(Boolean, nullable=False, default=False) - external_url = Column(Text, nullable=True) - - sql: str | None = None - owners: list[User] - update_from_object_fields: list[str] - - extra_import_fields = ["is_managed_externally", "external_url"] - - @property - def kind(self) -> DatasourceKind: - return DatasourceKind.VIRTUAL if self.sql else DatasourceKind.PHYSICAL - - @property - def owners_data(self) -> list[dict[str, Any]]: - return [ - { - "first_name": o.first_name, - "last_name": o.last_name, - "username": o.username, - "id": o.id, - } - for o in self.owners - ] - - @property - def is_virtual(self) -> bool: - return self.kind == DatasourceKind.VIRTUAL - - @declared_attr - def slices(self) -> RelationshipProperty: - return relationship( - "Slice", - overlaps="table", - primaryjoin=lambda: and_( - foreign(Slice.datasource_id) == self.id, - foreign(Slice.datasource_type) == self.type, - ), - ) - - columns: list[BaseColumn] = [] - metrics: list[BaseMetric] = [] - - @property - def type(self) -> str: - raise NotImplementedError() - - @property - def uid(self) -> str: - """Unique id across datasource types""" - return f"{self.id}__{self.type}" - - @property - def column_names(self) -> list[str]: - return sorted([c.column_name for c in self.columns], key=lambda x: x or "") - - @property - def columns_types(self) -> dict[str, str]: - return {c.column_name: c.type for c in self.columns} - - @property - def main_dttm_col(self) -> str: - return "timestamp" - - @property - def datasource_name(self) -> str: - raise NotImplementedError() - - @property - def connection(self) -> str | None: - """String representing the context of the Datasource""" - return None - - @property - def schema(self) -> str | None: - """String representing the schema of the Datasource (if it applies)""" - return None - - @property - def filterable_column_names(self) -> list[str]: - return sorted([c.column_name for c in self.columns if c.filterable]) - - @property - def dttm_cols(self) -> list[str]: - return [] - - @property - def url(self) -> str: - return f"/{self.baselink}/edit/{self.id}" - - @property - def explore_url(self) -> str: - if self.default_endpoint: - return self.default_endpoint - return f"/explore/?datasource_type={self.type}&datasource_id={self.id}" - - @property - def column_formats(self) -> dict[str, str | None]: - return {m.metric_name: m.d3format for m in self.metrics if m.d3format} - - @property - def currency_formats(self) -> dict[str, dict[str, str | None] | None]: - return {m.metric_name: m.currency_json for m in self.metrics if m.currency_json} - - def add_missing_metrics(self, metrics: list[BaseMetric]) -> None: - 
existing_metrics = {m.metric_name for m in self.metrics} - for metric in metrics: - if metric.metric_name not in existing_metrics: - metric.table_id = self.id - self.metrics.append(metric) - - @property - def short_data(self) -> dict[str, Any]: - """Data representation of the datasource sent to the frontend""" - return { - "edit_url": self.url, - "id": self.id, - "uid": self.uid, - "schema": self.schema, - "name": self.name, - "type": self.type, - "connection": self.connection, - "creator": str(self.created_by), - } - - @property - def select_star(self) -> str | None: - pass - - @property - def order_by_choices(self) -> list[tuple[str, str]]: - choices = [] - # self.column_names return sorted column_names - for column_name in self.column_names: - column_name = str(column_name or "") - choices.append( - (json.dumps([column_name, True]), f"{column_name} " + __("[asc]")) - ) - choices.append( - (json.dumps([column_name, False]), f"{column_name} " + __("[desc]")) - ) - return choices - - @property - def verbose_map(self) -> dict[str, str]: - verb_map = {"__timestamp": "Time"} - verb_map.update( - {o.metric_name: o.verbose_name or o.metric_name for o in self.metrics} - ) - verb_map.update( - {o.column_name: o.verbose_name or o.column_name for o in self.columns} - ) - return verb_map - - @property - def data(self) -> dict[str, Any]: - """Data representation of the datasource sent to the frontend""" - return { - # simple fields - "id": self.id, - "uid": self.uid, - "column_formats": self.column_formats, - "currency_formats": self.currency_formats, - "description": self.description, - "database": self.database.data, # pylint: disable=no-member - "default_endpoint": self.default_endpoint, - "filter_select": self.filter_select_enabled, # TODO deprecate - "filter_select_enabled": self.filter_select_enabled, - "name": self.name, - "datasource_name": self.datasource_name, - "table_name": self.datasource_name, - "type": self.type, - "schema": self.schema, - "offset": self.offset, - "cache_timeout": self.cache_timeout, - "params": self.params, - "perm": self.perm, - "edit_url": self.url, - # sqla-specific - "sql": self.sql, - # one to many - "columns": [o.data for o in self.columns], - "metrics": [o.data for o in self.metrics], - # TODO deprecate, move logic to JS - "order_by_choices": self.order_by_choices, - "owners": [owner.id for owner in self.owners], - "verbose_map": self.verbose_map, - "select_star": self.select_star, - } - - def data_for_slices( # pylint: disable=too-many-locals - self, slices: list[Slice] - ) -> dict[str, Any]: - """ - The representation of the datasource containing only the required data - to render the provided slices. - - Used to reduce the payload when loading a dashboard. 
- """ - data = self.data - metric_names = set() - column_names = set() - for slc in slices: - form_data = slc.form_data - # pull out all required metrics from the form_data - for metric_param in METRIC_FORM_DATA_PARAMS: - for metric in utils.as_list(form_data.get(metric_param) or []): - metric_names.add(utils.get_metric_name(metric)) - if utils.is_adhoc_metric(metric): - column = metric.get("column") or {} - if column_name := column.get("column_name"): - column_names.add(column_name) - - # Columns used in query filters - column_names.update( - filter_["subject"] - for filter_ in form_data.get("adhoc_filters") or [] - if filter_.get("clause") == "WHERE" and filter_.get("subject") - ) - - # columns used by Filter Box - column_names.update( - filter_config["column"] - for filter_config in form_data.get("filter_configs") or [] - if "column" in filter_config - ) - - # for legacy dashboard imports which have the wrong query_context in them - try: - query_context = slc.get_query_context() - except DatasetNotFoundError: - query_context = None - - # legacy charts don't have query_context charts - if query_context: - column_names.update( - [ - utils.get_column_name(column) - for query in query_context.queries - for column in query.columns - ] - or [] - ) - else: - _columns = [ - utils.get_column_name(column) - if utils.is_adhoc_column(column) - else column - for column_param in COLUMN_FORM_DATA_PARAMS - for column in utils.as_list(form_data.get(column_param) or []) - ] - column_names.update(_columns) - - filtered_metrics = [ - metric - for metric in data["metrics"] - if metric["metric_name"] in metric_names - ] - - filtered_columns: list[Column] = [] - column_types: set[GenericDataType] = set() - for column in data["columns"]: - generic_type = column.get("type_generic") - if generic_type is not None: - column_types.add(generic_type) - if column["column_name"] in column_names: - filtered_columns.append(column) - - data["column_types"] = list(column_types) - del data["description"] - data.update({"metrics": filtered_metrics}) - data.update({"columns": filtered_columns}) - verbose_map = {"__timestamp": "Time"} - verbose_map.update( - { - metric["metric_name"]: metric["verbose_name"] or metric["metric_name"] - for metric in filtered_metrics - } - ) - verbose_map.update( - { - column["column_name"]: column["verbose_name"] or column["column_name"] - for column in filtered_columns - } - ) - data["verbose_map"] = verbose_map - - return data - - @staticmethod - def filter_values_handler( # pylint: disable=too-many-arguments - values: FilterValues | None, - operator: str, - target_generic_type: GenericDataType, - target_native_type: str | None = None, - is_list_target: bool = False, - db_engine_spec: builtins.type[BaseEngineSpec] | None = None, - db_extra: dict[str, Any] | None = None, - ) -> FilterValues | None: - if values is None: - return None - - def handle_single_value(value: FilterValue | None) -> FilterValue | None: - if operator == utils.FilterOperator.TEMPORAL_RANGE: - return value - if ( - isinstance(value, (float, int)) - and target_generic_type == utils.GenericDataType.TEMPORAL - and target_native_type is not None - and db_engine_spec is not None - ): - value = db_engine_spec.convert_dttm( - target_type=target_native_type, - dttm=datetime.utcfromtimestamp(value / 1000), - db_extra=db_extra, - ) - value = literal_column(value) - if isinstance(value, str): - value = value.strip("\t\n") - - if ( - target_generic_type == utils.GenericDataType.NUMERIC - and operator - not in { - 
utils.FilterOperator.ILIKE, - utils.FilterOperator.LIKE, - } - ): - # For backwards compatibility and edge cases - # where a column data type might have changed - return utils.cast_to_num(value) - if value == NULL_STRING: - return None - if value == EMPTY_STRING: - return "" - if target_generic_type == utils.GenericDataType.BOOLEAN: - return utils.cast_to_boolean(value) - return value - - if isinstance(values, (list, tuple)): - values = [handle_single_value(v) for v in values] # type: ignore - else: - values = handle_single_value(values) - if is_list_target and not isinstance(values, (tuple, list)): - values = [values] # type: ignore - elif not is_list_target and isinstance(values, (tuple, list)): - values = values[0] if values else None - return values - - def external_metadata(self) -> list[ResultSetColumnType]: - """Returns column information from the external system""" - raise NotImplementedError() - - def get_query_str(self, query_obj: QueryObjectDict) -> str: - """Returns a query as a string - - This is used to be displayed to the user so that they can - understand what is taking place behind the scene""" - raise NotImplementedError() - - def query(self, query_obj: QueryObjectDict) -> QueryResult: - """Executes the query and returns a dataframe - - query_obj is a dictionary representing Superset's query interface. - Should return a ``superset.models.helpers.QueryResult`` - """ - raise NotImplementedError() - - def values_for_column(self, column_name: str, limit: int = 10000) -> list[Any]: - """Given a column, returns an iterable of distinct values - - This is used to populate the dropdown showing a list of - values in filters in the explore view""" - raise NotImplementedError() - - @staticmethod - def default_query(qry: Query) -> Query: - return qry - - def get_column(self, column_name: str | None) -> BaseColumn | None: - if not column_name: - return None - for col in self.columns: - if col.column_name == column_name: - return col - return None - - @staticmethod - def get_fk_many_from_list( - object_list: list[Any], - fkmany: list[Column], - fkmany_class: builtins.type[BaseColumn | BaseMetric], - key_attr: str, - ) -> list[Column]: - """Update ORM one-to-many list from object list - - Used for syncing metrics and columns using the same code""" - - object_dict = {o.get(key_attr): o for o in object_list} - - # delete fks that have been removed - fkmany = [o for o in fkmany if getattr(o, key_attr) in object_dict] - - # sync existing fks - for fk in fkmany: - obj = object_dict.get(getattr(fk, key_attr)) - if obj: - for attr in fkmany_class.update_from_object_fields: - setattr(fk, attr, obj.get(attr)) - - # create new fks - new_fks = [] - orm_keys = [getattr(o, key_attr) for o in fkmany] - for obj in object_list: - key = obj.get(key_attr) - if key not in orm_keys: - del obj["id"] - orm_kwargs = {} - for k in obj: - if k in fkmany_class.update_from_object_fields and k in obj: - orm_kwargs[k] = obj[k] - new_obj = fkmany_class(**orm_kwargs) - new_fks.append(new_obj) - fkmany += new_fks - return fkmany - - def update_from_object(self, obj: dict[str, Any]) -> None: - """Update datasource from a data structure - - The UI's table editor crafts a complex data structure that - contains most of the datasource's properties as well as - an array of metrics and columns objects. This method - receives the object from the UI and syncs the datasource to - match it. 
Since the fields are different for the different - connectors, the implementation uses ``update_from_object_fields`` - which can be defined for each connector and - defines which fields should be synced""" - for attr in self.update_from_object_fields: - setattr(self, attr, obj.get(attr)) - - self.owners = obj.get("owners", []) - - # Syncing metrics - metrics = ( - self.get_fk_many_from_list( - obj["metrics"], self.metrics, self.metric_class, "metric_name" - ) - if self.metric_class and "metrics" in obj - else [] - ) - self.metrics = metrics - - # Syncing columns - self.columns = ( - self.get_fk_many_from_list( - obj["columns"], self.columns, self.column_class, "column_name" - ) - if self.column_class and "columns" in obj - else [] - ) - - def get_extra_cache_keys( - self, query_obj: QueryObjectDict # pylint: disable=unused-argument - ) -> list[Hashable]: - """If a datasource needs to provide additional keys for calculation of - cache keys, those can be provided via this method - - :param query_obj: The dict representation of a query object - :return: list of keys - """ - return [] - - def __hash__(self) -> int: - return hash(self.uid) - - def __eq__(self, other: object) -> bool: - if not isinstance(other, BaseDatasource): - return NotImplemented - return self.uid == other.uid - - def raise_for_access(self) -> None: - """ - Raise an exception if the user cannot access the resource. - - :raises SupersetSecurityException: If the user cannot access the resource - """ - - security_manager.raise_for_access(datasource=self) - - @classmethod - def get_datasource_by_name( - cls, session: Session, datasource_name: str, schema: str, database_name: str - ) -> BaseDatasource | None: - raise NotImplementedError() - - -class BaseColumn(AuditMixinNullable, ImportExportMixin): - """Interface for column""" - - __tablename__: str | None = None # {connector_name}_column - - id = Column(Integer, primary_key=True) - column_name = Column(String(255), nullable=False) - verbose_name = Column(String(1024)) - is_active = Column(Boolean, default=True) - type = Column(Text) - advanced_data_type = Column(String(255)) - groupby = Column(Boolean, default=True) - filterable = Column(Boolean, default=True) - description = Column(MediumText()) - is_dttm = None - - # [optional] Set this to support import/export functionality - export_fields: list[Any] = [] - - def __repr__(self) -> str: - return str(self.column_name) - - bool_types = ("BOOL",) - num_types = ( - "DOUBLE", - "FLOAT", - "INT", - "BIGINT", - "NUMBER", - "LONG", - "REAL", - "NUMERIC", - "DECIMAL", - "MONEY", - ) - date_types = ("DATE", "TIME") - str_types = ("VARCHAR", "STRING", "CHAR") - - @property - def is_numeric(self) -> bool: - return self.type and any(map(lambda t: t in self.type.upper(), self.num_types)) - - @property - def is_temporal(self) -> bool: - return self.type and any(map(lambda t: t in self.type.upper(), self.date_types)) - - @property - def is_string(self) -> bool: - return self.type and any(map(lambda t: t in self.type.upper(), self.str_types)) - - @property - def is_boolean(self) -> bool: - return self.type and any(map(lambda t: t in self.type.upper(), self.bool_types)) - - @property - def type_generic(self) -> utils.GenericDataType | None: - if self.is_string: - return utils.GenericDataType.STRING - if self.is_boolean: - return utils.GenericDataType.BOOLEAN - if self.is_numeric: - return utils.GenericDataType.NUMERIC - if self.is_temporal: - return utils.GenericDataType.TEMPORAL - return None - - @property - def expression(self) -> Column: - 
raise NotImplementedError() - - @property - def python_date_format(self) -> Column: - raise NotImplementedError() - - @property - def data(self) -> dict[str, Any]: - attrs = ( - "id", - "column_name", - "verbose_name", - "description", - "expression", - "filterable", - "groupby", - "is_dttm", - "type", - "advanced_data_type", - ) - return {s: getattr(self, s) for s in attrs if hasattr(self, s)} - - -class BaseMetric(AuditMixinNullable, ImportExportMixin): - """Interface for Metrics""" - - __tablename__: str | None = None # {connector_name}_metric - - id = Column(Integer, primary_key=True) - metric_name = Column(String(255), nullable=False) - verbose_name = Column(String(1024)) - metric_type = Column(String(32)) - description = Column(MediumText()) - d3format = Column(String(128)) - currency = Column(String(128)) - warning_text = Column(Text) - - """ - The interface should also declare a datasource relationship pointing - to a derivative of BaseDatasource, along with a FK - - datasource_name = Column( - String(255), - ForeignKey('datasources.datasource_name')) - datasource = relationship( - # needs to be altered to point to {Connector}Datasource - 'BaseDatasource', - backref=backref('metrics', cascade='all, delete-orphan'), - enable_typechecks=False) - """ - - @property - def currency_json(self) -> dict[str, str | None] | None: - try: - return json.loads(self.currency or "{}") or None - except (TypeError, JSONDecodeError) as exc: - logger.error( - "Unable to load currency json: %r. Leaving empty.", exc, exc_info=True - ) - return None - - @property - def perm(self) -> str | None: - raise NotImplementedError() - - @property - def expression(self) -> Column: - raise NotImplementedError() - - @property - def data(self) -> dict[str, Any]: - attrs = ( - "id", - "metric_name", - "verbose_name", - "description", - "expression", - "warning_text", - "d3format", - "currency", - ) - return {s: getattr(self, s) for s in attrs} diff --git a/superset/connectors/base/views.py b/superset/connectors/base/views.py deleted file mode 100644 index ae5013ebbf..0000000000 --- a/superset/connectors/base/views.py +++ /dev/null @@ -1,48 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -from typing import Any - -from flask import Markup -from flask_appbuilder.fieldwidgets import BS3TextFieldWidget - -from superset.connectors.base.models import BaseDatasource -from superset.exceptions import SupersetException -from superset.views.base import SupersetModelView - - -class BS3TextFieldROWidget( # pylint: disable=too-few-public-methods - BS3TextFieldWidget -): - """ - Custom read only text field widget. 
- """ - - def __call__(self, field: Any, **kwargs: Any) -> Markup: - kwargs["readonly"] = "true" - return super().__call__(field, **kwargs) - - -class DatasourceModelView(SupersetModelView): - def pre_delete(self, item: BaseDatasource) -> None: - if item.slices: - raise SupersetException( - Markup( - "Cannot delete a datasource that has slices attached to it." - "Here's the list of associated charts: " - + "".join([i.slice_name for i in item.slices]) - ) - ) diff --git a/superset/connectors/sqla/models.py b/superset/connectors/sqla/models.py index e366940ff2..598bc6741b 100644 --- a/superset/connectors/sqla/models.py +++ b/superset/connectors/sqla/models.py @@ -17,6 +17,7 @@ # pylint: disable=too-many-lines from __future__ import annotations +import builtins import dataclasses import json import logging @@ -25,6 +26,7 @@ from collections import defaultdict from collections.abc import Hashable from dataclasses import dataclass, field from datetime import datetime, timedelta +from json.decoder import JSONDecodeError from typing import Any, Callable, cast import dateutil.parser @@ -34,7 +36,8 @@ import sqlalchemy as sa import sqlparse from flask import escape, Markup from flask_appbuilder import Model -from flask_babel import lazy_gettext as _ +from flask_appbuilder.security.sqla.models import User +from flask_babel import gettext as __, lazy_gettext as _ from jinja2.exceptions import TemplateError from sqlalchemy import ( and_, @@ -46,16 +49,17 @@ from sqlalchemy import ( inspect, Integer, or_, - select, String, Table, Text, update, ) from sqlalchemy.engine.base import Connection +from sqlalchemy.ext.declarative import declared_attr from sqlalchemy.ext.hybrid import hybrid_property from sqlalchemy.orm import ( backref, + foreign, Mapped, Query, reconstructor, @@ -71,13 +75,14 @@ from sqlalchemy.sql.expression import Label, TextAsFrom from sqlalchemy.sql.selectable import Alias, TableClause from superset import app, db, is_feature_enabled, security_manager +from superset.commands.dataset.exceptions import DatasetNotFoundError from superset.common.db_query_status import QueryStatus -from superset.connectors.base.models import BaseColumn, BaseDatasource, BaseMetric from superset.connectors.sqla.utils import ( get_columns_description, get_physical_table_metadata, get_virtual_table_metadata, ) +from superset.constants import EMPTY_STRING, NULL_STRING from superset.db_engine_specs.base import BaseEngineSpec, TimestampExpression from superset.exceptions import ( ColumnNotFoundException, @@ -98,19 +103,24 @@ from superset.models.helpers import ( AuditMixinNullable, CertificationMixin, ExploreMixin, + ImportExportMixin, QueryResult, QueryStringExtended, validate_adhoc_subquery, ) +from superset.models.slice import Slice from superset.sql_parse import ParsedQuery, sanitize_clause from superset.superset_typing import ( AdhocColumn, AdhocMetric, + FilterValue, + FilterValues, Metric, QueryObjectDict, ResultSetColumnType, ) from superset.utils import core as utils +from superset.utils.backports import StrEnum from superset.utils.core import GenericDataType, MediumText config = app.config @@ -135,6 +145,565 @@ class MetadataResult: modified: list[str] = field(default_factory=list) +logger = logging.getLogger(__name__) + +METRIC_FORM_DATA_PARAMS = [ + "metric", + "metric_2", + "metrics", + "metrics_b", + "percent_metrics", + "secondary_metric", + "size", + "timeseries_limit_metric", + "x", + "y", +] + +COLUMN_FORM_DATA_PARAMS = [ + "all_columns", + "all_columns_x", + "columns", + "entity", + "groupby", + 
"order_by_cols", + "series", +] + + +class DatasourceKind(StrEnum): + VIRTUAL = "virtual" + PHYSICAL = "physical" + + +class BaseDatasource( + AuditMixinNullable, ImportExportMixin +): # pylint: disable=too-many-public-methods + """A common interface to objects that are queryable + (tables and datasources)""" + + # --------------------------------------------------------------- + # class attributes to define when deriving BaseDatasource + # --------------------------------------------------------------- + __tablename__: str | None = None # {connector_name}_datasource + baselink: str | None = None # url portion pointing to ModelView endpoint + + owner_class: User | None = None + + # Used to do code highlighting when displaying the query in the UI + query_language: str | None = None + + # Only some datasources support Row Level Security + is_rls_supported: bool = False + + @property + def name(self) -> str: + # can be a Column or a property pointing to one + raise NotImplementedError() + + # --------------------------------------------------------------- + + # Columns + id = Column(Integer, primary_key=True) + description = Column(Text) + default_endpoint = Column(Text) + is_featured = Column(Boolean, default=False) # TODO deprecating + filter_select_enabled = Column(Boolean, default=True) + offset = Column(Integer, default=0) + cache_timeout = Column(Integer) + params = Column(String(1000)) + perm = Column(String(1000)) + schema_perm = Column(String(1000)) + is_managed_externally = Column(Boolean, nullable=False, default=False) + external_url = Column(Text, nullable=True) + + sql: str | None = None + owners: list[User] + update_from_object_fields: list[str] + + extra_import_fields = ["is_managed_externally", "external_url"] + + @property + def kind(self) -> DatasourceKind: + return DatasourceKind.VIRTUAL if self.sql else DatasourceKind.PHYSICAL + + @property + def owners_data(self) -> list[dict[str, Any]]: + return [ + { + "first_name": o.first_name, + "last_name": o.last_name, + "username": o.username, + "id": o.id, + } + for o in self.owners + ] + + @property + def is_virtual(self) -> bool: + return self.kind == DatasourceKind.VIRTUAL + + @declared_attr + def slices(self) -> RelationshipProperty: + return relationship( + "Slice", + overlaps="table", + primaryjoin=lambda: and_( + foreign(Slice.datasource_id) == self.id, + foreign(Slice.datasource_type) == self.type, + ), + ) + + columns: list[TableColumn] = [] + metrics: list[SqlMetric] = [] + + @property + def type(self) -> str: + raise NotImplementedError() + + @property + def uid(self) -> str: + """Unique id across datasource types""" + return f"{self.id}__{self.type}" + + @property + def column_names(self) -> list[str]: + return sorted([c.column_name for c in self.columns], key=lambda x: x or "") + + @property + def columns_types(self) -> dict[str, str]: + return {c.column_name: c.type for c in self.columns} + + @property + def main_dttm_col(self) -> str: + return "timestamp" + + @property + def datasource_name(self) -> str: + raise NotImplementedError() + + @property + def connection(self) -> str | None: + """String representing the context of the Datasource""" + return None + + @property + def schema(self) -> str | None: + """String representing the schema of the Datasource (if it applies)""" + return None + + @property + def filterable_column_names(self) -> list[str]: + return sorted([c.column_name for c in self.columns if c.filterable]) + + @property + def dttm_cols(self) -> list[str]: + return [] + + @property + def url(self) -> 
str: + return f"/{self.baselink}/edit/{self.id}" + + @property + def explore_url(self) -> str: + if self.default_endpoint: + return self.default_endpoint + return f"/explore/?datasource_type={self.type}&datasource_id={self.id}" + + @property + def column_formats(self) -> dict[str, str | None]: + return {m.metric_name: m.d3format for m in self.metrics if m.d3format} + + @property + def currency_formats(self) -> dict[str, dict[str, str | None] | None]: + return {m.metric_name: m.currency_json for m in self.metrics if m.currency_json} + + def add_missing_metrics(self, metrics: list[SqlMetric]) -> None: + existing_metrics = {m.metric_name for m in self.metrics} + for metric in metrics: + if metric.metric_name not in existing_metrics: + metric.table_id = self.id + self.metrics.append(metric) + + @property + def short_data(self) -> dict[str, Any]: + """Data representation of the datasource sent to the frontend""" + return { + "edit_url": self.url, + "id": self.id, + "uid": self.uid, + "schema": self.schema, + "name": self.name, + "type": self.type, + "connection": self.connection, + "creator": str(self.created_by), + } + + @property + def select_star(self) -> str | None: + pass + + @property + def order_by_choices(self) -> list[tuple[str, str]]: + choices = [] + # self.column_names return sorted column_names + for column_name in self.column_names: + column_name = str(column_name or "") + choices.append( + (json.dumps([column_name, True]), f"{column_name} " + __("[asc]")) + ) + choices.append( + (json.dumps([column_name, False]), f"{column_name} " + __("[desc]")) + ) + return choices + + @property + def verbose_map(self) -> dict[str, str]: + verb_map = {"__timestamp": "Time"} + verb_map.update( + {o.metric_name: o.verbose_name or o.metric_name for o in self.metrics} + ) + verb_map.update( + {o.column_name: o.verbose_name or o.column_name for o in self.columns} + ) + return verb_map + + @property + def data(self) -> dict[str, Any]: + """Data representation of the datasource sent to the frontend""" + return { + # simple fields + "id": self.id, + "uid": self.uid, + "column_formats": self.column_formats, + "currency_formats": self.currency_formats, + "description": self.description, + "database": self.database.data, # pylint: disable=no-member + "default_endpoint": self.default_endpoint, + "filter_select": self.filter_select_enabled, # TODO deprecate + "filter_select_enabled": self.filter_select_enabled, + "name": self.name, + "datasource_name": self.datasource_name, + "table_name": self.datasource_name, + "type": self.type, + "schema": self.schema, + "offset": self.offset, + "cache_timeout": self.cache_timeout, + "params": self.params, + "perm": self.perm, + "edit_url": self.url, + # sqla-specific + "sql": self.sql, + # one to many + "columns": [o.data for o in self.columns], + "metrics": [o.data for o in self.metrics], + # TODO deprecate, move logic to JS + "order_by_choices": self.order_by_choices, + "owners": [owner.id for owner in self.owners], + "verbose_map": self.verbose_map, + "select_star": self.select_star, + } + + def data_for_slices( # pylint: disable=too-many-locals + self, slices: list[Slice] + ) -> dict[str, Any]: + """ + The representation of the datasource containing only the required data + to render the provided slices. + + Used to reduce the payload when loading a dashboard. 
+ """ + data = self.data + metric_names = set() + column_names = set() + for slc in slices: + form_data = slc.form_data + # pull out all required metrics from the form_data + for metric_param in METRIC_FORM_DATA_PARAMS: + for metric in utils.as_list(form_data.get(metric_param) or []): + metric_names.add(utils.get_metric_name(metric)) + if utils.is_adhoc_metric(metric): + column_ = metric.get("column") or {} + if column_name := column_.get("column_name"): + column_names.add(column_name) + + # Columns used in query filters + column_names.update( + filter_["subject"] + for filter_ in form_data.get("adhoc_filters") or [] + if filter_.get("clause") == "WHERE" and filter_.get("subject") + ) + + # columns used by Filter Box + column_names.update( + filter_config["column"] + for filter_config in form_data.get("filter_configs") or [] + if "column" in filter_config + ) + + # for legacy dashboard imports which have the wrong query_context in them + try: + query_context = slc.get_query_context() + except DatasetNotFoundError: + query_context = None + + # legacy charts don't have query_context charts + if query_context: + column_names.update( + [ + utils.get_column_name(column_) + for query in query_context.queries + for column_ in query.columns + ] + or [] + ) + else: + _columns = [ + utils.get_column_name(column_) + if utils.is_adhoc_column(column_) + else column_ + for column_param in COLUMN_FORM_DATA_PARAMS + for column_ in utils.as_list(form_data.get(column_param) or []) + ] + column_names.update(_columns) + + filtered_metrics = [ + metric + for metric in data["metrics"] + if metric["metric_name"] in metric_names + ] + + filtered_columns: list[Column] = [] + column_types: set[GenericDataType] = set() + for column_ in data["columns"]: + generic_type = column_.get("type_generic") + if generic_type is not None: + column_types.add(generic_type) + if column_["column_name"] in column_names: + filtered_columns.append(column_) + + data["column_types"] = list(column_types) + del data["description"] + data.update({"metrics": filtered_metrics}) + data.update({"columns": filtered_columns}) + verbose_map = {"__timestamp": "Time"} + verbose_map.update( + { + metric["metric_name"]: metric["verbose_name"] or metric["metric_name"] + for metric in filtered_metrics + } + ) + verbose_map.update( + { + column_["column_name"]: column_["verbose_name"] + or column_["column_name"] + for column_ in filtered_columns + } + ) + data["verbose_map"] = verbose_map + + return data + + @staticmethod + def filter_values_handler( # pylint: disable=too-many-arguments + values: FilterValues | None, + operator: str, + target_generic_type: GenericDataType, + target_native_type: str | None = None, + is_list_target: bool = False, + db_engine_spec: builtins.type[BaseEngineSpec] | None = None, + db_extra: dict[str, Any] | None = None, + ) -> FilterValues | None: + if values is None: + return None + + def handle_single_value(value: FilterValue | None) -> FilterValue | None: + if operator == utils.FilterOperator.TEMPORAL_RANGE: + return value + if ( + isinstance(value, (float, int)) + and target_generic_type == utils.GenericDataType.TEMPORAL + and target_native_type is not None + and db_engine_spec is not None + ): + value = db_engine_spec.convert_dttm( + target_type=target_native_type, + dttm=datetime.utcfromtimestamp(value / 1000), + db_extra=db_extra, + ) + value = literal_column(value) + if isinstance(value, str): + value = value.strip("\t\n") + + if ( + target_generic_type == utils.GenericDataType.NUMERIC + and operator + not in { + 
utils.FilterOperator.ILIKE, + utils.FilterOperator.LIKE, + } + ): + # For backwards compatibility and edge cases + # where a column data type might have changed + return utils.cast_to_num(value) + if value == NULL_STRING: + return None + if value == EMPTY_STRING: + return "" + if target_generic_type == utils.GenericDataType.BOOLEAN: + return utils.cast_to_boolean(value) + return value + + if isinstance(values, (list, tuple)): + values = [handle_single_value(v) for v in values] # type: ignore + else: + values = handle_single_value(values) + if is_list_target and not isinstance(values, (tuple, list)): + values = [values] # type: ignore + elif not is_list_target and isinstance(values, (tuple, list)): + values = values[0] if values else None + return values + + def external_metadata(self) -> list[ResultSetColumnType]: + """Returns column information from the external system""" + raise NotImplementedError() + + def get_query_str(self, query_obj: QueryObjectDict) -> str: + """Returns a query as a string + + This is used to be displayed to the user so that they can + understand what is taking place behind the scene""" + raise NotImplementedError() + + def query(self, query_obj: QueryObjectDict) -> QueryResult: + """Executes the query and returns a dataframe + + query_obj is a dictionary representing Superset's query interface. + Should return a ``superset.models.helpers.QueryResult`` + """ + raise NotImplementedError() + + @staticmethod + def default_query(qry: Query) -> Query: + return qry + + def get_column(self, column_name: str | None) -> TableColumn | None: + if not column_name: + return None + for col in self.columns: + if col.column_name == column_name: + return col + return None + + @staticmethod + def get_fk_many_from_list( + object_list: list[Any], + fkmany: list[Column], + fkmany_class: builtins.type[TableColumn | SqlMetric], + key_attr: str, + ) -> list[Column]: + """Update ORM one-to-many list from object list + + Used for syncing metrics and columns using the same code""" + + object_dict = {o.get(key_attr): o for o in object_list} + + # delete fks that have been removed + fkmany = [o for o in fkmany if getattr(o, key_attr) in object_dict] + + # sync existing fks + for fk in fkmany: + obj = object_dict.get(getattr(fk, key_attr)) + if obj: + for attr in fkmany_class.update_from_object_fields: + setattr(fk, attr, obj.get(attr)) + + # create new fks + new_fks = [] + orm_keys = [getattr(o, key_attr) for o in fkmany] + for obj in object_list: + key = obj.get(key_attr) + if key not in orm_keys: + del obj["id"] + orm_kwargs = {} + for k in obj: + if k in fkmany_class.update_from_object_fields and k in obj: + orm_kwargs[k] = obj[k] + new_obj = fkmany_class(**orm_kwargs) + new_fks.append(new_obj) + fkmany += new_fks + return fkmany + + def update_from_object(self, obj: dict[str, Any]) -> None: + """Update datasource from a data structure + + The UI's table editor crafts a complex data structure that + contains most of the datasource's properties as well as + an array of metrics and columns objects. This method + receives the object from the UI and syncs the datasource to + match it. 
Since the fields are different for the different + connectors, the implementation uses ``update_from_object_fields`` + which can be defined for each connector and + defines which fields should be synced""" + for attr in self.update_from_object_fields: + setattr(self, attr, obj.get(attr)) + + self.owners = obj.get("owners", []) + + # Syncing metrics + metrics = ( + self.get_fk_many_from_list( + obj["metrics"], self.metrics, SqlMetric, "metric_name" + ) + if "metrics" in obj + else [] + ) + self.metrics = metrics + + # Syncing columns + self.columns = ( + self.get_fk_many_from_list( + obj["columns"], self.columns, TableColumn, "column_name" + ) + if "columns" in obj + else [] + ) + + def get_extra_cache_keys( + self, query_obj: QueryObjectDict # pylint: disable=unused-argument + ) -> list[Hashable]: + """If a datasource needs to provide additional keys for calculation of + cache keys, those can be provided via this method + + :param query_obj: The dict representation of a query object + :return: list of keys + """ + return [] + + def __hash__(self) -> int: + return hash(self.uid) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, BaseDatasource): + return NotImplemented + return self.uid == other.uid + + def raise_for_access(self) -> None: + """ + Raise an exception if the user cannot access the resource. + + :raises SupersetSecurityException: If the user cannot access the resource + """ + + security_manager.raise_for_access(datasource=self) + + @classmethod + def get_datasource_by_name( + cls, session: Session, datasource_name: str, schema: str, database_name: str + ) -> BaseDatasource | None: + raise NotImplementedError() + + class AnnotationDatasource(BaseDatasource): """Dummy object so we can query annotations using 'Viz' objects just like regular datasources. 
@@ -188,22 +757,33 @@ class AnnotationDatasource(BaseDatasource): raise NotImplementedError() -class TableColumn(Model, BaseColumn, CertificationMixin): +class TableColumn(Model, AuditMixinNullable, ImportExportMixin, CertificationMixin): """ORM object for table columns, each table can have multiple columns""" __tablename__ = "table_columns" __table_args__ = (UniqueConstraint("table_id", "column_name"),) + + id = Column(Integer, primary_key=True) + column_name = Column(String(255), nullable=False) + verbose_name = Column(String(1024)) + is_active = Column(Boolean, default=True) + type = Column(Text) + advanced_data_type = Column(String(255)) + groupby = Column(Boolean, default=True) + filterable = Column(Boolean, default=True) + description = Column(MediumText()) table_id = Column(Integer, ForeignKey("tables.id", ondelete="CASCADE")) - table: Mapped[SqlaTable] = relationship( - "SqlaTable", - back_populates="columns", - ) is_dttm = Column(Boolean, default=False) expression = Column(MediumText()) python_date_format = Column(String(255)) extra = Column(Text) + table: Mapped[SqlaTable] = relationship( + "SqlaTable", + back_populates="columns", + ) + export_fields = [ "table_id", "column_name", @@ -247,6 +827,9 @@ class TableColumn(Model, BaseColumn, CertificationMixin): self._database = None + def __repr__(self) -> str: + return str(self.column_name) + @property def is_boolean(self) -> bool: """ @@ -285,7 +868,7 @@ class TableColumn(Model, BaseColumn, CertificationMixin): return self.table.database if self.table else self._database @property - def db_engine_spec(self) -> type[BaseEngineSpec]: + def db_engine_spec(self) -> builtins.type[BaseEngineSpec]: return self.database.db_engine_spec @property @@ -367,44 +950,50 @@ class TableColumn(Model, BaseColumn, CertificationMixin): @property def data(self) -> dict[str, Any]: attrs = ( - "id", + "advanced_data_type", + "certification_details", + "certified_by", "column_name", - "verbose_name", "description", "expression", "filterable", "groupby", + "id", + "is_certified", "is_dttm", + "python_date_format", "type", "type_generic", - "advanced_data_type", - "python_date_format", - "is_certified", - "certified_by", - "certification_details", + "verbose_name", "warning_markdown", ) - attr_dict = {s: getattr(self, s) for s in attrs if hasattr(self, s)} - - attr_dict.update(super().data) - - return attr_dict + return {s: getattr(self, s) for s in attrs if hasattr(self, s)} -class SqlMetric(Model, BaseMetric, CertificationMixin): +class SqlMetric(Model, AuditMixinNullable, ImportExportMixin, CertificationMixin): """ORM object for metrics, each table can have multiple metrics""" __tablename__ = "sql_metrics" __table_args__ = (UniqueConstraint("table_id", "metric_name"),) + + id = Column(Integer, primary_key=True) + metric_name = Column(String(255), nullable=False) + verbose_name = Column(String(1024)) + metric_type = Column(String(32)) + description = Column(MediumText()) + d3format = Column(String(128)) + currency = Column(String(128)) + warning_text = Column(Text) table_id = Column(Integer, ForeignKey("tables.id", ondelete="CASCADE")) + expression = Column(MediumText(), nullable=False) + extra = Column(Text) + table: Mapped[SqlaTable] = relationship( "SqlaTable", back_populates="metrics", ) - expression = Column(MediumText(), nullable=False) - extra = Column(Text) export_fields = [ "metric_name", @@ -450,18 +1039,34 @@ class SqlMetric(Model, BaseMetric, CertificationMixin): def get_perm(self) -> str | None: return self.perm + @property + def 
currency_json(self) -> dict[str, str | None] | None: + try: + return json.loads(self.currency or "{}") or None + except (TypeError, JSONDecodeError) as exc: + logger.error( + "Unable to load currency json: %r. Leaving empty.", exc, exc_info=True + ) + return None + @property def data(self) -> dict[str, Any]: attrs = ( - "is_certified", - "certified_by", "certification_details", + "certified_by", + "currency", + "d3format", + "description", + "expression", + "id", + "is_certified", + "metric_name", "warning_markdown", + "warning_text", + "verbose_name", ) - attr_dict = {s: getattr(self, s) for s in attrs} - attr_dict.update(super().data) - return attr_dict + return {s: getattr(self, s) for s in attrs} sqlatable_user = Table( @@ -793,34 +1398,6 @@ class SqlaTable( ) ) from ex - def values_for_column(self, column_name: str, limit: int = 10000) -> list[Any]: - """Runs query against sqla to retrieve some - sample values for the given column. - """ - cols = {col.column_name: col for col in self.columns} - target_col = cols[column_name] - tp = self.get_template_processor() - tbl, cte = self.get_from_clause(tp) - - qry = ( - select([target_col.get_sqla_col(template_processor=tp)]) - .select_from(tbl) - .distinct() - ) - if limit: - qry = qry.limit(limit) - - if self.fetch_values_predicate: - qry = qry.where(self.get_fetch_values_predicate(template_processor=tp)) - - with self.database.get_sqla_engine_with_context() as engine: - sql = qry.compile(engine, compile_kwargs={"literal_binds": True}) - sql = self._apply_cte(sql, cte) - sql = self.mutate_query_from_config(sql) - - df = pd.read_sql_query(sql=sql, con=engine) - return df[column_name].to_list() - def mutate_query_from_config(self, sql: str) -> str: """Apply config's SQL_QUERY_MUTATOR diff --git a/superset/connectors/sqla/views.py b/superset/connectors/sqla/views.py index 1ba10f18b2..36eebcb3f7 100644 --- a/superset/connectors/sqla/views.py +++ b/superset/connectors/sqla/views.py @@ -28,7 +28,6 @@ from flask_babel import lazy_gettext as _ from wtforms.validators import DataRequired, Regexp from superset import db -from superset.connectors.base.views import DatasourceModelView from superset.connectors.sqla import models from superset.constants import MODEL_VIEW_RW_METHOD_PERMISSION_MAP, RouteMethod from superset.superset_typing import FlaskResponse @@ -282,7 +281,7 @@ class RowLevelSecurityView(BaseSupersetView): class TableModelView( # pylint: disable=too-many-ancestors - DatasourceModelView, DeleteMixin, YamlExportMixin + SupersetModelView, DeleteMixin, YamlExportMixin ): datamodel = SQLAInterface(models.SqlaTable) class_permission_name = "Dataset" diff --git a/superset/css_templates/api.py b/superset/css_templates/api.py index ee5d5fac70..ac222da66f 100644 --- a/superset/css_templates/api.py +++ b/superset/css_templates/api.py @@ -22,12 +22,12 @@ from flask_appbuilder.api import expose, protect, rison, safe from flask_appbuilder.models.sqla.interface import SQLAInterface from flask_babel import ngettext -from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP, RouteMethod -from superset.css_templates.commands.delete import DeleteCssTemplateCommand -from superset.css_templates.commands.exceptions import ( +from superset.commands.css.delete import DeleteCssTemplateCommand +from superset.commands.css.exceptions import ( CssTemplateDeleteFailedError, CssTemplateNotFoundError, ) +from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP, RouteMethod from superset.css_templates.filters import CssTemplateAllTextFilter from 
superset.css_templates.schemas import ( get_delete_ids_schema, @@ -54,6 +54,10 @@ class CssTemplateRestApi(BaseSupersetModelRestApi): allow_browser_login = True show_columns = [ + "changed_on_delta_humanized", + "changed_by.first_name", + "changed_by.id", + "changed_by.last_name", "created_by.first_name", "created_by.id", "created_by.last_name", @@ -79,7 +83,7 @@ class CssTemplateRestApi(BaseSupersetModelRestApi): order_columns = ["template_name"] search_filters = {"template_name": [CssTemplateAllTextFilter]} - allowed_rel_fields = {"created_by"} + allowed_rel_fields = {"created_by", "changed_by"} apispec_parameter_schemas = { "get_delete_ids_schema": get_delete_ids_schema, diff --git a/superset/daos/annotation.py b/superset/daos/annotation_layer.py similarity index 100% rename from superset/daos/annotation.py rename to superset/daos/annotation_layer.py diff --git a/superset/daos/base.py b/superset/daos/base.py index d2c1842c17..1133a76a1e 100644 --- a/superset/daos/base.py +++ b/superset/daos/base.py @@ -16,7 +16,7 @@ # under the License. from __future__ import annotations -from typing import Any, cast, Generic, get_args, TypeVar +from typing import Any, Generic, get_args, TypeVar from flask_appbuilder.models.filters import BaseFilter from flask_appbuilder.models.sqla import Model @@ -30,7 +30,6 @@ from superset.daos.exceptions import ( DAOUpdateFailedError, ) from superset.extensions import db -from superset.utils.core import as_list T = TypeVar("T", bound=Model) @@ -197,9 +196,9 @@ class BaseDAO(Generic[T]): return item # type: ignore @classmethod - def delete(cls, item_or_items: T | list[T], commit: bool = True) -> None: + def delete(cls, items: list[T], commit: bool = True) -> None: """ - Delete the specified item(s) including their associated relationships. + Delete the specified items including their associated relationships. Note that bulk deletion via `delete` is not invoked in the base class as this does not dispatch the ORM `after_delete` event which may be required to augment @@ -209,12 +208,12 @@ class BaseDAO(Generic[T]): Subclasses may invoke bulk deletion but are responsible for instrumenting any post-deletion logic. 
- :param items: The item(s) to delete + :param items: The items to delete :param commit: Whether to commit the transaction :raises DAODeleteFailedError: If the deletion failed :see: https://docs.sqlalchemy.org/en/latest/orm/queryguide/dml.html """ - items = cast(list[T], as_list(item_or_items)) + try: for item in items: db.session.delete(item) diff --git a/superset/daos/chart.py b/superset/daos/chart.py index 7eae38cb0e..eb8b3e809e 100644 --- a/superset/daos/chart.py +++ b/superset/daos/chart.py @@ -28,7 +28,7 @@ from superset.models.slice import Slice from superset.utils.core import get_user_id if TYPE_CHECKING: - from superset.connectors.base.models import BaseDatasource + from superset.connectors.sqla.models import BaseDatasource logger = logging.getLogger(__name__) diff --git a/superset/daos/dashboard.py b/superset/daos/dashboard.py index 77f2dd9f34..b98252070d 100644 --- a/superset/daos/dashboard.py +++ b/superset/daos/dashboard.py @@ -25,12 +25,12 @@ from flask import g from flask_appbuilder.models.sqla.interface import SQLAInterface from superset import is_feature_enabled, security_manager -from superset.daos.base import BaseDAO -from superset.dashboards.commands.exceptions import ( +from superset.commands.dashboard.exceptions import ( DashboardAccessDeniedError, DashboardForbiddenError, DashboardNotFoundError, ) +from superset.daos.base import BaseDAO from superset.dashboards.filter_sets.consts import ( DASHBOARD_ID_FIELD, DESCRIPTION_FIELD, diff --git a/superset/daos/tag.py b/superset/daos/tag.py index 2acd221a35..60362bfbbd 100644 --- a/superset/daos/tag.py +++ b/superset/daos/tag.py @@ -21,6 +21,8 @@ from typing import Any, Optional from flask import g from sqlalchemy.exc import SQLAlchemyError +from superset.commands.tag.exceptions import TagNotFoundError +from superset.commands.tag.utils import to_object_type from superset.daos.base import BaseDAO from superset.daos.exceptions import DAOCreateFailedError, DAODeleteFailedError from superset.exceptions import MissingUserContextException @@ -28,14 +30,12 @@ from superset.extensions import db from superset.models.dashboard import Dashboard from superset.models.slice import Slice from superset.models.sql_lab import SavedQuery -from superset.tags.commands.exceptions import TagNotFoundError -from superset.tags.commands.utils import to_object_type from superset.tags.models import ( get_tag, - ObjectTypes, + ObjectType, Tag, TaggedObject, - TagTypes, + TagType, user_favorite_tag_table, ) from superset.utils.core import get_user_id @@ -56,7 +56,7 @@ class TagDAO(BaseDAO[Tag]): @staticmethod def create_custom_tagged_objects( - object_type: ObjectTypes, object_id: int, tag_names: list[str] + object_type: ObjectType, object_id: int, tag_names: list[str] ) -> None: tagged_objects = [] for name in tag_names: @@ -64,7 +64,7 @@ class TagDAO(BaseDAO[Tag]): raise DAOCreateFailedError( message="Invalid Tag Name (cannot contain ':' or ',')" ) - type_ = TagTypes.custom + type_ = TagType.custom tag_name = name.strip() tag = TagDAO.get_by_name(tag_name, type_) tagged_objects.append( @@ -76,7 +76,7 @@ class TagDAO(BaseDAO[Tag]): @staticmethod def delete_tagged_object( - object_type: ObjectTypes, object_id: int, tag_name: str + object_type: ObjectType, object_id: int, tag_name: str ) -> None: """ deletes a tagged object by the object_id, object_type, and tag_name @@ -128,7 +128,7 @@ class TagDAO(BaseDAO[Tag]): raise DAODeleteFailedError(exception=ex) from ex @staticmethod - def get_by_name(name: str, type_: TagTypes = TagTypes.custom) -> Tag: + def 
get_by_name(name: str, type_: TagType = TagType.custom) -> Tag: """ returns a tag if one exists by that name, none otherwise. important!: Creates a tag by that name if the tag is not found. @@ -152,7 +152,7 @@ class TagDAO(BaseDAO[Tag]): @staticmethod def find_tagged_object( - object_type: ObjectTypes, object_id: int, tag_id: int + object_type: ObjectType, object_id: int, tag_id: int ) -> TaggedObject: """ returns a tagged object if one exists by that name, none otherwise. @@ -167,6 +167,14 @@ class TagDAO(BaseDAO[Tag]): .first() ) + @staticmethod + def get_tagged_objects_by_tag_id( + tag_ids: Optional[list[int]], obj_types: Optional[list[str]] = None + ) -> list[dict[str, Any]]: + tags = db.session.query(Tag).filter(Tag.id.in_(tag_ids)).all() + tag_names = [tag.name for tag in tags] + return TagDAO.get_tagged_objects_for_tags(tag_names, obj_types) + @staticmethod def get_tagged_objects_for_tags( tags: Optional[list[str]] = None, obj_types: Optional[list[str]] = None @@ -185,7 +193,7 @@ class TagDAO(BaseDAO[Tag]): TaggedObject, and_( TaggedObject.object_id == Dashboard.id, - TaggedObject.object_type == ObjectTypes.dashboard, + TaggedObject.object_type == ObjectType.dashboard, ), ) .join(Tag, TaggedObject.tag_id == Tag.id) @@ -195,7 +203,7 @@ class TagDAO(BaseDAO[Tag]): results.extend( { "id": obj.id, - "type": ObjectTypes.dashboard.name, + "type": ObjectType.dashboard.name, "name": obj.dashboard_title, "url": obj.url, "changed_on": obj.changed_on, @@ -215,7 +223,7 @@ class TagDAO(BaseDAO[Tag]): TaggedObject, and_( TaggedObject.object_id == Slice.id, - TaggedObject.object_type == ObjectTypes.chart, + TaggedObject.object_type == ObjectType.chart, ), ) .join(Tag, TaggedObject.tag_id == Tag.id) @@ -224,7 +232,7 @@ class TagDAO(BaseDAO[Tag]): results.extend( { "id": obj.id, - "type": ObjectTypes.chart.name, + "type": ObjectType.chart.name, "name": obj.slice_name, "url": obj.url, "changed_on": obj.changed_on, @@ -244,7 +252,7 @@ class TagDAO(BaseDAO[Tag]): TaggedObject, and_( TaggedObject.object_id == SavedQuery.id, - TaggedObject.object_type == ObjectTypes.query, + TaggedObject.object_type == ObjectType.query, ), ) .join(Tag, TaggedObject.tag_id == Tag.id) @@ -253,7 +261,7 @@ class TagDAO(BaseDAO[Tag]): results.extend( { "id": obj.id, - "type": ObjectTypes.query.name, + "type": ObjectType.query.name, "name": obj.label, "url": obj.url(), "changed_on": obj.changed_on, @@ -363,7 +371,7 @@ class TagDAO(BaseDAO[Tag]): @staticmethod def create_tag_relationship( - objects_to_tag: list[tuple[ObjectTypes, int]], + objects_to_tag: list[tuple[ObjectType, int]], tag: Tag, bulk_create: bool = False, ) -> None: @@ -373,7 +381,7 @@ class TagDAO(BaseDAO[Tag]): and an id, and creates a TaggedObject for each one, associating it with the provided tag. All created TaggedObjects are collected in a list. Args: - objects_to_tag (List[Tuple[ObjectTypes, int]]): A list of tuples, each + objects_to_tag (List[Tuple[ObjectType, int]]): A list of tuples, each containing an ObjectType and an id, representing the objects to be tagged. tag (Tag): The tag to be associated with the specified objects. 
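
Two notes on the DAO changes above. First, the ``TagTypes``/``ObjectTypes`` enums are renamed to the singular ``TagType``/``ObjectType`` throughout. Second, recall the ``BaseDAO.delete`` hunk earlier: it now accepts only a list, so single-item callers must wrap their argument. A hedged sketch of the new calling convention (the DAO classes and ``find_by_id`` are real; the surrounding code is illustrative only):

    from superset.daos.dashboard import DashboardDAO

    dashboard = DashboardDAO.find_by_id(42)

    # Previously a bare model instance was accepted:
    #   DashboardDAO.delete(dashboard)
    # After this change, single items must be wrapped:
    DashboardDAO.delete([dashboard])

    # Collections pass through unchanged, which is why the
    # dashboards/api.py hunk below replaces its per-item loop with:
    #   EmbeddedDashboardDAO.delete(dashboard.embedded)
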
@@ -409,7 +417,9 @@ class TagDAO(BaseDAO[Tag]): for object_type, object_id in tagged_objects_to_delete: # delete objects that were removed TagDAO.delete_tagged_object( - object_type, object_id, tag.name # type: ignore + object_type, # type: ignore + object_id, + tag.name, ) db.session.add_all(tagged_objects) diff --git a/superset/dashboards/api.py b/superset/dashboards/api.py index b2aa43b0ee..cf75a644fb 100644 --- a/superset/dashboards/api.py +++ b/superset/dashboards/api.py @@ -35,13 +35,9 @@ from werkzeug.wsgi import FileWrapper from superset import is_feature_enabled, thumbnail_cache from superset.charts.schemas import ChartEntityResponseSchema -from superset.commands.importers.exceptions import NoValidFilesFoundError -from superset.commands.importers.v1.utils import get_contents_from_bundle -from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP, RouteMethod -from superset.daos.dashboard import DashboardDAO, EmbeddedDashboardDAO -from superset.dashboards.commands.create import CreateDashboardCommand -from superset.dashboards.commands.delete import DeleteDashboardCommand -from superset.dashboards.commands.exceptions import ( +from superset.commands.dashboard.create import CreateDashboardCommand +from superset.commands.dashboard.delete import DeleteDashboardCommand +from superset.commands.dashboard.exceptions import ( DashboardAccessDeniedError, DashboardCreateFailedError, DashboardDeleteFailedError, @@ -50,9 +46,13 @@ from superset.dashboards.commands.exceptions import ( DashboardNotFoundError, DashboardUpdateFailedError, ) -from superset.dashboards.commands.export import ExportDashboardsCommand -from superset.dashboards.commands.importers.dispatcher import ImportDashboardsCommand -from superset.dashboards.commands.update import UpdateDashboardCommand +from superset.commands.dashboard.export import ExportDashboardsCommand +from superset.commands.dashboard.importers.dispatcher import ImportDashboardsCommand +from superset.commands.dashboard.update import UpdateDashboardCommand +from superset.commands.importers.exceptions import NoValidFilesFoundError +from superset.commands.importers.v1.utils import get_contents_from_bundle +from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP, RouteMethod +from superset.daos.dashboard import DashboardDAO, EmbeddedDashboardDAO from superset.dashboards.filters import ( DashboardAccessFilter, DashboardCertifiedFilter, @@ -261,7 +261,7 @@ class DashboardRestApi(BaseSupersetModelRestApi): "roles": RelatedFieldFilter("name", FilterRelatedRoles), "created_by": RelatedFieldFilter("first_name", FilterRelatedOwners), } - allowed_rel_fields = {"owners", "roles", "created_by"} + allowed_rel_fields = {"owners", "roles", "created_by", "changed_by"} openapi_spec_tag = "Dashboards" """ Override the name set for this collection of endpoints """ @@ -1349,8 +1349,7 @@ class DashboardRestApi(BaseSupersetModelRestApi): 500: $ref: '#/components/responses/500' """ - for embedded in dashboard.embedded: - EmbeddedDashboardDAO.delete(embedded) + EmbeddedDashboardDAO.delete(dashboard.embedded) return self.response(200, message="OK") @expose("/<id_or_slug>/copy/", methods=("POST",)) diff --git a/superset/dashboards/filter_sets/api.py b/superset/dashboards/filter_sets/api.py index 5a2bf01923..ee7297ef4c 100644 --- a/superset/dashboards/filter_sets/api.py +++ b/superset/dashboards/filter_sets/api.py @@ -29,12 +29,10 @@ from flask_appbuilder.api import ( from flask_appbuilder.models.sqla.interface import SQLAInterface from marshmallow import ValidationError 
-from superset.commands.exceptions import ObjectNotFoundError -from superset.daos.dashboard import DashboardDAO -from superset.dashboards.commands.exceptions import DashboardNotFoundError -from superset.dashboards.filter_sets.commands.create import CreateFilterSetCommand -from superset.dashboards.filter_sets.commands.delete import DeleteFilterSetCommand -from superset.dashboards.filter_sets.commands.exceptions import ( +from superset.commands.dashboard.exceptions import DashboardNotFoundError +from superset.commands.dashboard.filter_set.create import CreateFilterSetCommand +from superset.commands.dashboard.filter_set.delete import DeleteFilterSetCommand +from superset.commands.dashboard.filter_set.exceptions import ( FilterSetCreateFailedError, FilterSetDeleteFailedError, FilterSetForbiddenError, @@ -42,7 +40,9 @@ from superset.dashboards.filter_sets.commands.exceptions import ( FilterSetUpdateFailedError, UserIsNotDashboardOwnerError, ) -from superset.dashboards.filter_sets.commands.update import UpdateFilterSetCommand +from superset.commands.dashboard.filter_set.update import UpdateFilterSetCommand +from superset.commands.exceptions import ObjectNotFoundError +from superset.daos.dashboard import DashboardDAO from superset.dashboards.filter_sets.consts import ( DASHBOARD_FIELD, DASHBOARD_ID_FIELD, diff --git a/superset/dashboards/filter_state/api.py b/superset/dashboards/filter_state/api.py index 9e0720646a..d3b6ce8f7a 100644 --- a/superset/dashboards/filter_state/api.py +++ b/superset/dashboards/filter_state/api.py @@ -19,10 +19,10 @@ import logging from flask import Response from flask_appbuilder.api import expose, protect, safe -from superset.dashboards.filter_state.commands.create import CreateFilterStateCommand -from superset.dashboards.filter_state.commands.delete import DeleteFilterStateCommand -from superset.dashboards.filter_state.commands.get import GetFilterStateCommand -from superset.dashboards.filter_state.commands.update import UpdateFilterStateCommand +from superset.commands.dashboard.filter_state.create import CreateFilterStateCommand +from superset.commands.dashboard.filter_state.delete import DeleteFilterStateCommand +from superset.commands.dashboard.filter_state.get import GetFilterStateCommand +from superset.commands.dashboard.filter_state.update import UpdateFilterStateCommand from superset.extensions import event_logger from superset.temporary_cache.api import TemporaryCacheRestApi diff --git a/superset/dashboards/permalink/api.py b/superset/dashboards/permalink/api.py index 0a786d1def..a6ae2910f4 100644 --- a/superset/dashboards/permalink/api.py +++ b/superset/dashboards/permalink/api.py @@ -20,15 +20,13 @@ from flask import request, Response from flask_appbuilder.api import expose, protect, safe from marshmallow import ValidationError -from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP -from superset.dashboards.commands.exceptions import ( +from superset.commands.dashboard.exceptions import ( DashboardAccessDeniedError, DashboardNotFoundError, ) -from superset.dashboards.permalink.commands.create import ( - CreateDashboardPermalinkCommand, -) -from superset.dashboards.permalink.commands.get import GetDashboardPermalinkCommand +from superset.commands.dashboard.permalink.create import CreateDashboardPermalinkCommand +from superset.commands.dashboard.permalink.get import GetDashboardPermalinkCommand +from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP from superset.dashboards.permalink.exceptions import DashboardPermalinkInvalidStateError 
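
The dominant pattern in this stretch of the diff is mechanical: command modules move out of per-domain packages (``superset.<domain>.commands.*``) into a central ``superset.commands.<domain>.*`` tree. For readers updating their own imports, a before/after pair taken from the hunks above (the old paths no longer resolve after this change):

    # Before:
    #   from superset.dashboards.commands.exceptions import DashboardNotFoundError
    #   from superset.dashboards.filter_sets.commands.create import CreateFilterSetCommand

    # After:
    from superset.commands.dashboard.exceptions import DashboardNotFoundError
    from superset.commands.dashboard.filter_set.create import CreateFilterSetCommand
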
from superset.dashboards.permalink.schemas import DashboardPermalinkStateSchema from superset.extensions import event_logger diff --git a/superset/dashboards/schemas.py b/superset/dashboards/schemas.py index e467167297..615d830d42 100644 --- a/superset/dashboards/schemas.py +++ b/superset/dashboards/schemas.py @@ -18,11 +18,12 @@ import json import re from typing import Any, Union -from marshmallow import fields, post_load, pre_load, Schema +from marshmallow import fields, post_dump, post_load, pre_load, Schema from marshmallow.validate import Length, ValidationError +from superset import security_manager from superset.exceptions import SupersetException -from superset.tags.models import TagTypes +from superset.tags.models import TagType from superset.utils import core as utils get_delete_ids_schema = {"type": "array", "items": {"type": "integer"}} @@ -169,7 +170,7 @@ class RolesSchema(Schema): class TagSchema(Schema): id = fields.Int() name = fields.String() - type = fields.Enum(TagTypes, by_value=True) + type = fields.Enum(TagType, by_value=True) class DashboardGetResponseSchema(Schema): @@ -198,6 +199,15 @@ class DashboardGetResponseSchema(Schema): changed_on_humanized = fields.String(data_key="changed_on_delta_humanized") is_managed_externally = fields.Boolean(allow_none=True, dump_default=False) + # pylint: disable=unused-argument + @post_dump() + def post_dump(self, serialized: dict[str, Any], **kwargs: Any) -> dict[str, Any]: + if security_manager.is_guest_user(): + del serialized["owners"] + del serialized["changed_by_name"] + del serialized["changed_by"] + return serialized + class DatabaseSchema(Schema): id = fields.Int() @@ -247,6 +257,14 @@ class DashboardDatasetSchema(Schema): normalize_columns = fields.Bool() always_filter_main_dttm = fields.Bool() + # pylint: disable=unused-argument + @post_dump() + def post_dump(self, serialized: dict[str, Any], **kwargs: Any) -> dict[str, Any]: + if security_manager.is_guest_user(): + del serialized["owners"] + del serialized["database"] + return serialized + class BaseDashboardSchema(Schema): # pylint: disable=unused-argument diff --git a/superset/databases/api.py b/superset/databases/api.py index 116e2ddb1f..8de84a16af 100644 --- a/superset/databases/api.py +++ b/superset/databases/api.py @@ -29,16 +29,9 @@ from marshmallow import ValidationError from sqlalchemy.exc import NoSuchTableError, OperationalError, SQLAlchemyError from superset import app, event_logger -from superset.commands.importers.exceptions import ( - IncorrectFormatError, - NoValidFilesFoundError, -) -from superset.commands.importers.v1.utils import get_contents_from_bundle -from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP, RouteMethod -from superset.daos.database import DatabaseDAO -from superset.databases.commands.create import CreateDatabaseCommand -from superset.databases.commands.delete import DeleteDatabaseCommand -from superset.databases.commands.exceptions import ( +from superset.commands.database.create import CreateDatabaseCommand +from superset.commands.database.delete import DeleteDatabaseCommand +from superset.commands.database.exceptions import ( DatabaseConnectionFailedError, DatabaseCreateFailedError, DatabaseDeleteDatasetsExistFailedError, @@ -49,13 +42,26 @@ from superset.databases.commands.exceptions import ( DatabaseUpdateFailedError, InvalidParametersError, ) -from superset.databases.commands.export import ExportDatabasesCommand -from superset.databases.commands.importers.dispatcher import ImportDatabasesCommand -from 
superset.databases.commands.tables import TablesDatabaseCommand -from superset.databases.commands.test_connection import TestConnectionDatabaseCommand -from superset.databases.commands.update import UpdateDatabaseCommand -from superset.databases.commands.validate import ValidateDatabaseParametersCommand -from superset.databases.commands.validate_sql import ValidateSQLCommand +from superset.commands.database.export import ExportDatabasesCommand +from superset.commands.database.importers.dispatcher import ImportDatabasesCommand +from superset.commands.database.ssh_tunnel.delete import DeleteSSHTunnelCommand +from superset.commands.database.ssh_tunnel.exceptions import ( + SSHTunnelDeleteFailedError, + SSHTunnelingNotEnabledError, + SSHTunnelNotFoundError, +) +from superset.commands.database.tables import TablesDatabaseCommand +from superset.commands.database.test_connection import TestConnectionDatabaseCommand +from superset.commands.database.update import UpdateDatabaseCommand +from superset.commands.database.validate import ValidateDatabaseParametersCommand +from superset.commands.database.validate_sql import ValidateSQLCommand +from superset.commands.importers.exceptions import ( + IncorrectFormatError, + NoValidFilesFoundError, +) +from superset.commands.importers.v1.utils import get_contents_from_bundle +from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP, RouteMethod +from superset.daos.database import DatabaseDAO from superset.databases.decorators import check_datasource_access from superset.databases.filters import DatabaseFilter, DatabaseUploadEnabledFilter from superset.databases.schemas import ( @@ -79,12 +85,6 @@ from superset.databases.schemas import ( ValidateSQLRequest, ValidateSQLResponse, ) -from superset.databases.ssh_tunnel.commands.delete import DeleteSSHTunnelCommand -from superset.databases.ssh_tunnel.commands.exceptions import ( - SSHTunnelDeleteFailedError, - SSHTunnelingNotEnabledError, - SSHTunnelNotFoundError, -) from superset.databases.utils import get_table_metadata from superset.db_engine_specs import get_available_engine_specs from superset.errors import ErrorLevel, SupersetError, SupersetErrorType @@ -111,6 +111,7 @@ class DatabaseRestApi(BaseSupersetModelRestApi): include_route_methods = RouteMethod.REST_MODEL_VIEW_CRUD_SET | { RouteMethod.EXPORT, RouteMethod.IMPORT, + RouteMethod.RELATED, "tables", "table_metadata", "table_extra_metadata", @@ -162,6 +163,8 @@ class DatabaseRestApi(BaseSupersetModelRestApi): "backend", "changed_on", "changed_on_delta_humanized", + "changed_by.first_name", + "changed_by.last_name", "created_by.first_name", "created_by.last_name", "database_name", @@ -194,7 +197,17 @@ class DatabaseRestApi(BaseSupersetModelRestApi): edit_columns = add_columns + search_columns = [ + "allow_file_upload", + "allow_dml", + "allow_run_async", + "created_by", + "changed_by", + "database_name", + "expose_in_sqllab", + ] search_filters = {"allow_file_upload": [DatabaseUploadEnabledFilter]} + allowed_rel_fields = {"changed_by", "created_by"} list_select_columns = list_columns + ["extra", "sqlalchemy_uri", "password"] order_columns = [ diff --git a/superset/databases/schemas.py b/superset/databases/schemas.py index abba9036a1..b56c98c5d6 100644 --- a/superset/databases/schemas.py +++ b/superset/databases/schemas.py @@ -28,13 +28,13 @@ from marshmallow.validate import Length, ValidationError from sqlalchemy import MetaData from superset import db, is_feature_enabled -from superset.constants import PASSWORD_MASK -from 
superset.databases.commands.exceptions import DatabaseInvalidError -from superset.databases.ssh_tunnel.commands.exceptions import ( +from superset.commands.database.exceptions import DatabaseInvalidError +from superset.commands.database.ssh_tunnel.exceptions import ( SSHTunnelingNotEnabledError, SSHTunnelInvalidCredentials, SSHTunnelMissingCredentials, ) +from superset.constants import PASSWORD_MASK from superset.databases.utils import make_url_safe from superset.db_engine_specs import get_engine_spec from superset.exceptions import CertificateException, SupersetSecurityException @@ -750,6 +750,7 @@ class ImportV1DatabaseExtraSchema(Schema): allows_virtual_table_explore = fields.Boolean(required=False) cancel_query_on_windows_unload = fields.Boolean(required=False) disable_data_preview = fields.Boolean(required=False) + version = fields.String(required=False, allow_none=True) class ImportV1DatabaseSchema(Schema): diff --git a/superset/databases/utils.py b/superset/databases/utils.py index fa163e4d9e..21abd7b9c2 100644 --- a/superset/databases/utils.py +++ b/superset/databases/utils.py @@ -18,7 +18,7 @@ from typing import Any, Optional, Union from sqlalchemy.engine.url import make_url, URL -from superset.databases.commands.exceptions import DatabaseInvalidError +from superset.commands.database.exceptions import DatabaseInvalidError def get_foreign_keys_metadata( diff --git a/superset/datasets/api.py b/superset/datasets/api.py index e1d7e5a09e..bc4a42e58e 100644 --- a/superset/datasets/api.py +++ b/superset/datasets/api.py @@ -30,17 +30,10 @@ from flask_babel import ngettext from marshmallow import ValidationError from superset import event_logger, is_feature_enabled -from superset.commands.exceptions import CommandException -from superset.commands.importers.exceptions import NoValidFilesFoundError -from superset.commands.importers.v1.utils import get_contents_from_bundle -from superset.connectors.sqla.models import SqlaTable -from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP, RouteMethod -from superset.daos.dataset import DatasetDAO -from superset.databases.filters import DatabaseFilter -from superset.datasets.commands.create import CreateDatasetCommand -from superset.datasets.commands.delete import DeleteDatasetCommand -from superset.datasets.commands.duplicate import DuplicateDatasetCommand -from superset.datasets.commands.exceptions import ( +from superset.commands.dataset.create import CreateDatasetCommand +from superset.commands.dataset.delete import DeleteDatasetCommand +from superset.commands.dataset.duplicate import DuplicateDatasetCommand +from superset.commands.dataset.exceptions import ( DatasetCreateFailedError, DatasetDeleteFailedError, DatasetForbiddenError, @@ -49,11 +42,18 @@ from superset.datasets.commands.exceptions import ( DatasetRefreshFailedError, DatasetUpdateFailedError, ) -from superset.datasets.commands.export import ExportDatasetsCommand -from superset.datasets.commands.importers.dispatcher import ImportDatasetsCommand -from superset.datasets.commands.refresh import RefreshDatasetCommand -from superset.datasets.commands.update import UpdateDatasetCommand -from superset.datasets.commands.warm_up_cache import DatasetWarmUpCacheCommand +from superset.commands.dataset.export import ExportDatasetsCommand +from superset.commands.dataset.importers.dispatcher import ImportDatasetsCommand +from superset.commands.dataset.refresh import RefreshDatasetCommand +from superset.commands.dataset.update import UpdateDatasetCommand +from 
superset.commands.dataset.warm_up_cache import DatasetWarmUpCacheCommand +from superset.commands.exceptions import CommandException +from superset.commands.importers.exceptions import NoValidFilesFoundError +from superset.commands.importers.v1.utils import get_contents_from_bundle +from superset.connectors.sqla.models import SqlaTable +from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP, RouteMethod +from superset.daos.dataset import DatasetDAO +from superset.databases.filters import DatabaseFilter from superset.datasets.filters import DatasetCertifiedFilter, DatasetIsNullOrEmptyFilter from superset.datasets.schemas import ( DatasetCacheWarmUpRequestSchema, @@ -247,8 +247,17 @@ class DatasetRestApi(BaseSupersetModelRestApi): "sql": [DatasetIsNullOrEmptyFilter], "id": [DatasetCertifiedFilter], } - search_columns = ["id", "database", "owners", "schema", "sql", "table_name"] - allowed_rel_fields = {"database", "owners"} + search_columns = [ + "id", + "database", + "owners", + "schema", + "sql", + "table_name", + "created_by", + "changed_by", + ] + allowed_rel_fields = {"database", "owners", "created_by", "changed_by"} allowed_distinct_fields = {"schema"} apispec_parameter_schemas = { diff --git a/superset/datasets/columns/api.py b/superset/datasets/columns/api.py index 0aafab5d39..90de0f7750 100644 --- a/superset/datasets/columns/api.py +++ b/superset/datasets/columns/api.py @@ -20,14 +20,14 @@ from flask import Response from flask_appbuilder.api import expose, permission_name, protect, safe from flask_appbuilder.models.sqla.interface import SQLAInterface -from superset.connectors.sqla.models import TableColumn -from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP -from superset.datasets.columns.commands.delete import DeleteDatasetColumnCommand -from superset.datasets.columns.commands.exceptions import ( +from superset.commands.dataset.columns.delete import DeleteDatasetColumnCommand +from superset.commands.dataset.columns.exceptions import ( DatasetColumnDeleteFailedError, DatasetColumnForbiddenError, DatasetColumnNotFoundError, ) +from superset.connectors.sqla.models import TableColumn +from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP from superset.views.base_api import BaseSupersetModelRestApi, statsd_metrics logger = logging.getLogger(__name__) diff --git a/superset/datasets/metrics/api.py b/superset/datasets/metrics/api.py index 28ec9474e2..aa29254fc0 100644 --- a/superset/datasets/metrics/api.py +++ b/superset/datasets/metrics/api.py @@ -20,14 +20,14 @@ from flask import Response from flask_appbuilder.api import expose, permission_name, protect, safe from flask_appbuilder.models.sqla.interface import SQLAInterface -from superset.connectors.sqla.models import TableColumn -from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP -from superset.datasets.metrics.commands.delete import DeleteDatasetMetricCommand -from superset.datasets.metrics.commands.exceptions import ( +from superset.commands.dataset.metrics.delete import DeleteDatasetMetricCommand +from superset.commands.dataset.metrics.exceptions import ( DatasetMetricDeleteFailedError, DatasetMetricForbiddenError, DatasetMetricNotFoundError, ) +from superset.connectors.sqla.models import TableColumn +from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP from superset.views.base_api import BaseSupersetModelRestApi, statsd_metrics logger = logging.getLogger(__name__) diff --git a/superset/datasource/api.py b/superset/datasource/api.py index 0c4338e349..6943d00bc7 100644 --- 
a/superset/datasource/api.py +++ b/superset/datasource/api.py @@ -115,11 +115,18 @@ class DatasourceRestApi(BaseSupersetApi): return self.response(403, message=ex.message) row_limit = apply_max_row_limit(app.config["FILTER_SELECT_ROW_LIMIT"]) + denormalize_column = not datasource.normalize_columns try: payload = datasource.values_for_column( - column_name=column_name, limit=row_limit + column_name=column_name, + limit=row_limit, + denormalize_column=denormalize_column, ) return self.response(200, result=payload) + except KeyError: + return self.response( + 400, message=f"Column name {column_name} does not exist" + ) except NotImplementedError: return self.response( 400, diff --git a/superset/db_engine_specs/base.py b/superset/db_engine_specs/base.py index 6bce03d931..9894232ab1 100644 --- a/superset/db_engine_specs/base.py +++ b/superset/db_engine_specs/base.py @@ -51,7 +51,7 @@ from sqlalchemy.engine.reflection import Inspector from sqlalchemy.engine.url import URL from sqlalchemy.ext.compiler import compiles from sqlalchemy.orm import Session -from sqlalchemy.sql import quoted_name, text +from sqlalchemy.sql import literal_column, quoted_name, text from sqlalchemy.sql.expression import ColumnClause, Select, TextAsFrom, TextClause from sqlalchemy.types import TypeEngine from sqlparse.tokens import CTE @@ -1322,8 +1322,12 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods return comment @classmethod - def get_columns( - cls, inspector: Inspector, table_name: str, schema: str | None + def get_columns( # pylint: disable=unused-argument + cls, + inspector: Inspector, + table_name: str, + schema: str | None, + options: dict[str, Any] | None = None, ) -> list[ResultSetColumnType]: """ Get all columns from a given schema and table @@ -1331,6 +1335,8 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods :param inspector: SqlAlchemy Inspector instance :param table_name: Table name :param schema: Schema name. If omitted, uses default schema for database + :param options: Extra options to customise the display of columns in + some databases :return: All columns in table """ return convert_inspector_columns( @@ -1382,7 +1388,12 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods @classmethod def _get_fields(cls, cols: list[ResultSetColumnType]) -> list[Any]: - return [column(c["column_name"]) for c in cols] + return [ + literal_column(query_as) + if (query_as := c.get("query_as")) + else column(c["column_name"]) + for c in cols + ] @classmethod def select_star( # pylint: disable=too-many-arguments,too-many-locals diff --git a/superset/db_engine_specs/doris.py b/superset/db_engine_specs/doris.py new file mode 100644 index 0000000000..e502f5bda2 --- /dev/null +++ b/superset/db_engine_specs/doris.py @@ -0,0 +1,278 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. +import logging +import re +from re import Pattern +from typing import Any, Optional +from urllib import parse + +from flask_babel import gettext as __ +from sqlalchemy import Float, Integer, Numeric, String, TEXT, types +from sqlalchemy.engine.url import URL +from sqlalchemy.sql.type_api import TypeEngine + +from superset.db_engine_specs.mysql import MySQLEngineSpec +from superset.errors import SupersetErrorType +from superset.utils.core import GenericDataType + +# Regular expressions to catch custom errors +CONNECTION_ACCESS_DENIED_REGEX = re.compile( + "Access denied for user '(?P<username>.*?)'" +) +CONNECTION_INVALID_HOSTNAME_REGEX = re.compile( + "Unknown Doris server host '(?P<hostname>.*?)'" +) +CONNECTION_UNKNOWN_DATABASE_REGEX = re.compile("Unknown database '(?P<database>.*?)'") +CONNECTION_HOST_DOWN_REGEX = re.compile( + "Can't connect to Doris server on '(?P<hostname>.*?)'" +) +SYNTAX_ERROR_REGEX = re.compile( + "check the manual that corresponds to your MySQL server " + "version for the right syntax to use near '(?P<server_error>.*)" +) + +logger = logging.getLogger(__name__) + + +class TINYINT(Integer): + __visit_name__ = "TINYINT" + + +class LARGEINT(Integer): + __visit_name__ = "LARGEINT" + + +class DOUBLE(Float): + __visit_name__ = "DOUBLE" + + +class HLL(Numeric): + __visit_name__ = "HLL" + + +class BITMAP(Numeric): + __visit_name__ = "BITMAP" + + +class QuantileState(Numeric): + __visit_name__ = "QUANTILE_STATE" + + +class AggState(Numeric): + __visit_name__ = "AGG_STATE" + + +class ARRAY(TypeEngine): + __visit_name__ = "ARRAY" + + @property + def python_type(self) -> Optional[type[list[Any]]]: + return list + + +class MAP(TypeEngine): + __visit_name__ = "MAP" + + @property + def python_type(self) -> Optional[type[dict[Any, Any]]]: + return dict + + +class STRUCT(TypeEngine): + __visit_name__ = "STRUCT" + + @property + def python_type(self) -> Optional[type[Any]]: + return None + + +class DorisEngineSpec(MySQLEngineSpec): + engine = "pydoris" + engine_aliases = {"doris"} + engine_name = "Apache Doris" + max_column_name_length = 64 + default_driver = "pydoris" + sqlalchemy_uri_placeholder = ( + "doris://user:password@host:port/catalog.db[?key=value&key=value...]" + ) + encryption_parameters = {"ssl": "0"} + supports_dynamic_schema = True + + column_type_mappings = ( # type: ignore + ( + re.compile(r"^tinyint", re.IGNORECASE), + TINYINT(), + GenericDataType.NUMERIC, + ), + ( + re.compile(r"^largeint", re.IGNORECASE), + LARGEINT(), + GenericDataType.NUMERIC, + ), + ( + re.compile(r"^decimal.*", re.IGNORECASE), + types.DECIMAL(), + GenericDataType.NUMERIC, + ), + ( + re.compile(r"^double", re.IGNORECASE), + DOUBLE(), + GenericDataType.NUMERIC, + ), + ( + re.compile(r"^varchar(\((\d+)\))*$", re.IGNORECASE), + types.VARCHAR(), + GenericDataType.STRING, + ), + ( + re.compile(r"^char(\((\d+)\))*$", re.IGNORECASE), + types.CHAR(), + GenericDataType.STRING, + ), + ( + re.compile(r"^json.*", re.IGNORECASE), + types.JSON(), + GenericDataType.STRING, + ), + ( + re.compile(r"^binary.*", re.IGNORECASE), + types.BINARY(), + GenericDataType.STRING, + ), + ( + re.compile(r"^quantile_state", re.IGNORECASE), + QuantileState(), + GenericDataType.STRING, + ), + ( + re.compile(r"^agg_state.*", re.IGNORECASE), + AggState(), + GenericDataType.STRING, + ), + (re.compile(r"^hll", re.IGNORECASE), HLL(), GenericDataType.STRING), + ( + re.compile(r"^bitmap", re.IGNORECASE), + BITMAP(), + 
GenericDataType.STRING, + ), + ( + re.compile(r"^array.*", re.IGNORECASE), + ARRAY(), + GenericDataType.STRING, + ), + ( + re.compile(r"^map.*", re.IGNORECASE), + MAP(), + GenericDataType.STRING, + ), + ( + re.compile(r"^struct.*", re.IGNORECASE), + STRUCT(), + GenericDataType.STRING, + ), + ( + re.compile(r"^datetime.*", re.IGNORECASE), + types.DATETIME(), + GenericDataType.STRING, + ), + ( + re.compile(r"^date.*", re.IGNORECASE), + types.DATE(), + GenericDataType.STRING, + ), + ( + re.compile(r"^text.*", re.IGNORECASE), + TEXT(), + GenericDataType.STRING, + ), + ( + re.compile(r"^string.*", re.IGNORECASE), + String(), + GenericDataType.STRING, + ), + ) + + custom_errors: dict[Pattern[str], tuple[str, SupersetErrorType, dict[str, Any]]] = { + CONNECTION_ACCESS_DENIED_REGEX: ( + __('Either the username "%(username)s" or the password is incorrect.'), + SupersetErrorType.CONNECTION_ACCESS_DENIED_ERROR, + {"invalid": ["username", "password"]}, + ), + CONNECTION_INVALID_HOSTNAME_REGEX: ( + __('Unknown Doris server host "%(hostname)s".'), + SupersetErrorType.CONNECTION_INVALID_HOSTNAME_ERROR, + {"invalid": ["host"]}, + ), + CONNECTION_HOST_DOWN_REGEX: ( + __('The host "%(hostname)s" might be down and can\'t be reached.'), + SupersetErrorType.CONNECTION_HOST_DOWN_ERROR, + {"invalid": ["host", "port"]}, + ), + CONNECTION_UNKNOWN_DATABASE_REGEX: ( + __('Unable to connect to database "%(database)s".'), + SupersetErrorType.CONNECTION_UNKNOWN_DATABASE_ERROR, + {"invalid": ["database"]}, + ), + SYNTAX_ERROR_REGEX: ( + __( + 'Please check your query for syntax errors near "%(server_error)s". ' + "Then, try running your query again." + ), + SupersetErrorType.SYNTAX_ERROR, + {}, + ), + } + + @classmethod + def adjust_engine_params( + cls, + uri: URL, + connect_args: dict[str, Any], + catalog: Optional[str] = None, + schema: Optional[str] = None, + ) -> tuple[URL, dict[str, Any]]: + database = uri.database + if schema and database: + schema = parse.quote(schema, safe="") + if "." in database: + database = database.split(".")[0] + "." + schema + else: + database = "internal." + schema + uri = uri.set(database=database) + + return uri, connect_args + + @classmethod + def get_schema_from_engine_params( + cls, + sqlalchemy_uri: URL, + connect_args: dict[str, Any], + ) -> Optional[str]: + """ + Return the configured schema. + + For doris the SQLAlchemy URI looks like this: + + doris://localhost:9030/catalog.database + + """ + database = sqlalchemy_uri.database.strip("/") + + if "." 
not in database: + return None + + return parse.unquote(database.split(".")[1]) diff --git a/superset/db_engine_specs/druid.py b/superset/db_engine_specs/druid.py index 9bba3a7274..7cd85ec924 100644 --- a/superset/db_engine_specs/druid.py +++ b/superset/db_engine_specs/druid.py @@ -23,14 +23,12 @@ from datetime import datetime from typing import Any, TYPE_CHECKING from sqlalchemy import types -from sqlalchemy.engine.reflection import Inspector from superset import is_feature_enabled from superset.constants import TimeGrain from superset.db_engine_specs.base import BaseEngineSpec from superset.db_engine_specs.exceptions import SupersetDBAPIConnectionError from superset.exceptions import SupersetException -from superset.superset_typing import ResultSetColumnType from superset.utils import core as utils if TYPE_CHECKING: @@ -130,15 +128,6 @@ class DruidEngineSpec(BaseEngineSpec): """ return "MILLIS_TO_TIMESTAMP({col})" - @classmethod - def get_columns( - cls, inspector: Inspector, table_name: str, schema: str | None - ) -> list[ResultSetColumnType]: - """ - Update the Druid type map. - """ - return super().get_columns(inspector, table_name, schema) - @classmethod def get_dbapi_exception_mapping(cls) -> dict[type[Exception], type[Exception]]: # pylint: disable=import-outside-toplevel diff --git a/superset/db_engine_specs/hive.py b/superset/db_engine_specs/hive.py index 4a881e15b2..bd303f928d 100644 --- a/superset/db_engine_specs/hive.py +++ b/superset/db_engine_specs/hive.py @@ -410,9 +410,13 @@ class HiveEngineSpec(PrestoEngineSpec): @classmethod def get_columns( - cls, inspector: Inspector, table_name: str, schema: str | None + cls, + inspector: Inspector, + table_name: str, + schema: str | None, + options: dict[str, Any] | None = None, ) -> list[ResultSetColumnType]: - return BaseEngineSpec.get_columns(inspector, table_name, schema) + return BaseEngineSpec.get_columns(inspector, table_name, schema, options) @classmethod def where_latest_partition( # pylint: disable=too-many-arguments diff --git a/superset/temporary_cache/commands/__init__.py b/superset/db_engine_specs/ibmi.py similarity index 72% rename from superset/temporary_cache/commands/__init__.py rename to superset/db_engine_specs/ibmi.py index 13a83393a9..cac66ebc27 100644 --- a/superset/temporary_cache/commands/__init__.py +++ b/superset/db_engine_specs/ibmi.py @@ -14,3 +14,14 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. +from .db2 import Db2EngineSpec + + +class IBMiEngineSpec(Db2EngineSpec): + engine = "ibmi" + engine_name = "IBM Db2 for i" + max_column_name_length = 128 + + @classmethod + def epoch_to_dttm(cls) -> str: + return "(DAYS({col}) - DAYS('1970-01-01')) * 86400 + MIDNIGHT_SECONDS({col})" diff --git a/superset/db_engine_specs/presto.py b/superset/db_engine_specs/presto.py index 8afa82d9b5..27e86a7980 100644 --- a/superset/db_engine_specs/presto.py +++ b/superset/db_engine_specs/presto.py @@ -981,7 +981,11 @@ class PrestoEngineSpec(PrestoBaseEngineSpec): @classmethod def get_columns( - cls, inspector: Inspector, table_name: str, schema: str | None + cls, + inspector: Inspector, + table_name: str, + schema: str | None, + options: dict[str, Any] | None = None, ) -> list[ResultSetColumnType]: """ Get columns from a Presto data source. 
This includes handling row and @@ -989,6 +993,7 @@ class PrestoEngineSpec(PrestoBaseEngineSpec): :param inspector: object that performs database schema inspection :param table_name: table name :param schema: schema name + :param options: Extra configuration options, not used by this backend :return: a list of results that contain column info (i.e. column name and data type) """ diff --git a/superset/db_engine_specs/trino.py b/superset/db_engine_specs/trino.py index 125a96ab82..6e56dbfa24 100644 --- a/superset/db_engine_specs/trino.py +++ b/superset/db_engine_specs/trino.py @@ -24,6 +24,7 @@ from typing import Any, TYPE_CHECKING import simplejson as json from flask import current_app +from sqlalchemy.engine.reflection import Inspector from sqlalchemy.engine.url import URL from sqlalchemy.orm import Session @@ -33,6 +34,7 @@ from superset.db_engine_specs.base import BaseEngineSpec from superset.db_engine_specs.exceptions import SupersetDBAPIConnectionError from superset.db_engine_specs.presto import PrestoBaseEngineSpec from superset.models.sql_lab import Query +from superset.superset_typing import ResultSetColumnType from superset.utils import core as utils if TYPE_CHECKING: @@ -331,3 +333,65 @@ class TrinoEngineSpec(PrestoBaseEngineSpec): return { requests_exceptions.ConnectionError: SupersetDBAPIConnectionError, } + + @classmethod + def _expand_columns(cls, col: ResultSetColumnType) -> list[ResultSetColumnType]: + """ + Expand the given column out to one or more columns by analysing their types, + descending into ROWS and expanding out their inner fields recursively. + + We can only navigate named fields in ROWs in this way, so we can't expand out + MAP or ARRAY types, nor fields in ROWs which have no name (in fact the trino + library doesn't correctly parse unnamed fields in ROWs). We won't be able to + expand ROWs which are nested underneath any of those types, either. + + Expanded columns are named foo.bar.baz and we provide a query_as property to + instruct the base engine spec how to correctly query them: instead of quoting + the whole string they have to be quoted like "foo"."bar"."baz" and we then + alias them to the full dotted string for ease of reference. + """ + # pylint: disable=import-outside-toplevel + from trino.sqlalchemy import datatype + + cols = [col] + col_type = col.get("type") + + if not isinstance(col_type, datatype.ROW): + return cols + + for inner_name, inner_type in col_type.attr_types: + outer_name = col["name"] + name = ".".join([outer_name, inner_name]) + query_name = ".".join([f'"{piece}"' for piece in name.split(".")]) + column_spec = cls.get_column_spec(str(inner_type)) + is_dttm = column_spec.is_dttm if column_spec else False + + inner_col = ResultSetColumnType( + name=name, + column_name=name, + type=inner_type, + is_dttm=is_dttm, + query_as=f'{query_name} AS "{name}"', + ) + cols.extend(cls._expand_columns(inner_col)) + + return cols + + @classmethod + def get_columns( + cls, + inspector: Inspector, + table_name: str, + schema: str | None, + options: dict[str, Any] | None = None, + ) -> list[ResultSetColumnType]: + """ + If the "expand_rows" feature is enabled on the database via + "schema_options", expand the schema definition out to show all + subfields of nested ROWs as their appropriate dotted paths. 
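
A simplified sketch of the expansion this method performs, with dict shapes abbreviated from ``ResultSetColumnType``. Assuming a Trino column ``features ROW(x BIGINT, y DOUBLE)`` and ``{"expand_rows": true}`` under the database's ``schema_options``, the resulting entries would look roughly like:

    expanded = [
        {"column_name": "features", "type": "ROW(x BIGINT, y DOUBLE)"},
        {"column_name": "features.x", "type": "BIGINT",
         "query_as": '"features"."x" AS "features.x"'},
        {"column_name": "features.y", "type": "DOUBLE",
         "query_as": '"features"."y" AS "features.y"'},
    ]

    # BaseEngineSpec._get_fields (see the base.py hunk earlier) then selects
    # literal_column('"features"."x" AS "features.x"') instead of quoting the
    # whole dotted name as a single identifier.
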
+ """ + base_cols = super().get_columns(inspector, table_name, schema, options) + if not (options or {}).get("expand_rows"): + return base_cols + + return [col for base_col in base_cols for col in cls._expand_columns(base_col)] diff --git a/superset/embedded/api.py b/superset/embedded/api.py index ae800bf2b9..b907422bf5 100644 --- a/superset/embedded/api.py +++ b/superset/embedded/api.py @@ -23,12 +23,12 @@ from flask_appbuilder.hooks import before_request from flask_appbuilder.models.sqla.interface import SQLAInterface from superset import is_feature_enabled +from superset.commands.dashboard.embedded.exceptions import ( + EmbeddedDashboardNotFoundError, +) from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP, RouteMethod from superset.daos.dashboard import EmbeddedDashboardDAO from superset.dashboards.schemas import EmbeddedDashboardResponseSchema -from superset.embedded_dashboard.commands.exceptions import ( - EmbeddedDashboardNotFoundError, -) from superset.extensions import event_logger from superset.models.embedded_dashboard import EmbeddedDashboard from superset.reports.logs.schemas import openapi_spec_methods_override diff --git a/superset/embedded/view.py b/superset/embedded/view.py index e59a6ced90..462c6046fa 100644 --- a/superset/embedded/view.py +++ b/superset/embedded/view.py @@ -17,7 +17,7 @@ import json from typing import Callable -from flask import abort, g, request +from flask import abort, request from flask_appbuilder import expose from flask_login import AnonymousUserMixin, login_user from flask_wtf.csrf import same_origin @@ -78,7 +78,7 @@ class EmbeddedView(BaseSupersetView): ) bootstrap_data = { - "common": common_bootstrap_payload(g.user), + "common": common_bootstrap_payload(), "embedded": { "dashboard_id": embedded.dashboard_id, }, diff --git a/superset/examples/bart_lines.py b/superset/examples/bart_lines.py index e18f6e4632..ad96aecac4 100644 --- a/superset/examples/bart_lines.py +++ b/superset/examples/bart_lines.py @@ -60,9 +60,9 @@ def load_bart_lines(only_metadata: bool = False, force: bool = False) -> None: tbl = db.session.query(table).filter_by(table_name=tbl_name).first() if not tbl: tbl = table(table_name=tbl_name, schema=schema) + db.session.add(tbl) tbl.description = "BART lines" tbl.database = database tbl.filter_select_enabled = True - db.session.merge(tbl) db.session.commit() tbl.fetch_metadata() diff --git a/superset/examples/configs/charts/Filter_Segments.yaml b/superset/examples/configs/charts/Filter_Segments.yaml deleted file mode 100644 index 605e33ca7e..0000000000 --- a/superset/examples/configs/charts/Filter_Segments.yaml +++ /dev/null @@ -1,68 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
-slice_name: Filter Segments -viz_type: filter_box -params: - adhoc_filters: [] - datasource: 42__table - date_filter: false - filter_configs: - - asc: true - clearable: true - column: ethnic_minority - key: -xNBqpfQo - label: Ethnic Minority - multiple: true - searchAllOptions: false - - asc: true - clearable: true - column: gender - key: 19VeBGTKf - label: Gender - multiple: true - searchAllOptions: false - - asc: true - clearable: true - column: developer_type - key: OWTb4s69T - label: Developer Type - multiple: true - searchAllOptions: false - - asc: true - clearable: true - column: lang_at_home - key: Fn-YClyhb - label: Language at Home - multiple: true - searchAllOptions: false - - asc: true - clearable: true - column: country_live - key: 2fNskRCLJ - label: Country live - multiple: true - searchAllOptions: false - granularity_sqla: time_start - queryFields: {} - slice_id: 1387 - time_range: No filter - url_params: {} - viz_type: filter_box -cache_timeout: null -uuid: 6420629a-ce74-2c6b-ef7d-b2e78baa3cfe -version: 1.0.0 -dataset_uuid: d95a2865-53ce-1f82-a53d-8e3c89331469 diff --git a/superset/examples/configs/charts/Filtering_Vaccines.yaml b/superset/examples/configs/charts/Filtering_Vaccines.yaml deleted file mode 100644 index e458c5a009..0000000000 --- a/superset/examples/configs/charts/Filtering_Vaccines.yaml +++ /dev/null @@ -1,53 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -slice_name: Filtering Vaccines -viz_type: filter_box -params: - adhoc_filters: [] - datasource: 69__table - date_filter: false - filter_configs: - - asc: true - clearable: true - column: country_name - key: D00hRxPLE - label: Country - multiple: true - searchAllOptions: false - - asc: true - clearable: true - column: product_category - key: jJ7x2cuIc - label: Vaccine Approach - multiple: true - searchAllOptions: false - - asc: true - clearable: true - column: clinical_stage - key: EgGwwAUU6 - label: Clinical Stage - multiple: true - searchAllOptions: false - queryFields: {} - slice_id: 3965 - time_range: No filter - url_params: {} - viz_type: filter_box -cache_timeout: null -uuid: c29381ce-0e99-4cf3-bf0f-5f55d6b94176 -version: 1.0.0 -dataset_uuid: 974b7a1c-22ea-49cb-9214-97b7dbd511e0 diff --git a/superset/examples/configs/charts/Video_Game_Sales_Filter.yaml b/superset/examples/configs/charts/Video_Game_Sales_Filter.yaml deleted file mode 100644 index 6c76d53e8e..0000000000 --- a/superset/examples/configs/charts/Video_Game_Sales_Filter.yaml +++ /dev/null @@ -1,55 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. 
The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -slice_name: Video Game Sales filter -viz_type: filter_box -params: - adhoc_filters: [] - datasource: 21__table - date_filter: true - filter_configs: - - asc: true - clearable: true - column: platform - key: s3ItH9vhG - label: Platform - multiple: true - searchAllOptions: false - - asc: true - clearable: true - column: genre - key: 202hDeMsG - label: Genre - multiple: true - searchAllOptions: false - - asc: true - clearable: true - column: publisher - key: 5Os6jsJFK - label: Publisher - multiple: true - searchAllOptions: false - granularity_sqla: year - queryFields: {} - time_range: No filter - url_params: - preselect_filters: '{"1389": {"platform": ["PS", "PS2", "PS3", "PS4"], "genre": - null, "__time_range": "No filter"}}' - viz_type: filter_box -cache_timeout: null -uuid: fd9ce7ec-ae08-4f71-93e0-7c26b132b2e6 -version: 1.0.0 -dataset_uuid: 53d47c0c-c03d-47f0-b9ac-81225f808283 diff --git a/superset/examples/configs/dashboards/COVID_Vaccine_Dashboard.yaml b/superset/examples/configs/dashboards/COVID_Vaccine_Dashboard.yaml index 363077aebe..1d870880b9 100644 --- a/superset/examples/configs/dashboards/COVID_Vaccine_Dashboard.yaml +++ b/superset/examples/configs/dashboards/COVID_Vaccine_Dashboard.yaml @@ -18,6 +18,9 @@ dashboard_title: COVID Vaccine Dashboard description: null css: "" slug: null +certified_by: "" +certification_details: "" +published: true uuid: f4065089-110a-41fa-8dd7-9ce98a65e250 position: CHART-63bEuxjDMJ: @@ -25,26 +28,10 @@ position: id: CHART-63bEuxjDMJ meta: chartId: 3961 - height: 72 + height: 60 sliceName: Vaccine Candidates per Country sliceNameOverride: Map of Vaccine Candidates uuid: ddc91df6-fb40-4826-bdca-16b85af1c024 - width: 12 - parents: - - ROOT_ID - - TABS-wUKya7eQ0Z - - TAB-BCIJF4NvgQ - - ROW-zvw7luvEL - type: CHART - CHART-F-fkth0Dnv: - children: [] - id: CHART-F-fkth0Dnv - meta: - chartId: 3960 - height: 60 - sliceName: Vaccine Candidates per Country - sliceNameOverride: Treemap of Vaccine Candidates per Country - uuid: e2f5a8a7-feb0-4f79-bc6b-01fe55b98b3c width: 8 parents: - ROOT_ID @@ -52,6 +39,22 @@ position: - TAB-BCIJF4NvgQ - ROW-xSeNAspgw type: CHART + CHART-F-fkth0Dnv: + children: [] + id: CHART-F-fkth0Dnv + meta: + chartId: 3960 + height: 82 + sliceName: Vaccine Candidates per Country + sliceNameOverride: Treemap of Vaccine Candidates per Country + uuid: e2f5a8a7-feb0-4f79-bc6b-01fe55b98b3c + width: 4 + parents: + - ROOT_ID + - TABS-wUKya7eQ0Z + - TAB-BCIJF4NvgQ + - ROW-dieUdkeUw + type: CHART CHART-RjD_ygqtwH: children: [] id: CHART-RjD_ygqtwH @@ -66,7 +69,7 @@ position: - ROOT_ID - TABS-wUKya7eQ0Z - TAB-BCIJF4NvgQ - - ROW-zvw7luvEL + - ROW-zhOlQLQnB type: CHART CHART-aGfmWtliqA: children: [] @@ -81,7 +84,39 @@ position: - ROOT_ID - TABS-wUKya7eQ0Z - TAB-BCIJF4NvgQ - - ROW-zvw7luvEL + - ROW-zhOlQLQnB + type: CHART + CHART-dCUpAcPsji: + children: [] + id: CHART-dCUpAcPsji + meta: + chartId: 3963 + height: 82 + sliceName: Vaccine Candidates per Country & Stage + 
sliceNameOverride: Heatmap of Countries & Clinical Stages + uuid: cd111331-d286-4258-9020-c7949a109ed2 + width: 4 + parents: + - ROOT_ID + - TABS-wUKya7eQ0Z + - TAB-BCIJF4NvgQ + - ROW-dieUdkeUw + type: CHART + CHART-fYo7IyvKZQ: + children: [] + id: CHART-fYo7IyvKZQ + meta: + chartId: 3964 + height: 60 + sliceName: Vaccine Candidates per Country & Stage + sliceNameOverride: Sunburst of Country & Clinical Stages + uuid: f69c556f-15fe-4a82-a8bb-69d5b6954123 + width: 4 + parents: + - ROOT_ID + - TABS-wUKya7eQ0Z + - TAB-BCIJF4NvgQ + - ROW-xSeNAspgw type: CHART CHART-j4hUvP5dDD: children: [] @@ -99,38 +134,6 @@ position: - TAB-BCIJF4NvgQ - ROW-dieUdkeUw type: CHART - CHART-dCUpAcPsji: - children: [] - id: CHART-dCUpAcPsji - meta: - chartId: 3963 - height: 72 - sliceName: Vaccine Candidates per Country & Stage - sliceNameOverride: Heatmap of Countries & Clinical Stages - uuid: cd111331-d286-4258-9020-c7949a109ed2 - width: 4 - parents: - - ROOT_ID - - TABS-wUKya7eQ0Z - - TAB-BCIJF4NvgQ - - ROW-dieUdkeUw - type: CHART - CHART-eirDduqb1A: - children: [] - id: CHART-eirDduqb1A - meta: - chartId: 3965 - height: 60 - sliceName: Filtering Vaccines - sliceNameOverride: Filter Box of Vaccines - uuid: c29381ce-0e99-4cf3-bf0f-5f55d6b94176 - width: 4 - parents: - - ROOT_ID - - TABS-wUKya7eQ0Z - - TAB-BCIJF4NvgQ - - ROW-xSeNAspgw - type: CHART DASHBOARD_VERSION_KEY: v2 GRID_ID: children: [] @@ -189,27 +192,38 @@ position: - TAB-BCIJF4NvgQ - ROW-zhOlQLQnB type: MARKDOWN - CHART-fYo7IyvKZQ: - children: [] - id: CHART-fYo7IyvKZQ - meta: - chartId: 3964 - height: 72 - sliceName: Vaccine Candidates per Country & Stage - sliceNameOverride: Sunburst of Country & Clinical Stages - uuid: f69c556f-15fe-4a82-a8bb-69d5b6954123 - width: 4 - parents: - - ROOT_ID - - TABS-wUKya7eQ0Z - - TAB-BCIJF4NvgQ - - ROW-dieUdkeUw - type: CHART ROOT_ID: children: - TABS-wUKya7eQ0Z id: ROOT_ID type: ROOT + ROW-dieUdkeUw: + children: + - CHART-F-fkth0Dnv + - CHART-dCUpAcPsji + - CHART-j4hUvP5dDD + id: ROW-dieUdkeUw + meta: + "0": ROOT_ID + background: BACKGROUND_TRANSPARENT + parents: + - ROOT_ID + - TABS-wUKya7eQ0Z + - TAB-BCIJF4NvgQ + type: ROW + ROW-xSeNAspgw: + children: + - CHART-63bEuxjDMJ + - CHART-fYo7IyvKZQ + id: ROW-xSeNAspgw + meta: + "0": ROOT_ID + background: BACKGROUND_TRANSPARENT + parents: + - ROOT_ID + - TABS-wUKya7eQ0Z + - TAB-BCIJF4NvgQ + type: ROW ROW-zhOlQLQnB: children: - MARKDOWN-VjQQ5SFj5v @@ -224,50 +238,10 @@ position: - TABS-wUKya7eQ0Z - TAB-BCIJF4NvgQ type: ROW - ROW-xSeNAspgw: - children: - - CHART-eirDduqb1A - - CHART-F-fkth0Dnv - id: ROW-xSeNAspgw - meta: - "0": ROOT_ID - background: BACKGROUND_TRANSPARENT - parents: - - ROOT_ID - - TABS-wUKya7eQ0Z - - TAB-BCIJF4NvgQ - type: ROW - ROW-dieUdkeUw: - children: - - CHART-dCUpAcPsji - - CHART-fYo7IyvKZQ - - CHART-j4hUvP5dDD - id: ROW-dieUdkeUw - meta: - "0": ROOT_ID - background: BACKGROUND_TRANSPARENT - parents: - - ROOT_ID - - TABS-wUKya7eQ0Z - - TAB-BCIJF4NvgQ - type: ROW - ROW-zvw7luvEL: - children: - - CHART-63bEuxjDMJ - id: ROW-zvw7luvEL - meta: - "0": ROOT_ID - background: BACKGROUND_TRANSPARENT - parents: - - ROOT_ID - - TABS-wUKya7eQ0Z - - TAB-BCIJF4NvgQ - type: ROW TAB-BCIJF4NvgQ: children: - ROW-zhOlQLQnB - ROW-xSeNAspgw - - ROW-zvw7luvEL - ROW-dieUdkeUw id: TAB-BCIJF4NvgQ meta: @@ -316,18 +290,4 @@ metadata: Unknown: "#EFA1AA" Live attenuated virus: "#FDE380" COUNT(*): "#D1C6BC" - filter_scopes: - "3965": - country_name: - scope: - - ROOT_ID - immune: [] - product_category: - scope: - - ROOT_ID - immune: [] - clinical_stage: - scope: - - ROOT_ID - 
immune: [] version: 1.0.0 diff --git a/superset/examples/configs/dashboards/FCC_New_Coder_Survey_2018.yaml b/superset/examples/configs/dashboards/FCC_New_Coder_Survey_2018.yaml index 2e97e6b576..b1508daff0 100644 --- a/superset/examples/configs/dashboards/FCC_New_Coder_Survey_2018.yaml +++ b/superset/examples/configs/dashboards/FCC_New_Coder_Survey_2018.yaml @@ -16,8 +16,11 @@ # under the License. dashboard_title: FCC New Coder Survey 2018 description: null -css: '' +css: "" slug: null +certified_by: "" +certification_details: "" +published: true uuid: 5b12b583-8204-08e9-392c-422209c29787 position: CHART--0GPGmD-pO: @@ -25,17 +28,17 @@ position: id: CHART--0GPGmD-pO meta: chartId: 1361 - height: 48 - sliceName: 'Current Developers: Is this your first development job?' + height: 56 + sliceName: "Current Developers: Is this your first development job?" sliceNameOverride: Is this your first development job? uuid: bfe5a8e6-146f-ef59-5e6c-13d519b236a8 width: 2 parents: - - ROOT_ID - - GRID_ID - - TABS-L-d9eyOE-b - - TAB-l_9I0aNYZ - - ROW-b7USYEngT + - ROOT_ID + - GRID_ID + - TABS-L-d9eyOE-b + - TAB-l_9I0aNYZ + - ROW-b7USYEngT type: CHART CHART--w_Br1tPP3: children: [] @@ -47,27 +50,27 @@ position: uuid: a6dd2d5a-2cdc-c8ec-f30c-85920f4f8a65 width: 3 parents: - - ROOT_ID - - GRID_ID - - TABS-L-d9eyOE-b - - TAB-YT6eNksV- - - ROW-DR80aHJA2c + - ROOT_ID + - GRID_ID + - TABS-L-d9eyOE-b + - TAB-YT6eNksV- + - ROW-DR80aHJA2c type: CHART CHART-0-zzTwBINh: children: [] id: CHART-0-zzTwBINh meta: chartId: 3631 - height: 49 + height: 55 sliceName: Last Year Income Distribution uuid: a2ec5256-94b4-43c4-b8c7-b83f70c5d4df width: 3 parents: - - ROOT_ID - - GRID_ID - - TABS-L-d9eyOE-b - - TAB-l_9I0aNYZ - - ROW-b7USYEngT + - ROOT_ID + - GRID_ID + - TABS-L-d9eyOE-b + - TAB-l_9I0aNYZ + - ROW-b7USYEngT type: CHART CHART-37fu7fO6Z0: children: [] @@ -79,11 +82,11 @@ position: uuid: 02f546ae-1bf4-bd26-8bc2-14b9279c8a62 width: 7 parents: - - ROOT_ID - - GRID_ID - - TABS-L-d9eyOE-b - - TAB-l_9I0aNYZ - - ROW-kNjtGVFpp + - ROOT_ID + - GRID_ID + - TABS-L-d9eyOE-b + - TAB-l_9I0aNYZ + - ROW-kNjtGVFpp type: CHART CHART-5QwNlSbXYU: children: [] @@ -95,11 +98,11 @@ position: uuid: 097c05c9-2dd2-481d-813d-d6c0c12b4a3d width: 5 parents: - - ROOT_ID - - GRID_ID - - TABS-L-d9eyOE-b - - TAB-l_9I0aNYZ - - ROW-kNjtGVFpp + - ROOT_ID + - GRID_ID + - TABS-L-d9eyOE-b + - TAB-l_9I0aNYZ + - ROW-kNjtGVFpp type: CHART CHART-FKuVqq4kaA: children: [] @@ -112,11 +115,11 @@ position: uuid: e6b09c28-98cf-785f-4caf-320fd4fca802 width: 3 parents: - - ROOT_ID - - GRID_ID - - TABS-L-d9eyOE-b - - TAB-YT6eNksV- - - ROW-DR80aHJA2c + - ROOT_ID + - GRID_ID + - TABS-L-d9eyOE-b + - TAB-YT6eNksV- + - ROW-DR80aHJA2c type: CHART CHART-JnpdZOhVer: children: [] @@ -124,16 +127,16 @@ position: meta: chartId: 1369 height: 50 - sliceName: "\U0001F393 Highest degree held" + sliceName: Highest degree held uuid: 9f7d2b9c-6b3a-69f9-f03e-d3a141514639 width: 2 parents: - - ROOT_ID - - GRID_ID - - TABS-L-d9eyOE-b - - TAB-YT6eNksV- - - ROW--BIzjz9F0 - - COLUMN-IEKAo_QJlz + - ROOT_ID + - GRID_ID + - TABS-L-d9eyOE-b + - TAB-YT6eNksV- + - ROW--BIzjz9F0 + - COLUMN-IEKAo_QJlz type: CHART CHART-LjfhrUkEef: children: [] @@ -145,11 +148,11 @@ position: uuid: 067c4a1e-ae03-4c0c-8e2a-d2c0f4bf43c3 width: 5 parents: - - ROOT_ID - - GRID_ID - - TABS-L-d9eyOE-b - - TAB-l_9I0aNYZ - - ROW-s3l4os7YY + - ROOT_ID + - GRID_ID + - TABS-L-d9eyOE-b + - TAB-l_9I0aNYZ + - ROW-s3l4os7YY type: CHART CHART-Q3pbwsH3id: children: [] @@ -162,27 +165,27 @@ position: uuid: 
def07750-b5c0-0b69-6228-cb2330916166 width: 3 parents: - - ROOT_ID - - GRID_ID - - TABS-L-d9eyOE-b - - TAB-AsMaxdYL_t - - ROW-mOvr_xWm1 + - ROOT_ID + - GRID_ID + - TABS-L-d9eyOE-b + - TAB-AsMaxdYL_t + - ROW-mOvr_xWm1 type: CHART CHART-QVql08s5Bv: children: [] id: CHART-QVql08s5Bv meta: chartId: 3632 - height: 50 + height: 56 sliceName: First Time Developer? uuid: edc75073-8f33-4123-a28d-cd6dfb33cade width: 3 parents: - - ROOT_ID - - GRID_ID - - TABS-L-d9eyOE-b - - TAB-l_9I0aNYZ - - ROW-b7USYEngT + - ROOT_ID + - GRID_ID + - TABS-L-d9eyOE-b + - TAB-l_9I0aNYZ + - ROW-b7USYEngT type: CHART CHART-UtSaz4pfV6: children: [] @@ -194,12 +197,12 @@ position: uuid: 5f1ea868-604e-f69d-a241-5daa83ff33be width: 3 parents: - - ROOT_ID - - GRID_ID - - TABS-L-d9eyOE-b - - TAB-AsMaxdYL_t - - ROW-UsW-_RPAb - - COLUMN-OJ5spdMmNh + - ROOT_ID + - GRID_ID + - TABS-L-d9eyOE-b + - TAB-AsMaxdYL_t + - ROW-UsW-_RPAb + - COLUMN-OJ5spdMmNh type: CHART CHART-VvFbGxi3X_: children: [] @@ -211,12 +214,12 @@ position: uuid: 03a74c97-52fc-cf87-233c-d4275f8c550c width: 3 parents: - - ROOT_ID - - GRID_ID - - TABS-L-d9eyOE-b - - TAB-AsMaxdYL_t - - ROW-UsW-_RPAb - - COLUMN-OJ5spdMmNh + - ROOT_ID + - GRID_ID + - TABS-L-d9eyOE-b + - TAB-AsMaxdYL_t + - ROW-UsW-_RPAb + - COLUMN-OJ5spdMmNh type: CHART CHART-XHncHuS5pZ: children: [] @@ -229,11 +232,11 @@ position: uuid: a0e5329f-224e-6fc8-efd2-d37d0f546ee8 width: 2 parents: - - ROOT_ID - - GRID_ID - - TABS-L-d9eyOE-b - - TAB-YT6eNksV- - - ROW-DR80aHJA2c + - ROOT_ID + - GRID_ID + - TABS-L-d9eyOE-b + - TAB-YT6eNksV- + - ROW-DR80aHJA2c type: CHART CHART-YSzS5GOOLf: children: [] @@ -245,11 +248,11 @@ position: uuid: 4880e4f4-b701-4be0-86f3-e7e89432e83b width: 3 parents: - - ROOT_ID - - GRID_ID - - TABS-L-d9eyOE-b - - TAB-AsMaxdYL_t - - ROW-mOvr_xWm1 + - ROOT_ID + - GRID_ID + - TABS-L-d9eyOE-b + - TAB-AsMaxdYL_t + - ROW-mOvr_xWm1 type: CHART CHART-ZECnzPz8Bi: children: [] @@ -261,44 +264,27 @@ position: uuid: 5596e0f6-78a9-465d-8325-7139c794a06a width: 7 parents: - - ROOT_ID - - GRID_ID - - TABS-L-d9eyOE-b - - TAB-l_9I0aNYZ - - ROW-s3l4os7YY + - ROOT_ID + - GRID_ID + - TABS-L-d9eyOE-b + - TAB-l_9I0aNYZ + - ROW-s3l4os7YY type: CHART CHART-aytwlT4GAq: children: [] id: CHART-aytwlT4GAq meta: chartId: 1384 - height: 50 + height: 30 sliceName: Breakdown of Developer Type uuid: b8386be8-f44e-6535-378c-2aa2ba461286 - width: 4 + width: 6 parents: - - ROOT_ID - - GRID_ID - - TABS-L-d9eyOE-b - - TAB-AsMaxdYL_t - - ROW-y-GwJPgxLr - type: CHART - CHART-d6vjW6rC6V: - children: [] - id: CHART-d6vjW6rC6V - meta: - chartId: 1387 - height: 54 - sliceName: Filter Segments - sliceNameOverride: Filter By - uuid: 6420629a-ce74-2c6b-ef7d-b2e78baa3cfe - width: 5 - parents: - - ROOT_ID - - GRID_ID - - TABS-L-d9eyOE-b - - TAB-AsMaxdYL_t - - ROW-y-GwJPgxLr + - ROOT_ID + - GRID_ID + - TABS-L-d9eyOE-b + - TAB-AsMaxdYL_t + - ROW-y-GwJPgxLr type: CHART CHART-fLpTSAHpAO: children: [] @@ -310,11 +296,11 @@ position: uuid: 2ba66056-a756-d6a3-aaec-0c243fb7062e width: 9 parents: - - ROOT_ID - - GRID_ID - - TABS-L-d9eyOE-b - - TAB-AsMaxdYL_t - - ROW-UsW-_RPAb + - ROOT_ID + - GRID_ID + - TABS-L-d9eyOE-b + - TAB-AsMaxdYL_t + - ROW-UsW-_RPAb type: CHART CHART-lQVSAw0Or3: children: [] @@ -327,11 +313,11 @@ position: uuid: cb8998ab-9f93-4f0f-4e4b-3bfe4b0dea9d width: 4 parents: - - ROOT_ID - - GRID_ID - - TABS-L-d9eyOE-b - - TAB-YT6eNksV- - - ROW--BIzjz9F0 + - ROOT_ID + - GRID_ID + - TABS-L-d9eyOE-b + - TAB-YT6eNksV- + - ROW--BIzjz9F0 type: CHART CHART-o-JPAWMZK-: children: [] @@ -343,11 +329,11 @@ position: uuid: 
0f6b447c-828c-e71c-87ac-211bc412b214 width: 3 parents: - - ROOT_ID - - GRID_ID - - TABS-L-d9eyOE-b - - TAB-AsMaxdYL_t - - ROW-mOvr_xWm1 + - ROOT_ID + - GRID_ID + - TABS-L-d9eyOE-b + - TAB-AsMaxdYL_t + - ROW-mOvr_xWm1 type: CHART CHART-v22McUFMtx: children: [] @@ -360,12 +346,12 @@ position: uuid: 6d0ceb30-2008-d19c-d285-cf77dc764433 width: 4 parents: - - ROOT_ID - - GRID_ID - - TABS-L-d9eyOE-b - - TAB-YT6eNksV- - - ROW--BIzjz9F0 - - COLUMN-IEKAo_QJlz + - ROOT_ID + - GRID_ID + - TABS-L-d9eyOE-b + - TAB-YT6eNksV- + - ROW--BIzjz9F0 + - COLUMN-IEKAo_QJlz type: CHART CHART-wxWVtlajRF: children: [] @@ -377,49 +363,49 @@ position: uuid: bff88053-ccc4-92f2-d6f5-de83e950e8cd width: 4 parents: - - ROOT_ID - - GRID_ID - - TABS-L-d9eyOE-b - - TAB-YT6eNksV- - - ROW--BIzjz9F0 + - ROOT_ID + - GRID_ID + - TABS-L-d9eyOE-b + - TAB-YT6eNksV- + - ROW--BIzjz9F0 type: CHART COLUMN-IEKAo_QJlz: children: - - CHART-JnpdZOhVer - - CHART-v22McUFMtx + - CHART-JnpdZOhVer + - CHART-v22McUFMtx id: COLUMN-IEKAo_QJlz meta: background: BACKGROUND_TRANSPARENT width: 4 parents: - - ROOT_ID - - GRID_ID - - TABS-L-d9eyOE-b - - TAB-YT6eNksV- - - ROW--BIzjz9F0 + - ROOT_ID + - GRID_ID + - TABS-L-d9eyOE-b + - TAB-YT6eNksV- + - ROW--BIzjz9F0 type: COLUMN COLUMN-OJ5spdMmNh: children: - - CHART-VvFbGxi3X_ - - CHART-UtSaz4pfV6 + - CHART-VvFbGxi3X_ + - CHART-UtSaz4pfV6 id: COLUMN-OJ5spdMmNh meta: background: BACKGROUND_TRANSPARENT width: 3 parents: - - ROOT_ID - - GRID_ID - - TABS-L-d9eyOE-b - - TAB-AsMaxdYL_t - - ROW-UsW-_RPAb + - ROOT_ID + - GRID_ID + - TABS-L-d9eyOE-b + - TAB-AsMaxdYL_t + - ROW-UsW-_RPAb type: COLUMN DASHBOARD_VERSION_KEY: v2 GRID_ID: children: - - TABS-L-d9eyOE-b + - TABS-L-d9eyOE-b id: GRID_ID parents: - - ROOT_ID + - ROOT_ID type: GRID HEADER_ID: id: HEADER_ID @@ -453,21 +439,21 @@ position: height: 50 width: 4 parents: - - ROOT_ID - - GRID_ID - - TABS-L-d9eyOE-b - - TAB-YT6eNksV- - - ROW-DR80aHJA2c + - ROOT_ID + - GRID_ID + - TABS-L-d9eyOE-b + - TAB-YT6eNksV- + - ROW-DR80aHJA2c type: MARKDOWN MARKDOWN-NQmSPDOtpl: children: [] id: MARKDOWN-NQmSPDOtpl meta: - code: '# Current Developers + code: "# Current Developers - While majority of the students on FCC are Aspiring developers, there''s a - nontrivial minority that''s there to continue leveling up their skills (17% + While majority of the students on FCC are Aspiring developers, there's a + nontrivial minority that's there to continue leveling up their skills (17% of the survey respondents). 
@@ -480,28 +466,28 @@ position: - The proportion of developers whose current job is their first developer job - - Distribution of last year''s income + - Distribution of last year's income - The geographic distribution of these developers - The overlap between commute time and if their current job is their first developer job - - Potential link between highest degree earned and last year''s income' - height: 50 + - Potential link between highest degree earned and last year's income" + height: 56 width: 4 parents: - - ROOT_ID - - GRID_ID - - TABS-L-d9eyOE-b - - TAB-l_9I0aNYZ - - ROW-b7USYEngT + - ROOT_ID + - GRID_ID + - TABS-L-d9eyOE-b + - TAB-l_9I0aNYZ + - ROW-b7USYEngT type: MARKDOWN MARKDOWN-__u6CsUyfh: children: [] id: MARKDOWN-__u6CsUyfh meta: - code: '## FreeCodeCamp New Coder Survey 2018 + code: "## FreeCodeCamp New Coder Survey 2018 Every year, FCC surveys its user base (mostly budding software developers) @@ -513,21 +499,22 @@ position: - [Dataset](https://github.com/freeCodeCamp/2018-new-coder-survey) - - [FCC Blog Post](https://www.freecodecamp.org/news/we-asked-20-000-people-who-they-are-and-how-theyre-learning-to-code-fff5d668969/)' - height: 45 - width: 3 + - [FCC Blog Post](https://www.freecodecamp.org/news/we-asked-20-000-people-who-they-are-and-how-theyre-learning-to-code-fff5d668969/)" + height: 30 + width: 6 parents: - - ROOT_ID - - GRID_ID - - TABS-L-d9eyOE-b - - TAB-AsMaxdYL_t - - ROW-y-GwJPgxLr + - ROOT_ID + - GRID_ID + - TABS-L-d9eyOE-b + - TAB-AsMaxdYL_t + - ROW-y-GwJPgxLr type: MARKDOWN MARKDOWN-zc2mWxZeox: children: [] id: MARKDOWN-zc2mWxZeox meta: - code: "# Demographics\n\nFreeCodeCamp is a completely-online community of people\ + code: + "# Demographics\n\nFreeCodeCamp is a completely-online community of people\ \ learning to code and consists of aspiring & current developers from all\ \ over the world. That doesn't necessarily mean that access to these types\ \ of opportunities are evenly distributed. 
\n\nThe following charts can begin\ @@ -537,243 +524,220 @@ position: height: 52 width: 3 parents: - - ROOT_ID - - GRID_ID - - TABS-L-d9eyOE-b - - TAB-AsMaxdYL_t - - ROW-mOvr_xWm1 + - ROOT_ID + - GRID_ID + - TABS-L-d9eyOE-b + - TAB-AsMaxdYL_t + - ROW-mOvr_xWm1 type: MARKDOWN ROOT_ID: children: - - GRID_ID + - GRID_ID id: ROOT_ID type: ROOT ROW--BIzjz9F0: children: - - COLUMN-IEKAo_QJlz - - CHART-lQVSAw0Or3 - - CHART-wxWVtlajRF + - COLUMN-IEKAo_QJlz + - CHART-lQVSAw0Or3 + - CHART-wxWVtlajRF id: ROW--BIzjz9F0 meta: background: BACKGROUND_TRANSPARENT parents: - - ROOT_ID - - GRID_ID - - TABS-L-d9eyOE-b - - TAB-YT6eNksV- + - ROOT_ID + - GRID_ID + - TABS-L-d9eyOE-b + - TAB-YT6eNksV- type: ROW ROW-DR80aHJA2c: children: - - MARKDOWN-BUmyHM2s0x - - CHART-XHncHuS5pZ - - CHART--w_Br1tPP3 - - CHART-FKuVqq4kaA + - MARKDOWN-BUmyHM2s0x + - CHART-XHncHuS5pZ + - CHART--w_Br1tPP3 + - CHART-FKuVqq4kaA id: ROW-DR80aHJA2c meta: background: BACKGROUND_TRANSPARENT parents: - - ROOT_ID - - GRID_ID - - TABS-L-d9eyOE-b - - TAB-YT6eNksV- + - ROOT_ID + - GRID_ID + - TABS-L-d9eyOE-b + - TAB-YT6eNksV- type: ROW ROW-UsW-_RPAb: children: - - COLUMN-OJ5spdMmNh - - CHART-fLpTSAHpAO + - COLUMN-OJ5spdMmNh + - CHART-fLpTSAHpAO id: ROW-UsW-_RPAb meta: background: BACKGROUND_TRANSPARENT parents: - - ROOT_ID - - GRID_ID - - TABS-L-d9eyOE-b - - TAB-AsMaxdYL_t + - ROOT_ID + - GRID_ID + - TABS-L-d9eyOE-b + - TAB-AsMaxdYL_t type: ROW ROW-b7USYEngT: children: - - MARKDOWN-NQmSPDOtpl - - CHART--0GPGmD-pO - - CHART-QVql08s5Bv - - CHART-0-zzTwBINh + - MARKDOWN-NQmSPDOtpl + - CHART--0GPGmD-pO + - CHART-QVql08s5Bv + - CHART-0-zzTwBINh id: ROW-b7USYEngT meta: background: BACKGROUND_TRANSPARENT parents: - - ROOT_ID - - GRID_ID - - TABS-L-d9eyOE-b - - TAB-l_9I0aNYZ + - ROOT_ID + - GRID_ID + - TABS-L-d9eyOE-b + - TAB-l_9I0aNYZ type: ROW ROW-kNjtGVFpp: children: - - CHART-5QwNlSbXYU - - CHART-37fu7fO6Z0 + - CHART-5QwNlSbXYU + - CHART-37fu7fO6Z0 id: ROW-kNjtGVFpp meta: background: BACKGROUND_TRANSPARENT parents: - - ROOT_ID - - GRID_ID - - TABS-L-d9eyOE-b - - TAB-l_9I0aNYZ + - ROOT_ID + - GRID_ID + - TABS-L-d9eyOE-b + - TAB-l_9I0aNYZ type: ROW ROW-mOvr_xWm1: children: - - MARKDOWN-zc2mWxZeox - - CHART-Q3pbwsH3id - - CHART-o-JPAWMZK- - - CHART-YSzS5GOOLf + - MARKDOWN-zc2mWxZeox + - CHART-Q3pbwsH3id + - CHART-o-JPAWMZK- + - CHART-YSzS5GOOLf id: ROW-mOvr_xWm1 meta: background: BACKGROUND_TRANSPARENT parents: - - ROOT_ID - - GRID_ID - - TABS-L-d9eyOE-b - - TAB-AsMaxdYL_t + - ROOT_ID + - GRID_ID + - TABS-L-d9eyOE-b + - TAB-AsMaxdYL_t type: ROW ROW-s3l4os7YY: children: - - CHART-LjfhrUkEef - - CHART-ZECnzPz8Bi + - CHART-LjfhrUkEef + - CHART-ZECnzPz8Bi id: ROW-s3l4os7YY meta: background: BACKGROUND_TRANSPARENT parents: - - ROOT_ID - - GRID_ID - - TABS-L-d9eyOE-b - - TAB-l_9I0aNYZ + - ROOT_ID + - GRID_ID + - TABS-L-d9eyOE-b + - TAB-l_9I0aNYZ type: ROW ROW-y-GwJPgxLr: children: - - MARKDOWN-__u6CsUyfh - - CHART-aytwlT4GAq - - CHART-d6vjW6rC6V + - MARKDOWN-__u6CsUyfh + - CHART-aytwlT4GAq id: ROW-y-GwJPgxLr meta: background: BACKGROUND_TRANSPARENT parents: - - ROOT_ID - - GRID_ID - - TABS-L-d9eyOE-b - - TAB-AsMaxdYL_t + - ROOT_ID + - GRID_ID + - TABS-L-d9eyOE-b + - TAB-AsMaxdYL_t type: ROW TAB-AsMaxdYL_t: children: - - ROW-y-GwJPgxLr - - ROW-mOvr_xWm1 - - ROW-UsW-_RPAb + - ROW-y-GwJPgxLr + - ROW-mOvr_xWm1 + - ROW-UsW-_RPAb id: TAB-AsMaxdYL_t meta: text: Overview parents: - - ROOT_ID - - GRID_ID - - TABS-L-d9eyOE-b + - ROOT_ID + - GRID_ID + - TABS-L-d9eyOE-b type: TAB TAB-YT6eNksV-: children: - - ROW-DR80aHJA2c - - ROW--BIzjz9F0 + - ROW-DR80aHJA2c + - 
ROW--BIzjz9F0 id: TAB-YT6eNksV- meta: text: "\U0001F680 Aspiring Developers" parents: - - ROOT_ID - - GRID_ID - - TABS-L-d9eyOE-b + - ROOT_ID + - GRID_ID + - TABS-L-d9eyOE-b type: TAB TAB-l_9I0aNYZ: children: - - ROW-b7USYEngT - - ROW-kNjtGVFpp - - ROW-s3l4os7YY + - ROW-b7USYEngT + - ROW-kNjtGVFpp + - ROW-s3l4os7YY id: TAB-l_9I0aNYZ meta: text: "\U0001F4BB Current Developers" parents: - - ROOT_ID - - GRID_ID - - TABS-L-d9eyOE-b + - ROOT_ID + - GRID_ID + - TABS-L-d9eyOE-b type: TAB TABS-L-d9eyOE-b: children: - - TAB-AsMaxdYL_t - - TAB-YT6eNksV- - - TAB-l_9I0aNYZ + - TAB-AsMaxdYL_t + - TAB-YT6eNksV- + - TAB-l_9I0aNYZ id: TABS-L-d9eyOE-b meta: {} parents: - - ROOT_ID - - GRID_ID + - ROOT_ID + - GRID_ID type: TABS metadata: timed_refresh_immune_slices: [] expanded_slices: {} refresh_frequency: 0 - default_filters: '{}' + default_filters: "{}" color_scheme: supersetColors - filter_scopes: - '1387': - ethnic_minority: - scope: - - TAB-AsMaxdYL_t - immune: [] - gender: - scope: - - ROOT_ID - immune: [] - developer_type: - scope: - - ROOT_ID - immune: [] - lang_at_home: - scope: - - ROOT_ID - immune: [] - country_live: - scope: - - ROOT_ID - immune: [] label_colors: - '0': '#FCC700' - '1': '#A868B7' - '15': '#3CCCCB' - '30': '#A38F79' - '45': '#8FD3E4' - age: '#1FA8C9' - Yes,: '#1FA8C9' - Female: '#454E7C' - Prefer: '#5AC189' - No,: '#FF7F44' - Male: '#666666' - Prefer not to say: '#E04355' - Ph.D.: '#FCC700' - associate's degree: '#A868B7' - bachelor's degree: '#3CCCCB' - high school diploma or equivalent (GED): '#A38F79' - master's degree (non-professional): '#8FD3E4' - no high school (secondary school): '#A1A6BD' - professional degree (MBA, MD, JD, etc.): '#ACE1C4' - some college credit, no degree: '#FEC0A1' - some high school: '#B2B2B2' - trade, technical, or vocational training: '#EFA1AA' - No, not an ethnic minority: '#1FA8C9' - Yes, an ethnic minority: '#454E7C' - <NULL>: '#5AC189' - 'Yes': '#FF7F44' - 'No': '#666666' - last_yr_income: '#E04355' - More: '#A1A6BD' - Less: '#ACE1C4' - I: '#FEC0A1' - expected_earn: '#B2B2B2' - 'Yes: Willing To': '#EFA1AA' - 'No: Not Willing to': '#FDE380' - No Answer: '#D3B3DA' - In an Office (with Other Developers): '#9EE5E5' - No Preference: '#D1C6BC' - From Home: '#1FA8C9' + "0": "#FCC700" + "1": "#A868B7" + "15": "#3CCCCB" + "30": "#A38F79" + "45": "#8FD3E4" + age: "#1FA8C9" + Yes,: "#1FA8C9" + Female: "#454E7C" + Prefer: "#5AC189" + No,: "#FF7F44" + Male: "#666666" + Prefer not to say: "#E04355" + Ph.D.: "#FCC700" + associate's degree: "#A868B7" + bachelor's degree: "#3CCCCB" + high school diploma or equivalent (GED): "#A38F79" + master's degree (non-professional): "#8FD3E4" + no high school (secondary school): "#A1A6BD" + professional degree (MBA, MD, JD, etc.): "#ACE1C4" + some college credit, no degree: "#FEC0A1" + some high school: "#B2B2B2" + trade, technical, or vocational training: "#EFA1AA" + No, not an ethnic minority: "#1FA8C9" + Yes, an ethnic minority: "#454E7C" + <NULL>: "#5AC189" + "Yes": "#FF7F44" + "No": "#666666" + last_yr_income: "#E04355" + More: "#A1A6BD" + Less: "#ACE1C4" + I: "#FEC0A1" + expected_earn: "#B2B2B2" + "Yes: Willing To": "#EFA1AA" + "No: Not Willing to": "#FDE380" + No Answer: "#D3B3DA" + In an Office (with Other Developers): "#9EE5E5" + No Preference: "#D1C6BC" + From Home: "#1FA8C9" version: 1.0.0 diff --git a/superset/examples/configs/dashboards/Sales_Dashboard.yaml b/superset/examples/configs/dashboards/Sales_Dashboard.yaml index 3efea3af25..439b763d0c 100644 --- 
a/superset/examples/configs/dashboards/Sales_Dashboard.yaml +++ b/superset/examples/configs/dashboards/Sales_Dashboard.yaml @@ -16,8 +16,11 @@ # under the License. dashboard_title: Sales Dashboard description: null -css: '' +css: "" slug: null +certified_by: "" +certification_details: "" +published: true uuid: 04f79081-fb49-7bac-7f14-cc76cd2ad93b position: CHART-1NOOLm5YPs: @@ -31,26 +34,26 @@ position: uuid: c3d643cd-fd6f-4659-a5b7-59402487a8d0 width: 2 parents: - - ROOT_ID - - TABS-e5Ruro0cjP - - TAB-d-E0Zc1cTH - - ROW-Tyv02UA_6W - - COLUMN-8Rp54B6ikC + - ROOT_ID + - TABS-e5Ruro0cjP + - TAB-d-E0Zc1cTH + - ROW-Tyv02UA_6W + - COLUMN-8Rp54B6ikC type: CHART CHART-AYpv8gFi_q: children: [] id: CHART-AYpv8gFi_q meta: chartId: 2810 - height: 91 + height: 70 sliceName: Number of Deals (for each Combination) uuid: bd20fc69-dd51-46c1-99b5-09e37a434bf1 - width: 3 + width: 6 parents: - - ROOT_ID - - TABS-e5Ruro0cjP - - TAB-4fthLQmdX - - ROW-0l1WcDzW3 + - ROOT_ID + - TABS-e5Ruro0cjP + - TAB-4fthLQmdX + - ROW-0l1WcDzW3 type: CHART CHART-KKT9BsnUst: children: [] @@ -63,90 +66,74 @@ position: uuid: db9609e4-9b78-4a32-87a7-4d9e19d51cd8 width: 7 parents: - - ROOT_ID - - TABS-e5Ruro0cjP - - TAB-d-E0Zc1cTH - - ROW-oAtmu5grZ + - ROOT_ID + - TABS-e5Ruro0cjP + - TAB-d-E0Zc1cTH + - ROW-oAtmu5grZ type: CHART CHART-OJ9aWDmn1q: children: [] id: CHART-OJ9aWDmn1q meta: chartId: 2808 - height: 91 + height: 70 sliceName: Proportion of Revenue by Product Line sliceNameOverride: Proportion of Monthly Revenue by Product Line uuid: 08aff161-f60c-4cb3-a225-dc9b1140d2e3 width: 6 parents: - - ROOT_ID - - TABS-e5Ruro0cjP - - TAB-4fthLQmdX - - ROW-0l1WcDzW3 + - ROOT_ID + - TABS-e5Ruro0cjP + - TAB-4fthLQmdX + - ROW-0l1WcDzW3 type: CHART CHART-YFg-9wHE7s: children: [] id: CHART-YFg-9wHE7s meta: chartId: 2811 - height: 63 + height: 49 sliceName: Seasonality of Revenue (per Product Line) uuid: cf0da099-b3ab-4d94-ab62-cf353ac3c611 width: 6 parents: - - ROOT_ID - - TABS-e5Ruro0cjP - - TAB-4fthLQmdX - - ROW-E7MDSGfnm + - ROOT_ID + - TABS-e5Ruro0cjP + - TAB-4fthLQmdX + - ROW-E7MDSGfnm type: CHART CHART-_LMKI0D3tj: children: [] id: CHART-_LMKI0D3tj meta: chartId: 2809 - height: 62 - sliceName: Revenue by Deal SIze + height: 49 + sliceName: Revenue by Deal Size sliceNameOverride: Monthly Revenue by Deal SIze uuid: f065a533-2e13-42b9-bd19-801a21700dff width: 6 parents: - - ROOT_ID - - TABS-e5Ruro0cjP - - TAB-4fthLQmdX - - ROW-E7MDSGfnm + - ROOT_ID + - TABS-e5Ruro0cjP + - TAB-4fthLQmdX + - ROW-E7MDSGfnm type: CHART CHART-id4RGv80N-: children: [] id: CHART-id4RGv80N- meta: chartId: 2807 - height: 40 + height: 59 sliceName: Total Items Sold (By Product Line) sliceNameOverride: Total Products Sold (By Product Line) uuid: b8b7ca30-6291-44b0-bc64-ba42e2892b86 width: 2 parents: - - ROOT_ID - - TABS-e5Ruro0cjP - - TAB-d-E0Zc1cTH - - ROW-oAtmu5grZ - - COLUMN-G6_2DvG8aK - type: CHART - CHART-iyvXMcqHt9: - children: [] - id: CHART-iyvXMcqHt9 - meta: - chartId: 671 - height: 39 - sliceName: Filter - uuid: a5689df7-98fc-7c51-602c-ebd92dc3ec70 - width: 2 - parents: - - ROOT_ID - - TABS-e5Ruro0cjP - - TAB-4fthLQmdX - - ROW-0l1WcDzW3 - - COLUMN-jlNWyWCfTC + - ROOT_ID + - TABS-e5Ruro0cjP + - TAB-d-E0Zc1cTH + - ROW-oAtmu5grZ + - COLUMN-G6_2DvG8aK type: CHART CHART-j24u8ve41b: children: [] @@ -159,10 +146,10 @@ position: uuid: 09c497e0-f442-1121-c9e7-671e37750424 width: 3 parents: - - ROOT_ID - - TABS-e5Ruro0cjP - - TAB-d-E0Zc1cTH - - ROW-oAtmu5grZ + - ROOT_ID + - TABS-e5Ruro0cjP + - TAB-d-E0Zc1cTH + - ROW-oAtmu5grZ type: CHART CHART-lFanAaYKBK: children: 
[] @@ -174,11 +161,11 @@ position: uuid: 7b12a243-88e0-4dc5-ac33-9a840bb0ac5a width: 3 parents: - - ROOT_ID - - TABS-e5Ruro0cjP - - TAB-d-E0Zc1cTH - - ROW-Tyv02UA_6W - - COLUMN-8Rp54B6ikC + - ROOT_ID + - TABS-e5Ruro0cjP + - TAB-d-E0Zc1cTH + - ROW-Tyv02UA_6W + - COLUMN-8Rp54B6ikC type: CHART CHART-vomBOiI7U9: children: [] @@ -191,58 +178,44 @@ position: uuid: 692aca26-a526-85db-c94c-411c91cc1077 width: 7 parents: - - ROOT_ID - - TABS-e5Ruro0cjP - - TAB-d-E0Zc1cTH - - ROW-Tyv02UA_6W + - ROOT_ID + - TABS-e5Ruro0cjP + - TAB-d-E0Zc1cTH + - ROW-Tyv02UA_6W type: CHART COLUMN-8Rp54B6ikC: children: - - CHART-lFanAaYKBK - - CHART-1NOOLm5YPs + - CHART-lFanAaYKBK + - CHART-1NOOLm5YPs id: COLUMN-8Rp54B6ikC meta: background: BACKGROUND_TRANSPARENT width: 2 parents: - - ROOT_ID - - TABS-e5Ruro0cjP - - TAB-d-E0Zc1cTH - - ROW-Tyv02UA_6W + - ROOT_ID + - TABS-e5Ruro0cjP + - TAB-d-E0Zc1cTH + - ROW-Tyv02UA_6W type: COLUMN COLUMN-G6_2DvG8aK: children: - - CHART-id4RGv80N- + - CHART-id4RGv80N- id: COLUMN-G6_2DvG8aK meta: background: BACKGROUND_TRANSPARENT width: 2 parents: - - ROOT_ID - - TABS-e5Ruro0cjP - - TAB-d-E0Zc1cTH - - ROW-oAtmu5grZ - type: COLUMN - COLUMN-jlNWyWCfTC: - children: - - MARKDOWN-HrzsMmvGQo - - CHART-iyvXMcqHt9 - id: COLUMN-jlNWyWCfTC - meta: - background: BACKGROUND_TRANSPARENT - width: 3 - parents: - - ROOT_ID - - TABS-e5Ruro0cjP - - TAB-4fthLQmdX - - ROW-0l1WcDzW3 + - ROOT_ID + - TABS-e5Ruro0cjP + - TAB-d-E0Zc1cTH + - ROW-oAtmu5grZ type: COLUMN DASHBOARD_VERSION_KEY: v2 GRID_ID: children: [] id: GRID_ID parents: - - ROOT_ID + - ROOT_ID type: GRID HEADER_ID: id: HEADER_ID @@ -253,7 +226,8 @@ position: children: [] id: MARKDOWN--AtDSWnapE meta: - code: "# \U0001F697 Vehicle Sales Dashboard \U0001F3CD\n\nThis example dashboard\ + code: + "# \U0001F697 Vehicle Sales Dashboard \U0001F3CD\n\nThis example dashboard\ \ provides insight into the business operations of vehicle seller. The dataset\ \ powering this dashboard can be found [here on Kaggle](https://www.kaggle.com/kyanyoga/sample-sales-data).\n\ \n### Timeline\n\nThe dataset contains data on all orders from the 2003 and\ @@ -265,151 +239,113 @@ position: height: 53 width: 3 parents: - - ROOT_ID - - TABS-e5Ruro0cjP - - TAB-d-E0Zc1cTH - - ROW-Tyv02UA_6W - type: MARKDOWN - MARKDOWN-HrzsMmvGQo: - children: [] - id: MARKDOWN-HrzsMmvGQo - meta: - code: "# \U0001F50D Filter Box\n\nDashboard filters are a powerful way to enable\ - \ teams to dive deeper into their business operations data. This filter box\ - \ helps focus the charts along the following variables:\n\n- Time Range: Focus\ - \ in on a specific time period (e.g. 
a holiday or quarter)\n- Product Line:\ - \ Choose 1 or more product lines to see relevant sales data\n- Deal Size:\ - \ Zoom in on small, medium, and / or large sales deals\n\nThe filter box below\ - \ \U0001F447 is configured to only apply to the charts in this tab (**Exploratory**).\ - \ You can customize the charts that this filter box applies to by:\n\n- entering\ - \ Edit mode in this dashboard\n- selecting the `...` in the top right corner\n\ - - selecting the **Set filter mapping** button" - height: 50 - width: 3 - parents: - - ROOT_ID - - TABS-e5Ruro0cjP - - TAB-4fthLQmdX - - ROW-0l1WcDzW3 - - COLUMN-jlNWyWCfTC + - ROOT_ID + - TABS-e5Ruro0cjP + - TAB-d-E0Zc1cTH + - ROW-Tyv02UA_6W type: MARKDOWN ROOT_ID: children: - - TABS-e5Ruro0cjP + - TABS-e5Ruro0cjP id: ROOT_ID type: ROOT ROW-0l1WcDzW3: children: - - COLUMN-jlNWyWCfTC - - CHART-OJ9aWDmn1q - - CHART-AYpv8gFi_q + - CHART-OJ9aWDmn1q + - CHART-AYpv8gFi_q id: ROW-0l1WcDzW3 meta: background: BACKGROUND_TRANSPARENT parents: - - ROOT_ID - - TABS-e5Ruro0cjP - - TAB-4fthLQmdX + - ROOT_ID + - TABS-e5Ruro0cjP + - TAB-4fthLQmdX type: ROW ROW-E7MDSGfnm: children: - - CHART-YFg-9wHE7s - - CHART-_LMKI0D3tj + - CHART-YFg-9wHE7s + - CHART-_LMKI0D3tj id: ROW-E7MDSGfnm meta: background: BACKGROUND_TRANSPARENT parents: - - ROOT_ID - - TABS-e5Ruro0cjP - - TAB-4fthLQmdX + - ROOT_ID + - TABS-e5Ruro0cjP + - TAB-4fthLQmdX type: ROW ROW-Tyv02UA_6W: children: - - COLUMN-8Rp54B6ikC - - CHART-vomBOiI7U9 - - MARKDOWN--AtDSWnapE + - COLUMN-8Rp54B6ikC + - CHART-vomBOiI7U9 + - MARKDOWN--AtDSWnapE id: ROW-Tyv02UA_6W meta: background: BACKGROUND_TRANSPARENT parents: - - ROOT_ID - - TABS-e5Ruro0cjP - - TAB-d-E0Zc1cTH + - ROOT_ID + - TABS-e5Ruro0cjP + - TAB-d-E0Zc1cTH type: ROW ROW-oAtmu5grZ: children: - - COLUMN-G6_2DvG8aK - - CHART-KKT9BsnUst - - CHART-j24u8ve41b + - COLUMN-G6_2DvG8aK + - CHART-KKT9BsnUst + - CHART-j24u8ve41b id: ROW-oAtmu5grZ meta: background: BACKGROUND_TRANSPARENT parents: - - ROOT_ID - - TABS-e5Ruro0cjP - - TAB-d-E0Zc1cTH + - ROOT_ID + - TABS-e5Ruro0cjP + - TAB-d-E0Zc1cTH type: ROW TAB-4fthLQmdX: children: - - ROW-0l1WcDzW3 - - ROW-E7MDSGfnm + - ROW-0l1WcDzW3 + - ROW-E7MDSGfnm id: TAB-4fthLQmdX meta: text: "\U0001F9ED Exploratory" parents: - - ROOT_ID - - TABS-e5Ruro0cjP + - ROOT_ID + - TABS-e5Ruro0cjP type: TAB TAB-d-E0Zc1cTH: children: - - ROW-Tyv02UA_6W - - ROW-oAtmu5grZ + - ROW-Tyv02UA_6W + - ROW-oAtmu5grZ id: TAB-d-E0Zc1cTH meta: text: "\U0001F3AF Sales Overview" parents: - - ROOT_ID - - TABS-e5Ruro0cjP + - ROOT_ID + - TABS-e5Ruro0cjP type: TAB TABS-e5Ruro0cjP: children: - - TAB-d-E0Zc1cTH - - TAB-4fthLQmdX + - TAB-d-E0Zc1cTH + - TAB-4fthLQmdX id: TABS-e5Ruro0cjP meta: {} parents: - - ROOT_ID + - ROOT_ID type: TABS metadata: timed_refresh_immune_slices: [] expanded_slices: {} refresh_frequency: 0 - default_filters: '{"671": {"__time_range": "No filter"}}' - filter_scopes: - "671": - product_line: - scope: - - TAB-4fthLQmdX - immune: [] - deal_size: - scope: - - ROOT_ID - immune: [] - __time_range: - scope: - - ROOT_ID - immune: [] + default_filters: "{}" color_scheme: supersetColors label_colors: - Medium: '#1FA8C9' - Small: '#454E7C' - Large: '#5AC189' - SUM(SALES): '#1FA8C9' - Classic Cars: '#454E7C' - Vintage Cars: '#5AC189' - Motorcycles: '#FF7F44' - Trucks and Buses: '#666666' - Planes: '#E04355' - Ships: '#FCC700' - Trains: '#A868B7' + Medium: "#1FA8C9" + Small: "#454E7C" + Large: "#5AC189" + SUM(SALES): "#1FA8C9" + Classic Cars: "#454E7C" + Vintage Cars: "#5AC189" + Motorcycles: "#FF7F44" + Trucks and Buses: "#666666" + Planes: 
"#E04355" + Ships: "#FCC700" + Trains: "#A868B7" version: 1.0.0 diff --git a/superset/examples/configs/dashboards/Video_Game_Sales.yaml b/superset/examples/configs/dashboards/Video_Game_Sales.yaml index 958d32b069..2edaad2d1a 100644 --- a/superset/examples/configs/dashboards/Video_Game_Sales.yaml +++ b/superset/examples/configs/dashboards/Video_Game_Sales.yaml @@ -16,39 +16,27 @@ # under the License. dashboard_title: Video Game Sales description: null -css: '' +css: "" slug: null +certified_by: "" +certification_details: "" +published: true uuid: c7bc10f4-6a2d-7569-caae-bbc91864ee11 position: - CHART-1L7NIcXvVN: - children: [] - id: CHART-1L7NIcXvVN - meta: - chartId: 3544 - height: 79 - sliceName: Games per Genre over time - uuid: 0f8976aa-7bb4-40c7-860b-64445a51aaaf - width: 6 - parents: - - ROOT_ID - - TABS-97PVJa11D_ - - TAB-2_QXp8aNq - - ROW-fjg6YQBkH - type: CHART CHART-7mKdnU7OUJ: children: [] id: CHART-7mKdnU7OUJ meta: chartId: 3545 - height: 80 + height: 55 sliceName: Games per Genre uuid: 0499bdec-0837-44f3-ae8a-8c670de81afd - width: 3 + width: 8 parents: - - ROOT_ID - - TABS-97PVJa11D_ - - TAB-2_QXp8aNq - - ROW-yP9SB89PZ + - ROOT_ID + - TABS-97PVJa11D_ + - TAB-2_QXp8aNq + - ROW-yP9SB89PZ type: CHART CHART-8OG3UJX-Tn: children: [] @@ -56,15 +44,15 @@ position: meta: chartId: 661 height: 54 - sliceName: '# of Games That Hit 100k in Sales By Release Year' - sliceNameOverride: 'Top 10 Consoles, by # of Hit Games' + sliceName: "# of Games That Hit 100k in Sales By Release Year" + sliceNameOverride: "Top 10 Consoles, by # of Hit Games" uuid: 2b69887b-23e3-b46d-d38c-8ea11856c555 width: 6 parents: - - ROOT_ID - - TABS-97PVJa11D_ - - TAB-lg-5ymUDgm - - ROW-7kAf1blYU + - ROOT_ID + - TABS-97PVJa11D_ + - TAB-lg-5ymUDgm + - ROW-7kAf1blYU type: CHART CHART-W02beJK7ms: children: [] @@ -77,10 +65,10 @@ position: uuid: d20b7324-3b80-24d4-37e2-3bd583b66713 width: 3 parents: - - ROOT_ID - - TABS-97PVJa11D_ - - TAB-lg-5ymUDgm - - ROW-7kAf1blYU + - ROOT_ID + - TABS-97PVJa11D_ + - TAB-lg-5ymUDgm + - ROW-7kAf1blYU type: CHART CHART-XFag0yZdLk: children: [] @@ -93,10 +81,10 @@ position: uuid: 1810975a-f6d4-07c3-495c-c3b535d01f21 width: 3 parents: - - ROOT_ID - - TABS-97PVJa11D_ - - TAB-lg-5ymUDgm - - ROW-7kAf1blYU + - ROOT_ID + - TABS-97PVJa11D_ + - TAB-lg-5ymUDgm + - ROW-7kAf1blYU type: CHART CHART-XRvRfsMsaQ: children: [] @@ -104,14 +92,14 @@ position: meta: chartId: 3546 height: 62 - sliceName: 'Top 10 Games: Proportion of Sales in Markets' + sliceName: "Top 10 Games: Proportion of Sales in Markets" uuid: a40879d5-653a-42fe-9314-bbe88ad26e92 width: 6 parents: - - ROOT_ID - - TABS-97PVJa11D_ - - TAB-lg-5ymUDgm - - ROW-NuR8GFQTO + - ROOT_ID + - TABS-97PVJa11D_ + - TAB-lg-5ymUDgm + - ROW-NuR8GFQTO type: CHART CHART-XVIYTeubZh: children: [] @@ -121,12 +109,12 @@ position: height: 80 sliceName: Games uuid: 2a5e562b-ab37-1b9b-1de3-1be4335c8e83 - width: 5 + width: 6 parents: - - ROOT_ID - - TABS-97PVJa11D_ - - TAB-2_QXp8aNq - - ROW-yP9SB89PZ + - ROOT_ID + - TABS-97PVJa11D_ + - TAB-2_QXp8aNq + - ROW-yP9SB89PZ type: CHART CHART-_sx22yawJO: children: [] @@ -138,78 +126,45 @@ position: uuid: 326fc7e5-b7f1-448e-8a6f-80d0e7ce0b64 width: 6 parents: - - ROOT_ID - - TABS-97PVJa11D_ - - TAB-lg-5ymUDgm - - ROW-NuR8GFQTO + - ROOT_ID + - TABS-97PVJa11D_ + - TAB-lg-5ymUDgm + - ROW-NuR8GFQTO type: CHART CHART-nYns6xr4Ft: children: [] id: CHART-nYns6xr4Ft meta: chartId: 3548 - height: 79 + height: 80 sliceName: Total Sales per Market (Grouped by Genre) uuid: d8bf948e-46fd-4380-9f9c-a950c34bcc92 width: 6 parents: - - 
ROOT_ID - - TABS-97PVJa11D_ - - TAB-2_QXp8aNq - - ROW-fjg6YQBkH - type: CHART - CHART-uP9GF0z0rT: - children: [] - id: CHART-uP9GF0z0rT - meta: - chartId: 3547 - height: 45 - sliceName: Filter - uuid: fd9ce7ec-ae08-4f71-93e0-7c26b132b2e6 - width: 4 - parents: - - ROOT_ID - - TABS-97PVJa11D_ - - TAB-2_QXp8aNq - - ROW-yP9SB89PZ - - COLUMN-F53B1OSMcz - type: CHART - CHART-wt6ZO8jRXZ: - children: [] - id: CHART-wt6ZO8jRXZ - meta: - chartId: 659 - height: 72 - sliceName: Rise & Fall of Video Game Consoles - sliceNameOverride: Global Sales per Console - uuid: 83b0e2d0-d38b-d980-ed8e-e1c9846361b6 - width: 12 - parents: - - ROOT_ID - - TABS-97PVJa11D_ - - TAB-lg-5ymUDgm - - ROW-XT1DsNA_V + - ROOT_ID + - TABS-97PVJa11D_ + - TAB-2_QXp8aNq + - ROW-fjg6YQBkH type: CHART COLUMN-F53B1OSMcz: children: - - MARKDOWN-7K5cBNy7qu - - CHART-uP9GF0z0rT + - MARKDOWN-7K5cBNy7qu id: COLUMN-F53B1OSMcz meta: background: BACKGROUND_TRANSPARENT width: 4 parents: - - ROOT_ID - - TABS-97PVJa11D_ - - TAB-2_QXp8aNq - - ROW-yP9SB89PZ + - ROOT_ID + - TABS-97PVJa11D_ + - TAB-2_QXp8aNq + - ROW-yP9SB89PZ type: COLUMN DASHBOARD_VERSION_KEY: v2 GRID_ID: children: [] id: GRID_ID parents: - - ROOT_ID + - ROOT_ID type: GRID HEADER_ID: id: HEADER_ID @@ -220,224 +175,194 @@ position: children: [] id: MARKDOWN-7K5cBNy7qu meta: - code: "# \U0001F93F Explore Trends\n\nDive into data on popular video games\ + code: + "# \U0001F93F Explore Trends\n\nDive into data on popular video games\ \ using the following dimensions:\n\n- Year\n- Platform\n- Publisher\n- Genre\n\ \nTo use the **Filter Games** box below, select values for each dimension\ \ you want to zoom in on and then click **Apply**. \n\nThe filter criteria\ \ you set in this Filter-box will apply to *all* charts in this tab." - height: 33 + height: 55 width: 4 parents: - - ROOT_ID - - TABS-97PVJa11D_ - - TAB-2_QXp8aNq - - ROW-yP9SB89PZ - - COLUMN-F53B1OSMcz + - ROOT_ID + - TABS-97PVJa11D_ + - TAB-2_QXp8aNq + - ROW-yP9SB89PZ + - COLUMN-F53B1OSMcz type: MARKDOWN MARKDOWN-JOZKOjVc3a: children: [] id: MARKDOWN-JOZKOjVc3a meta: - code: "## \U0001F3AEVideo Game Sales\n\nThis dashboard visualizes sales & platform\ + code: + "## \U0001F3AEVideo Game Sales\n\nThis dashboard visualizes sales & platform\ \ data on video games that sold more than 100k copies. 
The data was last updated\ \ in early 2017.\n\n[Original dataset](https://www.kaggle.com/gregorut/videogamesales)" height: 18 width: 12 parents: - - ROOT_ID - - TABS-97PVJa11D_ - - TAB-lg-5ymUDgm - - ROW-0F99WDC-sz + - ROOT_ID + - TABS-97PVJa11D_ + - TAB-lg-5ymUDgm + - ROW-0F99WDC-sz type: MARKDOWN ROOT_ID: children: - - TABS-97PVJa11D_ + - TABS-97PVJa11D_ id: ROOT_ID type: ROOT ROW-0F99WDC-sz: children: - - MARKDOWN-JOZKOjVc3a + - MARKDOWN-JOZKOjVc3a id: ROW-0F99WDC-sz meta: background: BACKGROUND_TRANSPARENT parents: - - ROOT_ID - - TABS-97PVJa11D_ - - TAB-lg-5ymUDgm + - ROOT_ID + - TABS-97PVJa11D_ + - TAB-lg-5ymUDgm type: ROW ROW-7kAf1blYU: children: - - CHART-W02beJK7ms - - CHART-XFag0yZdLk - - CHART-8OG3UJX-Tn + - CHART-W02beJK7ms + - CHART-XFag0yZdLk + - CHART-8OG3UJX-Tn id: ROW-7kAf1blYU meta: - '0': ROOT_ID + "0": ROOT_ID background: BACKGROUND_TRANSPARENT parents: - - ROOT_ID - - TABS-97PVJa11D_ - - TAB-lg-5ymUDgm + - ROOT_ID + - TABS-97PVJa11D_ + - TAB-lg-5ymUDgm type: ROW ROW-NuR8GFQTO: children: - - CHART-_sx22yawJO - - CHART-XRvRfsMsaQ + - CHART-_sx22yawJO + - CHART-XRvRfsMsaQ id: ROW-NuR8GFQTO meta: - '0': ROOT_ID - '1': TABS-97PVJa11D_ + "0": ROOT_ID + "1": TABS-97PVJa11D_ background: BACKGROUND_TRANSPARENT parents: - - ROOT_ID - - TABS-97PVJa11D_ - - TAB-lg-5ymUDgm - type: ROW - ROW-XT1DsNA_V: - children: - - CHART-wt6ZO8jRXZ - id: ROW-XT1DsNA_V - meta: - background: BACKGROUND_TRANSPARENT - parents: - - ROOT_ID - - TABS-97PVJa11D_ - - TAB-lg-5ymUDgm + - ROOT_ID + - TABS-97PVJa11D_ + - TAB-lg-5ymUDgm type: ROW ROW-fjg6YQBkH: children: - - CHART-1L7NIcXvVN - - CHART-nYns6xr4Ft + - CHART-nYns6xr4Ft + - CHART-XVIYTeubZh id: ROW-fjg6YQBkH meta: background: BACKGROUND_TRANSPARENT parents: - - ROOT_ID - - TABS-97PVJa11D_ - - TAB-2_QXp8aNq + - ROOT_ID + - TABS-97PVJa11D_ + - TAB-2_QXp8aNq type: ROW ROW-yP9SB89PZ: children: - - COLUMN-F53B1OSMcz - - CHART-XVIYTeubZh - - CHART-7mKdnU7OUJ + - COLUMN-F53B1OSMcz + - CHART-7mKdnU7OUJ id: ROW-yP9SB89PZ meta: background: BACKGROUND_TRANSPARENT parents: - - ROOT_ID - - TABS-97PVJa11D_ - - TAB-2_QXp8aNq + - ROOT_ID + - TABS-97PVJa11D_ + - TAB-2_QXp8aNq type: ROW TAB-2_QXp8aNq: children: - - ROW-yP9SB89PZ - - ROW-fjg6YQBkH + - ROW-yP9SB89PZ + - ROW-fjg6YQBkH id: TAB-2_QXp8aNq meta: text: "\U0001F93F Explore Trends" parents: - - ROOT_ID - - TABS-97PVJa11D_ + - ROOT_ID + - TABS-97PVJa11D_ type: TAB TAB-lg-5ymUDgm: children: - - ROW-0F99WDC-sz - - ROW-XT1DsNA_V - - ROW-7kAf1blYU - - ROW-NuR8GFQTO + - ROW-0F99WDC-sz + - ROW-7kAf1blYU + - ROW-NuR8GFQTO id: TAB-lg-5ymUDgm meta: text: Overview parents: - - ROOT_ID - - TABS-97PVJa11D_ + - ROOT_ID + - TABS-97PVJa11D_ type: TAB TABS-97PVJa11D_: children: - - TAB-lg-5ymUDgm - - TAB-2_QXp8aNq + - TAB-lg-5ymUDgm + - TAB-2_QXp8aNq id: TABS-97PVJa11D_ meta: {} parents: - - ROOT_ID + - ROOT_ID type: TABS metadata: timed_refresh_immune_slices: [] expanded_slices: {} refresh_frequency: 0 - default_filters: '{"3547": {"platform": ["PS", "PS2", "PS3", "XB", "X360"], "__time_range": - "No filter"}}' + default_filters: "{}" color_scheme: supersetColors - filter_scopes: - "3547": - platform: - scope: - - TAB-2_QXp8aNq - immune: [] - genre: - scope: - - ROOT_ID - immune: [] - publisher: - scope: - - ROOT_ID - immune: [] - __time_range: - scope: - - ROOT_ID - immune: [] label_colors: - '0': '#1FA8C9' - '1': '#454E7C' - '2600': '#666666' - Europe: '#5AC189' - Japan: '#FF7F44' - North America: '#666666' - Other: '#E04355' - PS2: '#FCC700' - X360: '#A868B7' - PS3: '#3CCCCB' - Wii: '#A38F79' - DS: '#8FD3E4' - PS: 
'#A1A6BD' - GBA: '#ACE1C4' - PSP: '#FEC0A1' - PS4: '#B2B2B2' - PC: '#EFA1AA' - GB: '#FDE380' - XB: '#D3B3DA' - NES: '#9EE5E5' - 3DS: '#D1C6BC' - N64: '#1FA8C9' - SNES: '#454E7C' - GC: '#5AC189' - XOne: '#FF7F44' - WiiU: '#E04355' - PSV: '#FCC700' - SAT: '#A868B7' - GEN: '#3CCCCB' - DC: '#A38F79' - SCD: '#8FD3E4' - NG: '#A1A6BD' - WS: '#ACE1C4' - TG16: '#FEC0A1' - 3DO: '#B2B2B2' - GG: '#EFA1AA' - PCFX: '#FDE380' - Nintendo: '#D3B3DA' - Take-Two Interactive: '#9EE5E5' - Microsoft Game Studios: '#D1C6BC' - Action: '#1FA8C9' - Adventure: '#454E7C' - Fighting: '#5AC189' - Misc: '#FF7F44' - Platform: '#666666' - Puzzle: '#E04355' - Racing: '#FCC700' - Role-Playing: '#A868B7' - Shooter: '#3CCCCB' - Simulation: '#A38F79' - Sports: '#8FD3E4' - Strategy: '#A1A6BD' + "0": "#1FA8C9" + "1": "#454E7C" + "2600": "#666666" + Europe: "#5AC189" + Japan: "#FF7F44" + North America: "#666666" + Other: "#E04355" + PS2: "#FCC700" + X360: "#A868B7" + PS3: "#3CCCCB" + Wii: "#A38F79" + DS: "#8FD3E4" + PS: "#A1A6BD" + GBA: "#ACE1C4" + PSP: "#FEC0A1" + PS4: "#B2B2B2" + PC: "#EFA1AA" + GB: "#FDE380" + XB: "#D3B3DA" + NES: "#9EE5E5" + 3DS: "#D1C6BC" + N64: "#1FA8C9" + SNES: "#454E7C" + GC: "#5AC189" + XOne: "#FF7F44" + WiiU: "#E04355" + PSV: "#FCC700" + SAT: "#A868B7" + GEN: "#3CCCCB" + DC: "#A38F79" + SCD: "#8FD3E4" + NG: "#A1A6BD" + WS: "#ACE1C4" + TG16: "#FEC0A1" + 3DO: "#B2B2B2" + GG: "#EFA1AA" + PCFX: "#FDE380" + Nintendo: "#D3B3DA" + Take-Two Interactive: "#9EE5E5" + Microsoft Game Studios: "#D1C6BC" + Action: "#1FA8C9" + Adventure: "#454E7C" + Fighting: "#5AC189" + Misc: "#FF7F44" + Platform: "#666666" + Puzzle: "#E04355" + Racing: "#FCC700" + Role-Playing: "#A868B7" + Shooter: "#3CCCCB" + Simulation: "#A38F79" + Sports: "#8FD3E4" + Strategy: "#A1A6BD" version: 1.0.0 diff --git a/superset/examples/country_map.py b/superset/examples/country_map.py index 4331033ca8..3caf637584 100644 --- a/superset/examples/country_map.py +++ b/superset/examples/country_map.py @@ -80,13 +80,13 @@ def load_country_map_data(only_metadata: bool = False, force: bool = False) -> N obj = db.session.query(table).filter_by(table_name=tbl_name).first() if not obj: obj = table(table_name=tbl_name, schema=schema) + db.session.add(obj) obj.main_dttm_col = "dttm" obj.database = database obj.filter_select_enabled = True if not any(col.metric_name == "avg__2004" for col in obj.metrics): col = str(column("2004").compile(db.engine)) obj.metrics.append(SqlMetric(metric_name="avg__2004", expression=f"AVG({col})")) - db.session.merge(obj) db.session.commit() obj.fetch_metadata() tbl = obj diff --git a/superset/examples/css_templates.py b/superset/examples/css_templates.py index 4f3f355895..2f67d2e1fa 100644 --- a/superset/examples/css_templates.py +++ b/superset/examples/css_templates.py @@ -27,6 +27,7 @@ def load_css_templates() -> None: obj = db.session.query(CssTemplate).filter_by(template_name="Flat").first() if not obj: obj = CssTemplate(template_name="Flat") + db.session.add(obj) css = textwrap.dedent( """\ .navbar { @@ -51,12 +52,12 @@ def load_css_templates() -> None: """ ) obj.css = css - db.session.merge(obj) db.session.commit() obj = db.session.query(CssTemplate).filter_by(template_name="Courier Black").first() if not obj: obj = CssTemplate(template_name="Courier Black") + db.session.add(obj) css = textwrap.dedent( """\ h2 { @@ -96,5 +97,4 @@ def load_css_templates() -> None: """ ) obj.css = css - db.session.merge(obj) db.session.commit() diff --git a/superset/examples/deck.py b/superset/examples/deck.py index fc1e8ba00c..326977054e 100644 
--- a/superset/examples/deck.py +++ b/superset/examples/deck.py @@ -532,6 +532,7 @@ def load_deck_dash() -> None: # pylint: disable=too-many-statements if not dash: dash = Dashboard() + db.session.add(dash) dash.published = True js = POSITION_JSON pos = json.loads(js) @@ -540,5 +541,4 @@ def load_deck_dash() -> None: # pylint: disable=too-many-statements dash.dashboard_title = title dash.slug = slug dash.slices = slices - db.session.merge(dash) db.session.commit() diff --git a/superset/examples/energy.py b/superset/examples/energy.py index 6688e5d088..998ee97a30 100644 --- a/superset/examples/energy.py +++ b/superset/examples/energy.py @@ -66,6 +66,7 @@ def load_energy( tbl = db.session.query(table).filter_by(table_name=tbl_name).first() if not tbl: tbl = table(table_name=tbl_name, schema=schema) + db.session.add(tbl) tbl.description = "Energy consumption" tbl.database = database tbl.filter_select_enabled = True @@ -76,7 +77,6 @@ def load_energy( SqlMetric(metric_name="sum__value", expression=f"SUM({col})") ) - db.session.merge(tbl) db.session.commit() tbl.fetch_metadata() diff --git a/superset/examples/flights.py b/superset/examples/flights.py index 7c8f980298..c7890cfa18 100644 --- a/superset/examples/flights.py +++ b/superset/examples/flights.py @@ -63,10 +63,10 @@ def load_flights(only_metadata: bool = False, force: bool = False) -> None: tbl = db.session.query(table).filter_by(table_name=tbl_name).first() if not tbl: tbl = table(table_name=tbl_name, schema=schema) + db.session.add(tbl) tbl.description = "Random set of flights in the US" tbl.database = database tbl.filter_select_enabled = True - db.session.merge(tbl) db.session.commit() tbl.fetch_metadata() print("Done loading table!") diff --git a/superset/examples/long_lat.py b/superset/examples/long_lat.py index 88b45548f4..6f7cc64020 100644 --- a/superset/examples/long_lat.py +++ b/superset/examples/long_lat.py @@ -92,10 +92,10 @@ def load_long_lat_data(only_metadata: bool = False, force: bool = False) -> None obj = db.session.query(table).filter_by(table_name=tbl_name).first() if not obj: obj = table(table_name=tbl_name, schema=schema) + db.session.add(obj) obj.main_dttm_col = "datetime" obj.database = database obj.filter_select_enabled = True - db.session.merge(obj) db.session.commit() obj.fetch_metadata() tbl = obj diff --git a/superset/examples/misc_dashboard.py b/superset/examples/misc_dashboard.py index 4146ea1bd3..aa8d037495 100644 --- a/superset/examples/misc_dashboard.py +++ b/superset/examples/misc_dashboard.py @@ -34,40 +34,26 @@ def load_misc_dashboard() -> None: if not dash: dash = Dashboard() + db.session.add(dash) js = textwrap.dedent( """\ { - "CHART-BkeVbh8ANQ": { - "children": [], - "id": "CHART-BkeVbh8ANQ", - "meta": { - "chartId": 4004, - "height": 34, - "sliceName": "Multi Line", - "width": 8 - }, - "type": "CHART" - }, - "CHART-H1HYNzEANX": { - "children": [], - "id": "CHART-H1HYNzEANX", - "meta": { - "chartId": 3940, - "height": 50, - "sliceName": "Energy Sankey", - "width": 6 - }, - "type": "CHART" - }, "CHART-HJOYVMV0E7": { "children": [], "id": "CHART-HJOYVMV0E7", "meta": { "chartId": 3969, - "height": 63, + "height": 69, "sliceName": "Mapbox Long/Lat", - "width": 6 + "uuid": "164efe31-295b-4408-aaa6-2f4bfb58a212", + "width": 4 }, + "parents": [ + "ROOT_ID", + "GRID_ID", + "ROW-S1MK4M4A4X", + "COLUMN-ByUFVf40EQ" + ], "type": "CHART" }, "CHART-S1WYNz4AVX": { @@ -75,32 +61,16 @@ def load_misc_dashboard() -> None: "id": "CHART-S1WYNz4AVX", "meta": { "chartId": 3989, - "height": 25, + "height": 69, 
"sliceName": "Parallel Coordinates", + "uuid": "e84f7e74-031a-47bb-9f80-ae0694dcca48", "width": 4 }, - "type": "CHART" - }, - "CHART-r19KVMNCE7": { - "children": [], - "id": "CHART-r19KVMNCE7", - "meta": { - "chartId": 3971, - "height": 34, - "sliceName": "Calendar Heatmap multiformat 0", - "width": 4 - }, - "type": "CHART" - }, - "CHART-rJ4K4GV04Q": { - "children": [], - "id": "CHART-rJ4K4GV04Q", - "meta": { - "chartId": 3941, - "height": 63, - "sliceName": "Energy Force Layout", - "width": 6 - }, + "parents": [ + "ROOT_ID", + "GRID_ID", + "ROW-SytNzNA4X" + ], "type": "CHART" }, "CHART-rkgF4G4A4X": { @@ -108,54 +78,27 @@ def load_misc_dashboard() -> None: "id": "CHART-rkgF4G4A4X", "meta": { "chartId": 3970, - "height": 25, + "height": 69, "sliceName": "Birth in France by department in 2016", - "width": 8 + "uuid": "54583ae9-c99a-42b5-a906-7ee2adfe1fb1", + "width": 4 }, + "parents": [ + "ROOT_ID", + "GRID_ID", + "ROW-SytNzNA4X" + ], "type": "CHART" }, - "CHART-rywK4GVR4X": { - "children": [], - "id": "CHART-rywK4GVR4X", - "meta": { - "chartId": 3942, - "height": 50, - "sliceName": "Heatmap", - "width": 6 - }, - "type": "CHART" - }, - "COLUMN-ByUFVf40EQ": { - "children": [ - "CHART-rywK4GVR4X", - "CHART-HJOYVMV0E7" - ], - "id": "COLUMN-ByUFVf40EQ", - "meta": { - "background": "BACKGROUND_TRANSPARENT", - "width": 6 - }, - "type": "COLUMN" - }, - "COLUMN-rkmYVGN04Q": { - "children": [ - "CHART-rJ4K4GV04Q", - "CHART-H1HYNzEANX" - ], - "id": "COLUMN-rkmYVGN04Q", - "meta": { - "background": "BACKGROUND_TRANSPARENT", - "width": 6 - }, - "type": "COLUMN" - }, + "DASHBOARD_VERSION_KEY": "v2", "GRID_ID": { "children": [ - "ROW-SytNzNA4X", - "ROW-S1MK4M4A4X", - "ROW-HkFFEzVRVm" + "ROW-SytNzNA4X" ], "id": "GRID_ID", + "parents": [ + "ROOT_ID" + ], "type": "GRID" }, "HEADER_ID": { @@ -172,40 +115,22 @@ def load_misc_dashboard() -> None: "id": "ROOT_ID", "type": "ROOT" }, - "ROW-HkFFEzVRVm": { - "children": [ - "CHART-r19KVMNCE7", - "CHART-BkeVbh8ANQ" - ], - "id": "ROW-HkFFEzVRVm", - "meta": { - "background": "BACKGROUND_TRANSPARENT" - }, - "type": "ROW" - }, - "ROW-S1MK4M4A4X": { - "children": [ - "COLUMN-rkmYVGN04Q", - "COLUMN-ByUFVf40EQ" - ], - "id": "ROW-S1MK4M4A4X", - "meta": { - "background": "BACKGROUND_TRANSPARENT" - }, - "type": "ROW" - }, "ROW-SytNzNA4X": { "children": [ "CHART-rkgF4G4A4X", - "CHART-S1WYNz4AVX" + "CHART-S1WYNz4AVX", + "CHART-HJOYVMV0E7" ], "id": "ROW-SytNzNA4X", "meta": { "background": "BACKGROUND_TRANSPARENT" }, + "parents": [ + "ROOT_ID", + "GRID_ID" + ], "type": "ROW" - }, - "DASHBOARD_VERSION_KEY": "v2" + } } """ ) @@ -215,5 +140,4 @@ def load_misc_dashboard() -> None: dash.position_json = json.dumps(pos, indent=4) dash.slug = DASH_SLUG dash.slices = slices - db.session.merge(dash) db.session.commit() diff --git a/superset/examples/multiformat_time_series.py b/superset/examples/multiformat_time_series.py index 6bad2a7ac2..4c1e796316 100644 --- a/superset/examples/multiformat_time_series.py +++ b/superset/examples/multiformat_time_series.py @@ -82,6 +82,7 @@ def load_multiformat_time_series( # pylint: disable=too-many-locals obj = db.session.query(table).filter_by(table_name=tbl_name).first() if not obj: obj = table(table_name=tbl_name, schema=schema) + db.session.add(obj) obj.main_dttm_col = "ds" obj.database = database obj.filter_select_enabled = True @@ -100,7 +101,6 @@ def load_multiformat_time_series( # pylint: disable=too-many-locals col.python_date_format = dttm_and_expr[0] col.database_expression = dttm_and_expr[1] col.is_dttm = True - db.session.merge(obj) 
db.session.commit() obj.fetch_metadata() tbl = obj diff --git a/superset/examples/paris.py b/superset/examples/paris.py index 1180c428fe..fa5c77b84d 100644 --- a/superset/examples/paris.py +++ b/superset/examples/paris.py @@ -57,9 +57,9 @@ def load_paris_iris_geojson(only_metadata: bool = False, force: bool = False) -> tbl = db.session.query(table).filter_by(table_name=tbl_name).first() if not tbl: tbl = table(table_name=tbl_name, schema=schema) + db.session.add(tbl) tbl.description = "Map of Paris" tbl.database = database tbl.filter_select_enabled = True - db.session.merge(tbl) db.session.commit() tbl.fetch_metadata() diff --git a/superset/examples/random_time_series.py b/superset/examples/random_time_series.py index 9a296ec2c4..4a2d10aee9 100644 --- a/superset/examples/random_time_series.py +++ b/superset/examples/random_time_series.py @@ -67,10 +67,10 @@ def load_random_time_series_data( obj = db.session.query(table).filter_by(table_name=tbl_name).first() if not obj: obj = table(table_name=tbl_name, schema=schema) + db.session.add(obj) obj.main_dttm_col = "ds" obj.database = database obj.filter_select_enabled = True - db.session.merge(obj) db.session.commit() obj.fetch_metadata() tbl = obj diff --git a/superset/examples/sf_population_polygons.py b/superset/examples/sf_population_polygons.py index 76c039afb8..ba5905f58a 100644 --- a/superset/examples/sf_population_polygons.py +++ b/superset/examples/sf_population_polygons.py @@ -59,9 +59,9 @@ def load_sf_population_polygons( tbl = db.session.query(table).filter_by(table_name=tbl_name).first() if not tbl: tbl = table(table_name=tbl_name, schema=schema) + db.session.add(tbl) tbl.description = "Population density of San Francisco" tbl.database = database tbl.filter_select_enabled = True - db.session.merge(tbl) db.session.commit() tbl.fetch_metadata() diff --git a/superset/examples/tabbed_dashboard.py b/superset/examples/tabbed_dashboard.py index 58c0ba3e4c..b057263345 100644 --- a/superset/examples/tabbed_dashboard.py +++ b/superset/examples/tabbed_dashboard.py @@ -33,6 +33,7 @@ def load_tabbed_dashboard(_: bool = False) -> None: if not dash: dash = Dashboard() + db.session.add(dash) js = textwrap.dedent( """ @@ -556,6 +557,4 @@ def load_tabbed_dashboard(_: bool = False) -> None: dash.slices = slices dash.dashboard_title = "Tabbed Dashboard" dash.slug = slug - - db.session.merge(dash) db.session.commit() diff --git a/superset/examples/world_bank.py b/superset/examples/world_bank.py index 31d956f5fd..1541e3e472 100644 --- a/superset/examples/world_bank.py +++ b/superset/examples/world_bank.py @@ -24,14 +24,8 @@ from sqlalchemy.sql import column import superset.utils.database from superset import app, db -from superset.connectors.sqla.models import SqlMetric -from superset.models.dashboard import Dashboard -from superset.models.slice import Slice -from superset.utils import core as utils -from superset.utils.core import DatasourceType - -from ..connectors.base.models import BaseDatasource -from .helpers import ( +from superset.connectors.sqla.models import BaseDatasource, SqlMetric +from superset.examples.helpers import ( get_example_url, get_examples_folder, get_slice_json, @@ -40,6 +34,10 @@ from .helpers import ( misc_dash_slices, update_slice_ids, ) +from superset.models.dashboard import Dashboard +from superset.models.slice import Slice +from superset.utils import core as utils +from superset.utils.core import DatasourceType def load_world_bank_health_n_pop( # pylint: disable=too-many-locals, too-many-statements @@ -87,6 +85,7 @@ def 
load_world_bank_health_n_pop( # pylint: disable=too-many-locals, too-many-s tbl = db.session.query(table).filter_by(table_name=tbl_name).first() if not tbl: tbl = table(table_name=tbl_name, schema=schema) + db.session.add(tbl) tbl.description = utils.readfile( os.path.join(get_examples_folder(), "countries.md") ) @@ -110,7 +109,6 @@ def load_world_bank_health_n_pop( # pylint: disable=too-many-locals, too-many-s SqlMetric(metric_name=metric, expression=f"{aggr_func}({col})") ) - db.session.merge(tbl) db.session.commit() tbl.fetch_metadata() @@ -126,6 +124,7 @@ def load_world_bank_health_n_pop( # pylint: disable=too-many-locals, too-many-s if not dash: dash = Dashboard() + db.session.add(dash) dash.published = True pos = dashboard_positions slices = update_slice_ids(pos) @@ -134,7 +133,6 @@ def load_world_bank_health_n_pop( # pylint: disable=too-many-locals, too-many-s dash.position_json = json.dumps(pos, indent=4) dash.slug = slug dash.slices = slices - db.session.merge(dash) db.session.commit() @@ -169,35 +167,6 @@ def create_slices(tbl: BaseDatasource) -> list[Slice]: } return [ - Slice( - slice_name="Region Filter", - viz_type="filter_box", - datasource_type=DatasourceType.TABLE, - datasource_id=tbl.id, - params=get_slice_json( - defaults, - viz_type="filter_box", - date_filter=False, - filter_configs=[ - { - "asc": False, - "clearable": True, - "column": "region", - "key": "2s98dfu", - "metric": "sum__SP_POP_TOTL", - "multiple": False, - }, - { - "asc": False, - "clearable": True, - "key": "li3j2lk", - "column": "country_name", - "metric": "sum__SP_POP_TOTL", - "multiple": True, - }, - ], - ), - ), Slice( slice_name="World's Population", viz_type="big_number", @@ -374,18 +343,12 @@ def create_slices(tbl: BaseDatasource) -> list[Slice]: dashboard_positions = { - "CHART-36bfc934": { - "children": [], - "id": "CHART-36bfc934", - "meta": {"chartId": 40, "height": 25, "sliceName": "Region Filter", "width": 2}, - "type": "CHART", - }, "CHART-37982887": { "children": [], "id": "CHART-37982887", "meta": { "chartId": 41, - "height": 25, + "height": 52, "sliceName": "World's Population", "width": 2, }, @@ -466,7 +429,7 @@ dashboard_positions = { "type": "COLUMN", }, "COLUMN-fe3914b8": { - "children": ["CHART-36bfc934", "CHART-37982887"], + "children": ["CHART-37982887"], "id": "COLUMN-fe3914b8", "meta": {"background": "BACKGROUND_TRANSPARENT", "width": 2}, "type": "COLUMN", diff --git a/superset/explore/api.py b/superset/explore/api.py index ebda161bea..faadbe8d9a 100644 --- a/superset/explore/api.py +++ b/superset/explore/api.py @@ -19,18 +19,18 @@ import logging from flask import g, request, Response from flask_appbuilder.api import expose, protect, safe -from superset.charts.commands.exceptions import ChartNotFoundError +from superset.commands.chart.exceptions import ChartNotFoundError +from superset.commands.explore.get import GetExploreCommand +from superset.commands.explore.parameters import CommandParameters +from superset.commands.temporary_cache.exceptions import ( + TemporaryCacheAccessDeniedError, + TemporaryCacheResourceNotFoundError, +) from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP -from superset.explore.commands.get import GetExploreCommand -from superset.explore.commands.parameters import CommandParameters from superset.explore.exceptions import DatasetAccessDeniedError, WrongEndpointError from superset.explore.permalink.exceptions import ExplorePermalinkGetFailedError from superset.explore.schemas import ExploreContextSchema from superset.extensions import 
event_logger -from superset.temporary_cache.commands.exceptions import ( - TemporaryCacheAccessDeniedError, - TemporaryCacheResourceNotFoundError, -) from superset.views.base_api import BaseSupersetApi, statsd_metrics logger = logging.getLogger(__name__) diff --git a/superset/explore/form_data/api.py b/superset/explore/form_data/api.py index 36489ca449..6c882d92a6 100644 --- a/superset/explore/form_data/api.py +++ b/superset/explore/form_data/api.py @@ -20,18 +20,18 @@ from flask import request, Response from flask_appbuilder.api import expose, protect, safe from marshmallow import ValidationError -from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP -from superset.explore.form_data.commands.create import CreateFormDataCommand -from superset.explore.form_data.commands.delete import DeleteFormDataCommand -from superset.explore.form_data.commands.get import GetFormDataCommand -from superset.explore.form_data.commands.parameters import CommandParameters -from superset.explore.form_data.commands.update import UpdateFormDataCommand -from superset.explore.form_data.schemas import FormDataPostSchema, FormDataPutSchema -from superset.extensions import event_logger -from superset.temporary_cache.commands.exceptions import ( +from superset.commands.explore.form_data.create import CreateFormDataCommand +from superset.commands.explore.form_data.delete import DeleteFormDataCommand +from superset.commands.explore.form_data.get import GetFormDataCommand +from superset.commands.explore.form_data.parameters import CommandParameters +from superset.commands.explore.form_data.update import UpdateFormDataCommand +from superset.commands.temporary_cache.exceptions import ( TemporaryCacheAccessDeniedError, TemporaryCacheResourceNotFoundError, ) +from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP +from superset.explore.form_data.schemas import FormDataPostSchema, FormDataPutSchema +from superset.extensions import event_logger from superset.views.base_api import BaseSupersetApi, requires_json, statsd_metrics logger = logging.getLogger(__name__) diff --git a/superset/explore/permalink/api.py b/superset/explore/permalink/api.py index b249d4dee2..bc9bd1cf67 100644 --- a/superset/explore/permalink/api.py +++ b/superset/explore/permalink/api.py @@ -20,17 +20,17 @@ from flask import request, Response from flask_appbuilder.api import expose, protect, safe from marshmallow import ValidationError -from superset.charts.commands.exceptions import ( +from superset.commands.chart.exceptions import ( ChartAccessDeniedError, ChartNotFoundError, ) -from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP -from superset.datasets.commands.exceptions import ( +from superset.commands.dataset.exceptions import ( DatasetAccessDeniedError, DatasetNotFoundError, ) -from superset.explore.permalink.commands.create import CreateExplorePermalinkCommand -from superset.explore.permalink.commands.get import GetExplorePermalinkCommand +from superset.commands.explore.permalink.create import CreateExplorePermalinkCommand +from superset.commands.explore.permalink.get import GetExplorePermalinkCommand +from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP from superset.explore.permalink.exceptions import ExplorePermalinkInvalidStateError from superset.explore.permalink.schemas import ExplorePermalinkStateSchema from superset.extensions import event_logger diff --git a/superset/explore/utils.py b/superset/explore/utils.py index ca73cb39fb..7d5c0d86be 100644 --- a/superset/explore/utils.py +++ 
b/superset/explore/utils.py @@ -17,10 +17,14 @@ from typing import Optional from superset import security_manager -from superset.charts.commands.exceptions import ( +from superset.commands.chart.exceptions import ( ChartAccessDeniedError, ChartNotFoundError, ) +from superset.commands.dataset.exceptions import ( + DatasetAccessDeniedError, + DatasetNotFoundError, +) from superset.commands.exceptions import ( DatasourceNotFoundValidationError, DatasourceTypeInvalidError, @@ -29,10 +33,6 @@ from superset.commands.exceptions import ( from superset.daos.chart import ChartDAO from superset.daos.dataset import DatasetDAO from superset.daos.query import QueryDAO -from superset.datasets.commands.exceptions import ( - DatasetAccessDeniedError, - DatasetNotFoundError, -) from superset.utils.core import DatasourceType diff --git a/superset/extensions/metadb.py b/superset/extensions/metadb.py index 5b014b7af6..bdfe1ae1e7 100644 --- a/superset/extensions/metadb.py +++ b/superset/extensions/metadb.py @@ -38,6 +38,7 @@ joins and unions are done in memory, using the SQLite engine. from __future__ import annotations import datetime +import decimal import operator import urllib.parse from collections.abc import Iterator @@ -49,7 +50,6 @@ from shillelagh.adapters.base import Adapter from shillelagh.backends.apsw.dialects.base import APSWDialect from shillelagh.exceptions import ProgrammingError from shillelagh.fields import ( - Blob, Boolean, Date, DateTime, @@ -86,7 +86,7 @@ class SupersetAPSWDialect(APSWDialect): Queries can also join data across different Superset databases. - The dialect is built in top of the shillelagh library, leveraging SQLite to + The dialect is built on top of the Shillelagh library, leveraging SQLite to create virtual tables on-the-fly proxying Superset tables. The `SupersetShillelaghAdapter` adapter is responsible for returning data when a Superset table is accessed. @@ -164,11 +164,32 @@ class Duration(Field[datetime.timedelta, datetime.timedelta]): db_api_type = "DATETIME" +class Decimal(Field[decimal.Decimal, decimal.Decimal]): + """ + Shillelagh field used for representing decimals. + """ + + type = "DECIMAL" + db_api_type = "NUMBER" + + +class FallbackField(Field[Any, str]): + """ + Fallback field for unknown types; converts to string. + """ + + type = "TEXT" + db_api_type = "STRING" + + def parse(self, value: Any) -> str | None: + return value if value is None else str(value) + + # pylint: disable=too-many-instance-attributes class SupersetShillelaghAdapter(Adapter): """ - A shillelagh adapter for Superset tables. + A Shillelagh adapter for Superset tables. Shillelagh adapters are responsible for fetching data from a given resource, allowing it to be represented as a virtual table in SQLite. This one works @@ -190,6 +211,7 @@ class SupersetShillelaghAdapter(Adapter): datetime.datetime: DateTime, datetime.time: Time, datetime.timedelta: Duration, + decimal.Decimal: Decimal, } @staticmethod @@ -268,7 +290,7 @@ class SupersetShillelaghAdapter(Adapter): """ Convert a Python type into a Shillelagh field.
""" - class_ = cls.type_map.get(python_type, Blob) + class_ = cls.type_map.get(python_type, FallbackField) return class_(filters=[Equal, Range], order=Order.ANY, exact=True) def _set_columns(self) -> None: diff --git a/superset/extensions/metastore_cache.py b/superset/extensions/metastore_cache.py index b6effdfe91..435c38ced8 100644 --- a/superset/extensions/metastore_cache.py +++ b/superset/extensions/metastore_cache.py @@ -71,7 +71,7 @@ class SupersetMetastoreCache(BaseCache): @staticmethod def _prune() -> None: # pylint: disable=import-outside-toplevel - from superset.key_value.commands.delete_expired import ( + from superset.commands.key_value.delete_expired import ( DeleteExpiredKeyValueCommand, ) @@ -85,7 +85,7 @@ class SupersetMetastoreCache(BaseCache): def set(self, key: str, value: Any, timeout: Optional[int] = None) -> bool: # pylint: disable=import-outside-toplevel - from superset.key_value.commands.upsert import UpsertKeyValueCommand + from superset.commands.key_value.upsert import UpsertKeyValueCommand UpsertKeyValueCommand( resource=RESOURCE, @@ -98,7 +98,7 @@ class SupersetMetastoreCache(BaseCache): def add(self, key: str, value: Any, timeout: Optional[int] = None) -> bool: # pylint: disable=import-outside-toplevel - from superset.key_value.commands.create import CreateKeyValueCommand + from superset.commands.key_value.create import CreateKeyValueCommand try: CreateKeyValueCommand( @@ -115,7 +115,7 @@ class SupersetMetastoreCache(BaseCache): def get(self, key: str) -> Any: # pylint: disable=import-outside-toplevel - from superset.key_value.commands.get import GetKeyValueCommand + from superset.commands.key_value.get import GetKeyValueCommand return GetKeyValueCommand( resource=RESOURCE, @@ -131,6 +131,6 @@ class SupersetMetastoreCache(BaseCache): def delete(self, key: str) -> Any: # pylint: disable=import-outside-toplevel - from superset.key_value.commands.delete import DeleteKeyValueCommand + from superset.commands.key_value.delete import DeleteKeyValueCommand return DeleteKeyValueCommand(resource=RESOURCE, key=self.get_key(key)).run() diff --git a/superset/jinja_context.py b/superset/jinja_context.py index c159a667ee..3b046b732e 100644 --- a/superset/jinja_context.py +++ b/superset/jinja_context.py @@ -17,9 +17,11 @@ """Defines the templating context for SQL Lab""" import json import re +from datetime import datetime from functools import lru_cache, partial from typing import Any, Callable, cast, Optional, TYPE_CHECKING, TypedDict, Union +import dateutil from flask import current_app, g, has_request_context, request from flask_babel import gettext as _ from jinja2 import DebugUndefined @@ -28,8 +30,8 @@ from sqlalchemy.engine.interfaces import Dialect from sqlalchemy.sql.expression import bindparam from sqlalchemy.types import String +from superset.commands.dataset.exceptions import DatasetNotFoundError from superset.constants import LRU_CACHE_MAX_SIZE -from superset.datasets.commands.exceptions import DatasetNotFoundError from superset.exceptions import SupersetTemplateException from superset.extensions import feature_flag_manager from superset.utils.core import ( @@ -486,6 +488,19 @@ class BaseTemplateProcessor: class JinjaTemplateProcessor(BaseTemplateProcessor): + def _parse_datetime(self, dttm: str) -> Optional[datetime]: + """ + Try to parse a datetime and default to None in the worst case. + + Since this may have been rendered by different engines, the datetime may + vary slightly in format. We try to make it consistent, and if all else + fails, just return None. 
+ """ + try: + return dateutil.parser.parse(dttm) + except dateutil.parser.ParserError: + return None + def set_context(self, **kwargs: Any) -> None: super().set_context(**kwargs) extra_cache = ExtraCache( @@ -494,6 +509,23 @@ class JinjaTemplateProcessor(BaseTemplateProcessor): removed_filters=self._removed_filters, dialect=self._database.get_dialect(), ) + + from_dttm = ( + self._parse_datetime(dttm) + if (dttm := self._context.get("from_dttm")) + else None + ) + to_dttm = ( + self._parse_datetime(dttm) + if (dttm := self._context.get("to_dttm")) + else None + ) + + dataset_macro_with_context = partial( + dataset_macro, + from_dttm=from_dttm, + to_dttm=to_dttm, + ) self._context.update( { "url_param": partial(safe_proxy, extra_cache.url_param), @@ -502,7 +534,7 @@ class JinjaTemplateProcessor(BaseTemplateProcessor): "cache_key_wrapper": partial(safe_proxy, extra_cache.cache_key_wrapper), "filter_values": partial(safe_proxy, extra_cache.filter_values), "get_filters": partial(safe_proxy, extra_cache.get_filters), - "dataset": partial(safe_proxy, dataset_macro), + "dataset": partial(safe_proxy, dataset_macro_with_context), } ) @@ -638,12 +670,18 @@ def dataset_macro( dataset_id: int, include_metrics: bool = False, columns: Optional[list[str]] = None, + from_dttm: Optional[datetime] = None, + to_dttm: Optional[datetime] = None, ) -> str: """ Given a dataset ID, return the SQL that represents it. The generated SQL includes all columns (including computed) by default. Optionally the user can also request metrics to be included, and columns to group by. + + The from_dttm and to_dttm parameters are filled in from filter values in explore + views, and we take them to make those properties available to jinja templates in + the underlying dataset. """ # pylint: disable=import-outside-toplevel from superset.daos.dataset import DatasetDAO @@ -659,6 +697,8 @@ def dataset_macro( "filter": [], "metrics": metrics if include_metrics else None, "columns": columns, + "from_dttm": from_dttm, + "to_dttm": to_dttm, } sqla_query = dataset.get_query_str_extended(query_obj, mutate=False) sql = sqla_query.sql diff --git a/superset/key_value/shared_entries.py b/superset/key_value/shared_entries.py index 7895b75907..130313157a 100644 --- a/superset/key_value/shared_entries.py +++ b/superset/key_value/shared_entries.py @@ -28,7 +28,7 @@ CODEC = JsonKeyValueCodec() def get_shared_value(key: SharedKey) -> Optional[Any]: # pylint: disable=import-outside-toplevel - from superset.key_value.commands.get import GetKeyValueCommand + from superset.commands.key_value.get import GetKeyValueCommand uuid_key = uuid3(NAMESPACE, key) return GetKeyValueCommand(RESOURCE, key=uuid_key, codec=CODEC).run() @@ -36,7 +36,7 @@ def get_shared_value(key: SharedKey) -> Optional[Any]: def set_shared_value(key: SharedKey, value: Any) -> None: # pylint: disable=import-outside-toplevel - from superset.key_value.commands.create import CreateKeyValueCommand + from superset.commands.key_value.create import CreateKeyValueCommand uuid_key = uuid3(NAMESPACE, key) CreateKeyValueCommand( diff --git a/superset/migrations/shared/migrate_viz/base.py b/superset/migrations/shared/migrate_viz/base.py index b9826fee34..f9e1b9d3c9 100644 --- a/superset/migrations/shared/migrate_viz/base.py +++ b/superset/migrations/shared/migrate_viz/base.py @@ -123,7 +123,7 @@ class MigrateViz: ] @classmethod - def upgrade_slice(cls, slc: Slice) -> Slice: + def upgrade_slice(cls, slc: Slice) -> None: clz = cls(slc.params) form_data_bak = copy.deepcopy(clz.data) @@ -141,10 
+141,9 @@ class MigrateViz: if "form_data" in (query_context := try_load_json(slc.query_context)): query_context["form_data"] = clz.data slc.query_context = json.dumps(query_context) - return slc @classmethod - def downgrade_slice(cls, slc: Slice) -> Slice: + def downgrade_slice(cls, slc: Slice) -> None: form_data = try_load_json(slc.params) if "viz_type" in (form_data_bak := form_data.get(FORM_DATA_BAK_FIELD_NAME, {})): slc.params = json.dumps(form_data_bak) @@ -153,19 +152,15 @@ class MigrateViz: if "form_data" in query_context: query_context["form_data"] = form_data_bak slc.query_context = json.dumps(query_context) - return slc @classmethod def upgrade(cls, session: Session) -> None: slices = session.query(Slice).filter(Slice.viz_type == cls.source_viz_type) for slc in paginated_update( slices, - lambda current, total: print( - f" Updating {current}/{total} charts", end="\r" - ), + lambda current, total: print(f"Upgraded {current}/{total} charts"), ): - new_viz = cls.upgrade_slice(slc) - session.merge(new_viz) + cls.upgrade_slice(slc) @classmethod def downgrade(cls, session: Session) -> None: @@ -177,9 +172,6 @@ class MigrateViz: ) for slc in paginated_update( slices, - lambda current, total: print( - f" Downgrading {current}/{total} charts", end="\r" - ), + lambda current, total: print(f"Downgraded {current}/{total} charts"), ): - new_viz = cls.downgrade_slice(slc) - session.merge(new_viz) + cls.downgrade_slice(slc) diff --git a/superset/migrations/shared/migrate_viz/processors.py b/superset/migrations/shared/migrate_viz/processors.py index 4ff6b2a934..5fbd624aa8 100644 --- a/superset/migrations/shared/migrate_viz/processors.py +++ b/superset/migrations/shared/migrate_viz/processors.py @@ -16,6 +16,8 @@ # under the License. from typing import Any +from superset.utils.core import as_list + from .base import MigrateViz @@ -34,40 +36,6 @@ class MigrateTreeMap(MigrateViz): self.data["metric"] = self.data["metrics"][0] -class MigrateAreaChart(MigrateViz): - """ - Migrate area charts. - - This migration is incomplete, see https://github.com/apache/superset/pull/24703#discussion_r1265222611 - for more details. If you fix this migration, please update the ``migrate_chart`` - function in ``superset/charts/commands/importers/v1/utils.py`` so that it gets - applied in chart imports. 
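A note on the `session.merge` removals that recur throughout this diff (the example loaders earlier and the Alembic migrations below): an instance that was `add`-ed to or loaded by a session is already tracked, so plain attribute mutation plus `commit()` persists it; `merge()` only copied state back onto the very instance it was handed. A minimal sketch under that assumption, with a throwaway `Chart` model standing in for Superset's `Slice`:

```python
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import declarative_base, sessionmaker

Base = declarative_base()


class Chart(Base):
    __tablename__ = "chart"
    id = Column(Integer, primary_key=True)
    viz_type = Column(String(50))


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

session.add(Chart(viz_type="area"))
session.commit()

chart = session.query(Chart).first()  # instance is already session-bound
chart.viz_type = "echarts_area"       # mutation is tracked by the unit of work
session.commit()                      # no session.merge(chart) needed

assert session.query(Chart).one().viz_type == "echarts_area"
```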
- """ - - source_viz_type = "area" - target_viz_type = "echarts_area" - remove_keys = {"contribution", "stacked_style", "x_axis_label"} - - def _pre_action(self) -> None: - if self.data.get("contribution"): - self.data["contributionMode"] = "row" - - if stacked := self.data.get("stacked_style"): - stacked_map = { - "expand": "Expand", - "stack": "Stack", - } - self.data["show_extra_controls"] = True - self.data["stack"] = stacked_map.get(stacked) - - if x_axis := self.data.get("granularity_sqla"): - self.data["x_axis"] = x_axis - - if x_axis_label := self.data.get("x_axis_label"): - self.data["x_axis_title"] = x_axis_label - self.data["x_axis_title_margin"] = 30 - - class MigratePivotTable(MigrateViz): source_viz_type = "pivot_table" target_viz_type = "pivot_table_v2" @@ -131,3 +99,117 @@ class MigrateSunburst(MigrateViz): source_viz_type = "sunburst" target_viz_type = "sunburst_v2" rename_keys = {"groupby": "columns"} + + +class TimeseriesChart(MigrateViz): + has_x_axis_control = True + rename_keys = { + "bottom_margin": "x_axis_title_margin", + "left_margin": "y_axis_title_margin", + "show_controls": "show_extra_controls", + "x_axis_label": "x_axis_title", + "x_axis_format": "x_axis_time_format", + "x_ticks_layout": "xAxisLabelRotation", + "y_axis_label": "y_axis_title", + "y_axis_showminmax": "truncateYAxis", + "y_log_scale": "logAxis", + } + remove_keys = {"contribution", "show_brush", "show_markers"} + + def _pre_action(self) -> None: + self.data["contributionMode"] = "row" if self.data.get("contribution") else None + self.data["zoomable"] = self.data.get("show_brush") == "yes" + self.data["markerEnabled"] = self.data.get("show_markers") or False + self.data["y_axis_showminmax"] = True + + bottom_margin = self.data.get("bottom_margin") + if self.data.get("x_axis_label") and ( + not bottom_margin or bottom_margin == "auto" + ): + self.data["bottom_margin"] = 30 + + if (rolling_type := self.data.get("rolling_type")) and rolling_type != "None": + self.data["rolling_type"] = rolling_type + + if time_compare := self.data.get("time_compare"): + self.data["time_compare"] = [ + value + " ago" for value in as_list(time_compare) if value + ] + + comparison_type = self.data.get("comparison_type") or "values" + self.data["comparison_type"] = ( + "difference" if comparison_type == "absolute" else comparison_type + ) + + if x_ticks_layout := self.data.get("x_ticks_layout"): + self.data["x_ticks_layout"] = 45 if x_ticks_layout == "45°" else 0 + + +class MigrateLineChart(TimeseriesChart): + source_viz_type = "line" + target_viz_type = "echarts_timeseries_line" + + def _pre_action(self) -> None: + super()._pre_action() + + self.remove_keys.add("line_interpolation") + + line_interpolation = self.data.get("line_interpolation") + if line_interpolation == "cardinal": + self.target_viz_type = "echarts_timeseries_smooth" + elif line_interpolation == "step-before": + self.target_viz_type = "echarts_timeseries_step" + self.data["seriesType"] = "start" + elif line_interpolation == "step-after": + self.target_viz_type = "echarts_timeseries_step" + self.data["seriesType"] = "end" + + +class MigrateAreaChart(TimeseriesChart): + source_viz_type = "area" + target_viz_type = "echarts_area" + stacked_map = { + "expand": "Expand", + "stack": "Stack", + "stream": "Stream", + } + + def _pre_action(self) -> None: + super()._pre_action() + + self.remove_keys.add("stacked_style") + + self.data["stack"] = self.stacked_map.get( + self.data.get("stacked_style") or "stack" + ) + + self.data["opacity"] = 0.7 + + +class 
MigrateBubbleChart(MigrateViz): + source_viz_type = "bubble" + target_viz_type = "bubble_v2" + rename_keys = { + "bottom_margin": "x_axis_title_margin", + "left_margin": "y_axis_title_margin", + "limit": "row_limit", + "x_axis_format": "xAxisFormat", + "x_log_scale": "logXAxis", + "x_ticks_layout": "xAxisLabelRotation", + "y_axis_showminmax": "truncateYAxis", + "y_log_scale": "logYAxis", + } + remove_keys = {"x_axis_showminmax"} + + def _pre_action(self) -> None: + bottom_margin = self.data.get("bottom_margin") + if self.data.get("x_axis_label") and ( + not bottom_margin or bottom_margin == "auto" + ): + self.data["bottom_margin"] = 30 + + if x_ticks_layout := self.data.get("x_ticks_layout"): + self.data["x_ticks_layout"] = 45 if x_ticks_layout == "45°" else 0 + + # Truncate y-axis by default to preserve layout + self.data["y_axis_showminmax"] = True diff --git a/superset/migrations/shared/security_converge.py b/superset/migrations/shared/security_converge.py index 9b1730a2a1..42a68acb24 100644 --- a/superset/migrations/shared/security_converge.py +++ b/superset/migrations/shared/security_converge.py @@ -243,7 +243,6 @@ def migrate_roles( if new_pvm not in role.permissions: logger.info(f"Add {new_pvm} to {role}") role.permissions.append(new_pvm) - session.merge(role) # Delete old permissions _delete_old_permissions(session, pvm_map) diff --git a/superset/migrations/shared/utils.py b/superset/migrations/shared/utils.py index 32e7dc1a39..2ae0dfeac1 100644 --- a/superset/migrations/shared/utils.py +++ b/superset/migrations/shared/utils.py @@ -43,11 +43,9 @@ def table_has_column(table: str, column: str) -> bool: :param column: A column name :returns: True iff the column exists in the table """ - config = op.get_context().config - engine = engine_from_config( - config.get_section(config.config_ini_section), prefix="sqlalchemy." 
- ) - insp = reflection.Inspector.from_engine(engine) + + insp = inspect(op.get_context().bind) + try: return any(col["name"] == column for col in insp.get_columns(table)) except NoSuchTableError: diff --git a/superset/migrations/versions/2016-04-25_08-54_c3a8f8611885_materializing_permission.py b/superset/migrations/versions/2016-04-25_08-54_c3a8f8611885_materializing_permission.py index b92378f092..c3d04e875a 100644 --- a/superset/migrations/versions/2016-04-25_08-54_c3a8f8611885_materializing_permission.py +++ b/superset/migrations/versions/2016-04-25_08-54_c3a8f8611885_materializing_permission.py @@ -56,7 +56,6 @@ def upgrade(): for slc in session.query(Slice).all(): if slc.datasource: slc.perm = slc.datasource.perm - session.merge(slc) session.commit() db.session.close() diff --git a/superset/migrations/versions/2016-09-07_23-50_33d996bcc382_update_slice_model.py b/superset/migrations/versions/2016-09-07_23-50_33d996bcc382_update_slice_model.py index f4373a3f38..8f4542cb3c 100644 --- a/superset/migrations/versions/2016-09-07_23-50_33d996bcc382_update_slice_model.py +++ b/superset/migrations/versions/2016-09-07_23-50_33d996bcc382_update_slice_model.py @@ -56,7 +56,6 @@ def upgrade(): slc.datasource_id = slc.druid_datasource_id if slc.table_id: slc.datasource_id = slc.table_id - session.merge(slc) session.commit() session.close() @@ -69,7 +68,6 @@ def downgrade(): slc.druid_datasource_id = slc.datasource_id if slc.datasource_type == "table": slc.table_id = slc.datasource_id - session.merge(slc) session.commit() session.close() op.drop_column("slices", "datasource_id") diff --git a/superset/migrations/versions/2017-01-24_12-31_db0c65b146bd_update_slice_model_json.py b/superset/migrations/versions/2017-01-24_12-31_db0c65b146bd_update_slice_model_json.py index 1f3dbab636..0bae8cd9a3 100644 --- a/superset/migrations/versions/2017-01-24_12-31_db0c65b146bd_update_slice_model_json.py +++ b/superset/migrations/versions/2017-01-24_12-31_db0c65b146bd_update_slice_model_json.py @@ -57,7 +57,6 @@ def upgrade(): try: d = json.loads(slc.params or "{}") slc.params = json.dumps(d, indent=2, sort_keys=True) - session.merge(slc) session.commit() print(f"Upgraded ({i}/{slice_len}): {slc.slice_name}") except Exception as ex: diff --git a/superset/migrations/versions/2017-02-08_14-16_a99f2f7c195a_rewriting_url_from_shortner_with_new_.py b/superset/migrations/versions/2017-02-08_14-16_a99f2f7c195a_rewriting_url_from_shortner_with_new_.py index 8e97ada3cd..8dafb77bee 100644 --- a/superset/migrations/versions/2017-02-08_14-16_a99f2f7c195a_rewriting_url_from_shortner_with_new_.py +++ b/superset/migrations/versions/2017-02-08_14-16_a99f2f7c195a_rewriting_url_from_shortner_with_new_.py @@ -80,7 +80,6 @@ def upgrade(): "/".join(split[:-1]) + "/?form_data=" + parse.quote_plus(json.dumps(d)) ) url.url = newurl - session.merge(url) session.commit() print(f"Updating url ({i}/{urls_len})") session.close() diff --git a/superset/migrations/versions/2017-12-08_08-19_67a6ac9b727b_update_spatial_params.py b/superset/migrations/versions/2017-12-08_08-19_67a6ac9b727b_update_spatial_params.py index 6073e8b84c..81bbb47914 100644 --- a/superset/migrations/versions/2017-12-08_08-19_67a6ac9b727b_update_spatial_params.py +++ b/superset/migrations/versions/2017-12-08_08-19_67a6ac9b727b_update_spatial_params.py @@ -58,7 +58,6 @@ def upgrade(): del params["latitude"] del params["longitude"] slc.params = json.dumps(params) - session.merge(slc) session.commit() session.close() diff --git 
a/superset/migrations/versions/2017-12-17_11-06_21e88bc06c02_annotation_migration.py b/superset/migrations/versions/2017-12-17_11-06_21e88bc06c02_annotation_migration.py index 4b1b807a6f..785e282397 100644 --- a/superset/migrations/versions/2017-12-17_11-06_21e88bc06c02_annotation_migration.py +++ b/superset/migrations/versions/2017-12-17_11-06_21e88bc06c02_annotation_migration.py @@ -69,7 +69,6 @@ def upgrade(): ) params["annotation_layers"] = new_layers slc.params = json.dumps(params) - session.merge(slc) session.commit() session.close() @@ -86,6 +85,5 @@ def downgrade(): if layers: params["annotation_layers"] = [layer["value"] for layer in layers] slc.params = json.dumps(params) - session.merge(slc) session.commit() session.close() diff --git a/superset/migrations/versions/2018-02-13_08-07_e866bd2d4976_smaller_grid.py b/superset/migrations/versions/2018-02-13_08-07_e866bd2d4976_smaller_grid.py index bf6276d702..6241ab2a39 100644 --- a/superset/migrations/versions/2018-02-13_08-07_e866bd2d4976_smaller_grid.py +++ b/superset/migrations/versions/2018-02-13_08-07_e866bd2d4976_smaller_grid.py @@ -62,7 +62,6 @@ def upgrade(): pos["v"] = 1 dashboard.position_json = json.dumps(positions, indent=2) - session.merge(dashboard) session.commit() session.close() @@ -85,6 +84,5 @@ def downgrade(): pos["v"] = 0 dashboard.position_json = json.dumps(positions, indent=2) - session.merge(dashboard) session.commit() pass diff --git a/superset/migrations/versions/2018-04-10_11-19_bf706ae5eb46_cal_heatmap_metric_to_metrics.py b/superset/migrations/versions/2018-04-10_11-19_bf706ae5eb46_cal_heatmap_metric_to_metrics.py index 49b19b9c69..2aa703cfec 100644 --- a/superset/migrations/versions/2018-04-10_11-19_bf706ae5eb46_cal_heatmap_metric_to_metrics.py +++ b/superset/migrations/versions/2018-04-10_11-19_bf706ae5eb46_cal_heatmap_metric_to_metrics.py @@ -59,7 +59,6 @@ def upgrade(): params["metrics"] = [params.get("metric")] del params["metric"] slc.params = json.dumps(params, indent=2, sort_keys=True) - session.merge(slc) session.commit() print(f"Upgraded ({i}/{slice_len}): {slc.slice_name}") except Exception as ex: diff --git a/superset/migrations/versions/2018-07-22_11-59_bebcf3fed1fe_convert_dashboard_v1_positions.py b/superset/migrations/versions/2018-07-22_11-59_bebcf3fed1fe_convert_dashboard_v1_positions.py index 620e2c5008..3dc0bcc455 100644 --- a/superset/migrations/versions/2018-07-22_11-59_bebcf3fed1fe_convert_dashboard_v1_positions.py +++ b/superset/migrations/versions/2018-07-22_11-59_bebcf3fed1fe_convert_dashboard_v1_positions.py @@ -647,7 +647,6 @@ def upgrade(): sorted_by_key = collections.OrderedDict(sorted(v2_layout.items())) dashboard.position_json = json.dumps(sorted_by_key, indent=2) - session.merge(dashboard) session.commit() else: print(f"Skip converted dash_id: {dashboard.id}") diff --git a/superset/migrations/versions/2018-07-26_11-10_c82ee8a39623_add_implicit_tags.py b/superset/migrations/versions/2018-07-26_11-10_c82ee8a39623_add_implicit_tags.py index 0179ba7d03..c6a66d6b53 100644 --- a/superset/migrations/versions/2018-07-26_11-10_c82ee8a39623_add_implicit_tags.py +++ b/superset/migrations/versions/2018-07-26_11-10_c82ee8a39623_add_implicit_tags.py @@ -33,7 +33,7 @@ from flask_appbuilder.models.mixins import AuditMixin from sqlalchemy import Column, DateTime, Enum, ForeignKey, Integer, String from sqlalchemy.ext.declarative import declarative_base, declared_attr -from superset.tags.models import ObjectTypes, TagTypes +from superset.tags.models import ObjectType, TagType from 
superset.utils.core import get_user_id Base = declarative_base() @@ -77,7 +77,7 @@ class Tag(Base, AuditMixinNullable): id = Column(Integer, primary_key=True) name = Column(String(250), unique=True) - type = Column(Enum(TagTypes)) + type = Column(Enum(TagType)) class TaggedObject(Base, AuditMixinNullable): @@ -86,7 +86,7 @@ class TaggedObject(Base, AuditMixinNullable): id = Column(Integer, primary_key=True) tag_id = Column(Integer, ForeignKey("tag.id")) object_id = Column(Integer) - object_type = Column(Enum(ObjectTypes)) + object_type = Column(Enum(ObjectType)) class User(Base): diff --git a/superset/migrations/versions/2018-08-01_11-47_7fcdcde0761c_.py b/superset/migrations/versions/2018-08-01_11-47_7fcdcde0761c_.py index 02021799e9..111cea4506 100644 --- a/superset/migrations/versions/2018-08-01_11-47_7fcdcde0761c_.py +++ b/superset/migrations/versions/2018-08-01_11-47_7fcdcde0761c_.py @@ -76,7 +76,6 @@ def upgrade(): dashboard.id, len(original_text), len(text) ) ) - session.merge(dashboard) session.commit() diff --git a/superset/migrations/versions/2019-04-09_16-27_80aa3f04bc82_add_parent_ids_in_dashboard_layout.py b/superset/migrations/versions/2019-04-09_16-27_80aa3f04bc82_add_parent_ids_in_dashboard_layout.py index c6361009ee..47c8a6cbcc 100644 --- a/superset/migrations/versions/2019-04-09_16-27_80aa3f04bc82_add_parent_ids_in_dashboard_layout.py +++ b/superset/migrations/versions/2019-04-09_16-27_80aa3f04bc82_add_parent_ids_in_dashboard_layout.py @@ -80,7 +80,6 @@ def upgrade(): dashboard.position_json = json.dumps( layout, indent=None, separators=(",", ":"), sort_keys=True ) - session.merge(dashboard) except Exception as ex: logging.exception(ex) @@ -110,7 +109,6 @@ def downgrade(): dashboard.position_json = json.dumps( layout, indent=None, separators=(",", ":"), sort_keys=True ) - session.merge(dashboard) except Exception as ex: logging.exception(ex) diff --git a/superset/migrations/versions/2020-02-07_14-13_3325d4caccc8_dashboard_scoped_filters.py b/superset/migrations/versions/2020-02-07_14-13_3325d4caccc8_dashboard_scoped_filters.py index 5aa38fd13a..ec02a8ca84 100644 --- a/superset/migrations/versions/2020-02-07_14-13_3325d4caccc8_dashboard_scoped_filters.py +++ b/superset/migrations/versions/2020-02-07_14-13_3325d4caccc8_dashboard_scoped_filters.py @@ -99,8 +99,6 @@ def upgrade(): ) else: dashboard.json_metadata = None - - session.merge(dashboard) except Exception as ex: logging.exception(f"dashboard {dashboard.id} has error: {ex}") diff --git a/superset/migrations/versions/2020-08-12_00-24_978245563a02_migrate_iframe_to_dash_markdown.py b/superset/migrations/versions/2020-08-12_00-24_978245563a02_migrate_iframe_to_dash_markdown.py index 4202de4560..70f1fcc07c 100644 --- a/superset/migrations/versions/2020-08-12_00-24_978245563a02_migrate_iframe_to_dash_markdown.py +++ b/superset/migrations/versions/2020-08-12_00-24_978245563a02_migrate_iframe_to_dash_markdown.py @@ -163,7 +163,6 @@ def upgrade(): separators=(",", ":"), sort_keys=True, ) - session.merge(dashboard) # remove iframe, separator and markup charts slices_to_remove = ( diff --git a/superset/migrations/versions/2020-09-28_17-57_b56500de1855_add_uuid_column_to_import_mixin.py b/superset/migrations/versions/2020-09-28_17-57_b56500de1855_add_uuid_column_to_import_mixin.py index 9ff117b1e2..574ca1536a 100644 --- a/superset/migrations/versions/2020-09-28_17-57_b56500de1855_add_uuid_column_to_import_mixin.py +++ b/superset/migrations/versions/2020-09-28_17-57_b56500de1855_add_uuid_column_to_import_mixin.py @@ -96,7 +96,6 
@@ def update_position_json(dashboard, session, uuid_map): del object_["meta"]["uuid"] dashboard.position_json = json.dumps(layout, indent=4) - session.merge(dashboard) def update_dashboards(session, uuid_map): diff --git a/superset/migrations/versions/2021-02-14_11-46_1412ec1e5a7b_legacy_force_directed_to_echart.py b/superset/migrations/versions/2021-02-14_11-46_1412ec1e5a7b_legacy_force_directed_to_echart.py index 4407c1f8b7..24a81270d1 100644 --- a/superset/migrations/versions/2021-02-14_11-46_1412ec1e5a7b_legacy_force_directed_to_echart.py +++ b/superset/migrations/versions/2021-02-14_11-46_1412ec1e5a7b_legacy_force_directed_to_echart.py @@ -70,7 +70,6 @@ def upgrade(): slc.params = json.dumps(params) slc.viz_type = "graph_chart" - session.merge(slc) session.commit() session.close() @@ -100,6 +99,5 @@ def downgrade(): slc.params = json.dumps(params) slc.viz_type = "directed_force" - session.merge(slc) session.commit() session.close() diff --git a/superset/migrations/versions/2022-08-16_15-23_6d3c6f9d665d_fix_table_chart_conditional_formatting_.py b/superset/migrations/versions/2022-08-16_15-23_6d3c6f9d665d_fix_table_chart_conditional_formatting_.py index 30caf7efa1..8d9f070935 100644 --- a/superset/migrations/versions/2022-08-16_15-23_6d3c6f9d665d_fix_table_chart_conditional_formatting_.py +++ b/superset/migrations/versions/2022-08-16_15-23_6d3c6f9d665d_fix_table_chart_conditional_formatting_.py @@ -72,7 +72,6 @@ def upgrade(): new_conditional_formatting.append(formatter) params["conditional_formatting"] = new_conditional_formatting slc.params = json.dumps(params) - session.merge(slc) session.commit() session.close() diff --git a/superset/migrations/versions/2023-09-06_13-18_317970b4400c_added_time_secondary_column_to_.py b/superset/migrations/versions/2023-09-06_13-18_317970b4400c_added_time_secondary_column_to_.py index 859a6fe590..4972a86911 100755 --- a/superset/migrations/versions/2023-09-06_13-18_317970b4400c_added_time_secondary_column_to_.py +++ b/superset/migrations/versions/2023-09-06_13-18_317970b4400c_added_time_secondary_column_to_.py @@ -32,7 +32,7 @@ from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import Session from superset import db -from superset.migrations.shared.utils import paginated_update +from superset.migrations.shared.utils import paginated_update, table_has_column Base = declarative_base() @@ -45,23 +45,25 @@ class SqlaTable(Base): def upgrade(): - op.add_column( - "tables", - sa.Column( - "always_filter_main_dttm", - sa.Boolean(), - nullable=True, - default=False, - server_default=sa.false(), - ), - ) + if not table_has_column("tables", "always_filter_main_dttm"): + op.add_column( + "tables", + sa.Column( + "always_filter_main_dttm", + sa.Boolean(), + nullable=True, + default=False, + server_default=sa.false(), + ), + ) - bind = op.get_bind() - session = db.Session(bind=bind) + bind = op.get_bind() + session = db.Session(bind=bind) - for table in paginated_update(session.query(SqlaTable)): - table.always_filter_main_dttm = False + for table in paginated_update(session.query(SqlaTable)): + table.always_filter_main_dttm = False def downgrade(): - op.drop_column("tables", "always_filter_main_dttm") + if table_has_column("tables", "always_filter_main_dttm"): + op.drop_column("tables", "always_filter_main_dttm") diff --git a/superset/examples/configs/charts/Vehicle_Sales_Filter.yaml b/superset/migrations/versions/2023-12-01_12-03_b7851ee5522f_replay_317970b4400c.py similarity index 51% rename from 
superset/examples/configs/charts/Vehicle_Sales_Filter.yaml rename to superset/migrations/versions/2023-12-01_12-03_b7851ee5522f_replay_317970b4400c.py index 91c8f76bb8..b4286736f0 100644 --- a/superset/examples/configs/charts/Vehicle_Sales_Filter.yaml +++ b/superset/migrations/versions/2023-12-01_12-03_b7851ee5522f_replay_317970b4400c.py @@ -14,34 +14,31 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -slice_name: Vehicle Sales Filter -viz_type: filter_box -params: - adhoc_filters: [] - datasource: 23__table - date_filter: true - filter_configs: - - asc: true - clearable: true - column: product_line - key: 7oUjq15eQ - label: Product Line - multiple: true - searchAllOptions: false - - asc: true - clearable: true - column: deal_size - key: c3hO6Eub8 - label: Deal Size - multiple: true - searchAllOptions: false - granularity_sqla: order_date - queryFields: {} - slice_id: 671 - time_range: '2003-01-01T00:00:00 : 2005-06-01T00:00:00' - url_params: {} - viz_type: filter_box -cache_timeout: null -uuid: a5689df7-98fc-7c51-602c-ebd92dc3ec70 -version: 1.0.0 -dataset_uuid: e8623bb9-5e00-f531-506a-19607f5f8005 +"""replay 317970b4400c + +Revision ID: b7851ee5522f +Revises: 4b85906e5b91 +Create Date: 2023-12-01 12:03:27.538945 + +""" + +# revision identifiers, used by Alembic. +revision = "b7851ee5522f" +down_revision = "4b85906e5b91" + +from importlib import import_module + +import sqlalchemy as sa +from alembic import op + +module = import_module( + "superset.migrations.versions.2023-09-06_13-18_317970b4400c_added_time_secondary_column_to_" +) + + +def upgrade(): + module.upgrade() + + +def downgrade(): + module.downgrade() diff --git a/superset/models/core.py b/superset/models/core.py index 6fa394de06..eece661ec5 100755 --- a/superset/models/core.py +++ b/superset/models/core.py @@ -59,8 +59,8 @@ from sqlalchemy.schema import UniqueConstraint from sqlalchemy.sql import ColumnElement, expression, Select from superset import app, db_engine_specs +from superset.commands.database.exceptions import DatabaseInvalidError from superset.constants import LRU_CACHE_MAX_SIZE, PASSWORD_MASK -from superset.databases.commands.exceptions import DatabaseInvalidError from superset.databases.utils import make_url_safe from superset.db_engine_specs.base import MetricType, TimeGrain from superset.extensions import ( @@ -237,6 +237,11 @@ class Database( # this will prevent any 'trash value' strings from going through return self.get_extra().get("disable_data_preview", False) is True + @property + def schema_options(self) -> dict[str, Any]: + """Additional schema display config for engines with complex schemas""" + return self.get_extra().get("schema_options", {}) + @property def data(self) -> dict[str, Any]: return { @@ -248,6 +253,7 @@ class Database( "allows_cost_estimate": self.allows_cost_estimate, "allows_virtual_table_explore": self.allows_virtual_table_explore, "explore_database_id": self.explore_database_id, + "schema_options": self.schema_options, "parameters": self.parameters, "disable_data_preview": self.disable_data_preview, "parameters_schema": self.parameters_schema, @@ -838,7 +844,9 @@ class Database( self, table_name: str, schema: str | None = None ) -> list[ResultSetColumnType]: with self.get_inspector_with_context() as inspector: - return self.db_engine_spec.get_columns(inspector, table_name, schema) + return self.db_engine_spec.get_columns( + inspector, table_name, schema, self.schema_options + ) def 
get_metrics( self, diff --git a/superset/models/dashboard.py b/superset/models/dashboard.py index 18c94aa179..919c832ab5 100644 --- a/superset/models/dashboard.py +++ b/superset/models/dashboard.py @@ -47,8 +47,12 @@ from sqlalchemy.sql import join, select from sqlalchemy.sql.elements import BinaryExpression from superset import app, db, is_feature_enabled, security_manager -from superset.connectors.base.models import BaseDatasource -from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn +from superset.connectors.sqla.models import ( + BaseDatasource, + SqlaTable, + SqlMetric, + TableColumn, +) from superset.daos.datasource import DatasourceDAO from superset.extensions import cache_manager from superset.models.filter_set import FilterSet diff --git a/superset/models/helpers.py b/superset/models/helpers.py index aafd58f34d..3e88bec44f 100644 --- a/superset/models/helpers.py +++ b/superset/models/helpers.py @@ -705,10 +705,7 @@ class ExploreMixin: # pylint: disable=too-many-public-methods "MIN": sa.func.MIN, "MAX": sa.func.MAX, } - - @property - def fetch_value_predicate(self) -> str: - return "fix this!" + fetch_values_predicate = None @property def type(self) -> str: @@ -785,17 +782,20 @@ class ExploreMixin: # pylint: disable=too-many-public-methods def columns(self) -> list[Any]: raise NotImplementedError() - def get_fetch_values_predicate( - self, template_processor: Optional[BaseTemplateProcessor] = None - ) -> TextClause: - raise NotImplementedError() - def get_extra_cache_keys(self, query_obj: dict[str, Any]) -> list[Hashable]: raise NotImplementedError() def get_template_processor(self, **kwargs: Any) -> BaseTemplateProcessor: raise NotImplementedError() + def get_fetch_values_predicate( + self, + template_processor: Optional[ # pylint: disable=unused-argument + BaseTemplateProcessor + ] = None, + ) -> TextClause: + return self.fetch_values_predicate + def get_sqla_row_level_filters( self, template_processor: BaseTemplateProcessor, @@ -1340,37 +1340,46 @@ class ExploreMixin: # pylint: disable=too-many-public-methods ) return and_(*l) - def values_for_column(self, column_name: str, limit: int = 10000) -> list[Any]: - """Runs query against sqla to retrieve some - sample values for the given column. - """ - cols = {} - for col in self.columns: - if isinstance(col, dict): - cols[col.get("column_name")] = col - else: - cols[col.column_name] = col - - target_col = cols[column_name] - tp = None # todo(hughhhh): add back self.get_template_processor() + def values_for_column( + self, column_name: str, limit: int = 10000, denormalize_column: bool = False + ) -> list[Any]: + # denormalize column name before querying for values + # unless disabled in the dataset configuration + db_dialect = self.database.get_dialect() + column_name_ = ( + self.database.db_engine_spec.denormalize_name(db_dialect, column_name) + if denormalize_column + else column_name + ) + cols = {col.column_name: col for col in self.columns} + target_col = cols[column_name_] + tp = self.get_template_processor() tbl, cte = self.get_from_clause(tp) - if isinstance(target_col, dict): - sql_column = sa.column(target_col.get("name")) - else: - sql_column = target_col - - qry = sa.select([sql_column]).select_from(tbl).distinct() + qry = ( + sa.select( + # The alias (label) here is important because some dialects will + # automatically add a random alias to the projection because of the + # call to DISTINCT; others will uppercase the column names. This + # gives us a deterministic column name in the dataframe. 
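To see what the `label("column_values")` above guards against, compare a labeled DISTINCT projection with the dialect-chosen alias it replaces. A minimal sketch against in-memory SQLite (using the 2.0-style `select()`, versus the legacy list form in the hunk):

```python
import pandas as pd
import sqlalchemy as sa

engine = sa.create_engine("sqlite://")
with engine.begin() as conn:
    conn.execute(sa.text("CREATE TABLE t (region TEXT)"))
    conn.execute(sa.text("INSERT INTO t VALUES ('EU'), ('EU'), ('US')"))

t = sa.table("t", sa.column("region"))
qry = (
    sa.select(sa.column("region").label("column_values"))  # deterministic name
    .select_from(t)
    .distinct()
    .limit(10000)
)
sql = str(qry.compile(engine, compile_kwargs={"literal_binds": True}))
df = pd.read_sql_query(sql=sql, con=engine)
assert sorted(df["column_values"]) == ["EU", "US"]  # not a dialect-chosen alias
```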
+ [target_col.get_sqla_col(template_processor=tp).label("column_values")] + ) + .select_from(tbl) + .distinct() + ) if limit: qry = qry.limit(limit) + if self.fetch_values_predicate: + qry = qry.where(self.get_fetch_values_predicate(template_processor=tp)) + with self.database.get_sqla_engine_with_context() as engine: sql = qry.compile(engine, compile_kwargs={"literal_binds": True}) sql = self._apply_cte(sql, cte) sql = self.mutate_query_from_config(sql) df = pd.read_sql_query(sql=sql, con=engine) - return df[column_name].to_list() + return df["column_values"].to_list() def get_timestamp_expression( self, @@ -1942,7 +1951,7 @@ class ExploreMixin: # pylint: disable=too-many-public-methods ) having_clause_and += [self.text(having)] - if apply_fetch_values_predicate and self.fetch_values_predicate: # type: ignore + if apply_fetch_values_predicate and self.fetch_values_predicate: qry = qry.where( self.get_fetch_values_predicate(template_processor=template_processor) ) diff --git a/superset/models/slice.py b/superset/models/slice.py index 248f4ee947..b41bb72a85 100644 --- a/superset/models/slice.py +++ b/superset/models/slice.py @@ -51,7 +51,7 @@ from superset.viz import BaseViz, viz_types if TYPE_CHECKING: from superset.common.query_context import QueryContext from superset.common.query_context_factory import QueryContextFactory - from superset.connectors.base.models import BaseDatasource + from superset.connectors.sqla.models import BaseDatasource metadata = Model.metadata # pylint: disable=no-member slice_user = Table( diff --git a/superset/queries/saved_queries/api.py b/superset/queries/saved_queries/api.py index 69e1a6191b..ce283dd6d6 100644 --- a/superset/queries/saved_queries/api.py +++ b/superset/queries/saved_queries/api.py @@ -32,19 +32,17 @@ from superset.commands.importers.exceptions import ( NoValidFilesFoundError, ) from superset.commands.importers.v1.utils import get_contents_from_bundle +from superset.commands.query.delete import DeleteSavedQueryCommand +from superset.commands.query.exceptions import ( + SavedQueryDeleteFailedError, + SavedQueryNotFoundError, +) +from superset.commands.query.export import ExportSavedQueriesCommand +from superset.commands.query.importers.dispatcher import ImportSavedQueriesCommand from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP, RouteMethod from superset.databases.filters import DatabaseFilter from superset.extensions import event_logger from superset.models.sql_lab import SavedQuery -from superset.queries.saved_queries.commands.delete import DeleteSavedQueryCommand -from superset.queries.saved_queries.commands.exceptions import ( - SavedQueryDeleteFailedError, - SavedQueryNotFoundError, -) -from superset.queries.saved_queries.commands.export import ExportSavedQueriesCommand -from superset.queries.saved_queries.commands.importers.dispatcher import ( - ImportSavedQueriesCommand, -) from superset.queries.saved_queries.filters import ( SavedQueryAllTextFilter, SavedQueryFavoriteFilter, @@ -84,7 +82,11 @@ class SavedQueryRestApi(BaseSupersetModelRestApi): base_filters = [["id", SavedQueryFilter, lambda: []]] show_columns = [ + "changed_on", "changed_on_delta_humanized", + "changed_by.first_name", + "changed_by.id", + "changed_by.last_name", "created_by.first_name", "created_by.id", "created_by.last_name", @@ -99,7 +101,11 @@ class SavedQueryRestApi(BaseSupersetModelRestApi): "template_parameters", ] list_columns = [ + "changed_on", "changed_on_delta_humanized", + "changed_by.first_name", + "changed_by.id", + "changed_by.last_name", 
"created_on", "created_by.first_name", "created_by.id", @@ -142,7 +148,7 @@ class SavedQueryRestApi(BaseSupersetModelRestApi): "last_run_delta_humanized", ] - search_columns = ["id", "database", "label", "schema", "created_by"] + search_columns = ["id", "database", "label", "schema", "created_by", "changed_by"] if is_feature_enabled("TAGGING_SYSTEM"): search_columns += ["tags"] search_filters = { @@ -163,7 +169,7 @@ class SavedQueryRestApi(BaseSupersetModelRestApi): "database": "database_name", } base_related_field_filters = {"database": [["id", DatabaseFilter, lambda: []]]} - allowed_rel_fields = {"database"} + allowed_rel_fields = {"database", "changed_by", "created_by"} allowed_distinct_fields = {"schema"} def pre_add(self, item: SavedQuery) -> None: diff --git a/superset/reports/api.py b/superset/reports/api.py index 3116aef3b8..8238213fef 100644 --- a/superset/reports/api.py +++ b/superset/reports/api.py @@ -26,13 +26,9 @@ from marshmallow import ValidationError from superset import is_feature_enabled from superset.charts.filters import ChartFilter -from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP, RouteMethod -from superset.dashboards.filters import DashboardAccessFilter -from superset.databases.filters import DatabaseFilter -from superset.extensions import event_logger -from superset.reports.commands.create import CreateReportScheduleCommand -from superset.reports.commands.delete import DeleteReportScheduleCommand -from superset.reports.commands.exceptions import ( +from superset.commands.report.create import CreateReportScheduleCommand +from superset.commands.report.delete import DeleteReportScheduleCommand +from superset.commands.report.exceptions import ( ReportScheduleCreateFailedError, ReportScheduleDeleteFailedError, ReportScheduleForbiddenError, @@ -40,7 +36,11 @@ from superset.reports.commands.exceptions import ( ReportScheduleNotFoundError, ReportScheduleUpdateFailedError, ) -from superset.reports.commands.update import UpdateReportScheduleCommand +from superset.commands.report.update import UpdateReportScheduleCommand +from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP, RouteMethod +from superset.dashboards.filters import DashboardAccessFilter +from superset.databases.filters import DatabaseFilter +from superset.extensions import event_logger from superset.reports.filters import ReportScheduleAllTextFilter, ReportScheduleFilter from superset.reports.models import ReportSchedule from superset.reports.schemas import ( @@ -198,6 +198,7 @@ class ReportScheduleRestApi(BaseSupersetModelRestApi): search_columns = [ "name", "active", + "changed_by", "created_by", "owners", "type", @@ -207,7 +208,14 @@ class ReportScheduleRestApi(BaseSupersetModelRestApi): "chart_id", ] search_filters = {"name": [ReportScheduleAllTextFilter]} - allowed_rel_fields = {"owners", "chart", "dashboard", "database", "created_by"} + allowed_rel_fields = { + "owners", + "chart", + "dashboard", + "database", + "created_by", + "changed_by", + } base_related_field_filters = { "chart": [["id", ChartFilter, lambda: []]], diff --git a/superset/reports/notifications/slack.py b/superset/reports/notifications/slack.py index a769622b57..fbae398bc5 100644 --- a/superset/reports/notifications/slack.py +++ b/superset/reports/notifications/slack.py @@ -44,6 +44,7 @@ from superset.reports.notifications.exceptions import ( NotificationParamException, NotificationUnprocessableException, ) +from superset.utils.core import get_email_address_list from superset.utils.decorators import 
statsd_gauge logger = logging.getLogger(__name__) @@ -60,7 +61,15 @@ class SlackNotification(BaseNotification): # pylint: disable=too-few-public-met type = ReportRecipientType.SLACK def _get_channel(self) -> str: - return json.loads(self._recipient.recipient_config_json)["target"] + """ + Get the recipient's channel(s). + Note Slack SDK uses "channel" to refer to one or more + channels. Multiple channels are demarcated by a comma. + :returns: The comma separated list of channel(s) + """ + recipient_str = json.loads(self._recipient.recipient_config_json)["target"] + + return ",".join(get_email_address_list(recipient_str)) def _message_template(self, table: str = "") -> str: return __( diff --git a/superset/row_level_security/api.py b/superset/row_level_security/api.py index 0a823f74d6..fc505e724f 100644 --- a/superset/row_level_security/api.py +++ b/superset/row_level_security/api.py @@ -28,14 +28,14 @@ from superset.commands.exceptions import ( DatasourceNotFoundValidationError, RolesNotFoundValidationError, ) +from superset.commands.security.create import CreateRLSRuleCommand +from superset.commands.security.delete import DeleteRLSRuleCommand +from superset.commands.security.exceptions import RLSRuleNotFoundError +from superset.commands.security.update import UpdateRLSRuleCommand from superset.connectors.sqla.models import RowLevelSecurityFilter from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP, RouteMethod from superset.daos.exceptions import DAOCreateFailedError, DAOUpdateFailedError from superset.extensions import event_logger -from superset.row_level_security.commands.create import CreateRLSRuleCommand -from superset.row_level_security.commands.delete import DeleteRLSRuleCommand -from superset.row_level_security.commands.exceptions import RLSRuleNotFoundError -from superset.row_level_security.commands.update import UpdateRLSRuleCommand from superset.row_level_security.schemas import ( get_delete_ids_schema, openapi_spec_methods_override, @@ -77,6 +77,9 @@ class RLSRestApi(BaseSupersetModelRestApi): "roles.name", "clause", "changed_on_delta_humanized", + "changed_by.first_name", + "changed_by.last_name", + "changed_by.id", "group_key", ] order_columns = [ @@ -115,6 +118,8 @@ class RLSRestApi(BaseSupersetModelRestApi): "roles", "group_key", "clause", + "created_by", + "changed_by", ) edit_columns = add_columns @@ -123,7 +128,7 @@ class RLSRestApi(BaseSupersetModelRestApi): add_model_schema = RLSPostSchema() edit_model_schema = RLSPutSchema() - allowed_rel_fields = {"tables", "roles"} + allowed_rel_fields = {"tables", "roles", "created_by", "changed_by"} base_related_field_filters = { "tables": [["id", DatasourceFilter, lambda: []]], "roles": [["id", BaseFilterRelatedRoles, lambda: []]], diff --git a/superset/row_level_security/schemas.py b/superset/row_level_security/schemas.py index 6c8249b875..f02767ec13 100644 --- a/superset/row_level_security/schemas.py +++ b/superset/row_level_security/schemas.py @@ -20,6 +20,7 @@ from marshmallow import fields, Schema from marshmallow.validate import Length, OneOf from superset.connectors.sqla.models import RowLevelSecurityFilter +from superset.dashboards.schemas import UserSchema from superset.utils.core import RowLevelSecurityFilterType id_description = "Unique if of rls filter" @@ -81,6 +82,7 @@ class RLSListSchema(Schema): ) group_key = fields.String(metadata={"description": "group_key_description"}) description = fields.String(metadata={"description": "description_description"}) + changed_by = 
fields.Nested(UserSchema(exclude=["username"])) class RLSShowSchema(Schema): diff --git a/superset/security/api.py b/superset/security/api.py index b4a3069759..acafc32570 100644 --- a/superset/security/api.py +++ b/superset/security/api.py @@ -24,7 +24,7 @@ from flask_appbuilder.security.decorators import permission_name, protect from flask_wtf.csrf import generate_csrf from marshmallow import EXCLUDE, fields, post_load, Schema, ValidationError -from superset.embedded_dashboard.commands.exceptions import ( +from superset.commands.dashboard.embedded.exceptions import ( EmbeddedDashboardNotFoundError, ) from superset.extensions import event_logger diff --git a/superset/security/manager.py b/superset/security/manager.py index c8d2c236ab..5eb1afdda9 100644 --- a/superset/security/manager.py +++ b/superset/security/manager.py @@ -78,8 +78,11 @@ from superset.utils.urls import get_url_host if TYPE_CHECKING: from superset.common.query_context import QueryContext - from superset.connectors.base.models import BaseDatasource - from superset.connectors.sqla.models import RowLevelSecurityFilter, SqlaTable + from superset.connectors.sqla.models import ( + BaseDatasource, + RowLevelSecurityFilter, + SqlaTable, + ) from superset.models.core import Database from superset.models.dashboard import Dashboard from superset.models.sql_lab import Query @@ -876,7 +879,6 @@ class SupersetSecurityManager( # pylint: disable=too-many-public-methods ): role_from_permissions.append(permission_view) role_to.permissions = role_from_permissions - self.get_session.merge(role_to) self.get_session.commit() def set_role( @@ -898,7 +900,6 @@ class SupersetSecurityManager( # pylint: disable=too-many-public-methods permission_view for permission_view in pvms if pvm_check(permission_view) ] role.permissions = role_pvms - self.get_session.merge(role) self.get_session.commit() def _is_admin_only(self, pvm: PermissionView) -> bool: @@ -2152,10 +2153,10 @@ class SupersetSecurityManager( # pylint: disable=too-many-public-methods @staticmethod def validate_guest_token_resources(resources: GuestTokenResources) -> None: # pylint: disable=import-outside-toplevel - from superset.daos.dashboard import EmbeddedDashboardDAO - from superset.embedded_dashboard.commands.exceptions import ( + from superset.commands.dashboard.embedded.exceptions import ( EmbeddedDashboardNotFoundError, ) + from superset.daos.dashboard import EmbeddedDashboardDAO from superset.models.dashboard import Dashboard for resource in resources: diff --git a/superset/sqllab/api.py b/superset/sqllab/api.py index 16070b52cc..6be378a9b5 100644 --- a/superset/sqllab/api.py +++ b/superset/sqllab/api.py @@ -27,6 +27,10 @@ from flask_appbuilder.models.sqla.interface import SQLAInterface from marshmallow import ValidationError from superset import app, is_feature_enabled +from superset.commands.sql_lab.estimate import QueryEstimationCommand +from superset.commands.sql_lab.execute import CommandResult, ExecuteSqlCommand +from superset.commands.sql_lab.export import SqlResultExportCommand +from superset.commands.sql_lab.results import SqlExecutionResultsCommand from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP from superset.daos.database import DatabaseDAO from superset.daos.query import QueryDAO @@ -35,10 +39,6 @@ from superset.jinja_context import get_template_processor from superset.models.sql_lab import Query from superset.sql_lab import get_sql_results from superset.sqllab.command_status import SqlJsonExecutionStatus -from superset.sqllab.commands.estimate import 
QueryEstimationCommand -from superset.sqllab.commands.execute import CommandResult, ExecuteSqlCommand -from superset.sqllab.commands.export import SqlResultExportCommand -from superset.sqllab.commands.results import SqlExecutionResultsCommand from superset.sqllab.exceptions import ( QueryIsForbiddenToAccessException, SqlLabException, diff --git a/superset/sqllab/query_render.py b/superset/sqllab/query_render.py index 4fb64c8ce2..f4c1c26c6e 100644 --- a/superset/sqllab/query_render.py +++ b/superset/sqllab/query_render.py @@ -24,9 +24,9 @@ from jinja2 import TemplateError from jinja2.meta import find_undeclared_variables from superset import is_feature_enabled +from superset.commands.sql_lab.execute import SqlQueryRender from superset.errors import SupersetErrorType from superset.sql_parse import ParsedQuery -from superset.sqllab.commands.execute import SqlQueryRender from superset.sqllab.exceptions import SqlLabException from superset.utils import core as utils diff --git a/superset/sqllab/validators.py b/superset/sqllab/validators.py index 5bc8a62253..b79789da4c 100644 --- a/superset/sqllab/validators.py +++ b/superset/sqllab/validators.py @@ -20,7 +20,7 @@ from __future__ import annotations from typing import TYPE_CHECKING from superset import security_manager -from superset.sqllab.commands.execute import CanAccessQueryValidator +from superset.commands.sql_lab.execute import CanAccessQueryValidator if TYPE_CHECKING: from superset.models.sql_lab import Query diff --git a/superset/superset_typing.py b/superset/superset_typing.py index 953683b5dc..c71dcea3f1 100644 --- a/superset/superset_typing.py +++ b/superset/superset_typing.py @@ -84,6 +84,8 @@ class ResultSetColumnType(TypedDict): scale: NotRequired[Any] max_length: NotRequired[Any] + query_as: NotRequired[Any] + CacheConfig = dict[str, Any] DbapiDescriptionRow = tuple[ diff --git a/superset/tags/api.py b/superset/tags/api.py index e9842f5a6a..c0df921e3e 100644 --- a/superset/tags/api.py +++ b/superset/tags/api.py @@ -22,16 +22,12 @@ from flask_appbuilder.api import expose, protect, rison, safe from flask_appbuilder.models.sqla.interface import SQLAInterface from marshmallow import ValidationError -from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP, RouteMethod -from superset.daos.tag import TagDAO -from superset.exceptions import MissingUserContextException -from superset.extensions import event_logger -from superset.tags.commands.create import ( +from superset.commands.tag.create import ( CreateCustomTagCommand, CreateCustomTagWithRelationshipsCommand, ) -from superset.tags.commands.delete import DeleteTaggedObjectCommand, DeleteTagsCommand -from superset.tags.commands.exceptions import ( +from superset.commands.tag.delete import DeleteTaggedObjectCommand, DeleteTagsCommand +from superset.commands.tag.exceptions import ( TagDeleteFailedError, TaggedObjectDeleteFailedError, TaggedObjectNotFoundError, @@ -39,8 +35,12 @@ from superset.tags.commands.exceptions import ( TagNotFoundError, TagUpdateFailedError, ) -from superset.tags.commands.update import UpdateTagCommand -from superset.tags.models import ObjectTypes, Tag +from superset.commands.tag.update import UpdateTagCommand +from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP, RouteMethod +from superset.daos.tag import TagDAO +from superset.exceptions import MissingUserContextException +from superset.extensions import event_logger +from superset.tags.models import ObjectType, Tag from superset.tags.schemas import ( delete_tags_schema, 
openapi_spec_methods_override, @@ -117,7 +117,7 @@ class TagRestApi(BaseSupersetModelRestApi): related_field_filters = { "created_by": RelatedFieldFilter("first_name", FilterRelatedOwners), } - allowed_rel_fields = {"created_by"} + allowed_rel_fields = {"created_by", "changed_by"} add_model_schema = TagPostSchema() edit_model_schema = TagPutSchema() @@ -364,7 +364,7 @@ class TagRestApi(BaseSupersetModelRestApi): action=lambda self, *args, **kwargs: f"{self.__class__.__name__}.add_objects", log_to_statsd=False, ) - def add_objects(self, object_type: ObjectTypes, object_id: int) -> Response: + def add_objects(self, object_type: ObjectType, object_id: int) -> Response: """Add tags to an object. Create new tags if they do not already exist. --- post: @@ -429,7 +429,7 @@ class TagRestApi(BaseSupersetModelRestApi): log_to_statsd=True, ) def delete_object( - self, object_type: ObjectTypes, object_id: int, tag: str + self, object_type: ObjectType, object_id: int, tag: str ) -> Response: """Delete a tagged object. --- @@ -584,12 +584,21 @@ class TagRestApi(BaseSupersetModelRestApi): 500: $ref: '#/components/responses/500' """ + tag_ids = [ + tag_id for tag_id in request.args.get("tagIds", "").split(",") if tag_id + ] tags = [tag for tag in request.args.get("tags", "").split(",") if tag] # filter types types = [type_ for type_ in request.args.get("types", "").split(",") if type_] try: - tagged_objects = TagDAO.get_tagged_objects_for_tags(tags, types) + if tag_ids: + # prioritize using ids for lookups vs. names, mainly using this + # for backward compatibility + tagged_objects = TagDAO.get_tagged_objects_by_tag_id(tag_ids, types) + else: + tagged_objects = TagDAO.get_tagged_objects_for_tags(tags, types) + result = [ self.object_entity_response_schema.dump(tagged_object) for tagged_object in tagged_objects @@ -609,11 +618,11 @@ class TagRestApi(BaseSupersetModelRestApi): log_to_statsd=False, ) def favorite_status(self, **kwargs: Any) -> Response: - """Favorite Stars for Dashboards + """Favorite Stars for Tags --- get: description: >- - Check favorited dashboards for current user + Get favorited tags for current user parameters: - in: query name: q diff --git a/superset/tags/models.py b/superset/tags/models.py index 7825f283bf..7a77677a36 100644 --- a/superset/tags/models.py +++ b/superset/tags/models.py @@ -20,9 +20,9 @@ import enum from typing import TYPE_CHECKING from flask_appbuilder import Model -from sqlalchemy import Column, Enum, ForeignKey, Integer, String, Table, Text +from sqlalchemy import Column, Enum, ForeignKey, Integer, orm, String, Table, Text from sqlalchemy.engine.base import Connection -from sqlalchemy.orm import relationship, Session, sessionmaker +from sqlalchemy.orm import relationship, sessionmaker from sqlalchemy.orm.mapper import Mapper from superset import security_manager @@ -35,7 +35,7 @@ if TYPE_CHECKING: from superset.models.slice import Slice from superset.models.sql_lab import Query -Session = sessionmaker(autoflush=False) +Session = sessionmaker() user_favorite_tag_table = Table( "user_favorite_tag", @@ -45,8 +45,7 @@ user_favorite_tag_table = Table( ) -class TagTypes(enum.Enum): - +class TagType(enum.Enum): """ Types for tags. 
@@ -65,8 +64,7 @@ class TagTypes(enum.Enum): favorited_by = 4 -class ObjectTypes(enum.Enum): - +class ObjectType(enum.Enum): """Object types.""" # pylint: disable=invalid-name @@ -83,7 +81,7 @@ class Tag(Model, AuditMixinNullable): __tablename__ = "tag" id = Column(Integer, primary_key=True) name = Column(String(250), unique=True) - type = Column(Enum(TagTypes)) + type = Column(Enum(TagType)) description = Column(Text) objects = relationship( @@ -108,12 +106,12 @@ class TaggedObject(Model, AuditMixinNullable): ForeignKey("slices.id"), ForeignKey("saved_query.id"), ) - object_type = Column(Enum(ObjectTypes)) + object_type = Column(Enum(ObjectType)) tag = relationship("Tag", back_populates="objects", overlaps="tags") -def get_tag(name: str, session: Session, type_: TagTypes) -> Tag: +def get_tag(name: str, session: orm.Session, type_: TagType) -> Tag: tag_name = name.strip() tag = session.query(Tag).filter_by(name=tag_name, type=type_).one_or_none() if tag is None: @@ -123,12 +121,12 @@ def get_tag(name: str, session: Session, type_: TagTypes) -> Tag: return tag -def get_object_type(class_name: str) -> ObjectTypes: +def get_object_type(class_name: str) -> ObjectType: mapping = { - "slice": ObjectTypes.chart, - "dashboard": ObjectTypes.dashboard, - "query": ObjectTypes.query, - "dataset": ObjectTypes.dataset, + "slice": ObjectType.chart, + "dashboard": ObjectType.dashboard, + "query": ObjectType.query, + "dataset": ObjectType.dataset, } try: return mapping[class_name.lower()] @@ -150,12 +148,12 @@ class ObjectUpdater: @classmethod def _add_owners( cls, - session: Session, + session: orm.Session, target: Dashboard | FavStar | Slice | Query | SqlaTable, ) -> None: for owner_id in cls.get_owners_ids(target): name = f"owner:{owner_id}" - tag = get_tag(name, session, TagTypes.owner) + tag = get_tag(name, session, TagType.owner) tagged_object = TaggedObject( tag_id=tag.id, object_id=target.id, object_type=cls.object_type ) @@ -168,21 +166,17 @@ class ObjectUpdater: connection: Connection, target: Dashboard | FavStar | Slice | Query | SqlaTable, ) -> None: - session = Session(bind=connection) - - try: + with Session(bind=connection) as session: # add `owner:` tags cls._add_owners(session, target) # add `type:` tags - tag = get_tag(f"type:{cls.object_type}", session, TagTypes.type) + tag = get_tag(f"type:{cls.object_type}", session, TagType.type) tagged_object = TaggedObject( tag_id=tag.id, object_id=target.id, object_type=cls.object_type ) session.add(tagged_object) session.commit() - finally: - session.close() @classmethod def after_update( @@ -191,9 +185,7 @@ class ObjectUpdater: connection: Connection, target: Dashboard | FavStar | Slice | Query | SqlaTable, ) -> None: - session = Session(bind=connection) - - try: + with Session(bind=connection) as session: # delete current `owner:` tags query = ( session.query(TaggedObject.id) @@ -201,7 +193,7 @@ class ObjectUpdater: .filter( TaggedObject.object_type == cls.object_type, TaggedObject.object_id == target.id, - Tag.type == TagTypes.owner, + Tag.type == TagType.owner, ) ) ids = [row[0] for row in query] @@ -212,8 +204,6 @@ class ObjectUpdater: # add `owner:` tags cls._add_owners(session, target) session.commit() - finally: - session.close() @classmethod def after_delete( @@ -222,9 +212,7 @@ class ObjectUpdater: connection: Connection, target: Dashboard | FavStar | Slice | Query | SqlaTable, ) -> None: - session = Session(bind=connection) - - try: + with Session(bind=connection) as session: # delete row from `tagged_objects` 
session.query(TaggedObject).filter( TaggedObject.object_type == cls.object_type, @@ -232,8 +220,6 @@ class ObjectUpdater: ).delete() session.commit() - finally: - session.close() class ChartUpdater(ObjectUpdater): @@ -273,10 +259,9 @@ class FavStarUpdater: def after_insert( cls, _mapper: Mapper, connection: Connection, target: FavStar ) -> None: - session = Session(bind=connection) - try: + with Session(bind=connection) as session: name = f"favorited_by:{target.user_id}" - tag = get_tag(name, session, TagTypes.favorited_by) + tag = get_tag(name, session, TagType.favorited_by) tagged_object = TaggedObject( tag_id=tag.id, object_id=target.obj_id, @@ -284,22 +269,19 @@ class FavStarUpdater: ) session.add(tagged_object) session.commit() - finally: - session.close() @classmethod def after_delete( cls, _mapper: Mapper, connection: Connection, target: FavStar ) -> None: - session = Session(bind=connection) - try: + with Session(bind=connection) as session: name = f"favorited_by:{target.user_id}" query = ( session.query(TaggedObject.id) .join(Tag) .filter( TaggedObject.object_id == target.obj_id, - Tag.type == TagTypes.favorited_by, + Tag.type == TagType.favorited_by, Tag.name == name, ) ) @@ -309,5 +291,3 @@ class FavStarUpdater: ) session.commit() - finally: - session.close() diff --git a/superset/tasks/async_queries.py b/superset/tasks/async_queries.py index 609af3bc8e..61970ca1f3 100644 --- a/superset/tasks/async_queries.py +++ b/superset/tasks/async_queries.py @@ -64,7 +64,7 @@ def load_chart_data_into_cache( form_data: dict[str, Any], ) -> None: # pylint: disable=import-outside-toplevel - from superset.charts.data.commands.get_data_command import ChartDataCommand + from superset.commands.chart.data.get_data_command import ChartDataCommand user = ( security_manager.get_user_by_id(job_metadata.get("user_id")) diff --git a/superset/tasks/scheduler.py b/superset/tasks/scheduler.py index f3cc270b86..7b1350a07d 100644 --- a/superset/tasks/scheduler.py +++ b/superset/tasks/scheduler.py @@ -22,11 +22,11 @@ from celery.exceptions import SoftTimeLimitExceeded from superset import app, is_feature_enabled from superset.commands.exceptions import CommandException +from superset.commands.report.exceptions import ReportScheduleUnexpectedError +from superset.commands.report.execute import AsyncExecuteReportScheduleCommand +from superset.commands.report.log_prune import AsyncPruneReportScheduleLogCommand from superset.daos.report import ReportScheduleDAO from superset.extensions import celery_app -from superset.reports.commands.exceptions import ReportScheduleUnexpectedError -from superset.reports.commands.execute import AsyncExecuteReportScheduleCommand -from superset.reports.commands.log_prune import AsyncPruneReportScheduleLogCommand from superset.stats_logger import BaseStatsLogger from superset.tasks.cron_util import cron_schedule_window from superset.utils.celery import session_scope diff --git a/superset/temporary_cache/api.py b/superset/temporary_cache/api.py index 0ecab44bf1..5dc95c122a 100644 --- a/superset/temporary_cache/api.py +++ b/superset/temporary_cache/api.py @@ -24,13 +24,13 @@ from apispec.exceptions import DuplicateComponentNameError from flask import request, Response from marshmallow import ValidationError -from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP, RouteMethod -from superset.key_value.types import JsonKeyValueCodec -from superset.temporary_cache.commands.exceptions import ( +from superset.commands.temporary_cache.exceptions import ( 
TemporaryCacheAccessDeniedError, TemporaryCacheResourceNotFoundError, ) -from superset.temporary_cache.commands.parameters import CommandParameters +from superset.commands.temporary_cache.parameters import CommandParameters +from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP, RouteMethod +from superset.key_value.types import JsonKeyValueCodec from superset.temporary_cache.schemas import ( TemporaryCachePostSchema, TemporaryCachePutSchema, diff --git a/superset/translations/de/LC_MESSAGES/messages.json b/superset/translations/de/LC_MESSAGES/messages.json index 1c020467a6..44d5d3009c 100644 --- a/superset/translations/de/LC_MESSAGES/messages.json +++ b/superset/translations/de/LC_MESSAGES/messages.json @@ -2886,7 +2886,6 @@ "Manage email report": ["E-Mail-Bericht verwalten"], "Manage your databases": ["Verwalten Sie Ihre Datenbanken"], "Mandatory": ["Notwendig"], - "Mangle Duplicate Columns": ["Doppelte Spalten zusammenführen"], "Manually set min/max values for the y-axis.": [ "Min/Max-Werte für die y-Achse manuell festlegen." ], diff --git a/superset/translations/de/LC_MESSAGES/messages.po b/superset/translations/de/LC_MESSAGES/messages.po index 9234d0eaec..6a1ff69019 100644 --- a/superset/translations/de/LC_MESSAGES/messages.po +++ b/superset/translations/de/LC_MESSAGES/messages.po @@ -9407,10 +9407,6 @@ msgstr "Verwalten Sie Ihre Datenbanken" msgid "Mandatory" msgstr "Notwendig" -#: superset/views/database/forms.py:360 -msgid "Mangle Duplicate Columns" -msgstr "Doppelte Spalten zusammenführen" - #: superset-frontend/src/explore/components/controls/TimeSeriesColumnControl/index.jsx:297 msgid "Manually set min/max values for the y-axis." msgstr "Min/Max-Werte für die y-Achse manuell festlegen." diff --git a/superset/translations/en/LC_MESSAGES/messages.json b/superset/translations/en/LC_MESSAGES/messages.json index e87511b901..f1d035d43a 100644 --- a/superset/translations/en/LC_MESSAGES/messages.json +++ b/superset/translations/en/LC_MESSAGES/messages.json @@ -2093,7 +2093,6 @@ "Manage email report": [""], "Manage your databases": [""], "Mandatory": [""], - "Mangle Duplicate Columns": [""], "Manually set min/max values for the y-axis.": [""], "Map": [""], "Map Style": [""], diff --git a/superset/translations/en/LC_MESSAGES/messages.po b/superset/translations/en/LC_MESSAGES/messages.po index 7a84f6d5de..ee3b15bd9b 100644 --- a/superset/translations/en/LC_MESSAGES/messages.po +++ b/superset/translations/en/LC_MESSAGES/messages.po @@ -8797,10 +8797,6 @@ msgstr "" msgid "Mandatory" msgstr "" -#: superset/views/database/forms.py:360 -msgid "Mangle Duplicate Columns" -msgstr "" - #: superset-frontend/src/explore/components/controls/TimeSeriesColumnControl/index.jsx:297 msgid "Manually set min/max values for the y-axis." 
msgstr "" diff --git a/superset/translations/es/LC_MESSAGES/messages.json b/superset/translations/es/LC_MESSAGES/messages.json index 57ae460a4f..ea5636c049 100644 --- a/superset/translations/es/LC_MESSAGES/messages.json +++ b/superset/translations/es/LC_MESSAGES/messages.json @@ -1801,7 +1801,6 @@ ], "Manage": ["Administrar"], "Mandatory": ["Oblugatorio"], - "Mangle Duplicate Columns": ["Manglar Columnas Duplicadas"], "MapBox": [""], "Mapbox": [""], "March": ["Marzo"], diff --git a/superset/translations/es/LC_MESSAGES/messages.po b/superset/translations/es/LC_MESSAGES/messages.po index b629d66601..df4efb9b4b 100644 --- a/superset/translations/es/LC_MESSAGES/messages.po +++ b/superset/translations/es/LC_MESSAGES/messages.po @@ -9400,10 +9400,6 @@ msgstr "Nombre de tu fuente de datos" msgid "Mandatory" msgstr "Oblugatorio" -#: superset/views/database/forms.py:360 -msgid "Mangle Duplicate Columns" -msgstr "Manglar Columnas Duplicadas" - #: superset-frontend/src/explore/components/controls/TimeSeriesColumnControl/index.jsx:297 #, fuzzy msgid "Manually set min/max values for the y-axis." diff --git a/superset/translations/fr/LC_MESSAGES/messages.json b/superset/translations/fr/LC_MESSAGES/messages.json index 5c000012fa..2391b33db8 100644 --- a/superset/translations/fr/LC_MESSAGES/messages.json +++ b/superset/translations/fr/LC_MESSAGES/messages.json @@ -100,7 +100,7 @@ "1H": [""], "1M": [""], "1T": [""], - "2 years ago": ["il y a 2 ans"], + "2 years ago": ["Il y a 2 ans"], "2/98 percentiles": [""], "28 days ago": [""], "2D": [""], @@ -226,6 +226,7 @@ "Add calculated temporal columns to dataset in \"Edit datasource\" modal": [ "" ], + "Add cross-filter": ["Ajouter un filtre"], "Add custom scoping": [""], "Add delivery method": ["Ajouter méthode de livraison"], "Add filter": ["Ajouter un filtre"], @@ -556,6 +557,8 @@ "" ], "Append": ["Ajouter"], + "Applied filters (%d)": ["Filtres appliqués (%d)"], + "Applied filters: %s": ["Filtres appliqué: %s"], "Applied rolling window did not return any data. Please make sure the source query satisfies the minimum periods defined in the rolling window.": [ "La fenêtre glissante appliquée n'a pas retourné de données. Assurez-vous que la requête source satisfasse les périodes minimum définies dans la fenêtre glissante." 
], @@ -630,6 +633,7 @@ "Batch editing %d filters:": ["Edition Batch %d filtres:"], "Battery level over time": [""], "Be careful.": ["Faites attention."], + "Before": ["Avant"], "Big Number": ["Gros nombre"], "Big Number Font Size": [""], "Big Number with Trendline": ["Gros nombre avec tendance"], @@ -1088,9 +1092,10 @@ ], "Creator": ["Créateur"], "Cross-filter will be applied to all of the charts that use this dataset.": [ - "" + "Le filtre va être appliqué à tous les graphiques qui utilise cet ensemble de données" ], "Currently rendered: %s": [""], + "Custom": ["Personnalisée"], "Custom Plugin": ["Plugin custom"], "Custom Plugins": ["Plugins custom"], "Custom SQL": ["SQL personnalisé"], @@ -1259,6 +1264,7 @@ "Datetime format": ["Format Datetime"], "Day": ["Jour"], "Day (freq=D)": [""], + "Days %s": ["Jours %s"], "Db engine did not return all queried columns": [ "La base de données n'a pas retourné toutes les colonnes demandées" ], @@ -1415,6 +1421,7 @@ "Divider": ["Diviseur"], "Do you want a donut or a pie?": [""], "Documentation": ["Documentation"], + "Download": ["Télécharger"], "Download as image": ["Télécharger comme image"], "Download to CSV": ["Télécharger en CSV"], "Draft": ["Brouillon"], @@ -1429,6 +1436,7 @@ "Drill by": [""], "Drill by is not available for this data point": [""], "Drill by is not yet supported for this chart type": [""], + "Drill by: %s": ["Trier par %s"], "Drill to detail": [""], "Drill to detail by": [""], "Drill to detail by value is not yet supported for this chart type.": [ @@ -1635,7 +1643,10 @@ "Export": ["Exporter"], "Export dashboards?": ["Exporter les tableaux de bords ?"], "Export query": ["Exporter la requête"], - "Export to YAML": ["Exporter en YAML"], + "Export to .CSV": ["Exporter au format CSV"], + "Export to .JSON": ["Exporter au format JSON"], + "Export to Excel": ["Exporter vers Excel"], + "Export to YAML": ["Exporter au format YAML"], "Export to YAML?": ["Exporter en YAML?"], "Export to original .CSV": [""], "Export to pivoted .CSV": [""], @@ -1818,6 +1829,7 @@ "Host": [""], "Hostname or IP address": ["Nom d'hôte ou adresse IP"], "Hour": ["Heure"], + "Hours %s": ["Heures %s"], "Hours offset": ["Offset des heures"], "How do you want to enter service account credentials?": [ "Comment voulez-vous entrer les informations de connexion du compte de service ?" 
@@ -1981,9 +1993,11 @@ "Labels for the marker lines": [""], "Labels for the markers": [""], "Labels for the ranges": [""], + "Last": ["Dernier"], "Last Changed": ["Dernière modification"], "Last Modified": ["Dernière modification"], "Last Updated %s": ["Dernière mise à jour %s"], + "Last Updated %s by %s": ["Dernière mise à jour %s"], "Last modified": ["Dernière modification"], "Last modified by %s": ["Dernière modification par %s"], "Last run": ["Dernière exécution"], @@ -2065,7 +2079,6 @@ ], "Manage": ["Gestion"], "Mandatory": ["Obligatoire"], - "Mangle Duplicate Columns": ["Supprimer les colonnes en double"], "Manually set min/max values for the y-axis.": [""], "Mapbox": ["Mapbox"], "March": ["Mars"], @@ -2142,6 +2155,7 @@ "Minimum value on the gauge axis": [""], "Minor Split Line": [""], "Minute": ["Minute"], + "Minutes %s": ["Minutes %s"], "Missing dataset": ["Jeu de données manquant"], "Mixed Time-Series": [""], "Modified": ["Modifié"], @@ -2150,6 +2164,7 @@ "Modified columns: %s": ["Colonnes modifiées : %s"], "Monday": ["Lundi"], "Month": ["Mois"], + "Months %s": ["Mois %s"], "Move only": [""], "Moves the given set of dates by a specified interval.": [ "Décale l'ensemble de dates d'un intervalle spécifié." @@ -2235,6 +2250,7 @@ "No filter": ["Pas de filtre"], "No filter is selected.": ["Pas de filtre sélectionné."], "No form settings were maintained": [""], + "No matching records found": ["Aucun enregistrement trouvé"], "No records found": ["Aucun enregistrement trouvé"], "No results found": ["Aucun résultat trouvé"], "No results match your filter criteria": [""], @@ -2265,6 +2281,7 @@ "Nothing triggered": ["Rien déclenché"], "Notification method": ["Méthode de notification"], "November": ["Novembre"], + "Now": ["Maintenant"], "Null or Empty": ["Null ou Vide"], "Null values": ["Valeurs NULL"], "Number bounds used for color encoding from red to blue.\n Reverse the numbers for blue to red. To get pure red or blue,\n you can enter either only min or max.": [ @@ -2590,6 +2607,7 @@ "Python datetime string pattern": ["Python datetime string pattern"], "QUERY DATA IN SQL LAB": [""], "Quarter": ["Trimestre"], + "Quarters %s": ["Trimestres %s"], "Query": ["Requête"], "Query %s: %s": [""], "Query History": ["Historiques des requêtes"], @@ -2652,8 +2670,10 @@ "Refresh frequency": ["Fréquence de rafraichissement"], "Refresh interval": ["Intervalle d'actualisation"], "Refresh the default values": ["Rafraichir les valeurs par défaut"], + "Refreshing charts": ["Rafraîchissement en cours"], "Regular": [""], "Relationships between community channels": [""], + "Relative Date/Time": ["Date/Heure Relative"], "Relative period": ["Période relative"], "Relative quantity": ["Quantité relative"], "Remind me in 24 hours": ["Me le rappeler dans 24 heures"], @@ -2730,6 +2750,7 @@ "Resource already has an attached report.": [""], "Restore Filter": ["Restaurer le Filtre"], "Results": ["Résultats"], + "Results %s": ["Résultats"], "Results backend is not configured.": [ "Le backend des résultats n'est pas configuré." 
], @@ -2884,6 +2905,7 @@ "Secondary y-axis Bounds": [""], "Secondary y-axis format": [""], "Secondary y-axis title": [""], + "Seconds %s": ["Secondes %s"], "Secure Extra": ["Sécurité"], "Secure extra": ["Sécurité"], "Security": ["Sécurité"], @@ -2894,7 +2916,7 @@ "Select": ["Sélectionner"], "Select ...": ["Sélectionner..."], "Select Delivery Method": ["Choisir la méthode de livraison"], - "Select Viz Type": ["Selectionner un type de visualisation"], + "Select Viz Type": ["Sélectionner un type de visualisation"], "Select a Columnar file to be uploaded to a database.": [ "Sélectionner un fichier en colonne à téléverser dans une base de données." ], @@ -2904,7 +2926,7 @@ "Select a column": ["Sélectionner une colonne"], "Select a dashboard": ["Sélectionner un tableau de bord"], "Select a database to upload the file to": [""], - "Select a visualization type": ["Selectionner un type de visualisation"], + "Select a visualization type": ["Sélectionner un type de visualisation"], "Select aggregate options": ["Sélectionner les options d’agrégat"], "Select any columns for metadata inspection": [""], "Select color scheme": ["Sélectionner un schéma de couleurs"], @@ -2912,17 +2934,17 @@ "Select databases require additional fields to be completed in the Advanced tab to successfully connect the database. Learn what requirements your databases has ": [ "" ], - "Select filter": ["Selectionner un filtre"], + "Select filter": ["Sélectionner un filtre"], "Select filter plugin using AntD": [""], "Select first filter value by default": [ - "Selectionne la première valeur du filtre par défaut" + "Sélectionne la première valeur du filtre par défaut" ], "Select operator": ["Sélectionner l'opérateur"], "Select or type a value": ["Sélectionner ou renseigner une valeur"], "Select owners": ["Sélectionner les propriétaires"], "Select saved metrics": ["Sélectionner les métriques sauvegardées"], "Select start and end date": [ - "Selectionner la date de début et la date de fin" + "Sélectionner la date de début et la date de fin" ], "Select subject": ["Sélectionner un objet"], "Select the charts to which you want to apply cross-filters in this dashboard. Deselecting a chart will exclude it from being filtered when applying cross-filters from any chart on the dashboard. You can select \"All charts\" to apply cross-filters to all charts that use the same dataset or contain the same column name in the dashboard.": [ @@ -2936,7 +2958,7 @@ "Select values in highlighted field(s) in the control panel. Then run the query by clicking on the %s button.": [ "" ], - "Send as CSV": ["Envoyer comme CSV"], + "Send as CSV": ["Envoyer au format CSV"], "Send as PNG": ["Envoyer comme PNG"], "Send as text": ["Envoyer comme texte"], "Send range filter events to other charts": [""], @@ -3082,6 +3104,7 @@ "Sort ascending": ["Tri croissant"], "Sort bars by x labels.": [""], "Sort by": ["Trier par"], + "Sort by %s": ["Trier par %s"], "Sort columns alphabetically": ["Trier les colonnes alphabétiquement"], "Sort descending": ["Tri décroissant"], "Sort filter values": ["Trier les valeurs de filtre"], @@ -3092,6 +3115,7 @@ "Source SQL": ["SQL source"], "Sparkline": [""], "Spatial": ["Spatial"], + "Specific Date/Time": ["Date/Heure Spécifique"], "Specify a schema (if database flavor supports this).": [ "Spécifier un schéma (si la base de données soutient cette fonctionnalités)." ], @@ -3634,7 +3658,7 @@ "Cela peut être soit une adresse IP (ex 127.0.0.1) ou un nom de domaine (ex mydatabase.com)." 
], "This chart applies cross-filters to charts whose datasets contain columns with the same name.": [ - "" + "Ce graphique filtre automatiquement les graphiques ayant des colonnes de même nom dans leurs ensembles de données." ], "This chart has been moved to a different filter scope.": [ "Ce graphique a été déplacé vers un autre champ d'application du filtre." @@ -3740,6 +3764,9 @@ "This value should be smaller than the right target value": [ "Cette valeur devrait être plus petite que la valeur cible de droite" ], + "This visualization type does not support cross-filtering.": [ + "Ce type de visualisation ne supporte pas le cross-filtering." + ], "This visualization type is not supported.": [ "Ce type de visualisation n'est pas supporté." ], @@ -3916,6 +3943,7 @@ "Unexpected error occurred, please check your logs for details": [ "Erreur inattendue, consultez les logs pour plus de détails" ], + "Unexpected time range: %s": ["Intervalle de temps inattendu: %s"], "Unknown": ["Erreur inconnue"], "Unknown MySQL server host \"%(hostname)s\".": [ "Hôte MySQL \"%(hostname)s\" inconnu." @@ -4134,6 +4162,7 @@ "Week_ending Sunday": ["Semaine terminant le dimanche"], "Weekly Report for %s": [""], "Weekly seasonality": [""], + "Weeks %s": ["Semaines %s"], "What should be shown on the label?": [""], "When `Calculation type` is set to \"Percentage change\", the Y Axis Format is forced to `.1%`": [ "Lorsque `Type de calcul` vaut \"Pourcentage de changement\", le format de l'axe Y est à forcé à `.1%`" @@ -4283,6 +4312,7 @@ "Year": ["Année"], "Year (freq=AS)": [""], "Yearly seasonality": [""], + "Years %s": ["Année %s"], "Yes": ["Oui"], "Yes, cancel": ["Oui, annuler"], "Yes, overwrite changes": [""], @@ -4304,7 +4334,9 @@ "You can add the components in the": [ "Vous pouvez ajouter les composants via le" ], - "You can also just click on the chart to apply cross-filter.": [""], + "You can also just click on the chart to apply cross-filter.": [ + "Vous pouvez juste cliquer sur le graphique pour appliquer le filtre" + ], "You can choose to display all charts that you have access to or only the ones you own.\n Your filter selection will be saved and remain active until you choose to change it.": [ "" ], @@ -4314,7 +4346,9 @@ "You can preview the list of dashboards in the chart settings dropdown.": [ "" ], - "You can't apply cross-filter on this data point.": [""], + "You can't apply cross-filter on this data point.": [ + "Vous ne pouvez pas ajouter de filtre sur ce point de donnée" + ], "You cannot delete the last temporal filter as it's used for time range filters in dashboards.": [ "" ], @@ -4440,6 +4474,7 @@ "aggregate": ["agrégat"], "alert": ["alerte"], "alerts": ["alertes"], + "all": ["Tous"], "also copy (duplicate) charts": [ "copier également les graphiques (dupliquer)" ], @@ -4525,6 +4560,11 @@ "json isn't valid": ["le json n'est pas valide"], "key a-z": [""], "key z-a": [""], + "last day": ["hier"], + "last month": ["le mois dernier"], + "last quarter": ["le trimestre dernier"], + "last week": ["la semaine dernière"], + "last year": ["l'année dernière"], "latest partition:": ["dernière partition :"], "less than {min} {name}": [""], "log": ["log"], @@ -4591,18 +4631,10 @@ "y: values are normalized within each row": [""], "year": ["année"], "zoom area": [""], - "No matching records found": ["Aucun résultat trouvé"], - "Seconds %s": ["%s secondes"], - "Minutes %s": ["%s minutes "], "10 seconds": ["10 secondes"], "6 hours": ["6 heures"], "12 hours": ["12 heures"], - "24 hours": ["24 heures"], - "Last day": 
["Hier"], - "Last week": ["La semaine derniere"], - "Last month": ["Le mois dernier"], - "Last quarter": ["Le trimestre dernier"], - "Last year": ["L'année dernière"] + "24 hours": ["24 heures"] } } } diff --git a/superset/translations/fr/LC_MESSAGES/messages.po b/superset/translations/fr/LC_MESSAGES/messages.po index 6c4cdd69c4..ab2b065ce2 100644 --- a/superset/translations/fr/LC_MESSAGES/messages.po +++ b/superset/translations/fr/LC_MESSAGES/messages.po @@ -1204,7 +1204,6 @@ msgid "Add calculated temporal columns to dataset in \"Edit datasource\" modal" msgstr "" #: superset-frontend/src/components/Chart/ChartContextMenu/ChartContextMenu.tsx:197 -#, fuzzy msgid "Add cross-filter" msgstr "Ajouter un filtre" @@ -2400,14 +2399,14 @@ msgid "Applied cross-filters (%d)" msgstr "Filtres croisés appliqués (%d)" #: superset-frontend/src/dashboard/components/FiltersBadge/DetailsPanel/index.tsx:149 -#, fuzzy, python-format +#, python-format msgid "Applied filters (%d)" msgstr "Filtres appliqués (%d)" #: superset-frontend/src/dashboard/components/nativeFilters/FilterBar/FilterControls/FilterControls.tsx:260 -#, fuzzy, python-format +#, python-format msgid "Applied filters: %s" -msgstr "Filtres appliqués (%d)" +msgstr "Filtres appliqué: %s" #: superset/viz.py:250 msgid "" @@ -2797,7 +2796,6 @@ msgstr "Faites attention." #: superset-frontend/src/components/AlteredSliceTag/index.jsx:178 #: superset-frontend/src/explore/components/controls/DateFilterControl/utils/constants.ts:75 -#, fuzzy msgid "Before" msgstr "Avant" @@ -4921,7 +4919,7 @@ msgstr "Action" #: superset-frontend/src/components/Chart/ChartContextMenu/ChartContextMenu.tsx:152 msgid "Cross-filter will be applied to all of the charts that use this dataset." -msgstr "" +msgstr "Le filtre va être appliqué à tous les graphiques qui utilise cet ensemble de données" #: superset-frontend/src/components/Chart/ChartContextMenu/ChartContextMenu.tsx:164 #, fuzzy @@ -4956,7 +4954,6 @@ msgid "Currently rendered: %s" msgstr "" #: superset-frontend/src/explore/components/controls/DateFilterControl/utils/constants.ts:33 -#, fuzzy msgid "Custom" msgstr "Personnalisée" @@ -5598,7 +5595,7 @@ msgid "Day (freq=D)" msgstr "" #: superset-frontend/src/explore/components/controls/DateFilterControl/utils/constants.ts:65 -#, fuzzy, python-format +#, python-format msgid "Days %s" msgstr "Jours %s" @@ -6258,9 +6255,8 @@ msgstr "Édité" #: superset-frontend/src/dashboard/components/SliceHeaderControls/index.tsx:482 #: superset-frontend/src/explore/components/useExploreAdditionalActionsMenu/index.jsx:292 -#, fuzzy msgid "Download" -msgstr "télécharger en CSV" +msgstr "Télécharger" #: superset-frontend/src/dashboard/components/Header/HeaderActionsDropdown/index.jsx:317 #: superset-frontend/src/dashboard/components/SliceHeaderControls/index.tsx:512 @@ -6335,7 +6331,7 @@ msgid "Drill by is not yet supported for this chart type" msgstr "" #: superset-frontend/src/components/Chart/DrillBy/DrillByModal.tsx:420 -#, fuzzy, python-format +#, python-format msgid "Drill by: %s" msgstr "Trier par %s" @@ -7312,24 +7308,21 @@ msgstr "Exporter la requête" #: superset-frontend/src/dashboard/components/SliceHeaderControls/index.tsx:487 #: superset-frontend/src/explore/components/useExploreAdditionalActionsMenu/index.jsx:316 -#, fuzzy msgid "Export to .CSV" -msgstr "Exporter en YAML" +msgstr "Exporter au format CSV" #: superset-frontend/src/explore/components/useExploreAdditionalActionsMenu/index.jsx:323 -#, fuzzy msgid "Export to .JSON" -msgstr "Exporter en YAML" +msgstr "Exporter au format 
JSON" #: superset-frontend/src/dashboard/components/SliceHeaderControls/index.tsx:506 #: superset-frontend/src/explore/components/useExploreAdditionalActionsMenu/index.jsx:335 -#, fuzzy msgid "Export to Excel" -msgstr "Exporter en YAML" +msgstr "Exporter vers Excel" #: superset/views/base.py:607 msgid "Export to YAML" -msgstr "Exporter en YAML" +msgstr "Exporter au format YAML" #: superset/views/base.py:607 msgid "Export to YAML?" @@ -8251,7 +8244,7 @@ msgid "Hour" msgstr "Heure" #: superset-frontend/src/explore/components/controls/DateFilterControl/utils/constants.ts:64 -#, fuzzy, python-format +#, python-format msgid "Hours %s" msgstr "Heures %s" @@ -9038,9 +9031,8 @@ msgstr "Partage de requête" #: superset-frontend/plugins/plugin-chart-pivot-table/src/plugin/controlPanel.tsx:190 #: superset-frontend/src/explore/components/controls/DateFilterControl/utils/constants.ts:31 -#, fuzzy msgid "Last" -msgstr "à" +msgstr "Dernier" #: superset/connectors/sqla/views.py:388 superset/views/database/mixins.py:190 msgid "Last Changed" @@ -9056,7 +9048,7 @@ msgid "Last Updated %s" msgstr "Dernière mise à jour %s" #: superset-frontend/src/dashboard/components/OverwriteConfirm/OverwriteConfirmModal.tsx:182 -#, fuzzy, python-format +#, python-format msgid "Last Updated %s by %s" msgstr "Dernière mise à jour %s" @@ -9574,10 +9566,6 @@ msgstr "Donner un nom à la base de données" msgid "Mandatory" msgstr "Obligatoire" -#: superset/views/database/forms.py:360 -msgid "Mangle Duplicate Columns" -msgstr "Supprimer les colonnes en double" - #: superset-frontend/src/explore/components/controls/TimeSeriesColumnControl/index.jsx:297 msgid "Manually set min/max values for the y-axis." msgstr "" @@ -10040,7 +10028,7 @@ msgid "Minute" msgstr "Minute" #: superset-frontend/src/explore/components/controls/DateFilterControl/utils/constants.ts:63 -#, fuzzy, python-format +#, python-format msgid "Minutes %s" msgstr "Minutes %s" @@ -10106,7 +10094,7 @@ msgid "Month" msgstr "Mois" #: superset-frontend/src/explore/components/controls/DateFilterControl/utils/constants.ts:67 -#, fuzzy, python-format +#, python-format msgid "Months %s" msgstr "Mois %s" @@ -10571,7 +10559,6 @@ msgid "No global filters are currently added" msgstr "Aucun filtre ajouté" #: superset-frontend/plugins/plugin-chart-table/src/TableChart.tsx:204 -#, fuzzy msgid "No matching records found" msgstr "Aucun enregistrement trouvé" @@ -10792,7 +10779,6 @@ msgid "November" msgstr "Novembre" #: superset-frontend/src/explore/components/controls/DateFilterControl/utils/constants.ts:89 -#, fuzzy msgid "Now" msgstr "Maintenant" @@ -12208,7 +12194,7 @@ msgid "Quarter" msgstr "Trimestre" #: superset-frontend/src/explore/components/controls/DateFilterControl/utils/constants.ts:68 -#, fuzzy, python-format +#, python-format msgid "Quarters %s" msgstr "Trimestres %s" @@ -12586,9 +12572,8 @@ msgid "Refresh the default values" msgstr "Rafraichir les valeurs par défaut" #: superset-frontend/src/dashboard/components/Header/HeaderActionsDropdown/index.jsx:163 -#, fuzzy msgid "Refreshing charts" -msgstr "Une erreur s'est produite durant la récupération des tableaux de bord : %s" +msgstr "Rafraîchissement en cours" #: superset-frontend/src/features/datasets/AddDataset/DatasetPanel/DatasetPanel.tsx:175 #, fuzzy @@ -12636,7 +12621,6 @@ msgid "Relationships between community channels" msgstr "" #: superset-frontend/src/explore/components/controls/DateFilterControl/utils/constants.ts:88 -#, fuzzy msgid "Relative Date/Time" msgstr "Date/Heure Relative" @@ -12931,7 +12915,7 @@ msgstr 
"Résultats" #: superset-frontend/src/components/Chart/DrillBy/useResultsTableView.tsx:58 #: superset-frontend/src/explore/components/DataTablesPane/DataTablesPane.tsx:212 #: superset-frontend/src/explore/components/DataTablesPane/components/ResultsPaneOnDashboard.tsx:84 -#, fuzzy, python-format +#, python-format msgid "Results %s" msgstr "Résultats" @@ -13802,9 +13786,9 @@ msgid "Secondary y-axis title" msgstr "" #: superset-frontend/src/explore/components/controls/DateFilterControl/utils/constants.ts:62 -#, fuzzy, python-format +#, python-format msgid "Seconds %s" -msgstr "%s secondes" +msgstr "Secondes %s" #: superset/views/database/mixins.py:194 msgid "Secure Extra" @@ -13869,7 +13853,7 @@ msgstr "Choisir la méthode de livraison" #: superset-frontend/src/explore/components/controls/VizTypeControl/FastVizSwitcher.tsx:94 msgid "Select Viz Type" -msgstr "Selectionner un type de visualisation" +msgstr "Sélectionner un type de visualisation" #: superset/views/database/forms.py:425 msgid "Select a Columnar file to be uploaded to a database." @@ -13930,7 +13914,7 @@ msgstr "" #: superset-frontend/src/explore/components/controls/VizTypeControl/index.tsx:130 msgid "Select a visualization type" -msgstr "Selectionner un type de visualisation" +msgstr "Sélectionner un type de visualisation" #: superset-frontend/src/explore/components/controls/MetricControl/AdhocMetricEditPopover/index.jsx:331 msgid "Select aggregate options" @@ -14011,7 +13995,7 @@ msgstr "Selectionner un filtre" #: superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigForm/FiltersConfigForm.tsx:318 #: superset-frontend/src/filters/components/Select/index.ts:28 msgid "Select filter" -msgstr "Selectionner un filtre" +msgstr "Sélectionner un filtre" #: superset-frontend/src/filters/components/Select/index.ts:29 msgid "Select filter plugin using AntD" @@ -14019,7 +14003,7 @@ msgstr "" #: superset-frontend/src/filters/components/Select/controlPanel.ts:104 msgid "Select first filter value by default" -msgstr "Selectionne la première valeur du filtre par défaut" +msgstr "Sélectionne la première valeur du filtre par défaut" #: superset-frontend/src/explore/components/controls/FilterControl/AdhocFilterEditPopoverSimpleTabContent/index.tsx:362 msgid "Select operator" @@ -14056,7 +14040,7 @@ msgstr "Sélectionner un schéma de couleurs" #: superset-frontend/src/visualizations/FilterBox/FilterBox.jsx:307 msgid "Select start and end date" -msgstr "Selectionner la date de début et la date de fin" +msgstr "Sélectionner la date de début et la date de fin" #: superset-frontend/src/explore/components/controls/FilterControl/AdhocFilterEditPopoverSimpleTabContent/index.tsx:334 msgid "Select subject" @@ -14113,7 +14097,7 @@ msgstr "" #: superset-frontend/src/features/alerts/AlertReportModal.tsx:408 msgid "Send as CSV" -msgstr "Envoyer comme CSV" +msgstr "Envoyer au format CSV" #: superset-frontend/src/features/alerts/AlertReportModal.tsx:407 msgid "Send as PNG" @@ -14831,7 +14815,7 @@ msgid "Sort by" msgstr "Trier par" #: superset-frontend/src/dashboard/components/SliceAdder.jsx:362 -#, fuzzy, python-format +#, python-format msgid "Sort by %s" msgstr "Trier par %s" @@ -14919,7 +14903,6 @@ msgid "Spatial" msgstr "Spatial" #: superset-frontend/src/explore/components/controls/DateFilterControl/utils/constants.ts:87 -#, fuzzy msgid "Specific Date/Time" msgstr "Date/Heure Spécifique" @@ -16818,7 +16801,8 @@ msgstr "" msgid "" "This chart applies cross-filters to charts whose datasets contain columns" " with the same name." 
-msgstr "" +msgstr "Ce graphique filtre automatiquement les graphiques ayant des colonnes de même nom dans leurs" +" ensembles de données." #: superset-frontend/src/dashboard/actions/dashboardLayout.js:260 msgid "This chart has been moved to a different filter scope." @@ -17077,9 +17061,8 @@ msgid "This value should be smaller than the right target value" msgstr "Cette valeur devrait être plus petite que la valeur cible de droite" #: superset-frontend/src/components/Chart/ChartContextMenu/ChartContextMenu.tsx:171 -#, fuzzy msgid "This visualization type does not support cross-filtering." -msgstr "Ce type de visualisation n'est pas supporté." +msgstr "Ce type de visualisation ne supporte pas le cross-filtering." #: superset-frontend/src/explore/components/controls/VizTypeControl/index.tsx:64 msgid "This visualization type is not supported." @@ -17931,9 +17914,9 @@ msgid "Unexpected error: " msgstr "Erreur inattendue" #: superset/views/api.py:108 -#, fuzzy, python-format +#, python-format msgid "Unexpected time range: %s" -msgstr "Erreur inattendue" +msgstr "Intervalle de temps inattendu: %s" #: superset-frontend/src/features/home/ActivityTable.tsx:86 msgid "Unknown" @@ -18701,7 +18684,7 @@ msgid "Weekly seasonality" msgstr "" #: superset-frontend/src/explore/components/controls/DateFilterControl/utils/constants.ts:66 -#, fuzzy, python-format +#, python-format msgid "Weeks %s" msgstr "Semaines %s" @@ -19350,7 +19333,7 @@ msgid "Yearly seasonality" msgstr "" #: superset-frontend/src/explore/components/controls/DateFilterControl/utils/constants.ts:69 -#, fuzzy, python-format +#, python-format msgid "Years %s" msgstr "Année %s" @@ -19452,7 +19435,7 @@ msgstr "Vous pouvez ajouter les composants via mode edition" #: superset-frontend/src/components/Chart/ChartContextMenu/ChartContextMenu.tsx:157 msgid "You can also just click on the chart to apply cross-filter." -msgstr "" +msgstr "Vous pouvez juste cliquer sur le graphique pour appliquer le filtre" #: superset-frontend/src/dashboard/components/SliceAdder.jsx:386 msgid "" @@ -19477,7 +19460,7 @@ msgstr "" #: superset-frontend/src/components/Chart/ChartContextMenu/ChartContextMenu.tsx:178 msgid "You can't apply cross-filter on this data point." 
-msgstr "" +msgstr "Vous ne pouvez pas ajouter de filtre sur ce point de donnée" #: superset-frontend/src/explore/components/ControlPanelsContainer.tsx:501 msgid "" @@ -19827,7 +19810,6 @@ msgstr "alertes" #: superset-frontend/packages/superset-ui-chart-controls/src/shared-controls/sharedControls.tsx:160 #: superset-frontend/src/components/Chart/DrillDetail/DrillDetailMenuItems.tsx:205 #: superset-frontend/src/explore/controls.jsx:254 -#, fuzzy msgid "all" msgstr "Tous" @@ -20421,27 +20403,22 @@ msgid "label" msgstr "Label" #: superset-frontend/src/explore/components/controls/DateFilterControl/utils/constants.ts:39 -#, fuzzy msgid "last day" msgstr "hier" #: superset-frontend/src/explore/components/controls/DateFilterControl/utils/constants.ts:41 -#, fuzzy msgid "last month" msgstr "le mois dernier" #: superset-frontend/src/explore/components/controls/DateFilterControl/utils/constants.ts:42 -#, fuzzy msgid "last quarter" msgstr "le trimestre dernier" #: superset-frontend/src/explore/components/controls/DateFilterControl/utils/constants.ts:40 -#, fuzzy msgid "last week" -msgstr "la semaine derniere" +msgstr "la semaine dernière" #: superset-frontend/src/explore/components/controls/DateFilterControl/utils/constants.ts:43 -#, fuzzy msgid "last year" msgstr "l'année dernière" diff --git a/superset/translations/it/LC_MESSAGES/messages.json b/superset/translations/it/LC_MESSAGES/messages.json index c166488296..faacdd2ed0 100644 --- a/superset/translations/it/LC_MESSAGES/messages.json +++ b/superset/translations/it/LC_MESSAGES/messages.json @@ -1755,7 +1755,6 @@ ], "Manage": ["Gestisci"], "Mandatory": [""], - "Mangle Duplicate Columns": [""], "Manually set min/max values for the y-axis.": [""], "Map Style": [""], "Mapbox": ["Mapbox"], diff --git a/superset/translations/it/LC_MESSAGES/messages.po b/superset/translations/it/LC_MESSAGES/messages.po index 6dc5d8c1a8..816904b50c 100644 --- a/superset/translations/it/LC_MESSAGES/messages.po +++ b/superset/translations/it/LC_MESSAGES/messages.po @@ -9152,10 +9152,6 @@ msgstr "Database" msgid "Mandatory" msgstr "" -#: superset/views/database/forms.py:360 -msgid "Mangle Duplicate Columns" -msgstr "" - #: superset-frontend/src/explore/components/controls/TimeSeriesColumnControl/index.jsx:297 msgid "Manually set min/max values for the y-axis." msgstr "" diff --git a/superset/translations/ja/LC_MESSAGES/messages.json b/superset/translations/ja/LC_MESSAGES/messages.json index 3bccbe60ea..dbec514276 100644 --- a/superset/translations/ja/LC_MESSAGES/messages.json +++ b/superset/translations/ja/LC_MESSAGES/messages.json @@ -1803,7 +1803,6 @@ ], "Manage": ["管理"], "Mandatory": [""], - "Mangle Duplicate Columns": [""], "Manually set min/max values for the y-axis.": [""], "Map Style": [""], "MapBox": [""], diff --git a/superset/translations/ja/LC_MESSAGES/messages.po b/superset/translations/ja/LC_MESSAGES/messages.po index 4f61d71211..a052291432 100644 --- a/superset/translations/ja/LC_MESSAGES/messages.po +++ b/superset/translations/ja/LC_MESSAGES/messages.po @@ -9156,10 +9156,6 @@ msgstr "データベースのインポート" msgid "Mandatory" msgstr "" -#: superset/views/database/forms.py:360 -msgid "Mangle Duplicate Columns" -msgstr "" - #: superset-frontend/src/explore/components/controls/TimeSeriesColumnControl/index.jsx:297 msgid "Manually set min/max values for the y-axis." 
msgstr "" diff --git a/superset/translations/ko/LC_MESSAGES/messages.json b/superset/translations/ko/LC_MESSAGES/messages.json index 17f4d50915..58cbd4bccd 100644 --- a/superset/translations/ko/LC_MESSAGES/messages.json +++ b/superset/translations/ko/LC_MESSAGES/messages.json @@ -1801,7 +1801,6 @@ ], "Manage": ["관리"], "Mandatory": [""], - "Mangle Duplicate Columns": [""], "Manually set min/max values for the y-axis.": [""], "Map Style": [""], "MapBox": [""], diff --git a/superset/translations/ko/LC_MESSAGES/messages.po b/superset/translations/ko/LC_MESSAGES/messages.po index e48301a1f8..16cb93d295 100644 --- a/superset/translations/ko/LC_MESSAGES/messages.po +++ b/superset/translations/ko/LC_MESSAGES/messages.po @@ -9084,10 +9084,6 @@ msgstr "데이터베이스 선택" msgid "Mandatory" msgstr "" -#: superset/views/database/forms.py:360 -msgid "Mangle Duplicate Columns" -msgstr "" - #: superset-frontend/src/explore/components/controls/TimeSeriesColumnControl/index.jsx:297 msgid "Manually set min/max values for the y-axis." msgstr "" diff --git a/superset/translations/messages.pot b/superset/translations/messages.pot index 8c1cc701f7..01b684883e 100644 --- a/superset/translations/messages.pot +++ b/superset/translations/messages.pot @@ -8796,10 +8796,6 @@ msgstr "" msgid "Mandatory" msgstr "" -#: superset/views/database/forms.py:360 -msgid "Mangle Duplicate Columns" -msgstr "" - #: superset-frontend/src/explore/components/controls/TimeSeriesColumnControl/index.jsx:297 msgid "Manually set min/max values for the y-axis." msgstr "" diff --git a/superset/translations/nl/LC_MESSAGES/messages.json b/superset/translations/nl/LC_MESSAGES/messages.json index 7148c1d3f2..e77f0459ee 100644 --- a/superset/translations/nl/LC_MESSAGES/messages.json +++ b/superset/translations/nl/LC_MESSAGES/messages.json @@ -2214,7 +2214,6 @@ ], "Manage": ["Beheer"], "Mandatory": ["Verplicht"], - "Mangle Duplicate Columns": ["Dubbele kolommen verwijderen"], "Manually set min/max values for the y-axis.": [""], "Map": [""], "Map Style": [""], diff --git a/superset/translations/nl/LC_MESSAGES/messages.po b/superset/translations/nl/LC_MESSAGES/messages.po index b34df36498..6a6578d705 100644 --- a/superset/translations/nl/LC_MESSAGES/messages.po +++ b/superset/translations/nl/LC_MESSAGES/messages.po @@ -9110,10 +9110,6 @@ msgstr "Importeer databases" msgid "Mandatory" msgstr "Verplicht" -#: superset/views/database/forms.py:360 -msgid "Mangle Duplicate Columns" -msgstr "Dubbele kolommen verwijderen" - #: superset-frontend/src/explore/components/controls/TimeSeriesColumnControl/index.jsx:297 msgid "Manually set min/max values for the y-axis." msgstr "" diff --git a/superset/translations/pt/LC_MESSAGES/message.json b/superset/translations/pt/LC_MESSAGES/message.json index 37b1e6f4ce..12284400e1 100644 --- a/superset/translations/pt/LC_MESSAGES/message.json +++ b/superset/translations/pt/LC_MESSAGES/message.json @@ -876,7 +876,6 @@ "Column to use as the row labels of the dataframe. 
Leave empty if no index column.": [ "" ], - "Mangle Duplicate Columns": ["Coluna Datahora principal"], "Specify duplicate columns as \"X.0, X.1\".": [""], "Skip Initial Space": [""], "Skip spaces after delimiter.": [""], diff --git a/superset/translations/pt/LC_MESSAGES/messages.json b/superset/translations/pt/LC_MESSAGES/messages.json index 0b0d099b6c..22cc78fa29 100644 --- a/superset/translations/pt/LC_MESSAGES/messages.json +++ b/superset/translations/pt/LC_MESSAGES/messages.json @@ -1730,7 +1730,6 @@ "Manage email report": [""], "Manage your databases": [""], "Mandatory": [""], - "Mangle Duplicate Columns": ["Coluna Datahora principal"], "Manually set min/max values for the y-axis.": [""], "Map Style": [""], "Mapbox": ["Mapbox"], diff --git a/superset/translations/pt/LC_MESSAGES/messages.po b/superset/translations/pt/LC_MESSAGES/messages.po index 5d2d4845ca..623233c405 100644 --- a/superset/translations/pt/LC_MESSAGES/messages.po +++ b/superset/translations/pt/LC_MESSAGES/messages.po @@ -9263,10 +9263,6 @@ msgstr "" msgid "Mandatory" msgstr "" -#: superset/views/database/forms.py:360 -msgid "Mangle Duplicate Columns" -msgstr "Coluna Datahora principal" - #: superset-frontend/src/explore/components/controls/TimeSeriesColumnControl/index.jsx:297 msgid "Manually set min/max values for the y-axis." msgstr "" diff --git a/superset/translations/pt_BR/LC_MESSAGES/messages.json b/superset/translations/pt_BR/LC_MESSAGES/messages.json index 6e3a7333ba..08c1b1f776 100644 --- a/superset/translations/pt_BR/LC_MESSAGES/messages.json +++ b/superset/translations/pt_BR/LC_MESSAGES/messages.json @@ -2724,7 +2724,6 @@ "Manage email report": ["Gerenciar relatório de e-mail"], "Manage your databases": ["Gerenciar seus bancos de dados"], "Mandatory": ["Obrigatório"], - "Mangle Duplicate Columns": ["Emaranhar colunas duplicadas"], "Manually set min/max values for the y-axis.": [ "Definir manualmente os valores mínimo/máximo para o eixo y." ], diff --git a/superset/translations/pt_BR/LC_MESSAGES/messages.po b/superset/translations/pt_BR/LC_MESSAGES/messages.po index 639b2d42c2..d6d922df30 100644 --- a/superset/translations/pt_BR/LC_MESSAGES/messages.po +++ b/superset/translations/pt_BR/LC_MESSAGES/messages.po @@ -9396,10 +9396,6 @@ msgstr "Gerenciar seus bancos de dados" msgid "Mandatory" msgstr "Obrigatório" -#: superset/views/database/forms.py:360 -msgid "Mangle Duplicate Columns" -msgstr "Emaranhar colunas duplicadas" - #: superset-frontend/src/explore/components/controls/TimeSeriesColumnControl/index.jsx:297 msgid "Manually set min/max values for the y-axis." msgstr "Definir manualmente os valores mínimo/máximo para o eixo y." diff --git a/superset/translations/ru/LC_MESSAGES/messages.json b/superset/translations/ru/LC_MESSAGES/messages.json index 616630b37a..91907df1ef 100644 --- a/superset/translations/ru/LC_MESSAGES/messages.json +++ b/superset/translations/ru/LC_MESSAGES/messages.json @@ -2678,7 +2678,6 @@ "Manage email report": ["Управление рассылкой по почте"], "Manage your databases": ["Управляйте своими базами данных"], "Mandatory": ["Обязательно"], - "Mangle Duplicate Columns": ["Управление повторяющимися столбцами"], "Manually set min/max values for the y-axis.": [ "Вручную задать мин./макс. 
значения для оси Y" ], diff --git a/superset/translations/ru/LC_MESSAGES/messages.po b/superset/translations/ru/LC_MESSAGES/messages.po index 8d666021c7..bef790f515 100644 --- a/superset/translations/ru/LC_MESSAGES/messages.po +++ b/superset/translations/ru/LC_MESSAGES/messages.po @@ -9259,10 +9259,6 @@ msgstr "Управляйте своими базами данных" msgid "Mandatory" msgstr "Обязательно" -#: superset/views/database/forms.py:360 -msgid "Mangle Duplicate Columns" -msgstr "Управление повторяющимися столбцами" - #: superset-frontend/src/explore/components/controls/TimeSeriesColumnControl/index.jsx:297 msgid "Manually set min/max values for the y-axis." msgstr "Вручную задать мин./макс. значения для оси Y" diff --git a/superset/translations/sk/LC_MESSAGES/messages.json b/superset/translations/sk/LC_MESSAGES/messages.json index 43be893edf..6a3291fe74 100644 --- a/superset/translations/sk/LC_MESSAGES/messages.json +++ b/superset/translations/sk/LC_MESSAGES/messages.json @@ -2089,7 +2089,6 @@ "Manage email report": [""], "Manage your databases": [""], "Mandatory": [""], - "Mangle Duplicate Columns": [""], "Manually set min/max values for the y-axis.": [""], "Map": [""], "Map Style": [""], diff --git a/superset/translations/sk/LC_MESSAGES/messages.po b/superset/translations/sk/LC_MESSAGES/messages.po index 9c8141b1d5..3d2e044ccd 100644 --- a/superset/translations/sk/LC_MESSAGES/messages.po +++ b/superset/translations/sk/LC_MESSAGES/messages.po @@ -8841,10 +8841,6 @@ msgstr "" msgid "Mandatory" msgstr "" -#: superset/views/database/forms.py:360 -msgid "Mangle Duplicate Columns" -msgstr "" - #: superset-frontend/src/explore/components/controls/TimeSeriesColumnControl/index.jsx:297 msgid "Manually set min/max values for the y-axis." msgstr "" diff --git a/superset/translations/sl/LC_MESSAGES/messages.json b/superset/translations/sl/LC_MESSAGES/messages.json index 4d2c51fa75..1f50e6a301 100644 --- a/superset/translations/sl/LC_MESSAGES/messages.json +++ b/superset/translations/sl/LC_MESSAGES/messages.json @@ -2484,7 +2484,6 @@ "Manage email report": ["Upravljaj e-poštno poročilo"], "Manage your databases": ["Upravljajte podatkovne baze"], "Mandatory": ["Obvezno"], - "Mangle Duplicate Columns": ["Odstrani podvojene stolpce"], "Map": ["Zemljevid"], "Map Style": ["Slog zemljevida"], "MapBox": ["MapBox"], diff --git a/superset/translations/sl/LC_MESSAGES/messages.po b/superset/translations/sl/LC_MESSAGES/messages.po index f6ba479f82..0841a840cd 100644 --- a/superset/translations/sl/LC_MESSAGES/messages.po +++ b/superset/translations/sl/LC_MESSAGES/messages.po @@ -9418,10 +9418,6 @@ msgstr "Upravljajte podatkovne baze" msgid "Mandatory" msgstr "Obvezno" -#: superset/views/database/forms.py:360 -msgid "Mangle Duplicate Columns" -msgstr "Odstrani podvojene stolpce" - #: superset-frontend/src/explore/components/controls/TimeSeriesColumnControl/index.jsx:297 #, fuzzy msgid "Manually set min/max values for the y-axis." 
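The `superset/tags/models.py` hunks earlier in this diff replace the hand-rolled `session = Session(bind=connection)` / `try: ... finally: session.close()` blocks with context-managed sessions, and switch `sessionmaker(autoflush=False)` to the default `sessionmaker()`. A minimal sketch of that pattern, assuming SQLAlchemy 1.4+ (where `Session` objects act as context managers); the `Tag` model below is a simplified stand-in, not Superset's real model:

```python
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import declarative_base, sessionmaker

Base = declarative_base()


class Tag(Base):  # simplified stand-in for superset.tags.models.Tag
    __tablename__ = "tag"
    id = Column(Integer, primary_key=True)
    name = Column(String(250), unique=True)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
Session = sessionmaker()  # autoflush now defaults to True, as in the diff

# Old pattern (removed by this diff): manual cleanup in a finally block.
session = Session(bind=engine)
try:
    session.add(Tag(name="type:dashboard"))
    session.commit()
finally:
    session.close()

# New pattern: the session closes itself when the block exits,
# even if the body raises mid-transaction.
with Session(bind=engine) as session:
    session.add(Tag(name="owner:1"))
    session.commit()
```

Both forms release the connection; the context manager simply makes that guarantee impossible to forget as new branches are added to the updater hooks.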
diff --git a/superset/translations/zh/LC_MESSAGES/messages.json b/superset/translations/zh/LC_MESSAGES/messages.json index 41fdda34f3..e66218a493 100644 --- a/superset/translations/zh/LC_MESSAGES/messages.json +++ b/superset/translations/zh/LC_MESSAGES/messages.json @@ -1974,7 +1974,6 @@ "Manage": ["管理"], "Manage your databases": ["管理你的数据库"], "Mandatory": ["必填参数"], - "Mangle Duplicate Columns": ["混合重复列"], "Map": ["地图"], "Map Style": ["地图样式"], "MapBox": ["MapBox地图"], diff --git a/superset/translations/zh/LC_MESSAGES/messages.po b/superset/translations/zh/LC_MESSAGES/messages.po index e05a11e2e9..e21a83f32b 100644 --- a/superset/translations/zh/LC_MESSAGES/messages.po +++ b/superset/translations/zh/LC_MESSAGES/messages.po @@ -9138,10 +9138,6 @@ msgstr "管理你的数据库" msgid "Mandatory" msgstr "必填参数" -#: superset/views/database/forms.py:360 -msgid "Mangle Duplicate Columns" -msgstr "混合重复列" - #: superset-frontend/src/explore/components/controls/TimeSeriesColumnControl/index.jsx:297 #, fuzzy msgid "Manually set min/max values for the y-axis." diff --git a/superset/utils/core.py b/superset/utils/core.py index 1ef397053d..b9c24076a4 100644 --- a/superset/utils/core.py +++ b/superset/utils/core.py @@ -72,7 +72,7 @@ from sqlalchemy.dialects.mysql import MEDIUMTEXT from sqlalchemy.engine import Connection, Engine from sqlalchemy.engine.reflection import Inspector from sqlalchemy.sql.type_api import Variant -from sqlalchemy.types import TEXT, TypeDecorator, TypeEngine +from sqlalchemy.types import TypeEngine from typing_extensions import TypeGuard from superset.constants import ( @@ -105,7 +105,7 @@ from superset.utils.dates import datetime_to_epoch, EPOCH from superset.utils.hashing import md5_sha_from_dict, md5_sha_from_str if TYPE_CHECKING: - from superset.connectors.base.models import BaseColumn, BaseDatasource + from superset.connectors.sqla.models import BaseDatasource, TableColumn from superset.models.sql_lab import Query logging.getLogger("MARKDOWN").setLevel(logging.INFO) @@ -122,18 +122,6 @@ InputType = TypeVar("InputType") # pylint: disable=invalid-name ADHOC_FILTERS_REGEX = re.compile("^adhoc_filters") -class LenientEnum(Enum): - """Enums with a `get` method that convert a enum value to `Enum` if it is a - valid value.""" - - @classmethod - def get(cls, value: Any) -> Any: - try: - return super().__new__(cls, value) - except ValueError: - return None - - class AdhocMetricExpressionType(StrEnum): SIMPLE = "SIMPLE" SQL = "SQL" @@ -280,15 +268,6 @@ class PostProcessingContributionOrientation(StrEnum): COLUMN = "column" -class QueryMode(str, LenientEnum): - """ - Whether the query runs on aggregate or returns raw records - """ - - RAW = "raw" - AGGREGATE = "aggregate" - - class QuerySource(Enum): """ The source of a SQL query. 
@@ -454,22 +433,6 @@ class DashboardEncoder(json.JSONEncoder): return json.JSONEncoder(sort_keys=True).default(o) -class JSONEncodedDict(TypeDecorator): # pylint: disable=abstract-method - """Represents an immutable structure as a json-encoded string.""" - - impl = TEXT - - def process_bind_param( - self, value: dict[Any, Any] | None, dialect: str - ) -> str | None: - return json.dumps(value) if value is not None else None - - def process_result_value( - self, value: str | None, dialect: str - ) -> dict[Any, Any] | None: - return json.loads(value) if value is not None else None - - def format_timedelta(time_delta: timedelta) -> str: """ Ensures negative time deltas are easily interpreted by humans @@ -1665,7 +1628,7 @@ def extract_dataframe_dtypes( return generic_types -def extract_column_dtype(col: BaseColumn) -> GenericDataType: +def extract_column_dtype(col: TableColumn) -> GenericDataType: if col.is_temporal: return GenericDataType.TEMPORAL if col.is_numeric: diff --git a/superset/utils/date_parser.py b/superset/utils/date_parser.py index 438e379a96..2d49424a82 100644 --- a/superset/utils/date_parser.py +++ b/superset/utils/date_parser.py @@ -41,7 +41,7 @@ from pyparsing import ( Suppress, ) -from superset.charts.commands.exceptions import ( +from superset.commands.chart.exceptions import ( TimeDeltaAmbiguousError, TimeRangeAmbiguousError, TimeRangeParseFailError, diff --git a/superset/utils/retries.py b/superset/utils/retries.py index 8a1e6b95ea..3af821362d 100644 --- a/superset/utils/retries.py +++ b/superset/utils/retries.py @@ -15,6 +15,7 @@ # specific language governing permissions and limitations # under the License. +import logging from collections.abc import Generator from typing import Any, Callable, Optional @@ -26,6 +27,7 @@ def retry_call( *args: Any, strategy: Callable[..., Generator[int, None, None]] = backoff.constant, exception: type[Exception] = Exception, + giveup_log_level: int = logging.WARNING, fargs: Optional[list[Any]] = None, fkwargs: Optional[dict[str, Any]] = None, **kwargs: Any @@ -33,6 +35,7 @@ def retry_call( """ Retry a given call. 
""" + kwargs["giveup_log_level"] = giveup_log_level decorated = backoff.on_exception(strategy, exception, *args, **kwargs)(func) fargs = fargs or [] fkwargs = fkwargs or {} diff --git a/superset/utils/screenshots.py b/superset/utils/screenshots.py index 8609d65038..bf6ed0f9e8 100644 --- a/superset/utils/screenshots.py +++ b/superset/utils/screenshots.py @@ -201,7 +201,7 @@ class BaseScreenshot: logger.debug("Cropping to: %s*%s", str(img.size[0]), str(desired_width)) img = img.crop((0, 0, img.size[0], desired_width)) logger.debug("Resizing to %s", str(thumb_size)) - img = img.resize(thumb_size, Image.ANTIALIAS) + img = img.resize(thumb_size, Image.Resampling.LANCZOS) new_img = BytesIO() if output != "png": img = img.convert("RGB") diff --git a/superset/utils/url_map_converters.py b/superset/utils/url_map_converters.py index 11e40267b3..ed10040227 100644 --- a/superset/utils/url_map_converters.py +++ b/superset/utils/url_map_converters.py @@ -18,7 +18,7 @@ from typing import Any from werkzeug.routing import BaseConverter, Map -from superset.tags.models import ObjectTypes +from superset.tags.models import ObjectType class RegexConverter(BaseConverter): @@ -31,7 +31,7 @@ class ObjectTypeConverter(BaseConverter): """Validate that object_type is indeed an object type.""" def to_python(self, value: str) -> Any: - return ObjectTypes[value] + return ObjectType[value] def to_url(self, value: Any) -> str: return value.name diff --git a/superset/utils/webdriver.py b/superset/utils/webdriver.py index 4353319072..f7814bfd3b 100644 --- a/superset/utils/webdriver.py +++ b/superset/utils/webdriver.py @@ -48,8 +48,8 @@ if TYPE_CHECKING: if feature_flag_manager.is_feature_enabled("PLAYWRIGHT_REPORTS_AND_THUMBNAILS"): from playwright.sync_api import ( BrowserContext, - ElementHandle, Error as PlaywrightError, + Locator, Page, sync_playwright, TimeoutError as PlaywrightTimeout, @@ -105,14 +105,7 @@ class WebDriverPlaywright(WebDriverProxy): alert_div.get_by_role("button").click() # wait for modal to show up - page.wait_for_selector( - ".ant-modal-content", - timeout=current_app.config[ - "SCREENSHOT_WAIT_FOR_ERROR_MODAL_VISIBLE" - ] - * 1000, - state="visible", - ) + page.locator(".ant-modal-content").wait_for(state="visible") err_msg_div = page.locator(".ant-modal-content .ant-modal-body") # # # collect error message @@ -125,14 +118,7 @@ class WebDriverPlaywright(WebDriverProxy): page.locator(".ant-modal-content .ant-modal-close").click() # # # wait until the modal becomes invisible - page.wait_for_selector( - ".ant-modal-content", - timeout=current_app.config[ - "SCREENSHOT_WAIT_FOR_ERROR_MODAL_INVISIBLE" - ] - * 1000, - state="detached", - ) + page.locator(".ant-modal-content").wait_for(state="detached") try: # Even if some errors can't be updated in the screenshot, # keep all the errors in the server log and do not fail the loop @@ -147,7 +133,9 @@ class WebDriverPlaywright(WebDriverProxy): return error_messages - def get_screenshot(self, url: str, element_name: str, user: User) -> bytes | None: + def get_screenshot( # pylint: disable=too-many-locals, too-many-statements + self, url: str, element_name: str, user: User + ) -> bytes | None: with sync_playwright() as playwright: browser = playwright.chromium.launch() pixel_density = current_app.config["WEBDRIVER_WINDOW"].get( @@ -166,24 +154,31 @@ class WebDriverPlaywright(WebDriverProxy): ) self.auth(user, context) page = context.new_page() - page.goto( - url, wait_until=current_app.config["SCREENSHOT_PLAYWRIGHT_WAIT_EVENT"] - ) + try: + page.goto( + url, + 
wait_until=current_app.config["SCREENSHOT_PLAYWRIGHT_WAIT_EVENT"], + ) + except PlaywrightTimeout: + logger.exception( + "Web event %s not detected. Page %s might not have been fully loaded", + current_app.config["SCREENSHOT_PLAYWRIGHT_WAIT_EVENT"], + url, + ) + img: bytes | None = None selenium_headstart = current_app.config["SCREENSHOT_SELENIUM_HEADSTART"] logger.debug("Sleeping for %i seconds", selenium_headstart) page.wait_for_timeout(selenium_headstart * 1000) - element: ElementHandle + element: Locator try: try: # page didn't load logger.debug( "Wait for the presence of %s at url: %s", element_name, url ) - element = page.wait_for_selector( - f".{element_name}", - timeout=self._screenshot_locate_wait * 1000, - ) + element = page.locator(f".{element_name}") + element.wait_for() except PlaywrightTimeout as ex: logger.exception("Timed out requesting url %s", url) raise ex @@ -191,9 +186,10 @@ class WebDriverPlaywright(WebDriverProxy): try: # chart containers didn't render logger.debug("Wait for chart containers to draw at url: %s", url) - page.wait_for_selector( - ".slice_container", timeout=self._screenshot_locate_wait * 1000 - ) + slice_container_locator = page.locator(".slice_container") + slice_container_locator.first.wait_for() + for slice_container_elem in slice_container_locator.all(): + slice_container_elem.wait_for() except PlaywrightTimeout as ex: logger.exception( "Timed out waiting for chart containers to draw at url %s", @@ -205,11 +201,8 @@ class WebDriverPlaywright(WebDriverProxy): logger.debug( "Wait for loading element of charts to be gone at url: %s", url ) - page.wait_for_selector( - ".loading", - timeout=self._screenshot_load_wait * 1000, - state="detached", - ) + for loading_element in page.locator(".loading").all(): + loading_element.wait_for(state="detached") except PlaywrightTimeout as ex: logger.exception( "Timed out waiting for charts to load at url %s", url diff --git a/superset/views/api.py b/superset/views/api.py index 312efb947e..eeedd7c641 100644 --- a/superset/views/api.py +++ b/superset/views/api.py @@ -26,7 +26,7 @@ from flask_appbuilder.security.decorators import has_access_api from flask_babel import lazy_gettext as _ from superset import db, event_logger -from superset.charts.commands.exceptions import ( +from superset.commands.chart.exceptions import ( TimeRangeAmbiguousError, TimeRangeParseFailError, ) diff --git a/superset/views/base.py b/superset/views/base.py index 4015b7a028..9149c7ad91 100644 --- a/superset/views/base.py +++ b/superset/views/base.py @@ -14,6 +14,8 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
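For context on the webdriver changes above: explicit wait_for_selector calls with config-derived timeouts are replaced by Playwright's Locator API, where wait_for() blocks until the element reaches the requested state and otherwise uses Playwright's default timeout. A minimal standalone sketch of the pattern (the URL is a placeholder):

    from playwright.sync_api import sync_playwright

    with sync_playwright() as p:
        browser = p.chromium.launch()
        page = browser.new_page()
        page.goto("https://example.com")

        # Old style: element = page.wait_for_selector("h1", timeout=30_000)
        # New style: a Locator resolves lazily; wait_for() waits for the
        # default "visible" state ("attached", "detached", "hidden" also work).
        heading = page.locator("h1")
        heading.wait_for(state="visible")
        print(heading.inner_text())

        browser.close()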
+from __future__ import annotations + import dataclasses import functools import logging @@ -21,7 +23,7 @@ import os import traceback from datetime import datetime from importlib.resources import files -from typing import Any, Callable, cast, Optional, Union +from typing import Any, Callable, cast import simplejson as json import yaml @@ -120,6 +122,7 @@ FRONTEND_CONF_KEYS = ( "ALERT_REPORTS_DEFAULT_WORKING_TIMEOUT", "NATIVE_FILTER_DEFAULT_ROW_LIMIT", "PREVENT_UNSAFE_DEFAULT_URLS_ON_DATASET", + "JWT_ACCESS_CSRF_COOKIE_NAME", ) logger = logging.getLogger(__name__) @@ -139,15 +142,11 @@ def get_error_msg() -> str: def json_error_response( - msg: Optional[str] = None, + msg: str | None = None, status: int = 500, - payload: Optional[dict[str, Any]] = None, - link: Optional[str] = None, + payload: dict[str, Any] | None = None, ) -> FlaskResponse: - if not payload: - payload = {"error": f"{msg}"} - if link: - payload["link"] = link + payload = payload or {"error": f"{msg}"} return Response( json.dumps(payload, default=utils.json_iso_dttm_ser, ignore_nan=True), @@ -159,10 +158,9 @@ def json_error_response( def json_errors_response( errors: list[SupersetError], status: int = 500, - payload: Optional[dict[str, Any]] = None, + payload: dict[str, Any] | None = None, ) -> FlaskResponse: - if not payload: - payload = {} + payload = payload or {} payload["errors"] = [dataclasses.asdict(error) for error in errors] return Response( @@ -182,7 +180,7 @@ def data_payload_response(payload_json: str, has_error: bool = False) -> FlaskRe def generate_download_headers( - extension: str, filename: Optional[str] = None + extension: str, filename: str | None = None ) -> dict[str, Any]: filename = filename if filename else datetime.now().strftime("%Y%m%d_%H%M%S") content_disp = f"attachment; filename={filename}.{extension}" @@ -192,7 +190,7 @@ def generate_download_headers( def deprecated( eol_version: str = "4.0.0", - new_target: Optional[str] = None, + new_target: str | None = None, ) -> Callable[[Callable[..., FlaskResponse]], Callable[..., FlaskResponse]]: """ A decorator to set an API endpoint from SupersetView has deprecated. @@ -200,7 +198,7 @@ def deprecated( """ def _deprecated(f: Callable[..., FlaskResponse]) -> Callable[..., FlaskResponse]: - def wraps(self: "BaseSupersetView", *args: Any, **kwargs: Any) -> FlaskResponse: + def wraps(self: BaseSupersetView, *args: Any, **kwargs: Any) -> FlaskResponse: message = ( "%s.%s " "This API endpoint is deprecated and will be removed in version %s" @@ -227,7 +225,7 @@ def api(f: Callable[..., FlaskResponse]) -> Callable[..., FlaskResponse]: return the response in the JSON format """ - def wraps(self: "BaseSupersetView", *args: Any, **kwargs: Any) -> FlaskResponse: + def wraps(self: BaseSupersetView, *args: Any, **kwargs: Any) -> FlaskResponse: try: return f(self, *args, **kwargs) except NoAuthorizationError: @@ -249,7 +247,7 @@ def handle_api_exception( exceptions. 
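For context on the annotation changes in this file: with from __future__ import annotations in place, annotations are stored as strings (PEP 563), so the PEP 604 spelling str | None can replace Optional[str] and Union[...] even on interpreters older than 3.10. A small illustration (json_error is a hypothetical function, not part of the patch):

    from __future__ import annotations

    from typing import Any

    def json_error(
        msg: str | None = None,
        payload: dict[str, Any] | None = None,
    ) -> dict[str, Any]:
        # Deferred evaluation keeps `str | None` unevaluated at definition
        # time, so this parses on Python 3.8/3.9 as well.
        return payload or {"error": f"{msg}"}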
""" - def wraps(self: "BaseSupersetView", *args: Any, **kwargs: Any) -> FlaskResponse: + def wraps(self: BaseSupersetView, *args: Any, **kwargs: Any) -> FlaskResponse: try: return f(self, *args, **kwargs) except SupersetSecurityException as ex: @@ -294,11 +292,11 @@ class BaseSupersetView(BaseView): ) def render_app_template( - self, extra_bootstrap_data: Optional[dict[str, Any]] = None + self, extra_bootstrap_data: dict[str, Any] | None = None ) -> FlaskResponse: payload = { "user": bootstrap_user_data(g.user, include_perms=True), - "common": common_bootstrap_payload(g.user), + "common": common_bootstrap_payload(), **(extra_bootstrap_data or {}), } return self.render_template( @@ -335,21 +333,16 @@ def get_environment_tag() -> dict[str, Any]: def menu_data(user: User) -> dict[str, Any]: - menu = appbuilder.menu.get_data() + languages = { + lang: {**appbuilder.languages[lang], "url": appbuilder.get_url_for_locale(lang)} + for lang in appbuilder.languages + } - languages = {} - for lang in appbuilder.languages: - languages[lang] = { - **appbuilder.languages[lang], - "url": appbuilder.get_url_for_locale(lang), - } - brand_text = appbuilder.app.config["LOGO_RIGHT_TEXT"] - if callable(brand_text): + if callable(brand_text := appbuilder.app.config["LOGO_RIGHT_TEXT"]): brand_text = brand_text() - build_number = appbuilder.app.config["BUILD_NUMBER"] return { - "menu": menu, + "menu": appbuilder.menu.get_data(), "brand": { "path": appbuilder.app.config["LOGO_TARGET_PATH"] or "/superset/welcome/", "icon": appbuilder.app_icon, @@ -369,9 +362,9 @@ def menu_data(user: User) -> dict[str, Any]: "documentation_text": appbuilder.app.config["DOCUMENTATION_TEXT"], "version_string": appbuilder.app.config["VERSION_STRING"], "version_sha": appbuilder.app.config["VERSION_SHA"], - "build_number": build_number, + "build_number": appbuilder.app.config["BUILD_NUMBER"], "languages": languages, - "show_language_picker": len(languages.keys()) > 1, + "show_language_picker": len(languages) > 1, "user_is_anonymous": user.is_anonymous, "user_info_url": None if is_feature_enabled("MENU_HIDE_USER_INFO") @@ -387,7 +380,9 @@ def menu_data(user: User) -> dict[str, Any]: @cache_manager.cache.memoize(timeout=60) -def cached_common_bootstrap_data(user: User, locale: str) -> dict[str, Any]: +def cached_common_bootstrap_data( # pylint: disable=unused-argument + user_id: int | None, locale: str +) -> dict[str, Any]: """Common data always sent to the client The function is memoized as the return value only changes when user permissions @@ -424,15 +419,15 @@ def cached_common_bootstrap_data(user: User, locale: str) -> dict[str, Any]: "extra_sequential_color_schemes": conf["EXTRA_SEQUENTIAL_COLOR_SCHEMES"], "extra_categorical_color_schemes": conf["EXTRA_CATEGORICAL_COLOR_SCHEMES"], "theme_overrides": conf["THEME_OVERRIDES"], - "menu_data": menu_data(user), + "menu_data": menu_data(g.user), } bootstrap_data.update(conf["COMMON_BOOTSTRAP_OVERRIDES_FUNC"](bootstrap_data)) return bootstrap_data -def common_bootstrap_payload(user: User) -> dict[str, Any]: +def common_bootstrap_payload() -> dict[str, Any]: return { - **cached_common_bootstrap_data(user, get_locale()), + **cached_common_bootstrap_data(utils.get_user_id(), get_locale()), "flash_messages": get_flashed_messages(with_categories=True), } @@ -542,7 +537,7 @@ def show_unexpected_exception(ex: Exception) -> FlaskResponse: def get_common_bootstrap_data() -> dict[str, Any]: def serialize_bootstrap_data() -> str: return json.dumps( - {"common": common_bootstrap_payload(g.user)}, + 
{"common": common_bootstrap_payload()}, default=utils.pessimistic_json_iso_dttm_ser, ) @@ -560,7 +555,7 @@ class SupersetModelView(ModelView): def render_app_template(self) -> FlaskResponse: payload = { "user": bootstrap_user_data(g.user, include_perms=True), - "common": common_bootstrap_payload(g.user), + "common": common_bootstrap_payload(), } return self.render_template( "superset/spa.html", @@ -595,11 +590,11 @@ class YamlExportMixin: # pylint: disable=too-few-public-methods Used on DatabaseView for cli compatibility """ - yaml_dict_key: Optional[str] = None + yaml_dict_key: str | None = None @action("yaml_export", __("Export to YAML"), __("Export to YAML?"), "fa-download") def yaml_export( - self, items: Union[ImportExportMixin, list[ImportExportMixin]] + self, items: ImportExportMixin | list[ImportExportMixin] ) -> FlaskResponse: if not isinstance(items, list): items = [items] diff --git a/superset/views/core.py b/superset/views/core.py index 2f9b99eba0..9ad2f63fdc 100755 --- a/superset/views/core.py +++ b/superset/views/core.py @@ -44,27 +44,26 @@ from superset import ( security_manager, ) from superset.async_events.async_query_manager import AsyncQueryTokenException -from superset.charts.commands.exceptions import ChartNotFoundError -from superset.charts.commands.warm_up_cache import ChartWarmUpCacheCommand +from superset.commands.chart.exceptions import ChartNotFoundError +from superset.commands.chart.warm_up_cache import ChartWarmUpCacheCommand +from superset.commands.dashboard.importers.v0 import ImportDashboardsCommand +from superset.commands.dashboard.permalink.get import GetDashboardPermalinkCommand +from superset.commands.dataset.exceptions import DatasetNotFoundError +from superset.commands.explore.form_data.create import CreateFormDataCommand +from superset.commands.explore.form_data.get import GetFormDataCommand +from superset.commands.explore.form_data.parameters import CommandParameters +from superset.commands.explore.permalink.get import GetExplorePermalinkCommand from superset.common.chart_data import ChartDataResultFormat, ChartDataResultType -from superset.connectors.base.models import BaseDatasource -from superset.connectors.sqla.models import SqlaTable +from superset.connectors.sqla.models import BaseDatasource, SqlaTable from superset.daos.chart import ChartDAO from superset.daos.datasource import DatasourceDAO -from superset.dashboards.commands.importers.v0 import ImportDashboardsCommand -from superset.dashboards.permalink.commands.get import GetDashboardPermalinkCommand from superset.dashboards.permalink.exceptions import DashboardPermalinkGetFailedError -from superset.datasets.commands.exceptions import DatasetNotFoundError from superset.exceptions import ( CacheLoadError, DatabaseNotFound, SupersetException, SupersetSecurityException, ) -from superset.explore.form_data.commands.create import CreateFormDataCommand -from superset.explore.form_data.commands.get import GetFormDataCommand -from superset.explore.form_data.commands.parameters import CommandParameters -from superset.explore.permalink.commands.get import GetExplorePermalinkCommand from superset.explore.permalink.exceptions import ExplorePermalinkGetFailedError from superset.extensions import async_query_manager, cache_manager from superset.models.core import Database @@ -605,7 +604,7 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods "force": force, "user": bootstrap_user_data(g.user, include_perms=True), "forced_height": request.args.get("height"), - "common": 
common_bootstrap_payload(g.user), + "common": common_bootstrap_payload(), } if slc: title = slc.slice_name @@ -863,7 +862,7 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods bootstrap_data=json.dumps( { "user": bootstrap_user_data(g.user, include_perms=True), - "common": common_bootstrap_payload(g.user), + "common": common_bootstrap_payload(), }, default=utils.pessimistic_json_iso_dttm_ser, ), @@ -954,7 +953,7 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods payload = { "user": bootstrap_user_data(g.user, include_perms=True), - "common": common_bootstrap_payload(g.user), + "common": common_bootstrap_payload(), } return self.render_template( diff --git a/superset/views/dashboard/views.py b/superset/views/dashboard/views.py index ce5e8f1e07..0b41a67ee2 100644 --- a/superset/views/dashboard/views.py +++ b/superset/views/dashboard/views.py @@ -151,7 +151,7 @@ class Dashboard(BaseSupersetView): ) bootstrap_data = { - "common": common_bootstrap_payload(g.user), + "common": common_bootstrap_payload(), "embedded": {"dashboard_id": dashboard_id_or_slug}, } diff --git a/superset/views/database/forms.py b/superset/views/database/forms.py index 9e3ba500af..f8e528c4e3 100644 --- a/superset/views/database/forms.py +++ b/superset/views/database/forms.py @@ -357,10 +357,6 @@ class ExcelToDatabaseForm(UploadToDatabaseForm): validators=[Optional(), NumberRange(min=0)], widget=BS3TextFieldWidget(), ) - mangle_dupe_cols = BooleanField( - _("Mangle Duplicate Columns"), - description=_('Specify duplicate columns as "X.0, X.1".'), - ) skiprows = IntegerField( _("Skip Rows"), description=_("Number of rows to skip at start of file."), diff --git a/superset/views/database/validators.py b/superset/views/database/validators.py index 2ee49c8210..e4fef3446c 100644 --- a/superset/views/database/validators.py +++ b/superset/views/database/validators.py @@ -21,7 +21,7 @@ from flask_babel import lazy_gettext as _ from marshmallow import ValidationError from superset import security_manager -from superset.databases.commands.exceptions import DatabaseInvalidError +from superset.commands.database.exceptions import DatabaseInvalidError from superset.databases.utils import make_url_safe from superset.models.core import Database diff --git a/superset/views/database/views.py b/superset/views/database/views.py index 0a91df2d6f..9f54ae8b78 100644 --- a/superset/views/database/views.py +++ b/superset/views/database/views.py @@ -307,7 +307,6 @@ class ExcelToDatabaseView(SimpleFormView): def form_get(self, form: ExcelToDatabaseForm) -> None: form.header.data = 0 - form.mangle_dupe_cols.data = True form.decimal.data = "." 
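For context on dropping mangle_dupe_cols from the Excel upload form: recent pandas releases deprecated and then removed the mangle_dupe_cols keyword from their readers, and duplicate headers are now always disambiguated, which presumably left the form field without effect. A quick illustration:

    import io

    import pandas as pd

    # Duplicate column names are always renamed; no keyword required.
    csv = io.StringIO("a,a,b\n1,2,3\n")
    df = pd.read_csv(csv)
    print(list(df.columns))  # ['a', 'a.1', 'b']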
form.if_exists.data = "fail" form.sheet_name.data = "" @@ -343,7 +342,7 @@ class ExcelToDatabaseView(SimpleFormView): index_col=form.index_col.data, io=form.excel_file.data, keep_default_na=not form.null_values.data, - na_values=form.null_values.data if form.null_values.data else None, + na_values=form.null_values.data if form.null_values.data else [], parse_dates=form.parse_dates.data, skiprows=form.skiprows.data, sheet_name=form.sheet_name.data if form.sheet_name.data else 0, diff --git a/superset/views/datasource/utils.py b/superset/views/datasource/utils.py index 65b19c3493..b08d1ccc15 100644 --- a/superset/views/datasource/utils.py +++ b/superset/views/datasource/utils.py @@ -17,12 +17,12 @@ from typing import Any, Optional from superset import app, db +from superset.commands.dataset.exceptions import DatasetSamplesFailedError from superset.common.chart_data import ChartDataResultType from superset.common.query_context_factory import QueryContextFactory from superset.common.utils.query_cache_manager import QueryCacheManager from superset.constants import CacheRegion from superset.daos.datasource import DatasourceDAO -from superset.datasets.commands.exceptions import DatasetSamplesFailedError from superset.utils.core import QueryStatus from superset.views.datasource.schemas import SamplesPayloadSchema @@ -43,7 +43,7 @@ def get_limit_clause(page: Optional[int], per_page: Optional[int]) -> dict[str, return {"row_offset": offset, "row_limit": limit} -def get_samples( # pylint: disable=too-many-arguments,too-many-locals +def get_samples( # pylint: disable=too-many-arguments datasource_type: str, datasource_id: int, force: bool = False, @@ -104,21 +104,18 @@ def get_samples( # pylint: disable=too-many-arguments,too-many-locals result_type=ChartDataResultType.FULL, force=force, ) - samples_results = samples_instance.get_payload() - count_star_results = count_star_instance.get_payload() try: - sample_data = samples_results["queries"][0] - count_star_data = count_star_results["queries"][0] - failed_status = ( - sample_data.get("status") == QueryStatus.FAILED - or count_star_data.get("status") == QueryStatus.FAILED - ) - error_msg = sample_data.get("error") or count_star_data.get("error") - if failed_status and error_msg: - cache_key = sample_data.get("cache_key") - QueryCacheManager.delete(cache_key, region=CacheRegion.DATA) - raise DatasetSamplesFailedError(error_msg) + count_star_data = count_star_instance.get_payload()["queries"][0] + + if count_star_data.get("status") == QueryStatus.FAILED: + raise DatasetSamplesFailedError(count_star_data.get("error")) + + sample_data = samples_instance.get_payload()["queries"][0] + + if sample_data.get("status") == QueryStatus.FAILED: + QueryCacheManager.delete(count_star_data.get("cache_key"), CacheRegion.DATA) + raise DatasetSamplesFailedError(sample_data.get("error")) sample_data["page"] = page sample_data["per_page"] = per_page diff --git a/superset/views/datasource/views.py b/superset/views/datasource/views.py index 56acbd8580..a4c158a11f 100644 --- a/superset/views/datasource/views.py +++ b/superset/views/datasource/views.py @@ -28,14 +28,14 @@ from sqlalchemy.exc import NoSuchTableError from sqlalchemy.orm.exc import NoResultFound from superset import db, event_logger, security_manager +from superset.commands.dataset.exceptions import ( + DatasetForbiddenError, + DatasetNotFoundError, +) from superset.commands.utils import populate_owners from superset.connectors.sqla.models import SqlaTable from superset.connectors.sqla.utils import 
get_physical_table_metadata from superset.daos.datasource import DatasourceDAO -from superset.datasets.commands.exceptions import ( - DatasetForbiddenError, - DatasetNotFoundError, -) from superset.exceptions import SupersetException, SupersetSecurityException from superset.models.core import Database from superset.superset_typing import FlaskResponse diff --git a/superset/viz.py b/superset/viz.py index 2e697a77be..8ba785ddcf 100644 --- a/superset/viz.py +++ b/superset/viz.py @@ -84,7 +84,7 @@ from superset.utils.hashing import md5_sha_from_str if TYPE_CHECKING: from superset.common.query_context_factory import QueryContextFactory - from superset.connectors.base.models import BaseDatasource + from superset.connectors.sqla.models import BaseDatasource config = app.config stats_logger = config["STATS_LOGGER"] diff --git a/tests/integration_tests/base_tests.py b/tests/integration_tests/base_tests.py index 7f7c543d8b..0040ec60f6 100644 --- a/tests/integration_tests/base_tests.py +++ b/tests/integration_tests/base_tests.py @@ -36,8 +36,7 @@ from sqlalchemy.dialects.mysql import dialect from tests.integration_tests.test_app import app, login from superset.sql_parse import CtasMethod from superset import db, security_manager -from superset.connectors.base.models import BaseDatasource -from superset.connectors.sqla.models import SqlaTable +from superset.connectors.sqla.models import BaseDatasource, SqlaTable from superset.models import core as models from superset.models.slice import Slice from superset.models.core import Database diff --git a/tests/integration_tests/charts/api_tests.py b/tests/integration_tests/charts/api_tests.py index ae64eba807..69888104fa 100644 --- a/tests/integration_tests/charts/api_tests.py +++ b/tests/integration_tests/charts/api_tests.py @@ -28,8 +28,8 @@ from parameterized import parameterized from sqlalchemy import and_ from sqlalchemy.sql import func -from superset.charts.commands.exceptions import ChartDataQueryFailedError -from superset.charts.data.commands.get_data_command import ChartDataCommand +from superset.commands.chart.data.get_data_command import ChartDataCommand +from superset.commands.chart.exceptions import ChartDataQueryFailedError from superset.connectors.sqla.models import SqlaTable from superset.extensions import cache_manager, db, security_manager from superset.models.core import Database, FavStar, FavStarClassName @@ -981,7 +981,7 @@ class TestChartApi(SupersetTestCase, ApiOwnersTestCaseMixin, InsertChartMixin): rv = self.get_assert_metric(uri, "get_list") self.assertEqual(rv.status_code, 200) data = json.loads(rv.data.decode("utf-8")) - self.assertEqual(data["count"], 34) + self.assertEqual(data["count"], 33) @pytest.mark.usefixtures("load_energy_table_with_slice", "add_dashboard_to_chart") def test_get_charts_dashboards(self): @@ -1447,7 +1447,7 @@ class TestChartApi(SupersetTestCase, ApiOwnersTestCaseMixin, InsertChartMixin): """ Chart API: Test get charts filter """ - # Assuming we have 34 sample charts + # Assuming we have 33 sample charts self.login(username="admin") arguments = {"page_size": 10, "page": 0} uri = f"api/v1/chart/?q={prison.dumps(arguments)}" @@ -1461,7 +1461,7 @@ class TestChartApi(SupersetTestCase, ApiOwnersTestCaseMixin, InsertChartMixin): rv = self.get_assert_metric(uri, "get_list") self.assertEqual(rv.status_code, 200) data = json.loads(rv.data.decode("utf-8")) - self.assertEqual(len(data["result"]), 4) + self.assertEqual(len(data["result"]), 3) def test_get_charts_no_data_access(self): """ diff --git 
a/tests/integration_tests/charts/commands_tests.py b/tests/integration_tests/charts/commands_tests.py index f9785a4dd6..87c7823ae5 100644 --- a/tests/integration_tests/charts/commands_tests.py +++ b/tests/integration_tests/charts/commands_tests.py @@ -22,15 +22,15 @@ import yaml from flask import g from superset import db, security_manager -from superset.charts.commands.create import CreateChartCommand -from superset.charts.commands.exceptions import ( +from superset.commands.chart.create import CreateChartCommand +from superset.commands.chart.exceptions import ( ChartNotFoundError, WarmUpCacheChartNotFoundError, ) -from superset.charts.commands.export import ExportChartsCommand -from superset.charts.commands.importers.v1 import ImportChartsCommand -from superset.charts.commands.update import UpdateChartCommand -from superset.charts.commands.warm_up_cache import ChartWarmUpCacheCommand +from superset.commands.chart.export import ExportChartsCommand +from superset.commands.chart.importers.v1 import ImportChartsCommand +from superset.commands.chart.update import UpdateChartCommand +from superset.commands.chart.warm_up_cache import ChartWarmUpCacheCommand from superset.commands.exceptions import CommandInvalidError from superset.commands.importers.exceptions import IncorrectVersionError from superset.connectors.sqla.models import SqlaTable @@ -171,7 +171,7 @@ class TestExportChartsCommand(SupersetTestCase): class TestImportChartsCommand(SupersetTestCase): - @patch("superset.charts.commands.importers.v1.utils.g") + @patch("superset.commands.chart.importers.v1.utils.g") @patch("superset.security.manager.g") def test_import_v1_chart(self, sm_g, utils_g): """Test that we can import a chart""" @@ -324,7 +324,7 @@ class TestImportChartsCommand(SupersetTestCase): class TestChartsCreateCommand(SupersetTestCase): @patch("superset.utils.core.g") - @patch("superset.charts.commands.create.g") + @patch("superset.commands.chart.create.g") @patch("superset.security.manager.g") @pytest.mark.usefixtures("load_energy_table_with_slice") def test_create_v1_response(self, mock_sm_g, mock_c_g, mock_u_g): @@ -354,7 +354,7 @@ class TestChartsCreateCommand(SupersetTestCase): class TestChartsUpdateCommand(SupersetTestCase): - @patch("superset.charts.commands.update.g") + @patch("superset.commands.chart.update.g") @patch("superset.utils.core.g") @patch("superset.security.manager.g") @pytest.mark.usefixtures("load_energy_table_with_slice") diff --git a/tests/integration_tests/charts/data/api_tests.py b/tests/integration_tests/charts/data/api_tests.py index 32a4be160c..4def03ff4e 100644 --- a/tests/integration_tests/charts/data/api_tests.py +++ b/tests/integration_tests/charts/data/api_tests.py @@ -42,7 +42,7 @@ from tests.integration_tests.fixtures.energy_dashboard import ( import pytest from superset.models.slice import Slice -from superset.charts.data.commands.get_data_command import ChartDataCommand +from superset.commands.chart.data.get_data_command import ChartDataCommand from superset.connectors.sqla.models import TableColumn, SqlaTable from superset.errors import SupersetErrorType from superset.extensions import async_query_manager_factory, db @@ -1293,7 +1293,6 @@ def test_chart_cache_timeout( slice_with_cache_timeout = load_energy_table_with_slice[0] slice_with_cache_timeout.cache_timeout = 20 - db.session.merge(slice_with_cache_timeout) datasource: SqlaTable = ( db.session.query(SqlaTable) @@ -1301,7 +1300,6 @@ def test_chart_cache_timeout( .first() ) datasource.cache_timeout = 1254 - 
db.session.merge(datasource) db.session.commit() @@ -1331,7 +1329,6 @@ def test_chart_cache_timeout_not_present( .first() ) datasource.cache_timeout = 1980 - db.session.merge(datasource) db.session.commit() rv = test_client.post(CHART_DATA_URI, json=physical_query_context) diff --git a/tests/integration_tests/cli_tests.py b/tests/integration_tests/cli_tests.py index f9195a6c26..55557ab32d 100644 --- a/tests/integration_tests/cli_tests.py +++ b/tests/integration_tests/cli_tests.py @@ -137,7 +137,7 @@ def test_export_dashboards_versioned_export(app_context, fs): "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": True}, clear=True ) @mock.patch( - "superset.dashboards.commands.export.ExportDashboardsCommand.run", + "superset.commands.dashboard.export.ExportDashboardsCommand.run", side_effect=Exception(), ) def test_failing_export_dashboards_versioned_export( @@ -191,7 +191,7 @@ def test_export_datasources_versioned_export(app_context, fs): "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": True}, clear=True ) @mock.patch( - "superset.dashboards.commands.export.ExportDatasetsCommand.run", + "superset.commands.dashboard.export.ExportDatasetsCommand.run", side_effect=Exception(), ) def test_failing_export_datasources_versioned_export( @@ -217,7 +217,7 @@ def test_failing_export_datasources_versioned_export( @mock.patch.dict( "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": True}, clear=True ) -@mock.patch("superset.dashboards.commands.importers.dispatcher.ImportDashboardsCommand") +@mock.patch("superset.commands.dashboard.importers.dispatcher.ImportDashboardsCommand") def test_import_dashboards_versioned_export(import_dashboards_command, app_context, fs): """ Test that both ZIP and JSON can be imported. @@ -261,7 +261,7 @@ def test_import_dashboards_versioned_export(import_dashboards_command, app_conte "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": True}, clear=True ) @mock.patch( - "superset.dashboards.commands.importers.dispatcher.ImportDashboardsCommand.run", + "superset.commands.dashboard.importers.dispatcher.ImportDashboardsCommand.run", side_effect=Exception(), ) def test_failing_import_dashboards_versioned_export( @@ -304,7 +304,7 @@ def test_failing_import_dashboards_versioned_export( @mock.patch.dict( "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": True}, clear=True ) -@mock.patch("superset.datasets.commands.importers.dispatcher.ImportDatasetsCommand") +@mock.patch("superset.commands.dataset.importers.dispatcher.ImportDatasetsCommand") def test_import_datasets_versioned_export(import_datasets_command, app_context, fs): """ Test that both ZIP and YAML can be imported. 
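For context on the recurring merge-to-add changes in these tests: db.session.merge() reconciles a detached copy by primary key and returns a separate attached instance, while db.session.add() attaches the given object itself. For objects freshly constructed in a test, add() is the idiomatic call; for objects already loaded from the same session, the merge() was a no-op and is simply dropped. A minimal sketch (table name hypothetical):

    from superset import db
    from superset.connectors.sqla.models import SqlaTable
    from superset.utils.database import get_example_database

    dataset = SqlaTable(
        table_name="example_table",
        database=get_example_database(),
    )
    # add() attaches this exact instance; merge() would have copied its
    # state onto another instance resolved through the identity map.
    db.session.add(dataset)
    db.session.commit()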
@@ -347,7 +347,7 @@ def test_import_datasets_versioned_export(import_datasets_command, app_context, @mock.patch.dict( "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": False}, clear=True ) -@mock.patch("superset.datasets.commands.importers.v0.ImportDatasetsCommand") +@mock.patch("superset.commands.dataset.importers.v0.ImportDatasetsCommand") def test_import_datasets_sync_argument_columns_metrics( import_datasets_command, app_context, fs ): @@ -384,7 +384,7 @@ def test_import_datasets_sync_argument_columns_metrics( @mock.patch.dict( "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": False}, clear=True ) -@mock.patch("superset.datasets.commands.importers.v0.ImportDatasetsCommand") +@mock.patch("superset.commands.dataset.importers.v0.ImportDatasetsCommand") def test_import_datasets_sync_argument_columns( import_datasets_command, app_context, fs ): @@ -421,7 +421,7 @@ def test_import_datasets_sync_argument_columns( @mock.patch.dict( "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": False}, clear=True ) -@mock.patch("superset.datasets.commands.importers.v0.ImportDatasetsCommand") +@mock.patch("superset.commands.dataset.importers.v0.ImportDatasetsCommand") def test_import_datasets_sync_argument_metrics( import_datasets_command, app_context, fs ): @@ -459,7 +459,7 @@ def test_import_datasets_sync_argument_metrics( "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": True}, clear=True ) @mock.patch( - "superset.datasets.commands.importers.dispatcher.ImportDatasetsCommand.run", + "superset.commands.dataset.importers.dispatcher.ImportDatasetsCommand.run", side_effect=Exception(), ) def test_failing_import_datasets_versioned_export( diff --git a/tests/integration_tests/conftest.py b/tests/integration_tests/conftest.py index 28da7b7913..3e6aa96307 100644 --- a/tests/integration_tests/conftest.py +++ b/tests/integration_tests/conftest.py @@ -326,7 +326,8 @@ def virtual_dataset(): TableColumn(column_name="col5", type="VARCHAR(255)", table=dataset) SqlMetric(metric_name="count", expression="count(*)", table=dataset) - db.session.merge(dataset) + db.session.add(dataset) + db.session.commit() yield dataset @@ -390,7 +391,7 @@ def physical_dataset(): table=dataset, ) SqlMetric(metric_name="count", expression="count(*)", table=dataset) - db.session.merge(dataset) + db.session.add(dataset) db.session.commit() yield dataset @@ -425,7 +426,8 @@ def virtual_dataset_comma_in_column_value(): TableColumn(column_name="col2", type="VARCHAR(255)", table=dataset) SqlMetric(metric_name="count", expression="count(*)", table=dataset) - db.session.merge(dataset) + db.session.add(dataset) + db.session.commit() yield dataset diff --git a/tests/integration_tests/core_tests.py b/tests/integration_tests/core_tests.py index 3157ddd649..c4a0897332 100644 --- a/tests/integration_tests/core_tests.py +++ b/tests/integration_tests/core_tests.py @@ -35,8 +35,8 @@ from sqlalchemy.exc import SQLAlchemyError import superset.utils.database import superset.views.utils from superset import dataframe, db, security_manager, sql_lab -from superset.charts.commands.exceptions import ChartDataQueryFailedError -from superset.charts.data.commands.get_data_command import ChartDataCommand +from superset.commands.chart.data.get_data_command import ChartDataCommand +from superset.commands.chart.exceptions import ChartDataQueryFailedError from superset.common.db_query_status import QueryStatus from superset.connectors.sqla.models import SqlaTable from superset.db_engine_specs.base import BaseEngineSpec @@ -713,10 +713,17 @@ class 
TestCore(SupersetTestCase): data = json.loads(rv.data.decode("utf-8")) keys = list(data.keys()) - self.assertEqual(rv.status_code, 202) - self.assertCountEqual( - keys, ["channel_id", "job_id", "user_id", "status", "errors", "result_url"] - ) + # If chart is cached, it will return 200, otherwise 202 + assert rv.status_code in {200, 202} + if rv.status_code == 202: + assert keys == [ + "channel_id", + "job_id", + "user_id", + "status", + "errors", + "result_url", + ] @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") @mock.patch.dict( @@ -1164,7 +1171,7 @@ class TestCore(SupersetTestCase): self.assertIn("Error message", data) @pytest.mark.usefixtures("load_energy_table_with_slice") - @mock.patch("superset.explore.form_data.commands.create.CreateFormDataCommand.run") + @mock.patch("superset.commands.explore.form_data.create.CreateFormDataCommand.run") def test_explore_redirect(self, mock_command: mock.Mock): self.login(username="admin") random_key = "random_key" diff --git a/tests/integration_tests/css_templates/api_tests.py b/tests/integration_tests/css_templates/api_tests.py index b28cca955c..ceb46f553b 100644 --- a/tests/integration_tests/css_templates/api_tests.py +++ b/tests/integration_tests/css_templates/api_tests.py @@ -19,6 +19,8 @@ import json import pytest import prison +from datetime import datetime +from freezegun import freeze_time from sqlalchemy.sql import func import tests.integration_tests.test_app @@ -189,20 +191,27 @@ class TestCssTemplateApi(SupersetTestCase): """ CSS Template API: Test get CSS Template """ - css_template = ( - db.session.query(CssTemplate) - .filter(CssTemplate.template_name == "template_name1") - .one_or_none() - ) - self.login(username="admin") - uri = f"api/v1/css_template/{css_template.id}" - rv = self.get_assert_metric(uri, "get") + with freeze_time(datetime.now()): + css_template = ( + db.session.query(CssTemplate) + .filter(CssTemplate.template_name == "template_name1") + .one_or_none() + ) + self.login(username="admin") + uri = f"api/v1/css_template/{css_template.id}" + rv = self.get_assert_metric(uri, "get") assert rv.status_code == 200 expected_result = { "id": css_template.id, "template_name": "template_name1", "css": "css1", + "changed_by": { + "first_name": css_template.created_by.first_name, + "id": css_template.created_by.id, + "last_name": css_template.created_by.last_name, + }, + "changed_on_delta_humanized": "now", "created_by": { "first_name": css_template.created_by.first_name, "id": css_template.created_by.id, diff --git a/tests/integration_tests/csv_upload_tests.py b/tests/integration_tests/csv_upload_tests.py index 9bc204ff06..741f4c1bc9 100644 --- a/tests/integration_tests/csv_upload_tests.py +++ b/tests/integration_tests/csv_upload_tests.py @@ -165,7 +165,6 @@ def upload_excel( "sheet_name": "Sheet1", "if_exists": "fail", "index_label": "test_label", - "mangle_dupe_cols": False, } if schema := utils.get_example_default_schema(): form_data["schema"] = schema diff --git a/tests/integration_tests/dashboard_tests.py b/tests/integration_tests/dashboard_tests.py index 0df9b22267..0275152231 100644 --- a/tests/integration_tests/dashboard_tests.py +++ b/tests/integration_tests/dashboard_tests.py @@ -78,8 +78,8 @@ class TestDashboard(SupersetTestCase): hidden_dash.slices = [slice] hidden_dash.published = False - db.session.merge(published_dash) - db.session.merge(hidden_dash) + db.session.add(published_dash) + db.session.add(hidden_dash) yield db.session.commit() self.revoke_public_access_to_table(table) @@ -137,8 +137,6 
@@ class TestDashboard(SupersetTestCase): # Make the births dash published so it can be seen births_dash = db.session.query(Dashboard).filter_by(slug="births").one() births_dash.published = True - - db.session.merge(births_dash) db.session.commit() # Try access before adding appropriate permissions. @@ -180,7 +178,6 @@ class TestDashboard(SupersetTestCase): dash = db.session.query(Dashboard).filter_by(slug="births").first() dash.owners = [security_manager.find_user("admin")] dash.created_by = security_manager.find_user("admin") - db.session.merge(dash) db.session.commit() res: Response = self.client.get("/superset/dashboard/births/") diff --git a/tests/integration_tests/dashboard_utils.py b/tests/integration_tests/dashboard_utils.py index c08a3ec292..41dd8dc978 100644 --- a/tests/integration_tests/dashboard_utils.py +++ b/tests/integration_tests/dashboard_utils.py @@ -59,11 +59,11 @@ def create_table_metadata( normalize_columns=False, always_filter_main_dttm=False, ) + db.session.add(table) if fetch_values_predicate: table.fetch_values_predicate = fetch_values_predicate table.database = database table.description = table_description - db.session.merge(table) db.session.commit() return table diff --git a/tests/integration_tests/dashboards/api_tests.py b/tests/integration_tests/dashboards/api_tests.py index cc7bc109b4..a5c44f9f08 100644 --- a/tests/integration_tests/dashboards/api_tests.py +++ b/tests/integration_tests/dashboards/api_tests.py @@ -176,6 +176,26 @@ class TestDashboardApi(SupersetTestCase, ApiOwnersTestCaseMixin, InsertChartMixi expected_values = [0, 1] if backend() == "presto" else [0, 1, 2] self.assertEqual(result[0]["column_types"], expected_values) + @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") + @patch("superset.dashboards.schemas.security_manager.has_guest_access") + @patch("superset.dashboards.schemas.security_manager.is_guest_user") + def test_get_dashboard_datasets_as_guest(self, is_guest_user, has_guest_access): + self.login(username="admin") + uri = "api/v1/dashboard/world_health/datasets" + is_guest_user.return_value = True + has_guest_access.return_value = True + response = self.get_assert_metric(uri, "get_datasets") + self.assertEqual(response.status_code, 200) + data = json.loads(response.data.decode("utf-8")) + dashboard = Dashboard.get("world_health") + expected_dataset_ids = {s.datasource_id for s in dashboard.slices} + result = data["result"] + actual_dataset_ids = {dataset["id"] for dataset in result} + self.assertEqual(actual_dataset_ids, expected_dataset_ids) + for dataset in result: + for excluded_key in ["database", "owners"]: + assert excluded_key not in dataset + @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") def test_get_dashboard_datasets_not_found(self): self.login(username="alpha") @@ -409,6 +429,29 @@ class TestDashboardApi(SupersetTestCase, ApiOwnersTestCaseMixin, InsertChartMixi db.session.delete(dashboard) db.session.commit() + @patch("superset.dashboards.schemas.security_manager.has_guest_access") + @patch("superset.dashboards.schemas.security_manager.is_guest_user") + def test_get_dashboard_as_guest(self, is_guest_user, has_guest_access): + """ + Dashboard API: Test get dashboard as guest + """ + admin = self.get_user("admin") + dashboard = self.insert_dashboard( + "title", "slug1", [admin.id], created_by=admin + ) + is_guest_user.return_value = True + has_guest_access.return_value = True + self.login(username="admin") + uri = f"api/v1/dashboard/{dashboard.id}" + rv = self.get_assert_metric(uri, "get")
self.assertEqual(rv.status_code, 200) + data = json.loads(rv.data.decode("utf-8")) + for excluded_key in ["changed_by", "changed_by_name", "owners"]: + assert excluded_key not in data["result"] + # rollback changes + db.session.delete(dashboard) + db.session.commit() + def test_info_dashboard(self): """ Dashboard API: Test info diff --git a/tests/integration_tests/dashboards/commands_tests.py b/tests/integration_tests/dashboards/commands_tests.py index 75bdd17bcf..175a8a3198 100644 --- a/tests/integration_tests/dashboards/commands_tests.py +++ b/tests/integration_tests/dashboards/commands_tests.py @@ -23,16 +23,16 @@ import yaml from werkzeug.utils import secure_filename from superset import db, security_manager -from superset.commands.exceptions import CommandInvalidError -from superset.commands.importers.exceptions import IncorrectVersionError -from superset.connectors.sqla.models import SqlaTable -from superset.dashboards.commands.exceptions import DashboardNotFoundError -from superset.dashboards.commands.export import ( +from superset.commands.dashboard.exceptions import DashboardNotFoundError +from superset.commands.dashboard.export import ( append_charts, ExportDashboardsCommand, get_default_position, ) -from superset.dashboards.commands.importers import v0, v1 +from superset.commands.dashboard.importers import v0, v1 +from superset.commands.exceptions import CommandInvalidError +from superset.commands.importers.exceptions import IncorrectVersionError +from superset.connectors.sqla.models import SqlaTable from superset.models.core import Database from superset.models.dashboard import Dashboard from superset.models.slice import Slice @@ -97,17 +97,11 @@ class TestExportDashboardsCommand(SupersetTestCase): "published": False, "uuid": str(example_dashboard.uuid), "position": { - "CHART-36bfc934": { - "children": [], - "id": "CHART-36bfc934", - "meta": {"height": 25, "sliceName": "Region Filter", "width": 2}, - "type": "CHART", - }, "CHART-37982887": { "children": [], "id": "CHART-37982887", "meta": { - "height": 25, + "height": 52, "sliceName": "World's Population", "width": 2, }, @@ -180,7 +174,7 @@ class TestExportDashboardsCommand(SupersetTestCase): "type": "COLUMN", }, "COLUMN-fe3914b8": { - "children": ["CHART-36bfc934", "CHART-37982887"], + "children": ["CHART-37982887"], "id": "COLUMN-fe3914b8", "meta": {"background": "BACKGROUND_TRANSPARENT", "width": 2}, "type": "COLUMN", @@ -292,14 +286,16 @@ class TestExportDashboardsCommand(SupersetTestCase): ] @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") - @patch("superset.dashboards.commands.export.suffix") + @patch("superset.commands.dashboard.export.suffix") def test_append_charts(self, mock_suffix): """Test that orphaned charts are added to the dashboard position""" # return deterministic IDs mock_suffix.side_effect = (str(i) for i in itertools.count(1)) position = get_default_position("example") - chart_1 = db.session.query(Slice).filter_by(slice_name="Region Filter").one() + chart_1 = ( + db.session.query(Slice).filter_by(slice_name="World's Population").one() + ) new_position = append_charts(position, {chart_1}) assert new_position == { "DASHBOARD_VERSION_KEY": "v2", @@ -328,7 +324,7 @@ class TestExportDashboardsCommand(SupersetTestCase): "meta": { "chartId": chart_1.id, "height": 50, - "sliceName": "Region Filter", + "sliceName": "World's Population", "uuid": str(chart_1.uuid), "width": 4, }, @@ -375,7 +371,7 @@ class TestExportDashboardsCommand(SupersetTestCase): "meta": { "chartId": chart_1.id, "height": 50, - 
"sliceName": "Region Filter", + "sliceName": "World's Population", "uuid": str(chart_1.uuid), "width": 4, }, @@ -406,7 +402,7 @@ class TestExportDashboardsCommand(SupersetTestCase): "meta": { "chartId": chart_1.id, "height": 50, - "sliceName": "Region Filter", + "sliceName": "World's Population", "uuid": str(chart_1.uuid), "width": 4, }, @@ -490,7 +486,7 @@ class TestImportDashboardsCommand(SupersetTestCase): db.session.delete(dataset) db.session.commit() - @patch("superset.dashboards.commands.importers.v1.utils.g") + @patch("superset.commands.dashboard.importers.v1.utils.g") @patch("superset.security.manager.g") def test_import_v1_dashboard(self, sm_g, utils_g): """Test that we can import a dashboard""" diff --git a/tests/integration_tests/dashboards/dao_tests.py b/tests/integration_tests/dashboards/dao_tests.py index 91e27af3b6..65fc9e32dd 100644 --- a/tests/integration_tests/dashboards/dao_tests.py +++ b/tests/integration_tests/dashboards/dao_tests.py @@ -33,60 +33,6 @@ from tests.integration_tests.fixtures.world_bank_dashboard import ( class TestDashboardDAO(SupersetTestCase): - @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") - def test_set_dash_metadata(self): - dash: Dashboard = ( - db.session.query(Dashboard).filter_by(slug="world_health").first() - ) - data = dash.data - positions = data["position_json"] - data.update({"positions": positions}) - original_data = copy.deepcopy(data) - - # add filter scopes - filter_slice = next(slc for slc in dash.slices if slc.viz_type == "filter_box") - immune_slices = [slc for slc in dash.slices if slc != filter_slice] - filter_scopes = { - str(filter_slice.id): { - "region": { - "scope": ["ROOT_ID"], - "immune": [slc.id for slc in immune_slices], - } - } - } - data.update({"filter_scopes": json.dumps(filter_scopes)}) - DashboardDAO.set_dash_metadata(dash, data) - updated_metadata = json.loads(dash.json_metadata) - self.assertEqual(updated_metadata["filter_scopes"], filter_scopes) - - # remove a slice and change slice ids (as copy slices) - removed_slice = immune_slices.pop() - removed_components = [ - key - for (key, value) in positions.items() - if isinstance(value, dict) - and value.get("type") == "CHART" - and value["meta"]["chartId"] == removed_slice.id - ] - for component_id in removed_components: - del positions[component_id] - - data.update({"positions": positions}) - DashboardDAO.set_dash_metadata(dash, data) - updated_metadata = json.loads(dash.json_metadata) - expected_filter_scopes = { - str(filter_slice.id): { - "region": { - "scope": ["ROOT_ID"], - "immune": [slc.id for slc in immune_slices], - } - } - } - self.assertEqual(updated_metadata["filter_scopes"], expected_filter_scopes) - - # reset dash to original data - DashboardDAO.set_dash_metadata(dash, original_data) - @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") @patch("superset.utils.core.g") @patch("superset.security.manager.g") @@ -113,7 +59,6 @@ class TestDashboardDAO(SupersetTestCase): data.update({"foo": "bar"}) DashboardDAO.set_dash_metadata(dashboard, data) - db.session.merge(dashboard) db.session.commit() new_changed_on = DashboardDAO.get_dashboard_changed_on(dashboard) assert old_changed_on.replace(microsecond=0) < new_changed_on @@ -125,7 +70,6 @@ class TestDashboardDAO(SupersetTestCase): ) DashboardDAO.set_dash_metadata(dashboard, original_data) - db.session.merge(dashboard) db.session.commit() @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") diff --git a/tests/integration_tests/dashboards/dashboard_test_utils.py 
b/tests/integration_tests/dashboards/dashboard_test_utils.py index ee8001cdba..39bce02caa 100644 --- a/tests/integration_tests/dashboards/dashboard_test_utils.py +++ b/tests/integration_tests/dashboards/dashboard_test_utils.py @@ -110,12 +110,10 @@ def random_str(): def grant_access_to_dashboard(dashboard, role_name): role = security_manager.find_role(role_name) dashboard.roles.append(role) - db.session.merge(dashboard) db.session.commit() def revoke_access_to_dashboard(dashboard, role_name): role = security_manager.find_role(role_name) dashboard.roles.remove(role) - db.session.merge(dashboard) db.session.commit() diff --git a/tests/integration_tests/dashboards/filter_state/api_tests.py b/tests/integration_tests/dashboards/filter_state/api_tests.py index 15b479686a..3538e14012 100644 --- a/tests/integration_tests/dashboards/filter_state/api_tests.py +++ b/tests/integration_tests/dashboards/filter_state/api_tests.py @@ -22,10 +22,10 @@ from flask.ctx import AppContext from flask_appbuilder.security.sqla.models import User from sqlalchemy.orm import Session -from superset.dashboards.commands.exceptions import DashboardAccessDeniedError +from superset.commands.dashboard.exceptions import DashboardAccessDeniedError +from superset.commands.temporary_cache.entry import Entry from superset.extensions import cache_manager from superset.models.dashboard import Dashboard -from superset.temporary_cache.commands.entry import Entry from superset.temporary_cache.utils import cache_key from tests.integration_tests.fixtures.world_bank_dashboard import ( load_world_bank_dashboard_with_slices, diff --git a/tests/integration_tests/dashboards/permalink/api_tests.py b/tests/integration_tests/dashboards/permalink/api_tests.py index 3c560a4469..a49f1e6f4c 100644 --- a/tests/integration_tests/dashboards/permalink/api_tests.py +++ b/tests/integration_tests/dashboards/permalink/api_tests.py @@ -23,7 +23,7 @@ from flask_appbuilder.security.sqla.models import User from sqlalchemy.orm import Session from superset import db -from superset.dashboards.commands.exceptions import DashboardAccessDeniedError +from superset.commands.dashboard.exceptions import DashboardAccessDeniedError from superset.key_value.models import KeyValueEntry from superset.key_value.types import KeyValueResource from superset.key_value.utils import decode_permalink_id diff --git a/tests/integration_tests/dashboards/security/security_dataset_tests.py b/tests/integration_tests/dashboards/security/security_dataset_tests.py index 54e8b81442..4ccfa981b1 100644 --- a/tests/integration_tests/dashboards/security/security_dataset_tests.py +++ b/tests/integration_tests/dashboards/security/security_dataset_tests.py @@ -61,8 +61,8 @@ class TestDashboardDatasetSecurity(DashboardTestCase): hidden_dash.slices = [slice] hidden_dash.published = False - db.session.merge(published_dash) - db.session.merge(hidden_dash) + db.session.add(published_dash) + db.session.add(hidden_dash) yield db.session.commit() self.revoke_public_access_to_table(table) @@ -192,4 +192,4 @@ class TestDashboardDatasetSecurity(DashboardTestCase): self.assert200(rv) data = json.loads(rv.data.decode("utf-8")) self.assertEqual(0, data["count"]) - DashboardDAO.delete(dashboard) + DashboardDAO.delete([dashboard]) diff --git a/tests/integration_tests/dashboards/security/security_rbac_tests.py b/tests/integration_tests/dashboards/security/security_rbac_tests.py index 8b7f2ad1ef..792c9d1716 100644 --- a/tests/integration_tests/dashboards/security/security_rbac_tests.py +++ 
b/tests/integration_tests/dashboards/security/security_rbac_tests.py @@ -21,8 +21,8 @@ from unittest.mock import patch import pytest +from superset.commands.dashboard.exceptions import DashboardForbiddenError from superset.daos.dashboard import DashboardDAO -from superset.dashboards.commands.exceptions import DashboardForbiddenError from superset.utils.core import backend, override_user from tests.integration_tests.conftest import with_feature_flags from tests.integration_tests.dashboards.dashboard_test_utils import * diff --git a/tests/integration_tests/databases/api_tests.py b/tests/integration_tests/databases/api_tests.py index cbdacc8f34..0bc1f245a1 100644 --- a/tests/integration_tests/databases/api_tests.py +++ b/tests/integration_tests/databases/api_tests.py @@ -197,6 +197,7 @@ class TestDatabaseApi(SupersetTestCase): "allows_subquery", "allows_virtual_table_explore", "backend", + "changed_by", "changed_on", "changed_on_delta_humanized", "created_by", @@ -288,9 +289,9 @@ class TestDatabaseApi(SupersetTestCase): db.session.commit() @mock.patch( - "superset.databases.commands.test_connection.TestConnectionDatabaseCommand.run", + "superset.commands.database.test_connection.TestConnectionDatabaseCommand.run", ) - @mock.patch("superset.databases.commands.create.is_feature_enabled") + @mock.patch("superset.commands.database.create.is_feature_enabled") @mock.patch( "superset.models.core.Database.get_all_schema_names", ) @@ -336,10 +337,10 @@ class TestDatabaseApi(SupersetTestCase): db.session.commit() @mock.patch( - "superset.databases.commands.test_connection.TestConnectionDatabaseCommand.run", + "superset.commands.database.test_connection.TestConnectionDatabaseCommand.run", ) - @mock.patch("superset.databases.commands.create.is_feature_enabled") - @mock.patch("superset.databases.commands.update.is_feature_enabled") + @mock.patch("superset.commands.database.create.is_feature_enabled") + @mock.patch("superset.commands.database.update.is_feature_enabled") @mock.patch( "superset.models.core.Database.get_all_schema_names", ) @@ -397,10 +398,10 @@ class TestDatabaseApi(SupersetTestCase): db.session.commit() @mock.patch( - "superset.databases.commands.test_connection.TestConnectionDatabaseCommand.run", + "superset.commands.database.test_connection.TestConnectionDatabaseCommand.run", ) - @mock.patch("superset.databases.commands.create.is_feature_enabled") - @mock.patch("superset.databases.commands.update.is_feature_enabled") + @mock.patch("superset.commands.database.create.is_feature_enabled") + @mock.patch("superset.commands.database.update.is_feature_enabled") @mock.patch( "superset.models.core.Database.get_all_schema_names", ) @@ -477,12 +478,12 @@ class TestDatabaseApi(SupersetTestCase): db.session.commit() @mock.patch( - "superset.databases.commands.test_connection.TestConnectionDatabaseCommand.run", + "superset.commands.database.test_connection.TestConnectionDatabaseCommand.run", ) @mock.patch( "superset.models.core.Database.get_all_schema_names", ) - @mock.patch("superset.databases.commands.create.is_feature_enabled") + @mock.patch("superset.commands.database.create.is_feature_enabled") def test_cascade_delete_ssh_tunnel( self, mock_test_connection_database_command_run, @@ -531,9 +532,9 @@ class TestDatabaseApi(SupersetTestCase): assert model_ssh_tunnel is None @mock.patch( - "superset.databases.commands.test_connection.TestConnectionDatabaseCommand.run", + "superset.commands.database.test_connection.TestConnectionDatabaseCommand.run", ) - 
@mock.patch("superset.databases.commands.create.is_feature_enabled") + @mock.patch("superset.commands.database.create.is_feature_enabled") @mock.patch( "superset.models.core.Database.get_all_schema_names", ) @@ -582,9 +583,9 @@ class TestDatabaseApi(SupersetTestCase): assert model is None @mock.patch( - "superset.databases.commands.test_connection.TestConnectionDatabaseCommand.run", + "superset.commands.database.test_connection.TestConnectionDatabaseCommand.run", ) - @mock.patch("superset.databases.commands.create.is_feature_enabled") + @mock.patch("superset.commands.database.create.is_feature_enabled") @mock.patch( "superset.models.core.Database.get_all_schema_names", ) @@ -637,7 +638,7 @@ class TestDatabaseApi(SupersetTestCase): db.session.commit() @mock.patch( - "superset.databases.commands.test_connection.TestConnectionDatabaseCommand.run", + "superset.commands.database.test_connection.TestConnectionDatabaseCommand.run", ) @mock.patch( "superset.models.core.Database.get_all_schema_names", @@ -2005,10 +2006,10 @@ class TestDatabaseApi(SupersetTestCase): app.config["PREVENT_UNSAFE_DB_CONNECTIONS"] = False @mock.patch( - "superset.databases.commands.test_connection.DatabaseDAO.build_db_for_connection_test", + "superset.commands.database.test_connection.DatabaseDAO.build_db_for_connection_test", ) @mock.patch( - "superset.databases.commands.test_connection.event_logger", + "superset.commands.database.test_connection.event_logger", ) def test_test_connection_failed_invalid_hostname( self, mock_event_logger, mock_build_db @@ -2074,7 +2075,7 @@ class TestDatabaseApi(SupersetTestCase): rv = self.get_assert_metric(uri, "related_objects") self.assertEqual(rv.status_code, 200) response = json.loads(rv.data.decode("utf-8")) - self.assertEqual(response["charts"]["count"], 34) + self.assertEqual(response["charts"]["count"], 33) self.assertEqual(response["dashboards"]["count"], 3) def test_get_database_related_objects_not_found(self): @@ -3748,7 +3749,7 @@ class TestDatabaseApi(SupersetTestCase): }, ) - @patch("superset.databases.commands.validate_sql.get_validator_by_name") + @patch("superset.commands.database.validate_sql.get_validator_by_name") @patch.dict( "superset.config.SQL_VALIDATORS_BY_ENGINE", PRESTO_SQL_VALIDATORS_BY_ENGINE, diff --git a/tests/integration_tests/databases/commands_tests.py b/tests/integration_tests/databases/commands_tests.py index d5946d8b6d..b46e1b7ea3 100644 --- a/tests/integration_tests/databases/commands_tests.py +++ b/tests/integration_tests/databases/commands_tests.py @@ -23,11 +23,8 @@ from func_timeout import FunctionTimedOut from sqlalchemy.exc import DBAPIError from superset import db, event_logger, security_manager -from superset.commands.exceptions import CommandInvalidError -from superset.commands.importers.exceptions import IncorrectVersionError -from superset.connectors.sqla.models import SqlaTable -from superset.databases.commands.create import CreateDatabaseCommand -from superset.databases.commands.exceptions import ( +from superset.commands.database.create import CreateDatabaseCommand +from superset.commands.database.exceptions import ( DatabaseInvalidError, DatabaseNotFoundError, DatabaseSecurityUnsafeError, @@ -35,11 +32,14 @@ from superset.databases.commands.exceptions import ( DatabaseTestConnectionDriverError, DatabaseTestConnectionUnexpectedError, ) -from superset.databases.commands.export import ExportDatabasesCommand -from superset.databases.commands.importers.v1 import ImportDatabasesCommand -from superset.databases.commands.tables import 
diff --git a/tests/integration_tests/databases/commands_tests.py b/tests/integration_tests/databases/commands_tests.py
index d5946d8b6d..b46e1b7ea3 100644
--- a/tests/integration_tests/databases/commands_tests.py
+++ b/tests/integration_tests/databases/commands_tests.py
@@ -23,11 +23,8 @@ from func_timeout import FunctionTimedOut
 from sqlalchemy.exc import DBAPIError

 from superset import db, event_logger, security_manager
-from superset.commands.exceptions import CommandInvalidError
-from superset.commands.importers.exceptions import IncorrectVersionError
-from superset.connectors.sqla.models import SqlaTable
-from superset.databases.commands.create import CreateDatabaseCommand
-from superset.databases.commands.exceptions import (
+from superset.commands.database.create import CreateDatabaseCommand
+from superset.commands.database.exceptions import (
     DatabaseInvalidError,
     DatabaseNotFoundError,
     DatabaseSecurityUnsafeError,
@@ -35,11 +32,14 @@ from superset.databases.commands.exceptions import (
     DatabaseTestConnectionDriverError,
     DatabaseTestConnectionUnexpectedError,
 )
-from superset.databases.commands.export import ExportDatabasesCommand
-from superset.databases.commands.importers.v1 import ImportDatabasesCommand
-from superset.databases.commands.tables import TablesDatabaseCommand
-from superset.databases.commands.test_connection import TestConnectionDatabaseCommand
-from superset.databases.commands.validate import ValidateDatabaseParametersCommand
+from superset.commands.database.export import ExportDatabasesCommand
+from superset.commands.database.importers.v1 import ImportDatabasesCommand
+from superset.commands.database.tables import TablesDatabaseCommand
+from superset.commands.database.test_connection import TestConnectionDatabaseCommand
+from superset.commands.database.validate import ValidateDatabaseParametersCommand
+from superset.commands.exceptions import CommandInvalidError
+from superset.commands.importers.exceptions import IncorrectVersionError
+from superset.connectors.sqla.models import SqlaTable
 from superset.databases.schemas import DatabaseTestConnectionSchema
 from superset.databases.ssh_tunnel.models import SSHTunnel
 from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
@@ -75,7 +75,7 @@ from tests.integration_tests.fixtures.importexport import (


 class TestCreateDatabaseCommand(SupersetTestCase):
-    @patch("superset.databases.commands.test_connection.event_logger.log_with_context")
+    @patch("superset.commands.database.test_connection.event_logger.log_with_context")
     @patch("superset.utils.core.g")
     def test_create_duplicate_error(self, mock_g, mock_logger):
         example_db = get_example_database()
@@ -94,7 +94,7 @@ class TestCreateDatabaseCommand(SupersetTestCase):
             "DatabaseRequiredFieldValidationError"
         )

-    @patch("superset.databases.commands.test_connection.event_logger.log_with_context")
+    @patch("superset.commands.database.test_connection.event_logger.log_with_context")
     @patch("superset.utils.core.g")
     def test_multiple_error_logging(self, mock_g, mock_logger):
         mock_g.user = security_manager.find_user("admin")
@@ -834,7 +834,7 @@ class TestImportDatabasesCommand(SupersetTestCase):
             }
         }

-    @patch("superset.databases.commands.importers.v1.import_dataset")
+    @patch("superset.commands.database.importers.v1.import_dataset")
     def test_import_v1_rollback(self, mock_import_dataset):
         """Test than on an exception everything is rolled back"""
         num_databases = db.session.query(Database).count()
@@ -860,7 +860,7 @@ class TestImportDatabasesCommand(SupersetTestCase):

 class TestTestConnectionDatabaseCommand(SupersetTestCase):
     @patch("superset.daos.database.Database._get_sqla_engine")
-    @patch("superset.databases.commands.test_connection.event_logger.log_with_context")
+    @patch("superset.commands.database.test_connection.event_logger.log_with_context")
     @patch("superset.utils.core.g")
     def test_connection_db_exception(
         self, mock_g, mock_event_logger, mock_get_sqla_engine
@@ -881,7 +881,7 @@ class TestTestConnectionDatabaseCommand(SupersetTestCase):
         mock_event_logger.assert_called()

     @patch("superset.daos.database.Database._get_sqla_engine")
-    @patch("superset.databases.commands.test_connection.event_logger.log_with_context")
+    @patch("superset.commands.database.test_connection.event_logger.log_with_context")
     @patch("superset.utils.core.g")
     def test_connection_do_ping_exception(
         self, mock_g, mock_event_logger, mock_get_sqla_engine
@patch("superset.commands.database.test_connection.event_logger.log_with_context") @patch("superset.utils.core.g") def test_connection_do_ping_timeout( self, mock_g, mock_event_logger, mock_func_timeout @@ -926,7 +926,7 @@ class TestTestConnectionDatabaseCommand(SupersetTestCase): ) @patch("superset.daos.database.Database._get_sqla_engine") - @patch("superset.databases.commands.test_connection.event_logger.log_with_context") + @patch("superset.commands.database.test_connection.event_logger.log_with_context") @patch("superset.utils.core.g") def test_connection_superset_security_connection( self, mock_g, mock_event_logger, mock_get_sqla_engine @@ -949,7 +949,7 @@ class TestTestConnectionDatabaseCommand(SupersetTestCase): mock_event_logger.assert_called() @patch("superset.daos.database.Database._get_sqla_engine") - @patch("superset.databases.commands.test_connection.event_logger.log_with_context") + @patch("superset.commands.database.test_connection.event_logger.log_with_context") @patch("superset.utils.core.g") def test_connection_db_api_exc( self, mock_g, mock_event_logger, mock_get_sqla_engine @@ -975,7 +975,7 @@ class TestTestConnectionDatabaseCommand(SupersetTestCase): @patch("superset.db_engine_specs.base.is_hostname_valid") @patch("superset.db_engine_specs.base.is_port_open") -@patch("superset.databases.commands.validate.DatabaseDAO") +@patch("superset.commands.database.validate.DatabaseDAO") def test_validate(DatabaseDAO, is_port_open, is_hostname_valid, app_context): """ Test parameter validation. diff --git a/tests/integration_tests/databases/ssh_tunnel/commands/commands_tests.py b/tests/integration_tests/databases/ssh_tunnel/commands/commands_tests.py index 64bc0d8572..1cd9afcc80 100644 --- a/tests/integration_tests/databases/ssh_tunnel/commands/commands_tests.py +++ b/tests/integration_tests/databases/ssh_tunnel/commands/commands_tests.py @@ -20,13 +20,13 @@ from unittest.mock import patch import pytest from superset import security_manager -from superset.databases.ssh_tunnel.commands.create import CreateSSHTunnelCommand -from superset.databases.ssh_tunnel.commands.delete import DeleteSSHTunnelCommand -from superset.databases.ssh_tunnel.commands.exceptions import ( +from superset.commands.database.ssh_tunnel.create import CreateSSHTunnelCommand +from superset.commands.database.ssh_tunnel.delete import DeleteSSHTunnelCommand +from superset.commands.database.ssh_tunnel.exceptions import ( SSHTunnelInvalidError, SSHTunnelNotFoundError, ) -from superset.databases.ssh_tunnel.commands.update import UpdateSSHTunnelCommand +from superset.commands.database.ssh_tunnel.update import UpdateSSHTunnelCommand from tests.integration_tests.base_tests import SupersetTestCase @@ -67,7 +67,7 @@ class TestUpdateSSHTunnelCommand(SupersetTestCase): class TestDeleteSSHTunnelCommand(SupersetTestCase): @mock.patch("superset.utils.core.g") - @mock.patch("superset.databases.ssh_tunnel.commands.delete.is_feature_enabled") + @mock.patch("superset.commands.database.ssh_tunnel.delete.is_feature_enabled") def test_delete_ssh_tunnel_not_found(self, mock_g, mock_delete_is_feature_enabled): mock_g.user = security_manager.find_user("admin") mock_delete_is_feature_enabled.return_value = True diff --git a/tests/integration_tests/datasets/api_tests.py b/tests/integration_tests/datasets/api_tests.py index f060d36739..d969895489 100644 --- a/tests/integration_tests/datasets/api_tests.py +++ b/tests/integration_tests/datasets/api_tests.py @@ -30,13 +30,13 @@ from sqlalchemy.orm import joinedload from sqlalchemy.sql import 
diff --git a/tests/integration_tests/datasets/api_tests.py b/tests/integration_tests/datasets/api_tests.py
index f060d36739..d969895489 100644
--- a/tests/integration_tests/datasets/api_tests.py
+++ b/tests/integration_tests/datasets/api_tests.py
@@ -30,13 +30,13 @@ from sqlalchemy.orm import joinedload
 from sqlalchemy.sql import func

 from superset import app
+from superset.commands.dataset.exceptions import DatasetCreateFailedError
 from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn
 from superset.daos.exceptions import (
     DAOCreateFailedError,
     DAODeleteFailedError,
     DAOUpdateFailedError,
 )
-from superset.datasets.commands.exceptions import DatasetCreateFailedError
 from superset.datasets.models import Dataset
 from superset.extensions import db, security_manager
 from superset.models.core import Database
@@ -2458,7 +2458,7 @@ class TestDatasetApi(SupersetTestCase):
         response = json.loads(rv.data.decode("utf-8"))
         self.assertEqual(response["message"], {"database": ["Database does not exist"]})

-    @patch("superset.datasets.commands.create.CreateDatasetCommand.run")
+    @patch("superset.commands.dataset.create.CreateDatasetCommand.run")
     def test_get_or_create_dataset_create_fails(self, command_run_mock):
         """
         Dataset API: Test get or create endpoint when create fails
diff --git a/tests/integration_tests/datasets/commands_tests.py b/tests/integration_tests/datasets/commands_tests.py
index a718c81e29..1ea554a818 100644
--- a/tests/integration_tests/datasets/commands_tests.py
+++ b/tests/integration_tests/datasets/commands_tests.py
@@ -23,19 +23,19 @@ import yaml
 from sqlalchemy.exc import SQLAlchemyError

 from superset import db, security_manager
-from superset.commands.exceptions import CommandInvalidError
-from superset.commands.importers.exceptions import IncorrectVersionError
-from superset.connectors.sqla.models import SqlaTable
-from superset.databases.commands.importers.v1 import ImportDatabasesCommand
-from superset.datasets.commands.create import CreateDatasetCommand
-from superset.datasets.commands.exceptions import (
+from superset.commands.database.importers.v1 import ImportDatabasesCommand
+from superset.commands.dataset.create import CreateDatasetCommand
+from superset.commands.dataset.exceptions import (
     DatasetInvalidError,
     DatasetNotFoundError,
     WarmUpCacheTableNotFoundError,
 )
-from superset.datasets.commands.export import ExportDatasetsCommand
-from superset.datasets.commands.importers import v0, v1
-from superset.datasets.commands.warm_up_cache import DatasetWarmUpCacheCommand
+from superset.commands.dataset.export import ExportDatasetsCommand
+from superset.commands.dataset.importers import v0, v1
+from superset.commands.dataset.warm_up_cache import DatasetWarmUpCacheCommand
+from superset.commands.exceptions import CommandInvalidError
+from superset.commands.importers.exceptions import IncorrectVersionError
+from superset.connectors.sqla.models import SqlaTable
 from superset.models.core import Database
 from superset.models.slice import Slice
 from superset.utils.core import get_example_default_schema
@@ -339,7 +339,7 @@ class TestImportDatasetsCommand(SupersetTestCase):
         db.session.delete(dataset)
         db.session.commit()

-    @patch("superset.datasets.commands.importers.v1.utils.g")
+    @patch("superset.commands.dataset.importers.v1.utils.g")
     @patch("superset.security.manager.g")
     @pytest.mark.usefixtures("load_energy_table_with_slice")
     def test_import_v1_dataset(self, sm_g, utils_g):
diff --git a/tests/integration_tests/datasource_tests.py b/tests/integration_tests/datasource_tests.py
index c2865f7b63..5ab81b58d1 100644
--- a/tests/integration_tests/datasource_tests.py
+++ b/tests/integration_tests/datasource_tests.py
@@ -24,11 +24,11 @@ import prison
 import pytest

 from superset import app, db
+from superset.commands.dataset.exceptions import DatasetNotFoundError
 from superset.common.utils.query_cache_manager import QueryCacheManager
 from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn
 from superset.constants import CacheRegion
 from superset.daos.exceptions import DatasourceNotFound, DatasourceTypeNotSupportedError
-from superset.datasets.commands.exceptions import DatasetNotFoundError
 from superset.exceptions import SupersetGenericDBErrorException
 from superset.models.core import Database
 from superset.utils.core import backend, get_example_default_schema
@@ -550,7 +550,6 @@ def test_get_samples_with_incorrect_cc(test_client, login_as_admin, virtual_dataset):
         table=virtual_dataset,
         expression="INCORRECT SQL",
     )
-    db.session.merge(virtual_dataset)

     uri = (
         f"/datasource/samples?datasource_id={virtual_dataset.id}&datasource_type=table"
diff --git a/tests/integration_tests/explore/api_tests.py b/tests/integration_tests/explore/api_tests.py
index 50606257c2..e37200e310 100644
--- a/tests/integration_tests/explore/api_tests.py
+++ b/tests/integration_tests/explore/api_tests.py
@@ -21,9 +21,9 @@ import pytest
 from flask_appbuilder.security.sqla.models import User
 from sqlalchemy.orm import Session

+from superset.commands.explore.form_data.state import TemporaryExploreState
 from superset.connectors.sqla.models import SqlaTable
 from superset.explore.exceptions import DatasetAccessDeniedError
-from superset.explore.form_data.commands.state import TemporaryExploreState
 from superset.extensions import cache_manager
 from superset.models.slice import Slice
 from tests.integration_tests.fixtures.world_bank_dashboard import (
diff --git a/tests/integration_tests/explore/form_data/api_tests.py b/tests/integration_tests/explore/form_data/api_tests.py
index 0e73d0b516..5dbd67d4f5 100644
--- a/tests/integration_tests/explore/form_data/api_tests.py
+++ b/tests/integration_tests/explore/form_data/api_tests.py
@@ -21,9 +21,9 @@ import pytest
 from flask_appbuilder.security.sqla.models import User
 from sqlalchemy.orm import Session

+from superset.commands.dataset.exceptions import DatasetAccessDeniedError
+from superset.commands.explore.form_data.state import TemporaryExploreState
 from superset.connectors.sqla.models import SqlaTable
-from superset.datasets.commands.exceptions import DatasetAccessDeniedError
-from superset.explore.form_data.commands.state import TemporaryExploreState
 from superset.extensions import cache_manager
 from superset.models.slice import Slice
 from superset.utils.core import DatasourceType
diff --git a/tests/integration_tests/explore/form_data/commands_tests.py b/tests/integration_tests/explore/form_data/commands_tests.py
index 18dd8415f6..781c4fdbb2 100644
--- a/tests/integration_tests/explore/form_data/commands_tests.py
+++ b/tests/integration_tests/explore/form_data/commands_tests.py
@@ -22,12 +22,12 @@ import pytest

 from superset import app, db, security, security_manager
 from superset.commands.exceptions import DatasourceTypeInvalidError
+from superset.commands.explore.form_data.create import CreateFormDataCommand
+from superset.commands.explore.form_data.delete import DeleteFormDataCommand
+from superset.commands.explore.form_data.get import GetFormDataCommand
+from superset.commands.explore.form_data.parameters import CommandParameters
+from superset.commands.explore.form_data.update import UpdateFormDataCommand
 from superset.connectors.sqla.models import SqlaTable
-from superset.explore.form_data.commands.create import CreateFormDataCommand
-from superset.explore.form_data.commands.delete import DeleteFormDataCommand
-from superset.explore.form_data.commands.get import GetFormDataCommand
-from superset.explore.form_data.commands.parameters import CommandParameters
-from superset.explore.form_data.commands.update import UpdateFormDataCommand
 from superset.models.slice import Slice
 from superset.models.sql_lab import Query
 from superset.utils.core import DatasourceType, get_example_default_schema
diff --git a/tests/integration_tests/explore/permalink/commands_tests.py b/tests/integration_tests/explore/permalink/commands_tests.py
index eace978d78..5402a419bc 100644
--- a/tests/integration_tests/explore/permalink/commands_tests.py
+++ b/tests/integration_tests/explore/permalink/commands_tests.py
@@ -21,10 +21,10 @@ import pytest

 from superset import app, db, security, security_manager
 from superset.commands.exceptions import DatasourceTypeInvalidError
+from superset.commands.explore.form_data.parameters import CommandParameters
+from superset.commands.explore.permalink.create import CreateExplorePermalinkCommand
+from superset.commands.explore.permalink.get import GetExplorePermalinkCommand
 from superset.connectors.sqla.models import SqlaTable
-from superset.explore.form_data.commands.parameters import CommandParameters
-from superset.explore.permalink.commands.create import CreateExplorePermalinkCommand
-from superset.explore.permalink.commands.get import GetExplorePermalinkCommand
 from superset.key_value.utils import decode_permalink_id
 from superset.models.slice import Slice
 from superset.models.sql_lab import Query
@@ -138,8 +138,8 @@ class TestCreatePermalinkDataCommand(SupersetTestCase):
         assert cache_data.get("datasource") == datasource

     @patch("superset.security.manager.g")
-    @patch("superset.key_value.commands.get.GetKeyValueCommand.run")
-    @patch("superset.explore.permalink.commands.get.decode_permalink_id")
+    @patch("superset.commands.key_value.get.GetKeyValueCommand.run")
+    @patch("superset.commands.explore.permalink.get.decode_permalink_id")
     @pytest.mark.usefixtures("create_dataset", "create_slice")
     def test_get_permalink_command_with_old_dataset_key(
         self, decode_id_mock, get_kv_command_mock, mock_g
diff --git a/tests/integration_tests/fixtures/energy_dashboard.py b/tests/integration_tests/fixtures/energy_dashboard.py
index 5b4690f572..9687fb4aff 100644
--- a/tests/integration_tests/fixtures/energy_dashboard.py
+++ b/tests/integration_tests/fixtures/energy_dashboard.py
@@ -82,8 +82,6 @@ def _create_energy_table() -> list[Slice]:
         table.metrics.append(
             SqlMetric(metric_name="sum__value", expression=f"SUM({col})")
         )
-    db.session.merge(table)
-    db.session.commit()
     table.fetch_metadata()

     slices = []
diff --git a/tests/integration_tests/import_export_tests.py b/tests/integration_tests/import_export_tests.py
index 5dc8143f77..c195e3a4cb 100644
--- a/tests/integration_tests/import_export_tests.py
+++ b/tests/integration_tests/import_export_tests.py
@@ -32,12 +32,12 @@ from tests.integration_tests.fixtures.energy_dashboard import (
     load_energy_table_data,
 )
 from tests.integration_tests.test_app import app
-from superset.dashboards.commands.importers.v0 import decode_dashboards
+from superset.commands.dashboard.importers.v0 import decode_dashboards
 from superset import db, security_manager
 from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn
-from superset.dashboards.commands.importers.v0 import import_chart, import_dashboard
-from superset.datasets.commands.importers.v0 import import_dataset
+from superset.commands.dashboard.importers.v0 import import_chart, import_dashboard
+from superset.commands.dataset.importers.v0 import import_dataset
 from superset.models.dashboard import Dashboard
 from superset.models.slice import Slice
 from superset.utils.core import DatasourceType, get_example_default_schema
diff --git a/tests/integration_tests/importexport/commands_tests.py b/tests/integration_tests/importexport/commands_tests.py
index ceaf097565..9e8f790260 100644
--- a/tests/integration_tests/importexport/commands_tests.py
+++ b/tests/integration_tests/importexport/commands_tests.py
@@ -21,7 +21,7 @@ import yaml
 from freezegun import freeze_time

 from superset import security_manager
-from superset.databases.commands.export import ExportDatabasesCommand
+from superset.commands.database.export import ExportDatabasesCommand
 from superset.utils.database import get_example_database
 from tests.integration_tests.base_tests import SupersetTestCase
diff --git a/tests/integration_tests/key_value/commands/create_test.py b/tests/integration_tests/key_value/commands/create_test.py
index a2ee3d13ae..494456fa0c 100644
--- a/tests/integration_tests/key_value/commands/create_test.py
+++ b/tests/integration_tests/key_value/commands/create_test.py
@@ -37,7 +37,7 @@ from tests.integration_tests.key_value.commands.fixtures import (


 def test_create_id_entry(app_context: AppContext, admin: User) -> None:
-    from superset.key_value.commands.create import CreateKeyValueCommand
+    from superset.commands.key_value.create import CreateKeyValueCommand
     from superset.key_value.models import KeyValueEntry

     with override_user(admin):
@@ -46,9 +46,7 @@ def test_create_id_entry(app_context: AppContext, admin: User) -> None:
             value=JSON_VALUE,
             codec=JSON_CODEC,
         ).run()
-    entry = (
-        db.session.query(KeyValueEntry).filter_by(id=key.id).autoflush(False).one()
-    )
+    entry = db.session.query(KeyValueEntry).filter_by(id=key.id).one()
     assert json.loads(entry.value) == JSON_VALUE
     assert entry.created_by_fk == admin.id
     db.session.delete(entry)
@@ -56,16 +54,14 @@ def test_create_id_entry(app_context: AppContext, admin: User) -> None:


 def test_create_uuid_entry(app_context: AppContext, admin: User) -> None:
-    from superset.key_value.commands.create import CreateKeyValueCommand
+    from superset.commands.key_value.create import CreateKeyValueCommand
     from superset.key_value.models import KeyValueEntry

     with override_user(admin):
         key = CreateKeyValueCommand(
             resource=RESOURCE, value=JSON_VALUE, codec=JSON_CODEC
         ).run()
-    entry = (
-        db.session.query(KeyValueEntry).filter_by(uuid=key.uuid).autoflush(False).one()
-    )
+    entry = db.session.query(KeyValueEntry).filter_by(uuid=key.uuid).one()
     assert json.loads(entry.value) == JSON_VALUE
     assert entry.created_by_fk == admin.id
     db.session.delete(entry)
@@ -73,7 +69,7 @@ def test_create_uuid_entry(app_context: AppContext, admin: User) -> None:


 def test_create_fail_json_entry(app_context: AppContext, admin: User) -> None:
-    from superset.key_value.commands.create import CreateKeyValueCommand
+    from superset.commands.key_value.create import CreateKeyValueCommand

     with pytest.raises(KeyValueCreateFailedError):
         CreateKeyValueCommand(
@@ -84,7 +80,7 @@ def test_create_fail_json_entry(app_context: AppContext, admin: User) -> None:


 def test_create_pickle_entry(app_context: AppContext, admin: User) -> None:
-    from superset.key_value.commands.create import CreateKeyValueCommand
+    from superset.commands.key_value.create import CreateKeyValueCommand
     from superset.key_value.models import KeyValueEntry

     with override_user(admin):
@@ -93,9 +89,7 @@ def test_create_pickle_entry(app_context: AppContext, admin: User) -> None:
             value=PICKLE_VALUE,
             codec=PICKLE_CODEC,
         ).run()
-    entry = (
-        db.session.query(KeyValueEntry).filter_by(id=key.id).autoflush(False).one()
-    )
+    entry = db.session.query(KeyValueEntry).filter_by(id=key.id).one()
     assert type(pickle.loads(entry.value)) == type(PICKLE_VALUE)
     assert entry.created_by_fk == admin.id
     db.session.delete(entry)
diff --git a/tests/integration_tests/key_value/commands/delete_test.py b/tests/integration_tests/key_value/commands/delete_test.py
index 3c4892faa6..706aab8880 100644
--- a/tests/integration_tests/key_value/commands/delete_test.py
+++ b/tests/integration_tests/key_value/commands/delete_test.py
@@ -58,7 +58,7 @@ def test_delete_id_entry(
     admin: User,
     key_value_entry: KeyValueEntry,
 ) -> None:
-    from superset.key_value.commands.delete import DeleteKeyValueCommand
+    from superset.commands.key_value.delete import DeleteKeyValueCommand

     assert DeleteKeyValueCommand(resource=RESOURCE, key=ID_KEY).run() is True

@@ -68,7 +68,7 @@ def test_delete_uuid_entry(
     admin: User,
     key_value_entry: KeyValueEntry,
 ) -> None:
-    from superset.key_value.commands.delete import DeleteKeyValueCommand
+    from superset.commands.key_value.delete import DeleteKeyValueCommand

     assert DeleteKeyValueCommand(resource=RESOURCE, key=UUID_KEY).run() is True

@@ -78,6 +78,6 @@ def test_delete_entry_missing(
     admin: User,
     key_value_entry: KeyValueEntry,
 ) -> None:
-    from superset.key_value.commands.delete import DeleteKeyValueCommand
+    from superset.commands.key_value.delete import DeleteKeyValueCommand

     assert DeleteKeyValueCommand(resource=RESOURCE, key=456).run() is False
diff --git a/tests/integration_tests/key_value/commands/get_test.py b/tests/integration_tests/key_value/commands/get_test.py
index 28a6dd73d5..b14c64f752 100644
--- a/tests/integration_tests/key_value/commands/get_test.py
+++ b/tests/integration_tests/key_value/commands/get_test.py
@@ -38,7 +38,7 @@ if TYPE_CHECKING:


 def test_get_id_entry(app_context: AppContext, key_value_entry: KeyValueEntry) -> None:
-    from superset.key_value.commands.get import GetKeyValueCommand
+    from superset.commands.key_value.get import GetKeyValueCommand

     value = GetKeyValueCommand(resource=RESOURCE, key=ID_KEY, codec=JSON_CODEC).run()
     assert value == JSON_VALUE
@@ -47,7 +47,7 @@ def test_get_id_entry(app_context: AppContext, key_value_entry: KeyValueEntry) -> None:
 def test_get_uuid_entry(
     app_context: AppContext, key_value_entry: KeyValueEntry
 ) -> None:
-    from superset.key_value.commands.get import GetKeyValueCommand
+    from superset.commands.key_value.get import GetKeyValueCommand

     value = GetKeyValueCommand(resource=RESOURCE, key=UUID_KEY, codec=JSON_CODEC).run()
     assert value == JSON_VALUE
@@ -57,14 +57,14 @@ def test_get_id_entry_missing(
     app_context: AppContext,
     key_value_entry: KeyValueEntry,
 ) -> None:
-    from superset.key_value.commands.get import GetKeyValueCommand
+    from superset.commands.key_value.get import GetKeyValueCommand

     value = GetKeyValueCommand(resource=RESOURCE, key=456, codec=JSON_CODEC).run()
     assert value is None


 def test_get_expired_entry(app_context: AppContext) -> None:
-    from superset.key_value.commands.get import GetKeyValueCommand
+    from superset.commands.key_value.get import GetKeyValueCommand
     from superset.key_value.models import KeyValueEntry

     entry = KeyValueEntry(
@@ -83,7 +83,7 @@ def test_get_expired_entry(app_context: AppContext) -> None:


 def test_get_future_expiring_entry(app_context: AppContext) -> None:
-    from superset.key_value.commands.get import GetKeyValueCommand
+    from superset.commands.key_value.get import GetKeyValueCommand
     from superset.key_value.models import KeyValueEntry

     id_ = 789
diff --git a/tests/integration_tests/key_value/commands/update_test.py b/tests/integration_tests/key_value/commands/update_test.py
index 2c0fc3e31d..62d118b197 100644
--- a/tests/integration_tests/key_value/commands/update_test.py
+++ b/tests/integration_tests/key_value/commands/update_test.py
@@ -45,7 +45,7 @@ def test_update_id_entry(
     admin: User,
     key_value_entry: KeyValueEntry,
 ) -> None:
-    from superset.key_value.commands.update import UpdateKeyValueCommand
+    from superset.commands.key_value.update import UpdateKeyValueCommand
     from superset.key_value.models import KeyValueEntry

     with override_user(admin):
@@ -57,7 +57,7 @@ def test_update_id_entry(
         ).run()
     assert key is not None
     assert key.id == ID_KEY
-    entry = db.session.query(KeyValueEntry).filter_by(id=ID_KEY).autoflush(False).one()
+    entry = db.session.query(KeyValueEntry).filter_by(id=ID_KEY).one()
     assert json.loads(entry.value) == NEW_VALUE
     assert entry.changed_by_fk == admin.id

@@ -67,7 +67,7 @@ def test_update_uuid_entry(
     admin: User,
     key_value_entry: KeyValueEntry,
 ) -> None:
-    from superset.key_value.commands.update import UpdateKeyValueCommand
+    from superset.commands.key_value.update import UpdateKeyValueCommand
     from superset.key_value.models import KeyValueEntry

     with override_user(admin):
@@ -79,15 +79,13 @@ def test_update_uuid_entry(
         ).run()
     assert key is not None
     assert key.uuid == UUID_KEY
-    entry = (
-        db.session.query(KeyValueEntry).filter_by(uuid=UUID_KEY).autoflush(False).one()
-    )
+    entry = db.session.query(KeyValueEntry).filter_by(uuid=UUID_KEY).one()
     assert json.loads(entry.value) == NEW_VALUE
     assert entry.changed_by_fk == admin.id


 def test_update_missing_entry(app_context: AppContext, admin: User) -> None:
-    from superset.key_value.commands.update import UpdateKeyValueCommand
+    from superset.commands.key_value.update import UpdateKeyValueCommand

     with override_user(admin):
         key = UpdateKeyValueCommand(
diff --git a/tests/integration_tests/key_value/commands/upsert_test.py b/tests/integration_tests/key_value/commands/upsert_test.py
index c26b66d02e..b23ddaee97 100644
--- a/tests/integration_tests/key_value/commands/upsert_test.py
+++ b/tests/integration_tests/key_value/commands/upsert_test.py
@@ -45,7 +45,7 @@ def test_upsert_id_entry(
     admin: User,
     key_value_entry: KeyValueEntry,
 ) -> None:
-    from superset.key_value.commands.upsert import UpsertKeyValueCommand
+    from superset.commands.key_value.upsert import UpsertKeyValueCommand
     from superset.key_value.models import KeyValueEntry

     with override_user(admin):
@@ -57,9 +57,7 @@ def test_upsert_id_entry(
         ).run()
     assert key is not None
     assert key.id == ID_KEY
-    entry = (
-        db.session.query(KeyValueEntry).filter_by(id=int(ID_KEY)).autoflush(False).one()
-    )
+    entry = db.session.query(KeyValueEntry).filter_by(id=int(ID_KEY)).one()
     assert json.loads(entry.value) == NEW_VALUE
     assert entry.changed_by_fk == admin.id

@@ -69,7 +67,7 @@ def test_upsert_uuid_entry(
     admin: User,
     key_value_entry: KeyValueEntry,
 ) -> None:
-    from superset.key_value.commands.upsert import UpsertKeyValueCommand
+    from superset.commands.key_value.upsert import UpsertKeyValueCommand
     from superset.key_value.models import KeyValueEntry

     with override_user(admin):
@@ -81,15 +79,13 @@ def test_upsert_uuid_entry(
         ).run()
     assert key is not None
     assert key.uuid == UUID_KEY
-    entry = (
-        db.session.query(KeyValueEntry).filter_by(uuid=UUID_KEY).autoflush(False).one()
-    )
+    entry = db.session.query(KeyValueEntry).filter_by(uuid=UUID_KEY).one()
    assert json.loads(entry.value) == NEW_VALUE
     assert entry.changed_by_fk == admin.id


 def test_upsert_missing_entry(app_context: AppContext, admin: User) -> None:
-    from superset.key_value.commands.upsert import UpsertKeyValueCommand
+    from superset.commands.key_value.upsert import UpsertKeyValueCommand
     from superset.key_value.models import KeyValueEntry

     with override_user(admin):
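[Editor's note] The four key_value test files above also drop `.autoflush(False)` from their verification queries. `Query.autoflush(False)` suppresses the automatic flush that normally precedes a query; once the command under test commits its own work, a plain query is equivalent. A sketch of the two forms, using the names from the tests above:

# New form: rely on the default flush behavior.
entry = db.session.query(KeyValueEntry).filter_by(id=key.id).one()

# Old form, roughly equivalent to querying inside a no-autoflush block:
with db.session.no_autoflush:
    entry = db.session.query(KeyValueEntry).filter_by(id=key.id).one()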
new_query_context["form_data"]["viz_type"] == MigrateAreaChart.target_viz_type - ) - - # downgrade - slc = MigrateAreaChart.downgrade_slice(slc) - assert slc.viz_type == MigrateAreaChart.source_viz_type - assert json.dumps(json.loads(slc.params), sort_keys=True) == json.dumps( - json.loads(area_form_data), sort_keys=True - ) diff --git a/tests/integration_tests/migrations/c747c78868b6_migrating_legacy_treemap__tests.py b/tests/integration_tests/migrations/c747c78868b6_migrating_legacy_treemap__tests.py index 3e9ef33092..e67d87fa13 100644 --- a/tests/integration_tests/migrations/c747c78868b6_migrating_legacy_treemap__tests.py +++ b/tests/integration_tests/migrations/c747c78868b6_migrating_legacy_treemap__tests.py @@ -68,7 +68,7 @@ def test_treemap_migrate(app_context: SupersetApp) -> None: query_context=f'{{"form_data": {treemap_form_data}}}', ) - slc = MigrateTreeMap.upgrade_slice(slc) + MigrateTreeMap.upgrade_slice(slc) assert slc.viz_type == MigrateTreeMap.target_viz_type # verify form_data new_form_data = json.loads(slc.params) @@ -84,7 +84,7 @@ def test_treemap_migrate(app_context: SupersetApp) -> None: assert new_query_context["form_data"]["viz_type"] == "treemap_v2" # downgrade - slc = MigrateTreeMap.downgrade_slice(slc) + MigrateTreeMap.downgrade_slice(slc) assert slc.viz_type == MigrateTreeMap.source_viz_type assert json.dumps(json.loads(slc.params), sort_keys=True) == json.dumps( json.loads(treemap_form_data), sort_keys=True diff --git a/tests/integration_tests/queries/saved_queries/api_tests.py b/tests/integration_tests/queries/saved_queries/api_tests.py index 09929e4d23..c51c0dcbf0 100644 --- a/tests/integration_tests/queries/saved_queries/api_tests.py +++ b/tests/integration_tests/queries/saved_queries/api_tests.py @@ -17,6 +17,7 @@ # isort:skip_file """Unit tests for Superset""" import json +from datetime import datetime from io import BytesIO from typing import Optional from zipfile import is_zipfile, ZipFile @@ -24,6 +25,7 @@ from zipfile import is_zipfile, ZipFile import yaml import pytest import prison +from freezegun import freeze_time from sqlalchemy.sql import func, and_ import tests.integration_tests.test_app @@ -507,14 +509,17 @@ class TestSavedQueryApi(SupersetTestCase): db.session.query(SavedQuery).filter(SavedQuery.label == "label1").all()[0] ) self.login(username="admin") - uri = f"api/v1/saved_query/{saved_query.id}" - rv = self.get_assert_metric(uri, "get") - assert rv.status_code == 200 + with freeze_time(datetime.now()): + uri = f"api/v1/saved_query/{saved_query.id}" + rv = self.get_assert_metric(uri, "get") + assert rv.status_code == 200 expected_result = { "id": saved_query.id, "database": {"id": saved_query.database.id, "database_name": "examples"}, "description": "cool description", + "changed_by": None, + "changed_on_delta_humanized": "now", "created_by": { "first_name": saved_query.created_by.first_name, "id": saved_query.created_by.id, @@ -527,9 +532,8 @@ class TestSavedQueryApi(SupersetTestCase): "template_parameters": None, } data = json.loads(rv.data.decode("utf-8")) - self.assertIn("changed_on_delta_humanized", data["result"]) for key, value in data["result"].items(): - if key not in ("changed_on_delta_humanized",): + if key != "changed_on": assert value == expected_result[key] def test_get_saved_query_not_found(self): diff --git a/tests/integration_tests/queries/saved_queries/commands_tests.py b/tests/integration_tests/queries/saved_queries/commands_tests.py index 5c7b862209..cccc409985 100644 --- 
diff --git a/tests/integration_tests/queries/saved_queries/commands_tests.py b/tests/integration_tests/queries/saved_queries/commands_tests.py
index 5c7b862209..cccc409985 100644
--- a/tests/integration_tests/queries/saved_queries/commands_tests.py
+++ b/tests/integration_tests/queries/saved_queries/commands_tests.py
@@ -23,13 +23,11 @@ import yaml
 from superset import db, security_manager
 from superset.commands.exceptions import CommandInvalidError
 from superset.commands.importers.exceptions import IncorrectVersionError
+from superset.commands.query.exceptions import SavedQueryNotFoundError
+from superset.commands.query.export import ExportSavedQueriesCommand
+from superset.commands.query.importers.v1 import ImportSavedQueriesCommand
 from superset.models.core import Database
 from superset.models.sql_lab import SavedQuery
-from superset.queries.saved_queries.commands.exceptions import SavedQueryNotFoundError
-from superset.queries.saved_queries.commands.export import ExportSavedQueriesCommand
-from superset.queries.saved_queries.commands.importers.v1 import (
-    ImportSavedQueriesCommand,
-)
 from superset.utils.database import get_example_database
 from tests.integration_tests.base_tests import SupersetTestCase
 from tests.integration_tests.fixtures.importexport import (
diff --git a/tests/integration_tests/reports/alert_tests.py b/tests/integration_tests/reports/alert_tests.py
index 76890a19e2..6664d65a9b 100644
--- a/tests/integration_tests/reports/alert_tests.py
+++ b/tests/integration_tests/reports/alert_tests.py
@@ -22,7 +22,7 @@ import pandas as pd
 import pytest
 from pytest_mock import MockFixture

-from superset.reports.commands.exceptions import AlertQueryError
+from superset.commands.report.exceptions import AlertQueryError
 from superset.reports.models import ReportCreationMethod, ReportScheduleType
 from superset.tasks.types import ExecutorType
 from superset.utils.database import get_example_database
@@ -64,7 +64,7 @@ def test_execute_query_as_report_executor(
     app_context: None,
     get_user,
 ) -> None:
-    from superset.reports.commands.alert import AlertCommand
+    from superset.commands.report.alert import AlertCommand
     from superset.reports.models import ReportSchedule

     with app.app_context():
@@ -86,7 +86,7 @@ def test_execute_query_as_report_executor(
         )
         command = AlertCommand(report_schedule=report_schedule)
         override_user_mock = mocker.patch(
-            "superset.reports.commands.alert.override_user"
+            "superset.commands.report.alert.override_user"
         )
         cm = (
             pytest.raises(type(expected_result))
@@ -103,10 +103,10 @@ def test_execute_query_as_report_executor(
 def test_execute_query_succeeded_no_retry(
     mocker: MockFixture, app_context: None
 ) -> None:
-    from superset.reports.commands.alert import AlertCommand
+    from superset.commands.report.alert import AlertCommand

     execute_query_mock = mocker.patch(
-        "superset.reports.commands.alert.AlertCommand._execute_query",
+        "superset.commands.report.alert.AlertCommand._execute_query",
         side_effect=lambda: pd.DataFrame([{"sample_col": 0}]),
     )

@@ -120,10 +120,10 @@ def test_execute_query_succeeded_no_retry(
 def test_execute_query_succeeded_with_retries(
     mocker: MockFixture, app_context: None
 ) -> None:
-    from superset.reports.commands.alert import AlertCommand, AlertQueryError
+    from superset.commands.report.alert import AlertCommand, AlertQueryError

     execute_query_mock = mocker.patch(
-        "superset.reports.commands.alert.AlertCommand._execute_query"
+        "superset.commands.report.alert.AlertCommand._execute_query"
     )

     query_executed_count = 0
@@ -150,10 +150,10 @@ def test_execute_query_succeeded_with_retries(


 def test_execute_query_failed_no_retry(mocker: MockFixture, app_context: None) -> None:
-    from superset.reports.commands.alert import AlertCommand, AlertQueryTimeout
+    from superset.commands.report.alert import AlertCommand, AlertQueryTimeout

     execute_query_mock = mocker.patch(
-        "superset.reports.commands.alert.AlertCommand._execute_query"
+        "superset.commands.report.alert.AlertCommand._execute_query"
     )

     def _mocked_execute_query() -> None:
@@ -172,10 +172,10 @@ def test_execute_query_failed_no_retry(mocker: MockFixture, app_context: None) -> None:

 def test_execute_query_failed_max_retries(
     mocker: MockFixture, app_context: None
 ) -> None:
-    from superset.reports.commands.alert import AlertCommand, AlertQueryError
+    from superset.commands.report.alert import AlertCommand, AlertQueryError

     execute_query_mock = mocker.patch(
-        "superset.reports.commands.alert.AlertCommand._execute_query"
+        "superset.commands.report.alert.AlertCommand._execute_query"
     )

     def _mocked_execute_query() -> None:
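[Editor's note] The retry tests above drive `AlertCommand` with a mocked `_execute_query`. A `side_effect` function can fail a fixed number of times before succeeding, which is how "succeeded with retries" scenarios are simulated. A sketch under the imports introduced by this patch (`execute_query_mock` is assumed to come from `mocker.patch(...)` as in the tests above):

import pandas as pd
from superset.commands.report.alert import AlertQueryError

attempts = {"count": 0}

def _flaky_query():
    attempts["count"] += 1
    if attempts["count"] < 3:  # fail twice, succeed on the third attempt
        raise AlertQueryError()
    return pd.DataFrame([{"sample_col": 0}])

execute_query_mock.side_effect = _flaky_query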
"superset.dashboards.permalink.commands.create.CreateDashboardPermalinkCommand.run" + "superset.commands.dashboard.permalink.create.CreateDashboardPermalinkCommand.run" ) def test_report_with_header_data( create_dashboard_permalink_mock: MagicMock, diff --git a/tests/integration_tests/reports/commands_tests.py b/tests/integration_tests/reports/commands_tests.py index 120559f8fd..939c9c0cfa 100644 --- a/tests/integration_tests/reports/commands_tests.py +++ b/tests/integration_tests/reports/commands_tests.py @@ -39,11 +39,7 @@ from slack_sdk.errors import ( from sqlalchemy.sql import func from superset import db -from superset.exceptions import SupersetException -from superset.models.core import Database -from superset.models.dashboard import Dashboard -from superset.models.slice import Slice -from superset.reports.commands.exceptions import ( +from superset.commands.report.exceptions import ( AlertQueryError, AlertQueryInvalidTypeError, AlertQueryMultipleColumnsError, @@ -58,11 +54,15 @@ from superset.reports.commands.exceptions import ( ReportScheduleSystemErrorsException, ReportScheduleWorkingTimeoutError, ) -from superset.reports.commands.execute import ( +from superset.commands.report.execute import ( AsyncExecuteReportScheduleCommand, BaseReportState, ) -from superset.reports.commands.log_prune import AsyncPruneReportScheduleLogCommand +from superset.commands.report.log_prune import AsyncPruneReportScheduleLogCommand +from superset.exceptions import SupersetException +from superset.models.core import Database +from superset.models.dashboard import Dashboard +from superset.models.slice import Slice from superset.reports.models import ( ReportDataFormat, ReportExecutionLog, @@ -1607,7 +1607,7 @@ def test_soft_timeout_alert(email_mock, create_alert_email_chart): """ from celery.exceptions import SoftTimeLimitExceeded - from superset.reports.commands.exceptions import AlertQueryTimeout + from superset.commands.report.exceptions import AlertQueryTimeout with patch.object( create_alert_email_chart.database.db_engine_spec, "execute", return_value=None @@ -1748,7 +1748,7 @@ def test_fail_screenshot(screenshot_mock, email_mock, create_report_email_chart) """ from celery.exceptions import SoftTimeLimitExceeded - from superset.reports.commands.exceptions import AlertQueryTimeout + from superset.commands.report.exceptions import AlertQueryTimeout screenshot_mock.side_effect = Exception("Unexpected error") with pytest.raises(ReportScheduleScreenshotFailedError): @@ -1919,7 +1919,6 @@ def test_grace_period_error_flap( # Change report_schedule to valid create_invalid_sql_alert_email_chart.sql = "SELECT 1 AS metric" create_invalid_sql_alert_email_chart.grace_period = 0 - db.session.merge(create_invalid_sql_alert_email_chart) db.session.commit() with freeze_time("2020-01-01T00:31:00Z"): @@ -1936,7 +1935,6 @@ def test_grace_period_error_flap( create_invalid_sql_alert_email_chart.sql = "SELECT 'first'" create_invalid_sql_alert_email_chart.grace_period = 10 - db.session.merge(create_invalid_sql_alert_email_chart) db.session.commit() # assert that after a success, when back to error we send the error notification @@ -1965,8 +1963,8 @@ def test_prune_log_soft_time_out(bulk_delete_logs, create_report_email_dashboard assert str(excinfo.value) == "SoftTimeLimitExceeded()" -@patch("superset.reports.commands.execute.logger") -@patch("superset.reports.commands.execute.create_notification") +@patch("superset.commands.report.execute.logger") +@patch("superset.commands.report.execute.create_notification") def 
@@ -1965,8 +1963,8 @@ def test_prune_log_soft_time_out(bulk_delete_logs, create_report_email_dashboard):
     assert str(excinfo.value) == "SoftTimeLimitExceeded()"


-@patch("superset.reports.commands.execute.logger")
-@patch("superset.reports.commands.execute.create_notification")
+@patch("superset.commands.report.execute.logger")
+@patch("superset.commands.report.execute.create_notification")
 def test__send_with_client_errors(notification_mock, logger_mock):
     notification_content = "I am some content"
     recipients = ["test@foo.com"]
@@ -1980,8 +1978,8 @@ def test__send_with_client_errors(notification_mock, logger_mock):
     )


-@patch("superset.reports.commands.execute.logger")
-@patch("superset.reports.commands.execute.create_notification")
+@patch("superset.commands.report.execute.logger")
+@patch("superset.commands.report.execute.create_notification")
 def test__send_with_multiple_errors(notification_mock, logger_mock):
     notification_content = "I am some content"
     recipients = ["test@foo.com", "test2@bar.com"]
@@ -2007,8 +2005,8 @@ def test__send_with_multiple_errors(notification_mock, logger_mock):
     )


-@patch("superset.reports.commands.execute.logger")
-@patch("superset.reports.commands.execute.create_notification")
+@patch("superset.commands.report.execute.logger")
+@patch("superset.commands.report.execute.create_notification")
 def test__send_with_server_errors(notification_mock, logger_mock):
     notification_content = "I am some content"
     recipients = ["test@foo.com"]
diff --git a/tests/integration_tests/reports/scheduler_tests.py b/tests/integration_tests/reports/scheduler_tests.py
index 29dd58273a..ee93ef48a4 100644
--- a/tests/integration_tests/reports/scheduler_tests.py
+++ b/tests/integration_tests/reports/scheduler_tests.py
@@ -154,11 +154,11 @@ def test_scheduler_feature_flag_off(execute_mock, is_feature_enabled, owners):


 @pytest.mark.usefixtures("owners")
-@patch("superset.reports.commands.execute.AsyncExecuteReportScheduleCommand.__init__")
-@patch("superset.reports.commands.execute.AsyncExecuteReportScheduleCommand.run")
+@patch("superset.commands.report.execute.AsyncExecuteReportScheduleCommand.__init__")
+@patch("superset.commands.report.execute.AsyncExecuteReportScheduleCommand.run")
 @patch("superset.tasks.scheduler.execute.update_state")
 def test_execute_task(update_state_mock, command_mock, init_mock, owners):
-    from superset.reports.commands.exceptions import ReportScheduleUnexpectedError
+    from superset.commands.report.exceptions import ReportScheduleUnexpectedError

     with app.app_context():
         report_schedule = insert_report_schedule(
@@ -179,8 +179,8 @@ def test_execute_task(update_state_mock, command_mock, init_mock, owners):


 @pytest.mark.usefixtures("owners")
-@patch("superset.reports.commands.execute.AsyncExecuteReportScheduleCommand.__init__")
-@patch("superset.reports.commands.execute.AsyncExecuteReportScheduleCommand.run")
+@patch("superset.commands.report.execute.AsyncExecuteReportScheduleCommand.__init__")
+@patch("superset.commands.report.execute.AsyncExecuteReportScheduleCommand.run")
 @patch("superset.tasks.scheduler.execute.update_state")
 @patch("superset.utils.log.logger")
 def test_execute_task_with_command_exception(
diff --git a/tests/integration_tests/security/migrate_roles_tests.py b/tests/integration_tests/security/migrate_roles_tests.py
index ae89fea068..39d66a82aa 100644
--- a/tests/integration_tests/security/migrate_roles_tests.py
+++ b/tests/integration_tests/security/migrate_roles_tests.py
@@ -62,7 +62,6 @@ def create_old_role(pvm_map: PvmMigrationMapType, external_pvms):
         db.session.query(Role).filter(Role.name == "Dummy Role").one_or_none()
     )
     new_role.permissions = []
-    db.session.merge(new_role)
     for old_pvm, new_pvms in pvm_map.items():
         security_manager.del_permission_view_menu(old_pvm.permission, old_pvm.view)
         for new_pvm in new_pvms:
diff --git a/tests/integration_tests/sql_lab/api_tests.py b/tests/integration_tests/sql_lab/api_tests.py
index 49dd4ea32e..da050c2363 100644
--- a/tests/integration_tests/sql_lab/api_tests.py
+++ b/tests/integration_tests/sql_lab/api_tests.py
@@ -209,7 +209,7 @@ class TestSqlLabApi(SupersetTestCase):
             return_value=formatter_response
         )

-        with mock.patch("superset.sqllab.commands.estimate.db") as mock_superset_db:
+        with mock.patch("superset.commands.sql_lab.estimate.db") as mock_superset_db:
             mock_superset_db.session.query().get.return_value = db_mock

             data = {"database_id": 1, "sql": "SELECT 1"}
@@ -236,7 +236,7 @@ class TestSqlLabApi(SupersetTestCase):
         self.assertDictEqual(resp_data, success_resp)
         self.assertEqual(rv.status_code, 200)

-    @mock.patch("superset.sqllab.commands.results.results_backend_use_msgpack", False)
+    @mock.patch("superset.commands.sql_lab.results.results_backend_use_msgpack", False)
     def test_execute_required_params(self):
         self.login()
         client_id = f"{random.getrandbits(64)}"[:10]
@@ -276,7 +276,7 @@ class TestSqlLabApi(SupersetTestCase):
         self.assertDictEqual(resp_data, failed_resp)
         self.assertEqual(rv.status_code, 400)

-    @mock.patch("superset.sqllab.commands.results.results_backend_use_msgpack", False)
+    @mock.patch("superset.commands.sql_lab.results.results_backend_use_msgpack", False)
     def test_execute_valid_request(self) -> None:
         from superset import sql_lab as core

@@ -320,9 +320,9 @@ class TestSqlLabApi(SupersetTestCase):

         self.delete_fake_db_for_macros()

-    @mock.patch("superset.sqllab.commands.results.results_backend_use_msgpack", False)
+    @mock.patch("superset.commands.sql_lab.results.results_backend_use_msgpack", False)
     def test_get_results_with_display_limit(self):
-        from superset.sqllab.commands import results as command
+        from superset.commands.sql_lab import results as command

         command.results_backend = mock.Mock()
         self.login()
@@ -355,7 +355,7 @@ class TestSqlLabApi(SupersetTestCase):
             compressed = utils.zlib_compress(serialized_payload)
             command.results_backend.get.return_value = compressed

-            with mock.patch("superset.sqllab.commands.results.db") as mock_superset_db:
+            with mock.patch("superset.commands.sql_lab.results.db") as mock_superset_db:
                 mock_superset_db.session.query().filter_by().one_or_none.return_value = (
                     query_mock
                 )
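[Editor's note] Several hunks here patch `results_backend_use_msgpack`, which is a module-level flag rather than a callable; `mock.patch` accepts the replacement value directly as its second argument. A sketch using the new module path from this patch (the test name is hypothetical):

from unittest import mock

@mock.patch("superset.commands.sql_lab.results.results_backend_use_msgpack", False)
def test_reads_plain_pickle_payload():
    ...  # code under test now sees the flag as False for the duration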
diff --git a/tests/integration_tests/sql_lab/commands_tests.py b/tests/integration_tests/sql_lab/commands_tests.py
index d76924a8fb..11eb5de0c9 100644
--- a/tests/integration_tests/sql_lab/commands_tests.py
+++ b/tests/integration_tests/sql_lab/commands_tests.py
@@ -22,6 +22,7 @@ import pytest
 from flask_babel import gettext as __

 from superset import app, db, sql_lab
+from superset.commands.sql_lab import estimate, export, results
 from superset.common.db_query_status import QueryStatus
 from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
 from superset.exceptions import (
@@ -32,7 +33,6 @@ from superset.exceptions import (
 )
 from superset.models.core import Database
 from superset.models.sql_lab import Query
-from superset.sqllab.commands import estimate, export, results
 from superset.sqllab.limiting_factor import LimitingFactor
 from superset.sqllab.schemas import EstimateQueryCostSchema
 from superset.utils import core as utils
@@ -47,7 +47,7 @@ class TestQueryEstimationCommand(SupersetTestCase):
         data: EstimateQueryCostSchema = schema.dump(params)
         command = estimate.QueryEstimationCommand(data)

-        with mock.patch("superset.sqllab.commands.estimate.db") as mock_superset_db:
+        with mock.patch("superset.commands.sql_lab.estimate.db") as mock_superset_db:
             mock_superset_db.session.query().get.return_value = None
             with pytest.raises(SupersetErrorException) as ex_info:
                 command.validate()
@@ -79,7 +79,7 @@ class TestQueryEstimationCommand(SupersetTestCase):
         db_mock.db_engine_spec.query_cost_formatter = mock.Mock(return_value=None)
         is_feature_enabled.return_value = False

-        with mock.patch("superset.sqllab.commands.estimate.db") as mock_superset_db:
+        with mock.patch("superset.commands.sql_lab.estimate.db") as mock_superset_db:
             mock_superset_db.session.query().get.return_value = db_mock
             with pytest.raises(SupersetErrorException) as ex_info:
                 command.run()
@@ -105,7 +105,7 @@ class TestQueryEstimationCommand(SupersetTestCase):
         db_mock.db_engine_spec.estimate_query_cost = mock.Mock(return_value=100)
         db_mock.db_engine_spec.query_cost_formatter = mock.Mock(return_value=payload)

-        with mock.patch("superset.sqllab.commands.estimate.db") as mock_superset_db:
+        with mock.patch("superset.commands.sql_lab.estimate.db") as mock_superset_db:
             mock_superset_db.session.query().get.return_value = db_mock
             result = command.run()
             assert result == payload
@@ -223,7 +223,7 @@ class TestSqlResultExportCommand(SupersetTestCase):

     @pytest.mark.usefixtures("create_database_and_query")
     @patch("superset.models.sql_lab.Query.raise_for_access", lambda _: None)
-    @patch("superset.sqllab.commands.export.results_backend_use_msgpack", False)
+    @patch("superset.commands.sql_lab.export.results_backend_use_msgpack", False)
     def test_run_with_results_backend(self) -> None:
         command = export.SqlResultExportCommand("test")

@@ -273,8 +273,8 @@ class TestSqlExecutionResultsCommand(SupersetTestCase):
         db.session.delete(query_obj)
         db.session.commit()

-    @patch("superset.sqllab.commands.results.results_backend_use_msgpack", False)
-    @patch("superset.sqllab.commands.results.results_backend", None)
+    @patch("superset.commands.sql_lab.results.results_backend_use_msgpack", False)
+    @patch("superset.commands.sql_lab.results.results_backend", None)
     def test_validation_no_results_backend(self) -> None:
         command = results.SqlExecutionResultsCommand("test", 1000)

@@ -285,7 +285,7 @@ class TestSqlExecutionResultsCommand(SupersetTestCase):
             == SupersetErrorType.RESULTS_BACKEND_NOT_CONFIGURED_ERROR
         )

-    @patch("superset.sqllab.commands.results.results_backend_use_msgpack", False)
+    @patch("superset.commands.sql_lab.results.results_backend_use_msgpack", False)
     def test_validation_data_cannot_be_retrieved(self) -> None:
         results.results_backend = mock.Mock()
         results.results_backend.get.return_value = None
@@ -296,7 +296,7 @@ class TestSqlExecutionResultsCommand(SupersetTestCase):
                 command.run()
         assert ex_info.value.error.error_type == SupersetErrorType.RESULTS_BACKEND_ERROR

-    @patch("superset.sqllab.commands.results.results_backend_use_msgpack", False)
+    @patch("superset.commands.sql_lab.results.results_backend_use_msgpack", False)
     def test_validation_data_not_found(self) -> None:
         data = [{"col_0": i} for i in range(100)]
         payload = {
@@ -317,7 +317,7 @@ class TestSqlExecutionResultsCommand(SupersetTestCase):
         assert ex_info.value.error.error_type == SupersetErrorType.RESULTS_BACKEND_ERROR

     @pytest.mark.usefixtures("create_database_and_query")
-    @patch("superset.sqllab.commands.results.results_backend_use_msgpack", False)
+    @patch("superset.commands.sql_lab.results.results_backend_use_msgpack", False)
     def test_validation_query_not_found(self) -> None:
         data = [{"col_0": i} for i in range(104)]
         payload = {
@patch("superset.sqllab.commands.results.results_backend_use_msgpack", False) + @patch("superset.commands.sql_lab.results.results_backend_use_msgpack", False) def test_run_succeeds(self) -> None: data = [{"col_0": i} for i in range(104)] payload = { diff --git a/tests/integration_tests/strategy_tests.py b/tests/integration_tests/strategy_tests.py index 6fec16ca74..8a7477a8fc 100644 --- a/tests/integration_tests/strategy_tests.py +++ b/tests/integration_tests/strategy_tests.py @@ -33,7 +33,7 @@ from superset.utils.database import get_example_database from superset import db from superset.models.core import Log -from superset.tags.models import get_tag, ObjectTypes, TaggedObject, TagTypes +from superset.tags.models import get_tag, ObjectType, TaggedObject, TagType from superset.tasks.cache import ( DashboardTagsStrategy, TopNDashboardsStrategy, @@ -93,7 +93,7 @@ class TestCacheWarmUp(SupersetTestCase): "load_unicode_dashboard_with_slice", "load_birth_names_dashboard_with_slices" ) def test_dashboard_tags_strategy(self): - tag1 = get_tag("tag1", db.session, TagTypes.custom) + tag1 = get_tag("tag1", db.session, TagType.custom) # delete first to make test idempotent self.reset_tag(tag1) @@ -103,11 +103,11 @@ class TestCacheWarmUp(SupersetTestCase): self.assertEqual(result, expected) # tag dashboard 'births' with `tag1` - tag1 = get_tag("tag1", db.session, TagTypes.custom) + tag1 = get_tag("tag1", db.session, TagType.custom) dash = self.get_dash_by_slug("births") tag1_urls = [{"chart_id": chart.id} for chart in dash.slices] tagged_object = TaggedObject( - tag_id=tag1.id, object_id=dash.id, object_type=ObjectTypes.dashboard + tag_id=tag1.id, object_id=dash.id, object_type=ObjectType.dashboard ) db.session.add(tagged_object) db.session.commit() @@ -115,7 +115,7 @@ class TestCacheWarmUp(SupersetTestCase): self.assertCountEqual(strategy.get_payloads(), tag1_urls) strategy = DashboardTagsStrategy(["tag2"]) - tag2 = get_tag("tag2", db.session, TagTypes.custom) + tag2 = get_tag("tag2", db.session, TagType.custom) self.reset_tag(tag2) result = strategy.get_payloads() @@ -128,7 +128,7 @@ class TestCacheWarmUp(SupersetTestCase): tag2_urls = [{"chart_id": chart.id}] object_id = chart.id tagged_object = TaggedObject( - tag_id=tag2.id, object_id=object_id, object_type=ObjectTypes.chart + tag_id=tag2.id, object_id=object_id, object_type=ObjectType.chart ) db.session.add(tagged_object) db.session.commit() diff --git a/tests/integration_tests/tagging_tests.py b/tests/integration_tests/tagging_tests.py index 4ecfd1049f..36fb8df3ff 100644 --- a/tests/integration_tests/tagging_tests.py +++ b/tests/integration_tests/tagging_tests.py @@ -70,7 +70,7 @@ class TestTagging(SupersetTestCase): # Test to make sure that a dataset tag was added to the tagged_object table tags = self.query_tagged_object_table() self.assertEqual(1, len(tags)) - self.assertEqual("ObjectTypes.dataset", str(tags[0].object_type)) + self.assertEqual("ObjectType.dataset", str(tags[0].object_type)) self.assertEqual(test_dataset.id, tags[0].object_id) # Cleanup the db @@ -108,7 +108,7 @@ class TestTagging(SupersetTestCase): # Test to make sure that a chart tag was added to the tagged_object table tags = self.query_tagged_object_table() self.assertEqual(1, len(tags)) - self.assertEqual("ObjectTypes.chart", str(tags[0].object_type)) + self.assertEqual("ObjectType.chart", str(tags[0].object_type)) self.assertEqual(test_chart.id, tags[0].object_id) # Cleanup the db @@ -144,7 +144,7 @@ class TestTagging(SupersetTestCase): # Test to make sure that a 
diff --git a/tests/integration_tests/tags/api_tests.py b/tests/integration_tests/tags/api_tests.py
index 33fa4902b2..863288a3e7 100644
--- a/tests/integration_tests/tags/api_tests.py
+++ b/tests/integration_tests/tags/api_tests.py
@@ -17,10 +17,12 @@
 # isort:skip_file
 """Unit tests for Superset"""
 import json
+from datetime import datetime

 from flask import g
 import pytest
 import prison
+from freezegun import freeze_time
 from sqlalchemy.sql import func
 from sqlalchemy import and_
 from superset.models.dashboard import Dashboard
@@ -35,7 +37,7 @@ from superset import db, security_manager
 from superset.common.db_query_status import QueryStatus
 from superset.models.core import Database
 from superset.utils.database import get_example_database, get_main_database
-from superset.tags.models import ObjectTypes, Tag, TagTypes, TaggedObject
+from superset.tags.models import ObjectType, Tag, TagType, TaggedObject
 from tests.integration_tests.fixtures.birth_names_dashboard import (
     load_birth_names_dashboard_with_slices,
     load_birth_names_data,
@@ -47,7 +49,7 @@ from tests.integration_tests.fixtures.world_bank_dashboard import (
 )
 from tests.integration_tests.fixtures.tags import with_tagging_system_feature
 from tests.integration_tests.base_tests import SupersetTestCase
 from superset.daos.tag import TagDAO
-from superset.tags.models import ObjectTypes
+from superset.tags.models import ObjectType

 TAGS_FIXTURE_COUNT = 10
@@ -84,7 +86,7 @@ class TestTagApi(SupersetTestCase):
         self,
         tag_id: int,
         object_id: int,
-        object_type: ObjectTypes,
+        object_type: ObjectType,
     ) -> TaggedObject:
         tag = db.session.query(Tag).filter(Tag.id == tag_id).first()
         tagged_object = TaggedObject(
@@ -121,13 +123,14 @@ class TestTagApi(SupersetTestCase):
         """
         Query API: Test get query
         """
-        tag = self.insert_tag(
-            name="test get tag",
-            tag_type="custom",
-        )
-        self.login(username="admin")
-        uri = f"api/v1/tag/{tag.id}"
-        rv = self.client.get(uri)
+        with freeze_time(datetime.now()):
+            tag = self.insert_tag(
+                name="test get tag",
+                tag_type="custom",
+            )
+            self.login(username="admin")
+            uri = f"api/v1/tag/{tag.id}"
+            rv = self.client.get(uri)
         self.assertEqual(rv.status_code, 200)
         expected_result = {
             "changed_by": None,
@@ -135,7 +138,7 @@ class TestTagApi(SupersetTestCase):
             "created_by": None,
             "id": tag.id,
             "name": "test get tag",
-            "type": TagTypes.custom.value,
+            "type": TagType.custom.value,
         }
         data = json.loads(rv.data.decode("utf-8"))
         for key, value in expected_result.items():
@@ -192,7 +195,7 @@ class TestTagApi(SupersetTestCase):
             .first()
         )
         dashboard_id = dashboard.id
-        dashboard_type = ObjectTypes.dashboard.value
+        dashboard_type = ObjectType.dashboard.value
         uri = f"api/v1/tag/{dashboard_type}/{dashboard_id}/"
         example_tag_names = ["example_tag_1", "example_tag_2"]
         data = {"properties": {"tags": example_tag_names}}
@@ -207,7 +210,7 @@ class TestTagApi(SupersetTestCase):
         tagged_objects = db.session.query(TaggedObject).filter(
             TaggedObject.tag_id.in_(tag_ids),
             TaggedObject.object_id == dashboard_id,
-            TaggedObject.object_type == ObjectTypes.dashboard,
+            TaggedObject.object_type == ObjectType.dashboard,
         )
         assert tagged_objects.count() == 2
         # clean up tags and tagged objects
@@ -225,7 +228,7 @@ class TestTagApi(SupersetTestCase):
     def test_delete_tagged_objects(self):
         self.login(username="admin")
         dashboard_id = 1
-        dashboard_type = ObjectTypes.dashboard
+        dashboard_type = ObjectType.dashboard
         tag_names = ["example_tag_1", "example_tag_2"]
         tags = db.session.query(Tag).filter(Tag.name.in_(tag_names))
         assert tags.count() == 2
@@ -295,7 +298,7 @@ class TestTagApi(SupersetTestCase):
             .first()
         )
         dashboard_id = dashboard.id
-        dashboard_type = ObjectTypes.dashboard
+        dashboard_type = ObjectType.dashboard
         tag_names = ["example_tag_1", "example_tag_2"]
         tags = db.session.query(Tag).filter(Tag.name.in_(tag_names))
         for tag in tags:
@@ -331,7 +334,7 @@ class TestTagApi(SupersetTestCase):
             .first()
         )
         dashboard_id = dashboard.id
-        dashboard_type = ObjectTypes.dashboard
+        dashboard_type = ObjectType.dashboard
         tag_names = ["example_tag_1", "example_tag_2"]
         tags = db.session.query(Tag).filter(Tag.name.in_(tag_names))
         for tag in tags:
@@ -480,7 +483,7 @@ class TestTagApi(SupersetTestCase):
         user_id = self.get_user(username="admin").get_id()
         tag = (
             db.session.query(Tag)
-            .filter(Tag.name == "my_tag", Tag.type == TagTypes.custom)
+            .filter(Tag.name == "my_tag", Tag.type == TagType.custom)
             .one_or_none()
         )
         assert tag is not None
@@ -576,13 +579,13 @@ class TestTagApi(SupersetTestCase):

         tagged_objects = db.session.query(TaggedObject).filter(
             TaggedObject.object_id == dashboard.id,
-            TaggedObject.object_type == ObjectTypes.dashboard,
+            TaggedObject.object_type == ObjectType.dashboard,
         )
         assert tagged_objects.count() == 2

         tagged_objects = db.session.query(TaggedObject).filter(
             TaggedObject.object_id == chart.id,
-            TaggedObject.object_type == ObjectTypes.chart,
+            TaggedObject.object_type == ObjectType.chart,
         )
         assert tagged_objects.count() == 2
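tags/api_tests.py also gains freezegun so the tag's created/changed timestamps are pinned while the response is compared: inside a freeze_time block the clock simply does not advance. A self-contained sketch of that property:

from datetime import datetime

from freezegun import freeze_time

with freeze_time(datetime.now()):
    first = datetime.now()
    second = datetime.now()
    assert first == second  # time is frozen, so timestamp fields stay stable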
diff --git a/tests/integration_tests/tags/commands_tests.py b/tests/integration_tests/tags/commands_tests.py
index cd5a024840..48abfd31b4 100644
--- a/tests/integration_tests/tags/commands_tests.py
+++ b/tests/integration_tests/tags/commands_tests.py
@@ -22,22 +22,22 @@ import yaml
 from werkzeug.utils import secure_filename

 from superset import db, security_manager
-from superset.commands.exceptions import CommandInvalidError
-from superset.commands.importers.exceptions import IncorrectVersionError
-from superset.connectors.sqla.models import SqlaTable
-from superset.dashboards.commands.exceptions import DashboardNotFoundError
-from superset.dashboards.commands.export import (
+from superset.commands.dashboard.exceptions import DashboardNotFoundError
+from superset.commands.dashboard.export import (
     append_charts,
     ExportDashboardsCommand,
     get_default_position,
 )
-from superset.dashboards.commands.importers import v0, v1
+from superset.commands.dashboard.importers import v0, v1
+from superset.commands.exceptions import CommandInvalidError
+from superset.commands.importers.exceptions import IncorrectVersionError
+from superset.commands.tag.create import CreateCustomTagCommand
+from superset.commands.tag.delete import DeleteTaggedObjectCommand, DeleteTagsCommand
+from superset.connectors.sqla.models import SqlaTable
 from superset.models.core import Database
 from superset.models.dashboard import Dashboard
 from superset.models.slice import Slice
-from superset.tags.commands.create import CreateCustomTagCommand
-from superset.tags.commands.delete import DeleteTaggedObjectCommand, DeleteTagsCommand
-from superset.tags.models import ObjectTypes, Tag, TaggedObject, TagTypes
+from superset.tags.models import ObjectType, Tag, TaggedObject, TagType
 from tests.integration_tests.base_tests import SupersetTestCase
 from tests.integration_tests.fixtures.importexport import (
     chart_config,
@@ -65,7 +65,7 @@ class TestCreateCustomTagCommand(SupersetTestCase):
         )
         example_tags = ["create custom tag example 1", "create custom tag example 2"]
         command = CreateCustomTagCommand(
-            ObjectTypes.dashboard.value, example_dashboard.id, example_tags
+            ObjectType.dashboard.value, example_dashboard.id, example_tags
         )
         command.run()

@@ -74,7 +74,7 @@ class TestCreateCustomTagCommand(SupersetTestCase):
             .join(TaggedObject)
             .filter(
                 TaggedObject.object_id == example_dashboard.id,
-                Tag.type == TagTypes.custom,
+                Tag.type == TagType.custom,
             )
             .all()
         )
@@ -101,7 +101,7 @@ class TestDeleteTagsCommand(SupersetTestCase):
         )
         example_tags = ["create custom tag example 1", "create custom tag example 2"]
         command = CreateCustomTagCommand(
-            ObjectTypes.dashboard.value, example_dashboard.id, example_tags
+            ObjectType.dashboard.value, example_dashboard.id, example_tags
         )
         command.run()

@@ -110,7 +110,7 @@ class TestDeleteTagsCommand(SupersetTestCase):
             .join(TaggedObject)
             .filter(
                 TaggedObject.object_id == example_dashboard.id,
-                Tag.type == TagTypes.custom,
+                Tag.type == TagType.custom,
             )
             .all()
         )
@@ -133,7 +133,7 @@ class TestDeleteTaggedObjectCommand(SupersetTestCase):
         )
         example_tags = ["create custom tag example 1", "create custom tag example 2"]
         command = CreateCustomTagCommand(
-            ObjectTypes.dashboard.value, example_dashboard.id, example_tags
+            ObjectType.dashboard.value, example_dashboard.id, example_tags
         )
         command.run()

@@ -142,14 +142,14 @@ class TestDeleteTaggedObjectCommand(SupersetTestCase):
             .join(Tag)
             .filter(
                 TaggedObject.object_id == example_dashboard.id,
-                TaggedObject.object_type == ObjectTypes.dashboard.name,
+                TaggedObject.object_type == ObjectType.dashboard.name,
                 Tag.name.in_(example_tags),
             )
         )
         assert tagged_objects.count() == 2
         # delete one of the tagged objects
         command = DeleteTaggedObjectCommand(
-            object_type=ObjectTypes.dashboard.value,
+            object_type=ObjectType.dashboard.value,
             object_id=example_dashboard.id,
             tag=example_tags[0],
         )
@@ -159,7 +159,7 @@ class TestDeleteTaggedObjectCommand(SupersetTestCase):
             .join(Tag)
             .filter(
                 TaggedObject.object_id == example_dashboard.id,
-                TaggedObject.object_type == ObjectTypes.dashboard.name,
+                TaggedObject.object_type == ObjectType.dashboard.name,
                 Tag.name.in_(example_tags),
             )
         )
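The tag commands keep their constructor and run() interface through the move to superset.commands.tag; only the import paths change. A usage sketch matching the signatures exercised above (the object id and tag names are illustrative, and an app context is assumed):

from superset.commands.tag.create import CreateCustomTagCommand
from superset.commands.tag.delete import DeleteTaggedObjectCommand
from superset.tags.models import ObjectType

# attach two custom tags to dashboard 42, then detach one of them
CreateCustomTagCommand(ObjectType.dashboard.value, 42, ["finance", "q3"]).run()
DeleteTaggedObjectCommand(
    object_type=ObjectType.dashboard.value, object_id=42, tag="finance"
).run()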
diff --git a/tests/integration_tests/tags/dao_tests.py b/tests/integration_tests/tags/dao_tests.py
index 8acaa353e9..272ba43ed3 100644
--- a/tests/integration_tests/tags/dao_tests.py
+++ b/tests/integration_tests/tags/dao_tests.py
@@ -23,7 +23,7 @@ from superset.models.slice import Slice
 from superset.models.sql_lab import SavedQuery
 from superset.daos.tag import TagDAO
 from superset.tags.exceptions import InvalidTagNameError
-from superset.tags.models import ObjectTypes, Tag, TaggedObject
+from superset.tags.models import ObjectType, Tag, TaggedObject
 from tests.integration_tests.tags.api_tests import TAGS_FIXTURE_COUNT

 import tests.integration_tests.test_app  # pylint: disable=unused-import
@@ -57,7 +57,7 @@ class TestTagsDAO(SupersetTestCase):
         self,
         tag_id: int,
         object_id: int,
-        object_type: ObjectTypes,
+        object_type: ObjectType,
     ) -> TaggedObject:
         tag = db.session.query(Tag).filter(Tag.id == tag_id).first()
         tagged_object = TaggedObject(
@@ -113,7 +113,7 @@ class TestTagsDAO(SupersetTestCase):
                 tagged_objects.append(
                     self.insert_tagged_object(
                         object_id=dashboard_id,
-                        object_type=ObjectTypes.dashboard,
+                        object_type=ObjectType.dashboard,
                         tag_id=tag.id,
                     )
                 )
@@ -127,14 +127,14 @@ class TestTagsDAO(SupersetTestCase):
         # test that a tag cannot be added if it has ':' in it
         with pytest.raises(DAOCreateFailedError):
             TagDAO.create_custom_tagged_objects(
-                object_type=ObjectTypes.dashboard.name,
+                object_type=ObjectType.dashboard.name,
                 object_id=1,
                 tag_names=["invalid:example tag 1"],
             )

         # test that a tag can be added if it has a valid name
         TagDAO.create_custom_tagged_objects(
-            object_type=ObjectTypes.dashboard.name,
+            object_type=ObjectType.dashboard.name,
             object_id=1,
             tag_names=["example tag 1"],
         )
@@ -155,7 +155,7 @@ class TestTagsDAO(SupersetTestCase):
         dashboard_id = dashboard.id
         tag = db.session.query(Tag).filter_by(name="example_tag_1").one()
         self.insert_tagged_object(
-            object_id=dashboard_id, object_type=ObjectTypes.dashboard, tag_id=tag.id
+            object_id=dashboard_id, object_type=ObjectType.dashboard, tag_id=tag.id
         )
         # get objects
         tagged_objects = TagDAO.get_tagged_objects_for_tags(
@@ -179,7 +179,7 @@ class TestTagsDAO(SupersetTestCase):
                     TaggedObject,
                     and_(
                         TaggedObject.object_id == Slice.id,
-                        TaggedObject.object_type == ObjectTypes.chart,
+                        TaggedObject.object_type == ObjectType.chart,
                     ),
                 )
                 .distinct(Slice.id)
@@ -191,7 +191,7 @@ class TestTagsDAO(SupersetTestCase):
                     TaggedObject,
                     and_(
                         TaggedObject.object_id == Dashboard.id,
-                        TaggedObject.object_type == ObjectTypes.dashboard,
+                        TaggedObject.object_type == ObjectType.dashboard,
                     ),
                 )
                 .distinct(Dashboard.id)
@@ -207,13 +207,46 @@ class TestTagsDAO(SupersetTestCase):
         tagged_objects = TagDAO.get_tagged_objects_for_tags(obj_types=["chart"])
         assert len(tagged_objects) == num_charts

+    @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices")
+    @pytest.mark.usefixtures("with_tagging_system_feature")
+    @pytest.mark.usefixtures("create_tags")
+    # test get objects from tag
+    def test_get_objects_from_tag_with_id(self):
+        # create tagged objects
+        dashboard = (
+            db.session.query(Dashboard)
+            .filter(Dashboard.dashboard_title == "World Bank's Data")
+            .first()
+        )
+        dashboard_id = dashboard.id
+        tag_1 = db.session.query(Tag).filter_by(name="example_tag_1").one()
+        tag_2 = db.session.query(Tag).filter_by(name="example_tag_2").one()
+        tag_ids = [tag_1.id, tag_2.id]
+        self.insert_tagged_object(
+            object_id=dashboard_id, object_type=ObjectType.dashboard, tag_id=tag_1.id
+        )
+        # get objects
+        tagged_objects = TagDAO.get_tagged_objects_by_tag_id(tag_ids)
+        assert len(tagged_objects) == 1
+
+        # test get objects from tag with type
+        tagged_objects = TagDAO.get_tagged_objects_by_tag_id(
+            tag_ids, obj_types=["dashboard", "chart"]
+        )
+        assert len(tagged_objects) == 1
+
+        tagged_objects = TagDAO.get_tagged_objects_by_tag_id(
+            tag_ids, obj_types=["chart"]
+        )
+        assert len(tagged_objects) == 0
+
     @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices")
     @pytest.mark.usefixtures("with_tagging_system_feature")
     @pytest.mark.usefixtures("create_tagged_objects")
     def test_find_tagged_object(self):
         tag = db.session.query(Tag).filter(Tag.name == "example_tag_1").first()
         tagged_object = TagDAO.find_tagged_object(
-            object_id=1, object_type=ObjectTypes.dashboard.name, tag_id=tag.id
+            object_id=1, object_type=ObjectType.dashboard.name, tag_id=tag.id
         )

         assert tagged_object is not None
@@ -269,20 +302,20 @@ class TestTagsDAO(SupersetTestCase):
             .filter(
                 TaggedObject.tag_id == tag.id,
                 TaggedObject.object_id == 1,
-                TaggedObject.object_type == ObjectTypes.dashboard.name,
+                TaggedObject.object_type == ObjectType.dashboard.name,
             )
             .first()
         )
         assert tagged_object is not None
         TagDAO.delete_tagged_object(
-            object_type=ObjectTypes.dashboard.name, object_id=1, tag_name=tag.name
+            object_type=ObjectType.dashboard.name, object_id=1, tag_name=tag.name
         )
         tagged_object = (
             db.session.query(TaggedObject)
             .filter(
                 TaggedObject.tag_id == tag.id,
                 TaggedObject.object_id == 1,
-                TaggedObject.object_type == ObjectTypes.dashboard.name,
+                TaggedObject.object_type == ObjectType.dashboard.name,
             )
             .first()
         )
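The new test above pins down TagDAO.get_tagged_objects_by_tag_id, which resolves tagged objects by tag id rather than tag name and accepts the same obj_types narrowing as its by-name sibling. A call-shape sketch (ids are illustrative; an app context and seeded tags are assumed):

from superset.daos.tag import TagDAO

tag_ids = [1, 2]  # illustrative; the test reads these from fixture tags

# every object carrying at least one of the tags...
tagged = TagDAO.get_tagged_objects_by_tag_id(tag_ids)

# ...optionally restricted to particular object types
dashboards_only = TagDAO.get_tagged_objects_by_tag_id(tag_ids, obj_types=["dashboard"])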
diff --git a/tests/integration_tests/tasks/async_queries_tests.py b/tests/integration_tests/tasks/async_queries_tests.py
index 8e6e595757..01880b7a62 100644
--- a/tests/integration_tests/tasks/async_queries_tests.py
+++ b/tests/integration_tests/tasks/async_queries_tests.py
@@ -21,8 +21,8 @@ from uuid import uuid4
 import pytest
 from celery.exceptions import SoftTimeLimitExceeded

-from superset.charts.commands.exceptions import ChartDataQueryFailedError
-from superset.charts.data.commands.get_data_command import ChartDataCommand
+from superset.commands.chart.data.get_data_command import ChartDataCommand
+from superset.commands.chart.exceptions import ChartDataQueryFailedError
 from superset.exceptions import SupersetException
 from superset.extensions import async_query_manager, security_manager
 from tests.integration_tests.base_tests import SupersetTestCase
diff --git a/tests/integration_tests/utils_tests.py b/tests/integration_tests/utils_tests.py
index 6648d72c61..ddd0b0caf4 100644
--- a/tests/integration_tests/utils_tests.py
+++ b/tests/integration_tests/utils_tests.py
@@ -24,7 +24,7 @@ import re
 from typing import Any, Optional
 from unittest.mock import Mock, patch

-from superset.databases.commands.exceptions import DatabaseInvalidError
+from superset.commands.database.exceptions import DatabaseInvalidError
 from tests.integration_tests.fixtures.birth_names_dashboard import (
     load_birth_names_dashboard_with_slices,
     load_birth_names_data,
@@ -59,7 +59,6 @@ from superset.utils.core import (
     get_stacktrace,
     json_int_dttm_ser,
     json_iso_dttm_ser,
-    JSONEncodedDict,
     merge_extra_filters,
     merge_extra_form_data,
     merge_request_params,
@@ -583,15 +582,6 @@ class TestUtils(SupersetTestCase):
             "-16 days, 4:03:00",
         )

-    def test_json_encoded_obj(self):
-        obj = {"a": 5, "b": ["a", "g", 5]}
-        val = '{"a": 5, "b": ["a", "g", 5]}'
-        jsonObj = JSONEncodedDict()
-        resp = jsonObj.process_bind_param(obj, "dialect")
-        self.assertIn('"a": 5', resp)
-        self.assertIn('"b": ["a", "g", 5]', resp)
-        self.assertEqual(jsonObj.process_result_value(val, "dialect"), obj)
-
     def test_validate_json(self):
         valid = '{"a": 5, "b": [1, 5, ["g", "h"]]}'
         self.assertIsNone(validate_json(valid))
@@ -754,50 +744,6 @@ class TestUtils(SupersetTestCase):
         self.assertListEqual(as_list([123]), [123])
         self.assertListEqual(as_list("foo"), ["foo"])

-    @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices")
-    def test_build_extra_filters(self):
-        world_health = db.session.query(Dashboard).filter_by(slug="world_health").one()
-        layout = json.loads(world_health.position_json)
-        filter_ = db.session.query(Slice).filter_by(slice_name="Region Filter").one()
-        world = db.session.query(Slice).filter_by(slice_name="World's Population").one()
-        box_plot = db.session.query(Slice).filter_by(slice_name="Box plot").one()
-        treemap = db.session.query(Slice).filter_by(slice_name="Treemap").one()
-
-        filter_scopes = {
-            str(filter_.id): {
-                "region": {"scope": ["ROOT_ID"], "immune": [treemap.id]},
-                "country_name": {
-                    "scope": ["ROOT_ID"],
-                    "immune": [treemap.id, box_plot.id],
-                },
-            }
-        }
-
-        default_filters = {
-            str(filter_.id): {
-                "region": ["North America"],
-                "country_name": ["United States"],
-            }
-        }
-
-        # immune to all filters
-        assert (
-            build_extra_filters(layout, filter_scopes, default_filters, treemap.id)
-            == []
-        )
-
-        # in scope
-        assert build_extra_filters(
-            layout, filter_scopes, default_filters, world.id
-        ) == [
-            {"col": "region", "op": "==", "val": "North America"},
-            {"col": "country_name", "op": "in", "val": ["United States"]},
-        ]
-
-        assert build_extra_filters(
-            layout, filter_scopes, default_filters, box_plot.id
-        ) == [{"col": "region", "op": "==", "val": "North America"}]
-
     def test_merge_extra_filters_with_no_extras(self):
         form_data = {
             "time_range": "Last 10 days",
diff --git a/tests/unit_tests/charts/commands/importers/v1/import_test.py b/tests/unit_tests/charts/commands/importers/v1/import_test.py
index 06e0063fe9..f0d142644d 100644
--- a/tests/unit_tests/charts/commands/importers/v1/import_test.py
+++ b/tests/unit_tests/charts/commands/importers/v1/import_test.py
@@ -30,7 +30,7 @@ def test_import_chart(mocker: MockFixture, session: Session) -> None:
     Test importing a chart.
     """
     from superset import security_manager
-    from superset.charts.commands.importers.v1.utils import import_chart
+    from superset.commands.chart.importers.v1.utils import import_chart
     from superset.connectors.sqla.models import SqlaTable
     from superset.models.core import Database
     from superset.models.slice import Slice
@@ -57,7 +57,7 @@ def test_import_chart_managed_externally(mocker: MockFixture, session: Session)
     Test importing a chart that is managed externally.
     """
     from superset import security_manager
-    from superset.charts.commands.importers.v1.utils import import_chart
+    from superset.commands.chart.importers.v1.utils import import_chart
     from superset.connectors.sqla.models import SqlaTable
     from superset.models.core import Database
     from superset.models.slice import Slice
@@ -87,7 +87,7 @@ def test_import_chart_without_permission(
     Test importing a chart when a user doesn't have permissions to create.
     """
     from superset import security_manager
-    from superset.charts.commands.importers.v1.utils import import_chart
+    from superset.commands.chart.importers.v1.utils import import_chart
     from superset.connectors.sqla.models import SqlaTable
     from superset.models.core import Database
     from superset.models.slice import Slice
diff --git a/tests/unit_tests/charts/commands/importers/v1/utils_test.py b/tests/unit_tests/charts/commands/importers/v1/utils_test.py
index 77d31e7d77..de3f805d8b 100644
--- a/tests/unit_tests/charts/commands/importers/v1/utils_test.py
+++ b/tests/unit_tests/charts/commands/importers/v1/utils_test.py
@@ -17,7 +17,7 @@

 import json

-from superset.charts.commands.importers.v1.utils import migrate_chart
+from superset.commands.chart.importers.v1.utils import migrate_chart


 def test_migrate_chart_area() -> None:
@@ -31,13 +31,21 @@ def test_migrate_chart_area() -> None:
         "description": None,
         "certified_by": None,
         "certification_details": None,
-        "viz_type": "area",
+        "viz_type": "echarts_area",
         "query_context": None,
         "params": json.dumps(
             {
-                "adhoc_filters": [],
+                "adhoc_filters": [
+                    {
+                        "clause": "WHERE",
+                        "subject": "ds",
+                        "operator": "TEMPORAL_RANGE",
+                        "comparator": "No filter",
+                        "expressionType": "SIMPLE",
+                    }
+                ],
                 "annotation_layers": [],
-                "bottom_margin": "auto",
+                "x_axis_title_margin": "auto",
                 "color_scheme": "supersetColors",
                 "comparison_type": "values",
                 "dashboards": [],
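The updated expectation in utils_test.py reflects what migrate_chart now emits for legacy area charts: the viz_type becomes echarts_area, an explicit TEMPORAL_RANGE adhoc filter replaces the empty list, and bottom_margin is carried over as x_axis_title_margin. Reduced to just the affected keys (in the real config, params is a JSON-encoded string):

# before/after sketch of the rewrite asserted above (other keys omitted)
legacy = {
    "viz_type": "area",
    "params": {"adhoc_filters": [], "bottom_margin": "auto"},
}

migrated = {
    "viz_type": "echarts_area",
    "params": {
        "adhoc_filters": [
            {
                "clause": "WHERE",
                "subject": "ds",
                "operator": "TEMPORAL_RANGE",
                "comparator": "No filter",
                "expressionType": "SIMPLE",
            }
        ],
        "x_axis_title_margin": "auto",  # carried over from bottom_margin
    },
}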
""" from superset import security_manager - from superset.charts.commands.importers.v1.utils import import_chart + from superset.commands.chart.importers.v1.utils import import_chart from superset.connectors.sqla.models import SqlaTable from superset.models.core import Database from superset.models.slice import Slice diff --git a/tests/unit_tests/charts/commands/importers/v1/utils_test.py b/tests/unit_tests/charts/commands/importers/v1/utils_test.py index 77d31e7d77..de3f805d8b 100644 --- a/tests/unit_tests/charts/commands/importers/v1/utils_test.py +++ b/tests/unit_tests/charts/commands/importers/v1/utils_test.py @@ -17,7 +17,7 @@ import json -from superset.charts.commands.importers.v1.utils import migrate_chart +from superset.commands.chart.importers.v1.utils import migrate_chart def test_migrate_chart_area() -> None: @@ -31,13 +31,21 @@ def test_migrate_chart_area() -> None: "description": None, "certified_by": None, "certification_details": None, - "viz_type": "area", + "viz_type": "echarts_area", "query_context": None, "params": json.dumps( { - "adhoc_filters": [], + "adhoc_filters": [ + { + "clause": "WHERE", + "subject": "ds", + "operator": "TEMPORAL_RANGE", + "comparator": "No filter", + "expressionType": "SIMPLE", + } + ], "annotation_layers": [], - "bottom_margin": "auto", + "x_axis_title_margin": "auto", "color_scheme": "supersetColors", "comparison_type": "values", "dashboards": [], diff --git a/tests/unit_tests/common/test_get_aggregated_join_column.py b/tests/unit_tests/common/test_get_aggregated_join_column.py index 8effacf249..de0b6b92b2 100644 --- a/tests/unit_tests/common/test_get_aggregated_join_column.py +++ b/tests/unit_tests/common/test_get_aggregated_join_column.py @@ -24,7 +24,7 @@ from superset.common.query_context_processor import ( AGGREGATED_JOIN_COLUMN, QueryContextProcessor, ) -from superset.connectors.base.models import BaseDatasource +from superset.connectors.sqla.models import BaseDatasource from superset.constants import TimeGrain query_context_processor = QueryContextProcessor( diff --git a/tests/unit_tests/dao/tag_test.py b/tests/unit_tests/dao/tag_test.py index 065ed75662..5f29d0f28c 100644 --- a/tests/unit_tests/dao/tag_test.py +++ b/tests/unit_tests/dao/tag_test.py @@ -149,7 +149,7 @@ def test_user_favorite_tag_exc_raise(mocker): def test_create_tag_relationship(mocker): from superset.daos.tag import TagDAO from superset.tags.models import ( # Assuming these are defined in the same module - ObjectTypes, + ObjectType, TaggedObject, ) @@ -157,9 +157,9 @@ def test_create_tag_relationship(mocker): # Define a list of objects to tag objects_to_tag = [ - (ObjectTypes.query, 1), - (ObjectTypes.chart, 2), - (ObjectTypes.dashboard, 3), + (ObjectType.query, 1), + (ObjectType.chart, 2), + (ObjectType.dashboard, 3), ] # Call the function diff --git a/tests/unit_tests/dashboards/commands/importers/v1/import_test.py b/tests/unit_tests/dashboards/commands/importers/v1/import_test.py index e07a23f6bf..67e0897755 100644 --- a/tests/unit_tests/dashboards/commands/importers/v1/import_test.py +++ b/tests/unit_tests/dashboards/commands/importers/v1/import_test.py @@ -30,8 +30,8 @@ def test_import_dashboard(mocker: MockFixture, session: Session) -> None: Test importing a dashboard. 
""" from superset import security_manager + from superset.commands.dashboard.importers.v1.utils import import_dashboard from superset.connectors.sqla.models import SqlaTable - from superset.dashboards.commands.importers.v1.utils import import_dashboard from superset.models.core import Database from superset.models.slice import Slice from tests.integration_tests.fixtures.importexport import dashboard_config @@ -58,8 +58,8 @@ def test_import_dashboard_managed_externally( Test importing a dashboard that is managed externally. """ from superset import security_manager + from superset.commands.dashboard.importers.v1.utils import import_dashboard from superset.connectors.sqla.models import SqlaTable - from superset.dashboards.commands.importers.v1.utils import import_dashboard from superset.models.core import Database from superset.models.slice import Slice from tests.integration_tests.fixtures.importexport import dashboard_config @@ -86,8 +86,8 @@ def test_import_dashboard_without_permission( Test importing a dashboard when a user doesn't have permissions to create. """ from superset import security_manager + from superset.commands.dashboard.importers.v1.utils import import_dashboard from superset.connectors.sqla.models import SqlaTable - from superset.dashboards.commands.importers.v1.utils import import_dashboard from superset.models.core import Database from superset.models.slice import Slice from tests.integration_tests.fixtures.importexport import dashboard_config diff --git a/tests/unit_tests/dashboards/commands/importers/v1/utils_test.py b/tests/unit_tests/dashboards/commands/importers/v1/utils_test.py index 60a659159a..0e84362957 100644 --- a/tests/unit_tests/dashboards/commands/importers/v1/utils_test.py +++ b/tests/unit_tests/dashboards/commands/importers/v1/utils_test.py @@ -29,7 +29,7 @@ def test_update_id_refs_immune_missing( # pylint: disable=invalid-name immune to filters. The missing chart ID should be simply ignored when the dashboard is imported. 
""" - from superset.dashboards.commands.importers.v1.utils import update_id_refs + from superset.commands.dashboard.importers.v1.utils import update_id_refs config = { "position": { @@ -83,7 +83,7 @@ def test_update_id_refs_immune_missing( # pylint: disable=invalid-name def test_update_native_filter_config_scope_excluded(): - from superset.dashboards.commands.importers.v1.utils import update_id_refs + from superset.commands.dashboard.importers.v1.utils import update_id_refs config = { "position": { diff --git a/tests/unit_tests/databases/api_test.py b/tests/unit_tests/databases/api_test.py index aa15645ddb..28ca123ec6 100644 --- a/tests/unit_tests/databases/api_test.py +++ b/tests/unit_tests/databases/api_test.py @@ -396,7 +396,7 @@ def test_delete_ssh_tunnel( mocker.patch("sqlalchemy.engine.URL.get_driver_name", return_value="gsheets") mocker.patch("superset.utils.log.DBEventLogger.log") mocker.patch( - "superset.databases.ssh_tunnel.commands.delete.is_feature_enabled", + "superset.commands.database.ssh_tunnel.delete.is_feature_enabled", return_value=True, ) @@ -472,7 +472,7 @@ def test_delete_ssh_tunnel_not_found( mocker.patch("sqlalchemy.engine.URL.get_driver_name", return_value="gsheets") mocker.patch("superset.utils.log.DBEventLogger.log") mocker.patch( - "superset.databases.ssh_tunnel.commands.delete.is_feature_enabled", + "superset.commands.database.ssh_tunnel.delete.is_feature_enabled", return_value=True, ) @@ -559,7 +559,7 @@ def test_apply_dynamic_database_filter( mocker.patch("sqlalchemy.engine.URL.get_driver_name", return_value="gsheets") mocker.patch("superset.utils.log.DBEventLogger.log") mocker.patch( - "superset.databases.ssh_tunnel.commands.delete.is_feature_enabled", + "superset.commands.database.ssh_tunnel.delete.is_feature_enabled", return_value=False, ) diff --git a/tests/unit_tests/databases/commands/importers/v1/import_test.py b/tests/unit_tests/databases/commands/importers/v1/import_test.py index b8bd24d94d..5fb4d12ce5 100644 --- a/tests/unit_tests/databases/commands/importers/v1/import_test.py +++ b/tests/unit_tests/databases/commands/importers/v1/import_test.py @@ -17,6 +17,7 @@ # pylint: disable=unused-argument, import-outside-toplevel, invalid-name import copy +import json import pytest from pytest_mock import MockFixture @@ -30,7 +31,7 @@ def test_import_database(mocker: MockFixture, session: Session) -> None: Test importing a database. """ from superset import security_manager - from superset.databases.commands.importers.v1.utils import import_database + from superset.commands.database.importers.v1.utils import import_database from superset.models.core import Database from tests.integration_tests.fixtures.importexport import database_config @@ -70,7 +71,7 @@ def test_import_database_sqlite_invalid(mocker: MockFixture, session: Session) - Test importing a database. """ from superset import app, security_manager - from superset.databases.commands.importers.v1.utils import import_database + from superset.commands.database.importers.v1.utils import import_database from superset.models.core import Database from tests.integration_tests.fixtures.importexport import database_config_sqlite @@ -99,7 +100,7 @@ def test_import_database_managed_externally( Test importing a database that is managed externally. 
""" from superset import security_manager - from superset.databases.commands.importers.v1.utils import import_database + from superset.commands.database.importers.v1.utils import import_database from superset.models.core import Database from tests.integration_tests.fixtures.importexport import database_config @@ -125,7 +126,7 @@ def test_import_database_without_permission( Test importing a database when a user doesn't have permissions to create. """ from superset import security_manager - from superset.databases.commands.importers.v1.utils import import_database + from superset.commands.database.importers.v1.utils import import_database from superset.models.core import Database from tests.integration_tests.fixtures.importexport import database_config @@ -142,3 +143,23 @@ def test_import_database_without_permission( str(excinfo.value) == "Database doesn't exist and user doesn't have permission to create databases" ) + + +def test_import_database_with_version(mocker: MockFixture, session: Session) -> None: + """ + Test importing a database with a version set. + """ + from superset import security_manager + from superset.commands.database.importers.v1.utils import import_database + from superset.models.core import Database + from tests.integration_tests.fixtures.importexport import database_config + + mocker.patch.object(security_manager, "can_access", return_value=True) + + engine = session.get_bind() + Database.metadata.create_all(engine) # pylint: disable=no-member + + config = copy.deepcopy(database_config) + config["extra"]["version"] = "1.1.1" + database = import_database(session, config) + assert json.loads(database.extra)["version"] == "1.1.1" diff --git a/tests/unit_tests/databases/commands/test_connection_test.py b/tests/unit_tests/databases/commands/test_connection_test.py index 8e86cfd1cf..66efa7d717 100644 --- a/tests/unit_tests/databases/commands/test_connection_test.py +++ b/tests/unit_tests/databases/commands/test_connection_test.py @@ -17,7 +17,7 @@ from parameterized import parameterized -from superset.databases.commands.test_connection import get_log_connection_action +from superset.commands.database.test_connection import get_log_connection_action from superset.databases.ssh_tunnel.models import SSHTunnel diff --git a/tests/unit_tests/databases/ssh_tunnel/commands/create_test.py b/tests/unit_tests/databases/ssh_tunnel/commands/create_test.py index fbad104c1d..bd891b64f0 100644 --- a/tests/unit_tests/databases/ssh_tunnel/commands/create_test.py +++ b/tests/unit_tests/databases/ssh_tunnel/commands/create_test.py @@ -19,11 +19,11 @@ import pytest from sqlalchemy.orm.session import Session -from superset.databases.ssh_tunnel.commands.exceptions import SSHTunnelInvalidError +from superset.commands.database.ssh_tunnel.exceptions import SSHTunnelInvalidError def test_create_ssh_tunnel_command() -> None: - from superset.databases.ssh_tunnel.commands.create import CreateSSHTunnelCommand + from superset.commands.database.ssh_tunnel.create import CreateSSHTunnelCommand from superset.databases.ssh_tunnel.models import SSHTunnel from superset.models.core import Database @@ -44,7 +44,7 @@ def test_create_ssh_tunnel_command() -> None: def test_create_ssh_tunnel_command_invalid_params() -> None: - from superset.databases.ssh_tunnel.commands.create import CreateSSHTunnelCommand + from superset.commands.database.ssh_tunnel.create import CreateSSHTunnelCommand from superset.databases.ssh_tunnel.models import SSHTunnel from superset.models.core import Database diff --git 
diff --git a/tests/unit_tests/databases/ssh_tunnel/commands/delete_test.py b/tests/unit_tests/databases/ssh_tunnel/commands/delete_test.py
index 641e34d347..14838ddc58 100644
--- a/tests/unit_tests/databases/ssh_tunnel/commands/delete_test.py
+++ b/tests/unit_tests/databases/ssh_tunnel/commands/delete_test.py
@@ -54,8 +54,8 @@ def session_with_data(session: Session) -> Iterator[Session]:
 def test_delete_ssh_tunnel_command(
     mocker: MockFixture, session_with_data: Session
 ) -> None:
+    from superset.commands.database.ssh_tunnel.delete import DeleteSSHTunnelCommand
     from superset.daos.database import DatabaseDAO
-    from superset.databases.ssh_tunnel.commands.delete import DeleteSSHTunnelCommand
     from superset.databases.ssh_tunnel.models import SSHTunnel

     result = DatabaseDAO.get_ssh_tunnel(1)
@@ -64,7 +64,7 @@ def test_delete_ssh_tunnel_command(
     assert isinstance(result, SSHTunnel)
     assert 1 == result.database_id
     mocker.patch(
-        "superset.databases.ssh_tunnel.commands.delete.is_feature_enabled",
+        "superset.commands.database.ssh_tunnel.delete.is_feature_enabled",
         return_value=True,
     )
     DeleteSSHTunnelCommand(1).run()
diff --git a/tests/unit_tests/databases/ssh_tunnel/commands/update_test.py b/tests/unit_tests/databases/ssh_tunnel/commands/update_test.py
index d4a5faba8b..5c3907b016 100644
--- a/tests/unit_tests/databases/ssh_tunnel/commands/update_test.py
+++ b/tests/unit_tests/databases/ssh_tunnel/commands/update_test.py
@@ -20,7 +20,7 @@ from collections.abc import Iterator
 import pytest
 from sqlalchemy.orm.session import Session

-from superset.databases.ssh_tunnel.commands.exceptions import SSHTunnelInvalidError
+from superset.commands.database.ssh_tunnel.exceptions import SSHTunnelInvalidError


 @pytest.fixture
@@ -50,8 +50,8 @@ def session_with_data(session: Session) -> Iterator[Session]:


 def test_update_shh_tunnel_command(session_with_data: Session) -> None:
+    from superset.commands.database.ssh_tunnel.update import UpdateSSHTunnelCommand
     from superset.daos.database import DatabaseDAO
-    from superset.databases.ssh_tunnel.commands.update import UpdateSSHTunnelCommand
     from superset.databases.ssh_tunnel.models import SSHTunnel

     result = DatabaseDAO.get_ssh_tunnel(1)
@@ -72,8 +72,8 @@ def test_update_shh_tunnel_command(session_with_data: Session) -> None:


 def test_update_shh_tunnel_invalid_params(session_with_data: Session) -> None:
+    from superset.commands.database.ssh_tunnel.update import UpdateSSHTunnelCommand
     from superset.daos.database import DatabaseDAO
-    from superset.databases.ssh_tunnel.commands.update import UpdateSSHTunnelCommand
     from superset.databases.ssh_tunnel.models import SSHTunnel

     result = DatabaseDAO.get_ssh_tunnel(1)
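As in databases/api_test.py above, the SSH tunnel command tests gate behavior on is_feature_enabled, and the patch target again tracks the module that imports the flag check, now superset.commands.database.ssh_tunnel.delete. A sketch of the pattern using the pytest-mock mocker fixture these tests rely on (the test name is illustrative):

def test_delete_tunnel_with_flag_enabled(mocker):
    # pin the feature flag where the delete command looks it up
    mocker.patch(
        "superset.commands.database.ssh_tunnel.delete.is_feature_enabled",
        return_value=True,
    )
    # ... DeleteSSHTunnelCommand(1).run() then proceeds as if the flag is on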
""" + from superset.commands.dataset.export import ExportDatasetsCommand from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn - from superset.datasets.commands.export import ExportDatasetsCommand from superset.models.core import Database engine = session.get_bind() diff --git a/tests/unit_tests/datasets/commands/importers/v1/import_test.py b/tests/unit_tests/datasets/commands/importers/v1/import_test.py index e8e8c8e7c5..5089838e69 100644 --- a/tests/unit_tests/datasets/commands/importers/v1/import_test.py +++ b/tests/unit_tests/datasets/commands/importers/v1/import_test.py @@ -28,11 +28,11 @@ from flask import current_app from pytest_mock import MockFixture from sqlalchemy.orm.session import Session -from superset.datasets.commands.exceptions import ( +from superset.commands.dataset.exceptions import ( DatasetForbiddenDataURI, ImportFailedError, ) -from superset.datasets.commands.importers.v1.utils import validate_data_uri +from superset.commands.dataset.importers.v1.utils import validate_data_uri def test_import_dataset(mocker: MockFixture, session: Session) -> None: @@ -40,8 +40,8 @@ def test_import_dataset(mocker: MockFixture, session: Session) -> None: Test importing a dataset. """ from superset import security_manager + from superset.commands.dataset.importers.v1.utils import import_dataset from superset.connectors.sqla.models import SqlaTable - from superset.datasets.commands.importers.v1.utils import import_dataset from superset.models.core import Database mocker.patch.object(security_manager, "can_access", return_value=True) @@ -156,8 +156,8 @@ def test_import_dataset_duplicate_column(mocker: MockFixture, session: Session) """ from superset import security_manager from superset.columns.models import Column as NewColumn + from superset.commands.dataset.importers.v1.utils import import_dataset from superset.connectors.sqla.models import SqlaTable, TableColumn - from superset.datasets.commands.importers.v1.utils import import_dataset from superset.models.core import Database mocker.patch.object(security_manager, "can_access", return_value=True) @@ -281,8 +281,8 @@ def test_import_column_extra_is_string(mocker: MockFixture, session: Session) -> Test importing a dataset when the column extra is a string. """ from superset import security_manager + from superset.commands.dataset.importers.v1.utils import import_dataset from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn - from superset.datasets.commands.importers.v1.utils import import_dataset from superset.datasets.schemas import ImportV1DatasetSchema from superset.models.core import Database @@ -366,8 +366,8 @@ def test_import_dataset_extra_empty_string( Test importing a dataset when the extra field is an empty string. 
""" from superset import security_manager + from superset.commands.dataset.importers.v1.utils import import_dataset from superset.connectors.sqla.models import SqlaTable - from superset.datasets.commands.importers.v1.utils import import_dataset from superset.datasets.schemas import ImportV1DatasetSchema from superset.models.core import Database @@ -422,7 +422,7 @@ def test_import_dataset_extra_empty_string( assert sqla_table.extra == None -@patch("superset.datasets.commands.importers.v1.utils.request") +@patch("superset.commands.dataset.importers.v1.utils.request") def test_import_column_allowed_data_url( request: Mock, mocker: MockFixture, @@ -434,8 +434,8 @@ def test_import_column_allowed_data_url( import io from superset import security_manager + from superset.commands.dataset.importers.v1.utils import import_dataset from superset.connectors.sqla.models import SqlaTable - from superset.datasets.commands.importers.v1.utils import import_dataset from superset.datasets.schemas import ImportV1DatasetSchema from superset.models.core import Database @@ -510,8 +510,8 @@ def test_import_dataset_managed_externally( Test importing a dataset that is managed externally. """ from superset import security_manager + from superset.commands.dataset.importers.v1.utils import import_dataset from superset.connectors.sqla.models import SqlaTable - from superset.datasets.commands.importers.v1.utils import import_dataset from superset.models.core import Database from tests.integration_tests.fixtures.importexport import dataset_config diff --git a/tests/unit_tests/db_engine_specs/test_doris.py b/tests/unit_tests/db_engine_specs/test_doris.py new file mode 100644 index 0000000000..d7444f8d2d --- /dev/null +++ b/tests/unit_tests/db_engine_specs/test_doris.py @@ -0,0 +1,147 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+
+from typing import Any, Optional
+
+import pytest
+from sqlalchemy import JSON, types
+from sqlalchemy.engine.url import make_url
+
+from superset.db_engine_specs.doris import (
+    AggState,
+    ARRAY,
+    BITMAP,
+    DOUBLE,
+    HLL,
+    LARGEINT,
+    MAP,
+    QuantileState,
+    STRUCT,
+    TINYINT,
+)
+from superset.utils.core import GenericDataType
+from tests.unit_tests.db_engine_specs.utils import assert_column_spec
+
+
+@pytest.mark.parametrize(
+    "native_type,sqla_type,attrs,generic_type,is_dttm",
+    [
+        # Numeric
+        ("tinyint", TINYINT, None, GenericDataType.NUMERIC, False),
+        ("largeint", LARGEINT, None, GenericDataType.NUMERIC, False),
+        ("decimal(38,18)", types.DECIMAL, None, GenericDataType.NUMERIC, False),
+        ("decimalv3(38,18)", types.DECIMAL, None, GenericDataType.NUMERIC, False),
+        ("double", DOUBLE, None, GenericDataType.NUMERIC, False),
+        # String
+        ("char(10)", types.CHAR, None, GenericDataType.STRING, False),
+        ("varchar(65533)", types.VARCHAR, None, GenericDataType.STRING, False),
+        ("binary", types.BINARY, None, GenericDataType.STRING, False),
+        ("text", types.TEXT, None, GenericDataType.STRING, False),
+        ("string", types.String, None, GenericDataType.STRING, False),
+        # Date
+        ("datetimev2", types.DateTime, None, GenericDataType.STRING, False),
+        ("datev2", types.Date, None, GenericDataType.STRING, False),
+        # Complex type
+        ("array<varchar(65533)>", ARRAY, None, GenericDataType.STRING, False),
+        ("map<string,int>", MAP, None, GenericDataType.STRING, False),
+        ("struct<int,string>", STRUCT, None, GenericDataType.STRING, False),
+        ("json", JSON, None, GenericDataType.STRING, False),
+        ("jsonb", JSON, None, GenericDataType.STRING, False),
+        ("bitmap", BITMAP, None, GenericDataType.STRING, False),
+        ("hll", HLL, None, GenericDataType.STRING, False),
+        ("quantile_state", QuantileState, None, GenericDataType.STRING, False),
+        ("agg_state", AggState, None, GenericDataType.STRING, False),
+    ],
+)
+def test_get_column_spec(
+    native_type: str,
+    sqla_type: type[types.TypeEngine],
+    attrs: Optional[dict[str, Any]],
+    generic_type: GenericDataType,
+    is_dttm: bool,
+) -> None:
+    from superset.db_engine_specs.doris import DorisEngineSpec as spec
+
+    assert_column_spec(spec, native_type, sqla_type, attrs, generic_type, is_dttm)
+
+
+@pytest.mark.parametrize(
+    "sqlalchemy_uri,connect_args,return_schema,return_connect_args",
+    [
+        (
+            "doris://user:password@host/db1",
+            {"param1": "some_value"},
+            "db1",
+            {"param1": "some_value"},
+        ),
+        (
+            "pydoris://user:password@host/db1",
+            {"param1": "some_value"},
+            "db1",
+            {"param1": "some_value"},
+        ),
+        (
+            "doris://user:password@host/catalog1.db1",
+            {"param1": "some_value"},
+            "catalog1.db1",
+            {"param1": "some_value"},
+        ),
+        (
+            "pydoris://user:password@host/catalog1.db1",
+            {"param1": "some_value"},
+            "catalog1.db1",
+            {"param1": "some_value"},
+        ),
+    ],
+)
+def test_adjust_engine_params(
+    sqlalchemy_uri: str,
+    connect_args: dict[str, Any],
+    return_schema: str,
+    return_connect_args: dict[str, Any],
+) -> None:
+    from superset.db_engine_specs.doris import DorisEngineSpec
+
+    url = make_url(sqlalchemy_uri)
+    returned_url, returned_connect_args = DorisEngineSpec.adjust_engine_params(
+        url, connect_args
+    )
+    assert returned_url.database == return_schema
+    assert returned_connect_args == return_connect_args
+
+
+def test_get_schema_from_engine_params() -> None:
+    """
+    Test the ``get_schema_from_engine_params`` method.
+    """
+    from superset.db_engine_specs.doris import DorisEngineSpec
+
+    assert (
+        DorisEngineSpec.get_schema_from_engine_params(
+            make_url("doris://localhost:9030/hive.test"),
+            {},
+        )
+        == "test"
+    )
+
+    assert (
+        DorisEngineSpec.get_schema_from_engine_params(
+            make_url("doris://localhost:9030/hive"),
+            {},
+        )
+        is None
+    )
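The last Doris test encodes the catalog handling: the database portion of the URL may be "catalog.schema", and only the trailing schema is reported, otherwise None. A simplified re-derivation of just that rule (the real logic lives in DorisEngineSpec; this helper is hypothetical):

from typing import Optional

def schema_from_database_part(database: Optional[str]) -> Optional[str]:
    # "hive.test" -> "test"; a bare catalog like "hive" carries no schema
    if database and "." in database:
        return database.split(".", 1)[1]
    return None

assert schema_from_database_part("hive.test") == "test"
assert schema_from_database_part("hive") is None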
diff --git a/tests/unit_tests/db_engine_specs/test_trino.py b/tests/unit_tests/db_engine_specs/test_trino.py
index 1b50a683a0..15e55fc5af 100644
--- a/tests/unit_tests/db_engine_specs/test_trino.py
+++ b/tests/unit_tests/db_engine_specs/test_trino.py
@@ -15,6 +15,7 @@
 # specific language governing permissions and limitations
 # under the License.
 # pylint: disable=unused-argument, import-outside-toplevel, protected-access
+import copy
 import json
 from datetime import datetime
 from typing import Any, Optional
@@ -24,9 +25,11 @@ import pandas as pd
 import pytest
 from pytest_mock import MockerFixture
 from sqlalchemy import types
+from trino.sqlalchemy import datatype

 import superset.config
 from superset.constants import QUERY_CANCEL_KEY, QUERY_EARLY_CANCEL_KEY, USER_AGENT
+from superset.superset_typing import ResultSetColumnType, SQLAColumnType
 from superset.utils.core import GenericDataType
 from tests.unit_tests.db_engine_specs.utils import (
     assert_column_spec,
@@ -35,6 +38,24 @@ from tests.unit_tests.db_engine_specs.utils import (
 )
 from tests.unit_tests.fixtures.common import dttm
+
+
+def _assert_columns_equal(actual_cols, expected_cols) -> None:
+    """
+    Assert equality of the given cols, bearing in mind sqlalchemy type
+    instances can't be compared for equality, so will have to be converted to
+    strings first.
+    """
+    actual = copy.deepcopy(actual_cols)
+    expected = copy.deepcopy(expected_cols)
+
+    for col in actual:
+        col["type"] = str(col["type"])
+
+    for col in expected:
+        col["type"] = str(col["type"])
+
+    assert actual == expected


 @pytest.mark.parametrize(
     "extra,expected",
     [
@@ -395,3 +416,104 @@ def test_execute_with_cursor_in_parallel(mocker: MockerFixture):
     mock_query.set_extra_json_key.assert_called_once_with(
         key=QUERY_CANCEL_KEY, value=query_id
     )
+
+
+def test_get_columns(mocker: MockerFixture):
+    """Test that ROW columns are not expanded without expand_rows"""
+    from superset.db_engine_specs.trino import TrinoEngineSpec
+
+    field1_type = datatype.parse_sqltype("row(a varchar, b date)")
+    field2_type = datatype.parse_sqltype("row(r1 row(a varchar, b varchar))")
+    field3_type = datatype.parse_sqltype("int")
+
+    sqla_columns = [
+        SQLAColumnType(name="field1", type=field1_type, is_dttm=False),
+        SQLAColumnType(name="field2", type=field2_type, is_dttm=False),
+        SQLAColumnType(name="field3", type=field3_type, is_dttm=False),
+    ]
+    mock_inspector = mocker.MagicMock()
+    mock_inspector.get_columns.return_value = sqla_columns
+
+    actual = TrinoEngineSpec.get_columns(mock_inspector, "table", "schema")
+    expected = [
+        ResultSetColumnType(
+            name="field1", column_name="field1", type=field1_type, is_dttm=False
+        ),
+        ResultSetColumnType(
+            name="field2", column_name="field2", type=field2_type, is_dttm=False
+        ),
+        ResultSetColumnType(
+            name="field3", column_name="field3", type=field3_type, is_dttm=False
+        ),
+    ]
+
+    _assert_columns_equal(actual, expected)
+
+
+def test_get_columns_expand_rows(mocker: MockerFixture):
+    """Test that ROW columns are correctly expanded with expand_rows"""
+    from superset.db_engine_specs.trino import TrinoEngineSpec
+
+    field1_type = datatype.parse_sqltype("row(a varchar, b date)")
+    field2_type = datatype.parse_sqltype("row(r1 row(a varchar, b varchar))")
+    field3_type = datatype.parse_sqltype("int")
+
+    sqla_columns = [
+        SQLAColumnType(name="field1", type=field1_type, is_dttm=False),
+        SQLAColumnType(name="field2", type=field2_type, is_dttm=False),
+        SQLAColumnType(name="field3", type=field3_type, is_dttm=False),
+    ]
+    mock_inspector = mocker.MagicMock()
+    mock_inspector.get_columns.return_value = sqla_columns
+
+    actual = TrinoEngineSpec.get_columns(
+        mock_inspector, "table", "schema", {"expand_rows": True}
+    )
+    expected = [
+        ResultSetColumnType(
+            name="field1", column_name="field1", type=field1_type, is_dttm=False
+        ),
+        ResultSetColumnType(
+            name="field1.a",
+            column_name="field1.a",
+            type=types.VARCHAR(),
+            is_dttm=False,
+            query_as='"field1"."a" AS "field1.a"',
+        ),
+        ResultSetColumnType(
+            name="field1.b",
+            column_name="field1.b",
+            type=types.DATE(),
+            is_dttm=True,
+            query_as='"field1"."b" AS "field1.b"',
+        ),
+        ResultSetColumnType(
+            name="field2", column_name="field2", type=field2_type, is_dttm=False
+        ),
+        ResultSetColumnType(
+            name="field2.r1",
+            column_name="field2.r1",
+            type=datatype.parse_sqltype("row(a varchar, b varchar)"),
+            is_dttm=False,
+            query_as='"field2"."r1" AS "field2.r1"',
+        ),
+        ResultSetColumnType(
+            name="field2.r1.a",
+            column_name="field2.r1.a",
+            type=types.VARCHAR(),
+            is_dttm=False,
+            query_as='"field2"."r1"."a" AS "field2.r1.a"',
+        ),
+        ResultSetColumnType(
+            name="field2.r1.b",
+            column_name="field2.r1.b",
+            type=types.VARCHAR(),
+            is_dttm=False,
+            query_as='"field2"."r1"."b" AS "field2.r1.b"',
+        ),
+        ResultSetColumnType(
+            name="field3", column_name="field3", type=field3_type, is_dttm=False
+        ),
+    ]
+
+    _assert_columns_equal(actual, expected)
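_assert_columns_equal exists because SQLAlchemy type objects compare by identity, so two equivalent instances are never equal; stringifying them first makes the column dicts comparable. The one-line fact behind the helper:

from sqlalchemy import types

assert types.VARCHAR() != types.VARCHAR()             # distinct instances
assert str(types.VARCHAR()) == str(types.VARCHAR())   # both render "VARCHAR"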
diff --git a/tests/unit_tests/explore/utils_test.py b/tests/unit_tests/explore/utils_test.py
index de39187ec7..fa99091f09 100644
--- a/tests/unit_tests/explore/utils_test.py
+++ b/tests/unit_tests/explore/utils_test.py
@@ -18,20 +18,20 @@ from flask_appbuilder.security.sqla.models import User
 from pytest import raises
 from pytest_mock import MockFixture

-from superset.charts.commands.exceptions import (
+from superset.commands.chart.exceptions import (
     ChartAccessDeniedError,
     ChartNotFoundError,
 )
+from superset.commands.dataset.exceptions import (
+    DatasetAccessDeniedError,
+    DatasetNotFoundError,
+)
 from superset.commands.exceptions import (
     DatasourceNotFoundValidationError,
     DatasourceTypeInvalidError,
     OwnersNotFoundValidationError,
     QueryNotFoundValidationError,
 )
-from superset.datasets.commands.exceptions import (
-    DatasetAccessDeniedError,
-    DatasetNotFoundError,
-)
 from superset.exceptions import SupersetSecurityException
 from superset.utils.core import DatasourceType, override_user
diff --git a/tests/unit_tests/jinja_context_test.py b/tests/unit_tests/jinja_context_test.py
index 114f046300..e2a5e8cd49 100644
--- a/tests/unit_tests/jinja_context_test.py
+++ b/tests/unit_tests/jinja_context_test.py
@@ -22,7 +22,7 @@ import pytest
 from pytest_mock import MockFixture
 from sqlalchemy.dialects import mysql

-from superset.datasets.commands.exceptions import DatasetNotFoundError
+from superset.commands.dataset.exceptions import DatasetNotFoundError
 from superset.jinja_context import dataset_macro, WhereInMacro
diff --git a/tests/unit_tests/migrations/viz/dual_line_to_mixed_chart_test.py b/tests/unit_tests/migrations/viz/dual_line_to_mixed_chart_test.py
index 76addd8009..3d9dc53122 100644
--- a/tests/unit_tests/migrations/viz/dual_line_to_mixed_chart_test.py
+++ b/tests/unit_tests/migrations/viz/dual_line_to_mixed_chart_test.py
@@ -14,9 +14,10 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-import json
+from typing import Any

 from superset.migrations.shared.migrate_viz import MigrateDualLine
+from tests.unit_tests.migrations.viz.utils import migrate_and_assert

 ADHOC_FILTERS = [
     {
@@ -28,7 +29,7 @@ ADHOC_FILTERS = [
     }
 ]

-SOURCE_FORM_DATA = {
+SOURCE_FORM_DATA: dict[str, Any] = {
     "metric": "num_boys",
     "y_axis_format": ",d",
     "y_axis_bounds": [50, 100],
@@ -42,7 +43,7 @@ SOURCE_FORM_DATA = {
     "yAxisIndex": 0,
 }

-TARGET_FORM_DATA = {
+TARGET_FORM_DATA: dict[str, Any] = {
     "metrics": ["num_boys"],
     "y_axis_format": ",d",
     "y_axis_bounds": [50, 100],
@@ -64,34 +65,4 @@ TARGET_FORM_DATA = {
 def test_migration() -> None:
     source = SOURCE_FORM_DATA.copy()
     target = TARGET_FORM_DATA.copy()
-    upgrade_downgrade(source, target)
-
-
-def upgrade_downgrade(source, target) -> None:
-    from superset.models.slice import Slice
-
-    dumped_form_data = json.dumps(source)
-
-    slc = Slice(
-        viz_type=MigrateDualLine.source_viz_type,
-        datasource_type="table",
-        params=dumped_form_data,
-        query_context=f'{{"form_data": {dumped_form_data}}}',
-    )
-
-    # upgrade
-    slc = MigrateDualLine.upgrade_slice(slc)
-
-    # verify form_data
-    new_form_data = json.loads(slc.params)
-    assert new_form_data == target
-    assert new_form_data["form_data_bak"] == source
-
-    # verify query_context
-    new_query_context = json.loads(slc.query_context)
-    assert new_query_context["form_data"]["viz_type"] == "mixed_timeseries"
-
-    # downgrade
-    slc = MigrateDualLine.downgrade_slice(slc)
-    assert slc.viz_type == MigrateDualLine.source_viz_type
-    assert json.loads(slc.params) == source
+    migrate_and_assert(MigrateDualLine, source, target)
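The dual-line test now delegates to a shared migrate_and_assert helper in tests/unit_tests/migrations/viz/utils.py, whose body is not part of this diff. Judging from the deleted upgrade_downgrade above, it plausibly looks something like the sketch below; the hard-coded "mixed_timeseries" query_context check was dual-line specific and would now derive from the migration class instead. The real helper may differ in details:

import json

def migrate_and_assert(cls, source: dict, target: dict) -> None:
    """Hedged reconstruction of the shared helper, not the actual source."""
    from superset.models.slice import Slice

    dumped = json.dumps(source)
    slc = Slice(
        viz_type=cls.source_viz_type,
        datasource_type="table",
        params=dumped,
        query_context=f'{{"form_data": {dumped}}}',
    )

    # upgrade: params become the target form_data, with the old one backed up
    slc = cls.upgrade_slice(slc)
    new_form_data = json.loads(slc.params)
    assert new_form_data == target
    assert new_form_data["form_data_bak"] == source

    # downgrade: the original viz_type and params are restored
    slc = cls.downgrade_slice(slc)
    assert slc.viz_type == cls.source_viz_type
    assert json.loads(slc.params) == source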
+from typing import Any + +from superset.migrations.shared.migrate_viz import MigrateAreaChart +from tests.unit_tests.migrations.viz.utils import ( + migrate_and_assert, + TIMESERIES_SOURCE_FORM_DATA, + TIMESERIES_TARGET_FORM_DATA, +) + +SOURCE_FORM_DATA: dict[str, Any] = { + "viz_type": "area", + "stacked_style": "stream", +} + +TARGET_FORM_DATA: dict[str, Any] = { + "form_data_bak": SOURCE_FORM_DATA, + "viz_type": "echarts_area", + "opacity": 0.7, + "stack": "Stream", +} + + +def test_migration() -> None: + SOURCE_FORM_DATA.update(TIMESERIES_SOURCE_FORM_DATA) + TARGET_FORM_DATA.update(TIMESERIES_TARGET_FORM_DATA) + migrate_and_assert(MigrateAreaChart, SOURCE_FORM_DATA, TARGET_FORM_DATA) diff --git a/tests/unit_tests/migrations/viz/nvd3_bubble_chart_to_echarts_test.py b/tests/unit_tests/migrations/viz/nvd3_bubble_chart_to_echarts_test.py new file mode 100644 index 0000000000..070083b7ae --- /dev/null +++ b/tests/unit_tests/migrations/viz/nvd3_bubble_chart_to_echarts_test.py @@ -0,0 +1,76 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
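+# Migration test for the NVD3 bubble chart → ECharts "bubble_v2". The target
+# form data below asserts the field renames (limit → row_limit, x_axis_format
+# → xAxisFormat, left/bottom margins → axis title margins), the log-scale
+# flags becoming logXAxis/logYAxis, and the "45°" tick layout translating to
+# xAxisLabelRotation: 45.
+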
+from typing import Any + +from superset.migrations.shared.migrate_viz import MigrateBubbleChart +from tests.unit_tests.migrations.viz.utils import migrate_and_assert + +SOURCE_FORM_DATA: dict[str, Any] = { + "adhoc_filters": [], + "bottom_margin": 20, + "color_scheme": "default", + "entity": "count", + "left_margin": 20, + "limit": 100, + "max_bubble_size": 50, + "series": ["region"], + "show_legend": True, + "size": 75, + "viz_type": "bubble", + "x": "year", + "x_axis_format": "SMART_DATE", + "x_axis_label": "Year", + "x_axis_showminmax": True, + "x_log_scale": True, + "x_ticks_layout": "45°", + "y": "country", + "y_axis_bounds": [0, 100], + "y_axis_format": "SMART_DATE", + "y_axis_label": "Year", + "y_axis_showminmax": False, + "y_log_scale": True, +} + +TARGET_FORM_DATA: dict[str, Any] = { + "adhoc_filters": [], + "color_scheme": "default", + "entity": "count", + "form_data_bak": SOURCE_FORM_DATA, + "logXAxis": True, + "logYAxis": True, + "max_bubble_size": 50, + "row_limit": 100, + "series": ["region"], + "show_legend": True, + "size": 75, + "truncateYAxis": True, + "viz_type": "bubble_v2", + "x": "year", + "xAxisFormat": "SMART_DATE", + "xAxisLabelRotation": 45, + "x_axis_label": "Year", + "x_axis_title_margin": 20, + "y": "country", + "y_axis_bounds": [0, 100], + "y_axis_format": "SMART_DATE", + "y_axis_label": "Year", + "y_axis_title_margin": 20, +} + + +def test_migration() -> None: + migrate_and_assert(MigrateBubbleChart, SOURCE_FORM_DATA, TARGET_FORM_DATA) diff --git a/tests/unit_tests/migrations/viz/nvd3_line_chart_to_echarts_test.py b/tests/unit_tests/migrations/viz/nvd3_line_chart_to_echarts_test.py new file mode 100644 index 0000000000..5999a90702 --- /dev/null +++ b/tests/unit_tests/migrations/viz/nvd3_line_chart_to_echarts_test.py @@ -0,0 +1,39 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
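+# Migration test for the NVD3 line chart → "echarts_timeseries_line". The
+# chart-specific form data is minimal; the bulk of the assertions comes from
+# the shared TIMESERIES_SOURCE/TARGET fixtures merged in by test_migration.
+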
+from typing import Any + +from superset.migrations.shared.migrate_viz import MigrateLineChart +from tests.unit_tests.migrations.viz.utils import ( + migrate_and_assert, + TIMESERIES_SOURCE_FORM_DATA, + TIMESERIES_TARGET_FORM_DATA, +) + +SOURCE_FORM_DATA: dict[str, Any] = { + "viz_type": "line", +} + +TARGET_FORM_DATA: dict[str, Any] = { + "form_data_bak": SOURCE_FORM_DATA, + "viz_type": "echarts_timeseries_line", +} + + +def test_migration() -> None: + SOURCE_FORM_DATA.update(TIMESERIES_SOURCE_FORM_DATA) + TARGET_FORM_DATA.update(TIMESERIES_TARGET_FORM_DATA) + migrate_and_assert(MigrateLineChart, SOURCE_FORM_DATA, TARGET_FORM_DATA) diff --git a/tests/unit_tests/migrations/viz/pivot_table_v1_v2_test.py b/tests/unit_tests/migrations/viz/pivot_table_v1_v2_test.py index 1e2229ca83..788fd14770 100644 --- a/tests/unit_tests/migrations/viz/pivot_table_v1_v2_test.py +++ b/tests/unit_tests/migrations/viz/pivot_table_v1_v2_test.py @@ -14,122 +14,40 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -import json +from typing import Any from superset.migrations.shared.migrate_viz import MigratePivotTable -from tests.unit_tests.conftest import with_feature_flags +from tests.unit_tests.migrations.viz.utils import migrate_and_assert -SOURCE_FORM_DATA = { - "adhoc_filters": [], +SOURCE_FORM_DATA: dict[str, Any] = { "any_other_key": "untouched", "columns": ["state"], "combine_metric": True, - "granularity_sqla": "ds", "groupby": ["name"], "number_format": "SMART_NUMBER", "pandas_aggfunc": "sum", "pivot_margins": True, - "time_range": "100 years ago : now", "timeseries_limit_metric": "count", "transpose_pivot": True, "viz_type": "pivot_table", } -TARGET_FORM_DATA = { - "adhoc_filters": [], +TARGET_FORM_DATA: dict[str, Any] = { "any_other_key": "untouched", "aggregateFunction": "Sum", "colTotals": True, "colSubTotals": True, "combineMetric": True, "form_data_bak": SOURCE_FORM_DATA, - "granularity_sqla": "ds", "groupbyColumns": ["state"], "groupbyRows": ["name"], "rowOrder": "value_z_to_a", "series_limit_metric": "count", - "time_range": "100 years ago : now", "transposePivot": True, "valueFormat": "SMART_NUMBER", "viz_type": "pivot_table_v2", } -@with_feature_flags(GENERIC_CHART_AXES=False) -def test_migration_without_generic_chart_axes() -> None: - source = SOURCE_FORM_DATA.copy() - target = TARGET_FORM_DATA.copy() - upgrade_downgrade(source, target) - - -@with_feature_flags(GENERIC_CHART_AXES=True) -def test_migration_with_generic_chart_axes() -> None: - source = SOURCE_FORM_DATA.copy() - target = TARGET_FORM_DATA.copy() - target["adhoc_filters"] = [ - { - "clause": "WHERE", - "comparator": "100 years ago : now", - "expressionType": "SIMPLE", - "operator": "TEMPORAL_RANGE", - "subject": "ds", - } - ] - target.pop("granularity_sqla") - target.pop("time_range") - upgrade_downgrade(source, target) - - -@with_feature_flags(GENERIC_CHART_AXES=True) -def test_custom_sql_time_column() -> None: - source = SOURCE_FORM_DATA.copy() - source["granularity_sqla"] = { - "expressionType": "SQL", - "label": "ds", - "sqlExpression": "sum(ds)", - } - target = TARGET_FORM_DATA.copy() - target["adhoc_filters"] = [ - { - "clause": "WHERE", - "comparator": None, - "expressionType": "SQL", - "operator": "TEMPORAL_RANGE", - "sqlExpression": "sum(ds)", - "subject": "ds", - } - ] - target["form_data_bak"] = source - target.pop("granularity_sqla") - target.pop("time_range") - upgrade_downgrade(source, target) - - -def upgrade_downgrade(source, 
target) -> None: - from superset.models.slice import Slice - - dumped_form_data = json.dumps(source) - - slc = Slice( - viz_type=MigratePivotTable.source_viz_type, - datasource_type="table", - params=dumped_form_data, - query_context=f'{{"form_data": {dumped_form_data}}}', - ) - - # upgrade - slc = MigratePivotTable.upgrade_slice(slc) - - # verify form_data - new_form_data = json.loads(slc.params) - assert new_form_data == target - assert new_form_data["form_data_bak"] == source - - # verify query_context - new_query_context = json.loads(slc.query_context) - assert new_query_context["form_data"]["viz_type"] == "pivot_table_v2" - - # downgrade - slc = MigratePivotTable.downgrade_slice(slc) - assert slc.viz_type == MigratePivotTable.source_viz_type - assert json.loads(slc.params) == source +def test_migration() -> None: + migrate_and_assert(MigratePivotTable, SOURCE_FORM_DATA, TARGET_FORM_DATA) diff --git a/tests/unit_tests/migrations/viz/time_related_fields_test.py b/tests/unit_tests/migrations/viz/time_related_fields_test.py new file mode 100644 index 0000000000..06fdf611ce --- /dev/null +++ b/tests/unit_tests/migrations/viz/time_related_fields_test.py @@ -0,0 +1,89 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
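+# Tests for the migration of time-related fields, exercised here through
+# MigratePivotTable: with GENERIC_CHART_AXES disabled, granularity_sqla and
+# time_range pass through unchanged; with it enabled, they are folded into a
+# TEMPORAL_RANGE adhoc filter (SIMPLE for a plain column, SQL for a custom
+# expression) and dropped from the target form data.
+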
+from typing import Any + +from superset.migrations.shared.migrate_viz import MigratePivotTable +from tests.unit_tests.conftest import with_feature_flags +from tests.unit_tests.migrations.viz.utils import migrate_and_assert + +SOURCE_FORM_DATA: dict[str, Any] = { + "granularity_sqla": "ds", + "time_range": "100 years ago : now", + "viz_type": "pivot_table", +} + +TARGET_FORM_DATA: dict[str, Any] = { + "form_data_bak": SOURCE_FORM_DATA, + "granularity_sqla": "ds", + "rowOrder": "value_z_to_a", + "time_range": "100 years ago : now", + "viz_type": "pivot_table_v2", +} + + +@with_feature_flags(GENERIC_CHART_AXES=False) +def test_migration_without_generic_chart_axes() -> None: + source = SOURCE_FORM_DATA.copy() + target = TARGET_FORM_DATA.copy() + upgrade_downgrade(source, target) + + +@with_feature_flags(GENERIC_CHART_AXES=True) +def test_migration_with_generic_chart_axes() -> None: + source = SOURCE_FORM_DATA.copy() + target = TARGET_FORM_DATA.copy() + target["adhoc_filters"] = [ + { + "clause": "WHERE", + "comparator": "100 years ago : now", + "expressionType": "SIMPLE", + "operator": "TEMPORAL_RANGE", + "subject": "ds", + } + ] + target.pop("granularity_sqla") + target.pop("time_range") + upgrade_downgrade(source, target) + + +@with_feature_flags(GENERIC_CHART_AXES=True) +def test_custom_sql_time_column() -> None: + source = SOURCE_FORM_DATA.copy() + source["granularity_sqla"] = { + "expressionType": "SQL", + "label": "ds", + "sqlExpression": "sum(ds)", + } + target = TARGET_FORM_DATA.copy() + target["adhoc_filters"] = [ + { + "clause": "WHERE", + "comparator": None, + "expressionType": "SQL", + "operator": "TEMPORAL_RANGE", + "sqlExpression": "sum(ds)", + "subject": "ds", + } + ] + target["form_data_bak"] = source + target.pop("granularity_sqla") + target.pop("time_range") + upgrade_downgrade(source, target) + + +def upgrade_downgrade(source, target) -> None: + migrate_and_assert(MigratePivotTable, source, target) diff --git a/tests/unit_tests/migrations/viz/utils.py b/tests/unit_tests/migrations/viz/utils.py new file mode 100644 index 0000000000..9da90c853f --- /dev/null +++ b/tests/unit_tests/migrations/viz/utils.py @@ -0,0 +1,96 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
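+# Shared fixtures and helpers for the viz-migration unit tests. The
+# TIMESERIES_* dicts capture the common NVD3 → ECharts timeseries field
+# mappings, and migrate_and_assert performs a full upgrade/downgrade round
+# trip on a Slice: it checks the migrated params, the "form_data_bak" backup,
+# the query_context viz_type, and that downgrading restores the source form
+# data exactly.
+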
+import json +from typing import Any + +from superset.migrations.shared.migrate_viz import MigrateViz + +TIMESERIES_SOURCE_FORM_DATA: dict[str, Any] = { + "bottom_margin": 20, + "comparison_type": "absolute", + "contribution": True, + "left_margin": 20, + "rich_tooltip": True, + "rolling_type": "sum", + "show_brush": "yes", + "show_controls": True, + "show_legend": True, + "show_markers": True, + "time_compare": "1 year", + "x_axis_label": "x", + "x_axis_format": "SMART_DATE", + "x_ticks_layout": "45°", + "y_axis_bounds": [0, 100], + "y_axis_format": "SMART_NUMBER", + "y_axis_label": "y", + "y_axis_showminmax": True, + "y_log_scale": True, +} + +TIMESERIES_TARGET_FORM_DATA: dict[str, Any] = { + "comparison_type": "difference", + "contributionMode": "row", + "logAxis": True, + "markerEnabled": True, + "rich_tooltip": True, + "rolling_type": "sum", + "show_extra_controls": True, + "show_legend": True, + "time_compare": ["1 year ago"], + "truncateYAxis": True, + "x_axis_title_margin": 20, + "y_axis_title_margin": 20, + "x_axis_title": "x", + "x_axis_time_format": "SMART_DATE", + "xAxisLabelRotation": 45, + "y_axis_bounds": [0, 100], + "y_axis_format": "SMART_NUMBER", + "y_axis_title": "y", + "zoomable": True, +} + + +def migrate_and_assert( + cls: type[MigrateViz], source: dict[str, Any], target: dict[str, Any] +) -> None: + from superset.models.slice import Slice + + dumped_form_data = json.dumps(source) + + slc = Slice( + viz_type=cls.source_viz_type, + datasource_type="table", + params=dumped_form_data, + query_context=f'{{"form_data": {dumped_form_data}}}', + ) + + # upgrade + cls.upgrade_slice(slc) + + # verify form_data + new_form_data = json.loads(slc.params) + assert new_form_data == target + assert new_form_data["form_data_bak"] == source + + # verify query_context + new_query_context = json.loads(slc.query_context) + assert new_query_context["form_data"]["viz_type"] == cls.target_viz_type + + # downgrade + cls.downgrade_slice(slc) + assert slc.viz_type == cls.source_viz_type + assert json.loads(slc.params) == source diff --git a/tests/unit_tests/reports/notifications/slack_tests.py b/tests/unit_tests/reports/notifications/slack_tests.py new file mode 100644 index 0000000000..0a5e9baa46 --- /dev/null +++ b/tests/unit_tests/reports/notifications/slack_tests.py @@ -0,0 +1,58 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
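+# Tests for SlackNotification._get_channel: a recipient_config_json "target"
+# listing several channels (separated by commas or semicolons, with stray
+# whitespace) should normalize to a single comma-delimited channel string.
+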
+import pandas as pd + + +def test_get_channel_with_multi_recipients() -> None: + """ + Test the _get_channel function to ensure it will return a string + with recipients separated by commas without interstitial spacing + """ + from superset.reports.models import ReportRecipients, ReportRecipientType + from superset.reports.notifications.base import NotificationContent + from superset.reports.notifications.slack import SlackNotification + + content = NotificationContent( + name="test alert", + header_data={ + "notification_format": "PNG", + "notification_type": "Alert", + "owners": [1], + "notification_source": None, + "chart_id": None, + "dashboard_id": None, + }, + embedded_data=pd.DataFrame( + { + "A": [1, 2, 3], + "B": [4, 5, 6], + "C": ["111", "222", '<a href="http://www.example.com">333</a>'], + } + ), + description='<p>This is <a href="#">a test</a> alert</p><br />', + ) + slack_notification = SlackNotification( + recipient=ReportRecipients( + type=ReportRecipientType.SLACK, + recipient_config_json='{"target": "some_channel; second_channel, third_channel"}', + ), + content=content, + ) + + result = slack_notification._get_channel() + + assert result == "some_channel,second_channel,third_channel" diff --git a/tests/unit_tests/tags/commands/create_test.py b/tests/unit_tests/tags/commands/create_test.py index d4143bd4ae..ca31e44566 100644 --- a/tests/unit_tests/tags/commands/create_test.py +++ b/tests/unit_tests/tags/commands/create_test.py @@ -49,13 +49,13 @@ def session_with_data(session: Session): def test_create_command_success(session_with_data: Session, mocker: MockFixture): + from superset.commands.tag.create import CreateCustomTagWithRelationshipsCommand from superset.connectors.sqla.models import SqlaTable from superset.daos.tag import TagDAO from superset.models.dashboard import Dashboard from superset.models.slice import Slice from superset.models.sql_lab import Query, SavedQuery - from superset.tags.commands.create import CreateCustomTagWithRelationshipsCommand - from superset.tags.models import ObjectTypes, TaggedObject + from superset.tags.models import ObjectType, TaggedObject # Define a list of objects to tag query = session_with_data.query(SavedQuery).first() @@ -69,9 +69,9 @@ def test_create_command_success(session_with_data: Session, mocker: MockFixture) mocker.patch("superset.daos.query.SavedQueryDAO.find_by_id", return_value=query) objects_to_tag = [ - (ObjectTypes.query, query.id), - (ObjectTypes.chart, chart.id), - (ObjectTypes.dashboard, dashboard.id), + (ObjectType.query, query.id), + (ObjectType.chart, chart.id), + (ObjectType.dashboard, dashboard.id), ] CreateCustomTagWithRelationshipsCommand( @@ -92,13 +92,13 @@ def test_create_command_success(session_with_data: Session, mocker: MockFixture) def test_create_command_success_clear(session_with_data: Session, mocker: MockFixture): + from superset.commands.tag.create import CreateCustomTagWithRelationshipsCommand from superset.connectors.sqla.models import SqlaTable from superset.daos.tag import TagDAO from superset.models.dashboard import Dashboard from superset.models.slice import Slice from superset.models.sql_lab import Query, SavedQuery - from superset.tags.commands.create import CreateCustomTagWithRelationshipsCommand - from superset.tags.models import ObjectTypes, TaggedObject + from superset.tags.models import ObjectType, TaggedObject # Define a list of objects to tag query = session_with_data.query(SavedQuery).first() @@ -112,9 +112,9 @@ def test_create_command_success_clear(session_with_data: Session, 
mocker: MockFi mocker.patch("superset.daos.query.SavedQueryDAO.find_by_id", return_value=query) objects_to_tag = [ - (ObjectTypes.query, query.id), - (ObjectTypes.chart, chart.id), - (ObjectTypes.dashboard, dashboard.id), + (ObjectType.query, query.id), + (ObjectType.chart, chart.id), + (ObjectType.dashboard, dashboard.id), ] CreateCustomTagWithRelationshipsCommand( diff --git a/tests/unit_tests/tags/commands/update_test.py b/tests/unit_tests/tags/commands/update_test.py index 84007fbb68..47ef16e4e7 100644 --- a/tests/unit_tests/tags/commands/update_test.py +++ b/tests/unit_tests/tags/commands/update_test.py @@ -58,10 +58,10 @@ def session_with_data(session: Session): def test_update_command_success(session_with_data: Session, mocker: MockFixture): + from superset.commands.tag.update import UpdateTagCommand from superset.daos.tag import TagDAO from superset.models.dashboard import Dashboard - from superset.tags.commands.update import UpdateTagCommand - from superset.tags.models import ObjectTypes, TaggedObject + from superset.tags.models import ObjectType, TaggedObject dashboard = session_with_data.query(Dashboard).first() mocker.patch( @@ -72,7 +72,7 @@ def test_update_command_success(session_with_data: Session, mocker: MockFixture) ) objects_to_tag = [ - (ObjectTypes.dashboard, dashboard.id), + (ObjectType.dashboard, dashboard.id), ] tag_to_update = TagDAO.find_by_name("test_name") @@ -94,12 +94,12 @@ def test_update_command_success(session_with_data: Session, mocker: MockFixture) def test_update_command_success_duplicates( session_with_data: Session, mocker: MockFixture ): + from superset.commands.tag.create import CreateCustomTagWithRelationshipsCommand + from superset.commands.tag.update import UpdateTagCommand from superset.daos.tag import TagDAO from superset.models.dashboard import Dashboard from superset.models.slice import Slice - from superset.tags.commands.create import CreateCustomTagWithRelationshipsCommand - from superset.tags.commands.update import UpdateTagCommand - from superset.tags.models import ObjectTypes, TaggedObject + from superset.tags.models import ObjectType, TaggedObject dashboard = session_with_data.query(Dashboard).first() chart = session_with_data.query(Slice).first() @@ -113,7 +113,7 @@ def test_update_command_success_duplicates( ) objects_to_tag = [ - (ObjectTypes.dashboard, dashboard.id), + (ObjectType.dashboard, dashboard.id), ] CreateCustomTagWithRelationshipsCommand( @@ -123,7 +123,7 @@ def test_update_command_success_duplicates( tag_to_update = TagDAO.find_by_name("test_tag") objects_to_tag = [ - (ObjectTypes.chart, chart.id), + (ObjectType.chart, chart.id), ] changed_model = UpdateTagCommand( tag_to_update.id, @@ -144,18 +144,18 @@ def test_update_command_success_duplicates( def test_update_command_failed_validation( session_with_data: Session, mocker: MockFixture ): + from superset.commands.tag.create import CreateCustomTagWithRelationshipsCommand + from superset.commands.tag.exceptions import TagInvalidError + from superset.commands.tag.update import UpdateTagCommand from superset.daos.tag import TagDAO from superset.models.dashboard import Dashboard from superset.models.slice import Slice - from superset.tags.commands.create import CreateCustomTagWithRelationshipsCommand - from superset.tags.commands.exceptions import TagInvalidError - from superset.tags.commands.update import UpdateTagCommand - from superset.tags.models import ObjectTypes + from superset.tags.models import ObjectType dashboard = session_with_data.query(Dashboard).first() chart = 
session_with_data.query(Slice).first() objects_to_tag = [ - (ObjectTypes.chart, chart.id), + (ObjectType.chart, chart.id), ] mocker.patch( diff --git a/tests/unit_tests/tasks/test_async_queries.py b/tests/unit_tests/tasks/test_async_queries.py index 5787bbdc8b..1e14d742da 100644 --- a/tests/unit_tests/tasks/test_async_queries.py +++ b/tests/unit_tests/tasks/test_async_queries.py @@ -3,7 +3,7 @@ from unittest import mock import pytest from flask_babel import lazy_gettext as _ -from superset.charts.commands.exceptions import ChartDataQueryFailedError +from superset.commands.chart.exceptions import ChartDataQueryFailedError @mock.patch("superset.tasks.async_queries.security_manager") diff --git a/tests/unit_tests/utils/date_parser_tests.py b/tests/unit_tests/utils/date_parser_tests.py index a2ec20901a..0311377237 100644 --- a/tests/unit_tests/utils/date_parser_tests.py +++ b/tests/unit_tests/utils/date_parser_tests.py @@ -22,7 +22,7 @@ from unittest.mock import Mock, patch import pytest from dateutil.relativedelta import relativedelta -from superset.charts.commands.exceptions import ( +from superset.commands.chart.exceptions import ( TimeRangeAmbiguousError, TimeRangeParseFailError, )