Merge branch 'master' into fix_upload_folder

Commit cf639bbe9d by Sebastian Liebscher, 2023-10-06 07:25:37 +02:00 (committed by GitHub)
365 changed files with 10094 additions and 4564 deletions

.github/CODEOWNERS (vendored)
View File

@ -24,6 +24,6 @@
/superset-frontend/cypress-base/ @jinghua-qa @geido @eschutho @rusackas @betodealmeida
# Notify PMC members of changes to Github Actions
# Notify PMC members of changes to GitHub Actions
/.github/ @villebro @geido @eschutho @rusackas @betodealmeida @nytai @mistercrunch @craig-rueda @john-bodley @kgabryje

View File

@ -3,7 +3,7 @@ name: SIP
about: "Superset Improvement Proposal. See [here](https://github.com/apache/superset/issues/5602) for details."
labels: sip
title: "[SIP] Your Title Here (do not add SIP number)"
asignees: "apache/superset-committers"
assignees: "apache/superset-committers"
---
*Please make sure you are familiar with the SIP process documented*

View File

@ -1,5 +1,5 @@
# .github/workflows/chromatic.yml
# seee https://www.chromatic.com/docs/github-actions
# see https://www.chromatic.com/docs/github-actions
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with

View File

@ -174,7 +174,7 @@ cat << EOF
-c Move discovered coverage reports to the trash
-z FILE Upload specified file directly to Codecov and bypass all report generation.
This is inteded to be used only with a pre-formatted Codecov report and is not
This is intended to be used only with a pre-formatted Codecov report and is not
expected to work under any other circumstances.
-Z Exit with 1 if not successful. Default will Exit with 0
@ -1152,7 +1152,7 @@ fi
if [ "$ft_search" = "1" ];
then
# detect bower comoponents location
# detect bower components location
bower_components="bower_components"
bower_rc=$(cd "$git_root" && cat .bowerrc 2>/dev/null || echo "")
if [ "$bower_rc" != "" ];

View File

@ -29,6 +29,10 @@ jobs:
persist-credentials: false
submodules: recursive
ref: ${{ github.ref }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- shell: bash
env:
DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}

View File

@ -35,7 +35,10 @@ jobs:
uses: actions/checkout@v3
with:
persist-credentials: false
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- shell: bash
env:
DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}
@ -49,7 +52,14 @@ jobs:
mkdir -p ./build
echo ${{ github.sha }} > ./build/SHA
echo ${{ github.event.pull_request.number }} > ./build/PR-NUM
DOCKER_BUILDKIT=1 docker build --target ci -t ${{ github.sha }} -t "pr-${{ github.event.pull_request.number }}" .
docker buildx build --target ci \
--load \
--cache-from=type=local,src=/tmp/superset \
-t ${{ github.sha }} \
-t "pr-${{ github.event.pull_request.number }}" \
--platform linux/amd64 \
--label "build_actor=${GITHUB_ACTOR}" \
.
docker save ${{ github.sha }} | gzip > ./build/${{ github.sha }}.tar.gz
- name: Upload build artifacts
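
As an aside, the gzipped image produced by the steps above can be loaded back into a local Docker daemon for debugging; a minimal sketch, assuming you have downloaded the artifact and substitute the actual commit SHA:
```
gunzip -c ./build/<sha>.tar.gz | docker load
docker image ls | grep <sha>
```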

View File

@ -43,13 +43,50 @@ cat<<EOF
- ${REPO_NAME}:${LATEST_TAG}
EOF
if [ -z "${DOCKERHUB_TOKEN}" ]; then
# Skip if secrets aren't populated -- they're only visible for actions running in the repo (not on forks)
echo "Skipping Docker push"
# By default load it back
DOCKER_ARGS="--load"
ARCHITECTURE_FOR_BUILD="linux/amd64 linux/arm64"
else
# Login and push
docker logout
docker login --username "${DOCKERHUB_USER}" --password "${DOCKERHUB_TOKEN}"
DOCKER_ARGS="--push"
ARCHITECTURE_FOR_BUILD="linux/amd64,linux/arm64"
fi
set -x
#
# Build the dev image
#
docker buildx build --target dev \
$DOCKER_ARGS \
--cache-from=type=registry,ref=apache/superset:master-dev \
--cache-from=type=local,src=/tmp/superset \
--cache-to=type=local,ignore-error=true,dest=/tmp/superset \
-t "${REPO_NAME}:${SHA}-dev" \
-t "${REPO_NAME}:${REFSPEC}-dev" \
-t "${REPO_NAME}:${LATEST_TAG}-dev" \
--platform linux/amd64 \
--label "sha=${SHA}" \
--label "built_at=$(date)" \
--label "target=dev" \
--label "build_actor=${GITHUB_ACTOR}" \
.
#
# Build the "lean" image
#
DOCKER_BUILDKIT=1 docker build --target lean \
docker buildx build --target lean \
$DOCKER_ARGS \
--cache-from=type=local,src=/tmp/superset \
--cache-to=type=local,ignore-error=true,dest=/tmp/superset \
-t "${REPO_NAME}:${SHA}" \
-t "${REPO_NAME}:${REFSPEC}" \
-t "${REPO_NAME}:${LATEST_TAG}" \
--platform linux/amd64 \
--label "sha=${SHA}" \
--label "built_at=$(date)" \
--label "target=lean" \
@ -59,60 +96,48 @@ DOCKER_BUILDKIT=1 docker build --target lean \
#
# Build the "lean310" image
#
DOCKER_BUILDKIT=1 docker build --target lean \
docker buildx build --target lean \
$DOCKER_ARGS \
--cache-from=type=local,src=/tmp/superset \
--cache-to=type=local,ignore-error=true,dest=/tmp/superset \
-t "${REPO_NAME}:${SHA}-py310" \
-t "${REPO_NAME}:${REFSPEC}-py310" \
-t "${REPO_NAME}:${LATEST_TAG}-py310" \
--platform linux/amd64 \
--build-arg PY_VER="3.10-slim-bookworm"\
--label "sha=${SHA}" \
--label "built_at=$(date)" \
--label "target=lean310" \
--label "build_actor=${GITHUB_ACTOR}" \
.
for BUILD_PLATFORM in $ARCHITECTURE_FOR_BUILD; do
#
# Build the "websocket" image
#
DOCKER_BUILDKIT=1 docker build \
docker buildx build \
$DOCKER_ARGS \
--cache-from=type=registry,ref=apache/superset:master-websocket \
-t "${REPO_NAME}:${SHA}-websocket" \
-t "${REPO_NAME}:${REFSPEC}-websocket" \
-t "${REPO_NAME}:${LATEST_TAG}-websocket" \
--platform ${BUILD_PLATFORM} \
--label "sha=${SHA}" \
--label "built_at=$(date)" \
--label "target=websocket" \
--label "build_actor=${GITHUB_ACTOR}" \
superset-websocket
#
# Build the dev image
#
DOCKER_BUILDKIT=1 docker build --target dev \
-t "${REPO_NAME}:${SHA}-dev" \
-t "${REPO_NAME}:${REFSPEC}-dev" \
-t "${REPO_NAME}:${LATEST_TAG}-dev" \
--label "sha=${SHA}" \
--label "built_at=$(date)" \
--label "target=dev" \
--label "build_actor=${GITHUB_ACTOR}" \
.
#
# Build the dockerize image
#
DOCKER_BUILDKIT=1 docker build \
docker buildx build \
$DOCKER_ARGS \
--cache-from=type=registry,ref=apache/superset:dockerize \
-t "${REPO_NAME}:dockerize" \
--platform ${BUILD_PLATFORM} \
--label "sha=${SHA}" \
--label "built_at=$(date)" \
--label "build_actor=${GITHUB_ACTOR}" \
-f dockerize.Dockerfile \
.
if [ -z "${DOCKERHUB_TOKEN}" ]; then
# Skip if secrets aren't populated -- they're only visible for actions running in the repo (not on forks)
echo "Skipping Docker push"
else
# Login and push
docker logout
docker login --username "${DOCKERHUB_USER}" --password "${DOCKERHUB_TOKEN}"
docker push --all-tags "${REPO_NAME}"
fi
done
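
If the push path ran, the resulting multi-arch manifests can be sanity-checked with `docker buildx imagetools`; a sketch using the tags assigned above:
```
docker buildx imagetools inspect "${REPO_NAME}:${LATEST_TAG}"
docker buildx imagetools inspect "${REPO_NAME}:${LATEST_TAG}-websocket"
```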

View File

@ -1,4 +1,4 @@
name: Prefer Typescript
name: Prefer TypeScript
on:
push:
@ -9,7 +9,7 @@ on:
jobs:
prefer_typescript:
if: github.ref == 'refs/heads/master' && github.event_name == 'pull_request'
name: Prefer Typescript
name: Prefer TypeScript
runs-on: ubuntu-latest
permissions:
contents: read

.gitignore (vendored)
View File

@ -57,6 +57,8 @@ superset.egg-info/
superset/bin/supersetc
tmp
rat-results.txt
superset/app/
superset-websocket/config.json
# Node.js, webpack artifacts, storybook
*.entry.js

File diff suppressed because it is too large

View File

@ -138,7 +138,7 @@ The best way to report a bug is to file an issue on GitHub. Please include:
When posting Python stack traces, please quote them using
[Markdown blocks](https://help.github.com/articles/creating-and-highlighting-code-blocks/).
_Please note that feature requests opened as Github Issues will be moved to Discussions._
_Please note that feature requests opened as GitHub Issues will be moved to Discussions._
### Submit Ideas or Feature Requests
@ -170,7 +170,7 @@ articles. See [Documentation](#documentation) for more details.
### Add Translations
If you are proficient in a non-English language, you can help translate
text strings from Superset's UI. You can jump in to the existing
text strings from Superset's UI. You can jump into the existing
language dictionaries at
`superset/translations/<language_code>/LC_MESSAGES/messages.po`, or
even create a dictionary for a new language altogether.
@ -329,16 +329,16 @@ Triaging goals
First, add **Category labels (a.k.a. hash labels)**. Every issue/PR must have one hash label (except spam entries). Labels that begin with `#` define the issue/PR type:
| Label | for Issue | for PR |
| --------------- | --------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------- |
| `#bug` | Bug report | Bug fix |
| `#code-quality` | Describe problem with code, architecture or productivity | Refactor, tests, tooling |
| `#feature` | New feature request | New feature implementation |
| `#refine` | Propose improvement that does not provide new features and is also not a bug fix nor refactor, such as adjust padding, refine UI style. | Implementation of improvement that does not provide new features and is also not a bug fix nor refactor, such as adjust padding, refine UI style. |
| `#doc` | Documentation | Documentation |
| `#question` | Troubleshooting: Installation, Running locally, Ask how to do something. Can be changed to `#bug` later. | N/A |
| `#SIP` | Superset Improvement Proposal | N/A |
| `#ASF` | Tasks related to Apache Software Foundation policy | Tasks related to Apache Software Foundation policy |
| Label | for Issue | for PR |
| --------------- | ----------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------- |
| `#bug` | Bug report | Bug fix |
| `#code-quality` | Describe problem with code, architecture or productivity | Refactor, tests, tooling |
| `#feature` | New feature request | New feature implementation |
| `#refine` | Propose improvement such as adjusting padding or refining UI style, excluding new features, bug fixes, and refactoring. | Implementation of improvement such as adjusting padding or refining UI style, excluding new features, bug fixes, and refactoring. |
| `#doc` | Documentation | Documentation |
| `#question` | Troubleshooting: Installation, Running locally, Ask how to do something. Can be changed to `#bug` later. | N/A |
| `#SIP` | Superset Improvement Proposal | N/A |
| `#ASF` | Tasks related to Apache Software Foundation policy | Tasks related to Apache Software Foundation policy |
Then add other types of labels as appropriate.
@ -720,7 +720,7 @@ We use [Pylint](https://pylint.org/) for linting which can be invoked via:
tox -e pylint
```
In terms of best practices please avoid blanket disablement of Pylint messages globally (via `.pylintrc`) or top-level within the file header, albeit there being a few exceptions. Disablement should occur inline as it prevents masking issues and provides context as to why said message is disabled.
In terms of best practices please avoid blanket disabling of Pylint messages globally (via `.pylintrc`) or top-level within the file header, albeit there being a few exceptions. Disabling should occur inline as it prevents masking issues and provides context as to why said message is disabled.
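
An inline disable of the kind recommended here looks like the following sketch (illustrative message name):
```
user_expression = "1 + 1"  # vetted upstream in this example
result = eval(user_expression)  # pylint: disable=eval-used
```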
Additionally, the Python code is auto-formatted using [Black](https://github.com/python/black) which
is configured as a pre-commit hook. There are also numerous [editor integrations](https://black.readthedocs.io/en/stable/integrations/editors.html)
@ -1211,7 +1211,7 @@ To contribute a plugin to Superset, your plugin must meet the following criteria
- The plugin should contain sufficient unit/e2e tests
- The plugin should use appropriate namespacing, e.g. a folder name of `plugin-chart-whatever` and a package name of `@superset-ui/plugin-chart-whatever`
- The plugin should use theme variables via Emotion, as passed in by the ThemeProvider
- The plugin should provide adequate error handling (no data returned, malformatted data, invalid controls, etc.)
- The plugin should provide adequate error handling (no data returned, malformed data, invalid controls, etc.)
- The plugin should contain documentation in the form of a populated `README.md` file
- The plugin should have a meaningful and unique icon
- Above all else, the plugin should come with a _commitment to maintenance_ from the original author(s)

View File

@ -22,9 +22,15 @@ ARG PY_VER=3.9-slim-bookworm
# if BUILDPLATFORM is null, set it to 'amd64' (or leave as is otherwise).
ARG BUILDPLATFORM=${BUILDPLATFORM:-amd64}
FROM --platform=${BUILDPLATFORM} node:16-slim AS superset-node
FROM --platform=${BUILDPLATFORM} node:16-bookworm-slim AS superset-node
ARG NPM_BUILD_CMD="build"
RUN apt-get update -qq \
&& apt-get install -yqq --no-install-recommends \
build-essential \
python3
ENV BUILD_CMD=${NPM_BUILD_CMD} \
PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true
# NPM ci first, as to NOT invalidate previous steps except for when package.json changes
@ -33,8 +39,9 @@ WORKDIR /app/superset-frontend
RUN --mount=type=bind,target=/frontend-mem-nag.sh,src=./docker/frontend-mem-nag.sh \
/frontend-mem-nag.sh
COPY superset-frontend/package*.json ./
RUN npm ci
RUN --mount=type=bind,target=./package.json,src=./superset-frontend/package.json \
--mount=type=bind,target=./package-lock.json,src=./superset-frontend/package-lock.json \
npm ci
COPY ./superset-frontend ./
# This seems to be the most expensive step
@ -54,10 +61,11 @@ ENV LANG=C.UTF-8 \
SUPERSET_HOME="/app/superset_home" \
SUPERSET_PORT=8088
RUN mkdir -p ${PYTHONPATH} superset/static superset-frontend apache_superset.egg-info requirements \
RUN --mount=target=/var/lib/apt/lists,type=cache \
--mount=target=/var/cache/apt,type=cache \
mkdir -p ${PYTHONPATH} superset/static superset-frontend apache_superset.egg-info requirements \
&& useradd --user-group -d ${SUPERSET_HOME} -m --no-log-init --shell /bin/bash superset \
&& apt-get update -q \
&& apt-get install -yq --no-install-recommends \
&& apt-get update -qq && apt-get install -yqq --no-install-recommends \
build-essential \
curl \
default-libmysqlclient-dev \
@ -66,27 +74,30 @@ RUN mkdir -p ${PYTHONPATH} superset/static superset-frontend apache_superset.egg
libpq-dev \
libecpg-dev \
libldap2-dev \
&& apt-get autoremove -yqq --purge && rm -rf /var/lib/apt/lists/* /var/[log,tmp]/* /tmp/* && apt-get clean \
&& touch superset/static/version_info.json \
&& chown -R superset:superset ./*
COPY --chown=superset:superset ./requirements/*.txt requirements/
COPY --chown=superset:superset setup.py MANIFEST.in README.md ./
# setup.py uses the version information in package.json
COPY --chown=superset:superset superset-frontend/package.json superset-frontend/
RUN pip install --no-cache-dir -r requirements/local.txt
RUN --mount=type=bind,target=./requirements/local.txt,src=./requirements/local.txt \
--mount=type=bind,target=./requirements/development.txt,src=./requirements/development.txt \
--mount=type=bind,target=./requirements/base.txt,src=./requirements/base.txt \
--mount=type=cache,target=/root/.cache/pip \
pip install -r requirements/local.txt
COPY --chown=superset:superset --from=superset-node /app/superset/static/assets superset/static/assets
## Lastly, let's install superset itself
COPY --chown=superset:superset superset superset
RUN pip install --no-cache-dir -e . \
RUN --mount=type=cache,target=/root/.cache/pip \
pip install -e . \
&& flask fab babel-compile --target superset/translations \
&& chown -R superset:superset superset/translations
COPY --chmod=755 ./docker/run-server.sh /usr/bin/
USER superset
HEALTHCHECK CMD curl -f "http://localhost:$SUPERSET_PORT/health"
HEALTHCHECK CMD curl -f "http://localhost:${SUPERSET_PORT}/health"
EXPOSE ${SUPERSET_PORT}
@ -96,13 +107,14 @@ CMD ["/usr/bin/run-server.sh"]
# Dev image...
######################################################################
FROM lean AS dev
ARG GECKODRIVER_VERSION=v0.32.0 \
FIREFOX_VERSION=106.0.3
ARG GECKODRIVER_VERSION=v0.33.0 \
FIREFOX_VERSION=117.0.1
USER root
RUN apt-get update -q \
&& apt-get install -yq --no-install-recommends \
RUN --mount=target=/var/lib/apt/lists,type=cache \
--mount=target=/var/cache/apt,type=cache \
apt-get install -yqq --no-install-recommends \
libnss3 \
libdbus-glib-1-2 \
libgtk-3-0 \
@ -111,14 +123,16 @@ RUN apt-get update -q \
libxtst6 \
wget \
# Install GeckoDriver WebDriver
&& wget https://github.com/mozilla/geckodriver/releases/download/${GECKODRIVER_VERSION}/geckodriver-${GECKODRIVER_VERSION}-linux64.tar.gz -O - | tar xfz - -C /usr/local/bin \
&& wget -q https://github.com/mozilla/geckodriver/releases/download/${GECKODRIVER_VERSION}/geckodriver-${GECKODRIVER_VERSION}-linux64.tar.gz -O - | tar xfz - -C /usr/local/bin \
# Install Firefox
&& wget https://download-installer.cdn.mozilla.net/pub/firefox/releases/${FIREFOX_VERSION}/linux-x86_64/en-US/firefox-${FIREFOX_VERSION}.tar.bz2 -O - | tar xfj - -C /opt \
&& wget -q https://download-installer.cdn.mozilla.net/pub/firefox/releases/${FIREFOX_VERSION}/linux-x86_64/en-US/firefox-${FIREFOX_VERSION}.tar.bz2 -O - | tar xfj - -C /opt \
&& ln -s /opt/firefox/firefox /usr/local/bin/firefox \
&& apt-get autoremove -yqq --purge wget && rm -rf /var/lib/apt/lists/* /var/[log,tmp]/* /tmp/* && apt-get clean
&& apt-get autoremove -yqq --purge wget && rm -rf /var/[log,tmp]/* /tmp/*
# Cache everything for dev purposes...
RUN pip install --no-cache-dir -r requirements/docker.txt
RUN --mount=type=bind,target=./requirements/base.txt,src=./requirements/base.txt \
--mount=type=bind,target=./requirements/docker.txt,src=./requirements/docker.txt \
--mount=type=cache,target=/root/.cache/pip \
pip install -r requirements/docker.txt
USER superset
######################################################################
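
To exercise one of the stages above locally, a build along these lines should work (a sketch, assuming a BuildKit-enabled Docker and the repository root as the build context):
```
docker buildx build --target dev -t superset:dev-local --load .
```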

View File

@ -238,7 +238,7 @@ Similar to `cherrytree`, the change log script requires a github token, either a
#### Initial release (e.g. 1.5.0)
When generating the changelog for an initial minor relese, you should compare with
When generating the changelog for an initial minor release, you should compare with
the previous release (in the example, the previous release branch is `1.4`, so remember to
update it accordingly):
@ -343,7 +343,7 @@ To build and run the recently created tarball **from SVN**:
# login using admin/admin
```
## Create a release on Github
## Create a release on GitHub
After submitting the tag and testing the release candidate, follow the steps [here](https://docs.github.com/en/repositories/releasing-projects-on-github/managing-releases-in-a-repository) to create the release on GitHub. Use the vote email text as the content for the release description. Make sure to check the "This is a pre-release" checkbox for release candidates. You can check previous releases if you need an example.

View File

@ -28,11 +28,11 @@ Superset 2.0 is a big step forward. This release cleans up many legacy code path
## Developer Experience
- Addition of a statsd guage metric for Slack and email notifications for increased visibility into errors around alerts / reports ([#20158](https://github.com/apache/superset/pull/20158))
- Addition of a statsd gauge metric for Slack and email notifications for increased visibility into errors around alerts / reports ([#20158](https://github.com/apache/superset/pull/20158))
- Helm chart now supports resource limits and requests for each component ([#20052](https://github.com/apache/superset/pull/20052))
- New Github workflow to test Storybook Netlify instance nightly ([#19852](https://github.com/apache/superset/pull/19852))
- New GitHub workflow to test Storybook Netlify instance nightly ([#19852](https://github.com/apache/superset/pull/19852))
- Minimum requirement for Superset is now Python 3.8 ([#19017](https://github.com/apache/superset/pull/19017))

View File

@ -59,7 +59,7 @@ under the License.
- [#19898](https://github.com/apache/superset/pull/19898) feat: When editing the label/title in the Metrics popover, hitting Enter should save what you've typed (@diegomedina248)
- [#16493](https://github.com/apache/superset/pull/16493) feat(plugin-chart-echarts): [feature-parity] support extra control for the area chart V2 (@stephenLYZ)
- [#19855](https://github.com/apache/superset/pull/19855) feat(explore): Frontend implementation of dataset creation from infobox (@lyndsiWilliams)
- [#20165](https://github.com/apache/superset/pull/20165) feat: add modfied col and timezone info to schedule col (@pkdotson)
- [#20165](https://github.com/apache/superset/pull/20165) feat: add modified col and timezone info to schedule col (@pkdotson)
- [#20144](https://github.com/apache/superset/pull/20144) feat: showing results pane in dashboard (@zhaoyongjie)
- [#20242](https://github.com/apache/superset/pull/20242) feat: derived metrics use different line style (@zhaoyongjie)
- [#20010](https://github.com/apache/superset/pull/20010) feat: standardized form_data (@zhaoyongjie)
@ -115,7 +115,7 @@ under the License.
- [#19558](https://github.com/apache/superset/pull/19558) feat(explore): Redesign of Run/Save buttons (@kgabryje)
- [#19650](https://github.com/apache/superset/pull/19650) feat(embedded): API get embedded dashboard config by uuid (@lilykuang)
- [#19310](https://github.com/apache/superset/pull/19310) feat(CRUD): add new empty state (@stephenLYZ)
- [#19622](https://github.com/apache/superset/pull/19622) feat(plugin-chart-echarts): add aggregate total for the Pie/Donuct chart (@stephenLYZ)
- [#19622](https://github.com/apache/superset/pull/19622) feat(plugin-chart-echarts): add aggregate total for the Pie/Donut chart (@stephenLYZ)
- [#19314](https://github.com/apache/superset/pull/19314) feat: Move Database Import option into DB Connection modal (@lyndsiWilliams)
- [#19434](https://github.com/apache/superset/pull/19434) feat: deprecate old API and create new API for dashes created by me (@dpgaspar)
- [#19482](https://github.com/apache/superset/pull/19482) feat: add success toast to alerts and reports (@pkdotson)
@ -181,7 +181,7 @@ under the License.
- [#20204](https://github.com/apache/superset/pull/20204) fix: Fixes issue where results panel height was incorrect [sc-49045] (@eric-briscoe)
- [#20235](https://github.com/apache/superset/pull/20235) fix: Box Plot Chart throws an error when the average (AVG) / SUM is being calculated on the Metrics (@diegomedina248)
- [#20088](https://github.com/apache/superset/pull/20088) fix: datatype tracking issue on virtual dataset (@codemaster08240328)
- [#20220](https://github.com/apache/superset/pull/20220) fix: dashbaord unable to refresh (@zhaoyongjie)
- [#20220](https://github.com/apache/superset/pull/20220) fix: dashboard unable to refresh (@zhaoyongjie)
- [#20228](https://github.com/apache/superset/pull/20228) fix: failed samples should throw exception (@zhaoyongjie)
- [#20203](https://github.com/apache/superset/pull/20203) fix: move columns to datasource object for bootstrap data (@hughhhh)
- [#20151](https://github.com/apache/superset/pull/20151) fix(csv): Ensure df_to_escaped_csv does not coerce integer columns to float (@john-bodley)
@ -191,7 +191,7 @@ under the License.
- [#20206](https://github.com/apache/superset/pull/20206) fix(sql lab): SQL Lab Compile Query Delay (@diegomedina248)
- [#20201](https://github.com/apache/superset/pull/20201) fix: unable to set destroyOnClose on ModalTrigger (@zhaoyongjie)
- [#20186](https://github.com/apache/superset/pull/20186) fix(db): make to allow to show/hide the password when only creating (@prosdev0107)
- [#20127](https://github.com/apache/superset/pull/20127) fix(database): retrival of tables and views from schema for exasol backend (@Nicoretti)
- [#20127](https://github.com/apache/superset/pull/20127) fix(database): retrieval of tables and views from schema for exasol backend (@Nicoretti)
- [#19899](https://github.com/apache/superset/pull/19899) fix: always create parameter json field (@pkdotson)
- [#20173](https://github.com/apache/superset/pull/20173) fix: avoid while cycle in computeMaxFontSize for big Number run forever when css rule applied (@diegomedina248)
- [#20086](https://github.com/apache/superset/pull/20086) fix(css): transparent linear gradient not working in safari (@stephenLYZ)
@ -298,7 +298,7 @@ under the License.
- [#19570](https://github.com/apache/superset/pull/19570) fix: sqloxide optional (@betodealmeida)
- [#19397](https://github.com/apache/superset/pull/19397) fix: weight tooltip issue (@codemaster08240328)
- [#19313](https://github.com/apache/superset/pull/19313) fix(sql lab): increase the size of the action icons in the history tab (@diegomedina248)
- [#19039](https://github.com/apache/superset/pull/19039) fix(explore): clean data when hidding control (@stephenLYZ)
- [#19039](https://github.com/apache/superset/pull/19039) fix(explore): clean data when hiding control (@stephenLYZ)
- [#19444](https://github.com/apache/superset/pull/19444) fix: Error Message is cut off in alerts & reports log page (@codemaster08240328)
- [#19312](https://github.com/apache/superset/pull/19312) fix: adaptive formatting typo in explore dropdowns (@diegomedina248)
- [#19534](https://github.com/apache/superset/pull/19534) fix(explore): Chart header icon paddings (@kgabryje)
@ -463,7 +463,7 @@ under the License.
- [#19486](https://github.com/apache/superset/pull/19486) refactor: Removes the CSS files from the Treemap plugin (@michael-s-molina)
- [#19488](https://github.com/apache/superset/pull/19488) refactor: Removes the CSS files from the Sunburst plugin (@michael-s-molina)
- [#19490](https://github.com/apache/superset/pull/19490) chore: Add theme color to ParallelCoordinates (@geido)
- [#19442](https://github.com/apache/superset/pull/19442) chore: Remove FilterbaleTableStyles.less (@geido)
- [#19442](https://github.com/apache/superset/pull/19442) chore: Remove FilterableTableStyles.less (@geido)
- [#19441](https://github.com/apache/superset/pull/19441) chore: Remove StyledQueryButton.less (@geido)
- [#19473](https://github.com/apache/superset/pull/19473) refactor: Removes the CSS files from the Rose plugin (@michael-s-molina)
- [#19466](https://github.com/apache/superset/pull/19466) chore: Removes hard-coded colors from legacy-plugin-chart-world-map (@michael-s-molina)

View File

@ -52,6 +52,7 @@ These features are **finished** but currently being tested. They are usable, but
- GENERIC_CHART_AXES
- GLOBAL_ASYNC_QUERIES [(docs)](https://github.com/apache/superset/blob/master/CONTRIBUTING.md#async-chart-queries)
- HORIZONTAL_FILTER_BAR
- PLAYWRIGHT_REPORTS_AND_THUMBNAILS
- RLS_IN_SQLLAB
- SSH_TUNNELING [(docs)](https://superset.apache.org/docs/installation/setup-ssh-tunneling)
- USE_ANALAGOUS_COLORS
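
Flags like those above are toggled through the `FEATURE_FLAGS` dictionary in `superset_config.py`; a minimal sketch:
```
FEATURE_FLAGS = {
    "HORIZONTAL_FILTER_BAR": True,
}
```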

View File

@ -101,6 +101,7 @@ Join our growing community!
- [Preset, Inc.](https://preset.io)
- [Pronto Tools](http://www.prontotools.io) [@zkan]
- [PubNub](https://pubnub.com) [@jzucker2]
- [ReadyTech](https://www.readytech.io)
- [Reward Gateway](https://www.rewardgateway.com)
- [ScopeAI](https://www.getscopeai.com) [@iloveluce]
- [Showmax](https://tech.showmax.com) [@bobek]

View File

@ -24,15 +24,24 @@ assists people when migrating to a new version.
## Next
- [24657](https://github.com/apache/superset/pull/24657): Bumps the cryptography package to augment the OpenSSL security vulnerability.
### Breaking Changes
### Potential Downtime
### Other
- [24982](https://github.com/apache/superset/pull/24982): By default, physical datasets on Oracle-like dialects like Snowflake will now use denormalized column names. However, existing datasets won't be affected. The "Normalize column names" flag in the "Advanced" section of the dataset modal controls this behavior.
## 3.0.0
- [25053](https://github.com/apache/superset/pull/25053): Extends the `ab_user.email` column from 64 to 320 characters which has an associated unique key constraint. This will be problematic for MySQL metadata databases which use the InnoDB storage engine with the `innodb_large_prefix` parameter disabled as the key prefix limit is 767 bytes. Enabling said parameter and ensuring that the table uses either the `DYNAMIC` or `COMPRESSED` row format should remedy the problem. See [here](https://dev.mysql.com/doc/refman/5.7/en/innodb-limits.html) for more details.
- [24911](https://github.com/apache/superset/pull/24911): Changes the column type from `TEXT` to `MediumText` in table `logs`, potentially requiring a table lock on MySQL dbs or taking some time to complete on large deployments.
- [24939](https://github.com/apache/superset/pull/24939): Augments the foreign key constraints for the `embedded_dashboards` table to include an explicit CASCADE ON DELETE to ensure the relevant records are deleted when a dashboard is deleted. Scheduled downtime may be advised.
- [24938](https://github.com/apache/superset/pull/24938): Augments the foreign key constraints for the `dashboard_slices` table to include an explicit CASCADE ON DELETE to ensure the relevant records are deleted when a dashboard or slice is deleted. Scheduled downtime may be advised.
- [24911](https://github.com/apache/superset/pull/24911): Changes the column type from `TEXT` to `MediumText` in table `logs`, potentially requiring a table lock on MySQL dbs or taking some time to complete on large deployments.
- [24657](https://github.com/apache/superset/pull/24657): Bumps the cryptography package to augment the OpenSSL security vulnerability.
- [24628](https://github.com/apache/superset/pull/24628): Augments the foreign key constraints for the `dashboard_owner`, `report_schedule_owner`, and `slice_owner` tables to include an explicit CASCADE ON DELETE to ensure the relevant ownership records are deleted when a dataset is deleted. Scheduled downtime may be advised.
- [24488](https://github.com/apache/superset/pull/24488): Augments the foreign key constraints for the `sql_metrics`, `sqlatable_user`, and `table_columns` tables to include an explicit CASCADE ON DELETE to ensure the relevant records are deleted when a dataset is deleted. Scheduled downtime may be advised.
- [24335](https://github.com/apache/superset/pull/24335): Removed deprecated API `/superset/filter/<datasource_type>/<int:datasource_id>/<column>/`
- [24185](https://github.com/apache/superset/pull/24185): `/api/v1/database/test_connection` and `api/v1/database/validate_parameters` permissions changed from `can_read` to `can_write`. Only Admin users have access.
- [24628](https://github.com/apache/superset/pull/24628): Augments the foreign key constraints for the `dashboard_owner`, `report_schedule_owner`, and `slice_owner` tables to include an explicit CASCADE ON DELETE to ensure the relevant ownership records are deleted when a dataset is deleted. Scheduled downtime may be advised.
- [24488](https://github.com/apache/superset/pull/24488): Augments the foreign key constraints for the `sql_metrics`, `sqlatable_user`, and `table_columns` tables which reference the `tables` table to include an explicit CASCADE ON DELETE to ensure the relevant records are deleted when a dataset is deleted. Scheduled downtime may be advised.
- [24232](https://github.com/apache/superset/pull/24232): Enables ENABLE_TEMPLATE_REMOVE_FILTERS, DRILL_TO_DETAIL, DASHBOARD_CROSS_FILTERS by default, marks VERSIONED_EXPORT and ENABLE_TEMPLATE_REMOVE_FILTERS as deprecated.
- [23652](https://github.com/apache/superset/pull/23652): Enables GENERIC_CHART_AXES feature flag by default.
- [23226](https://github.com/apache/superset/pull/23226): Migrated endpoint `/estimate_query_cost/<int:database_id>` to `/api/v1/sqllab/estimate/`. Corresponding permissions are can estimate query cost on SQLLab. Make sure you add/replace the necessary permissions on any custom roles you may have.
@ -40,7 +49,7 @@ assists people when migrating to a new version.
- [24404](https://github.com/apache/superset/pull/24404): FLASK_ENV is getting
deprecated, we recommend using SUPERSET_ENV and reviewing your
config for ENVIRONMENT_TAG_CONFIG, which enables adding a tag in the navbar to
make it more clear which envrionment your are in.
make it more clear which environment you are in.
`SUPERSET_ENV=production` and `SUPERSET_ENV=development` are the two
supported switches based on the default config.
- [19242](https://github.com/apache/superset/pull/19242): Adhoc subqueries are now disabled by default for security reasons. To enable them, set the feature flag `ALLOW_ADHOC_SUBQUERY` to `True`.
@ -78,11 +87,13 @@ assists people when migrating to a new version.
- [23663](https://github.com/apache/superset/pull/23663): Removes deprecated feature flags `ALLOW_DASHBOARD_DOMAIN_SHARDING`, `DISPLAY_MARKDOWN_HTML`, and `FORCE_DATABASE_CONNECTIONS_SSL`.
- [22325](https://github.com/apache/superset/pull/22325): "RLS_FORM_QUERY_REL_FIELDS" is replaced by "RLS_BASE_RELATED_FIELD_FILTERS" feature flag. Its value format stays the same.
### Potential Downtime
## 2.1.1
- [24185](https://github.com/apache/superset/pull/24185): `/api/v1/database/test_connection` and `api/v1/database/validate_parameters` permissions changed from `can_read` to `can_write`. Only Admin users have access.
### Other
- [24982](https://github.com/apache/superset/pull/24982): By default, physical datasets on Oracle-like dialects like Snowflake will now use denormalized column names. However, existing datasets won't be affected. The "Normalize column names" flag in the "Advanced" section of the dataset modal controls this behavior.
- [23888](https://github.com/apache/superset/pull/23888): Database Migration for json serialization instead of pickle should upgrade/downgrade correctly when bumping to/from this patch version
## 2.1.0
@ -118,7 +129,7 @@ assists people when migrating to a new version.
## 2.0.1
- [21895](https://github.com/apache/superset/pull/21895): Markdown components had their security increased by adhering to the same sanitization process enforced by Github. This means that some HTML elements found in markdowns are not allowed anymore due to the security risks they impose. If you're deploying Superset in a trusted environment and wish to use some of the blocked elements, then you can use the HTML_SANITIZATION_SCHEMA_EXTENSIONS configuration to extend the default sanitization schema. There's also the option to disable HTML sanitization using the HTML_SANITIZATION configuration but we do not recommend this approach because of the security risks. Given the provided configurations, we don't view the improved sanitization as a breaking change but as a security patch.
- [21895](https://github.com/apache/superset/pull/21895): Markdown components had their security increased by adhering to the same sanitization process enforced by GitHub. This means that some HTML elements found in markdowns are not allowed anymore due to the security risks they impose. If you're deploying Superset in a trusted environment and wish to use some of the blocked elements, then you can use the HTML_SANITIZATION_SCHEMA_EXTENSIONS configuration to extend the default sanitization schema. There's also the option to disable HTML sanitization using the HTML_SANITIZATION configuration but we do not recommend this approach because of the security risks. Given the provided configurations, we don't view the improved sanitization as a breaking change but as a security patch.
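
For trusted environments, the extension mentioned here goes in `superset_config.py`; a hypothetical sketch that re-allows the `style` tag (key names assumed from the sanitization schema format):
```
HTML_SANITIZATION_SCHEMA_EXTENSIONS = {
    "attributes": {"*": ["style", "className"]},
    "tagNames": ["style"],
}
```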
## Breaking Changes
@ -139,8 +150,8 @@ assists people when migrating to a new version.
- [19770](https://github.com/apache/superset/pull/19770): Per [SIP-11](https://github.com/apache/superset/issues/6032) and [SIP-68](https://github.com/apache/superset/issues/14909), the native NoSQL Druid connector is deprecated and has been removed. Druid is still supported through SQLAlchemy via pydruid. The config keys `DRUID_IS_ACTIVE` and `DRUID_METADATA_LINKS_ENABLED` have also been removed.
- [19274](https://github.com/apache/superset/pull/19274): The `PUBLIC_ROLE_LIKE_GAMMA` config key has been removed, set `PUBLIC_ROLE_LIKE = "Gamma"` to have the same functionality.
- [19273](https://github.com/apache/superset/pull/19273): The `SUPERSET_CELERY_WORKERS` and `SUPERSET_WORKERS` config keys has been removed. Configure Celery directly using `CELERY_CONFIG` on Superset.
- [19231](https://github.com/apache/superset/pull/19231): The `ENABLE_REACT_CRUD_VIEWS` feature flag has been removed (premantly enabled). Any deployments which had set this flag to false will need to verify that the React views support their use case.
- [19230](https://github.com/apache/superset/pull/19230): The `ROW_LEVEL_SECURITY` feature flag has been removed (permantly enabled). Any deployments which had set this flag to false will need to verify that the presence of the Row Level Security feature does not interfere with their use case.
- [19231](https://github.com/apache/superset/pull/19231): The `ENABLE_REACT_CRUD_VIEWS` feature flag has been removed (permanently enabled). Any deployments which had set this flag to false will need to verify that the React views support their use case.
- [19230](https://github.com/apache/superset/pull/19230): The `ROW_LEVEL_SECURITY` feature flag has been removed (permanently enabled). Any deployments which had set this flag to false will need to verify that the presence of the Row Level Security feature does not interfere with their use case.
- [19168](https://github.com/apache/superset/pull/19168): Celery upgrade to 5.X resulted in breaking changes to its command line invocation. Please follow [these](https://docs.celeryq.dev/en/stable/whatsnew-5.2.html#step-1-adjust-your-command-line-invocation) instructions for adjustments. Also consider migrating your Celery config per [here](https://docs.celeryq.dev/en/stable/userguide/configuration.html#conf-old-settings-map).
- [19142](https://github.com/apache/superset/pull/19142): The `VERSIONED_EXPORT` config key is now `True` by default.
- [19113](https://github.com/apache/superset/pull/19113): The `ENABLE_JAVASCRIPT_CONTROLS` config key has moved from an app config to a feature flag. Any deployments who overrode this setting will now need to override the feature flag from here onward.
@ -155,7 +166,7 @@ assists people when migrating to a new version.
### Other
- [22022](https://github.com/apache/superset/pull/22022): HTTP API endpoints `/superset/approve` and `/superset/request_access` have been deprecated and their HTTP methods were changed from GET to POST
- [21895](https://github.com/apache/superset/pull/21895): Markdown components had their security increased by adhering to the same sanitization process enforced by Github. This means that some HTML elements found in markdowns are not allowed anymore due to the security risks they impose. If you're deploying Superset in a trusted environment and wish to use some of the blocked elements, then you can use the HTML_SANITIZATION_SCHEMA_EXTENSIONS configuration to extend the default sanitization schema. There's also the option to disable HTML sanitization using the HTML_SANITIZATION configuration but we do not recommend this approach because of the security risks. Given the provided configurations, we don't view the improved sanitization as a breaking change but as a security patch.
- [21895](https://github.com/apache/superset/pull/21895): Markdown components had their security increased by adhering to the same sanitization process enforced by GitHub. This means that some HTML elements found in markdowns are not allowed anymore due to the security risks they impose. If you're deploying Superset in a trusted environment and wish to use some of the blocked elements, then you can use the HTML_SANITIZATION_SCHEMA_EXTENSIONS configuration to extend the default sanitization schema. There's also the option to disable HTML sanitization using the HTML_SANITIZATION configuration but we do not recommend this approach because of the security risks. Given the provided configurations, we don't view the improved sanitization as a breaking change but as a security patch.
## 1.5.2
@ -247,7 +258,7 @@ assists people when migrating to a new version.
### Other
- [13772](https://github.com/apache/superset/pull/13772): Row level security (RLS) is now enabled by default. To activate the feature, please run `superset init` to expose the RLS menus to Admin users.
- [13980](https://github.com/apache/superset/pull/13980): Data health checks no longer use the metadata database as an interim cache. Though non-breaking, deployments which implement complex logic should likely memoize the callback function. Refer to documentation in the confg.py file for more detail.
- [13980](https://github.com/apache/superset/pull/13980): Data health checks no longer use the metadata database as an interim cache. Though non-breaking, deployments which implement complex logic should likely memoize the callback function. Refer to documentation in the config.py file for more detail.
- [14255](https://github.com/apache/superset/pull/14255): The default `CSV_TO_HIVE_UPLOAD_DIRECTORY_FUNC` callable logic has been updated to leverage the specified database and schema to ensure the upload S3 key prefix is unique. Previously tables generated via upload from CSV with the same name but differ schema and/or cluster would use the same S3 key prefix. Note this change does not impact previously imported tables.
## 1.1.0

View File

@ -29,6 +29,14 @@ x-superset-volumes: &superset-volumes
version: "3.7"
services:
nginx:
image: nginx:latest
container_name: superset_nginx
restart: unless-stopped
ports:
- "80:80"
volumes:
- ./docker/nginx/nginx.conf:/etc/nginx/nginx.conf:ro
redis:
image: redis:7
container_name: superset_cache
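
Once the stack is up, the new proxy can be smoke-tested from the host (assuming nothing else is bound to port 80):
```
curl -I http://localhost/health
```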

View File

@ -35,6 +35,14 @@ else
echo "Skipping local overrides"
fi
#
# playwright is an optional package - run only if it is installed
#
if command -v playwright > /dev/null 2>&1; then
playwright install-deps
playwright install chromium
fi
case "${1}" in
worker)
echo "Starting Celery worker..."

View File

@ -1,5 +1,5 @@
# ------------------------------------------------------------------------
# Creates the examples database and repective user. This database location
# Creates the examples database and respective user. This database location
# and access credentials are defined on the environment variables
# ------------------------------------------------------------------------
set -e

docker/nginx/nginx.conf (new file, 127 lines)
View File

@ -0,0 +1,127 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
user nginx;
worker_processes 1;
error_log /var/log/nginx/error.log warn;
pid /var/run/nginx.pid;
events {
worker_connections 1024;
}
http {
include /etc/nginx/mime.types;
default_type application/octet-stream;
log_format main '$remote_addr - $remote_user [$time_local] "$request" '
'$status $body_bytes_sent [$connection_requests] "$http_referer" '
'"$http_user_agent" "$http_x_forwarded_for"';
access_log /var/log/nginx/access.log main;
sendfile on;
#tcp_nopush on;
keepalive_timeout 30;
keepalive_requests 2;
###### Compression Stuff
# Enable Gzip compressed.
gzip on;
# Compression level (1-9).
# 5 is a perfect compromise between size and cpu usage, offering about
# 75% reduction for most ascii files (almost identical to level 9).
gzip_comp_level 5;
# Don't compress anything that's already small and unlikely to shrink much
# if at all (the default is 20 bytes, which is bad as that usually leads to
# larger files after gzipping).
gzip_min_length 256;
# Compress data even for clients that are connecting to us via proxies,
# identified by the "Via" header (required for CloudFront).
gzip_proxied any;
# Tell proxies to cache both the gzipped and regular version of a resource
# whenever the client's Accept-Encoding capabilities header varies;
# Avoids the issue where a non-gzip capable client (which is extremely rare
# today) would display gibberish if their proxy gave them the gzipped version.
gzip_vary on;
# Compress all output labeled with one of the following MIME-types.
gzip_types
application/atom+xml
application/javascript
application/json
application/rss+xml
application/vnd.ms-fontobject
application/x-font-ttf
application/x-web-app-manifest+json
application/xhtml+xml
application/xml
font/opentype
image/svg+xml
image/x-icon
text/css
text/plain
text/x-component;
# text/html is always compressed by HttpGzipModule
output_buffers 20 10m;
client_max_body_size 10m;
upstream superset_app {
server host.docker.internal:8088;
keepalive 100;
}
upstream superset_websocket {
server host.docker.internal:8080;
keepalive 100;
}
server {
listen 80 default_server;
server_name _;
location /ws {
proxy_pass http://superset_websocket;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "Upgrade";
proxy_set_header Host $host;
}
location / {
proxy_pass http://superset_app;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $remote_addr;
proxy_set_header X-Forwarded-Host $host;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_http_version 1.1;
port_in_redirect off;
proxy_connect_timeout 300;
}
}
}
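
After editing this file, a config check before reloading avoids proxy downtime; a sketch using the compose service defined above:
```
docker compose exec nginx nginx -t
docker compose exec nginx nginx -s reload
```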

View File

@ -4,10 +4,11 @@ ARG DOCKERIZE_VERSION=v0.7.0
RUN apk update --no-cache \
&& apk add --no-cache wget openssl \
&& wget -O - https://github.com/jwilder/dockerize/releases/download/$DOCKERIZE_VERSION/dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz | tar xzf - -C /usr/local/bin \
&& case "$(apk --print-arch)" in \
x86_64) ARCH=amd64 ;; \
aarch64) ARCH=arm64 ;; \
esac \
&& wget -O - https://github.com/jwilder/dockerize/releases/download/$DOCKERIZE_VERSION/dockerize-linux-${ARCH}-${DOCKERIZE_VERSION}.tar.gz | tar xzf - -C /usr/local/bin \
&& apk del wget
USER 10001
ENTRYPOINT ["dockerize"]
CMD ["--help"]
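
A quick smoke test of the resulting image (hypothetical tag) confirms the right binary was fetched for the build platform:
```
docker build -f dockerize.Dockerfile -t dockerize-local .
docker run --rm dockerize-local --version
```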

View File

@ -15,7 +15,7 @@ import { Alert } from 'antd';
Superset's public **REST API** follows the
[OpenAPI specification](https://swagger.io/specification/), and is
documented here. The docs bellow are generated using
documented here. The docs below are generated using
[Swagger React UI](https://www.npmjs.com/package/swagger-ui-react).
<Alert

View File

@ -39,7 +39,7 @@ We use [Pylint](https://pylint.org/) for linting which can be invoked via:
tox -e pylint
```
In terms of best practices please avoid blanket disablement of Pylint messages globally (via `.pylintrc`) or top-level within the file header, albeit there being a few exceptions. Disablement should occur inline as it prevents masking issues and provides context as to why said message is disabled.
In terms of best practices please avoid blanket disabling of Pylint messages globally (via `.pylintrc`) or top-level within the file header, albeit there being a few exceptions. Disabling should occur inline as it prevents masking issues and provides context as to why said message is disabled.
Additionally, the Python code is auto-formatted using [Black](https://github.com/python/black) which
is configured as a pre-commit hook. There are also numerous [editor integrations](https://black.readthedocs.io/en/stable/integrations/editors.html)

View File

@ -58,7 +58,7 @@ for your target language already exists in the `superset/translations` directory
ls superset/translations | grep -E "^[a-z]{2}\/"
```
If your language already has a pre-existing translation, skip to the next section
If your language already has a preexisting translation, skip to the next section
The following languages are already supported by Flask AppBuilder, and will make it
easier to translate the application to your target language:

View File

@ -19,7 +19,7 @@ The best way to report a bug is to file an issue on GitHub. Please include:
When posting Python stack traces, please quote them using
[Markdown blocks](https://help.github.com/articles/creating-and-highlighting-code-blocks/).
_Please note that feature requests opened as Github Issues will be moved to Discussions._
_Please note that feature requests opened as GitHub Issues will be moved to Discussions._
### Submit Ideas or Feature Requests
@ -51,7 +51,7 @@ articles. See [Documentation](#documentation) for more details.
### Add Translations
If you are proficient in a non-English language, you can help translate
text strings from Superset's UI. You can jump in to the existing
text strings from Superset's UI. You can jump into the existing
language dictionaries at
`superset/translations/<language_code>/LC_MESSAGES/messages.po`, or
even create a dictionary for a new language altogether.

View File

@ -40,7 +40,7 @@ Then select your database type in the resulting modal:
<img src={useBaseUrl("/img/tutorial/tutorial_02_select_database.png" )} width="600" />{" "} <br/><br/>
Once you've selected a database, you can configure a number of advanced options in this window,
or for the purposes of this this walkthrough, you can click the link below all these fields:
or for the purposes of this walkthrough, you can click the link below all these fields:
<img src={useBaseUrl("/img/tutorial/tutorial_03a_database_connection_string_link.png" )} width="600" />{" "} <br/><br/>

View File

@ -28,7 +28,7 @@ We added a new configuration option where the admin can define their preferred d
# displayed prominently in the "Add Database" dialog. You should
# use the "engine_name" attribute of the corresponding DB engine spec
# in `superset/db_engine_specs/`.
PREFERRED_DATABASES: List[str] = [
PREFERRED_DATABASES: list[str] = [
"PostgreSQL",
"Presto",
"MySQL",

View File

@ -17,7 +17,8 @@ mysql://{username}:{password}@{host}/{database}
Host:
- For Localhost or Docker running Linux: `localhost` or `127.0.0.1`
- For Localhost: `localhost` or `127.0.0.1`
- For Docker running on Linux: `172.18.0.1`
- For On Prem: IP address or Host name
- For Docker running in OSX: `docker.for.mac.host.internal`
Port: `3306` by default
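
Putting host, port, and database together, a complete URI might look like this (illustrative credentials):
```
mysql://superset_user:superset_password@127.0.0.1:3306/superset
```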

View File

@ -7,6 +7,15 @@ version: 1
## Snowflake
### Install Snowflake Driver
Follow the steps [here](/docs/databases/docker-add-drivers) to
install new database drivers when setting up Superset locally via docker-compose.
```
echo "snowflake-sqlalchemy" >> ./docker/requirements-local.txt
```
The recommended connector library for Snowflake is
[snowflake-sqlalchemy](https://pypi.org/project/snowflake-sqlalchemy/).
@ -31,7 +40,7 @@ button in the Create or Edit Database dialog, user/role credentials are validate
is not authorized to access the database, an error is recorded in the Superset logs.
And if you want to connect Snowflake with [Key Pair Authentication](https://docs.snowflake.com/en/user-guide/key-pair-auth.html#step-6-configure-the-snowflake-client-to-use-key-pair-authentication).
Plase make sure you have the key pair and the public key is registered in Snowflake.
Please make sure you have the key pair and the public key is registered in Snowflake.
To connect Snowflake with Key Pair Authentication, you need to add the following parameters to "SECURE EXTRA" field.
***Please note that you need to merge multi-line private key content to one line and insert `\n` between each line***
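
The shape of that "SECURE EXTRA" JSON is roughly as follows; a sketch with key names assumed from the snowflake-sqlalchemy connector, and the private key collapsed to one line as noted above:
```
{
    "auth_method": "keypair",
    "auth_params": {
        "privatekey_body": "-----BEGIN ENCRYPTED PRIVATE KEY-----\n...\n-----END ENCRYPTED PRIVATE KEY-----",
        "privatekey_pass": "Your Private Key Password"
    }
}
```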

View File

@ -19,5 +19,5 @@ It is also possible to connect using [pyodbc](https://pypi.org/project/pyodbc) w
The connection string for SQL Server looks like this:
```
mssql+pyodbc:///?odbc_connect=Driver%3D%7BODBC+Driver+17+for+SQL+Server%7D%3BServer%3Dtcp%3A%3Cmy_server%3E%2C1433%3BDatabase%3Dmy_datasbase%3BUid%3Dmy_user_name%3BPwd%3Dmy_password%3BEncrypt%3Dyes%3BConnection+Timeout%3D30
mssql+pyodbc:///?odbc_connect=Driver%3D%7BODBC+Driver+17+for+SQL+Server%7D%3BServer%3Dtcp%3A%3Cmy_server%3E%2C1433%3BDatabase%3Dmy_database%3BUid%3Dmy_user_name%3BPwd%3Dmy_password%3BEncrypt%3Dyes%3BConnection+Timeout%3D30
```

View File

@ -6,35 +6,52 @@ sidebar_position: 7
## Frequently Asked Questions
### How big of a dataset can Superset handle?
Superset can work with even gigantic databases! Superset acts as a thin layer above your underlying
databases or data engines, which do all the processing. Superset simply visualizes the results of
the query.
The key to achieving acceptable performance in Superset is whether your database can execute queries
and return results at a speed that is acceptable to your users. If you experience slow performance with
Superset, benchmark and tune your data warehouse.
### What are the computing specifications required to run Superset?
The specs of your Superset installation depend on how many users you have and what their activity is, not
on the size of your data. Superset admins in the community have reported 8GB RAM, 2vCPUs as adequate to
run a moderately-sized instance. To develop Superset, e.g., compile code or build images, you may
need more power.
Monitor your resource usage and increase or decrease as needed. Note that Superset usage has a tendency
to occur in spikes, e.g., if everyone in a meeting loads the same dashboard at once.
Superset's application metadata does not require a very large database to store it, though
the log file grows over time.
### Can I join / query multiple tables at one time?
Not in the Explore or Visualization UI. A Superset SQLAlchemy datasource can only be a single table
or a view.
When working with tables, the solution would be to materialize a table that contains all the fields
When working with tables, the solution would be to create a table that contains all the fields
needed for your analysis, most likely through some scheduled batch process.
A view is a simple logical layer that abstract an arbitrary SQL queries as a virtual table. This can
allow you to join and union multiple tables, and to apply some transformation using arbitrary SQL
expressions. The limitation there is your database performance as Superset effectively will run a
A view is a simple logical layer that abstracts an arbitrary SQL query as a virtual table. This can
allow you to join and union multiple tables and to apply some transformation using arbitrary SQL
expressions. The limitation there is your database performance, as Superset effectively will run a
query on top of your query (view). A good practice may be to limit yourself to joining your main
large table to one or many small tables only, and avoid using _GROUP BY_ where possible as Superset
will do its own _GROUP BY_ and doing the work twice might slow down performance.
Whether you use a table or a view, the important factor is whether your database is fast enough to
serve it in an interactive fashion to provide a good user experience in Superset.
Whether you use a table or a view, performance depends on how fast your database can deliver
the result to users interacting with Superset.
However, if you are using SQL Lab, there is no such limitation. You can write SQL queries to join
multiple tables as long as your database account has access to the tables.
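
For the view-based approach described above, something like the following (illustrative SQL with hypothetical tables) gives Explore a single joinable datasource:
```
CREATE VIEW sales_enriched AS
SELECT s.order_id, s.amount, c.region
FROM sales s
JOIN customers c ON c.id = s.customer_id;
```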
### How BIG can my datasource be?
It can be gigantic! Superset acts as a thin layer above your underlying databases or data engines.
As mentioned above, the main criteria is whether your database can execute queries and return
results in a time frame that is acceptable to your users. Many distributed databases out there can
execute queries that scan through terabytes in an interactive fashion.
### How do I create my own visualization?
We recommend reading the instructions in
@ -78,49 +95,6 @@ to **.env** and **.env-non-dev** at the key MAPBOX_API_KEY:
MAPBOX_API_KEY = "longstringofalphanumer1c"
```
### How to add dynamic filters to a dashboard?
Use the **Filter Box** widget, build a slice, and add it to your dashboard.
The **Filter Box** widget allows you to define a query to populate dropdowns that can be used for
filtering. To build the list of distinct values, we run a query, and sort the result by the metric
you provide, sorting descending.
The widget also has a checkbox **Date Filter**, which enables time filtering capabilities to your
dashboard. After checking the box and refreshing, you'll see a from and a to dropdown show up.
By default, the filtering will be applied to all the slices that are built on top of a datasource
that shares the column name that the filter is based on. Its also a requirement for that column to
be checked as “filterable” in the column tab of the table editor.
But what if you don't want certain widgets to get filtered on your dashboard? You can do that
by editing your dashboard and, in the form, editing the JSON Metadata field, more specifically the
`filter_immune_slices` key, which receives an array of sliceIds that should never be affected by any
dashboard-level filtering.
```
{
"filter_immune_slices": [324, 65, 92],
"expanded_slices": {},
"filter_immune_slice_fields": {
"177": ["country_name", "__time_range"],
"32": ["__time_range"]
},
"timed_refresh_immune_slices": [324]
}
```
In the JSON blob above, slices 324, 65, and 92 won't be affected by any dashboard-level filtering.
Now note the `filter_immune_slice_fields` key. This one allows you to be more specific and define,
for a specific slice_id, which filter fields should be disregarded.
Note the use of the `__time_range` keyword, which is reserved for dealing with the time boundary
filtering mentioned above.
But what happens with filtering when dealing with slices coming from different tables or databases?
If the column name is shared, the filter will be applied; it's as simple as that.
### How to limit the timed refresh on a dashboard?
By default, the dashboard timed refresh feature allows you to automatically re-query every slice on
@ -192,8 +166,9 @@ only a few database engines are supported for use as the OLTP backend / metadata
Superset is tested using MySQL, PostgreSQL, and SQLite backends. It's recommended you install
Superset on one of these database servers for production. Installation on other OLTP databases
may work but isn't tested. Column-store, non-OLTP databases are not designed for this type of workload.
may work but isn't tested. It has been reported that [Microsoft SQL Server does *not*
work as a Superset backend](https://github.com/apache/superset/issues/18961). Column-store,
non-OLTP databases are not designed for this type of workload.
### How can I configure OAuth authentication and authorization?
@ -277,6 +252,13 @@ guarantees and are not recommended but may fit your use case temporarily:
- using the internal FAB ModelView API (to be deprecated in Superset)
- altering the source code in your fork
### How can I see usage statistics (e.g., monthly active users)?
This functionality is not included with Superset, but you can extract and analyze Superset's application
metadata to see what actions have occurred. By default, user activities are logged in the `logs` table
in Superset's metadata database. One company has published a write-up of [how they analyzed Superset
usage, including example queries](https://engineering.hometogo.com/monitor-superset-usage-via-superset-c7f9fba79525).
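As an illustrative sketch (not a built-in Superset utility), the script below computes monthly active users from that `logs` table. The connection string is a placeholder, and the `dttm` / `user_id` column names assume the default metadata schema:

```python
from sqlalchemy import create_engine, text

# Placeholder: point this at your Superset metadata database.
engine = create_engine("postgresql://superset:superset@localhost:5432/superset")

# Count distinct active users per month from the `logs` table.
MAU_QUERY = text("""
    SELECT DATE_TRUNC('month', dttm) AS month,
           COUNT(DISTINCT user_id)   AS monthly_active_users
    FROM logs
    GROUP BY 1
    ORDER BY 1
""")

with engine.connect() as conn:
    for month, mau in conn.execute(MAU_QUERY):
        print(month, mau)
```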
### What Does Hours Offset in the Edit Dataset view do?
In the Edit Dataset view, you can specify a time offset. This field lets you configure the
@ -288,3 +270,11 @@ This can be used, for example, to convert UTC time to local time.
Superset uses [Scarf](https://about.scarf.sh/) by default to collect basic telemetry data upon installing and/or running Superset. This data helps the maintainers of Superset better understand which versions of Superset are being used, in order to prioritize patch/minor releases and security fixes.
We use the [Scarf Gateway](https://docs.scarf.sh/gateway/) to sit in front of container registries, and the [scarf-js](https://about.scarf.sh/package-sdks) package to track `npm` installations.
Scarf purges PII and provides aggregated statistics. Superset users can easily opt out of analytics in various ways documented [here](https://docs.scarf.sh/gateway/#do-not-track) and [here](https://docs.scarf.sh/package-analytics/#as-a-user-of-a-package-using-scarf-js-how-can-i-opt-out-of-analytics). Additional opt-out instructions for Docker users are available on the [Docker Installation](https://superset.apache.org/docs/installation/installing-superset-using-docker-compose) page.
### Does Superset have an archive panel or trash bin from which a user can recover deleted assets?
No. Currently, there is no way to recover a deleted Superset dashboard/chart/dataset/database from the UI. However, there is an [ongoing discussion](https://github.com/apache/superset/discussions/18386) about implementing such a feature.
Hence, it is recommended to take periodic backups of the metadata database. For recovery, you can launch a recovery instance of a Superset server with the backed-up copy of the DB attached and use the Export Dashboard button in the Superset UI (or the `superset export-dashboards` CLI command). Then, take the .zip file and import it into the current Superset instance.
Alternatively, you can programmatically take regular exports of the assets as a backup.
View File
@ -149,6 +149,11 @@ If you're not using Gunicorn, you may want to disable the use of `flask-compress
Currently, the Google BigQuery Python SDK is not compatible with `gevent` due to some dynamic monkey-patching that `gevent` performs on the Python core library.
So, when you use a `BigQuery` datasource in Superset, you have to use a `gunicorn` worker type other than `gevent`.
### HTTPS Configuration
You can configure HTTPS upstream via a load balancer or a reverse proxy (such as nginx) and do SSL/TLS offloading before traffic reaches the Superset application. In this setup, local traffic from a Celery worker taking a snapshot of a chart for Alerts & Reports can access Superset at an `http://` URL from behind the ingress point.
You can also configure [SSL in Gunicorn](https://docs.gunicorn.org/en/stable/settings.html#ssl) (the Python webserver) if you are using an official Superset Docker image.
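For reference, a minimal `gunicorn.conf.py` sketch using Gunicorn's documented `certfile` and `keyfile` settings; the bind address and certificate paths below are placeholders:

```python
# gunicorn.conf.py -- a minimal sketch; paths and port are placeholders.
bind = "0.0.0.0:8443"
workers = 4

# Gunicorn's documented SSL settings:
certfile = "/etc/ssl/certs/superset.crt"
keyfile = "/etc/ssl/private/superset.key"
```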
### Configuration Behind a Load Balancer
If you are running Superset behind a load balancer or reverse proxy (e.g. NGINX or ELB on AWS), you
@ -169,8 +174,9 @@ RequestHeader set X-Forwarded-Proto "https"
### Custom OAuth2 Configuration
Beyond FAB supported providers (GitHub, Twitter, LinkedIn, Google, Azure, etc), its easy to connect
Superset with other OAuth2 Authorization Server implementations that support “code” authorization.
Superset is built on Flask-AppBuilder (FAB), which supports many providers out of the box
(GitHub, Twitter, LinkedIn, Google, Azure, etc). Beyond those, Superset can be configured to connect
with other OAuth2 Authorization Server implementations that support “code” authorization.
Make sure the pip package [`Authlib`](https://authlib.org/) is installed on the webserver.
@ -196,6 +202,7 @@ OAUTH_PROVIDERS = [
'access_token_params':{ # Additional parameters for calls to access_token_url
'client_id':'myClientId'
},
'jwks_uri':'https://myAuthorizationServer/adfs/discovery/keys', # may be required to generate token
'access_token_headers':{ # Additional headers for calls to access_token_url
'Authorization': 'Basic Base64EncodedClientIdAndSecret'
},
@ -264,6 +271,13 @@ CUSTOM_SECURITY_MANAGER = CustomSsoSecurityManager
]
```
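Stitching the fragments above together, one `OAUTH_PROVIDERS` entry might look like the sketch below; the provider name, client credentials, and URLs are placeholders, and the exact keys depend on your Authorization Server:

```python
# superset_config.py -- a minimal sketch; all names and URLs are placeholders.
from flask_appbuilder.security.manager import AUTH_OAUTH

AUTH_TYPE = AUTH_OAUTH
OAUTH_PROVIDERS = [
    {
        "name": "egaSSO",
        "token_key": "access_token",  # name of the token in the response
        "icon": "fa-address-card",
        "remote_app": {
            "client_id": "myClientId",
            "client_secret": "mySecret",
            "api_base_url": "https://myAuthorizationServer/oauth2API",
            "access_token_url": "https://myAuthorizationServer/oauth2API/token",
            "authorize_url": "https://myAuthorizationServer/oauth2API/authorize",
            # may be required to generate tokens (see above)
            "jwks_uri": "https://myAuthorizationServer/adfs/discovery/keys",
        },
    },
]
```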
### LDAP Authentication
FAB supports authenticating user credentials against an LDAP server.
To use LDAP you must install the [python-ldap](https://www.python-ldap.org/en/latest/installing.html) package.
See [FAB's LDAP documentation](https://flask-appbuilder.readthedocs.io/en/latest/security.html#authentication-ldap)
for details.
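A minimal `superset_config.py` sketch for a typical FAB LDAP setup; the server address and search base are placeholders:

```python
# superset_config.py -- a minimal sketch; server and search base are placeholders.
from flask_appbuilder.security.manager import AUTH_LDAP

AUTH_TYPE = AUTH_LDAP
AUTH_LDAP_SERVER = "ldap://ldap.example.com"
AUTH_LDAP_SEARCH = "ou=people,dc=example,dc=com"
AUTH_LDAP_UID_FIELD = "uid"

# Auto-register users on first successful login, with a default role.
AUTH_USER_REGISTRATION = True
AUTH_USER_REGISTRATION_ROLE = "Gamma"
```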
### Flask app Configuration Hook
`FLASK_APP_MUTATOR` is a configuration function that can be provided in your environment, receives
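`FLASK_APP_MUTATOR` receives the Flask app object and can alter it in any way. A minimal sketch, where the response header is purely illustrative:

```python
# superset_config.py -- a minimal sketch; the header name is illustrative only.
def FLASK_APP_MUTATOR(app):
    @app.after_request
    def add_custom_header(response):
        response.headers["X-Custom-Header"] = "superset"
        return response
```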
View File
@ -62,6 +62,9 @@ When working on master branch, run the following commands to run `development` m
```bash
docker compose up
```
:::tip
When running in development mode, the `superset-node` container needs to finish building assets in order for the UI to render properly. If you would just like to try out Superset without making any code changes, follow the steps documented for `production` or a specific version below.
:::
When working on master branch, run the following commands to run `production` mode using `docker compose`:
@ -72,16 +75,25 @@ docker-compose -f docker-compose-non-dev.yml up
Alternatively, you can run a specific version of Superset by first checking out
the branch/tag and then starting `docker-compose` with the `TAG` variable.
For example, to run the 2.1.0 version, run the following commands:
For example, to run the 3.0.0 version, run the following commands on Linux-based systems:
```bash
git checkout 2.1.0
TAG=2.1.0 docker-compose -f docker-compose-non-dev.yml pull
TAG=2.1.0 docker-compose -f docker-compose-non-dev.yml up
git checkout 3.0.0
TAG=3.0.0 docker-compose -f docker-compose-non-dev.yml pull
TAG=3.0.0 docker-compose -f docker-compose-non-dev.yml up
```
If you are using Docker Desktop for Windows, then run the following commands:
```bash
git checkout 3.0.0
set TAG=3.0.0
docker-compose -f docker-compose-non-dev.yml pull
docker-compose -f docker-compose-non-dev.yml up
```
:::tip
Note that some configuration is mandatory for production instances of Superset. In particular, Superset will not start without a user-specified value of `SECRET_KEY`. Please see [Configuring Superset](https://superset.apache.org/docs/installation/configuring-superset/).
Note that some configuration is mandatory for production instances of Superset. In particular, Superset will not start without a user-specified value of `SECRET_KEY` in a Superset config file or `SUPERSET_SECRET_KEY` as an [environment variable](https://github.com/apache/superset/blob/master/docker/.env-non-dev). Please see [Configuring Superset](https://superset.apache.org/docs/installation/configuring-superset/) for more details.
:::
:::caution
All of the content belonging to a Superset instance - charts, dashboards, users, etc. - is stored in its metadata database. In production, this database should be backed up.
@ -94,17 +106,13 @@ You should see a wall of logging output from the containers being launched on yo
this output slows, you should have a running instance of Superset on your local machine! To
avoid the wall of text on future runs, add the `-d` option to the end of the `docker-compose up` command.
**Note:** This will bring up superset in a non-dev mode, changes to the codebase will not be reflected.
If you would like to run superset in dev mode to test local changes, simply replace the previous command with: `docker-compose up`,
and wait for the `superset_node` container to finish building the assets.
#### Configuring Docker Compose
The following is for users who want to configure how Superset runs in Docker Compose; otherwise, you can skip to the next section.
You can install additional python packages and apply config overrides by following the steps mentioned in [docker/README.md](https://github.com/apache/superset/tree/master/docker#configuration)
You can configure the Docker Compose environment varirables for dev and non-dev mode with `docker/.env` and `docker/.env-non-dev` respectively. These environment files set the environment for most containers in the Docker Compose setup, and some variables affect multiple containers and others only single ones.
You can configure the Docker Compose environment variables for dev and non-dev mode with `docker/.env` and `docker/.env-non-dev` respectively. These environment files set the environment for most containers in the Docker Compose setup, and some variables affect multiple containers and others only single ones.
One important variable is `SUPERSET_LOAD_EXAMPLES` which determines whether the `superset_init` container will populate example data and visualizations into the metadata database. These examples are helpful for learning and testing out Superset but unnecessary for experienced users and production deployments. The loading process can sometimes take a few minutes and a good amount of CPU, so you may want to disable it on a resource-constrained device.
@ -138,9 +146,9 @@ password: admin
### 5. Connecting Superset to your local database instance
When running Superset using `docker` or `docker-compose` it runs in its own docker container, as if the Superset was running in a separate machine entirely. Therefore attempts to connect to your local database with hostname `localhost` won't work as `localhost` refers to the docker container Superset is running in, and not your actual host machine. Fortunately, docker provides an easy way to access network resources in the host machine from inside a container, and we will leverage this capability to connect to our local database instance.
When running Superset using `docker` or `docker-compose`, it runs in its own Docker container, as if Superset were running on a separate machine entirely. Therefore attempts to connect to your local database with the hostname `localhost` won't work, as `localhost` refers to the Docker container Superset is running in, and not your actual host machine. Fortunately, Docker provides an easy way to access network resources in the host machine from inside a container, and we will leverage this capability to connect to our local database instance.
Here the instructions are for connecting to PostgreSQL (which is running on your host machine) from Superset (which is running in its Docker container). Other databases may have slightly different configurations, but the gist is the same and boils down to two steps:
1. **(Mac users may skip this step)** Configuring the local postgresql/database instance to accept public incoming connections. By default postgresql only allows incoming connections from `localhost` only, but re-iterating once again, `localhosts` are different for host machine and docker container. For postgresql this involves make one-line changes to the files `postgresql.conf` and `pg_hba.conf`, you can find helpful links tailored to your OS / PG version on the web easily for this task. For docker it suffices to only whitelist IPs `172.0.0.0/8` instead of `*`, but in any case you are _warned_ that doing this in a production database _may_ have disastrous consequences as you are opening your database to the public internet.
2. Instead of `localhost`, try using `host.docker.internal` (Mac users, Ubuntu) or `172.18.0.1` (Linux users) as the host name when attempting to connect to the database. This is docker internal detail, what is happening is that in Mac systems docker creates a dns entry for the host name `host.docker.internal` which resolves to the correct address for the host machine, whereas in linux this is not the case (at least by default). If neither of these 2 hostnames work then you may want to find the exact host name you want to use, for that you can do `ifconfig` or `ip addr show` and look at the IP address of `docker0` interface that must have been created by docker for you. Alternately if you don't even see the `docker0` interface try (if needed with sudo) `docker network inspect bridge` and see if there is an entry for `"Gateway"` and note the IP address.
1. **(Mac users may skip this step)** Configuring the local postgresql/database instance to accept public incoming connections. By default, postgresql only allows incoming connections from `localhost`, and under Docker (unless you use `--network=host`) `localhost` refers to different endpoints on the host machine and in a Docker container, respectively. Allowing postgresql to accept connections from Docker involves making one-line changes to the files `postgresql.conf` and `pg_hba.conf`; you can easily find helpful links tailored to your OS / PG version on the web for this task. For Docker it suffices to whitelist only the IPs `172.0.0.0/8` instead of `*`, but in any case you are _warned_ that doing this on a production database _may_ have disastrous consequences, as you are opening your database to the public internet.
2. Instead of `localhost`, try using `host.docker.internal` (Mac users, Ubuntu) or `172.18.0.1` (Linux users) as the hostname when attempting to connect to the database. This is a Docker internal detail -- what is happening is that, on Mac systems, Docker Desktop creates a DNS entry for the hostname `host.docker.internal` which resolves to the correct address for the host machine, whereas on Linux this is not the case (at least by default). If neither of these two hostnames works, then you may want to find the exact hostname you want to use; for that you can run `ifconfig` or `ip addr show` and look at the IP address of the `docker0` interface that must have been created by Docker for you. Alternatively, if you don't even see the `docker0` interface, try (if needed with sudo) `docker network inspect bridge` and see if there is an entry for `"Gateway"` and note the IP address.
View File
@ -33,6 +33,15 @@ Add the following setting in your `superset_config.py` file:
- `SUPERSET_WEBSERVER_DOMAINS`: list of allowed hostnames for the domain sharding feature.
Please create your domain shards as subdomains of your main domain for authorization to
work properly on new domains. For example:
- `SUPERSET_WEBSERVER_DOMAINS=['superset-1.mydomain.com','superset-2.mydomain.com','superset-3.mydomain.com','superset-4.mydomain.com']`
or add the following setting in your `superset_config.py` file if domain shards are not subdomains of the main domain:
- `SESSION_COOKIE_DOMAIN = '.mydomain.com'`
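Put together, a minimal `superset_config.py` sketch (all domains are placeholders):

```python
# superset_config.py -- a minimal sketch; the domains are placeholders.
SUPERSET_WEBSERVER_DOMAINS = [
    "superset-1.mydomain.com",
    "superset-2.mydomain.com",
    "superset-3.mydomain.com",
]

# Only needed when the shards are not subdomains of the main domain:
SESSION_COOKIE_DOMAIN = ".mydomain.com"
```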
### Middleware
Superset allows you to add your own middleware. To add your own middleware, update the
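A minimal sketch, assuming the `ADDITIONAL_MIDDLEWARE` config key, which takes a list of WSGI middleware classes that wrap the app; the logging middleware below is purely illustrative:

```python
# superset_config.py -- a minimal sketch; the middleware is illustrative only.
class SimpleLoggingMiddleware:
    def __init__(self, wsgi_app):
        self.wsgi_app = wsgi_app

    def __call__(self, environ, start_response):
        # Log every request path before passing the request through.
        print("request:", environ.get("PATH_INFO"))
        return self.wsgi_app(environ, start_response)

ADDITIONAL_MIDDLEWARE = [SimpleLoggingMiddleware]
```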
View File
@ -15,7 +15,7 @@ version: 1
2. Create database w/ ssh tunnel enabled
- With the feature flag enabled, you should now see an SSH tunnel toggle (see the sketch after this list).
- Click the toggle to enable SSH tunneling and add your credentials accordingly.
- Superset allows for 2 different type authenticaion (Basic + Private Key). These credentials should come from your service provider.
- Superset allows for two different types of authentication (Basic + Private Key). These credentials should come from your service provider.
3. Verify data is flowing
- Once SSH tunneling has been enabled, go to SQL Lab and write a query to verify data is properly flowing.
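A minimal `superset_config.py` sketch for enabling the toggle, assuming the feature flag is named `SSH_TUNNELING`:

```python
# superset_config.py -- a minimal sketch; the flag name is an assumption.
FEATURE_FLAGS = {
    "SSH_TUNNELING": True,
}
```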
View File
@ -15,14 +15,14 @@ Note not all fields are correctly categorized. The fields vary based on visualiz
### Datasource & Chart Type
| Field | Type | Notes |
| ----------------- | -------- | ----------------------------------- |
| `database_name` | _string_ | _Deprecated?_ |
| `datasource` | _string_ | `<datasouce_id>__<datasource_type>` |
| `datasource_id` | _string_ | _Deprecated?_ See `datasource` |
| `datasource_name` | _string_ | _Deprecated?_ |
| `datasource_type` | _string_ | _Deprecated?_ See `datasource` |
| `viz_type` | _string_ | The **Visualization Type** widget |
| Field | Type | Notes |
| ----------------- | -------- | ------------------------------------ |
| `database_name` | _string_ | _Deprecated?_ |
| `datasource` | _string_ | `<datasource_id>__<datasource_type>` |
| `datasource_id` | _string_ | _Deprecated?_ See `datasource` |
| `datasource_name` | _string_ | _Deprecated?_ |
| `datasource_type` | _string_ | _Deprecated?_ See `datasource` |
| `viz_type` | _string_ | The **Visualization Type** widget |
### Time
View File
@ -264,7 +264,7 @@ The database might be under heavy load, running too many queries. Please try aga
## Issue 1028
```
One or more parameters specified in the query are malformatted.
One or more parameters specified in the query are malformed.
```
The query contains one or more malformed template parameters. Please check your query and confirm that all template parameters are surrounded by double braces, for example, "{{ ds }}". Then, try running your query again.
View File
@ -4,20 +4,34 @@ hide_title: true
sidebar_position: 2
---
#### Version 2.1.1
| CVE | Title | Affected |
|:---------------|:------------------------------------------------------------------------|---------:|
| CVE-2023-36387 | Improper API permission for low privilege users | < 2.1.1 |
| CVE-2023-36388 | Improper API permission for low privilege users allows for SSRF | < 2.1.1 |
| CVE-2023-27523 | Improper data permission validation on Jinja templated queries | < 2.1.1 |
| CVE-2023-27526 | Improper Authorization check on import charts | < 2.1.1 |
| CVE-2023-39264 | Stack traces enabled by default | < 2.1.1 |
| CVE-2023-39265 | Possible Unauthorized Registration of SQLite Database Connections | < 2.1.1 |
| CVE-2023-37941 | Metadata db write access can lead to remote code execution | < 2.1.1 |
| CVE-2023-32672 | SQL parser edge case bypasses data access authorization | < 2.1.1 |
#### Version 2.1.0
| CVE | Title | Affected |
| :------------- | :---------------------------------------------------------------------- | -----------------:|
| CVE-2023-25504 | Possible SSRF on import datasets | <= 2.1.0 |
| CVE-2023-27524 | Session validation vulnerability when using provided default SECRET_KEY | <= 2.1.0 |
| CVE-2023-27525 | Incorrect default permissions for Gamma role | <= 2.1.0 |
| CVE-2023-30776 | Database connection password leak | <= 2.1.0 |
| CVE | Title | Affected |
|:---------------|:------------------------------------------------------------------------|---------:|
| CVE-2023-25504 | Possible SSRF on import datasets | < 2.1.0 |
| CVE-2023-27524 | Session validation vulnerability when using provided default SECRET_KEY | < 2.1.0 |
| CVE-2023-27525 | Incorrect default permissions for Gamma role | < 2.1.0 |
| CVE-2023-30776 | Database connection password leak | < 2.1.0 |
#### Version 2.0.1
| CVE | Title | Affected |
| :------------- | :---------------------------------------------------------- | -----------------:|
| CVE | Title | Affected |
|:---------------|:------------------------------------------------------------|------------------:|
| CVE-2022-41703 | SQL injection vulnerability in adhoc clauses | < 2.0.1 or <1.5.2 |
| CVE-2022-43717 | Cross-Site Scripting on dashboards | < 2.0.1 or <1.5.2 |
| CVE-2022-43718 | Cross-Site Scripting vulnerability on upload forms | < 2.0.1 or <1.5.2 |
View File
@ -15,7 +15,7 @@
# limitations under the License.
#
apiVersion: v2
appVersion: "2.1.0"
appVersion: "3.0.0"
description: Apache Superset is a modern, enterprise-ready business intelligence web application
name: superset
icon: https://artifacthub.io/image/68c1d717-0e97-491f-b046-754e46f46922@2x
@ -29,7 +29,7 @@ maintainers:
- name: craig-rueda
email: craig@craigrueda.com
url: https://github.com/craig-rueda
version: 0.10.6
version: 0.10.9
dependencies:
- name: postgresql
version: 12.1.6
View File
@ -23,7 +23,7 @@ NOTE: This file is generated by helm-docs: https://github.com/norwoodj/helm-docs
# superset
![Version: 0.10.6](https://img.shields.io/badge/Version-0.10.6-informational?style=flat-square)
![Version: 0.10.9](https://img.shields.io/badge/Version-0.10.9-informational?style=flat-square)
Apache Superset is a modern, enterprise-ready business intelligence web application
@ -169,6 +169,10 @@ helm install my-superset superset/superset
| supersetCeleryFlower.startupProbe.timeoutSeconds | int | `1` | |
| supersetCeleryFlower.topologySpreadConstraints | list | `[]` | TopologySpreadConstrains to be added to supersetCeleryFlower deployments |
| supersetNode.affinity | object | `{}` | Affinity to be added to supersetNode deployment |
| supersetNode.autoscaling.enabled | bool | `false` | |
| supersetNode.autoscaling.maxReplicas | int | `100` | |
| supersetNode.autoscaling.minReplicas | int | `1` | |
| supersetNode.autoscaling.targetCPUUtilizationPercentage | int | `80` | |
| supersetNode.command | list | See `values.yaml` | Startup command |
| supersetNode.connections.db_host | string | `"{{ .Release.Name }}-postgresql"` | |
| supersetNode.connections.db_name | string | `"superset"` | |
@ -257,6 +261,10 @@ helm install my-superset superset/superset
| supersetWebsockets.strategy | object | `{}` | |
| supersetWebsockets.topologySpreadConstraints | list | `[]` | TopologySpreadConstrains to be added to supersetWebsockets deployments |
| supersetWorker.affinity | object | `{}` | Affinity to be added to supersetWorker deployment |
| supersetWorker.autoscaling.enabled | bool | `false` | |
| supersetWorker.autoscaling.maxReplicas | int | `100` | |
| supersetWorker.autoscaling.minReplicas | int | `1` | |
| supersetWorker.autoscaling.targetCPUUtilizationPercentage | int | `80` | |
| supersetWorker.command | list | a `celery worker` command | Worker startup command |
| supersetWorker.containerSecurityContext | object | `{}` | |
| supersetWorker.deploymentAnnotations | object | `{}` | Annotations to be added to supersetWorker deployment |
View File
@ -34,7 +34,9 @@ metadata:
annotations: {{- toYaml .Values.supersetWorker.deploymentAnnotations | nindent 4 }}
{{- end }}
spec:
{{- if not .Values.supersetWorker.autoscaling.enabled }}
replicas: {{ .Values.supersetWorker.replicaCount }}
{{- end }}
selector:
matchLabels:
app: {{ template "superset.name" . }}-worker
View File
@ -34,7 +34,9 @@ metadata:
annotations: {{- toYaml .Values.supersetNode.deploymentAnnotations | nindent 4 }}
{{- end }}
spec:
{{- if not .Values.supersetNode.autoscaling.enabled }}
replicas: {{ .Values.supersetNode.replicaCount }}
{{- end }}
{{- if .Values.supersetNode.strategy }}
strategy: {{- toYaml .Values.supersetNode.strategy | nindent 4 }}
{{- end }}
View File
@ -0,0 +1,54 @@
{{/*
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/}}
{{- if .Values.supersetNode.autoscaling.enabled }}
apiVersion: autoscaling/v2
kind: HorizontalPodAutoscaler
metadata:
name: {{ include "superset.fullname" . }}-hpa
labels:
app: {{ template "superset.name" . }}
chart: {{ template "superset.chart" . }}
release: {{ .Release.Name }}
heritage: {{ .Release.Service }}
spec:
scaleTargetRef:
apiVersion: apps/v1
kind: Deployment
name: {{ include "superset.fullname" . }}
minReplicas: {{ .Values.supersetNode.autoscaling.minReplicas }}
maxReplicas: {{ .Values.supersetNode.autoscaling.maxReplicas }}
metrics:
{{- if .Values.supersetNode.autoscaling.targetCPUUtilizationPercentage }}
- type: Resource
resource:
name: cpu
target:
type: Utilization
averageUtilization: {{ .Values.supersetNode.autoscaling.targetCPUUtilizationPercentage }}
{{- end }}
{{- if .Values.supersetNode.autoscaling.targetMemoryUtilizationPercentage }}
- type: Resource
resource:
name: memory
target:
type: Utilization
averageUtilization: {{ .Values.supersetNode.autoscaling.targetMemoryUtilizationPercentage }}
{{- end }}
{{- end }}
View File
@ -0,0 +1,54 @@
{{/*
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/}}
{{- if .Values.supersetWorker.autoscaling.enabled }}
apiVersion: autoscaling/v2
kind: HorizontalPodAutoscaler
metadata:
name: {{ include "superset.fullname" . }}-hpa-worker
labels:
app: {{ template "superset.name" . }}
chart: {{ template "superset.chart" . }}
release: {{ .Release.Name }}
heritage: {{ .Release.Service }}
spec:
scaleTargetRef:
apiVersion: apps/v1
kind: Deployment
name: {{ include "superset.fullname" . }}-worker
minReplicas: {{ .Values.supersetWorker.autoscaling.minReplicas }}
maxReplicas: {{ .Values.supersetWorker.autoscaling.maxReplicas }}
metrics:
{{- if .Values.supersetWorker.autoscaling.targetCPUUtilizationPercentage }}
- type: Resource
resource:
name: cpu
target:
type: Utilization
averageUtilization: {{ .Values.supersetWorker.autoscaling.targetCPUUtilizationPercentage }}
{{- end }}
{{- if .Values.supersetWorker.autoscaling.targetMemoryUtilizationPercentage }}
- type: Resource
resource:
name: memory
target:
type: Utilization
averageUtilization: {{ .Values.supersetWorker.autoscaling.targetMemoryUtilizationPercentage }}
{{- end }}
{{- end }}
View File
@ -240,6 +240,13 @@ hostAliases: []
# Superset node configuration
supersetNode:
replicaCount: 1
autoscaling:
enabled: false
minReplicas: 1
maxReplicas: 100
targetCPUUtilizationPercentage: 80
# targetMemoryUtilizationPercentage: 80
# -- Startup command
# @default -- See `values.yaml`
command:
@ -334,6 +341,13 @@ supersetNode:
# Superset Celery worker configuration
supersetWorker:
replicaCount: 1
autoscaling:
enabled: false
minReplicas: 1
maxReplicas: 100
targetCPUUtilizationPercentage: 80
# targetMemoryUtilizationPercentage: 80
# -- Worker startup command
# @default -- a `celery worker` command
command:
View File
@ -71,7 +71,7 @@ cron-descriptor==1.2.24
# via apache-superset
croniter==1.0.15
# via apache-superset
cryptography==41.0.0
cryptography==41.0.2
# via
# apache-superset
# paramiko
@ -96,7 +96,7 @@ flask==2.2.5
# flask-migrate
# flask-sqlalchemy
# flask-wtf
flask-appbuilder==4.3.6
flask-appbuilder==4.3.7
# via apache-superset
flask-babel==1.0.0
# via flask-appbuilder
@ -134,7 +134,7 @@ greenlet==2.0.2
# via
# shillelagh
# sqlalchemy
gunicorn==20.1.0
gunicorn==21.2.0
# via apache-superset
hashids==1.3.1
# via apache-superset
View File
@ -16,7 +16,7 @@
#
-r development.in
-r integration.in
-e file:.[bigquery,hive,presto,prophet,trino,gsheets]
-e file:.[bigquery,hive,presto,prophet,trino,gsheets,playwright]
docker
flask-testing
freezegun
View File
@ -1,4 +1,4 @@
# SHA1:78d0270a4f583095e0587aa21f57fc2ff7fe8b84
# SHA1:95300275481abb1413eb98a5c79fb7cf96814cdd
#
# This file is autogenerated by pip-compile-multi
# To update, run:
@ -104,6 +104,8 @@ parameterized==0.9.0
# via -r requirements/testing.in
pathable==0.4.3
# via jsonschema-spec
playwright==1.37.0
# via apache-superset
prophet==1.1.1
# via apache-superset
proto-plus==1.22.2
View File
@ -46,7 +46,7 @@ while (directories.length) {
// Check for existence of js, jsx, ts, and tsx files. Show a filled box if only ts and tsx,
// show an empty box if any js or jsx, and don't print the line if neither exist in the
// directory.
const hasTypescriptFiles =
const hasTypeScriptFiles =
getFilesByExtensions("./", [".ts", ".tsx"]).length > 0;
const hasJavaScriptFiles =
getFilesByExtensions("./", [".js", ".jsx"]).length > 0;
@ -57,7 +57,7 @@ while (directories.length) {
curDirectory.split("/").length - 1
)}- [ ] \`${curDirectory}\``
);
} else if (hasTypescriptFiles) {
} else if (hasTypeScriptFiles) {
console.log(
`${" ".repeat(
curDirectory.split("/").length - 1
View File
@ -81,10 +81,10 @@ setup(
"colorama",
"croniter>=0.3.28",
"cron-descriptor",
"cryptography>=41.0.0, <41.0.2",
"cryptography>=41.0.2, <41.1.0",
"deprecation>=2.1.0, <2.2.0",
"flask>=2.2.5, <3.0.0",
"flask-appbuilder>=4.3.6, <5.0.0",
"flask-appbuilder>=4.3.7, <5.0.0",
"flask-caching>=1.11.1, <2.0",
"flask-compress>=1.13, <2.0",
"flask-talisman>=1.0.0, <2.0",
@ -93,7 +93,7 @@ setup(
"flask-wtf>=1.1.0, <2.0",
"func_timeout",
"geopy",
"gunicorn>=20.1.0; sys_platform != 'win32'",
"gunicorn>=21.2.0, <22.0; sys_platform != 'win32'",
"hashids>=1.3.1, <2",
"holidays>=0.23, <0.24",
"humanize",
@ -183,6 +183,7 @@ setup(
],
"oracle": ["cx-Oracle>8.0.0, <8.1"],
"pinot": ["pinotdb>=0.3.3, <0.4"],
"playwright": ["playwright>=1.37.0, <2"],
"postgres": ["psycopg2-binary==2.9.6"],
"presto": ["pyhive[presto]>=0.6.5"],
"trino": ["trino>=0.324.0"],
View File
@ -72,6 +72,6 @@ This is used by consumers who install the embedded sdk via npm, yarn, or other p
Webpack is used to bundle the `bundle` directory,
for use directly in the browser with no build step e.g. when importing via unpkg.
Typescript outputs type definition files to the `dist` directory.
TypeScript outputs type definition files to the `dist` directory.
Which of these outputs is used by the library consumer is determined by our package.json's `main`, `module`, and `types` fields.
View File
@ -1 +1 @@
v16.9.1
v16.20.2
View File
@ -38,6 +38,23 @@ export default defineConfig({
// We've imported your old cypress plugins here.
// You may want to clean this up later by importing these.
setupNodeEvents(on, config) {
// ECONNRESET on Chrome/Chromium 117.0.5851.0 when using Cypress <12.15.0
// Check https://github.com/cypress-io/cypress/issues/27804 for context
// TODO: This workaround should be removed when upgrading Cypress
on('before:browser:launch', (browser, launchOptions) => {
if (browser.name === 'chrome' && browser.isHeadless) {
// eslint-disable-next-line no-param-reassign
launchOptions.args = launchOptions.args.map(arg => {
if (arg === '--headless') {
return '--headless=new';
}
return arg;
});
}
return launchOptions;
});
// eslint-disable-next-line global-require,import/extensions
return require('./cypress/plugins/index.js')(on, config);
},
View File
@ -32,13 +32,13 @@ function openDashboardsAddedTo() {
cy.getBySel('actions-trigger').click();
cy.get('.ant-dropdown-menu-submenu-title')
.contains('Dashboards added to')
.trigger('mouseover');
.trigger('mouseover', { force: true });
}
function closeDashboardsAddedTo() {
cy.get('.ant-dropdown-menu-submenu-title')
.contains('Dashboards added to')
.trigger('mouseout');
.trigger('mouseout', { force: true });
cy.getBySel('actions-trigger').click();
}
View File
@ -20,7 +20,7 @@ import { selectResultsTab } from './sqllab.helper';
describe.skip('SqlLab datasource panel', () => {
beforeEach(() => {
cy.visit('/superset/sqllab');
cy.visit('/sqllab');
});
// TODO the test below is flaky, and has been disabled for the time being
View File
@ -25,7 +25,7 @@ function parseClockStr(node: JQuery) {
describe('SqlLab query panel', () => {
beforeEach(() => {
cy.visit('/superset/sqllab');
cy.visit('/sqllab');
});
it.skip('supports entering and running a query', () => {
View File
@ -19,7 +19,7 @@
describe('SqlLab view', () => {
beforeEach(() => {
cy.visit('/superset/sqllab');
cy.visit('/sqllab');
});
it('should load the SqlLab', () => {
View File
@ -18,7 +18,7 @@
*/
describe('SqlLab query tabs', () => {
beforeEach(() => {
cy.visit('/superset/sqllab');
cy.visit('/sqllab');
});
const tablistSelector = '[data-test="sql-editor-tabs"] > [role="tablist"]';
View File
@ -7481,9 +7481,9 @@
}
},
"node_modules/get-func-name": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.0.tgz",
"integrity": "sha512-Hm0ixYtaSZ/V7C8FJrtZIuBBI+iSgL+1Aq82zSu8VQNB4S3Gk8e7Qs3VwBDJAhmRZcFqkl3tQu36g/Foh5I5ig==",
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.2.tgz",
"integrity": "sha512-8vXOvuE167CtIc3OyItco7N/dpRtBbYOsPsXCz7X/PMnlGjYjSGuZJgM1Y7mmew7BKf9BqvLX2tnOVy1BBUsxQ==",
"engines": {
"node": "*"
}
@ -17953,9 +17953,9 @@
"integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg=="
},
"get-func-name": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.0.tgz",
"integrity": "sha512-Hm0ixYtaSZ/V7C8FJrtZIuBBI+iSgL+1Aq82zSu8VQNB4S3Gk8e7Qs3VwBDJAhmRZcFqkl3tQu36g/Foh5I5ig=="
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.2.tgz",
"integrity": "sha512-8vXOvuE167CtIc3OyItco7N/dpRtBbYOsPsXCz7X/PMnlGjYjSGuZJgM1Y7mmew7BKf9BqvLX2tnOVy1BBUsxQ=="
},
"get-intrinsic": {
"version": "1.1.1",
View File
@ -17,6 +17,9 @@
* under the License.
*/
// timezone for unit tests
process.env.TZ = 'America/New_York';
module.exports = {
testRegex:
'\\/superset-frontend\\/(spec|src|plugins|packages|tools)\\/.*(_spec|\\.test)\\.[jt]sx?$',
View File
@ -191,6 +191,7 @@
"@types/jquery": "^3.5.8",
"@types/js-levenshtein": "^1.1.0",
"@types/json-bigint": "^1.0.1",
"@types/mousetrap": "^1.6.11",
"@types/react": "^16.9.43",
"@types/react-dom": "^16.9.8",
"@types/react-gravatar": "^2.6.8",
@ -19514,6 +19515,12 @@
"integrity": "sha512-jhuKLIRrhvCPLqwPcx6INqmKeiA5EWrsCOPhrlFSrbrmU4ZMPjj5Ul/oLCMDO98XRUIwVm78xICz4EPCektzeQ==",
"dev": true
},
"node_modules/@types/mousetrap": {
"version": "1.6.11",
"resolved": "https://registry.npmjs.org/@types/mousetrap/-/mousetrap-1.6.11.tgz",
"integrity": "sha512-F0oAily9Q9QQpv9JKxKn0zMKfOo36KHCW7myYsmUyf2t0g+sBTbG3UleTPoguHdE1z3GLFr3p7/wiOio52QFjQ==",
"dev": true
},
"node_modules/@types/ms": {
"version": "0.7.31",
"resolved": "https://registry.npmjs.org/@types/ms/-/ms-0.7.31.tgz",
@ -77984,7 +77991,7 @@
"@mapbox/geojson-extent": "^1.0.1",
"@math.gl/web-mercator": "^3.2.2",
"@types/d3-array": "^2.0.0",
"@types/mapbox__geojson-extent": "*",
"@types/mapbox__geojson-extent": "^1.0.0",
"@types/underscore": "^1.11.6",
"@types/urijs": "^1.19.19",
"bootstrap-slider": "^10.0.0",
@ -79317,6 +79324,12 @@
"integrity": "sha512-jhuKLIRrhvCPLqwPcx6INqmKeiA5EWrsCOPhrlFSrbrmU4ZMPjj5Ul/oLCMDO98XRUIwVm78xICz4EPCektzeQ==",
"dev": true
},
"@types/mousetrap": {
"version": "1.6.11",
"resolved": "https://registry.npmjs.org/@types/mousetrap/-/mousetrap-1.6.11.tgz",
"integrity": "sha512-F0oAily9Q9QQpv9JKxKn0zMKfOo36KHCW7myYsmUyf2t0g+sBTbG3UleTPoguHdE1z3GLFr3p7/wiOio52QFjQ==",
"dev": true
},
"@types/ms": {
"version": "0.7.31",
"resolved": "https://registry.npmjs.org/@types/ms/-/ms-0.7.31.tgz",
View File
@ -256,6 +256,7 @@
"@types/jquery": "^3.5.8",
"@types/js-levenshtein": "^1.1.0",
"@types/json-bigint": "^1.0.1",
"@types/mousetrap": "^1.6.11",
"@types/react": "^16.9.43",
"@types/react-dom": "^16.9.8",
"@types/react-gravatar": "^2.6.8",
@ -348,8 +349,8 @@
"webpack-sources": "^3.2.3"
},
"engines": {
"node": "^16.9.1",
"npm": "^7.5.4 || ^8.1.2"
"node": "^16.20.2",
"npm": "^8.19.4"
},
"overrides": {
"d3-color": "^3.1.0",
View File
@ -71,8 +71,9 @@ export const dndGroupByControl: SharedControlConfig<
default: [],
includeTime: false,
description: t(
'One or many columns to group by. High cardinality groupings should include a sort by metric ' +
'and series limit to limit the number of fetched and rendered series.',
'Dimensions contain qualitative values such as names, dates, or geographical data. ' +
'Use dimensions to categorize, segment, and reveal the details in your data. ' +
'Dimensions affect the level of detail in the view.',
),
optionRenderer: (c: ColumnMeta) => <ColumnOption showType column={c} />,
valueRenderer: (c: ColumnMeta) => <ColumnOption column={c} />,
@ -108,7 +109,7 @@ export const dndGroupByControl: SharedControlConfig<
export const dndColumnsControl: typeof dndGroupByControl = {
...dndGroupByControl,
label: t('Columns'),
description: t('One or many columns to pivot as columns'),
description: t('Add dataset columns here to group the pivot table columns.'),
};
export const dndSeriesControl: typeof dndGroupByControl = {
@ -118,8 +119,7 @@ export const dndSeriesControl: typeof dndGroupByControl = {
default: null,
description: t(
'Defines the grouping of entities. ' +
'Each series is shown as a specific color on the chart and ' +
'has a legend toggle',
'Each series is represented by a specific color in the chart.',
),
};
@ -166,21 +166,29 @@ export const dndAdhocMetricsControl: SharedControlConfig<
datasource,
datasourceType: datasource?.type,
}),
description: t('One or many metrics to display'),
description: t(
'Select one or many metrics to display. ' +
'You can use an aggregation function on a column ' +
'or write custom SQL to create a metric.',
),
};
export const dndAdhocMetricControl: typeof dndAdhocMetricsControl = {
...dndAdhocMetricsControl,
multi: false,
label: t('Metric'),
description: t('Metric'),
description: t(
'Select a metric to display. ' +
'You can use an aggregation function on a column ' +
'or write custom SQL to create a metric.',
),
};
export const dndAdhocMetricControl2: typeof dndAdhocMetricControl = {
...dndAdhocMetricControl,
label: t('Right Axis Metric'),
clearable: true,
description: t('Choose a metric for right axis'),
description: t('Select a metric to display on the right axis'),
};
export const dndSortByControl: SharedControlConfig<
@ -190,8 +198,8 @@ export const dndSortByControl: SharedControlConfig<
label: t('Sort by'),
default: null,
description: t(
'Metric used to define how the top series are sorted if a series or row limit is present. ' +
'If undefined reverts to the first metric (where appropriate).',
'This metric is used to define row selection criteria (how the rows are sorted) if a series or row limit is present. ' +
'If not defined, it reverts to the first metric (where appropriate).',
),
mapStateToProps: ({ datasource }) => ({
columns: datasource?.columns || [],
@ -211,14 +219,18 @@ export const dndSizeControl: typeof dndAdhocMetricControl = {
export const dndXControl: typeof dndAdhocMetricControl = {
...dndAdhocMetricControl,
label: t('X Axis'),
description: t('Metric assigned to the [X] axis'),
description: t(
"The dataset column/metric that returns the values on your chart's x-axis.",
),
default: null,
};
export const dndYControl: typeof dndAdhocMetricControl = {
...dndAdhocMetricControl,
label: t('Y Axis'),
description: t('Metric assigned to the [Y] axis'),
description: t(
"The dataset column/metric that returns the values on your chart's y-axis.",
),
default: null,
};
View File
@ -198,11 +198,9 @@ const time_grain_sqla: SharedControlConfig<'SelectControl'> = {
: 'P1D';
},
description: t(
'The time granularity for the visualization. This ' +
'applies a date transformation to alter ' +
'your time column and defines a new time granularity. ' +
'The options here are defined on a per database ' +
'engine basis in the Superset source code.',
'Select a time grain for the visualization. The ' +
'grain is the time interval represented by a ' +
'single point on the chart.',
),
mapStateToProps: ({ datasource }) => ({
choices: (datasource as Dataset)?.time_grain_sqla || [],
@ -232,7 +230,7 @@ const time_range: SharedControlConfig<'DateFilterControl'> = {
label: TIME_FILTER_LABELS.time_range,
default: NO_TIME_RANGE, // this value is an empty filter constant so shouldn't translate it.
description: t(
'The time range for the visualization. All relative times, e.g. "Last month", ' +
'This control filters the whole chart based on the selected time range. All relative times, e.g. "Last month", ' +
'"Last 7 days", "now", etc. are evaluated on the server using the server\'s ' +
'local time (sans timezone). All tooltips and placeholder times are expressed ' +
'in UTC (sans timezone). The timestamps are then evaluated by the database ' +
@ -248,14 +246,18 @@ const row_limit: SharedControlConfig<'SelectControl'> = {
validators: [legacyValidateInteger],
default: 10000,
choices: formatSelectOptions(ROW_LIMIT_OPTIONS),
description: t('Limits the number of rows that get displayed.'),
description: t(
'Limits the number of rows that are computed in the query that is the source of the data used for this chart.',
),
};
const order_desc: SharedControlConfig<'CheckboxControl'> = {
type: 'CheckboxControl',
label: t('Sort Descending'),
default: true,
description: t('Whether to sort descending or ascending'),
description: t(
'If enabled, this control sorts the results/values descending, otherwise it sorts the results ascending.',
),
visibility: ({ controls }) =>
Boolean(
controls?.timeseries_limit_metric.value &&
View File
@ -58,6 +58,7 @@ export enum AppSection {
export type FilterState = { value?: any; [key: string]: any };
export type DataMask = {
__cache?: FilterState;
extraFormData?: ExtraFormData;
filterState?: FilterState;
ownState?: JsonObject;
View File
@ -0,0 +1,63 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import finestTemporalGrain from './finestTemporalGrain';
test('finestTemporalGrain', () => {
const monthFormatter = finestTemporalGrain([
new Date('2003-01-01 00:00:00Z').getTime(),
new Date('2003-02-01 00:00:00Z').getTime(),
]);
expect(monthFormatter(new Date('2003-01-01 00:00:00Z').getTime())).toBe(
'2003-01-01',
);
expect(monthFormatter(new Date('2003-02-01 00:00:00Z').getTime())).toBe(
'2003-02-01',
);
const yearFormatter = finestTemporalGrain([
new Date('2003-01-01 00:00:00Z').getTime(),
new Date('2004-01-01 00:00:00Z').getTime(),
]);
expect(yearFormatter(new Date('2003-01-01 00:00:00Z').getTime())).toBe(
'2003',
);
expect(yearFormatter(new Date('2004-01-01 00:00:00Z').getTime())).toBe(
'2004',
);
const milliSecondFormatter = finestTemporalGrain([
new Date('2003-01-01 00:00:00Z').getTime(),
new Date('2003-04-05 06:07:08.123Z').getTime(),
]);
expect(milliSecondFormatter(new Date('2003-01-01 00:00:00Z').getTime())).toBe(
'2003-01-01 00:00:00.000',
);
const localTimeFormatter = finestTemporalGrain(
[
new Date('2003-01-01 00:00:00Z').getTime(),
new Date('2003-02-01 00:00:00Z').getTime(),
],
true,
);
expect(localTimeFormatter(new Date('2003-01-01 00:00:00Z').getTime())).toBe(
'2002-12-31 19:00',
);
});
View File
@ -0,0 +1,80 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { utcFormat, timeFormat } from 'd3-time-format';
import { utcUtils, localTimeUtils } from '../utils/d3Time';
import TimeFormatter from '../TimeFormatter';
/*
* A formatter that examines all the values, and uses the finest temporal grain.
*/
export default function finestTemporalGrain(
values: any[],
useLocalTime = false,
) {
const format = useLocalTime ? timeFormat : utcFormat;
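// Note: hour-level values share the minute-level format, and month-level values share the day-level format.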
const formatMillisecond = format('%Y-%m-%d %H:%M:%S.%L');
const formatSecond = format('%Y-%m-%d %H:%M:%S');
const formatMinute = format('%Y-%m-%d %H:%M');
const formatHour = format('%Y-%m-%d %H:%M');
const formatDay = format('%Y-%m-%d');
const formatMonth = format('%Y-%m-%d');
const formatYear = format('%Y');
const {
hasMillisecond,
hasSecond,
hasMinute,
hasHour,
isNotFirstDayOfMonth,
isNotFirstMonth,
} = useLocalTime ? localTimeUtils : utcUtils;
let formatFunc = formatYear;
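// Walk the values, promoting formatFunc to a finer grain whenever any value needs it.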
values.forEach((value: any) => {
if (formatFunc === formatYear && isNotFirstMonth(value)) {
formatFunc = formatMonth;
}
if (formatFunc === formatMonth && isNotFirstDayOfMonth(value)) {
formatFunc = formatDay;
}
if (formatFunc === formatDay && hasHour(value)) {
formatFunc = formatHour;
}
if (formatFunc === formatHour && hasMinute(value)) {
formatFunc = formatMinute;
}
if (formatFunc === formatMinute && hasSecond(value)) {
formatFunc = formatSecond;
}
if (formatFunc === formatSecond && hasMillisecond(value)) {
formatFunc = formatMillisecond;
}
});
return new TimeFormatter({
description:
'Use the finest grain in an array of dates to format all dates in the array',
formatFunc,
id: 'finest_temporal_grain',
label: 'Format temporal columns with the finest grain',
useLocalTime,
});
}
View File
@ -35,6 +35,7 @@ export { default as createMultiFormatter } from './factories/createMultiFormatte
export { default as smartDateFormatter } from './formatters/smartDate';
export { default as smartDateDetailedFormatter } from './formatters/smartDateDetailed';
export { default as smartDateVerboseFormatter } from './formatters/smartDateVerbose';
export { default as finestTemporalGrainFormatter } from './formatters/finestTemporalGrain';
export { default as normalizeTimestamp } from './utils/normalizeTimestamp';
export { default as denormalizeTimestamp } from './utils/denormalizeTimestamp';

View File
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import React from 'react';
import { SuperChart, getChartTransformPropsRegistry } from '@superset-ui/core';
import {
boolean,
number,
select,
text,
withKnobs,
} from '@storybook/addon-knobs';
import {
EchartsBubbleChartPlugin,
BubbleTransformProps,
} from '@superset-ui/plugin-chart-echarts';
import { simpleBubbleData } from './data';
import { withResizableChartDemo } from '../../../../shared/components/ResizableChartDemo';
new EchartsBubbleChartPlugin().configure({ key: 'bubble_v2' }).register();
getChartTransformPropsRegistry().registerValue(
'bubble_v2',
BubbleTransformProps,
);
export default {
title: 'Chart Plugins/plugin-chart-echarts/Bubble',
decorators: [withKnobs, withResizableChartDemo],
};
export const SimpleBubble = ({ width, height }) => (
<SuperChart
chartType="bubble_v2"
width={width}
height={height}
queriesData={[{ data: simpleBubbleData }]}
formData={{
entity: 'customer_name',
x: 'count',
y: {
aggregate: 'SUM',
column: {
advanced_data_type: null,
certification_details: null,
certified_by: null,
column_name: 'price_each',
description: null,
expression: null,
filterable: true,
groupby: true,
id: 570,
is_certified: false,
is_dttm: false,
python_date_format: null,
type: 'DOUBLE PRECISION',
type_generic: 0,
verbose_name: null,
warning_markdown: null,
},
expressionType: 'SIMPLE',
hasCustomLabel: false,
isNew: false,
label: 'SUM(price_each)',
optionName: 'metric_d9rpclvys0a_fs4bs0m2l1f',
sqlExpression: null,
},
adhocFilters: [],
size: {
aggregate: 'SUM',
column: {
advanced_data_type: null,
certification_details: null,
certified_by: null,
column_name: 'sales',
description: null,
expression: null,
filterable: true,
groupby: true,
id: 571,
is_certified: false,
is_dttm: false,
python_date_format: null,
type: 'DOUBLE PRECISION',
type_generic: 0,
verbose_name: null,
warning_markdown: null,
},
expressionType: 'SIMPLE',
hasCustomLabel: false,
isNew: false,
label: 'SUM(sales)',
optionName: 'metric_itj9wncjxk_dp3yibib0q',
sqlExpression: null,
},
limit: 10,
colorScheme: 'supersetColors',
maxBubbleSize: select('Max bubble size', [5, 10, 25, 50, 100, 125], 10),
xAxisTitle: text('X axis title', ''),
xAxisTitleMargin: number('X axis title margin', 30),
yAxisTitle: text('Y axis title', ''),
yAxisTitleMargin: number('Y axis title margin', 30),
yAxisTitlePosition: 'Left',
xAxisFormat: null,
logYAxis: boolean('Log Y axis', false),
yAxisFormat: null,
logXAxis: boolean('Log X axis', false),
truncateYAxis: false,
yAxisBounds: [],
extraFormData: {},
}}
/>
);
View File
@ -0,0 +1,80 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
export const simpleBubbleData = [
{
customer_name: 'AV Stores, Co.',
count: 51,
'SUM(price_each)': 3975.33,
'SUM(sales)': 157807.80999999997,
},
{
customer_name: 'Alpha Cognac',
count: 20,
'SUM(price_each)': 1701.95,
'SUM(sales)': 70488.44,
},
{
customer_name: 'Amica Models & Co.',
count: 26,
'SUM(price_each)': 2218.41,
'SUM(sales)': 94117.26000000002,
},
{
customer_name: "Anna's Decorations, Ltd",
count: 46,
'SUM(price_each)': 3843.67,
'SUM(sales)': 153996.13000000003,
},
{
customer_name: 'Atelier graphique',
count: 7,
'SUM(price_each)': 558.4300000000001,
'SUM(sales)': 24179.96,
},
{
customer_name: 'Australian Collectables, Ltd',
count: 23,
'SUM(price_each)': 1809.7099999999998,
'SUM(sales)': 64591.46000000001,
},
{
customer_name: 'Australian Collectors, Co.',
count: 55,
'SUM(price_each)': 4714.479999999999,
'SUM(sales)': 200995.40999999997,
},
{
customer_name: 'Australian Gift Network, Co',
count: 15,
'SUM(price_each)': 1271.05,
'SUM(sales)': 59469.11999999999,
},
{
customer_name: 'Auto Assoc. & Cie.',
count: 18,
'SUM(price_each)': 1484.8600000000001,
'SUM(sales)': 64834.32000000001,
},
{
customer_name: 'Auto Canal Petit',
count: 27,
'SUM(price_each)': 2188.82,
'SUM(sales)': 93170.65999999999,
},
];
File diff suppressed because one or more lines are too long
View File
@ -82,7 +82,8 @@ function buildHierarchy(rows) {
let currentNode = root;
for (let level = 0; level < levels.length; level += 1) {
const children = currentNode.children || [];
const nodeName = levels[level].toString();
const node = levels[level];
const nodeName = node ? node.toString() : t('N/A');
// If the next node has the name '0', it will
const isLeafNode = level >= levels.length - 1 || levels[level + 1] === 0;
let childNode;
View File
@ -24,7 +24,7 @@
*/
/* eslint no-underscore-dangle: ["error", { "allow": ["", "__timestamp"] }] */
import React from 'react';
import React, { memo, useCallback, useEffect, useRef, useState } from 'react';
import {
CategoricalColorNamespace,
Datasource,
@ -40,7 +40,7 @@ import sandboxedEval from './utils/sandbox';
// eslint-disable-next-line import/extensions
import fitViewport, { Viewport } from './utils/fitViewport';
import {
DeckGLContainer,
DeckGLContainerHandle,
DeckGLContainerStyledWrapper,
} from './DeckGLContainer';
import { Point } from './types';
@ -83,113 +83,51 @@ export type CategoricalDeckGLContainerProps = {
setControlValue: (control: string, value: JsonValue) => void;
};
export type CategoricalDeckGLContainerState = {
formData?: QueryFormData;
viewport: Viewport;
categories: JsonObject;
};
const CategoricalDeckGLContainer = (props: CategoricalDeckGLContainerProps) => {
const containerRef = useRef<DeckGLContainerHandle>(null);
export default class CategoricalDeckGLContainer extends React.PureComponent<
CategoricalDeckGLContainerProps,
CategoricalDeckGLContainerState
> {
containerRef = React.createRef<DeckGLContainer>();
/*
* A Deck.gl container that handles categories.
*
* The container will have an interactive legend, populated from the
* categories present in the data.
*/
constructor(props: CategoricalDeckGLContainerProps) {
super(props);
this.state = this.getStateFromProps(props);
this.getLayers = this.getLayers.bind(this);
this.toggleCategory = this.toggleCategory.bind(this);
this.showSingleCategory = this.showSingleCategory.bind(this);
}
UNSAFE_componentWillReceiveProps(nextProps: CategoricalDeckGLContainerProps) {
if (nextProps.payload.form_data !== this.state.formData) {
this.setState({ ...this.getStateFromProps(nextProps) });
}
}
// eslint-disable-next-line class-methods-use-this
getStateFromProps(
props: CategoricalDeckGLContainerProps,
state?: CategoricalDeckGLContainerState,
) {
const features = props.payload.data.features || [];
const categories = getCategories(props.formData, features);
// the state is computed only from the payload; if it hasn't changed, do
// not recompute state since this would reset selections and/or the play
// slider position due to changes in form controls
if (state && props.payload.form_data === state.formData) {
return { ...state, categories };
}
const { width, height, formData } = props;
let { viewport } = props;
if (formData.autozoom) {
const getAdjustedViewport = useCallback(() => {
let viewport = { ...props.viewport };
if (props.formData.autozoom) {
viewport = fitViewport(viewport, {
width,
height,
points: props.getPoints(features),
width: props.width,
height: props.height,
points: props.getPoints(props.payload.data.features || []),
});
}
if (viewport.zoom < 0) {
viewport.zoom = 0;
}
return viewport;
}, [props]);
return {
viewport,
selected: [],
lastClick: 0,
formData: props.payload.form_data,
categories,
};
}
const [categories, setCategories] = useState<JsonObject>(
getCategories(props.formData, props.payload.data.features || []),
);
const [stateFormData, setStateFormData] = useState<JsonObject>(
props.payload.form_data,
);
const [viewport, setViewport] = useState(getAdjustedViewport());
getLayers() {
const { getLayer, payload, formData: fd, onAddFilter } = this.props;
let features = payload.data.features ? [...payload.data.features] : [];
useEffect(() => {
if (props.payload.form_data !== stateFormData) {
const features = props.payload.data.features || [];
const categories = getCategories(props.formData, features);
// Add colors from categories or fixed color
features = this.addColor(features, fd);
// Apply user defined data mutator if defined
if (fd.js_data_mutator) {
const jsFnMutator = sandboxedEval(fd.js_data_mutator);
features = jsFnMutator(features);
setViewport(getAdjustedViewport());
setStateFormData(props.payload.form_data);
setCategories(categories);
}
}, [getAdjustedViewport, props, stateFormData]);
// Show only categories selected in the legend
const cats = this.state.categories;
if (fd.dimension) {
features = features.filter(d => cats[d.cat_color]?.enabled);
const setTooltip = useCallback((tooltip: TooltipProps['tooltip']) => {
const { current } = containerRef;
if (current) {
current.setTooltip(tooltip);
}
}, []);
const filteredPayload = {
...payload,
data: { ...payload.data, features },
};
return [
getLayer(
fd,
filteredPayload,
onAddFilter,
this.setTooltip,
this.props.datasource,
) as Layer,
];
}
// eslint-disable-next-line class-methods-use-this
addColor(data: JsonObject[], fd: QueryFormData) {
const addColor = useCallback((data: JsonObject[], fd: QueryFormData) => {
const c = fd.color_picker || { r: 0, g: 0, b: 0, a: 1 };
const colorFn = getScale(fd.color_scheme);
@ -203,67 +141,99 @@ export default class CategoricalDeckGLContainer extends React.PureComponent<
return d;
});
}
}, []);
toggleCategory(category: string) {
const categoryState = this.state.categories[category];
const categories = {
...this.state.categories,
[category]: {
...categoryState,
enabled: !categoryState.enabled,
},
const getLayers = useCallback(() => {
const { getLayer, payload, formData: fd, onAddFilter } = props;
let features = payload.data.features ? [...payload.data.features] : [];
// Add colors from categories or fixed color
features = addColor(features, fd);
// Apply user defined data mutator if defined
if (fd.js_data_mutator) {
const jsFnMutator = sandboxedEval(fd.js_data_mutator);
features = jsFnMutator(features);
}
// Show only categories selected in the legend
if (fd.dimension) {
features = features.filter(d => categories[d.cat_color]?.enabled);
}
const filteredPayload = {
...payload,
data: { ...payload.data, features },
};
// if all categories are disabled, enable all -- similar to nvd3
if (Object.values(categories).every(v => !v.enabled)) {
/* eslint-disable no-param-reassign */
Object.values(categories).forEach(v => {
v.enabled = true;
return [
getLayer(
fd,
filteredPayload,
onAddFilter,
setTooltip,
props.datasource,
) as Layer,
];
}, [addColor, categories, props, setTooltip]);
const toggleCategory = useCallback(
(category: string) => {
const categoryState = categories[category];
const categoriesExtended = {
...categories,
[category]: {
...categoryState,
enabled: !categoryState.enabled,
},
};
// if all categories are disabled, enable all -- similar to nvd3
if (Object.values(categoriesExtended).every(v => !v.enabled)) {
/* eslint-disable no-param-reassign */
Object.values(categoriesExtended).forEach(v => {
v.enabled = true;
});
}
setCategories(categoriesExtended);
},
[categories],
);
const showSingleCategory = useCallback(
(category: string) => {
const modifiedCategories = { ...categories };
Object.values(modifiedCategories).forEach(v => {
v.enabled = false;
});
}
this.setState({ categories });
}
modifiedCategories[category].enabled = true;
setCategories(modifiedCategories);
},
[categories],
);
showSingleCategory(category: string) {
const categories = { ...this.state.categories };
/* eslint-disable no-param-reassign */
Object.values(categories).forEach(v => {
v.enabled = false;
});
categories[category].enabled = true;
this.setState({ categories });
}
return (
<div style={{ position: 'relative' }}>
<DeckGLContainerStyledWrapper
ref={containerRef}
viewport={viewport}
layers={getLayers()}
setControlValue={props.setControlValue}
mapStyle={props.formData.mapbox_style}
mapboxApiAccessToken={props.mapboxApiKey}
width={props.width}
height={props.height}
/>
<Legend
forceCategorical
categories={categories}
format={props.formData.legend_format}
position={props.formData.legend_position}
showSingleCategory={showSingleCategory}
toggleCategory={toggleCategory}
/>
</div>
);
};
setTooltip = (tooltip: TooltipProps['tooltip']) => {
const { current } = this.containerRef;
if (current) {
current.setTooltip(tooltip);
}
};
render() {
return (
<div style={{ position: 'relative' }}>
<DeckGLContainerStyledWrapper
ref={this.containerRef}
viewport={this.state.viewport}
layers={this.getLayers()}
setControlValue={this.props.setControlValue}
mapStyle={this.props.formData.mapbox_style}
mapboxApiAccessToken={this.props.mapboxApiKey}
width={this.props.width}
height={this.props.height}
/>
<Legend
forceCategorical
categories={this.state.categories}
format={this.props.formData.legend_format}
position={this.props.formData.legend_position}
showSingleCategory={this.showSingleCategory}
toggleCategory={this.toggleCategory}
/>
</div>
);
}
}
export default memo(CategoricalDeckGLContainer);
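
The hook version above keeps the last payload.form_data in state and re-derives categories and viewport in an effect, replacing UNSAFE_componentWillReceiveProps. A minimal sketch of that derive-on-prop-change pattern, using a hypothetical data prop and derive helper rather than the chart's real types:

import React, { useEffect, useState } from 'react';

type Props<T> = { data: T; derive: (data: T) => string[] };

function DerivedList<T>({ data, derive }: Props<T>) {
  // Remember which data object the current items were derived from.
  const [lastData, setLastData] = useState<T>(data);
  const [items, setItems] = useState<string[]>(() => derive(data));

  useEffect(() => {
    // Re-derive only when the upstream object actually changed, so local
    // state (selections, slider position) survives unrelated re-renders.
    if (data !== lastData) {
      setItems(derive(data));
      setLastData(data);
    }
  }, [data, derive, lastData]);

  return (
    <ul>
      {items.map(item => (
        <li key={item}>{item}</li>
      ))}
    </ul>
  );
}

export default DerivedList;

Comparing against the stored object, instead of re-deriving on every render, is what preserves legend selections when unrelated form controls change.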

View File

@ -20,11 +20,19 @@
* specific language governing permissions and limitations
* under the License.
*/
import React, { ReactNode } from 'react';
import React, {
forwardRef,
memo,
ReactNode,
useCallback,
useEffect,
useImperativeHandle,
useState,
} from 'react';
import { isEqual } from 'lodash';
import { StaticMap } from 'react-map-gl';
import DeckGL, { Layer } from 'deck.gl/typed';
import { JsonObject, JsonValue, styled } from '@superset-ui/core';
import { JsonObject, JsonValue, styled, usePrevious } from '@superset-ui/core';
import Tooltip, { TooltipProps } from './components/Tooltip';
import 'mapbox-gl/dist/mapbox-gl.css';
import { Viewport } from './utils/fitViewport';
@ -43,76 +51,57 @@ export type DeckGLContainerProps = {
onViewportChange?: (viewport: Viewport) => void;
};
export type DeckGLContainerState = {
lastUpdate: number | null;
viewState: Viewport;
tooltip: TooltipProps['tooltip'];
timer: ReturnType<typeof setInterval>;
};
export const DeckGLContainer = memo(
forwardRef((props: DeckGLContainerProps, ref) => {
const [tooltip, setTooltip] = useState<TooltipProps['tooltip']>(null);
const [lastUpdate, setLastUpdate] = useState<number | null>(null);
const [viewState, setViewState] = useState(props.viewport);
const prevViewport = usePrevious(props.viewport);
export class DeckGLContainer extends React.Component<
DeckGLContainerProps,
DeckGLContainerState
> {
constructor(props: DeckGLContainerProps) {
super(props);
this.tick = this.tick.bind(this);
this.onViewStateChange = this.onViewStateChange.bind(this);
// This has to be placed after this.tick is bound to this
this.state = {
timer: setInterval(this.tick, TICK),
tooltip: null,
viewState: props.viewport,
lastUpdate: null,
};
}
useImperativeHandle(ref, () => ({ setTooltip }), []);
UNSAFE_componentWillReceiveProps(nextProps: DeckGLContainerProps) {
if (!isEqual(nextProps.viewport, this.props.viewport)) {
this.setState({ viewState: nextProps.viewport });
}
}
componentWillUnmount() {
clearInterval(this.state.timer);
}
onViewStateChange({ viewState }: { viewState: JsonObject }) {
this.setState({ viewState: viewState as Viewport, lastUpdate: Date.now() });
}
tick() {
// Rate-limit viewport control updates, as they trigger lots of renders
const { lastUpdate } = this.state;
if (lastUpdate && Date.now() - lastUpdate > TICK) {
const setCV = this.props.setControlValue;
if (setCV) {
setCV('viewport', this.state.viewState);
const tick = useCallback(() => {
// Rate-limit viewport control updates, as they trigger lots of renders
if (lastUpdate && Date.now() - lastUpdate > TICK) {
const setCV = props.setControlValue;
if (setCV) {
setCV('viewport', viewState);
}
setLastUpdate(null);
}
this.setState({ lastUpdate: null });
}
}
}, [lastUpdate, props.setControlValue, viewState]);
layers() {
// Support for layer factory
if (this.props.layers.some(l => typeof l === 'function')) {
return this.props.layers.map(l =>
typeof l === 'function' ? l() : l,
) as Layer[];
}
useEffect(() => {
const timer = setInterval(tick, TICK);
return () => clearInterval(timer);
}, [tick]);
return this.props.layers as Layer[];
}
useEffect(() => {
if (!isEqual(props.viewport, prevViewport)) {
setViewState(props.viewport);
}
}, [prevViewport, props.viewport]);
setTooltip = (tooltip: TooltipProps['tooltip']) => {
this.setState({ tooltip });
};
const onViewStateChange = useCallback(
({ viewState }: { viewState: JsonObject }) => {
setViewState(viewState as Viewport);
setLastUpdate(Date.now());
},
[],
);
render() {
const { children = null, height, width } = this.props;
const { viewState, tooltip } = this.state;
const layers = useCallback(() => {
// Support for layer factory
if (props.layers.some(l => typeof l === 'function')) {
return props.layers.map(l =>
typeof l === 'function' ? l() : l,
) as Layer[];
}
const layers = this.layers();
return props.layers as Layer[];
}, [props.layers]);
const { children = null, height, width } = props;
return (
<>
@ -121,15 +110,15 @@ export class DeckGLContainer extends React.Component<
controller
width={width}
height={height}
layers={layers}
layers={layers()}
viewState={viewState}
glOptions={{ preserveDrawingBuffer: true }}
onViewStateChange={this.onViewStateChange}
onViewStateChange={onViewStateChange}
>
<StaticMap
preserveDrawingBuffer
mapStyle={this.props.mapStyle || 'light'}
mapboxApiAccessToken={this.props.mapboxApiAccessToken}
mapStyle={props.mapStyle || 'light'}
mapboxApiAccessToken={props.mapboxApiAccessToken}
/>
</DeckGL>
{children}
@ -137,8 +126,8 @@ export class DeckGLContainer extends React.Component<
<Tooltip tooltip={tooltip} />
</>
);
}
}
}),
);
export const DeckGLContainerStyledWrapper = styled(DeckGLContainer)`
.deckgl-tooltip > div {
@ -146,3 +135,7 @@ export const DeckGLContainerStyledWrapper = styled(DeckGLContainer)`
text-overflow: ellipsis;
}
`;
export type DeckGLContainerHandle = typeof DeckGLContainer & {
setTooltip: (tooltip: ReactNode) => void;
};
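
DeckGLContainer now exposes its imperative surface through forwardRef plus useImperativeHandle, so layer callbacks can push tooltips without the parent owning tooltip state. A stripped-down sketch of the same handle pattern, with hypothetical MessageBox/MessageHandle names:

import React, { forwardRef, useImperativeHandle, useState } from 'react';

export type MessageHandle = { setMessage: (msg: string) => void };

// Only MessageBox re-renders when the message changes; the parent just
// holds a ref and calls into it from event handlers.
export const MessageBox = forwardRef<MessageHandle>((_, ref) => {
  const [message, setMessage] = useState('');
  useImperativeHandle(ref, () => ({ setMessage }), []);
  return <div>{message}</div>;
});

A parent creates the handle with useRef<MessageHandle>(null) and calls ref.current?.setMessage(...) from a callback, which is how the deck.gl layers push tooltip updates into the container.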

View File

@ -19,7 +19,7 @@
* specific language governing permissions and limitations
* under the License.
*/
import React from 'react';
import React, { memo, useCallback, useEffect, useRef, useState } from 'react';
import { isEqual } from 'lodash';
import {
Datasource,
@ -28,11 +28,12 @@ import {
JsonValue,
QueryFormData,
SupersetClient,
usePrevious,
} from '@superset-ui/core';
import { Layer } from 'deck.gl/typed';
import {
DeckGLContainer,
DeckGLContainerHandle,
DeckGLContainerStyledWrapper,
} from '../DeckGLContainer';
import { getExploreLongUrl } from '../utils/explore';
@ -52,120 +53,97 @@ export type DeckMultiProps = {
onSelect: () => void;
};
export type DeckMultiState = {
subSlicesLayers: Record<number, Layer>;
viewport?: Viewport;
};
const DeckMulti = (props: DeckMultiProps) => {
const containerRef = useRef<DeckGLContainerHandle>();
class DeckMulti extends React.PureComponent<DeckMultiProps, DeckMultiState> {
containerRef = React.createRef<DeckGLContainer>();
const [viewport, setViewport] = useState<Viewport>();
const [subSlicesLayers, setSubSlicesLayers] = useState<Record<number, Layer>>(
{},
);
constructor(props: DeckMultiProps) {
super(props);
this.state = { subSlicesLayers: {} };
this.onViewportChange = this.onViewportChange.bind(this);
}
componentDidMount() {
const { formData, payload } = this.props;
this.loadLayers(formData, payload);
}
UNSAFE_componentWillReceiveProps(nextProps: DeckMultiProps) {
const { formData, payload } = nextProps;
const hasChanges = !isEqual(
this.props.formData.deck_slices,
nextProps.formData.deck_slices,
);
if (hasChanges) {
this.loadLayers(formData, payload);
}
}
onViewportChange(viewport: Viewport) {
this.setState({ viewport });
}
loadLayers(
formData: QueryFormData,
payload: JsonObject,
viewport?: Viewport,
) {
this.setState({ subSlicesLayers: {}, viewport });
payload.data.slices.forEach(
(subslice: { slice_id: number } & JsonObject) => {
// Filters applied to multi_deck are passed down to underlying charts
// note that dashboard contextual information (filter_immune_slices and such) aren't
// taken into consideration here
const filters = [
...(subslice.form_data.filters || []),
...(formData.filters || []),
...(formData.extra_filters || []),
];
const subsliceCopy = {
...subslice,
form_data: {
...subslice.form_data,
filters,
},
};
const url = getExploreLongUrl(subsliceCopy.form_data, 'json');
if (url) {
SupersetClient.get({
endpoint: url,
})
.then(({ json }) => {
const layer = layerGenerators[subsliceCopy.form_data.viz_type](
subsliceCopy.form_data,
json,
this.props.onAddFilter,
this.setTooltip,
this.props.datasource,
[],
this.props.onSelect,
);
this.setState({
subSlicesLayers: {
...this.state.subSlicesLayers,
[subsliceCopy.slice_id]: layer,
},
});
})
.catch(() => {});
}
},
);
}
setTooltip = (tooltip: TooltipProps['tooltip']) => {
const { current } = this.containerRef;
const setTooltip = useCallback((tooltip: TooltipProps['tooltip']) => {
const { current } = containerRef;
if (current) {
current.setTooltip(tooltip);
}
};
}, []);
render() {
const { payload, formData, setControlValue, height, width } = this.props;
const { subSlicesLayers } = this.state;
const loadLayers = useCallback(
(formData: QueryFormData, payload: JsonObject, viewport?: Viewport) => {
setViewport(viewport);
setSubSlicesLayers({});
payload.data.slices.forEach(
(subslice: { slice_id: number } & JsonObject) => {
// Filters applied to multi_deck are passed down to underlying charts
// note that dashboard contextual information (filter_immune_slices and such) aren't
// taken into consideration here
const filters = [
...(subslice.form_data.filters || []),
...(formData.filters || []),
...(formData.extra_filters || []),
];
const subsliceCopy = {
...subslice,
form_data: {
...subslice.form_data,
filters,
},
};
const layers = Object.values(subSlicesLayers);
const url = getExploreLongUrl(subsliceCopy.form_data, 'json');
return (
<DeckGLContainerStyledWrapper
ref={this.containerRef}
mapboxApiAccessToken={payload.data.mapboxApiKey}
viewport={this.state.viewport || this.props.viewport}
layers={layers}
mapStyle={formData.mapbox_style}
setControlValue={setControlValue}
onViewportChange={this.onViewportChange}
height={height}
width={width}
/>
);
}
}
if (url) {
SupersetClient.get({
endpoint: url,
})
.then(({ json }) => {
const layer = layerGenerators[subsliceCopy.form_data.viz_type](
subsliceCopy.form_data,
json,
props.onAddFilter,
setTooltip,
props.datasource,
[],
props.onSelect,
);
setSubSlicesLayers(subSlicesLayers => ({
...subSlicesLayers,
[subsliceCopy.slice_id]: layer,
}));
})
.catch(() => {});
}
},
);
},
[props.datasource, props.onAddFilter, props.onSelect, setTooltip],
);
export default DeckMulti;
const prevDeckSlices = usePrevious(props.formData.deck_slices);
useEffect(() => {
const { formData, payload } = props;
const hasChanges = !isEqual(prevDeckSlices, formData.deck_slices);
if (hasChanges) {
loadLayers(formData, payload);
}
}, [loadLayers, prevDeckSlices, props]);
const { payload, formData, setControlValue, height, width } = props;
const layers = Object.values(subSlicesLayers);
return (
<DeckGLContainerStyledWrapper
ref={containerRef}
mapboxApiAccessToken={payload.data.mapboxApiKey}
viewport={viewport || props.viewport}
layers={layers}
mapStyle={formData.mapbox_style}
setControlValue={setControlValue}
onViewportChange={setViewport}
height={height}
width={width}
/>
);
};
export default memo(DeckMulti);
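
Each sub-slice request resolves independently, so the refactor merges results with the functional form of the state setter. A sketch of why that matters, with a stand-in fetchLayer:

import { useState } from 'react';

// Stand-in for the per-slice chart-data request.
const fetchLayer = (id: number): Promise<string> =>
  Promise.resolve(`layer-${id}`);

export function useLayers(ids: number[]) {
  const [layers, setLayers] = useState<Record<number, string>>({});

  const load = () => {
    setLayers({});
    ids.forEach(id => {
      fetchLayer(id).then(layer => {
        // Functional update: merge into the latest state rather than the
        // snapshot captured when the request started, so responses that
        // resolve out of order do not overwrite each other.
        setLayers(prev => ({ ...prev, [id]: layer }));
      });
    });
  };

  return { layers, load };
}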

View File

@ -23,15 +23,11 @@ type TooltipRowProps = {
value: string;
};
export default class TooltipRow extends React.PureComponent<TooltipRowProps> {
render() {
const { label, value } = this.props;
const TooltipRow = ({ label, value }: TooltipRowProps) => (
<div>
{label}
<strong>{value}</strong>
</div>
);
return (
<div>
{label}
<strong>{value}</strong>
</div>
);
}
}
export default TooltipRow;

View File

@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
import React from 'react';
import React, { memo, useCallback, useEffect, useRef, useState } from 'react';
import { isEqual } from 'lodash';
import { Layer } from 'deck.gl/typed';
import {
@ -24,11 +24,12 @@ import {
QueryFormData,
JsonObject,
HandlerFunction,
usePrevious,
} from '@superset-ui/core';
import {
DeckGLContainerStyledWrapper,
DeckGLContainer,
DeckGLContainerHandle,
} from './DeckGLContainer';
import CategoricalDeckGLContainer from './CategoricalDeckGLContainer';
import fitViewport, { Viewport } from './utils/fitViewport';
@ -57,91 +58,73 @@ export interface getLayerType<T> {
interface getPointsType {
(data: JsonObject[]): Point[];
}
type deckGLComponentState = {
viewport: Viewport;
layer: Layer;
};
export function createDeckGLComponent(
getLayer: getLayerType<unknown>,
getPoints: getPointsType,
): React.ComponentClass<deckGLComponentProps> {
) {
// Higher order component
class Component extends React.PureComponent<
deckGLComponentProps,
deckGLComponentState
> {
containerRef: React.RefObject<DeckGLContainer> = React.createRef();
constructor(props: deckGLComponentProps) {
super(props);
return memo((props: deckGLComponentProps) => {
const containerRef = useRef<DeckGLContainerHandle>();
const prevFormData = usePrevious(props.formData);
const prevPayload = usePrevious(props.payload);
const getAdjustedViewport = () => {
const { width, height, formData } = props;
let { viewport } = props;
if (formData.autozoom) {
viewport = fitViewport(viewport, {
return fitViewport(props.viewport, {
width,
height,
points: getPoints(props.payload.data.features),
}) as Viewport;
}
return props.viewport;
};
this.state = {
viewport,
layer: this.computeLayer(props),
};
this.onViewportChange = this.onViewportChange.bind(this);
}
const [viewport, setViewport] = useState(getAdjustedViewport());
UNSAFE_componentWillReceiveProps(nextProps: deckGLComponentProps) {
// Only recompute the layer if anything BUT the viewport has changed
const nextFdNoVP = { ...nextProps.formData, viewport: null };
const currFdNoVP = { ...this.props.formData, viewport: null };
if (
!isEqual(nextFdNoVP, currFdNoVP) ||
nextProps.payload !== this.props.payload
) {
this.setState({ layer: this.computeLayer(nextProps) });
}
}
onViewportChange(viewport: Viewport) {
this.setState({ viewport });
}
computeLayer(props: deckGLComponentProps) {
const { formData, payload, onAddFilter } = props;
return getLayer(formData, payload, onAddFilter, this.setTooltip) as Layer;
}
setTooltip = (tooltip: TooltipProps['tooltip']) => {
const { current } = this.containerRef;
const setTooltip = useCallback((tooltip: TooltipProps['tooltip']) => {
const { current } = containerRef;
if (current) {
current?.setTooltip(tooltip);
}
};
}, []);
render() {
const { formData, payload, setControlValue, height, width } = this.props;
const { layer, viewport } = this.state;
const computeLayer = useCallback(
(props: deckGLComponentProps) => {
const { formData, payload, onAddFilter } = props;
return (
<DeckGLContainerStyledWrapper
ref={this.containerRef}
mapboxApiAccessToken={payload.data.mapboxApiKey}
viewport={viewport}
layers={[layer]}
mapStyle={formData.mapbox_style}
setControlValue={setControlValue}
width={width}
height={height}
onViewportChange={this.onViewportChange}
/>
);
}
}
return Component;
return getLayer(formData, payload, onAddFilter, setTooltip) as Layer;
},
[setTooltip],
);
const [layer, setLayer] = useState(computeLayer(props));
useEffect(() => {
// Only recompute the layer if anything BUT the viewport has changed
const prevFdNoVP = { ...prevFormData, viewport: null };
const currFdNoVP = { ...props.formData, viewport: null };
if (!isEqual(prevFdNoVP, currFdNoVP) || prevPayload !== props.payload) {
setLayer(computeLayer(props));
}
}, [computeLayer, prevFormData, prevPayload, props]);
const { formData, payload, setControlValue, height, width } = props;
return (
<DeckGLContainerStyledWrapper
ref={containerRef}
mapboxApiAccessToken={payload.data.mapboxApiKey}
viewport={viewport}
layers={[layer]}
mapStyle={formData.mapbox_style}
setControlValue={setControlValue}
width={width}
height={height}
onViewportChange={setViewport}
/>
);
});
}
export function createCategoricalDeckGLComponent(
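
createDeckGLComponent keeps its factory shape but now returns a memoized function component that closes over getLayer and getPoints. The same structure in miniature, with an illustrative format strategy:

import React, { memo, useMemo } from 'react';

type RendererProps = { value: number };

// A factory closing over a strategy and returning a memoized component,
// mirroring how createDeckGLComponent closes over getLayer/getPoints.
export function createValueComponent(format: (v: number) => string) {
  return memo(({ value }: RendererProps) => {
    const label = useMemo(() => format(value), [format, value]);
    return <span>{label}</span>;
  });
}

const Currency = createValueComponent(v => `$${v.toFixed(2)}`);
// <Currency value={9.5} /> renders $9.50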

View File

@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
import React from 'react';
import React, { memo, useCallback, useMemo, useRef } from 'react';
import { GeoJsonLayer } from 'deck.gl/typed';
import geojsonExtent from '@mapbox/geojson-extent';
import {
@ -27,7 +27,7 @@ import {
} from '@superset-ui/core';
import {
DeckGLContainer,
DeckGLContainerHandle,
DeckGLContainerStyledWrapper,
} from '../../DeckGLContainer';
import { hexToRGB } from '../../utils/colors';
@ -164,21 +164,19 @@ export type DeckGLGeoJsonProps = {
width: number;
};
class DeckGLGeoJson extends React.Component<DeckGLGeoJsonProps> {
containerRef = React.createRef<DeckGLContainer>();
setTooltip = (tooltip: TooltipProps['tooltip']) => {
const { current } = this.containerRef;
const DeckGLGeoJson = (props: DeckGLGeoJsonProps) => {
const containerRef = useRef<DeckGLContainerHandle>();
const setTooltip = useCallback((tooltip: TooltipProps['tooltip']) => {
const { current } = containerRef;
if (current) {
current.setTooltip(tooltip);
}
};
}, []);
render() {
const { formData, payload, setControlValue, onAddFilter, height, width } =
this.props;
const { formData, payload, setControlValue, onAddFilter, height, width } =
props;
let { viewport } = this.props;
const viewport: Viewport = useMemo(() => {
if (formData.autozoom) {
const points =
payload?.data?.features?.reduce?.(
@ -194,29 +192,36 @@ class DeckGLGeoJson extends React.Component<DeckGLGeoJsonProps> {
) || [];
if (points.length) {
viewport = fitViewport(viewport, {
return fitViewport(props.viewport, {
width,
height,
points,
});
}
}
return props.viewport;
}, [
formData.autozoom,
height,
payload?.data?.features,
props.viewport,
width,
]);
const layer = getLayer(formData, payload, onAddFilter, this.setTooltip);
const layer = getLayer(formData, payload, onAddFilter, setTooltip);
return (
<DeckGLContainerStyledWrapper
ref={this.containerRef}
mapboxApiAccessToken={payload.data.mapboxApiKey}
viewport={viewport}
layers={[layer]}
mapStyle={formData.mapbox_style}
setControlValue={setControlValue}
height={height}
width={width}
/>
);
}
}
return (
<DeckGLContainerStyledWrapper
ref={containerRef}
mapboxApiAccessToken={payload.data.mapboxApiKey}
viewport={viewport}
layers={[layer]}
mapStyle={formData.mapbox_style}
setControlValue={setControlValue}
height={height}
width={width}
/>
);
};
export default DeckGLGeoJson;
export default memo(DeckGLGeoJson);
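
Here the fitted viewport becomes a useMemo-derived value instead of component state, recomputed only when its dependencies change. A self-contained sketch of that idea, with a hypothetical fitToPoints standing in for fitViewport:

import { useMemo } from 'react';

type Point = [number, number];

// Hypothetical bounding-box fit standing in for fitViewport.
function fitToPoints(points: Point[]) {
  const xs = points.map(p => p[0]);
  const ys = points.map(p => p[1]);
  return {
    centerX: (Math.min(...xs) + Math.max(...xs)) / 2,
    centerY: (Math.min(...ys) + Math.max(...ys)) / 2,
  };
}

export function useFittedViewport(points: Point[], autozoom: boolean) {
  // Recomputed only when points or autozoom change; renders caused by
  // unrelated props reuse the previously memoized object.
  return useMemo(
    () => (autozoom && points.length ? fitToPoints(points) : null),
    [points, autozoom],
  );
}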

View File

@ -21,7 +21,7 @@
*/
/* eslint no-underscore-dangle: ["error", { "allow": ["", "__timestamp"] }] */
import React from 'react';
import React, { memo, useCallback, useEffect, useRef, useState } from 'react';
import {
HandlerFunction,
JsonObject,
@ -41,7 +41,7 @@ import sandboxedEval from '../../utils/sandbox';
import getPointsFromPolygon from '../../utils/getPointsFromPolygon';
import fitViewport, { Viewport } from '../../utils/fitViewport';
import {
DeckGLContainer,
DeckGLContainerHandle,
DeckGLContainerStyledWrapper,
} from '../../DeckGLContainer';
import { TooltipProps } from '../../components/Tooltip';
@ -173,145 +173,134 @@ export type DeckGLPolygonProps = {
height: number;
};
export type DeckGLPolygonState = {
lastClick: number;
viewport: Viewport;
formData: PolygonFormData;
selected: JsonObject[];
};
const DeckGLPolygon = (props: DeckGLPolygonProps) => {
const containerRef = useRef<DeckGLContainerHandle>();
class DeckGLPolygon extends React.PureComponent<
DeckGLPolygonProps,
DeckGLPolygonState
> {
containerRef = React.createRef<DeckGLContainer>();
constructor(props: DeckGLPolygonProps) {
super(props);
this.state = DeckGLPolygon.getDerivedStateFromProps(
props,
) as DeckGLPolygonState;
this.getLayers = this.getLayers.bind(this);
this.onSelect = this.onSelect.bind(this);
}
static getDerivedStateFromProps(
props: DeckGLPolygonProps,
state?: DeckGLPolygonState,
) {
const { width, height, formData, payload } = props;
// the state is computed only from the payload; if it hasn't changed, do
// not recompute state since this would reset selections and/or the play
// slider position due to changes in form controls
if (state && payload.form_data === state.formData) {
return null;
}
const features = payload.data.features || [];
let { viewport } = props;
if (formData.autozoom) {
const getAdjustedViewport = useCallback(() => {
let viewport = { ...props.viewport };
if (props.formData.autozoom) {
const features = props.payload.data.features || [];
viewport = fitViewport(viewport, {
width,
height,
width: props.width,
height: props.height,
points: features.flatMap(getPointsFromPolygon),
});
}
if (viewport.zoom < 0) {
viewport.zoom = 0;
}
return viewport;
}, [props]);
return {
viewport,
selected: [],
lastClick: 0,
formData: payload.form_data,
};
}
const [lastClick, setLastClick] = useState(0);
const [viewport, setViewport] = useState(getAdjustedViewport());
const [stateFormData, setStateFormData] = useState(props.payload.form_data);
const [selected, setSelected] = useState<JsonObject[]>([]);
onSelect(polygon: JsonObject) {
const { formData, onAddFilter } = this.props;
useEffect(() => {
const { payload } = props;
const now = new Date().getDate();
const doubleClick = now - this.state.lastClick <= DOUBLE_CLICK_THRESHOLD;
if (payload.form_data !== stateFormData) {
setViewport(getAdjustedViewport());
setSelected([]);
setLastClick(0);
setStateFormData(payload.form_data);
}
}, [getAdjustedViewport, props, stateFormData, viewport]);
// toggle selected polygons
const selected = [...this.state.selected];
if (doubleClick) {
selected.splice(0, selected.length, polygon);
} else if (formData.toggle_polygons) {
const i = selected.indexOf(polygon);
if (i === -1) {
selected.push(polygon);
const setTooltip = useCallback((tooltip: TooltipProps['tooltip']) => {
const { current } = containerRef;
if (current) {
current.setTooltip(tooltip);
}
}, []);
const onSelect = useCallback(
(polygon: JsonObject) => {
const { formData, onAddFilter } = props;
const now = Date.now();
const doubleClick = now - lastClick <= DOUBLE_CLICK_THRESHOLD;
// toggle selected polygons
const selectedCopy = [...selected];
if (doubleClick) {
selectedCopy.splice(0, selectedCopy.length, polygon);
} else if (formData.toggle_polygons) {
const i = selectedCopy.indexOf(polygon);
if (i === -1) {
selectedCopy.push(polygon);
} else {
selectedCopy.splice(i, 1);
}
} else {
selected.splice(i, 1);
selectedCopy.splice(0, 1, polygon);
}
} else {
selected.splice(0, 1, polygon);
}
this.setState({ selected, lastClick: now });
if (formData.table_filter) {
onAddFilter(formData.line_column, selected, false, true);
}
}
setSelected(selectedCopy);
setLastClick(now);
if (formData.table_filter) {
onAddFilter(formData.line_column, selectedCopy, false, true);
}
},
[lastClick, props, selected],
);
getLayers() {
if (this.props.payload.data.features === undefined) {
const getLayers = useCallback(() => {
if (props.payload.data.features === undefined) {
return [];
}
const layer = getLayer(
this.props.formData,
this.props.payload,
this.props.onAddFilter,
this.setTooltip,
this.state.selected,
this.onSelect,
props.formData,
props.payload,
props.onAddFilter,
setTooltip,
selected,
onSelect,
);
return [layer];
}
}, [
onSelect,
props.formData,
props.onAddFilter,
props.payload,
selected,
setTooltip,
]);
setTooltip = (tooltip: TooltipProps['tooltip']) => {
const { current } = this.containerRef;
if (current) {
current.setTooltip(tooltip);
}
};
const { payload, formData, setControlValue } = props;
render() {
const { payload, formData, setControlValue } = this.props;
const metricLabel = formData.metric
? formData.metric.label || formData.metric
: null;
const accessor = (d: JsonObject) => d[metricLabel];
const fd = formData;
const metricLabel = fd.metric ? fd.metric.label || fd.metric : null;
const accessor = (d: JsonObject) => d[metricLabel];
const buckets = getBuckets(formData, payload.data.features, accessor);
const buckets = getBuckets(formData, payload.data.features, accessor);
return (
<div style={{ position: 'relative' }}>
<DeckGLContainerStyledWrapper
ref={containerRef}
viewport={viewport}
layers={getLayers()}
setControlValue={setControlValue}
mapStyle={formData.mapbox_style}
mapboxApiAccessToken={payload.data.mapboxApiKey}
width={props.width}
height={props.height}
/>
return (
<div style={{ position: 'relative' }}>
<DeckGLContainerStyledWrapper
ref={this.containerRef}
viewport={this.state.viewport}
layers={this.getLayers()}
setControlValue={setControlValue}
mapStyle={formData.mapbox_style}
mapboxApiAccessToken={payload.data.mapboxApiKey}
width={this.props.width}
height={this.props.height}
{formData.metric !== null && (
<Legend
categories={buckets}
position={formData.legend_position}
format={formData.legend_format}
/>
)}
</div>
);
};
{formData.metric !== null && (
<Legend
categories={buckets}
position={formData.legend_position}
format={formData.legend_format}
/>
)}
</div>
);
}
}
export default DeckGLPolygon;
export default memo(DeckGLPolygon);
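
The selection handler flags a double click when two clicks land within DOUBLE_CLICK_THRESHOLD of each other. The timing logic in isolation, assuming a 300 ms threshold and millisecond timestamps:

// Threshold assumed for illustration; the real constant lives elsewhere.
const DOUBLE_CLICK_THRESHOLD_MS = 300;

function createClickTracker() {
  let lastClick = 0;
  return (now: number = Date.now()): boolean => {
    // Two clicks closer together than the threshold count as a double click.
    const isDouble = now - lastClick <= DOUBLE_CLICK_THRESHOLD_MS;
    lastClick = now;
    return isDouble;
  };
}

const isDoubleClick = createClickTracker();
// isDoubleClick(1000) === false; isDoubleClick(1200) === true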

View File

@ -20,7 +20,7 @@
*/
/* eslint no-underscore-dangle: ["error", { "allow": ["", "__timestamp"] }] */
import React from 'react';
import React, { memo, useCallback, useEffect, useRef, useState } from 'react';
import { ScreenGridLayer } from 'deck.gl/typed';
import { JsonObject, JsonValue, QueryFormData, t } from '@superset-ui/core';
import { noop } from 'lodash';
@ -30,7 +30,7 @@ import TooltipRow from '../../TooltipRow';
// eslint-disable-next-line import/extensions
import fitViewport, { Viewport } from '../../utils/fitViewport';
import {
DeckGLContainer,
DeckGLContainerHandle,
DeckGLContainerStyledWrapper,
} from '../../DeckGLContainer';
import { TooltipProps } from '../../components/Tooltip';
@ -99,93 +99,63 @@ export type DeckGLScreenGridProps = {
onAddFilter: () => void;
};
export type DeckGLScreenGridState = {
viewport: Viewport;
formData: QueryFormData;
};
class DeckGLScreenGrid extends React.PureComponent<
DeckGLScreenGridProps,
DeckGLScreenGridState
> {
containerRef = React.createRef<DeckGLContainer>();
constructor(props: DeckGLScreenGridProps) {
super(props);
this.state = DeckGLScreenGrid.getDerivedStateFromProps(
props,
) as DeckGLScreenGridState;
this.getLayers = this.getLayers.bind(this);
}
static getDerivedStateFromProps(
props: DeckGLScreenGridProps,
state?: DeckGLScreenGridState,
) {
// the state is computed only from the payload; if it hasn't changed, do
// not recompute state since this would reset selections and/or the play
// slider position due to changes in form controls
if (state && props.payload.form_data === state.formData) {
return null;
}
const DeckGLScreenGrid = (props: DeckGLScreenGridProps) => {
const containerRef = useRef<DeckGLContainerHandle>();
const getAdjustedViewport = useCallback(() => {
const features = props.payload.data.features || [];
const { width, height, formData } = props;
let { viewport } = props;
if (formData.autozoom) {
viewport = fitViewport(viewport, {
return fitViewport(props.viewport, {
width,
height,
points: getPoints(features),
});
}
return props.viewport;
}, [props]);
return {
viewport,
formData: props.payload.form_data as QueryFormData,
};
}
const [stateFormData, setStateFormData] = useState(props.payload.form_data);
const [viewport, setViewport] = useState(getAdjustedViewport());
getLayers() {
const layer = getLayer(
this.props.formData,
this.props.payload,
noop,
this.setTooltip,
);
useEffect(() => {
if (props.payload.form_data !== stateFormData) {
setViewport(getAdjustedViewport());
setStateFormData(props.payload.form_data);
}
}, [getAdjustedViewport, props.payload.form_data, stateFormData]);
return [layer];
}
setTooltip = (tooltip: TooltipProps['tooltip']) => {
const { current } = this.containerRef;
const setTooltip = useCallback((tooltip: TooltipProps['tooltip']) => {
const { current } = containerRef;
if (current) {
current.setTooltip(tooltip);
}
};
}, []);
render() {
const { formData, payload, setControlValue } = this.props;
const getLayers = useCallback(() => {
const layer = getLayer(props.formData, props.payload, noop, setTooltip);
return (
<div>
<DeckGLContainerStyledWrapper
ref={this.containerRef}
viewport={this.state.viewport}
layers={this.getLayers()}
setControlValue={setControlValue}
mapStyle={formData.mapbox_style}
mapboxApiAccessToken={payload.data.mapboxApiKey}
width={this.props.width}
height={this.props.height}
/>
</div>
);
}
}
return [layer];
}, [props.formData, props.payload, setTooltip]);
export default DeckGLScreenGrid;
const { formData, payload, setControlValue } = props;
return (
<div>
<DeckGLContainerStyledWrapper
ref={containerRef}
viewport={viewport}
layers={getLayers()}
setControlValue={setControlValue}
mapStyle={formData.mapbox_style}
mapboxApiAccessToken={payload.data.mapboxApiKey}
width={props.width}
height={props.height}
/>
</div>
);
};
export default memo(DeckGLScreenGrid);

View File

@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
import { t, ChartMetadata, ChartPlugin } from '@superset-ui/core';
import { t, ChartMetadata, ChartPlugin, ChartLabel } from '@superset-ui/core';
import transformProps from '../transformProps';
import example from './images/example.jpg';
import thumbnail from './images/thumbnail.png';
@ -29,7 +29,8 @@ const metadata = new ChartMetadata({
'Visualizes a metric across three dimensions of data in a single chart (X axis, Y axis, and bubble size). Bubbles from the same group can be showcased using bubble color.',
),
exampleGallery: [{ url: example }],
name: t('Bubble Chart'),
label: ChartLabel.DEPRECATED,
name: t('Bubble Chart (legacy)'),
tags: [
t('Multi-Dimensions'),
t('Aesthetic'),
@ -39,11 +40,15 @@ const metadata = new ChartMetadata({
t('Time'),
t('Trend'),
t('nvd3'),
t('Deprecated'),
],
thumbnail,
useLegacyApi: true,
});
/**
* @deprecated in version 4.0.
*/
export default class BubbleChartPlugin extends ChartPlugin {
constructor() {
super({

View File

@ -17,7 +17,17 @@
* under the License.
*/
import React from 'react';
import ReactDOM from 'react-dom';
import App from './App';
import { BubbleChartTransformedProps } from './types';
import Echart from '../components/Echart';
ReactDOM.render(<App />, document.getElementById('app'));
export default function EchartsBubble(props: BubbleChartTransformedProps) {
const { height, width, echartOptions, refs } = props;
return (
<Echart
height={height}
width={width}
echartOptions={echartOptions}
refs={refs}
/>
);
}

View File

@ -16,6 +16,25 @@
* specific language governing permissions and limitations
* under the License.
*/
export default function commonReducer(state = {}) {
return state;
import {
buildQueryContext,
ensureIsArray,
QueryFormData,
} from '@superset-ui/core';
export default function buildQuery(formData: QueryFormData) {
const columns = [
...ensureIsArray(formData.entity),
...ensureIsArray(formData.series),
];
return buildQueryContext(formData, baseQueryObject => [
{
...baseQueryObject,
columns,
orderby: baseQueryObject.orderby
? [[baseQueryObject.orderby[0], !baseQueryObject.order_desc]]
: undefined,
},
]);
}
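
The columns list is assembled with ensureIsArray, which turns a scalar, an array, or undefined into a plain array; that is what lets the optional series control contribute nothing when unset. For example:

import { ensureIsArray } from '@superset-ui/core';

// ensureIsArray normalizes scalar, array, or undefined inputs:
const columns = [
  ...ensureIsArray('customer_name'), // entity -> ['customer_name']
  ...ensureIsArray(undefined), // series unset -> []
];
// columns === ['customer_name']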

View File

@ -16,17 +16,20 @@
* specific language governing permissions and limitations
* under the License.
*/
import { combineReducers } from 'redux';
import messageToasts from 'src/components/MessageToasts/reducers';
import sqlLab from './sqlLab';
import localStorageUsageInKilobytes from './localStorageUsage';
import common from './common';
import { DEFAULT_LEGEND_FORM_DATA } from '../constants';
import { EchartsBubbleFormData } from './types';
export const reducers = {
sqlLab,
localStorageUsageInKilobytes,
messageToasts,
common,
export const DEFAULT_FORM_DATA: Partial<EchartsBubbleFormData> = {
...DEFAULT_LEGEND_FORM_DATA,
emitFilter: false,
logXAxis: false,
logYAxis: false,
xAxisTitleMargin: 30,
yAxisTitleMargin: 30,
truncateYAxis: false,
yAxisBounds: [null, null],
xAxisLabelRotation: 0,
opacity: 0.6,
};
export default combineReducers(reducers);
export const MINIMUM_BUBBLE_SIZE = 5;

View File

@ -0,0 +1,287 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { t } from '@superset-ui/core';
import {
ControlPanelConfig,
formatSelectOptions,
sections,
ControlPanelsContainerProps,
sharedControls,
} from '@superset-ui/chart-controls';
import { DEFAULT_FORM_DATA } from './constants';
import { legendSection } from '../controls';
const { logXAxis, logYAxis, truncateYAxis, yAxisBounds, xAxisLabelRotation, opacity } =
DEFAULT_FORM_DATA;
const config: ControlPanelConfig = {
controlPanelSections: [
{
label: t('Query'),
expanded: true,
controlSetRows: [
['series'],
['entity'],
['x'],
['y'],
['adhoc_filters'],
['size'],
['orderby'],
[
{
name: 'order_desc',
config: {
...sharedControls.order_desc,
visibility: ({ controls }) => Boolean(controls.orderby.value),
},
},
],
['row_limit'],
],
},
{
label: t('Chart Options'),
expanded: true,
tabOverride: 'customize',
controlSetRows: [
['color_scheme'],
...legendSection,
[
{
name: 'max_bubble_size',
config: {
type: 'SelectControl',
renderTrigger: true,
freeForm: true,
label: t('Max Bubble Size'),
default: '25',
choices: formatSelectOptions([
'5',
'10',
'15',
'25',
'50',
'75',
'100',
]),
},
},
],
[
{
name: 'tooltipSizeFormat',
config: {
...sharedControls.y_axis_format,
label: t('Bubble size number format'),
},
},
],
[
{
name: 'opacity',
config: {
type: 'SliderControl',
label: t('Bubble Opacity'),
renderTrigger: true,
min: 0,
max: 1,
step: 0.1,
default: opacity,
description: t(
'Opacity of bubbles, 0 means completely transparent, 1 means opaque',
),
},
},
],
],
},
{
label: t('X Axis'),
expanded: true,
controlSetRows: [
[
{
name: 'x_axis_label',
config: {
type: 'TextControl',
label: t('X Axis Title'),
renderTrigger: true,
default: '',
},
},
],
[
{
name: 'xAxisLabelRotation',
config: {
type: 'SelectControl',
freeForm: true,
clearable: false,
label: t('Rotate x axis label'),
choices: [
[0, '0°'],
[45, '45°'],
],
default: xAxisLabelRotation,
renderTrigger: true,
description: t(
'Input field supports custom rotation. e.g. 30 for 30°',
),
},
},
],
[
{
name: 'x_axis_title_margin',
config: {
type: 'SelectControl',
freeForm: true,
clearable: true,
label: t('X AXIS TITLE MARGIN'),
renderTrigger: true,
default: sections.TITLE_MARGIN_OPTIONS[1],
choices: formatSelectOptions(sections.TITLE_MARGIN_OPTIONS),
},
},
],
[
{
name: 'xAxisFormat',
config: {
...sharedControls.y_axis_format,
label: t('X Axis Format'),
},
},
],
[
{
name: 'logXAxis',
config: {
type: 'CheckboxControl',
label: t('Logarithmic x-axis'),
renderTrigger: true,
default: logXAxis,
description: t('Logarithmic x-axis'),
},
},
],
],
},
{
label: t('Y Axis'),
expanded: true,
controlSetRows: [
[
{
name: 'y_axis_label',
config: {
type: 'TextControl',
label: t('Y Axis Title'),
renderTrigger: true,
default: '',
},
},
],
[
{
name: 'yAxisLabelRotation',
config: {
type: 'SelectControl',
freeForm: true,
clearable: false,
label: t('Rotate y axis label'),
choices: [
[0, '0°'],
[45, '45°'],
],
default: xAxisLabelRotation,
renderTrigger: true,
description: t(
'Input field supports custom rotation. e.g. 30 for 30°',
),
},
},
],
[
{
name: 'y_axis_title_margin',
config: {
type: 'SelectControl',
freeForm: true,
clearable: true,
label: t('Y AXIS TITLE MARGIN'),
renderTrigger: true,
default: sections.TITLE_MARGIN_OPTIONS[1],
choices: formatSelectOptions(sections.TITLE_MARGIN_OPTIONS),
},
},
],
['y_axis_format'],
[
{
name: 'logYAxis',
config: {
type: 'CheckboxControl',
label: t('Logarithmic y-axis'),
renderTrigger: true,
default: logYAxis,
description: t('Logarithmic y-axis'),
},
},
],
[
{
name: 'truncateYAxis',
config: {
type: 'CheckboxControl',
label: t('Truncate Y Axis'),
default: truncateYAxis,
renderTrigger: true,
description: t(
'Truncate Y Axis. Can be overridden by specifying a min or max bound.',
),
},
},
],
[
{
name: 'y_axis_bounds',
config: {
type: 'BoundsControl',
label: t('Y Axis Bounds'),
renderTrigger: true,
default: yAxisBounds,
description: t(
'Bounds for the Y-axis. When left empty, the bounds are ' +
'dynamically defined based on the min/max of the data. Note that ' +
"this feature will only expand the axis range. It won't " +
"narrow the data's extent.",
),
visibility: ({ controls }: ControlPanelsContainerProps) =>
Boolean(controls?.truncateYAxis?.value),
},
},
],
],
},
],
};
export default config;

Binary file not shown (new image, 132 KiB)

Binary file not shown (new image, 105 KiB)

Binary file not shown (new image, 112 KiB)

View File

@ -0,0 +1,60 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { Behavior, ChartMetadata, ChartPlugin, t } from '@superset-ui/core';
import thumbnail from './images/thumbnail.png';
import transformProps from './transformProps';
import buildQuery from './buildQuery';
import controlPanel from './controlPanel';
import example1 from './images/example1.png';
import example2 from './images/example2.png';
import { EchartsBubbleChartProps, EchartsBubbleFormData } from './types';
export default class EchartsBubbleChartPlugin extends ChartPlugin<
EchartsBubbleFormData,
EchartsBubbleChartProps
> {
constructor() {
super({
buildQuery,
controlPanel,
loadChart: () => import('./EchartsBubble'),
metadata: new ChartMetadata({
behaviors: [Behavior.INTERACTIVE_CHART],
category: t('Correlation'),
credits: ['https://echarts.apache.org'],
description: t(
'Visualizes a metric across three dimensions of data in a single chart (X axis, Y axis, and bubble size). Bubbles from the same group can be showcased using bubble color.',
),
exampleGallery: [{ url: example1 }, { url: example2 }],
name: t('Bubble Chart'),
tags: [
t('Multi-Dimensions'),
t('Aesthetic'),
t('Comparison'),
t('Scatter'),
t('Time'),
t('Trend'),
t('ECharts'),
],
thumbnail,
}),
transformProps,
});
}
}
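
Like other @superset-ui chart plugins, this class is expected to be instantiated, keyed, and registered before Superset can render the viz type; a sketch, with the key mirroring the echarts_bubble viz_type used in the tests below (import path assumed):

import EchartsBubbleChartPlugin from './Bubble'; // import path assumed

new EchartsBubbleChartPlugin()
  .configure({ key: 'echarts_bubble' })
  .register();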

View File

@ -0,0 +1,229 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { EChartsCoreOption, ScatterSeriesOption } from 'echarts';
import { extent } from 'd3-array';
import {
CategoricalColorNamespace,
getNumberFormatter,
AxisType,
getMetricLabel,
NumberFormatter,
} from '@superset-ui/core';
import { EchartsBubbleChartProps, EchartsBubbleFormData } from './types';
import { DEFAULT_FORM_DATA, MINIMUM_BUBBLE_SIZE } from './constants';
import { defaultGrid } from '../defaults';
import { getLegendProps } from '../utils/series';
import { Refs } from '../types';
import { parseYAxisBound } from '../utils/controls';
import { getDefaultTooltip } from '../utils/tooltip';
import { getPadding } from '../Timeseries/transformers';
import { convertInteger } from '../utils/convertInteger';
import { NULL_STRING } from '../constants';
function normalizeSymbolSize(
nodes: ScatterSeriesOption[],
maxBubbleValue: number,
) {
const [bubbleMinValue, bubbleMaxValue] = extent(nodes, x => x.data![0][2]);
const nodeSpread = bubbleMaxValue - bubbleMinValue;
nodes.forEach(node => {
// eslint-disable-next-line no-param-reassign
node.symbolSize =
(((node.data![0][2] - bubbleMinValue) / nodeSpread) *
(maxBubbleValue * 2) || 0) + MINIMUM_BUBBLE_SIZE;
});
}
export function formatTooltip(
params: any,
xAxisLabel: string,
yAxisLabel: string,
sizeLabel: string,
xAxisFormatter: NumberFormatter,
yAxisFormatter: NumberFormatter,
tooltipSizeFormatter: NumberFormatter,
) {
const title = params.data[4]
? `${params.data[3]} <br/> ${params.data[4]}`
: params.data[3];
return `<p>${title}</p>
${xAxisLabel}: ${xAxisFormatter(params.data[0])} <br/>
${yAxisLabel}: ${yAxisFormatter(params.data[1])} <br/>
${sizeLabel}: ${tooltipSizeFormatter(params.data[2])}`;
}
export default function transformProps(chartProps: EchartsBubbleChartProps) {
const { height, width, hooks, queriesData, formData, inContextMenu, theme } =
chartProps;
const { data = [] } = queriesData[0];
const {
x,
y,
size,
entity,
maxBubbleSize,
colorScheme,
series: bubbleSeries,
xAxisLabel: bubbleXAxisTitle,
yAxisLabel: bubbleYAxisTitle,
xAxisFormat,
yAxisFormat,
yAxisBounds,
logXAxis,
logYAxis,
xAxisTitleMargin,
yAxisTitleMargin,
truncateYAxis,
xAxisLabelRotation,
yAxisLabelRotation,
tooltipSizeFormat,
opacity,
showLegend,
legendOrientation,
legendMargin,
legendType,
}: EchartsBubbleFormData = { ...DEFAULT_FORM_DATA, ...formData };
const colorFn = CategoricalColorNamespace.getScale(colorScheme as string);
const legends: string[] = [];
const series: ScatterSeriesOption[] = [];
const xAxisLabel: string = getMetricLabel(x);
const yAxisLabel: string = getMetricLabel(y);
const sizeLabel: string = getMetricLabel(size);
const refs: Refs = {};
data.forEach(datum => {
const name =
((bubbleSeries ? datum[bubbleSeries] : datum[entity]) as string) ||
NULL_STRING;
const bubbleSeriesValue = bubbleSeries ? datum[bubbleSeries] : null;
series.push({
name,
data: [
[
datum[xAxisLabel],
datum[yAxisLabel],
datum[sizeLabel],
datum[entity],
bubbleSeriesValue as any,
],
],
type: 'scatter',
itemStyle: { color: colorFn(name), opacity },
});
legends.push(name);
});
normalizeSymbolSize(series, maxBubbleSize);
const xAxisFormatter = getNumberFormatter(xAxisFormat);
const yAxisFormatter = getNumberFormatter(yAxisFormat);
const tooltipSizeFormatter = getNumberFormatter(tooltipSizeFormat);
const [min, max] = yAxisBounds.map(parseYAxisBound);
const padding = getPadding(
showLegend,
legendOrientation,
true,
false,
legendMargin,
true,
'Left',
convertInteger(yAxisTitleMargin),
convertInteger(xAxisTitleMargin),
);
const echartOptions: EChartsCoreOption = {
series,
xAxis: {
axisLabel: { formatter: xAxisFormatter },
splitLine: {
lineStyle: {
type: 'dashed',
},
},
nameRotate: xAxisLabelRotation,
scale: true,
name: bubbleXAxisTitle,
nameLocation: 'middle',
nameTextStyle: {
fontWeight: 'bolder',
},
nameGap: convertInteger(xAxisTitleMargin),
type: logXAxis ? AxisType.log : AxisType.value,
},
yAxis: {
axisLabel: { formatter: yAxisFormatter },
splitLine: {
lineStyle: {
type: 'dashed',
},
},
nameRotate: yAxisLabelRotation,
scale: truncateYAxis,
name: bubbleYAxisTitle,
nameLocation: 'middle',
nameTextStyle: {
fontWeight: 'bolder',
},
nameGap: convertInteger(yAxisTitleMargin),
min,
max,
type: logYAxis ? AxisType.log : AxisType.value,
},
legend: {
...getLegendProps(legendType, legendOrientation, showLegend, theme),
data: legends,
},
tooltip: {
show: !inContextMenu,
...getDefaultTooltip(refs),
formatter: (params: any): string =>
formatTooltip(
params,
xAxisLabel,
yAxisLabel,
sizeLabel,
xAxisFormatter,
yAxisFormatter,
tooltipSizeFormatter,
),
},
grid: { ...defaultGrid, ...padding },
};
const { onContextMenu, setDataMask = () => {} } = hooks;
return {
refs,
height,
width,
echartOptions,
onContextMenu,
setDataMask,
formData,
};
}
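
normalizeSymbolSize maps each bubble's size metric linearly onto [MINIMUM_BUBBLE_SIZE, maxBubbleValue * 2 + MINIMUM_BUBBLE_SIZE]. The arithmetic on its own, with assumed sample values:

const MINIMUM_BUBBLE_SIZE = 5;
const maxBubbleValue = 25; // from the max_bubble_size control
const values = [10, 40, 100]; // sample size-metric values

const min = Math.min(...values);
const max = Math.max(...values);
const spread = max - min; // the plugin guards spread === 0 with `|| 0`

// (v - min) / spread maps into [0, 1]; scaling by maxBubbleValue * 2 and
// adding the floor yields sizes in [5, 55] for these inputs.
const sizes = values.map(
  v => ((v - min) / spread) * (maxBubbleValue * 2) + MINIMUM_BUBBLE_SIZE,
);
// sizes === [5, 21.67, 55] (middle value rounded)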

View File

@ -0,0 +1,57 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import {
ChartProps,
ChartDataResponseResult,
QueryFormData,
} from '@superset-ui/core';
import {
LegendFormData,
BaseTransformedProps,
CrossFilterTransformedProps,
} from '../types';
export type EchartsBubbleFormData = QueryFormData &
LegendFormData & {
series?: string;
entity: string;
xAxisFormat: string;
yAxisFormat: string;
logXAxis: boolean;
logYAxis: boolean;
xAxisBounds: [number | undefined | null, number | undefined | null];
yAxisBounds: [number | undefined | null, number | undefined | null];
xAxisLabel?: string;
colorScheme?: string;
defaultValue?: string[] | null;
dateFormat: string;
emitFilter: boolean;
tooltipFormat: string;
x: string;
y: string;
};
export interface EchartsBubbleChartProps
extends ChartProps<EchartsBubbleFormData> {
formData: EchartsBubbleFormData;
queriesData: ChartDataResponseResult[];
}
export type BubbleChartTransformedProps =
BaseTransformedProps<EchartsBubbleFormData> & CrossFilterTransformedProps;

View File

@ -32,7 +32,7 @@ import {
} from '@superset-ui/core';
import { EChartsCoreOption } from 'echarts';
import { CallbackDataParams } from 'echarts/types/src/util/types';
import { OpacityEnum } from '../constants';
import { NULL_STRING, OpacityEnum } from '../constants';
import { defaultGrid } from '../defaults';
import { Refs } from '../types';
import { formatSeriesName, getColtypesMapping } from '../utils/series';
@ -138,7 +138,10 @@ export function formatTooltip({
color: ${theme.colors.grayscale.base}"
>`,
`<div style="font-weight: ${theme.typography.weights.bold}">
${node.name}
${(node.name || NULL_STRING)
.toString()
.replaceAll('<', '&lt;')
.replaceAll('>', '&gt;')}
</div>`,
`<div">
${absolutePercentage} of total
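
The tooltip fix escapes angle brackets in node names before interpolating them into HTML, falling back to NULL_STRING for empty names. The same guard as a standalone helper (NULL_STRING value assumed):

const NULL_STRING = '<NULL>'; // assumed fallback label

function escapeAngleBrackets(name?: string | null): string {
  // Escapes only the two characters the tooltip rewrite targets; a fuller
  // sanitizer would also handle '&' and quotes.
  return (name || NULL_STRING)
    .toString()
    .replaceAll('<', '&lt;')
    .replaceAll('>', '&gt;');
}

// escapeAngleBrackets('<script>') === '&lt;script&gt;'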

View File

@ -168,7 +168,6 @@ export default function transformProps(
treeNodes.map(treeNode => {
const { name: nodeName, value, groupBy } = treeNode;
const name = formatSeriesName(nodeName, {
numberFormatter,
timeFormatter: getTimeFormatter(dateFormat),
...(coltypeMapping[groupBy] && {
coltype: coltypeMapping[groupBy],

View File

@ -34,6 +34,7 @@ export { default as EchartsTreeChartPlugin } from './Tree';
export { default as EchartsTreemapChartPlugin } from './Treemap';
export { BigNumberChartPlugin, BigNumberTotalChartPlugin } from './BigNumber';
export { default as EchartsSunburstChartPlugin } from './Sunburst';
export { default as EchartsBubbleChartPlugin } from './Bubble';
export { default as BoxPlotTransformProps } from './BoxPlot/transformProps';
export { default as FunnelTransformProps } from './Funnel/transformProps';
@ -46,6 +47,7 @@ export { default as TimeseriesTransformProps } from './Timeseries/transformProps
export { default as TreeTransformProps } from './Tree/transformProps';
export { default as TreemapTransformProps } from './Treemap/transformProps';
export { default as SunburstTransformProps } from './Sunburst/transformProps';
export { default as BubbleTransformProps } from './Bubble/transformProps';
export { DEFAULT_FORM_DATA as TimeseriesDefaultFormData } from './Timeseries/constants';

View File

@ -0,0 +1,93 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import buildQuery from '../../src/Bubble/buildQuery';
describe('Bubble buildQuery', () => {
const formData = {
datasource: '1__table',
viz_type: 'echarts_bubble',
entity: 'customer_name',
x: 'count',
y: {
aggregate: 'sum',
column: {
column_name: 'price_each',
},
expressionType: 'simple',
label: 'SUM(price_each)',
},
size: {
aggregate: 'sum',
column: {
column_name: 'sales',
},
expressionType: 'simple',
label: 'SUM(sales)',
},
};
it('Should build query without dimension', () => {
const queryContext = buildQuery(formData);
const [query] = queryContext.queries;
expect(query.columns).toEqual(['customer_name']);
expect(query.metrics).toEqual([
'count',
{
aggregate: 'sum',
column: {
column_name: 'price_each',
},
expressionType: 'simple',
label: 'SUM(price_each)',
},
{
aggregate: 'sum',
column: {
column_name: 'sales',
},
expressionType: 'simple',
label: 'SUM(sales)',
},
]);
});
it('Should build query with dimension', () => {
const queryContext = buildQuery({ ...formData, series: 'state' });
const [query] = queryContext.queries;
expect(query.columns).toEqual(['customer_name', 'state']);
expect(query.metrics).toEqual([
'count',
{
aggregate: 'sum',
column: {
column_name: 'price_each',
},
expressionType: 'simple',
label: 'SUM(price_each)',
},
{
aggregate: 'sum',
column: {
column_name: 'sales',
},
expressionType: 'simple',
label: 'SUM(sales)',
},
]);
});
});

View File

@ -0,0 +1,160 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import {
ChartProps,
getNumberFormatter,
SqlaFormData,
supersetTheme,
} from '@superset-ui/core';
import { EchartsBubbleChartProps } from 'plugins/plugin-chart-echarts/src/Bubble/types';
import transformProps, { formatTooltip } from '../../src/Bubble/transformProps';
describe('Bubble transformProps', () => {
const formData: SqlaFormData = {
datasource: '1__table',
viz_type: 'echarts_bubble',
entity: 'customer_name',
x: 'count',
y: {
aggregate: 'sum',
column: {
column_name: 'price_each',
},
expressionType: 'simple',
label: 'SUM(price_each)',
},
size: {
aggregate: 'sum',
column: {
column_name: 'sales',
},
expressionType: 'simple',
label: 'SUM(sales)',
},
yAxisBounds: [null, null],
};
const chartProps = new ChartProps({
formData,
height: 800,
width: 800,
queriesData: [
{
data: [
{
customer_name: 'AV Stores, Co.',
count: 10,
'SUM(price_each)': 20,
'SUM(sales)': 30,
},
{
customer_name: 'Alpha Cognac',
count: 40,
'SUM(price_each)': 50,
'SUM(sales)': 60,
},
{
customer_name: 'Amica Models & Co.',
count: 70,
'SUM(price_each)': 80,
'SUM(sales)': 90,
},
],
},
],
theme: supersetTheme,
});
it('Should transform props for viz', () => {
expect(transformProps(chartProps as EchartsBubbleChartProps)).toEqual(
expect.objectContaining({
width: 800,
height: 800,
echartOptions: expect.objectContaining({
series: expect.arrayContaining([
expect.objectContaining({
data: expect.arrayContaining([
[10, 20, 30, 'AV Stores, Co.', null],
]),
}),
expect.objectContaining({
data: expect.arrayContaining([
[40, 50, 60, 'Alpha Cognac', null],
]),
}),
expect.objectContaining({
data: expect.arrayContaining([
[70, 80, 90, 'Amica Models & Co.', null],
]),
}),
]),
}),
}),
);
});
});
describe('Bubble formatTooltip', () => {
const dollarFormatter = getNumberFormatter('$,.2f');
const percentFormatter = getNumberFormatter(',.1%');
it('Should generate correct bubble label content with dimension', () => {
const params = {
data: [10000, 20000, 3, 'bubble title', 'bubble dimension'],
};
expect(
formatTooltip(
params,
'x-axis-label',
'y-axis-label',
'size-label',
dollarFormatter,
dollarFormatter,
percentFormatter,
),
).toEqual(
`<p>bubble title <br/> bubble dimension</p>
x-axis-label: $10,000.00 <br/>
y-axis-label: $20,000.00 <br/>
size-label: 300.0%`,
);
});
it('Should generate correct bubble label content without dimension', () => {
const params = {
data: [10000, 25000, 3, 'bubble title', null],
};
expect(
formatTooltip(
params,
'x-axis-label',
'y-axis-label',
'size-label',
dollarFormatter,
dollarFormatter,
percentFormatter,
),
).toEqual(
`<p>bubble title</p>
x-axis-label: $10,000.00 <br/>
y-axis-label: $25,000.00 <br/>
size-label: 300.0%`,
);
});
});

View File

@ -32,7 +32,9 @@ const percentMetrics: typeof sharedControls.metrics = {
type: 'MetricsControl',
label: t('Percentage metrics'),
description: t(
'Metrics for which percentage of total are to be displayed. Calculated from only data within the row limit.',
'Select one or more metrics to display; they will be shown as percentages of the total. ' +
'Percentage metrics will be calculated only from data within the row limit. ' +
'You can use an aggregation function on a column or write custom SQL to create a percentage metric.',
),
multi: true,
visibility: isAggMode,

Some files were not shown because too many files have changed in this diff.