Merge branch 'master' into hotfix/lnglat-and-delimited

Commit edbd423599 by RoXoM on 2024-03-26 20:47:20 +08:00
271 changed files with 20270 additions and 4445 deletions


@@ -72,7 +72,10 @@ github:
- test-mysql (3.9)
- test-postgres (3.9)
- test-postgres (3.10)
- test-postgres-hive (3.9)
- test-postgres-presto (3.9)
- test-sqlite (3.9)
- unit-tests (3.9)
required_pull_request_reviews:
dismiss_stale_reviews: false


@@ -0,0 +1,34 @@
name: 'Setup Python Environment'
description: 'Set up Python and install dependencies with optional configurations.'
inputs:
python-version:
description: 'Python version to set up.'
required: true
default: '3.9'
cache:
description: 'Cache dependencies. Options: pip'
required: false
default: 'pip'
requirements-type:
description: 'Type of requirements to install. Options: base, dev'
required: false
default: 'dev'
runs:
using: 'composite'
steps:
- name: Set up Python ${{ inputs.python-version }}
uses: actions/setup-python@v5
with:
python-version: ${{ inputs.python-version }}
cache: ${{ inputs.cache }}
- name: Install dependencies
run: |
sudo apt-get update && sudo apt-get -y install libldap2-dev libsasl2-dev
pip install --upgrade pip setuptools wheel
if [ "${{ inputs.requirements-type }}" = "dev" ]; then
pip install -r requirements/development.txt
elif [ "${{ inputs.requirements-type }}" = "base" ]; then
pip install -r requirements/base.txt
fi
shell: bash


@@ -0,0 +1,11 @@
name: 'Setup supersetbot'
description: 'Sets up supersetbot npm lib from the repo'
runs:
using: 'composite'
steps:
- name: Install dependencies
shell: bash
run: |
cd .github/supersetbot
npm install
npm link

.github/supersetbot/.eslintrc.json vendored Normal file

@@ -0,0 +1,18 @@
{
"extends": "airbnb-base",
"rules": {
"import/extensions": 0,
"import/prefer-default-export": 0,
"func-names": 0,
"no-console": 0,
"class-methods-use-this": 0
},
"parserOptions": {
"ecmaVersion": 2020,
"sourceType": "module"
},
"parserOptions": {
"ecmaVersion": "latest",
"sourceType": "module",
"requireConfigFile": false
},
"env": {
"jest": true
}
}

.github/supersetbot/README.md vendored Normal file

@@ -0,0 +1,37 @@
# supersetbot
supersetbot is a utility bot that can be used to help around GitHub, CI and beyond.
The bot can be used as a local CLI or, for a subset of supported use cases, can be invoked directly
from GitHub comments.
Because it's its own npm app, it can be tested/deployed/used in isolation from the rest of
Superset, and it moves some of the complexity out of GitHub Actions and into a nifty
utility that can be used in different contexts.
## Features
```bash
$ nvm use 20
$ npm i -g supersetbot
$ supersetbot
Usage: supersetbot [options] [command]
Options:
-v, --verbose Output extra debugging information
-r, --repo <repo> The GitHub repo to use (ie: "apache/superset")
-d, --dry-run Run the command in dry-run mode
-a, --actor <actor> The actor
-h, --help display help for command
Commands:
label [options] <label> Add a label to an issue or PR
unlabel [options] <label> Remove a label from an issue or PR
release-label-pr [options] <prId> Figure out first release for PR and label it
version Prints supersetbot's version number
release-label-prs [options] Given a set of PRs, auto-release label them
release-label [options] <release> Find and sync release labels for all PRs in a given release
orglabel [options] Add an org label based on the author
docker [options] Generates/runs docker build commands used in CI
help [command] display help for command
```
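When invoked from GitHub rather than a shell, the same commands are issued as comments on an issue or PR — for example, commenting `@supersetbot label test-label` triggers the same code path as running `supersetbot label test-label` locally.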

.github/supersetbot/jest.config.js vendored Normal file

@@ -0,0 +1,8 @@
export default {
transform: {
},
testEnvironment: 'node',
moduleNameMapper: {
'^(\\.{1,2}/.*)\\.js$': '$1',
},
};

.github/supersetbot/package-lock.json generated vendored Normal file

File diff suppressed because it is too large.

.github/supersetbot/package.json vendored Normal file

@@ -0,0 +1,36 @@
{
"name": "supersetbot",
"version": "0.4.2",
"description": "A bot for the Superset GitHub repo",
"type": "module",
"main": "src/index.js",
"scripts": {
"test": "node --experimental-vm-modules node_modules/jest/bin/jest.js",
"eslint": "eslint",
"supersetbot": "supersetbot"
},
"keywords": [],
"author": "",
"license": "ISC",
"dependencies": {
"@octokit/plugin-throttling": "^8.1.3",
"@octokit/rest": "^20.0.2",
"commander": "^11.0.0",
"semver": "^7.6.0",
"simple-git": "^3.22.0",
"string-argv": "^0.3.2"
},
"devDependencies": {
"@jest/globals": "^29.7.0",
"eslint": "^8.56.0",
"eslint-config-airbnb": "^19.0.4",
"eslint-plugin-import": "^2.29.1",
"eslint-plugin-jsx-a11y": "^6.8.0",
"eslint-plugin-react": "^7.33.2",
"eslint-plugin-react-hooks": "^4.6.0",
"jest": "^29.7.0"
},
"bin": {
"supersetbot": "./src/supersetbot"
}
}

.github/supersetbot/src/cli.js vendored Executable file

@@ -0,0 +1,175 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { Command, Option } from 'commander';
import * as docker from './docker.js';
import * as utils from './utils.js';
import Github from './github.js';
import Git from './git.js';
export default function getCLI(context) {
const program = new Command();
// Some reusable options
const issueOption = new Option('-i, --issue <issue>', 'The issue number', process.env.GITHUB_ISSUE_NUMBER);
const excludeCherriesOption = new Option('-c, --exclude-cherries', 'Generate cherry labels pointing to each release where the PR has been cherried');
// Setting up top-level CLI options
program
.option('-v, --verbose', 'Output extra debugging information')
.option('-r, --repo <repo>', 'The GitHub repo to use (ie: "apache/superset")', process.env.GITHUB_REPOSITORY)
.option('-d, --dry-run', 'Run the command in dry-run mode')
.option('-a, --actor <actor>', 'The actor', process.env.GITHUB_ACTOR);
program.command('label <label>')
.description('Add a label to an issue or PR')
.addOption(issueOption)
.action(async function (label) {
const opts = context.processOptions(this, ['issue', 'repo']);
const github = new Github({ context, issueNumber: opts.issue });
await github.label(opts.issue, label, opts.actor, opts.verbose, opts.dryRun);
});
program.command('unlabel <label>')
.description('Remove a label from an issue or PR')
.addOption(issueOption)
.action(async function (label) {
const opts = context.processOptions(this, ['issue', 'repo']);
const github = new Github({ context, issueNumber: opts.issue });
await github.unlabel(opts.issue, label, opts.actor, opts.verbose, opts.dryRun);
});
program.command('release-label-pr <prId>')
.description('Figure out first release for PR and label it')
.addOption(excludeCherriesOption)
.action(async function (prId) {
const opts = context.processOptions(this, ['repo']);
const git = new Git(context);
await git.loadReleases();
let wrapped = context.commandWrapper({
func: git.getReleaseLabels,
verbose: opts.verbose,
});
const labels = await wrapped(parseInt(prId, 10), opts.verbose, opts.excludeCherries);
const github = new Github({ context, issueNumber: opts.issue });
wrapped = context.commandWrapper({
func: github.syncLabels,
verbose: opts.verbose,
});
await wrapped({ labels, prId, actor: opts.actor, verbose: opts.verbose, dryRun: opts.dryRun });
});
program.command('version')
.description("Prints supersetbot's version number")
.action(async () => {
const version = await utils.currentPackageVersion();
context.log(version);
});
if (context.source === 'CLI') {
program.command('release-label-prs')
.description('Given a set of PRs, auto-release label them')
.option('-s, --search <search>', 'extra search string to append using the GitHub mini-language')
.option('-p, --pages <pages>', 'the number of pages (100 per page) to fetch and process', 10)
.action(async function () {
const opts = context.processOptions(this, ['repo']);
const github = new Github({ context, issueNumber: opts.issue });
const prs = await github.searchMergedPRs({
query: opts.search,
onlyUnlabeled: true,
verbose: opts.verbose,
pages: opts.pages,
});
const prIdLabelMap = new Map(prs.map((pr) => [pr.number, pr.labels]));
const git = new Git(context);
await git.loadReleases();
const prsPromises = prs.map(async (pr) => {
const labels = await git.getReleaseLabels(pr.number, opts.verbose);
return { prId: pr.number, labels };
});
const prsTargetLabel = await Promise.all(prsPromises);
// eslint-disable-next-line no-restricted-syntax
for (const { prId, labels } of prsTargetLabel) {
// Running sequentially to avoid rate limiting
// eslint-disable-next-line no-await-in-loop
await github.syncLabels({
labels,
existingLabels: prIdLabelMap.get(prId).map(l => l.name),
prId,
...opts,
});
}
});
program.command('release-label <release>')
.description('Find and sync release labels for all PRs in a given release')
.addOption(excludeCherriesOption)
.action(async function (release) {
const opts = context.processOptions(this, ['repo']);
const git = new Git(context);
await git.loadReleases();
const prs = await git.getPRsToSync(release, opts.verbose, opts.excludeCherries);
const github = new Github({ context });
// eslint-disable-next-line no-restricted-syntax
for (const { prId, labels } of prs) {
// Running sequentially to avoid rate limiting
// eslint-disable-next-line no-await-in-loop
await github.syncLabels({
prId,
labels,
...opts,
});
}
});
program.command('orglabel')
.description('Add an org label based on the author')
.addOption(issueOption)
.action(async function () {
const opts = context.processOptions(this, ['issue', 'repo']);
const github = new Github({ context, issueNumber: opts.issue });
await github.assignOrgLabel(opts.issue, opts.verbose, opts.dryRun);
});
program.command('docker')
.description('Generates/runs docker build commands used in CI')
.option('-t, --preset <preset>', 'Build preset', /^(lean|dev|dockerize|websocket|py310|ci)$/i, 'lean')
.option('-c, --context <context>', 'Build context', /^(push|pull_request|release)$/i, 'local')
.option('-r, --context-ref <ref>', 'Reference to the PR, release, or branch')
.option('-p, --platform <platform...>', 'Platforms (multiple values allowed)')
.option('-f, --force-latest', 'Force the "latest" tag on the release')
.option('-v, --verbose', 'Print more info')
.action(function () {
const opts = context.processOptions(this, ['preset']);
opts.platform = opts.platform || ['linux/arm64'];
const cmd = docker.getDockerCommand({ ...opts });
context.log(cmd);
if (!opts.dryRun) {
utils.runShellCommand(cmd, false);
}
});
}
return program;
}

.github/supersetbot/src/cli.test.js vendored Normal file

@@ -0,0 +1,12 @@
import { spawnSync } from 'child_process';
describe('CLI Test', () => {
test.each([
['./src/supersetbot', ['docker', '--preset', 'dev', '--dry-run'], '--target dev'],
['./src/supersetbot', ['docker', '--dry-run'], '--target lean'],
])('%s with args %s prints output containing %s', (command, args, contains) => {
const result = spawnSync(command, args);
const output = result.stdout.toString();
expect(output).toContain(contains);
});
});

.github/supersetbot/src/context.js vendored Normal file

@@ -0,0 +1,152 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { parseArgsStringToArgv } from 'string-argv';
class Context {
constructor(source) {
this.hasErrors = false;
this.source = source;
this.options = {};
this.errorLogs = [];
this.logs = [];
this.repo = null;
this.optToEnvMap = {
issue: 'GITHUB_ISSUE_NUMBER',
repo: 'GITHUB_REPOSITORY',
};
}
requireOption(optionName, options) {
const optionValue = options[optionName];
if (optionValue === undefined || optionValue === null) {
this.logError(`option [${optionName}] is required`);
this.exit(1);
}
}
parseArgs(s) {
return parseArgsStringToArgv(s);
}
requireOptions(optionNames, options) {
optionNames.forEach((optionName) => {
this.requireOption(optionName, options);
});
}
processOptions(command, requiredOptions = []) {
const raw = command.parent?.rawArgs;
this.command = '???';
if (raw) {
this.command = raw.map((s) => (s.includes(' ') ? `"${s}"` : s)).join(' ').replace('node ', '');
}
this.options = { ...command.opts(), ...command.parent.opts() };
// Runtime defaults for unit tests, since commander can't receive callables as defaults
Object.entries(this.optToEnvMap).forEach(([k, v]) => {
if (!this.options[k]) {
this.options[k] = process.env[v];
}
});
this.requireOptions(requiredOptions, this.options);
this.issueNumber = this.options.issue;
if (this.source === 'GHA') {
this.options.actor = process.env.GITHUB_ACTOR || 'UNKNOWN';
this.options.repo = process.env.GITHUB_REPOSITORY;
}
this.repo = this.options.repo;
return this.options;
}
log(msg) {
console.log(msg);
this.logs = [...this.logs, msg];
}
logSuccess(msg) {
const augMsg = `🟢 SUCCESS: ${msg}`;
console.log(augMsg);
this.logs.push(augMsg);
}
logError(msg) {
this.hasErrors = true;
const augMsg = `🔴 ERROR: ${msg}`;
console.error(augMsg);
this.errorLogs.push(augMsg);
}
exit(code = 0) {
this.onDone();
process.exit(code);
}
commandWrapper({
func, successMsg, errorMsg = null, verbose = false, dryRun = false,
}) {
return async (...args) => {
let resp;
let hasError = false;
try {
if (!dryRun) {
resp = await func(...args);
}
if (verbose && resp) {
console.log(resp);
}
} catch (error) {
hasError = true;
if (errorMsg) {
this.logError(errorMsg);
} else {
this.logError(error);
}
throw (error);
}
if (successMsg && !hasError) {
this.logSuccess(successMsg);
}
return resp;
};
}
doneComment() {
const msgs = [...this.logs, ...this.errorLogs];
let comment = '';
comment += `> \`${this.command}\`\n`;
comment += '```\n';
comment += msgs.join('\n');
comment += '\n```';
return comment;
}
async onDone() {
let msg;
if (this.source === 'GHA') {
msg = this.doneComment();
}
return msg;
}
}
export default Context;
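A quick usage sketch of `commandWrapper` (illustration only, not part of this diff; the wrapped function and messages are hypothetical) — it decorates an async function so its result, success, and failure are logged through the context consistently:

```js
import Context from './context.js';

const context = new Context('CLI');
const wrapped = context.commandWrapper({
  func: async (name) => `hello ${name}`, // hypothetical stand-in for a real command
  successMsg: 'greeting generated',
  verbose: true,
});
// Prints the resolved value (verbose mode), then "🟢 SUCCESS: greeting generated"
await wrapped('world');
```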

.github/supersetbot/src/docker.js vendored Normal file

@@ -0,0 +1,142 @@
import { spawnSync } from 'child_process';
const REPO = 'apache/superset';
const CACHE_REPO = `${REPO}-cache`;
const BASE_PY_IMAGE = '3.9-slim-bookworm';
export function runCmd(command, raiseOnFailure = true) {
const { stdout, stderr } = spawnSync(command, { shell: true, encoding: 'utf-8', env: process.env });
if (stderr && raiseOnFailure) {
throw new Error(stderr);
}
return stdout;
}
function getGitSha() {
return runCmd('git rev-parse HEAD').trim();
}
function getBuildContextRef(buildContext) {
const event = buildContext || process.env.GITHUB_EVENT_NAME;
const githubRef = process.env.GITHUB_REF || '';
if (event === 'pull_request') {
const githubHeadRef = process.env.GITHUB_HEAD_REF || '';
return githubHeadRef.replace(/[^a-zA-Z0-9]/g, '-').slice(0, 40);
} if (event === 'release') {
return githubRef.replace('refs/tags/', '').slice(0, 40);
} if (event === 'push') {
return githubRef.replace('refs/heads/', '').replace(/[^a-zA-Z0-9]/g, '-').slice(0, 40);
}
return '';
}
export function isLatestRelease(release) {
const output = runCmd(`../../scripts/tag_latest_release.sh ${release} --dry-run`, false) || '';
return output.includes('SKIP_TAG::false');
}
function makeDockerTag(parts) {
return `${REPO}:${parts.filter((part) => part).join('-')}`;
}
export function getDockerTags({
preset, platforms, sha, buildContext, buildContextRef, forceLatest = false,
}) {
const tags = new Set();
const tagChunks = [];
const isLatest = isLatestRelease(buildContextRef);
if (preset !== 'lean') {
tagChunks.push(preset);
}
if (platforms.length === 1) {
const platform = platforms[0];
const shortBuildPlatform = platform.replace('linux/', '').replace('64', '');
if (shortBuildPlatform !== 'amd') {
tagChunks.push(shortBuildPlatform);
}
}
tags.add(makeDockerTag([sha, ...tagChunks]));
tags.add(makeDockerTag([sha.slice(0, 7), ...tagChunks]));
if (buildContext === 'release') {
tags.add(makeDockerTag([buildContextRef, ...tagChunks]));
if (isLatest || forceLatest) {
tags.add(makeDockerTag(['latest', ...tagChunks]));
}
} else if (buildContext === 'push' && buildContextRef === 'master') {
tags.add(makeDockerTag(['master', ...tagChunks]));
} else if (buildContext === 'pull_request') {
tags.add(makeDockerTag([`pr-${buildContextRef}`, ...tagChunks]));
}
return [...tags];
}
export function getDockerCommand({
preset, platform, buildContext, buildContextRef, forceLatest = false,
}) {
const platforms = platform;
let buildTarget = '';
let pyVer = BASE_PY_IMAGE;
let dockerContext = '.';
if (preset === 'dev') {
buildTarget = 'dev';
} else if (preset === 'lean') {
buildTarget = 'lean';
} else if (preset === 'py310') {
buildTarget = 'lean';
pyVer = '3.10-slim-bookworm';
} else if (preset === 'websocket') {
dockerContext = 'superset-websocket';
} else if (preset === 'ci') {
buildTarget = 'ci';
} else if (preset === 'dockerize') {
dockerContext = '-f dockerize.Dockerfile .';
} else {
console.error(`Invalid build preset: ${preset}`);
process.exit(1);
}
let ref = buildContextRef;
if (!ref) {
ref = getBuildContextRef(buildContext);
}
const sha = getGitSha();
const tags = getDockerTags({
preset, platforms, sha, buildContext, buildContextRef: ref, forceLatest,
}).map((tag) => `-t ${tag}`).join(' \\\n ');
const isAuthenticated = !!(process.env.DOCKERHUB_TOKEN);
const dockerArgs = isAuthenticated ? '--push' : '--load';
const targetArgument = buildTarget ? `--target ${buildTarget}` : '';
const cacheRef = `${CACHE_REPO}:${pyVer}`;
const platformArg = `--platform ${platforms.join(',')}`;
const cacheFromArg = `--cache-from=type=registry,ref=${cacheRef}`;
const cacheToArg = isAuthenticated ? `--cache-to=type=registry,mode=max,ref=${cacheRef}` : '';
const buildArg = pyVer ? `--build-arg PY_VER=${pyVer}` : '';
const actor = process.env.GITHUB_ACTOR;
return `docker buildx build \\
${dockerArgs} \\
${tags} \\
${cacheFromArg} \\
${cacheToArg} \\
${targetArgument} \\
${buildArg} \\
${platformArg} \\
--label sha=${sha} \\
--label target=${buildTarget} \\
--label build_trigger=${ref} \\
--label base=${pyVer} \\
--label build_actor=${actor} \\
${dockerContext}
`;
}

.github/supersetbot/src/docker.test.js vendored Normal file

@@ -0,0 +1,244 @@
import * as dockerUtils from './docker.js';
const SHA = '22e7c602b9aa321ec7e0df4bb0033048664dcdf0';
const PR_ID = '666';
const OLD_REL = '2.1.0';
const NEW_REL = '2.1.1';
const REPO = 'apache/superset';
beforeEach(() => {
process.env.TEST_ENV = 'true';
});
afterEach(() => {
delete process.env.TEST_ENV;
});
describe('isLatestRelease', () => {
test.each([
['2.1.0', false],
['2.1.1', true],
['1.0.0', false],
['3.0.0', true],
])('returns %s for release %s', (release, expectedBool) => {
expect(dockerUtils.isLatestRelease(release)).toBe(expectedBool);
});
});
describe('getDockerTags', () => {
test.each([
// PRs
[
'lean',
['linux/arm64'],
SHA,
'pull_request',
PR_ID,
[`${REPO}:22e7c60-arm`, `${REPO}:${SHA}-arm`, `${REPO}:pr-${PR_ID}-arm`],
],
[
'ci',
['linux/amd64'],
SHA,
'pull_request',
PR_ID,
[`${REPO}:22e7c60-ci`, `${REPO}:${SHA}-ci`, `${REPO}:pr-${PR_ID}-ci`],
],
[
'lean',
['linux/amd64'],
SHA,
'pull_request',
PR_ID,
[`${REPO}:22e7c60`, `${REPO}:${SHA}`, `${REPO}:pr-${PR_ID}`],
],
[
'dev',
['linux/arm64'],
SHA,
'pull_request',
PR_ID,
[
`${REPO}:22e7c60-dev-arm`,
`${REPO}:${SHA}-dev-arm`,
`${REPO}:pr-${PR_ID}-dev-arm`,
],
],
[
'dev',
['linux/amd64'],
SHA,
'pull_request',
PR_ID,
[`${REPO}:22e7c60-dev`, `${REPO}:${SHA}-dev`, `${REPO}:pr-${PR_ID}-dev`],
],
// old releases
[
'lean',
['linux/arm64'],
SHA,
'release',
OLD_REL,
[`${REPO}:22e7c60-arm`, `${REPO}:${SHA}-arm`, `${REPO}:${OLD_REL}-arm`],
],
[
'lean',
['linux/amd64'],
SHA,
'release',
OLD_REL,
[`${REPO}:22e7c60`, `${REPO}:${SHA}`, `${REPO}:${OLD_REL}`],
],
[
'dev',
['linux/arm64'],
SHA,
'release',
OLD_REL,
[
`${REPO}:22e7c60-dev-arm`,
`${REPO}:${SHA}-dev-arm`,
`${REPO}:${OLD_REL}-dev-arm`,
],
],
[
'dev',
['linux/amd64'],
SHA,
'release',
OLD_REL,
[`${REPO}:22e7c60-dev`, `${REPO}:${SHA}-dev`, `${REPO}:${OLD_REL}-dev`],
],
// new releases
[
'lean',
['linux/arm64'],
SHA,
'release',
NEW_REL,
[
`${REPO}:22e7c60-arm`,
`${REPO}:${SHA}-arm`,
`${REPO}:${NEW_REL}-arm`,
`${REPO}:latest-arm`,
],
],
[
'lean',
['linux/amd64'],
SHA,
'release',
NEW_REL,
[`${REPO}:22e7c60`, `${REPO}:${SHA}`, `${REPO}:${NEW_REL}`, `${REPO}:latest`],
],
[
'dev',
['linux/arm64'],
SHA,
'release',
NEW_REL,
[
`${REPO}:22e7c60-dev-arm`,
`${REPO}:${SHA}-dev-arm`,
`${REPO}:${NEW_REL}-dev-arm`,
`${REPO}:latest-dev-arm`,
],
],
[
'dev',
['linux/amd64'],
SHA,
'release',
NEW_REL,
[
`${REPO}:22e7c60-dev`,
`${REPO}:${SHA}-dev`,
`${REPO}:${NEW_REL}-dev`,
`${REPO}:latest-dev`,
],
],
// merge on master
[
'lean',
['linux/arm64'],
SHA,
'push',
'master',
[`${REPO}:22e7c60-arm`, `${REPO}:${SHA}-arm`, `${REPO}:master-arm`],
],
[
'lean',
['linux/amd64'],
SHA,
'push',
'master',
[`${REPO}:22e7c60`, `${REPO}:${SHA}`, `${REPO}:master`],
],
[
'dev',
['linux/arm64'],
SHA,
'push',
'master',
[
`${REPO}:22e7c60-dev-arm`,
`${REPO}:${SHA}-dev-arm`,
`${REPO}:master-dev-arm`,
],
],
[
'dev',
['linux/amd64'],
SHA,
'push',
'master',
[`${REPO}:22e7c60-dev`, `${REPO}:${SHA}-dev`, `${REPO}:master-dev`],
],
])('returns expected tags', (preset, platforms, sha, buildContext, buildContextRef, expectedTags) => {
const tags = dockerUtils.getDockerTags({
preset, platforms, sha, buildContext, buildContextRef,
});
expect(tags).toEqual(expect.arrayContaining(expectedTags));
});
});
describe('getDockerCommand', () => {
test.each([
[
'lean',
['linux/amd64'],
true,
SHA,
'push',
'master',
['--push', `-t ${REPO}:master `],
],
[
'dev',
['linux/amd64'],
false,
SHA,
'push',
'master',
['--load', `-t ${REPO}:master-dev `],
],
// multi-platform
[
'lean',
['linux/arm64', 'linux/amd64'],
true,
SHA,
'push',
'master',
['--platform linux/arm64,linux/amd64'],
],
])('returns expected docker command', (preset, platform, isAuthenticated, sha, buildContext, buildContextRef, contains) => {
const cmd = dockerUtils.getDockerCommand({
preset, platform, isAuthenticated, sha, buildContext, buildContextRef,
});
contains.forEach((expectedSubstring) => {
expect(cmd).toContain(expectedSubstring);
});
});
});

.github/supersetbot/src/git.js vendored Normal file

@@ -0,0 +1,120 @@
import simpleGit from 'simple-git';
import semver from 'semver';
import GitRelease from './git_release.js';
export default class Git {
#releaseTags;
constructor(context, mainBranch = 'master') {
this.context = context;
this.mainBranch = mainBranch;
this.releases = new Map();
this.git = simpleGit();
this.mainBranchGitRelease = this.mainBranchGitRelease.bind(this);
this.getReleaseLabels = this.getReleaseLabels.bind(this);
}
async mainBranchGitRelease() {
let rel = this.releases.get(this.mainBranch);
if (!rel) {
rel = await this.loadRelease(this.mainBranch);
}
return rel;
}
async releaseTags() {
if (!this.#releaseTags) {
const tags = await this.git.tags();
// Filter tags to include only those that match semver and are official releases
const semverTags = tags.all.filter((tag) => semver.valid(tag) && !tag.includes('-') && !tag.includes('v'));
semverTags.sort((a, b) => semver.compare(a, b));
this.#releaseTags = semverTags;
}
return this.#releaseTags;
}
async loadMainBranch() {
await this.loadRelease(this.mainBranch);
}
async loadReleases(tags = null) {
const tagsToFetch = tags || await this.releaseTags();
if (!tags) {
await this.loadMainBranch();
}
const promises = [];
tagsToFetch.forEach((tag) => {
promises.push(this.loadRelease(tag));
});
await Promise.all(promises);
}
async loadRelease(tag) {
const release = new GitRelease(tag, this.context);
await release.load();
this.releases.set(tag, release);
return release;
}
static shortenSHA(sha) {
return sha.substring(0, 7);
}
async getReleaseLabels(prNumber, verbose, excludeCherries = false) {
const labels = [];
const main = await this.mainBranchGitRelease();
const commit = main.prIdCommitMap.get(prNumber);
if (commit) {
const { sha } = commit;
const shortSHA = Git.shortenSHA(sha);
if (verbose) {
console.log(`PR ${prNumber} is ${shortSHA} on branch ${this.mainBranch}`);
}
let firstGitReleased = null;
const tags = await this.releaseTags();
tags.forEach((tag) => {
const release = this.releases.get(tag);
if (release.shaCommitMap.get(sha) && !firstGitReleased && release.tag !== this.mainBranch) {
firstGitReleased = release.tag;
labels.push(`🚢 ${release.tag}`);
}
const commitInGitRelease = release.prIdCommitMap.get(prNumber);
if (!excludeCherries && commitInGitRelease && commitInGitRelease.sha !== sha) {
labels.push(`🍒 ${release.tag}`);
}
});
if (labels.length >= 1) {
// using this emoji to show it's been labeled by the bot
labels.push('🏷️ bot');
}
}
return labels;
}
async previousRelease(release) {
const tags = await this.releaseTags();
return tags[tags.indexOf(release) - 1];
}
async getPRsToSync(release, verbose = false, excludeCherries = false) {
const prevRelease = await this.previousRelease(release);
const releaseRange = new GitRelease(release, this.context, prevRelease);
await releaseRange.load();
const prIds = releaseRange.prIdCommitMap.keys();
const prs = [];
const promises = [];
[...prIds].forEach(prId => {
promises.push(
this.getReleaseLabels(prId, verbose, excludeCherries)
.then((labels) => {
prs.push({ prId, labels });
}),
);
});
await Promise.all(promises);
return prs;
}
}
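For illustration, a minimal sketch of how the `release-label-pr` command drives this class (assumes it runs from a clone of the repo; the PR number is hypothetical):

```js
import Context from './context.js';
import Git from './git.js';

const git = new Git(new Context('CLI'));
await git.loadReleases(); // loads the main branch plus every semver release tag
const labels = await git.getReleaseLabels(12345, true); // verbose
console.log(labels); // e.g. ['🚢 3.0.0', '🏷️ bot'] if the PR first shipped in 3.0.0
```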

.github/supersetbot/src/git_release.js vendored Normal file

@@ -0,0 +1,50 @@
import simpleGit from 'simple-git';
export default class GitRelease {
constructor(tag, context, from = null) {
this.tag = tag;
this.context = context;
this.prNumberRegex = /\(#(\d+)\)/;
this.shaCommitMap = null;
this.prIdCommitMap = null;
this.prCommitMap = null;
this.git = simpleGit();
this.from = from;
}
extractPRNumber(commitMessage) {
const match = (commitMessage || '').match(this.prNumberRegex);
return match ? parseInt(match[1], 10) : null;
}
async load() {
let from = this.from || await this.git.firstCommit();
if (from.includes('\n')) {
[from] = from.split('\n');
}
const range = `${this.from || 'first'}..${this.tag}`;
const commits = await this.git.log({ from, to: this.tag });
this.context.log(`${range} - fetched ${commits.all.length} commits`);
this.shaCommitMap = new Map();
commits.all.forEach((commit) => {
const sha = commit.hash.substring(0, 7);
this.shaCommitMap.set(
sha,
{
prId: this.extractPRNumber(commit.message),
message: commit.message,
sha,
},
);
});
this.prIdCommitMap = new Map();
// eslint-disable-next-line no-restricted-syntax
for (const commit of this.shaCommitMap.values()) {
if (commit.prId) {
this.prIdCommitMap.set(commit.prId, commit);
}
}
}
}
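A hedged sketch of using `GitRelease` on its own — the tag and PR number are hypothetical, and the context argument is a minimal stand-in since only `context.log` is called:

```js
import GitRelease from './git_release.js';

const rel = new GitRelease('3.0.0', { log: console.log });
await rel.load();
const commit = rel.prIdCommitMap.get(12345);
if (commit) {
  console.log(`PR 12345 landed as ${commit.sha}: ${commit.message}`);
}
```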

.github/supersetbot/src/github.js vendored Normal file

@@ -0,0 +1,252 @@
import { Octokit } from '@octokit/rest';
import { throttling } from '@octokit/plugin-throttling';
import { ORG_LIST, PROTECTED_LABEL_PATTERNS, COMMITTER_TEAM } from './metadata.js';
class Github {
#userInTeamCache;
constructor({ context, issueNumber = null, token = null }) {
this.context = context;
this.issueNumber = issueNumber;
const githubToken = token || process.env.GITHUB_TOKEN;
if (!githubToken) {
const msg = 'GITHUB_TOKEN is not set';
this.context.logError(msg);
}
const throttledOctokit = Octokit.plugin(throttling);
// eslint-disable-next-line new-cap
this.octokit = new throttledOctokit({
auth: githubToken,
throttle: {
id: 'supersetbot',
onRateLimit: (retryAfter, options, octokit, retryCount) => {
const howManyRetries = 10;
octokit.log.warn(`Retry ${retryCount} out of ${howManyRetries} - retrying in ${retryAfter} seconds!`);
if (retryCount < howManyRetries) {
return true;
}
return false;
},
onSecondaryRateLimit: (retryAfter, options, octokit) => {
octokit.log.warn(`SecondaryRateLimit detected for request ${options.method} ${options.url}`);
},
},
});
this.syncLabels = this.syncLabels.bind(this);
this.#userInTeamCache = new Map();
}
unPackRepo() {
const [owner, repo] = this.context.repo.split('/');
return { repo, owner };
}
async label(issueNumber, label, actor = null, verbose = false, dryRun = false) {
let hasPerm = true;
if (actor && Github.isLabelProtected(label)) {
hasPerm = await this.checkIfUserInTeam(actor, COMMITTER_TEAM, verbose);
}
if (hasPerm) {
const addLabelWrapped = this.context.commandWrapper({
func: this.octokit.rest.issues.addLabels,
successMsg: `label "${label}" added to issue ${issueNumber}`,
verbose,
dryRun,
});
await addLabelWrapped({
...this.unPackRepo(),
issue_number: issueNumber,
labels: [label],
});
}
}
async createComment(body) {
if (this.issueNumber) {
await this.octokit.rest.issues.createComment({
...this.unPackRepo(),
body,
issue_number: this.issueNumber,
});
}
}
async unlabel(issueNumber, label, actor = null, verbose = false, dryRun = false) {
let hasPerm = true;
if (actor && Github.isLabelProtected(label)) {
hasPerm = await this.checkIfUserInTeam(actor, COMMITTER_TEAM, verbose);
}
if (hasPerm) {
const removeLabelWrapped = this.context.commandWrapper({
func: this.octokit.rest.issues.removeLabel,
successMsg: `label "${label}" removed from issue ${issueNumber}`,
verbose,
dryRun,
});
await removeLabelWrapped({
...this.unPackRepo(),
issue_number: issueNumber,
name: label,
});
}
}
async assignOrgLabel(issueNumber, verbose = false, dryRun = false) {
const issue = await this.octokit.rest.issues.get({
...this.unPackRepo(),
issue_number: issueNumber,
});
const username = issue.data.user.login;
const orgs = await this.octokit.orgs.listForUser({ username });
const orgNames = orgs.data.map((v) => v.login);
// get list of matching github orgs
const matchingOrgs = orgNames.filter((org) => ORG_LIST.includes(org));
if (matchingOrgs.length) {
const wrapped = this.context.commandWrapper({
func: this.octokit.rest.issues.addLabels,
successMsg: `added label(s) ${matchingOrgs} to issue ${issueNumber}`,
errorMsg: "couldn't add labels to issue",
verbose,
dryRun,
});
await wrapped({
...this.unPackRepo(),
issue_number: issueNumber,
labels: matchingOrgs,
});
}
}
async searchMergedPRs({
query = '',
onlyUnlabeled = true,
verbose = false,
startPage = 0,
pages = 5,
}) {
// look for PRs
let q = `repo:${this.context.repo} is:merged ${query}`;
if (onlyUnlabeled) {
q = `${q} -label:"🏷️ bot"`;
}
if (verbose) {
this.context.log(`Query: ${q}`);
}
let prs = [];
for (let i = 0; i < pages; i += 1) {
if (verbose) {
this.context.log(`Fetching PRs to process page ${i + 1} out of ${pages}`);
}
// eslint-disable-next-line no-await-in-loop
const data = await this.octokit.search.issuesAndPullRequests({
q,
per_page: 100,
page: startPage + i,
});
prs = [...prs, ...data.data.items];
}
if (verbose) {
this.context.log(`Fetched ${prs.length}`);
}
return prs;
}
async syncLabels({
labels,
prId,
actor = null,
verbose = false,
dryRun = false,
existingLabels = null,
}) {
if (verbose) {
this.context.log(`[PR: ${prId}] - sync labels ${labels}`);
}
let hasPerm = true;
if (actor) {
hasPerm = await this.checkIfUserInTeam(actor, COMMITTER_TEAM, verbose);
}
if (!hasPerm) {
return;
}
let targetLabels = existingLabels;
if (targetLabels === null) {
// No labels have been passed as an array, so checking against GitHub
const resp = await this.octokit.issues.listLabelsOnIssue({
...this.unPackRepo(),
issue_number: prId,
});
targetLabels = resp.data.map((l) => l.name);
}
if (verbose) {
this.context.log(`[PR: ${prId}] - target release labels: ${labels}`);
this.context.log(`[PR: ${prId}] - existing labels on issue: ${existingLabels}`);
}
// Extract existing labels with the given prefixes
const prefixes = ['🚢', '🍒', '🎯', '🏷️'];
const existingPrefixLabels = targetLabels
.filter((label) => prefixes.some((s) => typeof label === 'string' && label.startsWith(s)));
// Labels to add
const labelsToAdd = labels.filter((label) => !existingPrefixLabels.includes(label));
if (verbose) {
this.context.log(`[PR: ${prId}] - labels to add: ${labelsToAdd}`);
}
// Labels to remove
const labelsToRemove = existingPrefixLabels.filter((label) => !labels.includes(label));
if (verbose) {
this.context.log(`[PR: ${prId}] - labels to remove: ${labelsToRemove}`);
}
// Add labels
if (labelsToAdd.length > 0 && !dryRun) {
await this.octokit.issues.addLabels({
...this.unPackRepo(),
issue_number: prId,
labels: labelsToAdd,
});
}
// Remove labels
if (labelsToRemove.length > 0 && !dryRun) {
await Promise.all(labelsToRemove.map((label) => this.octokit.issues.removeLabel({
...this.unPackRepo(),
issue_number: prId,
name: label,
})));
}
this.context.logSuccess(`synced labels for PR ${prId} with labels ${labels}`);
}
async checkIfUserInTeam(username, team, verbose = false) {
// Map keys are compared by reference, so use a string key for the cache
const cacheKey = `${username}/${team}`;
let isInTeam = this.#userInTeamCache.get(cacheKey);
if (isInTeam !== undefined) {
return isInTeam;
}
const [org, teamSlug] = team.split('/');
const wrapped = this.context.commandWrapper({
func: this.octokit.teams.getMembershipForUserInOrg,
errorMsg: `User "${username}" is not authorized to alter protected labels.`,
verbose,
});
const resp = await wrapped({
org,
team_slug: teamSlug,
username,
});
isInTeam = resp?.data?.state === 'active';
this.#userInTeamCache.set(cacheKey, isInTeam);
return isInTeam;
}
static isLabelProtected(label) {
return PROTECTED_LABEL_PATTERNS.some((pattern) => new RegExp(pattern).test(label));
}
}
export default Github;
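A usage sketch of a dry-run labeling call (assumes `GITHUB_TOKEN` is set; the issue number and label are hypothetical, and `context.repo` is wired manually here since `processOptions()` normally populates it):

```js
import Context from './context.js';
import Github from './github.js';

const context = new Context('CLI');
context.repo = 'apache/superset';
const github = new Github({ context, issueNumber: 666 });
// actor=null skips the protected-label check; dryRun=true performs no GitHub write
await github.label(666, 'need:review', null, true, true);
```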

.github/supersetbot/src/index.js vendored Normal file

@@ -0,0 +1,39 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import getCLI from './cli.js';
import Context from './context.js';
import Github from './github.js';
async function runCommandFromGithubAction(rawCommand) {
const context = new Context('GHA');
const cli = getCLI(context);
// issueNumber comes from the GHA environment so the done-comment can be posted
const github = new Github({ context, issueNumber: process.env.GITHUB_ISSUE_NUMBER });
// Make rawCommand look like argv
const cmd = rawCommand.trim().replace('@supersetbot', 'supersetbot');
const args = context.parseArgs(cmd);
await cli.parseAsync(['node', ...args]);
const msg = await context.onDone();
await github.createComment(msg);
}
export { runCommandFromGithubAction };

.github/supersetbot/src/index.test.js vendored Normal file

@@ -0,0 +1,51 @@
import { jest } from '@jest/globals';
import Context from './context.js';
import Github from './github.js';
import * as index from './index.js';
describe('runCommandFromGithubAction', () => {
const labelSpy = jest.spyOn(Github.prototype, 'label').mockImplementation(jest.fn());
// mocking some of the Context object
const onDoneSpy = jest.spyOn(Context.prototype, 'onDone');
const doneCommentSpy = jest.spyOn(Context.prototype, 'doneComment');
const parseArgsSpy = jest.spyOn(Context.prototype, 'parseArgs');
jest.spyOn(Github.prototype, 'createComment').mockImplementation(jest.fn());
let originalEnv;
afterEach(() => {
process.env = originalEnv;
});
beforeEach(() => {
jest.clearAllMocks();
originalEnv = process.env;
process.env.GITHUB_ISSUE_NUMBER = '666';
process.env.GITHUB_REPOSITORY = 'apache/superset';
});
it('should strip the command', async () => {
await index.runCommandFromGithubAction(' @supersetbot label test-label ');
expect(parseArgsSpy).toHaveBeenCalledWith('supersetbot label test-label');
await index.runCommandFromGithubAction(' \n @supersetbot label test-label \n \n \n');
expect(parseArgsSpy).toHaveBeenCalledWith('supersetbot label test-label');
await index.runCommandFromGithubAction(' \n \t@supersetbot label test-label \t \n \n\t \n');
expect(parseArgsSpy).toHaveBeenCalledWith('supersetbot label test-label');
});
it('should parse the raw command correctly and call commands.label and context.onDone', async () => {
await index.runCommandFromGithubAction('@supersetbot label test-label');
expect(labelSpy).toHaveBeenCalled();
expect(onDoneSpy).toHaveBeenCalled();
});
it('should generate a good comment message', async () => {
await index.runCommandFromGithubAction('@supersetbot label test-label');
const comment = doneCommentSpy.mock.results[0].value;
expect(comment).toContain('> `supersetbot label test-label`');
});
});

.github/supersetbot/src/metadata.js vendored Normal file

@@ -0,0 +1,35 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
export const ORG_LIST = [
'preset-io',
'airbnb',
'dropbox',
'lyft',
'Turing',
'Superset-Community-Partners',
'CybercentreCanada',
];
export const PROTECTED_LABEL_PATTERNS = [
'protected.*',
'released.*',
'hold.*',
'^v\\d+(\\.\\d+)*$',
'(🚢|🍒|🎯).*',
];
export const COMMITTER_TEAM = 'apache/superset-committers';

.github/supersetbot/src/supersetbot vendored Executable file

@@ -0,0 +1,27 @@
#!/usr/bin/env node
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import getCLI from './cli.js';
import Context from './context.js';
const envContext = new Context('CLI');
const cli = getCLI(envContext);
cli.parse();

.github/supersetbot/src/utils.js vendored Normal file

@@ -0,0 +1,78 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { spawn } from 'child_process';
import { readFile } from 'fs/promises';
import { fileURLToPath } from 'url';
import path from 'path';
const dirname = path.dirname(fileURLToPath(import.meta.url));
async function loadPackageJson() {
try {
const packageJsonPath = path.join(dirname, '../package.json');
const data = await readFile(packageJsonPath, 'utf8');
const packageJson = JSON.parse(data);
return packageJson;
} catch (error) {
console.error('Error reading package.json:', error);
return null;
}
}
export async function currentPackageVersion() {
const data = await loadPackageJson();
return data.version;
}
export function runShellCommand(command, raiseOnError = true) {
return new Promise((resolve, reject) => {
// Split the command string into an array of arguments
const args = command.split(/\s+/).filter((s) => !!s && s !== '\\');
const childProcess = spawn(args.shift(), args);
let stdoutData = '';
let stderrData = '';
// Capture stdout data
childProcess.stdout.on('data', (data) => {
stdoutData += data;
console.log(`stdout: ${data}`);
});
// Capture stderr data
childProcess.stderr.on('data', (data) => {
stderrData += data;
console.error(`stderr: ${data}`);
});
// Handle process exit
childProcess.on('close', (code) => {
if (code === 0) {
resolve(stdoutData);
} else {
const msg = `Command failed with code ${code}: ${stderrData}`;
if (raiseOnError) {
reject(new Error(msg));
} else {
console.error(msg);
process.exit(1);
}
}
});
});
}
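A short usage sketch of `runShellCommand` (illustration only): it resolves with the captured stdout and rejects when the command exits non-zero and `raiseOnError` is true.

```js
import { runShellCommand } from './utils.js';

const sha = (await runShellCommand('git rev-parse HEAD')).trim();
console.log(`HEAD is at ${sha}`);
```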


@@ -31,20 +31,6 @@ say() {
fi
}
# default command to run when the `run` input is empty
default-setup-command() {
apt-get-install
pip-upgrade
}
apt-get-install() {
say "::group::apt-get install dependencies"
sudo apt-get update && sudo apt-get install --yes \
libsasl2-dev \
libldap2-dev
say "::endgroup::"
}
pip-upgrade() {
say "::group::Upgrade pip"
pip install --upgrade pip


@@ -45,21 +45,24 @@ jobs:
build_preset: ["dev", "lean", "py310", "websocket", "dockerize"]
fail-fast: false
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
ref: ${{ github.ref }}
fetch-depth: 0
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Build Docker Image
- name: Setup Node Env
uses: actions/setup-node@v4
with:
node-version: '20'
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
- name: Setup supersetbot
uses: ./.github/actions/setup-supersetbot/
- name: Execute custom Node.js script
env:
DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}
DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
@@ -79,11 +82,10 @@ jobs:
cp /tmp/build_docker.py scripts/
EVENT="release"
fi
pip install click
# Make a multi-platform image
./scripts/build_docker.py \
${{ matrix.build_preset }} \
"$EVENT" \
--build_context_ref "$RELEASE" $FORCE_LATEST \
supersetbot docker \
--preset ${{ matrix.build_preset }} \
--context "$EVENT" \
--context-ref "$RELEASE" $FORCE_LATEST \
--platform "linux/arm64" \
--platform "linux/amd64"


@@ -1,4 +1,4 @@
name: Docker
name: Build & publish docker images
on:
push:
@@ -33,11 +33,11 @@ jobs:
matrix:
build_preset: ${{fromJson(needs.setup_matrix.outputs.matrix_config)}}
fail-fast: false
env:
DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}
DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
persist-credentials: false
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
@@ -45,24 +45,38 @@
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Try to login to DockerHub
continue-on-error: true
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Setup Node Env
uses: actions/setup-node@v4
with:
node-version: '20'
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
persist-credentials: false
- name: Setup supersetbot
uses: ./.github/actions/setup-supersetbot/
- name: Build Docker Image
shell: bash
env:
DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}
DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
run: |
pip install click
# Single platform builds in pull_request context to speed things up
if [ "${{ github.event_name }}" = "push" ]; then
./scripts/build_docker.py \
${{ matrix.build_preset }} \
${{ github.event_name }} \
--build_context_ref "$RELEASE" $FORCE_LATEST \
--platform "linux/arm64" \
--platform "linux/amd64"
PLATFORM_ARG="--platform linux/arm64 --platform linux/amd64"
elif [ "${{ github.event_name }}" = "pull_request" ]; then
./scripts/build_docker.py \
${{ matrix.build_preset }} \
${{ github.event_name }} \
--build_context_ref "$RELEASE" $FORCE_LATEST \
--platform "linux/amd64"
PLATFORM_ARG="--platform linux/amd64"
fi
supersetbot docker \
--preset ${{ matrix.build_preset }} \
--context "$EVENT" \
--context-ref "$RELEASE" $FORCE_LATEST \
$PLATFORM_ARG

.github/workflows/issue_creation.yml vendored Normal file

@@ -0,0 +1,32 @@
name: supersetbot orglabel based on author
on:
issues:
types: [opened, edited]
pull_request:
types: [opened, edited]
jobs:
supersetbot-orglabel:
runs-on: ubuntu-latest
steps:
- name: Setup Node Env
uses: actions/setup-node@v4
with:
node-version: '20'
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
- name: Setup supersetbot
uses: ./.github/actions/setup-supersetbot/
- name: Execute custom Node.js script
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
# Label the issue with the appropriate org using supersetbot
# - this requires the author to be publicly associated with their org
# - and for the org to be listed in `supersetbot/src/metadata.js`
supersetbot orglabel --issue ${{ github.event.number }} --repo ${{ github.repository }}


@@ -52,3 +52,33 @@ jobs:
run: |
echo "This is a no-op step for python-lint to ensure a successful status."
exit 0
test-postgres-hive:
strategy:
matrix:
python-version: ["3.9"]
runs-on: ubuntu-latest
steps:
- name: No-op for test-postgres-hive
run: |
echo "This is a no-op step for test-postgres-hive to ensure a successful status when skipped."
exit 0
test-postgres-presto:
strategy:
matrix:
python-version: ["3.9"]
runs-on: ubuntu-latest
steps:
- name: No-op for test-postgres-presto
run: |
echo "This is a no-op step for test-postgres-presto to ensure a successful status when skipped."
exit 0
unit-tests:
strategy:
matrix:
python-version: ["3.9"]
runs-on: ubuntu-latest
steps:
- name: No-op for unit-tests
run: |
echo "This is a no-op step for unit-tests to ensure a successful status when skipped."
exit 0


@@ -26,24 +26,9 @@ jobs:
persist-credentials: false
submodules: recursive
- name: Setup Python
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
cache: "pip"
cache-dependency-path: |
requirements/base.txt
requirements/integration.txt
- name: Install dependencies
uses: ./.github/actions/cached-dependencies
with:
run: |
apt-get-install
pip-upgrade
pip install wheel
pip install -r requirements/base.txt
pip install -r requirements/integration.txt
# Add brew to the path - see https://github.com/actions/runner-images/issues/6283
uses: ./.github/actions/setup-backend/
- name: Enable brew and helm-docs
# Add brew to the path - see https://github.com/actions/runner-images/issues/6283
run: |
echo "/home/linuxbrew/.linuxbrew/bin:/home/linuxbrew/.linuxbrew/sbin" >> $GITHUB_PATH
eval "$(/home/linuxbrew/.linuxbrew/bin/brew shellenv)"


@@ -58,23 +58,7 @@ jobs:
submodules: recursive
ref: master
- name: Setup Python
uses: actions/setup-python@v5
with:
python-version: "3.9"
- name: OS dependencies
uses: ./.github/actions/cached-dependencies
with:
run: apt-get-install
- name: Install python dependencies
uses: ./.github/actions/cached-dependencies
with:
run: |
pip-upgrade
pip install -r requirements/testing.txt
- name: Setup postgres
uses: ./.github/actions/cached-dependencies
with:
run: setup-postgres
uses: ./.github/actions/setup-backend/
- name: Import test data
uses: ./.github/actions/cached-dependencies
with:
@@ -91,6 +75,11 @@ jobs:
uses: ./.github/actions/cached-dependencies
with:
run: build-instrumented-assets
- name: Setup Postgres
if: steps.check.outcome == 'failure'
uses: ./.github/actions/cached-dependencies
with:
run: setup-postgres
- name: Install cypress
uses: ./.github/actions/cached-dependencies
with:


@@ -52,22 +52,13 @@ jobs:
continue-on-error: true
run: ./scripts/ci_check_no_file_changes.sh python
- name: Setup Python
uses: ./.github/actions/setup-backend/
if: steps.check.outcome == 'failure'
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
cache: "pip"
cache-dependency-path: "requirements/testing.txt"
- name: Install dependencies
- name: Setup Postgres
if: steps.check.outcome == 'failure'
uses: ./.github/actions/cached-dependencies
with:
run: |
apt-get-install
pip-upgrade
pip install wheel
pip install -r requirements/testing.txt
setup-postgres
run: setup-postgres
- name: superset init
if: steps.check.outcome == 'failure'
run: |


@@ -68,22 +68,8 @@ jobs:
continue-on-error: true
run: ./scripts/ci_check_no_file_changes.sh python frontend
- name: Setup Python
uses: ./.github/actions/setup-backend/
if: steps.check.outcome == 'failure'
uses: actions/setup-python@v5
with:
python-version: "3.9"
- name: OS dependencies
if: steps.check.outcome == 'failure'
uses: ./.github/actions/cached-dependencies
with:
run: apt-get-install
- name: Install python dependencies
if: steps.check.outcome == 'failure'
uses: ./.github/actions/cached-dependencies
with:
run: |
pip-upgrade
pip install -r requirements/testing.txt
- name: Setup postgres
if: steps.check.outcome == 'failure'
uses: ./.github/actions/cached-dependencies


@@ -51,21 +51,15 @@ jobs:
continue-on-error: true
run: ./scripts/ci_check_no_file_changes.sh python
- name: Setup Python
uses: ./.github/actions/setup-backend/
if: steps.check.outcome == 'failure'
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
cache: "pip"
cache-dependency-path: "requirements/testing.txt"
- name: Install dependencies
- name: Setup MySQL
if: steps.check.outcome == 'failure'
uses: ./.github/actions/cached-dependencies
with:
run: |
apt-get-install
pip-upgrade
pip install wheel
pip install -r requirements/testing.txt
setup-mysql
- name: Run celery
if: steps.check.outcome == 'failure'
@@ -78,7 +72,6 @@
if: steps.check.outcome == 'failure'
run: |
bash .github/workflows/codecov.sh -c -F python -F mysql
test-postgres:
runs-on: ubuntu-20.04
strategy:
@@ -117,21 +110,15 @@ jobs:
continue-on-error: true
run: ./scripts/ci_check_no_file_changes.sh python
- name: Setup Python
uses: ./.github/actions/setup-backend/
if: steps.check.outcome == 'failure'
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
cache: "pip"
cache-dependency-path: "requirements/testing.txt"
- name: Install dependencies
- name: Setup Postgres
if: steps.check.outcome == 'failure'
uses: ./.github/actions/cached-dependencies
with:
run: |
apt-get-install
pip-upgrade
pip install wheel
pip install -r requirements/testing.txt
setup-postgres
- name: Run celery
if: steps.check.outcome == 'failure'
@@ -177,21 +164,16 @@ jobs:
continue-on-error: true
run: ./scripts/ci_check_no_file_changes.sh python
- name: Setup Python
uses: ./.github/actions/setup-backend/
if: steps.check.outcome == 'failure'
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
cache: "pip"
cache-dependency-path: "requirements/testing.txt"
- name: Install dependencies
if: steps.check.outcome == 'failure'
uses: ./.github/actions/cached-dependencies
with:
run: |
apt-get-install
pip-upgrade
pip install wheel
pip install -r requirements/testing.txt
# sqlite needs this working directory
mkdir ${{ github.workspace }}/.temp
- name: Run celery
if: steps.check.outcome == 'failure'


@@ -38,21 +38,10 @@ jobs:
continue-on-error: true
run: ./scripts/ci_check_no_file_changes.sh python
- name: Setup Python
uses: ./.github/actions/setup-backend/
if: steps.check.outcome == 'failure'
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
cache: "pip"
cache-dependency-path: "requirements/testing.txt"
- name: Install dependencies
if: steps.check.outcome == 'failure'
uses: ./.github/actions/cached-dependencies
with:
run: |
apt-get-install
pip-upgrade
pip install wheel
pip install -r requirements/testing.txt
- name: pylint
if: steps.check.outcome == 'failure'
# `-j 0` run Pylint in parallel
@@ -70,18 +59,8 @@
persist-credentials: false
submodules: recursive
- name: Setup Python
uses: actions/setup-python@v5
uses: ./.github/actions/setup-backend/
with:
python-version: ${{ matrix.python-version }}
cache: "pip"
cache-dependency-path: "requirements/base.txt"
- name: Install dependencies
uses: ./.github/actions/cached-dependencies
with:
run: |
apt-get-install
pip-upgrade
pip install wheel
pip install -r requirements/base.txt
- name: Test babel extraction
run: flask fab babel-extract --target superset/translations --output superset/translations/messages.pot --config superset/translations/babel.cfg -k _,__,t,tn,tct


@@ -67,22 +67,13 @@ jobs:
continue-on-error: true
run: ./scripts/ci_check_no_file_changes.sh python
- name: Setup Python
uses: ./.github/actions/setup-backend/
if: steps.check.outcome == 'failure'
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
cache: "pip"
cache-dependency-path: "requirements/testing.txt"
- name: Install dependencies
- name: Setup Postgres
if: steps.check.outcome == 'failure'
uses: ./.github/actions/cached-dependencies
with:
run: |
apt-get-install
pip-upgrade
pip install wheel
pip install -r requirements/testing.txt
setup-postgres
run: setup-postgres
- name: Run celery
if: steps.check.outcome == 'failure'
run: celery --app=superset.tasks.celery_app:app worker -Ofair -c 2 &
@@ -144,22 +135,13 @@
if: steps.check.outcome == 'failure'
run: docker compose -f scripts/databases/hive/docker-compose.yml up -d
- name: Setup Python
uses: ./.github/actions/setup-backend/
if: steps.check.outcome == 'failure'
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
cache: "pip"
cache-dependency-path: "requirements/testing.txt"
- name: Install dependencies
- name: Setup Postgres
if: steps.check.outcome == 'failure'
uses: ./.github/actions/cached-dependencies
with:
run: |
apt-get-install
pip-upgrade
pip install wheel
pip install -r requirements/testing.txt
setup-postgres
run: setup-postgres
- name: Run celery
if: steps.check.outcome == 'failure'
run: celery --app=superset.tasks.celery_app:app worker -Ofair -c 2 &


@@ -46,27 +46,15 @@ jobs:
continue-on-error: true
run: ./scripts/ci_check_no_file_changes.sh python
- name: Setup Python
uses: ./.github/actions/setup-backend/
if: steps.check.outcome == 'failure'
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
cache: "pip"
cache-dependency-path: "requirements/testing.txt"
# TODO: separated requirements.txt file just for unit tests
- name: Install dependencies
if: steps.check.outcome == 'failure'
uses: ./.github/actions/cached-dependencies
with:
run: |
apt-get-install
pip-upgrade
pip install wheel
pip install -r requirements/testing.txt
mkdir ${{ github.workspace }}/.temp
- name: Python unit tests
if: steps.check.outcome == 'failure'
env:
SUPERSET_TESTENV: true
SUPERSET_SECRET_KEY: not-a-secret
run: |
pytest --durations-min=0.5 --cov-report= --cov=superset ./tests/common ./tests/unit_tests --cache-clear
- name: Upload code coverage


@@ -47,15 +47,8 @@
persist-credentials: false
submodules: recursive
- name: Setup Python
uses: actions/setup-python@v5
uses: ./.github/actions/setup-backend/
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
uses: ./.github/actions/cached-dependencies
with:
run: |
apt-get-install
pip-upgrade
pip install -r requirements/base.txt
- name: Test babel extraction
run: ./scripts/babel_update.sh

63
.github/workflows/supersetbot.yml vendored Normal file

@ -0,0 +1,63 @@
name: SupersetBot Workflow
on:
issue_comment:
types: [created, edited]
# Making the workflow testable since `issue_comment` only triggers on
# the default branch
workflow_dispatch:
inputs:
comment_body:
description: 'Comment Body'
required: true
type: string
jobs:
supersetbot:
runs-on: ubuntu-latest
if: >
github.event_name == 'workflow_dispatch' ||
(github.event_name == 'issue_comment' && contains(github.event.comment.body, '@supersetbot'))
steps:
- name: Quickly add thumbs up!
uses: actions/github-script@v5
with:
script: |
const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/')
await github.rest.reactions.createForIssueComment({
owner,
repo,
comment_id: ${{ github.event.comment.id }},
content: '+1'
});
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: '20'
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
- name: Setup supersetbot
uses: ./.github/actions/setup-supersetbot/
- name: Execute custom Node.js script
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_ACTOR: ${{ github.actor }}
GITHUB_REPOSITORY: ${{ github.repository }}
GITHUB_ISSUE_NUMBER: ${{ github.event.issue.number }}
COMMENT_BODY: ${{ github.event.comment.body }}
INPUT_COMMENT_BODY: ${{ github.event.inputs.comment_body }}
run: |
cat <<EOF > script.js
const run = async () => {
const { runCommandFromGithubAction } = await import('supersetbot');
const cmd = process.env.COMMENT_BODY || process.env.INPUT_COMMENT_BODY;
console.log("Executing: ", cmd);
await runCommandFromGithubAction(cmd);
};
run().catch(console.error);
EOF
node script.js
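Because `issue_comment` events only fire on the default branch, the `workflow_dispatch` trigger
above is what makes the workflow testable from a branch. A sketch of a manual trigger using the
GitHub CLI (assuming `gh` is installed and authenticated; the bot command shown is illustrative):
```bash
# Run the workflow manually; comment_body stands in for the GitHub comment
# that would normally drive the bot.
gh workflow run supersetbot.yml -f comment_body="@supersetbot version"
```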


@ -477,7 +477,7 @@ python3 -m venv venv # setup a python3 virtualenv
source venv/bin/activate
# Install external dependencies
pip install -r requirements/testing.txt
pip install -r requirements/development.txt
# Install Superset in editable (development) mode
pip install -e .
@ -531,7 +531,7 @@ If you add a new requirement or update an existing requirement (per the `install
```bash
$ python3 -m venv venv
$ source venv/bin/activate
$ python3 -m pip install -r requirements/integration.txt
$ python3 -m pip install -r requirements/development.txt
$ pip-compile-multi --no-upgrade
```
@ -749,7 +749,7 @@ The current status of the usability of each flag (stable vs testing, etc) can be
Superset uses Git pre-commit hooks courtesy of [pre-commit](https://pre-commit.com/). To install run the following:
```bash
pip3 install -r requirements/integration.txt
pip3 install -r requirements/development.txt
pre-commit install
```


@ -61,7 +61,7 @@ ENV LANG=C.UTF-8 \
SUPERSET_HOME="/app/superset_home" \
SUPERSET_PORT=8088
RUN mkdir -p ${PYTHONPATH} superset/static superset-frontend apache_superset.egg-info requirements \
RUN mkdir -p ${PYTHONPATH} superset/static requirements superset-frontend apache_superset.egg-info requirements \
&& useradd --user-group -d ${SUPERSET_HOME} -m --no-log-init --shell /bin/bash superset \
&& apt-get update -qq && apt-get install -yqq --no-install-recommends \
build-essential \
@ -79,11 +79,10 @@ RUN mkdir -p ${PYTHONPATH} superset/static superset-frontend apache_superset.egg
COPY --chown=superset:superset setup.py MANIFEST.in README.md ./
# setup.py uses the version information in package.json
COPY --chown=superset:superset superset-frontend/package.json superset-frontend/
RUN --mount=type=bind,target=./requirements/local.txt,src=./requirements/local.txt \
--mount=type=bind,target=./requirements/development.txt,src=./requirements/development.txt \
--mount=type=bind,target=./requirements/base.txt,src=./requirements/base.txt \
--mount=type=cache,target=/root/.cache/pip \
pip install -r requirements/local.txt
COPY --chown=superset:superset requirements/base.txt requirements/
RUN --mount=type=cache,target=/root/.cache/pip \
pip install --upgrade setuptools pip && \
pip install -r requirements/base.txt
COPY --chown=superset:superset --from=superset-node /app/superset/static/assets superset/static/assets
## Lastly, let's install superset itself
@ -127,10 +126,10 @@ RUN apt-get update -qq \
&& ln -s /opt/firefox/firefox /usr/local/bin/firefox \
&& apt-get autoremove -yqq --purge wget && rm -rf /var/[log,tmp]/* /tmp/* /var/lib/apt/lists/*
# Cache everything for dev purposes...
RUN --mount=type=bind,target=./requirements/base.txt,src=./requirements/base.txt \
--mount=type=bind,target=./requirements/docker.txt,src=./requirements/docker.txt \
--mount=type=cache,target=/root/.cache/pip \
pip install -r requirements/docker.txt
COPY --chown=superset:superset requirements/development.txt requirements/
RUN --mount=type=cache,target=/root/.cache/pip \
pip install -r requirements/development.txt
USER superset
######################################################################


@ -24,7 +24,7 @@ install: superset pre-commit
superset:
# Install external dependencies
pip install -r requirements/local.txt
pip install -r requirements/development.txt
# Install Superset in editable (development) mode
pip install -e .
@ -53,7 +53,7 @@ update: update-py update-js
update-py:
# Install external dependencies
pip install -r requirements/local.txt
pip install -r requirements/development.txt
# Install Superset in editable (development) mode
pip install -e .
@ -79,7 +79,7 @@ activate:
pre-commit:
# setup pre commit dependencies
pip3 install -r requirements/integration.txt
pip3 install -r requirements/development.txt
pre-commit install
format: py-format js-format


@ -23,11 +23,19 @@ This file documents any backwards-incompatible changes in Superset and
assists people when migrating to a new version.
## Next
- [27505](https://github.com/apache/superset/pull/27505): We simplified the files under the
`requirements/` folder. If you use these files for your builds you may want to double
check that your builds are not affected. `base.txt` should be the same as before, though
`development.txt` becomes a bigger set, incorporating the now defunct `local`, `testing`,
`integration`, and `docker` requirements files.
- [27119](https://github.com/apache/superset/pull/27119): Updates various database columns to use the `MediumText` type, potentially requiring a table lock on MySQL dbs or taking some time to complete on large deployments.
- [26450](https://github.com/apache/superset/pull/26450): Deprecates the `KV_STORE` feature flag and its related assets such as the API endpoint and `keyvalue` table. The main dependency of this feature is the `SHARE_QUERIES_VIA_KV_STORE` feature flag which allows sharing SQL Lab queries without the necessity of saving the query. Our intention is to use the permalink feature to implement this use case before 5.0 and that's why we are deprecating the feature flag now.
- [27434](https://github.com/apache/superset/pull/27434/files): DO NOT USE our docker-compose.*
files for production use cases! While we never really supported
or should have tried to support docker-compose for production use cases, we have now taken
an active stance against supporting it. See the PR for details.
### Breaking Changes
- [27130](https://github.com/apache/superset/pull/27130): Fixes the DELETE `/database/{id}/ssh_tunnel/`` endpoint to now correctly accept a database ID as a parameter, rather than an SSH tunnel ID.


@ -0,0 +1,101 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
x-superset-image: &superset-image apachesuperset.docker.scarf.sh/apache/superset:${TAG:-latest}
x-superset-depends-on: &superset-depends-on
- db
- redis
x-superset-volumes:
&superset-volumes # /app/pythonpath_docker will be appended to the PYTHONPATH in the final container
- ./docker:/app/docker
- superset_home:/app/superset_home
version: "3.7"
services:
redis:
image: redis:7
container_name: superset_cache
restart: unless-stopped
volumes:
- redis:/data
db:
env_file: docker/.env
image: postgres:15
container_name: superset_db
restart: unless-stopped
volumes:
- db_home:/var/lib/postgresql/data
- ./docker/docker-entrypoint-initdb.d:/docker-entrypoint-initdb.d
superset:
env_file: docker/.env
image: *superset-image
container_name: superset_app
command: ["/app/docker/docker-bootstrap.sh", "app-gunicorn"]
user: "root"
restart: unless-stopped
ports:
- 8088:8088
depends_on: *superset-depends-on
volumes: *superset-volumes
superset-init:
image: *superset-image
container_name: superset_init
command: ["/app/docker/docker-init.sh"]
env_file: docker/.env
depends_on: *superset-depends-on
user: "root"
volumes: *superset-volumes
healthcheck:
disable: true
superset-worker:
image: *superset-image
container_name: superset_worker
command: ["/app/docker/docker-bootstrap.sh", "worker"]
env_file: docker/.env
restart: unless-stopped
depends_on: *superset-depends-on
user: "root"
volumes: *superset-volumes
healthcheck:
test:
[
"CMD-SHELL",
"celery -A superset.tasks.celery_app:app inspect ping -d celery@$$HOSTNAME",
]
superset-worker-beat:
image: *superset-image
container_name: superset_worker_beat
command: ["/app/docker/docker-bootstrap.sh", "beat"]
env_file: docker/.env
restart: unless-stopped
depends_on: *superset-depends-on
user: "root"
volumes: *superset-volumes
healthcheck:
disable: true
volumes:
superset_home:
external: false
db_home:
external: false
redis:
external: false


@ -14,7 +14,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
x-superset-image: &superset-image apachesuperset.docker.scarf.sh/apache/superset:${TAG:-latest}
x-superset-depends-on: &superset-depends-on
- db
- redis
@ -23,7 +22,13 @@ x-superset-volumes:
- ./docker:/app/docker
- superset_home:/app/superset_home
version: "3.7"
x-common-build: &common-build
context: .
target: dev
cache_from:
- apache/superset-cache:3.9-slim-bookworm
version: "4.0"
services:
redis:
image: redis:7
@ -33,7 +38,7 @@ services:
- redis:/data
db:
env_file: docker/.env-non-dev
env_file: docker/.env
image: postgres:15
container_name: superset_db
restart: unless-stopped
@ -42,8 +47,9 @@ services:
- ./docker/docker-entrypoint-initdb.d:/docker-entrypoint-initdb.d
superset:
env_file: docker/.env-non-dev
image: *superset-image
env_file: docker/.env
build:
<<: *common-build
container_name: superset_app
command: ["/app/docker/docker-bootstrap.sh", "app-gunicorn"]
user: "root"
@ -54,10 +60,11 @@ services:
volumes: *superset-volumes
superset-init:
image: *superset-image
container_name: superset_init
build:
<<: *common-build
command: ["/app/docker/docker-init.sh"]
env_file: docker/.env-non-dev
env_file: docker/.env
depends_on: *superset-depends-on
user: "root"
volumes: *superset-volumes
@ -65,10 +72,11 @@ services:
disable: true
superset-worker:
image: *superset-image
build:
<<: *common-build
container_name: superset_worker
command: ["/app/docker/docker-bootstrap.sh", "worker"]
env_file: docker/.env-non-dev
env_file: docker/.env
restart: unless-stopped
depends_on: *superset-depends-on
user: "root"
@ -81,10 +89,11 @@ services:
]
superset-worker-beat:
image: *superset-image
build:
<<: *common-build
container_name: superset_worker_beat
command: ["/app/docker/docker-bootstrap.sh", "beat"]
env_file: docker/.env-non-dev
env_file: docker/.env
restart: unless-stopped
depends_on: *superset-depends-on
user: "root"


@ -14,7 +14,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
x-superset-image: &superset-image apachesuperset.docker.scarf.sh/apache/superset:${TAG:-master-dev}
x-superset-user: &superset-user root
x-superset-depends-on: &superset-depends-on
- db
@ -27,7 +26,13 @@ x-superset-volumes: &superset-volumes
- superset_home:/app/superset_home
- ./tests:/app/tests
version: "3.7"
x-common-build: &common-build
context: .
target: dev
cache_from:
- apache/superset-cache:3.9-slim-bookworm
version: "4.0"
services:
nginx:
image: nginx:latest
@ -61,7 +66,8 @@ services:
superset:
env_file: docker/.env
image: *superset-image
build:
<<: *common-build
container_name: superset_app
command: ["/app/docker/docker-bootstrap.sh", "app"]
restart: unless-stopped
@ -106,7 +112,8 @@ services:
- REDIS_SSL=false
superset-init:
image: *superset-image
build:
<<: *common-build
container_name: superset_init
command: ["/app/docker/docker-init.sh"]
env_file: docker/.env
@ -120,16 +127,21 @@ services:
superset-node:
image: node:16
environment:
# set this to false if you have perf issues running `npm i; npm run dev` in docker
# if you do so, you have to run these manually on the host, which should perform better!
BUILD_SUPERSET_FRONTEND_IN_DOCKER: ${BUILD_SUPERSET_FRONTEND_IN_DOCKER:-true}
SCARF_ANALYTICS: "${SCARF_ANALYTICS}"
PUPPETEER_SKIP_CHROMIUM_DOWNLOAD: ${BUILD_SUPERSET_FRONTEND_IN_DOCKER:-false}
container_name: superset_node
command: ["/app/docker/docker-frontend.sh"]
env_file: docker/.env
depends_on: *superset-depends-on
environment:
SCARF_ANALYTICS: "${SCARF_ANALYTICS}"
volumes: *superset-volumes
superset-worker:
image: *superset-image
build:
<<: *common-build
container_name: superset_worker
command: ["/app/docker/docker-bootstrap.sh", "worker"]
env_file: docker/.env
@ -146,7 +158,8 @@ services:
# mem_reservation: 128M
superset-worker-beat:
image: *superset-image
build:
<<: *common-build
container_name: superset_worker_beat
command: ["/app/docker/docker-bootstrap.sh", "beat"]
env_file: docker/.env
@ -158,7 +171,8 @@ services:
disable: true
superset-tests-worker:
image: *superset-image
build:
<<: *common-build
container_name: superset_tests_worker
command: ["/app/docker/docker-bootstrap.sh", "worker"]
env_file: docker/.env


@ -51,3 +51,5 @@ SUPERSET_LOAD_EXAMPLES=yes
CYPRESS_CONFIG=false
SUPERSET_PORT=8088
MAPBOX_API_KEY=''
SUPERSET_SECRET_KEY=TEST_NON_DEV_SECRET


@ -1,53 +0,0 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
COMPOSE_PROJECT_NAME=superset
# database configurations (do not modify)
DATABASE_DB=superset
DATABASE_HOST=db
DATABASE_PASSWORD=superset
DATABASE_USER=superset
DATABASE_PORT=5432
DATABASE_DIALECT=postgresql
EXAMPLES_DB=examples
EXAMPLES_HOST=db
EXAMPLES_USER=examples
EXAMPLES_PASSWORD=examples
EXAMPLES_PORT=5432
# database engine specific environment variables
# change the below if you prefer another database engine
POSTGRES_DB=superset
POSTGRES_USER=superset
POSTGRES_PASSWORD=superset
#MYSQL_DATABASE=superset
#MYSQL_USER=superset
#MYSQL_PASSWORD=superset
#MYSQL_RANDOM_ROOT_PASSWORD=yes
# Add the mapped in /app/pythonpath_docker which allows devs to override stuff
PYTHONPATH=/app/pythonpath:/app/docker/pythonpath_dev
REDIS_HOST=redis
REDIS_PORT=6379
SUPERSET_ENV=production
SUPERSET_LOAD_EXAMPLES=yes
SUPERSET_SECRET_KEY=TEST_NON_DEV_SECRET
CYPRESS_CONFIG=false
SUPERSET_PORT=8088
MAPBOX_API_KEY=''


@ -19,11 +19,17 @@ set -e
# Packages needed for puppeteer:
apt update
apt install -y chromium
if [ "$PUPPETEER_SKIP_CHROMIUM_DOWNLOAD" = "false" ]; then
apt install -y chromium
fi
cd /app/superset-frontend
npm install -f --no-optional --global webpack webpack-cli
npm install -f --no-optional
if [ "$BUILD_SUPERSET_FRONTEND_IN_DOCKER" = "true" ]; then
cd /app/superset-frontend
npm install -f --no-optional --global webpack webpack-cli
npm install -f --no-optional
echo "Running frontend"
npm run dev
echo "Running frontend"
npm run dev
else
echo "Skipping frontend build steps - YOU RUN IT MANUALLY ON THE HOST!"
fi


@ -10,7 +10,7 @@ version: 1
Superset uses Git pre-commit hooks courtesy of [pre-commit](https://pre-commit.com/). To install run the following:
```bash
pip3 install -r requirements/integration.txt
pip3 install -r requirements/development.txt
pre-commit install
```


@ -20,7 +20,7 @@ python3 -m venv venv # setup a python3 virtualenv
source venv/bin/activate
# Install external dependencies
pip install -r requirements/testing.txt
pip install -r requirements/development.txt
# Install Superset in editable (development) mode
pip install -e .
@ -75,7 +75,7 @@ If you add a new requirement or update an existing requirement (per the `install
```bash
$ python3 -m venv venv
$ source venv/bin/activate
$ python3 -m pip install -r requirements/integration.txt
$ python3 -m pip install -r requirements/development.txt
$ pip-compile-multi --no-upgrade
```


@ -89,7 +89,7 @@ SUPERSET_WEBSERVER_TIMEOUT = 60
### Why is the map not visible in the geospatial visualization?
You need to register a free account at [Mapbox.com](https://www.mapbox.com), obtain an API key, and add it
to **.env** and **.env-non-dev** at the key MAPBOX_API_KEY:
to **.env** at the key MAPBOX_API_KEY:
```
MAPBOX_API_KEY = "longstringofalphanumer1c"


@ -39,7 +39,7 @@ ROW_LIMIT = 5000
# and encrypting sensitive information on the database
# Make sure you are changing this key for your deployment with a strong key.
# Alternatively you can set it with `SUPERSET_SECRET_KEY` environment variable.
# You MUST set this for production environments or the server will not refuse
# You MUST set this for production environments or the server will refuse
# to start and you will see an error in the logs accordingly.
SECRET_KEY = 'YOUR_OWN_RANDOM_GENERATED_SECRET_KEY'


@ -1,15 +1,41 @@
---
title: Installing Locally Using Docker Compose
hide_title: true
sidebar_position: 1
sidebar_position: 3
version: 1
---
## Installing Superset Locally Using Docker Compose
## Using Docker Compose
The fastest way to try Superset locally is using Docker and Docker Compose on a Linux or Mac OSX
**DO NOT USE THIS FOR PRODUCTION!**
The fastest way to try Superset locally is using Docker Compose on a Linux or Mac OSX
computer. Superset does not have official support for Windows, so we have provided a VM workaround
below.
below. It's also the easiest way to launch a fully functioning **development environment** quickly.
:::caution
Since `docker-compose` is primarily designed to run a set of containers on **a single host**
and can't credibly support **high availability** as a result, we do not support nor recommend
using our `docker-compose` constructs to support production-type use-cases. For single host
environments, we recommend using [minikube](https://minikube.sigs.k8s.io/docs/start/) along with
our [installing on k8s](https://superset.apache.org/docs/installation/running-on-kubernetes)
documentation.
:::
Note that there are 3 major ways we support running docker-compose:
1. **docker-compose.yml:** for interactive development, where we mount your local folder with the
frontend/backend files that you can edit and experience the changes you
make in the app in real time
1. **docker-compose-non-dev.yml** where we just build a more immutable image based on the
local branch and get all the required images running. Changes in the local branch
at the time you fire this up will be reflected, but changes to the code
while `up` won't be reflected in the app
1. **docker-compose-image-tag.yml** where we fetch an image from docker-hub, say for the
`3.0.0` release, and fire it up so you can try it. Here what's in
the local branch has no effect on what's running; we just fetch and run
pre-built images from docker-hub
More on these three approaches after setting up the requirements for each; a quick preview of the
commands is sketched below.
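The three invocations look like this (the tag value is illustrative):
```bash
# 1. interactive development: your local folder is mounted into the containers
docker compose up
# 2. immutable image built from the current local branch
docker compose -f docker-compose-non-dev.yml up
# 3. pre-built image pulled from docker-hub for a given tag
TAG=3.0.0 docker compose -f docker-compose-image-tag.yml up
```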
### 1. Install a Docker Engine and Docker Compose
@ -31,12 +57,13 @@ part of the base Docker installation on Linux, once you have a working engine, f
**Windows**
Superset is not officially supported on Windows unfortunately. One option for Windows users to
try out Superset locally is to install an Ubuntu Desktop VM via
Superset is not officially supported on Windows unfortunately. One option for Windows users to try
out Superset locally is to install an Ubuntu Desktop VM via
[VirtualBox](https://www.virtualbox.org/) and proceed with the Docker on Linux instructions inside
of that VM. We recommend assigning at least 8GB of RAM to the virtual machine as well as
provisioning a hard drive of at least 40GB, so that there will be enough space for both the OS and
all of the required dependencies. Docker Desktop [recently added support for Windows Subsystem for Linux (WSL) 2](https://docs.docker.com/docker-for-windows/wsl/), which may be another option.
all of the required dependencies. Docker Desktop [recently added support for Windows Subsystem for
Linux (WSL) 2](https://docs.docker.com/docker-for-windows/wsl/), which may be another option.
### 2. Clone Superset's GitHub repository
@ -52,81 +79,109 @@ current directory.
### 3. Launch Superset Through Docker Compose
Navigate to the folder you created in step 1:
First, let's assume you're familiar with docker-compose mechanics. Here we'll refer generally
to `docker compose up` even though in some cases you may want to force a check for newer remote
images using `docker compose pull`, force a build with `docker compose build`, or force a build
on the latest base images using `docker compose build --pull`. In most cases though, the simple
`up` command should do just fine. Refer to the docker compose docs for more information on the topic.
```bash
cd superset
```
### Option #1 - for an interactive development environment
When working on the master branch, run the following commands to bring up `development` mode using `docker compose`:
```bash
docker compose up
```
:::tip
When running in development mode the `superset-node` container needs to finish building assets in order for the UI to render properly. If you would just like to try out Superset without making any code changes follow the steps documented for `production` or a specific version below.
When running in development mode the `superset-node`
container needs to finish building assets in order for the UI to render properly. If you would just
like to try out Superset without making any code changes follow the steps documented for
`production` or a specific version below.
:::
When working on master branch, run the following commands to run `production` mode using `docker compose`:
:::tip
By default, we mount the local superset-frontend folder here and run `npm install` as well
as `npm run dev` which triggers webpack to compile/bundle the frontend code. Depending
on your local setup, especially if you have less than 16GB of memory, it may be very slow to
perform those operations. In this case, we recommend you set the env var
`BUILD_SUPERSET_FRONTEND_IN_DOCKER` to `false`, and to run this locally instead in a terminal.
Simply trigger `npm i && npm run dev`, this should be MUCH faster.
:::
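A sketch of that faster path, assuming node/npm are installed on the host:
```bash
# Tell docker compose to skip the in-container frontend build...
export BUILD_SUPERSET_FRONTEND_IN_DOCKER=false
docker compose up
# ...then, in a separate terminal, build and watch the frontend on the host:
cd superset-frontend
npm i && npm run dev
```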
### Option #2 - build an immutable image from the local branch
```bash
docker compose -f docker-compose-non-dev.yml pull
docker compose -f docker-compose-non-dev.yml up
```
Alternatively, you can also run a specific version of Superset by first checking out
the branch/tag, and then starting `docker compose` with the `TAG` variable.
For example, to run the 3.0.0 version, run the following commands on Linux-based systems:
### Option #3 - pull and run a release image from docker-hub
```bash
git checkout 3.0.0
TAG=3.0.0 docker compose -f docker-compose-non-dev.yml pull
TAG=3.0.0 docker compose -f docker-compose-non-dev.yml up
export TAG=3.1.1
docker compose -f docker-compose-image-tag.yml up
```
If you are using Docker Desktop for Windows then run the following commands:
Here, various release tags, GitHub SHAs, and the latest `master` can be referenced via the `TAG` env var.
Refer to the docker-related documentation to learn more about the existing tags you can point to
on Docker Hub.
```bash
git checkout 3.0.0
set TAG=3.0.0
docker compose -f docker-compose-non-dev.yml pull
docker compose -f docker-compose-non-dev.yml up
```
## General tips & configuration
:::tip
Note that some configuration is mandatory for production instances of Superset. In particular, Superset will not start without a user-specified value of `SECRET_KEY` in a Superset config file or `SUPERSET_SECRET_KEY` as an [environment variable](https://github.com/apache/superset/blob/master/docker/.env-non-dev). Please see [Configuring Superset](/docs/installation/configuring-superset/) for more details.
:::
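One way to produce such a value (a sketch, assuming `openssl` is available; any sufficiently long
random string works) is shown below; put the result in your Superset config as `SECRET_KEY` or
expose it as `SUPERSET_SECRET_KEY`:
```bash
# Generate a strong random secret suitable for SECRET_KEY / SUPERSET_SECRET_KEY
openssl rand -base64 42
```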
:::caution
All of the content belonging to a Superset instance - charts, dashboards, users, etc. - is stored in its metadata database. In production, this database should be backed up.
The default installation with docker compose will store that data in a PostgreSQL database contained in a Docker [volume](https://docs.docker.com/storage/volumes/),
which is not backed up. To avoid risking data loss, either use a managed database for your metadata (recommended) or perform your own regular backups by extracting
and storing the contents of the default PostgreSQL database from its volume (here's an
[example of how to dump and restore](https://stackoverflow.com/questions/24718706/backup-restore-a-dockerized-postgresql-database)).
All of the content belonging to a Superset instance - charts, dashboards, users, etc. - is stored in
its metadata database. In production, this database should be backed up. The default installation
with docker compose will store that data in a PostgreSQL database contained in a Docker
[volume](https://docs.docker.com/storage/volumes/), which is not backed up.
Again **DO NOT USE THIS FOR PRODUCTION**
:::
You should see a wall of logging output from the containers being launched on your machine. Once
this output slows, you should have a running instance of Superset on your local machine! To
avoid the wall of text on future runs, add the `-d` option to the end of the `docker compose up` command.
this output slows, you should have a running instance of Superset on your local machine! To avoid
the wall of text on future runs, add the `-d` option to the end of the `docker compose up` command.
#### Configuring Docker Compose
#### Configuring Further
The following is for users who want to configure how Superset runs in Docker Compose; otherwise, you can skip to the next section.
The following is for users who want to configure how Superset runs in Docker Compose; otherwise, you
can skip to the next section.
You can install additional python packages and apply config overrides by following the steps mentioned in [docker/README.md](https://github.com/apache/superset/tree/master/docker#configuration)
You can install additional python packages and apply config overrides by following the steps
mentioned in [docker/README.md](https://github.com/apache/superset/tree/master/docker#configuration)
You can configure the Docker Compose environment variables for dev and non-dev mode with `docker/.env` and `docker/.env-non-dev` respectively. These environment files set the environment for most containers in the Docker Compose setup, and some variables affect multiple containers and others only single ones.
You can configure the Docker Compose environment variables for dev and non-dev mode with
`docker/.env`. This environment file sets the environment
for most containers in the Docker Compose setup, and some variables affect multiple containers and
others only single ones.
One important variable is `SUPERSET_LOAD_EXAMPLES` which determines whether the `superset_init` container will populate example data and visualizations into the metadata database. These examples are helpful for learning and testing out Superset but unnecessary for experienced users and production deployments. The loading process can sometimes take a few minutes and a good amount of CPU, so you may want to disable it on a resource-constrained device.
One important variable is `SUPERSET_LOAD_EXAMPLES` which determines whether the `superset_init`
container will populate example data and visualizations into the metadata database. These examples
are helpful for learning and testing out Superset but unnecessary for experienced users and
production deployments. The loading process can sometimes take a few minutes and a good amount of
CPU, so you may want to disable it on a resource-constrained device.
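For example, to skip loading the examples on a resource-constrained machine, you could flip the
variable in `docker/.env` before bringing the stack up (a sketch; the file ships with it set to `yes`):
```bash
# in docker/.env: skip populating example data and dashboards
SUPERSET_LOAD_EXAMPLES=no
```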
:::note
Users often want to connect to other databases from Superset. Currently, the easiest way to do this is to modify the `docker-compose-non-dev.yml` file and add your database as a service that the other services depend on (via `x-superset-depends-on`). Others have attempted to set `network_mode: host` on the Superset services, but these generally break the installation, because the configuration requires use of the Docker Compose DNS resolver for the service names. If you have a good solution for this, let us know!
Users often want to connect to other databases from Superset. Currently, the easiest way to
do this is to modify the `docker-compose-non-dev.yml` file and add your database as a service that
the other services depend on (via `x-superset-depends-on`). Others have attempted to set
`network_mode: host` on the Superset services, but these generally break the installation,
because the configuration requires use of the Docker Compose DNS resolver for the service names.
If you have a good solution for this, let us know!
:::
:::note
Superset uses [Scarf Gateway](https://about.scarf.sh/scarf-gateway) to collect telemetry data. Knowing the installation counts for different Superset versions informs the project's decisions about patching and long-term support. Scarf purges personally identifiable information (PII) and provides only aggregated statistics.
Superset uses [Scarf Gateway](https://about.scarf.sh/scarf-gateway) to collect telemetry
data. Knowing the installation counts for different Superset versions informs the project's
decisions about patching and long-term support. Scarf purges personally identifiable information
(PII) and provides only aggregated statistics.
To opt-out of this data collection for packages downloaded through the Scarf Gateway by your docker compose based installation, edit the `x-superset-image:` line in your `docker-compose.yml` and `docker-compose-non-dev.yml` files, replacing `apachesuperset.docker.scarf.sh/apache/superset` with `apache/superset` to pull the image directly from Docker Hub.
To opt-out of this data collection for packages downloaded through the Scarf Gateway by your docker
compose based installation, edit the `x-superset-image:` line in your `docker-compose.yml` and
`docker-compose-non-dev.yml` files, replacing `apachesuperset.docker.scarf.sh/apache/superset` with
`apache/superset` to pull the image directly from Docker Hub.
To disable the Scarf telemetry pixel, set the `SCARF_ANALYTICS` environment variable to `False` in your terminal and/or in your `docker/.env` and `docker/.env-non-dev` files.
To disable the Scarf telemetry pixel, set the `SCARF_ANALYTICS` environment variable to `False` in
your terminal and/or in your `docker/.env` file.
:::
### 4. Log in to Superset
@ -148,9 +203,32 @@ password: admin
### 5. Connecting Superset to your local database instance
When running Superset using `docker` or `docker compose` it runs in its own docker container, as if the Superset was running in a separate machine entirely. Therefore attempts to connect to your local database with the hostname `localhost` won't work as `localhost` refers to the docker container Superset is running in, and not your actual host machine. Fortunately, docker provides an easy way to access network resources in the host machine from inside a container, and we will leverage this capability to connect to our local database instance.
When running Superset using `docker` or `docker compose` it runs in its own docker container, as if
Superset were running on a separate machine entirely. Therefore attempts to connect to your local
database with the hostname `localhost` won't work as `localhost` refers to the docker container
Superset is running in, and not your actual host machine. Fortunately, docker provides an easy way
to access network resources in the host machine from inside a container, and we will leverage this
capability to connect to our local database instance.
Here the instructions are for connecting to postgresql (which is running on your host machine) from Superset (which is running in its docker container). Other databases may have slightly different configurations but gist would be same and boils down to 2 steps -
Here the instructions are for connecting to postgresql (which is running on your host machine) from
Superset (which is running in its docker container). Other databases may have slightly different
configurations but the gist is the same and boils down to 2 steps:
1. **(Mac users may skip this step)** Configuring the local postgresql/database instance to accept public incoming connections. By default, postgresql only allows incoming connections from `localhost` and under Docker, unless you use `--network=host`, `localhost` will refer to different endpoints on the host machine and in a docker container respectively. Allowing postgresql to accept connections from the Docker involves making one-line changes to the files `postgresql.conf` and `pg_hba.conf`; you can find helpful links tailored to your OS / PG version on the web easily for this task. For Docker it suffices to only whitelist IPs `172.0.0.0/8` instead of `*`, but in any case you are _warned_ that doing this in a production database _may_ have disastrous consequences as you are opening your database to the public internet.
2. Instead of `localhost`, try using `host.docker.internal` (Mac users, Ubuntu) or `172.18.0.1` (Linux users) as the hostname when attempting to connect to the database. This is a Docker internal detail -- what is happening is that, in Mac systems, Docker Desktop creates a dns entry for the hostname `host.docker.internal` which resolves to the correct address for the host machine, whereas in Linux this is not the case (at least by default). If neither of these 2 hostnames work then you may want to find the exact hostname you want to use, for that you can do `ifconfig` or `ip addr show` and look at the IP address of `docker0` interface that must have been created by Docker for you. Alternately if you don't even see the `docker0` interface try (if needed with sudo) `docker network inspect bridge` and see if there is an entry for `"Gateway"` and note the IP address.
1. **(Mac users may skip this step)** Configuring the local postgresql/database instance to accept
public incoming connections. By default, postgresql only allows incoming connections from
`localhost` and under Docker, unless you use `--network=host`, `localhost` will refer to different
endpoints on the host machine and in a docker container respectively. Allowing postgresql to accept
connections from Docker involves making one-line changes to the files `postgresql.conf` and
`pg_hba.conf`; you can find helpful links tailored to your OS / PG version on the web easily for
this task. For Docker it suffices to only whitelist IPs `172.0.0.0/8` instead of `*`, but in any
case you are _warned_ that doing this in a production database _may_ have disastrous consequences as
you are opening your database to the public internet.
2. Instead of `localhost`, try using `host.docker.internal` (Mac users, Ubuntu) or `172.18.0.1`
(Linux users) as the hostname when attempting to connect to the database (see the sketch after this
list). This is a Docker internal detail -- what is happening is that, in Mac systems, Docker Desktop
creates a dns entry for the hostname `host.docker.internal` which resolves to the correct address
for the host machine, whereas in Linux this is not the case (at least by default). If neither of
these 2 hostnames works then you may want to find the exact hostname you want to use; for that you
can do `ifconfig` or `ip addr show` and look at the IP address of the `docker0` interface that must
have been created by Docker for you. Alternatively, if you don't even see the `docker0` interface,
try (if needed with sudo) `docker network inspect bridge` and see if there is an entry for
`"Gateway"` and note the IP address.
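For step 2, here is a hypothetical SQLAlchemy URI you could enter in Superset's database connection
form; `myuser`, `mypassword`, and `mydb` are placeholders for your actual credentials and database name:
```
postgresql://myuser:mypassword@host.docker.internal:5432/mydb
```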


@ -1,19 +1,27 @@
---
title: Installing on Kubernetes
hide_title: true
sidebar_position: 3
sidebar_position: 1
version: 1
---
## Installing on Kubernetes
Running Superset on Kubernetes is supported with the provided [Helm](https://helm.sh/) chart found in the official [Superset helm repository](https://apache.github.io/superset/index.yaml).
Running Superset on Kubernetes is supported with the provided [Helm](https://helm.sh/) chart
found in the official [Superset helm repository](https://apache.github.io/superset/index.yaml).
### Prerequisites
- A Kubernetes cluster
- Helm installed
:::note
For simpler, single host environments, we recommend using
[minikube](https://minikube.sigs.k8s.io/docs/start/) which is easy to set up on many platforms
and works fantastically well with the Helm chart referenced here.
:::
### Running
1. Add the Superset helm repository
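A sketch of that step with the Helm CLI, assuming the chart repository root is the URL referenced
above minus `index.yaml`:
```bash
# Register the Superset chart repository and refresh the local chart index
helm repo add superset https://apache.github.io/superset
helm repo update
```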


@ -369,3 +369,17 @@ Since metrics are aggregations, the resulting SQL expression will be grouped by
```
SELECT * FROM {{ dataset(42, include_metrics=True, columns=["ds", "category"]) }} LIMIT 10
```
**Metrics**
The `{{ metric('metric_key', dataset_id) }}` macro can be used to retrieve the metric SQL syntax from a dataset. This can be useful for different purposes:
- Override the metric label at the chart level
- Combine multiple metrics in a calculation
- Retrieve a metric's syntax in SQL Lab
- Re-use metrics across datasets
This macro avoids copy/paste, allowing users to centralize the metric definition in the dataset layer.
The `dataset_id` parameter is optional; if not provided, Superset will use the current dataset from context (for example, when using this macro in the Chart Builder, by default the `metric_key` will be searched for in the dataset powering the chart).
The parameter can be used in SQL Lab, or when fetching a metric from another dataset.
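For instance (a sketch: `count` is a hypothetical metric key defined on dataset `42`, and
`some_table` a hypothetical table), the macro expands to the metric's SQL expression:
```
SELECT {{ metric('count', 42) }}
FROM some_table
LIMIT 10
```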


@ -15,7 +15,6 @@ Here are a **few different ways you can get started with Superset**:
- Try a [Quickstart deployment](/docs/quickstart), which runs a single Docker container
- Install Superset [from PyPI](/docs/installation/installing-superset-from-pypi/)
- Deploy Superset [using Docker Compose](/docs/installation/installing-superset-using-docker-compose)
- Deploy Superset [with Kubernetes](/docs/installation/running-on-kubernetes)
- Download the [source code from Apache Foundation's website](https://dist.apache.org/repos/dist/release/superset/)


@ -16,39 +16,107 @@ code is less ambiguous and is unique to all regions in the world.
## Included Maps
The current list of countries can be found in the src
[legacy-plugin-chart-country-map/src/countries.ts](https://github.com/apache/superset/blob/master/superset-frontend/plugins/legacy-plugin-chart-country-map/src/countries.ts)
The Country Maps visualization already ships with the maps for the following countries:
- Belgium
- Brazil
- Bulgaria
- Canada
- China
- Egypt
- France
- Germany
- India
- Iran
- Italy
- Japan
- Korea
- Liechtenstein
- Morocco
- Myanmar
- Netherlands
- Portugal
- Russia
- Singapore
- Spain
- Switzerland
- Syria
- Thailand
- Timorleste
- Turkey
- UK
- Ukraine
- Uruguay
- USA
- Zambia
- Afghanistan
- Albania
- Algeria
- Argentina
- Australia
- Austria
- Belgium
- Bolivia
- Brazil
- Bulgaria
- Burundi
- Canada
- Chile
- China
- Colombia
- Costa Rica
- Cuba
- Cyprus
- Denmark
- Dominican Republic
- Ecuador
- Egypt
- El_salvador
- Estonia
- Ethiopia
- France
- France Regions
- Finland
- Germany
- Guatemala
- Haiti
- Honduras
- Iceland
- India
- Indonesia
- Iran
- Italy
- Italy Regions
- Japan
- Jordan
- Kazakhstan
- Kenya
- Korea
- Kuwait
- Kyrgyzstan
- Latvia
- Liechtenstein
- Lithuania
- Malaysia
- Mexico
- Morocco
- Myanmar
- Netherlands
- Nicaragua
- Nigeria
- Norway
- Oman
- Pakistan
- Panama
- Papua New Guinea
- Paraguay
- Peru
- Philippines
- Portugal
- Poland
- Puerto_rico
- Qatar
- Russia
- Rwanda
- Saint Barthelemy
- Saint Martin
- Saudi Arabia
- Singapore
- Slovenia
- Spain
- Sri Lanka
- Sweden
- Switzerland
- Syria
- Tajikistan
- Tanzania
- Thailand
- Timorleste
- Turkey
- Turkey Regions
- Turkmenistan
- Uganda
- Uk
- Ukraine
- United Arab Emirates
- Uruguay
- USA
- Uzbekistan
- Venezuela
- Vietnam
- Zambia
## Adding a New Country


@ -53,7 +53,7 @@ $ docker exec -it superset superset db upgrade &&
:::tip
This step can take some time. While you wait, feel free to join the official Slack channel to check for new releases,
ask questions, and engage with the community.
[Click here to join.](https://apache-superset.slack.com/join/shared_invite/zt-26ol9ge4y-kzUnSo9inRepOay0ufBTsA#/shared-invite/email)
[Click here to join.](http://bit.ly/join-superset-slack)
:::
### 5. Start using Superset


@ -49,7 +49,7 @@
"@tsconfig/docusaurus": "^2.0.2",
"@types/react": "^17.0.42",
"typescript": "^5.3.3",
"webpack": "^5.90.1"
"webpack": "^5.91.0"
},
"browserslist": {
"production": [


@ -22,8 +22,8 @@ import DocItem from '@theme-original/DocItem';
const EditPageLink = styled('a')`
position: fixed;
bottom: 20px;
right: 20px;
bottom: 40px;
right: 10px;
padding: 1rem;
padding-left: 4rem;
background-color: #444;
@ -37,6 +37,7 @@ const EditPageLink = styled('a')`
box-shadow: 0 0 0 0 rgba(0,0,0,0); /* Smooth transition for hover effect */
scale: .9;
transition: all 0.3s;
transform-origin: bottom right;
&:hover {
background-color: #333;


@ -3613,6 +3613,9 @@
"disable_data_preview": {
"type": "boolean"
},
"disable_drill_to_detail": {
"type": "boolean"
},
"explore_database_id": {
"type": "integer"
},


@ -3059,10 +3059,10 @@
resolved "https://registry.yarnpkg.com/@ungap/structured-clone/-/structured-clone-1.2.0.tgz#756641adb587851b5ccb3e095daf27ae581c8406"
integrity sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ==
"@webassemblyjs/ast@1.11.6", "@webassemblyjs/ast@^1.11.5":
version "1.11.6"
resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.11.6.tgz#db046555d3c413f8966ca50a95176a0e2c642e24"
integrity sha512-IN1xI7PwOvLPgjcf180gC1bqn3q/QaOCwYUahIOhbYUu8KA/3tw2RT/T0Gidi1l7Hhj5D/INhJxiICObqpMu4Q==
"@webassemblyjs/ast@1.12.1", "@webassemblyjs/ast@^1.12.1":
version "1.12.1"
resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.12.1.tgz#bb16a0e8b1914f979f45864c23819cc3e3f0d4bb"
integrity sha512-EKfMUOPRRUTy5UII4qJDGPpqfwjOmZ5jeGFwid9mnoqIFK+e0vqoi1qH56JpmZSzEL53jKnNzScdmftJyG5xWg==
dependencies:
"@webassemblyjs/helper-numbers" "1.11.6"
"@webassemblyjs/helper-wasm-bytecode" "1.11.6"
@ -3077,10 +3077,10 @@
resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.6.tgz#6132f68c4acd59dcd141c44b18cbebbd9f2fa768"
integrity sha512-o0YkoP4pVu4rN8aTJgAyj9hC2Sv5UlkzCHhxqWj8butaLvnpdc2jOwh4ewE6CX0txSfLn/UYaV/pheS2Txg//Q==
"@webassemblyjs/helper-buffer@1.11.6":
version "1.11.6"
resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-buffer/-/helper-buffer-1.11.6.tgz#b66d73c43e296fd5e88006f18524feb0f2c7c093"
integrity sha512-z3nFzdcp1mb8nEOFFk8DrYLpHvhKC3grJD2ardfKOzmbmJvEf/tPIqCY+sNcwZIY8ZD7IkB2l7/pqhUhqm7hLA==
"@webassemblyjs/helper-buffer@1.12.1":
version "1.12.1"
resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-buffer/-/helper-buffer-1.12.1.tgz#6df20d272ea5439bf20ab3492b7fb70e9bfcb3f6"
integrity sha512-nzJwQw99DNDKr9BVCOZcLuJJUlqkJh+kVzVl6Fmq/tI5ZtEyWT1KZMyOXltXLZJmDtvLCDgwsyrkohEtopTXCw==
"@webassemblyjs/helper-numbers@1.11.6":
version "1.11.6"
@ -3096,15 +3096,15 @@
resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.6.tgz#bb2ebdb3b83aa26d9baad4c46d4315283acd51e9"
integrity sha512-sFFHKwcmBprO9e7Icf0+gddyWYDViL8bpPjJJl0WHxCdETktXdmtWLGVzoHbqUcY4Be1LkNfwTmXOJUFZYSJdA==
"@webassemblyjs/helper-wasm-section@1.11.6":
version "1.11.6"
resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.11.6.tgz#ff97f3863c55ee7f580fd5c41a381e9def4aa577"
integrity sha512-LPpZbSOwTpEC2cgn4hTydySy1Ke+XEu+ETXuoyvuyezHO3Kjdu90KK95Sh9xTbmjrCsUwvWwCOQQNta37VrS9g==
"@webassemblyjs/helper-wasm-section@1.12.1":
version "1.12.1"
resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.12.1.tgz#3da623233ae1a60409b509a52ade9bc22a37f7bf"
integrity sha512-Jif4vfB6FJlUlSbgEMHUyk1j234GTNG9dBJ4XJdOySoj518Xj0oGsNi59cUQF4RRMS9ouBUxDDdyBVfPTypa5g==
dependencies:
"@webassemblyjs/ast" "1.11.6"
"@webassemblyjs/helper-buffer" "1.11.6"
"@webassemblyjs/ast" "1.12.1"
"@webassemblyjs/helper-buffer" "1.12.1"
"@webassemblyjs/helper-wasm-bytecode" "1.11.6"
"@webassemblyjs/wasm-gen" "1.11.6"
"@webassemblyjs/wasm-gen" "1.12.1"
"@webassemblyjs/ieee754@1.11.6":
version "1.11.6"
@ -3125,59 +3125,59 @@
resolved "https://registry.yarnpkg.com/@webassemblyjs/utf8/-/utf8-1.11.6.tgz#90f8bc34c561595fe156603be7253cdbcd0fab5a"
integrity sha512-vtXf2wTQ3+up9Zsg8sa2yWiQpzSsMyXj0qViVP6xKGCUT8p8YJ6HqI7l5eCnWx1T/FYdsv07HQs2wTFbbof/RA==
"@webassemblyjs/wasm-edit@^1.11.5":
version "1.11.6"
resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.11.6.tgz#c72fa8220524c9b416249f3d94c2958dfe70ceab"
integrity sha512-Ybn2I6fnfIGuCR+Faaz7YcvtBKxvoLV3Lebn1tM4o/IAJzmi9AWYIPWpyBfU8cC+JxAO57bk4+zdsTjJR+VTOw==
"@webassemblyjs/wasm-edit@^1.12.1":
version "1.12.1"
resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.12.1.tgz#9f9f3ff52a14c980939be0ef9d5df9ebc678ae3b"
integrity sha512-1DuwbVvADvS5mGnXbE+c9NfA8QRcZ6iKquqjjmR10k6o+zzsRVesil54DKexiowcFCPdr/Q0qaMgB01+SQ1u6g==
dependencies:
"@webassemblyjs/ast" "1.11.6"
"@webassemblyjs/helper-buffer" "1.11.6"
"@webassemblyjs/ast" "1.12.1"
"@webassemblyjs/helper-buffer" "1.12.1"
"@webassemblyjs/helper-wasm-bytecode" "1.11.6"
"@webassemblyjs/helper-wasm-section" "1.11.6"
"@webassemblyjs/wasm-gen" "1.11.6"
"@webassemblyjs/wasm-opt" "1.11.6"
"@webassemblyjs/wasm-parser" "1.11.6"
"@webassemblyjs/wast-printer" "1.11.6"
"@webassemblyjs/helper-wasm-section" "1.12.1"
"@webassemblyjs/wasm-gen" "1.12.1"
"@webassemblyjs/wasm-opt" "1.12.1"
"@webassemblyjs/wasm-parser" "1.12.1"
"@webassemblyjs/wast-printer" "1.12.1"
"@webassemblyjs/wasm-gen@1.11.6":
version "1.11.6"
resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-gen/-/wasm-gen-1.11.6.tgz#fb5283e0e8b4551cc4e9c3c0d7184a65faf7c268"
integrity sha512-3XOqkZP/y6B4F0PBAXvI1/bky7GryoogUtfwExeP/v7Nzwo1QLcq5oQmpKlftZLbT+ERUOAZVQjuNVak6UXjPA==
"@webassemblyjs/wasm-gen@1.12.1":
version "1.12.1"
resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-gen/-/wasm-gen-1.12.1.tgz#a6520601da1b5700448273666a71ad0a45d78547"
integrity sha512-TDq4Ojh9fcohAw6OIMXqiIcTq5KUXTGRkVxbSo1hQnSy6lAM5GSdfwWeSxpAo0YzgsgF182E/U0mDNhuA0tW7w==
dependencies:
"@webassemblyjs/ast" "1.11.6"
"@webassemblyjs/ast" "1.12.1"
"@webassemblyjs/helper-wasm-bytecode" "1.11.6"
"@webassemblyjs/ieee754" "1.11.6"
"@webassemblyjs/leb128" "1.11.6"
"@webassemblyjs/utf8" "1.11.6"
"@webassemblyjs/wasm-opt@1.11.6":
version "1.11.6"
resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-opt/-/wasm-opt-1.11.6.tgz#d9a22d651248422ca498b09aa3232a81041487c2"
integrity sha512-cOrKuLRE7PCe6AsOVl7WasYf3wbSo4CeOk6PkrjS7g57MFfVUF9u6ysQBBODX0LdgSvQqRiGz3CXvIDKcPNy4g==
"@webassemblyjs/wasm-opt@1.12.1":
version "1.12.1"
resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-opt/-/wasm-opt-1.12.1.tgz#9e6e81475dfcfb62dab574ac2dda38226c232bc5"
integrity sha512-Jg99j/2gG2iaz3hijw857AVYekZe2SAskcqlWIZXjji5WStnOpVoat3gQfT/Q5tb2djnCjBtMocY/Su1GfxPBg==
dependencies:
"@webassemblyjs/ast" "1.11.6"
"@webassemblyjs/helper-buffer" "1.11.6"
"@webassemblyjs/wasm-gen" "1.11.6"
"@webassemblyjs/wasm-parser" "1.11.6"
"@webassemblyjs/ast" "1.12.1"
"@webassemblyjs/helper-buffer" "1.12.1"
"@webassemblyjs/wasm-gen" "1.12.1"
"@webassemblyjs/wasm-parser" "1.12.1"
"@webassemblyjs/wasm-parser@1.11.6", "@webassemblyjs/wasm-parser@^1.11.5":
version "1.11.6"
resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.6.tgz#bb85378c527df824004812bbdb784eea539174a1"
integrity sha512-6ZwPeGzMJM3Dqp3hCsLgESxBGtT/OeCvCZ4TA1JUPYgmhAx38tTPR9JaKy0S5H3evQpO/h2uWs2j6Yc/fjkpTQ==
"@webassemblyjs/wasm-parser@1.12.1", "@webassemblyjs/wasm-parser@^1.12.1":
version "1.12.1"
resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-parser/-/wasm-parser-1.12.1.tgz#c47acb90e6f083391e3fa61d113650eea1e95937"
integrity sha512-xikIi7c2FHXysxXe3COrVUPSheuBtpcfhbpFj4gmu7KRLYOzANztwUU0IbsqvMqzuNK2+glRGWCEqZo1WCLyAQ==
dependencies:
"@webassemblyjs/ast" "1.11.6"
"@webassemblyjs/ast" "1.12.1"
"@webassemblyjs/helper-api-error" "1.11.6"
"@webassemblyjs/helper-wasm-bytecode" "1.11.6"
"@webassemblyjs/ieee754" "1.11.6"
"@webassemblyjs/leb128" "1.11.6"
"@webassemblyjs/utf8" "1.11.6"
"@webassemblyjs/wast-printer@1.11.6":
version "1.11.6"
resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-printer/-/wast-printer-1.11.6.tgz#a7bf8dd7e362aeb1668ff43f35cb849f188eff20"
integrity sha512-JM7AhRcE+yW2GWYaKeHL5vt4xqee5N2WcezptmgyhNS+ScggqcT1OtXykhAb13Sn5Yas0j2uv9tHgrjwvzAP4A==
"@webassemblyjs/wast-printer@1.12.1":
version "1.12.1"
resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-printer/-/wast-printer-1.12.1.tgz#bcecf661d7d1abdaf989d8341a4833e33e2b31ac"
integrity sha512-+X4WAlOisVWQMikjbcvY2e0rwPsKQ9F688lksZhBcPycBBuii3O7m8FACbDMWDojpAqvjIncrG8J0XHKyQfVeA==
dependencies:
"@webassemblyjs/ast" "1.11.6"
"@webassemblyjs/ast" "1.12.1"
"@xtuc/long" "4.2.2"
"@xtuc/ieee754@^1.2.0":
@ -3628,13 +3628,13 @@ binary-extensions@^2.0.0:
resolved "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz"
integrity sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==
body-parser@1.20.1:
version "1.20.1"
resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.1.tgz#b1812a8912c195cd371a3ee5e66faa2338a5c668"
integrity sha512-jWi7abTbYwajOytWCQc37VulmWiRae5RyTpaCyDcS5/lMdtwSz5lOpDE67srw/HYe35f1z3fDQw+3txg7gNtWw==
body-parser@1.20.2:
version "1.20.2"
resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.2.tgz#6feb0e21c4724d06de7ff38da36dad4f57a747fd"
integrity sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA==
dependencies:
bytes "3.1.2"
content-type "~1.0.4"
content-type "~1.0.5"
debug "2.6.9"
depd "2.0.0"
destroy "1.2.0"
@ -3642,7 +3642,7 @@ body-parser@1.20.1:
iconv-lite "0.4.24"
on-finished "2.4.1"
qs "6.11.0"
raw-body "2.5.1"
raw-body "2.5.2"
type-is "~1.6.18"
unpipe "1.0.0"
@ -4172,6 +4172,11 @@ content-type@~1.0.4:
resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b"
integrity sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==
content-type@~1.0.5:
version "1.0.5"
resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.5.tgz#8b773162656d1d1086784c8f23a54ce6d73d7918"
integrity sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==
convert-source-map@^1.5.0, convert-source-map@^1.7.0:
version "1.8.0"
resolved "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.8.0.tgz"
@ -4189,10 +4194,10 @@ cookie-signature@1.0.6:
resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c"
integrity sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==
cookie@0.5.0:
version "0.5.0"
resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.5.0.tgz#d1f5d71adec6558c58f389987c366aa47e994f8b"
integrity sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==
cookie@0.6.0:
version "0.6.0"
resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.6.0.tgz#2798b04b071b0ecbff0dbb62a505a8efa4e19051"
integrity sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==
cookie@~0.4.1:
version "0.4.1"
@ -4854,10 +4859,10 @@ end-of-stream@^1.1.0:
dependencies:
once "^1.4.0"
enhanced-resolve@^5.15.0:
version "5.15.0"
resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.15.0.tgz#1af946c7d93603eb88e9896cee4904dc012e9c35"
integrity sha512-LXYT42KJ7lpIKECr2mAXIaMldcNCh/7E0KBKOu4KSfkHmP+mZmSs+8V5gBAqisWBy0OO4W5Oyys0GO1Y8KtdKg==
enhanced-resolve@^5.16.0:
version "5.16.0"
resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.16.0.tgz#65ec88778083056cb32487faa9aef82ed0864787"
integrity sha512-O+QWCviPNSSLAD9Ucn8Awv+poAkqn3T1XY5/N7kR7rQO9yfSGWkYZDwpJ+iKF7B8rxaQKWngSqACpgzeapSyoA==
dependencies:
graceful-fs "^4.2.4"
tapable "^2.2.0"
@ -5101,16 +5106,16 @@ execa@^5.0.0:
strip-final-newline "^2.0.0"
express@^4.17.3:
version "4.18.2"
resolved "https://registry.yarnpkg.com/express/-/express-4.18.2.tgz#3fabe08296e930c796c19e3c516979386ba9fd59"
integrity sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ==
version "4.19.2"
resolved "https://registry.yarnpkg.com/express/-/express-4.19.2.tgz#e25437827a3aa7f2a827bc8171bbbb664a356465"
integrity sha512-5T6nhjsT+EOMzuck8JjBHARTHfMht0POzlA60WV2pMD3gyXw2LZnZ+ueGdNxG+0calOJcWKbpFcuzLZ91YWq9Q==
dependencies:
accepts "~1.3.8"
array-flatten "1.1.1"
body-parser "1.20.1"
body-parser "1.20.2"
content-disposition "0.5.4"
content-type "~1.0.4"
cookie "0.5.0"
cookie "0.6.0"
cookie-signature "1.0.6"
debug "2.6.9"
depd "2.0.0"
@ -5348,9 +5353,9 @@ flux@^4.0.1:
fbjs "^3.0.0"
follow-redirects@^1.0.0, follow-redirects@^1.14.7:
version "1.15.4"
resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.4.tgz#cdc7d308bf6493126b17ea2191ea0ccf3e535adf"
integrity sha512-Cr4D/5wlrb0z9dgERpUL3LrmPKVDsETIJhaCMeDfuFYcqa5bldGV6wBsAN6X/vxlXQtFBMrXdXxdL8CbDTGniw==
version "1.15.6"
resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.6.tgz#7f815c0cda4249c74ff09e95ef97c23b5fd0399b"
integrity sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==
fork-ts-checker-webpack-plugin@^6.5.0:
version "6.5.0"
@ -5428,6 +5433,11 @@ fs-monkey@1.0.3:
resolved "https://registry.npmjs.org/fs-monkey/-/fs-monkey-1.0.3.tgz"
integrity sha512-cybjIfiiE+pTWicSCLFHSrXZ6EilF30oh91FDP9S2B051prEa7QWfrVTQm10/dDpswBDXZugPa1Ogu8Yh+HV0Q==
fs-monkey@^1.0.4:
version "1.0.5"
resolved "https://registry.yarnpkg.com/fs-monkey/-/fs-monkey-1.0.5.tgz#fe450175f0db0d7ea758102e1d84096acb925788"
integrity sha512-8uMbBjrhzW76TYgEV27Y5E//W2f/lTFmx78P2w19FZSxarhI/798APGQyuGCwmkNxgwGRhrLfvWyLBvNtuOmew==
fs.realpath@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f"
@ -5602,10 +5612,10 @@ got@^9.6.0:
to-readable-stream "^1.0.0"
url-parse-lax "^3.0.0"
graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.4, graceful-fs@^4.2.6, graceful-fs@^4.2.9:
version "4.2.9"
resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.9.tgz#041b05df45755e587a24942279b9d113146e1c96"
integrity sha512-NtNxqUcXgpW2iMrfqSfR73Glt39K+BLwWsPs94yR63v45T0Wbej7eRmL5cWfwEgqXnmjQp3zaJTshdRW/qC2ZQ==
graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.11, graceful-fs@^4.2.4, graceful-fs@^4.2.6, graceful-fs@^4.2.9:
version "4.2.11"
resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3"
integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==
gray-matter@^4.0.3:
version "4.0.3"
@ -6900,12 +6910,12 @@ memfs@^3.1.2:
dependencies:
fs-monkey "1.0.3"
memfs@^3.4.1:
version "3.4.1"
resolved "https://registry.yarnpkg.com/memfs/-/memfs-3.4.1.tgz#b78092f466a0dce054d63d39275b24c71d3f1305"
integrity sha512-1c9VPVvW5P7I85c35zAdEr1TD5+F11IToIHIlrVIcflfnzPkJa0ZoYEoEdYDP8KgPFoSZ/opDrUsAoZWym3mtw==
memfs@^3.4.3:
version "3.6.0"
resolved "https://registry.yarnpkg.com/memfs/-/memfs-3.6.0.tgz#d7a2110f86f79dd950a8b6df6d57bc984aa185f6"
integrity sha512-EGowvkkgbMcIChjMTMkESFDbZeSh8xZ7kNSF0hAiAN4Jh6jgHCRS0Ga/+C8y6Au+oqpezRHCfPsmJ2+DwAgiwQ==
dependencies:
fs-monkey "1.0.3"
fs-monkey "^1.0.4"
memoize-one@^6.0.0:
version "6.0.0"
@@ -8278,10 +8288,10 @@ range-parser@^1.2.1, range-parser@~1.2.1:
resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031"
integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==
raw-body@2.5.1:
version "2.5.1"
resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.5.1.tgz#fe1b1628b181b700215e5fd42389f98b71392857"
integrity sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig==
raw-body@2.5.2:
version "2.5.2"
resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.5.2.tgz#99febd83b90e08975087e8f1f9419a149366b68a"
integrity sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==
dependencies:
bytes "3.1.2"
http-errors "2.0.0"
@@ -10486,10 +10496,10 @@ wait-on@^6.0.1:
minimist "^1.2.5"
rxjs "^7.5.4"
watchpack@^2.4.0:
version "2.4.0"
resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-2.4.0.tgz#fa33032374962c78113f93c7f2fb4c54c9862a5d"
integrity sha512-Lcvm7MGST/4fup+ifyKi2hjyIAwcdI4HRgtvTpIUxBRhB+RFtUh8XtDOxUfctVCnhVi+QQj49i91OyvzkJl6cg==
watchpack@^2.4.1:
version "2.4.1"
resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-2.4.1.tgz#29308f2cac150fa8e4c92f90e0ec954a9fed7fff"
integrity sha512-8wrBCMtVhqcXP2Sup1ctSkga6uc2Bx0IIvKyT7yTFier5AXHooSI+QyQQAtTb7+E0IUCCKyTFmXqdqgum2XWGg==
dependencies:
glob-to-regexp "^0.4.1"
graceful-fs "^4.1.2"
@@ -10532,12 +10542,12 @@ webpack-bundle-analyzer@^4.5.0:
ws "^7.3.1"
webpack-dev-middleware@^5.3.1:
version "5.3.1"
resolved "https://registry.yarnpkg.com/webpack-dev-middleware/-/webpack-dev-middleware-5.3.1.tgz#aa079a8dedd7e58bfeab358a9af7dab304cee57f"
integrity sha512-81EujCKkyles2wphtdrnPg/QqegC/AtqNH//mQkBYSMqwFVCQrxM6ktB2O/SPlZy7LqeEfTbV3cZARGQz6umhg==
version "5.3.4"
resolved "https://registry.yarnpkg.com/webpack-dev-middleware/-/webpack-dev-middleware-5.3.4.tgz#eb7b39281cbce10e104eb2b8bf2b63fce49a3517"
integrity sha512-BVdTqhhs+0IfoeAf7EoH5WE+exCmqGerHfDM0IL096Px60Tq2Mn9MAbnaGUe6HiMa41KMCYF19gyzZmBcq/o4Q==
dependencies:
colorette "^2.0.10"
memfs "^3.4.1"
memfs "^3.4.3"
mime-types "^2.1.31"
range-parser "^1.2.1"
schema-utils "^4.0.0"
@@ -10599,26 +10609,26 @@ webpack-sources@^3.2.2, webpack-sources@^3.2.3:
resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-3.2.3.tgz#2d4daab8451fd4b240cc27055ff6a0c2ccea0cde"
integrity sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w==
webpack@^5.73.0, webpack@^5.88.1, webpack@^5.90.1:
version "5.90.1"
resolved "https://registry.yarnpkg.com/webpack/-/webpack-5.90.1.tgz#62ab0c097d7cbe83d32523dbfbb645cdb7c3c01c"
integrity sha512-SstPdlAC5IvgFnhiRok8hqJo/+ArAbNv7rhU4fnWGHNVfN59HSQFaxZDSAL3IFG2YmqxuRs+IU33milSxbPlog==
webpack@^5.73.0, webpack@^5.88.1, webpack@^5.91.0:
version "5.91.0"
resolved "https://registry.yarnpkg.com/webpack/-/webpack-5.91.0.tgz#ffa92c1c618d18c878f06892bbdc3373c71a01d9"
integrity sha512-rzVwlLeBWHJbmgTC/8TvAcu5vpJNII+MelQpylD4jNERPwpBJOE2lEcko1zJX3QJeLjTTAnQxn/OJ8bjDzVQaw==
dependencies:
"@types/eslint-scope" "^3.7.3"
"@types/estree" "^1.0.5"
"@webassemblyjs/ast" "^1.11.5"
"@webassemblyjs/wasm-edit" "^1.11.5"
"@webassemblyjs/wasm-parser" "^1.11.5"
"@webassemblyjs/ast" "^1.12.1"
"@webassemblyjs/wasm-edit" "^1.12.1"
"@webassemblyjs/wasm-parser" "^1.12.1"
acorn "^8.7.1"
acorn-import-assertions "^1.9.0"
browserslist "^4.21.10"
chrome-trace-event "^1.0.2"
enhanced-resolve "^5.15.0"
enhanced-resolve "^5.16.0"
es-module-lexer "^1.2.1"
eslint-scope "5.1.1"
events "^3.2.0"
glob-to-regexp "^0.4.1"
graceful-fs "^4.2.9"
graceful-fs "^4.2.11"
json-parse-even-better-errors "^2.3.1"
loader-runner "^4.2.0"
mime-types "^2.1.27"
@@ -10626,7 +10636,7 @@ webpack@^5.73.0, webpack@^5.88.1, webpack@^5.90.1:
schema-utils "^3.2.0"
tapable "^2.1.1"
terser-webpack-plugin "^5.3.10"
watchpack "^2.4.0"
watchpack "^2.4.1"
webpack-sources "^3.2.3"
webpackbar@^5.0.2:


@@ -15,7 +15,7 @@
# limitations under the License.
#
apiVersion: v2
appVersion: "3.1.0"
appVersion: "3.1.1"
description: Apache Superset is a modern, enterprise-ready business intelligence web application
name: superset
icon: https://artifacthub.io/image/68c1d717-0e97-491f-b046-754e46f46922@2x
@@ -29,7 +29,7 @@ maintainers:
- name: craig-rueda
email: craig@craigrueda.com
url: https://github.com/craig-rueda
version: 0.12.6
version: 0.12.7
dependencies:
- name: postgresql
version: 12.1.6


@@ -23,7 +23,7 @@ NOTE: This file is generated by helm-docs: https://github.com/norwoodj/helm-docs
# superset
![Version: 0.12.6](https://img.shields.io/badge/Version-0.12.6-informational?style=flat-square)
![Version: 0.12.7](https://img.shields.io/badge/Version-0.12.7-informational?style=flat-square)
Apache Superset is a modern, enterprise-ready business intelligence web application


@@ -7,7 +7,7 @@
#
-e file:.
# via -r requirements/base.in
alembic==1.6.5
alembic==1.13.1
# via flask-migrate
amqp==5.1.1
# via kombu
@@ -161,7 +161,11 @@ idna==3.2
# email-validator
# requests
importlib-metadata==6.6.0
# via apache-superset
# via
# apache-superset
# flask
# markdown
# shillelagh
importlib-resources==5.12.0
# via limits
isodate==0.6.0
@@ -188,7 +192,7 @@ mako==1.2.4
# via
# alembic
# apache-superset
markdown==3.3.4
markdown==3.6
# via apache-superset
markdown-it-py==2.2.0
# via rich
@@ -274,11 +278,10 @@ pynacl==1.5.0
# via paramiko
pyparsing==3.0.6
# via apache-superset
pyrsistent==0.19.3
pyrsistent==0.20.0
# via jsonschema
python-dateutil==2.8.2
# via
# alembic
# apache-superset
# celery
# croniter
@@ -288,8 +291,6 @@ python-dateutil==2.8.2
# shillelagh
python-dotenv==0.19.0
# via apache-superset
python-editor==1.0.4
# via alembic
python-geohash==0.8.5
# via apache-superset
pytz==2021.3
@@ -344,7 +345,7 @@ sqlalchemy-utils==0.38.3
# via
# apache-superset
# flask-appbuilder
sqlglot==20.8.0
sqlglot==23.0.2
# via apache-superset
sqlparse==0.4.4
# via apache-superset
@@ -354,9 +355,11 @@ tabulate==0.8.9
# via apache-superset
typing-extensions==4.4.0
# via
# alembic
# apache-superset
# cattrs
# flask-limiter
# kombu
# limits
# shillelagh
tzdata==2023.3
@@ -398,7 +401,9 @@ wtforms-json==0.3.5
xlsxwriter==3.0.7
# via apache-superset
zipp==3.15.0
# via importlib-metadata
# via
# importlib-metadata
# importlib-resources
# The following packages are considered to be unsafe in a requirements file:
# setuptools


@@ -17,11 +17,26 @@
# under the License.
#
-r base.in
-e .[cors,druid,hive,mysql,postgres,thumbnails]
-e .[bigquery,cors,druid,gevent,gsheets,hive,mysql,playwright,postgres,presto,prophet,trino,thumbnails]
docker
flask-testing
freezegun
greenlet>=2.0.2
grpcio>=1.55.3
ipython
openapi-spec-validator
parameterized
pip-compile-multi
pre-commit
progress>=1.5,<2
pyfakefs
pyinstrument>=4.0.2,<5
pylint
pytest
pytest-cov
pytest-mock
python-ldap>=3.4.3
setuptools>=65.5.1
sqloxide
statsd
tox


@ -1,4 +1,4 @@
# SHA1:e35d6e709dc86002ca35ad59f7119aa6cc1e7179
# SHA1:3b6a7d105f9d14b449d4232aa368bd6a40d4c7ef
#
# This file is autogenerated by pip-compile-multi
# To update, run:
@@ -10,6 +10,8 @@
# via
# -r requirements/base.in
# -r requirements/development.in
appnope==0.1.4
# via ipython
astroid==2.15.8
# via pylint
asttokens==2.2.1
@@ -22,24 +24,100 @@ botocore==1.29.130
# via
# boto3
# s3transfer
build==0.10.0
# via pip-tools
cached-property==1.5.2
# via tableschema
cfgv==3.3.1
# via pre-commit
chardet==5.1.0
# via tabulator
# via
# tabulator
# tox
cmdstanpy==1.1.0
# via prophet
contourpy==1.0.7
# via matplotlib
coverage[toml]==7.2.5
# via pytest-cov
cycler==0.11.0
# via matplotlib
db-dtypes==1.1.1
# via pandas-gbq
decorator==5.1.1
# via ipython
dill==0.3.6
# via pylint
distlib==0.3.6
# via virtualenv
docker==6.1.1
# via -r requirements/development.in
et-xmlfile==1.1.0
# via openpyxl
executing==1.2.0
# via stack-data
filelock==3.12.2
# via
# tox
# virtualenv
flask-cors==3.0.10
# via apache-superset
flask-testing==0.8.1
# via -r requirements/development.in
fonttools==4.43.0
# via matplotlib
freezegun==1.2.2
# via -r requirements/development.in
future==0.18.3
# via pyhive
# via
# pyhive
# sqlalchemy-bigquery
gevent==23.9.1
# via apache-superset
google-api-core[grpc]==2.11.0
# via
# google-cloud-bigquery
# google-cloud-bigquery-storage
# google-cloud-core
# pandas-gbq
# sqlalchemy-bigquery
google-auth-oauthlib==1.0.0
# via
# pandas-gbq
# pydata-google-auth
google-cloud-bigquery==3.10.0
# via
# apache-superset
# pandas-gbq
# sqlalchemy-bigquery
google-cloud-bigquery-storage==2.19.1
# via
# pandas-gbq
# sqlalchemy-bigquery
google-cloud-core==2.3.2
# via google-cloud-bigquery
google-crc32c==1.5.0
# via google-resumable-media
google-resumable-media==2.5.0
# via google-cloud-bigquery
googleapis-common-protos==1.59.0
# via
# google-api-core
# grpcio-status
grpcio==1.60.1
# via
# -r requirements/development.in
# google-api-core
# google-cloud-bigquery
# grpcio-status
grpcio-status==1.60.1
# via google-api-core
identify==2.5.24
# via pre-commit
ijson==3.2.0.post0
# via tabulator
iniconfig==2.0.0
# via pytest
ipython==8.12.2
# via -r requirements/development.in
isort==5.12.0
@@ -52,28 +130,78 @@ jmespath==1.0.1
# botocore
jsonlines==3.1.0
# via tabulator
jsonschema-spec==0.1.4
# via openapi-spec-validator
kiwisolver==1.4.4
# via matplotlib
lazy-object-proxy==1.9.0
# via astroid
# via
# astroid
# openapi-spec-validator
linear-tsv==1.1.0
# via tabulator
matplotlib==3.7.1
# via prophet
matplotlib-inline==0.1.6
# via ipython
mccabe==0.7.0
# via pylint
mysqlclient==2.1.0
# via apache-superset
nodeenv==1.7.0
# via pre-commit
oauthlib==3.2.2
# via requests-oauthlib
openapi-schema-validator==0.4.4
# via openapi-spec-validator
openapi-spec-validator==0.5.6
# via -r requirements/development.in
openpyxl==3.1.2
# via tabulator
pandas-gbq==0.19.1
# via apache-superset
parameterized==0.9.0
# via -r requirements/development.in
parso==0.8.3
# via jedi
pathable==0.4.3
# via jsonschema-spec
pexpect==4.8.0
# via ipython
pickleshare==0.7.5
# via ipython
pillow==10.2.0
# via
# apache-superset
# matplotlib
pip-compile-multi==2.6.3
# via -r requirements/development.in
pip-tools==7.3.0
# via pip-compile-multi
playwright==1.41.2
# via apache-superset
pluggy==1.2.0
# via
# pytest
# tox
pre-commit==3.3.3
# via -r requirements/development.in
progress==1.6
# via -r requirements/development.in
prophet==1.1.5
# via apache-superset
proto-plus==1.22.2
# via
# google-cloud-bigquery
# google-cloud-bigquery-storage
protobuf==4.23.0
# via
# google-api-core
# google-cloud-bigquery
# google-cloud-bigquery-storage
# googleapis-common-protos
# grpcio-status
# proto-plus
psycopg2-binary==2.9.6
# via apache-superset
ptyprocess==0.7.0
@@ -84,24 +212,53 @@ pure-sasl==0.6.2
# via
# pyhive
# thrift-sasl
pydata-google-auth==1.7.0
# via pandas-gbq
pydruid==0.6.5
# via apache-superset
pyee==11.0.1
# via playwright
pyfakefs==5.2.2
# via -r requirements/development.in
pyhive[hive_pure_sasl]==0.7.0
# via apache-superset
pyinstrument==4.4.0
# via -r requirements/development.in
pylint==2.17.7
# via -r requirements/development.in
pyproject-api==1.5.2
# via tox
pyproject-hooks==1.0.0
# via build
pytest==7.3.1
# via
# -r requirements/development.in
# pytest-cov
# pytest-mock
pytest-cov==4.0.0
# via -r requirements/development.in
pytest-mock==3.10.0
# via -r requirements/development.in
python-ldap==3.4.3
# via -r requirements/development.in
pytz-deprecation-shim==0.1.0.post0
# via tzlocal
requests-oauthlib==1.3.1
# via google-auth-oauthlib
rfc3339-validator==0.1.4
# via openapi-schema-validator
rfc3986==2.0.0
# via tableschema
s3transfer==0.6.1
# via boto3
sqlalchemy-bigquery==1.6.1
# via apache-superset
sqloxide==0.1.33
# via -r requirements/development.in
stack-data==0.6.2
# via ipython
statsd==4.0.1
# via -r requirements/development.in
tableschema==1.20.2
# via apache-superset
tabulator==1.53.5
@@ -114,19 +271,52 @@ thrift==0.16.0
thrift-sasl==0.4.3
# via pyhive
tomli==2.0.1
# via pylint
# via
# build
# coverage
# pip-tools
# pylint
# pyproject-api
# pyproject-hooks
# pytest
# tox
tomlkit==0.11.8
# via pylint
toposort==1.10
# via pip-compile-multi
tox==4.6.4
# via -r requirements/development.in
tqdm==4.65.0
# via
# cmdstanpy
# prophet
traitlets==5.9.0
# via
# ipython
# matplotlib-inline
trino==0.328.0
# via apache-superset
tzlocal==4.3
# via trino
unicodecsv==0.14.1
# via
# tableschema
# tabulator
virtualenv==20.23.1
# via
# pre-commit
# tox
websocket-client==1.5.1
# via docker
wheel==0.40.0
# via pip-tools
xlrd==2.0.1
# via tabulator
zope-event==4.5.0
# via gevent
zope-interface==5.4.0
# via gevent
# The following packages are considered to be unsafe in a requirements file:
# pip
# setuptools


@@ -1,19 +0,0 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
-r base.in
-e .[postgres,gevent]
greenlet>=2.0.2


@@ -1,23 +0,0 @@
# SHA1:f00a57c70a52607d638c19f64f426f887382927e
#
# This file is autogenerated by pip-compile-multi
# To update, run:
#
# pip-compile-multi
#
-r base.txt
-e file:.
# via
# -r requirements/base.in
# -r requirements/docker.in
gevent==23.9.1
# via apache-superset
psycopg2-binary==2.9.6
# via apache-superset
zope-event==4.5.0
# via gevent
zope-interface==5.4.0
# via gevent
# The following packages are considered to be unsafe in a requirements file:
# setuptools


@@ -1,19 +0,0 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
pip-compile-multi
pre-commit
tox


@@ -1,74 +0,0 @@
# SHA1:39179f2c476f94362aa0705be059a488d7e38b6d
#
# This file is autogenerated by pip-compile-multi
# To update, run:
#
# pip-compile-multi
#
build==0.10.0
# via pip-tools
cachetools==5.3.2
# via tox
cfgv==3.3.1
# via pre-commit
chardet==5.1.0
# via tox
click==8.1.3
# via
# pip-compile-multi
# pip-tools
colorama==0.4.6
# via tox
distlib==0.3.6
# via virtualenv
filelock==3.12.2
# via
# tox
# virtualenv
identify==2.5.24
# via pre-commit
nodeenv==1.7.0
# via pre-commit
packaging==23.1
# via
# build
# pyproject-api
# tox
pip-compile-multi==2.6.3
# via -r requirements/integration.in
pip-tools==7.3.0
# via pip-compile-multi
platformdirs==3.8.1
# via
# tox
# virtualenv
pluggy==1.2.0
# via tox
pre-commit==3.3.3
# via -r requirements/integration.in
pyproject-api==1.5.2
# via tox
pyproject-hooks==1.0.0
# via build
pyyaml==6.0.1
# via pre-commit
tomli==2.0.1
# via
# build
# pip-tools
# pyproject-api
# tox
toposort==1.10
# via pip-compile-multi
tox==4.6.4
# via -r requirements/integration.in
virtualenv==20.23.1
# via
# pre-commit
# tox
wheel==0.40.0
# via pip-tools
# The following packages are considered to be unsafe in a requirements file:
# pip
# setuptools


@@ -1,17 +0,0 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
-r development.in


@@ -1,15 +0,0 @@
# SHA1:a71c19ba0170092851941268a0a3dc233feba06d
#
# This file is autogenerated by pip-compile-multi
# To update, run:
#
# pip-compile-multi
#
-r development.txt
-e file:.
# via
# -r requirements/base.in
# -r requirements/development.in
# The following packages are considered to be unsafe in a requirements file:
# setuptools


@@ -1,31 +0,0 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
-r development.in
-r integration.in
-e file:.[bigquery,hive,presto,prophet,trino,gsheets,playwright]
docker
flask-testing
freezegun
grpcio>=1.55.3
openapi-spec-validator
parameterized
pyfakefs
pylint
pytest
pytest-cov
pytest-mock
statsd


@@ -1,147 +0,0 @@
# SHA1:a37a1037f359c1101162ef43288178fbf00c487d
#
# This file is autogenerated by pip-compile-multi
# To update, run:
#
# pip-compile-multi
#
-r development.txt
-r integration.txt
-e file:.
# via
# -r requirements/base.in
# -r requirements/development.in
# -r requirements/testing.in
cmdstanpy==1.1.0
# via prophet
contourpy==1.0.7
# via matplotlib
coverage[toml]==7.2.5
# via pytest-cov
cycler==0.11.0
# via matplotlib
db-dtypes==1.1.1
# via pandas-gbq
docker==6.1.1
# via -r requirements/testing.in
flask-testing==0.8.1
# via -r requirements/testing.in
fonttools==4.43.0
# via matplotlib
freezegun==1.2.2
# via -r requirements/testing.in
google-api-core[grpc]==2.11.0
# via
# google-cloud-bigquery
# google-cloud-bigquery-storage
# google-cloud-core
# pandas-gbq
# sqlalchemy-bigquery
google-auth-oauthlib==1.0.0
# via
# pandas-gbq
# pydata-google-auth
google-cloud-bigquery==3.10.0
# via
# apache-superset
# pandas-gbq
# sqlalchemy-bigquery
google-cloud-bigquery-storage==2.19.1
# via
# pandas-gbq
# sqlalchemy-bigquery
google-cloud-core==2.3.2
# via google-cloud-bigquery
google-crc32c==1.5.0
# via google-resumable-media
google-resumable-media==2.5.0
# via google-cloud-bigquery
googleapis-common-protos==1.59.0
# via
# google-api-core
# grpcio-status
grpcio==1.60.1
# via
# -r requirements/testing.in
# google-api-core
# google-cloud-bigquery
# grpcio-status
grpcio-status==1.60.1
# via google-api-core
iniconfig==2.0.0
# via pytest
jsonschema-spec==0.1.4
# via openapi-spec-validator
kiwisolver==1.4.4
# via matplotlib
matplotlib==3.7.1
# via prophet
oauthlib==3.2.2
# via requests-oauthlib
openapi-schema-validator==0.4.4
# via openapi-spec-validator
openapi-spec-validator==0.5.6
# via -r requirements/testing.in
pandas-gbq==0.19.1
# via apache-superset
parameterized==0.9.0
# via -r requirements/testing.in
pathable==0.4.3
# via jsonschema-spec
playwright==1.41.2
# via apache-superset
prophet==1.1.5
# via apache-superset
proto-plus==1.22.2
# via
# google-cloud-bigquery
# google-cloud-bigquery-storage
protobuf==4.23.0
# via
# google-api-core
# google-cloud-bigquery
# google-cloud-bigquery-storage
# googleapis-common-protos
# grpcio-status
# proto-plus
pydata-google-auth==1.7.0
# via pandas-gbq
pyee==11.0.1
# via playwright
pyfakefs==5.2.2
# via -r requirements/testing.in
pyhive[presto]==0.7.0
# via apache-superset
pytest==7.3.1
# via
# -r requirements/testing.in
# pytest-cov
# pytest-mock
pytest-cov==4.0.0
# via -r requirements/testing.in
pytest-mock==3.10.0
# via -r requirements/testing.in
pytz-deprecation-shim==0.1.0.post0
# via tzlocal
requests-oauthlib==1.3.1
# via google-auth-oauthlib
rfc3339-validator==0.1.4
# via openapi-schema-validator
sqlalchemy-bigquery==1.6.1
# via apache-superset
statsd==4.0.1
# via -r requirements/testing.in
tqdm==4.65.0
# via
# cmdstanpy
# prophet
trino==0.328.0
# via apache-superset
tzlocal==4.3
# via trino
websocket-client==1.5.1
# via docker
# The following packages are considered to be unsafe in a requirements file:
# pip
# setuptools


@@ -126,7 +126,7 @@ setup(
"slack_sdk>=3.19.0, <4",
"sqlalchemy>=1.4, <2",
"sqlalchemy-utils>=0.38.3, <0.39",
"sqlglot>=20,<21",
"sqlglot>=23.0.2,<24",
"sqlparse>=0.4.4, <0.5",
"tabulate>=0.8.9, <0.9",
"typing-extensions>=4, <5",


@@ -4099,9 +4099,9 @@
}
},
"node_modules/follow-redirects": {
"version": "1.15.4",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.4.tgz",
"integrity": "sha512-Cr4D/5wlrb0z9dgERpUL3LrmPKVDsETIJhaCMeDfuFYcqa5bldGV6wBsAN6X/vxlXQtFBMrXdXxdL8CbDTGniw==",
"version": "1.15.6",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.6.tgz",
"integrity": "sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==",
"dev": true,
"funding": [
{
@@ -11308,9 +11308,9 @@
}
},
"follow-redirects": {
"version": "1.15.4",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.4.tgz",
"integrity": "sha512-Cr4D/5wlrb0z9dgERpUL3LrmPKVDsETIJhaCMeDfuFYcqa5bldGV6wBsAN6X/vxlXQtFBMrXdXxdL8CbDTGniw==",
"version": "1.15.6",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.6.tgz",
"integrity": "sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==",
"dev": true
},
"form-data": {


@@ -25,7 +25,7 @@ describe('AdhocMetrics', () => {
});
it('Clear metric and set simple adhoc metric', () => {
const metric = 'sum(num_girls)';
const metric = 'SUM(num_girls)';
const metricName = 'Sum Girls';
cy.get('[data-test=metrics]')
.find('[data-test="remove-control-button"]')


@@ -100,7 +100,7 @@ describe('Visualization > Table', () => {
});
cy.verifySliceSuccess({
waitAlias: '@chartData',
querySubstring: /group by.*name/i,
querySubstring: /group by\n.*name/i,
chartSelector: 'table',
});
});
@@ -246,7 +246,7 @@ describe('Visualization > Table', () => {
cy.visitChartByParams(formData);
cy.verifySliceSuccess({
waitAlias: '@chartData',
querySubstring: /group by.*state/i,
querySubstring: /group by\n.*state/i,
chartSelector: 'table',
});
cy.get('td').contains(/\d*%/);

File diff suppressed because it is too large


@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
import React from 'react';
import React, { ReactNode, MouseEventHandler } from 'react';
/**
* A function which returns text (or marked-up text)
@@ -44,15 +44,15 @@ interface MenuObjectChildProps {
disable?: boolean;
}
export interface SwitchProps {
isEditMode: boolean;
dbFetched: any;
disableSSHTunnelingForEngine?: boolean;
useSSHTunneling: boolean;
setUseSSHTunneling: React.Dispatch<React.SetStateAction<boolean>>;
setDB: React.Dispatch<any>;
isSSHTunneling: boolean;
}
// loose typing to avoid any circular dependencies
// refer to SSHTunnelSwitch component for strict typing
type SwitchProps = {
db: object;
changeMethods: {
onParametersChange: (event: any) => void;
};
clearValidationErrors: () => void;
};
type ConfigDetailsProps = {
embeddedId: string;
@@ -144,6 +144,26 @@ export interface DashboardEmbedModalExtensions {
onHide: () => void;
}
export interface ButtonProps {
name: ReactNode;
onClick?: MouseEventHandler<HTMLElement>;
'data-test'?: string;
buttonStyle:
| 'primary'
| 'secondary'
| 'dashed'
| 'link'
| 'warning'
| 'success'
| 'tertiary';
}
export interface SubMenuProps {
buttons?: Array<ButtonProps>;
name?: string | ReactNode;
activeChild?: string;
}
export type Extensions = Partial<{
'alertsreports.header.icon': React.ComponentType;
'embedded.documentation.configuration_details': React.ComponentType<ConfigDetailsProps>;
@@ -151,6 +171,7 @@ export type Extensions = Partial<{
'embedded.documentation.url': string;
'embedded.modal': React.ComponentType<DashboardEmbedModalExtensions>;
'dashboard.nav.right': React.ComponentType;
'home.submenu': React.ComponentType<SubMenuProps>;
'navbar.right-menu.item.icon': React.ComponentType<RightMenuItemIconProps>;
'navbar.right': React.ComponentType;
'report-modal.dropdown.item.icon': React.ComponentType;
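
Editor's aside, not part of the diff: the new `home.submenu` extension point above accepts any component matching `SubMenuProps`. A hedged sketch of how a host app might fulfill it — `getExtensionsRegistry` and its `set()` method are assumed from memory of Superset's extensions API, `HomeSubMenu` is an invented name, and the prop interfaces are inlined copies of the shapes shown in the diff:

```tsx
// Hypothetical consumer of the new 'home.submenu' extension point.
import React, { ReactNode, MouseEventHandler } from 'react';
import { getExtensionsRegistry } from '@superset-ui/core'; // assumed export

// Local copies of the prop shapes from the diff above, so the sketch is self-contained.
interface ButtonProps {
  name: ReactNode;
  onClick?: MouseEventHandler<HTMLElement>;
  buttonStyle:
    | 'primary'
    | 'secondary'
    | 'dashed'
    | 'link'
    | 'warning'
    | 'success'
    | 'tertiary';
}
interface SubMenuProps {
  buttons?: Array<ButtonProps>;
  name?: string | ReactNode;
  activeChild?: string;
}

const HomeSubMenu: React.FC<SubMenuProps> = ({ name, buttons }) => (
  <nav>
    <span>{name}</span>
    {buttons?.map((btn, i) => (
      <button key={i} type="button" onClick={btn.onClick}>
        {btn.name}
      </button>
    ))}
  </nav>
);

// Register the component under the new extension key (API assumed).
getExtensionsRegistry().set('home.submenu', HomeSubMenu);
```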


@@ -41,7 +41,7 @@
"@storybook/addon-links": "^7.6.13",
"@storybook/addons": "^7.6.13",
"@storybook/react": "^7.6.13",
"@storybook/types": "^7.6.13",
"@storybook/types": "^7.6.17",
"@types/react-loadable": "^5.5.3",
"antd": "4.10.3",
"bootstrap": "^3.4.1",


@@ -101,6 +101,7 @@ import tanzania from './countries/tanzania.geojson';
import thailand from './countries/thailand.geojson';
import timorleste from './countries/timorleste.geojson';
import turkey from './countries/turkey.geojson';
import turkey_regions from './countries/turkey_regions.geojson';
import turkmenistan from './countries/turkmenistan.geojson';
import uganda from './countries/uganda.geojson';
import uk from './countries/uk.geojson';
@@ -198,6 +199,7 @@ export const countries = {
thailand,
timorleste,
turkey,
turkey_regions,
turkmenistan,
uganda,
uk,
@@ -221,6 +223,9 @@ export const countryOptions = Object.keys(countries).map(x => {
if (x === 'france_regions') {
return [x, 'France (regions)'];
}
if (x === 'turkey_regions') {
return [x, 'Turkey (regions)'];
}
return [
x,
x

File diff suppressed because one or more lines are too long


@@ -20,6 +20,7 @@ import React, { useMemo } from 'react';
import { css, styled, t, useTheme } from '@superset-ui/core';
import { Tooltip } from '@superset-ui/chart-controls';
import {
ColorSchemeEnum,
PopKPIComparisonSymbolStyleProps,
PopKPIComparisonValueStyleProps,
PopKPIProps,
@ -66,6 +67,7 @@ export default function PopKPI(props: PopKPIProps) {
headerFontSize,
subheaderFontSize,
comparisonColorEnabled,
comparisonColorScheme,
percentDifferenceNumber,
comparatorText,
} = props;
@@ -90,8 +92,18 @@
`;
const getArrowIndicatorColor = () => {
if (!comparisonColorEnabled) return theme.colors.grayscale.base;
return percentDifferenceNumber > 0
if (!comparisonColorEnabled || percentDifferenceNumber === 0) {
return theme.colors.grayscale.base;
}
if (percentDifferenceNumber > 0) {
// Positive difference
return comparisonColorScheme === ColorSchemeEnum.Green
? theme.colors.success.base
: theme.colors.error.base;
}
// Negative difference
return comparisonColorScheme === ColorSchemeEnum.Red
? theme.colors.success.base
: theme.colors.error.base;
};
@@ -106,23 +118,32 @@
const { backgroundColor, textColor } = useMemo(() => {
let bgColor = defaultBackgroundColor;
let txtColor = defaultTextColor;
if (percentDifferenceNumber > 0) {
if (comparisonColorEnabled) {
bgColor = theme.colors.success.light2;
txtColor = theme.colors.success.base;
}
} else if (percentDifferenceNumber < 0) {
if (comparisonColorEnabled) {
bgColor = theme.colors.error.light2;
txtColor = theme.colors.error.base;
}
if (comparisonColorEnabled && percentDifferenceNumber !== 0) {
const useSuccess =
(percentDifferenceNumber > 0 &&
comparisonColorScheme === ColorSchemeEnum.Green) ||
(percentDifferenceNumber < 0 &&
comparisonColorScheme === ColorSchemeEnum.Red);
// Set background and text colors based on the conditions
bgColor = useSuccess
? theme.colors.success.light2
: theme.colors.error.light2;
txtColor = useSuccess
? theme.colors.success.base
: theme.colors.error.base;
}
return {
backgroundColor: bgColor,
textColor: txtColor,
};
}, [theme, comparisonColorEnabled, percentDifferenceNumber]);
}, [
theme,
comparisonColorScheme,
comparisonColorEnabled,
percentDifferenceNumber,
]);
const SYMBOLS_WITH_VALUES = useMemo(
() => [


@@ -32,6 +32,7 @@ import {
sharedControls,
} from '@superset-ui/chart-controls';
import { headerFontSize, subheaderFontSize } from '../sharedControls';
import { ColorSchemeEnum } from './types';
const config: ControlPanelConfig = {
controlPanelSections: [
@@ -118,6 +119,15 @@
expanded: true,
controlSetRows: [
['y_axis_format'],
[
{
name: 'percentDifferenceFormat',
config: {
...sharedControls.y_axis_format,
label: t('Percent Difference format'),
},
},
],
['currency_format'],
[
{
@@ -147,6 +157,27 @@
},
},
],
[
{
name: 'comparison_color_scheme',
config: {
type: 'SelectControl',
label: t('color scheme for comparison'),
default: ColorSchemeEnum.Green,
renderTrigger: true,
choices: [
[ColorSchemeEnum.Green, 'Green for increase, red for decrease'],
[ColorSchemeEnum.Red, 'Red for increase, green for decrease'],
],
visibility: ({ controls }) =>
controls?.comparison_color_enabled?.value === true,
description: t(
'Adds color to the chart symbols based on the positive or ' +
'negative change from the comparison value.',
),
},
},
],
],
},
],


@@ -21,7 +21,6 @@ import {
ChartProps,
getMetricLabel,
getValueFormatter,
NumberFormats,
getNumberFormatter,
formatTimeRange,
} from '@superset-ui/core';
@@ -83,7 +82,9 @@ export default function transformProps(chartProps: ChartProps) {
yAxisFormat,
currencyFormat,
subheaderFontSize,
comparisonColorScheme,
comparisonColorEnabled,
percentDifferenceFormat,
} = formData;
const { data: dataA = [] } = queriesData[0];
const {
@@ -113,9 +114,7 @@
w: 'Week' as string,
};
const formatPercentChange = getNumberFormatter(
NumberFormats.PERCENT_SIGNED_1_POINT,
);
const formatPercentChange = getNumberFormatter(percentDifferenceFormat);
let valueDifference: number | string = bigNumber - prevNumber;
@@ -154,6 +153,7 @@
headerText,
compType,
comparisonColorEnabled,
comparisonColorScheme,
percentDifferenceNumber: percentDifferenceNum,
comparatorText,
};


@@ -61,4 +61,10 @@ export type PopKPIProps = PopKPIStylesProps &
compType: string;
percentDifferenceNumber: number;
comparatorText: string;
comparisonColorScheme?: string;
};
export enum ColorSchemeEnum {
Green = 'Green',
Red = 'Red',
}
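
Editor's aside, not part of the commit: the color logic added in PopKPI.tsx above reduces to a small pure rule over the sign of the difference and the selected scheme. A minimal restatement for clarity — `isSuccessColor` is an invented name, only `ColorSchemeEnum` comes from the diff:

```ts
// Condensed restatement of the PopKPI color rule shown above (illustrative only).
enum ColorSchemeEnum {
  Green = 'Green', // an increase is good news
  Red = 'Red', // an increase is bad news
}

// true -> success palette, false -> error palette,
// null -> zero difference, keep the default grayscale color.
function isSuccessColor(
  percentDifference: number,
  scheme: ColorSchemeEnum,
): boolean | null {
  if (percentDifference === 0) return null;
  return (
    (percentDifference > 0 && scheme === ColorSchemeEnum.Green) ||
    (percentDifference < 0 && scheme === ColorSchemeEnum.Red)
  );
}

console.log(isSuccessColor(0.12, ColorSchemeEnum.Green)); // true
console.log(isSuccessColor(-0.05, ColorSchemeEnum.Green)); // false
console.log(isSuccessColor(-0.05, ColorSchemeEnum.Red)); // true
```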


@@ -308,6 +308,7 @@ export default function transformProps(
sliceId,
isHorizontal,
lineStyle,
timeCompare: array,
},
);
if (transformedSeries) {


@@ -62,7 +62,7 @@ import {
formatAnnotationLabel,
parseAnnotationOpacity,
} from '../utils/annotation';
import { getChartPadding } from '../utils/series';
import { getChartPadding, getTimeCompareStackId } from '../utils/series';
import {
OpacityEnum,
StackControlsValue,
@@ -164,6 +164,7 @@ export function transformSeries(
isHorizontal?: boolean;
lineStyle?: LineStyleOption;
queryIndex?: number;
timeCompare?: string[];
},
): SeriesOption | undefined {
const { name } = series;
@@ -188,6 +189,7 @@
sliceId,
isHorizontal = false,
queryIndex = 0,
timeCompare = [],
} = opts;
const contexts = seriesContexts[name || ''] || [];
const hasForecast =
@@ -217,9 +219,9 @@
} else if (stack && isObservation) {
// the suffix of the observation series is '' (falsy), which disables
// stacking. Therefore we need to set something that is truthy.
stackId = 'obs';
stackId = getTimeCompareStackId('obs', timeCompare, name);
} else if (stack && isTrend) {
stackId = forecastSeries.type;
stackId = getTimeCompareStackId(forecastSeries.type, timeCompare, name);
}
let plotType;
if (


@@ -35,7 +35,7 @@ import {
} from '@superset-ui/core';
import { SortSeriesType } from '@superset-ui/chart-controls';
import { format, LegendComponentOption, SeriesOption } from 'echarts';
import { maxBy, meanBy, minBy, orderBy, sumBy } from 'lodash';
import { isEmpty, maxBy, meanBy, minBy, orderBy, sumBy } from 'lodash';
import {
NULL_STRING,
StackControlsValue,
@@ -604,3 +604,39 @@
}
return {};
}
/**
* Returns the stackId used in stacked series.
* Returns the defaultId when the chart is not using time comparison.
* When time comparison is used, it returns the matching time-comparison
* value as the stackId whenever the series name includes that value.
*
* @param {string} defaultId The default stackId.
* @param {string[]} timeCompare The time comparison values.
* @param {string | number} name The name of the series.
*
* @returns {string} The stackId.
*/
export function getTimeCompareStackId(
defaultId: string,
timeCompare: string[],
name?: string | number,
): string {
if (isEmpty(timeCompare)) {
return defaultId;
}
// Each time-comparison offset gets its own stack so it doesn't stack on top of the original series
return (
timeCompare.find(value => {
if (typeof name === 'string') {
// offset is represented as '<offset>, <group-by values>'
return (
name.includes(`${value},`) ||
// offset is represented as <metric>__<offset>
name.includes(`__${value}`)
);
}
return name?.toString().includes(value);
}) || defaultId
);
}


@@ -40,6 +40,7 @@ import {
sanitizeHtml,
sortAndFilterSeries,
sortRows,
getTimeCompareStackId,
} from '../../src/utils/series';
import {
EchartsTimeseriesSeriesType,
@@ -1041,3 +1042,33 @@ test('getMinAndMaxFromBounds returns automatic lower bound when truncating', ()
scale: true,
});
});
describe('getTimeCompareStackId', () => {
it('returns the defaultId when timeCompare is empty', () => {
const result = getTimeCompareStackId('default', []);
expect(result).toEqual('default');
});
it('returns the defaultId when no value in timeCompare is included in name', () => {
const result = getTimeCompareStackId(
'default',
['compare1', 'compare2'],
'test__name',
);
expect(result).toEqual('default');
});
it('returns the first value in timeCompare that is included in name', () => {
const result = getTimeCompareStackId(
'default',
['compare1', 'compare2'],
'test__compare1',
);
expect(result).toEqual('compare1');
});
it('handles name being a number', () => {
const result = getTimeCompareStackId('default', ['123', '456'], 123);
expect(result).toEqual('123');
});
});
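
Editor's sketch, not part of the commit: the tests above exercise the two series-name shapes the helper matches. Spelled out, with the import path assumed from the surrounding test file:

```ts
// Hypothetical usage; import path assumed from the test file above.
import { getTimeCompareStackId } from '../../src/utils/series';

// No time comparison configured: fall back to the default stack id.
getTimeCompareStackId('obs', []); // -> 'obs'

// Series named '<offset>, <group-by values>':
getTimeCompareStackId('obs', ['1 week ago'], '1 week ago, boys'); // -> '1 week ago'

// Series named '<metric>__<offset>':
getTimeCompareStackId('obs', ['1 week ago'], 'sum__1 week ago'); // -> '1 week ago'
```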


@@ -921,6 +921,7 @@ export function formatQuery(queryEditor) {
const { sql } = getUpToDateQuery(getState(), queryEditor);
return SupersetClient.post({
endpoint: `/api/v1/sqllab/format_sql/`,
// TODO (betodealmeida): pass engine as a parameter for better formatting
body: JSON.stringify({ sql }),
headers: { 'Content-Type': 'application/json' },
}).then(({ json }) => {


@@ -17,7 +17,10 @@
* under the License.
*/
import React from 'react';
import { render, screen } from 'spec/helpers/testing-library';
import fetchMock from 'fetch-mock';
import * as uiCore from '@superset-ui/core';
import { FeatureFlag, QueryState } from '@superset-ui/core';
import { render, screen, waitFor } from 'spec/helpers/testing-library';
import QueryHistory from 'src/SqlLab/components/QueryHistory';
import { initialState } from 'src/SqlLab/fixtures';
@@ -27,18 +30,72 @@ const mockedProps = {
latestQueryId: 'yhMUZCGb',
};
const fakeApiResult = {
count: 4,
ids: [692],
result: [
{
changed_on: '2024-03-12T20:01:02.497775',
client_id: 'b0ZDzRYzn',
database: {
database_name: 'examples',
id: 1,
},
end_time: '1710273662496.047852',
error_message: null,
executed_sql: 'SELECT * from "FCC 2018 Survey"\nLIMIT 1001',
id: 692,
limit: 1000,
limiting_factor: 'DROPDOWN',
progress: 100,
results_key: null,
rows: 443,
schema: 'main',
select_as_cta: false,
sql: 'SELECT * from "FCC 2018 Survey" ',
sql_editor_id: '22',
start_time: '1710273662445.992920',
status: QueryState.Success,
tab_name: 'Untitled Query 16',
tmp_table_name: null,
tracking_url: null,
user: {
first_name: 'admin',
id: 1,
last_name: 'user',
},
},
],
};
const setup = (overrides = {}) => (
<QueryHistory {...mockedProps} {...overrides} />
);
describe('QueryHistory', () => {
it('Renders an empty state for query history', () => {
render(setup(), { useRedux: true, initialState });
test('Renders an empty state for query history', () => {
render(setup(), { useRedux: true, initialState });
const emptyStateText = screen.getByText(
/run a query to display query history/i,
const emptyStateText = screen.getByText(
/run a query to display query history/i,
);
expect(emptyStateText).toBeVisible();
});
test('fetches the query history when the persistence mode is enabled', async () => {
const isFeatureEnabledMock = jest
.spyOn(uiCore, 'isFeatureEnabled')
.mockImplementation(
featureFlag => featureFlag === FeatureFlag.SqllabBackendPersistence,
);
expect(emptyStateText).toBeVisible();
});
const editorQueryApiRoute = `glob:*/api/v1/query/?q=*`;
fetchMock.get(editorQueryApiRoute, fakeApiResult);
render(setup(), { useRedux: true, initialState });
await waitFor(() =>
expect(fetchMock.calls(editorQueryApiRoute).length).toBe(1),
);
const queryResultText = screen.getByText(fakeApiResult.result[0].rows);
expect(queryResultText).toBeInTheDocument();
isFeatureEnabledMock.mockClear();
});


@@ -16,12 +16,23 @@
* specific language governing permissions and limitations
* under the License.
*/
import React, { useMemo } from 'react';
import React, { useEffect, useMemo, useState } from 'react';
import { shallowEqual, useSelector } from 'react-redux';
import { useInView } from 'react-intersection-observer';
import { omit } from 'lodash';
import { EmptyStateMedium } from 'src/components/EmptyState';
import { t, styled } from '@superset-ui/core';
import {
t,
styled,
css,
FeatureFlag,
isFeatureEnabled,
} from '@superset-ui/core';
import QueryTable from 'src/SqlLab/components/QueryTable';
import { SqlLabRootState } from 'src/SqlLab/types';
import { useEditorQueriesQuery } from 'src/hooks/apiResources/queries';
import { Skeleton } from 'src/components';
import useEffectEvent from 'src/hooks/useEffectEvent';
interface QueryHistoryProps {
queryEditorId: string | number;
@@ -40,39 +51,92 @@ const StyledEmptyStateWrapper = styled.div`
}
`;
const getEditorQueries = (
queries: SqlLabRootState['sqlLab']['queries'],
queryEditorId: string | number,
) =>
Object.values(queries).filter(
({ sqlEditorId }) => String(sqlEditorId) === String(queryEditorId),
);
const QueryHistory = ({
queryEditorId,
displayLimit,
latestQueryId,
}: QueryHistoryProps) => {
const [ref, hasReachedBottom] = useInView({ threshold: 0 });
const [pageIndex, setPageIndex] = useState(0);
const queries = useSelector(
({ sqlLab: { queries } }: SqlLabRootState) => queries,
shallowEqual,
);
const { data, isLoading, isFetching } = useEditorQueriesQuery(
{ editorId: `${queryEditorId}`, pageIndex },
{
skip: !isFeatureEnabled(FeatureFlag.SqllabBackendPersistence),
},
);
const editorQueries = useMemo(
() =>
Object.values(queries).filter(
({ sqlEditorId }) => String(sqlEditorId) === String(queryEditorId),
),
[queries, queryEditorId],
data
? getEditorQueries(
omit(
queries,
data.result.map(({ id }) => id),
),
queryEditorId,
)
.concat(data.result)
.reverse()
: getEditorQueries(queries, queryEditorId),
[queries, data, queryEditorId],
);
const loadNext = useEffectEvent(() => {
setPageIndex(pageIndex + 1);
});
const loadedDataCount = data?.result.length || 0;
const totalCount = data?.count || 0;
useEffect(() => {
if (hasReachedBottom && loadedDataCount < totalCount) {
loadNext();
}
}, [hasReachedBottom, loadNext, loadedDataCount, totalCount]);
if (!editorQueries.length && isLoading) {
return <Skeleton active />;
}
return editorQueries.length > 0 ? (
<QueryTable
columns={[
'state',
'started',
'duration',
'progress',
'rows',
'sql',
'results',
'actions',
]}
queries={editorQueries}
displayLimit={displayLimit}
latestQueryId={latestQueryId}
/>
<>
<QueryTable
columns={[
'state',
'started',
'duration',
'progress',
'rows',
'sql',
'results',
'actions',
]}
queries={editorQueries}
displayLimit={displayLimit}
latestQueryId={latestQueryId}
/>
{data && loadedDataCount < totalCount && (
<div
ref={ref}
css={css`
position: relative;
top: -150px;
`}
/>
)}
{isFetching && <Skeleton active />}
</>
) : (
<StyledEmptyStateWrapper>
<EmptyStateMedium
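
Editor's note, not part of the commit: the component above implements sentinel-based infinite scroll — a positioned div near the bottom of the list flips `hasReachedBottom` via `useInView`, which advances `pageIndex` until all pages are loaded. A minimal self-contained sketch of the pattern; `useLoadMore` is an invented name, and only the `useInView` tuple API is taken from the diff:

```ts
// Minimal sketch of the load-more pattern used above (illustrative only).
import { useEffect, useState } from 'react';
import { useInView } from 'react-intersection-observer';

export function useLoadMore(loadedCount: number, totalCount: number) {
  // hasReachedBottom becomes true when the sentinel element scrolls into view.
  const [ref, hasReachedBottom] = useInView({ threshold: 0 });
  const [pageIndex, setPageIndex] = useState(0);

  useEffect(() => {
    // Request the next page only while there is more data left to fetch.
    if (hasReachedBottom && loadedCount < totalCount) {
      setPageIndex(i => i + 1);
    }
  }, [hasReachedBottom, loadedCount, totalCount]);

  // Attach `ref` to a sentinel element near the list bottom and feed
  // `pageIndex` into the paginated query hook.
  return { ref, pageIndex };
}
```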


@@ -25,7 +25,6 @@ const apiData = {
common: DEFAULT_COMMON_BOOTSTRAP_DATA,
tab_state_ids: [],
databases: [],
queries: {},
user: {
userId: 1,
username: 'some name',
@@ -220,18 +219,20 @@ describe('getInitialState', () => {
}),
);
const latestQuery = {
...runningQuery,
id: 'latestPersisted',
startDttm: Number(startDttmInStr),
endDttm: Number(endDttmInStr),
};
const initializedQueries = getInitialState({
...apiData,
queries: {
backendPersisted: {
...runningQuery,
id: 'backendPersisted',
startDttm: startDttmInStr,
endDttm: endDttmInStr,
},
...apiDataWithTabState,
active_tab: {
...apiDataWithTabState.active_tab,
latest_query: latestQuery,
},
}).sqlLab.queries;
expect(initializedQueries.backendPersisted).toEqual(
expect(initializedQueries.latestPersisted).toEqual(
expect.objectContaining({
startDttm: Number(startDttmInStr),
endDttm: Number(endDttmInStr),


@@ -136,7 +136,12 @@ export default function getInitialState({
});
}
const queries = { ...queries_ };
const queries = {
...queries_,
...(activeTab?.latest_query && {
[activeTab.latest_query.id]: activeTab.latest_query,
}),
};
/**
* If the `SQLLAB_BACKEND_PERSISTENCE` feature flag is off, or if the user


@@ -105,6 +105,7 @@ const defaultProps = {
const Styles = styled.div`
min-height: ${p => p.height}px;
position: relative;
text-align: center;
.chart-tooltip {
opacity: 0.75;
@@ -128,6 +129,21 @@
}
`;
const LoadingDiv = styled.div`
position: absolute;
left: 50%;
top: 50%;
width: 80%;
transform: translate(-50%, -50%);
`;
const MessageSpan = styled.span`
display: block;
margin: ${({ theme }) => theme.gridUnit * 4}px auto;
width: fit-content;
color: ${({ theme }) => theme.colors.grayscale.base};
`;
const MonospaceDiv = styled.div`
font-family: ${({ theme }) => theme.typography.families.monospace};
word-break: break-word;
@@ -232,16 +248,49 @@
);
}
renderSpinner(databaseName) {
const message = databaseName
? t('Waiting on %s', databaseName)
: t('Waiting on database...');
return (
<LoadingDiv>
<Loading position="inline-centered" />
<MessageSpan>{message}</MessageSpan>
</LoadingDiv>
);
}
renderChartContainer() {
return (
<div className="slice_container" data-test="slice-container">
{this.props.isInView ||
!isFeatureEnabled(FeatureFlag.DashboardVirtualization) ||
isCurrentUserBot() ? (
<ChartRenderer
{...this.props}
source={this.props.dashboardId ? 'dashboard' : 'explore'}
data-test={this.props.vizType}
/>
) : (
<Loading />
)}
</div>
);
}
render() {
const {
height,
chartAlert,
chartStatus,
datasource,
errorMessage,
chartIsStale,
queriesResponse = [],
width,
} = this.props;
const databaseName = datasource?.database?.name;
const isLoading = chartStatus === 'loading';
this.renderContainerStartTime = Logger.getTimestamp();
@@ -297,20 +346,9 @@
height={height}
width={width}
>
<div className="slice_container" data-test="slice-container">
{this.props.isInView ||
!isFeatureEnabled(FeatureFlag.DashboardVirtualization) ||
isCurrentUserBot() ? (
<ChartRenderer
{...this.props}
source={this.props.dashboardId ? 'dashboard' : 'explore'}
data-test={this.props.vizType}
/>
) : (
<Loading />
)}
</div>
{isLoading && <Loading />}
{isLoading
? this.renderSpinner(databaseName)
: this.renderChartContainer()}
</Styles>
</ErrorBoundary>
);


@@ -285,11 +285,11 @@ test('context menu for supported chart, no dimensions, no filters', async () =>
isContextMenu: true,
});
await expectDrillToDetailDisabled(
'Drill to detail is disabled because this chart does not group data by dimension value.',
);
const message =
'Drill to detail is disabled because this chart does not group data by dimension value.';
await expectDrillToDetailByDisabled();
await expectDrillToDetailDisabled(message);
await expectDrillToDetailByDisabled(message);
});
test('context menu for supported chart, no dimensions, 1 filter', async () => {
@@ -299,11 +299,11 @@ test('context menu for supported chart, no dimensions, 1 filter', async () => {
filters: [filterA],
});
await expectDrillToDetailDisabled(
'Drill to detail is disabled because this chart does not group data by dimension value.',
);
const message =
'Drill to detail is disabled because this chart does not group data by dimension value.';
await expectDrillToDetailByDisabled();
await expectDrillToDetailDisabled(message);
await expectDrillToDetailByDisabled(message);
});
test('dropdown menu for supported chart, dimensions', async () => {

Some files were not shown because too many files have changed in this diff