Mirror of https://github.com/apache/superset.git, synced 2026-05-02 06:24:37 +00:00

Compare commits: enxdev/ref...fix_docker (162 commits)
Commits (SHA1 only; the Author and Date columns were not captured in this view):

774910f40d, dbbcc11a98, cebd45778f, 1b2ecc6955, 1f5e567645, c467fb566d,
cc0ed0fef4, 4f463399a7, 6264ff5165, 1410e528a4, f704b0f556, bdfd5cd4ec,
af44b14fbe, 29c76ef1d5, 7953c89d51, fd4c3dce44, 234f8c94d1, bc2e51d8d0,
2787167abe, 9e84e13888, cfd24e3ccd, aaecec2e03, 66fe0b0594, 0d0b43062e,
72df46a729, f7cfd9182a, 5faaaf978b, 855f4c4897, 008ab202f3, db311eb376,
d5f33c4c02, ad82a8c14e, 45c18368f6, 6f656914fe, 6706d1308f, cbf1aeec7d,
3f7907b266, ba0d118fdd, 49aa74cec8, 7c569abaf6, a70f2cee72, 7b343f7fac,
742ad92189, 03b72628fa, 27ca7ba7d7, 1074d1e618, b6edf148e2, 046770cf76,
0f1064eab8, 82fc8879b0, 159958e577, 2a98780d2c, a84da1c5cc, 8c329c445f,
05cccf6404, 92808ffe38, 0a7635fc05, 4fe51c6db9, 0eaa8c5894, f56dfb35b2,
597e207eff, 95ae663e88, d0def80d3b, 9f5f0895f6, dce7e47399, 4b9ae07fe5,
7519cab379, 84c1ad97dc, f743ae36dc, ca5ed8b7b0, f1a6aaad63, 995182270c,
c864e6cd2b, a3d6ef07c1, 072540f321, 2561b267ab, 8fc4c50050, 359d7baaf5,
437151a95f, 2157fe3f28, 1f6ef6a870, 35de980081, 90ce1b5012, 4a6dd94a6c,
860c9c08a1, f0c42b0a01, 889ab36dff, d85fdf4bf9, afd5379bb0, 789ca738dc,
40568fd1ff, 6205fb4e48, c3bc7de75f, d33f1534e2, 1ccc147670, d8b9f38609,
e8d5ff1264, 3becd6b72e, cea8ede3f0, e94667820f, 41e611b413, d47430ac21,
f2c0d3aa48, f49a426ada, acf3e12230, 1d90ee3517, 0f32116734, 8d7ceebbc3,
45da3f4519, 122057bac5, 997cd60d43, c57f47ddce, b4068f1fca, e7b136b822,
86ca2b3d08, 36b229cd18, fff9f874b1, 7dc65072c0, 5411d40a7a, a7eb28ddd4,
d488c78472, fe33689917, b0a2aea760, 8f93ad7068, cced1c5a4e, c332eebc37,
106d755931, ef31710c2b, 6a5c293a04, 86bfb2ade6, f8ed0cec74, b70c5e1d9d,
f4b201857e, 16385322db, 9677fa97ff, 16295b086a, afe580bb8a, d102b45692,
c0c6486e70, a2d8590f0a, bfb6ff3394, 8ea94916d9, 642de0ad63, 6954db023c,
eca7c57083, 4dca9bceed, 7219310267, 77ade18107, bca2366d5a, de2eedd16f,
0f1663b2ec, 604fe27ed1, 3d7f6dae90, a8c6bb5b52, 30fbfa1b14, 3e297d130e,
dc754e2d26, f59fb6f780, fea187a36a, a9ba3b325f, c8008e6225, 4369967732
.asf.yaml (18 changed lines)

@@ -18,7 +18,6 @@
# https://cwiki.apache.org/confluence/display/INFRA/.asf.yaml+features+for+git+repositories
---
github:
del_branch_on_merge: true
description: "Apache Superset is a Data Visualization and Data Exploration Platform"
homepage: https://superset.apache.org/
labels:

@@ -54,9 +53,6 @@ github:
merge: false
rebase: false

ghp_branch: gh-pages
ghp_path: /

protected_branches:
master:
required_status_checks:

@@ -73,16 +69,17 @@ github:
- cypress-matrix (3, chrome)
- cypress-matrix (4, chrome)
- cypress-matrix (5, chrome)
- dependency-review
- frontend-build
- pre-commit (current)
- pre-commit (previous)
- pre-commit
- python-lint
- test-mysql
- test-postgres (current)
- test-postgres (next)
- test-postgres-hive
- test-postgres-presto
- test-sqlite
- unit-tests (current)
- unit-tests (next)

required_pull_request_reviews:
dismiss_stale_reviews: false

@@ -90,10 +87,3 @@ github:
required_approving_review_count: 1

required_signatures: false
gh-pages:
required_pull_request_reviews:
dismiss_stale_reviews: false
require_code_owner_reviews: true
required_approving_review_count: 1

required_signatures: false
(file header missing from this view; the entries resemble .dockerignore)

@@ -34,6 +34,7 @@
**/*.sqllite
**/*.swp
**/.terser-plugin-cache/
**/.storybook/
**/node_modules/

tests/

@@ -41,8 +42,6 @@ docs/
install/
superset-frontend/cypress-base/
superset-frontend/coverage/
superset-frontend/.temp_cache/
superset/static/assets/
superset-websocket/dist/
venv
.venv
.gitattributes (1 changed line)

@@ -1,3 +1,2 @@
docker/**/*.sh text eol=lf
*.svg binary
*.ipynb binary
.github/CODEOWNERS (14 changed lines)

@@ -2,7 +2,7 @@

# https://github.com/apache/superset/issues/13351

/superset/migrations/ @mistercrunch @michael-s-molina @betodealmeida @eschutho
/superset/migrations/ @apache/superset-committers

# Notify some committers of changes in the components

@@ -12,21 +12,21 @@

# Notify Helm Chart maintainers about changes in it

/helm/superset/ @craig-rueda @dpgaspar @villebro @nytai @michael-s-molina @mistercrunch @rusackas @Antonio-RiveroMartnez
/helm/superset/ @craig-rueda @dpgaspar @villebro

# Notify E2E test maintainers of changes

/superset-frontend/cypress-base/ @sadpandajoe @geido @eschutho @rusackas @betodealmeida @mistercrunch
/superset-frontend/cypress-base/ @jinghua-qa @geido @eschutho @rusackas @betodealmeida

# Notify PMC members of changes to GitHub Actions

/.github/ @villebro @geido @eschutho @rusackas @betodealmeida @nytai @mistercrunch @craig-rueda @kgabryje @dpgaspar
/.github/ @villebro @geido @eschutho @rusackas @betodealmeida @nytai @mistercrunch @craig-rueda @john-bodley @kgabryje @dpgaspar

# Notify PMC members of changes to required GitHub Actions
# Notify PMC members of changes to required Github Actions

/.asf.yaml @villebro @geido @eschutho @rusackas @betodealmeida @nytai @mistercrunch @craig-rueda @kgabryje @dpgaspar @Antonio-RiveroMartnez
/.asf.yaml @villebro @geido @eschutho @rusackas @betodealmeida @nytai @mistercrunch @craig-rueda @john-bodley @kgabryje @dpgaspar

# Maps are a finicky contribution process we care about
# Maps are a finnicky contribution process we care about

**/*.geojson @villebro @rusackas
/superset-frontend/plugins/legacy-plugin-chart-country-map/ @villebro @rusackas
.github/ISSUE_TEMPLATE/bug-report.yml (11 changed lines)

@@ -15,9 +15,14 @@ body:
id: bug-description
attributes:
label: Bug description
description: A clear description of what the bug is, including reproduction steps and expected behavior.
description: A clear and concise description of what the bug is.
validations:
required: true
- type: textarea
id: repro-steps
attributes:
label: How to reproduce the bug
placeholder: |
The bug is that...
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'

@@ -41,8 +46,8 @@ body:
label: Superset version
options:
- master / latest-dev
- "4.1.1"
- "4.0.2"
- "3.1.3"
validations:
required: true
- type: dropdown
(file header missing from this view; per the hunk header, this 23-line workflow is removed in full)

@@ -1,23 +0,0 @@
name: Label Draft PRs
on:
pull_request:
types:
- opened
- converted_to_draft
jobs:
label-draft:
runs-on: ubuntu-latest
steps:
- name: Check if the PR is a draft
id: check-draft
uses: actions/github-script@v6
with:
script: |
const isDraft = context.payload.pull_request.draft;
core.setOutput('isDraft', isDraft);
- name: Add `review:draft` Label
if: steps.check-draft.outputs.isDraft == 'true'
uses: actions-ecosystem/action-add-labels@v1
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
labels: "review:draft"
.github/actions/chart-releaser-action (submodule, 2 changed lines)

Submodule .github/actions/chart-releaser-action updated: a917fd15b2...120944e663
.github/actions/setup-backend/action.yml (19 changed lines)

@@ -26,12 +26,11 @@ runs:
shell: bash
run: |
if [ "${{ inputs.python-version }}" = "current" ]; then
echo "PYTHON_VERSION=3.11" >> $GITHUB_ENV
elif [ "${{ inputs.python-version }}" = "next" ]; then
# currently disabled in GHA matrixes because of library compatibility issues
echo "PYTHON_VERSION=3.12" >> $GITHUB_ENV
elif [ "${{ inputs.python-version }}" = "previous" ]; then
echo "PYTHON_VERSION=3.10" >> $GITHUB_ENV
elif [ "${{ inputs.python-version }}" = "next" ]; then
echo "PYTHON_VERSION=3.11" >> $GITHUB_ENV
elif [ "${{ inputs.python-version }}" = "previous" ]; then
echo "PYTHON_VERSION=3.9" >> $GITHUB_ENV
else
echo "PYTHON_VERSION=${{ inputs.python-version }}" >> $GITHUB_ENV
fi

@@ -44,15 +43,11 @@ runs:
run: |
if [ "${{ inputs.install-superset }}" = "true" ]; then
sudo apt-get update && sudo apt-get -y install libldap2-dev libsasl2-dev

pip install --upgrade pip setuptools wheel uv

pip install --upgrade pip setuptools wheel
if [ "${{ inputs.requirements-type }}" = "dev" ]; then
uv pip install --system -r requirements/development.txt
pip install -r requirements/development.txt
elif [ "${{ inputs.requirements-type }}" = "base" ]; then
uv pip install --system -r requirements/base.txt
pip install -r requirements/base.txt
fi

uv pip install --system -e .
fi
shell: bash
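The version aliases above keep matrix entries stable while the concrete interpreters rotate. A minimal sketch of the alias resolution, using the mapping from one side of the hunk (current=3.11, next=3.12, previous=3.10); the function name is illustrative, not part of the action:

#!/usr/bin/env bash
# Hypothetical helper mirroring the alias logic in the hunk above.
resolve_python_version() {
  case "$1" in
    current)  echo "3.11" ;;
    next)     echo "3.12" ;;  # noted in the hunk as disabled in GHA matrixes
    previous) echo "3.10" ;;
    *)        echo "$1"   ;;  # explicit versions (e.g. "3.9") pass through
  esac
}

resolve_python_version current   # prints 3.11
resolve_python_version 3.9       # prints 3.9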
.github/actions/setup-docker/action.yml (69 changed lines)

@@ -1,69 +0,0 @@
name: "Setup Docker Environment"
description: "Reusable steps for setting up QEMU, Docker Buildx, DockerHub login, Supersetbot, and optionally Docker Compose"
inputs:
build:
description: "Used for building?"
required: false
default: "false"
dockerhub-user:
description: "DockerHub username"
required: false
dockerhub-token:
description: "DockerHub token"
required: false
install-docker-compose:
description: "Flag to install Docker Compose"
required: false
default: "true"
login-to-dockerhub:
description: "Whether you want to log into dockerhub"
required: false
default: "true"
outputs: {}
runs:
using: "composite"
steps:

- name: Set up QEMU
if: ${{ inputs.build == 'true' }}
uses: docker/setup-qemu-action@v3

- name: Set up Docker Buildx
if: ${{ inputs.build == 'true' }}
uses: docker/setup-buildx-action@v3

- name: Try to login to DockerHub
if: ${{ inputs.login-to-dockerhub == 'true' }}
continue-on-error: true
uses: docker/login-action@v3
with:
username: ${{ inputs.dockerhub-user }}
password: ${{ inputs.dockerhub-token }}

- name: Install Docker Compose
if: ${{ inputs.install-docker-compose == 'true' }}
shell: bash
run: |
sudo apt-get update
sudo apt-get install -y ca-certificates curl
sudo install -m 0755 -d /etc/apt/keyrings

# Download and save the Docker GPG key in the correct format
curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /etc/apt/keyrings/docker.gpg

# Ensure the key file is readable
sudo chmod a+r /etc/apt/keyrings/docker.gpg

# Add the Docker repository using the correct key
echo \
"deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.gpg] https://download.docker.com/linux/ubuntu \
$(. /etc/os-release && echo "$VERSION_CODENAME") stable" | \
sudo tee /etc/apt/sources.list.d/docker.list > /dev/null

# Update package lists and install Docker Compose plugin
sudo apt update
sudo apt install -y docker-compose-plugin

- name: Docker Version Info
shell: bash
run: docker info
.github/dependabot.yml (22 changed lines)

@@ -1,5 +1,4 @@
version: 2
enable-beta-ecosystems: true
updates:

- package-ecosystem: "github-actions"

@@ -9,9 +8,8 @@ updates:

- package-ecosystem: "npm"
ignore:
# not until React >= 18.0.0
- dependency-name: "storybook"
- dependency-name: "@storybook*"
# not until node >= 18.12.0
- dependency-name: "css-minimizer-webpack-plugin"
directory: "/superset-frontend/"
schedule:
interval: "monthly"

@@ -22,14 +20,10 @@ updates:
versioning-strategy: increase


# NOTE: `uv` support is in beta, more details here:
# https://github.com/dependabot/dependabot-core/pull/10040#issuecomment-2696978430
- package-ecosystem: "uv"
directory: "requirements/"
open-pull-requests-limit: 10
labels:
- uv
- dependabot
# - package-ecosystem: "pip"
# NOTE: as dependabot isn't compatible with our python
# dependency setup (pip-compile-multi), we'll be using
# `supersetbot` instead

- package-ecosystem: "npm"
directory: ".github/actions"

@@ -328,10 +322,6 @@ updates:

- package-ecosystem: "npm"
directory: "/superset-frontend/packages/superset-ui-core/"
ignore:
# not until React >= 18.0.0
- dependency-name: "react-markdown"
- dependency-name: "remark-gfm"
schedule:
interval: "monthly"
labels:
.github/workflows/bashlib.sh (2 changed lines)

@@ -165,7 +165,7 @@ cypress-run-all() {
# UNCOMMENT the next few commands to monitor memory usage
# monitor_memory & # Start memory monitoring in the background
# memoryMonitorPid=$!
python ../../scripts/cypress_run.py --parallelism $PARALLELISM --parallelism-id $PARALLEL_ID --group $PARALLEL_ID --retries 5 $USE_DASHBOARD_FLAG
python ../../scripts/cypress_run.py --parallelism $PARALLELISM --parallelism-id $PARALLEL_ID --retries 5 $USE_DASHBOARD_FLAG
# kill $memoryMonitorPid

# After job is done, print out Flask log for debugging
.github/workflows/bump-python-package.yml (17 changed lines)

@@ -14,16 +14,10 @@ on:
required: true
description: Max number of PRs to open (0 for no limit)
default: 5
extra-flags:
required: false
default: --only-base
description: Additional flags to pass to the bump-python command
#schedule:
# - cron: '0 0 * * *' # Runs daily at midnight UTC

jobs:
bump-python-package:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
permissions:
actions: write
contents: write

@@ -45,8 +39,8 @@ jobs:
with:
python-version: "3.10"

- name: Install uv
run: pip install uv
- name: Install pip-compile-multi
run: pip install pip-compile-multi

- name: supersetbot bump-python -p "${{ github.event.inputs.package }}"
env:

@@ -65,13 +59,10 @@ jobs:
GROUP_OPT="-g ${{ github.event.inputs.group }}"
fi

EXTRA_FLAGS="${{ github.event.inputs.extra-flags }}"

supersetbot bump-python \
--verbose \
--use-current-repo \
--include-subpackages \
--limit ${{ github.event.inputs.limit }} \
$PACKAGE_OPT \
$GROUP_OPT \
$EXTRA_FLAGS
$GROUP_OPT
.github/workflows/cancel_duplicates.yml (2 changed lines)

@@ -9,7 +9,7 @@ on:
jobs:
cancel-duplicate-runs:
name: Cancel duplicate workflow runs
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
permissions:
actions: write
contents: read
.github/workflows/check-python-deps.yml (44 changed lines)

@@ -1,44 +0,0 @@
name: Check python dependencies

on:
push:
branches:
- "master"
- "[0-9].[0-9]*"
pull_request:
types: [synchronize, opened, reopened, ready_for_review]

# cancel previous workflow jobs for PRs
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
cancel-in-progress: true

jobs:
check-python-deps:
runs-on: ubuntu-22.04
steps:

- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
depth: 1

- name: Setup Python
if: steps.check.outputs.python
uses: ./.github/actions/setup-backend/

- name: Run uv
if: steps.check.outputs.python
run: ./scripts/uv-pip-compile.sh

- name: Check for uncommitted changes
run: |
if [[ -n "$(git diff)" ]]; then
echo "ERROR: The pinned dependencies are not up-to-date."
echo "Please run './scripts/uv-pip-compile.sh' and commit the changes."
exit 1
else
echo "Pinned dependencies are up-to-date."
fi
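The workflow above is a common CI staleness guard: regenerate the pinned requirements and fail if the result differs from what is committed. A minimal standalone sketch of the same pattern (the script path is the one from the hunk; everything else is generic):

#!/usr/bin/env bash
# Fail CI when regenerating lockfiles produces uncommitted changes.
set -euo pipefail
./scripts/uv-pip-compile.sh
if [[ -n "$(git diff)" ]]; then
  echo "ERROR: The pinned dependencies are not up-to-date." >&2
  git diff --stat   # show which pinned files drifted
  exit 1
fi
echo "Pinned dependencies are up-to-date."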
(file header missing from this view; the hunk is from the DB-migration-conflict check workflow)

@@ -19,7 +19,7 @@ concurrency:
jobs:
check_db_migration_conflict:
name: Check DB migration conflict
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
permissions:
contents: read
pull-requests: write
.github/workflows/codeql-analysis.yml (2 changed lines)

@@ -17,7 +17,7 @@ concurrency:
jobs:
analyze:
name: Analyze
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
permissions:
actions: read
contents: read
.github/workflows/dependency-review.yml (40 changed lines)

@@ -5,32 +5,19 @@
# Source repository: https://github.com/actions/dependency-review-action
# Public documentation: https://docs.github.com/en/code-security/supply-chain-security/understanding-your-software-supply-chain/about-dependency-review#dependency-review-enforcement
name: "Dependency Review"
on:
push:
branches:
- "master"
- "[0-9].[0-9]*"
pull_request:
types: [synchronize, opened, reopened, ready_for_review]

# cancel previous workflow jobs for PRs
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
cancel-in-progress: true
on: [pull_request]

permissions:
contents: read

jobs:
dependency-review:
if: github.event_name == 'pull_request'
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
steps:
- name: "Checkout Repository"
uses: actions/checkout@v4
- name: "Dependency Review"
uses: actions/dependency-review-action@v4
continue-on-error: true
with:
fail-on-severity: critical
# compatible/incompatible licenses addressed here: https://www.apache.org/legal/resolved.html

@@ -45,25 +32,4 @@ jobs:
# license: https://applitools.com/legal/open-source-terms-of-use/
# pkg:npm/node-forge@1.3.1
# selecting BSD-3-Clause licensing terms for node-forge to ensure compatibility with Apache
allow-dependencies-licenses: pkg:npm/store2@2.14.2, pkg:npm/applitools/core, pkg:npm/applitools/core-base, pkg:npm/applitools/css-tree, pkg:npm/applitools/ec-client, pkg:npm/applitools/eg-socks5-proxy-server, pkg:npm/applitools/eyes, pkg:npm/applitools/eyes-cypress, pkg:npm/applitools/nml-client, pkg:npm/applitools/tunnel-client, pkg:npm/applitools/utils, pkg:npm/node-forge@1.3.1, pkg:npm/rgbcolor, pkg:npm/jszip@3.10.1

python-dependency-liccheck:
runs-on: ubuntu-22.04
steps:
- name: "Checkout Repository"
uses: actions/checkout@v4

- name: Setup Python
uses: ./.github/actions/setup-backend/
with:
requirements-type: base

- name: "Set up liccheck"
run: |
uv pip install --system liccheck
- name: "Run liccheck"
run: |
# run the checks
liccheck -R output.txt
# Print the report
cat output.txt
allow-dependencies-licenses: pkg:npm/store2@2.14.2, pkg:npm/applitools/core, pkg:npm/applitools/core-base, pkg:npm/applitools/css-tree, pkg:npm/applitools/ec-client, pkg:npm/applitools/eg-socks5-proxy-server, pkg:npm/applitools/eyes, pkg:npm/applitools/eyes-cypress, pkg:npm/applitools/nml-client, pkg:npm/applitools/tunnel-client, pkg:npm/applitools/utils, pkg:npm/node-forge@1.3.1, pkg:npm/rgbcolor
.github/workflows/docker.yml (79 changed lines)

@@ -14,22 +14,21 @@ concurrency:
cancel-in-progress: true

jobs:

setup_matrix:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
outputs:
matrix_config: ${{ steps.set_matrix.outputs.matrix_config }}
steps:
- id: set_matrix
run: |
MATRIX_CONFIG=$(if [ "${{ github.event_name }}" == "pull_request" ]; then echo '["dev", "lean"]'; else echo '["dev", "lean", "py310", "websocket", "dockerize", "py311"]'; fi)
MATRIX_CONFIG=$(if [ "${{ github.event_name }}" == "pull_request" ]; then echo '["dev"]'; else echo '["dev", "lean", "py310", "websocket", "dockerize", "py311"]'; fi)
echo "matrix_config=${MATRIX_CONFIG}" >> $GITHUB_OUTPUT
echo $GITHUB_OUTPUT

docker-build:
name: docker-build
needs: setup_matrix
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
strategy:
matrix:
build_preset: ${{fromJson(needs.setup_matrix.outputs.matrix_config)}}

@@ -37,7 +36,6 @@ jobs:
env:
DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}
DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
IMAGE_TAG: apache/superset:GHA-${{ matrix.build_preset }}-${{ github.run_id }}

steps:

@@ -52,13 +50,21 @@ jobs:
with:
token: ${{ secrets.GITHUB_TOKEN }}

- name: Setup Docker Environment
- name: Set up QEMU
if: steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker
uses: ./.github/actions/setup-docker
uses: docker/setup-qemu-action@v3

- name: Set up Docker Buildx
if: steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker
uses: docker/setup-buildx-action@v3

- name: Try to login to DockerHub
if: steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker
continue-on-error: true
uses: docker/login-action@v3
with:
dockerhub-user: ${{ secrets.DOCKERHUB_USER }}
dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
build: "true"
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}

- name: Setup supersetbot
if: steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker

@@ -73,65 +79,12 @@ jobs:
# Single platform builds in pull_request context to speed things up
if [ "${{ github.event_name }}" = "push" ]; then
PLATFORM_ARG="--platform linux/arm64 --platform linux/amd64"
# can only --load images in single-platform builds
PUSH_OR_LOAD="--push"
elif [ "${{ github.event_name }}" = "pull_request" ]; then
PLATFORM_ARG="--platform linux/amd64"
PUSH_OR_LOAD="--load"
fi

supersetbot docker \
$PUSH_OR_LOAD \
--preset ${{ matrix.build_preset }} \
--context "$EVENT" \
--context-ref "$RELEASE" $FORCE_LATEST \
--extra-flags "--build-arg INCLUDE_CHROMIUM=false --tag $IMAGE_TAG" \
$PLATFORM_ARG

# in the context of push (using multi-platform build), we need to pull the image locally
- name: Docker pull
if: github.event_name == 'push' && (steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker)
run: docker pull $IMAGE_TAG

- name: Print docker stats
if: steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker
run: |
echo "SHA: ${{ github.sha }}"
echo "IMAGE: $IMAGE_TAG"
docker images $IMAGE_TAG
docker history $IMAGE_TAG

- name: docker-compose sanity check
if: (steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker) && (matrix.build_preset == 'dev' || matrix.build_preset == 'lean')
shell: bash
run: |
export SUPERSET_BUILD_TARGET=${{ matrix.build_preset }}
# This should reuse the CACHED image built in the previous steps
docker compose build superset-init --build-arg DEV_MODE=false --build-arg INCLUDE_CHROMIUM=false
docker compose up superset-init --exit-code-from superset-init

docker-compose-image-tag:
runs-on: ubuntu-24.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
persist-credentials: false
- name: Check for file changes
id: check
uses: ./.github/actions/change-detector/
with:
token: ${{ secrets.GITHUB_TOKEN }}
- name: Setup Docker Environment
if: steps.check.outputs.docker
uses: ./.github/actions/setup-docker
with:
dockerhub-user: ${{ secrets.DOCKERHUB_USER }}
dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
build: "false"
install-docker-compose: "true"
- name: docker-compose sanity check
if: steps.check.outputs.docker
shell: bash
run: |
docker compose -f docker-compose-image-tag.yml up superset-init --exit-code-from superset-init
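A note on the --push/--load split in the docker-build hunk: buildx can only --load a single-platform image into the local daemon, while a multi-arch build must be --push'ed to a registry and pulled back (hence the separate "Docker pull" step on push events). A hedged sketch of that selection outside of supersetbot (EVENT_NAME and the build command are illustrative):

#!/usr/bin/env bash
# Hypothetical standalone version of the flag selection in the hunk above.
EVENT_NAME="${EVENT_NAME:-pull_request}"   # illustrative stand-in for github.event_name
if [ "$EVENT_NAME" = "push" ]; then
  PLATFORM_ARG="--platform linux/amd64,linux/arm64"
  PUSH_OR_LOAD="--push"    # multi-arch manifests cannot be --load'ed locally
else
  PLATFORM_ARG="--platform linux/amd64"
  PUSH_OR_LOAD="--load"    # single-platform PR builds stay on the runner
fi
docker buildx build $PUSH_OR_LOAD $PLATFORM_ARG -t "$IMAGE_TAG" .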
.github/workflows/embedded-sdk-release.yml (6 changed lines)

@@ -8,7 +8,7 @@ on:

jobs:
config:
runs-on: ubuntu-24.04
runs-on: "ubuntu-22.04"
outputs:
has-secrets: ${{ steps.check.outputs.has-secrets }}
steps:

@@ -23,7 +23,7 @@ jobs:
build:
needs: config
if: needs.config.outputs.has-secrets
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
defaults:
run:
working-directory: superset-embedded-sdk

@@ -31,7 +31,7 @@ jobs:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version-file: './superset-embedded-sdk/.nvmrc'
node-version: "18"
registry-url: 'https://registry.npmjs.org'
- run: npm ci
- run: npm run ci:release
.github/workflows/embedded-sdk-test.yml (4 changed lines)

@@ -13,7 +13,7 @@ concurrency:

jobs:
embedded-sdk-test:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
defaults:
run:
working-directory: superset-embedded-sdk

@@ -21,7 +21,7 @@ jobs:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version-file: './superset-embedded-sdk/.nvmrc'
node-version: "18"
registry-url: 'https://registry.npmjs.org'
- run: npm ci
- run: npm test
.github/workflows/ephemeral-env-pr-close.yml (4 changed lines)

@@ -6,7 +6,7 @@ on:

jobs:
config:
runs-on: ubuntu-24.04
runs-on: "ubuntu-22.04"
outputs:
has-secrets: ${{ steps.check.outputs.has-secrets }}
steps:

@@ -22,7 +22,7 @@ jobs:
needs: config
if: needs.config.outputs.has-secrets
name: Cleanup ephemeral envs
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
permissions:
pull-requests: write
steps:
.github/workflows/ephemeral-env.yml (486 changed lines)

@@ -1,181 +1,132 @@
name: Ephemeral env workflow

# Example manual trigger:
# gh workflow run ephemeral-env.yml --ref fix_ephemerals --field label_name="testenv-up" --field issue_number=666

on:
pull_request_target:
types:
- labeled
workflow_dispatch:
inputs:
label_name:
description: 'Label name to simulate label-based /testenv trigger'
required: true
default: 'testenv-up'
issue_number:
description: 'Issue or PR number'
required: true
issue_comment:
types: [created]

jobs:
ephemeral-env-label:
config:
runs-on: "ubuntu-22.04"
if: github.event.issue.pull_request
outputs:
has-secrets: ${{ steps.check.outputs.has-secrets }}
steps:
- name: "Check for secrets"
id: check
shell: bash
run: |
if [ -n "${{ (secrets.AWS_ACCESS_KEY_ID != '' && secrets.AWS_SECRET_ACCESS_KEY != '') || '' }}" ]; then
echo "has-secrets=1" >> "$GITHUB_OUTPUT"
fi

ephemeral-env-comment:
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}-label
group: ${{ github.workflow }}-${{ github.event.issue.number || github.run_id }}-comment
cancel-in-progress: true
name: Evaluate ephemeral env label trigger
runs-on: ubuntu-24.04
needs: config
if: needs.config.outputs.has-secrets
name: Evaluate ephemeral env comment trigger (/testenv)
runs-on: ubuntu-22.04
permissions:
pull-requests: write
outputs:
slash-command: ${{ steps.eval-label.outputs.result }}
slash-command: ${{ steps.eval-body.outputs.result }}
feature-flags: ${{ steps.eval-feature-flags.outputs.result }}
sha: ${{ steps.get-sha.outputs.sha }}
env:
DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}
DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}

steps:
- name: Check for the "testenv-up" label
id: eval-label
run: |
if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
LABEL_NAME="${{ github.event.inputs.label_name }}"
else
LABEL_NAME="${{ github.event.label.name }}"
fi
- name: Debug
run: |
echo "Comment on PR #${{ github.event.issue.number }} by ${{ github.event.issue.user.login }}, ${{ github.event.comment.author_association }}"

echo "Evaluating label: $LABEL_NAME"
- name: Eval comment body for /testenv slash command
uses: actions/github-script@v7
id: eval-body
with:
result-encoding: string
script: |
const pattern = /^\/testenv (up|down)/
const result = pattern.exec(context.payload.comment.body)
return result === null ? 'noop' : result[1]

if [[ "$LABEL_NAME" == "testenv-up" ]]; then
echo "result=up" >> $GITHUB_OUTPUT
else
echo "result=noop" >> $GITHUB_OUTPUT
fi
- name: Eval comment body for feature flags
uses: actions/github-script@v7
id: eval-feature-flags
with:
script: |
const pattern = /FEATURE_(\w+)=(\w+)/g;
let results = [];
[...context.payload.comment.body.matchAll(pattern)].forEach(match => {
const config = {
name: `SUPERSET_FEATURE_${match[1]}`,
value: match[2],
};
results.push(config);
});
return results;

- name: Get event SHA
id: get-sha
if: steps.eval-label.outputs.result == 'up'
uses: actions/github-script@v7
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
let prSha;

// If event is workflow_dispatch, use the issue_number from inputs
if (context.eventName === "workflow_dispatch") {
const prNumber = "${{ github.event.inputs.issue_number }}";
if (!prNumber) {
console.log("No PR number found.");
return;
}

// Fetch PR details using the provided issue_number
const { data: pr } = await github.rest.pulls.get({
owner: context.repo.owner,
repo: context.repo.repo,
pull_number: prNumber
});

prSha = pr.head.sha;
} else {
// If it's not workflow_dispatch, use the PR head sha from the event
prSha = context.payload.pull_request.head.sha;
}

console.log(`PR SHA: ${prSha}`);
core.setOutput("sha", prSha);

- name: Looking for feature flags in PR description
uses: actions/github-script@v7
id: eval-feature-flags
if: steps.eval-label.outputs.result == 'up'
with:
script: |
const description = context.payload.pull_request
? context.payload.pull_request.body || ''
: context.payload.inputs.pr_description || '';

const pattern = /FEATURE_(\w+)=(\w+)/g;
let results = [];
[...description.matchAll(pattern)].forEach(match => {
const config = {
name: `SUPERSET_FEATURE_${match[1]}`,
value: match[2],
};
results.push(config);
});

return results;
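Both feature-flag steps above rely on the same global-regex extraction, just applied to different bodies (comment text versus PR description). A hedged sketch of what it produces, done here in shell for illustration (the sample description text is hypothetical):

#!/usr/bin/env bash
# Extract FEATURE_<NAME>=<value> pairs and emit the {name, value} objects the
# workflow later appends to the ECS task definition environment.
description='Please test with FEATURE_DASHBOARD_RBAC=true FEATURE_EMBEDDED_SUPERSET=false'
echo "$description" \
  | grep -oE 'FEATURE_[A-Za-z0-9_]+=[A-Za-z0-9_]+' \
  | sed -E 's/^FEATURE_([A-Za-z0-9_]+)=([A-Za-z0-9_]+)$/{"name":"SUPERSET_FEATURE_\1","value":"\2"}/'
# {"name":"SUPERSET_FEATURE_DASHBOARD_RBAC","value":"true"}
# {"name":"SUPERSET_FEATURE_EMBEDDED_SUPERSET","value":"false"}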
- name: Reply with confirmation comment
uses: actions/github-script@v7
if: steps.eval-label.outputs.result == 'up'
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
const action = '${{ steps.eval-label.outputs.result }}';
const user = context.actor;
const runId = context.runId;
const workflowUrl = `${context.serverUrl}/${context.repo.owner}/${context.repo.repo}/actions/runs/${runId}`;

const issueNumber = context.payload.pull_request
? context.payload.pull_request.number
: context.payload.inputs.issue_number;

if (!issueNumber) {
throw new Error("Issue number is not available.");
}

const body = `@${user} Processing your ephemeral environment request [here](${workflowUrl}).` +
` Action: **${action}**.` +
` More information on [how to use or configure ephemeral environments]` +
`(https://superset.apache.org/docs/contributing/howtos/#github-ephemeral-environments)`;


await github.rest.issues.createComment({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: issueNumber,
body,
});
- name: Limit to committers
if: >
steps.eval-body.outputs.result != 'noop' &&
github.event.comment.author_association != 'MEMBER' &&
github.event.comment.author_association != 'OWNER'
uses: actions/github-script@v7
with:
github-token: ${{github.token}}
script: |
const errMsg = '@${{ github.event.comment.user.login }} Ephemeral environment creation is currently limited to committers.'
github.rest.issues.createComment({
issue_number: ${{ github.event.issue.number }},
owner: context.repo.owner,
repo: context.repo.repo,
body: errMsg
})
core.setFailed(errMsg)

ephemeral-docker-build:
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}-build
group: ${{ github.workflow }}-${{ github.event.issue.number || github.run_id }}-build
cancel-in-progress: true
needs: ephemeral-env-label
if: needs.ephemeral-env-label.outputs.slash-command == 'up'
needs: ephemeral-env-comment
name: ephemeral-docker-build
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ needs.ephemeral-env-label.outputs.sha }} : ${{steps.get-sha.outputs.sha}} )"
- name: Get Info from comment
uses: actions/github-script@v7
id: get-pr-info
with:
script: |
const request = {
owner: context.repo.owner,
repo: context.repo.repo,
pull_number: ${{ github.event.issue.number }},
}
core.info(`Getting PR #${request.pull_number} from ${request.owner}/${request.repo}`)
const pr = await github.rest.pulls.get(request);
return pr.data;

- name: Debug
id: get-sha
run: |
echo "sha=${{ fromJSON(steps.get-pr-info.outputs.result).head.sha }}" >> $GITHUB_OUTPUT

- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} : ${{steps.get-sha.outputs.sha}} )"
uses: actions/checkout@v4
with:
ref: ${{ needs.ephemeral-env-label.outputs.sha }}
ref: ${{ steps.get-sha.outputs.sha }}
persist-credentials: false

- name: Setup Docker Environment
uses: ./.github/actions/setup-docker
with:
dockerhub-user: ${{ secrets.DOCKERHUB_USER }}
dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
build: "true"
install-docker-compose: "false"
- name: Set up QEMU
uses: docker/setup-qemu-action@v3

- name: Setup supersetbot
uses: ./.github/actions/setup-supersetbot/
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3

- name: Build ephemeral env image
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
supersetbot docker \
--push \
--load \
--preset ci \
--platform linux/amd64 \
--context-ref "$RELEASE" \
--extra-flags "--build-arg INCLUDE_CHROMIUM=false"
./scripts/build_docker.py \
"ci" \
"pull_request" \
--build_context_ref ${{ github.event.issue.number }}

- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v4

@@ -193,141 +144,140 @@ jobs:
env:
ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }}
ECR_REPOSITORY: superset-ci
IMAGE_TAG: apache/superset:${{ needs.ephemeral-env-label.outputs.sha }}-ci
PR_NUMBER: ${{ github.event.inputs.issue_number || github.event.pull_request.number }}
IMAGE_TAG: apache/superset:${{ steps.get-sha.outputs.sha }}-ci
run: |
docker tag $IMAGE_TAG $ECR_REGISTRY/$ECR_REPOSITORY:pr-$PR_NUMBER-ci
docker tag $IMAGE_TAG $ECR_REGISTRY/$ECR_REPOSITORY:pr-${{ github.event.issue.number }}-ci
docker push -a $ECR_REGISTRY/$ECR_REPOSITORY

ephemeral-env-up:
needs: [ephemeral-env-label, ephemeral-docker-build]
if: needs.ephemeral-env-label.outputs.slash-command == 'up'
needs: [ephemeral-env-comment, ephemeral-docker-build]
if: needs.ephemeral-env-comment.outputs.slash-command == 'up'
name: Spin up an ephemeral environment
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
permissions:
contents: read
pull-requests: write

steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: actions/checkout@v4
with:
persist-credentials: false

- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v4
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: us-west-2
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v4
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: us-west-2

- name: Login to Amazon ECR
id: login-ecr
uses: aws-actions/amazon-ecr-login@v2
- name: Login to Amazon ECR
id: login-ecr
uses: aws-actions/amazon-ecr-login@v2

- name: Check target image exists in ECR
id: check-image
continue-on-error: true
env:
PR_NUMBER: ${{ github.event.inputs.issue_number || github.event.pull_request.number }}
run: |
aws ecr describe-images \
--registry-id $(echo "${{ steps.login-ecr.outputs.registry }}" | grep -Eo "^[0-9]+") \
--repository-name superset-ci \
--image-ids imageTag=pr-$PR_NUMBER-ci
- name: Check target image exists in ECR
id: check-image
continue-on-error: true
run: |
aws ecr describe-images \
--registry-id $(echo "${{ steps.login-ecr.outputs.registry }}" | grep -Eo "^[0-9]+") \
--repository-name superset-ci \
--image-ids imageTag=pr-${{ github.event.issue.number }}-ci

- name: Fail on missing container image
if: steps.check-image.outcome == 'failure'
uses: actions/github-script@v7
with:
github-token: ${{ github.token }}
script: |
const errMsg = '@${{ github.event.comment.user.login }} Container image not yet published for this PR. Please try again when build is complete.';
github.rest.issues.createComment({
issue_number: ${{ github.event.inputs.issue_number || github.event.pull_request.number }},
owner: context.repo.owner,
repo: context.repo.repo,
body: errMsg
});
core.setFailed(errMsg);
- name: Fail on missing container image
if: steps.check-image.outcome == 'failure'
uses: actions/github-script@v7
with:
github-token: ${{github.token}}
script: |
const errMsg = '@${{ github.event.comment.user.login }} Container image not yet published for this PR. Please try again when build is complete.'
github.rest.issues.createComment({
issue_number: ${{ github.event.issue.number }},
owner: context.repo.owner,
repo: context.repo.repo,
body: errMsg
})
core.setFailed(errMsg)

- name: Fill in the new image ID in the Amazon ECS task definition
id: task-def
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: .github/workflows/ecs-task-definition.json
container-name: superset-ci
image: ${{ steps.login-ecr.outputs.registry }}/superset-ci:pr-${{ github.event.inputs.issue_number || github.event.pull_request.number }}-ci
- name: Fill in the new image ID in the Amazon ECS task definition
id: task-def
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: .github/workflows/ecs-task-definition.json
container-name: superset-ci
image: ${{ steps.login-ecr.outputs.registry }}/superset-ci:pr-${{ github.event.issue.number }}-ci

- name: Update env vars in the Amazon ECS task definition
run: |
cat <<< "$(jq '.containerDefinitions[0].environment += ${{ needs.ephemeral-env-label.outputs.feature-flags }}' < ${{ steps.task-def.outputs.task-definition }})" > ${{ steps.task-def.outputs.task-definition }}
- name: Update env vars in the Amazon ECS task definition
run: |
cat <<< "$(jq '.containerDefinitions[0].environment += ${{ needs.ephemeral-env-comment.outputs.feature-flags }}' < ${{ steps.task-def.outputs.task-definition }})" > ${{ steps.task-def.outputs.task-definition }}
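The jq '.containerDefinitions[0].environment += ...' step above splices the feature-flag objects collected earlier into the rendered task definition. A hedged standalone equivalent (the file name and flag values are illustrative):

#!/usr/bin/env bash
# Append feature-flag env vars to the first container of an ECS task definition.
FLAGS='[{"name":"SUPERSET_FEATURE_DASHBOARD_RBAC","value":"true"}]'   # sample payload
jq --argjson flags "$FLAGS" \
   '.containerDefinitions[0].environment += $flags' \
   task-definition.json > task-definition.patched.json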
- name: Describe ECS service
id: describe-services
run: |
echo "active=$(aws ecs describe-services --cluster superset-ci --services pr-${{ github.event.inputs.issue_number || github.event.pull_request.number }}-service | jq '.services[] | select(.status == "ACTIVE") | any')" >> $GITHUB_OUTPUT
- name: Create ECS service
id: create-service
if: steps.describe-services.outputs.active != 'true'
env:
ECR_SUBNETS: subnet-0e15a5034b4121710,subnet-0e8efef4a72224974
ECR_SECURITY_GROUP: sg-092ff3a6ae0574d91
PR_NUMBER: ${{ github.event.inputs.issue_number || github.event.pull_request.number }}
run: |
aws ecs create-service \
--cluster superset-ci \
--service-name pr-$PR_NUMBER-service \
--task-definition superset-ci \
--launch-type FARGATE \
--desired-count 1 \
--platform-version LATEST \
--network-configuration "awsvpcConfiguration={subnets=[$ECR_SUBNETS],securityGroups=[$ECR_SECURITY_GROUP],assignPublicIp=ENABLED}" \
--tags key=pr,value=$PR_NUMBER key=github_user,value=${{ github.actor }}
- name: Deploy Amazon ECS task definition
id: deploy-task
uses: aws-actions/amazon-ecs-deploy-task-definition@v2
with:
task-definition: ${{ steps.task-def.outputs.task-definition }}
service: pr-${{ github.event.inputs.issue_number || github.event.pull_request.number }}-service
cluster: superset-ci
wait-for-service-stability: true
wait-for-minutes: 10
- name: Describe ECS service
id: describe-services
run: |
echo "active=$(aws ecs describe-services --cluster superset-ci --services pr-${{ github.event.issue.number }}-service | jq '.services[] | select(.status == "ACTIVE") | any')" >> $GITHUB_OUTPUT
- name: Create ECS service
if: steps.describe-services.outputs.active != 'true'
id: create-service
env:
ECR_SUBNETS: subnet-0e15a5034b4121710,subnet-0e8efef4a72224974
ECR_SECURITY_GROUP: sg-092ff3a6ae0574d91
run: |
aws ecs create-service \
--cluster superset-ci \
--service-name pr-${{ github.event.issue.number }}-service \
--task-definition superset-ci \
--launch-type FARGATE \
--desired-count 1 \
--platform-version LATEST \
--network-configuration "awsvpcConfiguration={subnets=[$ECR_SUBNETS],securityGroups=[$ECR_SECURITY_GROUP],assignPublicIp=ENABLED}" \
--tags key=pr,value=${{ github.event.issue.number }} key=github_user,value=${{ github.actor }}

- name: List tasks
id: list-tasks
run: |
echo "task=$(aws ecs list-tasks --cluster superset-ci --service-name pr-${{ github.event.inputs.issue_number || github.event.pull_request.number }}-service | jq '.taskArns | first')" >> $GITHUB_OUTPUT
- name: Get network interface
id: get-eni
run: |
echo "eni=$(aws ecs describe-tasks --cluster superset-ci --tasks ${{ steps.list-tasks.outputs.task }} | jq '.tasks[0].attachments[0].details | map(select(.name=="networkInterfaceId"))[0].value')" >> $GITHUB_OUTPUT
- name: Get public IP
id: get-ip
run: |
echo "ip=$(aws ec2 describe-network-interfaces --network-interface-ids ${{ steps.get-eni.outputs.eni }} | jq -r '.NetworkInterfaces | first | .Association.PublicIp')" >> $GITHUB_OUTPUT
- name: Comment (success)
if: ${{ success() }}
uses: actions/github-script@v7
with:
github-token: ${{github.token}}
script: |
const issue_number = context.payload.inputs?.issue_number || context.issue.number;
github.rest.issues.createComment({
issue_number: issue_number,
owner: context.repo.owner,
repo: context.repo.repo,
body: `@${{ github.actor }} Ephemeral environment spinning up at http://${{ steps.get-ip.outputs.ip }}:8080. Credentials are 'admin'/'admin'. Please allow several minutes for bootstrapping and startup.`
});
- name: Comment (failure)
if: ${{ failure() }}
uses: actions/github-script@v7
with:
github-token: ${{github.token}}
script: |
const issue_number = context.payload.inputs?.issue_number || context.issue.number;
github.rest.issues.createComment({
issue_number: issue_number,
owner: context.repo.owner,
repo: context.repo.repo,
body: '@${{ github.event.inputs.user_login || github.event.comment.user.login }} Ephemeral environment creation failed. Please check the Actions logs for details.'
})
- name: Deploy Amazon ECS task definition
id: deploy-task
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.task-def.outputs.task-definition }}
service: pr-${{ github.event.issue.number }}-service
cluster: superset-ci
wait-for-service-stability: true
wait-for-minutes: 10

- name: List tasks
id: list-tasks
run: |
echo "task=$(aws ecs list-tasks --cluster superset-ci --service-name pr-${{ github.event.issue.number }}-service | jq '.taskArns | first')" >> $GITHUB_OUTPUT

- name: Get network interface
id: get-eni
run: |
echo "eni=$(aws ecs describe-tasks --cluster superset-ci --tasks ${{ steps.list-tasks.outputs.task }} | jq '.tasks | .[0] | .attachments | .[0] | .details | map(select(.name=="networkInterfaceId")) | .[0] | .value')" >> $GITHUB_OUTPUT

- name: Get public IP
id: get-ip
run: |
echo "ip=$(aws ec2 describe-network-interfaces --network-interface-ids ${{ steps.get-eni.outputs.eni }} | jq -r '.NetworkInterfaces | first | .Association.PublicIp')" >> $GITHUB_OUTPUT
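The two "Get network interface" variants in this hunk differ only in jq style: .tasks[0].attachments[0].details | map(select(.name=="networkInterfaceId"))[0].value is the contracted form of the long pipe chain and returns the same value. A quick check with a hypothetical payload:

# Both jq spellings extract the same ENI id; the input document here is a toy.
echo '{"tasks":[{"attachments":[{"details":[{"name":"networkInterfaceId","value":"eni-0123456789abcdef0"}]}]}]}' \
  | jq '.tasks[0].attachments[0].details | map(select(.name=="networkInterfaceId"))[0].value'
# "eni-0123456789abcdef0"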
- name: Comment (success)
if: ${{ success() }}
uses: actions/github-script@v7
with:
github-token: ${{github.token}}
script: |
github.rest.issues.createComment({
issue_number: ${{ github.event.issue.number }},
owner: context.repo.owner,
repo: context.repo.repo,
body: '@${{ github.event.comment.user.login }} Ephemeral environment spinning up at http://${{ steps.get-ip.outputs.ip }}:8080. Credentials are `admin`/`admin`. Please allow several minutes for bootstrapping and startup.'
})

- name: Comment (failure)
if: ${{ failure() }}
uses: actions/github-script@v7
with:
github-token: ${{github.token}}
script: |
github.rest.issues.createComment({
issue_number: ${{ github.event.issue.number }},
owner: context.repo.owner,
repo: context.repo.repo,
body: '@${{ github.event.comment.user.login }} Ephemeral environment creation failed. Please check the Actions logs for details.'
})
.github/workflows/generate-FOSSA-report.yml (4 changed lines)

@@ -8,7 +8,7 @@ on:

jobs:
config:
runs-on: ubuntu-24.04
runs-on: "ubuntu-22.04"
outputs:
has-secrets: ${{ steps.check.outputs.has-secrets }}
steps:

@@ -24,7 +24,7 @@ jobs:
needs: config
if: needs.config.outputs.has-secrets
name: Generate Report
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
(file header missing from this view; the hunks are from the GHA-validation workflow)

@@ -11,7 +11,7 @@ on:
jobs:

validate-all-ghas:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
steps:
- name: Checkout Repository
uses: actions/checkout@v4

@@ -19,7 +19,7 @@ jobs:
- name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version: '20'
node-version: '18'

- name: Install Dependencies
run: npm install -g @action-validator/core @action-validator/cli --save-dev
.github/workflows/issue_creation.yml (2 changed lines)

@@ -9,7 +9,7 @@ on:

jobs:
superbot-orglabel:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
permissions:
contents: read
pull-requests: write
.github/workflows/labeler.yml (2 changed lines)

@@ -7,7 +7,7 @@ jobs:
permissions:
contents: read
pull-requests: write
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
steps:
- uses: actions/labeler@v5
with:
.github/workflows/latest-release-tag.yml (2 changed lines)

@@ -6,7 +6,7 @@ on:
jobs:
latest-release:
name: Add/update tag to new release
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
permissions:
contents: write
.github/workflows/license-check.yml (2 changed lines)

@@ -12,7 +12,7 @@ concurrency:
jobs:
license_check:
name: License Check
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
.github/workflows/no-hold-label.yml (2 changed lines)

@@ -11,7 +11,7 @@ concurrency:

jobs:
check-hold-label:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
steps:
- name: Check for 'hold' label
uses: actions/github-script@v7
.github/workflows/pr-lint.yml (2 changed lines)

@@ -10,7 +10,7 @@ on:

jobs:
lint-check:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
permissions:
contents: read
pull-requests: write
43  .github/workflows/pre-commit.yml  vendored

@@ -15,10 +15,7 @@ concurrency:

jobs:
  pre-commit:
-    runs-on: ubuntu-24.04
-    strategy:
-      matrix:
-        python-version: ["current", "previous"]
+    runs-on: ubuntu-22.04
    steps:
      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
        uses: actions/checkout@v4
@@ -27,8 +24,6 @@ jobs:
          submodules: recursive
      - name: Setup Python
        uses: ./.github/actions/setup-backend/
-        with:
-          python-version: ${{ matrix.python-version }}
      - name: Enable brew and helm-docs
        # Add brew to the path - see https://github.com/actions/runner-images/issues/6283
        run: |
@@ -38,40 +33,10 @@ jobs:
          echo "HOMEBREW_CELLAR=$HOMEBREW_CELLAR" >>"${GITHUB_ENV}"
          echo "HOMEBREW_REPOSITORY=$HOMEBREW_REPOSITORY" >>"${GITHUB_ENV}"
          brew install norwoodj/tap/helm-docs
-      - name: Setup Node.js
-        uses: actions/setup-node@v4
-        with:
-          node-version: '20'
-
-      - name: Install Frontend Dependencies
-        run: |
-          cd superset-frontend
-          npm ci
-
-      - name: Install Docs Dependencies
-        run: |
-          cd docs
-          yarn install --immutable
-
      - name: pre-commit
        run: |
-          set +e # Don't exit immediately on failure
-          export SKIP=eslint-frontend,type-checking-frontend
-          pre-commit run --all-files
-          PRE_COMMIT_EXIT_CODE=$?
-          git diff --quiet --exit-code
-          GIT_DIFF_EXIT_CODE=$?
-          if [ "${PRE_COMMIT_EXIT_CODE}" -ne 0 ] || [ "${GIT_DIFF_EXIT_CODE}" -ne 0 ]; then
-            if [ "${PRE_COMMIT_EXIT_CODE}" -ne 0 ]; then
-              echo "❌ Pre-commit check failed (exit code: ${EXIT_CODE})."
-            else
-              echo "❌ Git working directory is dirty."
-              echo "📌 This likely means that pre-commit made changes that were not committed."
-              echo "🔍 Modified files:"
-              git diff --name-only
-            fi
-
-            echo "🚒 To prevent/address this CI issue, please install/use pre-commit locally."
-            echo "📖 More details here: https://superset.apache.org/docs/contributing/development#git-hooks"
+          if ! pre-commit run --all-files; then
+            git status
+            git diff
            exit 1
          fi
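The removed script above distinguishes "hooks failed" from "hooks rewrote files". A sketch of that exit-code aggregation pattern, reduced to its essentials and taken from the left side of this hunk (the step layout is illustrative):

    - name: pre-commit
      run: |
        set +e                        # keep going so both checks can report
        pre-commit run --all-files
        PRE_COMMIT_EXIT_CODE=$?
        git diff --quiet --exit-code  # non-zero when hooks modified files
        GIT_DIFF_EXIT_CODE=$?
        if [ "${PRE_COMMIT_EXIT_CODE}" -ne 0 ] || [ "${GIT_DIFF_EXIT_CODE}" -ne 0 ]; then
          exit 1
        fi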
2  .github/workflows/prefer-typescript.yml  vendored

@@ -21,7 +21,7 @@ jobs:
  prefer_typescript:
    if: github.ref == 'ref/heads/master' && github.event_name == 'pull_request'
    name: Prefer TypeScript
-    runs-on: ubuntu-24.04
+    runs-on: ubuntu-22.04
    permissions:
      contents: read
      pull-requests: write
14  .github/workflows/release.yml  vendored

@@ -8,7 +8,7 @@ on:

jobs:
  config:
-    runs-on: ubuntu-24.04
+    runs-on: "ubuntu-22.04"
    outputs:
      has-secrets: ${{ steps.check.outputs.has-secrets }}
    steps:
@@ -24,7 +24,13 @@ jobs:
    needs: config
    if: needs.config.outputs.has-secrets
    name: Bump version and publish package(s)
-    runs-on: ubuntu-24.04
+    runs-on: ubuntu-22.04
+
+    strategy:
+      matrix:
+        node-version: [18]
+
    steps:
      - uses: actions/checkout@v4
        with:
@@ -40,11 +46,11 @@ jobs:
          git fetch --prune --unshallow
          git tag -d `git tag | grep -E '^trigger-'`

-      - name: Install Node.js
+      - name: Use Node.js ${{ matrix.node-version }}
        if: env.HAS_TAGS
        uses: actions/setup-node@v4
        with:
-          node-version-file: './superset-frontend/.nvmrc'
+          node-version: ${{ matrix.node-version }}

      - name: Cache npm
        if: env.HAS_TAGS
@@ -6,7 +6,7 @@ on:

jobs:
  config:
-    runs-on: ubuntu-24.04
+    runs-on: "ubuntu-22.04"
    outputs:
      has-secrets: ${{ steps.check.outputs.has-secrets }}
    steps:
@@ -21,11 +21,12 @@ jobs:
  cypress-applitools:
    needs: config
    if: needs.config.outputs.has-secrets
-    runs-on: ubuntu-24.04
+    runs-on: ubuntu-22.04
    strategy:
      fail-fast: false
      matrix:
        browser: ["chrome"]
+        node: [18]
    env:
      SUPERSET_ENV: development
      SUPERSET_CONFIG: tests.integration_tests.superset_test_config
@@ -39,7 +40,7 @@ jobs:
      APPLITOOLS_BATCH_NAME: Superset Cypress
    services:
      postgres:
-        image: postgres:16-alpine
+        image: postgres:15-alpine
        env:
          POSTGRES_USER: superset
          POSTGRES_PASSWORD: superset
@@ -65,7 +66,7 @@ jobs:
      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
-          node-version-file: './superset-frontend/.nvmrc'
+          node-version: ${{ matrix.node }}
      - name: Install npm dependencies
        uses: ./.github/actions/cached-dependencies
        with:
@@ -12,7 +12,7 @@ env:

jobs:
  config:
-    runs-on: ubuntu-24.04
+    runs-on: "ubuntu-22.04"
    outputs:
      has-secrets: ${{ steps.check.outputs.has-secrets }}
    steps:
@@ -27,7 +27,10 @@ jobs:
  cron:
    needs: config
    if: needs.config.outputs.has-secrets
-    runs-on: ubuntu-24.04
+    runs-on: ubuntu-22.04
+    strategy:
+      matrix:
+        node: [18]
    steps:
      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
        uses: actions/checkout@v4
@@ -38,7 +41,7 @@ jobs:
      - name: Set up Node.js
        uses: actions/setup-node@v4
        with:
-          node-version-file: './superset-frontend/.nvmrc'
+          node-version: ${{ matrix.node }}
      - name: Install eyes-storybook dependencies
        uses: ./.github/actions/cached-dependencies
        with:
4  .github/workflows/superset-cli.yml  vendored

@@ -15,7 +15,7 @@ concurrency:

jobs:
  test-load-examples:
-    runs-on: ubuntu-24.04
+    runs-on: ubuntu-22.04
    env:
      PYTHONPATH: ${{ github.workspace }}
      SUPERSET_CONFIG: tests.integration_tests.superset_test_config
@@ -23,7 +23,7 @@ jobs:
      SUPERSET__SQLALCHEMY_DATABASE_URI: postgresql+psycopg2://superset:superset@127.0.0.1:15432/superset
    services:
      postgres:
-        image: postgres:16-alpine
+        image: postgres:15-alpine
        env:
          POSTGRES_USER: superset
          POSTGRES_PASSWORD: superset
8  .github/workflows/superset-docs-deploy.yml  vendored

@@ -12,7 +12,7 @@ on:

jobs:
  config:
-    runs-on: ubuntu-24.04
+    runs-on: "ubuntu-22.04"
    outputs:
      has-secrets: ${{ steps.check.outputs.has-secrets }}
    steps:
@@ -28,17 +28,17 @@ jobs:
    needs: config
    if: needs.config.outputs.has-secrets
    name: Build & Deploy
-    runs-on: ubuntu-24.04
+    runs-on: "ubuntu-22.04"
    steps:
      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
        uses: actions/checkout@v4
        with:
          persist-credentials: false
          submodules: recursive
-      - name: Set up Node.js
+      - name: Set up Node.js 20
        uses: actions/setup-node@v4
        with:
-          node-version-file: './docs/.nvmrc'
+          node-version: '20'
      - name: Setup Python
        uses: ./.github/actions/setup-backend/
      - uses: actions/setup-java@v4
40  .github/workflows/superset-docs-verify.yml  vendored

@@ -4,7 +4,6 @@ on:
  pull_request:
    paths:
      - "docs/**"
-      - ".github/workflows/superset-docs-verify.yml"
    types: [synchronize, opened, reopened, ready_for_review]

# cancel previous workflow jobs for PRs
@@ -14,43 +13,18 @@ concurrency:

jobs:
  linkinator:
-    # See docs here: https://github.com/marketplace/actions/linkinator
    name: Link Checking
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
-      # Do not bump this linkinator-action version without opening
-      # an ASF Infra ticket to allow the new verison first!
-      - uses: JustinBeckwith/linkinator-action@v1.11.0
-        continue-on-error: true # This will make the job advisory (non-blocking, no red X)
+      - uses: JustinBeckwith/linkinator-action@v1.10.4
        with:
-          paths: "**/*.md, **/*.mdx, !superset-frontend/CHANGELOG.md"
-          linksToSkip: >-
-            ^https://github.com/apache/(superset|incubator-superset)/(pull|issue)/\d+,
-            http://localhost:8088/,
-            http://127.0.0.1:3000/,
-            http://localhost:9001/,
-            https://charts.bitnami.com/bitnami,
-            https://www.li.me/,
-            https://www.fanatics.com/,
-            https://tails.com/gb/,
-            https://www.techaudit.info/,
-            https://avetilearning.com/,
-            https://www.udemy.com/,
-            https://trustmedis.com/,
-            http://theiconic.com.au/,
-            https://dev.mysql.com/doc/refman/5.7/en/innodb-limits.html,
-            ^https://img\.shields\.io/.*,
-            https://vkusvill.ru/
-            https://www.linkedin.com/in/mark-thomas-b16751158/
-            https://theiconic.com.au/
-            https://wattbewerb.de/
-            https://timbr.ai/
-            https://opensource.org/license/apache-2-0
-            https://www.plaidcloud.com/
+          paths: "**/*.md, **/*.mdx"
+          linksToSkip: '^https://github.com/apache/(superset|incubator-superset)/(pull|issue)/\d+, http://localhost:8088/, docker/.env-non-dev, http://127.0.0.1:3000/, http://localhost:9001/, https://charts.bitnami.com/bitnami, https://www.li.me/, https://www.fanatics.com/, https://tails.com/gb/, https://www.techaudit.info/, https://avetilearning.com/, https://www.udemy.com/, https://trustmedis.com/, http://theiconic.com.au/, https://dev.mysql.com/doc/refman/5.7/en/innodb-limits.html, https://img.shields.io/librariesio/release/npm/%40superset-ui%2Fembedded-sdk?style=flat, https://img.shields.io/librariesio/release/npm/%40superset-ui%2Fplugin-chart-pivot-table?style=flat, https://vkusvill.ru/'
        # verbosity: 'ERROR'
  build-deploy:
    name: Build & Deploy
-    runs-on: ubuntu-24.04
+    runs-on: ubuntu-22.04
    defaults:
      run:
        working-directory: docs
@@ -60,10 +34,10 @@ jobs:
        with:
          persist-credentials: false
          submodules: recursive
-      - name: Set up Node.js
+      - name: Set up Node.js 20
        uses: actions/setup-node@v4
        with:
-          node-version-file: './docs/.nvmrc'
+          node-version: '20'
      - name: yarn install
        run: |
          yarn install --check-cache
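The two `linksToSkip` forms above are semantically close but written differently. A short sketch of the YAML distinction, with illustrative URLs: the folded block scalar (`>-`) on the left joins its lines with single spaces into one string, which is why each entry there still needs a trailing comma, while the right side keeps the whole comma-separated list in one quoted scalar:

    with:
      linksToSkip: >-
        http://localhost:8088/,
        http://127.0.0.1:3000/
    # folds to the single string: "http://localhost:8088/, http://127.0.0.1:3000/"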
13  .github/workflows/superset-e2e.yml  vendored

@@ -28,7 +28,6 @@ concurrency:

jobs:
  cypress-matrix:
-    # Somehow one test flakes on 24.04 for unknown reasons, this is the only GHA left on 22.04
    runs-on: ubuntu-22.04
    permissions:
      contents: read
@@ -49,11 +48,10 @@ jobs:
      PYTHONPATH: ${{ github.workspace }}
      REDIS_PORT: 16379
      GITHUB_TOKEN: ${{ github.token }}
-      # use the dashboard feature when running manually OR merging to master
-      USE_DASHBOARD: ${{ github.event.inputs.use_dashboard == 'true'|| (github.ref == 'refs/heads/master' && 'true') || 'false' }}
+      USE_DASHBOARD: ${{ github.event.inputs.use_dashboard || (github.ref == 'refs/heads/master' && 'true') || 'false' }}
    services:
      postgres:
-        image: postgres:16-alpine
+        image: postgres:15-alpine
        env:
          POSTGRES_USER: superset
          POSTGRES_PASSWORD: superset
@@ -109,7 +107,7 @@ jobs:
        if: steps.check.outputs.python || steps.check.outputs.frontend
        uses: actions/setup-node@v4
        with:
-          node-version-file: './superset-frontend/.nvmrc'
+          node-version: "18"
      - name: Install npm dependencies
        if: steps.check.outputs.python || steps.check.outputs.frontend
        uses: ./.github/actions/cached-dependencies
@@ -133,12 +131,11 @@ jobs:
          PARALLEL_ID: ${{ matrix.parallel_id }}
          PARALLELISM: 6
          CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}
-          NODE_OPTIONS: "--max-old-space-size=4096"
        with:
          run: cypress-run-all ${{ env.USE_DASHBOARD }}
      - name: Upload Artifacts
        uses: actions/upload-artifact@v4
-        if: failure()
+        if: github.event_name == 'workflow_dispatch' && (steps.check.outputs.python || steps.check.outputs.frontend)
        with:
          path: ${{ github.workspace }}/superset-frontend/cypress-base/cypress/screenshots
-          name: cypress-artifact-${{ github.run_id }}-${{ github.job }}-${{ matrix.browser }}-${{ matrix.parallel_id }}
+          name: cypress-artifact-${{ github.run_id }}-${{ github.job }}
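A note on how the chained `||` in the `USE_DASHBOARD` expression above resolves, since both sides of the hunk rely on it. GitHub expressions return the first truthy operand, so (values illustrative):

    # inputs.use_dashboard truthy        -> that value wins on manual runs
    # github.ref == 'refs/heads/master'  -> the `&& 'true'` arm yields 'true' on master
    # otherwise                          -> the final fallback 'false' applies
    env:
      USE_DASHBOARD: ${{ github.event.inputs.use_dashboard || (github.ref == 'refs/heads/master' && 'true') || 'false' }}

The left side tightens the first operand to an explicit `== 'true'` comparison so that only the literal string 'true' enables the feature.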
201  .github/workflows/superset-frontend.yml  vendored

@@ -1,4 +1,4 @@
-name: "Frontend Build CI (unit tests, linting & sanity checks)"
+name: Frontend

on:
  push:
@@ -13,168 +13,73 @@ concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
  cancel-in-progress: true

-env:
-  TAG: apache/superset:GHA-${{ github.run_id }}
-
jobs:
  frontend-build:
-    runs-on: ubuntu-24.04
-    outputs:
-      should-run: ${{ steps.check.outputs.frontend }}
+    runs-on: ubuntu-22.04
    steps:
-      - name: Checkout Code
+      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
        uses: actions/checkout@v4
        with:
          persist-credentials: false
-
-      - name: Check for File Changes
+          submodules: recursive
+      - name: Check npm lock file version
+        run: ./scripts/ci_check_npm_lock_version.sh ./superset-frontend/package-lock.json
+      - name: Check for file changes
        id: check
        uses: ./.github/actions/change-detector/
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
-
-      - name: Build Docker Image
+      - name: Setup Node.js
        if: steps.check.outputs.frontend
-        shell: bash
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        run: |
-          docker buildx build \
-            -t $TAG \
-            --cache-from=type=registry,ref=apache/superset-cache:3.10-slim-bookworm \
-            --target superset-node-ci \
-            .
-
-      - name: Save Docker Image as Artifact
+        uses: actions/setup-node@v4
+        with:
+          node-version: "18"
+      - name: Install dependencies
        if: steps.check.outputs.frontend
-        run: |
-          docker save $TAG | gzip > docker-image.tar.gz
-
-      - name: Upload Docker Image Artifact
+        uses: ./.github/actions/cached-dependencies
+        with:
+          run: npm-install
+      - name: eslint
        if: steps.check.outputs.frontend
-        uses: actions/upload-artifact@v4
-        with:
-          name: docker-image
-          path: docker-image.tar.gz
-
-  sharded-jest-tests:
-    needs: frontend-build
-    if: needs.frontend-build.outputs.should-run == 'true'
-    strategy:
-      matrix:
-        shard: [1, 2, 3, 4, 5, 6, 7, 8]
-      fail-fast: false
-    runs-on: ubuntu-24.04
-    steps:
-      - name: Download Docker Image Artifact
-        uses: actions/download-artifact@v4
-        with:
-          name: docker-image
-
-      - name: Load Docker Image
-        run: docker load < docker-image.tar.gz
-
-      - name: npm run test with coverage
        working-directory: ./superset-frontend
        run: |
-          mkdir -p ${{ github.workspace }}/superset-frontend/coverage
-          docker run \
-            -v ${{ github.workspace }}/superset-frontend/coverage:/app/superset-frontend/coverage \
-            --rm $TAG \
-            bash -c \
-            "npm run test -- --coverage --shard=${{ matrix.shard }}/8 --coverageReporters=json-summary"
-
-      - name: Upload Coverage Artifact
-        uses: actions/upload-artifact@v4
-        with:
-          name: coverage-artifacts-${{ matrix.shard }}
-          path: superset-frontend/coverage
-
-  report-coverage:
-    needs: [sharded-jest-tests]
-    if: needs.frontend-build.outputs.should-run == 'true'
-    runs-on: ubuntu-24.04
-    steps:
-      - name: Download Coverage Artifacts
-        uses: actions/download-artifact@v4
-        with:
-          pattern: coverage-artifacts-*
-          path: coverage/
-
-      - name: Show Files
-        run: find coverage/
-
-      - name: Merge Code Coverage
-        run: npx nyc merge coverage/ merged-output/coverage-summary.json
-
-      - name: Upload Code Coverage
-        uses: codecov/codecov-action@v5
+          npm run eslint -- . --quiet
+      - name: tsc
+        if: steps.check.outputs.frontend
+        working-directory: ./superset-frontend
+        run: |
+          npm run type
+      - name: prettier
+        if: steps.check.outputs.frontend
+        working-directory: ./superset-frontend
+        run: |
+          npm run prettier-check
+      - name: Build plugins packages
+        if: steps.check.outputs.frontend
+        working-directory: ./superset-frontend
+        run: npm run plugins:build
+      - name: Build plugins Storybook
+        if: steps.check.outputs.frontend
+        working-directory: ./superset-frontend
+        run: npm run plugins:build-storybook
+      - name: superset-ui/core coverage
+        if: steps.check.outputs.frontend
+        working-directory: ./superset-frontend
+        run: |
+          npm run core:cover
+      - name: unit tests
+        if: steps.check.outputs.frontend
+        working-directory: ./superset-frontend
+        run: |
+          npm run test -- --coverage --silent
+      # todo: remove this step when fix generator as a project in root jest.config.js
+      - name: generator-superset unit tests
+        if: steps.check.outputs.frontend
+        working-directory: ./superset-frontend/packages/generator-superset
+        run: npm run test
+      - name: Upload code coverage
+        uses: codecov/codecov-action@v4
        with:
          flags: javascript
          token: ${{ secrets.CODECOV_TOKEN }}
          verbose: true
-          files: merged-output/coverage-summary.json
-          slug: apache/superset
-
-  core-cover:
-    needs: frontend-build
-    if: needs.frontend-build.outputs.should-run == 'true'
-    runs-on: ubuntu-24.04
-    steps:
-      - name: Download Docker Image Artifact
-        uses: actions/download-artifact@v4
-        with:
-          name: docker-image
-
-      - name: Load Docker Image
-        run: docker load < docker-image.tar.gz
-
-      - name: superset-ui/core coverage
-        run: |
-          docker run --rm $TAG bash -c \
-            "npm run core:cover"
-
-  lint-frontend:
-    needs: frontend-build
-    if: needs.frontend-build.outputs.should-run == 'true'
-    runs-on: ubuntu-24.04
-    steps:
-      - name: Download Docker Image Artifact
-        uses: actions/download-artifact@v4
-        with:
-          name: docker-image
-
-      - name: Load Docker Image
-        run: docker load < docker-image.tar.gz
-
-      - name: eslint
-        run: |
-          docker run --rm $TAG bash -c \
-            "npm i && npm run eslint -- . --quiet"
-
-      - name: tsc
-        run: |
-          docker run --rm $TAG bash -c \
-            "npm run type"
-
-  validate-frontend:
-    needs: frontend-build
-    if: needs.frontend-build.outputs.should-run == 'true'
-    runs-on: ubuntu-24.04
-    steps:
-      - name: Download Docker Image Artifact
-        uses: actions/download-artifact@v4
-        with:
-          name: docker-image
-
-      - name: Load Docker Image
-        run: docker load < docker-image.tar.gz
-
-      - name: Build Plugins Packages
-        run: |
-          docker run --rm $TAG bash -c \
-            "npm run plugins:build"
-
-      - name: Build Plugins Storybook
-        run: |
-          docker run --rm $TAG bash -c \
-            "npm run plugins:build-storybook"
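The left side of this file is built around a build-once, reuse-everywhere pattern: one job builds the CI image and saves it as an artifact, and every downstream job loads it instead of reinstalling dependencies. A minimal skeleton of that pattern, not part of this diff (image tag, job names, and test command are illustrative):

    jobs:
      build:
        runs-on: ubuntu-24.04
        steps:
          - uses: actions/checkout@v4
          # Build the image once, then persist it as a compressed artifact.
          - run: |
              docker build -t app:ci .
              docker save app:ci | gzip > image.tar.gz
          - uses: actions/upload-artifact@v4
            with:
              name: docker-image
              path: image.tar.gz
      test:
        needs: build
        runs-on: ubuntu-24.04
        steps:
          - uses: actions/download-artifact@v4
            with:
              name: docker-image
          # Load the prebuilt image and run the suite inside it.
          - run: |
              docker load < image.tar.gz
              docker run --rm app:ci npm test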
6  .github/workflows/superset-helm-lint.yml  vendored

@@ -1,4 +1,4 @@
-name: "Helm: lint and test charts"
+name: Lint and Test Charts

on:
  pull_request:
@@ -13,7 +13,7 @@ concurrency:

jobs:
  lint-test:
-    runs-on: ubuntu-24.04
+    runs-on: ubuntu-22.04
    steps:
      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
        uses: actions/checkout@v4
@@ -25,7 +25,7 @@ jobs:
      - name: Set up Helm
        uses: azure/setup-helm@v4
        with:
-          version: v3.16.4
+          version: v3.5.4

      - name: Setup Python
        uses: ./.github/actions/setup-backend/
90  .github/workflows/superset-helm-release.yml  vendored

@@ -1,8 +1,4 @@
-# This workflow automates the release process for Helm charts.
-# The workflow creates a new branch for the release and opens a pull request against the 'gh-pages' branch,
-# allowing the changes to be reviewed and merged manually.
-
-name: "Helm: release charts"
+name: Release Charts

on:
  push:
@@ -11,28 +7,18 @@ on:
      - "[0-9].[0-9]*"
    paths:
      - "helm/**"
-  workflow_dispatch:
-    inputs:
-      ref:
-        description: "The branch, tag, or commit SHA to check out"
-        required: false
-        default: "master"

jobs:
  release:
-    runs-on: ubuntu-24.04
+    runs-on: ubuntu-22.04
    permissions:
      contents: write
      pull-requests: write
-    env:
-      GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

    steps:
-      - name: Checkout code
+      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
        uses: actions/checkout@v4
        with:
-          ref: ${{ inputs.ref || github.ref_name }}
-          persist-credentials: true
+          persist-credentials: false
          submodules: recursive
          fetch-depth: 0
@@ -49,77 +35,11 @@ jobs:
      - name: Add bitnami repo dependency
        run: helm repo add bitnami https://charts.bitnami.com/bitnami

-      - name: Fetch/list all tags
-        run: |
-          # Debugging tags
-          git fetch --tags --force
-          git tag -d superset-helm-chart-0.13.4 || true
-          echo "DEBUG TAGS"
-          git show-ref --tags
-
-      - name: Create unique pages branch name
-        id: vars
-        run: echo "branch_name=helm-publish-${GITHUB_SHA:0:7}" >> $GITHUB_ENV
-
-      - name: Force recreate branch from gh-pages
-        run: |
-          # Ensure a clean working directory
-          git reset --hard
-          git clean -fdx
-          git checkout -b local_gha_temp
-          git submodule update
-
-          # Fetch the latest gh-pages branch
-          git fetch origin gh-pages
-
-          # Check out and reset the target branch based on gh-pages
-          git checkout -B ${{ env.branch_name }} origin/gh-pages
-
-          # Remove submodules from the branch
-          git submodule deinit -f --all
-
-          # Force push to the remote branch
-          git push origin ${{ env.branch_name }} --force
-
-          # Return to the original branch
-          git checkout local_gha_temp
-
-      - name: Fetch/list all tags
-        run: |
-          git submodule update
-          cat .github/actions/chart-releaser-action/action.yml
-
      - name: Run chart-releaser
-        uses: ./.github/actions/chart-releaser-action
+        uses: helm/chart-releaser-action@v1.6.0
        with:
+          version: v1.6.0
          charts_dir: helm
          mark_as_latest: false
-          pages_branch: ${{ env.branch_name }}
        env:
          CR_TOKEN: "${{ github.token }}"
          CR_RELEASE_NAME_TEMPLATE: "superset-helm-chart-{{ .Version }}"
-
-      - name: Open Pull Request
-        uses: actions/github-script@v7
-        with:
-          script: |
-            const branchName = '${{ env.branch_name }}';
-            const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/');
-
-            if (!branchName) {
-              throw new Error("Branch name is not defined.");
-            }
-
-            const pr = await github.rest.pulls.create({
-              owner,
-              repo,
-              title: `Helm chart release for ${branchName}`,
-              head: branchName,
-              base: "gh-pages", // Adjust if the target branch is different
-              body: `This PR releases Helm charts to the gh-pages branch.`,
-            });
-
-            core.info(`Pull request created: ${pr.data.html_url}`);
-        env:
-          BRANCH_NAME: ${{ env.branch_name }}
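For contrast with the custom branch-and-PR flow removed above, the right side of this hunk is close to the upstream action's minimal usage. A sketch, assuming only what the hunk itself shows; `CR_TOKEN` must be allowed to push tags and the pages branch:

    - name: Run chart-releaser
      uses: helm/chart-releaser-action@v1.6.0
      with:
        charts_dir: helm
      env:
        CR_TOKEN: "${{ github.token }}"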
@@ -15,7 +15,7 @@ concurrency:

jobs:
  test-mysql:
-    runs-on: ubuntu-24.04
+    runs-on: ubuntu-22.04
    env:
      PYTHONPATH: ${{ github.workspace }}
      SUPERSET_CONFIG: tests.integration_tests.superset_test_config
@@ -68,16 +68,16 @@ jobs:
        run: |
          ./scripts/python_tests.sh
      - name: Upload code coverage
-        uses: codecov/codecov-action@v5
+        uses: codecov/codecov-action@v4
        with:
          flags: python,mysql
          token: ${{ secrets.CODECOV_TOKEN }}
          verbose: true
  test-postgres:
-    runs-on: ubuntu-24.04
+    runs-on: ubuntu-22.04
    strategy:
      matrix:
-        python-version: ["current", "previous"]
+        python-version: ["current", "next", "previous"]
    env:
      PYTHONPATH: ${{ github.workspace }}
      SUPERSET_CONFIG: tests.integration_tests.superset_test_config
@@ -85,7 +85,7 @@ jobs:
      SUPERSET__SQLALCHEMY_DATABASE_URI: postgresql+psycopg2://superset:superset@127.0.0.1:15432/superset
    services:
      postgres:
-        image: postgres:16-alpine
+        image: postgres:15-alpine
        env:
          POSTGRES_USER: superset
          POSTGRES_PASSWORD: superset
@@ -129,14 +129,14 @@ jobs:
        run: |
          ./scripts/python_tests.sh
      - name: Upload code coverage
-        uses: codecov/codecov-action@v5
+        uses: codecov/codecov-action@v4
        with:
          flags: python,postgres
          token: ${{ secrets.CODECOV_TOKEN }}
          verbose: true

  test-sqlite:
-    runs-on: ubuntu-24.04
+    runs-on: ubuntu-22.04
    env:
      PYTHONPATH: ${{ github.workspace }}
      SUPERSET_CONFIG: tests.integration_tests.superset_test_config
@@ -181,7 +181,7 @@ jobs:
        run: |
          ./scripts/python_tests.sh
      - name: Upload code coverage
-        uses: codecov/codecov-action@v5
+        uses: codecov/codecov-action@v4
        with:
          flags: python,sqlite
          token: ${{ secrets.CODECOV_TOKEN }}
53  .github/workflows/superset-python-misc.yml  vendored  Normal file

@@ -0,0 +1,53 @@
# Python Misc unit tests
name: Python Misc

on:
  push:
    branches:
      - "master"
      - "[0-9].[0-9]*"
  pull_request:
    types: [synchronize, opened, reopened, ready_for_review]

# cancel previous workflow jobs for PRs
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
  cancel-in-progress: true

jobs:
  python-lint:
    runs-on: ubuntu-22.04
    steps:
      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
        uses: actions/checkout@v4
        with:
          persist-credentials: false
          submodules: recursive
      - name: Check for file changes
        id: check
        uses: ./.github/actions/change-detector/
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
      - name: Setup Python
        uses: ./.github/actions/setup-backend/
        if: steps.check.outputs.python

  babel-extract:
    runs-on: ubuntu-22.04
    steps:
      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
        uses: actions/checkout@v4
        with:
          persist-credentials: false
          submodules: recursive
      - name: Check for file changes
        id: check
        uses: ./.github/actions/change-detector/
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
      - name: Setup Python
        if: steps.check.outputs.python
        uses: ./.github/actions/setup-backend/
      - name: Test babel extraction
        if: steps.check.outputs.python
        run: scripts/translations/babel_update.sh
@@ -16,7 +16,7 @@ concurrency:

jobs:
  test-postgres-presto:
-    runs-on: ubuntu-24.04
+    runs-on: ubuntu-22.04
    env:
      PYTHONPATH: ${{ github.workspace }}
      SUPERSET_CONFIG: tests.integration_tests.superset_test_config
@@ -25,7 +25,7 @@ jobs:
      SUPERSET__SQLALCHEMY_EXAMPLES_URI: presto://localhost:15433/memory/default
    services:
      postgres:
-        image: postgres:16-alpine
+        image: postgres:15-alpine
        env:
          POSTGRES_USER: superset
          POSTGRES_PASSWORD: superset
@@ -77,14 +77,14 @@ jobs:
        run: |
          ./scripts/python_tests.sh -m 'chart_data_flow or sql_json_flow'
      - name: Upload code coverage
-        uses: codecov/codecov-action@v5
+        uses: codecov/codecov-action@v4
        with:
          flags: python,presto
          token: ${{ secrets.CODECOV_TOKEN }}
          verbose: true

  test-postgres-hive:
-    runs-on: ubuntu-24.04
+    runs-on: ubuntu-22.04
    env:
      PYTHONPATH: ${{ github.workspace }}
      SUPERSET_CONFIG: tests.integration_tests.superset_test_config
@@ -94,7 +94,7 @@ jobs:
      UPLOAD_FOLDER: /tmp/.superset/uploads/
    services:
      postgres:
-        image: postgres:16-alpine
+        image: postgres:15-alpine
        env:
          POSTGRES_USER: superset
          POSTGRES_PASSWORD: superset
@@ -142,10 +142,9 @@ jobs:
      - name: Python unit tests (PostgreSQL)
        if: steps.check.outputs.python
        run: |
-          pip install -e .[hive]
          ./scripts/python_tests.sh -m 'chart_data_flow or sql_json_flow'
      - name: Upload code coverage
-        uses: codecov/codecov-action@v5
+        uses: codecov/codecov-action@v4
        with:
          flags: python,hive
          token: ${{ secrets.CODECOV_TOKEN }}
@@ -16,10 +16,10 @@ concurrency:

jobs:
  unit-tests:
-    runs-on: ubuntu-24.04
+    runs-on: ubuntu-22.04
    strategy:
      matrix:
-        python-version: ["previous", "current"]
+        python-version: ["current", "next"]
    env:
      PYTHONPATH: ${{ github.workspace }}
    steps:
@@ -46,7 +46,7 @@ jobs:
        run: |
          pytest --durations-min=0.5 --cov-report= --cov=superset ./tests/common ./tests/unit_tests --cache-clear
      - name: Upload code coverage
-        uses: codecov/codecov-action@v5
+        uses: codecov/codecov-action@v4
        with:
          flags: python,unit
          token: ${{ secrets.CODECOV_TOKEN }}
6  .github/workflows/superset-translations.yml  vendored

@@ -15,7 +15,7 @@ concurrency:

jobs:
  frontend-check-translations:
-    runs-on: ubuntu-24.04
+    runs-on: ubuntu-22.04
    steps:
      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
        uses: actions/checkout@v4
@@ -33,7 +33,7 @@ jobs:
        if: steps.check.outputs.frontend
        uses: actions/setup-node@v4
        with:
-          node-version-file: './superset-frontend/.nvmrc'
+          node-version: '18'
      - name: Install dependencies
        if: steps.check.outputs.frontend
        uses: ./.github/actions/cached-dependencies
@@ -46,7 +46,7 @@ jobs:
          npm run build-translation

  babel-extract:
-    runs-on: ubuntu-24.04
+    runs-on: ubuntu-22.04
    steps:
      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
        uses: actions/checkout@v4
2  .github/workflows/superset-websocket.yml  vendored

@@ -18,7 +18,7 @@ concurrency:

jobs:
  app-checks:
-    runs-on: ubuntu-24.04
+    runs-on: ubuntu-22.04
    steps:
      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
        uses: actions/checkout@v4
2  .github/workflows/supersetbot.yml  vendored

@@ -15,7 +15,7 @@ on:

jobs:
  supersetbot:
-    runs-on: ubuntu-24.04
+    runs-on: ubuntu-22.04
    if: >
      github.event_name == 'workflow_dispatch' ||
      (github.event_name == 'issue_comment' && contains(github.event.comment.body, '@supersetbot'))
58  .github/workflows/tag-release.yml  vendored

@@ -23,7 +23,7 @@ on:
      - 'false'
jobs:
  config:
-    runs-on: ubuntu-24.04
+    runs-on: "ubuntu-22.04"
    outputs:
      has-secrets: ${{ steps.check.outputs.has-secrets }}
    steps:
@@ -39,34 +39,35 @@ jobs:
    needs: config
    if: needs.config.outputs.has-secrets
    name: docker-release
-    runs-on: ubuntu-24.04
+    runs-on: ubuntu-22.04
    strategy:
      matrix:
        build_preset: ["dev", "lean", "py310", "websocket", "dockerize", "py311"]
      fail-fast: false
    steps:
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v3
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+
      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
        uses: actions/checkout@v4
        with:
          persist-credentials: false
-          tags: true
          fetch-depth: 0

-      - name: Setup Docker Environment
-        uses: ./.github/actions/setup-docker
-        with:
-          dockerhub-user: ${{ secrets.DOCKERHUB_USER }}
-          dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
-          install-docker-compose: "false"
-          build: "true"
-
      - name: Use Node.js 20
        uses: actions/setup-node@v4
        with:
          node-version: 20

      - name: Setup supersetbot
        uses: ./.github/actions/setup-supersetbot/

+      - name: Try to login to DockerHub
+        continue-on-error: true
+        uses: docker/login-action@v3
+        with:
+          username: ${{ secrets.DOCKERHUB_USER }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Execute custom Node.js script
        env:
          DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}
@@ -87,45 +88,22 @@ jobs:
          fi

          supersetbot docker \
            --push \
            --preset ${{ matrix.build_preset }} \
            --context "$EVENT" \
            --context-ref "$RELEASE" $FORCE_LATEST \
            --platform "linux/arm64" \
            --platform "linux/amd64"

+          # Returning to master to support closing setup-supersetbot
+          git checkout master

-  update-prs-with-release-info:
-    needs: config
-    if: needs.config.outputs.has-secrets
-    runs-on: ubuntu-24.04
-    permissions:
-      contents: read
-      pull-requests: write
-    steps:
-
-      # Going back on original branch to allow "post" GHA operations
-      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-
-      - name: Use Node.js 20
-        uses: actions/setup-node@v4
-        with:
-          node-version: 20
-
-      - name: Setup supersetbot
-        uses: ./.github/actions/setup-supersetbot/
+          persist-credentials: false

      - name: Label the PRs with the right release-related labels
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          export GITHUB_ACTOR=""
+          git fetch --all --tags
+          git checkout master
          RELEASE="${{ github.event.release.tag_name }}"
          if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then
            # in the case of a manually-triggered run, read release from input
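The `supersetbot docker` invocation is the core of both sides of this hunk. Isolated below as a sketch, wrapped in a workflow step; the flags are exactly those shown in the diff, while the preset, context, and ref values are illustrative substitutes for the variables used above:

    - name: Publish images
      run: |
        supersetbot docker \
          --push \
          --preset lean \
          --context release \
          --context-ref 4.1.1 \
          --platform "linux/arm64" \
          --platform "linux/amd64"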
8  .github/workflows/tech-debt.yml  vendored

@@ -8,7 +8,7 @@ on:

jobs:
  config:
-    runs-on: ubuntu-24.04
+    runs-on: "ubuntu-22.04"
    outputs:
      has-secrets: ${{ steps.check.outputs.has-secrets }}
    steps:
@@ -23,7 +23,7 @@ jobs:
  process-and-upload:
    needs: config
    if: needs.config.outputs.has-secrets
-    runs-on: ubuntu-24.04
+    runs-on: ubuntu-22.04
    name: Generate Reports
    steps:
      - name: Checkout Repository
@@ -32,10 +32,10 @@ jobs:
      - name: Set up Node.js
        uses: actions/setup-node@v4
        with:
-          node-version-file: './superset-frontend/.nvmrc'
+          node-version: '18'

      - name: Install Dependencies
-        run: npm ci
+        run: npm install
        working-directory: ./superset-frontend

      - name: Run Script
2  .github/workflows/welcome-new-users.yml  vendored

@@ -6,7 +6,7 @@ on:

jobs:
  welcome:
-    runs-on: ubuntu-24.04
+    runs-on: ubuntu-22.04
    permissions:
      pull-requests: write
9  .gitignore  vendored

@@ -21,7 +21,6 @@
*.swp
__pycache__

-.aider*
.local
.cache
.bento*
@@ -51,6 +50,7 @@ env
venv*
env_py3
envpy3
+env36
local_config.py
/superset_config.py
/superset_text.yml
@@ -66,10 +66,7 @@ superset-websocket/config.json
*.js.map
node_modules
npm-debug.log*
-superset/static/assets/*
-!superset/static/assets/.gitkeep
-superset/static/uploads/*
-!superset/static/uploads/.gitkeep
+superset/static/assets
superset/static/version_info.json
superset-frontend/**/esm/*
superset-frontend/**/lib/*
@@ -124,5 +121,3 @@ docker/*local*

# Jest test report
test-report.html
-superset/static/stats/statistics.html
-.aider*
@@ -16,11 +16,11 @@
#
repos:
  - repo: https://github.com/MarcoGorelli/auto-walrus
-    rev: 0.3.4
+    rev: v0.2.2
    hooks:
      - id: auto-walrus
  - repo: https://github.com/pre-commit/mirrors-mypy
-    rev: v1.15.0
+    rev: v1.3.0
    hooks:
      - id: mypy
        args: [--check-untyped-defs]
@@ -38,49 +38,28 @@ repos:
            types-paramiko,
            types-Markdown,
          ]
+  - repo: https://github.com/peterdemin/pip-compile-multi
+    rev: v2.6.2
+    hooks:
+      - id: pip-compile-multi-verify
  - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v5.0.0
+    rev: v4.4.0
    hooks:
      - id: check-docstring-first
      - id: check-added-large-files
-        exclude: ^.*\.(geojson)$|^docs/static/img/screenshots/.*|^superset-frontend/CHANGELOG\.md$
+        exclude: ^.*\.(geojson)$|^docs/static/img/screenshots/.*
      - id: check-yaml
        exclude: ^helm/superset/templates/
+      - id: debug-statements
      - id: end-of-file-fixer
        exclude: .*/lerna\.json$
      - id: trailing-whitespace
        exclude: ^.*\.(snap)
        args: ["--markdown-linebreak-ext=md"]
  - repo: https://github.com/pre-commit/mirrors-prettier
-    rev: v4.0.0-alpha.8 # Use the sha or tag you want to point at
+    rev: v3.1.0 # Use the sha or tag you want to point at
    hooks:
      - id: prettier
-        additional_dependencies:
-          - prettier@3.3.3
        args: ["--ignore-path=./superset-frontend/.prettierignore"]
        files: "superset-frontend"
-  - repo: local
-    hooks:
-      - id: eslint-frontend
-        name: eslint (frontend)
-        entry: ./scripts/eslint.sh
-        language: system
-        pass_filenames: true
-        files: ^superset-frontend/.*\.(js|jsx|ts|tsx)$
-      - id: eslint-docs
-        name: eslint (docs)
-        entry: bash -c 'cd docs && FILES=$(echo "$@" | sed "s|docs/||g") && yarn eslint --fix --ext .js,.jsx,.ts,.tsx --quiet $FILES'
-        language: system
-        pass_filenames: true
-        files: ^docs/.*\.(js|jsx|ts|tsx)$
-      - id: type-checking-frontend
-        name: Type-Checking (Frontend)
-        entry: bash -c './scripts/check-type.js package=superset-frontend excludeDeclarationDir=cypress-base'
-        language: system
-        files: ^superset-frontend\/.*\.(js|jsx|ts|tsx)$
-        exclude: ^superset-frontend/cypress-base\/
-        require_serial: true
  # blacklist unsafe functions like make_url (see #19526)
  - repo: https://github.com/skorokithakis/blacklist-pre-commit-hook
    rev: e2f070289d8eddcaec0b580d3bde29437e7c8221
@@ -88,15 +67,27 @@ repos:
      - id: blacklist
        args: ["--blacklisted-names=make_url", "--ignore=tests/"]
  - repo: https://github.com/norwoodj/helm-docs
-    rev: v1.14.2
+    rev: v1.11.0
    hooks:
      - id: helm-docs
        files: helm
        verbose: false
        args: ["--log-level", "error"]
  - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.9.7
+    rev: v0.4.0
    hooks:
      - id: ruff
-        args: [--fix]
+        args: [ --fix ]
      - id: ruff-format
+  - repo: local
+    hooks:
+      - id: pylint
+        name: pylint
+        entry: pylint
+        language: system
+        types: [python]
+        exclude: ^(tests/|superset/migrations/|scripts/|RELEASING/|docker/)
+        args:
+          [
+            "-rn", # Only display messages
+            "-sn", # Don't display the score
+            "--rcfile=.pylintrc",
+          ]
380  .pylintrc  Normal file

@@ -0,0 +1,380 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
[MASTER]

# Specify a configuration file.
#rcfile=

# Python code to execute, usually for sys.path manipulation such as
# pygtk.require().
#init-hook=

# Add files or directories to the blacklist. They should be base names, not
# paths.
ignore=CVS,migrations

# Add files or directories matching the regex patterns to the blacklist. The
# regex matches against base names, not paths.
ignore-patterns=

# Pickle collected data for later comparisons.
persistent=yes

# List of plugins (as comma separated values of python modules names) to load,
# usually to register additional checkers.
load-plugins=superset.extensions.pylint

# Use multiple processes to speed up Pylint.
jobs=2

# Allow loading of arbitrary C extensions. Extensions are imported into the
# active Python interpreter and may run arbitrary code.
unsafe-load-any-extension=no

# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loading into the active Python interpreter and may
# run arbitrary code
extension-pkg-whitelist=pyarrow


[MESSAGES CONTROL]

# Only show warnings with the listed confidence levels. Leave empty to show
# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED
confidence=

# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
# multiple time (only on the command line, not in the configuration file where
# it should appear only once). See also the "--disable" option for examples.
enable=
    useless-suppression,

# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifiers separated by comma (,) or put this
# option multiple times (only on the command line, not in the configuration
# file where it should appear only once).You can also use "--disable=all" to
# disable everything first and then reenable specific checks. For example, if
# you want to run only the similarities checker, you can use "--disable=all
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use"--disable=all --enable=classes
# --disable=W"
disable=
    cyclic-import, # re-enable once this no longer raises false positives
    missing-docstring,
    duplicate-code,
    line-too-long,
    unspecified-encoding,
    too-many-instance-attributes # re-enable once this no longer raises false positives

[REPORTS]

# Set the output format. Available formats are text, parseable, colorized, msvs
# (visual studio) and html. You can also give a reporter class, eg
# mypackage.mymodule.MyReporterClass.
output-format=text

# Tells whether to display a full report or only the messages
reports=yes

# Python expression which should return a note less than 10 (10 is the highest
# note). You have access to the variables errors warning, statement which
# respectively contain the number of errors / warnings messages and the total
# number of statements analyzed. This is used by the global evaluation report
# (RP0004).
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)

# Template used to display messages. This is a python new-style format string
# used to format the message information. See doc for all details
#msg-template=


[BASIC]

# Good variable names which should always be accepted, separated by a comma
good-names=_,df,ex,f,i,id,j,k,l,o,pk,Run,ts,v,x,y

# Bad variable names which should always be refused, separated by a comma
bad-names=bar,baz,db,fd,foo,sesh,session,tata,toto,tutu

# Colon-delimited sets of names that determine each other's naming style when
# the name regexes allow several styles.
name-group=

# Include a hint for the correct naming format with invalid-name
include-naming-hint=no

# List of decorators that produce properties, such as abc.abstractproperty. Add
# to this list to register other decorators that produce valid properties.
property-classes=
    abc.abstractproperty,
    sqlalchemy.ext.hybrid.hybrid_property

# Regular expression matching correct argument names
argument-rgx=[a-z_][a-z0-9_]{2,30}$

# Regular expression matching correct method names
method-rgx=[a-z_][a-z0-9_]{2,30}$

# Regular expression matching correct variable names
variable-rgx=[a-z_][a-z0-9_]{1,30}$

# Regular expression matching correct inline iteration names
inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$

# Regular expression matching correct constant names
const-rgx=(([A-Za-z_][A-Za-z0-9_]*)|(__.*__))$

# Regular expression matching correct class names
class-rgx=[A-Z_][a-zA-Z0-9]+$

# Regular expression matching correct class attribute names
class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$

# Regular expression matching correct module names
module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$

# Regular expression matching correct attribute names
attr-rgx=[a-z_][a-z0-9_]{2,30}$

# Regular expression matching correct function names
function-rgx=[a-z_][a-z0-9_]{2,30}$

# Regular expression which should only match function or class names that do
# not require a docstring.
no-docstring-rgx=^_

# Minimum line length for functions/classes that require docstrings, shorter
# ones are exempt.
docstring-min-length=10


[ELIF]

# Maximum number of nested blocks for function / method body
max-nested-blocks=5


[FORMAT]

# Maximum number of characters on a single line.
max-line-length=100

# Regexp for a line that is allowed to be longer than the limit.
ignore-long-lines=^\s*(# )?<?https?://\S+>?$

# Allow the body of an if to be on the same line as the test if there is no
# else.
single-line-if-stmt=no

# Maximum number of lines in a module
max-module-lines=1000

# String used as indentation unit. This is usually "    " (4 spaces) or "\t" (1
# tab).
indent-string='    '

# Number of spaces of indent required inside a hanging or continued line.
indent-after-paren=4

# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
expected-line-ending-format=


[LOGGING]

# Logging modules to check that the string format arguments are in logging
# function parameter format
logging-modules=logging


[MISCELLANEOUS]

# List of note tags to take in consideration, separated by a comma.
notes=FIXME,XXX


[SIMILARITIES]

# Minimum lines number of a similarity.
min-similarity-lines=5

# Ignore comments when computing similarities.
ignore-comments=yes

# Ignore docstrings when computing similarities.
ignore-docstrings=yes

# Ignore imports when computing similarities.
ignore-imports=no


[SPELLING]

# Spelling dictionary name. Available dictionaries: none. To make it working
# install python-enchant package.
spelling-dict=

# List of comma separated words that should not be checked.
spelling-ignore-words=

# A path to a file that contains private dictionary; one word per line.
spelling-private-dict-file=

# Tells whether to store unknown words to indicated private dictionary in
# --spelling-private-dict-file option instead of raising a message.
spelling-store-unknown-words=no


[TYPECHECK]

# Tells whether missing members accessed in mixin class should be ignored. A
# mixin class is detected if its name ends with "mixin" (case insensitive).
ignore-mixin-members=yes

# List of module names for which member attributes should not be checked
# (useful for modules/projects where namespaces are manipulated during runtime
# and thus existing member attributes cannot be deduced by static analysis. It
# supports qualified module names, as well as Unix pattern matching.
ignored-modules=numpy,pandas,alembic.op,sqlalchemy,alembic.context,flask_appbuilder.security.sqla.PermissionView.role,flask_appbuilder.Model.metadata,flask_appbuilder.Base.metadata

# List of class names for which member attributes should not be checked (useful
# for classes with dynamically set attributes). This supports the use of
# qualified names.
ignored-classes=contextlib.closing,optparse.Values,thread._local,_thread._local

# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E1101 when accessed. Python regular
# expressions are accepted.
generated-members=

# List of decorators that produce context managers, such as
# contextlib.contextmanager. Add to this list to register other decorators that
# produce valid context managers.
contextmanager-decorators=contextlib.contextmanager


[VARIABLES]

# Tells whether we should check for unused import in __init__ files.
init-import=no

# A regular expression matching the name of dummy variables (i.e. expectedly
# not used).
dummy-variables-rgx=(_+[a-zA-Z0-9]*?$)|dummy

# List of additional names supposed to be defined in builtins. Remember that
# you should avoid to define new builtins when possible.
additional-builtins=

# List of strings which can identify a callback function by name. A callback
# name must start or end with one of those strings.
callbacks=cb_,_cb

# List of qualified module names which can have objects that can redefine
# builtins.
redefining-builtins-modules=six.moves,future.builtins


[CLASSES]

# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,__new__,setUp

# List of valid names for the first argument in a class method.
valid-classmethod-first-arg=cls

# List of valid names for the first argument in a metaclass class method.
valid-metaclass-classmethod-first-arg=mcs

# List of member names, which should be excluded from the protected access
# warning.
exclude-protected=_asdict,_fields,_replace,_source,_make


[DESIGN]

# Maximum number of arguments for function / method
max-args=5

# Argument names that match this expression will be ignored. Default to name
# with leading underscore
ignored-argument-names=_.*

# Maximum number of locals for function / method body
max-locals=15

# Maximum number of return / yield for function / method body
max-returns=10

# Maximum number of branch for function / method body
max-branches=15

# Maximum number of statements in function / method body
max-statements=50

# Maximum number of parents for a class (see R0901).
max-parents=7

# Maximum number of attributes for a class (see R0902).
max-attributes=8

# Minimum number of public methods for a class (see R0903).
min-public-methods=2

# Maximum number of public methods for a class (see R0904).
max-public-methods=20

# Maximum number of boolean expressions in a if statement
max-bool-expr=5


[IMPORTS]

# Deprecated modules which should not be used, separated by a comma
deprecated-modules=optparse

# Create a graph of every (i.e. internal and external) dependencies in the
# given file (report RP0402 must not be disabled)
import-graph=

# Create a graph of external dependencies in the given file (report RP0402 must
# not be disabled)
ext-import-graph=

# Create a graph of internal dependencies in the given file (report RP0402 must
# not be disabled)
int-import-graph=

# Force import order to recognize a module as part of the standard
# compatibility libraries.
known-standard-library=

# Force import order to recognize a module as part of a third party library.
known-third-party=enchant

# Analyse import fallback blocks. This can be used to support both Python 2 and
# 3 compatible code, which means that the block might have code that exists
# only in one or another interpreter, leading to false positives when analysed.
analyse-fallback-blocks=no


[EXCEPTIONS]

# Exceptions that will emit a warning when being caught. Defaults to
# "Exception"
overgeneral-exceptions=builtins.Exception
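This rcfile pairs with the local pylint hook added to the pre-commit config above. A sketch of invoking it directly from a workflow step, under the same flags the hook uses (`-rn` messages only, `-sn` no score); the `superset` target path is an assumption, not from the diff:

    - name: pylint
      run: pylint -rn -sn --rcfile=.pylintrc superset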
@@ -61,16 +61,12 @@ tsconfig.tsbuildinfo
generator-superset/*
temporary_superset_ui/*

-# skip license checks for auto-generated test snapshots
-.*snap
-
# docs overrides for third party logos we don't have the rights to
google-big-query.svg
google-sheets.svg
ibm-db2.svg
postgresql.svg
snowflake.svg
-ydb.svg

# docs-related
erd.puml
50  CHANGELOG/4.1.1.md  Normal file

@@ -0,0 +1,50 @@
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at

  http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->

## Change Log

### 4.1 (Fri Nov 15 22:13:57 2024 +0530)

**Database Migrations**

**Features**

**Fixes**

- [#30886](https://github.com/apache/superset/pull/30886) fix: blocks UI elements on right side (@samarsrivastav)
- [#30859](https://github.com/apache/superset/pull/30859) fix(package.json): Pin luxon version to unblock master (@geido)
- [#30588](https://github.com/apache/superset/pull/30588) fix(explore): column data type tooltip format (@mistercrunch)
- [#29911](https://github.com/apache/superset/pull/29911) fix: Rename database from 'couchbasedb' to 'couchbase' in documentation and db_engine_specs (@ayush-couchbase)
- [#30828](https://github.com/apache/superset/pull/30828) fix(TimezoneSelector): Failing unit tests due to timezone change (@geido)
- [#30875](https://github.com/apache/superset/pull/30875) fix: don't show metadata for embedded dashboards (@sadpandajoe)
- [#30851](https://github.com/apache/superset/pull/30851) fix: Graph chart colors (@michael-s-molina)
- [#29867](https://github.com/apache/superset/pull/29867) fix(capitalization): Capitalizing a button. (@rusackas)
- [#29782](https://github.com/apache/superset/pull/29782) fix(translations): Translate embedded errors (@rusackas)
- [#29772](https://github.com/apache/superset/pull/29772) fix: Fixing incomplete string escaping. (@rusackas)
- [#29725](https://github.com/apache/superset/pull/29725) fix(frontend/docker, ci): fix borked Docker build due to Lerna v8 uplift (@hainenber)

**Others**

- [#30576](https://github.com/apache/superset/pull/30576) chore: add link to Superset when report error (@eschutho)
- [#29786](https://github.com/apache/superset/pull/29786) refactor(Slider): Upgrade Slider to Antd 5 (@geido)
- [#29674](https://github.com/apache/superset/pull/29674) refactor(ChartCreation): Migrate tests to RTL (@rtexelm)
- [#29843](https://github.com/apache/superset/pull/29843) refactor(controls): Migrate AdhocMetricOption.test to RTL (@rtexelm)
- [#29845](https://github.com/apache/superset/pull/29845) refactor(controls): Migrate MetricDefinitionValue.test to RTL (@rtexelm)
- [#28424](https://github.com/apache/superset/pull/28424) docs: Check markdown files for bad links using linkinator (@rusackas)
- [#29768](https://github.com/apache/superset/pull/29768) docs(contributing): fix broken link to translations sub-section (@sfirke)
@@ -80,9 +80,9 @@ If you believe someone is violating this code of conduct, you may reply to them

Or one of our volunteers:

* [Mark Thomas](https://www.linkedin.com/in/mark-thomas-b16751158/)
* [Joan Touzet](https://www.apache.org/foundation/conduct-team/wohali.html)
* [Sharan Foga](https://www.linkedin.com/in/sfoga/)
* [Mark Thomas](http://home.apache.org/~markt/coc.html)
* [Joan Touzet](http://home.apache.org/~wohali/)
* [Sharan Foga](http://home.apache.org/~sharan/coc.html)

If the violation is in documentation or code, for example inappropriate pronoun usage or word choice within official documentation, we ask that people report these privately to the project in question at <private@project.apache.org>, and, if they have sufficient ability within the project, to resolve or remove the concerning material, being mindful of the perspective of the person originally reporting the issue.

329
Dockerfile
@@ -18,246 +18,163 @@
######################################################################
# Node stage to deal with static asset construction
######################################################################
ARG PY_VER=3.11.11-slim-bookworm
ARG PY_VER=3.10-slim-bookworm

# If BUILDPLATFORM is null, set it to 'amd64' (or leave as is otherwise).
# if BUILDPLATFORM is null, set it to 'amd64' (or leave as is otherwise).
ARG BUILDPLATFORM=${BUILDPLATFORM:-amd64}
FROM --platform=${BUILDPLATFORM} node:18-bullseye-slim AS superset-node

# Include translations in the final build
ARG BUILD_TRANSLATIONS="false"

######################################################################
# superset-node-ci used as a base for building frontend assets and CI
######################################################################
FROM --platform=${BUILDPLATFORM} node:20-bookworm-slim AS superset-node-ci
ARG BUILD_TRANSLATIONS
ENV BUILD_TRANSLATIONS=${BUILD_TRANSLATIONS}
ARG DEV_MODE="false" # Skip frontend build in dev mode
ENV DEV_MODE=${DEV_MODE}

COPY docker/ /app/docker/
# Arguments for build configuration
ARG NPM_BUILD_CMD="build"

# Install system dependencies required for node-gyp
RUN /app/docker/apt-install.sh build-essential python3 zstd
# Somehow we need python3 + build-essential on this side of the house to install node-gyp
RUN apt-get update -qq \
    && apt-get install \
    -yqq --no-install-recommends \
    build-essential \
    python3

# Define environment variables for frontend build
ENV BUILD_CMD=${NPM_BUILD_CMD} \
    PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true
# NPM ci first, so as to NOT invalidate previous steps except when package.json changes

# Run the frontend memory monitoring script
RUN /app/docker/frontend-mem-nag.sh
RUN --mount=type=bind,target=/frontend-mem-nag.sh,src=./docker/frontend-mem-nag.sh \
    /frontend-mem-nag.sh

WORKDIR /app/superset-frontend

# Create necessary folders to avoid errors in subsequent steps
RUN mkdir -p /app/superset/static/assets \
    /app/superset/translations

# Mount package files and install dependencies if not in dev mode
# NOTE: we mount packages and plugins as they are referenced in package.json as workspaces;
# ideally we'd COPY only their package.json. Here npm ci will be cached as long
# as the full content of these folders doesn't change, yielding a decent cache reuse rate.
# Note that it's not possible to selectively COPY or mount using globs.
RUN --mount=type=bind,source=./superset-frontend/package.json,target=./package.json \
    --mount=type=bind,source=./superset-frontend/package-lock.json,target=./package-lock.json \
    --mount=type=cache,target=/root/.cache \
    --mount=type=cache,target=/root/.npm \
    if [ "$DEV_MODE" = "false" ]; then \
        npm ci; \
    else \
        echo "Skipping 'npm ci' in dev mode"; \
    fi
RUN --mount=type=bind,target=./package.json,src=./superset-frontend/package.json \
    --mount=type=bind,target=./package-lock.json,src=./superset-frontend/package-lock.json \
    npm ci

# Runs the webpack build process
COPY superset-frontend /app/superset-frontend
RUN npm run ${BUILD_CMD}

######################################################################
# superset-node used to compile frontend assets
######################################################################
FROM superset-node-ci AS superset-node

# Build the frontend if not in dev mode
RUN --mount=type=cache,target=/root/.npm \
    if [ "$DEV_MODE" = "false" ]; then \
        echo "Running 'npm run ${BUILD_CMD}'"; \
        npm run ${BUILD_CMD}; \
    else \
        echo "Skipping 'npm run ${BUILD_CMD}' in dev mode"; \
    fi;

# Copy translation files
# This copies the .po files needed for translation
RUN mkdir -p /app/superset/translations
COPY superset/translations /app/superset/translations
# Compiles .json files from the .po files, then deletes the .po files
RUN npm run build-translation
RUN rm /app/superset/translations/*/LC_MESSAGES/*.po
RUN rm /app/superset/translations/messages.pot

# Build the translations if requested
RUN if [ "$BUILD_TRANSLATIONS" = "true" ]; then \
        npm run build-translation; \
    fi; \
    rm -rf /app/superset/translations/*/*/*.po; \
    rm -rf /app/superset/translations/*/*/*.mo;


######################################################################
# Base python layer
######################################################################
FROM python:${PY_VER} AS python-base

ARG SUPERSET_HOME="/app/superset_home"
ENV SUPERSET_HOME=${SUPERSET_HOME}

RUN mkdir -p $SUPERSET_HOME
RUN useradd --user-group -d ${SUPERSET_HOME} -m --no-log-init --shell /bin/bash superset \
    && chmod -R 1777 $SUPERSET_HOME \
    && chown -R superset:superset $SUPERSET_HOME

# Some bash scripts needed throughout the layers
COPY --chmod=755 docker/*.sh /app/docker/

RUN pip install --no-cache-dir --upgrade uv

# Using uv as it's faster/simpler than pip
RUN uv venv /app/.venv
ENV PATH="/app/.venv/bin:${PATH}"

######################################################################
# Python translation compiler layer
######################################################################
FROM python-base AS python-translation-compiler

ARG BUILD_TRANSLATIONS
ENV BUILD_TRANSLATIONS=${BUILD_TRANSLATIONS}

# Install Python dependencies using docker/pip-install.sh
COPY requirements/translations.txt requirements/
RUN --mount=type=cache,target=/root/.cache/uv \
    . /app/.venv/bin/activate && /app/docker/pip-install.sh --requires-build-essential -r requirements/translations.txt

COPY superset/translations/ /app/translations_mo/
RUN if [ "$BUILD_TRANSLATIONS" = "true" ]; then \
        pybabel compile -d /app/translations_mo | true; \
    fi; \
    rm -f /app/translations_mo/*/*/*.po; \
    rm -f /app/translations_mo/*/*/*.json;
######################################################################
# Python APP common layer
######################################################################
FROM python-base AS python-common

ENV SUPERSET_HOME="/app/superset_home" \
    HOME="/app/superset_home" \
    SUPERSET_ENV="production" \
    FLASK_APP="superset.app:create_app()" \
    PYTHONPATH="/app/pythonpath" \
    SUPERSET_PORT="8088"

# Copy the entrypoints, make them executable in userspace
COPY --chmod=755 docker/entrypoints /app/docker/entrypoints

WORKDIR /app
# Set up necessary directories and user
RUN mkdir -p \
    ${PYTHONPATH} \
    superset/static \
    requirements \
    superset-frontend \
    apache_superset.egg-info \
    requirements \
    && touch superset/static/version_info.json

# Install Playwright and optionally setup headless browsers
ARG INCLUDE_CHROMIUM="true"
ARG INCLUDE_FIREFOX="false"
RUN --mount=type=cache,target=${SUPERSET_HOME}/.cache/uv \
    if [ "$INCLUDE_CHROMIUM" = "true" ] || [ "$INCLUDE_FIREFOX" = "true" ]; then \
        uv pip install playwright && \
        playwright install-deps && \
        if [ "$INCLUDE_CHROMIUM" = "true" ]; then playwright install chromium; fi && \
        if [ "$INCLUDE_FIREFOX" = "true" ]; then playwright install firefox; fi; \
    else \
        echo "Skipping browser installation"; \
    fi

# Copy required files for Python build
COPY pyproject.toml setup.py MANIFEST.in README.md ./
COPY superset-frontend/package.json superset-frontend/
COPY scripts/check-env.py scripts/

# keeping for backward compatibility
COPY --chmod=755 ./docker/entrypoints/run-server.sh /usr/bin/

# Some debian libs
RUN /app/docker/apt-install.sh \
    curl \
    libsasl2-dev \
    libsasl2-modules-gssapi-mit \
    libpq-dev \
    libecpg-dev \
    libldap2-dev

# Copy compiled things from previous stages
COPY --from=superset-node /app/superset/static/assets superset/static/assets

# TODO, when the next version comes out, use --exclude superset/translations
COPY superset superset
# TODO in the meantime, remove the .po files
RUN rm superset/translations/*/*/*.po

# Merging translations from backend and frontend stages
COPY --from=superset-node /app/superset/translations superset/translations
COPY --from=python-translation-compiler /app/translations_mo superset/translations

HEALTHCHECK CMD curl -f "http://localhost:${SUPERSET_PORT}/health"
CMD ["/app/docker/entrypoints/run-server.sh"]
EXPOSE ${SUPERSET_PORT}

######################################################################
# Final lean image...
######################################################################
FROM python-common AS lean
FROM python-base AS lean

# Install Python dependencies using docker/pip-install.sh
COPY requirements/base.txt requirements/
RUN --mount=type=cache,target=${SUPERSET_HOME}/.cache/uv \
    /app/docker/pip-install.sh --requires-build-essential -r requirements/base.txt
# Install the superset package
RUN --mount=type=cache,target=${SUPERSET_HOME}/.cache/uv \
    uv pip install .
RUN python -m compileall /app/superset
WORKDIR /app
ENV LANG=C.UTF-8 \
    LC_ALL=C.UTF-8 \
    SUPERSET_ENV=production \
    FLASK_APP="superset.app:create_app()" \
    PYTHONPATH="/app/pythonpath" \
    SUPERSET_HOME="/app/superset_home" \
    SUPERSET_PORT=8088

RUN mkdir -p ${PYTHONPATH} superset/static requirements superset-frontend apache_superset.egg-info requirements \
    && useradd --user-group -d ${SUPERSET_HOME} -m --no-log-init --shell /bin/bash superset \
    && apt-get update -qq && apt-get install -yqq --no-install-recommends \
    curl \
    default-libmysqlclient-dev \
    libsasl2-dev \
    libsasl2-modules-gssapi-mit \
    libpq-dev \
    libecpg-dev \
    libldap2-dev \
    && touch superset/static/version_info.json \
    && chown -R superset:superset ./* \
    && rm -rf /var/lib/apt/lists/*

COPY --chown=superset:superset pyproject.toml setup.py MANIFEST.in README.md ./
# setup.py uses the version information in package.json
COPY --chown=superset:superset superset-frontend/package.json superset-frontend/
COPY --chown=superset:superset requirements/base.txt requirements/
RUN --mount=type=cache,target=/root/.cache/pip \
    apt-get update -qq && apt-get install -yqq --no-install-recommends \
    build-essential \
    && pip install --upgrade setuptools pip \
    && pip install -r requirements/base.txt \
    && apt-get autoremove -yqq --purge build-essential \
    && rm -rf /var/lib/apt/lists/*

# Copy the compiled frontend assets
COPY --chown=superset:superset --from=superset-node /app/superset/static/assets superset/static/assets

## Lastly, let's install superset itself
COPY --chown=superset:superset superset superset
RUN --mount=type=cache,target=/root/.cache/pip \
    pip install -e .

# Copy the .json translations from the frontend layer
COPY --chown=superset:superset --from=superset-node /app/superset/translations superset/translations

# Compile translations for the backend - this generates .mo files, then deletes the .po files
COPY ./scripts/translations/generate_mo_files.sh ./scripts/translations/
RUN ./scripts/translations/generate_mo_files.sh \
    && chown -R superset:superset superset/translations \
    && rm superset/translations/messages.pot \
    && rm superset/translations/*/LC_MESSAGES/*.po

COPY --chmod=755 ./docker/run-server.sh /usr/bin/
USER superset

HEALTHCHECK CMD curl -f "http://localhost:${SUPERSET_PORT}/health"

EXPOSE ${SUPERSET_PORT}

CMD ["/usr/bin/run-server.sh"]

######################################################################
# Dev image...
######################################################################
FROM python-common AS dev
FROM lean AS dev

# Debian libs needed for dev
RUN /app/docker/apt-install.sh \
    git \
    pkg-config \
    default-libmysqlclient-dev
USER root
RUN apt-get update -qq \
    && apt-get install -yqq --no-install-recommends \
    libnss3 \
    libdbus-glib-1-2 \
    libgtk-3-0 \
    libx11-xcb1 \
    libasound2 \
    libxtst6 \
    git \
    pkg-config \
    && rm -rf /var/lib/apt/lists/*

# Copy development requirements and install them
COPY requirements/*.txt requirements/
# Install Python dependencies using docker/pip-install.sh
RUN --mount=type=cache,target=${SUPERSET_HOME}/.cache/uv \
    /app/docker/pip-install.sh --requires-build-essential -r requirements/development.txt
# Install the superset package
RUN --mount=type=cache,target=${SUPERSET_HOME}/.cache/uv \
    uv pip install .
RUN --mount=type=cache,target=/root/.cache/pip \
    pip install playwright
RUN playwright install-deps
RUN playwright install chromium

RUN uv pip install .[postgres]
RUN python -m compileall /app/superset
# Install GeckoDriver WebDriver
ARG GECKODRIVER_VERSION=v0.34.0 \
    FIREFOX_VERSION=125.0.3

RUN apt-get update -qq \
    && apt-get install -yqq --no-install-recommends wget bzip2 \
    && wget -q https://github.com/mozilla/geckodriver/releases/download/${GECKODRIVER_VERSION}/geckodriver-${GECKODRIVER_VERSION}-linux64.tar.gz -O - | tar xfz - -C /usr/local/bin \
    # Install Firefox
    && wget -q https://download-installer.cdn.mozilla.net/pub/firefox/releases/${FIREFOX_VERSION}/linux-x86_64/en-US/firefox-${FIREFOX_VERSION}.tar.bz2 -O - | tar xfj - -C /opt \
    && ln -s /opt/firefox/firefox /usr/local/bin/firefox \
    && apt-get autoremove -yqq --purge wget bzip2 && rm -rf /var/[log,tmp]/* /tmp/* /var/lib/apt/lists/*
# Cache everything for dev purposes...

COPY --chown=superset:superset requirements/development.txt requirements/
RUN --mount=type=cache,target=/root/.cache/pip \
    apt-get update -qq && apt-get install -yqq --no-install-recommends \
    build-essential \
    && pip install -r requirements/development.txt \
    && apt-get autoremove -yqq --purge build-essential \
    && rm -rf /var/lib/apt/lists/*

USER superset

######################################################################
# CI image...
######################################################################
FROM lean AS ci
USER root
RUN uv pip install .[postgres]
USER superset
CMD ["/app/docker/entrypoints/docker-ci.sh"]

COPY --chown=superset:superset --chmod=755 ./docker/*.sh /app/docker/

CMD ["/app/docker/docker-ci.sh"]

3
Makefile
@@ -87,6 +87,9 @@ format: py-format js-format
py-format: pre-commit
	pre-commit run black --all-files

py-lint: pre-commit
	pylint -j 0 superset

js-format:
	cd superset-frontend; npm run prettier

11
README.md
@@ -19,7 +19,7 @@ under the License.

# Superset

[](https://opensource.org/license/apache-2-0)
[](https://opensource.org/licenses/Apache-2.0)
[](https://github.com/apache/superset/tree/latest)
[](https://github.com/apache/superset/actions)
[](https://badge.fury.io/py/apache-superset)
@@ -73,8 +73,7 @@ Superset provides:

**Video Overview**
<!-- File hosted here https://github.com/apache/superset-site/raw/lfs/superset-video-4k.mp4 -->
[superset-video-1080p.webm](https://github.com/user-attachments/assets/b37388f7-a971-409c-96a7-90c4e31322e6)

[superset-video-4k.webm](https://github.com/apache/superset/assets/812905/da036bc2-150c-4ee7-80f9-75e63210ff76)

<br/>

@@ -135,10 +134,6 @@ Here are some of the major database solutions that are supported:
<img src="https://superset.apache.org/img/databases/starrocks.png" alt="starrocks" border="0" width="200" />
<img src="https://superset.apache.org/img/databases/doris.png" alt="doris" border="0" width="200" />
<img src="https://superset.apache.org/img/databases/oceanbase.svg" alt="oceanbase" border="0" width="220" />
<img src="https://superset.apache.org/img/databases/sap-hana.png" alt="sap-hana" border="0" width="220" />
<img src="https://superset.apache.org/img/databases/denodo.png" alt="denodo" border="0" width="200" />
<img src="https://superset.apache.org/img/databases/ydb.svg" alt="ydb" border="0" width="200" />
<img src="https://superset.apache.org/img/databases/tdengine.png" alt="TDengine" border="0" width="200" />
</p>

**A more comprehensive list of supported databases** along with the configuration instructions can be found [here](https://superset.apache.org/docs/configuration/databases).
@@ -147,7 +142,7 @@ Want to add support for your datastore or data engine? Read more [here](https://

## Installation and Configuration

Try out Superset's [quickstart](https://superset.apache.org/docs/quickstart/) guide or learn about [the options for production deployments](https://superset.apache.org/docs/installation/architecture/).
[Extended documentation for Superset](https://superset.apache.org/docs/installation/docker-compose)

## Get Involved

@@ -30,12 +30,12 @@ RUN apt-get install -y apt-transport-https apt-utils
# Install superset dependencies
# https://superset.apache.org/docs/installation/installing-superset-from-scratch
RUN apt-get install -y build-essential libssl-dev \
    libffi-dev python3-dev libsasl2-dev libldap2-dev libxi-dev chromium zstd
    libffi-dev python3-dev libsasl2-dev libldap2-dev libxi-dev chromium

# Install nodejs for custom build
# https://nodejs.org/en/download/package-manager/
RUN set -eux; \
    curl -sL https://deb.nodesource.com/setup_20.x | bash -; \
    curl -sL https://deb.nodesource.com/setup_18.x | bash -; \
    apt-get install -y nodejs; \
    node --version;
RUN if ! which npm; then apt-get install -y npm; fi
@@ -64,7 +64,7 @@ RUN pip install --upgrade setuptools pip \

RUN flask fab babel-compile --target superset/translations

ENV PATH=/home/superset/superset/bin:$PATH \
    PYTHONPATH=/home/superset/superset/ \
    PYTHONPATH=/home/superset/superset/:$PYTHONPATH \
    SUPERSET_TESTENV=true
COPY from_tarball_entrypoint.sh /entrypoint.sh
ENTRYPOINT ["/entrypoint.sh"]

@@ -29,16 +29,13 @@ RUN apt-get install -y apt-transport-https apt-utils

# Install superset dependencies
# https://superset.apache.org/docs/installation/installing-superset-from-scratch
RUN apt-get install -y subversion build-essential libssl-dev \
    libffi-dev python3-dev libsasl2-dev libldap2-dev libxi-dev chromium zstd
RUN apt-get install -y build-essential libssl-dev \
    libffi-dev python3-dev libsasl2-dev libldap2-dev libxi-dev chromium

# Install nodejs for custom build
# https://nodejs.org/en/download/package-manager/
RUN set -eux; \
    curl -sL https://deb.nodesource.com/setup_20.x | bash -; \
    apt-get install -y nodejs; \
    node --version;
RUN if ! which npm; then apt-get install -y npm; fi
RUN curl -sL https://deb.nodesource.com/setup_16.x | bash - \
    && apt-get install -y nodejs

RUN mkdir -p /home/superset
RUN chown superset /home/superset
@@ -49,12 +46,14 @@ ARG VERSION
# Can fetch source from svn or copy tarball from local mounted directory
RUN svn co https://dist.apache.org/repos/dist/dev/superset/$VERSION ./
RUN tar -xvf *.tar.gz
WORKDIR /home/superset/apache-superset-$VERSION/superset-frontend
WORKDIR apache-superset-$VERSION

RUN npm ci \
RUN cd superset-frontend \
    && npm ci \
    && npm run build \
    && rm -rf node_modules


WORKDIR /home/superset/apache-superset-$VERSION
RUN pip install --upgrade setuptools pip \
    && pip install -r requirements/base.txt \
@@ -63,6 +62,6 @@ RUN pip install --upgrade setuptools pip \
RUN flask fab babel-compile --target superset/translations

ENV PATH=/home/superset/superset/bin:$PATH \
    PYTHONPATH=/home/superset/superset/
    PYTHONPATH=/home/superset/superset/:$PYTHONPATH
COPY from_tarball_entrypoint.sh /entrypoint.sh
ENTRYPOINT ["/entrypoint.sh"]

@@ -437,7 +437,7 @@ cd ${SUPERSET_RELEASE_RC}
python3 -m venv venv
source venv/bin/activate
pip install -r requirements/base.txt
pip install build twine
pip install twine
```

Create the distribution
@@ -452,10 +452,7 @@ cd ../


# Compile translations for the backend
./scripts/translations/generate_mo_files.sh

# update build version number
sed -i '' "s/version_string = .*/version_string = \"$SUPERSET_VERSION\"/" setup.py
./scripts/translations/generate_po_files.sh

# build the python distribution
python setup.py sdist
@@ -469,7 +466,6 @@ an account first if you don't have one, and reference your username
while requesting access to push packages.

```bash
twine upload dist/apache_superset-${SUPERSET_VERSION}-py3-none-any.whl
twine upload dist/apache-superset-${SUPERSET_VERSION}.tar.gz
```


@@ -232,7 +232,8 @@ class GitChangeLog:
        for log in self._logs:
            yield {
                "pr_number": log.pr_number,
                "pr_link": f"https://github.com/{SUPERSET_REPO}/pull/{log.pr_number}",
                "pr_link": f"https://github.com/{SUPERSET_REPO}/pull/"
                f"{log.pr_number}",
                "message": log.message,
                "time": log.time,
                "author": log.author,
@@ -271,14 +272,14 @@ class GitLogs:

    @staticmethod
    def _git_get_current_head() -> str:
        output = os.popen("git status | head -1").read()  # noqa: S605, S607
        output = os.popen("git status | head -1").read()
        match = re.match("(?:HEAD detached at|On branch) (.*)", output)
        if not match:
            return ""
        return match.group(1)

    def _git_checkout(self, git_ref: str) -> None:
        os.popen(f"git checkout {git_ref}").read()  # noqa: S605
        os.popen(f"git checkout {git_ref}").read()
        current_head = self._git_get_current_head()
        if current_head != git_ref:
            print(f"Could not checkout {git_ref}")
@@ -289,7 +290,7 @@ class GitLogs:
        current_git_ref = self._git_get_current_head()
        self._git_checkout(self._git_ref)
        output = (
            os.popen('git --no-pager log --pretty=format:"%h|%an|%ae|%ad|%s|"')  # noqa: S605, S607
            os.popen('git --no-pager log --pretty=format:"%h|%an|%ae|%ad|%s|"')
            .read()
            .split("\n")
        )
@@ -322,9 +323,9 @@ class BaseParameters:


def print_title(message: str) -> None:
    print(f"{50 * '-'}")
    print(f"{50*'-'}")
    print(message)
    print(f"{50 * '-'}")
    print(f"{50*'-'}")


@click.group()
@@ -348,14 +349,14 @@ def compare(base_parameters: BaseParameters) -> None:
    previous_logs = base_parameters.previous_logs
    current_logs = base_parameters.current_logs
    print_title(
        f"Pull requests from {current_logs.git_ref} not in {previous_logs.git_ref}"
        f"Pull requests from " f"{current_logs.git_ref} not in {previous_logs.git_ref}"
    )
    previous_diff_logs = previous_logs.diff(current_logs)
    for diff_log in previous_diff_logs:
        print(f"{diff_log}")

    print_title(
        f"Pull requests from {previous_logs.git_ref} not in {current_logs.git_ref}"
        f"Pull requests from " f"{previous_logs.git_ref} not in {current_logs.git_ref}"
    )
    current_diff_logs = current_logs.diff(previous_logs)
    for diff_log in current_diff_logs:

@@ -31,7 +31,7 @@ except ModuleNotFoundError:
RECEIVER_EMAIL = "dev@superset.apache.org"
PROJECT_NAME = "Superset"
PROJECT_MODULE = "superset"
PROJECT_DESCRIPTION = "Apache Superset is a modern, enterprise-ready business intelligence web application."  # noqa: E501
PROJECT_DESCRIPTION = "Apache Superset is a modern, enterprise-ready business intelligence web application."


def string_comma_to_list(message: str) -> list[str]:

@@ -137,4 +137,4 @@ There is now a [metadata bar](https://github.com/apache/superset/pull/27857) add

## Change to Docker image builds

Starting in 4.1.0, the release's docker image does not ship with drivers needed to operate Superset. Users may need to install a driver for their metadata database (MySQL or Postgres) as well as the driver for their data warehouse. This is a result of changes to the `lean` docker image that official releases come from; see [Docker Build Presets](/docs/docs/installation/docker-builds.mdx#build-presets) for more details.
Starting in 4.1.0, the release's docker image does not ship with drivers needed to operate Superset. Users may need to install a driver for their metadata database (MySQL or Postgres) as well as the driver for their data warehouse. This is a result of changes to the `lean` docker image that official releases come from; see [Docker Build Presets](/docs/installation/docker-builds#build-presets) for more details.

@@ -23,12 +23,12 @@ from typing import Optional

import requests

# Part 1: Verify SHA512 hash - this is the same as running `shasum -a 512 {release}` and comparing it against `{release}.sha512`  # noqa: E501
# Part 1: Verify SHA512 hash - this is the same as running `shasum -a 512 {release}` and comparing it against `{release}.sha512`


def get_sha512_hash(filename: str) -> str:
    """Run the shasum command on the file and return the SHA512 hash."""
    result = subprocess.run(["shasum", "-a", "512", filename], stdout=subprocess.PIPE)  # noqa: S603, S607
    result = subprocess.run(["shasum", "-a", "512", filename], stdout=subprocess.PIPE)
    sha512_hash = result.stdout.decode().split()[0]
    return sha512_hash

@@ -43,7 +43,7 @@ def read_sha512_file(filename: str) -> str:


def verify_sha512(filename: str) -> str:
    """Verify if the SHA512 hash of the file matches with the hash in the .sha512 file."""  # noqa: E501
    """Verify if the SHA512 hash of the file matches with the hash in the .sha512 file."""
    sha512_hash = get_sha512_hash(filename)
    sha512_file_content = read_sha512_file(filename)

@@ -53,56 +53,47 @@ def verify_sha512(filename: str) -> str:
    return "SHA failed"


# Part 2: Verify RSA key - this is the same as running `gpg --verify {release}.asc {release}` and comparing the RSA key and email address against the KEYS file  # noqa: E501
# Part 2: Verify RSA key - this is the same as running `gpg --verify {release}.asc {release}` and comparing the RSA key and email address against the KEYS file


def get_gpg_info(filename: str) -> tuple[Optional[str], Optional[str]]:
    """Run the GPG verify command and extract RSA key and email address."""
    asc_filename = filename + ".asc"
    result = subprocess.run(  # noqa: S603
        ["gpg", "--verify", asc_filename, filename],  # noqa: S607
        capture_output=True,  # noqa: S607
    result = subprocess.run(
        ["gpg", "--verify", asc_filename, filename], capture_output=True
    )
    output = result.stderr.decode()

    rsa_key = re.search(r"RSA key ([0-9A-F]+)", output)
    eddsa_key = re.search(r"EDDSA key ([0-9A-F]+)", output)
    email = re.search(r'issuer "([^"]+)"', output)

    rsa_key_result = rsa_key.group(1) if rsa_key else None
    eddsa_key_result = eddsa_key.group(1) if eddsa_key else None
    email_result = email.group(1) if email else None

    key_result = rsa_key_result or eddsa_key_result

    # Debugging:
    if key_result:
        print("RSA or EDDSA Key found")
    else:
        print("Warning: No RSA or EDDSA key found in GPG verification output.")
    if email_result:
        print("email found")
    else:
    # Debugging: print warnings if rsa_key or email is not found
    if rsa_key_result is None:
        print("Warning: No RSA key found in GPG verification output.")
    if email_result is None:
        print("Warning: No email address found in GPG verification output.")

    return key_result, email_result
    return rsa_key_result, email_result


def verify_key(key: str, email: Optional[str]) -> str:
    """Fetch the KEYS file and verify if the RSA/EDDSA key and email match."""
def verify_rsa_key(rsa_key: str, email: Optional[str]) -> str:
    """Fetch the KEYS file and verify if the RSA key and email match."""
    url = "https://downloads.apache.org/superset/KEYS"
    response = requests.get(url)  # noqa: S113
    response = requests.get(url)
    if response.status_code == 200:
        if key not in response.text:
            return "RSA/EDDSA key not found on KEYS page"
        if rsa_key not in response.text:
            return "RSA key not found on KEYS page"

        # Check if email is None or not in response.text
        if email and email in response.text:
            return "RSA/EDDSA key and email verified against Apache KEYS file"
            return "RSA key and email verified against Apache KEYS file"
        elif email:
            return "RSA/EDDSA key verified, but Email not found on KEYS page"
            return "RSA key verified, but Email not found on KEYS page"
        else:
            return "RSA/EDDSA key verified, but Email not available for verification"
            return "RSA key verified, but Email not available for verification"
    else:
        return "Failed to fetch KEYS file"

@@ -112,9 +103,9 @@ def verify_sha512_and_rsa(filename: str) -> None:
    sha_result = verify_sha512(filename)
    print(sha_result)

    key, email = get_gpg_info(filename)
    if key:
        rsa_result = verify_key(key, email)
    rsa_key, email = get_gpg_info(filename)
    if rsa_key:
        rsa_result = verify_rsa_key(rsa_key, email)
        print(rsa_result)
    else:
        print("GPG verification failed: RSA key or email not found")

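A minimal way to exercise the helpers above end to end is sketched below; the filename is a hypothetical example, and it assumes `shasum` and `gpg` are on the PATH with the `.sha512` and `.asc` files sitting next to the tarball:

```python
# Hypothetical usage of the verification helpers defined above.
if __name__ == "__main__":
    # Prints the SHA512 check result, then the key/email check result.
    verify_sha512_and_rsa("apache-superset-4.1.1-source.tar.gz")
```
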
@@ -44,11 +44,12 @@ These features are **finished** but currently being tested. They are usable, but
- ALLOW_FULL_CSV_EXPORT
- CACHE_IMPERSONATION
- CONFIRM_DASHBOARD_DIFF
- DRILL_TO_DETAIL
- DYNAMIC_PLUGINS
- DATE_FORMAT_IN_EMAIL_SUBJECT: [(docs)](https://superset.apache.org/docs/configuration/alerts-reports#commons)
- ENABLE_SUPERSET_META_DB: [(docs)](https://superset.apache.org/docs/configuration/databases/#querying-across-databases)
- ESTIMATE_QUERY_COST
- GLOBAL_ASYNC_QUERIES [(docs)](https://github.com/apache/superset/blob/master/CONTRIBUTING.md#async-chart-queries)
- HORIZONTAL_FILTER_BAR
- IMPERSONATE_WITH_EMAIL_PREFIX
- PLAYWRIGHT_REPORTS_AND_THUMBNAILS
- RLS_IN_SQLLAB
@@ -62,8 +63,9 @@ These features flags are **safe for production**. They have been tested and will
[//]: # "PLEASE KEEP THESE LISTS SORTED ALPHABETICALLY"

### Flags on the path to feature launch and flag deprecation/removal

- DASHBOARD_VIRTUALIZATION
- DRILL_BY
- DISABLE_LEGACY_DATASOURCE_EDITOR

### Flags retained for runtime configuration

@@ -77,7 +79,6 @@ independently. This new framework will also allow for non-boolean configurations
- ALLOW_ADHOC_SUBQUERY
- DASHBOARD_RBAC [(docs)](https://superset.apache.org/docs/using-superset/creating-your-first-dashboard#manage-access-to-dashboards)
- DATAPANEL_CLOSED_BY_DEFAULT
- DRILL_BY
- DRUID_JOINS
- EMBEDDABLE_CHARTS
- EMBEDDED_SUPERSET
@@ -97,6 +98,6 @@ These features flags currently default to True and **will be removed in a future
[//]: # "PLEASE KEEP THE LIST SORTED ALPHABETICALLY"

- AVOID_COLORS_COLLISION
- DRILL_TO_DETAIL
- DASHBOARD_CROSS_FILTERS
- ENABLE_JAVASCRIPT_CONTROLS
- KV_STORE

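As a reminder of how any of these flags get toggled: they are plain booleans set in the `FEATURE_FLAGS` dictionary of `superset_config.py`. A minimal sketch, using arbitrary example flags from the lists above:

```python
# superset_config.py -- minimal sketch; flag names come from the lists above.
FEATURE_FLAGS = {
    "DRILL_TO_DETAIL": True,      # testing/beta flag, opt in explicitly
    "ESTIMATE_QUERY_COST": True,  # testing/beta flag
    "DASHBOARD_RBAC": True,       # flag retained for runtime configuration
}
```
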
@@ -28,8 +28,7 @@ Join our growing community!
### Sharing Economy

- [Airbnb](https://github.com/airbnb)
- [Faasos](https://faasos.com/) [@shashanksingh]
- [Free2Move](https://www.free2move.com/) [@PaoloTerzi]
- [Faasos](http://faasos.com/) [@shashanksingh]
- [Hostnfly](https://www.hostnfly.com/) [@alexisrosuel]
- [Lime](https://www.li.me/) [@cxmcc]
- [Lyft](https://www.lyft.com/)
@@ -37,19 +36,16 @@ Join our growing community!

### Financial Services

- [Aktia Bank plc](https://www.aktia.com)
- [Aktia Bank plc](https://www.aktia.com) [@villebro]
- [American Express](https://www.americanexpress.com) [@TheLastSultan]
- [bumper](https://www.bumper.co/) [@vasu-ram, @JamiePercival]
- [Cape Crypto](https://capecrypto.com)
- [Capital Service S.A.](https://capitalservice.pl) [@pkonarzewski]
- [Clark.de](https://clark.de/)
- [Capital Service S.A.](http://capitalservice.pl) [@pkonarzewski]
- [Clark.de](http://clark.de/)
- [KarrotPay](https://www.daangnpay.com/)
- [Remita](https://remita.net) [@mujibishola]
- [Taveo](https://www.taveo.com) [@codek]
- [Unit](https://www.unit.co/about-us) [@amitmiran137]
- [Wise](https://wise.com) [@koszti]
- [Xendit](https://xendit.co/) [@LieAlbertTriAdrian]
- [Cover Genius](https://covergenius.com/)
- [Xendit](http://xendit.co/) [@LieAlbertTriAdrian]
- [bumper](https://www.bumper.co/) [@vasu-ram, @JamiePercival]

### Gaming

@@ -62,24 +58,20 @@ Join our growing community!
- [Dragonpass](https://www.dragonpass.com.cn/) [@zhxjdwh]
- [Dropit Shopping](https://www.dropit.shop/) [@dropit-dev]
- [Fanatics](https://www.fanatics.com/) [@coderfender]
- [Fordeal](https://www.fordeal.com) [@Renkai]
- [Fynd](https://www.fynd.com/) [@darpanjain07]
- [Fordeal](http://www.fordeal.com) [@Renkai]
- [GFG - Global Fashion Group](https://global-fashion-group.com) [@ksaagariconic]
- [GoTo/Gojek](https://www.gojek.io/) [@gwthm-in]
- [HuiShouBao](https://www.huishoubao.com/) [@Yukinoshita-Yukino]
- [HuiShouBao](http://www.huishoubao.com/) [@Yukinoshita-Yukino]
- [Now](https://www.now.vn/) [@davidkohcw]
- [Qunar](https://www.qunar.com/) [@flametest]
- [Rakuten Viki](https://www.viki.com)
- [Shopee](https://shopee.sg) [@xiaohanyu]
- [Shopkick](https://www.shopkick.com) [@LAlbertalli]
- [ShopUp](https://www.shopup.org/) [@gwthm-in]
- [Tails.com](https://tails.com/gb/) [@alanmcruickshank]
- [THE ICONIC](https://theiconic.com.au/) [@ksaagariconic]
- [THE ICONIC](http://theiconic.com.au/) [@ksaagariconic]
- [Utair](https://www.utair.ru) [@utair-digital]
- [VkusVill](https://vkusvill.ru/) [@ETselikov]
- [Zalando](https://www.zalando.com) [@dmigo]
- [Zalora](https://www.zalora.com) [@ksaagariconic]
- [Zepto](https://www.zeptonow.com/) [@gwthm-in]

### Enterprise Technology

@@ -89,52 +81,45 @@ Join our growing community!
- [Astronomer](https://www.astronomer.io) [@ryw]
- [Avesta Technologies](https://avestatechnologies.com/) [@TheRum]
- [Caizin](https://caizin.com/) [@tejaskatariya]
- [Canonical](https://canonical.com)
- [Careem](https://www.careem.com/) [@samraHanif0340]
- [Careem](https://www.careem.com/) [@SamraHanifCareem]
- [Cloudsmith](https://cloudsmith.io) [@alancarson]
- [CnOvit](https://www.cnovit.com/) [@xieshaohu]
- [Cyberhaven](https://www.cyberhaven.com/) [@toliver-ch]
- [Deepomatic](https://deepomatic.com/) [@Zanoellia]
- [Dial Once](https://www.dial-once.com/)
- [Dremio](https://dremio.com) [@narendrans]
- [EFinance](https://www.efinance.com.eg) [@habeeb556]
- [Elestio](https://elest.io/) [@kaiwalyakoparkar]
- [ELMO Cloud HR & Payroll](https://elmosoftware.com.au/)
- [Endress+Hauser](https://www.endress.com/) [@rumbin]
- [FBK - ICT center](https://ict.fbk.eu)
- [Endress+Hauser](http://www.endress.com/) [@rumbin]
- [FBK - ICT center](http://ict.fbk.eu)
- [Gavagai](https://gavagai.io) [@gavagai-corp]
- [GfK Data Lab](https://www.gfk.com/home) [@mherr]
- [Hydrolix](https://www.hydrolix.io/)
- [Intercom](https://www.intercom.com/) [@kate-gallo]
- [jampp](https://jampp.com/)
- [Konfío](https://konfio.mx) [@uis-rodriguez]
- [Konfío](http://konfio.mx) [@uis-rodriguez]
- [Mainstrat](https://mainstrat.com/)
- [mishmash io](https://mishmash.io/) [@mishmash-io]
- [Myra Labs](https://www.myralabs.com/) [@viksit]
- [Nielsen](https://www.nielsen.com/) [@amitNielsen]
- [mishmash io](https://mishmash.io/)[@mishmash-io]
- [Myra Labs](http://www.myralabs.com/) [@viksit]
- [Nielsen](http://www.nielsen.com/) [@amitNielsen]
- [Ona](https://ona.io) [@pld]
- [Orange](https://www.orange.com) [@icsu]
- [Oslandia](https://oslandia.com)
- [Peak AI](https://www.peak.ai/) [@azhar22k]
- [PeopleDoc](https://www.people-doc.com) [@rodo]
- [PlaidCloud](https://www.plaidcloud.com)
- [Preset, Inc.](https://preset.io)
- [PubNub](https://pubnub.com) [@jzucker2]
- [ReadyTech](https://www.readytech.io)
- [Reward Gateway](https://www.rewardgateway.com)
- [RIADVICE](https://riadvice.tn) [@riadvice]
- [ScopeAI](https://www.getscopeai.com) [@iloveluce]
- [shipmnts](https://shipmnts.com)
- [Showmax](https://showmax.com) [@bobek]
- [SingleStore](https://www.singlestore.com/)
- [TechAudit](https://www.techaudit.info) [@ETselikov]
- [Tenable](https://www.tenable.com) [@dflionis]
- [Tentacle](https://www.linkedin.com/company/tentacle-cmi/) [@jdclarke5]
- [Tentacle](https://tentaclecmi.com) [@jdclarke5]
- [timbr.ai](https://timbr.ai/) [@semantiDan]
- [Tobii](https://www.tobii.com/) [@dwa]
- [Tobii](http://www.tobii.com/) [@dwa]
- [Tooploox](https://www.tooploox.com/) [@jakubczaplicki]
- [Unvired](https://unvired.com) [@srinisubramanian]
- [Virtuoso QA](https://www.virtuosoqa.com)
- [Whale](https://whale.im)
- [Unvired](https://unvired.com)[@srinisubramanian]
- [Whale](http://whale.im)
- [Windsor.ai](https://www.windsor.ai/) [@octaviancorlade]
- [Zeta](https://www.zeta.tech/) [@shaikidris]

@@ -147,18 +132,17 @@ Join our growing community!
- [Kuaishou](https://www.kuaishou.com/) [@zhaoyu89730105]
- [Netflix](https://www.netflix.com/)
- [Prensa Iberica](https://www.prensaiberica.es/) [@zamar-roura]
- [TME QQMUSIC/WESING](https://www.tencentmusic.com/) [@shenyuanli,@marklaw]
- [TME QQMUSIC/WESING](https://www.tencentmusic.com/)[@shenyuanli,@marklaw]
- [Xite](https://xite.com/) [@shashankkoppar]
- [Zaihang](https://www.zaih.com/)
- [Zaihang](http://www.zaih.com/)

### Education

- [Aveti Learning](https://avetilearning.com/) [@TheShubhendra]
- [Brilliant.org](https://brilliant.org/)
- [Open edX](https://openedx.org/)
- [Platzi.com](https://platzi.com/)
- [Sunbird](https://www.sunbird.org/) [@eksteporg]
- [The GRAPH Network](https://thegraphnetwork.org/) [@fccoelho]
- [The GRAPH Network](https://thegraphnetwork.org/)[@fccoelho]
- [Udemy](https://www.udemy.com/) [@sungjuly]
- [VIPKID](https://www.vipkid.com.cn/) [@illpanda]
- [WikiMedia Foundation](https://wikimediafoundation.org) [@vg]
@@ -175,14 +159,13 @@ Join our growing community!

- [Amino](https://amino.com) [@shkr]
- [Bluesquare](https://www.bluesquarehub.com/) [@madewulf]
- [Care](https://www.getcare.io/) [@alandao2021]
- [Care](https://www.getcare.io/)[@alandao2021]
- [Living Goods](https://www.livinggoods.org) [@chelule]
- [Maieutical Labs](https://maieuticallabs.it) [@xrmx]
- [Medic](https://medic.org) [@1yuv]
- [QPID Health](http://www.qpidhealth.com/)
- [REDCap Cloud](https://www.redcapcloud.com/)
- [TrustMedis](https://trustmedis.com/) [@famasya]
- [WeSure](https://www.wesure.cn/)
- [2070Health](https://2070health.com/)

### HR / Staffing

@@ -194,12 +177,10 @@ Join our growing community!

- [City of Ann Arbor, MI](https://www.a2gov.org/) [@sfirke]
- [RIS3 Strategy of CZ, MIT CR](https://www.ris3.cz/) [@RIS3CZ]
- [NRLM - Sarathi, India](https://pib.gov.in/PressReleasePage.aspx?PRID=1999586)

### Travel

- [Agoda](https://www.agoda.com/) [@lostseaway, @maiake, @obombayo]
- [HomeToGo](https://hometogo.com/) [@pedromartinsteenstrup]
- [Skyscanner](https://www.skyscanner.net/) [@cleslie, @stanhoucke]

### Others

@@ -208,13 +189,12 @@ Join our growing community!
- [AI inside](https://inside.ai/en/)
- [Automattic](https://automattic.com/) [@Khrol, @Usiel]
- [Dropbox](https://www.dropbox.com/) [@bkyryliuk]
- [Flowbird](https://flowbird.com) [@EmmanuelCbd]
- [GEOTAB](https://www.geotab.com) [@JZ6]
- [Grassroot](https://www.grassrootinstitute.org/)
- [Increff](https://www.increff.com/) [@ishansinghania]
- [komoot](https://www.komoot.com/) [@christophlingg]
- [Let's Roam](https://www.letsroam.com/)
- [Onebeat](https://1beat.com/) [@GuyAttia]
- [X](https://x.com/)
- [Twitter](https://twitter.com/)
- [VLMedia](https://www.vlmedia.com.tr/) [@ibotheperfect]
- [Yahoo!](https://yahoo.com/)

@@ -43,8 +43,8 @@ under the License.
| can this form post on ResetPasswordView |:heavy_check_mark:|O|O|O|
| can this form get on ResetMyPasswordView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can this form post on ResetMyPasswordView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can this form get on UserInfoEditView |:heavy_check_mark:|O|O|O|
| can this form post on UserInfoEditView |:heavy_check_mark:|O|O|O|
| can this form get on UserInfoEditView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can this form post on UserInfoEditView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can show on UserDBModelView |:heavy_check_mark:|O|O|O|
| can edit on UserDBModelView |:heavy_check_mark:|O|O|O|
| can delete on UserDBModelView |:heavy_check_mark:|O|O|O|
@@ -65,6 +65,7 @@ under the License.
| can get on MenuApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can list on AsyncEventsRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can invalidate on CacheRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can function names on Database |:heavy_check_mark:|O|O|O|
| can csv upload on Database |:heavy_check_mark:|O|O|O|
| can excel upload on Database |:heavy_check_mark:|O|O|O|
| can query form data on Api |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
@@ -75,6 +76,7 @@ under the License.
| can get on Datasource |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can my queries on SqlLab |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
| can log on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can schemas access for csv upload on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can import dashboards on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can schemas on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can sqllab history on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
@@ -116,6 +118,8 @@ under the License.
| menu access on Data |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| menu access on Databases |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| menu access on Datasets |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| menu access on Upload a CSV |:heavy_check_mark:|:heavy_check_mark:|O|O|
| menu access on Upload Excel |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| menu access on Charts |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| menu access on Dashboards |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| menu access on SQL Lab |:heavy_check_mark:|O|O|:heavy_check_mark:|
@@ -125,6 +129,13 @@ under the License.
| all datasource access on all_datasource_access |:heavy_check_mark:|:heavy_check_mark:|O|O|
| all database access on all_database_access |:heavy_check_mark:|:heavy_check_mark:|O|O|
| all query access on all_query_access |:heavy_check_mark:|O|O|O|
| can edit on UserOAuthModelView |:heavy_check_mark:|O|O|O|
| can list on UserOAuthModelView |:heavy_check_mark:|O|O|O|
| can show on UserOAuthModelView |:heavy_check_mark:|O|O|O|
| can userinfo on UserOAuthModelView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can add on UserOAuthModelView |:heavy_check_mark:|O|O|O|
| can delete on UserOAuthModelView |:heavy_check_mark:|O|O|O|
| userinfoedit on UserOAuthModelView |:heavy_check_mark:|O|O|O|
| can write on DynamicPlugin |:heavy_check_mark:|O|O|O|
| can edit on DynamicPlugin |:heavy_check_mark:|O|O|O|
| can list on DynamicPlugin |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
@@ -181,6 +192,7 @@ under the License.
| can share chart on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can this form get on ColumnarToDatabaseView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can this form post on ColumnarToDatabaseView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| menu access on Upload a Columnar file |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can export on Chart |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can write on DashboardFilterStateRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can read on DashboardFilterStateRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|

44
UPDATING.md
@@ -22,35 +22,6 @@ under the License.
|
||||
This file documents any backwards-incompatible changes in Superset and
|
||||
assists people when migrating to a new version.
|
||||
|
||||
## Next
|
||||
|
||||
- [32317](https://github.com/apache/superset/pull/32317) The horizontal filter bar feature is now out of testing/beta development and its feature flag `HORIZONTAL_FILTER_BAR` has been removed.
|
||||
- [31976](https://github.com/apache/superset/pull/31976) Removed the `DISABLE_LEGACY_DATASOURCE_EDITOR` feature flag. The previous value of the feature flag was `True` and now the feature is permanently removed.
|
||||
- [31959](https://github.com/apache/superset/pull/32000) Removes CSV_UPLOAD_MAX_SIZE config, use your web server to control file upload size.
|
||||
- [31959](https://github.com/apache/superset/pull/31959) Removes the following endpoints from data uploads: `/api/v1/database/<id>/<file type>_upload` and `/api/v1/database/<file type>_metadata`, in favour of new one (Details on the PR). And simplifies permissions.
|
||||
- [31844](https://github.com/apache/superset/pull/31844) The `ALERT_REPORTS_EXECUTE_AS` and `THUMBNAILS_EXECUTE_AS` config parameters have been renamed to `ALERT_REPORTS_EXECUTORS` and `THUMBNAILS_EXECUTORS` respectively. A new config flag `CACHE_WARMUP_EXECUTORS` has also been introduced to be able to control which user is used to execute cache warmup tasks. Finally, the config flag `THUMBNAILS_SELENIUM_USER` has been removed. To use a fixed executor for async tasks, use the new `FixedExecutor` class. See the config and docs for more info on setting up different executor profiles.
|
||||
- [31894](https://github.com/apache/superset/pull/31894) Domain sharding is deprecated in favor of HTTP2. The `SUPERSET_WEBSERVER_DOMAINS` configuration will be removed in the next major version (6.0)
|
||||
- [31794](https://github.com/apache/superset/pull/31794) Removed the previously deprecated `DASHBOARD_CROSS_FILTERS` feature flag
|
||||
- [31774](https://github.com/apache/superset/pull/31774): Fixes the spelling of the `USE-ANALAGOUS-COLORS` feature flag. Please update any scripts/configuration item to use the new/corrected `USE-ANALOGOUS-COLORS` flag spelling.
|
||||
- [31582](https://github.com/apache/superset/pull/31582) Removed the legacy Area, Bar, Event Flow, Heatmap, Histogram, Line, Sankey, and Sankey Loop charts. They were all automatically migrated to their ECharts counterparts with the exception of the Event Flow and Sankey Loop charts which were removed as they were not actively maintained and not widely used. If you were using the Event Flow or Sankey Loop charts, you will need to find an alternative solution.
|
||||
- [31198](https://github.com/apache/superset/pull/31198) Disallows by default the use of the following ClickHouse functions: "version", "currentDatabase", "hostName".
|
||||
- [29798](https://github.com/apache/superset/pull/29798) Since 3.1.0, the intial schedule for an alert or report was mistakenly offset by the specified timezone's relation to UTC. The initial schedule should now begin at the correct time.
|
||||
- [30021](https://github.com/apache/superset/pull/30021) The `dev` layer in our Dockerfile no long includes firefox binaries, only Chromium to reduce bloat/docker-build-time.
|
||||
- [30099](https://github.com/apache/superset/pull/30099) Translations are no longer included in the default docker image builds. If your environment requires translations, you'll want to set the docker build arg `BUILD_TRANSACTION=true`.
|
||||
- [31262](https://github.com/apache/superset/pull/31262) NOTE: deprecated `pylint` in favor of `ruff` as our only python linter. Only affect development workflows positively (not the release itself). It should cover most important rules, be much faster, but some things linting rules that were enforced before may not be enforce in the exact same way as before.
|
||||
- [31173](https://github.com/apache/superset/pull/31173) Modified `fetch_csrf_token` to align with HTTP standards, particularly regarding how cookies are handled. If you encounter any issues related to CSRF functionality, please report them as a new issue and reference this PR for context.
|
||||
- [31413](https://github.com/apache/superset/pull/31413) Enable the DATE_FORMAT_IN_EMAIL_SUBJECT feature flag to allow users to specify a date format for the email subject, which will then be replaced with the actual date.
|
||||
- [31385](https://github.com/apache/superset/pull/31385) Significant docker refactor, reducing access levels for the `superset` user, streamlining layer building, ...
|
||||
- [31503](https://github.com/apache/superset/pull/31503) Deprecating python 3.9.x support, 3.11 is now the recommended version and 3.10 is still supported over the Superset 5.0 lifecycle.
|
||||
- [29121](https://github.com/apache/superset/pull/29121) Removed the `css`, `position_json`, and `json_metadata` from the payload of the dashboard list endpoint (`GET api/v1/dashboard`) for performance reasons.
|
||||
- [29163](https://github.com/apache/superset/pull/29163) Removed the `SHARE_QUERIES_VIA_KV_STORE` and `KV_STORE` feature flags and changed the way Superset shares SQL Lab queries to use permalinks. The legacy `/kv` API was removed but we still support legacy links in 5.0. In 6.0, only permalinks will be supported.
|
||||
- [25166](https://github.com/apache/superset/pull/25166) Changed the default configuration of `UPLOAD_FOLDER` from `/app/static/uploads/` to `/static/uploads/`. It also removed the unused `IMG_UPLOAD_FOLDER` and `IMG_UPLOAD_URL` configuration options.
- [30284](https://github.com/apache/superset/pull/30284) Deprecated GLOBAL_ASYNC_QUERIES_REDIS_CONFIG in favor of the new GLOBAL_ASYNC_QUERIES_CACHE_BACKEND configuration. To leverage Redis Sentinel, set CACHE_TYPE to RedisSentinelCache, or use RedisCache for standalone Redis.
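  A rough sketch of the new setting in `superset_config.py`; the `CACHE_REDIS_*` keys are assumptions based on the Flask-Caching-style dictionaries Superset uses for its other caches, so adjust to your environment:

  ```python
  # Hedged sketch: configure the async-queries cache backend.
  # Use "RedisSentinelCache" as CACHE_TYPE to leverage Redis Sentinel,
  # or "RedisCache" for standalone Redis, as described above.
  GLOBAL_ASYNC_QUERIES_CACHE_BACKEND = {
      "CACHE_TYPE": "RedisCache",
      "CACHE_REDIS_HOST": "localhost",
      "CACHE_REDIS_PORT": 6379,
      "CACHE_REDIS_DB": 0,
  }
  ```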
- [31961](https://github.com/apache/superset/pull/31961) Upgraded React from version 16.13.1 to 17.0.2. If you are using custom frontend extensions or plugins, you may need to update them to be compatible with React 17.
- [31260](https://github.com/apache/superset/pull/31260) Docker images now use `uv pip install` instead of `pip install` to manage the python environment. Most docker-based deployments will be affected, whether you derive from one of the published images or have a custom bootstrap script that installs python libraries (drivers).

### Potential Downtime

## 4.1.0

- [29274](https://github.com/apache/superset/pull/29274): We made it easier to trigger CI on your
@@ -61,9 +32,9 @@ assists people when migrating to a new version.
`requirements/` folder. If you use these files for your builds you may want to double
check that your builds are not affected. `base.txt` should be the same as before, though
`development.txt` becomes a bigger set, incorporating the now defunct local, testing, integration, and docker
- [27434](https://github.com/apache/superset/pull/27434/files): DO NOT USE our docker compose.\*
- [27434](https://github.com/apache/superset/pull/27434/files): DO NOT USE our docker-compose.\*
files for production use cases! While we never really supported
or should have tried to support docker compose for production use cases, we now actively
or should have tried to support docker-compose for production use cases, we now actively
have taken a stance against supporting it. See the PR for details.
- [24112](https://github.com/apache/superset/pull/24112): Python 3.10 is now the recommended python version to use, 3.9 still
supported but getting deprecated in the nearish future. CI/CD runs on py310 so you probably want to align. If you
@@ -87,7 +58,7 @@ assists people when migrating to a new version.
backend, as well as the .json files used by the frontend. If you were doing anything before
as part of your bundling to expose translation packages, it's probably not needed anymore.
- [29264](https://github.com/apache/superset/pull/29264) Slack has updated its file upload API, and we are now supporting this new API in Superset, although the Slack API is not backward compatible. The original Slack integration is deprecated and we will require a new Slack scope `channels:read` to be added to Slack workspaces in order to use this new API. In an upcoming release, we will make this new Slack scope mandatory and remove the old Slack functionality.
- [30274](https://github.com/apache/superset/pull/30274) Moved SLACK_ENABLE_AVATAR from config.py to the feature flag framework, please adapt your configs.
- [30274](https://github.com/apache/superset/pull/30274) Moved SLACK_ENABLE_AVATAR from config.py to the feature flag framework, please adapt your configs

### Potential Downtime

@@ -145,7 +116,7 @@ assists people when migrating to a new version.
- [24911](https://github.com/apache/superset/pull/24911): Changes the column type from `TEXT` to `MediumText` in table `logs`, potentially requiring a table lock on MySQL dbs or taking some time to complete on large deployments.
- [24939](https://github.com/apache/superset/pull/24939): Augments the foreign key constraints for the `embedded_dashboards` table to include an explicit CASCADE ON DELETE to ensure the relevant records are deleted when a dashboard is deleted. Scheduled downtime may be advised.
- [24938](https://github.com/apache/superset/pull/24938): Augments the foreign key constraints for the `dashboard_slices` table to include an explicit CASCADE ON DELETE to ensure the relevant records are deleted when a dashboard or slice is deleted. Scheduled downtime may be advised.
- [24628](https://github.com/apache/superset/pull/24628): Augments the foreign key constraints for the `dashboard_owner`, `report_schedule_owner`, and `slice_owner` tables to include an explicit CASCADE ON DELETE to ensure the relevant ownership records are deleted when a dataset is deleted. Scheduled downtime may be advised.
- [24628]https://github.com/apache/superset/pull/24628): Augments the foreign key constraints for the `dashboard_owner`, `report_schedule_owner`, and `slice_owner` tables to include an explicit CASCADE ON DELETE to ensure the relevant ownership records are deleted when a dataset is deleted. Scheduled downtime may be advised.
- [24488](https://github.com/apache/superset/pull/24488): Augments the foreign key constraints for the `sql_metrics`, `sqlatable_user`, and `table_columns` tables which reference the `tables` table to include an explicit CASCADE ON DELETE to ensure the relevant records are deleted when a dataset is deleted. Scheduled downtime may be advised.
- [24232](https://github.com/apache/superset/pull/24232): Enables ENABLE_TEMPLATE_REMOVE_FILTERS, DRILL_TO_DETAIL, DASHBOARD_CROSS_FILTERS by default, marks VERSIONED_EXPORT and ENABLE_TEMPLATE_REMOVE_FILTERS as deprecated.
- [23652](https://github.com/apache/superset/pull/23652): Enables GENERIC_CHART_AXES feature flag by default.
@@ -161,7 +132,7 @@ assists people when migrating to a new version.

### Breaking Changes

- [24686](https://github.com/apache/superset/pull/24686): All dataset's custom explore_url are handled as relative URLs on the frontend, behaviour controlled by PREVENT_UNSAFE_DEFAULT_URLS_ON_DATASET.
- [24686]https://github.com/apache/superset/pull/24686): All dataset's custom explore_url are handled as relative URLs on the frontend, behaviour controlled by PREVENT_UNSAFE_DEFAULT_URLS_ON_DATASET.
- [24262](https://github.com/apache/superset/pull/24262): Enabled `TALISMAN_ENABLED` flag by default and provided stricter default Content Security Policy
- [24415](https://github.com/apache/superset/pull/24415): Removed the obsolete Druid NoSQL REGEX operator.
- [24423](https://github.com/apache/superset/pull/24423): Removed deprecated APIs `/superset/slice_json/...`, `/superset/annotation_json/...`
@@ -258,7 +229,7 @@ assists people when migrating to a new version.
- [19231](https://github.com/apache/superset/pull/19231): The `ENABLE_REACT_CRUD_VIEWS` feature flag has been removed (permanently enabled). Any deployments which had set this flag to false will need to verify that the React views support their use case.
- [19230](https://github.com/apache/superset/pull/19230): The `ROW_LEVEL_SECURITY` feature flag has been removed (permanently enabled). Any deployments which had set this flag to false will need to verify that the presence of the Row Level Security feature does not interfere with their use case.
- [19168](https://github.com/apache/superset/pull/19168): Celery upgrade to 5.X resulted in breaking changes to its command line invocation.
html#step-1-adjust-your-command-line-invocation) instructions for adjustments. Also consider migrating your Celery config per [here](https://docs.celeryq.dev/en/stable/userguide/configuration.html#conf-old-settings-map).
- [19142](https://github.com/apache/superset/pull/19142): The `VERSIONED_EXPORT` config key is now `True` by default.
- [19113](https://github.com/apache/superset/pull/19113): The `ENABLE_JAVASCRIPT_CONTROLS` config key has moved from an app config to a feature flag. Any deployments who overrode this setting will now need to override the feature flag from here onward.
- [19107](https://github.com/apache/superset/pull/19107): The `SQLLAB_BACKEND_PERSISTENCE` feature flag is now `True` by default, which enables persisting SQL Lab tabs in the backend instead of the browser's `localStorage`.
@@ -346,7 +317,8 @@ assists people when migrating to a new version.
### Potential Downtime

- [14234](https://github.com/apache/superset/pull/14234): Adds the `limiting_factor` column to the `query` table. Given the migration includes a DDL operation on a heavily trafficked table, potential service downtime may be required.
- [16454](https://github.com/apache/superset/pull/16454): Adds the `extra` column to the `table_columns` table. Users using MySQL will either need to schedule downtime or use the percona toolkit (or similar) to perform the migration.

-[16454](https://github.com/apache/superset/pull/16454): Adds the `extra` column to the `table_columns` table. Users using MySQL will either need to schedule downtime or use the percona toolkit (or similar) to perform the migration.

## 1.2.0


@@ -16,12 +16,15 @@
#

# -----------------------------------------------------------------------
# We don't support docker compose for production environments.
# We don't support docker-compose for production environments.
# If you choose to use this type of deployment make sure to
# create your own docker environment file (docker/.env) with your own
# unique random secure passwords and SECRET_KEY.
# -----------------------------------------------------------------------
x-superset-image: &superset-image apachesuperset.docker.scarf.sh/apache/superset:${TAG:-latest-dev}
x-superset-depends-on: &superset-depends-on
- db
- redis
x-superset-volumes:
&superset-volumes # /app/pythonpath_docker will be appended to the PYTHONPATH in the final container
- ./docker:/app/docker
@@ -41,7 +44,7 @@ services:
required: true
- path: docker/.env-local # optional override
required: false
image: postgres:16
image: postgres:15
container_name: superset_db
restart: unless-stopped
volumes:
@@ -61,12 +64,8 @@ services:
restart: unless-stopped
ports:
- 8088:8088
depends_on:
superset-init:
condition: service_completed_successfully
depends_on: *superset-depends-on
volumes: *superset-volumes
environment:
SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"

superset-init:
image: *superset-image
@@ -77,18 +76,11 @@ services:
required: true
- path: docker/.env-local # optional override
required: false
depends_on:
db:
condition: service_started
redis:
condition: service_started
depends_on: *superset-depends-on
user: "root"
volumes: *superset-volumes
healthcheck:
disable: true
environment:
SUPERSET_LOAD_EXAMPLES: "${SUPERSET_LOAD_EXAMPLES:-yes}"
SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"

superset-worker:
image: *superset-image
@@ -100,9 +92,7 @@ services:
- path: docker/.env-local # optional override
required: false
restart: unless-stopped
depends_on:
superset-init:
condition: service_completed_successfully
depends_on: *superset-depends-on
user: "root"
volumes: *superset-volumes
healthcheck:
@@ -111,8 +101,6 @@ services:
"CMD-SHELL",
"celery -A superset.tasks.celery_app:app inspect ping -d celery@$$HOSTNAME",
]
environment:
SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"

superset-worker-beat:
image: *superset-image
@@ -124,15 +112,11 @@ services:
- path: docker/.env-local # optional override
required: false
restart: unless-stopped
depends_on:
superset-init:
condition: service_completed_successfully
depends_on: *superset-depends-on
user: "root"
volumes: *superset-volumes
healthcheck:
disable: true
environment:
SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"

volumes:
superset_home:

@@ -16,11 +16,14 @@
#

# -----------------------------------------------------------------------
# We don't support docker compose for production environments.
# We don't support docker-compose for production environments.
# If you choose to use this type of deployment make sure to
# create your own docker environment file (docker/.env) with your own
# unique random secure passwords and SECRET_KEY.
# -----------------------------------------------------------------------
x-superset-depends-on: &superset-depends-on
- db
- redis
x-superset-volumes:
&superset-volumes # /app/pythonpath_docker will be appended to the PYTHONPATH in the final container
- ./docker:/app/docker
@@ -46,7 +49,7 @@ services:
required: true
- path: docker/.env-local # optional override
required: false
image: postgres:16
image: postgres:15
container_name: superset_db
restart: unless-stopped
volumes:
@@ -67,12 +70,8 @@ services:
restart: unless-stopped
ports:
- 8088:8088
depends_on:
superset-init:
condition: service_completed_successfully
depends_on: *superset-depends-on
volumes: *superset-volumes
environment:
SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"

superset-init:
container_name: superset_init
@@ -84,18 +83,11 @@ services:
required: true
- path: docker/.env-local # optional override
required: false
depends_on:
db:
condition: service_started
redis:
condition: service_started
depends_on: *superset-depends-on
user: "root"
volumes: *superset-volumes
healthcheck:
disable: true
environment:
SUPERSET_LOAD_EXAMPLES: "${SUPERSET_LOAD_EXAMPLES:-yes}"
SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"

superset-worker:
build:
@@ -108,9 +100,7 @@ services:
- path: docker/.env-local # optional override
required: false
restart: unless-stopped
depends_on:
superset-init:
condition: service_completed_successfully
depends_on: *superset-depends-on
user: "root"
volumes: *superset-volumes
healthcheck:
@@ -119,8 +109,6 @@ services:
"CMD-SHELL",
"celery -A superset.tasks.celery_app:app inspect ping -d celery@$$HOSTNAME",
]
environment:
SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"

superset-worker-beat:
build:
@@ -133,15 +121,11 @@ services:
- path: docker/.env-local # optional override
required: false
restart: unless-stopped
depends_on:
superset-init:
condition: service_completed_successfully
depends_on: *superset-depends-on
user: "root"
volumes: *superset-volumes
healthcheck:
disable: true
environment:
SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"

volumes:
superset_home:

@@ -16,12 +16,15 @@
#

# -----------------------------------------------------------------------
# We don't support docker compose for production environments.
# We don't support docker-compose for production environments.
# If you choose to use this type of deployment make sure to
# create your own docker environment file (docker/.env) with your own
# unique random secure passwords and SECRET_KEY.
# -----------------------------------------------------------------------
x-superset-user: &superset-user root
x-superset-depends-on: &superset-depends-on
- db
- redis
x-superset-volumes: &superset-volumes
# /app/pythonpath_docker will be appended to the PYTHONPATH in the final container
- ./docker:/app/docker
@@ -32,14 +35,9 @@ x-superset-volumes: &superset-volumes

x-common-build: &common-build
context: .
target: ${SUPERSET_BUILD_TARGET:-dev} # can use `dev` (default) or `lean`
target: dev
cache_from:
- apache/superset-cache:3.10-slim-bookworm
args:
DEV_MODE: "true"
INCLUDE_CHROMIUM: ${INCLUDE_CHROMIUM:-false}
INCLUDE_FIREFOX: ${INCLUDE_FIREFOX:-false}
BUILD_TRANSLATIONS: ${BUILD_TRANSLATIONS:-false}

services:
nginx:
@@ -67,7 +65,7 @@ services:
required: true
- path: docker/.env-local # optional override
required: false
image: postgres:16
image: postgres:15
container_name: superset_db
restart: unless-stopped
ports:
@@ -89,18 +87,13 @@ services:
restart: unless-stopped
ports:
- 8088:8088
# When in cypress-mode ->
- 8081:8081
extra_hosts:
- "host.docker.internal:host-gateway"
user: *superset-user
depends_on:
superset-init:
condition: service_completed_successfully
depends_on: *superset-depends-on
volumes: *superset-volumes
environment:
CYPRESS_CONFIG: "${CYPRESS_CONFIG:-}"
SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"

superset-websocket:
container_name: superset_websocket
@@ -127,7 +120,7 @@ services:
- /home/superset-websocket/dist

# Mounting a config file that contains a dummy secret required to boot up.
# do not use this docker compose in production
# do not use this docker-compose in production
- ./docker/superset-websocket/config.json:/home/superset-websocket/config.json
environment:
- PORT=8080
@@ -145,40 +138,20 @@ services:
required: true
- path: docker/.env-local # optional override
required: false
depends_on:
db:
condition: service_started
redis:
condition: service_started
depends_on: *superset-depends-on
user: *superset-user
volumes: *superset-volumes
environment:
CYPRESS_CONFIG: "${CYPRESS_CONFIG:-}"
SUPERSET_LOAD_EXAMPLES: "${SUPERSET_LOAD_EXAMPLES:-yes}"
SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"
healthcheck:
disable: true

superset-node:
build:
context: .
target: superset-node
args:
# This prevents building the frontend bundle since we'll mount local folder
# and build it on startup while firing docker-frontend.sh in dev mode, where
# it'll mount and watch local files and rebuild as you update them
DEV_MODE: "true"
BUILD_TRANSLATIONS: ${BUILD_TRANSLATIONS:-false}
image: node:18
environment:
# set this to false if you have perf issues running the npm i; npm run dev in-docker
# if you do so, you have to run this manually on the host, which should perform better!
BUILD_SUPERSET_FRONTEND_IN_DOCKER: true
NPM_RUN_PRUNE: false
SCARF_ANALYTICS: "${SCARF_ANALYTICS:-}"
# configuring the dev-server to use the host.docker.internal to connect to the backend
superset: "http://superset:8088"
ports:
- "127.0.0.1:9000:9000" # exposing the dynamic webpack dev server
container_name: superset_node
command: ["/app/docker/docker-frontend.sh"]
env_file:
@@ -186,6 +159,7 @@ services:
required: true
- path: docker/.env-local # optional override
required: false
depends_on: *superset-depends-on
volumes: *superset-volumes

superset-worker:
@@ -200,12 +174,8 @@ services:
required: false
environment:
CELERYD_CONCURRENCY: 2
CYPRESS_CONFIG: "${CYPRESS_CONFIG:-}"
SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"
restart: unless-stopped
depends_on:
superset-init:
condition: service_completed_successfully
depends_on: *superset-depends-on
user: *superset-user
volumes: *superset-volumes
extra_hosts:
@@ -227,15 +197,11 @@ services:
- path: docker/.env-local # optional override
required: false
restart: unless-stopped
depends_on:
- superset-worker
depends_on: *superset-depends-on
user: *superset-user
volumes: *superset-volumes
healthcheck:
disable: true
environment:
CYPRESS_CONFIG: "${CYPRESS_CONFIG:-}"
SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"

superset-tests-worker:
build:
@@ -256,11 +222,8 @@ services:
REDIS_RESULTS_DB: 3
REDIS_HOST: localhost
CELERYD_CONCURRENCY: 8
SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"
network_mode: host
depends_on:
superset-init:
condition: service_completed_successfully
depends_on: *superset-depends-on
user: *superset-user
volumes: *superset-volumes
healthcheck:

@@ -15,11 +15,8 @@
# limitations under the License.
#

# Allowing python to print() in docker
PYTHONUNBUFFERED=1

COMPOSE_PROJECT_NAME=superset
DEV_MODE=true

# database configurations (do not modify)
DATABASE_DB=superset
@@ -66,4 +63,3 @@ SUPERSET_SECRET_KEY=TEST_NON_DEV_SECRET
ENABLE_PLAYWRIGHT=false
PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true
BUILD_SUPERSET_FRONTEND_IN_DOCKER=true
SUPERSET_LOG_LEVEL=info

@@ -68,7 +68,7 @@ Don't forget to reload the page to take the new frontend into account though.

## Production

It is possible to run Superset in non-development mode by using [`docker-compose-non-dev.yml`](../docker-compose-non-dev.yml). This file excludes the volumes needed for development.
It is possible to run Superset in non-development mode by using [`docker-compose-non-dev.yml`](../docker-compose-non-dev.yml). This file excludes the volumes needed for development and uses [`./docker/.env-non-dev`](./.env-non-dev) which sets the variable `SUPERSET_ENV` to `production`.

## Resource Constraints


@@ -1,51 +0,0 @@
#!/usr/bin/env bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
set -euo pipefail

# Ensure this script is run as root
if [[ $EUID -ne 0 ]]; then
echo "This script must be run as root" >&2
exit 1
fi

# Check for required arguments
if [[ $# -lt 1 ]]; then
echo "Usage: $0 <package1> [<package2> ...]" >&2
exit 1
fi

# Colors for better logging (optional)
GREEN='\033[0;32m'
RED='\033[0;31m'
RESET='\033[0m'

# Install packages with clean-up
echo -e "${GREEN}Updating package lists...${RESET}"
apt-get update -qq

echo -e "${GREEN}Installing packages: $@${RESET}"
apt-get install -yqq --no-install-recommends "$@"

echo -e "${GREEN}Autoremoving unnecessary packages...${RESET}"
apt-get autoremove -y

echo -e "${GREEN}Cleaning up package cache and metadata...${RESET}"
apt-get clean
rm -rf /var/lib/apt/lists/* /var/cache/apt/archives/* /tmp/* /var/tmp/*

echo -e "${GREEN}Installation and cleanup complete.${RESET}"
@@ -18,39 +18,19 @@

set -eo pipefail

# Make python interactive
if [ "$DEV_MODE" == "true" ]; then
if [ "$(whoami)" = "root" ] && command -v uv > /dev/null 2>&1; then
echo "Reinstalling the app in editable mode"
uv pip install -e .
fi
fi
REQUIREMENTS_LOCAL="/app/docker/requirements-local.txt"
PORT=${PORT:-8088}
# If Cypress run – overwrite the password for admin and export env variables
if [ "$CYPRESS_CONFIG" == "true" ]; then
export SUPERSET_CONFIG=tests.integration_tests.superset_test_config
export SUPERSET_TESTENV=true
export POSTGRES_DB=superset_cypress
export SUPERSET__SQLALCHEMY_DATABASE_URI=postgresql+psycopg2://superset:superset@db:5432/superset_cypress
PORT=8081
fi
if [[ "$DATABASE_DIALECT" == postgres* ]] && [ "$(whoami)" = "root" ]; then
# older images may not have the postgres dev requirements installed
echo "Installing postgres requirements"
if command -v uv > /dev/null 2>&1; then
# Use uv in newer images
uv pip install -e .[postgres]
else
# Use pip in older images
pip install -e .[postgres]
fi
export SUPERSET__SQLALCHEMY_DATABASE_URI=postgresql+psycopg2://superset:superset@db:5432/superset
fi
#
# Make sure we have dev requirements installed
#
if [ -f "${REQUIREMENTS_LOCAL}" ]; then
echo "Installing local overrides at ${REQUIREMENTS_LOCAL}"
uv pip install --no-cache-dir -r "${REQUIREMENTS_LOCAL}"
pip install --no-cache-dir -r "${REQUIREMENTS_LOCAL}"
else
echo "Skipping local overrides"
fi
@@ -68,7 +48,7 @@ case "${1}" in
;;
app)
echo "Starting web app (using development server)..."
flask run -p $PORT --with-threads --reload --debugger --host=0.0.0.0
flask run -p 8088 --with-threads --reload --debugger --host=0.0.0.0
;;
app-gunicorn)
echo "Starting web app..."

@@ -23,4 +23,4 @@
export SERVER_THREADS_AMOUNT=8
# start up the web server

/app/docker/entrypoints/run-server.sh
/usr/bin/run-server.sh
@@ -1,28 +0,0 @@
#!/usr/bin/env bash

# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

# ------------------------------------------------------------------------
# Creates the examples database and respective user. This database location
# and access credentials are defined on the environment variables
# ------------------------------------------------------------------------
set -e

psql -v ON_ERROR_STOP=1 --username "${POSTGRES_USER}" <<-EOSQL
CREATE DATABASE superset_cypress;
EOSQL
@@ -24,23 +24,12 @@ if [ "$PUPPETEER_SKIP_CHROMIUM_DOWNLOAD" = "false" ]; then
fi

if [ "$BUILD_SUPERSET_FRONTEND_IN_DOCKER" = "true" ]; then
echo "Building Superset frontend in dev mode inside docker container"
cd /app/superset-frontend
npm install -f --no-optional --global webpack webpack-cli
npm install -f

if [ "$NPM_RUN_PRUNE" = "true" ]; then
echo "Running `npm run prune`"
npm run prune
fi

echo "Running `npm install`"
npm install

echo "Start webpack dev server"
# start the webpack dev server, serving dynamically at http://localhost:9000
# it proxies to the backend served at http://localhost:8088
npm run dev-server

echo "Running frontend"
npm run dev
else
echo "Skipping frontend build steps - YOU NEED TO RUN IT MANUALLY ON THE HOST!"
echo "https://superset.apache.org/docs/contributing/development/#webpack-dev-server"
echo "Skipping frontend build steps - YOU RUN IT MANUALLY ON THE HOST!"
fi

@@ -22,26 +22,28 @@ set -e
#
/app/docker/docker-bootstrap.sh

if [ "$SUPERSET_LOAD_EXAMPLES" = "yes" ]; then
STEP_CNT=4
else
STEP_CNT=3
fi
STEP_CNT=4

echo_step() {
cat <<EOF

######################################################################


Init Step ${1}/${STEP_CNT} [${2}] -- ${3}


######################################################################

EOF
}
ADMIN_PASSWORD="${ADMIN_PASSWORD:-admin}"
# If Cypress run – overwrite the password for admin and export env variables
if [ "$CYPRESS_CONFIG" == "true" ]; then
ADMIN_PASSWORD="general"
export SUPERSET_CONFIG=tests.integration_tests.superset_test_config
export SUPERSET_TESTENV=true
export POSTGRES_DB=superset_cypress
export SUPERSET__SQLALCHEMY_DATABASE_URI=postgresql+psycopg2://superset:superset@db:5432/superset_cypress
export SUPERSET__SQLALCHEMY_DATABASE_URI=postgresql+psycopg2://superset:superset@db:5432/superset
fi
# Initialize the database
echo_step "1" "Starting" "Applying DB migrations"
@@ -50,16 +52,12 @@ echo_step "1" "Complete" "Applying DB migrations"

# Create an admin user
echo_step "2" "Starting" "Setting up admin user ( admin / $ADMIN_PASSWORD )"
if [ "$CYPRESS_CONFIG" == "true" ]; then
superset load_test_users
else
superset fab create-admin \
--username admin \
--email admin@superset.com \
--password "$ADMIN_PASSWORD" \
--firstname Superset \
--lastname Admin
fi
superset fab create-admin \
--username admin \
--firstname Superset \
--lastname Admin \
--email admin@superset.com \
--password "$ADMIN_PASSWORD"
echo_step "2" "Complete" "Setting up admin user"
# Create default roles and permissions
echo_step "3" "Starting" "Setting up roles and perms"
@@ -71,9 +69,10 @@ if [ "$SUPERSET_LOAD_EXAMPLES" = "yes" ]; then
echo_step "4" "Starting" "Loading examples"
# If Cypress run which consumes superset_test_config – load required data for tests
if [ "$CYPRESS_CONFIG" == "true" ]; then
superset load_test_users
superset load_examples --load-test-data
else
superset load_examples
superset load_examples --force
fi
echo_step "4" "Complete" "Loading examples"
fi

@@ -112,12 +112,6 @@ http {
proxy_set_header Host $host;
}

location /static {
proxy_pass http://host.docker.internal:9000; # Proxy to superset-node
proxy_http_version 1.1;
proxy_set_header Host $host;
}

location / {
proxy_pass http://superset_app;
proxy_set_header Host $host;

@@ -1,64 +0,0 @@
#!/usr/bin/env bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
set -euo pipefail

# Default flag
REQUIRES_BUILD_ESSENTIAL=false
USE_CACHE=true

# Filter arguments
ARGS=()
for arg in "$@"; do
case "$arg" in
--requires-build-essential)
REQUIRES_BUILD_ESSENTIAL=true
;;
--no-cache)
USE_CACHE=false
;;
*)
ARGS+=("$arg")
;;
esac
done

# Install build-essential if required
if $REQUIRES_BUILD_ESSENTIAL; then
echo "Installing build-essential for package builds..."
apt-get update -qq \
&& apt-get install -yqq --no-install-recommends build-essential
fi

# Choose whether to use pip cache
if $USE_CACHE; then
echo "Using pip cache..."
uv pip install "${ARGS[@]}"
else
echo "Disabling pip cache..."
uv pip install --no-cache-dir "${ARGS[@]}"
fi

# Remove build-essential if it was installed
if $REQUIRES_BUILD_ESSENTIAL; then
echo "Removing build-essential to keep the image lean..."
apt-get autoremove -yqq --purge build-essential \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/* /var/cache/apt/archives/*
fi

echo "Python packages installed successfully."
@@ -22,7 +22,6 @@
#
import logging
import os
import sys

from celery.schedules import crontab
from flask_caching.backends.filesystemcache import FileSystemCache
@@ -100,26 +99,11 @@ CELERY_CONFIG = CeleryConfig

FEATURE_FLAGS = {"ALERT_REPORTS": True}
ALERT_REPORTS_NOTIFICATION_DRY_RUN = True
WEBDRIVER_BASEURL = "http://superset:8088/" # When using docker compose baseurl should be http://superset_app:8088/ # noqa: E501
WEBDRIVER_BASEURL = "http://superset:8088/" # When using docker compose baseurl should be http://superset_app:8088/
# The base URL for the email report hyperlinks.
WEBDRIVER_BASEURL_USER_FRIENDLY = WEBDRIVER_BASEURL
SQLLAB_CTAS_NO_LIMIT = True

log_level_text = os.getenv("SUPERSET_LOG_LEVEL", "INFO")
LOG_LEVEL = getattr(logging, log_level_text.upper(), logging.INFO)

if os.getenv("CYPRESS_CONFIG") == "true":
# When running the service as a cypress backend, we need to import the config
# located @ tests/integration_tests/superset_test_config.py
base_dir = os.path.dirname(__file__)
module_folder = os.path.abspath(
os.path.join(base_dir, "../../tests/integration_tests/")
)
sys.path.insert(0, module_folder)
from superset_test_config import * # noqa

sys.path.pop(0)

#
# Optionally import superset_config_docker.py (which will have been included on
# the PYTHONPATH) in order to allow for local settings to be overridden
@@ -129,7 +113,7 @@ try:
from superset_config_docker import * # noqa

logger.info(
f"Loaded your Docker configuration at [{superset_config_docker.__file__}]"
f"Loaded your Docker configuration at " f"[{superset_config_docker.__file__}]"
)
except ImportError:
logger.info("Using default Docker config...")

@@ -26,7 +26,6 @@ gunicorn \
--workers ${SERVER_WORKER_AMOUNT:-1} \
--worker-class ${SERVER_WORKER_CLASS:-gthread} \
--threads ${SERVER_THREADS_AMOUNT:-20} \
--log-level "${GUNICORN_LOGLEVEL:info}" \
--timeout ${GUNICORN_TIMEOUT:-60} \
--keep-alive ${GUNICORN_KEEPALIVE:-2} \
--max-requests ${WORKER_MAX_REQUESTS:-0} \
@@ -1 +1 @@
v20.18.3
v20.12.2

@@ -16,8 +16,7 @@ KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->

This is the public documentation site for Superset, built using
[Docusaurus 3](https://docusaurus.io/). See
[Docusaurus 2](https://docusaurus.io/). See
[CONTRIBUTING.md](../CONTRIBUTING.md#documentation) for documentation on
contributing to documentation.

@@ -1,4 +1,3 @@
/* eslint-env node */
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file

@@ -63,7 +63,6 @@
"Fiji",
"Finland",
"France",
"France (with overseas)",
"France (regions)",
"French Polynesia",
"Gabon",
@@ -78,7 +77,6 @@
"Guyana",
"Haiti",
"Honduras",
"Hungary",
"Iceland",
"India",
"Indonesia",

@@ -4,6 +4,7 @@ hide_title: true
sidebar_position: 10
---

import { Buffer } from 'buffer/index.js';
import SwaggerUI from 'swagger-ui-react';
import openapi from '/resources/openapi.json';
import 'swagger-ui-react/swagger-ui.css';

@@ -25,9 +25,6 @@ Alerts and reports are disabled by default. To turn them on, you need to do some
- At least one of those must be configured, depending on what you want to use:
- emails: `SMTP_*` settings
- Slack messages: `SLACK_API_TOKEN`
- Users can customize the email subject by including date code placeholders, which will automatically be replaced with the corresponding UTC date when the email is sent. To enable this functionality, activate the `"DATE_FORMAT_IN_EMAIL_SUBJECT"` [feature flag](/docs/configuration/configuring-superset#feature-flags). This enables date formatting in email subjects, preventing all reporting emails from being grouped into the same thread (optional for the reporting feature; a minimal sketch follows after this list).
- Use date codes from [strftime.org](https://strftime.org/) to create the email subject.
- If no date code is provided, the original string will be used as the email subject.
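A minimal sketch of what this looks like in `superset_config.py`; the subject string is a made-up example using standard strftime codes:

```python
# Enable date formatting in email subjects (feature flag named above).
FEATURE_FLAGS = {
    "DATE_FORMAT_IN_EMAIL_SUBJECT": True,
}
# A report whose subject is, e.g., "Weekly sales report %Y-%m-%d"
# would then go out as "Weekly sales report 2024-01-01" (UTC date).
```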

##### Disable dry-run mode

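The body of this section is cut off in this diff view, but based on the `ALERT_REPORTS_NOTIFICATION_DRY_RUN` setting shown in the docker configuration above, disabling dry-run mode presumably amounts to:

```python
# Sketch: actually send notifications instead of only logging them.
ALERT_REPORTS_NOTIFICATION_DRY_RUN = False
```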
@@ -56,14 +53,11 @@ To send alerts and reports to Slack channels, you need to create a new Slack App
- `incoming-webhook`
- `files:write`
- `chat:write`
- `channels:read`
- `groups:read`
4. At the top of the "OAuth and Permissions" section, click "install to workspace".
5. Select a default channel for your app and continue.
(You can post to any channel by inviting your Superset app into that channel).
6. The app should now be installed in your workspace, and a "Bot User OAuth Access Token" should have been created. Copy that token in the `SLACK_API_TOKEN` variable of your `superset_config.py`.
7. Ensure the feature flag `ALERT_REPORT_SLACK_V2` is set to True in `superset_config.py`
8. Restart the service (or run `superset init`) to pull in the new configuration.
7. Restart the service (or run `superset init`) to pull in the new configuration.

Note: when you configure an alert or a report, the Slack channel list takes channel names without the leading '#' e.g. use `alerts` instead of `#alerts`.
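Putting steps 6 and 7 together, the relevant `superset_config.py` fragment is roughly the following sketch (the token value is a placeholder):

```python
# Bot User OAuth Access Token copied from your Slack app (placeholder value).
SLACK_API_TOKEN = "xoxb-..."
# Opt in to the new Slack file-upload API (step 7 above).
FEATURE_FLAGS = {
    "ALERT_REPORT_SLACK_V2": True,
}
```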

@@ -92,7 +86,6 @@ You can find documentation about each field in the default `config.py` in the Gi
You need to replace default values with your custom Redis, Slack and/or SMTP config.

Superset uses Celery beat and Celery worker(s) to send alerts and reports.

- The beat is the scheduler that tells the worker when to perform its tasks. This schedule is defined when you create the alert or report.
- The worker will process the tasks that need to be performed when an alert or report is fired.

@@ -144,7 +137,7 @@ SLACK_API_TOKEN = "xoxb-"
SMTP_HOST = "smtp.sendgrid.net" # change to your host
SMTP_PORT = 2525 # your port, e.g. 587
SMTP_STARTTLS = True
SMTP_SSL_SERVER_AUTH = True # If you're using an SMTP server with a valid certificate
SMTP_SSL_SERVER_AUTH = True # If your using an SMTP server with a valid certificate
SMTP_SSL = False
SMTP_USER = "your_user" # use the empty string "" if using an unauthenticated SMTP server
SMTP_PASSWORD = "your_password" # use the empty string "" if using an unauthenticated SMTP server
@@ -181,13 +174,15 @@ By default, Alerts and Reports are executed as the owner of the alert/report obj
just change the config as follows (`admin` in this example):

```python
from superset.tasks.types import FixedExecutor
from superset.tasks.types import ExecutorType

ALERT_REPORTS_EXECUTORS = [FixedExecutor("admin")]
THUMBNAIL_SELENIUM_USER = 'admin'
ALERT_REPORTS_EXECUTE_AS = [ExecutorType.SELENIUM]
```

Please refer to `ExecutorType` in the codebase for other executor types.


**Important notes**

- Be mindful of the concurrency setting for celery (using `-c 4`). Selenium/webdriver instances can
@@ -199,6 +194,7 @@ Please refer to `ExecutorType` in the codebase for other executor types.
- Adjust `WEBDRIVER_BASEURL` in your configuration file if celery workers can’t access Superset via
its default value of `http://0.0.0.0:8080/`.
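For example, in a docker compose deployment the workers reach the webserver by service name, so per the comment in the docker configuration shown earlier the override looks roughly like this (the hostname is deployment-specific):

```python
# Sketch: point the report webdriver at the app container instead of 0.0.0.0.
WEBDRIVER_BASEURL = "http://superset_app:8088/"
```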

It's also possible to specify a minimum interval between each report's execution through the config file:

```python
@@ -255,18 +251,15 @@ FROM apache/superset:3.1.0
USER root

RUN apt-get update && \
apt-get install -y wget zip libaio1

RUN export CHROMEDRIVER_VERSION=$(curl --silent https://googlechromelabs.github.io/chrome-for-testing/LATEST_RELEASE_116) && \
wget -O google-chrome-stable_current_amd64.deb -q http://dl.google.com/linux/chrome/deb/pool/main/g/google-chrome-stable/google-chrome-stable_${CHROMEDRIVER_VERSION}-1_amd64.deb && \
wget -q https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb && \
apt-get install -y --no-install-recommends ./google-chrome-stable_current_amd64.deb && \
rm -f google-chrome-stable_current_amd64.deb

RUN export CHROMEDRIVER_VERSION=$(curl --silent https://googlechromelabs.github.io/chrome-for-testing/LATEST_RELEASE_116) && \
wget -q https://storage.googleapis.com/chrome-for-testing-public/${CHROMEDRIVER_VERSION}/linux64/chromedriver-linux64.zip && \
unzip -j chromedriver-linux64.zip -d /usr/bin && \
RUN export CHROMEDRIVER_VERSION=$(curl --silent https://chromedriver.storage.googleapis.com/LATEST_RELEASE_102) && \
wget -q https://chromedriver.storage.googleapis.com/${CHROMEDRIVER_VERSION}/chromedriver_linux64.zip && \
unzip chromedriver_linux64.zip -d /usr/bin && \
chmod 755 /usr/bin/chromedriver && \
rm -f chromedriver-linux64.zip
rm -f chromedriver_linux64.zip

RUN pip install --no-cache gevent psycopg2 redis

@@ -304,7 +297,6 @@ One symptom of an invalid connection to an email server is receiving an error of
Confirm via testing that your outbound email configuration is correct. Here is the simplest test, for an unauthenticated SMTP email service running on port 25. If you are sending over SSL, for instance, study how [Superset's codebase sends emails](https://github.com/apache/superset/blob/master/superset/utils/core.py#L818) and then test with those commands and arguments.

Start Python in your worker environment, replace all example values, and run:

```python
import smtplib
from email.mime.multipart import MIMEMultipart
@@ -326,7 +318,6 @@ mailserver.quit()
This should send an email.

Possible fixes:

- Some cloud hosts disable outgoing unauthenticated SMTP email to prevent spam. For instance, [Azure blocks port 25 by default on some machines](https://learn.microsoft.com/en-us/azure/virtual-network/troubleshoot-outbound-smtp-connectivity). Enable that port or use another sending method.
- Use another set of SMTP credentials that you verify works in this setup.


@@ -42,13 +42,13 @@ CELERY_CONFIG = CeleryConfig

To start a Celery worker to leverage the configuration, run the following command:

```bash
```
celery --app=superset.tasks.celery_app:app worker --pool=prefork -O fair -c 4
```

To start a job which schedules periodic background jobs, run the following command:

```bash
```
celery --app=superset.tasks.celery_app:app beat
```

@@ -93,12 +93,12 @@ issues arise. Please clear your existing results cache store when upgrading an e

Flower is a web based tool for monitoring the Celery cluster which you can install from pip:

```bash
```python
pip install flower
```

You can run flower using:

```bash
```
celery --app=superset.tasks.celery_app:app flower
```

@@ -13,17 +13,16 @@ SimpleCache (in-memory), or the local filesystem.
[Custom cache backends](https://flask-caching.readthedocs.io/en/latest/#custom-cache-backends)
are also supported.

Caching can be configured by providing dictionaries in
`superset_config.py` that comply with [the Flask-Caching config specifications](https://flask-caching.readthedocs.io/en/latest/#configuring-flask-caching).
Caching can be configured by providing a dictionaries in
`superset_config.py` that comply with[the Flask-Caching config specifications](https://flask-caching.readthedocs.io/en/latest/#configuring-flask-caching).

The following cache configurations can be customized in this way:

- Dashboard filter state (required): `FILTER_STATE_CACHE_CONFIG`.
- Explore chart form data (required): `EXPLORE_FORM_DATA_CACHE_CONFIG`
- Metadata cache (optional): `CACHE_CONFIG`
- Charting data queried from datasets (optional): `DATA_CACHE_CONFIG`

For example, to configure the filter state cache using Redis:
For example, to configure the filter state cache using redis:

```python
FILTER_STATE_CACHE_CONFIG = {
@@ -82,7 +81,7 @@ See [Async Queries via Celery](/docs/configuration/async-queries-celery) for det

## Caching Thumbnails

This is an optional feature that can be turned on by activating its [feature flag](/docs/configuration/configuring-superset#feature-flags) on config:
This is an optional feature that can be turned on by activating it’s [feature flag](/docs/configuration/configuring-superset#feature-flags) on config:

```
FEATURE_FLAGS = {
@@ -95,11 +94,13 @@ By default thumbnails are rendered per user, and will fall back to the Selenium
To always render thumbnails as a fixed user (`admin` in this example), use the following configuration:

```python
from superset.tasks.types import FixedExecutor
from superset.tasks.types import ExecutorType

THUMBNAIL_EXECUTORS = [FixedExecutor("admin")]
THUMBNAIL_SELENIUM_USER = "admin"
THUMBNAIL_EXECUTE_AS = [ExecutorType.SELENIUM]
```


For this feature you will need a cache system and celery workers. All thumbnails are stored on cache
and are processed asynchronously by the workers.

@@ -129,6 +130,8 @@ def init_thumbnail_cache(app: Flask) -> S3Cache:


THUMBNAIL_CACHE_CONFIG = init_thumbnail_cache
# Async selenium thumbnail task will use the following user
THUMBNAIL_SELENIUM_USER = "Admin"
```

Using the above example cache keys for dashboards will be `superset_thumb__dashboard__{ID}`. You can

@@ -37,7 +37,7 @@ ENV SUPERSET_CONFIG_PATH /app/superset_config.py
```

Docker compose deployments handle application configuration differently using specific conventions.
Refer to the [docker compose tips & configuration](/docs/installation/docker-compose#docker-compose-tips--configuration)
Refer to the [docker-compose tips & configuration](/docs/installation/docker-compose#docker-compose-tips--configuration)
for details.

The following is an example of just a few of the parameters you can set in your `superset_config.py` file:
@@ -117,7 +117,7 @@ Your deployment must use a complex, unique key.

### Rotating to a newer SECRET_KEY

If you wish to change your existing SECRET_KEY, add the existing SECRET_KEY to your `superset_config.py` file as
`PREVIOUS_SECRET_KEY =`and provide your new key as `SECRET_KEY =`. You can find your current SECRET_KEY with these
`PREVIOUS_SECRET_KEY = `and provide your new key as `SECRET_KEY =`. You can find your current SECRET_KEY with these
commands - if running Superset with Docker, execute from within the Superset application container:

```python
||||
|
||||
Superset supports the following database engines/versions:
|
||||
|
||||
| Database Engine | Supported Versions |
|
||||
| ----------------------------------------- | ---------------------------------------- |
|
||||
| [PostgreSQL](https://www.postgresql.org/) | 10.X, 11.X, 12.X, 13.X, 14.X, 15.X, 16.X |
|
||||
| [MySQL](https://www.mysql.com/) | 5.7, 8.X |
|
||||
| Database Engine | Supported Versions |
|
||||
| ----------------------------------------- | ---------------------------------- |
|
||||
| [PostgreSQL](https://www.postgresql.org/) | 10.X, 11.X, 12.X, 13.X, 14.X, 15.X |
|
||||
| [MySQL](https://www.mysql.com/) | 5.7, 8.X |
|
||||
|
||||
Use the following database drivers and connection strings:
|
||||
|
||||
@@ -283,7 +283,7 @@ class CustomSsoSecurityManager(SupersetSecurityManager):
|
||||
...
|
||||
```
|
||||
|
||||
This file must be located in the same directory as `superset_config.py` with the name
|
||||
This file must be located at the same directory than `superset_config.py` with the name
|
||||
`custom_sso_security_manager.py`. Finally, add the following 2 lines to `superset_config.py`:
|
||||
|
||||
```
|
||||
@@ -300,7 +300,6 @@ CUSTOM_SECURITY_MANAGER = CustomSsoSecurityManager
|
||||
- If an OAuth2 authorization server supports OpenID Connect 1.0, you could configure its configuration
|
||||
document URL only without providing `api_base_url`, `access_token_url`, `authorize_url` and other
|
||||
required options like user info endpoint, jwks uri etc. For instance:
|
||||
|
||||
```python
|
||||
OAUTH_PROVIDERS = [
|
||||
{ 'name':'egaSSO',
|
||||
@@ -314,15 +313,12 @@ CUSTOM_SECURITY_MANAGER = CustomSsoSecurityManager
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
### Keycloak-Specific Configuration using Flask-OIDC
|
||||
If you are using Keycloak as OpenID Connect 1.0 Provider, the above configuration based on [`Authlib`](https://authlib.org/) might not work. In this case using [`Flask-OIDC`](https://https://pypi.org/project/flask-oidc/) is a viable option.
|
||||
|
||||
If you are using Keycloak as OpenID Connect 1.0 Provider, the above configuration based on [`Authlib`](https://authlib.org/) might not work. In this case using [`Flask-OIDC`](https://pypi.org/project/flask-oidc/) is a viable option.
|
||||
|
||||
Make sure the pip package [`Flask-OIDC`](https://pypi.org/project/flask-oidc/) is installed on the webserver. This was successfully tested using version 2.2.0. This package requires [`Flask-OpenID`](https://pypi.org/project/Flask-OpenID/) as a dependency.
|
||||
Make sure the pip package [`Flask-OIDC`](https://https://pypi.org/project/flask-oidc/) is installed on the webserver. This was succesfully tested using version 2.2.0. This package requires [`Flask-OpenID`](https://pypi.org/project/Flask-OpenID/) as a dependency.
|
||||
|
||||
The following code defines a new security manager. Add it to a new file named `keycloak_security_manager.py`, placed in the same directory as your `superset_config.py` file.
|
||||
|
||||
```python
|
||||
from flask_appbuilder.security.manager import AUTH_OID
|
||||
from superset.security import SupersetSecurityManager
|
||||
@@ -377,9 +373,7 @@ class AuthOIDCView(AuthOIDView):
|
||||
return redirect(
|
||||
oidc.client_secrets.get('issuer') + '/protocol/openid-connect/logout?redirect_uri=' + quote(redirect_url))
|
||||
```
|
||||
|
||||
Then add to your `superset_config.py` file:
|
||||
|
||||
```python
|
||||
from keycloak_security_manager import OIDCSecurityManager
|
||||
from flask_appbuilder.security.manager import AUTH_OID, AUTH_REMOTE_USER, AUTH_DB, AUTH_LDAP, AUTH_OAUTH
|
||||
@@ -399,9 +393,7 @@ AUTH_USER_REGISTRATION = True
|
||||
# The default user self registration role
|
||||
AUTH_USER_REGISTRATION_ROLE = 'Public'
|
||||
```
|
||||
|
||||
Store your client-specific OpenID information in a file called `client_secret.json`. Create this file in the same directory as `superset_config.py`:
|
||||
|
||||
```json
|
||||
{
|
||||
"<myOpenIDProvider>": {
|
||||
@@ -418,7 +410,6 @@ Store your client-specific OpenID information in a file called `client_secret.js
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## LDAP Authentication
|
||||
|
||||
FAB supports authenticating user credentials against an LDAP server.
|
||||
@@ -441,7 +432,6 @@ AUTH_ROLES_MAPPING = {
|
||||
"superset_admins": ["Admin"],
|
||||
}
|
||||
```
|
||||
|
||||
### Mapping LDAP groups to Superset roles
|
||||
|
||||
The following `AUTH_ROLES_MAPPING` dictionary would map the LDAP DN "cn=superset_users,ou=groups,dc=example,dc=com" to the Superset roles "Gamma" as well as "Alpha", and the LDAP DN "cn=superset_admins,ou=groups,dc=example,dc=com" to the Superset role "Admin".
|
||||
@@ -452,7 +442,6 @@ AUTH_ROLES_MAPPING = {
|
||||
"cn=superset_admins,ou=groups,dc=example,dc=com": ["Admin"],
|
||||
}
|
||||
```
|
||||
|
||||
Note: This requires `AUTH_LDAP_SEARCH` to be set. For more details, please see the [FAB Security documentation](https://flask-appbuilder.readthedocs.io/en/latest/security.html).
|
||||
|
||||
### Syncing roles at login
|
||||
@@ -486,7 +475,7 @@ def FLASK_APP_MUTATOR(app: Flask) -> None:
|
||||
|
||||
To support a diverse set of users, Superset has some features that are not enabled by default. For
|
||||
example, some users have stronger security restrictions, while some others may not. So Superset
|
||||
allows users to enable or disable some features by config. For feature owners, you can add optional
|
||||
allow users to enable or disable some features by config. For feature owners, you can add optional
|
||||
functionalities in Superset, but will be only affected by a subset of users.
|
||||
|
||||
You can enable or disable features with flag from `superset_config.py`:
|
||||
|
||||
@@ -31,17 +31,18 @@ install new database drivers into your Superset configuration.
|
||||
|
||||
### Supported Databases and Dependencies
|
||||
|
||||
|
||||
Some of the recommended packages are shown below. Please refer to
|
||||
[pyproject.toml](https://github.com/apache/superset/blob/master/pyproject.toml) for the versions that
|
||||
are compatible with Superset.
|
||||
|
||||
| <div style={{width: '150px'}}>Database</div> | PyPI package | Connection String |
| --- | --- | --- |
| [AWS Athena](/docs/configuration/databases#aws-athena) | `pip install pyathena[pandas]` , `pip install PyAthenaJDBC` | `awsathena+rest://{access_key_id}:{access_key}@athena.{region}.amazonaws.com/{schema}?s3_staging_dir={s3_staging_dir}&...` |
| [AWS DynamoDB](/docs/configuration/databases#aws-dynamodb) | `pip install pydynamodb` | `dynamodb://{access_key_id}:{secret_access_key}@dynamodb.{region_name}.amazonaws.com?connector=superset` |
| [AWS Redshift](/docs/configuration/databases#aws-redshift) | `pip install sqlalchemy-redshift` | `redshift+psycopg2://<userName>:<DBPassword>@<AWS End Point>:5439/<Database Name>` |
| [Apache Doris](/docs/configuration/databases#apache-doris) | `pip install pydoris` | `doris://<User>:<Password>@<Host>:<Port>/<Catalog>.<Database>` |
| [Apache Drill](/docs/configuration/databases#apache-drill) | `pip install sqlalchemy-drill` | `drill+sadrill://<username>:<password>@<host>:<port>/<storage_plugin>`, often useful: `?use_ssl=True/False` |
| [Apache Druid](/docs/configuration/databases#apache-druid) | `pip install pydruid` | `druid://<User>:<password>@<Host>:<Port-default-9088>/druid/v2/sql` |
| [Apache Hive](/docs/configuration/databases#hive) | `pip install pyhive` | `hive://hive@{hostname}:{port}/{database}` |
| [Apache Impala](/docs/configuration/databases#apache-impala) | `pip install impyla` | `impala://{hostname}:{port}/{database}` |
| [ClickHouse](/docs/configuration/databases#clickhouse) | `pip install clickhouse-connect` | `clickhousedb://{username}:{password}@{hostname}:{port}/{database}` |
| [CockroachDB](/docs/configuration/databases#cockroachdb) | `pip install cockroachdb` | `cockroachdb://root@{hostname}:{port}/{database}?sslmode=disable` |
| [Couchbase](/docs/configuration/databases#couchbase) | `pip install couchbase-sqlalchemy` | `couchbase://{username}:{password}@{hostname}:{port}?truststorepath={ssl certificate path}` |
| [CrateDB](/docs/configuration/databases#cratedb) | `pip install sqlalchemy-cratedb` | `crate://{username}:{password}@{hostname}:{port}`, often useful: `?ssl=true/false` or `?schema=testdrive` |
| [Denodo](/docs/configuration/databases#denodo) | `pip install denodo-sqlalchemy` | `denodo://{username}:{password}@{hostname}:{port}/{database}` |
| [Dremio](/docs/configuration/databases#dremio) | `pip install sqlalchemy_dremio` | `dremio+flight://{username}:{password}@{host}:32010`, often useful: `?UseEncryption=true/false`. For Legacy ODBC: `dremio+pyodbc://{username}:{password}@{host}:31010` |
| [Elasticsearch](/docs/configuration/databases#elasticsearch) | `pip install elasticsearch-dbapi` | `elasticsearch+http://{user}:{password}@{host}:9200/` |
| [Exasol](/docs/configuration/databases#exasol) | `pip install sqlalchemy-exasol` | `exa+pyodbc://{username}:{password}@{hostname}:{port}/my_schema?CONNECTIONLCALL=en_US.UTF-8&driver=EXAODBC` |
| [Google BigQuery](/docs/configuration/databases#google-bigquery) | `pip install sqlalchemy-bigquery` | `bigquery://{project_id}` |
| [IBM Netezza Performance Server](/docs/configuration/databases#ibm-netezza-performance-server) | `pip install nzalchemy` | `netezza+nzpy://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
| [MySQL](/docs/configuration/databases#mysql) | `pip install mysqlclient` | `mysql://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
| [OceanBase](/docs/configuration/databases#oceanbase) | `pip install oceanbase_py` | `oceanbase://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
| [Oracle](/docs/configuration/databases#oracle) | `pip install cx_Oracle` | `oracle://<username>:<password>@<hostname>:<port>` |
| [Parseable](/docs/configuration/databases#parseable) | `pip install sqlalchemy-parseable` | `parseable://<UserName>:<DBPassword>@<Database Host>/<Stream Name>` |
| [PostgreSQL](/docs/configuration/databases#postgres) | `pip install psycopg2` | `postgresql://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
| [Presto](/docs/configuration/databases#presto) | `pip install pyhive` | `presto://{username}:{password}@{hostname}:{port}/{database}` |
| [Rockset](/docs/configuration/databases#rockset) | `pip install rockset-sqlalchemy` | `rockset://<api_key>:@<api_server>` |
| [SAP Hana](/docs/configuration/databases#hana) | `pip install hdbcli sqlalchemy-hana` or `pip install apache-superset[hana]` | `hana://{username}:{password}@{host}:{port}` |
| [StarRocks](/docs/configuration/databases#starrocks) | `pip install starrocks` | `starrocks://<User>:<Password>@<Host>:<Port>/<Catalog>.<Database>` |
| [Snowflake](/docs/configuration/databases#snowflake) | `pip install snowflake-sqlalchemy` | `snowflake://{user}:{password}@{account}.{region}/{database}?role={role}&warehouse={warehouse}` |
| SQLite | No additional library needed | `sqlite://path/to/file.db?check_same_thread=false` |
| [SQL Server](/docs/configuration/databases#sql-server) | `pip install pymssql` | `mssql+pymssql://<Username>:<Password>@<Host>:<Port-default:1433>/<Database Name>` |
| [TDengine](/docs/configuration/databases#tdengine) | `pip install taospy` `pip install taos-ws-py` | `taosws://<user>:<password>@<host>:<port>` |
| [Teradata](/docs/configuration/databases#teradata) | `pip install teradatasqlalchemy` | `teradatasql://{user}:{password}@{host}` |
| [TimescaleDB](/docs/configuration/databases#timescaledb) | `pip install psycopg2` | `postgresql://<UserName>:<DBPassword>@<Database Host>:<Port>/<Database Name>` |
| [Trino](/docs/configuration/databases#trino) | `pip install trino` | `trino://{username}:{password}@{hostname}:{port}/{catalog}` |
| [Vertica](/docs/configuration/databases#vertica) | `pip install sqlalchemy-vertica-python` | `vertica+vertica_python://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
| [YDB](/docs/configuration/databases#ydb) | `pip install ydb-sqlalchemy` | `ydb://{host}:{port}/{database_name}` |
| [YugabyteDB](/docs/configuration/databases#yugabytedb) | `pip install psycopg2` | `postgresql://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
---

Note that many other databases are supported, the main criteria being the existence of a functional SQLAlchemy dialect and Python driver.

Repeat this process for each type of database you want Superset to connect to.
### Database-specific Instructions

#### Ascend.io

#### Apache Doris

You'll need the following setting values to form the connection string:

- **Catalog**: Catalog Name
- **Database**: Database Name

Here's what the connection string looks like:

```
doris://<User>:<Password>@<Host>:<Port>/<Catalog>.<Database>
```
#### AWS Athena

##### PyAthenaJDBC

##### PyAthena

The connection string using [PyAthena](https://pypi.org/project/pyathena/) looks like:

```
awsathena+rest://{aws_access_key_id}:{aws_secret_access_key}@athena.{region_name}.amazonaws.com/{schema_name}?s3_staging_dir={s3_staging_dir}&...
```

The PyAthena library also allows you to assume a specific IAM role, which you can define by adding the following parameters in Superset's Athena database connection UI under ADVANCED --> Other --> ENGINE PARAMETERS.

```json
{
    "connect_args": {
        "role_arn": "<role arn>"
    }
}
```

#### AWS DynamoDB

The connection string for DynamoDB looks like:

```
dynamodb://{access_key_id}:{secret_access_key}@dynamodb.{region_name}.amazonaws.com?connector=superset
```

To get more documentation, please visit: [PyDynamoDB WIKI](https://github.com/passren/PyDynamoDB/wiki/5.-Superset).
#### AWS Redshift

The [sqlalchemy-redshift](https://pypi.org/project/sqlalchemy-redshift/) library is the recommended way to connect to Redshift through SQLAlchemy.

You'll need to set the following values to form the connection string:

- **Database Name**: Database Name
- **Port**: default 5439
##### psycopg2

Here's what the SQLALCHEMY URI looks like:

```
redshift+psycopg2://<userName>:<DBPassword>@<AWS End Point>:5439/<Database Name>
```

##### redshift_connector

Here's what the SQLALCHEMY URI looks like:

```
redshift+redshift_connector://<userName>:<DBPassword>@<AWS End Point>:5439/<Database Name>
```
###### Using IAM-based credentials with Redshift cluster

[Amazon redshift cluster](https://docs.aws.amazon.com/redshift/latest/mgmt/working-with-clusters.html) also supports generating temporary IAM-based database user credentials.

You have to define the following arguments in Superset's Redshift database connection UI under ADVANCED --> Other --> ENGINE PARAMETERS:

```
{"connect_args":{"iam":true,"database":"<database>","cluster_identifier":"<cluster_identifier>","db_user":"<db_user>"}}
```

and the SQLALCHEMY URI should be set to `redshift+redshift_connector://`

###### Using IAM-based credentials with Redshift serverless

[Redshift serverless](https://docs.aws.amazon.com/redshift/latest/mgmt/serverless-whatis.html) supports connection using IAM roles.

You have to define the following arguments in Superset's Redshift database connection UI under ADVANCED --> Other --> ENGINE PARAMETERS:

```
{"connect_args":{"iam":true,"is_serverless":true,"serverless_acct_id":"<aws account number>","serverless_work_group":"<redshift work group>","database":"<database>","user":"IAMR:<superset iam role name>"}}
```
#### ClickHouse

To use ClickHouse with Superset, you will need to install the `clickhouse-connect` Python library (`pip install clickhouse-connect`).

The following connection string targets a local server and uses the default user without a password (and doesn't encrypt the connection):

```
clickhousedb://localhost/default
```
#### CockroachDB

The recommended connector library for CockroachDB is [cockroachdb](https://pypi.org/project/cockroachdb/).

The expected connection string is formatted as follows:

```
cockroachdb://root@{hostname}:{port}/{database}?sslmode=disable
```
#### Couchbase

Couchbase's Superset connection is designed to support two services: Couchbase Analytics and Couchbase Columnar. The recommended connector library for Couchbase is [couchbase-sqlalchemy](https://github.com/couchbase/couchbase-sqlalchemy).

```
pip install couchbase-sqlalchemy
```

The expected connection string is formatted as follows:

```
couchbase://{username}:{password}@{hostname}:{port}?truststorepath={certificate path}?ssl={true/false}
```
#### CrateDB

The connector library for CrateDB is [sqlalchemy-cratedb].
We recommend adding the following item to your `requirements.txt` file:

```
sqlalchemy-cratedb>=0.40.1,<1
```

An SQLAlchemy connection string for [CrateDB Self-Managed] on localhost, for evaluation purposes, looks like this:

```
crate://crate@127.0.0.1:4200
```

An SQLAlchemy connection string for connecting to [CrateDB Cloud] looks like this:

```
crate://<username>:<password>@<clustername>.cratedb.net:4200/?ssl=true
```

Follow the steps [here](/docs/configuration/databases#installing-database-drivers) to install the CrateDB connector package when setting up Superset locally using Docker Compose.

```
echo "sqlalchemy-cratedb" >> ./docker/requirements-local.txt
```

[CrateDB Cloud]: https://cratedb.com/product/cloud
[CrateDB Self-Managed]: https://cratedb.com/product/self-managed
[sqlalchemy-cratedb]: https://pypi.org/project/sqlalchemy-cratedb/
#### Databend

Here's a connection string example of Superset connecting to a Databend database:

```
databend://user:password@localhost:8000/default?secure=false
```
#### Databricks

Databricks now offers a native DB API 2.0 driver, `databricks-sql-connector`, that can be used with the `sqlalchemy-databricks` dialect.

For a connection to a SQL endpoint, you need to use the HTTP path from the endpoint, passed via the engine parameters:

```
{"connect_args": {"http_path": "/sql/1.0/endpoints/****", "driver_path": "/path/to/odbc/driver"}}
```
#### Denodo

The recommended connector library for Denodo is [denodo-sqlalchemy](https://pypi.org/project/denodo-sqlalchemy/).

The expected connection string is formatted as follows (default port is 9996):

```
denodo://{username}:{password}@{hostname}:{port}/{database}
```
#### Dremio

The recommended connector library for Dremio is [sqlalchemy_dremio](https://pypi.org/project/sqlalchemy-dremio/).

The expected connection string for ODBC (Default port is 31010) is formatted as follows:

```
dremio+pyodbc://{username}:{password}@{host}:{port}/{database_name}/dremio?SSL=1
```

The expected connection string for Arrow Flight (Dremio 4.9.1+. Default port is 32010) is formatted as follows:

```
dremio+flight://{username}:{password}@{host}:{port}/dremio
```

This [blog post by Dremio](https://www.dremio.com/tutorials/dremio-apache-superset/) has some additional helpful instructions on connecting Superset to Dremio.
#### Apache Drill

##### SQLAlchemy

The recommended way to connect to Apache Drill is through SQLAlchemy, using the [sqlalchemy-drill](https://pypi.org/project/sqlalchemy-drill/) package.

##### ODBC

We recommend reading the documentation in the [GitHub README](https://github.com/JohnOmernik/sqlalchemy-drill#usage-with-odbc) to learn how to work with Drill through ODBC.

import useBaseUrl from "@docusaurus/useBaseUrl";
#### Apache Druid

The connection string looks like:

```
druid://<User>:<password>@<Host>:<Port-default-9088>/druid/v2/sql
```

Here's a breakdown of the key components of this connection string:

- `User`: username portion of the credentials needed to connect to your database
- `Password`: password portion of the credentials needed to connect to your database

To disable SSL verification, add the following to the **Extras** field:

```
engine_params:
{"connect_args":
    {"scheme": "https", "ssl_verify_cert": false}}
```

##### Aggregations

Common aggregations or Druid metrics can be defined and used in Superset. To define a post-aggregation, create a metric much like you would create an aggregation manually, but specify `postagg` as a `Metric Type`. You then have to provide a valid json post-aggregation definition (as specified in the Druid docs) in the JSON field.
#### Elasticsearch

The recommended connector library for Elasticsearch is [elasticsearch-dbapi](https://pypi.org/project/elasticsearch-dbapi/).

By default, Superset uses the UTC time zone for Elasticsearch queries. If you need to specify a time zone, please edit your Database and enter the settings of your specified time zone in Other > ENGINE PARAMETERS:

```json
{
    "connect_args": {
        "time_zone": "Asia/Shanghai"
    }
}
```

To disable SSL verification, add the following to the **SQLALCHEMY URI** field:

```
elasticsearch+https://{user}:{password}@{host}:9200/?verify_certs=False
```
#### Exasol

The recommended connector library for Exasol is [sqlalchemy-exasol](https://pypi.org/project/sqlalchemy-exasol/).

The connection string for Exasol looks like this:

```
exa+pyodbc://{username}:{password}@{hostname}:{port}/my_schema?CONNECTIONLCALL=en_US.UTF-8&driver=EXAODBC
```

#### Firebird

The recommended connector library for Firebird is [sqlalchemy-firebird](https://pypi.org/project/sqlalchemy-firebird/).

Here's a connection string example of Superset connecting to a local Firebird database:

```
firebird+fdb://SYSDBA:masterkey@192.168.86.38:3050//Library/Frameworks/Firebird.framework/Versions/A/Resources/examples/empbuild/employee.fdb
```
#### Firebolt

The recommended connector library for Firebolt is [firebolt-sqlalchemy](https://pypi.org/project/firebolt-sqlalchemy/).

#### Google BigQuery

The recommended connector library for BigQuery is [sqlalchemy-bigquery](https://pypi.org/project/sqlalchemy-bigquery/).

Follow the steps [here](/docs/configuration/databases#installing-drivers-in-docker-images) about how to install new database drivers when setting up Superset locally via docker compose.

```bash
echo "sqlalchemy-bigquery" >> ./docker/requirements-local.txt
```
When adding a new BigQuery connection in Superset, you'll need to add the GCP Service Account credentials file (as a JSON).

1. Create your Service Account via the Google Cloud Platform control panel, provide it access to the appropriate BigQuery datasets, and download the JSON configuration file for the service account.
2. In Superset, you can either upload that JSON or add the JSON blob in the following format (this should be the content of your credential JSON file):

```json
{
    "type": "service_account",
    "project_id": "...",
    ...
}
```

Go to the **Advanced** tab, add a JSON blob to the **Secure Extra** field in the database configuration form with the following format:

```json
{
    "credentials_info": <contents of credentials JSON file>
}
```

The resulting file should have this structure:

```json
{
    "credentials_info": {
        "type": "service_account",
        ...
    }
}
```

To be able to upload CSV or Excel files to BigQuery in Superset, you'll also need the [pandas_gbq](https://github.com/pydata/pandas-gbq) library.

Currently, the Google BigQuery Python SDK is not compatible with `gevent`, due to some dynamic monkeypatching of the Python core library by `gevent`. So, when you deploy Superset with the `gunicorn` server, you have to use a worker type other than `gevent`.
#### Google Sheets

Google Sheets has a very limited SQL API. There are a few steps involved in connecting Superset to Google Sheets. This [tutorial](https://preset.io/blog/2020-06-01-connect-superset-google-sheets/) has the most up to date instructions on setting up this connection.
#### Hana

The recommended connector library is [sqlalchemy-hana](https://github.com/SAP/sqlalchemy-hana).

The connection string is formatted as follows:

```
hana://{username}:{password}@{host}:{port}
```

#### Apache Hive

The [pyhive](https://pypi.org/project/PyHive/) library is the recommended way to connect to Hive through SQLAlchemy.

The expected connection string is formatted as follows:

```
hive://hive@{hostname}:{port}/{database}
```

#### Hologres

Hologres is a real-time interactive analytics service developed by Alibaba Cloud. It is fully compatible with PostgreSQL 11 and integrates seamlessly with the big data ecosystem.

The connection string looks like:

```
postgresql+psycopg2://{username}:{password}@{host}:{port}/{database}
```

#### IBM DB2

The [IBM_DB_SA](https://github.com/ibmdb/python-ibmdbsa/tree/master/ibm_db_sa) library provides a Python / SQLAlchemy interface to IBM Data Servers.

There are two DB2 dialect versions implemented in SQLAlchemy. If you are connecting to a DB2 version without `LIMIT [n]` syntax, use:

```
ibm_db_sa://{username}:{password}@{hostname}:{port}/{database}
```

#### Apache Impala

The recommended connector library to Apache Impala is [impyla](https://github.com/cloudera/impyla).

The expected connection string is formatted as follows:

```
impala://{hostname}:{port}/{database}
```
#### Kusto

The recommended connector library for Kusto is [sqlalchemy-kusto](https://pypi.org/project/sqlalchemy-kusto/).

The connection string for Kusto (KQL) is formatted as follows:

```
kustokql+https://{cluster_url}/{database}?azure_ad_client_id={azure_ad_client_id}&...
```

Make sure the user has privileges to access and use all required databases/tables/views.

#### Apache Kylin

The recommended connector library for Apache Kylin is [kylinpy](https://github.com/Kyligence/kylinpy).

The expected connection string is formatted as follows:

```
kylin://<username>:<password>@<hostname>:<port>/<project>?<param1>=<value1>&<param2>=<value2>
```
#### MySQL

The recommended connector library for MySQL is [mysqlclient](https://pypi.org/project/mysqlclient/).

One problem with `mysqlclient` is that it will fail to connect to newer MySQL databases that use `caching_sha2_password` for authentication, since the plugin is not included in the client. In that case, use `mysql-connector-python` instead:

```
mysql+mysqlconnector://{username}:{password}@{host}/{database}
```

#### IBM Netezza Performance Server

The [nzalchemy](https://pypi.org/project/nzalchemy/) library provides a Python / SQLAlchemy interface to IBM Netezza Performance Server.

The expected connection string is formatted as follows:

```
netezza+nzpy://{username}:{password}@{hostname}:{port}/{database}
```

#### OceanBase

The [sqlalchemy-oceanbase](https://pypi.org/project/oceanbase_py/) library is the recommended way to connect to OceanBase through SQLAlchemy.

The connection string for OceanBase looks like this:

```
oceanbase://<User>:<Password>@<Host>:<Port>/<Database>
```
#### Ocient DB

The recommended connector library for Ocient is [sqlalchemy-ocient](https://pypi.org/project/sqlalchemy-ocient).

##### Install the Ocient Driver

```bash
pip install sqlalchemy-ocient
```

#### Oracle

The connection string is formatted as follows:

```
oracle://<username>:<password>@<hostname>:<port>
```
#### Parseable

[Parseable](https://www.parseable.io) is a distributed log analytics database that provides a SQL-like query interface for log data. The recommended connector library is [sqlalchemy-parseable](https://github.com/parseablehq/sqlalchemy-parseable).

The connection string is formatted as follows:

```
parseable://<username>:<password>@<hostname>:<port>/<stream_name>
```

For example:

```
parseable://admin:admin@demo.parseable.com:443/ingress-nginx
```

Note: The stream_name in the URI represents the Parseable logstream you want to query. You can use both HTTP (port 80) and HTTPS (port 443) connections.
#### Apache Pinot

The recommended connector library for Apache Pinot is [pinotdb](https://pypi.org/project/pinotdb/).

The expected connection string is formatted as follows:

```
pinot://<username>:<password>@<pinot-broker-host>:<pinot-broker-port>/query/sql?...
```

If you want to use the explore view or joins, window functions, etc., enable the [multi-stage query engine](https://docs.pinot.apache.org/reference/multi-stage-engine). Add the argument below when creating the database connection in Advanced -> Other -> ENGINE PARAMETERS:

```json
{"connect_args":{"use_multistage_engine":"true"}}
```
#### PostgreSQL

More information about PostgreSQL connection options can be found in the [SQLAlchemy docs](https://docs.sqlalchemy.org/en/13/dialects/postgresql.html) and the [PostgreSQL docs](https://www.postgresql.org/docs/9.1/libpq-connect.html#LIBPQ-PQCONNECTDBPARAMS).
#### Presto

The [pyhive](https://pypi.org/project/PyHive/) library is the recommended way to connect to Presto through SQLAlchemy.

For example, the following connection string connects to the `hive` database:

```
presto://datascientist:securepassword@presto.example.com:8080/hive
```

By default Superset assumes the most recent version of Presto is being used when querying the datasource. If you're using an older version of Presto, you can configure it in the extra parameter:

```json
{
    "version": "0.123"
}
```

To enable SSL, add the following JSON config to the extra connection information:

```json
{
    "connect_args": {
        "protocol": "https",
        ...
    }
}
```
#### RisingWave

The recommended connector library for RisingWave is [sqlalchemy-risingwave](https://github.com/risingwavelabs/sqlalchemy-risingwave).

The expected connection string is formatted as follows:

```
risingwave://root@{hostname}:{port}/{database}?sslmode=disable
```

#### Rockset

The connection string for Rockset is:

```
rockset://{api key}:@{api server}/{VI ID}
```

For more complete instructions, we recommend the [Rockset documentation](https://docs.rockset.com/apache-superset/).
#### Snowflake

##### Install Snowflake Driver

Follow the steps [here](/docs/configuration/databases#installing-database-drivers) about how to install new database drivers when setting up Superset locally via docker compose.

```bash
echo "snowflake-sqlalchemy" >> ./docker/requirements-local.txt
```
##### Key Pair Authentication

To connect Snowflake with Key Pair Authentication, you need to add the following parameters to the `Secure Extra` field.

***Please note that you need to merge multi-line private key content to one line and insert `\n` between each line***

```json
{
    "auth_method": "keypair",
    "auth_params": {
        "privatekey_body": "-----BEGIN ENCRYPTED PRIVATE KEY-----\n...\n-----END ENCRYPTED PRIVATE KEY-----",
        "privatekey_pass": "Your Private Key Password"
    }
}
```

If your private key is stored on the server, you can replace "privatekey_body" with "privatekey_path" in the parameters:

```json
{
    "auth_method": "keypair",
    "auth_params": {
        "privatekey_path": "Your Private Key Path",
        "privatekey_pass": "Your Private Key Password"
    }
}
```
#### Apache Solr

The connection string for Solr looks like this:

```
solr://{username}:{password}@{host}:{port}/{server_path}/{collection}[/?use_ssl=true|false]
```
#### Apache Spark SQL

The recommended connector library for Apache Spark SQL is [pyhive](https://pypi.org/project/PyHive/).

#### SQL Server

The recommended connector library for SQL Server is [pymssql](https://github.com/pymssql/pymssql).

The connection string for SQL Server looks like this:

```
mssql+pymssql://<Username>:<Password>@<Host>:<Port-default:1433>/<Database Name>
```

It is also possible to connect using [pyodbc](https://pypi.org/project/pyodbc) with the parameter [odbc_connect](https://docs.sqlalchemy.org/en/14/dialects/mssql.html#pass-through-exact-pyodbc-string). The connection string then looks like this:

```
mssql+pyodbc:///?odbc_connect=Driver%3D%7BODBC+Driver+17+for+SQL+Server%7D%3BServer%3Dtcp%3A%3Cmy_server%3E%2C1433%3BDatabase%3Dmy_database%3BUid%3Dmy_user_name%3BPwd%3Dmy_password%3BEncrypt%3Dyes%3BConnection+Timeout%3D30
```
#### StarRocks

Here's what the connection string looks like:

```
starrocks://<User>:<Password>@<Host>:<Port>/<Catalog>.<Database>
```

:::note
StarRocks maintains their Superset documentation [here](https://docs.starrocks.io/docs/integrations/BI_integrations/Superset/).
:::
#### TDengine

[TDengine](https://www.tdengine.com) is a high-performance, scalable time-series database for Industrial IoT that provides a SQL-like query interface.

The recommended connector libraries for TDengine are [taospy](https://pypi.org/project/taospy/) and [taos-ws-py](https://pypi.org/project/taos-ws-py/).

The expected connection string is formatted as follows:

```
taosws://<user>:<password>@<host>:<port>
```

For example:

```
taosws://root:taosdata@127.0.0.1:6041
```
#### Teradata

The recommended connector library is [teradatasqlalchemy](https://pypi.org/project/teradatasqlalchemy/).

There is also an older connector that requires the Teradata ODBC drivers, available here: https://downloads.teradata.com/download/connectivity/odbc-driver/linux

Here are the required environment variables:

```bash
export ODBCINI=/.../teradata/client/ODBC_64/odbc.ini
export ODBCINST=/.../teradata/client/ODBC_64/odbcinst.ini
```

We recommend the first library because it does not require ODBC drivers and is more regularly updated.
#### TimescaleDB

[TimescaleDB](https://www.timescale.com) is the open-source relational database for time-series and analytics to build powerful data-intensive applications. TimescaleDB is a PostgreSQL extension, and you can use the standard PostgreSQL connector library, [psycopg2](https://www.psycopg.org/docs/), to connect to the database.

The connection string looks like:

```
postgresql://{username}:{password}@{host}:{port}/{database name}?sslmode=require
```

[Learn more about TimescaleDB!](https://docs.timescale.com/)
#### Trino

Superset supports Trino version 352 and higher.

##### Connection String

The connection string format is as follows:

```
trino://{username}:{password}@{hostname}:{port}/{catalog}
```

If you are running Trino with Docker on your local machine, please use the following connection URL:

```
trino://trino@host.docker.internal:8080
```
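As a quick sanity check outside Superset, the same connection string can be handed straight to SQLAlchemy (a sketch; assumes the `trino` Python package is installed):

```python
from sqlalchemy import create_engine, text

# Verify the Trino connection string works before wiring it into Superset.
engine = create_engine("trino://trino@host.docker.internal:8080")
with engine.connect() as conn:
    print(conn.execute(text("SELECT 1")).scalar())
```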
##### Authentications

###### 1. Basic Authentication

You can provide `username`/`password` in the connection string or in the `Secure Extra` field at `Advanced / Security`.

- In Connection String

```
trino://{username}:{password}@{hostname}:{port}/{catalog}
```

- In `Secure Extra` field

```json
{
    "auth_method": "basic",
    "auth_params": {
        "username": "<username>",
        "password": "<password>"
    }
}
```

NOTE: if both are provided, `Secure Extra` always takes higher priority.
###### 2. Kerberos Authentication

In the `Secure Extra` field, configure as in the following example:

```json
{
    "auth_method": "kerberos",
    "auth_params": {
        ...
    }
}
```

All fields in `auth_params` are passed directly to the `KerberosAuthentication` class in [`trino-python-client`](https://github.com/trinodb/trino-python-client).

NOTE: Kerberos authentication requires installing the [`trino-python-client`](https://github.com/trinodb/trino-python-client) locally with either the `all` or `kerberos` optional features, i.e., installing `trino[all]` or `trino[kerberos]` respectively.
###### 3. Certificate Authentication

In the `Secure Extra` field, configure as in the following example:

```json
{
    "auth_method": "certificate",
    "auth_params": {
        ...
    }
}
```

All fields in `auth_params` are passed directly to the [`CertificateAuthentication`](https://github.com/trinodb/trino-python-client/blob/0.315.0/trino/auth.py#L416) class.
###### 4. JWT Authentication

Configure `auth_method` and provide the token in the `Secure Extra` field:

```json
{
    "auth_method": "jwt",
    "auth_params": {
        "token": "<your-jwt-token>"
    }
}
```
###### 5. Custom Authentication

To use custom authentication, first you need to add it to the `ALLOWED_EXTRA_AUTHENTICATIONS` allow list in the Superset config file:

```python
from your.module import AuthClass
from another.extra import auth_method

ALLOWED_EXTRA_AUTHENTICATIONS: Dict[str, Dict[str, Callable[..., Any]]] = {
    "trino": {
        "custom_auth": AuthClass,
        "another_auth_method": auth_method,
    },
}
```

Then in the `Secure Extra` field:

```json
{
    "auth_method": "custom_auth",
    "auth_params": {
        ...
    }
}
```

Pass either a reference to your custom class or a factory function (which returns an `Authentication` instance) to `auth_method`. All fields in `auth_params` are passed directly to your class/function.
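For orientation, a factory function along these lines would satisfy that contract (a sketch; the module path mirrors the placeholder import above, and `BasicAuthentication` comes from `trino-python-client`):

```python
# another/extra.py -- a sketch of a factory function usable as "auth_method".
# Everything under "auth_params" in Secure Extra arrives as keyword arguments.
from trino.auth import BasicAuthentication

def auth_method(username: str, password: str) -> BasicAuthentication:
    # Return an Authentication instance for the Trino client to use.
    return BasicAuthentication(username, password)
```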
**Reference**:
- [Trino-Superset-Podcast](https://trino.io/episodes/12.html)
#### Vertica

The recommended connector library is [sqlalchemy-vertica-python](https://pypi.org/project/sqlalchemy-vertica-python/).

Other parameters:

- Load Balancer - Backup Host
#### YDB

The recommended connector library for [YDB](https://ydb.tech/) is [ydb-sqlalchemy](https://pypi.org/project/ydb-sqlalchemy/).

##### Connection String

The connection string for YDB looks like this:

```
ydb://{host}:{port}/{database_name}
```

##### Protocol

You can specify `protocol` in the `Secure Extra` field at `Advanced / Security`:

```
{
    "protocol": "grpcs"
}
```

Default is `grpc`.
##### Authentication Methods

###### Static Credentials

To use `Static Credentials`, provide `username`/`password` in the `Secure Extra` field at `Advanced / Security`:

```
{
    "credentials": {
        "username": "...",
        "password": "..."
    }
}
```

###### Access Token Credentials

To use `Access Token Credentials`, provide `token` in the `Secure Extra` field at `Advanced / Security`:

```
{
    "credentials": {
        "token": "..."
    }
}
```
###### Service Account Credentials

To use Service Account Credentials, provide `service_account_json` in the `Secure Extra` field at `Advanced / Security`:

```
{
    "credentials": {
        "service_account_json": {
            "id": "...",
            "service_account_id": "...",
            "created_at": "...",
            "key_algorithm": "...",
            "public_key": "...",
            "private_key": "..."
        }
    }
}
```
#### YugabyteDB

[YugabyteDB](https://www.yugabyte.com/) is a distributed SQL database built on top of PostgreSQL.

The connection string looks like:

```
postgresql://{username}:{password}@{host}:{port}/{database}
```
## Connecting through the UI

Here is the documentation on how to leverage the new DB Connection UI. This will provide admins the ability to enhance the UX for users who want to connect to new databases.

For databases like MySQL and Postgres that use the standard format of `engine+driver://user:password@host:port/dbname`, all the needed fields can be parsed out of and built back into the URI automatically. For other databases you need to implement these methods yourself. The BigQuery DB engine spec is a good example of how to do that.
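As a rough sketch of what those two translations involve for a standard-format database (the names echo Superset's `BasicParametersMixin`, but treat the signatures as an assumption and check the BigQuery engine spec for the real interface):

```python
from typing import Any, Dict

from sqlalchemy.engine.url import make_url

def build_sqlalchemy_uri(parameters: Dict[str, Any]) -> str:
    # Build engine+driver://user:password@host:port/dbname from form fields.
    return (
        "postgresql+psycopg2://"
        f"{parameters['username']}:{parameters['password']}"
        f"@{parameters['host']}:{parameters['port']}/{parameters['database']}"
    )

def get_parameters_from_uri(uri: str) -> Dict[str, Any]:
    # Parse an existing URI back into the individual form fields.
    url = make_url(uri)
    return {
        "username": url.username,
        "password": url.password,
        "host": url.host,
        "port": url.port,
        "database": url.database,
    }
```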
### Extra Database Settings

##### Deeper SQLAlchemy Integration

You can use the `Extra` field in the **Edit Databases** form to configure SSL:

```
{
    "metadata_params": {},
    "engine_params": {
        "connect_args": {
            "sslmode": "require",
            "sslrootcert": "/path/to/my/pem"
        }
    }
}
```
## Misc

### Querying across databases
The Superset CLI allows you to import and export datasources from and to YAML. Datasources include databases. The data is expected to be organized in the following hierarchy:

```text
├──databases
|  ├──database_1
|  |  ├──table_1
```
You can print your current datasources to stdout by running:

```bash
superset export_datasources
```

To save your datasources to a ZIP file run:

```bash
superset export_datasources -f <filename>
```
Alternatively, you can export datasources using the UI.

In order to obtain an **exhaustive list of all fields** you can import using the YAML import, run:

```bash
superset export_datasource_schema
```
As a reminder, you can use the `-b` flag to include back references.

In order to import datasources from a ZIP file, run:

```bash
superset import_datasources -p <path / filename>
```

The optional username flag **-u** sets the user used for the datasource import. The default is 'admin'. Example:

```bash
superset import_datasources -p <path / filename> -u 'admin'
```
When using Superset version 4.x.x to import from an older version (2.x.x or 3.x.x), importing is supported via the command `legacy_import_datasources`, which expects a JSON file or a directory of JSON files. The options are `-r` for recursive and `-u` for specifying a user. Example of a legacy import without options:

```bash
superset legacy_import_datasources -p <path or filename>
```
When using an older version of Superset (2.x.x or 3.x.x), the command is `import_datasources`. ZIP and YAML files are supported; to switch between them, the feature flag `VERSIONED_EXPORT` is used. When `VERSIONED_EXPORT` is `True`, `import_datasources` expects a ZIP file, otherwise YAML. Example:

```bash
superset import_datasources -p <path or filename>
```

When `VERSIONED_EXPORT` is `False`, if you supply a path, all files ending with **yaml** or **yml** will be parsed. You can apply additional flags (e.g. to search the supplied path recursively):

```bash
superset import_datasources -p <path> -r
```

The sync flag **-s** takes parameters in order to sync the supplied elements with your file. Be careful, this can delete the contents of your meta database. Example:

```bash
superset import_datasources -p <path / filename> -s columns,metrics
```
If you don't supply the sync flag (**-s**), importing will only add and update. E.g. you can add a verbose_name to the column ds in the table random_time_series from the example datasets by saving the following YAML to a file and then running the **import_datasources** command.

```yaml
databases:
- database_name: main
  tables:
  - table_name: random_time_series
    columns:
    - column_name: ds
      verbose_name: datetime
```
The following keys in `superset_config.py` can be specified to configure CORS:

- `ENABLE_CORS`: Must be set to `True` in order to enable CORS
- `CORS_OPTIONS`: options passed to Flask-CORS ([documentation](https://flask-cors.corydolphin.com/en/latest/api.html#extension))
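Putting those together, a typical configuration looks like this (a sketch; the option keys are standard Flask-CORS arguments, and the origin is a placeholder):

```python
# superset_config.py
ENABLE_CORS = True
CORS_OPTIONS = {
    "supports_credentials": True,
    "allow_headers": ["*"],
    "resources": ["*"],
    "origins": ["https://myapp.example.com"],  # placeholder origin
}
```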
## HTTP headers

Note that Superset bundles [flask-talisman](https://pypi.org/project/talisman/), self-described as a small Flask extension that handles setting HTTP headers that can help protect against a few common web application security issues.
## HTML Embedding of Dashboards and Charts

There are two ways to embed a dashboard: using the [SDK](https://www.npmjs.com/package/@superset-ui/embedded-sdk) or embedding a direct link. Note that in the latter case, everybody who knows the link is able to access the dashboard.

### Embedding a Public Direct Link to a Dashboard

This works by first changing the content security policy (CSP) of [flask-talisman](https://github.com/GoogleCloudPlatform/flask-talisman) to allow certain domains to display Superset content. Then a dashboard can be made publicly accessible, i.e. **bypassing authentication**. Once made public, the dashboard's URL can be added to an iframe in another website's HTML code.

#### Changing flask-talisman CSP

Add to `superset_config.py` the entire `TALISMAN_CONFIG` section from `config.py` and include a `frame-ancestors` section:

```python
TALISMAN_ENABLED = True
TALISMAN_CONFIG = {
    "content_security_policy": {
        ...
        "frame-ancestors": ["*.my-domain.com", "*.another-domain.com"],
        ...
    },
}
```

Restart Superset for this configuration change to take effect.
#### Making a Dashboard Public

1. Add the `'DASHBOARD_RBAC': True` [Feature Flag](https://github.com/apache/superset/blob/master/RESOURCES/FEATURE_FLAGS.md) to `superset_config.py`
2. Add the `Public` role to your dashboard as described [here](https://superset.apache.org/docs/using-superset/creating-your-first-dashboard/#manage-access-to-dashboards)

#### Embedding a Public Dashboard

Now anybody can directly access the dashboard's URL. You can embed it in an iframe like so:

```html
<iframe
  width="600"
  height="400"
  seamless
  frameBorder="0"
  scrolling="no"
  src="https://superset.my-domain.com/superset/dashboard/10/?standalone=1&height=400"
>
</iframe>
```
#### Embedding a Chart

A chart's embed code can be generated by going to a chart's edit view and then clicking at the top right on `...` > `Share` > `Embed code`.

### Enabling Embedding via the SDK

Clicking on `...` next to `EDIT DASHBOARD` on the top right of the dashboard's overview page should yield a drop-down menu including the entry "Embed dashboard".

To enable this entry, add the following line to the `.env` file:

```text
SUPERSET_FEATURE_EMBEDDED_SUPERSET=true
```
## CSRF settings

Similarly, [flask-wtf](https://flask-wtf.readthedocs.io/en/0.15.x/config/) is used to manage some CSRF configurations. If you need to exempt endpoints from CSRF (e.g. if you are running a custom auth postback endpoint), you can add the endpoints to `WTF_CSRF_EXEMPT_LIST`:
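For instance (a sketch; the listed endpoint is an illustrative entry, replace it with the view you actually need to exempt):

```python
# superset_config.py
WTF_CSRF_ENABLED = True
# Exempt specific endpoints from CSRF protection, e.g. a custom
# auth postback endpoint.
WTF_CSRF_EXEMPT_LIST = ["superset.views.core.log"]
```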
## SSH Tunneling

1. Turn on feature flag
3. Verify data is flowing
   - Once SSH tunneling has been enabled, go to SQL Lab and write a query to verify data is properly flowing.
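Concretely, the first step amounts to flipping the flag in `superset_config.py` (a sketch; `SSH_TUNNELING` is the flag named in Superset's feature-flag list):

```python
# superset_config.py
FEATURE_FLAGS = {
    "SSH_TUNNELING": True,
}
```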
## Domain Sharding

:::note
Domain Sharding is deprecated as of Superset 5.0.0, and will be removed in Superset 6.0.0. Please enable HTTP2 to keep more open connections per domain.
:::

Chrome allows up to 6 open connections per domain at a time. When there are more than 6 slices in a dashboard, a lot of the time fetch requests are queued up and wait for the next available socket.
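If you still rely on this feature, sharding is configured by listing extra hostnames in `superset_config.py` (a sketch; `SUPERSET_WEBSERVER_DOMAINS` is the relevant setting, and the domains are placeholders):

```python
# superset_config.py -- domains Superset will spread chart requests across.
SUPERSET_WEBSERVER_DOMAINS = [
    "superset-1.example.com",
    "superset-2.example.com",
    "superset-3.example.com",
]
```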
docs/docs/configuration/setup-ssh-tunneling.mdx (new file):
---
title: Setup SSH Tunneling
hide_title: true
sidebar_position: 8
version: 1
---