Compare commits
162 Commits
remove-mor...fix_docker
| Author | SHA1 | Date |
|---|---|---|
| | 774910f40d | |
| | dbbcc11a98 | |
| | cebd45778f | |
| | 1b2ecc6955 | |
| | 1f5e567645 | |
| | c467fb566d | |
| | cc0ed0fef4 | |
| | 4f463399a7 | |
| | 6264ff5165 | |
| | 1410e528a4 | |
| | f704b0f556 | |
| | bdfd5cd4ec | |
| | af44b14fbe | |
| | 29c76ef1d5 | |
| | 7953c89d51 | |
| | fd4c3dce44 | |
| | 234f8c94d1 | |
| | bc2e51d8d0 | |
| | 2787167abe | |
| | 9e84e13888 | |
| | cfd24e3ccd | |
| | aaecec2e03 | |
| | 66fe0b0594 | |
| | 0d0b43062e | |
| | 72df46a729 | |
| | f7cfd9182a | |
| | 5faaaf978b | |
| | 855f4c4897 | |
| | 008ab202f3 | |
| | db311eb376 | |
| | d5f33c4c02 | |
| | ad82a8c14e | |
| | 45c18368f6 | |
| | 6f656914fe | |
| | 6706d1308f | |
| | cbf1aeec7d | |
| | 3f7907b266 | |
| | ba0d118fdd | |
| | 49aa74cec8 | |
| | 7c569abaf6 | |
| | a70f2cee72 | |
| | 7b343f7fac | |
| | 742ad92189 | |
| | 03b72628fa | |
| | 27ca7ba7d7 | |
| | 1074d1e618 | |
| | b6edf148e2 | |
| | 046770cf76 | |
| | 0f1064eab8 | |
| | 82fc8879b0 | |
| | 159958e577 | |
| | 2a98780d2c | |
| | a84da1c5cc | |
| | 8c329c445f | |
| | 05cccf6404 | |
| | 92808ffe38 | |
| | 0a7635fc05 | |
| | 4fe51c6db9 | |
| | 0eaa8c5894 | |
| | f56dfb35b2 | |
| | 597e207eff | |
| | 95ae663e88 | |
| | d0def80d3b | |
| | 9f5f0895f6 | |
| | dce7e47399 | |
| | 4b9ae07fe5 | |
| | 7519cab379 | |
| | 84c1ad97dc | |
| | f743ae36dc | |
| | ca5ed8b7b0 | |
| | f1a6aaad63 | |
| | 995182270c | |
| | c864e6cd2b | |
| | a3d6ef07c1 | |
| | 072540f321 | |
| | 2561b267ab | |
| | 8fc4c50050 | |
| | 359d7baaf5 | |
| | 437151a95f | |
| | 2157fe3f28 | |
| | 1f6ef6a870 | |
| | 35de980081 | |
| | 90ce1b5012 | |
| | 4a6dd94a6c | |
| | 860c9c08a1 | |
| | f0c42b0a01 | |
| | 889ab36dff | |
| | d85fdf4bf9 | |
| | afd5379bb0 | |
| | 789ca738dc | |
| | 40568fd1ff | |
| | 6205fb4e48 | |
| | c3bc7de75f | |
| | d33f1534e2 | |
| | 1ccc147670 | |
| | d8b9f38609 | |
| | e8d5ff1264 | |
| | 3becd6b72e | |
| | cea8ede3f0 | |
| | e94667820f | |
| | 41e611b413 | |
| | d47430ac21 | |
| | f2c0d3aa48 | |
| | f49a426ada | |
| | acf3e12230 | |
| | 1d90ee3517 | |
| | 0f32116734 | |
| | 8d7ceebbc3 | |
| | 45da3f4519 | |
| | 122057bac5 | |
| | 997cd60d43 | |
| | c57f47ddce | |
| | b4068f1fca | |
| | e7b136b822 | |
| | 86ca2b3d08 | |
| | 36b229cd18 | |
| | fff9f874b1 | |
| | 7dc65072c0 | |
| | 5411d40a7a | |
| | a7eb28ddd4 | |
| | d488c78472 | |
| | fe33689917 | |
| | b0a2aea760 | |
| | 8f93ad7068 | |
| | cced1c5a4e | |
| | c332eebc37 | |
| | 106d755931 | |
| | ef31710c2b | |
| | 6a5c293a04 | |
| | 86bfb2ade6 | |
| | f8ed0cec74 | |
| | b70c5e1d9d | |
| | f4b201857e | |
| | 16385322db | |
| | 9677fa97ff | |
| | 16295b086a | |
| | afe580bb8a | |
| | d102b45692 | |
| | c0c6486e70 | |
| | a2d8590f0a | |
| | bfb6ff3394 | |
| | 8ea94916d9 | |
| | 642de0ad63 | |
| | 6954db023c | |
| | eca7c57083 | |
| | 4dca9bceed | |
| | 7219310267 | |
| | 77ade18107 | |
| | bca2366d5a | |
| | de2eedd16f | |
| | 0f1663b2ec | |
| | 604fe27ed1 | |
| | 3d7f6dae90 | |
| | a8c6bb5b52 | |
| | 30fbfa1b14 | |
| | 3e297d130e | |
| | dc754e2d26 | |
| | f59fb6f780 | |
| | fea187a36a | |
| | a9ba3b325f | |
| | c8008e6225 | |
| | 4369967732 | |
```
@@ -70,9 +70,8 @@ github:
- cypress-matrix (4, chrome)
- cypress-matrix (5, chrome)
- frontend-build
- pre-commit (current)
- pre-commit (next)
- pre-commit (previous)
- pre-commit
- python-lint
- test-mysql
- test-postgres (current)
- test-postgres (next)
```
.github/CODEOWNERS (6 changed lines, vendored)

```
@@ -2,7 +2,7 @@
# https://github.com/apache/superset/issues/13351
/superset/migrations/ @mistercrunch @michael-s-molina @betodealmeida @eschutho
/superset/migrations/ @apache/superset-committers

# Notify some committers of changes in the components

@@ -12,7 +12,7 @@
# Notify Helm Chart maintainers about changes in it
/helm/superset/ @craig-rueda @dpgaspar @villebro @nytai @michael-s-molina
/helm/superset/ @craig-rueda @dpgaspar @villebro

# Notify E2E test maintainers of changes

@@ -22,7 +22,7 @@
/.github/ @villebro @geido @eschutho @rusackas @betodealmeida @nytai @mistercrunch @craig-rueda @john-bodley @kgabryje @dpgaspar

# Notify PMC members of changes to required GitHub Actions
# Notify PMC members of changes to required Github Actions

/.asf.yaml @villebro @geido @eschutho @rusackas @betodealmeida @nytai @mistercrunch @craig-rueda @john-bodley @kgabryje @dpgaspar
```
.github/ISSUE_TEMPLATE/bug-report.yml (11 changed lines, vendored)

```
@@ -15,9 +15,14 @@ body:
id: bug-description
attributes:
label: Bug description
description: A clear description of what the bug is, including reproduction steps and expected behavior.
description: A clear and concise description of what the bug is.
validations:
required: true
- type: textarea
id: repro-steps
attributes:
label: How to reproduce the bug
placeholder: |
The bug is that...
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'

@@ -41,7 +46,7 @@ body:
label: Superset version
options:
- master / latest-dev
- "4.1.0"
- "4.0.2"
- "3.1.3"
validations:
required: true
```
.github/actions/chart-releaser-action (2 changed lines, vendored)

.github/dependabot.yml (5 changed lines, vendored)

```
@@ -8,9 +8,8 @@ updates:
- package-ecosystem: "npm"
ignore:
# not until React >= 18.0.0
- dependency-name: "storybook"
- dependency-name: "@storybook*"
# not until node >= 18.12.0
- dependency-name: "css-minimizer-webpack-plugin"
directory: "/superset-frontend/"
schedule:
interval: "monthly"
```
.github/workflows/bashlib.sh (2 changed lines, vendored)

```
@@ -165,7 +165,7 @@ cypress-run-all() {
# UNCOMMENT the next few commands to monitor memory usage
# monitor_memory & # Start memory monitoring in the background
# memoryMonitorPid=$!
python ../../scripts/cypress_run.py --parallelism $PARALLELISM --parallelism-id $PARALLEL_ID --group $PARALLEL_ID --retries 5 $USE_DASHBOARD_FLAG
python ../../scripts/cypress_run.py --parallelism $PARALLELISM --parallelism-id $PARALLEL_ID --retries 5 $USE_DASHBOARD_FLAG
# kill $memoryMonitorPid

# After job is done, print out Flask log for debugging
```
.github/workflows/embedded-sdk-release.yml (2 changed lines, vendored)

```
@@ -31,7 +31,7 @@ jobs:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: "20"
node-version: "18"
registry-url: 'https://registry.npmjs.org'
- run: npm ci
- run: npm run ci:release
```
.github/workflows/embedded-sdk-test.yml (2 changed lines, vendored)

```
@@ -21,7 +21,7 @@ jobs:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: "20"
node-version: "18"
registry-url: 'https://registry.npmjs.org'
- run: npm ci
- run: npm test
```
.github/workflows/ephemeral-env.yml (2 changed lines, vendored)

```
@@ -233,7 +233,7 @@ jobs:
- name: Deploy Amazon ECS task definition
id: deploy-task
uses: aws-actions/amazon-ecs-deploy-task-definition@v2
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.task-def.outputs.task-definition }}
service: pr-${{ github.event.issue.number }}-service

@@ -19,7 +19,7 @@ jobs:
- name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version: '20'
node-version: '18'

- name: Install Dependencies
run: npm install -g @action-validator/core @action-validator/cli --save-dev
```
.github/workflows/pre-commit.yml (14 changed lines, vendored)

```
@@ -16,9 +16,6 @@ concurrency:
jobs:
pre-commit:
runs-on: ubuntu-22.04
strategy:
matrix:
python-version: ["current", "next", "previous"]
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4

@@ -27,8 +24,6 @@ jobs:
submodules: recursive
- name: Setup Python
uses: ./.github/actions/setup-backend/
with:
python-version: ${{ matrix.python-version }}
- name: Enable brew and helm-docs
# Add brew to the path - see https://github.com/actions/runner-images/issues/6283
run: |

@@ -40,11 +35,8 @@ jobs:
brew install norwoodj/tap/helm-docs
- name: pre-commit
run: |
set +e # Don't exit immediately on failure
pre-commit run --all-files
if [ $? -ne 0 ] || ! git diff --quiet --exit-code; then
echo "❌ Pre-commit check failed."
echo "🚒 To prevent/address this CI issue, please install/use pre-commit locally."
echo "📖 More details here: https://superset.apache.org/docs/contributing/development#git-hooks"
if ! pre-commit run --all-files; then
git status
git diff
exit 1
fi
```
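One side of the final hunk above fails the job when the hooks either exit non-zero or rewrite files in place, since auto-fixing hooks exit 0 after modifying the tree. The same pattern as a standalone sketch, assuming a git checkout with pre-commit installed:

```bash
#!/usr/bin/env bash
# Run all hooks without aborting the script on the first failure.
set +e
pre-commit run --all-files
hook_status=$?

# Fail if the hooks errored OR if they modified any tracked files,
# since formatters exit 0 after rewriting files in place.
if [ $hook_status -ne 0 ] || ! git diff --quiet --exit-code; then
    echo "Pre-commit check failed; run pre-commit locally before pushing."
    git status
    git diff
    exit 1
fi
```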
.github/workflows/release.yml (2 changed lines, vendored)

```
@@ -29,7 +29,7 @@ jobs:
strategy:
matrix:
node-version: [20]
node-version: [18]

steps:
- uses: actions/checkout@v4

@@ -26,7 +26,7 @@ jobs:
fail-fast: false
matrix:
browser: ["chrome"]
node: [20]
node: [18]
env:
SUPERSET_ENV: development
SUPERSET_CONFIG: tests.integration_tests.superset_test_config

@@ -30,7 +30,7 @@ jobs:
runs-on: ubuntu-22.04
strategy:
matrix:
node: [20]
node: [18]
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
```
.github/workflows/superset-docs-verify.yml (33 changed lines, vendored)

```
@@ -4,7 +4,6 @@ on:
pull_request:
paths:
- "docs/**"
- ".github/workflows/superset-docs-verify.yml"
types: [synchronize, opened, reopened, ready_for_review]

# cancel previous workflow jobs for PRs

@@ -14,41 +13,15 @@ concurrency:
jobs:
linkinator:
# See docs here: https://github.com/marketplace/actions/linkinator
name: Link Checking
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
# Do not bump this linkinator-action version without opening
# an ASF Infra ticket to allow the new verison first!
- uses: JustinBeckwith/linkinator-action@v1.11.0
continue-on-error: true # This will make the job advisory (non-blocking, no red X)
- uses: JustinBeckwith/linkinator-action@v1.10.4
with:
paths: "**/*.md, **/*.mdx"
linksToSkip: >-
^https://github.com/apache/(superset|incubator-superset)/(pull|issue)/\d+,
http://localhost:8088/,
docker/.env-non-dev,
http://127.0.0.1:3000/,
http://localhost:9001/,
https://charts.bitnami.com/bitnami,
https://www.li.me/,
https://www.fanatics.com/,
https://tails.com/gb/,
https://www.techaudit.info/,
https://avetilearning.com/,
https://www.udemy.com/,
https://trustmedis.com/,
http://theiconic.com.au/,
https://dev.mysql.com/doc/refman/5.7/en/innodb-limits.html,
^https://img\.shields\.io/.*,
https://vkusvill.ru/
https://www.linkedin.com/in/mark-thomas-b16751158/
https://theiconic.com.au/
https://wattbewerb.de/
https://timbr.ai/
https://opensource.org/license/apache-2-0
https://www.plaidcloud.com/
linksToSkip: '^https://github.com/apache/(superset|incubator-superset)/(pull|issue)/\d+, http://localhost:8088/, docker/.env-non-dev, http://127.0.0.1:3000/, http://localhost:9001/, https://charts.bitnami.com/bitnami, https://www.li.me/, https://www.fanatics.com/, https://tails.com/gb/, https://www.techaudit.info/, https://avetilearning.com/, https://www.udemy.com/, https://trustmedis.com/, http://theiconic.com.au/, https://dev.mysql.com/doc/refman/5.7/en/innodb-limits.html, https://img.shields.io/librariesio/release/npm/%40superset-ui%2Fembedded-sdk?style=flat, https://img.shields.io/librariesio/release/npm/%40superset-ui%2Fplugin-chart-pivot-table?style=flat, https://vkusvill.ru/'
# verbosity: 'ERROR'
build-deploy:
name: Build & Deploy
runs-on: ubuntu-22.04
```
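The hunk above swaps between a multi-line `linksToSkip` block and a single-line string for the linkinator action. For a quick local spot-check with the same underlying tool, something like this should work (assuming Node with npx; the path and skip pattern here are illustrative, trimmed from the workflow's list):

```bash
# Check links in the docs tree, skipping PR/issue links as the workflow does.
npx linkinator "docs/**/*.md" \
  --skip "^https://github.com/apache/(superset|incubator-superset)/(pull|issue)/\d+"
```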
.github/workflows/superset-e2e.yml (6 changed lines, vendored)

```
@@ -48,8 +48,7 @@ jobs:
PYTHONPATH: ${{ github.workspace }}
REDIS_PORT: 16379
GITHUB_TOKEN: ${{ github.token }}
# use the dashboard feature when running manually OR merging to master
USE_DASHBOARD: ${{ github.event.inputs.use_dashboard == 'true' || (github.ref == 'refs/heads/master' && 'true') || 'false' }}
USE_DASHBOARD: ${{ github.event.inputs.use_dashboard || (github.ref == 'refs/heads/master' && 'true') || 'false' }}
services:
postgres:
image: postgres:15-alpine

@@ -108,7 +107,7 @@ jobs:
if: steps.check.outputs.python || steps.check.outputs.frontend
uses: actions/setup-node@v4
with:
node-version: "20"
node-version: "18"
- name: Install npm dependencies
if: steps.check.outputs.python || steps.check.outputs.frontend
uses: ./.github/actions/cached-dependencies

@@ -132,7 +131,6 @@ jobs:
PARALLEL_ID: ${{ matrix.parallel_id }}
PARALLELISM: 6
CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}
NODE_OPTIONS: "--max-old-space-size=4096"
with:
run: cypress-run-all ${{ env.USE_DASHBOARD }}
- name: Upload Artifacts
```
.github/workflows/superset-frontend.yml (7 changed lines, vendored)

```
@@ -33,7 +33,7 @@ jobs:
if: steps.check.outputs.frontend
uses: actions/setup-node@v4
with:
node-version: "20"
node-version: "18"
- name: Install dependencies
if: steps.check.outputs.frontend
uses: ./.github/actions/cached-dependencies

@@ -49,6 +49,11 @@ jobs:
working-directory: ./superset-frontend
run: |
npm run type
- name: prettier
if: steps.check.outputs.frontend
working-directory: ./superset-frontend
run: |
npm run prettier-check
- name: Build plugins packages
if: steps.check.outputs.frontend
working-directory: ./superset-frontend
```
.github/workflows/superset-helm-release.yml (2 changed lines, vendored)

```
@@ -36,7 +36,7 @@ jobs:
run: helm repo add bitnami https://charts.bitnami.com/bitnami

- name: Run chart-releaser
uses: ./.github/actions/chart-releaser-action
uses: helm/chart-releaser-action@v1.6.0
with:
charts_dir: helm
mark_as_latest: false
```
.github/workflows/superset-python-misc.yml (53 changed lines, vendored, new file)

```
@@ -0,0 +1,53 @@
# Python Misc unit tests
name: Python Misc

on:
push:
branches:
- "master"
- "[0-9].[0-9]*"
pull_request:
types: [synchronize, opened, reopened, ready_for_review]

# cancel previous workflow jobs for PRs
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
cancel-in-progress: true

jobs:
python-lint:
runs-on: ubuntu-22.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
- name: Check for file changes
id: check
uses: ./.github/actions/change-detector/
with:
token: ${{ secrets.GITHUB_TOKEN }}
- name: Setup Python
uses: ./.github/actions/setup-backend/
if: steps.check.outputs.python

babel-extract:
runs-on: ubuntu-22.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
- name: Check for file changes
id: check
uses: ./.github/actions/change-detector/
with:
token: ${{ secrets.GITHUB_TOKEN }}
- name: Setup Python
if: steps.check.outputs.python
uses: ./.github/actions/setup-backend/
- name: Test babel extraction
if: steps.check.outputs.python
run: scripts/translations/babel_update.sh
```
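The babel-extract job ends by running the repo's extraction script. A sketch of the equivalent local check, assuming a configured Python dev environment at the repo root (the trailing git diff guard is an assumption, not part of the workflow):

```bash
# Re-extract translation source strings the way the workflow step does.
scripts/translations/babel_update.sh
# Assumption: fail if extraction changed any tracked translation files.
git diff --exit-code superset/translations
```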
.github/workflows/tag-release.yml (33 changed lines, vendored)

```
@@ -54,13 +54,10 @@ jobs:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
persist-credentials: false
tags: true
fetch-depth: 0

- name: Use Node.js 20
uses: actions/setup-node@v4
with:
node-version: 20

- name: Setup supersetbot
uses: ./.github/actions/setup-supersetbot/

@@ -97,38 +94,16 @@ jobs:
--platform "linux/arm64" \
--platform "linux/amd64"

# Returning to master to support closing setup-supersetbot
git checkout master

update-prs-with-release-info:
needs: config
if: needs.config.outputs.has-secrets
runs-on: ubuntu-22.04
permissions:
contents: read
pull-requests: write
steps:

# Going back on original branch to allow "post" GHA operations
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
fetch-depth: 0

- name: Use Node.js 20
uses: actions/setup-node@v4
with:
node-version: 20

- name: Setup supersetbot
uses: ./.github/actions/setup-supersetbot/
persist-credentials: false

- name: Label the PRs with the right release-related labels
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
export GITHUB_ACTOR=""
git fetch --all --tags
git checkout master
RELEASE="${{ github.event.release.tag_name }}"
if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then
# in the case of a manually-triggered run, read release from input
```
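The repeated `--platform` flags above request a multi-architecture image build. The same idea as a plain docker buildx invocation, sketched with an illustrative image name and tag:

```bash
# Build one image for both amd64 and arm64 in a single invocation.
docker buildx build --platform linux/amd64,linux/arm64 \
  -t example/superset:release-tag .
```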
.github/workflows/tech-debt.yml (2 changed lines, vendored)

```
@@ -32,7 +32,7 @@ jobs:
- name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version: '20'
node-version: '18'

- name: Install Dependencies
run: npm install
```
.gitignore (1 changed line, vendored)

```
@@ -121,4 +121,3 @@ docker/*local*

# Jest test report
test-report.html
superset/static/stats/statistics.html
```

```
@@ -53,14 +53,11 @@ repos:
- id: debug-statements
- id: end-of-file-fixer
- id: trailing-whitespace
exclude: ^.*\.(snap)
args: ["--markdown-linebreak-ext=md"]
- repo: https://github.com/pre-commit/mirrors-prettier
rev: v3.1.0 # Use the sha or tag you want to point at
hooks:
- id: prettier
additional_dependencies:
- prettier@3.3.3
args: ["--ignore-path=./superset-frontend/.prettierignore"]
files: "superset-frontend"
# blacklist unsafe functions like make_url (see #19526)
```

```
@@ -61,9 +61,6 @@ tsconfig.tsbuildinfo
generator-superset/*
temporary_superset_ui/*

# skip license checks for auto-generated test snapshots
.*snap

# docs overrides for third party logos we don't have the rights to
google-big-query.svg
google-sheets.svg
```
CHANGELOG/4.1.1.md (50 changed lines, new file)

```
@@ -0,0 +1,50 @@
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->

## Change Log

### 4.1 (Fri Nov 15 22:13:57 2024 +0530)

**Database Migrations**

**Features**

**Fixes**

- [#30886](https://github.com/apache/superset/pull/30886) fix: blocks UI elements on right side (@samarsrivastav)
- [#30859](https://github.com/apache/superset/pull/30859) fix(package.json): Pin luxon version to unblock master (@geido)
- [#30588](https://github.com/apache/superset/pull/30588) fix(explore): column data type tooltip format (@mistercrunch)
- [#29911](https://github.com/apache/superset/pull/29911) fix: Rename database from 'couchbasedb' to 'couchbase' in documentation and db_engine_specs (@ayush-couchbase)
- [#30828](https://github.com/apache/superset/pull/30828) fix(TimezoneSelector): Failing unit tests due to timezone change (@geido)
- [#30875](https://github.com/apache/superset/pull/30875) fix: don't show metadata for embedded dashboards (@sadpandajoe)
- [#30851](https://github.com/apache/superset/pull/30851) fix: Graph chart colors (@michael-s-molina)
- [#29867](https://github.com/apache/superset/pull/29867) fix(capitalization): Capitalizing a button. (@rusackas)
- [#29782](https://github.com/apache/superset/pull/29782) fix(translations): Translate embedded errors (@rusackas)
- [#29772](https://github.com/apache/superset/pull/29772) fix: Fixing incomplete string escaping. (@rusackas)
- [#29725](https://github.com/apache/superset/pull/29725) fix(frontend/docker, ci): fix borked Docker build due to Lerna v8 uplift (@hainenber)

**Others**

- [#30576](https://github.com/apache/superset/pull/30576) chore: add link to Superset when report error (@eschutho)
- [#29786](https://github.com/apache/superset/pull/29786) refactor(Slider): Upgrade Slider to Antd 5 (@geido)
- [#29674](https://github.com/apache/superset/pull/29674) refactor(ChartCreation): Migrate tests to RTL (@rtexelm)
- [#29843](https://github.com/apache/superset/pull/29843) refactor(controls): Migrate AdhocMetricOption.test to RTL (@rtexelm)
- [#29845](https://github.com/apache/superset/pull/29845) refactor(controls): Migrate MetricDefinitionValue.test to RTL (@rtexelm)
- [#28424](https://github.com/apache/superset/pull/28424) docs: Check markdown files for bad links using linkinator (@rusackas)
- [#29768](https://github.com/apache/superset/pull/29768) docs(contributing): fix broken link to translations sub-section (@sfirke)
```
```
@@ -80,9 +80,9 @@ If you believe someone is violating this code of conduct, you may reply to them

Or one of our volunteers:

* [Mark Thomas](https://www.linkedin.com/in/mark-thomas-b16751158/)
* [Joan Touzet](https://www.apache.org/foundation/conduct-team/wohali.html)
* [Sharan Foga](https://www.linkedin.com/in/sfoga/)
* [Mark Thomas](http://home.apache.org/~markt/coc.html)
* [Joan Touzet](http://home.apache.org/~wohali/)
* [Sharan Foga](http://home.apache.org/~sharan/coc.html)

If the violation is in documentation or code, for example inappropriate pronoun usage or word choice within official documentation, we ask that people report these privately to the project in question at <private@project.apache.org>, and, if they have sufficient ability within the project, to resolve or remove the concerning material, being mindful of the perspective of the person originally reporting the issue.
```
Dockerfile (96 changed lines)

```
@@ -22,29 +22,16 @@ ARG PY_VER=3.10-slim-bookworm

# if BUILDPLATFORM is null, set it to 'amd64' (or leave as is otherwise).
ARG BUILDPLATFORM=${BUILDPLATFORM:-amd64}
FROM --platform=${BUILDPLATFORM} node:20-bullseye-slim AS superset-node
FROM --platform=${BUILDPLATFORM} node:18-bullseye-slim AS superset-node

ARG NPM_BUILD_CMD="build"

# Include translations in the final build. The default supports en only to
# reduce complexity and weight for those only using en
ARG BUILD_TRANSLATIONS="false"

# Used by docker-compose to skip the frontend build,
# in dev we mount the repo and build the frontend inside docker
ARG DEV_MODE="false"

# Include headless browsers? Allows for alerts, reports & thumbnails, but bloats the images
ARG INCLUDE_CHROMIUM="true"
ARG INCLUDE_FIREFOX="false"

# Somehow we need python3 + build-essential on this side of the house to install node-gyp
RUN apt-get update -qq \
&& apt-get install \
-yqq --no-install-recommends \
build-essential \
python3 \
zstd
python3

ENV BUILD_CMD=${NPM_BUILD_CMD} \
PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true

@@ -54,34 +41,19 @@ RUN --mount=type=bind,target=/frontend-mem-nag.sh,src=./docker/frontend-mem-nag.
/frontend-mem-nag.sh

WORKDIR /app/superset-frontend
# Creating empty folders to avoid errors when running COPY later on
RUN mkdir -p /app/superset/static/assets
RUN --mount=type=bind,target=./package.json,src=./superset-frontend/package.json \
--mount=type=bind,target=./package-lock.json,src=./superset-frontend/package-lock.json \
if [ "$DEV_MODE" = "false" ]; then \
npm ci; \
else \
echo "Skipping 'npm ci' in dev mode"; \
fi
npm ci

# Runs the webpack build process
COPY superset-frontend /app/superset-frontend
RUN npm run ${BUILD_CMD}

# This copies the .po files needed for translation
RUN mkdir -p /app/superset/translations
COPY superset/translations /app/superset/translations
RUN if [ "$DEV_MODE" = "false" ]; then \
BUILD_TRANSLATIONS=$BUILD_TRANSLATIONS npm run ${BUILD_CMD}; \
else \
echo "Skipping 'npm run ${BUILD_CMD}' in dev mode"; \
fi

# Compiles .json files from the .po files, then deletes the .po files
RUN if [ "$BUILD_TRANSLATIONS" = "true" ]; then \
npm run build-translation; \
else \
echo "Skipping translations as requested by build flag"; \
fi
RUN npm run build-translation
RUN rm /app/superset/translations/*/LC_MESSAGES/*.po
RUN rm /app/superset/translations/messages.pot
```
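The hunk above gates translation compilation behind the `BUILD_TRANSLATIONS` build arg. A sketch of opting in at image build time, assuming the standard Docker CLI from the repo root (the tag is illustrative):

```bash
# Build an image that compiles and bundles the .po translation catalogs.
docker build --build-arg BUILD_TRANSLATIONS=true -t superset:with-translations .
```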
```
@@ -91,10 +63,6 @@ FROM python:${PY_VER} AS python-base
######################################################################
FROM python-base AS lean

# Include translations in the final build. The default supports en only to
# reduce complexity and weight for those only using en
ARG BUILD_TRANSLATIONS="false"

WORKDIR /app
ENV LANG=C.UTF-8 \
LC_ALL=C.UTF-8 \

@@ -108,6 +76,7 @@ RUN mkdir -p ${PYTHONPATH} superset/static requirements superset-frontend apache
&& useradd --user-group -d ${SUPERSET_HOME} -m --no-log-init --shell /bin/bash superset \
&& apt-get update -qq && apt-get install -yqq --no-install-recommends \
curl \
default-libmysqlclient-dev \
libsasl2-dev \
libsasl2-modules-gssapi-mit \
libpq-dev \

@@ -121,12 +90,11 @@ COPY --chown=superset:superset pyproject.toml setup.py MANIFEST.in README.md ./
# setup.py uses the version information in package.json
COPY --chown=superset:superset superset-frontend/package.json superset-frontend/
COPY --chown=superset:superset requirements/base.txt requirements/
COPY --chown=superset:superset scripts/check-env.py scripts/
RUN --mount=type=cache,target=/root/.cache/pip \
apt-get update -qq && apt-get install -yqq --no-install-recommends \
build-essential \
&& pip install --no-cache-dir --upgrade setuptools pip \
&& pip install --no-cache-dir -r requirements/base.txt \
&& pip install --upgrade setuptools pip \
&& pip install -r requirements/base.txt \
&& apt-get autoremove -yqq --purge build-essential \
&& rm -rf /var/lib/apt/lists/*

@@ -136,21 +104,17 @@ COPY --chown=superset:superset --from=superset-node /app/superset/static/assets
## Lastly, let's install superset itself
COPY --chown=superset:superset superset superset
RUN --mount=type=cache,target=/root/.cache/pip \
pip install --no-cache-dir -e .
pip install -e .

# Copy the .json translations from the frontend layer
COPY --chown=superset:superset --from=superset-node /app/superset/translations superset/translations

# Compile translations for the backend - this generates .mo files, then deletes the .po files
COPY ./scripts/translations/generate_mo_files.sh ./scripts/translations/
RUN if [ "$BUILD_TRANSLATIONS" = "true" ]; then \
./scripts/translations/generate_mo_files.sh \
&& chown -R superset:superset superset/translations \
&& rm superset/translations/messages.pot \
&& rm superset/translations/*/LC_MESSAGES/*.po; \
else \
echo "Skipping translations as requested by build flag"; \
fi
RUN ./scripts/translations/generate_mo_files.sh \
&& chown -R superset:superset superset/translations \
&& rm superset/translations/messages.pot \
&& rm superset/translations/*/LC_MESSAGES/*.po

COPY --chmod=755 ./docker/run-server.sh /usr/bin/
USER superset

@@ -180,38 +144,28 @@ RUN apt-get update -qq \
&& rm -rf /var/lib/apt/lists/*

RUN --mount=type=cache,target=/root/.cache/pip \
pip install --no-cache-dir playwright
pip install playwright
RUN playwright install-deps

RUN if [ "$INCLUDE_CHROMIUM" = "true" ]; then \
playwright install chromium; \
else \
echo "Skipping translations in dev mode"; \
fi
RUN playwright install chromium

# Install GeckoDriver WebDriver
ARG GECKODRIVER_VERSION=v0.34.0 \
FIREFOX_VERSION=125.0.3

RUN if [ "$INCLUDE_FIREFOX" = "true" ]; then \
apt-get update -qq \
&& apt-get install -yqq --no-install-recommends wget bzip2 \
&& wget -q https://github.com/mozilla/geckodriver/releases/download/${GECKODRIVER_VERSION}/geckodriver-${GECKODRIVER_VERSION}-linux64.tar.gz -O - | tar xfz - -C /usr/local/bin \
&& wget -q https://download-installer.cdn.mozilla.net/pub/firefox/releases/${FIREFOX_VERSION}/linux-x86_64/en-US/firefox-${FIREFOX_VERSION}.tar.bz2 -O - | tar xfj - -C /opt \
&& ln -s /opt/firefox/firefox /usr/local/bin/firefox \
&& apt-get autoremove -yqq --purge wget bzip2 && rm -rf /var/[log,tmp]/* /tmp/* /var/lib/apt/lists/*; \
fi

# Installing mysql client os-level dependencies in dev image only because GPL
RUN apt-get install -yqq --no-install-recommends \
default-libmysqlclient-dev \
&& rm -rf /var/lib/apt/lists/*
RUN apt-get update -qq \
&& apt-get install -yqq --no-install-recommends wget bzip2 \
&& wget -q https://github.com/mozilla/geckodriver/releases/download/${GECKODRIVER_VERSION}/geckodriver-${GECKODRIVER_VERSION}-linux64.tar.gz -O - | tar xfz - -C /usr/local/bin \
# Install Firefox
&& wget -q https://download-installer.cdn.mozilla.net/pub/firefox/releases/${FIREFOX_VERSION}/linux-x86_64/en-US/firefox-${FIREFOX_VERSION}.tar.bz2 -O - | tar xfj - -C /opt \
&& ln -s /opt/firefox/firefox /usr/local/bin/firefox \
&& apt-get autoremove -yqq --purge wget bzip2 && rm -rf /var/[log,tmp]/* /tmp/* /var/lib/apt/lists/*
# Cache everything for dev purposes...

COPY --chown=superset:superset requirements/development.txt requirements/
RUN --mount=type=cache,target=/root/.cache/pip \
apt-get update -qq && apt-get install -yqq --no-install-recommends \
build-essential \
&& pip install --no-cache-dir -r requirements/development.txt \
&& pip install -r requirements/development.txt \
&& apt-get autoremove -yqq --purge build-essential \
&& rm -rf /var/lib/apt/lists/*
```
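The `INCLUDE_CHROMIUM` and `INCLUDE_FIREFOX` args above let one Dockerfile swap headless browsers at build time. A sketch of building the dev target with Firefox instead of Chromium, assuming the standard Docker CLI (the tag is illustrative):

```bash
# Dev image with GeckoDriver/Firefox instead of the default Chromium.
docker build --target dev \
  --build-arg INCLUDE_CHROMIUM=false \
  --build-arg INCLUDE_FIREFOX=true \
  -t superset:dev-firefox .
```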
````
@@ -19,7 +19,7 @@ under the License.

# Superset

[](https://opensource.org/license/apache-2-0)
[](https://opensource.org/licenses/Apache-2.0)
[](https://github.com/apache/superset/tree/latest)
[](https://github.com/apache/superset/actions)
[](https://badge.fury.io/py/apache-superset)

@@ -134,8 +134,6 @@ Here are some of the major database solutions that are supported:
<img src="https://superset.apache.org/img/databases/starrocks.png" alt="starrocks" border="0" width="200" />
<img src="https://superset.apache.org/img/databases/doris.png" alt="doris" border="0" width="200" />
<img src="https://superset.apache.org/img/databases/oceanbase.svg" alt="oceanbase" border="0" width="220" />
<img src="https://superset.apache.org/img/databases/sap-hana.png" alt="oceanbase" border="0" width="220" />
<img src="https://superset.apache.org/img/databases/denodo.png" alt="denodo" border="0" width="200" />
</p>

**A more comprehensive list of supported databases** along with the configuration instructions can be found [here](https://superset.apache.org/docs/configuration/databases).

@@ -437,7 +437,7 @@ cd ${SUPERSET_RELEASE_RC}
python3 -m venv venv
source venv/bin/activate
pip install -r requirements/base.txt
pip install build twine
pip install twine
```

Create the distribution

@@ -455,7 +455,7 @@ cd ../
./scripts/translations/generate_po_files.sh

# build the python distribution
python -m build
python setup.py sdist
```

Publish to PyPI

@@ -466,7 +466,6 @@ an account first if you don't have one, and reference your username
while requesting access to push packages.

```bash
twine upload dist/apache_superset-${SUPERSET_VERSION}-py3-none-any.whl
twine upload dist/apache-superset-${SUPERSET_VERSION}.tar.gz
```
````
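Taken together, the changed lines above move the release flow from `setup.py sdist` to the `build` package and a wheel upload. A consolidated sketch of the wheel-based side of that flow, using the same version variable as the hunks above:

```bash
# Build sdist and wheel with PEP 517 tooling, then upload the wheel.
pip install build twine
python -m build
twine upload "dist/apache_superset-${SUPERSET_VERSION}-py3-none-any.whl"
```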
```
@@ -137,4 +137,4 @@ There is now a [metadata bar](https://github.com/apache/superset/pull/27857) add

## Change to Docker image builds

Starting in 4.1.0, the release's docker image does not ship with drivers needed to operate Superset. Users may need to install a driver for their metadata database (MySQL or Postgres) as well as the driver for their data warehouse. This is a result of changes to the `lean` docker image that official releases come from; see [Docker Build Presets](/docs/docs/installation/docker-builds.mdx#build-presets) for more details.
Starting in 4.1.0, the release's docker image does not ship with drivers needed to operate Superset. Users may need to install a driver for their metadata database (MySQL or Postgres) as well as the driver for their data warehouse. This is a result of changes to the `lean` docker image that official releases come from; see [Docker Build Presets](/docs/installation/docker-builds#build-presets) for more details.
```

```
@@ -65,43 +65,35 @@ def get_gpg_info(filename: str) -> tuple[Optional[str], Optional[str]]:
output = result.stderr.decode()

rsa_key = re.search(r"RSA key ([0-9A-F]+)", output)
eddsa_key = re.search(r"EDDSA key ([0-9A-F]+)", output)
email = re.search(r'issuer "([^"]+)"', output)

rsa_key_result = rsa_key.group(1) if rsa_key else None
eddsa_key_result = eddsa_key.group(1) if eddsa_key else None
email_result = email.group(1) if email else None

key_result = rsa_key_result or eddsa_key_result

# Debugging:
if key_result:
print("RSA or EDDSA Key found")
else:
print("Warning: No RSA or EDDSA key found in GPG verification output.")
if email_result:
print("email found")
else:
# Debugging: print warnings if rsa_key or email is not found
if rsa_key_result is None:
print("Warning: No RSA key found in GPG verification output.")
if email_result is None:
print("Warning: No email address found in GPG verification output.")

return key_result, email_result
return rsa_key_result, email_result


def verify_key(key: str, email: Optional[str]) -> str:
"""Fetch the KEYS file and verify if the RSA/EDDSA key and email match."""
def verify_rsa_key(rsa_key: str, email: Optional[str]) -> str:
"""Fetch the KEYS file and verify if the RSA key and email match."""
url = "https://downloads.apache.org/superset/KEYS"
response = requests.get(url)
if response.status_code == 200:
if key not in response.text:
return "RSA/EDDSA key not found on KEYS page"
if rsa_key not in response.text:
return "RSA key not found on KEYS page"

# Check if email is None or not in response.text
if email and email in response.text:
return "RSA/EDDSA key and email verified against Apache KEYS file"
return "RSA key and email verified against Apache KEYS file"
elif email:
return "RSA/EDDSA key verified, but Email not found on KEYS page"
return "RSA key verified, but Email not found on KEYS page"
else:
return "RSA/EDDSA key verified, but Email not available for verification"
return "RSA key verified, but Email not available for verification"
else:
return "Failed to fetch KEYS file"

@@ -111,9 +103,9 @@ def verify_sha512_and_rsa(filename: str) -> None:
sha_result = verify_sha512(filename)
print(sha_result)

key, email = get_gpg_info(filename)
if key:
rsa_result = verify_key(key, email)
rsa_key, email = get_gpg_info(filename)
if rsa_key:
rsa_result = verify_rsa_key(rsa_key, email)
print(rsa_result)
else:
print("GPG verification failed: RSA key or email not found")
```
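The script above pulls the signing key ID (RSA or EDDSA) out of gpg's stderr and checks it against the published Apache KEYS file. A rough shell equivalent of that flow, sketched with an illustrative artifact filename:

```bash
# Capture gpg's verification output, extract the RSA/EDDSA key ID,
# and confirm it appears in the Apache KEYS file.
output=$(gpg --verify apache_superset-4.1.1-source.tar.gz.asc 2>&1)
key=$(printf '%s' "$output" | grep -oE '(RSA|EDDSA) key [0-9A-F]+' | awk '{print $3}' | head -n1)
if [ -z "$key" ]; then
  echo "no key ID found in gpg output"
  exit 1
fi
curl -fsSL https://downloads.apache.org/superset/KEYS | grep -q "$key" \
  && echo "key $key found in KEYS file" \
  || echo "key $key NOT found in KEYS file"
```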
```
@@ -25,54 +25,56 @@ all you have to do is file a simple PR [like this one](https://github.com/apache
the categorization is inaccurate, please file a PR with your correction as well.
Join our growing community!

### Sharing Economy

- [Airbnb](https://github.com/airbnb)
- [Faasos](https://faasos.com/) [@shashanksingh]
- [Free2Move](https://www.free2move.com/) [@PaoloTerzi]
- [Faasos](http://faasos.com/) [@shashanksingh]
- [Hostnfly](https://www.hostnfly.com/) [@alexisrosuel]
- [Lime](https://www.li.me/) [@cxmcc]
- [Lyft](https://www.lyft.com/)
- [Ontruck](https://www.ontruck.com/)

### Financial Services

- [Aktia Bank plc](https://www.aktia.com)
- [Aktia Bank plc](https://www.aktia.com) [@villebro]
- [American Express](https://www.americanexpress.com) [@TheLastSultan]
- [bumper](https://www.bumper.co/) [@vasu-ram, @JamiePercival]
- [Cape Crypto](https://capecrypto.com)
- [Capital Service S.A.](https://capitalservice.pl) [@pkonarzewski]
- [Clark.de](https://clark.de/)
- [Capital Service S.A.](http://capitalservice.pl) [@pkonarzewski]
- [Clark.de](http://clark.de/)
- [KarrotPay](https://www.daangnpay.com/)
- [Taveo](https://www.taveo.com) [@codek]
- [Unit](https://www.unit.co/about-us) [@amitmiran137]
- [Wise](https://wise.com) [@koszti]
- [Xendit](https://xendit.co/) [@LieAlbertTriAdrian]
- [Xendit](http://xendit.co/) [@LieAlbertTriAdrian]
- [bumper](https://www.bumper.co/) [@vasu-ram, @JamiePercival]

### Gaming

- [Popoko VM Games Studio](https://popoko.live)

### E-Commerce

- [AiHello](https://www.aihello.com) [@ganeshkrishnan1]
- [Bazaar Technologies](https://www.bazaartech.com) [@umair-abro]
- [Dragonpass](https://www.dragonpass.com.cn/) [@zhxjdwh]
- [Dropit Shopping](https://www.dropit.shop/) [@dropit-dev]
- [Fanatics](https://www.fanatics.com/) [@coderfender]
- [Fordeal](https://www.fordeal.com) [@Renkai]
- [Fordeal](http://www.fordeal.com) [@Renkai]
- [GFG - Global Fashion Group](https://global-fashion-group.com) [@ksaagariconic]
- [HuiShouBao](https://www.huishoubao.com/) [@Yukinoshita-Yukino]
- [HuiShouBao](http://www.huishoubao.com/) [@Yukinoshita-Yukino]
- [Now](https://www.now.vn/) [@davidkohcw]
- [Qunar](https://www.qunar.com/) [@flametest]
- [Rakuten Viki](https://www.viki.com)
- [Shopee](https://shopee.sg) [@xiaohanyu]
- [Shopkick](https://www.shopkick.com) [@LAlbertalli]
- [Tails.com](https://tails.com/gb/) [@alanmcruickshank]
- [THE ICONIC](https://theiconic.com.au/) [@ksaagariconic]
- [THE ICONIC](http://theiconic.com.au/) [@ksaagariconic]
- [Utair](https://www.utair.ru) [@utair-digital]
- [VkusVill](https://vkusvill.ru/) [@ETselikov]
- [Zalando](https://www.zalando.com) [@dmigo]
- [Zalora](https://www.zalora.com) [@ksaagariconic]

### Enterprise Technology

- [A3Data](https://a3data.com.br) [@neylsoncrepalde]
- [Analytics Aura](https://analyticsaura.com/) [@Analytics-Aura]
- [Apollo GraphQL](https://www.apollographql.com/) [@evans]

@@ -81,31 +83,29 @@ Join our growing community!
- [Caizin](https://caizin.com/) [@tejaskatariya]
- [Careem](https://www.careem.com/) [@SamraHanifCareem]
- [Cloudsmith](https://cloudsmith.io) [@alancarson]
- [CnOvit](https://www.cnovit.com/) [@xieshaohu]
- [Cyberhaven](https://www.cyberhaven.com/) [@toliver-ch]
- [Deepomatic](https://deepomatic.com/) [@Zanoellia]
- [Dial Once](https://www.dial-once.com/)
- [Dremio](https://dremio.com) [@narendrans]
- [EFinance](https://www.efinance.com.eg) [@habeeb556]
- [Elestio](https://elest.io/) [@kaiwalyakoparkar]
- [ELMO Cloud HR & Payroll](https://elmosoftware.com.au/)
- [Endress+Hauser](https://www.endress.com/) [@rumbin]
- [FBK - ICT center](https://ict.fbk.eu)
- [Endress+Hauser](http://www.endress.com/) [@rumbin]
- [FBK - ICT center](http://ict.fbk.eu)
- [Gavagai](https://gavagai.io) [@gavagai-corp]
- [GfK Data Lab](https://www.gfk.com/home) [@mherr]
- [Hydrolix](https://www.hydrolix.io/)
- [Intercom](https://www.intercom.com/) [@kate-gallo]
- [jampp](https://jampp.com/)
- [Konfío](https://konfio.mx) [@uis-rodriguez]
- [Konfío](http://konfio.mx) [@uis-rodriguez]
- [Mainstrat](https://mainstrat.com/)
- [mishmash io](https://mishmash.io/) [@mishmash-io]
- [Myra Labs](https://www.myralabs.com/) [@viksit]
- [Nielsen](https://www.nielsen.com/) [@amitNielsen]
- [mishmash io](https://mishmash.io/)[@mishmash-io]
- [Myra Labs](http://www.myralabs.com/) [@viksit]
- [Nielsen](http://www.nielsen.com/) [@amitNielsen]
- [Ona](https://ona.io) [@pld]
- [Orange](https://www.orange.com) [@icsu]
- [Oslandia](https://oslandia.com)
- [Peak AI](https://www.peak.ai/) [@azhar22k]
- [PeopleDoc](https://www.people-doc.com) [@rodo]
- [PlaidCloud](https://www.plaidcloud.com)
- [Preset, Inc.](https://preset.io)
- [PubNub](https://pubnub.com) [@jzucker2]
- [ReadyTech](https://www.readytech.io)

@@ -114,16 +114,17 @@ Join our growing community!
- [Showmax](https://showmax.com) [@bobek]
- [TechAudit](https://www.techaudit.info) [@ETselikov]
- [Tenable](https://www.tenable.com) [@dflionis]
- [Tentacle](https://www.linkedin.com/company/tentacle-cmi/) [@jdclarke5]
- [Tentacle](https://tentaclecmi.com) [@jdclarke5]
- [timbr.ai](https://timbr.ai/) [@semantiDan]
- [Tobii](https://www.tobii.com/) [@dwa]
- [Tobii](http://www.tobii.com/) [@dwa]
- [Tooploox](https://www.tooploox.com/) [@jakubczaplicki]
- [Unvired](https://unvired.com) [@srinisubramanian]
- [Whale](https://whale.im)
- [Unvired](https://unvired.com)[@srinisubramanian]
- [Whale](http://whale.im)
- [Windsor.ai](https://www.windsor.ai/) [@octaviancorlade]
- [Zeta](https://www.zeta.tech/) [@shaikidris]

### Media & Entertainment

- [6play](https://www.6play.fr) [@CoryChaplin]
- [bilibili](https://www.bilibili.com) [@Moinheart]
- [BurdaForward](https://www.burda-forward.de/en/)

@@ -131,21 +132,23 @@ Join our growing community!
- [Kuaishou](https://www.kuaishou.com/) [@zhaoyu89730105]
- [Netflix](https://www.netflix.com/)
- [Prensa Iberica](https://www.prensaiberica.es/) [@zamar-roura]
- [TME QQMUSIC/WESING](https://www.tencentmusic.com/) [@shenyuanli,@marklaw]
- [TME QQMUSIC/WESING](https://www.tencentmusic.com/)[@shenyuanli,@marklaw]
- [Xite](https://xite.com/) [@shashankkoppar]
- [Zaihang](https://www.zaih.com/)
- [Zaihang](http://www.zaih.com/)

### Education

- [Aveti Learning](https://avetilearning.com/) [@TheShubhendra]
- [Brilliant.org](https://brilliant.org/)
- [Platzi.com](https://platzi.com/)
- [Sunbird](https://www.sunbird.org/) [@eksteporg]
- [The GRAPH Network](https://thegraphnetwork.org/) [@fccoelho]
- [The GRAPH Network](https://thegraphnetwork.org/)[@fccoelho]
- [Udemy](https://www.udemy.com/) [@sungjuly]
- [VIPKID](https://www.vipkid.com.cn/) [@illpanda]
- [WikiMedia Foundation](https://wikimediafoundation.org) [@vg]

### Energy

- [Airboxlab](https://foobot.io) [@antoine-galataud]
- [DouroECI](https://www.douroeci.com/) [@nunohelibeires]
- [Safaricom](https://www.safaricom.co.ke/) [@mmutiso]

@@ -153,32 +156,35 @@ Join our growing community!
- [Wattbewerb](https://wattbewerb.de/) [@wattbewerb]

### Healthcare

- [Amino](https://amino.com) [@shkr]
- [Bluesquare](https://www.bluesquarehub.com/) [@madewulf]
- [Care](https://www.getcare.io/) [@alandao2021]
- [Care](https://www.getcare.io/)[@alandao2021]
- [Living Goods](https://www.livinggoods.org) [@chelule]
- [Maieutical Labs](https://maieuticallabs.it) [@xrmx]
- [Medic](https://medic.org) [@1yuv]
- [QPID Health](http://www.qpidhealth.com/)
- [REDCap Cloud](https://www.redcapcloud.com/)
- [TrustMedis](https://trustmedis.com/) [@famasya]
- [WeSure](https://www.wesure.cn/)
- [2070Health](https://2070health.com/)

### HR / Staffing

- [Swile](https://www.swile.co/) [@PaoloTerzi]
- [Symmetrics](https://www.symmetrics.fyi)
- [bluquist](https://bluquist.com/)

### Government / Non-Profit
### Government

- [City of Ann Arbor, MI](https://www.a2gov.org/) [@sfirke]
- [RIS3 Strategy of CZ, MIT CR](https://www.ris3.cz/) [@RIS3CZ]
- [NRLM - Sarathi, India](https://pib.gov.in/PressReleasePage.aspx?PRID=1999586)

### Travel

- [Agoda](https://www.agoda.com/) [@lostseaway, @maiake, @obombayo]
- [Skyscanner](https://www.skyscanner.net/) [@cleslie, @stanhoucke]

### Others

- [10Web](https://10web.io/)
- [AI inside](https://inside.ai/en/)
- [Automattic](https://automattic.com/) [@Khrol, @Usiel]

@@ -189,6 +195,6 @@ Join our growing community!
- [komoot](https://www.komoot.com/) [@christophlingg]
- [Let's Roam](https://www.letsroam.com/)
- [Onebeat](https://1beat.com/) [@GuyAttia]
- [X](https://x.com/)
- [Twitter](https://twitter.com/)
- [VLMedia](https://www.vlmedia.com.tr/) [@ibotheperfect]
- [Yahoo!](https://yahoo.com/)
```
23
UPDATING.md
@@ -22,14 +22,6 @@ under the License.
|
||||
This file documents any backwards-incompatible changes in Superset and
|
||||
assists people when migrating to a new version.
|
||||
|
||||
## Next
|
||||
|
||||
- [29798](https://github.com/apache/superset/pull/29798) Since 3.1.0, the intial schedule for an alert or report was mistakenly offset by the specified timezone's relation to UTC. The initial schedule should now begin at the correct time.
|
||||
- [30021](https://github.com/apache/superset/pull/30021) The `dev` layer in our Dockerfile no long includes firefox binaries, only Chromium to reduce bloat/docker-build-time.
|
||||
- [30099](https://github.com/apache/superset/pull/30099) Translations are no longer included in the default docker image builds. If your environment requires translations, you'll want to set the docker build arg `BUILD_TRANSACTION=true`.
|
||||
|
||||
### Potential Downtime
|
||||
|
||||
## 4.1.0
|
||||
|
||||
- [29274](https://github.com/apache/superset/pull/29274): We made it easier to trigger CI on your
|
||||
@@ -40,9 +32,9 @@ assists people when migrating to a new version.
|
||||
`requirements/` folder. If you use these files for your builds you may want to double
|
||||
check that your builds are not affected. `base.txt` should be the same as before, though
|
||||
`development.txt` becomes a bigger set, incorporating the now defunct local,testing,integration, and docker
|
||||
- [27434](https://github.com/apache/superset/pull/27434/files): DO NOT USE our docker compose.\*
|
||||
- [27434](https://github.com/apache/superset/pull/27434/files): DO NOT USE our docker-compose.\*
|
||||
files for production use cases! While we never really supported
|
||||
or should have tried to support docker compose for production use cases, we now actively
|
||||
or should have tried to support docker-compose for production use cases, we now actively
|
||||
have taken a stance against supporting it. See the PR for details.
|
||||
- [24112](https://github.com/apache/superset/pull/24112): Python 3.10 is now the recommended python version to use, 3.9 still
|
||||
supported but getting deprecated in the nearish future. CI/CD runs on py310 so you probably want to align. If you
|
||||
@@ -66,7 +58,7 @@ assists people when migrating to a new version.
|
||||
backend, as well as the .json files used by the frontend. If you were doing anything before
|
||||
as part of your bundling to expose translation packages, it's probably not needed anymore.
|
||||
- [29264](https://github.com/apache/superset/pull/29264) Slack has updated its file upload api, and we are now supporting this new api in Superset, although the Slack api is not backward compatible. The original Slack integration is deprecated and we will require a new Slack scope `channels:read` to be added to Slack workspaces in order to use this new api. In an upcoming release, we will make this new Slack scope mandatory and remove the old Slack functionality.
|
||||
- [30274](https://github.com/apache/superset/pull/30274) Moved SLACK_ENABLE_AVATAR from config.py to the feature flag framework, please adapt your configs.
|
||||
- [30274](https://github.com/apache/superset/pull/30274) Moved SLACK_ENABLE_AVATAR from config.py to the feature flag framework, please adapt your configs
|
||||
|
||||
### Potential Downtime
|
||||
|
||||
@@ -124,7 +116,7 @@ assists people when migrating to a new version.
|
||||
- [24911](https://github.com/apache/superset/pull/24911): Changes the column type from `TEXT` to `MediumText` in table `logs`, potentially requiring a table lock on MySQL dbs or taking some time to complete on large deployments.
|
||||
- [24939](https://github.com/apache/superset/pull/24939): Augments the foreign key constraints for the `embedded_dashboards` table to include an explicit CASCADE ON DELETE to ensure the relevant records are deleted when a dashboard is deleted. Scheduled downtime may be advised.
|
||||
- [24938](https://github.com/apache/superset/pull/24938): Augments the foreign key constraints for the `dashboard_slices` table to include an explicit CASCADE ON DELETE to ensure the relevant records are deleted when a dashboard or slice is deleted. Scheduled downtime may be advised.
|
||||
- [24628](https://github.com/apache/superset/pull/24628): Augments the foreign key constraints for the `dashboard_owner`, `report_schedule_owner`, and `slice_owner` tables to include an explicit CASCADE ON DELETE to ensure the relevant ownership records are deleted when a dataset is deleted. Scheduled downtime may be advised.
|
||||
- [24628]https://github.com/apache/superset/pull/24628): Augments the foreign key constraints for the `dashboard_owner`, `report_schedule_owner`, and `slice_owner` tables to include an explicit CASCADE ON DELETE to ensure the relevant ownership records are deleted when a dataset is deleted. Scheduled downtime may be advised.
|
||||
- [24488](https://github.com/apache/superset/pull/24488): Augments the foreign key constraints for the `sql_metrics`, `sqlatable_user`, and `table_columns` tables which reference the `tables` table to include an explicit CASCADE ON DELETE to ensure the relevant records are deleted when a dataset is deleted. Scheduled downtime may be advised.
|
||||
- [24232](https://github.com/apache/superset/pull/24232): Enables ENABLE_TEMPLATE_REMOVE_FILTERS, DRILL_TO_DETAIL, DASHBOARD_CROSS_FILTERS by default, marks VERSIONED_EXPORT and ENABLE_TEMPLATE_REMOVE_FILTERS as deprecated.
|
||||
- [23652](https://github.com/apache/superset/pull/23652): Enables GENERIC_CHART_AXES feature flag by default.
|
||||
@@ -140,7 +132,7 @@ assists people when migrating to a new version.
|
||||
|
||||
### Breaking Changes
|
||||
|
||||
- [24686](https://github.com/apache/superset/pull/24686): All dataset's custom explore_url are handled as relative URLs on the frontend, behaviour controlled by PREVENT_UNSAFE_DEFAULT_URLS_ON_DATASET.
|
||||
- [24686]https://github.com/apache/superset/pull/24686): All dataset's custom explore_url are handled as relative URLs on the frontend, behaviour controlled by PREVENT_UNSAFE_DEFAULT_URLS_ON_DATASET.
|
||||
- [24262](https://github.com/apache/superset/pull/24262): Enabled `TALISMAN_ENABLED` flag by default and provided stricter default Content Security Policy
|
||||
- [24415](https://github.com/apache/superset/pull/24415): Removed the obsolete Druid NoSQL REGEX operator.
|
||||
- [24423](https://github.com/apache/superset/pull/24423): Removed deprecated APIs `/superset/slice_json/...`, `/superset/annotation_json/...`
|
||||
@@ -237,7 +229,7 @@ assists people when migrating to a new version.

- [19231](https://github.com/apache/superset/pull/19231): The `ENABLE_REACT_CRUD_VIEWS` feature flag has been removed (permanently enabled). Any deployments which had set this flag to false will need to verify that the React views support their use case.
- [19230](https://github.com/apache/superset/pull/19230): The `ROW_LEVEL_SECURITY` feature flag has been removed (permanently enabled). Any deployments which had set this flag to false will need to verify that the presence of the Row Level Security feature does not interfere with their use case.
- [19168](https://github.com/apache/superset/pull/19168): Celery upgrade to 5.X resulted in breaking changes to its command line invocation.
  html#step-1-adjust-your-command-line-invocation) instructions for adjustments. Also consider migrating your Celery config per [here](https://docs.celeryq.dev/en/stable/userguide/configuration.html#conf-old-settings-map).
- [19142](https://github.com/apache/superset/pull/19142): The `VERSIONED_EXPORT` config key is now `True` by default.
- [19113](https://github.com/apache/superset/pull/19113): The `ENABLE_JAVASCRIPT_CONTROLS` config key has moved from an app config to a feature flag. Any deployments which overrode this setting will now need to override the feature flag from here onward.
- [19107](https://github.com/apache/superset/pull/19107): The `SQLLAB_BACKEND_PERSISTENCE` feature flag is now `True` by default, which enables persisting SQL Lab tabs in the backend instead of the browser's `localStorage`.
@@ -325,7 +317,8 @@ assists people when migrating to a new version.

### Potential Downtime

- [14234](https://github.com/apache/superset/pull/14234): Adds the `limiting_factor` column to the `query` table. Given the migration includes a DDL operation on a heavily trafficked table, potential service downtime may be required.
- [16454](https://github.com/apache/superset/pull/16454): Adds the `extra` column to the `table_columns` table. Users using MySQL will either need to schedule downtime or use the percona toolkit (or similar) to perform the migration.

## 1.2.0

@@ -16,7 +16,7 @@
#
# -----------------------------------------------------------------------
# We don't support docker compose for production environments.
# If you choose to use this type of deployment, make sure to
# create your own docker environment file (docker/.env) with your own
# unique random secure passwords and SECRET_KEY.

@@ -16,7 +16,7 @@
#
# -----------------------------------------------------------------------
# We don't support docker compose for production environments.
# If you choose to use this type of deployment, make sure to
# create your own docker environment file (docker/.env) with your own
# unique random secure passwords and SECRET_KEY.

@@ -16,7 +16,7 @@
#
# -----------------------------------------------------------------------
# We don't support docker compose for production environments.
# If you choose to use this type of deployment, make sure to
# create your own docker environment file (docker/.env) with your own
# unique random secure passwords and SECRET_KEY.
@@ -25,7 +25,6 @@ x-superset-user: &superset-user root
x-superset-depends-on: &superset-depends-on
  - db
  - redis
  - superset-checks
x-superset-volumes: &superset-volumes
  # /app/pythonpath_docker will be appended to the PYTHONPATH in the final container
  - ./docker:/app/docker
@@ -39,8 +38,6 @@ x-common-build: &common-build
  target: dev
  cache_from:
    - apache/superset-cache:3.10-slim-bookworm
  args:
    DEV_MODE: "true"

services:
  nginx:
@@ -123,7 +120,7 @@ services:
      - /home/superset-websocket/dist

      # Mounting a config file that contains a dummy secret required to boot up.
      # Do not use this docker compose setup in production.
      - ./docker/superset-websocket/config.json:/home/superset-websocket/config.json
    environment:
      - PORT=8080
@@ -131,23 +128,6 @@ services:
      - REDIS_PORT=6379
      - REDIS_SSL=false

  superset-checks:
    build:
      context: .
      target: python-base
      cache_from:
        - apache/superset-cache:3.10-slim-bookworm
    container_name: superset_checks
    command: ["/app/scripts/check-env.py"]
    env_file:
      - path: docker/.env # default
        required: true
      - path: docker/.env-local # optional override
        required: false
    user: *superset-user
    healthcheck:
      disable: true

  superset-init:
    build:
      <<: *common-build
@@ -167,18 +147,10 @@ services:
      disable: true

  superset-node:
    build:
      context: .
      target: superset-node
      args:
        # This prevents building the frontend bundle, since we'll mount the local folder
        # and build it on startup while firing docker-frontend.sh in dev mode, where
        # it'll mount and watch local files and rebuild as you update them
        DEV_MODE: "true"
    environment:
      # Set this to false if you have perf issues running `npm i; npm run dev` in-docker;
      # if you do so, you have to run this manually on the host, which should perform better!
      BUILD_SUPERSET_FRONTEND_IN_DOCKER: true
      SCARF_ANALYTICS: "${SCARF_ANALYTICS:-}"
    container_name: superset_node
    command: ["/app/docker/docker-frontend.sh"]
@@ -24,16 +24,12 @@ if [ "$PUPPETEER_SKIP_CHROMIUM_DOWNLOAD" = "false" ]; then
fi

if [ "$BUILD_SUPERSET_FRONTEND_IN_DOCKER" = "true" ]; then
  echo "Building Superset frontend in dev mode inside docker container"
  cd /app/superset-frontend

  echo 'Running npm install'
  npm install

  echo "Running frontend"
  npm run dev

else
  echo "Skipping frontend build steps - YOU NEED TO RUN IT MANUALLY ON THE HOST!"
  echo "https://superset.apache.org/docs/contributing/development/#webpack-dev-server"
fi
@@ -22,11 +22,7 @@ set -e
#
/app/docker/docker-bootstrap.sh

if [ "$SUPERSET_LOAD_EXAMPLES" = "yes" ]; then
  STEP_CNT=4
else
  STEP_CNT=3
fi

echo_step() {
cat <<EOF
@@ -26,7 +26,6 @@ gunicorn \
      --workers ${SERVER_WORKER_AMOUNT:-1} \
      --worker-class ${SERVER_WORKER_CLASS:-gthread} \
      --threads ${SERVER_THREADS_AMOUNT:-20} \
      --log-level "${GUNICORN_LOGLEVEL:-info}" \
      --timeout ${GUNICORN_TIMEOUT:-60} \
      --keep-alive ${GUNICORN_KEEPALIVE:-2} \
      --max-requests ${WORKER_MAX_REQUESTS:-0} \
@@ -1 +1 @@
v20.16.0

@@ -16,7 +16,6 @@ KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->

This is the public documentation site for Superset, built using
[Docusaurus 2](https://docusaurus.io/). See
[CONTRIBUTING.md](../CONTRIBUTING.md#documentation) for documentation on
@@ -77,7 +77,6 @@
  "Guyana",
  "Haiti",
  "Honduras",
  "Hungary",
  "Iceland",
  "India",
  "Indonesia",
@@ -251,18 +251,15 @@ FROM apache/superset:3.1.0

USER root

RUN apt-get update && \
    apt-get install -y wget zip libaio1

RUN export CHROMEDRIVER_VERSION=$(curl --silent https://googlechromelabs.github.io/chrome-for-testing/LATEST_RELEASE_116) && \
    wget -O google-chrome-stable_current_amd64.deb -q http://dl.google.com/linux/chrome/deb/pool/main/g/google-chrome-stable/google-chrome-stable_${CHROMEDRIVER_VERSION}-1_amd64.deb && \
    apt-get install -y --no-install-recommends ./google-chrome-stable_current_amd64.deb && \
    rm -f google-chrome-stable_current_amd64.deb

RUN export CHROMEDRIVER_VERSION=$(curl --silent https://googlechromelabs.github.io/chrome-for-testing/LATEST_RELEASE_116) && \
    wget -q https://storage.googleapis.com/chrome-for-testing-public/${CHROMEDRIVER_VERSION}/linux64/chromedriver-linux64.zip && \
    unzip -j chromedriver-linux64.zip -d /usr/bin && \
    chmod 755 /usr/bin/chromedriver && \
    rm -f chromedriver-linux64.zip

RUN pip install --no-cache gevent psycopg2 redis

@@ -13,8 +13,8 @@ SimpleCache (in-memory), or the local filesystem.
[Custom cache backends](https://flask-caching.readthedocs.io/en/latest/#custom-cache-backends)
are also supported.

Caching can be configured by providing dictionaries in
`superset_config.py` that comply with [the Flask-Caching config specifications](https://flask-caching.readthedocs.io/en/latest/#configuring-flask-caching).

The following cache configurations can be customized in this way:

- Dashboard filter state (required): `FILTER_STATE_CACHE_CONFIG`.
@@ -22,7 +22,7 @@ The following cache configurations can be customized in this way:
- Metadata cache (optional): `CACHE_CONFIG`
- Charting data queried from datasets (optional): `DATA_CACHE_CONFIG`

For example, to configure the filter state cache using Redis:

```python
FILTER_STATE_CACHE_CONFIG = {
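    # Illustrative values; adjust the Redis URL and timeouts to your environment.
    "CACHE_TYPE": "RedisCache",
    "CACHE_DEFAULT_TIMEOUT": 86400,
    "CACHE_KEY_PREFIX": "superset_filter_cache",
    "CACHE_REDIS_URL": "redis://localhost:6379/0",
}
```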
@@ -37,7 +37,7 @@ ENV SUPERSET_CONFIG_PATH /app/superset_config.py
```

Docker compose deployments handle application configuration differently, using specific conventions.
Refer to the [docker compose tips & configuration](/docs/installation/docker-compose#docker-compose-tips--configuration)
for details.

The following is an example of just a few of the parameters you can set in your `superset_config.py` file:
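A minimal sketch follows; every value is a placeholder rather than a recommendation, and `SECRET_KEY` in particular must be replaced with your own securely generated secret:

```python
# superset_config.py - an illustrative subset of the available settings
ROW_LIMIT = 5000  # default row limit for charts and SQL Lab
SECRET_KEY = "CHANGE_ME_TO_A_LONG_RANDOM_STRING"  # assumption: generate your own
# Metadata database URI (placeholder credentials)
SQLALCHEMY_DATABASE_URI = "postgresql://superset:superset@localhost/superset"
FEATURE_FLAGS = {"ALERT_REPORTS": True}  # toggle optional features
```
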
@@ -314,9 +314,9 @@ CUSTOM_SECURITY_MANAGER = CustomSsoSecurityManager
]
```

### Keycloak-Specific Configuration using Flask-OIDC

If you are using Keycloak as an OpenID Connect 1.0 provider, the above configuration based on [`Authlib`](https://authlib.org/) might not work. In this case, using [`Flask-OIDC`](https://pypi.org/project/flask-oidc/) is a viable option.

Make sure the pip package [`Flask-OIDC`](https://pypi.org/project/flask-oidc/) is installed on the webserver. This was successfully tested using version 2.2.0. This package requires [`Flask-OpenID`](https://pypi.org/project/Flask-OpenID/) as a dependency.

The following code defines a new security manager. Add it to a new file named `keycloak_security_manager.py`, placed in the same directory as your `superset_config.py` file. What follows is a minimal sketch of the community-documented pattern; adapt the class bodies to your deployment:
```python
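# keycloak_security_manager.py - a minimal sketch, assuming Flask-OIDC 2.x.
# The view logic is elided; the names below follow the community pattern and
# are not mandated by Superset itself.
from flask_appbuilder.security.manager import AUTH_OID
from flask_appbuilder.security.views import AuthOIDView
from flask_oidc import OpenIDConnect
from superset.security import SupersetSecurityManager


class AuthOIDCView(AuthOIDView):
    # Override login()/logout() here to drive the Flask-OIDC flow:
    # redirect to Keycloak for sign-in and call flask_login.login_user()
    # with the mapped Superset user on success.
    ...


class OIDCSecurityManager(SupersetSecurityManager):
    authoidview = AuthOIDCView

    def __init__(self, appbuilder):
        super().__init__(appbuilder)
        if self.auth_type == AUTH_OID:
            self.oid = OpenIDConnect(self.appbuilder.get_app)
```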
@@ -55,9 +55,7 @@ are compatible with Superset.

| [ClickHouse](/docs/configuration/databases#clickhouse) | `pip install clickhouse-connect` | `clickhousedb://{username}:{password}@{hostname}:{port}/{database}` |
| [CockroachDB](/docs/configuration/databases#cockroachdb) | `pip install cockroachdb` | `cockroachdb://root@{hostname}:{port}/{database}?sslmode=disable` |
| [Couchbase](/docs/configuration/databases#couchbase) | `pip install couchbase-sqlalchemy` | `couchbase://{username}:{password}@{hostname}:{port}?truststorepath={ssl certificate path}` |
| [CrateDB](/docs/configuration/databases#cratedb) | `pip install sqlalchemy-cratedb` | `crate://{username}:{password}@{hostname}:{port}`, often useful: `?ssl=true/false` or `?schema=testdrive` |
| [Denodo](/docs/configuration/databases#denodo) | `pip install denodo-sqlalchemy` | `denodo://{username}:{password}@{hostname}:{port}/{database}` |
| [Dremio](/docs/configuration/databases#dremio) | `pip install sqlalchemy_dremio` | `dremio+flight://{username}:{password}@{host}:32010`, often useful: `?UseEncryption=true/false`. For Legacy ODBC: `dremio+pyodbc://{username}:{password}@{host}:31010` |
| [Elasticsearch](/docs/configuration/databases#elasticsearch) | `pip install elasticsearch-dbapi` | `elasticsearch+http://{user}:{password}@{host}:9200/` |
| [Exasol](/docs/configuration/databases#exasol) | `pip install sqlalchemy-exasol` | `exa+pyodbc://{username}:{password}@{hostname}:{port}/my_schema?CONNECTIONLCALL=en_US.UTF-8&driver=EXAODBC` |
| [Google BigQuery](/docs/configuration/databases#google-bigquery) | `pip install sqlalchemy-bigquery` | `bigquery://{project_id}` |
@@ -395,33 +393,21 @@ couchbase://{username}:{password}@{hostname}:{port}?truststorepath={certificate

#### CrateDB

The connector library for CrateDB is [sqlalchemy-cratedb].
We recommend adding the following item to your `requirements.txt` file:

```
sqlalchemy-cratedb>=0.40.1,<1
```

An SQLAlchemy connection string for [CrateDB Self-Managed] on localhost,
for evaluation purposes, looks like this:

```
crate://crate@127.0.0.1:4200
```

An SQLAlchemy connection string for connecting to [CrateDB Cloud] looks like this:

```
crate://<username>:<password>@<clustername>.cratedb.net:4200/?ssl=true
```

Follow the steps [here](/docs/configuration/databases#installing-database-drivers)
to install the CrateDB connector package when setting up Superset locally using
Docker Compose.

```
echo "sqlalchemy-cratedb" >> ./docker/requirements-local.txt
```

[CrateDB Cloud]: https://cratedb.com/product/cloud
[CrateDB Self-Managed]: https://cratedb.com/product/self-managed
[sqlalchemy-cratedb]: https://pypi.org/project/sqlalchemy-cratedb/


#### Databend

@@ -526,16 +512,6 @@ For a connection to a SQL endpoint you need to use the HTTP path from the endpoint
```


#### Denodo

The recommended connector library for Denodo is
[denodo-sqlalchemy](https://pypi.org/project/denodo-sqlalchemy/).

The expected connection string is formatted as follows (the default port is 9996):

```
denodo://{username}:{password}@{hostname}:{port}/{database}
```

#### Dremio

@@ -546,7 +522,7 @@ The recommended connector library for Dremio is

The expected connection string for ODBC (default port 31010) is formatted as follows:

```
dremio+pyodbc://{username}:{password}@{host}:{port}/{database_name}/dremio?SSL=1
```

The expected connection string for Arrow Flight (Dremio 4.9.1+, default port 32010) is formatted as follows:
@@ -1331,10 +1307,6 @@ Here's what the connection string looks like:

starrocks://<User>:<Password>@<Host>:<Port>/<Catalog>.<Database>
```

:::note
StarRocks maintains their Superset documentation [here](https://docs.starrocks.io/docs/integrations/BI_integrations/Superset/).
:::

#### Teradata

The recommended connector library is

@@ -24,65 +24,9 @@ The following keys in `superset_config.py` can be specified to configure CORS:

## HTTP headers

Note that Superset bundles [flask-talisman](https://pypi.org/project/talisman/),
self-described as a small Flask extension that handles setting HTTP headers that can help
protect against a few common web application security issues.


## HTML Embedding of Dashboards and Charts

There are two ways to embed a dashboard: using the [SDK](https://www.npmjs.com/package/@superset-ui/embedded-sdk) or embedding a direct link. Note that in the latter case everybody who knows the link is able to access the dashboard.

### Embedding a Public Direct Link to a Dashboard

This works by first changing the content security policy (CSP) of [flask-talisman](https://github.com/GoogleCloudPlatform/flask-talisman) to allow certain domains to display Superset content. Then a dashboard can be made publicly accessible, i.e. **bypassing authentication**. Once made public, the dashboard's URL can be added to an iframe in another website's HTML code.

#### Changing flask-talisman CSP

Add to `superset_config.py` the entire `TALISMAN_CONFIG` section from `config.py` and include a `frame-ancestors` section:

```python
TALISMAN_ENABLED = True
TALISMAN_CONFIG = {
    "content_security_policy": {
        ...
        "frame-ancestors": ["*.my-domain.com", "*.another-domain.com"],
        ...
    },
}
```

Restart Superset for this configuration change to take effect.

#### Making a Dashboard Public

1. Add the `'DASHBOARD_RBAC': True` [Feature Flag](https://github.com/apache/superset/blob/master/RESOURCES/FEATURE_FLAGS.md) to `superset_config.py`
2. Add the `Public` role to your dashboard as described [here](https://superset.apache.org/docs/using-superset/creating-your-first-dashboard/#manage-access-to-dashboards)

#### Embedding a Public Dashboard

Now anybody can directly access the dashboard's URL. You can embed it in an iframe like so:

```html
<iframe
  width="600"
  height="400"
  seamless
  frameBorder="0"
  scrolling="no"
  src="https://superset.my-domain.com/superset/dashboard/10/?standalone=1&height=400"
>
</iframe>
```

#### Embedding a Chart

A chart's embed code can be generated by going to a chart's edit view and then clicking at the top right on `...` > `Share` > `Embed code`.

### Enabling Embedding via the SDK

Clicking on `...` next to `EDIT DASHBOARD` at the top right of the dashboard's overview page should yield a drop-down menu including the entry "Embed dashboard".

To enable this entry, add the following line to the `.env` file:

```text
SUPERSET_FEATURE_EMBEDDED_SUPERSET=true
```

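For SDK-based embedding, the host application must also supply a **guest token** to the SDK. The sketch below shows one way to fetch such a token from Superset's REST API; the host URL, credentials, and dashboard UUID are placeholders, and depending on your configuration additional steps (e.g. a CSRF token) may be required:

```python
# Hedged sketch: obtain a guest token for the embedded SDK via the REST API.
import requests

BASE = "https://superset.my-domain.com"  # placeholder: your Superset host

session = requests.Session()

# 1) Log in as a user allowed to issue guest tokens (placeholder credentials).
access_token = session.post(
    f"{BASE}/api/v1/security/login",
    json={"username": "admin", "password": "admin", "provider": "db", "refresh": True},
).json()["access_token"]

# 2) Request a guest token scoped to one embedded dashboard.
guest_token = session.post(
    f"{BASE}/api/v1/security/guest_token/",
    headers={"Authorization": f"Bearer {access_token}"},
    json={
        "user": {"username": "embed-viewer"},  # the identity presented to Superset
        "resources": [{"type": "dashboard", "id": "<embedded-dashboard-uuid>"}],
        "rls": [],  # optional row-level-security clauses
    },
).json()["token"]
print(guest_token)
```
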
## CSRF settings

Similarly, [flask-wtf](https://flask-wtf.readthedocs.io/en/0.15.x/config/) is used to manage
@@ -17,8 +17,8 @@ made available in the Jinja context:

- `columns`: columns to group by in the query
- `filter`: filters applied in the query
- `from_dttm`: start `datetime` value from the selected time range (`None` if undefined; deprecated beginning in version 5.0, use `get_time_filter` instead)
- `to_dttm`: end `datetime` value from the selected time range (`None` if undefined; deprecated beginning in version 5.0, use `get_time_filter` instead)
- `groupby`: columns to group by in the query (deprecated)
- `metrics`: aggregate expressions in the query
- `row_limit`: row limit of the query
@@ -48,15 +48,12 @@ WHERE (
  {% if to_dttm is not none %}
    dttm_col < '{{ to_dttm }}' AND
  {% endif %}
  1 = 1
)
```

The `1 = 1` at the end ensures a value is present for the `WHERE` clause even when
the time filter is not set. For many database engines, this could be replaced with `true`.

Note that the Jinja parameters are called within _double_ brackets in the query and with
_single_ brackets in the logic blocks.

To add custom functionality to the Jinja context, you need to overload the default Jinja
context in your environment by defining `JINJA_CONTEXT_ADDONS` in your Superset configuration. For instance, a minimal addition might look like this (the macro name and logic are purely illustrative):
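```python
# In superset_config.py: expose an extra callable to Jinja-templated SQL.
JINJA_CONTEXT_ADDONS = {
    "my_double": lambda x: x * 2,  # usable in SQL Lab as {{ my_double(21) }}
}
```
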
@@ -97,7 +94,7 @@ There is a special ``_filters`` parameter which can be used to test filters used

```sql
SELECT action, count(*) as times
FROM logs
WHERE action in {{ filter_values('action_type')|where_in }}
GROUP BY action
```

@@ -349,78 +346,6 @@ Here's a concrete example:

    order by lineage, level
```

**Time Filter**

The `{{ get_time_filter() }}` macro returns the time filter applied to a specific column. This is useful if you want
to handle time filters inside the virtual dataset, as by default the time filter is placed on the outer query. This can
considerably improve performance, as many databases and query engines are able to optimize the query better
if the temporal filter is placed on the inner query, as opposed to the outer query.

The macro takes the following parameters:

- `column`: Name of the temporal column. Leave undefined to reference the time range from a Dashboard Native Time Range
  filter (when present).
- `default`: The default value to fall back to if the time filter is not present, or has the value `No filter`.
- `target_type`: The target temporal type as recognized by the target database (e.g. `TIMESTAMP`, `DATE` or
  `DATETIME`). If `column` is defined, the format will default to the type of the column. This is used to produce
  the format of the `from_expr` and `to_expr` properties of the returned `TimeFilter` object.
- `strftime`: format using the `strftime` method of `datetime` for custom time formatting
  ([see docs for valid format codes](https://docs.python.org/3/library/datetime.html#strftime-and-strptime-format-codes)).
  When defined, `target_type` will be ignored.
- `remove_filter`: When set to true, mark the filter as processed, removing it from the outer query. Useful when a
  filter should only apply to the inner query.

The return type has the following properties:

- `from_expr`: the start of the time filter (if any)
- `to_expr`: the end of the time filter (if any)
- `time_range`: the applied time range

Here's a concrete example using the `logs` table from the Superset metastore:

```
{% set time_filter = get_time_filter("dttm", remove_filter=True) %}
{% set from_expr = time_filter.from_expr %}
{% set to_expr = time_filter.to_expr %}
{% set time_range = time_filter.time_range %}
SELECT
  *,
  '{{ time_range }}' as time_range
FROM logs
{% if from_expr or to_expr %}WHERE 1 = 1
{% if from_expr %}AND dttm >= {{ from_expr }}{% endif %}
{% if to_expr %}AND dttm < {{ to_expr }}{% endif %}
{% endif %}
```

Assuming we are creating a table chart with a simple `COUNT(*)` as the metric with a time filter `Last week` on the
`dttm` column, this would render the following query on Postgres (note the formatting of the temporal filters, and
the absence of time filters on the outer query):

```
SELECT COUNT(*) AS count
FROM
  (SELECT *,
          'Last week' AS time_range
   FROM public.logs
   WHERE 1 = 1
     AND dttm >= TO_TIMESTAMP('2024-08-27 00:00:00.000000', 'YYYY-MM-DD HH24:MI:SS.US')
     AND dttm < TO_TIMESTAMP('2024-09-03 00:00:00.000000', 'YYYY-MM-DD HH24:MI:SS.US')) AS virtual_table
ORDER BY count DESC
LIMIT 1000;
```

When using the `default` parameter, the templated query can be simplified, as the endpoints will always be defined
(to use a fixed time range, you can also use something like `default="2024-08-27 : 2024-09-03"`):

```
{% set time_filter = get_time_filter("dttm", default="Last week", remove_filter=True) %}
SELECT
  *,
  '{{ time_filter.time_range }}' as time_range
FROM logs
WHERE
  dttm >= {{ time_filter.from_expr }}
  AND dttm < {{ time_filter.to_expr }}
```

**Datasets**

It's possible to query physical and virtual datasets using the `dataset` macro. This is useful if you've defined computed columns and metrics on your datasets, and want to reuse the definition in adhoc SQL Lab queries.

@@ -6,13 +6,13 @@ version: 1

# Setting up a Development Environment

The documentation in this section is a bit of a patchwork of knowledge representing the
multitude of ways that exist to run Superset (`docker compose`, just "docker", on "metal", or using
a Makefile).

:::note
We have now evolved to recommend and support `docker compose` more actively as the main way
to run Superset for development and preserve your sanity. **Most people should stick to
the first few sections - ("Fork & Clone", "docker compose" and "Installing Dev Tools")**
:::

## Fork and Clone

@@ -27,12 +27,12 @@ git clone git@github.com:your-username/superset.git
cd superset
```

## docker compose (recommended!)

Setting things up to squeeze a "hello world" into any part of Superset should be as simple as

```bash
docker compose up
```

Note that:
@@ -45,7 +45,7 @@ Note that:

- **Postgres** as the metadata database and to store example datasets, charts and dashboards, which
  should be populated upon startup
- **Redis** as the message queue for our async backend and caching backend
- It'll load up examples into the database upon the first startup
- All other details and pointers are available in
  [docker-compose.yml](https://github.com/apache/superset/blob/master/docker-compose.yml)
- The local repository is mounted within the services, meaning updating
@@ -53,17 +53,10 @@ Note that:

- Superset is served at localhost:8088/
- You can log in with admin/admin

:::note
Installing and building Node modules for Apache Superset inside `superset-node` can take a
significant amount of time. This is normal due to the size of the dependencies. Please be
patient while the process completes, as long wait times do not indicate an issue with your setup.
If delays seem excessive, check your internet connection or system resources.
:::

:::caution
Since `docker compose` is primarily designed to run a set of containers on **a single host**
and can't credibly support **high availability** as a result, we do not support nor recommend
using our `docker compose` constructs for production-type use-cases. For single-host
environments, we recommend using [minikube](https://minikube.sigs.k8s.io/docs/start/) along with
our [installing on k8s](https://superset.apache.org/docs/installation/running-on-kubernetes)
documentation.
@@ -73,10 +66,10 @@ configured to be secure.

## Installing Development Tools

:::note
While `docker compose` simplifies a lot of the setup, there are still
many things you'll want to set up locally to power your IDE, and things like
**commit hooks**, **linters**, and **test-runners**. Note that you can do these
things inside docker images with commands like `docker compose exec superset_app bash`, for
instance, but many people like to run that tooling from their host.
:::

@@ -99,55 +92,13 @@ To install run the following:

pre-commit install
```

This will install the hooks in your local repository. From now on, a series of checks will
automatically run whenever you make a Git commit.

#### Running Pre-commit Manually

You can also run the pre-commit checks manually in various ways:

- **Run pre-commit on all files (same as CI):**

  To run the pre-commit checks across all files in your repository, use the following command:

  ```bash
  pre-commit run --all-files
  ```

  This is the same set of checks that will run during CI, ensuring your changes meet the project's standards.

- **Run pre-commit on a specific file:**

  If you want to check or fix a specific file, you can do so by specifying the file path:

  ```bash
  pre-commit run --files path/to/your/file.py
  ```

  This will only run the checks on the file(s) you specify.

- **Run a specific pre-commit check:**

  To run a specific check (hook) across all files or a particular file, use the following command:

  ```bash
  pre-commit run <hook_id> --all-files
  ```

  Or for a specific file:

  ```bash
  pre-commit run <hook_id> --files path/to/your/file.py
  ```

  Replace `<hook_id>` with the ID of the specific hook you want to run. You can find the list
  of available hooks in the `.pre-commit-config.yaml` file.

## Alternatives to `docker compose`

:::caution
This part of the documentation is a patchwork of information related to setting up
development environments without `docker compose` and is documented/supported to varying
degrees. It's been difficult to maintain this wide array of methods and ensure they're
functioning across environments.
:::
@@ -157,7 +108,7 @@ functioning across environments.

#### OS Dependencies

Make sure your machine meets the [OS dependencies](https://superset.apache.org/docs/installation/pypi#os-dependencies) before following these steps.
You also need to install MySQL.

Ensure that you are using Python version 3.9, 3.10 or 3.11, then proceed with:

@@ -187,11 +138,11 @@ superset load-examples

# Start the Flask dev web server from inside your virtualenv.
# Note that your page may not have CSS at this point.
# See instructions below on how to build the front-end assets.
superset run -p 8088 --with-threads --reload --debugger --debug
```

Or you can install it via our Makefile

```bash
# Create a virtual environment and activate it (recommended)
@@ -209,7 +160,7 @@ $ make pre-commit
```

**Note: the FLASK_APP env var should not need to be set, as it's currently controlled
via `.flaskenv`; however, if needed, it should be set to `superset.app:create_app()`**

If you have made changes to the FAB-managed templates, which are not built the same way as the newer, React-powered front-end assets, you need to start the app without the `--with-threads` argument, like so:
`superset run -p 8088 --reload --debugger --debug`

@@ -274,7 +225,7 @@ Frontend assets (TypeScript, JavaScript, CSS, and images) must be compiled in order

First, be sure you are using the following versions of Node.js and npm:

- `Node.js`: Version 20
- `npm`: Version 10

We recommend using [nvm](https://github.com/nvm-sh/nvm) to manage your node environment:
@@ -312,7 +263,7 @@ cd superset-frontend

npm ci
```

Note that Superset uses [Scarf](https://docs.scarf.sh) to capture telemetry/analytics about versions being installed, including the `scarf-js` npm package and an analytics pixel. As noted elsewhere in this documentation, Scarf gathers aggregated stats for the sake of security/release strategy and does not capture/retain PII. [You can read here](https://docs.scarf.sh/package-analytics/) about the `scarf-js` package, and various means to opt out of it, but you can opt out of the npm package _and_ the pixel by setting the `SCARF_ANALYTICS` environment variable to `false`, or opt out of the pixel alone by adding this setting in `superset-frontend/package.json`:

```json
// your-package/package.json
@@ -340,7 +291,7 @@ Error: ENOSPC: System limit for number of file watchers reached
```

The error is thrown because the number of files monitored by the system has reached the limit.
You can address this error by increasing the number of inotify watchers.

The current value of max watches can be checked with:

@@ -351,13 +302,13 @@ cat /proc/sys/fs/inotify/max_user_watches

Edit the file `/etc/sysctl.conf` to increase this value.
The value needs to be decided based on the system memory [(see this StackOverflow answer for more context)](https://stackoverflow.com/questions/535768/what-is-a-reasonable-amount-of-inotify-watches-with-linux).

Open the file in an editor and add a line at the bottom specifying the max watches value.

```bash
fs.inotify.max_user_watches=524288
```

Save the file and exit the editor.
To confirm that the change succeeded, run the following command to load the updated value of max_user_watches from `sysctl.conf`:

```bash
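sudo sysctl -p   # reloads /etc/sysctl.conf and prints the applied values
```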
@@ -455,7 +406,7 @@ pre-commit install

A series of checks will now run when you make a git commit.

Alternatively, it is possible to run pre-commit via tox:

```bash
tox -e pre-commit
```
@@ -539,7 +490,7 @@ commands are invoked.

There is also a utility script included in the Superset codebase to run python integration tests. The [readme can be
found here](https://github.com/apache/superset/tree/master/scripts/tests).

To run all integration tests, for example, run this script from the root directory:

```bash
scripts/tests/run.sh
```
@@ -614,14 +565,14 @@ As an alternative you can use the docker compose environment for testing:

Make sure you have added the line below to your /etc/hosts file:
`127.0.0.1 db`

If you have already launched the Docker environment, use the following command to ensure a fresh database instance:
`docker compose down -v`

Launch the environment:

`CYPRESS_CONFIG=true docker compose up`

It will serve the backend and frontend on port 8088.

Run Cypress tests:

@@ -658,12 +609,12 @@ For debugging locally using VSCode, you can configure a launch configuration file

}
```

#### Raw Docker (without `docker compose`)

Follow these instructions to debug the Flask app running inside a docker container. Note that
this will run a barebones Superset web server.

First, add the following to the ./docker-compose.yaml file:

```diff
 superset:
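+    cap_add:
+      - SYS_PTRACE   # assumption: allows attaching the debugger inside the container
     ports:
       - "8088:8088"
+      - "5678:5678"  # assumption: exposes the debugpy port referenced below
```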
@@ -777,7 +728,7 @@ See [set capabilities for a container](https://kubernetes.io/docs/tasks/configur

Once the pod is running as root and has the SYS_PTRACE capability, it will be able to debug the Flask app.

You can follow the same instructions as in `docker compose`. Enter the pod and install the required libraries and packages: gdb, netstat and debugpy.

Often in a Kubernetes environment nodes are not addressable from outside the cluster. VSCode will thus be unable to remotely connect to port 5678 on a Kubernetes node. In order to do this you need to create a tunnel that port forwards 5678 to your local machine.

@@ -785,11 +736,11 @@ Often in a Kubernetes environment nodes are not addressable from outside the cluster

kubectl port-forward pod/superset-<some random id> 5678:5678
```

You can now launch your VSCode debugger with the same config as above. VSCode will connect to 127.0.0.1:5678, which is forwarded by kubectl to your remote Kubernetes pod.

### Storybook

Superset includes a [Storybook](https://storybook.js.org/) to preview the layout/styling of various Superset components and variations thereof. To open and view the Storybook:

```bash
cd superset-frontend
npm run storybook  # assumption: see the scripts in superset-frontend/package.json
```

@@ -939,7 +890,7 @@ To fix it:

from alembic import op
```

Alternatively, you may also run `superset db merge` to create a migration script
just for merging the heads.

```bash
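superset db merge <revision_1> <revision_2>   # placeholder revision IDs of the two heads
```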
@@ -438,7 +438,7 @@ See [set capabilities for a container](https://kubernetes.io/docs/tasks/configur

Once the pod is running as root and has the `SYS_PTRACE` capability, it will be able to debug the Flask app.

You can follow the same instructions as in `docker compose`. Enter the pod and install the required libraries and packages: gdb, netstat and debugpy.

Often in a Kubernetes environment nodes are not addressable from outside the cluster. VSCode will thus be unable to remotely connect to port 5678 on a Kubernetes node. In order to do this you need to create a tunnel that port forwards 5678 to your local machine.

@@ -174,16 +174,13 @@ You can take a look at this Flask-AppBuilder

## Is there a way to force the dashboard to use specific colors?

It is possible on a per-dashboard basis by providing a mapping of labels to colors in the JSON
Metadata attribute using the `label_colors` key. You can use either the full hex color, a named color
like `red`, `coral` or `lightblue`, or the index in the current color palette (0 for the first color, 1 for the
second, etc.). Example:

```json
{
  "label_colors": {
    "foo": "#FF69B4",
    "bar": "lightblue",
    "baz": 0
  }
}
```

@@ -59,29 +59,11 @@ Here are the build presets that are exposed through the `build_docker.py` script

  this specific SHA, which could be from a `master` merge, or release.
- `websocket-latest`: The WebSocket image for use in a Superset cluster.

For insights or modifications to the build matrix and tagging conventions,
check the [build_docker.py](https://github.com/apache/superset/blob/master/scripts/build_docker.py)
script and the [docker.yml](https://github.com/apache/superset/blob/master/.github/workflows/docker.yml)
GitHub action.

## Key ARGs in Dockerfile

- `BUILD_TRANSLATIONS`: whether to build the translations into the image. For the
  frontend build this tells webpack to strip out all locales other than `en` from
  the `moment-timezone` library. For the backend, this skips compiling the
  `*.po` translation files.
- `DEV_MODE`: whether to skip the frontend build; this is used by our `docker compose` dev setup,
  where we mount the local volume and build using `webpack` in `--watch` mode, meaning that as you
  alter the code in the local file system, webpack, from within a docker image used for this
  purpose, will constantly rebuild the frontend as you go. This ARG enables the initial
  `docker compose` build to take much less time and resources.
- `INCLUDE_CHROMIUM`: whether to include chromium in the backend build so that it can be
  used as a headless browser for workloads related to "Alerts & Reports" and thumbnail generation.
- `INCLUDE_FIREFOX`: same as above, but for firefox.
- `PY_VER`: specifies the base image for the python backend; we don't recommend altering
  this setting if you're not working on forwards or backwards compatibility.

## Caching

To accelerate builds, we follow Docker best practices and use `apache/superset-cache`.
@@ -101,7 +83,7 @@ add database support for the database you need.

Currently all automated builds are multi-platform, supporting both `linux/arm64`
and `linux/amd64`. This enables higher-level constructs like `helm` and
`docker compose` to point to these images and effectively be multi-platform
as well.

Pull requests and master builds

@@ -13,9 +13,9 @@ import useBaseUrl from "@docusaurus/useBaseUrl";

<br /><br />

:::caution
Since `docker compose` is primarily designed to run a set of containers on **a single host**
and can't support requirements for **high availability**, we do not support nor recommend
using our `docker compose` constructs for production-type use-cases. For single-host
environments, we recommend using [minikube](https://minikube.sigs.k8s.io/docs/start/) along with
our [installing on k8s](https://superset.apache.org/docs/installation/running-on-kubernetes)
documentation.
@@ -26,7 +26,7 @@ Superset locally is using Docker Compose on a Linux or Mac OSX
computer. Superset does not have official support for Windows. It's also the easiest
way to launch a fully functioning **development environment** quickly.

Note that there are 3 major ways we support to run `docker compose`:

1. **docker-compose.yml:** for interactive development, where we mount your local folder with the
   frontend/backend files that you can edit and experience the changes you
@@ -49,9 +49,9 @@ More on these two approaches after setting up the requirements for either.

## Requirements

Note that this documentation assumes that you have [Docker](https://www.docker.com) and
[git](https://git-scm.com/) installed. Note also that we used to use `docker-compose`, but that
is on the path to deprecation, so we now use `docker compose` instead.

## 1. Clone Superset's GitHub repository

@@ -67,7 +67,7 @@ current directory.

## 2. Launch Superset Through Docker Compose

First, let's assume you're familiar with `docker compose` mechanics. Here we'll refer generally
to `docker compose up`, even though in some cases you may want to force a check for newer remote
images using `docker compose pull`, force a build with `docker compose build`, or force a build
on the latest base images using `docker compose build --pull`. In most cases, though, the simple
@@ -112,7 +112,7 @@ Here various release tags, github SHA, and latest `master` can be referenced by

Refer to the docker-related documentation to learn more about existing tags you can point to
from Docker Hub.

## `docker compose` tips & configuration

:::caution
All of the content belonging to a Superset instance - charts, dashboards, users, etc. - is stored in
@@ -137,7 +137,7 @@ You can install additional python packages and apply config overrides by following the directions
mentioned in [docker/README.md](https://github.com/apache/superset/tree/master/docker#configuration)

Note that `docker/.env` sets the default environment variables for all the docker images
used by `docker compose`, and that `docker/.env-local` can be used to override those defaults.
Also note that `docker/.env-local` is referenced in our `.gitignore`,
preventing developers from risking committing potentially sensitive configuration to the repository.

@@ -153,7 +153,9 @@ See [Install Database Drivers](/docs/configuration/databases) for more information

:::

The following example installs the drivers for BigQuery and Elasticsearch, allowing you to connect to these data sources within your Superset setup:

```yaml
bootstrapScript: |
  #!/bin/bash
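  # Assumption: pin versions appropriate to your Superset release.
  pip install sqlalchemy-bigquery elasticsearch-dbapi
```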
@@ -22,18 +22,18 @@ level dependencies.

**Debian and Ubuntu**

In Ubuntu **20.04 and 22.04**, the following command will ensure that the required dependencies are installed:

```bash
sudo apt-get install build-essential libssl-dev libffi-dev python3-dev python3-pip libsasl2-dev libldap2-dev default-libmysqlclient-dev
```

**Fedora and RHEL-derivative Linux distributions**

Install the following packages using the `yum` package manager:

@@ -40,7 +40,6 @@ container images and will load up some examples. Once all containers

are downloaded and the output settles, you're ready to log in.

⚠️ If you get an error message like `validating superset\docker-compose-image-tag.yml: services.superset-worker-beat.env_file.0 must be a string`, you need to update your version of `docker-compose`.
Note that `docker-compose` is on the path to deprecation and you should now use `docker compose` instead.

### 3. Log into Superset

@@ -27,34 +27,33 @@ following information about each flight is given:

You may need to enable the functionality to upload a CSV or Excel file to your database. The following section
explains how to enable this functionality for the examples database.

In the top menu, select **Settings ‣ Data ‣ Database Connections**. Find the **examples** database in the list and
select the **Edit** button.

<img src={useBaseUrl("/img/tutorial/edit-record.png" )} />

In the resulting modal window, switch to the **Advanced** tab and open the **Security** section.
Then, tick the checkbox for **Allow file uploads to database**. End by clicking the **Finish** button.

<img src={useBaseUrl("/img/tutorial/allow-file-uploads.png" )} />

### Loading CSV Data

Download the CSV dataset to your computer from
[GitHub](https://raw.githubusercontent.com/apache-superset/examples-data/master/tutorial_flights.csv).
In the top menu, select **Settings ‣ Data ‣ Database Connections**. Then, select **Upload file to database ‣ Upload CSV**.

<img src={useBaseUrl("/img/tutorial/upload_a_csv.png" )} />

Then, select the CSV file from your computer, select the **Database** and **Schema**, and enter the **Table Name**
as _tutorial_flights_.

<img src={useBaseUrl("/img/tutorial/csv_to_database_configuration.png" )} />

Next, enter the text _Travel Date_ into the **File settings ‣ Columns to be parsed as dates** field.

<img src={useBaseUrl("/img/tutorial/parse_dates_column.png" )} />

Leaving all the other options at their default settings, select **Upload** at the bottom of the page.

### Table Visualization

@@ -17,40 +17,40 @@

    "typecheck": "tsc"
  },
  "dependencies": {
    "@algolia/client-search": "^5.12.0",
    "@ant-design/icons": "^5.4.0",
    "@docsearch/react": "^3.6.3",
    "@docusaurus/core": "^3.5.2",
    "@docusaurus/plugin-client-redirects": "^3.5.2",
    "@docusaurus/preset-classic": "^3.5.2",
    "@emotion/core": "^10.1.1",
    "@emotion/styled": "^10.0.27",
    "@mdx-js/react": "^3.1.0",
    "@saucelabs/theme-github-codeblock": "^0.3.0",
    "@superset-ui/style": "^0.14.23",
    "@svgr/webpack": "^8.1.0",
    "antd": "^5.21.6",
    "buffer": "^6.0.3",
    "clsx": "^2.1.1",
    "docusaurus-plugin-less": "^2.0.2",
    "file-loader": "^6.2.0",
    "less": "^4.2.0",
    "less-loader": "^11.0.0",
    "prism-react-renderer": "^2.4.0",
    "react": "^18.3.1",
    "react-dom": "^18.3.1",
    "react-github-btn": "^1.4.0",
    "react-svg-pan-zoom": "^3.13.1",
    "stream": "^0.0.3",
    "swagger-ui-react": "^5.17.14",
    "url-loader": "^4.1.1"
  },
  "devDependencies": {
    "@docusaurus/module-type-aliases": "^3.5.2",
    "@docusaurus/tsconfig": "^3.5.2",
    "@types/react": "^18.3.12",
    "typescript": "^5.6.3",
    "webpack": "^5.96.1"
  },
  "browserslist": {
    "production": [
@@ -132,9 +132,4 @@ export const Databases = [

    href: 'https://www.couchbase.com/',
    imgName: 'couchbase.svg',
  },
  {
    title: 'Denodo',
    href: 'https://www.denodo.com/',
    imgName: 'denodo.png',
  },
];

@@ -16,6 +16,8 @@
 * specific language governing permissions and limitations
 * under the License.
 */
@import '~antd/lib/style/themes/default.less';
@import '~antd/dist/antd.less'; // Import Ant Design styles by less entry
@import 'antd-theme.less';

body {
BIN docs/static/img/databases/denodo.png (vendored) | Before: 17 KiB
BIN docs/static/img/databases/timescale.png (vendored) | Before: 11 KiB | After: 16 KiB
BIN docs/static/img/tutorial/add-data-upload.png (vendored, new file) | After: 90 KiB
BIN docs/static/img/tutorial/allow-file-uploads.png (vendored) | Before: 139 KiB
BIN (filename not captured) | Before: 248 KiB | After: 144 KiB
BIN docs/static/img/tutorial/parse_dates_column.png (vendored) | Before: 93 KiB | After: 92 KiB
BIN docs/static/img/tutorial/upload_a_csv.png (vendored) | Before: 70 KiB | After: 102 KiB
docs/static/resources/openapi.json (vendored): 96 lines
@@ -116,8 +116,7 @@
        "GENERIC_BACKEND_ERROR",
        "INVALID_PAYLOAD_FORMAT_ERROR",
        "INVALID_PAYLOAD_SCHEMA_ERROR",
        "REPORT_NOTIFICATION_ERROR",
        "RESULT_TOO_LARGE_ERROR"
        "REPORT_NOTIFICATION_ERROR"
      ],
      "type": "string"
    },
@@ -3008,33 +3007,6 @@
      },
      "type": "object"
    },
    "DashboardNativeFiltersConfigUpdateSchema": {
      "properties": {
        "deleted": {
          "description": "List of filter ids to delete",
          "items": {
            "type": "integer"
          },
          "type": "array"
        },
        "modified": {
          "description": "List of filter objects to update",
          "items": {
            "type": "object"
          },
          "type": "array"
        },
        "reordered": {
          "description": "List of filter ids in the new order",
          "items": {
            "type": "integer"
          },
          "type": "array"
        }
      },
      "required": ["deleted", "modified", "reordered"],
      "type": "object"
    },
    "DashboardCopySchema": {
      "properties": {
        "css": {
@@ -13522,72 +13494,6 @@
      "tags": ["Dashboards"]
    }
  },
  "/api/v1/dashboard/{id_or_slug}/filters/": {
    "put": {
      "description": "Update the filters for a given dashboard",
      "parameters": [
        {
          "in": "path",
          "name": "id_or_slug",
          "required": true,
          "schema": {
            "type": "string"
          }
        }
      ],
      "requestBody": {
        "content": {
          "application/json": {
            "schema": {
              "$ref": "#/components/schemas/DashboardNativeFiltersConfigUpdateSchema"
            }
          }
        },
        "required": true
      },
      "responses": {
        "200": {
          "content": {
            "application/json": {
              "schema": {
                "properties": {
                  "result": {
                    "type": "array",
                    "items": {
                      "type": "object"
                    }
                  }
                },
                "type": "object"
              }
            }
          },
          "description": "Successfully updated the filters"
        },
        "400": {
          "$ref": "#/components/responses/400"
        },
        "401": {
          "$ref": "#/components/responses/401"
        },
        "403": {
          "$ref": "#/components/responses/403"
        },
        "404": {
          "$ref": "#/components/responses/404"
        },
        "500": {
          "$ref": "#/components/responses/500"
        }
      },
      "security": [
        {
          "jwt": []
        }
      ],
      "tags": ["Dashboards"]
    }
  },
  "/api/v1/dashboard/{id_or_slug}/copy/": {
    "post": {
      "parameters": [
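For reference, a hedged sketch of calling the `PUT /api/v1/dashboard/{id_or_slug}/filters/` endpoint described above with Python `requests`; the host, dashboard slug, token, and filter ids are placeholders, and the payload shape follows `DashboardNativeFiltersConfigUpdateSchema`:

```python
# Sketch of a client call to the filters endpoint documented above.
# Host, dashboard slug, JWT, and ids below are placeholders, not real values.
import requests

payload = {
    "deleted": [3, 4],      # filter ids to delete
    "modified": [{"id": "NATIVE_FILTER-abc123", "name": "Region"}],  # filter objects to update
    "reordered": [1, 2],    # filter ids in the new order
}
resp = requests.put(
    "https://superset.example.com/api/v1/dashboard/my-dashboard/filters/",
    json=payload,
    headers={"Authorization": "Bearer <jwt-token>"},
    timeout=30,
)
resp.raise_for_status()
print(resp.json()["result"])  # list of filter objects on success
```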
docs/yarn.lock: 1611 lines changed (lockfile regenerated; diff not shown)
@@ -15,7 +15,7 @@
# limitations under the License.
#
apiVersion: v2
appVersion: "4.1.1"
appVersion: "4.0.1"
description: Apache Superset is a modern, enterprise-ready business intelligence web application
name: superset
icon: https://artifacthub.io/image/68c1d717-0e97-491f-b046-754e46f46922@2x
@@ -29,7 +29,7 @@ maintainers:
- name: craig-rueda
  email: craig@craigrueda.com
  url: https://github.com/craig-rueda
version: 0.13.3
version: 0.12.12
dependencies:
- name: postgresql
  version: 12.1.6
@@ -23,7 +23,7 @@ NOTE: This file is generated by helm-docs: https://github.com/norwoodj/helm-docs

# superset




Apache Superset is a modern, enterprise-ready business intelligence web application

@@ -69,7 +69,6 @@ On helm this can be set on `extraSecretEnv.SUPERSET_SECRET_KEY` or `configOverrides`
| extraConfigs | object | `{}` | Extra files to mount on `/app/pythonpath` |
| extraEnv | object | `{}` | Extra environment variables that will be passed into pods |
| extraEnvRaw | list | `[]` | Extra environment variables in RAW format that will be passed into pods |
| extraLabels | object | `{}` | Labels to be added to all resources (see the sketch after this table) |
| extraSecretEnv | object | `{}` | Extra environment variables to pass as secrets |
| extraSecrets | object | `{}` | Extra files to mount on `/app/pythonpath` as secrets |
| extraVolumeMounts | list | `[]` | |
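As a usage sketch for the `extraLabels` value documented above; the release name, namespace, and chart reference are assumptions for illustration, not taken from this diff:

```python
# Hypothetical: install the chart with a label applied to all resources via
# extraLabels. Release, namespace, and chart names are assumptions.
import subprocess

subprocess.run(
    [
        "helm", "upgrade", "--install", "superset", "superset/superset",
        "--namespace", "superset",
        "--set", "extraLabels.team=analytics",
    ],
    check=True,
)
```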
@@ -28,9 +28,6 @@ metadata:
    chart: {{ template "superset.chart" . }}
    release: {{ .Release.Name }}
    heritage: {{ .Release.Service }}
    {{- if .Values.extraLabels }}
      {{- toYaml .Values.extraLabels | nindent 4 }}
    {{- end }}
data:
  {{- range $path, $config := .Values.extraConfigs }}
  {{ $path }}: |
@@ -28,9 +28,6 @@ metadata:
    chart: {{ template "superset.chart" . }}
    release: {{ .Release.Name }}
    heritage: {{ .Release.Service }}
    {{- if .Values.extraLabels }}
      {{- toYaml .Values.extraLabels | nindent 4 }}
    {{- end }}
  {{- if .Values.supersetCeleryBeat.deploymentAnnotations }}
  annotations: {{- toYaml .Values.supersetCeleryBeat.deploymentAnnotations | nindent 4 }}
  {{- end }}
@@ -61,9 +58,6 @@ spec:
      labels:
        app: "{{ template "superset.name" . }}-celerybeat"
        release: {{ .Release.Name }}
        {{- if .Values.extraLabels }}
          {{- toYaml .Values.extraLabels | nindent 8 }}
        {{- end }}
        {{- if .Values.supersetCeleryBeat.podLabels }}
          {{- toYaml .Values.supersetCeleryBeat.podLabels | nindent 8 }}
        {{- end }}
@@ -28,9 +28,6 @@ metadata:
    chart: {{ template "superset.chart" . }}
    release: {{ .Release.Name }}
    heritage: {{ .Release.Service }}
    {{- if .Values.extraLabels }}
      {{- toYaml .Values.extraLabels | nindent 4 }}
    {{- end }}
  {{- if .Values.supersetCeleryFlower.deploymentAnnotations }}
  annotations: {{- toYaml .Values.supersetCeleryFlower.deploymentAnnotations | nindent 4 }}
  {{- end }}
@@ -50,9 +47,6 @@ spec:
      labels:
        app: "{{ template "superset.name" . }}-flower"
        release: {{ .Release.Name }}
        {{- if .Values.extraLabels }}
          {{- toYaml .Values.extraLabels | nindent 8 }}
        {{- end }}
        {{- if .Values.supersetCeleryFlower.podLabels }}
          {{- toYaml .Values.supersetCeleryFlower.podLabels | nindent 8 }}
        {{- end }}
@@ -27,9 +27,6 @@ metadata:
    chart: {{ template "superset.chart" . }}
    release: {{ .Release.Name }}
    heritage: {{ .Release.Service }}
    {{- if .Values.extraLabels }}
      {{- toYaml .Values.extraLabels | nindent 4 }}
    {{- end }}
  {{- if .Values.supersetWorker.deploymentLabels }}
    {{- toYaml .Values.supersetWorker.deploymentLabels | nindent 4 }}
  {{- end }}
@@ -67,9 +64,6 @@ spec:
      labels:
        app: {{ template "superset.name" . }}-worker
        release: {{ .Release.Name }}
        {{- if .Values.extraLabels }}
          {{- toYaml .Values.extraLabels | nindent 8 }}
        {{- end }}
        {{- if .Values.supersetWorker.podLabels }}
          {{- toYaml .Values.supersetWorker.podLabels | nindent 8 }}
        {{- end }}
@@ -28,9 +28,6 @@ metadata:
    chart: {{ template "superset.chart" . }}
    release: {{ .Release.Name }}
    heritage: {{ .Release.Service }}
    {{- if .Values.extraLabels }}
      {{- toYaml .Values.extraLabels | nindent 4 }}
    {{- end }}
  {{- if .Values.supersetWebsockets.deploymentAnnotations }}
  annotations: {{- toYaml .Values.supersetWebsockets.deploymentAnnotations | nindent 4 }}
  {{- end }}
@@ -53,9 +50,6 @@ spec:
      labels:
        app: "{{ template "superset.name" . }}-ws"
        release: {{ .Release.Name }}
        {{- if .Values.extraLabels }}
          {{- toYaml .Values.extraLabels | nindent 8 }}
        {{- end }}
        {{- if .Values.supersetWebsockets.podLabels }}
          {{- toYaml .Values.supersetWebsockets.podLabels | nindent 8 }}
        {{- end }}
@@ -27,9 +27,6 @@ metadata:
    chart: {{ template "superset.chart" . }}
    release: {{ .Release.Name }}
    heritage: {{ .Release.Service }}
    {{- if .Values.extraLabels }}
      {{- toYaml .Values.extraLabels | nindent 4 }}
    {{- end }}
  {{- if .Values.supersetNode.deploymentLabels }}
    {{- toYaml .Values.supersetNode.deploymentLabels | nindent 4 }}
  {{- end }}
@@ -69,9 +66,6 @@ spec:
      labels:
        app: {{ template "superset.name" . }}
        release: {{ .Release.Name }}
        {{- if .Values.extraLabels }}
          {{- toYaml .Values.extraLabels | nindent 8 }}
        {{- end }}
        {{- if .Values.supersetNode.podLabels }}
          {{- toYaml .Values.supersetNode.podLabels | nindent 8 }}
        {{- end }}
@@ -27,9 +27,6 @@ metadata:
    chart: {{ template "superset.chart" . }}
    release: {{ .Release.Name }}
    heritage: {{ .Release.Service }}
    {{- if .Values.extraLabels }}
      {{- toYaml .Values.extraLabels | nindent 4 }}
    {{- end }}
spec:
  scaleTargetRef:
    apiVersion: apps/v1
@@ -27,9 +27,6 @@ metadata:
    chart: {{ template "superset.chart" . }}
    release: {{ .Release.Name }}
    heritage: {{ .Release.Service }}
    {{- if .Values.extraLabels }}
      {{- toYaml .Values.extraLabels | nindent 4 }}
    {{- end }}
spec:
  scaleTargetRef:
    apiVersion: apps/v1
@@ -29,9 +29,6 @@ metadata:
    chart: {{ template "superset.chart" . }}
    release: {{ .Release.Name }}
    heritage: {{ .Release.Service }}
    {{- if .Values.extraLabels }}
      {{- toYaml .Values.extraLabels | nindent 4 }}
    {{- end }}
  {{- with .Values.ingress.annotations }}
  annotations: {{- toYaml . | nindent 4 }}
  {{- end }}
@@ -23,10 +23,6 @@ kind: Job
metadata:
  name: {{ template "superset.fullname" . }}-init-db
  namespace: {{ .Release.Namespace }}
  {{- if .Values.extraLabels }}
  labels:
    {{- toYaml .Values.extraLabels | nindent 4 }}
  {{- end }}
  {{- if .Values.init.jobAnnotations }}
  annotations: {{- toYaml .Values.init.jobAnnotations | nindent 4 }}
  {{- end }}
@@ -31,9 +31,6 @@ metadata:
    chart: {{ template "superset.chart" $ }}
    release: {{ $.Release.Name }}
    heritage: {{ $.Release.Service }}
    {{- if $.Values.extraLabels }}
      {{- toYaml $.Values.extraLabels | nindent 4 }}
    {{- end }}
spec:
  {{- if .minAvailable }}
  minAvailable: {{ .minAvailable }}
@@ -31,9 +31,6 @@ metadata:
    chart: {{ template "superset.chart" $ }}
    release: {{ $.Release.Name }}
    heritage: {{ $.Release.Service }}
    {{- if $.Values.extraLabels }}
      {{- toYaml $.Values.extraLabels | nindent 4 }}
    {{- end }}
spec:
  {{- if .minAvailable }}
  minAvailable: {{ .minAvailable }}
@@ -31,9 +31,6 @@ metadata:
    chart: {{ template "superset.chart" $ }}
    release: {{ $.Release.Name }}
    heritage: {{ $.Release.Service }}
    {{- if $.Values.extraLabels }}
      {{- toYaml $.Values.extraLabels | nindent 4 }}
    {{- end }}
spec:
  {{- if .minAvailable }}
  minAvailable: {{ .minAvailable }}
@@ -31,9 +31,6 @@ metadata:
    chart: {{ template "superset.chart" $ }}
    release: {{ $.Release.Name }}
    heritage: {{ $.Release.Service }}
    {{- if $.Values.extraLabels }}
      {{- toYaml $.Values.extraLabels | nindent 4 }}
    {{- end }}
spec:
  {{- if .minAvailable }}
  minAvailable: {{ .minAvailable }}
@@ -31,9 +31,6 @@ metadata:
    chart: {{ template "superset.chart" $ }}
    release: {{ $.Release.Name }}
    heritage: {{ $.Release.Service }}
    {{- if $.Values.extraLabels }}
      {{- toYaml $.Values.extraLabels | nindent 4 }}
    {{- end }}
spec:
  {{- if .minAvailable }}
  minAvailable: {{ .minAvailable }}
@@ -27,9 +27,6 @@ metadata:
    chart: {{ template "superset.chart" . }}
    release: "{{ .Release.Name }}"
    heritage: "{{ .Release.Service }}"
    {{- if .Values.extraLabels }}
      {{- toYaml .Values.extraLabels | nindent 4 }}
    {{- end }}
type: Opaque
stringData:
  REDIS_HOST: {{ tpl .Values.supersetNode.connections.redis_host . | quote }}
@@ -27,9 +27,6 @@ metadata:
    chart: {{ template "superset.chart" . }}
    release: "{{ .Release.Name }}"
    heritage: "{{ .Release.Service }}"
    {{- if .Values.extraLabels }}
      {{- toYaml .Values.extraLabels | nindent 4 }}
    {{- end }}
type: Opaque
stringData:
  superset_config.py: |
@@ -28,9 +28,6 @@ metadata:
    chart: {{ template "superset.chart" . }}
    release: "{{ .Release.Name }}"
    heritage: "{{ .Release.Service }}"
    {{- if .Values.extraLabels }}
      {{- toYaml .Values.extraLabels | nindent 4 }}
    {{- end }}
type: Opaque
stringData:
  config.json: |
@@ -28,9 +28,6 @@ metadata:
    chart: {{ template "superset.chart" . }}
    release: {{ .Release.Name }}
    heritage: {{ .Release.Service }}
    {{- if .Values.extraLabels }}
      {{- toYaml .Values.extraLabels | nindent 4 }}
    {{- end }}
  {{- with .Values.supersetCeleryFlower.service.annotations }}
  annotations: {{- toYaml . | nindent 4 }}
  {{- end }}
@@ -28,9 +28,6 @@ metadata:
    chart: {{ template "superset.chart" . }}
    release: {{ .Release.Name }}
    heritage: {{ .Release.Service }}
    {{- if .Values.extraLabels }}
      {{- toYaml .Values.extraLabels | nindent 4 }}
    {{- end }}
  {{- with .Values.supersetWebsockets.service.annotations }}
  annotations: {{- toYaml . | nindent 4 }}
  {{- end }}
@@ -27,9 +27,6 @@ metadata:
    chart: {{ template "superset.chart" . }}
    release: {{ .Release.Name }}
    heritage: {{ .Release.Service }}
    {{- if .Values.extraLabels }}
      {{- toYaml .Values.extraLabels | nindent 4 }}
    {{- end }}
  {{- with .Values.service.annotations }}
  annotations: {{- toYaml . | nindent 4 }}
  {{- end }}
@@ -32,9 +32,6 @@ metadata:
    kubernetes.io/cluster-service: "true"
    {{- end }}
    addonmanager.kubernetes.io/mode: Reconcile
    {{- if .Values.extraLabels }}
      {{- toYaml .Values.extraLabels | nindent 4 }}
    {{- end }}
  {{- if .Values.serviceAccount.annotations }}
  annotations: {{- toYaml .Values.serviceAccount.annotations | nindent 4 }}
  {{- end }}
@@ -27,9 +27,6 @@ nameOverride: ~
# -- Provide a name to override the full names of resources
fullnameOverride: ~

# -- Labels to be added to all resources
extraLabels: {}

# -- User ID directive. This user must have enough permissions to run the bootstrap script
# Running containers as root is not recommended in production. Change this to another UID - e.g. 1000 to be more secure
runAsUser: 0
@@ -870,7 +867,7 @@ redis:
  ## docker registry secret names (list)
  # pullSecrets: nil
  ##
  ## Configure persistence
  ## Configure persistance
  persistence:
    ##
    ## Use a PVC to persist data.
@@ -42,7 +42,7 @@ dependencies = [
    "colorama",
    "croniter>=0.3.28",
    "cron-descriptor",
    "cryptography>=42.0.4, <44.0.0",
    "cryptography>=42.0.4, <43.0.0",
    "deprecation>=2.1.0, <2.2.0",
    "flask>=2.2.5, <3.0.0",
    "flask-appbuilder>=4.5.0, <5.0.0",
@@ -57,7 +57,6 @@ dependencies = [
    "geopy",
    "gunicorn>=22.0.0; sys_platform != 'win32'",
    "hashids>=1.3.1, <2",
    # known issue with holidays 0.26.0 and above related to prophet lib #25017
    "holidays>=0.25, <0.26",
    "humanize",
    "importlib_metadata",
@@ -112,15 +111,14 @@ bigquery = [
clickhouse = ["clickhouse-connect>=0.5.14, <1.0"]
cockroachdb = ["cockroachdb>=0.3.5, <0.4"]
cors = ["flask-cors>=2.0.0"]
crate = ["sqlalchemy-cratedb>=0.40.1, <1"]
crate = ["crate[sqlalchemy]>=0.26.0, <0.27"]
databend = ["databend-sqlalchemy>=0.3.2, <1.0"]
databricks = [
    "databricks-sql-connector>=2.0.2, <3",
    "sqlalchemy-databricks>=0.2.0",
]
db2 = ["ibm-db-sa>0.3.8, <=0.4.0"]
denodo = ["denodo-sqlalchemy~=1.0.6"]
dremio = ["sqlalchemy-dremio>=1.2.1, <4"]
dremio = ["sqlalchemy-dremio>=1.1.5, <1.3"]
drill = ["sqlalchemy-drill>=1.1.4, <2"]
druid = ["pydruid>=0.6.5,<0.7"]
duckdb = ["duckdb-engine>=0.9.5, <0.10"]
@@ -188,7 +186,6 @@ development = [
    "pip-compile-multi",
    "pre-commit",
    "progress>=1.5,<2",
    "psutil",
    "pyfakefs",
    "pyinstrument>=4.0.2,<5",
    "pylint",
@@ -448,7 +445,6 @@ select = [
    "E7",
    "E9",
    "F",
    "PT009",
    "TRY201",
]
ignore = []
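Each extras group above maps to a pip extra; a minimal sketch of installing one of them (the `denodo` extra from this diff), assuming a plain PyPI install:

```python
# Sketch: install Superset with the "denodo" extra defined in pyproject.toml above.
# Assumes installation from PyPI; pinning and virtualenv setup are omitted.
import subprocess
import sys

subprocess.run(
    [sys.executable, "-m", "pip", "install", "apache-superset[denodo]"],
    check=True,
)
```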
@@ -15,14 +15,12 @@ apispec[yaml]==6.3.0
    # via flask-appbuilder
apsw==3.46.0.0
    # via shillelagh
async-timeout==4.0.3
    # via redis
attrs==24.2.0
attrs==23.2.0
    # via
    #   cattrs
    #   jsonschema
    #   requests-cache
babel==2.16.0
babel==2.15.0
    # via flask-babel
backoff==2.2.1
    # via apache-superset
@@ -30,7 +28,7 @@ bcrypt==4.1.3
    # via paramiko
billiard==4.2.0
    # via celery
blinker==1.9.0
blinker==1.8.2
    # via flask
bottleneck==1.3.8
    # via pandas
@@ -42,13 +40,13 @@ cachelib==0.9.0
    #   flask-session
cachetools==5.3.3
    # via google-auth
cattrs==24.1.2
cattrs==23.2.3
    # via requests-cache
celery==5.4.0
    # via apache-superset
certifi==2024.2.2
    # via requests
cffi==1.17.1
cffi==1.16.0
    # via
    #   cryptography
    #   pynacl
@@ -80,7 +78,7 @@ cron-descriptor==1.4.3
    # via apache-superset
croniter==2.0.5
    # via apache-superset
cryptography==42.0.8
cryptography==42.0.7
    # via
    #   apache-superset
    #   paramiko
@@ -93,8 +91,6 @@ dnspython==2.6.1
    # via email-validator
email-validator==2.1.1
    # via flask-appbuilder
exceptiongroup==1.2.2
    # via cattrs
flask==2.3.3
    # via
    #   apache-superset
@@ -109,7 +105,7 @@ flask==2.3.3
    #   flask-session
    #   flask-sqlalchemy
    #   flask-wtf
flask-appbuilder==4.5.2
flask-appbuilder==4.5.0
    # via apache-superset
flask-babel==2.0.0
    # via flask-appbuilder
@@ -148,9 +144,7 @@ geopy==2.4.1
google-auth==2.29.0
    # via shillelagh
greenlet==3.0.3
    # via
    #   shillelagh
    #   sqlalchemy
    # via shillelagh
gunicorn==22.0.0
    # via apache-superset
hashids==1.3.1
@@ -197,7 +191,7 @@ markdown==3.6
    # via apache-superset
markdown-it-py==3.0.0
    # via rich
markupsafe==3.0.2
markupsafe==2.1.5
    # via
    #   jinja2
    #   mako
@@ -219,7 +213,7 @@ nh3==0.2.17
    # via apache-superset
numba==0.59.1
    # via pandas
numexpr==2.10.1
numexpr==2.10.0
    # via
    #   -r requirements/base.in
    #   pandas
@@ -301,7 +295,7 @@ python-dotenv==1.0.1
    # via apache-superset
python-geohash==0.8.5
    # via apache-superset
pytz==2024.2
pytz==2024.1
    # via
    #   croniter
    #   flask-babel
@@ -360,11 +354,10 @@ sshtunnel==0.4.0
    # via apache-superset
tabulate==0.8.10
    # via apache-superset
typing-extensions==4.12.2
typing-extensions==4.12.0
    # via
    #   alembic
    #   apache-superset
    #   cattrs
    #   flask-limiter
    #   limits
    #   shillelagh
@@ -387,7 +380,7 @@ vine==5.1.0
    #   kombu
wcwidth==0.2.13
    # via prompt-toolkit
werkzeug==3.1.3
werkzeug==3.0.6
    # via
    #   -r requirements/base.in
    #   flask
@@ -396,7 +389,7 @@ werkzeug==3.1.3
    #   flask-login
wrapt==1.16.0
    # via deprecated
wtforms==3.2.1
wtforms==3.1.2
    # via
    #   apache-superset
    #   flask-appbuilder
@@ -174,8 +174,6 @@ protobuf==4.23.0
    #   googleapis-common-protos
    #   grpcio-status
    #   proto-plus
psutil==6.0.0
    # via apache-superset
psycopg2-binary==2.9.6
    # via apache-superset
pure-sasl==0.6.2
@@ -188,7 +186,7 @@ pyee==11.0.1
    # via playwright
pyfakefs==5.3.5
    # via apache-superset
pyhive[presto]==0.7.0
pyhive[hive_pure_sasl]==0.7.0
    # via apache-superset
pyinstrument==4.4.0
    # via apache-superset
@@ -235,16 +233,6 @@ thrift==0.16.0
    #   thrift-sasl
thrift-sasl==0.4.3
    # via apache-superset
tomli==2.0.1
    # via
    #   build
    #   coverage
    #   pip-tools
    #   pylint
    #   pyproject-api
    #   pyproject-hooks
    #   pytest
    #   tox
tomlkit==0.12.5
    # via pylint
toposort==1.10
@@ -1,222 +0,0 @@
#!/usr/bin/env python3
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

import platform
import subprocess
import sys
from typing import Callable, Optional, Set, Tuple

import click
import psutil
from packaging.version import InvalidVersion, Version


class Requirement:
    def __init__(
        self,
        name: str,
        ideal_range: Tuple[Version, Version],
        supported_range: Tuple[Version, Version],
        req_type: str,
        command: str,
        version_post_process: Optional[Callable[[str], str]] = None,
    ):
        self.name = name
        self.ideal_range = ideal_range
        self.supported_range = supported_range
        self.req_type = req_type
        self.command = command
        self.version_post_process = version_post_process
        self.version = self.get_version()
        self.status = self.check_version()

    def get_version(self) -> Optional[str]:
        try:
            version = subprocess.check_output(self.command, shell=True).decode().strip()
            if self.version_post_process:
                version = self.version_post_process(version)
            return version.split()[-1]
        except subprocess.CalledProcessError:
            return None

    def check_version(self) -> str:
        if self.version is None:
            return "❌ Not Installed"

        try:
            version_number = Version(self.version)
        except InvalidVersion:
            return "❌ Invalid Version Format"

        ideal_min, ideal_max = self.ideal_range
        supported_min, supported_max = self.supported_range

        if ideal_min <= version_number <= ideal_max:
            return "✅ Ideal"
        elif supported_min <= version_number:
            return "🟡 Supported"
        else:
            return "❌ Unsupported"

    def format_result(self) -> str:
        ideal_range_str = f"{self.ideal_range[0]} - {self.ideal_range[1]}"
        supported_range_str = f"{self.supported_range[0]} - {self.supported_range[1]}"
        return f"{self.status.split()[0]} {self.name:<25} {self.version or 'N/A':<25} {ideal_range_str:<25} {supported_range_str:<25}"


def check_memory(min_gb: int) -> str:
    total_memory = psutil.virtual_memory().total / (1024**3)
    if total_memory >= min_gb:
        return f"✅ Memory: {total_memory:.2f} GB"
    else:
        return f"❌ Memory: {total_memory:.2f} GB (Minimum required: {min_gb} GB)"


def get_cpu_info() -> str:
    cpu_count = psutil.cpu_count(logical=True)
    cpu_freq = psutil.cpu_freq()
    cpu_info = (
        f"{cpu_count} cores at {cpu_freq.current:.2f} MHz"
        if cpu_freq
        else f"{cpu_count} cores"
    )
    return f"CPU: {cpu_info}"


def get_docker_platform() -> str:
    try:
        output = (
            subprocess.check_output(
                "docker info --format '{{.OperatingSystem}}'", shell=True
            )
            .decode()
            .strip()
        )
        if "Docker Desktop" in output:
            return f"Docker Platform: {output} ({platform.system()})"
        return f"Docker Platform: {output}"
    except subprocess.CalledProcessError:
        return "Docker Platform: ❌ Not Detected"


@click.command(
    help="""
This script checks the local environment for various software versions and other requirements, providing feedback on whether they are ideal, supported, or unsupported.
"""
)
@click.option(
    "--docker", is_flag=True, help="Check Docker and Docker Compose requirements"
)
@click.option(
    "--frontend",
    is_flag=True,
    help="Check frontend requirements (npm, Node.js, memory)",
)
@click.option("--backend", is_flag=True, help="Check backend requirements (Python)")
def main(docker: bool, frontend: bool, backend: bool) -> None:
    requirements = [
        Requirement(
            "python",
            (Version("3.10.0"), Version("3.10.999")),
            (Version("3.9.0"), Version("3.11.999")),
            "backend",
            "python --version",
        ),
        Requirement(
            "npm",
            (Version("10.0.0"), Version("999.999.999")),
            (Version("10.0.0"), Version("999.999.999")),
            "frontend",
            "npm -v",
        ),
        Requirement(
            "node",
            (Version("20.0.0"), Version("20.999.999")),
            (Version("20.0.0"), Version("20.999.999")),
            "frontend",
            "node -v",
        ),
        Requirement(
            "docker",
            (Version("20.10.0"), Version("999.999.999")),
            (Version("19.0.0"), Version("999.999.999")),
            "docker",
            "docker --version",
            lambda v: v.split(",")[0],
        ),
        Requirement(
            "docker-compose",
            (Version("2.28.0"), Version("999.999.999")),
            (Version("1.29.0"), Version("999.999.999")),
            "docker",
            "docker-compose --version",
        ),
        Requirement(
            "git",
            (Version("2.30.0"), Version("999.999.999")),
            (Version("2.20.0"), Version("999.999.999")),
            "backend",
            "git --version",
        ),
    ]

    print("==================")
    print("System Information")
    print("==================")
    print(f"OS: {platform.system()} {platform.release()}")
    print(get_cpu_info())
    print(get_docker_platform())
    print("\n")

    check_req_types: Set[str] = set()
    if docker:
        check_req_types.add("docker")
    if frontend:
        check_req_types.add("frontend")
    if backend:
        check_req_types.add("backend")
    if not check_req_types:
        check_req_types.update(["docker", "frontend", "backend"])

    headers = ["Status", "Software", "Version Found", "Ideal Range", "Supported Range"]
    row_format = "{:<2} {:<25} {:<25} {:<25} {:<25}"

    print("=" * 100)
    print(row_format.format(*headers))
    print("=" * 100)

    all_ok = True
    for requirement in requirements:
        if requirement.req_type in check_req_types:
            result = requirement.format_result()
            if "❌" in requirement.status:
                all_ok = False
            print(result)

    if "frontend" in check_req_types:
        memory_check = check_memory(12)
        if "❌" in memory_check:
            all_ok = False
        print(memory_check)

    if not all_ok:
        sys.exit(1)


if __name__ == "__main__":
    main()
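The removed checker exposed a click CLI with `--docker`, `--frontend`, and `--backend` flags; a hedged invocation sketch (the file name is an assumption, since this hunk does not show the script's path):

```python
# Hypothetical invocation of the removed environment checker.
# "check-env.py" is an assumed file name; the diff does not include the path.
import subprocess

result = subprocess.run(["python", "check-env.py", "--frontend", "--docker"])
if result.returncode != 0:
    print("Environment does not meet the frontend/docker requirements")
```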
@@ -37,7 +37,7 @@ def generate_build_id() -> str:


def run_cypress_for_test_file(
    test_file: str, retries: int, use_dashboard: bool, group: str, dry_run: bool, i: int
    test_file: str, retries: int, use_dashboard: bool, group: str, dry_run: bool
) -> int:
    """Runs Cypress for a single test file and retries upon failure."""
    cypress_cmd = "./node_modules/.bin/cypress run"
@@ -47,33 +47,31 @@ def run_cypress_for_test_file(
    browser = os.getenv("CYPRESS_BROWSER", "chrome")
    chrome_flags = "--disable-dev-shm-usage"

    for attempt in range(retries):
        # Create Cypress command for a single test file
        cmd: str = ""
        if use_dashboard:
            # If/when we want to use cypress' dashboard feature to record the run
            group_id = f"matrix{group}-file{i}-{attempt}"
            cmd = (
                f"{XVFB_PRE_CMD} "
                f'{cypress_cmd} --spec "{test_file}" --browser {browser} '
                f"--record --group {group_id} --tag {REPO},{GITHUB_EVENT_NAME} "
                f"--ci-build-id {build_id} "
                f"-- {chrome_flags}"
            )
        else:
            os.environ.pop("CYPRESS_RECORD_KEY", None)
            cmd = (
                f"{XVFB_PRE_CMD} "
                f"{cypress_cmd} --browser {browser} "
                f'--spec "{test_file}" '
                f"-- {chrome_flags}"
            )
        print(f"RUN: {cmd} (Attempt {attempt + 1}/{retries})")
        if dry_run:
            # Print the command instead of executing it
            print(f"DRY RUN: {cmd}")
            return 0
    # Create Cypress command for a single test file
    if use_dashboard:
        cmd = (
            f"{XVFB_PRE_CMD} "
            f'{cypress_cmd} --spec "{test_file}" --browser {browser} '
            f"--record --group {group} --tag {REPO},{GITHUB_EVENT_NAME} "
            f"--parallel --ci-build-id {build_id} "
            f"-- {chrome_flags}"
        )
    else:
        os.environ.pop("CYPRESS_RECORD_KEY", None)
        cmd = (
            f"{XVFB_PRE_CMD} "
            f"{cypress_cmd} --browser {browser} "
            f'--spec "{test_file}" '
            f"-- {chrome_flags}"
        )

    if dry_run:
        # Print the command instead of executing it
        print(f"DRY RUN: {cmd}")
        return 0

    for attempt in range(retries):
        print(f"RUN: {cmd} (Attempt {attempt + 1}/{retries})")
        process = subprocess.Popen(
            cmd,
            shell=True,
@@ -161,9 +159,9 @@ def main() -> None:

    # Run each test file independently with retry logic or dry-run
    processed_file_count: int = 0
    for i, test_file in enumerate(spec_list):
    for test_file in spec_list:
        result = run_cypress_for_test_file(
            test_file, args.retries, args.use_dashboard, args.group, args.dry_run, i
            test_file, args.retries, args.use_dashboard, args.group, args.dry_run
        )
        if result != 0:
            print(f"Exiting due to failure in {test_file}")
@@ -19,7 +19,7 @@ under the License.

# Utility script to run tests faster

By default, tests will be run using the Postgres container defined in the `docker compose` file (`docker-compose.yml`) at the root of the repo,
By default, tests will be run using the Postgres container defined in the `docker-compose` file at the root of the repo,
so prior to using this script make sure to launch the dev containers.

You can use a different DB backend by defining the `SUPERSET__SQLALCHEMY_DATABASE_URI` env var.
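A minimal sketch of overriding the DB backend before launching the script, per the env var above; the URI and the script path are placeholders, not values from this README:

```python
# Sketch: point the test utility at a different backend via
# SUPERSET__SQLALCHEMY_DATABASE_URI. URI and script path are placeholders.
import os
import subprocess

env = dict(
    os.environ,
    SUPERSET__SQLALCHEMY_DATABASE_URI="postgresql://user:pass@localhost:5432/superset_tests",
)
subprocess.run(["bash", "scripts/tests/run.sh"], env=env, check=True)
```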