Compare commits
304 Commits
dnd
...
semantic-l
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
508aad1603 | ||
|
|
954cf32ca4 | ||
|
|
552c685a6b | ||
|
|
a26c91c4e2 | ||
|
|
3c8835bd75 | ||
|
|
955d8bc205 | ||
|
|
cd8e27d33c | ||
|
|
d0962bd32f | ||
|
|
28870168cd | ||
|
|
87d15d32c4 | ||
|
|
7d9a8a0c5a | ||
|
|
ddba88ffad | ||
|
|
1e50422a66 | ||
|
|
246dbd7f5c | ||
|
|
9b861b2848 | ||
|
|
1c35c3f6d0 | ||
|
|
b71654877f | ||
|
|
cd447ca1fd | ||
|
|
01ac966b83 | ||
|
|
97e5f0631d | ||
|
|
b7acb7984f | ||
|
|
d3919cf24f | ||
|
|
27889651b3 | ||
|
|
361fe6fe89 | ||
|
|
8506d70242 | ||
|
|
00a53eec2d | ||
|
|
5040db859c | ||
|
|
ef4f7afa90 | ||
|
|
47db185e3b | ||
|
|
2e463078a2 | ||
|
|
4f42928b34 | ||
|
|
75fa474fce | ||
|
|
fd8c21591a | ||
|
|
4147d877fc | ||
|
|
a9dca529c1 | ||
|
|
20f1918dd6 | ||
|
|
c09a4f6f47 | ||
|
|
4e4fa53c8d | ||
|
|
07ff82f189 | ||
|
|
b7b9bfd3fe | ||
|
|
b968d1095c | ||
|
|
e10237fcc1 | ||
|
|
92438322c0 | ||
|
|
f96e90b979 | ||
|
|
b464979db1 | ||
|
|
45f883c9cd | ||
|
|
8fd3401077 | ||
|
|
89a98ab9a4 | ||
|
|
2dfc770b0f | ||
|
|
6b7b23ed78 | ||
|
|
5ac5480f35 | ||
|
|
76889c1a69 | ||
|
|
569606635b | ||
|
|
66264856a7 | ||
|
|
3eb860a663 | ||
|
|
a44980da65 | ||
|
|
7112bce961 | ||
|
|
568486a304 | ||
|
|
fea135b46c | ||
|
|
601fcb3382 | ||
|
|
0d7cc88b2b | ||
|
|
32ee160c75 | ||
|
|
5914e83436 | ||
|
|
0b5e4dd5de | ||
|
|
3a565a6c16 | ||
|
|
f60c82e4a6 | ||
|
|
91131d5996 | ||
|
|
4b0d497513 | ||
|
|
86f690d17f | ||
|
|
e9b494163b | ||
|
|
be404f9b84 | ||
|
|
11257c0536 | ||
|
|
f2b6c395cd | ||
|
|
2d35ed2391 | ||
|
|
bd65469091 | ||
|
|
a6a66ca483 | ||
|
|
4a7cdccdad | ||
|
|
61bd8f0cf2 | ||
|
|
ae10e105c2 | ||
|
|
901dca58f7 | ||
|
|
d95a3d8426 | ||
|
|
70b95ca1b9 | ||
|
|
004f02746f | ||
|
|
5d20dc57d7 | ||
|
|
05c2354997 | ||
|
|
6043e7e7e3 | ||
|
|
1ee14c5993 | ||
|
|
9764a84402 | ||
|
|
570cc3e5f8 | ||
|
|
66519c3a85 | ||
|
|
1f43138888 | ||
|
|
652d029a2d | ||
|
|
e67b1f5326 | ||
|
|
fa79a467e4 | ||
|
|
2cce0308d4 | ||
|
|
c7fd1a2f65 | ||
|
|
ab4f646ef6 | ||
|
|
d6029f5c8a | ||
|
|
c16e8f747c | ||
|
|
9c0337d092 | ||
|
|
3ef33dcb76 | ||
|
|
5a99588f57 | ||
|
|
0b34363654 | ||
|
|
55ec1152ec | ||
|
|
810d6ff480 | ||
|
|
1501af06fe | ||
|
|
6cb3cea960 | ||
|
|
675a4c7a66 | ||
|
|
7110fc9cde | ||
|
|
73e095db8e | ||
|
|
5fedb65bc0 | ||
|
|
b3526fc4ca | ||
|
|
042229bf80 | ||
|
|
06e4f4ff4c | ||
|
|
bb5be6cf54 | ||
|
|
f6f9e083ac | ||
|
|
ad0186093f | ||
|
|
073c3c72b4 | ||
|
|
d4b89de001 | ||
|
|
912538d176 | ||
|
|
cfeb7ccd31 | ||
|
|
abf90de0ca | ||
|
|
ec2509a8b4 | ||
|
|
43653d1fa1 | ||
|
|
da56bddada | ||
|
|
3347b9bf6c | ||
|
|
6663709a23 | ||
|
|
e6d0f97aab | ||
|
|
3bcd3b1683 | ||
|
|
b223f10ab5 | ||
|
|
f787aec567 | ||
|
|
2ec3aaaeea | ||
|
|
20da4eb86e | ||
|
|
27a4575f3e | ||
|
|
5fa6925522 | ||
|
|
a7e7cc30a9 | ||
|
|
e4d71c2a55 | ||
|
|
10a8d8b8ee | ||
|
|
1681f74b2e | ||
|
|
58ab4e78ff | ||
|
|
8f6dd4aba0 | ||
|
|
dba75bd897 | ||
|
|
e28d2782f1 | ||
|
|
97aea5d128 | ||
|
|
bd419d19af | ||
|
|
56ad429200 | ||
|
|
7fc9974a7c | ||
|
|
73d4332b51 | ||
|
|
10a9b4bb94 | ||
|
|
290bcc1dbb | ||
|
|
26ac832138 | ||
|
|
4db6f9e04c | ||
|
|
0fd528c7af | ||
|
|
647f21c26a | ||
|
|
89b998d6b7 | ||
|
|
695e295333 | ||
|
|
f2fc5dec11 | ||
|
|
95a465ad7c | ||
|
|
8aebfe1105 | ||
|
|
c7cec19827 | ||
|
|
ce84ab4ce2 | ||
|
|
470c593c3d | ||
|
|
04a9be04ab | ||
|
|
09b5af5945 | ||
|
|
19d5fa86fc | ||
|
|
b09e60c1ec | ||
|
|
2f81720603 | ||
|
|
0ecc69d2f1 | ||
|
|
319a131ec9 | ||
|
|
3f37cdbf9c | ||
|
|
3a811d680d | ||
|
|
a60f8d761d | ||
|
|
3580dc6cad | ||
|
|
b99fc582e4 | ||
|
|
e4f649e49c | ||
|
|
d54e227e25 | ||
|
|
39ebf7a7ad | ||
|
|
34418d7e0b | ||
|
|
d6328fcb42 | ||
|
|
e8363cf606 | ||
|
|
5747fb1e85 | ||
|
|
d823dfd2b9 | ||
|
|
baaa8c5f54 | ||
|
|
429d9b27f6 | ||
|
|
56cf7a810b | ||
|
|
bbab86a0b1 | ||
|
|
f83f952221 | ||
|
|
e14931c368 | ||
|
|
8951362852 | ||
|
|
eeb4065d7d | ||
|
|
6a46700721 | ||
|
|
790b79541b | ||
|
|
7c69ec7f24 | ||
|
|
ef395662aa | ||
|
|
e1ce553b2b | ||
|
|
b81543c18c | ||
|
|
5f67fa45ce | ||
|
|
8e0c584a92 | ||
|
|
01a9541a0e | ||
|
|
5e3acc2041 | ||
|
|
f2b54e882d | ||
|
|
54919c942a | ||
|
|
04c5517206 | ||
|
|
b1ad54220b | ||
|
|
c6821cac6f | ||
|
|
b7a5b24a54 | ||
|
|
760227d630 | ||
|
|
3ca8c998ab | ||
|
|
87bbd54d0a | ||
|
|
b630830841 | ||
|
|
9fabd7f997 | ||
|
|
fadab21493 | ||
|
|
cc972cad5a | ||
|
|
de6ac2a444 | ||
|
|
2b647d2352 | ||
|
|
7888da9e30 | ||
|
|
b576665f9a | ||
|
|
7f4c260cbe | ||
|
|
febc5d54d5 | ||
|
|
aa37e96a02 | ||
|
|
3fa5bb4138 | ||
|
|
0289028313 | ||
|
|
5e7fe81cfa | ||
|
|
02495a130f | ||
|
|
d4723ef116 | ||
|
|
c3d5edbae9 | ||
|
|
bb3452b43c | ||
|
|
996e0e1e7a | ||
|
|
daec330127 | ||
|
|
d2907b2577 | ||
|
|
17d6f4ebc4 | ||
|
|
dee063a4c5 | ||
|
|
ec36791551 | ||
|
|
0fedfe03d5 | ||
|
|
23fec55e3d | ||
|
|
212559dab2 | ||
|
|
c564655f39 | ||
|
|
dc15feb83d | ||
|
|
95169807d3 | ||
|
|
10ed60b4c1 | ||
|
|
a33f96b2fc | ||
|
|
39d5511b29 | ||
|
|
b460ca94c6 | ||
|
|
2c1a33fd32 | ||
|
|
13013bbd64 | ||
|
|
a1d24f1e4a | ||
|
|
3fa7dba094 | ||
|
|
801c84f0ef | ||
|
|
238bebebec | ||
|
|
281c0c9672 | ||
|
|
807ff513ef | ||
|
|
445bc403b8 | ||
|
|
2267b78a10 | ||
|
|
d0e80d2079 | ||
|
|
25647942fd | ||
|
|
3fba967856 | ||
|
|
e1fa374517 | ||
|
|
50d0508a92 | ||
|
|
2187fb4ab4 | ||
|
|
fe16c828cf | ||
|
|
6e1718910f | ||
|
|
2d20079a88 | ||
|
|
1f19ef92cb | ||
|
|
f4597be341 | ||
|
|
4393db57d9 | ||
|
|
409cdad264 | ||
|
|
c0cbbe393a | ||
|
|
2900258e05 | ||
|
|
2e29e33dd8 | ||
|
|
39238ef8a9 | ||
|
|
476e454384 | ||
|
|
4d462c76bd | ||
|
|
a06e6eb680 | ||
|
|
cee5ce13e0 | ||
|
|
6453980d8d | ||
|
|
f984dca5cc | ||
|
|
a77c2d550c | ||
|
|
f00f7d1c18 | ||
|
|
33ff127370 | ||
|
|
b941be01cf | ||
|
|
f4474b2e3e | ||
|
|
896947c787 | ||
|
|
4b1d92e575 | ||
|
|
2bcb66c2fc | ||
|
|
d0783da3e5 | ||
|
|
4532ccf638 | ||
|
|
c30edaf075 | ||
|
|
54f19856de | ||
|
|
ab8df1ab34 | ||
|
|
9555798d37 | ||
|
|
95c14b1fc1 | ||
|
|
b142f1956f | ||
|
|
e071e0c5a4 | ||
|
|
129b8e10a2 | ||
|
|
82d74d15ec | ||
|
|
89380638b0 | ||
|
|
c6ad0dbd3a | ||
|
|
f69cd43bd0 | ||
|
|
4c267b7ee2 | ||
|
|
7f6cdc5616 | ||
|
|
db61e4f62a | ||
|
|
68e917c3f6 | ||
|
|
96a3f2a187 | ||
|
|
c867d9379f |
15
.claude/settings.json
Normal file
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"hooks": {
|
||||
"PreToolUse": [
|
||||
{
|
||||
"matcher": "Bash",
|
||||
"hooks": [
|
||||
{
|
||||
"type": "command",
|
||||
"command": "jq -r '.tool_input.command // \"\"' | grep -qE '^git commit' && cd \"$CLAUDE_PROJECT_DIR\" && echo '🔍 Running pre-commit before commit...' && pre-commit run || true"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
2
.github/CODEOWNERS
vendored
@@ -20,7 +20,7 @@
|
||||
|
||||
# Notify PMC members of changes to GitHub Actions
|
||||
|
||||
/.github/ @villebro @geido @eschutho @rusackas @betodealmeida @nytai @mistercrunch @craig-rueda @kgabryje @dpgaspar @sadpandajoe
|
||||
/.github/ @villebro @geido @eschutho @rusackas @betodealmeida @nytai @mistercrunch @craig-rueda @kgabryje @dpgaspar @sadpandajoe @hainenber
|
||||
|
||||
# Notify PMC members of changes to required GitHub Actions
|
||||
|
||||
|
||||
@@ -10,7 +10,7 @@ jobs:
|
||||
steps:
|
||||
- name: Check if the PR is a draft
|
||||
id: check-draft
|
||||
uses: actions/github-script@v6
|
||||
uses: actions/github-script@v8
|
||||
with:
|
||||
script: |
|
||||
const isDraft = context.payload.pull_request.draft;
|
||||
|
||||
7
.github/dependabot.yml
vendored
@@ -12,10 +12,17 @@ updates:
|
||||
# not until React >= 18.0.0
|
||||
- dependency-name: "storybook"
|
||||
- dependency-name: "@storybook*"
|
||||
# remark-gfm v4+ requires react-markdown v9+, which needs React 18
|
||||
- dependency-name: "remark-gfm"
|
||||
- dependency-name: "react-markdown"
|
||||
# JSDOM v30 doesn't play well with Jest v30
|
||||
# Source: https://jestjs.io/blog#known-issues
|
||||
# GH thread: https://github.com/jsdom/jsdom/issues/3492
|
||||
- dependency-name: "jest-environment-jsdom"
|
||||
# `@swc/plugin-transform-imports` doesn't work with current Webpack-SWC hybrid setup
|
||||
# See https://github.com/apache/superset/pull/37384#issuecomment-3793991389
|
||||
# TODO: remove the plugin once Lodash usage has been migrated to a more readily tree-shakeable alternative
|
||||
- dependency-name: "@swc/plugin-transform-imports"
|
||||
directory: "/superset-frontend/"
|
||||
schedule:
|
||||
interval: "daily"
|
||||
|
||||
@@ -69,7 +69,7 @@ jobs:
|
||||
`❗ @${pull.user.login} Your base branch \`${currentBranch}\` has ` +
|
||||
'also updated `superset/migrations`.\n' +
|
||||
'\n' +
|
||||
'**Please consider rebasing your branch and [resolving potential db migration conflicts](https://github.com/apache/superset/blob/master/CONTRIBUTING.md#merging-db-migrations).**',
|
||||
'**Please consider rebasing your branch and [resolving potential db migration conflicts](https://superset.apache.org/docs/contributing/development#merging-db-migrations).**',
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
2
.github/workflows/ephemeral-env-pr-close.yml
vendored
@@ -33,7 +33,7 @@ jobs:
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: Configure AWS credentials
|
||||
uses: aws-actions/configure-aws-credentials@v5
|
||||
uses: aws-actions/configure-aws-credentials@v6
|
||||
with:
|
||||
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
|
||||
4
.github/workflows/ephemeral-env.yml
vendored
@@ -189,7 +189,7 @@ jobs:
|
||||
--extra-flags "--build-arg INCLUDE_CHROMIUM=false"
|
||||
|
||||
- name: Configure AWS credentials
|
||||
uses: aws-actions/configure-aws-credentials@v5
|
||||
uses: aws-actions/configure-aws-credentials@v6
|
||||
with:
|
||||
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
@@ -225,7 +225,7 @@ jobs:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Configure AWS credentials
|
||||
uses: aws-actions/configure-aws-credentials@v5
|
||||
uses: aws-actions/configure-aws-credentials@v6
|
||||
with:
|
||||
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
|
||||
41
.github/workflows/superset-docs-deploy.yml
vendored
@@ -1,6 +1,13 @@
|
||||
name: Docs Deployment
|
||||
|
||||
on:
|
||||
# Deploy after integration tests complete on master
|
||||
workflow_run:
|
||||
workflows: ["Python-Integration"]
|
||||
types: [completed]
|
||||
branches: [master]
|
||||
|
||||
# Also allow manual trigger and direct pushes to docs
|
||||
push:
|
||||
paths:
|
||||
- "docs/**"
|
||||
@@ -30,9 +37,10 @@ jobs:
|
||||
name: Build & Deploy
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
- name: "Checkout ${{ github.event.workflow_run.head_sha || github.sha }}"
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
ref: ${{ github.event.workflow_run.head_sha || github.sha }}
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
- name: Set up Node.js
|
||||
@@ -58,6 +66,35 @@ jobs:
|
||||
working-directory: docs
|
||||
run: |
|
||||
yarn install --check-cache
|
||||
- name: Download database diagnostics (if triggered by integration tests)
|
||||
if: github.event_name == 'workflow_run' && github.event.workflow_run.conclusion == 'success'
|
||||
uses: dawidd6/action-download-artifact@v14
|
||||
continue-on-error: true
|
||||
with:
|
||||
workflow: superset-python-integrationtest.yml
|
||||
run_id: ${{ github.event.workflow_run.id }}
|
||||
name: database-diagnostics
|
||||
path: docs/src/data/
|
||||
- name: Try to download latest diagnostics (for push/dispatch triggers)
|
||||
if: github.event_name != 'workflow_run'
|
||||
uses: dawidd6/action-download-artifact@v14
|
||||
continue-on-error: true
|
||||
with:
|
||||
workflow: superset-python-integrationtest.yml
|
||||
name: database-diagnostics
|
||||
path: docs/src/data/
|
||||
branch: master
|
||||
search_artifacts: true
|
||||
if_no_artifact_found: warn
|
||||
- name: Use diagnostics artifact if available
|
||||
working-directory: docs
|
||||
run: |
|
||||
if [ -f "src/data/databases-diagnostics.json" ]; then
|
||||
echo "Using fresh diagnostics from integration tests"
|
||||
mv src/data/databases-diagnostics.json src/data/databases.json
|
||||
else
|
||||
echo "Using committed databases.json (no artifact found)"
|
||||
fi
|
||||
- name: yarn build
|
||||
working-directory: docs
|
||||
run: |
|
||||
@@ -71,5 +108,5 @@ jobs:
|
||||
destination-github-username: "apache"
|
||||
destination-repository-name: "superset-site"
|
||||
target-branch: "asf-site"
|
||||
commit-message: "deploying docs: ${{ github.event.head_commit.message }} (apache/superset@${{ github.sha }})"
|
||||
commit-message: "deploying docs: ${{ github.event.head_commit.message || 'triggered by integration tests' }} (apache/superset@${{ github.event.workflow_run.head_sha || github.sha }})"
|
||||
user-email: dev@superset.apache.org
|
||||
|
||||
64
.github/workflows/superset-docs-verify.yml
vendored
@@ -4,24 +4,30 @@ on:
|
||||
pull_request:
|
||||
paths:
|
||||
- "docs/**"
|
||||
- "superset/db_engine_specs/**"
|
||||
- ".github/workflows/superset-docs-verify.yml"
|
||||
types: [synchronize, opened, reopened, ready_for_review]
|
||||
workflow_run:
|
||||
workflows: ["Python-Integration"]
|
||||
types: [completed]
|
||||
|
||||
# cancel previous workflow jobs for PRs
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
|
||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.event.workflow_run.head_sha || github.run_id }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
linkinator:
|
||||
# See docs here: https://github.com/marketplace/actions/linkinator
|
||||
# Only run on pull_request, not workflow_run
|
||||
if: github.event_name == 'pull_request'
|
||||
name: Link Checking
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
# Do not bump this linkinator-action version without opening
|
||||
# an ASF Infra ticket to allow the new version first!
|
||||
- uses: JustinBeckwith/linkinator-action@af984b9f30f63e796ae2ea5be5e07cb587f1bbd9 # v2.3
|
||||
- uses: JustinBeckwith/linkinator-action@f62ba0c110a76effb2ee6022cc6ce4ab161085e3 # v2.4
|
||||
continue-on-error: true # This will make the job advisory (non-blocking, no red X)
|
||||
with:
|
||||
paths: "**/*.md, **/*.mdx"
|
||||
@@ -50,8 +56,11 @@ jobs:
|
||||
https://timbr.ai/,
|
||||
https://opensource.org/license/apache-2-0,
|
||||
https://www.plaidcloud.com/
|
||||
build-deploy:
|
||||
name: Build & Deploy
|
||||
|
||||
build-on-pr:
|
||||
# Build docs when PR changes docs/** (uses committed databases.json)
|
||||
if: github.event_name == 'pull_request'
|
||||
name: Build (PR trigger)
|
||||
runs-on: ubuntu-24.04
|
||||
defaults:
|
||||
run:
|
||||
@@ -75,3 +84,50 @@ jobs:
|
||||
- name: yarn build
|
||||
run: |
|
||||
yarn build
|
||||
|
||||
build-after-tests:
|
||||
# Build docs after integration tests complete (uses fresh diagnostics)
|
||||
# Only runs if integration tests succeeded
|
||||
if: >
|
||||
github.event_name == 'workflow_run' &&
|
||||
github.event.workflow_run.conclusion == 'success'
|
||||
name: Build (after integration tests)
|
||||
runs-on: ubuntu-24.04
|
||||
defaults:
|
||||
run:
|
||||
working-directory: docs
|
||||
steps:
|
||||
- name: "Checkout PR head: ${{ github.event.workflow_run.head_sha }}"
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
ref: ${{ github.event.workflow_run.head_sha }}
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version-file: './docs/.nvmrc'
|
||||
- name: yarn install
|
||||
run: |
|
||||
yarn install --check-cache
|
||||
- name: Download database diagnostics from integration tests
|
||||
uses: dawidd6/action-download-artifact@v14
|
||||
with:
|
||||
workflow: superset-python-integrationtest.yml
|
||||
run_id: ${{ github.event.workflow_run.id }}
|
||||
name: database-diagnostics
|
||||
path: docs/src/data/
|
||||
- name: Use fresh diagnostics
|
||||
run: |
|
||||
if [ -f "src/data/databases-diagnostics.json" ]; then
|
||||
echo "Using fresh diagnostics from integration tests"
|
||||
mv src/data/databases-diagnostics.json src/data/databases.json
|
||||
else
|
||||
echo "Warning: No diagnostics artifact found, using committed data"
|
||||
fi
|
||||
- name: yarn typecheck
|
||||
run: |
|
||||
yarn typecheck
|
||||
- name: yarn build
|
||||
run: |
|
||||
yarn build
|
||||
|
||||
@@ -73,6 +73,36 @@ jobs:
|
||||
flags: python,mysql
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
verbose: true
|
||||
- name: Generate database diagnostics for docs
|
||||
if: steps.check.outputs.python
|
||||
env:
|
||||
SUPERSET_CONFIG: tests.integration_tests.superset_test_config
|
||||
SUPERSET__SQLALCHEMY_DATABASE_URI: |
|
||||
mysql+mysqldb://superset:superset@127.0.0.1:13306/superset?charset=utf8mb4&binary_prefix=true
|
||||
run: |
|
||||
python -c "
|
||||
import json
|
||||
from superset.app import create_app
|
||||
from superset.db_engine_specs.lib import generate_yaml_docs
|
||||
app = create_app()
|
||||
with app.app_context():
|
||||
docs = generate_yaml_docs()
|
||||
# Wrap in the expected format
|
||||
output = {
|
||||
'generated': '$(date -Iseconds)',
|
||||
'databases': docs
|
||||
}
|
||||
with open('databases-diagnostics.json', 'w') as f:
|
||||
json.dump(output, f, indent=2, default=str)
|
||||
print(f'Generated diagnostics for {len(docs)} databases')
|
||||
"
|
||||
- name: Upload database diagnostics artifact
|
||||
if: steps.check.outputs.python
|
||||
uses: actions/upload-artifact@v6
|
||||
with:
|
||||
name: database-diagnostics
|
||||
path: databases-diagnostics.json
|
||||
retention-days: 7
|
||||
test-postgres:
|
||||
runs-on: ubuntu-24.04
|
||||
strategy:
|
||||
|
||||
@@ -52,6 +52,7 @@ jobs:
|
||||
SUPERSET_SECRET_KEY: not-a-secret
|
||||
run: |
|
||||
pytest --durations-min=0.5 --cov=superset/sql/ ./tests/unit_tests/sql/ --cache-clear --cov-fail-under=100
|
||||
pytest --durations-min=0.5 --cov=superset/semantic_layers/ ./tests/unit_tests/semantic_layers/ --cache-clear --cov-fail-under=100
|
||||
- name: Upload code coverage
|
||||
uses: codecov/codecov-action@v5
|
||||
with:
|
||||
|
||||
6
.gitignore
vendored
@@ -61,6 +61,7 @@ tmp
|
||||
rat-results.txt
|
||||
superset/app/
|
||||
superset-websocket/config.json
|
||||
.direnv
|
||||
|
||||
# Node.js, webpack artifacts, storybook
|
||||
*.entry.js
|
||||
@@ -72,10 +73,6 @@ superset/static/assets/*
|
||||
superset/static/uploads/*
|
||||
!superset/static/uploads/.gitkeep
|
||||
superset/static/version_info.json
|
||||
superset-frontend/**/esm/*
|
||||
superset-frontend/**/lib/*
|
||||
superset-frontend/**/storybook-static/*
|
||||
superset-frontend/migration-storybook.log
|
||||
yarn-error.log
|
||||
*.map
|
||||
*.min.js
|
||||
@@ -139,3 +136,4 @@ PROJECT.md
|
||||
.env.local
|
||||
oxc-custom-build/
|
||||
*.code-workspace
|
||||
*.duckdb
|
||||
|
||||
@@ -27,6 +27,7 @@ repos:
|
||||
args: [--check-untyped-defs]
|
||||
exclude: ^superset-extensions-cli/
|
||||
additional_dependencies: [
|
||||
types-cachetools,
|
||||
types-simplejson,
|
||||
types-python-dateutil,
|
||||
types-requests,
|
||||
@@ -49,7 +50,7 @@ repos:
|
||||
hooks:
|
||||
- id: check-docstring-first
|
||||
- id: check-added-large-files
|
||||
exclude: ^.*\.(geojson)$|^docs/static/img/screenshots/.*|^superset-frontend/CHANGELOG\.md$
|
||||
exclude: ^.*\.(geojson)$|^docs/static/img/screenshots/.*|^superset-frontend/CHANGELOG\.md$|^superset/examples/.*/data\.parquet$
|
||||
- id: check-yaml
|
||||
exclude: ^helm/superset/templates/
|
||||
- id: debug-statements
|
||||
@@ -142,3 +143,18 @@ repos:
|
||||
else
|
||||
echo "No Python files to lint."
|
||||
fi
|
||||
- id: db-engine-spec-metadata
|
||||
name: database engine spec metadata validation
|
||||
entry: python superset/db_engine_specs/lint_metadata.py --strict
|
||||
language: system
|
||||
files: ^superset/db_engine_specs/.*\.py$
|
||||
exclude: ^superset/db_engine_specs/(base|lib|lint_metadata|__init__)\.py$
|
||||
pass_filenames: false
|
||||
- repo: local
|
||||
hooks:
|
||||
- id: feature-flags-sync
|
||||
name: feature flags documentation sync
|
||||
entry: bash -c 'python scripts/extract_feature_flags.py > docs/static/feature-flags.json.tmp && if ! diff -q docs/static/feature-flags.json docs/static/feature-flags.json.tmp > /dev/null 2>&1; then mv docs/static/feature-flags.json.tmp docs/static/feature-flags.json && echo "Updated docs/static/feature-flags.json" && exit 1; else rm docs/static/feature-flags.json.tmp; fi'
|
||||
language: system
|
||||
files: ^superset/config\.py$
|
||||
pass_filenames: false
|
||||
|
||||
@@ -67,22 +67,15 @@ temporary_superset_ui/*
|
||||
# skip license checks for auto-generated test snapshots
|
||||
.*snap
|
||||
|
||||
# docs overrides for third party logos we don't have the rights to
|
||||
google-big-query.svg
|
||||
google-sheets.svg
|
||||
ibm-db2.svg
|
||||
postgresql.svg
|
||||
snowflake.svg
|
||||
ydb.svg
|
||||
loading.svg
|
||||
|
||||
# docs third-party logos, i.e. docs/static/img/logos/*
|
||||
# docs third-party logos (database logos, org logos, etc.)
|
||||
databases/*
|
||||
logos/*
|
||||
|
||||
# docs-related
|
||||
erd.puml
|
||||
erd.svg
|
||||
intro_header.txt
|
||||
TODO.md
|
||||
|
||||
# for LLMs
|
||||
llm-context.md
|
||||
|
||||
45
AGENTS.md
@@ -2,6 +2,27 @@
|
||||
|
||||
Apache Superset is a data visualization platform with Flask/Python backend and React/TypeScript frontend.
|
||||
|
||||
## ⚠️ CRITICAL: Always Run Pre-commit Before Pushing
|
||||
|
||||
**ALWAYS run `pre-commit run --all-files` before pushing commits.** CI will fail if pre-commit checks don't pass. This is non-negotiable.
|
||||
|
||||
```bash
|
||||
# Stage your changes first
|
||||
git add .
|
||||
|
||||
# Run pre-commit on all files
|
||||
pre-commit run --all-files
|
||||
|
||||
# If there are auto-fixes, stage them and commit
|
||||
git add .
|
||||
git commit --amend # or new commit
|
||||
```
|
||||
|
||||
Common pre-commit failures:
|
||||
- **Formatting** - black, prettier, eslint will auto-fix
|
||||
- **Type errors** - mypy failures need manual fixes
|
||||
- **Linting** - ruff, pylint issues need manual fixes
|
||||
|
||||
## ⚠️ CRITICAL: Ongoing Refactors (What NOT to Do)
|
||||
|
||||
**These migrations are actively happening - avoid deprecated patterns:**
|
||||
@@ -80,6 +101,30 @@ superset/
|
||||
- **UPDATING.md**: Add breaking changes here
|
||||
- **Docstrings**: Required for new functions/classes
|
||||
|
||||
## Developer Portal: Storybook-to-MDX Documentation
|
||||
|
||||
The Developer Portal auto-generates MDX documentation from Storybook stories. **Stories are the single source of truth.**
|
||||
|
||||
### Core Philosophy
|
||||
- **Fix issues in the STORY, not the generator** - When something doesn't render correctly, update the story file first
|
||||
- **Generator should be lightweight** - It extracts and passes through data; avoid special cases
|
||||
- **Stories define everything** - Props, controls, galleries, examples all come from story metadata
|
||||
|
||||
### Story Requirements for Docs Generation
|
||||
- Use `export default { title: '...' }` (inline), not `const meta = ...; export default meta;`
|
||||
- Name interactive stories `Interactive${ComponentName}` (e.g., `InteractiveButton`)
|
||||
- Define `args` for default prop values
|
||||
- Define `argTypes` at the story level (not meta level) with control types and descriptions
|
||||
- Use `parameters.docs.gallery` for size×style variant grids
|
||||
- Use `parameters.docs.sampleChildren` for components that need children
|
||||
- Use `parameters.docs.liveExample` for custom live code blocks
|
||||
- Use `parameters.docs.staticProps` for complex object props that can't be parsed inline
|
||||
|
||||
### Generator Location
|
||||
- Script: `docs/scripts/generate-superset-components.mjs`
|
||||
- Wrapper: `docs/src/components/StorybookWrapper.jsx`
|
||||
- Output: `docs/developer_portal/components/`
|
||||
|
||||
## Architecture Patterns
|
||||
|
||||
### Security & Features
|
||||
|
||||
20
Dockerfile
@@ -26,9 +26,6 @@ ARG BUILDPLATFORM=${BUILDPLATFORM:-amd64}
|
||||
# Include translations in the final build
|
||||
ARG BUILD_TRANSLATIONS="false"
|
||||
|
||||
# Build arg to pre-populate examples DuckDB file
|
||||
ARG LOAD_EXAMPLES_DUCKDB="false"
|
||||
|
||||
######################################################################
|
||||
# superset-node-ci used as a base for building frontend assets and CI
|
||||
######################################################################
|
||||
@@ -146,9 +143,6 @@ RUN if [ "${BUILD_TRANSLATIONS}" = "true" ]; then \
|
||||
######################################################################
|
||||
FROM python-base AS python-common
|
||||
|
||||
# Re-declare build arg to receive it in this stage
|
||||
ARG LOAD_EXAMPLES_DUCKDB
|
||||
|
||||
ENV SUPERSET_HOME="/app/superset_home" \
|
||||
HOME="/app/superset_home" \
|
||||
SUPERSET_ENV="production" \
|
||||
@@ -202,17 +196,9 @@ RUN /app/docker/apt-install.sh \
|
||||
libecpg-dev \
|
||||
libldap2-dev
|
||||
|
||||
# Pre-load examples DuckDB file if requested
|
||||
RUN if [ "$LOAD_EXAMPLES_DUCKDB" = "true" ]; then \
|
||||
mkdir -p /app/data && \
|
||||
echo "Downloading pre-built examples.duckdb..." && \
|
||||
curl -L -o /app/data/examples.duckdb \
|
||||
"https://raw.githubusercontent.com/apache-superset/examples-data/master/examples.duckdb" && \
|
||||
chown -R superset:superset /app/data; \
|
||||
else \
|
||||
mkdir -p /app/data && \
|
||||
chown -R superset:superset /app/data; \
|
||||
fi
|
||||
# Create data directory for DuckDB examples database
|
||||
# The database file will be created at runtime when examples are loaded from Parquet files
|
||||
RUN mkdir -p /app/data && chown -R superset:superset /app/data
|
||||
|
||||
# Copy compiled things from previous stages
|
||||
COPY --from=superset-node /app/superset/static/assets superset/static/assets
|
||||
|
||||
20
INSTALL.md
@@ -16,8 +16,20 @@ KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
# INSTALL / BUILD instructions for Apache Superset
|
||||
# Installing Apache Superset
|
||||
|
||||
At this time, the docker file at RELEASING/Dockerfile.from_local_tarball
|
||||
constitutes the recipe on how to get to a working release from a source
|
||||
release tarball.
|
||||
For comprehensive installation instructions, please see the Apache Superset documentation:
|
||||
|
||||
**[📚 Installation Guide →](https://superset.apache.org/docs/installation/installation-methods)**
|
||||
|
||||
The documentation covers:
|
||||
- [Docker Compose](https://superset.apache.org/docs/installation/docker-compose) (recommended for development)
|
||||
- [Kubernetes / Helm](https://superset.apache.org/docs/installation/kubernetes)
|
||||
- [PyPI](https://superset.apache.org/docs/installation/pypi)
|
||||
- [Docker Builds](https://superset.apache.org/docs/installation/docker-builds)
|
||||
- [Architecture Overview](https://superset.apache.org/docs/installation/architecture)
|
||||
|
||||
## Building from Source
|
||||
|
||||
For building from a source release tarball, see the Dockerfile at:
|
||||
`RELEASING/Dockerfile.from_local_tarball`
|
||||
|
||||
@@ -1,121 +0,0 @@
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
# Superset Frontend Linting Architecture
|
||||
|
||||
## Overview
|
||||
We use a hybrid linting approach combining OXC (fast, standard rules) with custom AST-based checks for Superset-specific patterns.
|
||||
|
||||
## Components
|
||||
|
||||
### 1. Primary Linter: OXC
|
||||
- **What**: Oxidation Compiler's linter (oxlint)
|
||||
- **Handles**: 95% of linting rules (standard ESLint rules, TypeScript, React, etc.)
|
||||
- **Speed**: ~50-100x faster than ESLint
|
||||
- **Config**: `oxlint.json`
|
||||
|
||||
### 2. Custom Rule Checker
|
||||
- **What**: Node.js AST-based script
|
||||
- **Handles**: Superset-specific rules:
|
||||
- No literal colors (use theme)
|
||||
- No FontAwesome icons (use Icons component)
|
||||
- No template vars in i18n
|
||||
- **Speed**: Fast enough for pre-commit
|
||||
- **Script**: `scripts/check-custom-rules.js`
|
||||
|
||||
## Developer Workflow
|
||||
|
||||
### Local Development
|
||||
```bash
|
||||
# Fast linting (OXC only)
|
||||
npm run lint
|
||||
|
||||
# Full linting (OXC + custom rules)
|
||||
npm run lint:full
|
||||
|
||||
# Auto-fix what's possible
|
||||
npm run lint-fix
|
||||
```
|
||||
|
||||
### Pre-commit
|
||||
1. OXC runs first (via `scripts/oxlint.sh`)
|
||||
2. Custom rules check runs second (lightweight, AST-based)
|
||||
3. Both must pass for commit to succeed
|
||||
|
||||
### CI Pipeline
|
||||
```yaml
|
||||
- name: Lint with OXC
|
||||
run: npm run lint
|
||||
|
||||
- name: Check custom rules
|
||||
run: npm run check:custom-rules
|
||||
```
|
||||
|
||||
## Why This Architecture?
|
||||
|
||||
### ✅ Pros
|
||||
1. **No binary distribution issues** - ASF compatible
|
||||
2. **Fast performance** - OXC for bulk, lightweight script for custom
|
||||
3. **Maintainable** - Custom rules in JavaScript, not Rust
|
||||
4. **Flexible** - Can evolve as OXC adds plugin support
|
||||
5. **Cacheable** - Both OXC and Node.js are standard tools
|
||||
|
||||
### ❌ Cons
|
||||
1. **Two tools** - Slightly more complex than single linter
|
||||
2. **Duplicate parsing** - Files parsed twice (once by each tool)
|
||||
|
||||
### 🔄 Migration Path
|
||||
When OXC supports JavaScript plugins:
|
||||
1. Convert `check-custom-rules.js` to OXC plugin format
|
||||
2. Consolidate back to single tool
|
||||
3. Keep same rules and developer experience
|
||||
|
||||
## Implementation Checklist
|
||||
|
||||
- [x] OXC for standard linting
|
||||
- [x] Pre-commit integration
|
||||
- [ ] Custom rules script
|
||||
- [ ] Combine in npm scripts
|
||||
- [ ] Update CI pipeline
|
||||
- [ ] Developer documentation
|
||||
|
||||
## Performance Targets
|
||||
|
||||
| Operation | Target Time | Current |
|
||||
|-----------|------------|---------|
|
||||
| Pre-commit (changed files) | <2s | ✅ 1.5s |
|
||||
| Full lint (all files) | <10s | ✅ 8s |
|
||||
| Custom rules check | <5s | 🔄 TBD |
|
||||
|
||||
## Caching Strategy
|
||||
|
||||
### Local Development
|
||||
- OXC: Built-in incremental checking
|
||||
- Custom rules: Use file hash cache (similar to pytest cache)
|
||||
|
||||
### CI
|
||||
- Cache `node_modules` (includes oxlint binary)
|
||||
- Cache custom rules results by commit hash
|
||||
- Skip unchanged files using git diff
|
||||
|
||||
## Future Improvements
|
||||
|
||||
1. **When OXC adds plugin support**: Migrate custom rules to OXC plugins
|
||||
2. **Consider Biome**: Another Rust-based linter with plugin support
|
||||
3. **AST sharing**: Investigate sharing AST between tools to avoid double parsing
|
||||
110
README.md
@@ -89,7 +89,7 @@ Superset provides:
|
||||
|
||||
**Craft Beautiful, Dynamic Dashboards**
|
||||
|
||||
<kbd><img title="View Dashboards" src="https://superset.apache.org/img/screenshots/slack_dash.jpg"/></kbd><br/>
|
||||
<kbd><img title="View Dashboards" src="https://superset.apache.org/img/screenshots/dashboard.jpg"/></kbd><br/>
|
||||
|
||||
**No-Code Chart Builder**
|
||||
|
||||
@@ -101,51 +101,77 @@ Superset provides:
|
||||
|
||||
## Supported Databases
|
||||
|
||||
Superset can query data from any SQL-speaking datastore or data engine (Presto, Trino, Athena, [and more](https://superset.apache.org/docs/configuration/databases)) that has a Python DB-API driver and a SQLAlchemy dialect.
|
||||
Superset can query data from any SQL-speaking datastore or data engine (Presto, Trino, Athena, [and more](https://superset.apache.org/docs/databases)) that has a Python DB-API driver and a SQLAlchemy dialect.
|
||||
|
||||
Here are some of the major database solutions that are supported:
|
||||
|
||||
<!-- SUPPORTED_DATABASES_START -->
|
||||
<p align="center">
|
||||
<img src="https://superset.apache.org/img/databases/redshift.png" alt="redshift" border="0" width="200"/>
|
||||
<img src="https://superset.apache.org/img/databases/google-biquery.png" alt="google-bigquery" border="0" width="200"/>
|
||||
<img src="https://superset.apache.org/img/databases/snowflake.png" alt="snowflake" border="0" width="200"/>
|
||||
<img src="https://superset.apache.org/img/databases/trino.png" alt="trino" border="0" width="150" />
|
||||
<img src="https://superset.apache.org/img/databases/presto.png" alt="presto" border="0" width="200"/>
|
||||
<img src="https://superset.apache.org/img/databases/databricks.png" alt="databricks" border="0" width="160" />
|
||||
<img src="https://superset.apache.org/img/databases/druid.png" alt="druid" border="0" width="200" />
|
||||
<img src="https://superset.apache.org/img/databases/firebolt.png" alt="firebolt" border="0" width="200" />
|
||||
<img src="https://superset.apache.org/img/databases/timescale.png" alt="timescale" border="0" width="200" />
|
||||
<img src="https://superset.apache.org/img/databases/postgresql.png" alt="postgresql" border="0" width="200" />
|
||||
<img src="https://superset.apache.org/img/databases/mysql.png" alt="mysql" border="0" width="200" />
|
||||
<img src="https://superset.apache.org/img/databases/mssql-server.png" alt="mssql-server" border="0" width="200" />
|
||||
<img src="https://superset.apache.org/img/databases/ibm-db2.svg" alt="db2" border="0" width="220" />
|
||||
<img src="https://superset.apache.org/img/databases/sqlite.png" alt="sqlite" border="0" width="200" />
|
||||
<img src="https://superset.apache.org/img/databases/sybase.png" alt="sybase" border="0" width="200" />
|
||||
<img src="https://superset.apache.org/img/databases/mariadb.png" alt="mariadb" border="0" width="200" />
|
||||
<img src="https://superset.apache.org/img/databases/vertica.png" alt="vertica" border="0" width="200" />
|
||||
<img src="https://superset.apache.org/img/databases/oracle.png" alt="oracle" border="0" width="200" />
|
||||
<img src="https://superset.apache.org/img/databases/firebird.png" alt="firebird" border="0" width="200" />
|
||||
<img src="https://superset.apache.org/img/databases/greenplum.png" alt="greenplum" border="0" width="200" />
|
||||
<img src="https://superset.apache.org/img/databases/clickhouse.png" alt="clickhouse" border="0" width="200" />
|
||||
<img src="https://superset.apache.org/img/databases/exasol.png" alt="exasol" border="0" width="160" />
|
||||
<img src="https://superset.apache.org/img/databases/monet-db.png" alt="monet-db" border="0" width="200" />
|
||||
<img src="https://superset.apache.org/img/databases/apache-kylin.png" alt="apache-kylin" border="0" width="80"/>
|
||||
<img src="https://superset.apache.org/img/databases/hologres.png" alt="hologres" border="0" width="80"/>
|
||||
<img src="https://superset.apache.org/img/databases/netezza.png" alt="netezza" border="0" width="80"/>
|
||||
<img src="https://superset.apache.org/img/databases/pinot.png" alt="pinot" border="0" width="200" />
|
||||
<img src="https://superset.apache.org/img/databases/teradata.png" alt="teradata" border="0" width="200" />
|
||||
<img src="https://superset.apache.org/img/databases/yugabyte.png" alt="yugabyte" border="0" width="200" />
|
||||
<img src="https://superset.apache.org/img/databases/databend.png" alt="databend" border="0" width="200" />
|
||||
<img src="https://superset.apache.org/img/databases/starrocks.png" alt="starrocks" border="0" width="200" />
|
||||
<img src="https://superset.apache.org/img/databases/doris.png" alt="doris" border="0" width="200" />
|
||||
<img src="https://superset.apache.org/img/databases/oceanbase.svg" alt="oceanbase" border="0" width="220" />
|
||||
<img src="https://superset.apache.org/img/databases/sap-hana.png" alt="sap-hana" border="0" width="220" />
|
||||
<img src="https://superset.apache.org/img/databases/denodo.png" alt="denodo" border="0" width="200" />
|
||||
<img src="https://superset.apache.org/img/databases/ydb.svg" alt="ydb" border="0" width="200" />
|
||||
<img src="https://superset.apache.org/img/databases/tdengine.png" alt="TDengine" border="0" width="200" />
|
||||
<a href="https://superset.apache.org/docs/databases/supported/amazon-athena" title="Amazon Athena"><img src="docs/static/img/databases/amazon-athena.jpg" alt="Amazon Athena" width="76" height="40" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/amazon-dynamodb" title="Amazon DynamoDB"><img src="docs/static/img/databases/aws.png" alt="Amazon DynamoDB" width="40" height="40" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/amazon-redshift" title="Amazon Redshift"><img src="docs/static/img/databases/redshift.png" alt="Amazon Redshift" width="100" height="40" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/apache-doris" title="Apache Doris"><img src="docs/static/img/databases/doris.png" alt="Apache Doris" width="103" height="40" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/apache-drill" title="Apache Drill"><img src="docs/static/img/databases/apache-drill.png" alt="Apache Drill" width="81" height="40" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/apache-druid" title="Apache Druid"><img src="docs/static/img/databases/druid.png" alt="Apache Druid" width="117" height="40" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/apache-hive" title="Apache Hive"><img src="docs/static/img/databases/apache-hive.svg" alt="Apache Hive" width="44" height="40" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/apache-impala" title="Apache Impala"><img src="docs/static/img/databases/apache-impala.png" alt="Apache Impala" width="21" height="40" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/apache-kylin" title="Apache Kylin"><img src="docs/static/img/databases/apache-kylin.png" alt="Apache Kylin" width="44" height="40" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/apache-pinot" title="Apache Pinot"><img src="docs/static/img/databases/apache-pinot.svg" alt="Apache Pinot" width="76" height="40" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/apache-solr" title="Apache Solr"><img src="docs/static/img/databases/apache-solr.png" alt="Apache Solr" width="79" height="40" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/apache-spark-sql" title="Apache Spark SQL"><img src="docs/static/img/databases/apache-spark.png" alt="Apache Spark SQL" width="75" height="40" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/ascend" title="Ascend"><img src="docs/static/img/databases/ascend.webp" alt="Ascend" width="117" height="40" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/aurora-mysql-data-api" title="Aurora MySQL (Data API)"><img src="docs/static/img/databases/mysql.png" alt="Aurora MySQL (Data API)" width="77" height="40" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/aurora-postgresql-data-api" title="Aurora PostgreSQL (Data API)"><img src="docs/static/img/databases/postgresql.svg" alt="Aurora PostgreSQL (Data API)" width="76" height="40" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/azure-data-explorer" title="Azure Data Explorer"><img src="docs/static/img/databases/kusto.png" alt="Azure Data Explorer" width="40" height="40" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/azure-synapse" title="Azure Synapse"><img src="docs/static/img/databases/azure.svg" alt="Azure Synapse" width="40" height="40" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/clickhouse" title="ClickHouse"><img src="docs/static/img/databases/clickhouse.png" alt="ClickHouse" width="150" height="37" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/cloudflare-d1" title="Cloudflare D1"><img src="docs/static/img/databases/cloudflare.png" alt="Cloudflare D1" width="40" height="40" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/cockroachdb" title="CockroachDB"><img src="docs/static/img/databases/cockroachdb.png" alt="CockroachDB" width="150" height="24" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/couchbase" title="Couchbase"><img src="docs/static/img/databases/couchbase.svg" alt="Couchbase" width="150" height="35" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/cratedb" title="CrateDB"><img src="docs/static/img/databases/cratedb.svg" alt="CrateDB" width="180" height="24" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/databend" title="Databend"><img src="docs/static/img/databases/databend.png" alt="Databend" width="100" height="40" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/databricks" title="Databricks"><img src="docs/static/img/databases/databricks.png" alt="Databricks" width="152" height="24" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/denodo" title="Denodo"><img src="docs/static/img/databases/denodo.png" alt="Denodo" width="138" height="40" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/dremio" title="Dremio"><img src="docs/static/img/databases/dremio.png" alt="Dremio" width="126" height="40" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/duckdb" title="DuckDB"><img src="docs/static/img/databases/duckdb.png" alt="DuckDB" width="52" height="40" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/elasticsearch" title="Elasticsearch"><img src="docs/static/img/databases/elasticsearch.png" alt="Elasticsearch" width="40" height="40" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/exasol" title="Exasol"><img src="docs/static/img/databases/exasol.png" alt="Exasol" width="72" height="40" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/firebird" title="Firebird"><img src="docs/static/img/databases/firebird.png" alt="Firebird" width="100" height="40" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/firebolt" title="Firebolt"><img src="docs/static/img/databases/firebolt.png" alt="Firebolt" width="100" height="40" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/google-bigquery" title="Google BigQuery"><img src="docs/static/img/databases/google-big-query.svg" alt="Google BigQuery" width="76" height="40" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/google-sheets" title="Google Sheets"><img src="docs/static/img/databases/google-sheets.svg" alt="Google Sheets" width="76" height="40" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/greenplum" title="Greenplum"><img src="docs/static/img/databases/greenplum.png" alt="Greenplum" width="124" height="40" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/hologres" title="Hologres"><img src="docs/static/img/databases/hologres.png" alt="Hologres" width="44" height="40" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/ibm-db2" title="IBM Db2"><img src="docs/static/img/databases/ibm-db2.svg" alt="IBM Db2" width="91" height="40" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/ibm-netezza-performance-server" title="IBM Netezza Performance Server"><img src="docs/static/img/databases/netezza.png" alt="IBM Netezza Performance Server" width="40" height="40" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/mariadb" title="MariaDB"><img src="docs/static/img/databases/mariadb.png" alt="MariaDB" width="150" height="37" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/microsoft-sql-server" title="Microsoft SQL Server"><img src="docs/static/img/databases/msql.png" alt="Microsoft SQL Server" width="50" height="40" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/monetdb" title="MonetDB"><img src="docs/static/img/databases/monet-db.png" alt="MonetDB" width="100" height="40" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/mongodb" title="MongoDB"><img src="docs/static/img/databases/mongodb.png" alt="MongoDB" width="150" height="38" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/motherduck" title="MotherDuck"><img src="docs/static/img/databases/motherduck.png" alt="MotherDuck" width="40" height="40" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/oceanbase" title="OceanBase"><img src="docs/static/img/databases/oceanbase.svg" alt="OceanBase" width="175" height="24" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/oracle" title="Oracle"><img src="docs/static/img/databases/oraclelogo.png" alt="Oracle" width="111" height="40" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/presto" title="Presto"><img src="docs/static/img/databases/presto-og.png" alt="Presto" width="127" height="40" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/risingwave" title="RisingWave"><img src="docs/static/img/databases/risingwave.svg" alt="RisingWave" width="147" height="40" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/sap-hana" title="SAP HANA"><img src="docs/static/img/databases/sap-hana.png" alt="SAP HANA" width="137" height="40" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/sap-sybase" title="SAP Sybase"><img src="docs/static/img/databases/sybase.png" alt="SAP Sybase" width="100" height="40" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/shillelagh" title="Shillelagh"><img src="docs/static/img/databases/shillelagh.png" alt="Shillelagh" width="40" height="40" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/singlestore" title="SingleStore"><img src="docs/static/img/databases/singlestore.png" alt="SingleStore" width="150" height="31" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/snowflake" title="Snowflake"><img src="docs/static/img/databases/snowflake.svg" alt="Snowflake" width="76" height="40" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/sqlite" title="SQLite"><img src="docs/static/img/databases/sqlite.png" alt="SQLite" width="84" height="40" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/starrocks" title="StarRocks"><img src="docs/static/img/databases/starrocks.png" alt="StarRocks" width="149" height="40" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/superset-meta-database" title="Superset meta database"><img src="docs/static/img/databases/superset.svg" alt="Superset meta database" width="150" height="39" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/tdengine" title="TDengine"><img src="docs/static/img/databases/tdengine.png" alt="TDengine" width="140" height="40" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/teradata" title="Teradata"><img src="docs/static/img/databases/teradata.png" alt="Teradata" width="124" height="40" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/timescaledb" title="TimescaleDB"><img src="docs/static/img/databases/timescale.png" alt="TimescaleDB" width="150" height="36" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/trino" title="Trino"><img src="docs/static/img/databases/trino.png" alt="Trino" width="89" height="40" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/vertica" title="Vertica"><img src="docs/static/img/databases/vertica.png" alt="Vertica" width="128" height="40" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/ydb" title="YDB"><img src="docs/static/img/databases/ydb.svg" alt="YDB" width="110" height="40" /></a>
|
||||
<a href="https://superset.apache.org/docs/databases/supported/yugabytedb" title="YugabyteDB"><img src="docs/static/img/databases/yugabyte.png" alt="YugabyteDB" width="150" height="26" /></a>
|
||||
</p>
|
||||
<!-- SUPPORTED_DATABASES_END -->
|
||||
|
||||
**A more comprehensive list of supported databases** along with the configuration instructions can be found [here](https://superset.apache.org/docs/configuration/databases).
|
||||
**A more comprehensive list of supported databases** along with the configuration instructions can be found [here](https://superset.apache.org/docs/databases).
|
||||
|
||||
Want to add support for your datastore or data engine? Read more [here](https://superset.apache.org/docs/frequently-asked-questions#does-superset-work-with-insert-database-engine-here) about the technical requirements.
|
||||
|
||||
@@ -165,14 +191,14 @@ Try out Superset's [quickstart](https://superset.apache.org/docs/quickstart/) gu
|
||||
## Contributor Guide
|
||||
|
||||
Interested in contributing? Check out our
|
||||
[CONTRIBUTING.md](https://github.com/apache/superset/blob/master/CONTRIBUTING.md)
|
||||
[Developer Portal](https://superset.apache.org/developer_portal/)
|
||||
to find resources around contributing along with a detailed guide on
|
||||
how to set up a development environment.
|
||||
|
||||
## Resources
|
||||
|
||||
- [Superset "In the Wild"](https://superset.apache.org/inTheWild) - see who's using Superset, and [add your organization](https://github.com/apache/superset/edit/master/RESOURCES/INTHEWILD.yaml) to the list!
|
||||
- [Feature Flags](https://github.com/apache/superset/blob/master/RESOURCES/FEATURE_FLAGS.md) - the status of Superset's Feature Flags.
|
||||
- [Feature Flags](https://superset.apache.org/docs/configuration/feature-flags) - the status of Superset's Feature Flags.
|
||||
- [Standard Roles](https://github.com/apache/superset/blob/master/RESOURCES/STANDARD_ROLES.md) - How RBAC permissions map to roles.
|
||||
- [Superset Wiki](https://github.com/apache/superset/wiki) - Tons of additional community resources: best practices, community content and other information.
|
||||
- [Superset SIPs](https://github.com/orgs/apache/projects/170) - The status of Superset's SIPs (Superset Improvement Proposals) for both consensus and implementation status.
|
||||
|
||||
@@ -92,7 +92,7 @@ Some of the new features in this release are disabled by default. Each has a fea
|
||||
|
||||
| Feature | Feature Flag | Dependencies | Documentation |
|
||||
| --- | --- | --- | --- |
|
||||
| Global Async Queries | `GLOBAL_ASYNC_QUERIES: True` | Redis 5.0+, celery workers configured and running | [Extra documentation](https://github.com/apache/superset/blob/master/CONTRIBUTING.md#async-chart-queries )
|
||||
| Global Async Queries | `GLOBAL_ASYNC_QUERIES: True` | Redis 5.0+, celery workers configured and running | [Extra documentation](https://superset.apache.org/docs/contributing/misc#async-chart-queries)
|
||||
| Dashboard Native Filters | `DASHBOARD_NATIVE_FILTERS: True` | |
|
||||
| Alerts & Reporting | `ALERT_REPORTS: True` | [Celery workers configured & celery beat process](https://superset.apache.org/docs/installation/async-queries-celery) |
|
||||
| Homescreen Thumbnails | `THUMBNAILS: TRUE, THUMBNAIL_CACHE_CONFIG: CacheConfig = { "CACHE_TYPE": "null", "CACHE_NO_NULL_WARNING": True}`| selenium, pillow 7, celery |
|
||||
|
||||
@@ -1,103 +0,0 @@
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
# Superset Feature Flags
|
||||
|
||||
This is a list of the current Superset optional features. See config.py for default values. These features can be turned on/off by setting your preferred values in superset_config.py to True/False, respectively.
|
||||
|
||||
## In Development
|
||||
|
||||
These features are considered **unfinished** and should only be used on development environments.
|
||||
|
||||
[//]: # "PLEASE KEEP THE LIST SORTED ALPHABETICALLY"
|
||||
|
||||
- ALERT_REPORT_TABS
|
||||
- DATE_RANGE_TIMESHIFTS_ENABLED
|
||||
- ENABLE_ADVANCED_DATA_TYPES
|
||||
- PRESTO_EXPAND_DATA
|
||||
- SHARE_QUERIES_VIA_KV_STORE
|
||||
- TAGGING_SYSTEM
|
||||
- CHART_PLUGINS_EXPERIMENTAL
|
||||
|
||||
## In Testing
|
||||
|
||||
These features are **finished** but currently being tested. They are usable, but may still contain some bugs.
|
||||
|
||||
[//]: # "PLEASE KEEP THE LIST SORTED ALPHABETICALLY"
|
||||
|
||||
- ALERT_REPORTS: [(docs)](https://superset.apache.org/docs/configuration/alerts-reports)
|
||||
- ALLOW_FULL_CSV_EXPORT
|
||||
- CACHE_IMPERSONATION
|
||||
- CONFIRM_DASHBOARD_DIFF
|
||||
- DYNAMIC_PLUGINS
|
||||
- DATE_FORMAT_IN_EMAIL_SUBJECT: [(docs)](https://superset.apache.org/docs/configuration/alerts-reports#commons)
|
||||
- ENABLE_SUPERSET_META_DB: [(docs)](https://superset.apache.org/docs/configuration/databases/#querying-across-databases)
|
||||
- ESTIMATE_QUERY_COST
|
||||
- GLOBAL_ASYNC_QUERIES [(docs)](https://github.com/apache/superset/blob/master/CONTRIBUTING.md#async-chart-queries)
|
||||
- IMPERSONATE_WITH_EMAIL_PREFIX
|
||||
- PLAYWRIGHT_REPORTS_AND_THUMBNAILS
|
||||
- RLS_IN_SQLLAB
|
||||
- SSH_TUNNELING [(docs)](https://superset.apache.org/docs/configuration/setup-ssh-tunneling)
|
||||
- USE_ANALAGOUS_COLORS
|
||||
|
||||
## Stable
|
||||
|
||||
These feature flags are **safe for production**. They have been tested and will be supported for at least the current major version cycle.
|
||||
|
||||
[//]: # "PLEASE KEEP THESE LISTS SORTED ALPHABETICALLY"
|
||||
|
||||
### Flags on the path to feature launch and flag deprecation/removal
|
||||
|
||||
- DASHBOARD_VIRTUALIZATION
|
||||
|
||||
### Flags retained for runtime configuration
|
||||
|
||||
Currently some of our feature flags act as dynamic configurations that can change
|
||||
on the fly. This acts in contradiction with the typical ephemeral feature flag use case,
|
||||
where the flag is used to mature a feature, and eventually deprecated once the feature is
|
||||
solid. Eventually we'll likely refactor these under a more formal "dynamic configurations" framework, managed
|
||||
independently. This new framework will also allow for non-boolean configurations.
|
||||
|
||||
- ALERTS_ATTACH_REPORTS
|
||||
- ALLOW_ADHOC_SUBQUERY
|
||||
- DASHBOARD_RBAC [(docs)](https://superset.apache.org/docs/using-superset/creating-your-first-dashboard#manage-access-to-dashboards)
|
||||
- DATAPANEL_CLOSED_BY_DEFAULT
|
||||
- DRILL_BY
|
||||
- DRUID_JOINS
|
||||
- EMBEDDABLE_CHARTS
|
||||
- EMBEDDED_SUPERSET
|
||||
- ENABLE_TEMPLATE_PROCESSING
|
||||
- ESCAPE_MARKDOWN_HTML
|
||||
- LISTVIEWS_DEFAULT_CARD_VIEW
|
||||
- SCHEDULED_QUERIES [(docs)](https://superset.apache.org/docs/configuration/alerts-reports)
|
||||
- SLACK_ENABLE_AVATARS (see `superset/config.py` for more information)
|
||||
- SQLLAB_BACKEND_PERSISTENCE
|
||||
- SQL_VALIDATORS_BY_ENGINE [(docs)](https://superset.apache.org/docs/configuration/sql-templating)
|
||||
- THUMBNAILS [(docs)](https://superset.apache.org/docs/configuration/cache)
|
||||
|
||||
## Deprecated Flags
|
||||
|
||||
These feature flags currently default to True and **will be removed in a future major release**. For this current release you can turn them off by setting your config to False, but it is advised to remove or set these flags in your local configuration to **True** so that you do not experience any unexpected changes in a future release.
|
||||
|
||||
[//]: # "PLEASE KEEP THE LIST SORTED ALPHABETICALLY"
|
||||
|
||||
- AVOID_COLORS_COLLISION
|
||||
- DRILL_TO_DETAIL
|
||||
- ENABLE_JAVASCRIPT_CONTROLS
|
||||
- KV_STORE
|
||||
@@ -136,10 +136,6 @@ categories:
|
||||
url: https://www.dropit.shop/
|
||||
contributors: ["@dropit-dev"]
|
||||
|
||||
- name: Fanatics
|
||||
url: https://www.fanatics.com/
|
||||
contributors: ["@coderfender"]
|
||||
|
||||
- name: Fordeal
|
||||
url: https://www.fordeal.com
|
||||
contributors: ["@Renkai"]
|
||||
@@ -291,8 +287,10 @@ categories:
|
||||
url: https://www.gfk.com/home
|
||||
contributors: ["@mherr"]
|
||||
|
||||
# Logo approved by @anmol-hpe on behalf of HPE
|
||||
- name: HPE
|
||||
url: https://www.hpe.com/in/en/home.html
|
||||
logo: hpe.png
|
||||
contributors: ["@anmol-hpe"]
|
||||
|
||||
- name: Hydrolix
|
||||
@@ -432,6 +430,11 @@ categories:
|
||||
url: https://brandct.cn/
|
||||
contributors: ["@wenbinye"]
|
||||
|
||||
- name: XNET
|
||||
url: https://xnetmobile.com/
|
||||
logo: xnet.png
|
||||
contributors: ["@deuspt"]
|
||||
|
||||
- name: Zeta
|
||||
url: https://www.zeta.tech/
|
||||
contributors: ["@shaikidris"]
|
||||
@@ -622,6 +625,20 @@ categories:
|
||||
- name: Stockarea
|
||||
url: https://stockarea.io
|
||||
|
||||
Sports:
|
||||
- name: Club 25 de Agosto (Femenino / Women's Team)
|
||||
url: https://www.instagram.com/25deagosto.basketfemenino/
|
||||
contributors: [ "@lion90" ]
|
||||
logo: club25deagosto.svg
|
||||
|
||||
- name: Fanatics
|
||||
url: https://www.fanatics.com/
|
||||
contributors: [ "@coderfender" ]
|
||||
|
||||
- name: komoot
|
||||
url: https://www.komoot.com/
|
||||
contributors: [ "@christophlingg" ]
|
||||
|
||||
Others:
|
||||
- name: 10Web
|
||||
url: https://10web.io/
|
||||
@@ -657,10 +674,6 @@ categories:
|
||||
url: https://www.increff.com/
|
||||
contributors: ["@ishansinghania"]
|
||||
|
||||
- name: komoot
|
||||
url: https://www.komoot.com/
|
||||
contributors: ["@christophlingg"]
|
||||
|
||||
- name: Let's Roam
|
||||
url: https://www.letsroam.com/
|
||||
|
||||
|
||||
74
UPDATING.md
@@ -24,6 +24,56 @@ assists people when migrating to a new version.
|
||||
|
||||
## Next
|
||||
|
||||
### WebSocket config for GAQ with Docker
|
||||
|
||||
[35896](https://github.com/apache/superset/pull/35896) and [37624](https://github.com/apache/superset/pull/37624) updated documentation on how to run and configure Superset with Docker. Specifically for the WebSocket configuration, a new `docker/superset-websocket/config.example.json` was added to the repo, so that users could copy it to create a `docker/superset-websocket/config.json` file. The existing `docker/superset-websocket/config.json` was removed and git-ignored, so if you're using GAQ / WebSocket make sure to:
|
||||
- Stash/backup your existing `config.json` file, to re-apply it after (will get git-ignored going forward)
|
||||
- Update the `volumes` configuration for the `superset-websocket` service in your `docker-compose.override.yml` file, to include the `docker/superset-websocket/config.json` file. For example:
|
||||
``` yaml
|
||||
services:
|
||||
superset-websocket:
|
||||
volumes:
|
||||
- ./superset-websocket:/home/superset-websocket
|
||||
- /home/superset-websocket/node_modules
|
||||
- /home/superset-websocket/dist
|
||||
- ./docker/superset-websocket/config.json:/home/superset-websocket/config.json:ro
|
||||
```
|
||||
|
||||
### Example Data Loading Improvements
|
||||
|
||||
#### New Directory Structure
|
||||
Examples are now organized by name with data and configs co-located:
|
||||
```
|
||||
superset/examples/
|
||||
├── _shared/ # Shared database & metadata configs
|
||||
├── birth_names/ # Each example is self-contained
|
||||
│ ├── data.parquet # Dataset (Parquet format)
|
||||
│ ├── dataset.yaml # Dataset metadata
|
||||
│ ├── dashboard.yaml # Dashboard config (optional)
|
||||
│ └── charts/ # Chart configs (optional)
|
||||
└── ...
|
||||
```
|
||||
|
||||
#### Simplified Parquet-based Loading
|
||||
- Auto-discovery: create `superset/examples/my_dataset/data.parquet` to add a new example
|
||||
- Parquet is an Apache project format: compressed (~27% smaller), self-describing schema
|
||||
- YAML configs define datasets, charts, and dashboards declaratively
|
||||
- Removed Python-based data generation from individual example files
|
||||
|
||||
#### Test Data Reorganization
|
||||
- Moved `big_data.py` to `superset/cli/test_loaders.py` - better reflects its purpose as a test utility
|
||||
- Fixed inverted logic for `--load-test-data` flag (now correctly includes .test.yaml files when flag is set)
|
||||
- Clarified CLI flags:
|
||||
- `--force` / `-f`: Force reload even if tables exist
|
||||
- `--only-metadata` / `-m`: Create table metadata without loading data
|
||||
- `--load-test-data` / `-t`: Include test dashboards and .test.yaml configs
|
||||
- `--load-big-data` / `-b`: Generate synthetic stress-test data
|
||||
|
||||
#### Bug Fixes
|
||||
- Fixed numpy array serialization for PostgreSQL (converts complex types to JSON strings)
|
||||
- Fixed KeyError for `allow_csv_upload` field in database configs (now optional with default)
|
||||
- Fixed test data loading logic that was incorrectly filtering files
|
||||
|
||||
### MCP Service
|
||||
|
||||
The MCP (Model Context Protocol) service enables AI assistants and automation tools to interact programmatically with Superset.
|
||||
@@ -128,6 +178,28 @@ See `superset/mcp_service/PRODUCTION.md` for deployment guides.
|
||||
- [35062](https://github.com/apache/superset/pull/35062): Changed the function signature of `setupExtensions` to `setupCodeOverrides` with options as arguments.
|
||||
|
||||
### Breaking Changes
|
||||
- [37370](https://github.com/apache/superset/pull/37370): The `APP_NAME` configuration variable no longer controls the browser window/tab title or other frontend branding. Application names should now be configured using the theme system with the `brandAppName` token. The `APP_NAME` config is still used for backend contexts (MCP service, logs, etc.) and serves as a fallback if `brandAppName` is not set.
|
||||
- **Migration:**
|
||||
```python
|
||||
# Before (Superset 5.x)
|
||||
APP_NAME = "My Custom App"
|
||||
|
||||
# After (Superset 6.x) - Option 1: Use theme system (recommended)
|
||||
THEME_DEFAULT = {
|
||||
"token": {
|
||||
"brandAppName": "My Custom App", # Window titles
|
||||
"brandLogoAlt": "My Custom App", # Logo alt text
|
||||
"brandLogoUrl": "/static/assets/images/custom_logo.png"
|
||||
}
|
||||
}
|
||||
|
||||
# After (Superset 6.x) - Option 2: Temporary fallback
|
||||
# Keep APP_NAME for now (will be used as fallback for brandAppName)
|
||||
APP_NAME = "My Custom App"
|
||||
# But you should migrate to THEME_DEFAULT.token.brandAppName
|
||||
```
|
||||
- **Note:** For dark mode, set the same tokens in `THEME_DARK` configuration.
|
||||
|
||||
- [36317](https://github.com/apache/superset/pull/36317): The `CUSTOM_FONT_URLS` configuration option has been removed. Use the new per-theme `fontUrls` token in `THEME_DEFAULT` or database-managed themes instead.
|
||||
- **Before:**
|
||||
```python
|
||||
@@ -142,7 +214,7 @@ See `superset/mcp_service/PRODUCTION.md` for deployment guides.
|
||||
"fontUrls": [
|
||||
"https://fonts.example.com/myfont.css",
|
||||
],
|
||||
# ... other tokens
|
||||
# ... other tokens
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
@@ -77,7 +77,6 @@ x-common-build: &common-build
|
||||
INCLUDE_CHROMIUM: ${INCLUDE_CHROMIUM:-false}
|
||||
INCLUDE_FIREFOX: ${INCLUDE_FIREFOX:-false}
|
||||
BUILD_TRANSLATIONS: ${BUILD_TRANSLATIONS:-false}
|
||||
LOAD_EXAMPLES_DUCKDB: ${LOAD_EXAMPLES_DUCKDB:-true}
|
||||
|
||||
services:
|
||||
db-light:
|
||||
@@ -116,7 +115,6 @@ services:
|
||||
DATABASE_HOST: db-light
|
||||
DATABASE_DB: superset_light
|
||||
POSTGRES_DB: superset_light
|
||||
SUPERSET__SQLALCHEMY_EXAMPLES_URI: "duckdb:////app/data/examples.duckdb"
|
||||
SUPERSET_CONFIG_PATH: /app/docker/pythonpath_dev/superset_config_docker_light.py
|
||||
GITHUB_HEAD_REF: ${GITHUB_HEAD_REF:-}
|
||||
GITHUB_SHA: ${GITHUB_SHA:-}
|
||||
@@ -139,7 +137,6 @@ services:
|
||||
DATABASE_HOST: db-light
|
||||
DATABASE_DB: superset_light
|
||||
POSTGRES_DB: superset_light
|
||||
SUPERSET__SQLALCHEMY_EXAMPLES_URI: "duckdb:////app/data/examples.duckdb"
|
||||
SUPERSET_CONFIG_PATH: /app/docker/pythonpath_dev/superset_config_docker_light.py
|
||||
healthcheck:
|
||||
disable: true
|
||||
@@ -162,8 +159,8 @@ services:
|
||||
SCARF_ANALYTICS: "${SCARF_ANALYTICS:-}"
|
||||
# configuring the dev-server to use the host.docker.internal to connect to the backend
|
||||
superset: "http://superset-light:8088"
|
||||
# Webpack dev server configuration
|
||||
WEBPACK_DEVSERVER_HOST: "${WEBPACK_DEVSERVER_HOST:-127.0.0.1}"
|
||||
# Webpack dev server must bind to 0.0.0.0 to be accessible from outside the container
|
||||
WEBPACK_DEVSERVER_HOST: "${WEBPACK_DEVSERVER_HOST:-0.0.0.0}"
|
||||
WEBPACK_DEVSERVER_PORT: "${WEBPACK_DEVSERVER_PORT:-9000}"
|
||||
ports:
|
||||
- "${NODE_PORT:-9001}:9000" # Parameterized port, accessible on all interfaces
|
||||
@@ -196,7 +193,6 @@ services:
|
||||
DATABASE_DB: test
|
||||
POSTGRES_DB: test
|
||||
SUPERSET__SQLALCHEMY_DATABASE_URI: postgresql+psycopg2://superset:superset@db-light:5432/test
|
||||
SUPERSET__SQLALCHEMY_EXAMPLES_URI: "duckdb:////app/data/examples.duckdb"
|
||||
SUPERSET_CONFIG: superset_test_config_light
|
||||
PYTHONPATH: /app/pythonpath:/app/docker/pythonpath_dev:/app
|
||||
|
||||
|
||||
@@ -44,7 +44,6 @@ x-common-build: &common-build
|
||||
INCLUDE_CHROMIUM: ${INCLUDE_CHROMIUM:-false}
|
||||
INCLUDE_FIREFOX: ${INCLUDE_FIREFOX:-false}
|
||||
BUILD_TRANSLATIONS: ${BUILD_TRANSLATIONS:-false}
|
||||
LOAD_EXAMPLES_DUCKDB: ${LOAD_EXAMPLES_DUCKDB:-true}
|
||||
|
||||
services:
|
||||
nginx:
|
||||
@@ -106,8 +105,6 @@ services:
|
||||
superset-init:
|
||||
condition: service_completed_successfully
|
||||
volumes: *superset-volumes
|
||||
environment:
|
||||
SUPERSET__SQLALCHEMY_EXAMPLES_URI: "duckdb:////app/data/examples.duckdb"
|
||||
|
||||
superset-websocket:
|
||||
build: ./superset-websocket
|
||||
@@ -157,8 +154,6 @@ services:
|
||||
condition: service_started
|
||||
user: *superset-user
|
||||
volumes: *superset-volumes
|
||||
environment:
|
||||
SUPERSET__SQLALCHEMY_EXAMPLES_URI: "duckdb:////app/data/examples.duckdb"
|
||||
healthcheck:
|
||||
disable: true
|
||||
|
||||
@@ -180,7 +175,7 @@ services:
|
||||
SCARF_ANALYTICS: "${SCARF_ANALYTICS:-}"
|
||||
# configuring the dev-server to use the host.docker.internal to connect to the backend
|
||||
superset: "http://superset:8088"
|
||||
# Bind to all interfaces so Docker port mapping works
|
||||
# Webpack dev server must bind to 0.0.0.0 to be accessible from outside the container
|
||||
WEBPACK_DEVSERVER_HOST: "0.0.0.0"
|
||||
ports:
|
||||
- "127.0.0.1:${NODE_PORT:-9000}:9000" # exposing the dynamic webpack dev server
|
||||
|
||||
@@ -28,11 +28,11 @@ if [ "$BUILD_SUPERSET_FRONTEND_IN_DOCKER" = "true" ]; then
|
||||
cd /app/superset-frontend
|
||||
|
||||
if [ "$NPM_RUN_PRUNE" = "true" ]; then
|
||||
echo "Running `npm run prune`"
|
||||
echo "Running \"npm run prune\""
|
||||
npm run prune
|
||||
fi
|
||||
|
||||
echo "Running `npm install`"
|
||||
echo "Running \"npm install\""
|
||||
npm install
|
||||
|
||||
echo "Start webpack dev server"
|
||||
|
||||
@@ -105,7 +105,12 @@ class CeleryConfig:
|
||||
|
||||
CELERY_CONFIG = CeleryConfig
|
||||
|
||||
FEATURE_FLAGS = {"ALERT_REPORTS": True}
|
||||
FEATURE_FLAGS = {
|
||||
"ALERT_REPORTS": True,
|
||||
"DATASET_FOLDERS": True,
|
||||
"ENABLE_EXTENSIONS": True,
|
||||
}
|
||||
EXTENSIONS_PATH = "/app/docker/extensions"
|
||||
ALERT_REPORTS_NOTIFICATION_DRY_RUN = True
|
||||
WEBDRIVER_BASEURL = f"http://superset_app{os.environ.get('SUPERSET_APP_ROOT', '/')}/" # When using docker compose baseurl should be http://superset_nginx{ENV{BASEPATH}}/ # noqa: E501
|
||||
# The base URL for the email report hyperlinks.
|
||||
|
||||
@@ -1,22 +0,0 @@
|
||||
{
|
||||
"port": 8080,
|
||||
"logLevel": "info",
|
||||
"logToFile": false,
|
||||
"logFilename": "app.log",
|
||||
"statsd": {
|
||||
"host": "127.0.0.1",
|
||||
"port": 8125,
|
||||
"globalTags": []
|
||||
},
|
||||
"redis": {
|
||||
"port": 6379,
|
||||
"host": "127.0.0.1",
|
||||
"password": "",
|
||||
"db": 0,
|
||||
"ssl": false
|
||||
},
|
||||
"redisStreamPrefix": "async-events-",
|
||||
"jwtAlgorithms": ["HS256"],
|
||||
"jwtSecret": "CHANGE-ME-IN-PRODUCTION-GOTTA-BE-LONG-AND-SECRET",
|
||||
"jwtCookieName": "async-token"
|
||||
}
|
||||
115
docs/.claude/instructions.md
Normal file
@@ -0,0 +1,115 @@
|
||||
# Developer Portal Documentation Instructions
|
||||
|
||||
## Core Principle: Stories Are the Single Source of Truth
|
||||
|
||||
When working on the Storybook-to-MDX documentation system:
|
||||
|
||||
**ALWAYS fix the story first. NEVER add workarounds to the generator.**
|
||||
|
||||
## Why This Matters
|
||||
|
||||
The generator (`scripts/generate-superset-components.mjs`) should be lightweight - it extracts data from stories and passes it through. When you add special cases to the generator:
|
||||
- It becomes harder to maintain
|
||||
- Stories diverge from their docs representation
|
||||
- Future stories need to know about generator quirks
|
||||
|
||||
When you fix stories to match the expected patterns:
|
||||
- Stories work identically in Storybook and Docs
|
||||
- The generator stays simple and predictable
|
||||
- Patterns are consistent and learnable
|
||||
|
||||
## Story Patterns for Docs Generation
|
||||
|
||||
### Required Structure
|
||||
```tsx
|
||||
// Use inline export default (NOT const meta = ...; export default meta)
|
||||
export default {
|
||||
title: 'Components/MyComponent',
|
||||
component: MyComponent,
|
||||
};
|
||||
|
||||
// Name interactive stories with Interactive prefix
|
||||
export const InteractiveMyComponent: Story = {
|
||||
args: {
|
||||
// Default prop values
|
||||
},
|
||||
argTypes: {
|
||||
// Control definitions - MUST be at story level, not meta level
|
||||
propName: {
|
||||
control: { type: 'select' },
|
||||
options: ['a', 'b', 'c'],
|
||||
description: 'What this prop does',
|
||||
},
|
||||
},
|
||||
};
|
||||
```
|
||||
|
||||
### For Components with Variants (size × style grids)
|
||||
```tsx
|
||||
const sizes = ['small', 'medium', 'large'];
|
||||
const variants = ['primary', 'secondary', 'danger'];
|
||||
|
||||
InteractiveButton.parameters = {
|
||||
docs: {
|
||||
gallery: {
|
||||
component: 'Button',
|
||||
sizes,
|
||||
styles: variants,
|
||||
sizeProp: 'size',
|
||||
styleProp: 'variant',
|
||||
},
|
||||
},
|
||||
};
|
||||
```
|
||||
|
||||
### For Components Requiring Children
|
||||
```tsx
|
||||
InteractiveIconTooltip.parameters = {
|
||||
docs: {
|
||||
// Component descriptors with dot notation for nested components
|
||||
sampleChildren: [{ component: 'Icons.InfoCircleOutlined', props: { iconSize: 'l' } }],
|
||||
},
|
||||
};
|
||||
```
|
||||
|
||||
### For Custom Live Code Examples
|
||||
```tsx
|
||||
InteractiveMyComponent.parameters = {
|
||||
docs: {
|
||||
liveExample: `function Demo() {
|
||||
return <MyComponent prop="value">Content</MyComponent>;
|
||||
}`,
|
||||
},
|
||||
};
|
||||
```
|
||||
|
||||
### For Complex Props (objects, arrays)
|
||||
```tsx
|
||||
InteractiveMenu.parameters = {
|
||||
docs: {
|
||||
staticProps: {
|
||||
items: [
|
||||
{ key: '1', label: 'Item 1' },
|
||||
{ key: '2', label: 'Item 2' },
|
||||
],
|
||||
},
|
||||
},
|
||||
};
|
||||
```
|
||||
|
||||
## Common Issues and How to Fix Them (in the Story)
|
||||
|
||||
| Issue | Wrong Approach | Right Approach |
|
||||
|-------|---------------|----------------|
|
||||
| Component not generated | Add pattern to generator | Change story to use inline `export default` |
|
||||
| Control shows as text instead of select | Add special case in generator | Add `argTypes` with `control: { type: 'select' }` |
|
||||
| Missing children/content | Modify StorybookWrapper | Add `parameters.docs.sampleChildren` |
|
||||
| Gallery not showing | Add to generator output | Add `parameters.docs.gallery` config |
|
||||
| Wrong live example | Hardcode in generator | Add `parameters.docs.liveExample` |
|
||||
|
||||
## Files
|
||||
|
||||
- **Generator**: `docs/scripts/generate-superset-components.mjs`
|
||||
- **Wrapper**: `docs/src/components/StorybookWrapper.jsx`
|
||||
- **Output**: `docs/developer_portal/components/`
|
||||
- **Stories**: `superset-frontend/packages/superset-ui-core/src/components/*/`
|
||||
18
docs/.gitignore
vendored
@@ -26,3 +26,21 @@ docs/intro.md
|
||||
|
||||
# Generated badge images (downloaded at build time by remark-localize-badges plugin)
|
||||
static/badges/
|
||||
|
||||
# Generated database documentation MDX files (regenerated at build time)
|
||||
# Source of truth is in superset/db_engine_specs/*.py metadata attributes
|
||||
docs/databases/
|
||||
|
||||
# Generated API documentation (regenerated at build time from openapi.json)
|
||||
# Source of truth is static/resources/openapi.json
|
||||
docs/api/
|
||||
|
||||
# Generated component documentation MDX files (regenerated at build time)
|
||||
# Source of truth is Storybook stories in superset-frontend/packages/superset-ui-core/src/components/
|
||||
developer_portal/components/
|
||||
|
||||
# Generated extension component documentation (regenerated at build time)
|
||||
developer_portal/extensions/components/
|
||||
|
||||
# Note: src/data/databases.json is COMMITTED (not ignored) to preserve feature diagnostics
|
||||
# that require Flask context to generate. Update it locally with: npm run gen-db-docs
|
||||
|
||||
@@ -416,7 +416,7 @@ If versions don't appear in dropdown:
|
||||
|
||||
- [Docusaurus Documentation](https://docusaurus.io/docs)
|
||||
- [MDX Documentation](https://mdxjs.com/)
|
||||
- [Superset Contributing Guide](../CONTRIBUTING.md)
|
||||
- [Superset Developer Portal](https://superset.apache.org/developer_portal/)
|
||||
- [Main Superset Documentation](https://superset.apache.org/docs/intro)
|
||||
|
||||
## 📖 Real Examples and Patterns
|
||||
|
||||
@@ -18,9 +18,9 @@ under the License.
|
||||
-->
|
||||
|
||||
This is the public documentation site for Superset, built using
|
||||
[Docusaurus 3](https://docusaurus.io/). See
|
||||
[CONTRIBUTING.md](../CONTRIBUTING.md#documentation) for documentation on
|
||||
contributing to documentation.
|
||||
[Docusaurus 3](https://docusaurus.io/). See the
|
||||
[Developer Portal](https://superset.apache.org/developer_portal/contributing/development-setup#documentation)
|
||||
for documentation on contributing to documentation.
|
||||
|
||||
## Version Management
|
||||
|
||||
|
||||
@@ -19,5 +19,14 @@
|
||||
*/
|
||||
|
||||
module.exports = {
|
||||
presets: [require.resolve('@docusaurus/core/lib/babel/preset')],
|
||||
presets: [
|
||||
[
|
||||
require.resolve('@docusaurus/core/lib/babel/preset'),
|
||||
{
|
||||
runtime: 'automatic',
|
||||
importSource: '@emotion/react',
|
||||
},
|
||||
],
|
||||
],
|
||||
plugins: ['@emotion/babel-plugin'],
|
||||
};
|
||||
|
||||
@@ -653,7 +653,7 @@ export enum FeatureFlag {
|
||||
those specified under FEATURE_FLAGS in `superset_config.py`. For example, `DEFAULT_FEATURE_FLAGS = { 'FOO': True, 'BAR': False }` in `superset/config.py` and `FEATURE_FLAGS = { 'BAR': True, 'BAZ': True }` in `superset_config.py` will result
|
||||
in combined feature flags of `{ 'FOO': True, 'BAR': True, 'BAZ': True }`.
|
||||
|
||||
The current status of the usability of each flag (stable vs testing, etc) can be found in `RESOURCES/FEATURE_FLAGS.md`.
|
||||
The current status of the usability of each flag (stable vs testing, etc) can be found in the [Feature Flags](/docs/configuration/feature-flags) documentation.
|
||||
|
||||
## Git Hooks
|
||||
|
||||
|
||||
@@ -258,19 +258,7 @@ For debugging the Flask backend:
|
||||
|
||||
### Storybook
|
||||
|
||||
Storybook is used for developing and testing UI components in isolation:
|
||||
|
||||
```bash
|
||||
cd superset-frontend
|
||||
|
||||
# Start Storybook
|
||||
npm run storybook
|
||||
|
||||
# Build static Storybook
|
||||
npm run build-storybook
|
||||
```
|
||||
|
||||
Access Storybook at http://localhost:6006
|
||||
See the dedicated [Storybook documentation](../testing/storybook) for information on running Storybook locally and adding new stories.
|
||||
|
||||
## Contributing Translations
|
||||
|
||||
@@ -342,26 +330,79 @@ ruff check --fix .
|
||||
|
||||
Pre-commit hooks run automatically on `git commit` if installed.
|
||||
|
||||
### TypeScript
|
||||
### TypeScript / JavaScript
|
||||
|
||||
We use ESLint and Prettier for TypeScript:
|
||||
We use a hybrid linting approach combining OXC (Oxidation Compiler) for standard rules and a custom AST-based checker for Superset-specific patterns.
|
||||
|
||||
#### Quick Commands
|
||||
|
||||
```bash
|
||||
cd superset-frontend
|
||||
|
||||
# Run eslint checks
|
||||
# Run both OXC and custom rules
|
||||
npm run lint:full
|
||||
|
||||
# Run OXC linter only (faster for most checks)
|
||||
npm run lint
|
||||
|
||||
# Fix auto-fixable issues with OXC
|
||||
npm run lint-fix
|
||||
|
||||
# Run custom rules checker only
|
||||
npm run check:custom-rules
|
||||
|
||||
# Run tsc (typescript) checks
|
||||
npm run type
|
||||
|
||||
# Fix lint issues
|
||||
npm run lint-fix
|
||||
|
||||
# Format with Prettier
|
||||
npm run prettier
|
||||
```
|
||||
|
||||
#### Architecture
|
||||
|
||||
The linting system consists of two components:
|
||||
|
||||
1. **OXC Linter** (`oxlint`) - A Rust-based linter that's 50-100x faster than ESLint
|
||||
- Handles all standard JavaScript/TypeScript rules
|
||||
- Configured via `oxlint.json`
|
||||
- Runs via `npm run lint` or `npm run lint-fix`
|
||||
|
||||
2. **Custom Rules Checker** - A Node.js AST-based checker for Superset-specific patterns
|
||||
- Enforces no literal colors (use theme colors)
|
||||
- Prevents FontAwesome usage (use @superset-ui/core Icons)
|
||||
- Validates i18n template usage (no template variables)
|
||||
- Runs via `npm run check:custom-rules`
|
||||
|
||||
#### Why This Approach?
|
||||
|
||||
- **50-100x faster linting** compared to ESLint for standard rules via OXC
|
||||
- **Apache-compatible** - No custom binaries, ASF-friendly
|
||||
- **Maintainable** - Custom rules in JavaScript, not Rust
|
||||
- **Flexible** - Can evolve as OXC adds plugin support
|
||||
|
||||
#### Troubleshooting
|
||||
|
||||
**"Plugin 'basic-custom-plugin' not found" Error**
|
||||
|
||||
Ensure you're using the explicit config:
|
||||
```bash
|
||||
npx oxlint --config oxlint.json
|
||||
```
|
||||
|
||||
**Custom Rules Not Running**
|
||||
|
||||
Verify the AST parsing dependencies are installed:
|
||||
```bash
|
||||
npm ls @babel/parser @babel/traverse glob
|
||||
```
|
||||
|
||||
#### Adding New Custom Rules
|
||||
|
||||
1. Edit `scripts/check-custom-rules.js`
|
||||
2. Add a new check function following the AST visitor pattern
|
||||
3. Call the function in `processFile()`
|
||||
4. Test with `npm run check:custom-rules`
|
||||
|
||||
## GitHub Ephemeral Environments
|
||||
|
||||
For every PR, an ephemeral environment is automatically deployed for testing.
|
||||
|
||||
@@ -1,131 +0,0 @@
|
||||
---
|
||||
title: Alert
|
||||
sidebar_label: Alert
|
||||
---
|
||||
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
import { StoryWithControls } from '../../../src/components/StorybookWrapper';
|
||||
import { Alert } from '@apache-superset/core/ui';
|
||||
|
||||
# Alert
|
||||
|
||||
Alert component for displaying important messages to users. Wraps Ant Design Alert with sensible defaults and improved accessibility.
|
||||
|
||||
## Live Example
|
||||
|
||||
<StoryWithControls
|
||||
component={Alert}
|
||||
props={{
|
||||
closable: true,
|
||||
type: 'info',
|
||||
message: 'This is a sample alert message.',
|
||||
description: 'Sample description for additional context.',
|
||||
showIcon: true
|
||||
}}
|
||||
controls={[
|
||||
{
|
||||
name: 'type',
|
||||
label: 'Type',
|
||||
type: 'select',
|
||||
options: [
|
||||
'info',
|
||||
'error',
|
||||
'warning',
|
||||
'success'
|
||||
]
|
||||
},
|
||||
{
|
||||
name: 'closable',
|
||||
label: 'Closable',
|
||||
type: 'boolean'
|
||||
},
|
||||
{
|
||||
name: 'showIcon',
|
||||
label: 'Show Icon',
|
||||
type: 'boolean'
|
||||
},
|
||||
{
|
||||
name: 'message',
|
||||
label: 'Message',
|
||||
type: 'text'
|
||||
},
|
||||
{
|
||||
name: 'description',
|
||||
label: 'Description',
|
||||
type: 'text'
|
||||
}
|
||||
]}
|
||||
/>
|
||||
|
||||
## Try It
|
||||
|
||||
Edit the code below to experiment with the component:
|
||||
|
||||
```tsx live
|
||||
function Demo() {
|
||||
return (
|
||||
<Alert
|
||||
closable
|
||||
type="info"
|
||||
message="This is a sample alert message."
|
||||
description="Sample description for additional context."
|
||||
showIcon
|
||||
/>
|
||||
);
|
||||
}
|
||||
```
|
||||
|
||||
## Props
|
||||
|
||||
| Prop | Type | Default | Description |
|
||||
|------|------|---------|-------------|
|
||||
| `closable` | `boolean` | `true` | Whether the Alert can be closed with a close button. |
|
||||
| `type` | `string` | `"info"` | Type of the alert (e.g., info, error, warning, success). |
|
||||
| `message` | `string` | `"This is a sample alert message."` | Message |
|
||||
| `description` | `string` | `"Sample description for additional context."` | Description |
|
||||
| `showIcon` | `boolean` | `true` | Whether to display an icon in the Alert. |
|
||||
|
||||
## Usage in Extensions
|
||||
|
||||
This component is available in the `@apache-superset/core/ui` package, which is automatically available to Superset extensions.
|
||||
|
||||
```tsx
|
||||
import { Alert } from '@apache-superset/core/ui';
|
||||
|
||||
function MyExtension() {
|
||||
return (
|
||||
<Alert
|
||||
closable
|
||||
type="info"
|
||||
message="This is a sample alert message."
|
||||
/>
|
||||
);
|
||||
}
|
||||
```
|
||||
|
||||
## Source Links
|
||||
|
||||
- [Story file](https://github.com/apache/superset/blob/master/superset-frontend/packages/superset-core/src/ui/components/Alert/Alert.stories.tsx)
|
||||
- [Component source](https://github.com/apache/superset/blob/master/superset-frontend/packages/superset-core/src/ui/components/Alert/index.tsx)
|
||||
|
||||
---
|
||||
|
||||
*This page was auto-generated from the component's Storybook story.*
|
||||
@@ -1,93 +0,0 @@
|
||||
---
|
||||
title: Extension Components
|
||||
sidebar_label: Overview
|
||||
sidebar_position: 1
|
||||
---
|
||||
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
# Extension Components
|
||||
|
||||
These UI components are available to Superset extension developers through the `@apache-superset/core/ui` package. They provide a consistent look and feel with the rest of Superset and are designed to be used in extension panels, views, and other UI elements.
|
||||
|
||||
## Available Components
|
||||
|
||||
- [Alert](./alert)
|
||||
|
||||
## Usage
|
||||
|
||||
All components are exported from the `@apache-superset/core/ui` package:
|
||||
|
||||
```tsx
|
||||
import { Alert } from '@apache-superset/core/ui';
|
||||
|
||||
export function MyExtensionPanel() {
|
||||
return (
|
||||
<Alert type="info">
|
||||
Welcome to my extension!
|
||||
</Alert>
|
||||
);
|
||||
}
|
||||
```
|
||||
|
||||
## Adding New Components
|
||||
|
||||
Components in `@apache-superset/core/ui` are automatically documented here. To add a new extension component:
|
||||
|
||||
1. Add the component to `superset-frontend/packages/superset-core/src/ui/components/`
|
||||
2. Export it from `superset-frontend/packages/superset-core/src/ui/components/index.ts`
|
||||
3. Create a Storybook story with an `Interactive` export:
|
||||
|
||||
```tsx
|
||||
export default {
|
||||
title: 'Extension Components/MyComponent',
|
||||
component: MyComponent,
|
||||
parameters: {
|
||||
docs: {
|
||||
description: {
|
||||
component: 'Description of the component...',
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
export const InteractiveMyComponent = (args) => <MyComponent {...args} />;
|
||||
|
||||
InteractiveMyComponent.args = {
|
||||
variant: 'primary',
|
||||
disabled: false,
|
||||
};
|
||||
|
||||
InteractiveMyComponent.argTypes = {
|
||||
variant: {
|
||||
control: { type: 'select' },
|
||||
options: ['primary', 'secondary'],
|
||||
},
|
||||
disabled: {
|
||||
control: { type: 'boolean' },
|
||||
},
|
||||
};
|
||||
```
|
||||
|
||||
4. Run `yarn start` in `docs/` - the page generates automatically!
|
||||
|
||||
## Interactive Documentation
|
||||
|
||||
For interactive examples with controls, visit the [Storybook](/storybook/?path=/docs/extension-components--docs).
|
||||
@@ -34,7 +34,7 @@ Frontend contribution types allow extensions to extend Superset's user interface
|
||||
|
||||
Extensions can add new views or panels to the host application, such as custom SQL Lab panels, dashboards, or other UI components. Each view is registered with a unique ID and can be activated or deactivated as needed. Contribution areas are uniquely identified (e.g., `sqllab.panels` for SQL Lab panels), enabling seamless integration into specific parts of the application.
|
||||
|
||||
``` json
|
||||
```json
|
||||
"frontend": {
|
||||
"contributions": {
|
||||
"views": {
|
||||
@@ -53,7 +53,7 @@ Extensions can add new views or panels to the host application, such as custom S
|
||||
|
||||
Extensions can define custom commands that can be executed within the host application, such as context-aware actions or menu options. Each command can specify properties like a unique command identifier, an icon, a title, and a description. These commands can be invoked by users through menus, keyboard shortcuts, or other UI elements, enabling extensions to add rich, interactive functionality to Superset.
|
||||
|
||||
``` json
|
||||
```json
|
||||
"frontend": {
|
||||
"contributions": {
|
||||
"commands": [
|
||||
@@ -72,7 +72,7 @@ Extensions can define custom commands that can be executed within the host appli
|
||||
|
||||
Extensions can contribute new menu items or context menus to the host application, providing users with additional actions and options. Each menu item can specify properties such as the target view, the command to execute, its placement (primary, secondary, or context), and conditions for when it should be displayed. Menu contribution areas are uniquely identified (e.g., `sqllab.editor` for the SQL Lab editor), allowing extensions to seamlessly integrate their functionality into specific menus and workflows within Superset.
|
||||
|
||||
``` json
|
||||
```json
|
||||
"frontend": {
|
||||
"contributions": {
|
||||
"menus": {
|
||||
@@ -101,6 +101,27 @@ Extensions can contribute new menu items or context menus to the host applicatio
|
||||
}
|
||||
```
|
||||
|
||||
### Editors
|
||||
|
||||
Extensions can replace Superset's default text editors with custom implementations. This enables enhanced editing experiences using alternative editor frameworks like Monaco, CodeMirror, or custom solutions. When an extension registers an editor for a language, it replaces the default Ace editor in all locations that use that language (SQL Lab, Dashboard Properties, CSS editors, etc.).
|
||||
|
||||
```json
|
||||
"frontend": {
|
||||
"contributions": {
|
||||
"editors": [
|
||||
{
|
||||
"id": "my_extension.monaco_sql",
|
||||
"name": "Monaco SQL Editor",
|
||||
"languages": ["sql"],
|
||||
"description": "Monaco-based SQL editor with IntelliSense"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
See [Editors Extension Point](./extension-points/editors) for implementation details.
|
||||
|
||||
## Backend
|
||||
|
||||
Backend contribution types allow extensions to extend Superset's server-side capabilities with new API endpoints, MCP tools, and MCP prompts.
|
||||
@@ -109,7 +130,7 @@ Backend contribution types allow extensions to extend Superset's server-side cap
|
||||
|
||||
Extensions can register custom REST API endpoints under the `/api/v1/extensions/` namespace. This dedicated namespace prevents conflicts with built-in endpoints and provides a clear separation between core and extension functionality.
|
||||
|
||||
``` json
|
||||
```json
|
||||
"backend": {
|
||||
"entryPoints": ["my_extension.entrypoint"],
|
||||
"files": ["backend/src/my_extension/**/*.py"]
|
||||
@@ -118,7 +139,7 @@ Extensions can register custom REST API endpoints under the `/api/v1/extensions/
|
||||
|
||||
The entry point module registers the API with Superset:
|
||||
|
||||
``` python
|
||||
```python
|
||||
from superset_core.api.rest_api import add_extension_api
|
||||
from .api import MyExtensionAPI
|
||||
|
||||
|
||||
245
docs/developer_portal/extensions/extension-points/editors.md
Normal file
@@ -0,0 +1,245 @@
|
||||
---
|
||||
title: Editors
|
||||
sidebar_position: 2
|
||||
---
|
||||
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
# Editor Contributions
|
||||
|
||||
Extensions can replace Superset's default text editors with custom implementations. This allows you to provide enhanced editing experiences using alternative editor frameworks like Monaco, CodeMirror, or custom solutions.
|
||||
|
||||
## Overview
|
||||
|
||||
Superset uses text editors in various places throughout the application:
|
||||
|
||||
| Language | Locations |
|
||||
|----------|-----------|
|
||||
| `sql` | SQL Lab, Metric/Filter Popovers |
|
||||
| `json` | Dashboard Properties, Annotation Modal, Theme Modal |
|
||||
| `css` | Dashboard Properties, CSS Template Modal |
|
||||
| `markdown` | Dashboard Markdown component |
|
||||
| `yaml` | Template Params Editor |
|
||||
|
||||
By registering an editor provider for a language, your extension replaces the default Ace editor in **all** locations that use that language.
|
||||
|
||||
## Manifest Configuration
|
||||
|
||||
Declare editor contributions in your `extension.json` manifest:
|
||||
|
||||
```json
|
||||
{
|
||||
"name": "monaco-editor",
|
||||
"version": "1.0.0",
|
||||
"frontend": {
|
||||
"contributions": {
|
||||
"editors": [
|
||||
{
|
||||
"id": "monaco-editor.sql",
|
||||
"name": "Monaco SQL Editor",
|
||||
"languages": ["sql"],
|
||||
"description": "Monaco-based SQL editor with IntelliSense"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Implementing an Editor
|
||||
|
||||
Your editor component must implement the `EditorProps` interface and expose an `EditorHandle` via `forwardRef`. For the complete interface definitions, see `@apache-superset/core/api/editors.ts`.
|
||||
|
||||
### Key EditorProps
|
||||
|
||||
```typescript
|
||||
interface EditorProps {
|
||||
/** Controlled value */
|
||||
value: string;
|
||||
/** Content change handler */
|
||||
onChange: (value: string) => void;
|
||||
/** Language mode for syntax highlighting */
|
||||
language: EditorLanguage;
|
||||
/** Keyboard shortcuts to register */
|
||||
hotkeys?: EditorHotkey[];
|
||||
/** Callback when editor is ready with imperative handle */
|
||||
onReady?: (handle: EditorHandle) => void;
|
||||
/** Host-specific context (e.g., database info from SQL Lab) */
|
||||
metadata?: Record<string, unknown>;
|
||||
// ... additional props for styling, annotations, etc.
|
||||
}
|
||||
```
|
||||
|
||||
### Key EditorHandle Methods
|
||||
|
||||
```typescript
|
||||
interface EditorHandle {
|
||||
/** Focus the editor */
|
||||
focus(): void;
|
||||
/** Get the current editor content */
|
||||
getValue(): string;
|
||||
/** Get the current cursor position */
|
||||
getCursorPosition(): Position;
|
||||
/** Move the cursor to a specific position */
|
||||
moveCursorToPosition(position: Position): void;
|
||||
/** Set the selection range */
|
||||
setSelection(selection: Range): void;
|
||||
/** Scroll to a specific line */
|
||||
scrollToLine(line: number): void;
|
||||
// ... additional methods for text manipulation, annotations, etc.
|
||||
}
|
||||
```
|
||||
|
||||
## Example Implementation
|
||||
|
||||
Here's an example of a Monaco-based SQL editor implementing the key interfaces shown above:
|
||||
|
||||
### MonacoSQLEditor.tsx
|
||||
|
||||
```typescript
|
||||
import { forwardRef, useRef, useImperativeHandle, useEffect } from 'react';
|
||||
import * as monaco from 'monaco-editor';
|
||||
import type { editors } from '@apache-superset/core';
|
||||
|
||||
const MonacoSQLEditor = forwardRef<editors.EditorHandle, editors.EditorProps>(
|
||||
(props, ref) => {
|
||||
const { value, onChange, hotkeys, onReady } = props;
|
||||
const containerRef = useRef<HTMLDivElement>(null);
|
||||
const editorRef = useRef<monaco.editor.IStandaloneCodeEditor | null>(null);
|
||||
|
||||
// Implement EditorHandle interface
|
||||
const handle: editors.EditorHandle = {
|
||||
focus: () => editorRef.current?.focus(),
|
||||
getValue: () => editorRef.current?.getValue() ?? '',
|
||||
getCursorPosition: () => {
|
||||
const pos = editorRef.current?.getPosition();
|
||||
return { line: (pos?.lineNumber ?? 1) - 1, column: (pos?.column ?? 1) - 1 };
|
||||
},
|
||||
// ... implement remaining methods
|
||||
};
|
||||
|
||||
useImperativeHandle(ref, () => handle, []);
|
||||
|
||||
useEffect(() => {
|
||||
if (!containerRef.current) return;
|
||||
|
||||
const editor = monaco.editor.create(containerRef.current, { value, language: 'sql' });
|
||||
editorRef.current = editor;
|
||||
|
||||
editor.onDidChangeModelContent(() => onChange(editor.getValue()));
|
||||
|
||||
// Register hotkeys
|
||||
hotkeys?.forEach(hotkey => {
|
||||
editor.addAction({
|
||||
id: hotkey.name,
|
||||
label: hotkey.name,
|
||||
run: () => hotkey.exec(handle),
|
||||
});
|
||||
});
|
||||
|
||||
onReady?.(handle);
|
||||
return () => editor.dispose();
|
||||
}, []);
|
||||
|
||||
return <div ref={containerRef} style={{ height: '100%', width: '100%' }} />;
|
||||
},
|
||||
);
|
||||
|
||||
export default MonacoSQLEditor;
|
||||
```
|
||||
|
||||
### activate.ts
|
||||
|
||||
```typescript
|
||||
import { editors } from '@apache-superset/core';
|
||||
import MonacoSQLEditor from './MonacoSQLEditor';
|
||||
|
||||
export function activate(context) {
|
||||
// Register the Monaco editor for SQL
|
||||
const disposable = editors.registerEditorProvider(
|
||||
{
|
||||
id: 'monaco-sql-editor.sql',
|
||||
name: 'Monaco SQL Editor',
|
||||
languages: ['sql'],
|
||||
},
|
||||
MonacoSQLEditor,
|
||||
);
|
||||
|
||||
context.subscriptions.push(disposable);
|
||||
}
|
||||
```
|
||||
|
||||
## Handling Hotkeys
|
||||
|
||||
Superset passes keyboard shortcuts via the `hotkeys` prop. Each hotkey includes an `exec` function that receives the `EditorHandle`:
|
||||
|
||||
```typescript
|
||||
interface EditorHotkey {
|
||||
name: string;
|
||||
key: string; // e.g., "Ctrl-Enter", "Alt-Shift-F"
|
||||
description?: string;
|
||||
exec: (handle: EditorHandle) => void;
|
||||
}
|
||||
```
|
||||
|
||||
Your editor must register these hotkeys with your editor framework and call `exec(handle)` when triggered.
|
||||
|
||||
## Keywords
|
||||
|
||||
Superset passes static autocomplete suggestions via the `keywords` prop. These include table names, column names, and SQL functions based on the current database context:
|
||||
|
||||
```typescript
|
||||
interface EditorKeyword {
|
||||
name: string;
|
||||
value?: string; // Text to insert (defaults to name)
|
||||
meta?: string; // Category like "table", "column", "function"
|
||||
score?: number; // Sorting priority
|
||||
}
|
||||
```
|
||||
|
||||
Your editor should convert these to your framework's completion format and register them for autocomplete.
|
||||
|
||||
## Completion Providers
|
||||
|
||||
For dynamic autocomplete (e.g., fetching suggestions as the user types), implement and register a `CompletionProvider` via the `EditorHandle`:
|
||||
|
||||
```typescript
|
||||
const provider: CompletionProvider = {
|
||||
id: 'my-sql-completions',
|
||||
triggerCharacters: ['.', ' '],
|
||||
provideCompletions: async (content, position, context) => {
|
||||
// Use context.metadata for database info
|
||||
// Return array of CompletionItem
|
||||
return [
|
||||
{ label: 'SELECT', insertText: 'SELECT', kind: 'keyword' },
|
||||
// ...
|
||||
];
|
||||
},
|
||||
};
|
||||
|
||||
// Register during editor initialization
|
||||
const disposable = handle.registerCompletionProvider(provider);
|
||||
```
|
||||
|
||||
## Next Steps
|
||||
|
||||
- **[SQL Lab Extension Points](./sqllab)** - Learn about other SQL Lab customizations
|
||||
- **[Contribution Types](../contribution-types)** - Explore other contribution types
|
||||
- **[Development](../development)** - Set up your development environment
|
||||
@@ -38,6 +38,7 @@ This page serves as a registry of community-created Superset extensions. These e
|
||||
| [SQL Lab Result Stats](https://github.com/michael-s-molina/superset-extensions/tree/main/result_stats) | A SQL Lab extension that automatically computes statistics for query results, providing type-aware analysis including numeric metrics (min, max, mean, median, std dev), string analysis (length, empty counts), and date range information. | Michael S. Molina | <a href="/img/extensions/result-stats.png" target="_blank"><img src="/img/extensions/result-stats.png" alt="Result Stats" width="120" /></a> |
|
||||
| [SQL Snippets](https://github.com/michael-s-molina/superset-extensions/tree/main/sql_snippets) | A SQL Lab extension that provides reusable SQL code snippets, enabling quick insertion of commonly used code blocks such as license headers, author information, and frequently used SQL patterns. | Michael S. Molina | <a href="/img/extensions/sql-snippets.png" target="_blank"><img src="/img/extensions/sql-snippets.png" alt="SQL Snippets" width="120" /></a> |
|
||||
| [SQL Lab Query Estimator](https://github.com/michael-s-molina/superset-extensions/tree/main/query_estimator) | A SQL Lab panel that analyzes query execution plans to estimate resource impact, detect performance issues like Cartesian products and high-cost operations, and visualize the query plan tree. | Michael S. Molina | <a href="/img/extensions/query-estimator.png" target="_blank"><img src="/img/extensions/query-estimator.png" alt="Query Estimator" width="120" /></a> |
|
||||
| [Editors Bundle](https://github.com/michael-s-molina/superset-extensions/tree/main/editors_bundle) | A Superset extension that demonstrates how to provide custom code editors for different languages. This extension showcases the editor contribution system by registering alternative editors that can replace Superset's default Ace editor. | Michael S. Molina | <a href="/img/extensions/editors-bundle.png" target="_blank"><img src="/img/extensions/editors-bundle.png" alt="Editors Bundle" width="120" /></a> |
|
||||
|
||||
## How to Add Your Extension
|
||||
|
||||
|
||||
@@ -43,8 +43,9 @@ This is a list of statements that describe how we do frontend development in Sup
|
||||
- We organize our repo so similar files live near each other, and tests are co-located with the files they test.
|
||||
- See: [SIP-61](https://github.com/apache/superset/issues/12098)
|
||||
- We prefer small, easily testable files and components.
|
||||
- We use OXC (oxlint) and Prettier to automatically fix lint errors and format the code.
|
||||
- We do not debate code formatting style in PRs, instead relying on automated tooling to enforce it.
|
||||
- If there's not a linting rule, we don't have a rule!
|
||||
- See: [Linting How-Tos](../contributing/howtos#typescript--javascript)
|
||||
- We use [React Storybook](https://storybook.js.org/) and [Applitools](https://applitools.com/) to help preview/test and stabilize our components
|
||||
- A public Storybook with components from the `master` branch is available [here](https://apache-superset.github.io/superset-ui/?path=/story/*)
|
||||
|
||||
@@ -86,7 +86,6 @@ Everything you need to contribute to the Apache Superset project. This section i
|
||||
- **[Configuration Guide](https://superset.apache.org/docs/configuration/configuring-superset)** - Setup and configuration
|
||||
|
||||
### Important Files
|
||||
- **[CONTRIBUTING.md](https://github.com/apache/superset/blob/master/CONTRIBUTING.md)** - Contribution guidelines
|
||||
- **[CLAUDE.md](https://github.com/apache/superset/blob/master/CLAUDE.md)** - LLM development guide
|
||||
- **[UPDATING.md](https://github.com/apache/superset/blob/master/UPDATING.md)** - Breaking changes log
|
||||
|
||||
|
||||
@@ -26,6 +26,9 @@ module.exports = {
|
||||
collapsed: true,
|
||||
items: [
|
||||
'contributing/overview',
|
||||
'guidelines/design-guidelines',
|
||||
'guidelines/frontend-style-guidelines',
|
||||
'guidelines/backend-style-guidelines',
|
||||
],
|
||||
},
|
||||
{
|
||||
@@ -61,5 +64,20 @@ module.exports = {
|
||||
'testing/overview',
|
||||
],
|
||||
},
|
||||
{
|
||||
type: 'category',
|
||||
label: 'UI Components',
|
||||
collapsed: true,
|
||||
link: {
|
||||
type: 'doc',
|
||||
id: 'components/index',
|
||||
},
|
||||
items: [
|
||||
{
|
||||
type: 'autogenerated',
|
||||
dirName: 'components',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
@@ -24,57 +24,204 @@ under the License.
|
||||
|
||||
# End-to-End Testing
|
||||
|
||||
Apache Superset uses Playwright for end-to-end testing, migrating from the legacy Cypress tests.
|
||||
|
||||
## Running Tests

### Playwright (Recommended)
|
||||
```bash
|
||||
cd superset-frontend

# Run all tests
npm run playwright:test
# or: npx playwright test

# Run specific test file
npx playwright test tests/auth/login.spec.ts

# Run with UI mode for debugging
npm run playwright:ui
# or: npx playwright test --ui

# Run in headed mode (see browser)
npm run playwright:headed
# or: npx playwright test --headed

# Debug specific test file
npm run playwright:debug tests/auth/login.spec.ts
# or: npx playwright test --debug tests/auth/login.spec.ts
|
||||
```
|
||||
|
||||
### Cypress (Deprecated)
|
||||
Cypress tests are being migrated to Playwright. For legacy tests:

```bash
cd superset-frontend/cypress-base
npm run cypress-run-chrome  # Headless
npm run cypress-debug       # Interactive UI
```
|
||||
|
||||
---
|
||||
## Project Architecture
|
||||
```
|
||||
superset-frontend/playwright/
|
||||
├── components/core/ # Reusable UI components
|
||||
├── pages/ # Page Object Models
|
||||
├── tests/ # Test files organized by feature
|
||||
├── utils/ # Shared constants and utilities
|
||||
└── playwright.config.ts
|
||||
```
|
||||
|
||||
## Design Principles
|
||||
|
||||
We follow **YAGNI** (You Aren't Gonna Need It), **DRY** (Don't Repeat Yourself), and **KISS** (Keep It Simple, Stupid) principles:
|
||||
|
||||
- Build only what's needed now
|
||||
- Reuse existing patterns and components
|
||||
- Keep solutions simple and maintainable
|
||||
|
||||
## Page Object Pattern
|
||||
|
||||
Each page object encapsulates:
|
||||
|
||||
- **Actions**: What you can do on the page
|
||||
- **Queries**: Information you can get from the page
|
||||
- **Selectors**: Centralized in private static SELECTORS constant
|
||||
- **NO Assertions**: Keep assertions in test files
|
||||
|
||||
**Example Page Object:**
|
||||
|
||||
```typescript
|
||||
export class AuthPage {
|
||||
// Selectors centralized in the page object
|
||||
private static readonly SELECTORS = {
|
||||
LOGIN_FORM: '[data-test="login-form"]',
|
||||
USERNAME_INPUT: '[data-test="username-input"]',
|
||||
} as const;
|
||||
|
||||
// Actions - what you can do
|
||||
async loginWithCredentials(username: string, password: string) {}
|
||||
|
||||
// Queries - information you can get
|
||||
async getCurrentUrl(): Promise<string> {}
|
||||
|
||||
// NO assertions - those belong in tests
|
||||
}
|
||||
```
|
||||
|
||||
**Example Test:**
|
||||
|
||||
```typescript
|
||||
import { test, expect } from '@playwright/test';
|
||||
import { AuthPage } from '../../pages/AuthPage';
|
||||
import { LOGIN } from '../../utils/urls';
|
||||
|
||||
test('should login with correct credentials', async ({ page }) => {
|
||||
const authPage = new AuthPage(page);
|
||||
await authPage.goto();
|
||||
await authPage.loginWithCredentials('admin', 'general');
|
||||
|
||||
// Assertions belong in tests, not page objects
|
||||
expect(await authPage.getCurrentUrl()).not.toContain(LOGIN);
|
||||
});
|
||||
```
|
||||
|
||||
## Core Components
|
||||
|
||||
Reusable UI interaction classes for common elements (`components/core/`):
|
||||
|
||||
- **Form**: Container with properly scoped child element access
|
||||
- **Input**: Supports `fill()`, `type()`, and `pressSequentially()` methods
|
||||
- **Button**: Standard click, hover, focus interactions
|
||||
|
||||
**Usage Example:**
|
||||
|
||||
```typescript
|
||||
import { Form } from '../components/core';
|
||||
|
||||
const loginForm = new Form(page, '[data-test="login-form"]');
|
||||
const usernameInput = loginForm.getInput('[data-test="username-input"]');
|
||||
await usernameInput.fill('admin');
|
||||
```
|
||||
|
||||
## Test Reports
|
||||
|
||||
Playwright generates multiple reports for better visibility:
|
||||
|
||||
```bash
|
||||
# View interactive HTML report (opens automatically on failure)
|
||||
npm run playwright:report
|
||||
# or: npx playwright show-report
|
||||
|
||||
# View test trace for debugging failures
|
||||
npx playwright show-trace test-results/[test-name]/trace.zip
|
||||
```
|
||||
|
||||
### Report Types
|
||||
|
||||
- **List Reporter**: Shows progress and summary table in terminal
|
||||
- **HTML Report**: Interactive web interface with screenshots, videos, and traces
|
||||
- **JSON Report**: Machine-readable format in `test-results/results.json`
|
||||
- **GitHub Actions**: Annotations in CI for failed tests
|
||||
|
||||
### Debugging Failed Tests
|
||||
|
||||
When tests fail, Playwright automatically captures:
|
||||
|
||||
- **Screenshots** at the point of failure
|
||||
- **Videos** of the entire test run
|
||||
- **Traces** with timeline and network activity
|
||||
- **Error context** with detailed debugging information
|
||||
|
||||
All debugging artifacts are available in the HTML report for easy analysis.
|
||||
|
||||
## Configuration
|
||||
|
||||
- **Config**: `playwright.config.ts` - matches Cypress settings
|
||||
- **Base URL**: `http://localhost:8088` (assumes Superset running)
|
||||
- **Browsers**: Chrome only for Phase 1 (YAGNI)
|
||||
- **Retries**: 2 in CI, 0 locally (matches Cypress)
|
||||
|
||||
## Contributing Guidelines
|
||||
|
||||
### Adding New Tests
|
||||
|
||||
1. **Check existing components** before creating new ones
|
||||
2. **Use page objects** for page interactions
|
||||
3. **Keep assertions in tests**, not page objects
|
||||
4. **Follow naming conventions**: `feature.spec.ts`
|
||||
|
||||
### Adding New Components
|
||||
|
||||
1. **Follow YAGNI**: Only build what's immediately needed
|
||||
2. **Use Locator-based scoping** for proper element isolation
|
||||
3. **Support both string selectors and Locator objects** via constructor overloads
|
||||
4. **Add to `components/core/index.ts`** for easy importing
|
||||
|
||||
### Adding New Page Objects
|
||||
|
||||
1. **Centralize selectors** in private static SELECTORS constant
|
||||
2. **Import shared constants** from `utils/urls.ts`
|
||||
3. **Actions and queries only** - no assertions
|
||||
4. **Use existing components** for DOM interactions
|
||||
|
||||
## Migration from Cypress
|
||||
|
||||
When porting Cypress tests:
|
||||
|
||||
1. **Port the logic**, not the implementation
|
||||
2. **Use page objects** instead of inline selectors
|
||||
3. **Replace `cy.intercept/cy.wait`** with `page.waitForRequest()`
|
||||
4. **Use shared constants** from `utils/urls.ts`
|
||||
5. **Follow the established patterns** shown in `tests/auth/login.spec.ts`
|
||||
|
||||
## Best Practices
|
||||
|
||||
- **Centralize selectors** in page objects
|
||||
- **Centralize URLs** in `utils/urls.ts`
|
||||
- **Use meaningful test descriptions**
|
||||
- **Keep page objects action-focused**
|
||||
- **Put assertions in tests, not page objects**
|
||||
- **Follow the existing patterns** for consistency
|
||||
|
||||
114
docs/developer_portal/testing/storybook.md
Normal file
@@ -0,0 +1,114 @@
|
||||
---
|
||||
title: Storybook
|
||||
sidebar_position: 5
|
||||
---
|
||||
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
# Storybook
|
||||
|
||||
Superset uses [Storybook](https://storybook.js.org/) for developing and testing UI components in isolation. Storybook provides a sandbox to build components independently, outside of the main application.
|
||||
|
||||
## Public Storybook
|
||||
|
||||
A public Storybook with components from the `master` branch is available at:
|
||||
|
||||
**[apache-superset.github.io/superset-ui](https://apache-superset.github.io/superset-ui/?path=/story/*)**
|
||||
|
||||
## Running Locally
|
||||
|
||||
### Main Superset Storybook
|
||||
|
||||
To run the main Superset Storybook locally:
|
||||
|
||||
```bash
|
||||
cd superset-frontend
|
||||
|
||||
# Start Storybook (opens at http://localhost:6006)
|
||||
npm run storybook
|
||||
|
||||
# Build static Storybook
|
||||
npm run build-storybook
|
||||
```
|
||||
|
||||
### @superset-ui Package Storybook
|
||||
|
||||
The `@superset-ui` packages have a separate Storybook for component library development:
|
||||
|
||||
```bash
|
||||
cd superset-frontend
|
||||
|
||||
# Install dependencies and bootstrap packages
|
||||
npm ci && npm run bootstrap
|
||||
|
||||
# Start the @superset-ui Storybook (opens at http://localhost:9001)
|
||||
cd packages/superset-ui-demo
|
||||
npm run storybook
|
||||
```
|
||||
|
||||
## Adding Stories
|
||||
|
||||
### To an Existing Package
|
||||
|
||||
If stories already exist for the package, extend the `examples` array in the package's story file:
|
||||
|
||||
```
|
||||
storybook/stories/<package>/index.js
|
||||
```
|
||||
|
||||
### To a New Package
|
||||
|
||||
1. Add package dependencies:
|
||||
|
||||
```bash
|
||||
npm install <package>
|
||||
```
|
||||
|
||||
2. Create a story folder matching the package name:
|
||||
|
||||
```bash
|
||||
mkdir storybook/stories/superset-ui-<package>/
|
||||
```
|
||||
|
||||
3. Create an `index.js` file with the story configuration:
|
||||
|
||||
```javascript
|
||||
export default {
|
||||
examples: [
|
||||
{
|
||||
storyPath: '@superset-ui/package',
|
||||
storyName: 'My Story',
|
||||
renderStory: () => <MyComponent />,
|
||||
},
|
||||
],
|
||||
};
|
||||
```
|
||||
|
||||
Use the `|` separator for nested stories:
|
||||
```javascript
|
||||
storyPath: '@superset-ui/package|Category|Subcategory'
|
||||
```
|
||||
|
||||
## Best Practices
|
||||
|
||||
- **Isolate components**: Stories should render components in isolation, without application context
|
||||
- **Show variations**: Create stories for different states, sizes, and configurations
|
||||
- **Document props**: Use Storybook's controls to expose configurable props
|
||||
- **Test edge cases**: Include stories for loading states, error states, and empty states
|
||||
@@ -1,40 +1,590 @@
|
||||
---
|
||||
title: API Reference
|
||||
hide_title: true
|
||||
sidebar_position: 10
|
||||
---
|
||||
|
||||
import SwaggerUI from 'swagger-ui-react';
|
||||
import openapi from '/resources/openapi.json';
|
||||
import 'swagger-ui-react/swagger-ui.css';
|
||||
import { Alert } from 'antd';
|
||||
|
||||
## API
|
||||
## REST API Reference
|
||||
|
||||
Superset's public **REST API** follows the
|
||||
[OpenAPI specification](https://swagger.io/specification/), and is
|
||||
documented here. The docs below are generated using
|
||||
[Swagger React UI](https://www.npmjs.com/package/swagger-ui-react).
|
||||
|
||||
:::resources
|
||||
- [Blog: The Superset REST API](https://preset.io/blog/2020-10-01-superset-api/)
|
||||
- [Blog: Accessing APIs with Superset](https://preset.io/blog/accessing-apis-with-superset/)
|
||||
:::
|
||||
Superset exposes a comprehensive **REST API** that follows the [OpenAPI specification](https://swagger.io/specification/).
|
||||
You can use this API to programmatically interact with Superset for automation, integrations, and custom applications.
|
||||
|
||||
<Alert
|
||||
type="info"
|
||||
message={
|
||||
<div>
|
||||
<strong>NOTE! </strong>
|
||||
You can find an interactive version of this documentation on your local Superset
|
||||
instance at <strong>/swagger/v1</strong> (unless disabled)
|
||||
</div>
|
||||
showIcon
|
||||
message="Code Samples & Schema Documentation"
|
||||
description={
|
||||
<span>
|
||||
Each endpoint includes ready-to-use code samples in <strong>cURL</strong>, <strong>Python</strong>, and <strong>JavaScript</strong>.
|
||||
The sidebar includes <strong>Schema definitions</strong> for detailed data model documentation.
|
||||
</span>
|
||||
}
|
||||
style={{ marginBottom: '24px' }}
|
||||
/>
|
||||
|
||||
<br />
|
||||
<br />
|
||||
<hr />
|
||||
<div className="swagger-container">
|
||||
<SwaggerUI spec={openapi} />
|
||||
</div>
|
||||
---
|
||||
|
||||
### Authentication
|
||||
|
||||
Most API endpoints require authentication via JWT tokens.
|
||||
|
||||
#### Quick Start
|
||||
|
||||
```bash
|
||||
# 1. Get a JWT token
|
||||
curl -X POST http://localhost:8088/api/v1/security/login \
|
||||
-H "Content-Type: application/json" \
|
||||
-d '{"username": "admin", "password": "admin", "provider": "db"}'
|
||||
|
||||
# 2. Use the access_token from the response
|
||||
curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
|
||||
http://localhost:8088/api/v1/dashboard/
|
||||
```
|
||||
|
||||
#### Security Endpoints
|
||||
|
||||
| Method | Description | Endpoint |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Get the CSRF token](./api/get-the-csrf-token) | `/api/v1/security/csrf_token/` |
|
||||
| `POST` | [Get a guest token](./api/get-a-guest-token) | `/api/v1/security/guest_token/` |
|
||||
| `POST` | [Create security login](./api/create-security-login) | `/api/v1/security/login` |
|
||||
| `POST` | [Create security refresh](./api/create-security-refresh) | `/api/v1/security/refresh` |
|
||||
|
||||
---
|
||||
|
||||
### API Endpoints
|
||||
|
||||
#### Core Resources
|
||||
|
||||
<details>
|
||||
<summary><strong>Dashboards</strong> (26 endpoints) — Create, read, update, and delete dashboards.</summary>
|
||||
|
||||
| Method | Description | Endpoint |
|
||||
|--------|----------|-------------|
|
||||
| `DELETE` | [Bulk delete dashboards](./api/bulk-delete-dashboards) | `/api/v1/dashboard/` |
|
||||
| `GET` | [Get a list of dashboards](./api/get-a-list-of-dashboards) | `/api/v1/dashboard/` |
|
||||
| `POST` | [Create a new dashboard](./api/create-a-new-dashboard) | `/api/v1/dashboard/` |
|
||||
| `GET` | [Get metadata information about this API resource (dashboard--info)](./api/get-metadata-information-about-this-api-resource-dashboard-info) | `/api/v1/dashboard/_info` |
|
||||
| `GET` | [Get a dashboard detail information](./api/get-a-dashboard-detail-information) | `/api/v1/dashboard/{id_or_slug}` |
|
||||
| `GET` | [Get a dashboard's chart definitions.](./api/get-a-dashboards-chart-definitions) | `/api/v1/dashboard/{id_or_slug}/charts` |
|
||||
| `POST` | [Create a copy of an existing dashboard](./api/create-a-copy-of-an-existing-dashboard) | `/api/v1/dashboard/{id_or_slug}/copy/` |
|
||||
| `GET` | [Get dashboard's datasets](./api/get-dashboards-datasets) | `/api/v1/dashboard/{id_or_slug}/datasets` |
|
||||
| `DELETE` | [Delete a dashboard's embedded configuration](./api/delete-a-dashboards-embedded-configuration) | `/api/v1/dashboard/{id_or_slug}/embedded` |
|
||||
| `GET` | [Get the dashboard's embedded configuration](./api/get-the-dashboards-embedded-configuration) | `/api/v1/dashboard/{id_or_slug}/embedded` |
|
||||
| `POST` | [Set a dashboard's embedded configuration](./api/set-a-dashboards-embedded-configuration) | `/api/v1/dashboard/{id_or_slug}/embedded` |
|
||||
| `PUT` | [Update dashboard by id_or_slug embedded](./api/update-dashboard-by-id-or-slug-embedded) | `/api/v1/dashboard/{id_or_slug}/embedded` |
|
||||
| `GET` | [Get dashboard's tabs](./api/get-dashboards-tabs) | `/api/v1/dashboard/{id_or_slug}/tabs` |
|
||||
| `DELETE` | [Delete a dashboard](./api/delete-a-dashboard) | `/api/v1/dashboard/{pk}` |
|
||||
| `PUT` | [Update a dashboard](./api/update-a-dashboard) | `/api/v1/dashboard/{pk}` |
|
||||
| `POST` | [Compute and cache a screenshot (dashboard-pk-cache-dashboard-screenshot)](./api/compute-and-cache-a-screenshot-dashboard-pk-cache-dashboard-screenshot) | `/api/v1/dashboard/{pk}/cache_dashboard_screenshot/` |
|
||||
| `PUT` | [Update colors configuration for a dashboard.](./api/update-colors-configuration-for-a-dashboard) | `/api/v1/dashboard/{pk}/colors` |
|
||||
| `DELETE` | [Remove the dashboard from the user favorite list](./api/remove-the-dashboard-from-the-user-favorite-list) | `/api/v1/dashboard/{pk}/favorites/` |
|
||||
| `POST` | [Mark the dashboard as favorite for the current user](./api/mark-the-dashboard-as-favorite-for-the-current-user) | `/api/v1/dashboard/{pk}/favorites/` |
|
||||
| `PUT` | [Update native filters configuration for a dashboard.](./api/update-native-filters-configuration-for-a-dashboard) | `/api/v1/dashboard/{pk}/filters` |
|
||||
| `GET` | [Get a computed screenshot from cache (dashboard-pk-screenshot-digest)](./api/get-a-computed-screenshot-from-cache-dashboard-pk-screenshot-digest) | `/api/v1/dashboard/{pk}/screenshot/{digest}/` |
|
||||
| `GET` | [Get dashboard's thumbnail](./api/get-dashboards-thumbnail) | `/api/v1/dashboard/{pk}/thumbnail/{digest}/` |
|
||||
| `GET` | [Download multiple dashboards as YAML files](./api/download-multiple-dashboards-as-yaml-files) | `/api/v1/dashboard/export/` |
|
||||
| `GET` | [Check favorited dashboards for current user](./api/check-favorited-dashboards-for-current-user) | `/api/v1/dashboard/favorite_status/` |
|
||||
| `POST` | [Import dashboard(s) with associated charts/datasets/databases](./api/import-dashboard-s-with-associated-charts-datasets-databases) | `/api/v1/dashboard/import/` |
|
||||
| `GET` | [Get related fields data (dashboard-related-column-name)](./api/get-related-fields-data-dashboard-related-column-name) | `/api/v1/dashboard/related/{column_name}` |
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>Charts</strong> (20 endpoints) — Create, read, update, and delete charts (slices).</summary>
|
||||
|
||||
| Method | Description | Endpoint |
|
||||
|--------|----------|-------------|
|
||||
| `DELETE` | [Bulk delete charts](./api/bulk-delete-charts) | `/api/v1/chart/` |
|
||||
| `GET` | [Get a list of charts](./api/get-a-list-of-charts) | `/api/v1/chart/` |
|
||||
| `POST` | [Create a new chart](./api/create-a-new-chart) | `/api/v1/chart/` |
|
||||
| `GET` | [Get metadata information about this API resource (chart--info)](./api/get-metadata-information-about-this-api-resource-chart-info) | `/api/v1/chart/_info` |
|
||||
| `DELETE` | [Delete a chart](./api/delete-a-chart) | `/api/v1/chart/{pk}` |
|
||||
| `GET` | [Get a chart detail information](./api/get-a-chart-detail-information) | `/api/v1/chart/{pk}` |
|
||||
| `PUT` | [Update a chart](./api/update-a-chart) | `/api/v1/chart/{pk}` |
|
||||
| `GET` | [Compute and cache a screenshot (chart-pk-cache-screenshot)](./api/compute-and-cache-a-screenshot-chart-pk-cache-screenshot) | `/api/v1/chart/{pk}/cache_screenshot/` |
|
||||
| `GET` | [Return payload data response for a chart](./api/return-payload-data-response-for-a-chart) | `/api/v1/chart/{pk}/data/` |
|
||||
| `DELETE` | [Remove the chart from the user favorite list](./api/remove-the-chart-from-the-user-favorite-list) | `/api/v1/chart/{pk}/favorites/` |
|
||||
| `POST` | [Mark the chart as favorite for the current user](./api/mark-the-chart-as-favorite-for-the-current-user) | `/api/v1/chart/{pk}/favorites/` |
|
||||
| `GET` | [Get a computed screenshot from cache (chart-pk-screenshot-digest)](./api/get-a-computed-screenshot-from-cache-chart-pk-screenshot-digest) | `/api/v1/chart/{pk}/screenshot/{digest}/` |
|
||||
| `GET` | [Get chart thumbnail](./api/get-chart-thumbnail) | `/api/v1/chart/{pk}/thumbnail/{digest}/` |
|
||||
| `POST` | [Return payload data response for the given query (chart-data)](./api/return-payload-data-response-for-the-given-query-chart-data) | `/api/v1/chart/data` |
|
||||
| `GET` | [Return payload data response for the given query (chart-data-cache-key)](./api/return-payload-data-response-for-the-given-query-chart-data-cache-key) | `/api/v1/chart/data/{cache_key}` |
|
||||
| `GET` | [Download multiple charts as YAML files](./api/download-multiple-charts-as-yaml-files) | `/api/v1/chart/export/` |
|
||||
| `GET` | [Check favorited charts for current user](./api/check-favorited-charts-for-current-user) | `/api/v1/chart/favorite_status/` |
|
||||
| `POST` | [Import chart(s) with associated datasets and databases](./api/import-chart-s-with-associated-datasets-and-databases) | `/api/v1/chart/import/` |
|
||||
| `GET` | [Get related fields data (chart-related-column-name)](./api/get-related-fields-data-chart-related-column-name) | `/api/v1/chart/related/{column_name}` |
|
||||
| `PUT` | [Warm up the cache for the chart](./api/warm-up-the-cache-for-the-chart) | `/api/v1/chart/warm_up_cache` |
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>Datasets</strong> (18 endpoints) — Manage datasets (tables) used for building charts.</summary>
|
||||
|
||||
| Method | Description | Endpoint |
|
||||
|--------|----------|-------------|
|
||||
| `DELETE` | [Bulk delete datasets](./api/bulk-delete-datasets) | `/api/v1/dataset/` |
|
||||
| `GET` | [Get a list of datasets](./api/get-a-list-of-datasets) | `/api/v1/dataset/` |
|
||||
| `POST` | [Create a new dataset](./api/create-a-new-dataset) | `/api/v1/dataset/` |
|
||||
| `GET` | [Get metadata information about this API resource (dataset--info)](./api/get-metadata-information-about-this-api-resource-dataset-info) | `/api/v1/dataset/_info` |
|
||||
| `DELETE` | [Delete a dataset](./api/delete-a-dataset) | `/api/v1/dataset/{pk}` |
|
||||
| `GET` | [Get a dataset](./api/get-a-dataset) | `/api/v1/dataset/{pk}` |
|
||||
| `PUT` | [Update a dataset](./api/update-a-dataset) | `/api/v1/dataset/{pk}` |
|
||||
| `DELETE` | [Delete a dataset column](./api/delete-a-dataset-column) | `/api/v1/dataset/{pk}/column/{column_id}` |
|
||||
| `DELETE` | [Delete a dataset metric](./api/delete-a-dataset-metric) | `/api/v1/dataset/{pk}/metric/{metric_id}` |
|
||||
| `PUT` | [Refresh and update columns of a dataset](./api/refresh-and-update-columns-of-a-dataset) | `/api/v1/dataset/{pk}/refresh` |
|
||||
| `GET` | [Get charts and dashboards count associated to a dataset](./api/get-charts-and-dashboards-count-associated-to-a-dataset) | `/api/v1/dataset/{pk}/related_objects` |
|
||||
| `GET` | [Get distinct values from field data (dataset-distinct-column-name)](./api/get-distinct-values-from-field-data-dataset-distinct-column-name) | `/api/v1/dataset/distinct/{column_name}` |
|
||||
| `POST` | [Duplicate a dataset](./api/duplicate-a-dataset) | `/api/v1/dataset/duplicate` |
|
||||
| `GET` | [Download multiple datasets as YAML files](./api/download-multiple-datasets-as-yaml-files) | `/api/v1/dataset/export/` |
|
||||
| `POST` | [Retrieve a table by name, or create it if it does not exist](./api/retrieve-a-table-by-name-or-create-it-if-it-does-not-exist) | `/api/v1/dataset/get_or_create/` |
|
||||
| `POST` | [Import dataset(s) with associated databases](./api/import-dataset-s-with-associated-databases) | `/api/v1/dataset/import/` |
|
||||
| `GET` | [Get related fields data (dataset-related-column-name)](./api/get-related-fields-data-dataset-related-column-name) | `/api/v1/dataset/related/{column_name}` |
|
||||
| `PUT` | [Warm up the cache for each chart powered by the given table](./api/warm-up-the-cache-for-each-chart-powered-by-the-given-table) | `/api/v1/dataset/warm_up_cache` |
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>Database</strong> (31 endpoints) — Manage database connections and metadata.</summary>
|
||||
|
||||
| Method | Description | Endpoint |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Get a list of databases](./api/get-a-list-of-databases) | `/api/v1/database/` |
|
||||
| `POST` | [Create a new database](./api/create-a-new-database) | `/api/v1/database/` |
|
||||
| `GET` | [Get metadata information about this API resource (database--info)](./api/get-metadata-information-about-this-api-resource-database-info) | `/api/v1/database/_info` |
|
||||
| `DELETE` | [Delete a database](./api/delete-a-database) | `/api/v1/database/{pk}` |
|
||||
| `GET` | [Get a database](./api/get-a-database) | `/api/v1/database/{pk}` |
|
||||
| `PUT` | [Change a database](./api/change-a-database) | `/api/v1/database/{pk}` |
|
||||
| `GET` | [Get all catalogs from a database](./api/get-all-catalogs-from-a-database) | `/api/v1/database/{pk}/catalogs/` |
|
||||
| `GET` | [Get a database connection info](./api/get-a-database-connection-info) | `/api/v1/database/{pk}/connection` |
|
||||
| `GET` | [Get function names supported by a database](./api/get-function-names-supported-by-a-database) | `/api/v1/database/{pk}/function_names/` |
|
||||
| `GET` | [Get charts and dashboards count associated to a database](./api/get-charts-and-dashboards-count-associated-to-a-database) | `/api/v1/database/{pk}/related_objects/` |
|
||||
| `GET` | [The list of the database schemas where to upload information](./api/the-list-of-the-database-schemas-where-to-upload-information) | `/api/v1/database/{pk}/schemas_access_for_file_upload/` |
|
||||
| `GET` | [Get all schemas from a database](./api/get-all-schemas-from-a-database) | `/api/v1/database/{pk}/schemas/` |
|
||||
| `GET` | [Get database select star for table (database-pk-select-star-table-name)](./api/get-database-select-star-for-table-database-pk-select-star-table-name) | `/api/v1/database/{pk}/select_star/{table_name}/` |
|
||||
| `GET` | [Get database select star for table (database-pk-select-star-table-name-schema-name)](./api/get-database-select-star-for-table-database-pk-select-star-table-name-schema-name) | `/api/v1/database/{pk}/select_star/{table_name}/{schema_name}/` |
|
||||
| `DELETE` | [Delete a SSH tunnel](./api/delete-a-ssh-tunnel) | `/api/v1/database/{pk}/ssh_tunnel/` |
|
||||
| `POST` | [Re-sync all permissions for a database connection](./api/re-sync-all-permissions-for-a-database-connection) | `/api/v1/database/{pk}/sync_permissions/` |
|
||||
| `GET` | [Get table extra metadata (database-pk-table-extra-table-name-schema-name)](./api/get-table-extra-metadata-database-pk-table-extra-table-name-schema-name) | `/api/v1/database/{pk}/table_extra/{table_name}/{schema_name}/` |
|
||||
| `GET` | [Get table metadata](./api/get-table-metadata) | `/api/v1/database/{pk}/table_metadata/` |
|
||||
| `GET` | [Get table extra metadata (database-pk-table-metadata-extra)](./api/get-table-extra-metadata-database-pk-table-metadata-extra) | `/api/v1/database/{pk}/table_metadata/extra/` |
|
||||
| `GET` | [Get database table metadata](./api/get-database-table-metadata) | `/api/v1/database/{pk}/table/{table_name}/{schema_name}/` |
|
||||
| `GET` | [Get a list of tables for given database](./api/get-a-list-of-tables-for-given-database) | `/api/v1/database/{pk}/tables/` |
|
||||
| `POST` | [Upload a file to a database table](./api/upload-a-file-to-a-database-table) | `/api/v1/database/{pk}/upload/` |
|
||||
| `POST` | [Validate arbitrary SQL](./api/validate-arbitrary-sql) | `/api/v1/database/{pk}/validate_sql/` |
|
||||
| `GET` | [Get names of databases currently available](./api/get-names-of-databases-currently-available) | `/api/v1/database/available/` |
|
||||
| `GET` | [Download database(s) and associated dataset(s) as a zip file](./api/download-database-s-and-associated-dataset-s-as-a-zip-file) | `/api/v1/database/export/` |
|
||||
| `POST` | [Import database(s) with associated datasets](./api/import-database-s-with-associated-datasets) | `/api/v1/database/import/` |
|
||||
| `GET` | [Receive personal access tokens from OAuth2](./api/receive-personal-access-tokens-from-o-auth-2) | `/api/v1/database/oauth2/` |
|
||||
| `GET` | [Get related fields data (database-related-column-name)](./api/get-related-fields-data-database-related-column-name) | `/api/v1/database/related/{column_name}` |
|
||||
| `POST` | [Test a database connection](./api/test-a-database-connection) | `/api/v1/database/test_connection/` |
|
||||
| `POST` | [Upload a file and returns file metadata](./api/upload-a-file-and-returns-file-metadata) | `/api/v1/database/upload_metadata/` |
|
||||
| `POST` | [Validate database connection parameters](./api/validate-database-connection-parameters) | `/api/v1/database/validate_parameters/` |
|
||||
|
||||
</details>
|
||||
|
||||
#### Data Exploration
|
||||
|
||||
<details>
|
||||
<summary><strong>Explore</strong> (1 endpoint) — Chart exploration and data querying endpoints.</summary>
|
||||
|
||||
| Method | Description | Endpoint |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Assemble Explore related information in a single endpoint](./api/assemble-explore-related-information-in-a-single-endpoint) | `/api/v1/explore/` |
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>SQL Lab</strong> (6 endpoints) — Execute SQL queries and manage SQL Lab sessions.</summary>
|
||||
|
||||
| Method | Description | Endpoint |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Get the bootstrap data for SqlLab page](./api/get-the-bootstrap-data-for-sql-lab-page) | `/api/v1/sqllab/` |
|
||||
| `POST` | [Estimate the SQL query execution cost](./api/estimate-the-sql-query-execution-cost) | `/api/v1/sqllab/estimate/` |
|
||||
| `POST` | [Execute a SQL query](./api/execute-a-sql-query) | `/api/v1/sqllab/execute/` |
|
||||
| `GET` | [Export the SQL query results to a CSV](./api/export-the-sql-query-results-to-a-csv) | `/api/v1/sqllab/export/{client_id}/` |
|
||||
| `POST` | [Format SQL code](./api/format-sql-code) | `/api/v1/sqllab/format_sql/` |
|
||||
| `GET` | [Get the result of a SQL query execution](./api/get-the-result-of-a-sql-query-execution) | `/api/v1/sqllab/results/` |
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>Queries</strong> (17 endpoints) — View and manage SQL Lab query history.</summary>
|
||||
|
||||
| Method | Description | Endpoint |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Get a list of queries](./api/get-a-list-of-queries) | `/api/v1/query/` |
|
||||
| `GET` | [Get query detail information](./api/get-query-detail-information) | `/api/v1/query/{pk}` |
|
||||
| `GET` | [Get distinct values from field data (query-distinct-column-name)](./api/get-distinct-values-from-field-data-query-distinct-column-name) | `/api/v1/query/distinct/{column_name}` |
|
||||
| `GET` | [Get related fields data (query-related-column-name)](./api/get-related-fields-data-query-related-column-name) | `/api/v1/query/related/{column_name}` |
|
||||
| `POST` | [Manually stop a query with client_id](./api/manually-stop-a-query-with-client-id) | `/api/v1/query/stop` |
|
||||
| `GET` | [Get a list of queries that changed after last_updated_ms](./api/get-a-list-of-queries-that-changed-after-last-updated-ms) | `/api/v1/query/updated_since` |
|
||||
| `DELETE` | [Bulk delete saved queries](./api/bulk-delete-saved-queries) | `/api/v1/saved_query/` |
|
||||
| `GET` | [Get a list of saved queries](./api/get-a-list-of-saved-queries) | `/api/v1/saved_query/` |
|
||||
| `POST` | [Create a saved query](./api/create-a-saved-query) | `/api/v1/saved_query/` |
|
||||
| `GET` | [Get metadata information about this API resource (saved-query--info)](./api/get-metadata-information-about-this-api-resource-saved-query-info) | `/api/v1/saved_query/_info` |
|
||||
| `DELETE` | [Delete a saved query](./api/delete-a-saved-query) | `/api/v1/saved_query/{pk}` |
|
||||
| `GET` | [Get a saved query](./api/get-a-saved-query) | `/api/v1/saved_query/{pk}` |
|
||||
| `PUT` | [Update a saved query](./api/update-a-saved-query) | `/api/v1/saved_query/{pk}` |
|
||||
| `GET` | [Get distinct values from field data (saved-query-distinct-column-name)](./api/get-distinct-values-from-field-data-saved-query-distinct-column-name) | `/api/v1/saved_query/distinct/{column_name}` |
|
||||
| `GET` | [Download multiple saved queries as YAML files](./api/download-multiple-saved-queries-as-yaml-files) | `/api/v1/saved_query/export/` |
|
||||
| `POST` | [Import saved queries with associated databases](./api/import-saved-queries-with-associated-databases) | `/api/v1/saved_query/import/` |
|
||||
| `GET` | [Get related fields data (saved-query-related-column-name)](./api/get-related-fields-data-saved-query-related-column-name) | `/api/v1/saved_query/related/{column_name}` |
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>Datasources</strong> (1 endpoint) — Query datasource metadata and column values.</summary>
|
||||
|
||||
| Method | Description | Endpoint |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Get possible values for a datasource column](./api/get-possible-values-for-a-datasource-column) | `/api/v1/datasource/{datasource_type}/{datasource_id}/column/{column_name}/values/` |
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>Advanced Data Type</strong> (2 endpoints) — Endpoints for advanced data type operations and conversions.</summary>
|
||||
|
||||
| Method | Description | Endpoint |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Return an AdvancedDataTypeResponse](./api/return-an-advanced-data-type-response) | `/api/v1/advanced_data_type/convert` |
|
||||
| `GET` | [Return a list of available advanced data types](./api/return-a-list-of-available-advanced-data-types) | `/api/v1/advanced_data_type/types` |
|
||||
|
||||
</details>
|
||||
|
||||
#### Organization & Customization
|
||||
|
||||
<details>
|
||||
<summary><strong>Tags</strong> (15 endpoints) — Organize assets with tags.</summary>
|
||||
|
||||
| Method | Description | Endpoint |
|
||||
|--------|----------|-------------|
|
||||
| `DELETE` | [Bulk delete tags](./api/bulk-delete-tags) | `/api/v1/tag/` |
|
||||
| `GET` | [Get a list of tags](./api/get-a-list-of-tags) | `/api/v1/tag/` |
|
||||
| `POST` | [Create a tag](./api/create-a-tag) | `/api/v1/tag/` |
|
||||
| `GET` | [Get metadata information about tag API endpoints](./api/get-metadata-information-about-tag-api-endpoints) | `/api/v1/tag/_info` |
|
||||
| `POST` | [Add tags to an object](./api/add-tags-to-an-object) | `/api/v1/tag/{object_type}/{object_id}/` |
|
||||
| `DELETE` | [Delete a tagged object](./api/delete-a-tagged-object) | `/api/v1/tag/{object_type}/{object_id}/{tag}/` |
|
||||
| `DELETE` | [Delete a tag](./api/delete-a-tag) | `/api/v1/tag/{pk}` |
|
||||
| `GET` | [Get a tag detail information](./api/get-a-tag-detail-information) | `/api/v1/tag/{pk}` |
|
||||
| `PUT` | [Update a tag](./api/update-a-tag) | `/api/v1/tag/{pk}` |
|
||||
| `DELETE` | [Delete tag by pk favorites](./api/delete-tag-by-pk-favorites) | `/api/v1/tag/{pk}/favorites/` |
|
||||
| `POST` | [Create tag by pk favorites](./api/create-tag-by-pk-favorites) | `/api/v1/tag/{pk}/favorites/` |
|
||||
| `POST` | [Bulk create tags and tagged objects](./api/bulk-create-tags-and-tagged-objects) | `/api/v1/tag/bulk_create` |
|
||||
| `GET` | [Get tag favorite status](./api/get-tag-favorite-status) | `/api/v1/tag/favorite_status/` |
|
||||
| `GET` | [Get all objects associated with a tag](./api/get-all-objects-associated-with-a-tag) | `/api/v1/tag/get_objects/` |
|
||||
| `GET` | [Get related fields data (tag-related-column-name)](./api/get-related-fields-data-tag-related-column-name) | `/api/v1/tag/related/{column_name}` |
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>Annotation Layers</strong> (14 endpoints) — Manage annotation layers and annotations for charts.</summary>
|
||||
|
||||
| Method | Description | Endpoint |
|
||||
|--------|----------|-------------|
|
||||
| `DELETE` | [Delete multiple annotation layers in a bulk operation](./api/delete-multiple-annotation-layers-in-a-bulk-operation) | `/api/v1/annotation_layer/` |
|
||||
| `GET` | [Get a list of annotation layers (annotation-layer)](./api/get-a-list-of-annotation-layers-annotation-layer) | `/api/v1/annotation_layer/` |
|
||||
| `POST` | [Create an annotation layer (annotation-layer)](./api/create-an-annotation-layer-annotation-layer) | `/api/v1/annotation_layer/` |
|
||||
| `GET` | [Get metadata information about this API resource (annotation-layer--info)](./api/get-metadata-information-about-this-api-resource-annotation-layer-info) | `/api/v1/annotation_layer/_info` |
|
||||
| `DELETE` | [Delete annotation layer (annotation-layer-pk)](./api/delete-annotation-layer-annotation-layer-pk) | `/api/v1/annotation_layer/{pk}` |
|
||||
| `GET` | [Get an annotation layer (annotation-layer-pk)](./api/get-an-annotation-layer-annotation-layer-pk) | `/api/v1/annotation_layer/{pk}` |
|
||||
| `PUT` | [Update an annotation layer (annotation-layer-pk)](./api/update-an-annotation-layer-annotation-layer-pk) | `/api/v1/annotation_layer/{pk}` |
|
||||
| `DELETE` | [Bulk delete annotation layers](./api/bulk-delete-annotation-layers) | `/api/v1/annotation_layer/{pk}/annotation/` |
|
||||
| `GET` | [Get a list of annotation layers (annotation-layer-pk-annotation)](./api/get-a-list-of-annotation-layers-annotation-layer-pk-annotation) | `/api/v1/annotation_layer/{pk}/annotation/` |
|
||||
| `POST` | [Create an annotation layer (annotation-layer-pk-annotation)](./api/create-an-annotation-layer-annotation-layer-pk-annotation) | `/api/v1/annotation_layer/{pk}/annotation/` |
|
||||
| `DELETE` | [Delete annotation layer (annotation-layer-pk-annotation-annotation-id)](./api/delete-annotation-layer-annotation-layer-pk-annotation-annotation-id) | `/api/v1/annotation_layer/{pk}/annotation/{annotation_id}` |
|
||||
| `GET` | [Get an annotation layer (annotation-layer-pk-annotation-annotation-id)](./api/get-an-annotation-layer-annotation-layer-pk-annotation-annotation-id) | `/api/v1/annotation_layer/{pk}/annotation/{annotation_id}` |
|
||||
| `PUT` | [Update an annotation layer (annotation-layer-pk-annotation-annotation-id)](./api/update-an-annotation-layer-annotation-layer-pk-annotation-annotation-id) | `/api/v1/annotation_layer/{pk}/annotation/{annotation_id}` |
|
||||
| `GET` | [Get related fields data (annotation-layer-related-column-name)](./api/get-related-fields-data-annotation-layer-related-column-name) | `/api/v1/annotation_layer/related/{column_name}` |
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>CSS Templates</strong> (8 endpoints) — Manage CSS templates for custom dashboard styling.</summary>
|
||||
|
||||
| Method | Description | Endpoint |
|
||||
|--------|----------|-------------|
|
||||
| `DELETE` | [Bulk delete CSS templates](./api/bulk-delete-css-templates) | `/api/v1/css_template/` |
|
||||
| `GET` | [Get a list of CSS templates](./api/get-a-list-of-css-templates) | `/api/v1/css_template/` |
|
||||
| `POST` | [Create a CSS template](./api/create-a-css-template) | `/api/v1/css_template/` |
|
||||
| `GET` | [Get metadata information about this API resource (css-template--info)](./api/get-metadata-information-about-this-api-resource-css-template-info) | `/api/v1/css_template/_info` |
|
||||
| `DELETE` | [Delete a CSS template](./api/delete-a-css-template) | `/api/v1/css_template/{pk}` |
|
||||
| `GET` | [Get a CSS template](./api/get-a-css-template) | `/api/v1/css_template/{pk}` |
|
||||
| `PUT` | [Update a CSS template](./api/update-a-css-template) | `/api/v1/css_template/{pk}` |
|
||||
| `GET` | [Get related fields data (css-template-related-column-name)](./api/get-related-fields-data-css-template-related-column-name) | `/api/v1/css_template/related/{column_name}` |
|
||||
|
||||
</details>
|
||||
|
||||
#### Sharing & Embedding
|
||||
|
||||
<details>
|
||||
<summary><strong>Dashboard Permanent Link</strong> (2 endpoints) — Create and retrieve permanent links to dashboard states.</summary>
|
||||
|
||||
| Method | Description | Endpoint |
|
||||
|--------|----------|-------------|
|
||||
| `POST` | [Create a new dashboard's permanent link](./api/create-a-new-dashboards-permanent-link) | `/api/v1/dashboard/{pk}/permalink` |
|
||||
| `GET` | [Get dashboard's permanent link state](./api/get-dashboards-permanent-link-state) | `/api/v1/dashboard/permalink/{key}` |
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>Explore Permanent Link</strong> (2 endpoints) — Create and retrieve permanent links to chart explore states.</summary>
|
||||
|
||||
| Method | Description | Endpoint |
|
||||
|--------|----------|-------------|
|
||||
| `POST` | [Create a new permanent link (explore-permalink)](./api/create-a-new-permanent-link-explore-permalink) | `/api/v1/explore/permalink` |
|
||||
| `GET` | [Get chart's permanent link state](./api/get-charts-permanent-link-state) | `/api/v1/explore/permalink/{key}` |
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>SQL Lab Permanent Link</strong> (2 endpoints) — Create and retrieve permanent links to SQL Lab states.</summary>
|
||||
|
||||
| Method | Description | Endpoint |
|
||||
|--------|----------|-------------|
|
||||
| `POST` | [Create a new permanent link (sqllab-permalink)](./api/create-a-new-permanent-link-sqllab-permalink) | `/api/v1/sqllab/permalink` |
|
||||
| `GET` | [Get permanent link state for SQLLab editor.](./api/get-permanent-link-state-for-sql-lab-editor) | `/api/v1/sqllab/permalink/{key}` |
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>Embedded Dashboard</strong> (1 endpoint) — Configure embedded dashboard settings.</summary>
|
||||
|
||||
| Method | Description | Endpoint |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Get a report schedule log (embedded-dashboard-uuid)](./api/get-a-report-schedule-log-embedded-dashboard-uuid) | `/api/v1/embedded_dashboard/{uuid}` |
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>Dashboard Filter State</strong> (4 endpoints) — Manage temporary filter state for dashboards.</summary>
|
||||
|
||||
| Method | Description | Endpoint |
|
||||
|--------|----------|-------------|
|
||||
| `POST` | [Create a dashboard's filter state](./api/create-a-dashboards-filter-state) | `/api/v1/dashboard/{pk}/filter_state` |
|
||||
| `DELETE` | [Delete a dashboard's filter state value](./api/delete-a-dashboards-filter-state-value) | `/api/v1/dashboard/{pk}/filter_state/{key}` |
|
||||
| `GET` | [Get a dashboard's filter state value](./api/get-a-dashboards-filter-state-value) | `/api/v1/dashboard/{pk}/filter_state/{key}` |
|
||||
| `PUT` | [Update a dashboard's filter state value](./api/update-a-dashboards-filter-state-value) | `/api/v1/dashboard/{pk}/filter_state/{key}` |
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>Explore Form Data</strong> (4 endpoints) — Manage temporary form data for chart exploration.</summary>
|
||||
|
||||
| Method | Description | Endpoint |
|
||||
|--------|----------|-------------|
|
||||
| `POST` | [Create a new form_data](./api/create-a-new-form-data) | `/api/v1/explore/form_data` |
|
||||
| `DELETE` | [Delete a form_data](./api/delete-a-form-data) | `/api/v1/explore/form_data/{key}` |
|
||||
| `GET` | [Get a form_data](./api/get-a-form-data) | `/api/v1/explore/form_data/{key}` |
|
||||
| `PUT` | [Update an existing form_data](./api/update-an-existing-form-data) | `/api/v1/explore/form_data/{key}` |
|
||||
|
||||
</details>
|
||||
|
||||
#### Scheduling & Alerts
|
||||
|
||||
<details>
|
||||
<summary><strong>Report Schedules</strong> (11 endpoints) — Configure scheduled reports and alerts.</summary>
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `DELETE` | [Bulk delete report schedules](./api/bulk-delete-report-schedules) | `/api/v1/report/` |
|
||||
| `GET` | [Get a list of report schedules](./api/get-a-list-of-report-schedules) | `/api/v1/report/` |
|
||||
| `POST` | [Create a report schedule](./api/create-a-report-schedule) | `/api/v1/report/` |
|
||||
| `GET` | [Get metadata information about this API resource (report--info)](./api/get-metadata-information-about-this-api-resource-report-info) | `/api/v1/report/_info` |
|
||||
| `DELETE` | [Delete a report schedule](./api/delete-a-report-schedule) | `/api/v1/report/{pk}` |
|
||||
| `GET` | [Get a report schedule](./api/get-a-report-schedule) | `/api/v1/report/{pk}` |
|
||||
| `PUT` | [Update a report schedule](./api/update-a-report-schedule) | `/api/v1/report/{pk}` |
|
||||
| `GET` | [Get a list of report schedule logs](./api/get-a-list-of-report-schedule-logs) | `/api/v1/report/{pk}/log/` |
|
||||
| `GET` | [Get a report schedule log (report-pk-log-log-id)](./api/get-a-report-schedule-log-report-pk-log-log-id) | `/api/v1/report/{pk}/log/{log_id}` |
|
||||
| `GET` | [Get related fields data (report-related-column-name)](./api/get-related-fields-data-report-related-column-name) | `/api/v1/report/related/{column_name}` |
|
||||
| `GET` | [Get slack channels](./api/get-slack-channels) | `/api/v1/report/slack_channels/` |
|
||||
|
||||
</details>
|
||||
|
||||
#### Security & Access Control
|
||||
|
||||
<details>
|
||||
<summary><strong>Security Roles</strong> (10 endpoints) — Manage security roles and their permissions.</summary>
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Get security roles](./api/get-security-roles) | `/api/v1/security/roles/` |
|
||||
| `POST` | [Create security roles](./api/create-security-roles) | `/api/v1/security/roles/` |
|
||||
| `GET` | [Get security roles info](./api/get-security-roles-info) | `/api/v1/security/roles/_info` |
|
||||
| `DELETE` | [Delete security roles by pk](./api/delete-security-roles-by-pk) | `/api/v1/security/roles/{pk}` |
|
||||
| `GET` | [Get security roles by pk](./api/get-security-roles-by-pk) | `/api/v1/security/roles/{pk}` |
|
||||
| `PUT` | [Update security roles by pk](./api/update-security-roles-by-pk) | `/api/v1/security/roles/{pk}` |
|
||||
| `POST` | [Create security roles by role_id permissions](./api/create-security-roles-by-role-id-permissions) | `/api/v1/security/roles/{role_id}/permissions` |
|
||||
| `GET` | [Get security roles by role_id permissions](./api/get-security-roles-by-role-id-permissions) | `/api/v1/security/roles/{role_id}/permissions/` |
|
||||
| `PUT` | [Update security roles by role_id users](./api/update-security-roles-by-role-id-users) | `/api/v1/security/roles/{role_id}/users` |
|
||||
| `GET` | [List roles](./api/list-roles) | `/api/v1/security/roles/search/` |
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>Security Users</strong> (6 endpoints) — Manage user accounts.</summary>
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Get security users](./api/get-security-users) | `/api/v1/security/users/` |
|
||||
| `POST` | [Create security users](./api/create-security-users) | `/api/v1/security/users/` |
|
||||
| `GET` | [Get security users info](./api/get-security-users-info) | `/api/v1/security/users/_info` |
|
||||
| `DELETE` | [Delete security users by pk](./api/delete-security-users-by-pk) | `/api/v1/security/users/{pk}` |
|
||||
| `GET` | [Get security users by pk](./api/get-security-users-by-pk) | `/api/v1/security/users/{pk}` |
|
||||
| `PUT` | [Update security users by pk](./api/update-security-users-by-pk) | `/api/v1/security/users/{pk}` |
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>Security Permissions</strong> (3 endpoints) — View available permissions.</summary>
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Get security permissions](./api/get-security-permissions) | `/api/v1/security/permissions/` |
|
||||
| `GET` | [Get security permissions info](./api/get-security-permissions-info) | `/api/v1/security/permissions/_info` |
|
||||
| `GET` | [Get security permissions by pk](./api/get-security-permissions-by-pk) | `/api/v1/security/permissions/{pk}` |
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>Security Resources (View Menus)</strong> (6 endpoints) — Manage security resources (view menus).</summary>
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Get security resources](./api/get-security-resources) | `/api/v1/security/resources/` |
|
||||
| `POST` | [Create security resources](./api/create-security-resources) | `/api/v1/security/resources/` |
|
||||
| `GET` | [Get security resources info](./api/get-security-resources-info) | `/api/v1/security/resources/_info` |
|
||||
| `DELETE` | [Delete security resources by pk](./api/delete-security-resources-by-pk) | `/api/v1/security/resources/{pk}` |
|
||||
| `GET` | [Get security resources by pk](./api/get-security-resources-by-pk) | `/api/v1/security/resources/{pk}` |
|
||||
| `PUT` | [Update security resources by pk](./api/update-security-resources-by-pk) | `/api/v1/security/resources/{pk}` |
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>Security Permissions on Resources (View Menus)</strong> (6 endpoints) — Manage permission-resource mappings.</summary>
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Get security permissions resources](./api/get-security-permissions-resources) | `/api/v1/security/permissions-resources/` |
|
||||
| `POST` | [Create security permissions resources](./api/create-security-permissions-resources) | `/api/v1/security/permissions-resources/` |
|
||||
| `GET` | [Get security permissions resources info](./api/get-security-permissions-resources-info) | `/api/v1/security/permissions-resources/_info` |
|
||||
| `DELETE` | [Delete security permissions resources by pk](./api/delete-security-permissions-resources-by-pk) | `/api/v1/security/permissions-resources/{pk}` |
|
||||
| `GET` | [Get security permissions resources by pk](./api/get-security-permissions-resources-by-pk) | `/api/v1/security/permissions-resources/{pk}` |
|
||||
| `PUT` | [Update security permissions resources by pk](./api/update-security-permissions-resources-by-pk) | `/api/v1/security/permissions-resources/{pk}` |
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>Row Level Security</strong> (8 endpoints) — Manage row-level security rules for data access control.</summary>
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `DELETE` | [Bulk delete RLS rules](./api/bulk-delete-rls-rules) | `/api/v1/rowlevelsecurity/` |
|
||||
| `GET` | [Get a list of RLS](./api/get-a-list-of-rls) | `/api/v1/rowlevelsecurity/` |
|
||||
| `POST` | [Create a new RLS rule](./api/create-a-new-rls-rule) | `/api/v1/rowlevelsecurity/` |
|
||||
| `GET` | [Get metadata information about this API resource (rowlevelsecurity--info)](./api/get-metadata-information-about-this-api-resource-rowlevelsecurity-info) | `/api/v1/rowlevelsecurity/_info` |
|
||||
| `DELETE` | [Delete an RLS](./api/delete-an-rls) | `/api/v1/rowlevelsecurity/{pk}` |
|
||||
| `GET` | [Get an RLS](./api/get-an-rls) | `/api/v1/rowlevelsecurity/{pk}` |
|
||||
| `PUT` | [Update an RLS rule](./api/update-an-rls-rule) | `/api/v1/rowlevelsecurity/{pk}` |
|
||||
| `GET` | [Get related fields data (rowlevelsecurity-related-column-name)](./api/get-related-fields-data-rowlevelsecurity-related-column-name) | `/api/v1/rowlevelsecurity/related/{column_name}` |
|
||||
|
||||
</details>
|
||||
|
||||
#### Import/Export & Administration
|
||||
|
||||
<details>
|
||||
<summary><strong>Import/export</strong> (2 endpoints) — Import and export Superset assets (dashboards, charts, databases).</summary>
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Export all assets](./api/export-all-assets) | `/api/v1/assets/export/` |
|
||||
| `POST` | [Import multiple assets](./api/import-multiple-assets) | `/api/v1/assets/import/` |
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>CacheRestApi</strong> (1 endpoint) — Cache management and invalidation operations.</summary>
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `POST` | [Invalidate cache records and remove the database records](./api/invalidate-cache-records-and-remove-the-database-records) | `/api/v1/cachekey/invalidate` |
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>LogRestApi</strong> (4 endpoints) — Access audit logs and activity history.</summary>
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Get a list of logs](./api/get-a-list-of-logs) | `/api/v1/log/` |
|
||||
| `POST` | [Create log](./api/create-log) | `/api/v1/log/` |
|
||||
| `GET` | [Get a log detail information](./api/get-a-log-detail-information) | `/api/v1/log/{pk}` |
|
||||
| `GET` | [Get recent activity data for a user](./api/get-recent-activity-data-for-a-user) | `/api/v1/log/recent_activity/` |
|
||||
|
||||
</details>
|
||||
|
||||
#### User & System
|
||||
|
||||
<details>
|
||||
<summary><strong>Current User</strong> (2 endpoints) — Get information about the currently authenticated user.</summary>
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Get the user object](./api/get-the-user-object) | `/api/v1/me/` |
|
||||
| `GET` | [Get the user roles](./api/get-the-user-roles) | `/api/v1/me/roles/` |
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>User</strong> (1 endpoint) — User profile and preferences.</summary>
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Get the user avatar](./api/get-the-user-avatar) | `/api/v1/user/{user_id}/avatar.png` |
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>Menu</strong> (1 endpoint) — Get the Superset menu structure.</summary>
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Get menu](./api/get-menu) | `/api/v1/menu/` |
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>Available Domains</strong> (1 endpoint) — Get available domains for the Superset instance.</summary>
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Get all available domains](./api/get-all-available-domains) | `/api/v1/available_domains/` |
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>AsyncEventsRestApi</strong> (1 endpoint) — Real-time event streaming via Server-Sent Events (SSE).</summary>
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Read off of the Redis events stream](./api/read-off-of-the-redis-events-stream) | `/api/v1/async_event/` |
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>OpenApi</strong> (1 endpoint) — Access the OpenAPI specification.</summary>
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Get api by version openapi](./api/get-api-by-version-openapi) | `/api/{version}/_openapi` |
|
||||
|
||||
</details>
|
||||
|
||||
---
|
||||
|
||||
### Additional Resources
|
||||
|
||||
- [Superset REST API Blog Post](https://preset.io/blog/2020-10-01-superset-api/)
|
||||
- [Accessing APIs with Superset](https://preset.io/blog/accessing-apis-with-superset/)
|
||||
|
||||
@@ -441,7 +441,7 @@ FEATURE_FLAGS = {
|
||||
}
|
||||
```
|
||||
|
||||
A current list of feature flags can be found in [RESOURCES/FEATURE_FLAGS.md](https://github.com/apache/superset/blob/master/RESOURCES/FEATURE_FLAGS.md).
|
||||
A current list of feature flags can be found in the [Feature Flags](/docs/configuration/feature-flags) documentation.
|
||||
|
||||
:::resources
|
||||
- [Blog: Feature Flags in Apache Superset](https://preset.io/blog/feature-flags-in-apache-superset-and-preset/)
|
||||
|
||||
107
docs/docs/configuration/feature-flags.mdx
Normal file
@@ -0,0 +1,107 @@
|
||||
---
|
||||
title: Feature Flags
|
||||
hide_title: true
|
||||
sidebar_position: 2
|
||||
version: 1
|
||||
---
|
||||
|
||||
import featureFlags from '@site/static/feature-flags.json';
|
||||
|
||||
export const FlagTable = ({flags}) => (
|
||||
<table>
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Flag</th>
|
||||
<th>Default</th>
|
||||
<th>Description</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{flags.map((flag) => (
|
||||
<tr key={flag.name}>
|
||||
<td><code>{flag.name}</code></td>
|
||||
<td><code>{flag.default ? 'True' : 'False'}</code></td>
|
||||
<td>
|
||||
{flag.description}
|
||||
{flag.docs && (
|
||||
<> (<a href={flag.docs}>docs</a>)</>
|
||||
)}
|
||||
</td>
|
||||
</tr>
|
||||
))}
|
||||
</tbody>
|
||||
</table>
|
||||
);
|
||||
|
||||
# Feature Flags
|
||||
|
||||
Superset uses feature flags to control the availability of features. Feature flags allow
|
||||
gradual rollout of new functionality and provide a way to enable experimental features.
|
||||
|
||||
To enable a feature flag, add it to your `superset_config.py`:
|
||||
|
||||
```python
|
||||
FEATURE_FLAGS = {
|
||||
"ENABLE_TEMPLATE_PROCESSING": True,
|
||||
}
|
||||
```
|
||||
|
||||
## Lifecycle
|
||||
|
||||
Feature flags progress through lifecycle stages:
|
||||
|
||||
| Stage | Description |
|
||||
|-------|-------------|
|
||||
| **Development** | Experimental features under active development. May be incomplete or unstable. |
|
||||
| **Testing** | Feature complete but undergoing testing. Usable but may contain bugs. |
|
||||
| **Stable** | Production-ready features. Safe for all deployments. |
|
||||
| **Deprecated** | Features scheduled for removal. Migrate away from these. |
|
||||
|
||||
---
|
||||
|
||||
## Development
|
||||
|
||||
These features are experimental and under active development. Use only in development environments.
|
||||
|
||||
<FlagTable flags={featureFlags.flags.development} />
|
||||
|
||||
---
|
||||
|
||||
## Testing
|
||||
|
||||
These features are complete but still being tested. They are usable but may have bugs.
|
||||
|
||||
<FlagTable flags={featureFlags.flags.testing} />
|
||||
|
||||
---
|
||||
|
||||
## Stable
|
||||
|
||||
These features are production-ready and safe to enable.
|
||||
|
||||
<FlagTable flags={featureFlags.flags.stable} />
|
||||
|
||||
---
|
||||
|
||||
## Deprecated
|
||||
|
||||
These features are scheduled for removal. Plan to migrate away from them.
|
||||
|
||||
<FlagTable flags={featureFlags.flags.deprecated} />
|
||||
|
||||
---
|
||||
|
||||
## Adding New Feature Flags
|
||||
|
||||
When adding a new feature flag to `superset/config.py`, include the following annotations:
|
||||
|
||||
```python
|
||||
# Description of what the feature does
|
||||
# @lifecycle: development | testing | stable | deprecated
|
||||
# @docs: https://superset.apache.org/docs/... (optional)
|
||||
# @category: runtime_config | path_to_deprecation (optional, for stable flags)
|
||||
"MY_NEW_FEATURE": False,
|
||||
```
|
||||
|
||||
This documentation is auto-generated from the annotations in
|
||||
[config.py](https://github.com/apache/superset/blob/master/superset/config.py).
|
||||
@@ -60,7 +60,7 @@ There are two approaches to making dashboards publicly accessible:
|
||||
|
||||
**Option 2: Dashboard-level access (selective control)**
|
||||
1. Set `PUBLIC_ROLE_LIKE = "Public"` in `superset_config.py`
|
||||
2. Add the `'DASHBOARD_RBAC': True` [Feature Flag](https://github.com/apache/superset/blob/master/RESOURCES/FEATURE_FLAGS.md)
|
||||
2. Add the `'DASHBOARD_RBAC': True` [Feature Flag](/docs/configuration/feature-flags)
|
||||
3. Edit each dashboard's properties and add the "Public" role
|
||||
4. Only dashboards with the Public role explicitly assigned are visible to anonymous users
|
||||
|
||||
|
||||
@@ -20,7 +20,7 @@ To help make the problem somewhat tractable—given that Apache Superset has no
|
||||
|
||||
To strive for data consistency (regardless of the timezone of the client) the Apache Superset backend tries to ensure that any timestamp sent to the client has an explicit (or semi-explicit as in the case with [Epoch time](https://en.wikipedia.org/wiki/Unix_time) which is always in reference to UTC) timezone encoded within.
|
||||
|
||||
The challenge however lies with the slew of [database engines](/docs/configuration/databases#installing-drivers-in-docker-images) which Apache Superset supports and various inconsistencies between their [Python Database API (DB-API)](https://www.python.org/dev/peps/pep-0249/) implementations combined with the fact that we use [Pandas](https://pandas.pydata.org/) to read SQL into a DataFrame prior to serializing to JSON. Regrettably Pandas ignores the DB-API [type_code](https://www.python.org/dev/peps/pep-0249/#type-objects) relying by default on the underlying Python type returned by the DB-API. Currently only a subset of the supported database engines work correctly with Pandas, i.e., ensuring timestamps without an explicit timestamp are serializd to JSON with the server timezone, thus guaranteeing the client will display timestamps in a consistent manner irrespective of the client's timezone.
|
||||
The challenge however lies with the slew of [database engines](/docs/databases#installing-drivers-in-docker) which Apache Superset supports and various inconsistencies between their [Python Database API (DB-API)](https://www.python.org/dev/peps/pep-0249/) implementations combined with the fact that we use [Pandas](https://pandas.pydata.org/) to read SQL into a DataFrame prior to serializing to JSON. Regrettably Pandas ignores the DB-API [type_code](https://www.python.org/dev/peps/pep-0249/#type-objects) relying by default on the underlying Python type returned by the DB-API. Currently only a subset of the supported database engines work correctly with Pandas, i.e., ensuring timestamps without an explicit timezone are serialized to JSON with the server timezone, thus guaranteeing the client will display timestamps in a consistent manner irrespective of the client's timezone.
|
||||
|
||||
For example the following is a comparison of MySQL and Presto,
|
||||
|
||||
|
||||
@@ -350,6 +350,12 @@ superset init
|
||||
# Note: you MUST have previously created an admin user with the username `admin` for this command to work.
|
||||
superset load-examples
|
||||
|
||||
# The load-examples command supports various options:
|
||||
# --force / -f Force reload data even if tables exist
|
||||
# --only-metadata / -m Only create table metadata without loading data (fast setup)
|
||||
# --load-test-data / -t Load additional test dashboards and datasets
|
||||
# --load-big-data / -b Generate synthetic data for stress testing (wide tables, many tables)
|
||||
|
||||
# Start the Flask dev web server from inside your virtualenv.
|
||||
# Note that your page may not have CSS at this point.
|
||||
# See instructions below on how to build the front-end assets.
|
||||
@@ -599,7 +605,7 @@ export enum FeatureFlag {
|
||||
those specified under FEATURE_FLAGS in `superset_config.py`. For example, `DEFAULT_FEATURE_FLAGS = { 'FOO': True, 'BAR': False }` in `superset/config.py` and `FEATURE_FLAGS = { 'BAR': True, 'BAZ': True }` in `superset_config.py` will result
|
||||
in combined feature flags of `{ 'FOO': True, 'BAR': True, 'BAZ': True }`.
|
||||
|
||||
The current status of the usability of each flag (stable vs testing, etc) can be found in `RESOURCES/FEATURE_FLAGS.md`.
|
||||
The current status of the usability of each flag (stable vs testing, etc) can be found in the [Feature Flags](/docs/configuration/feature-flags) documentation.
|
||||
|
||||
## Git Hooks
|
||||
|
||||
@@ -692,6 +698,97 @@ secrets.
|
||||
|
||||
---
|
||||
|
||||
## Example Data and Test Loaders
|
||||
|
||||
### Example Datasets
|
||||
|
||||
Superset includes example datasets stored as Parquet files, organized by example name in the `superset/examples/` directory. Each example is self-contained:
|
||||
|
||||
```
|
||||
superset/examples/
|
||||
├── _shared/ # Shared configuration
|
||||
│ ├── database.yaml # Database connection config
|
||||
│ └── metadata.yaml # Import metadata
|
||||
├── birth_names/ # Example: US Birth Names
|
||||
│ ├── data.parquet # Dataset (compressed columnar)
|
||||
│ ├── dataset.yaml # Dataset metadata
|
||||
│ ├── dashboard.yaml # Dashboard configuration (optional)
|
||||
│ └── charts/ # Chart configurations (optional)
|
||||
│ ├── Boys.yaml
|
||||
│ ├── Girls.yaml
|
||||
│ └── ...
|
||||
├── energy_usage/ # Example: Energy Sankey
|
||||
│ ├── data.parquet
|
||||
│ ├── dataset.yaml
|
||||
│ └── charts/
|
||||
└── ... (27 example directories)
|
||||
```
|
||||
|
||||
#### Adding a New Example Dataset
|
||||
|
||||
**Simple dataset (data only):**
|
||||
|
||||
1. Create a directory: `superset/examples/my_dataset/`
|
||||
2. Add your data as `data.parquet`:
|
||||
```python
|
||||
import pandas as pd
|
||||
df = pd.read_csv("your_data.csv")
|
||||
df.to_parquet("superset/examples/my_dataset/data.parquet", compression="snappy")
|
||||
```
|
||||
3. The dataset will be auto-discovered when running `superset load-examples`
|
||||
|
||||
**Complete example with dashboard:**
|
||||
|
||||
1. Create your dataset directory with `data.parquet`
|
||||
2. Add `dataset.yaml` with metadata (columns, metrics, etc.)
|
||||
3. Add `dashboard.yaml` with dashboard layout
|
||||
4. Add chart configs in `charts/` directory
|
||||
5. See existing examples like `birth_names/` for reference
|
||||
|
||||
#### Exporting an Existing Dashboard
|
||||
|
||||
To export a dashboard and its charts as YAML configs:
|
||||
|
||||
1. In Superset, go to the dashboard you want to export
|
||||
2. Click the "..." menu → "Export"
|
||||
3. Unzip the exported file
|
||||
4. Copy the YAML files to your example directory
|
||||
5. Add the `data.parquet` file
|
||||
|
||||
#### Why Parquet?
|
||||
|
||||
- **Apache-friendly**: Parquet is an Apache project, ideal for ASF codebases
|
||||
- **Compressed**: Built-in Snappy compression (~27% smaller than CSV)
|
||||
- **Self-describing**: Schema is embedded in the file
|
||||
- **Widely supported**: Works with pandas, pyarrow, DuckDB, Spark, etc.
|
||||
|
||||
### Test Data Generation
|
||||
|
||||
For stress testing and development, Superset includes special test data generators that create synthetic data:
|
||||
|
||||
#### Big Data Loader (`--load-big-data`)
|
||||
|
||||
Located in `superset/cli/test_loaders.py`, this generates:
|
||||
|
||||
- **Wide Table** (`wide_table`): 100 columns of mixed types, 1000 rows
|
||||
- **Many Small Tables** (`small_table_0` through `small_table_999`): 1000 tables for testing catalog performance
|
||||
- **Long Name Table**: Table with 60-character random name for testing UI edge cases
|
||||
|
||||
This is primarily used for:
|
||||
- Performance testing with extreme data shapes
|
||||
- UI edge case validation
|
||||
- Database catalog stress testing
|
||||
- CI/CD pipeline validation
|
||||
|
||||
#### Test Dashboards (`--load-test-data`)
|
||||
|
||||
Loads additional test-specific content:
|
||||
- Tabbed dashboard example
|
||||
- Supported charts dashboard
|
||||
- Test configuration files (*.test.yaml)
|
||||
|
||||
---
|
||||
|
||||
## Testing
|
||||
|
||||
### Python Testing
|
||||
|
||||
@@ -157,7 +157,7 @@ table afterwards to configure the Columns tab, check the appropriate boxes and s
|
||||
|
||||
To clarify, the database backend is an OLTP database used by Superset to store its internal
|
||||
information like your list of users and dashboard definitions. While Superset supports a
|
||||
[variety of databases as data _sources_](/docs/configuration/databases#installing-database-drivers),
|
||||
[variety of databases as data _sources_](/docs/databases#installing-database-drivers),
|
||||
only a few database engines are supported for use as the OLTP backend / metadata store.
|
||||
|
||||
Superset is tested using MySQL, PostgreSQL, and SQLite backends. It’s recommended you install
|
||||
@@ -190,7 +190,7 @@ second etc). Example:
|
||||
|
||||
## Does Superset work with [insert database engine here]?
|
||||
|
||||
The [Connecting to Databases section](/docs/configuration/databases) provides the best
|
||||
The [Connecting to Databases section](/docs/databases) provides the best
|
||||
overview for supported databases. Database engines not listed on that page may work too. We rely on
|
||||
the community to contribute to this knowledge base.
|
||||
|
||||
|
||||
@@ -149,7 +149,7 @@ For production clusters it's recommended to build own image with this step done
|
||||
Superset requires a Python DB-API database driver and a SQLAlchemy
|
||||
dialect to be installed for each datastore you want to connect to.
|
||||
|
||||
See [Install Database Drivers](/docs/configuration/databases) for more information.
|
||||
See [Install Database Drivers](/docs/databases#installing-database-drivers) for more information.
|
||||
It is recommended that you refer to versions listed in
|
||||
[pyproject.toml](https://github.com/apache/superset/blob/master/pyproject.toml)
|
||||
instead of hard-coding them in your bootstrap script, as seen below.
|
||||
|
||||
@@ -47,3 +47,15 @@ superset init
|
||||
While upgrading superset should not delete your charts and dashboards, we recommend following best
|
||||
practices and to backup your metadata database before upgrading. Before upgrading production, we
|
||||
recommend upgrading in a staging environment first, and finally upgrading production during off-peak usage.
|
||||
|
||||
## Breaking Changes
|
||||
|
||||
For a detailed list of breaking changes and migration notes for each version, see
|
||||
[UPDATING.md](https://github.com/apache/superset/blob/master/UPDATING.md).
|
||||
|
||||
This file documents backwards-incompatible changes and provides guidance for migrating between
|
||||
major versions, including:
|
||||
- Configuration changes
|
||||
- API changes
|
||||
- Database migrations
|
||||
- Deprecated features
|
||||
|
||||
@@ -32,7 +32,7 @@ git clone https://github.com/apache/superset
|
||||
$ cd superset
|
||||
|
||||
# Set the repo to the state associated with the latest official version
|
||||
$ git checkout tags/5.0.0
|
||||
$ git checkout tags/6.0.0
|
||||
|
||||
# Fire up Superset using Docker Compose
|
||||
$ docker compose -f docker-compose-image-tag.yml up
|
||||
@@ -74,7 +74,7 @@ processes by running Docker Compose `stop` command. By doing so, you can avoid d
|
||||
From this point on, you can head on to:
|
||||
|
||||
- [Create your first Dashboard](/docs/using-superset/creating-your-first-dashboard)
|
||||
- [Connect to a Database](/docs/configuration/databases)
|
||||
- [Connect to a Database](/docs/databases)
|
||||
- [Using Docker Compose](/docs/installation/docker-compose)
|
||||
- [Configure Superset](/docs/configuration/configuring-superset/)
|
||||
- [Installing on Kubernetes](/docs/installation/kubernetes/)
|
||||
|
||||
@@ -19,6 +19,7 @@
|
||||
|
||||
import type { Config } from '@docusaurus/types';
|
||||
import type { Options, ThemeConfig } from '@docusaurus/preset-classic';
|
||||
import type * as OpenApiPlugin from 'docusaurus-plugin-openapi-docs';
|
||||
import { themes } from 'prism-react-renderer';
|
||||
import remarkImportPartial from 'remark-import-partial';
|
||||
import remarkLocalizeBadges from './plugins/remark-localize-badges.mjs';
|
||||
@@ -134,7 +135,13 @@ if (!versionsConfig.developer_portal.disabled && !versionsConfig.developer_porta
|
||||
{
|
||||
type: 'doc',
|
||||
docsPluginId: 'developer_portal',
|
||||
docId: 'extensions/architectural-principles',
|
||||
docId: 'contributing/overview',
|
||||
label: 'Contributing',
|
||||
},
|
||||
{
|
||||
type: 'doc',
|
||||
docsPluginId: 'developer_portal',
|
||||
docId: 'extensions/overview',
|
||||
label: 'Extensions',
|
||||
},
|
||||
{
|
||||
@@ -146,14 +153,12 @@ if (!versionsConfig.developer_portal.disabled && !versionsConfig.developer_porta
|
||||
{
|
||||
type: 'doc',
|
||||
docsPluginId: 'developer_portal',
|
||||
docId: 'guidelines/design-guidelines',
|
||||
label: 'Guidelines',
|
||||
docId: 'components/index',
|
||||
label: 'UI Components',
|
||||
},
|
||||
{
|
||||
type: 'doc',
|
||||
docsPluginId: 'developer_portal',
|
||||
docId: 'contributing/overview',
|
||||
label: 'Contributing',
|
||||
label: 'API Reference',
|
||||
href: '/docs/api',
|
||||
},
|
||||
],
|
||||
});
|
||||
@@ -166,9 +171,11 @@ const config: Config = {
|
||||
url: 'https://superset.apache.org',
|
||||
baseUrl: '/',
|
||||
onBrokenLinks: 'warn',
|
||||
onBrokenMarkdownLinks: 'throw',
|
||||
markdown: {
|
||||
mermaid: true,
|
||||
hooks: {
|
||||
onBrokenMarkdownLinks: 'throw',
|
||||
},
|
||||
},
|
||||
favicon: '/img/favicon.ico',
|
||||
organizationName: 'apache',
|
||||
@@ -177,18 +184,34 @@ const config: Config = {
|
||||
'@saucelabs/theme-github-codeblock',
|
||||
'@docusaurus/theme-mermaid',
|
||||
'@docusaurus/theme-live-codeblock',
|
||||
'docusaurus-theme-openapi-docs',
|
||||
],
|
||||
plugins: [
|
||||
require.resolve('./src/webpack.extend.ts'),
|
||||
...dynamicPlugins,
|
||||
[
|
||||
'docusaurus-plugin-less',
|
||||
'docusaurus-plugin-openapi-docs',
|
||||
{
|
||||
lessOptions: {
|
||||
javascriptEnabled: true,
|
||||
id: 'api',
|
||||
docsPluginId: 'classic',
|
||||
config: {
|
||||
superset: {
|
||||
specPath: 'static/resources/openapi.json',
|
||||
outputDir: 'docs/api',
|
||||
sidebarOptions: {
|
||||
groupPathsBy: 'tag',
|
||||
categoryLinkSource: 'tag',
|
||||
sidebarCollapsible: true,
|
||||
sidebarCollapsed: true,
|
||||
},
|
||||
showSchemas: true,
|
||||
hideSendButton: true,
|
||||
showInfoPage: false,
|
||||
showExtensions: true,
|
||||
} satisfies OpenApiPlugin.Options,
|
||||
},
|
||||
},
|
||||
],
|
||||
...dynamicPlugins,
|
||||
[
|
||||
'@docusaurus/plugin-client-redirects',
|
||||
{
|
||||
@@ -222,7 +245,7 @@ const config: Config = {
|
||||
from: '/gallery.html',
|
||||
},
|
||||
{
|
||||
to: '/docs/configuration/databases',
|
||||
to: '/docs/databases',
|
||||
from: '/druid.html',
|
||||
},
|
||||
{
|
||||
@@ -274,7 +297,7 @@ const config: Config = {
|
||||
from: '/docs/contributing/contribution-page',
|
||||
},
|
||||
{
|
||||
to: '/docs/configuration/databases',
|
||||
to: '/docs/databases',
|
||||
from: '/docs/databases/yugabyte/',
|
||||
},
|
||||
{
|
||||
@@ -362,6 +385,7 @@ const config: Config = {
|
||||
disableVersioning: false,
|
||||
showLastUpdateAuthor: true,
|
||||
showLastUpdateTime: true,
|
||||
docItemComponent: '@theme/ApiItem', // Required for OpenAPI docs
|
||||
},
|
||||
blog: {
|
||||
showReadingTime: true,
|
||||
@@ -410,6 +434,11 @@ const config: Config = {
|
||||
docId: 'intro',
|
||||
label: 'Getting Started',
|
||||
},
|
||||
{
|
||||
type: 'doc',
|
||||
docId: 'databases/index',
|
||||
label: 'Databases',
|
||||
},
|
||||
{
|
||||
type: 'doc',
|
||||
docId: 'faq',
|
||||
@@ -468,8 +497,10 @@ const config: Config = {
|
||||
footer: {
|
||||
links: [],
|
||||
copyright: `
|
||||
<div class="footer__applitools">
|
||||
We use <a href="https://applitools.com/" target="_blank" rel="nofollow"><img src="/img/applitools.png" title="Applitools" /></a>
|
||||
<div class="footer__ci-services">
|
||||
<span>CI powered by</span>
|
||||
<a href="https://applitools.com/" target="_blank" rel="nofollow noopener noreferrer"><img src="/img/applitools.png" alt="Applitools" title="Applitools - Visual Testing" /></a>
|
||||
<a href="https://www.netlify.com/" target="_blank" rel="nofollow noopener noreferrer"><img src="/img/netlify.png" alt="Netlify" title="Netlify - Deploy Previews" /></a>
|
||||
</div>
|
||||
<p>Copyright © ${new Date().getFullYear()},
|
||||
The <a href="https://www.apache.org/" target="_blank" rel="noreferrer">Apache Software Foundation</a>,
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"copyright": {
|
||||
"message": "\n <div class=\"footer__applitools\">\n We use <a href=\"https://applitools.com/\" target=\"_blank\" rel=\"nofollow\"><img src=\"/img/applitools.png\" title=\"Applitools\" /></a>\n </div>\n <p>Copyright © 2024,\n The <a href=\"https://www.apache.org/\" target=\"_blank\" rel=\"noreferrer\">Apache Software Foundation</a>,\n Licensed under the Apache <a href=\"https://apache.org/licenses/LICENSE-2.0\" target=\"_blank\" rel=\"noreferrer\">License</a>.</p>\n <p><small>Apache Superset, Apache, Superset, the Superset logo, and the Apache feather logo are either registered trademarks or trademarks of The Apache Software Foundation. All other products or name brands are trademarks of their respective holders, including The Apache Software Foundation.\n <a href=\"https://www.apache.org/\" target=\"_blank\">Apache Software Foundation</a> resources</small></p>\n <img class=\"footer__divider\" src=\"/img/community/line.png\" alt=\"Divider\" />\n <p>\n <small>\n <a href=\"/docs/security/\" target=\"_blank\" rel=\"noreferrer\">Security</a> | \n <a href=\"https://www.apache.org/foundation/sponsorship.html\" target=\"_blank\" rel=\"noreferrer\">Donate</a> | \n <a href=\"https://www.apache.org/foundation/thanks.html\" target=\"_blank\" rel=\"noreferrer\">Thanks</a> | \n <a href=\"https://apache.org/events/current-event\" target=\"_blank\" rel=\"noreferrer\">Events</a> | \n <a href=\"https://apache.org/licenses/\" target=\"_blank\" rel=\"noreferrer\">License</a> | \n <a href=\"https://privacy.apache.org/policies/privacy-policy-public.html\" target=\"_blank\" rel=\"noreferrer\">Privacy</a>\n </small>\n </p>\n <!-- telemetry/analytics pixel: -->\n <img referrerPolicy=\"no-referrer-when-downgrade\" src=\"https://static.scarf.sh/a.png?x-pxid=39ae6855-95fc-4566-86e5-360d542b0a68\" />\n ",
|
||||
"message": "\n <div class=\"footer__ci-services\">\n <span>CI powered by</span>\n <a href=\"https://applitools.com/\" target=\"_blank\" rel=\"nofollow noopener noreferrer\"><img src=\"/img/applitools.png\" alt=\"Applitools\" title=\"Applitools - Visual Testing\" /></a>\n <a href=\"https://www.netlify.com/\" target=\"_blank\" rel=\"nofollow noopener noreferrer\"><img src=\"/img/netlify.png\" alt=\"Netlify\" title=\"Netlify - Deploy Previews\" /></a>\n </div>\n <p>Copyright © 2026,\n The <a href=\"https://www.apache.org/\" target=\"_blank\" rel=\"noreferrer\">Apache Software Foundation</a>,\n Licensed under the Apache <a href=\"https://apache.org/licenses/LICENSE-2.0\" target=\"_blank\" rel=\"noreferrer\">License</a>.</p>\n <p><small>Apache Superset, Apache, Superset, the Superset logo, and the Apache feather logo are either registered trademarks or trademarks of The Apache Software Foundation. All other products or name brands are trademarks of their respective holders, including The Apache Software Foundation.\n <a href=\"https://www.apache.org/\" target=\"_blank\">Apache Software Foundation</a> resources</small></p>\n <img class=\"footer__divider\" src=\"/img/community/line.png\" alt=\"Divider\" />\n <p>\n <small>\n <a href=\"/docs/security/\" target=\"_blank\" rel=\"noreferrer\">Security</a> | \n <a href=\"https://www.apache.org/foundation/sponsorship.html\" target=\"_blank\" rel=\"noreferrer\">Donate</a> | \n <a href=\"https://www.apache.org/foundation/thanks.html\" target=\"_blank\" rel=\"noreferrer\">Thanks</a> | \n <a href=\"https://apache.org/events/current-event\" target=\"_blank\" rel=\"noreferrer\">Events</a> | \n <a href=\"https://apache.org/licenses/\" target=\"_blank\" rel=\"noreferrer\">License</a> | \n <a href=\"https://privacy.apache.org/policies/privacy-policy-public.html\" target=\"_blank\" rel=\"noreferrer\">Privacy</a>\n </small>\n </p>\n <!-- telemetry/analytics pixel: -->\n <img referrerPolicy=\"no-referrer-when-downgrade\" 
src=\"https://static.scarf.sh/a.png?x-pxid=39ae6855-95fc-4566-86e5-360d542b0a68\" />\n ",
|
||||
"description": "The footer copyright"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -34,6 +34,8 @@
|
||||
NODE_VERSION = "20"
|
||||
# Yarn version
|
||||
YARN_VERSION = "1.22.22"
|
||||
# Increase heap size for webpack bundling of Superset UI components
|
||||
NODE_OPTIONS = "--max-old-space-size=8192"
|
||||
|
||||
# Deploy preview settings
|
||||
[context.deploy-preview]
|
||||
|
||||
@@ -6,17 +6,27 @@
|
||||
"scripts": {
|
||||
"docusaurus": "docusaurus",
|
||||
"_init": "cat src/intro_header.txt ../README.md > docs/intro.md",
|
||||
"start": "yarn run _init && yarn run generate:extension-components && NODE_ENV=development docusaurus start",
|
||||
"start": "yarn run _init && yarn run generate:all && NODE_OPTIONS='--max-old-space-size=8192' NODE_ENV=development docusaurus start",
|
||||
"start:quick": "yarn run _init && NODE_OPTIONS='--max-old-space-size=8192' NODE_ENV=development docusaurus start",
|
||||
"stop": "pkill -f 'docusaurus start' || pkill -f 'docusaurus serve' || echo 'No docusaurus server running'",
|
||||
"build": "yarn run _init && yarn run generate:extension-components && DEBUG=docusaurus:* docusaurus build",
|
||||
"build": "yarn run _init && yarn run generate:all && NODE_OPTIONS='--max-old-space-size=8192' DEBUG=docusaurus:* docusaurus build",
|
||||
"generate:api-docs": "python3 scripts/fix-openapi-spec.py && docusaurus gen-api-docs superset && node scripts/convert-api-sidebar.mjs && node scripts/generate-api-index.mjs && node scripts/generate-api-tag-pages.mjs",
|
||||
"clean:api-docs": "docusaurus clean-api-docs superset",
|
||||
"swizzle": "docusaurus swizzle",
|
||||
"deploy": "docusaurus deploy",
|
||||
"clear": "docusaurus clear",
|
||||
"serve": "yarn run _init && docusaurus serve",
|
||||
"write-translations": "docusaurus write-translations",
|
||||
"write-heading-ids": "docusaurus write-heading-ids",
|
||||
"typecheck": "yarn run generate:extension-components && tsc",
|
||||
"typecheck": "yarn run generate:all && tsc",
|
||||
"generate:extension-components": "node scripts/generate-extension-components.mjs",
|
||||
"generate:superset-components": "node scripts/generate-superset-components.mjs",
|
||||
"generate:database-docs": "node scripts/generate-database-docs.mjs",
|
||||
"gen-db-docs": "node scripts/generate-database-docs.mjs",
|
||||
"generate:all": "yarn run generate:extension-components & yarn run generate:superset-components & yarn run generate:database-docs & wait && yarn run generate:api-docs",
|
||||
"lint:db-metadata": "python3 ../superset/db_engine_specs/lint_metadata.py",
|
||||
"lint:db-metadata:report": "python3 ../superset/db_engine_specs/lint_metadata.py --markdown -o ../superset/db_engine_specs/METADATA_STATUS.md",
|
||||
"update:readme-db-logos": "node scripts/generate-database-docs.mjs --update-readme",
|
||||
"eslint": "eslint .",
|
||||
"version:add": "node scripts/manage-versions.mjs add",
|
||||
"version:remove": "node scripts/manage-versions.mjs remove",
|
||||
@@ -37,40 +47,45 @@
|
||||
"@emotion/core": "^11.0.0",
|
||||
"@emotion/react": "^11.13.3",
|
||||
"@emotion/styled": "^11.14.1",
|
||||
"@fontsource/fira-code": "^5.2.7",
|
||||
"@fontsource/inter": "^5.2.8",
|
||||
"@mdx-js/react": "^3.1.1",
|
||||
"@saucelabs/theme-github-codeblock": "^0.3.0",
|
||||
"@storybook/addon-docs": "^8.6.15",
|
||||
"@storybook/blocks": "^8.6.11",
|
||||
"@storybook/channels": "^8.6.11",
|
||||
"@storybook/client-logger": "^8.6.11",
|
||||
"@storybook/components": "^8.6.11",
|
||||
"@storybook/core": "^8.6.11",
|
||||
"@storybook/core-events": "^8.6.11",
|
||||
"@storybook/blocks": "^8.6.15",
|
||||
"@storybook/channels": "^8.6.15",
|
||||
"@storybook/client-logger": "^8.6.15",
|
||||
"@storybook/components": "^8.6.15",
|
||||
"@storybook/core": "^8.6.15",
|
||||
"@storybook/core-events": "^8.6.15",
|
||||
"@storybook/csf": "^0.1.13",
|
||||
"@storybook/docs-tools": "^8.6.11",
|
||||
"@storybook/preview-api": "^8.6.11",
|
||||
"@storybook/theming": "^8.6.11",
|
||||
"@storybook/docs-tools": "^8.6.15",
|
||||
"@storybook/preview-api": "^8.6.15",
|
||||
"@storybook/theming": "^8.6.15",
|
||||
"@superset-ui/core": "^0.20.4",
|
||||
"antd": "^6.2.0",
|
||||
"caniuse-lite": "^1.0.30001764",
|
||||
"docusaurus-plugin-less": "^2.0.2",
|
||||
"@swc/core": "^1.15.11",
|
||||
"antd": "^6.2.3",
|
||||
"baseline-browser-mapping": "^2.9.19",
|
||||
"caniuse-lite": "^1.0.30001769",
|
||||
"docusaurus-plugin-openapi-docs": "^4.6.0",
|
||||
"docusaurus-theme-openapi-docs": "^4.6.0",
|
||||
"js-yaml": "^4.1.1",
|
||||
"js-yaml-loader": "^1.2.2",
|
||||
"json-bigint": "^1.0.0",
|
||||
"less": "^4.5.1",
|
||||
"less-loader": "^12.3.0",
|
||||
"prism-react-renderer": "^2.4.1",
|
||||
"react": "^18.3.1",
|
||||
"react-dom": "^18.3.1",
|
||||
"react-github-btn": "^1.4.0",
|
||||
"react-resize-detector": "^9.1.1",
|
||||
"react-svg-pan-zoom": "^3.13.1",
|
||||
"react-table": "^7.8.0",
|
||||
"remark-import-partial": "^0.0.2",
|
||||
"reselect": "^5.1.1",
|
||||
"storybook": "^8.6.15",
|
||||
"swagger-ui-react": "^5.31.0",
|
||||
"swc-loader": "^0.2.7",
|
||||
"tinycolor2": "^1.4.2",
|
||||
"ts-loader": "^9.5.4",
|
||||
"unist-util-visit": "^5.0.0"
|
||||
"unist-util-visit": "^5.1.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@docusaurus/module-type-aliases": "^3.9.1",
|
||||
@@ -82,13 +97,13 @@
|
||||
"@typescript-eslint/parser": "^8.52.0",
|
||||
"eslint": "^9.39.2",
|
||||
"eslint-config-prettier": "^10.1.8",
|
||||
"eslint-plugin-prettier": "^5.5.3",
|
||||
"eslint-plugin-prettier": "^5.5.5",
|
||||
"eslint-plugin-react": "^7.37.5",
|
||||
"globals": "^17.0.0",
|
||||
"prettier": "^3.7.4",
|
||||
"globals": "^17.3.0",
|
||||
"prettier": "^3.8.1",
|
||||
"typescript": "~5.9.3",
|
||||
"typescript-eslint": "^8.53.0",
|
||||
"webpack": "^5.104.1"
|
||||
"typescript-eslint": "^8.54.0",
|
||||
"webpack": "^5.105.0"
|
||||
},
|
||||
"browserslist": {
|
||||
"production": [
|
||||
@@ -102,5 +117,10 @@
|
||||
"last 1 safari version"
|
||||
]
|
||||
},
|
||||
"resolutions": {
|
||||
"react-redux": "^9.2.0",
|
||||
"@reduxjs/toolkit": "^2.5.0",
|
||||
"baseline-browser-mapping": "^2.9.19"
|
||||
},
|
||||
"packageManager": "yarn@1.22.22+sha1.ac34549e6aa8e7ead463a7407e1c7390f61a6610"
|
||||
}
|
||||
|
||||
123
docs/scripts/convert-api-sidebar.mjs
Normal file
@@ -0,0 +1,123 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Convert the generated TypeScript API sidebar to CommonJS format.
|
||||
* This allows the sidebar to be imported by sidebars.js.
|
||||
* Also adds unique keys to duplicate labels to avoid translation conflicts.
|
||||
*/
|
||||
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import { fileURLToPath } from 'url';
|
||||
|
||||
const __dirname = path.dirname(fileURLToPath(import.meta.url));
const sidebarTsPath = path.join(__dirname, '..', 'docs', 'api', 'sidebar.ts');
const sidebarJsPath = path.join(__dirname, '..', 'docs', 'api', 'sidebar.js');

if (!fs.existsSync(sidebarTsPath)) {
  console.log('No sidebar.ts found, skipping conversion');
  process.exit(0);
}

/**
 * Count how many `doc` items share each label across the whole sidebar tree.
 * Labels that occur more than once need explicit keys to avoid translation
 * conflicts.
 */
function countLabels(items) {
  const counts = {};
  const visit = (item) => {
    if (item.type === 'doc' && item.label) {
      counts[item.label] = (counts[item.label] || 0) + 1;
    }
    if (item.items) {
      item.items.forEach(visit);
    }
  };
  items.forEach(visit);
  return counts;
}

/**
 * Recursively add a unique `key` to doc items whose label is duplicated, and
 * to every category (so API categories never collide with categories of the
 * main sidebar).
 */
function addKeys(items, counts, prefix = 'api') {
  for (const item of items) {
    if (item.type === 'doc' && item.label && counts[item.label] > 1) {
      item.key = item.id;
    }
    if (item.type === 'category' && item.label) {
      item.key = `${prefix}-category-${item.label.toLowerCase().replace(/\s+/g, '-')}`;
    }
    if (item.items) {
      addKeys(item.items, counts, prefix);
    }
  }
}

let content = fs.readFileSync(sidebarTsPath, 'utf8');

// Strip TypeScript-only syntax so the file becomes valid CommonJS.
content = content.replace(/import type.*\n/g, '');
content = content.replace(/: SidebarsConfig/g, '');
content = content.replace(
  /export default sidebar\.apisidebar;/,
  'module.exports = sidebar.apisidebar;'
);

// Best effort: evaluate the sidebar object and regenerate it with unique
// keys added. On any failure, `content` already holds the plain CJS
// conversion performed above, so we only warn and fall through. (The
// previous version re-ran the export-default replacement in the catch
// block, which was a no-op because it had already been applied.)
try {
  const sidebarMatch = content.match(/const sidebar = (\{[\s\S]*\});/);
  if (sidebarMatch) {
    // Use Function constructor instead of eval for safer evaluation
    const sidebarObj = new Function(`return ${sidebarMatch[1]}`)();

    const counts = countLabels(sidebarObj.apisidebar);
    addKeys(sidebarObj.apisidebar, counts);

    // Regenerate the content with the updated sidebar
    content = `const sidebar = ${JSON.stringify(sidebarObj, null, 2)};

module.exports = sidebar.apisidebar;
`;
  }
} catch (e) {
  console.warn('Could not add unique keys to sidebar:', e.message);
}

// Add header with eslint-disable to allow @ts-nocheck
const header = `/* eslint-disable @typescript-eslint/ban-ts-comment */
// @ts-nocheck
/**
 * Auto-generated CommonJS sidebar from sidebar.ts
 * Do not edit directly - run 'yarn generate:api-docs' to regenerate
 */

`;

fs.writeFileSync(sidebarJsPath, header + content);
console.log('Converted sidebar.ts to sidebar.js');
|
||||
296
docs/scripts/extract_custom_errors.py
Normal file
@@ -0,0 +1,296 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
Extract custom_errors from database engine specs for documentation.
|
||||
|
||||
This script parses engine spec files to extract error handling information
|
||||
that can be displayed on database documentation pages.
|
||||
|
||||
Usage: python scripts/extract_custom_errors.py
|
||||
Output: JSON mapping of engine spec module names to their custom errors
|
||||
"""
|
||||
|
||||
import ast
|
||||
import json # noqa: TID251 - standalone docs script, not part of superset
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
# Map SupersetErrorType values to human-readable categories and issue codes.
# Keys are SupersetErrorType attribute names matched as plain strings (see
# extract_error_tuple_info), so this script needs no import from superset.
# NOTE(review): the issue-code lists mirror Superset's numbered error-issue
# docs — confirm against the upstream error definitions when updating.
ERROR_TYPE_INFO = {
    "CONNECTION_INVALID_USERNAME_ERROR": {
        "category": "Authentication",
        "description": "Invalid username",
        "issue_codes": [1012],
    },
    "CONNECTION_INVALID_PASSWORD_ERROR": {
        "category": "Authentication",
        "description": "Invalid password",
        "issue_codes": [1013],
    },
    "CONNECTION_ACCESS_DENIED_ERROR": {
        "category": "Authentication",
        "description": "Access denied",
        "issue_codes": [1014, 1015],
    },
    "CONNECTION_INVALID_HOSTNAME_ERROR": {
        "category": "Connection",
        "description": "Invalid hostname",
        "issue_codes": [1007],
    },
    "CONNECTION_PORT_CLOSED_ERROR": {
        "category": "Connection",
        "description": "Port closed or refused",
        "issue_codes": [1008],
    },
    "CONNECTION_HOST_DOWN_ERROR": {
        "category": "Connection",
        "description": "Host unreachable",
        "issue_codes": [1009],
    },
    "CONNECTION_UNKNOWN_DATABASE_ERROR": {
        "category": "Connection",
        "description": "Unknown database",
        "issue_codes": [1015],
    },
    "CONNECTION_DATABASE_PERMISSIONS_ERROR": {
        "category": "Permissions",
        "description": "Insufficient permissions",
        "issue_codes": [1017],
    },
    "CONNECTION_MISSING_PARAMETERS_ERROR": {
        "category": "Configuration",
        "description": "Missing parameters",
        "issue_codes": [1018],
    },
    "CONNECTION_DATABASE_TIMEOUT": {
        "category": "Connection",
        "description": "Connection timeout",
        "issue_codes": [1001, 1009],
    },
    "COLUMN_DOES_NOT_EXIST_ERROR": {
        "category": "Query",
        "description": "Column not found",
        "issue_codes": [1003, 1004],
    },
    "TABLE_DOES_NOT_EXIST_ERROR": {
        "category": "Query",
        "description": "Table not found",
        "issue_codes": [1003, 1005],
    },
    "SCHEMA_DOES_NOT_EXIST_ERROR": {
        "category": "Query",
        "description": "Schema not found",
        "issue_codes": [1003, 1016],
    },
    "SYNTAX_ERROR": {
        "category": "Query",
        "description": "SQL syntax error",
        "issue_codes": [1030],
    },
    "OBJECT_DOES_NOT_EXIST_ERROR": {
        "category": "Query",
        "description": "Object not found",
        "issue_codes": [1029],
    },
    # Fallback bucket for errors with no more specific mapping.
    "GENERIC_DB_ENGINE_ERROR": {
        "category": "General",
        "description": "Database engine error",
        "issue_codes": [1002],
    },
}
|
||||
|
||||
|
||||
def extract_string_from_call(node: ast.Call) -> str | None:
|
||||
"""Extract string from __() or _() translation calls."""
|
||||
if not node.args:
|
||||
return None
|
||||
arg = node.args[0]
|
||||
if isinstance(arg, ast.Constant) and isinstance(arg.value, str):
|
||||
return arg.value
|
||||
elif isinstance(arg, ast.JoinedStr):
|
||||
# f-string - try to reconstruct
|
||||
parts = []
|
||||
for value in arg.values:
|
||||
if isinstance(value, ast.Constant):
|
||||
parts.append(str(value.value))
|
||||
elif isinstance(value, ast.FormattedValue):
|
||||
# Just use a placeholder
|
||||
parts.append("{...}")
|
||||
return "".join(parts)
|
||||
return None
|
||||
|
||||
|
||||
def extract_custom_errors_from_file(filepath: Path) -> dict[str, list[dict[str, Any]]]:
    """
    Extract ``custom_errors`` definitions from a Python engine spec file.

    Walks every class in the module and parses any class-level
    ``custom_errors`` dict literal, whether written as an annotated
    assignment (``custom_errors: T = {...}``) or a plain one
    (``custom_errors = {...}``).

    Returns a dict mapping class names to their custom errors list.
    Read or parse failures are reported to stderr and yield an empty
    (or partial) result rather than raising.
    """
    results: dict[str, list[dict[str, Any]]] = {}

    try:
        source = filepath.read_text(encoding="utf-8")
        tree = ast.parse(source)

        for node in ast.walk(tree):
            if not isinstance(node, ast.ClassDef):
                continue

            for item in node.body:
                # Normalize both assignment forms to a single target name.
                if isinstance(item, ast.AnnAssign) and isinstance(item.target, ast.Name):
                    target_name = item.target.id
                elif (
                    isinstance(item, ast.Assign)
                    and len(item.targets) == 1
                    and isinstance(item.targets[0], ast.Name)
                ):
                    target_name = item.targets[0].id
                else:
                    continue

                # Only dict literals named custom_errors are of interest.
                if target_name != "custom_errors" or not isinstance(item.value, ast.Dict):
                    continue

                errors = extract_errors_from_dict(item.value, source)
                if errors:
                    results[node.name] = errors

    except (OSError, SyntaxError, ValueError) as e:
        print(f"Error parsing {filepath}: {e}", file=sys.stderr)

    return results
|
||||
|
||||
|
||||
def extract_regex_info(key: ast.expr) -> dict[str, Any]:
    """Describe the regex used as a ``custom_errors`` dict key.

    A bare name (a pattern compiled elsewhere in the module) is reported
    as ``regex_name``; an inline ``re.compile("...")`` call with a
    constant first argument as ``regex_pattern``. Anything else produces
    an empty dict.
    """
    # Pattern referenced by name, e.g. `CONNECTION_ACCESS_DENIED_REGEX`.
    if isinstance(key, ast.Name):
        return {"regex_name": key.id}

    # Inline `<module>.compile("pattern")` call with a literal pattern.
    if (
        isinstance(key, ast.Call)
        and isinstance(key.func, ast.Attribute)
        and key.func.attr == "compile"
        and key.args
        and isinstance(key.args[0], ast.Constant)
    ):
        return {"regex_pattern": key.args[0].value}

    return {}
|
||||
|
||||
|
||||
def extract_invalid_fields(extra_node: ast.Dict) -> list[str]:
|
||||
"""Extract invalid fields from the extra dict."""
|
||||
for k, v in zip(extra_node.keys, extra_node.values, strict=False):
|
||||
if (
|
||||
isinstance(k, ast.Constant)
|
||||
and k.value == "invalid"
|
||||
and isinstance(v, ast.List)
|
||||
):
|
||||
return [elem.value for elem in v.elts if isinstance(elem, ast.Constant)]
|
||||
return []
|
||||
|
||||
|
||||
def extract_error_tuple_info(value: ast.Tuple) -> dict[str, Any]:
    """Decode one ``(message, error_type, extra)`` custom-error tuple.

    Collects the message template (from a translation call or a string
    constant), the ``SupersetErrorType`` attribute name — enriched with
    category/description/issue codes when present in ERROR_TYPE_INFO —
    and any ``invalid`` fields from the optional third (extra) element.
    """
    result: dict[str, Any] = {}

    # Element 0: message template, either `__("...")` or a bare constant.
    message_node = value.elts[0]
    if isinstance(message_node, ast.Call):
        template = extract_string_from_call(message_node)
        if template:
            result["message_template"] = template
    elif isinstance(message_node, ast.Constant):
        result["message_template"] = message_node.value

    # Element 1: `SupersetErrorType.<NAME>` — record the attribute name,
    # plus human-readable info when we have a mapping for it.
    type_node = value.elts[1]
    if isinstance(type_node, ast.Attribute):
        result["error_type"] = type_node.attr
        known = ERROR_TYPE_INFO.get(type_node.attr)
        if known is not None:
            result["category"] = known["category"]
            result["description"] = known["description"]
            result["issue_codes"] = known["issue_codes"]

    # Element 2 (optional): extra dict carrying the invalid form fields.
    if len(value.elts) >= 3 and isinstance(value.elts[2], ast.Dict):
        fields = extract_invalid_fields(value.elts[2])
        if fields:
            result["invalid_fields"] = fields

    return result
|
||||
|
||||
|
||||
def extract_errors_from_dict(dict_node: ast.Dict, source: str) -> list[dict[str, Any]]:
    """Extract error information from a ``custom_errors`` dict AST node.

    Combines the regex info from each key with the decoded
    ``(message, error_type, extra)`` tuple value. Entries are kept only
    when both an ``error_type`` and a ``message_template`` could be
    recovered. The ``source`` argument is accepted for interface
    compatibility but is not consulted here.
    """
    errors: list[dict[str, Any]] = []

    for key_node, value_node in zip(dict_node.keys, dict_node.values, strict=False):
        # `**spread` entries carry a None key; skip anything unpaired.
        if key_node is None or value_node is None:
            continue

        info = extract_regex_info(key_node)
        if isinstance(value_node, ast.Tuple) and len(value_node.elts) >= 2:
            info.update(extract_error_tuple_info(value_node))

        if info.get("error_type") and info.get("message_template"):
            errors.append(info)

    return errors
|
||||
|
||||
|
||||
def main() -> None:
    """Collect ``custom_errors`` from every engine spec and print JSON.

    Scans ``superset/db_engine_specs`` (resolved relative to this script's
    location two levels below the repository root), skipping private
    modules whose name starts with ``_``, and prints a JSON mapping of
    module name -> class name -> errors to stdout. Exits with status 1
    when the specs directory cannot be found.
    """
    # docs/scripts/ -> repo root -> superset/db_engine_specs
    script_dir = Path(__file__).parent
    specs_dir = script_dir.parent.parent / "superset" / "db_engine_specs"

    if not specs_dir.exists():
        print(f"Error: Engine specs directory not found: {specs_dir}", file=sys.stderr)
        sys.exit(1)

    # Only modules that actually define custom errors make it into the map.
    per_module = (
        (spec_path.stem, extract_custom_errors_from_file(spec_path))
        for spec_path in sorted(specs_dir.glob("*.py"))
        if not spec_path.name.startswith("_")
    )
    all_errors = {module: errors for module, errors in per_module if errors}

    # Output as JSON
    print(json.dumps(all_errors, indent=2))


if __name__ == "__main__":
    main()
|
||||
828
docs/scripts/fix-openapi-spec.py
Normal file
@@ -0,0 +1,828 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
Fix missing schema references in the OpenAPI spec.
|
||||
|
||||
This script patches the openapi.json file to add any missing schemas
|
||||
that are referenced but not defined.
|
||||
"""
|
||||
|
||||
import json # noqa: TID251 - standalone docs script
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
|
||||
def _missing_schema_definitions() -> dict[str, dict[str, Any]]:
    """Return fresh bodies for schemas the generator references but omits.

    Definitions are hand-mirrored from the marshmallow schemas in
    superset/dashboards/schemas.py, superset/sqllab/schemas.py and
    superset/reports/schemas.py. A new dict is built on every call so
    callers may freely mutate the patched spec without corrupting any
    shared module-level state.
    """
    return {
        # Based on superset/dashboards/schemas.py
        "DashboardScreenshotPostSchema": {
            "type": "object",
            "properties": {
                "dataMask": {
                    "type": "object",
                    "description": "An object representing the data mask.",
                    "additionalProperties": True,
                },
                "activeTabs": {
                    "type": "array",
                    "items": {"type": "string"},
                    "description": "A list representing active tabs.",
                },
                "anchor": {
                    "type": "string",
                    "description": "A string representing the anchor.",
                },
                "urlParams": {
                    "type": "array",
                    "items": {
                        "type": "array",
                        "items": {"type": "string"},
                        "minItems": 2,
                        "maxItems": 2,
                    },
                    "description": "A list of tuples, each containing two strings.",
                },
            },
        },
        # Based on superset/dashboards/schemas.py
        "DashboardNativeFiltersConfigUpdateSchema": {
            "type": "object",
            "properties": {
                "deleted": {
                    "type": "array",
                    "items": {"type": "string"},
                    "description": "List of deleted filter IDs.",
                },
                "modified": {
                    "type": "array",
                    "items": {"type": "object"},
                    "description": "List of modified filter configurations.",
                },
                "reordered": {
                    "type": "array",
                    "items": {"type": "string"},
                    "description": "List of filter IDs in new order.",
                },
            },
        },
        # Based on superset/dashboards/schemas.py
        "DashboardColorsConfigUpdateSchema": {
            "type": "object",
            "properties": {
                "color_namespace": {
                    "type": "string",
                    "nullable": True,
                    "description": "The color namespace.",
                },
                "color_scheme": {
                    "type": "string",
                    "nullable": True,
                    "description": "The color scheme name.",
                },
                "map_label_colors": {
                    "type": "object",
                    "additionalProperties": {"type": "string"},
                    "description": "Mapping of labels to colors.",
                },
                "shared_label_colors": {
                    "type": "object",
                    "additionalProperties": {"type": "string"},
                    "description": "Shared label colors across charts.",
                },
                "label_colors": {
                    "type": "object",
                    "additionalProperties": {"type": "string"},
                    "description": "Label to color mapping.",
                },
                "color_scheme_domain": {
                    "type": "array",
                    "items": {"type": "string"},
                    "description": "Color scheme domain values.",
                },
            },
        },
        # Based on superset/sqllab/schemas.py
        "FormatQueryPayloadSchema": {
            "type": "object",
            "required": ["sql"],
            "properties": {
                "sql": {
                    "type": "string",
                    "description": "The SQL query to format.",
                },
                "engine": {
                    "type": "string",
                    "nullable": True,
                    "description": "The database engine.",
                },
                "database_id": {
                    "type": "integer",
                    "nullable": True,
                    "description": "The database id.",
                },
                "template_params": {
                    "type": "string",
                    "nullable": True,
                    "description": "The SQL query template params as JSON string.",
                },
            },
        },
        # Based on superset/reports/schemas.py
        "get_slack_channels_schema": {
            "type": "object",
            "properties": {
                "search_string": {
                    "type": "string",
                    "description": "String to search for in channel names.",
                },
                "types": {
                    "type": "array",
                    "items": {
                        "type": "string",
                        "enum": ["public_channel", "private_channel"],
                    },
                    "description": "Types of channels to search.",
                },
                "exact_match": {
                    "type": "boolean",
                    "description": "Whether to match channel names exactly.",
                },
            },
        },
    }


def add_missing_schemas(spec: dict[str, Any]) -> tuple[dict[str, Any], list[str]]:
    """Add missing schema definitions to the OpenAPI spec.

    Mutates ``spec`` in place (and also returns it): each known missing
    schema that is not already present under ``components.schemas`` is
    inserted; schemas the spec already defines are left untouched.

    Replaces four copy-pasted ``if name not in schemas`` blocks with a
    single data-driven loop over ``_missing_schema_definitions()``.

    Args:
        spec: The parsed openapi.json document.

    Returns:
        Tuple of the (mutated) spec and the names of the schemas that
        were added, in definition order.
    """
    schemas = spec.get("components", {}).get("schemas", {})
    fixed: list[str] = []

    for name, definition in _missing_schema_definitions().items():
        if name not in schemas:
            schemas[name] = definition
            fixed.append(name)

    # Re-attach in case `components` or `schemas` was absent in the input.
    if "components" not in spec:
        spec["components"] = {}
    spec["components"]["schemas"] = schemas

    return spec, fixed
|
||||
|
||||
|
||||
def path_to_operation_id(path: str, method: str) -> str:
    """Convert a path and method to an operationId.

    The ``/api/v1/`` prefix is dropped, path parameters like ``{pk}``
    become ``by_pk``, and the HTTP method is mapped to a verb prefix
    (e.g. POST -> "create").
    """
    verb_by_method = {
        "get": "get",
        "post": "create",
        "put": "update",
        "delete": "delete",
        "patch": "patch",
    }
    lowered = method.lower()
    verb = verb_by_method.get(lowered, lowered)

    # Strip the API prefix and rewrite "{param}" placeholders as "by_param".
    resource = path.replace("/api/v1/", "").strip("/")
    resource = resource.replace("{", "by_").replace("}", "")

    # Normalize separators so the result is a valid identifier-like id.
    op_id = f"{verb}_{resource}"
    return op_id.replace("/", "_").replace("-", "_")
|
||||
|
||||
|
||||
def path_to_summary(path: str, method: str) -> str:
    """Generate a human-readable summary from a path and HTTP method."""

    def humanize(segment: str) -> str:
        # "{pk}" -> "by pk"; otherwise turn separators into spaces.
        if segment.startswith("{") and segment.endswith("}"):
            return f"by {segment[1:-1]}"
        return segment.replace("_", " ").replace("-", " ")

    trimmed = path.replace("/api/v1/", "").strip("/")
    resource = " ".join(humanize(segment) for segment in trimmed.split("/"))

    # PATCH reads as "Update" for humans, like PUT.
    verbs = {
        "get": "Get",
        "post": "Create",
        "put": "Update",
        "delete": "Delete",
        "patch": "Update",
    }
    verb = verbs.get(method.lower(), method.capitalize())

    return f"{verb} {resource}"
|
||||
|
||||
|
||||
def add_missing_operation_ids(spec: dict[str, Any]) -> int:
    """Add operationId and summary to operations that are missing them.

    Bug fix: the original only filled the fields when BOTH were absent
    (``not summary and not operation_id``), so an operation with a summary
    but no operationId (or vice versa) was left broken. Each field is now
    filled independently.

    Args:
        spec: The OpenAPI spec (mutated in place).

    Returns:
        The number of operations that had at least one field added.
    """
    fixed_count = 0

    for path, methods in spec.get("paths", {}).items():
        for method, details in methods.items():
            if method not in ["get", "post", "put", "delete", "patch"]:
                continue

            if not isinstance(details, dict):
                continue

            changed = False
            if not details.get("operationId"):
                details["operationId"] = path_to_operation_id(path, method)
                changed = True
            if not details.get("summary"):
                details["summary"] = path_to_summary(path, method)
                changed = True

            if changed:
                fixed_count += 1

    return fixed_count
|
||||
|
||||
|
||||
# Human-written descriptions for OpenAPI tags, keyed by the exact tag name
# used in the spec's operations. Tags not listed here fall back to a generic
# auto-generated description (see add_tag_definitions).
TAG_DESCRIPTIONS = {
    "Advanced Data Type": "Advanced data type operations and conversions.",
    "Annotation Layers": "Manage annotation layers and annotations for charts.",
    "AsyncEventsRestApi": "Real-time event streaming via Server-Sent Events (SSE).",
    "Available Domains": "Get available domains for the Superset instance.",
    "CSS Templates": "Manage CSS templates for custom dashboard styling.",
    "CacheRestApi": "Cache management and invalidation operations.",
    "Charts": "Create, read, update, and delete charts (slices).",
    "Current User": "Get information about the authenticated user.",
    "Dashboard Filter State": "Manage temporary filter state for dashboards.",
    "Dashboard Permanent Link": "Permanent links to dashboard states.",
    "Dashboards": "Create, read, update, and delete dashboards.",
    "Database": "Manage database connections and metadata.",
    "Datasets": "Manage datasets (tables) used for building charts.",
    "Datasources": "Query datasource metadata and column values.",
    "Embedded Dashboard": "Configure embedded dashboard settings.",
    "Explore": "Chart exploration and data querying endpoints.",
    "Explore Form Data": "Manage temporary form data for chart exploration.",
    "Explore Permanent Link": "Permanent links to chart explore states.",
    "Import/export": "Import and export Superset assets.",
    "LogRestApi": "Access audit logs and activity history.",
    "Menu": "Get the Superset menu structure.",
    "OpenApi": "Access the OpenAPI specification.",
    "Queries": "View and manage SQL Lab query history.",
    "Report Schedules": "Configure scheduled reports and alerts.",
    "Row Level Security": "Manage row-level security rules for data access.",
    "SQL Lab": "Execute SQL queries and manage SQL Lab sessions.",
    "SQL Lab Permanent Link": "Permanent links to SQL Lab states.",
    "Security": "Authentication and token management.",
    "Security Permissions": "View available permissions.",
    "Security Permissions on Resources (View Menus)": "Permission-resource mappings.",
    "Security Resources (View Menus)": "Manage security resources (view menus).",
    "Security Roles": "Manage security roles and their permissions.",
    "Security Users": "Manage user accounts.",
    "Tags": "Organize assets with tags.",
    "User": "User profile and preferences.",
}
|
||||
|
||||
|
||||
def _curl_sample(method: str, url: str, has_body: bool) -> str:
    """Build the cURL snippet for an endpoint."""
    cmd = f'curl -X {method.upper()} "{url}"'
    cmd += ' \\\n  -H "Authorization: Bearer $ACCESS_TOKEN"'
    if has_body:
        cmd += ' \\\n  -H "Content-Type: application/json"'
        cmd += ' \\\n  -d \'{"key": "value"}\''
    return cmd


def _python_sample(method: str, url: str, with_body: bool) -> str:
    """Build the Python ``requests`` snippet for an endpoint."""
    lines = [
        "import requests",
        "",
        f"response = requests.{method}(",
        f'    "{url}",',
    ]
    if with_body:
        lines.append('    headers={"Authorization": "Bearer " + access_token},')
        lines.append('    json={"key": "value"}')
    else:
        lines.append('    headers={"Authorization": "Bearer " + access_token}')
    lines.append(")")
    # DELETE responses typically have no body, so show the status code instead.
    if method == "delete":
        lines.append("print(response.status_code)")
    else:
        lines.append("print(response.json())")
    return "\n".join(lines)


def _js_sample(method: str, url: str, with_body: bool) -> str:
    """Build the JavaScript ``fetch`` snippet for an endpoint."""
    lines = [
        "const response = await fetch(",
        f'  "{url}",',
        "  {",
    ]
    # GET is fetch's default method; all others are spelled out.
    if method != "get":
        lines.append(f'    method: "{method.upper()}",')
    if with_body:
        lines.append("    headers: {")
        lines.append('      "Authorization": `Bearer ${accessToken}`,')
        lines.append('      "Content-Type": "application/json"')
        lines.append("    },")
        lines.append('    body: JSON.stringify({ key: "value" })')
    else:
        lines.append("    headers: {")
        lines.append('      "Authorization": `Bearer ${accessToken}`')
        lines.append("    }")
    lines.append("  }")
    lines.append(");")
    if method in ("get", "post", "put", "patch"):
        lines.append("const data = await response.json();")
        lines.append("console.log(data);")
    else:
        lines.append("console.log(response.status);")
    return "\n".join(lines)


def generate_code_sample(
    method: str, path: str, has_body: bool = False
) -> list[dict[str, str]]:
    """Generate code samples for an endpoint in cURL, Python, and JavaScript.

    Bug fix: the Python sample previously omitted the JSON body for PATCH
    (it fell into the generic branch) even though the JavaScript sample
    included it; PATCH is now treated like POST/PUT in both languages.

    Args:
        method: HTTP method (case-insensitive).
        path: API path; ``{pk}``/``{id_or_slug}`` placeholders are replaced
            with an example value of ``1``.
        has_body: Whether the endpoint declares a request body (controls the
            body lines in the cURL sample).

    Returns:
        A list of ``{"lang", "label", "source"}`` dicts, one per language.
    """
    method_l = method.lower()
    # Clean up path for display.
    example_path = path.replace("{pk}", "1").replace("{id_or_slug}", "1")
    url = f"http://localhost:8088{example_path}"

    # Python/JS samples show a JSON body for the methods that conventionally
    # carry one, keeping all three languages consistent.
    with_body = method_l in ("post", "put", "patch")

    return [
        {
            "lang": "cURL",
            "label": "cURL",
            "source": _curl_sample(method_l, url, has_body),
        },
        {
            "lang": "Python",
            "label": "Python",
            "source": _python_sample(method_l, url, with_body),
        },
        {
            "lang": "JavaScript",
            "label": "JavaScript",
            "source": _js_sample(method_l, url, with_body),
        },
    ]
|
||||
|
||||
|
||||
def add_code_samples(spec: dict[str, Any]) -> int:
    """Attach x-codeSamples to every operation that does not have them yet.

    Operations that already carry ``x-codeSamples`` are left untouched.

    Returns:
        The number of operations updated.
    """
    updated = 0
    http_methods = ("get", "post", "put", "delete", "patch")

    for path, operations in spec.get("paths", {}).items():
        for verb, op in operations.items():
            if verb not in http_methods or not isinstance(op, dict):
                continue

            # Respect samples that are already present.
            if "x-codeSamples" in op:
                continue

            # Endpoints with a requestBody get body lines in the cURL sample.
            op["x-codeSamples"] = generate_code_sample(
                verb, path, "requestBody" in op
            )
            updated += 1

    return updated
|
||||
|
||||
|
||||
def configure_servers(spec: dict[str, Any]) -> bool:
    """Configure server URLs with variables for flexible API testing.

    Installs two entries: a fixed local-development URL and a fully
    parameterized ``{protocol}://{host}:{port}`` template.

    Returns:
        True when the servers list was written, False when a variable-based
        configuration is already present.
    """
    current = spec.get("servers", [])
    already_configured = len(current) >= 2 and any(
        "variables" in server for server in current
    )
    if already_configured:
        return False

    spec["servers"] = [
        {
            "url": "http://localhost:8088",
            "description": "Local development server",
        },
        {
            "url": "{protocol}://{host}:{port}",
            "description": "Custom server",
            "variables": {
                "protocol": {
                    "default": "http",
                    "enum": ["http", "https"],
                    "description": "HTTP protocol",
                },
                "host": {
                    "default": "localhost",
                    "description": "Server hostname or IP",
                },
                "port": {
                    "default": "8088",
                    "description": "Server port",
                },
            },
        },
    ]
    return True
|
||||
|
||||
|
||||
def add_tag_definitions(spec: dict[str, Any]) -> int:
    """Add tag definitions with descriptions to the OpenAPI spec.

    Collects every tag referenced by an operation, pairs each with a curated
    description from TAG_DESCRIPTIONS (or a generic fallback), and rewrites
    ``spec["tags"]`` when any tag is new or the list is empty.

    Returns:
        The number of tag definitions written, or 0 if nothing changed.
    """
    http_methods = ("get", "post", "put", "delete", "patch")

    # Gather every tag actually used by an operation.
    used_tags: set[str] = set()
    for _path, operations in spec.get("paths", {}).items():
        for verb, op in operations.items():
            if verb in http_methods and isinstance(op, dict):
                used_tags.update(op.get("tags", []))

    # Pair each tag with its curated or generated description.
    definitions = [
        {
            "name": tag,
            "description": TAG_DESCRIPTIONS.get(
                tag, f"Endpoints related to {tag}."
            ),
        }
        for tag in sorted(used_tags)
    ]

    # Only rewrite when something new appeared (or nothing exists yet).
    known_names = {t.get("name") for t in spec.get("tags", [])}
    has_new = any(d["name"] not in known_names for d in definitions)

    if has_new or not spec.get("tags"):
        spec["tags"] = definitions
        return len(definitions)

    return 0
|
||||
|
||||
|
||||
def generate_example_from_schema( # noqa: C901
|
||||
schema: dict[str, Any],
|
||||
spec: dict[str, Any],
|
||||
depth: int = 0,
|
||||
max_depth: int = 5,
|
||||
) -> dict[str, Any] | list[Any] | str | int | float | bool | None:
|
||||
"""Generate an example value from an OpenAPI schema definition."""
|
||||
if depth > max_depth:
|
||||
return None
|
||||
|
||||
# Handle $ref
|
||||
if "$ref" in schema:
|
||||
ref_path = schema["$ref"]
|
||||
if ref_path.startswith("#/components/schemas/"):
|
||||
schema_name = ref_path.split("/")[-1]
|
||||
ref_schema = (
|
||||
spec.get("components", {}).get("schemas", {}).get(schema_name, {})
|
||||
)
|
||||
return generate_example_from_schema(ref_schema, spec, depth + 1, max_depth)
|
||||
return None
|
||||
|
||||
# If schema already has an example, use it
|
||||
if "example" in schema:
|
||||
return schema["example"]
|
||||
|
||||
schema_type = schema.get("type", "object")
|
||||
|
||||
if schema_type == "object":
|
||||
properties = schema.get("properties", {})
|
||||
if not properties:
|
||||
# Check for additionalProperties
|
||||
if schema.get("additionalProperties"):
|
||||
return {"key": "value"}
|
||||
return {}
|
||||
|
||||
result = {}
|
||||
for prop_name, prop_schema in properties.items():
|
||||
# Limit object depth and skip large nested objects
|
||||
if depth < max_depth:
|
||||
example_val = generate_example_from_schema(
|
||||
prop_schema, spec, depth + 1, max_depth
|
||||
)
|
||||
if example_val is not None:
|
||||
result[prop_name] = example_val
|
||||
return result
|
||||
|
||||
elif schema_type == "array":
|
||||
items_schema = schema.get("items", {})
|
||||
if items_schema:
|
||||
item_example = generate_example_from_schema(
|
||||
items_schema, spec, depth + 1, max_depth
|
||||
)
|
||||
if item_example is not None:
|
||||
return [item_example]
|
||||
return []
|
||||
|
||||
elif schema_type == "string":
|
||||
# Check for enum
|
||||
if "enum" in schema:
|
||||
return schema["enum"][0]
|
||||
# Check for format
|
||||
fmt = schema.get("format", "")
|
||||
if fmt == "date-time":
|
||||
return "2024-01-15T10:30:00Z"
|
||||
elif fmt == "date":
|
||||
return "2024-01-15"
|
||||
elif fmt == "email":
|
||||
return "user@example.com"
|
||||
elif fmt == "uri" or fmt == "url":
|
||||
return "https://example.com"
|
||||
elif fmt == "uuid":
|
||||
return "550e8400-e29b-41d4-a716-446655440000"
|
||||
# Use description hints or prop name
|
||||
return "string"
|
||||
|
||||
elif schema_type == "integer":
|
||||
if "minimum" in schema:
|
||||
return schema["minimum"]
|
||||
return 1
|
||||
|
||||
elif schema_type == "number":
|
||||
if "minimum" in schema:
|
||||
return schema["minimum"]
|
||||
return 1.0
|
||||
|
||||
elif schema_type == "boolean":
|
||||
return True
|
||||
|
||||
elif schema_type == "null":
|
||||
return None
|
||||
|
||||
# Handle oneOf, anyOf
|
||||
if "oneOf" in schema and schema["oneOf"]:
|
||||
return generate_example_from_schema(
|
||||
schema["oneOf"][0], spec, depth + 1, max_depth
|
||||
)
|
||||
if "anyOf" in schema and schema["anyOf"]:
|
||||
return generate_example_from_schema(
|
||||
schema["anyOf"][0], spec, depth + 1, max_depth
|
||||
)
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def add_response_examples(spec: dict[str, Any]) -> int:  # noqa: C901
    """Add example values to API responses for better documentation.

    First seeds the shared error responses under components/responses with
    canned error messages, then synthesizes examples for inline operation
    responses from their declared JSON schemas.

    Returns:
        The number of examples added.
    """
    added = 0
    http_methods = ("get", "post", "put", "delete", "patch")

    # Canned payloads for the standard error responses in components.
    canned_errors = {
        "400": {"message": "Bad request: Invalid parameters provided"},
        "401": {"message": "Unauthorized: Authentication required"},
        "403": {
            "message": "Forbidden: You don't have permission to access this resource"
        },
        "404": {"message": "Not found: The requested resource does not exist"},
        "422": {"message": "Unprocessable entity: Validation error"},
        "500": {"message": "Internal server error: An unexpected error occurred"},
    }

    shared_responses = spec.get("components", {}).get("responses", {})
    for status, payload in canned_errors.items():
        response = shared_responses.get(status)
        if response is None:
            continue
        media = response.get("content", {}).get("application/json", {})
        if media and "example" not in media:
            media["example"] = payload
            added += 1

    # Inline operation responses: generate an example from the schema.
    for _path, operations in spec.get("paths", {}).items():
        for verb, op in operations.items():
            if verb not in http_methods or not isinstance(op, dict):
                continue

            for _status, response in op.get("responses", {}).items():
                # $ref responses point at the shared ones handled above.
                if "$ref" in response:
                    continue

                media = response.get("content", {}).get("application/json", {})
                if not media or "example" in media:
                    continue

                schema = media.get("schema", {})
                if not schema:
                    continue

                example = generate_example_from_schema(
                    schema, spec, depth=0, max_depth=3
                )
                if example is not None and example != {}:
                    media["example"] = example
                    added += 1

    return added
|
||||
|
||||
|
||||
def add_request_body_examples(spec: dict[str, Any]) -> int:
    """Add example values to API request bodies for better documentation.

    Only POST/PUT/PATCH operations with an inline (non-$ref) JSON request
    body are considered; an example is synthesized from the declared schema
    when one is not already present.

    Returns:
        The number of examples added.
    """
    added = 0

    for _path, operations in spec.get("paths", {}).items():
        for verb, op in operations.items():
            # Only body-carrying methods are relevant here.
            if verb not in ("post", "put", "patch"):
                continue
            if not isinstance(op, dict):
                continue

            body = op.get("requestBody", {})
            if not body or "$ref" in body:
                continue

            media = body.get("content", {}).get("application/json", {})
            if not media or "example" in media:
                continue

            schema = media.get("schema", {})
            if not schema:
                continue

            example = generate_example_from_schema(
                schema, spec, depth=0, max_depth=4
            )
            if example is not None and example != {}:
                media["example"] = example
                added += 1

    return added
|
||||
|
||||
|
||||
def make_summaries_unique(spec: dict[str, Any]) -> int:  # noqa: C901
    """Make duplicate summaries unique by adding context from the path.

    When several operations share a summary, each gets a "(path-slug)"
    suffix derived from its URL, e.g. /api/v1/chart/{pk}/cache_screenshot/
    contributes "chart-pk-cache-screenshot".

    Returns:
        The number of summaries rewritten.
    """
    http_methods = ("get", "post", "put", "delete", "patch")

    def operations(spec_dict: dict[str, Any]):
        # Yield (path, details) for every real HTTP operation in the spec.
        for path, methods in spec_dict.get("paths", {}).items():
            for verb, details in methods.items():
                if verb in http_methods and isinstance(details, dict):
                    yield path, details

    # Pass 1: count how many operations use each summary.
    usage: dict[str, int] = {}
    for _path, op in operations(spec):
        summary = op.get("summary")
        if summary:
            usage[summary] = usage.get(summary, 0) + 1

    # Pass 2: suffix duplicated summaries with a slug of their own path.
    rewritten = 0
    for path, op in operations(spec):
        summary = op.get("summary")
        if not summary or usage.get(summary, 0) <= 1:
            continue

        slug = (
            path.replace("/api/v1/", "")
            .strip("/")
            .replace("{", "")
            .replace("}", "")
            .replace("/", "-")
            .replace("_", "-")
        )

        # Skip when the summary already mentions the slug.
        if slug not in summary.lower():
            op["summary"] = f"{summary} ({slug})"
            rewritten += 1

    return rewritten
|
||||
|
||||
|
||||
def main() -> None:  # noqa: C901
    """Load the OpenAPI spec, apply every fixer, and write it back if changed."""
    script_dir = Path(__file__).parent
    spec_path = script_dir.parent / "static" / "resources" / "openapi.json"

    if not spec_path.exists():
        print(f"Error: OpenAPI spec not found at {spec_path}", file=sys.stderr)
        sys.exit(1)

    print(f"Reading OpenAPI spec from {spec_path}")
    with open(spec_path, encoding="utf-8") as f:
        spec = json.load(f)

    # Structural fixes run first; their results are reported further down so
    # the output ordering matches the original script.
    spec, fixed_schemas = add_missing_schemas(spec)
    fixed_ops = add_missing_operation_ids(spec)
    fixed_tags = add_tag_definitions(spec)
    fixed_servers = configure_servers(spec)

    changes_made = False

    if fixed_servers:
        print("Configured server URLs with variables for flexible API testing")
        changes_made = True

    fixed_samples = add_code_samples(spec)
    if fixed_samples:
        print(f"Added code samples to {fixed_samples} endpoints")
        changes_made = True

    fixed_examples = add_response_examples(spec)
    if fixed_examples:
        print(f"Added example JSON responses to {fixed_examples} response schemas")
        changes_made = True

    fixed_request_examples = add_request_body_examples(spec)
    if fixed_request_examples:
        print(f"Added example JSON to {fixed_request_examples} request bodies")
        changes_made = True

    if fixed_schemas:
        print(f"Added missing schemas: {', '.join(fixed_schemas)}")
        changes_made = True

    if fixed_ops:
        print(f"Added operationId/summary to {fixed_ops} operations")
        changes_made = True

    if fixed_tags:
        print(f"Added {fixed_tags} tag definitions with descriptions")
        changes_made = True

    fixed_summaries = make_summaries_unique(spec)
    if fixed_summaries:
        print(f"Made {fixed_summaries} duplicate summaries unique")
        changes_made = True

    if not changes_made:
        print("No fixes needed")
        return

    with open(spec_path, "w", encoding="utf-8") as f:
        json.dump(spec, f, indent=2)
        f.write("\n")  # Ensure trailing newline for pre-commit

    print(f"Updated {spec_path}")


if __name__ == "__main__":
    main()
|
||||
277
docs/scripts/generate-api-index.mjs
Normal file
@@ -0,0 +1,277 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Generates a comprehensive API index MDX file from the OpenAPI spec.
|
||||
* This creates the api.mdx landing page with all endpoints organized by category.
|
||||
*
|
||||
* Uses the generated sidebar to get correct endpoint slugs (the plugin's
|
||||
* slug algorithm differs from a simple slugify, e.g. handling apostrophes
|
||||
* and camelCase differently).
|
||||
*/
|
||||
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import { createRequire } from 'module';
|
||||
import { fileURLToPath } from 'url';
|
||||
|
||||
// CommonJS require shim (used by buildSlugMap to load the generated sidebar
// module) plus an ESM-compatible __dirname.
const require = createRequire(import.meta.url);
const __dirname = path.dirname(fileURLToPath(import.meta.url));

// Input spec, the generated sidebar (source of endpoint slugs), and the MDX
// index page this script writes.
const SPEC_PATH = path.join(__dirname, '..', 'static', 'resources', 'openapi.json');
const SIDEBAR_PATH = path.join(__dirname, '..', 'docs', 'api', 'sidebar.js');
const OUTPUT_PATH = path.join(__dirname, '..', 'docs', 'api.mdx');

// Category groupings for better organization.
// Keys are section headings on the API index page; values are the OpenAPI
// tag names rendered under that heading, in this order.
const CATEGORY_GROUPS = {
  'Authentication': ['Security'],
  'Core Resources': ['Dashboards', 'Charts', 'Datasets', 'Database'],
  'Data Exploration': ['Explore', 'SQL Lab', 'Queries', 'Datasources', 'Advanced Data Type'],
  'Organization & Customization': ['Tags', 'Annotation Layers', 'CSS Templates'],
  'Sharing & Embedding': [
    'Dashboard Permanent Link', 'Explore Permanent Link', 'SQL Lab Permanent Link',
    'Embedded Dashboard', 'Dashboard Filter State', 'Explore Form Data'
  ],
  'Scheduling & Alerts': ['Report Schedules'],
  'Security & Access Control': [
    'Security Roles', 'Security Users', 'Security Permissions',
    'Security Resources (View Menus)', 'Security Permissions on Resources (View Menus)',
    'Row Level Security'
  ],
  'Import/Export & Administration': ['Import/export', 'CacheRestApi', 'LogRestApi'],
  'User & System': ['Current User', 'User', 'Menu', 'Available Domains', 'AsyncEventsRestApi', 'OpenApi'],
};
|
||||
|
||||
/**
 * Build a map from sidebar label → doc slug by reading the generated sidebar.
 * We trust the sidebar's ids rather than recomputing slugs, because the
 * docusaurus-openapi-docs slug algorithm differs from a simple slugify
 * (e.g. apostrophes and camelCase are handled differently).
 */
function buildSlugMap() {
  const labelToSlug = {};

  // Depth-first walk of the sidebar tree, collecting every doc entry.
  const visit = (items) => {
    for (const entry of items) {
      if (entry.type === 'doc' && entry.label && entry.id) {
        // entry.id looks like "api/create-security-login" → slug "create-security-login"
        labelToSlug[entry.label] = entry.id.replace(/^api\//, '');
      }
      if (entry.items) visit(entry.items);
    }
  };

  try {
    visit(require(SIDEBAR_PATH));
    console.log(`Loaded ${Object.keys(labelToSlug).length} slug mappings from sidebar`);
  } catch {
    // Sidebar not generated yet — callers fall back to slugify().
    console.warn('Could not read sidebar, will use computed slugs');
  }

  return labelToSlug;
}
|
||||
|
||||
/**
 * Lowercase, collapse every run of non-alphanumerics into a single hyphen,
 * then strip a leading/trailing hyphen.
 */
function slugify(text) {
  const hyphenated = text.toLowerCase().replace(/[^a-z0-9]+/g, '-');
  return hyphenated.replace(/(^-|-$)/g, '');
}
|
||||
|
||||
/**
 * Render a markdown table of endpoints: method, linked summary, URI path.
 * Extracted because the same table markup was previously duplicated in
 * three places (Security section, grouped tags, remaining tags).
 */
function endpointTable(endpoints) {
  let table = `| Method | Endpoint | Description |\n`;
  table += `|--------|----------|-------------|\n`;
  for (const ep of endpoints) {
    table += `| \`${ep.method}\` | [${ep.summary}](./api/${ep.slug}) | \`${ep.path}\` |\n`;
  }
  return table;
}

/**
 * Render one tag's endpoints as a collapsible <details> block.
 */
function tagDetails(tag, endpoints, description) {
  let block = `<details>\n`;
  block += `<summary><strong>${tag}</strong> (${endpoints.length} endpoints) — ${description}</summary>\n\n`;
  block += endpointTable(endpoints);
  block += `\n</details>\n\n`;
  return block;
}

/**
 * Generate docs/api.mdx: an index of every endpoint grouped by tag.
 *
 * Reads the OpenAPI spec, resolves endpoint slugs via the generated sidebar
 * (falling back to slugify), and writes one MDX page with an Authentication
 * quick-start followed by collapsible per-tag endpoint tables.
 */
function main() {
  console.log(`Reading OpenAPI spec from ${SPEC_PATH}`);
  const spec = JSON.parse(fs.readFileSync(SPEC_PATH, 'utf-8'));

  // Build slug map from the generated sidebar
  const labelToSlug = buildSlugMap();

  // Build a map of tag -> endpoints
  const tagEndpoints = {};
  const tagDescriptions = {};

  // Get tag descriptions
  for (const tag of spec.tags || []) {
    tagDescriptions[tag.name] = tag.description || '';
  }

  // Collect endpoints by tag
  for (const [pathUrl, methods] of Object.entries(spec.paths || {})) {
    for (const [method, details] of Object.entries(methods)) {
      if (!['get', 'post', 'put', 'delete', 'patch'].includes(method)) continue;

      const tags = details.tags || ['Untagged'];
      const summary = details.summary || `${method.toUpperCase()} ${pathUrl}`;

      // Use sidebar slug if available, fall back to computed slug
      const slug = labelToSlug[summary] || slugify(summary);

      for (const tag of tags) {
        if (!tagEndpoints[tag]) {
          tagEndpoints[tag] = [];
        }
        tagEndpoints[tag].push({
          method: method.toUpperCase(),
          path: pathUrl,
          summary,
          slug,
        });
      }
    }
  }

  // Sort endpoints within each tag by path
  for (const tag of Object.keys(tagEndpoints)) {
    tagEndpoints[tag].sort((a, b) => a.path.localeCompare(b.path));
  }

  // Generate MDX content
  let mdx = `---
title: API Reference
hide_title: true
sidebar_position: 10
---

import { Alert } from 'antd';

## REST API Reference

Superset exposes a comprehensive **REST API** that follows the [OpenAPI specification](https://swagger.io/specification/).
You can use this API to programmatically interact with Superset for automation, integrations, and custom applications.

<Alert
  type="info"
  showIcon
  message="Code Samples & Schema Documentation"
  description={
    <span>
      Each endpoint includes ready-to-use code samples in <strong>cURL</strong>, <strong>Python</strong>, and <strong>JavaScript</strong>.
      The sidebar includes <strong>Schema definitions</strong> for detailed data model documentation.
    </span>
  }
  style={{ marginBottom: '24px' }}
/>

---

`;

  // Track which tags we've rendered
  const renderedTags = new Set();

  // Render Authentication first (it's critical for using the API)
  mdx += `### Authentication

Most API endpoints require authentication via JWT tokens.

#### Quick Start

\`\`\`bash
# 1. Get a JWT token
curl -X POST http://localhost:8088/api/v1/security/login \\
  -H "Content-Type: application/json" \\
  -d '{"username": "admin", "password": "admin", "provider": "db"}'

# 2. Use the access_token from the response
curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \\
  http://localhost:8088/api/v1/dashboard/
\`\`\`

#### Security Endpoints

`;

  // Render Security tag endpoints as a flat (non-collapsed) table
  if (tagEndpoints['Security']) {
    mdx += endpointTable(tagEndpoints['Security']);
    mdx += '\n';
    renderedTags.add('Security');
  }

  mdx += `---\n\n### API Endpoints\n\n`;

  // Render each category group
  for (const [groupName, groupTags] of Object.entries(CATEGORY_GROUPS)) {
    if (groupName === 'Authentication') continue; // Already rendered

    const tagsInGroup = groupTags.filter(tag => tagEndpoints[tag] && !renderedTags.has(tag));
    if (tagsInGroup.length === 0) continue;

    mdx += `#### ${groupName}\n\n`;

    for (const tag of tagsInGroup) {
      mdx += tagDetails(tag, tagEndpoints[tag], tagDescriptions[tag] || '');
      renderedTags.add(tag);
    }
  }

  // Render any remaining tags not in a group
  const remainingTags = Object.keys(tagEndpoints).filter(tag => !renderedTags.has(tag));
  if (remainingTags.length > 0) {
    mdx += `#### Other\n\n`;

    for (const tag of remainingTags.sort()) {
      mdx += tagDetails(tag, tagEndpoints[tag], tagDescriptions[tag] || '');
    }
  }

  mdx += `---

### Additional Resources

- [Superset REST API Blog Post](https://preset.io/blog/2020-10-01-superset-api/)
- [Accessing APIs with Superset](https://preset.io/blog/accessing-apis-with-superset/)
`;

  // Write output
  fs.writeFileSync(OUTPUT_PATH, mdx);
  console.log(`Generated API index at ${OUTPUT_PATH}`);
  console.log(`Total tags: ${Object.keys(tagEndpoints).length}`);
  console.log(`Total endpoints: ${Object.values(tagEndpoints).flat().length}`);
}

main();
|
||||
176
docs/scripts/generate-api-tag-pages.mjs
Normal file
@@ -0,0 +1,176 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Replaces auto-generated tag pages (DocCardList cards) with endpoint tables
|
||||
* showing HTTP method, endpoint name, and URI path for each endpoint in the tag.
|
||||
*
|
||||
* Runs after `docusaurus gen-api-docs` and `convert-api-sidebar.mjs`.
|
||||
* Uses the generated sidebar to get correct endpoint slugs.
|
||||
*/
|
||||
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import { createRequire } from 'module';
|
||||
import { fileURLToPath } from 'url';
|
||||
|
||||
// CommonJS require shim for this ES module — used below to load sidebar.js.
const require = createRequire(import.meta.url);
const __dirname = path.dirname(fileURLToPath(import.meta.url));

// Path to the OpenAPI spec JSON served from the docs static assets.
const SPEC_PATH = path.join(__dirname, '..', 'static', 'resources', 'openapi.json');
// Directory holding the docs generated by `docusaurus gen-api-docs`.
const API_DOCS_DIR = path.join(__dirname, '..', 'docs', 'api');
// Generated sidebar file; maps endpoint labels to doc ids (see buildSlugMap).
const SIDEBAR_PATH = path.join(API_DOCS_DIR, 'sidebar.js');
|
||||
|
||||
// Turn arbitrary text into a URL-safe slug: lowercase, collapse every run of
// non-alphanumeric characters into a single hyphen, then strip a hyphen left
// at either end.
function slugify(text) {
  const hyphenated = text.toLowerCase().replace(/[^a-z0-9]+/g, '-');
  return hyphenated.replace(/^-/, '').replace(/-$/, '');
}
|
||||
|
||||
/**
 * Build a map from sidebar label → doc slug by reading the generated sidebar.
 * Returns an empty map (with a warning) when the sidebar has not been
 * generated yet, in which case callers fall back to computed slugs.
 */
function buildSlugMap() {
  const labelToSlug = {};

  try {
    const sidebar = require(SIDEBAR_PATH);

    // Walk the (possibly nested) sidebar tree collecting doc entries.
    const visit = (items) => {
      items.forEach((item) => {
        if (item.type === 'doc' && item.label && item.id) {
          // Sidebar doc ids are prefixed with "api/"; the slug drops it.
          labelToSlug[item.label] = item.id.replace(/^api\//, '');
        }
        if (item.items) visit(item.items);
      });
    };

    visit(sidebar);
  } catch {
    console.warn('Could not read sidebar, will use computed slugs');
  }

  return labelToSlug;
}
|
||||
|
||||
/**
 * Entry point: rewrite each generated `.tag.mdx` page under API_DOCS_DIR,
 * replacing its body with the tag description from the OpenAPI spec plus a
 * Markdown table of that tag's endpoints (method, linked summary, path).
 * Frontmatter is preserved verbatim; files without frontmatter, without a
 * quoted title, or whose title matches no spec tag are skipped with a warning.
 */
function main() {
  console.log('Generating API tag pages with endpoint tables...');

  const spec = JSON.parse(fs.readFileSync(SPEC_PATH, 'utf-8'));
  const labelToSlug = buildSlugMap();

  // Tag name → description, from the spec's top-level tag list.
  const tagDescriptions = {};
  (spec.tags || []).forEach((tag) => {
    tagDescriptions[tag.name] = tag.description || '';
  });

  // Tag name → list of endpoint records built from spec.paths.
  const HTTP_METHODS = ['get', 'post', 'put', 'delete', 'patch'];
  const tagEndpoints = {};
  Object.entries(spec.paths || {}).forEach(([pathUrl, methods]) => {
    Object.entries(methods).forEach(([method, details]) => {
      // Skip non-operation keys (e.g. `parameters`) on the path item.
      if (!HTTP_METHODS.includes(method)) return;

      const summary = details.summary || `${method.toUpperCase()} ${pathUrl}`;
      // Prefer the slug from the generated sidebar; fall back to computing one.
      const slug = labelToSlug[summary] || slugify(summary);

      (details.tags || ['Untagged']).forEach((tag) => {
        if (!tagEndpoints[tag]) {
          tagEndpoints[tag] = [];
        }
        tagEndpoints[tag].push({
          method: method.toUpperCase(),
          path: pathUrl,
          summary,
          slug,
        });
      });
    });
  });

  // Stable ordering within each tag: by path, then by method.
  Object.values(tagEndpoints).forEach((endpoints) => {
    endpoints.sort(
      (a, b) => a.path.localeCompare(b.path) || a.method.localeCompare(b.method),
    );
  });

  // Scan existing .tag.mdx files and match each to a spec tag by its
  // frontmatter title.
  const tagFiles = fs
    .readdirSync(API_DOCS_DIR)
    .filter((f) => f.endsWith('.tag.mdx'));

  let updated = 0;
  for (const tagFile of tagFiles) {
    const tagFilePath = path.join(API_DOCS_DIR, tagFile);
    const existing = fs.readFileSync(tagFilePath, 'utf-8');

    const frontmatterMatch = existing.match(/^---\n([\s\S]*?)\n---/);
    if (!frontmatterMatch) {
      console.warn(` No frontmatter in ${tagFile}, skipping`);
      continue;
    }
    const frontmatter = frontmatterMatch[1];

    // The quoted frontmatter title matches the spec tag name.
    const titleMatch = frontmatter.match(/title:\s*"([^"]+)"/);
    if (!titleMatch) {
      console.warn(` No title in ${tagFile}, skipping`);
      continue;
    }

    const tagName = titleMatch[1];
    const endpoints = tagEndpoints[tagName];
    if (!endpoints || endpoints.length === 0) {
      console.warn(` No endpoints found for tag "${tagName}" (${tagFile})`);
      continue;
    }

    const description = tagDescriptions[tagName] || '';

    // Build the endpoint table.
    let table = '| Method | Endpoint | Path |\n';
    table += '|--------|----------|------|\n';
    endpoints.forEach((ep) => {
      table += `| \`${ep.method}\` | [${ep.summary}](./${ep.slug}) | \`${ep.path}\` |\n`;
    });

    // Reassemble the page: original frontmatter + description + table.
    const mdx = `---
${frontmatter}
---

${description}

${table}
`;

    fs.writeFileSync(tagFilePath, mdx);
    updated++;
  }

  console.log(`Updated ${updated} tag pages with endpoint tables`);
}

main();
|
||||
1026
docs/scripts/generate-database-docs.mjs
Normal file
1415
docs/scripts/generate-superset-components.mjs
Normal file
@@ -110,9 +110,26 @@ const sidebars = {
|
||||
'testing/frontend-testing',
|
||||
'testing/backend-testing',
|
||||
'testing/e2e-testing',
|
||||
'testing/storybook',
|
||||
'testing/ci-cd',
|
||||
],
|
||||
},
|
||||
{
|
||||
type: 'category',
|
||||
label: 'UI Components',
|
||||
collapsed: true,
|
||||
items: [
|
||||
{
|
||||
type: 'autogenerated',
|
||||
dirName: 'components',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
type: 'link',
|
||||
label: 'API Reference',
|
||||
href: '/docs/api',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
|
||||
@@ -57,6 +57,20 @@ const sidebars = {
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
type: 'category',
|
||||
label: 'Databases',
|
||||
link: {
|
||||
type: 'doc',
|
||||
id: 'databases/index',
|
||||
},
|
||||
items: [
|
||||
{
|
||||
type: 'autogenerated',
|
||||
dirName: 'databases',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
type: 'category',
|
||||
label: 'Using Superset',
|
||||
@@ -93,9 +107,21 @@ const sidebars = {
|
||||
id: 'faq',
|
||||
},
|
||||
{
|
||||
type: 'doc',
|
||||
label: 'API',
|
||||
id: 'api',
|
||||
type: 'category',
|
||||
label: 'API Reference',
|
||||
link: {
|
||||
type: 'doc',
|
||||
id: 'api',
|
||||
},
|
||||
items: (() => {
|
||||
try {
|
||||
// eslint-disable-next-line @typescript-eslint/no-require-imports
|
||||
return require('./docs/api/sidebar.js');
|
||||
} catch {
|
||||
// Generated by `yarn generate:api-docs`; empty until then
|
||||
return [];
|
||||
}
|
||||
})(),
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
@@ -39,11 +39,12 @@ const StyledBlurredSection = styled('section')`
|
||||
|
||||
interface BlurredSectionProps {
|
||||
children: ReactNode;
|
||||
id?: string;
|
||||
}
|
||||
|
||||
const BlurredSection = ({ children }: BlurredSectionProps) => {
|
||||
const BlurredSection = ({ children, id }: BlurredSectionProps) => {
|
||||
return (
|
||||
<StyledBlurredSection>
|
||||
<StyledBlurredSection id={id}>
|
||||
{children}
|
||||
<img className="blur" src="/img/community/blur.png" alt="Blur" />
|
||||
</StyledBlurredSection>
|
||||
|
||||
@@ -98,6 +98,7 @@ interface SectionHeaderProps {
|
||||
title: string;
|
||||
subtitle?: string | ReactNode;
|
||||
dark?: boolean;
|
||||
link?: string;
|
||||
}
|
||||
|
||||
const SectionHeader = ({
|
||||
@@ -105,15 +106,24 @@ const SectionHeader = ({
|
||||
title,
|
||||
subtitle,
|
||||
dark,
|
||||
link,
|
||||
}: SectionHeaderProps) => {
|
||||
const Heading = level;
|
||||
|
||||
const StyledRoot =
|
||||
level === 'h1' ? StyledSectionHeaderH1 : StyledSectionHeaderH2;
|
||||
|
||||
const titleContent = link ? (
|
||||
<a href={link} style={{ color: 'inherit', textDecoration: 'none' }}>
|
||||
{title}
|
||||
</a>
|
||||
) : (
|
||||
title
|
||||
);
|
||||
|
||||
return (
|
||||
<StyledRoot dark={!!dark}>
|
||||
<Heading className="title">{title}</Heading>
|
||||
<Heading className="title">{titleContent}</Heading>
|
||||
<img className="line" src="/img/community/line.png" alt="line" />
|
||||
{subtitle && <div className="subtitle">{subtitle}</div>}
|
||||
</StyledRoot>
|
||||
|
||||
@@ -18,33 +18,245 @@
|
||||
*/
|
||||
|
||||
import React from 'react';
|
||||
import { supersetTheme, ThemeProvider } from '@superset-ui/core';
|
||||
import BrowserOnly from '@docusaurus/BrowserOnly';
|
||||
|
||||
// A simple component to display a story example
|
||||
export function StoryExample({ component: Component, props = {} }) {
|
||||
// Lazy-loaded component registry - populated on first use in browser
let componentRegistry = null;
let SupersetProviders = null;

// Lazily build (and cache) the name → component registry. Returns an empty
// object during SSR or when the component bundles fail to load.
function getComponentRegistry() {
  // SSR: no window, no browser bundles.
  if (typeof window === 'undefined') {
    return {};
  }

  // Serve the cached registry after the first successful (or failed) build.
  if (componentRegistry !== null) {
    return componentRegistry;
  }

  try {
    // eslint-disable-next-line @typescript-eslint/no-require-imports
    const antd = require('antd');
    // eslint-disable-next-line @typescript-eslint/no-require-imports
    const SupersetComponents = require('@superset/components');
    // eslint-disable-next-line @typescript-eslint/no-require-imports
    const CoreUI = require('@apache-superset/core/ui');

    // Build component registry with antd as base fallback layer.
    // Some Superset components (e.g., Typography) use styled-components that may
    // fail to initialize in the docs build. Antd originals serve as fallbacks.
    componentRegistry = { ...antd, ...SupersetComponents, ...CoreUI };
  } catch (error) {
    console.error('[StorybookWrapper] Failed to load components:', error);
    componentRegistry = {};
  }

  return componentRegistry;
}
|
||||
|
||||
// Returns a provider component wrapping children in the Superset theme and an
// Ant Design ConfigProvider. The built provider is cached in the module-level
// `SupersetProviders`; during SSR (no `window`) an identity wrapper is
// returned instead.
function getProviders() {
  if (typeof window === 'undefined') {
    return ({ children }) => children; // SSR
  }

  if (SupersetProviders !== null) {
    return SupersetProviders; // Already built and cached
  }

  try {
    // eslint-disable-next-line @typescript-eslint/no-require-imports
    const { themeObject } = require('@apache-superset/core/ui');
    // eslint-disable-next-line @typescript-eslint/no-require-imports
    const { App, ConfigProvider } = require('antd');

    // Configure Ant Design to render portals (tooltips, dropdowns, etc.)
    // inside the closest .storybook-example container instead of document.body
    // This fixes positioning issues in the docs pages
    const getPopupContainer = (triggerNode) => {
      // Find the closest .storybook-example container
      const container = triggerNode?.closest?.('.storybook-example');
      return container || document.body;
    };

    SupersetProviders = ({ children }) => (
      <themeObject.SupersetThemeProvider>
        <ConfigProvider
          getPopupContainer={getPopupContainer}
          getTargetContainer={() => document.body}
        >
          <App>{children}</App>
        </ConfigProvider>
      </themeObject.SupersetThemeProvider>
    );
    return SupersetProviders;
  } catch (error) {
    console.error('[StorybookWrapper] Failed to load providers:', error);
    // Best-effort fallback: render children unwrapped. Note the failure is not
    // cached, so a later call retries the require.
    return ({ children }) => children;
  }
}
|
||||
|
||||
// Check if a value is a valid React component (function, forwardRef, memo, etc.)
// Plain function/class components are functions; forwardRef/memo/lazy are
// objects React marks with a $$typeof property.
function isReactComponent(value) {
  if (!value) return false;
  return (
    typeof value === 'function' ||
    (typeof value === 'object' && Boolean(value.$$typeof))
  );
}
|
||||
|
||||
// Resolve component from string name or React component
// Supports dot notation for nested components (e.g., 'Icons.InfoCircleOutlined')
// Returns null when the name cannot be resolved to a renderable component.
function resolveComponent(component) {
  if (!component) return null;

  // Already renderable (function/class/forwardRef/memo)? Use as-is.
  if (isReactComponent(component)) return component;

  if (typeof component !== 'string') return null;

  const registry = getComponentRegistry();

  // Dot notation walks nested namespaces, e.g. 'Icons.InfoCircleOutlined'.
  if (component.includes('.')) {
    const parts = component.split('.');
    let node = registry[parts.shift()];
    for (const part of parts) {
      if (!node) break;
      node = node[part];
    }
    return isReactComponent(node) ? node : null;
  }

  return registry[component] || null;
}
|
||||
|
||||
// Loading placeholder for SSR
|
||||
function LoadingPlaceholder() {
|
||||
return (
|
||||
<ThemeProvider theme={supersetTheme}>
|
||||
<div
|
||||
className="storybook-example"
|
||||
style={{
|
||||
border: '1px solid #e8e8e8',
|
||||
borderRadius: '4px',
|
||||
padding: '20px',
|
||||
marginBottom: '20px',
|
||||
}}
|
||||
>
|
||||
{Component && <Component {...props} />}
|
||||
</div>
|
||||
</ThemeProvider>
|
||||
<div
|
||||
style={{
|
||||
border: '1px solid #e8e8e8',
|
||||
borderRadius: '4px',
|
||||
padding: '20px',
|
||||
marginBottom: '20px',
|
||||
minHeight: '100px',
|
||||
display: 'flex',
|
||||
alignItems: 'center',
|
||||
justifyContent: 'center',
|
||||
color: '#999',
|
||||
}}
|
||||
>
|
||||
Loading component...
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
// A simple component to display a story with controls
|
||||
export function StoryWithControls({
|
||||
component: Component,
|
||||
props = {},
|
||||
controls = [],
|
||||
}) {
|
||||
// A simple component to display a story example
// `component` is either a React component or a string name resolved against
// the lazily-built registry (antd + Superset components — see
// resolveComponent); `props` may carry a label/children/text/content entry
// that becomes the element's children. Rendering is deferred to the browser
// via BrowserOnly; SSR shows <LoadingPlaceholder />.
export function StoryExample({ component, props = {} }) {
  return (
    <BrowserOnly fallback={<LoadingPlaceholder />}>
      {() => {
        // Resolved inside BrowserOnly so browser-only require()s never run
        // during SSR.
        const Component = resolveComponent(component);
        const Providers = getProviders();
        const { children, restProps } = extractChildren(props);
        return (
          <Providers>
            <div
              className="storybook-example"
              style={{
                border: '1px solid #e8e8e8',
                borderRadius: '4px',
                padding: '20px',
                marginBottom: '20px',
                position: 'relative', // Required for portal positioning
              }}
            >
              {Component ? (
                <Component {...restProps}>{children}</Component>
              ) : (
                <div style={{ color: '#999' }}>
                  Component "{String(component)}" not found
                </div>
              )}
            </div>
          </Providers>
        );
      }}
    </BrowserOnly>
  );
}
|
||||
|
||||
// Props that should be rendered as children rather than passed as props
const CHILDREN_PROP_NAMES = ['label', 'children', 'text', 'content'];

// Extract children from props based on common conventions.
// The first prop in CHILDREN_PROP_NAMES order that is present (not undefined,
// null, or '') becomes `children`; the remaining props pass through untouched.
// When none matches, `children` is null and `restProps` is the original props.
function extractChildren(props) {
  const matched = CHILDREN_PROP_NAMES.find(
    (name) =>
      props[name] !== undefined && props[name] !== null && props[name] !== '',
  );
  if (matched === undefined) {
    return { children: null, restProps: props };
  }
  const { [matched]: childContent, ...restProps } = props;
  return { children: childContent, restProps };
}
|
||||
|
||||
// Generate sample children for layout components
// Supports:
// - Array of strings: ['Item 1', 'Item 2'] - renders as styled divs
// - Array of component descriptors: [{ component: 'Button', props: { children: 'Click' } }]
// - Number: 3 - generates that many sample items
// - String: 'content' - renders as literal content
// Falsy input yields null; any other value (e.g. a pre-built element) is
// returned unchanged.
function generateSampleChildren(sampleChildren, sampleChildrenStyle) {
  if (!sampleChildren) return null;

  // Default style if none provided (minimal, just enough to see items)
  const itemStyle = sampleChildrenStyle || {};

  // If it's an array, check if items are component descriptors or strings
  if (Array.isArray(sampleChildren)) {
    return sampleChildren.map((item, i) => {
      // Component descriptor: { component: 'Button', props: { ... } }
      if (item && typeof item === 'object' && item.component) {
        const ChildComponent = resolveComponent(item.component);
        if (ChildComponent) {
          return <ChildComponent key={i} {...item.props} />;
        }
        // Fallback if component not found: render the descriptor's children
        // text (or a diagnostic label) in a plain div.
        return <div key={i}>{item.props?.children || `Unknown: ${item.component}`}</div>;
      }
      // Simple string
      return (
        <div key={i} style={itemStyle}>
          {item}
        </div>
      );
    });
  }

  // If it's a number, generate that many sample items
  if (typeof sampleChildren === 'number') {
    return new Array(sampleChildren).fill(null).map((_, i) => (
      <div key={i} style={itemStyle}>
        Item {i + 1}
      </div>
    ));
  }

  // If it's a string, treat as literal content
  if (typeof sampleChildren === 'string') {
    return sampleChildren;
  }

  return sampleChildren;
}
|
||||
|
||||
// Inner component for StoryWithControls (browser-only)
|
||||
// renderComponent allows overriding which component to actually render (useful when the named
|
||||
// component is a namespace object like Icons, not a React component)
|
||||
// triggerProp: for components like Modal that need a trigger, specify the boolean prop that controls visibility
|
||||
function StoryWithControlsInner({ component, renderComponent, props, controls, sampleChildren, sampleChildrenStyle, triggerProp, onHideProp }) {
|
||||
// Use renderComponent if provided, otherwise use the main component name
|
||||
const componentToRender = renderComponent || component;
|
||||
const Component = resolveComponent(componentToRender);
|
||||
const Providers = getProviders();
|
||||
const [stateProps, setStateProps] = React.useState(props);
|
||||
|
||||
const updateProp = (key, value) => {
|
||||
@@ -54,8 +266,77 @@ export function StoryWithControls({
|
||||
}));
|
||||
};
|
||||
|
||||
// Extract children from props (label, children, text, content)
|
||||
// When sampleChildren is explicitly provided, skip extraction so all props
|
||||
// (like 'content') stay as component props rather than becoming children
|
||||
const { children: propsChildren, restProps } = sampleChildren
|
||||
? { children: null, restProps: stateProps }
|
||||
: extractChildren(stateProps);
|
||||
// Filter out undefined values so they don't override component defaults
|
||||
const filteredProps = Object.fromEntries(
|
||||
Object.entries(restProps).filter(([, v]) => v !== undefined)
|
||||
);
|
||||
|
||||
// Resolve any prop values that are component descriptors
|
||||
// e.g., { component: 'Button', props: { children: 'Click' } }
|
||||
// Also resolves descriptors nested inside array items:
|
||||
// e.g., items: [{ id: 'x', element: { component: 'div', props: { children: 'text' } } }]
|
||||
Object.keys(filteredProps).forEach(key => {
|
||||
const value = filteredProps[key];
|
||||
if (value && typeof value === 'object' && !Array.isArray(value) && value.component) {
|
||||
const PropComponent = resolveComponent(value.component);
|
||||
if (PropComponent) {
|
||||
filteredProps[key] = <PropComponent {...value.props} />;
|
||||
}
|
||||
}
|
||||
if (Array.isArray(value)) {
|
||||
filteredProps[key] = value.map((item, idx) => {
|
||||
if (item && typeof item === 'object') {
|
||||
const resolved = { ...item };
|
||||
Object.keys(resolved).forEach(field => {
|
||||
const fieldValue = resolved[field];
|
||||
if (fieldValue && typeof fieldValue === 'object' && !Array.isArray(fieldValue) && fieldValue.component) {
|
||||
const FieldComponent = resolveComponent(fieldValue.component);
|
||||
if (FieldComponent) {
|
||||
resolved[field] = React.createElement(FieldComponent, { key: `${key}-${idx}`, ...fieldValue.props });
|
||||
}
|
||||
}
|
||||
});
|
||||
return resolved;
|
||||
}
|
||||
return item;
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
// For List-like components with dataSource but no renderItem, provide a default
|
||||
if (filteredProps.dataSource && !filteredProps.renderItem) {
|
||||
const ListItem = resolveComponent('List')?.Item;
|
||||
filteredProps.renderItem = (item) =>
|
||||
ListItem
|
||||
? React.createElement(ListItem, null, String(item))
|
||||
: React.createElement('div', null, String(item));
|
||||
}
|
||||
|
||||
// Use sample children if provided, otherwise use props children
|
||||
const children = generateSampleChildren(sampleChildren, sampleChildrenStyle) || propsChildren;
|
||||
|
||||
// For components with a trigger (like Modal with show/onHide), add handlers.
|
||||
// onHideProp supports comma-separated names for components with multiple close
|
||||
// callbacks (e.g., "onHide,handleSave,onConfirmNavigation").
|
||||
const triggerProps = {};
|
||||
if (triggerProp && onHideProp) {
|
||||
const closeHandler = () => updateProp(triggerProp, false);
|
||||
onHideProp.split(',').forEach(prop => {
|
||||
triggerProps[prop.trim()] = closeHandler;
|
||||
});
|
||||
}
|
||||
|
||||
// Get the Button component for trigger buttons
|
||||
const ButtonComponent = resolveComponent('Button');
|
||||
|
||||
return (
|
||||
<ThemeProvider theme={supersetTheme}>
|
||||
<Providers>
|
||||
<div className="storybook-with-controls">
|
||||
<div
|
||||
className="storybook-example"
|
||||
@@ -64,9 +345,24 @@ export function StoryWithControls({
|
||||
borderRadius: '4px',
|
||||
padding: '20px',
|
||||
marginBottom: '20px',
|
||||
position: 'relative', // Required for portal positioning
|
||||
}}
|
||||
>
|
||||
{Component && <Component {...stateProps} />}
|
||||
{Component ? (
|
||||
<>
|
||||
{/* Show a trigger button for components like Modal */}
|
||||
{triggerProp && ButtonComponent && (
|
||||
<ButtonComponent onClick={() => updateProp(triggerProp, true)}>
|
||||
Open {component}
|
||||
</ButtonComponent>
|
||||
)}
|
||||
<Component {...filteredProps} {...triggerProps}>{children}</Component>
|
||||
</>
|
||||
) : (
|
||||
<div style={{ color: '#999' }}>
|
||||
Component "{String(componentToRender)}" not found
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{controls.length > 0 && (
|
||||
@@ -87,26 +383,64 @@ export function StoryWithControls({
|
||||
</label>
|
||||
{control.type === 'select' ? (
|
||||
<select
|
||||
value={stateProps[control.name]}
|
||||
onChange={e => updateProp(control.name, e.target.value)}
|
||||
value={stateProps[control.name] ?? ''}
|
||||
onChange={e => updateProp(control.name, e.target.value || undefined)}
|
||||
style={{ width: '100%', padding: '5px' }}
|
||||
>
|
||||
{control.options.map(option => (
|
||||
<option value="">— None —</option>
|
||||
{control.options?.map(option => (
|
||||
<option key={option} value={option}>
|
||||
{option}
|
||||
</option>
|
||||
))}
|
||||
</select>
|
||||
) : control.type === 'inline-radio' || control.type === 'radio' ? (
|
||||
<div style={{ display: 'flex', gap: '10px', flexWrap: 'wrap' }}>
|
||||
{control.options?.map(option => (
|
||||
<label
|
||||
key={option}
|
||||
style={{ display: 'flex', alignItems: 'center', gap: '4px' }}
|
||||
>
|
||||
<input
|
||||
type="radio"
|
||||
name={control.name}
|
||||
value={option}
|
||||
checked={stateProps[control.name] === option}
|
||||
onChange={e => updateProp(control.name, e.target.value)}
|
||||
/>
|
||||
{option}
|
||||
</label>
|
||||
))}
|
||||
</div>
|
||||
) : control.type === 'boolean' ? (
|
||||
<input
|
||||
type="checkbox"
|
||||
checked={stateProps[control.name]}
|
||||
onChange={e => updateProp(control.name, e.target.checked)}
|
||||
/>
|
||||
) : control.type === 'number' ? (
|
||||
<input
|
||||
type="number"
|
||||
value={stateProps[control.name]}
|
||||
onChange={e => updateProp(control.name, Number(e.target.value))}
|
||||
style={{ width: '100%', padding: '5px' }}
|
||||
/>
|
||||
) : control.type === 'color' ? (
|
||||
<input
|
||||
type="color"
|
||||
value={stateProps[control.name] || '#000000'}
|
||||
onChange={e => updateProp(control.name, e.target.value)}
|
||||
style={{
|
||||
width: '50px',
|
||||
height: '30px',
|
||||
padding: '2px',
|
||||
cursor: 'pointer',
|
||||
}}
|
||||
/>
|
||||
) : (
|
||||
<input
|
||||
type="text"
|
||||
value={stateProps[control.name]}
|
||||
value={stateProps[control.name] ?? ''}
|
||||
onChange={e => updateProp(control.name, e.target.value)}
|
||||
style={{ width: '100%', padding: '5px' }}
|
||||
/>
|
||||
@@ -116,6 +450,81 @@ export function StoryWithControls({
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</ThemeProvider>
|
||||
</Providers>
|
||||
);
|
||||
}
|
||||
|
||||
// A simple component to display a story with controls
// renderComponent: optional override for which component to render (e.g., 'Icons.InfoCircleOutlined' when component='Icons')
// triggerProp/onHideProp: for components like Modal that need a button to open (e.g., triggerProp="show", onHideProp="onHide")
// Thin SSR-safe wrapper: all real work happens in StoryWithControlsInner,
// which only runs in the browser; SSR shows <LoadingPlaceholder />.
export function StoryWithControls({ component: Component, renderComponent, props = {}, controls = [], sampleChildren, sampleChildrenStyle, triggerProp, onHideProp }) {
  return (
    <BrowserOnly fallback={<LoadingPlaceholder />}>
      {() => (
        <StoryWithControlsInner
          component={Component}
          renderComponent={renderComponent}
          props={props}
          controls={controls}
          sampleChildren={sampleChildren}
          sampleChildrenStyle={sampleChildrenStyle}
          triggerProp={triggerProp}
          onHideProp={onHideProp}
        />
      )}
    </BrowserOnly>
  );
}
|
||||
|
||||
// Inner component for ComponentGallery (browser-only)
// Renders one row per size; within each row, one instance of the component
// per style, with the size/style values passed under the prop names given by
// sizeProp/styleProp. The style value doubles as the instance's children text.
function ComponentGalleryInner({ component, sizes, styles, sizeProp, styleProp }) {
  const Component = resolveComponent(component);
  const Providers = getProviders();

  // Unresolvable name: show a diagnostic instead of crashing the page.
  if (!Component) {
    return (
      <div style={{ color: '#999' }}>
        Component "{String(component)}" not found
      </div>
    );
  }

  return (
    <Providers>
      <div className="component-gallery">
        {sizes.map(size => (
          <div key={size} style={{ marginBottom: 40 }}>
            <h4 style={{ marginBottom: 16, color: '#666' }}>{size}</h4>
            <div style={{ display: 'flex', flexWrap: 'wrap', gap: '12px', alignItems: 'center' }}>
              {styles.map(style => (
                <Component
                  key={`${style}_${size}`}
                  {...{ [sizeProp]: size, [styleProp]: style }}
                >
                  {style}
                </Component>
              ))}
            </div>
          </div>
        ))}
      </div>
    </Providers>
  );
}
|
||||
|
||||
// A component to display a gallery of all variants (sizes x styles)
// SSR-safe wrapper around ComponentGalleryInner: rendering only happens in
// the browser, with <LoadingPlaceholder /> as the SSR fallback.
// sizeProp/styleProp name the props the target component uses for its size
// and variant (defaults: 'size', 'variant').
export function ComponentGallery({ component, sizes = [], styles = [], sizeProp = 'size', styleProp = 'variant' }) {
  return (
    <BrowserOnly fallback={<LoadingPlaceholder />}>
      {() => (
        <ComponentGalleryInner
          component={component}
          sizes={sizes}
          styles={styles}
          sizeProp={sizeProp}
          styleProp={styleProp}
        />
      )}
    </BrowserOnly>
  );
}
|
||||
|
||||
592
docs/src/components/databases/DatabaseIndex.tsx
Normal file
@@ -0,0 +1,592 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import React, { useState, useMemo } from 'react';
|
||||
import { Card, Row, Col, Statistic, Table, Tag, Input, Select, Tooltip } from 'antd';
|
||||
import {
|
||||
DatabaseOutlined,
|
||||
CheckCircleOutlined,
|
||||
ApiOutlined,
|
||||
KeyOutlined,
|
||||
SearchOutlined,
|
||||
LinkOutlined,
|
||||
BugOutlined,
|
||||
} from '@ant-design/icons';
|
||||
import type { DatabaseData, DatabaseInfo, TimeGrains } from './types';
|
||||
|
||||
// Props for the DatabaseIndex page component.
interface DatabaseIndexProps {
  data: DatabaseData;
}

// Type for table entries (includes both regular DBs and compatible DBs)
interface TableEntry {
  name: string;
  categories: string[]; // Multiple categories supported
  // NOTE(review): presumably a feature-support score out of max_score —
  // confirm against the generator that produces DatabaseData.
  score: number;
  max_score: number;
  timeGrainCount: number;
  time_grains?: TimeGrains;
  hasDrivers: boolean;
  hasAuthMethods: boolean;
  hasConnectionString: boolean;
  hasCustomErrors: boolean;
  customErrorCount: number;
  // Optional engine capability flags (snake_case mirrors the source data).
  joins?: boolean;
  subqueries?: boolean;
  supports_dynamic_schema?: boolean;
  supports_catalog?: boolean;
  ssh_tunneling?: boolean;
  supports_file_upload?: boolean;
  query_cancelation?: boolean;
  query_cost_estimation?: boolean;
  user_impersonation?: boolean;
  sql_validation?: boolean;
  documentation?: DatabaseInfo['documentation'];
  // For compatible databases
  isCompatible?: boolean;
  compatibleWith?: string; // Name of the engine this DB is compatible with
  compatibleDescription?: string;
}
|
||||
|
||||
// Map category constant names to display names
|
||||
const CATEGORY_DISPLAY_NAMES: Record<string, string> = {
|
||||
'CLOUD_AWS': 'Cloud - AWS',
|
||||
'CLOUD_GCP': 'Cloud - Google',
|
||||
'CLOUD_AZURE': 'Cloud - Azure',
|
||||
'CLOUD_DATA_WAREHOUSES': 'Cloud Data Warehouses',
|
||||
'APACHE_PROJECTS': 'Apache Projects',
|
||||
'TRADITIONAL_RDBMS': 'Traditional RDBMS',
|
||||
'ANALYTICAL_DATABASES': 'Analytical Databases',
|
||||
'SEARCH_NOSQL': 'Search & NoSQL',
|
||||
'QUERY_ENGINES': 'Query Engines',
|
||||
'TIME_SERIES': 'Time Series Databases',
|
||||
'OTHER': 'Other Databases',
|
||||
'OPEN_SOURCE': 'Open Source',
|
||||
'HOSTED_OPEN_SOURCE': 'Hosted Open Source',
|
||||
'PROPRIETARY': 'Proprietary',
|
||||
};
|
||||
|
||||
// Category colors for visual distinction.
// Keys are the display names from CATEGORY_DISPLAY_NAMES; values are antd
// <Tag> color presets (consumers fall back to 'default' for unknown keys).
const CATEGORY_COLORS: Record<string, string> = {
  'Cloud - AWS': 'orange',
  'Cloud - Google': 'blue',
  'Cloud - Azure': 'cyan',
  'Cloud Data Warehouses': 'purple',
  'Apache Projects': 'red',
  'Traditional RDBMS': 'green',
  'Analytical Databases': 'magenta',
  'Search & NoSQL': 'gold',
  'Query Engines': 'lime',
  'Time Series Databases': 'volcano',
  'Other Databases': 'default',
  // Licensing categories
  'Open Source': 'geekblue',
  'Hosted Open Source': 'cyan',
  'Proprietary': 'default',
};
|
||||
|
||||
// Convert category constant to display name
|
||||
function getCategoryDisplayName(cat: string): string {
|
||||
return CATEGORY_DISPLAY_NAMES[cat] || cat;
|
||||
}
|
||||
|
||||
// Get categories for a database - uses categories from metadata when available
|
||||
// Falls back to name-based inference for compatible databases without categories
|
||||
function getCategories(
|
||||
name: string,
|
||||
documentationCategories?: string[]
|
||||
): string[] {
|
||||
// Prefer categories from documentation metadata (computed by Python)
|
||||
if (documentationCategories && documentationCategories.length > 0) {
|
||||
return documentationCategories.map(getCategoryDisplayName);
|
||||
}
|
||||
|
||||
// Fallback: infer from name (for compatible databases without categories)
|
||||
const nameLower = name.toLowerCase();
|
||||
|
||||
if (nameLower.includes('aws') || nameLower.includes('amazon'))
|
||||
return ['Cloud - AWS'];
|
||||
if (nameLower.includes('google') || nameLower.includes('bigquery'))
|
||||
return ['Cloud - Google'];
|
||||
if (nameLower.includes('azure') || nameLower.includes('microsoft'))
|
||||
return ['Cloud - Azure'];
|
||||
if (nameLower.includes('snowflake') || nameLower.includes('databricks'))
|
||||
return ['Cloud Data Warehouses'];
|
||||
if (
|
||||
nameLower.includes('apache') ||
|
||||
nameLower.includes('druid') ||
|
||||
nameLower.includes('hive') ||
|
||||
nameLower.includes('spark')
|
||||
)
|
||||
return ['Apache Projects'];
|
||||
if (
|
||||
nameLower.includes('postgres') ||
|
||||
nameLower.includes('mysql') ||
|
||||
nameLower.includes('sqlite') ||
|
||||
nameLower.includes('mariadb')
|
||||
)
|
||||
return ['Traditional RDBMS'];
|
||||
if (
|
||||
nameLower.includes('clickhouse') ||
|
||||
nameLower.includes('vertica') ||
|
||||
nameLower.includes('starrocks')
|
||||
)
|
||||
return ['Analytical Databases'];
|
||||
if (
|
||||
nameLower.includes('elastic') ||
|
||||
nameLower.includes('solr') ||
|
||||
nameLower.includes('couchbase')
|
||||
)
|
||||
return ['Search & NoSQL'];
|
||||
if (nameLower.includes('trino') || nameLower.includes('presto'))
|
||||
return ['Query Engines'];
|
||||
|
||||
return ['Other Databases'];
|
||||
}
|
||||
|
||||
// Count supported time grains
|
||||
function countTimeGrains(db: DatabaseInfo): number {
|
||||
if (!db.time_grains) return 0;
|
||||
return Object.values(db.time_grains).filter(Boolean).length;
|
||||
}
|
||||
|
||||
// Format time grain name for display (e.g., FIVE_MINUTES -> "5 min")
|
||||
function formatTimeGrain(grain: string): string {
|
||||
const mapping: Record<string, string> = {
|
||||
SECOND: 'Second',
|
||||
FIVE_SECONDS: '5 sec',
|
||||
THIRTY_SECONDS: '30 sec',
|
||||
MINUTE: 'Minute',
|
||||
FIVE_MINUTES: '5 min',
|
||||
TEN_MINUTES: '10 min',
|
||||
FIFTEEN_MINUTES: '15 min',
|
||||
THIRTY_MINUTES: '30 min',
|
||||
HALF_HOUR: '30 min',
|
||||
HOUR: 'Hour',
|
||||
SIX_HOURS: '6 hours',
|
||||
DAY: 'Day',
|
||||
WEEK: 'Week',
|
||||
WEEK_STARTING_SUNDAY: 'Week (Sun)',
|
||||
WEEK_STARTING_MONDAY: 'Week (Mon)',
|
||||
WEEK_ENDING_SATURDAY: 'Week (→Sat)',
|
||||
WEEK_ENDING_SUNDAY: 'Week (→Sun)',
|
||||
MONTH: 'Month',
|
||||
QUARTER: 'Quarter',
|
||||
QUARTER_YEAR: 'Quarter',
|
||||
YEAR: 'Year',
|
||||
};
|
||||
return mapping[grain] || grain;
|
||||
}
|
||||
|
||||
// Get list of supported time grains for tooltip
|
||||
function getSupportedTimeGrains(timeGrains?: TimeGrains): string[] {
|
||||
if (!timeGrains) return [];
|
||||
return Object.entries(timeGrains)
|
||||
.filter(([, supported]) => supported)
|
||||
.map(([grain]) => formatTimeGrain(grain));
|
||||
}
|
||||
|
||||
/**
 * Database index page: renders summary statistic cards, search and category
 * filters, and a sortable/filterable antd Table of all databases — including
 * "compatible" databases that reuse another database's driver.
 */
const DatabaseIndex: React.FC<DatabaseIndexProps> = ({ data }) => {
  const [searchText, setSearchText] = useState('');
  const [categoryFilter, setCategoryFilter] = useState<string | null>(null);

  const { statistics, databases } = data;

  // Convert databases object to array, including compatible databases
  const databaseList = useMemo(() => {
    const entries: TableEntry[] = [];

    Object.entries(databases).forEach(([name, db]) => {
      // Add the main database
      // Use categories from documentation metadata (computed by Python) when available
      entries.push({
        ...db,
        name,
        categories: getCategories(name, db.documentation?.categories),
        timeGrainCount: countTimeGrains(db),
        hasDrivers: (db.documentation?.drivers?.length ?? 0) > 0,
        hasAuthMethods: (db.documentation?.authentication_methods?.length ?? 0) > 0,
        // True when a connection string is given explicitly OR any driver
        // entry exists (driver entries carry their own connection strings).
        hasConnectionString: Boolean(
          db.documentation?.connection_string ||
          (db.documentation?.drivers?.length ?? 0) > 0
        ),
        hasCustomErrors: (db.documentation?.custom_errors?.length ?? 0) > 0,
        customErrorCount: db.documentation?.custom_errors?.length ?? 0,
        isCompatible: false,
      });

      // Add compatible databases from this database's documentation
      const compatibleDbs = db.documentation?.compatible_databases ?? [];
      compatibleDbs.forEach((compat) => {
        // Check if this compatible DB already exists as a main entry
        // (case-insensitive, so a compatible listing never shadows a real one)
        const existsAsMain = Object.keys(databases).some(
          (dbName) => dbName.toLowerCase() === compat.name.toLowerCase()
        );

        if (!existsAsMain) {
          // Compatible databases: use their categories if defined, or infer from name
          entries.push({
            name: compat.name,
            categories: getCategories(compat.name, compat.categories),
            // Compatible DBs inherit scores from parent
            score: db.score,
            max_score: db.max_score,
            timeGrainCount: countTimeGrains(db),
            hasDrivers: false,
            hasAuthMethods: false,
            hasConnectionString: Boolean(compat.connection_string),
            hasCustomErrors: false,
            customErrorCount: 0,
            // Capability flags are inherited from the parent driver too.
            joins: db.joins,
            subqueries: db.subqueries,
            supports_dynamic_schema: db.supports_dynamic_schema,
            supports_catalog: db.supports_catalog,
            ssh_tunneling: db.ssh_tunneling,
            documentation: {
              description: compat.description,
              connection_string: compat.connection_string,
              pypi_packages: compat.pypi_packages,
            },
            isCompatible: true,
            compatibleWith: name,
            compatibleDescription: `Uses ${name} driver`,
          });
        }
      });
    });

    return entries;
  }, [databases]);

  // Filter and sort databases.
  // Search matches name or description (case-insensitive); the sort here is
  // only the initial ordering — the Table's own column sorters can override.
  const filteredDatabases = useMemo(() => {
    return databaseList
      .filter((db) => {
        const matchesSearch =
          !searchText ||
          db.name.toLowerCase().includes(searchText.toLowerCase()) ||
          db.documentation?.description
            ?.toLowerCase()
            .includes(searchText.toLowerCase());
        const matchesCategory = !categoryFilter || db.categories.includes(categoryFilter);
        return matchesSearch && matchesCategory;
      })
      .sort((a, b) => b.score - a.score);
  }, [databaseList, searchText, categoryFilter]);

  // Get unique categories and counts for filter
  const { categories, categoryCounts } = useMemo(() => {
    const counts: Record<string, number> = {};
    databaseList.forEach((db) => {
      // Count each category the database belongs to
      db.categories.forEach((cat) => {
        counts[cat] = (counts[cat] || 0) + 1;
      });
    });
    return {
      categories: Object.keys(counts).sort(),
      categoryCounts: counts,
    };
  }, [databaseList]);

  // Table columns
  const columns = [
    {
      title: 'Database',
      dataIndex: 'name',
      key: 'name',
      sorter: (a: TableEntry, b: TableEntry) => a.name.localeCompare(b.name),
      render: (name: string, record: TableEntry) => {
        // Convert name to URL slug (non-alphanumerics collapse to single
        // dashes; leading/trailing dashes are stripped)
        const toSlug = (n: string) => n.toLowerCase().replace(/[^a-z0-9]+/g, '-').replace(/^-|-$/g, '');

        // Link to parent for compatible DBs, otherwise to own page
        const linkTarget = record.isCompatible && record.compatibleWith
          ? `/docs/databases/supported/${toSlug(record.compatibleWith)}`
          : `/docs/databases/supported/${toSlug(name)}`;

        return (
          <div>
            <a href={linkTarget}>
              <strong>{name}</strong>
            </a>
            {record.isCompatible && record.compatibleWith && (
              <Tag
                icon={<LinkOutlined />}
                color="geekblue"
                style={{ marginLeft: 8, fontSize: '11px' }}
              >
                {record.compatibleWith} compatible
              </Tag>
            )}
            <div style={{ fontSize: '12px', color: '#666' }}>
              {record.documentation?.description?.slice(0, 80)}
              {(record.documentation?.description?.length ?? 0) > 80 ? '...' : ''}
            </div>
          </div>
        );
      },
    },
    {
      title: 'Categories',
      dataIndex: 'categories',
      key: 'categories',
      width: 220,
      filters: categories.map((cat) => ({ text: cat, value: cat })),
      onFilter: (value: React.Key | boolean, record: TableEntry) =>
        record.categories.includes(value as string),
      render: (cats: string[]) => (
        <div style={{ display: 'flex', gap: '4px', flexWrap: 'wrap' }}>
          {cats.map((cat) => (
            <Tag key={cat} color={CATEGORY_COLORS[cat] || 'default'}>{cat}</Tag>
          ))}
        </div>
      ),
    },
    {
      title: 'Score',
      dataIndex: 'score',
      key: 'score',
      width: 80,
      sorter: (a: TableEntry, b: TableEntry) => a.score - b.score,
      defaultSortOrder: 'descend' as const,
      // Color thresholds: >150 green/bold, >100 blue, otherwise gray.
      render: (score: number, record: TableEntry) => (
        <span
          style={{
            color: score > 150 ? '#52c41a' : score > 100 ? '#1890ff' : '#666',
            fontWeight: score > 150 ? 'bold' : 'normal',
          }}
        >
          {score}/{record.max_score}
        </span>
      ),
    },
    {
      title: 'Time Grains',
      dataIndex: 'timeGrainCount',
      key: 'timeGrainCount',
      width: 100,
      sorter: (a: TableEntry, b: TableEntry) => a.timeGrainCount - b.timeGrainCount,
      // Shows "N grains" with a tooltip listing each supported grain label.
      render: (count: number, record: TableEntry) => {
        if (count === 0) return <span>-</span>;
        const grains = getSupportedTimeGrains(record.time_grains);
        return (
          <Tooltip
            title={
              <div style={{ display: 'flex', flexWrap: 'wrap', gap: '4px', maxWidth: 280 }}>
                {grains.map((grain) => (
                  <Tag key={grain} style={{ margin: 0 }}>{grain}</Tag>
                ))}
              </div>
            }
            placement="top"
          >
            <span style={{ cursor: 'help', borderBottom: '1px dotted #999' }}>
              {count} grains
            </span>
          </Tooltip>
        );
      },
    },
    {
      title: 'Features',
      key: 'features',
      width: 280,
      filters: [
        { text: 'JOINs', value: 'joins' },
        { text: 'Subqueries', value: 'subqueries' },
        { text: 'Dynamic Schema', value: 'dynamic_schema' },
        { text: 'Catalog', value: 'catalog' },
        { text: 'SSH Tunneling', value: 'ssh' },
        { text: 'File Upload', value: 'file_upload' },
        { text: 'Query Cancel', value: 'query_cancel' },
        { text: 'Cost Estimation', value: 'cost_estimation' },
        { text: 'User Impersonation', value: 'impersonation' },
        { text: 'SQL Validation', value: 'sql_validation' },
      ],
      // Maps each filter token to the corresponding capability flag.
      onFilter: (value: React.Key | boolean, record: TableEntry) => {
        switch (value) {
          case 'joins':
            return Boolean(record.joins);
          case 'subqueries':
            return Boolean(record.subqueries);
          case 'dynamic_schema':
            return Boolean(record.supports_dynamic_schema);
          case 'catalog':
            return Boolean(record.supports_catalog);
          case 'ssh':
            return Boolean(record.ssh_tunneling);
          case 'file_upload':
            return Boolean(record.supports_file_upload);
          case 'query_cancel':
            return Boolean(record.query_cancelation);
          case 'cost_estimation':
            return Boolean(record.query_cost_estimation);
          case 'impersonation':
            return Boolean(record.user_impersonation);
          case 'sql_validation':
            return Boolean(record.sql_validation);
          default:
            return true;
        }
      },
      render: (_: unknown, record: TableEntry) => (
        <div style={{ display: 'flex', gap: '4px', flexWrap: 'wrap' }}>
          {record.joins && <Tag color="green">JOINs</Tag>}
          {record.subqueries && <Tag color="green">Subqueries</Tag>}
          {record.supports_dynamic_schema && <Tag color="blue">Dynamic Schema</Tag>}
          {record.supports_catalog && <Tag color="purple">Catalog</Tag>}
          {record.ssh_tunneling && <Tag color="cyan">SSH</Tag>}
          {record.supports_file_upload && <Tag color="orange">File Upload</Tag>}
          {record.query_cancelation && <Tag color="volcano">Query Cancel</Tag>}
          {record.query_cost_estimation && <Tag color="gold">Cost Est.</Tag>}
          {record.user_impersonation && <Tag color="magenta">Impersonation</Tag>}
          {record.sql_validation && <Tag color="lime">SQL Validation</Tag>}
        </div>
      ),
    },
    {
      title: 'Documentation',
      key: 'docs',
      width: 180,
      render: (_: unknown, record: TableEntry) => (
        <div style={{ display: 'flex', gap: '4px', flexWrap: 'wrap' }}>
          {record.hasConnectionString && (
            <Tag icon={<ApiOutlined />} color="default">
              Connection
            </Tag>
          )}
          {record.hasDrivers && (
            <Tag icon={<DatabaseOutlined />} color="default">
              Drivers
            </Tag>
          )}
          {record.hasAuthMethods && (
            <Tag icon={<KeyOutlined />} color="default">
              Auth
            </Tag>
          )}
          {record.hasCustomErrors && (
            <Tooltip title={`${record.customErrorCount} troubleshooting tips`}>
              <Tag icon={<BugOutlined />} color="volcano">
                Errors
              </Tag>
            </Tooltip>
          )}
        </div>
      ),
    },
  ];

  return (
    <div className="database-index">
      {/* Statistics Cards */}
      <Row gutter={[16, 16]} style={{ marginBottom: 24 }}>
        <Col xs={12} sm={6}>
          <Card>
            <Statistic
              title="Total Databases"
              value={statistics.totalDatabases}
              prefix={<DatabaseOutlined />}
            />
          </Card>
        </Col>
        <Col xs={12} sm={6}>
          <Card>
            <Statistic
              title="With Documentation"
              value={statistics.withDocumentation}
              prefix={<CheckCircleOutlined />}
              suffix={`/ ${statistics.totalDatabases}`}
            />
          </Card>
        </Col>
        <Col xs={12} sm={6}>
          <Card>
            <Statistic
              title="Multiple Drivers"
              value={statistics.withDrivers}
              prefix={<ApiOutlined />}
            />
          </Card>
        </Col>
        <Col xs={12} sm={6}>
          <Card>
            <Statistic
              title="Auth Methods"
              value={statistics.withAuthMethods}
              prefix={<KeyOutlined />}
            />
          </Card>
        </Col>
      </Row>

      {/* Filters */}
      <Row gutter={[16, 16]} style={{ marginBottom: 16 }}>
        <Col xs={24} sm={12}>
          <Input
            placeholder="Search databases..."
            prefix={<SearchOutlined />}
            value={searchText}
            onChange={(e) => setSearchText(e.target.value)}
            allowClear
          />
        </Col>
        <Col xs={24} sm={12}>
          <Select
            placeholder="Filter by category"
            style={{ width: '100%' }}
            value={categoryFilter}
            onChange={setCategoryFilter}
            allowClear
            options={categories.map((cat) => ({
              label: (
                <span>
                  <Tag
                    color={CATEGORY_COLORS[cat] || 'default'}
                    style={{ marginRight: 8 }}
                  >
                    {categoryCounts[cat] || 0}
                  </Tag>
                  {cat}
                </span>
              ),
              value: cat,
            }))}
          />
        </Col>
      </Row>

      {/* Database Table — compatible entries get a composite row key
          (parent-name) so two parents listing the same compatible DB name
          cannot collide */}
      <Table
        dataSource={filteredDatabases}
        columns={columns}
        rowKey={(record) => record.isCompatible ? `${record.compatibleWith}-${record.name}` : record.name}
        pagination={{
          pageSize: 20,
          showSizeChanger: true,
          showTotal: (total) => `${total} databases`,
        }}
        size="middle"
      />
    </div>
  );
};
|
||||
|
||||
export default DatabaseIndex;
|
||||
788
docs/src/components/databases/DatabasePage.tsx
Normal file
@@ -0,0 +1,788 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import React from 'react';
|
||||
import {
|
||||
Card,
|
||||
Collapse,
|
||||
Table,
|
||||
Tag,
|
||||
Typography,
|
||||
Alert,
|
||||
Space,
|
||||
Divider,
|
||||
Tabs,
|
||||
} from 'antd';
|
||||
import {
|
||||
CheckCircleOutlined,
|
||||
CloseCircleOutlined,
|
||||
WarningOutlined,
|
||||
LinkOutlined,
|
||||
KeyOutlined,
|
||||
SettingOutlined,
|
||||
BookOutlined,
|
||||
EditOutlined,
|
||||
GithubOutlined,
|
||||
BugOutlined,
|
||||
} from '@ant-design/icons';
|
||||
import type { DatabaseInfo } from './types';
|
||||
|
||||
// Simple code block component for connection strings
|
||||
const CodeBlock: React.FC<{ children: React.ReactNode }> = ({ children }) => (
|
||||
<pre
|
||||
style={{
|
||||
background: 'var(--ifm-code-background)',
|
||||
padding: '12px 16px',
|
||||
borderRadius: '4px',
|
||||
overflow: 'auto',
|
||||
fontSize: '13px',
|
||||
fontFamily: 'var(--ifm-font-family-monospace)',
|
||||
}}
|
||||
>
|
||||
<code>{children}</code>
|
||||
</pre>
|
||||
);
|
||||
|
||||
const { Title, Paragraph, Text } = Typography;
|
||||
|
||||
interface DatabasePageProps {
|
||||
database: DatabaseInfo;
|
||||
name: string;
|
||||
}
|
||||
|
||||
// Feature badge component
|
||||
const FeatureBadge: React.FC<{ supported: boolean; label: string }> = ({
|
||||
supported,
|
||||
label,
|
||||
}) => (
|
||||
<Tag
|
||||
icon={supported ? <CheckCircleOutlined /> : <CloseCircleOutlined />}
|
||||
color={supported ? 'success' : 'default'}
|
||||
>
|
||||
{label}
|
||||
</Tag>
|
||||
);
|
||||
|
||||
// Time grain badge
|
||||
const TimeGrainBadge: React.FC<{ supported: boolean; grain: string }> = ({
|
||||
supported,
|
||||
grain,
|
||||
}) => (
|
||||
<Tag color={supported ? 'blue' : 'default'} style={{ margin: '2px' }}>
|
||||
{grain}
|
||||
</Tag>
|
||||
);
|
||||
|
||||
const DatabasePage: React.FC<DatabasePageProps> = ({ database, name }) => {
|
||||
const { documentation: docs } = database;
|
||||
|
||||
// Helper to render connection string with copy button
|
||||
const renderConnectionString = (connStr: string, description?: string) => (
|
||||
<div style={{ marginBottom: 16 }}>
|
||||
{description && (
|
||||
<Text type="secondary" style={{ display: 'block', marginBottom: 4 }}>
|
||||
{description}
|
||||
</Text>
|
||||
)}
|
||||
<CodeBlock>{connStr}</CodeBlock>
|
||||
</div>
|
||||
);
|
||||
|
||||
// Render driver information
|
||||
const renderDrivers = () => {
|
||||
if (!docs?.drivers?.length) return null;
|
||||
|
||||
return (
|
||||
<Card title="Drivers" style={{ marginBottom: 16 }}>
|
||||
<Tabs
|
||||
items={docs.drivers.map((driver, idx) => ({
|
||||
key: String(idx),
|
||||
label: (
|
||||
<span>
|
||||
{driver.name}
|
||||
{driver.is_recommended && (
|
||||
<Tag color="green" style={{ marginLeft: 8 }}>
|
||||
Recommended
|
||||
</Tag>
|
||||
)}
|
||||
</span>
|
||||
),
|
||||
children: (
|
||||
<Space direction="vertical" style={{ width: '100%' }}>
|
||||
{driver.pypi_package && (
|
||||
<div>
|
||||
<Text strong>PyPI Package: </Text>
|
||||
<code>{driver.pypi_package}</code>
|
||||
</div>
|
||||
)}
|
||||
{driver.connection_string &&
|
||||
renderConnectionString(driver.connection_string)}
|
||||
{driver.notes && (
|
||||
<Alert message={driver.notes} type="info" showIcon />
|
||||
)}
|
||||
{driver.docs_url && (
|
||||
<a href={driver.docs_url} target="_blank" rel="noreferrer">
|
||||
<LinkOutlined /> Documentation
|
||||
</a>
|
||||
)}
|
||||
</Space>
|
||||
),
|
||||
}))}
|
||||
/>
|
||||
</Card>
|
||||
);
|
||||
};
|
||||
|
||||
// Render authentication methods
|
||||
const renderAuthMethods = () => {
|
||||
if (!docs?.authentication_methods?.length) return null;
|
||||
|
||||
return (
|
||||
<Card
|
||||
title={
|
||||
<>
|
||||
<KeyOutlined /> Authentication Methods
|
||||
</>
|
||||
}
|
||||
style={{ marginBottom: 16 }}
|
||||
>
|
||||
<Collapse
|
||||
accordion
|
||||
items={docs.authentication_methods.map((auth, idx) => ({
|
||||
key: String(idx),
|
||||
label: auth.name,
|
||||
children: (
|
||||
<>
|
||||
{auth.description && <Paragraph>{auth.description}</Paragraph>}
|
||||
{auth.requirements && (
|
||||
<Alert
|
||||
message="Requirements"
|
||||
description={auth.requirements}
|
||||
type="warning"
|
||||
showIcon
|
||||
style={{ marginBottom: 16 }}
|
||||
/>
|
||||
)}
|
||||
{auth.connection_string &&
|
||||
renderConnectionString(
|
||||
auth.connection_string,
|
||||
'Connection String',
|
||||
)}
|
||||
{auth.secure_extra && (
|
||||
<div>
|
||||
<Text strong>Secure Extra Configuration:</Text>
|
||||
<CodeBlock>
|
||||
{JSON.stringify(auth.secure_extra, null, 2)}
|
||||
</CodeBlock>
|
||||
</div>
|
||||
)}
|
||||
{auth.engine_parameters && (
|
||||
<div>
|
||||
<Text strong>Engine Parameters:</Text>
|
||||
<CodeBlock>
|
||||
{JSON.stringify(auth.engine_parameters, null, 2)}
|
||||
</CodeBlock>
|
||||
</div>
|
||||
)}
|
||||
{auth.notes && (
|
||||
<Alert message={auth.notes} type="info" showIcon />
|
||||
)}
|
||||
</>
|
||||
),
|
||||
}))}
|
||||
/>
|
||||
</Card>
|
||||
);
|
||||
};
|
||||
|
||||
// Render engine parameters
|
||||
const renderEngineParams = () => {
|
||||
if (!docs?.engine_parameters?.length) return null;
|
||||
|
||||
return (
|
||||
<Card
|
||||
title={
|
||||
<>
|
||||
<SettingOutlined /> Engine Parameters
|
||||
</>
|
||||
}
|
||||
style={{ marginBottom: 16 }}
|
||||
>
|
||||
<Collapse
|
||||
items={docs.engine_parameters.map((param, idx) => ({
|
||||
key: String(idx),
|
||||
label: param.name,
|
||||
children: (
|
||||
<>
|
||||
{param.description && (
|
||||
<Paragraph>{param.description}</Paragraph>
|
||||
)}
|
||||
{param.json && (
|
||||
<CodeBlock>{JSON.stringify(param.json, null, 2)}</CodeBlock>
|
||||
)}
|
||||
{param.docs_url && (
|
||||
<a href={param.docs_url} target="_blank" rel="noreferrer">
|
||||
<LinkOutlined /> Learn more
|
||||
</a>
|
||||
)}
|
||||
</>
|
||||
),
|
||||
}))}
|
||||
/>
|
||||
</Card>
|
||||
);
|
||||
};
|
||||
|
||||
// Render compatible databases (for PostgreSQL, etc.)
|
||||
const renderCompatibleDatabases = () => {
|
||||
if (!docs?.compatible_databases?.length) return null;
|
||||
|
||||
// Create array of all item keys to expand by default
|
||||
const allItemKeys = docs.compatible_databases.map((_, idx) => String(idx));
|
||||
|
||||
return (
|
||||
<Card title="Compatible Databases" style={{ marginBottom: 16 }}>
|
||||
<Paragraph>
|
||||
The following databases are compatible with the {name} driver:
|
||||
</Paragraph>
|
||||
<Collapse
|
||||
defaultActiveKey={allItemKeys}
|
||||
items={docs.compatible_databases.map((compat, idx) => ({
|
||||
key: String(idx),
|
||||
label: (
|
||||
<div style={{ display: 'flex', alignItems: 'center', gap: 12 }}>
|
||||
{compat.logo && (
|
||||
<img
|
||||
src={`/img/databases/${compat.logo}`}
|
||||
alt={compat.name}
|
||||
style={{
|
||||
width: 28,
|
||||
height: 28,
|
||||
objectFit: 'contain',
|
||||
}}
|
||||
/>
|
||||
)}
|
||||
<span>{compat.name}</span>
|
||||
</div>
|
||||
),
|
||||
children: (
|
||||
<>
|
||||
{compat.description && (
|
||||
<Paragraph>{compat.description}</Paragraph>
|
||||
)}
|
||||
{compat.connection_string &&
|
||||
renderConnectionString(compat.connection_string)}
|
||||
{compat.parameters && (
|
||||
<div>
|
||||
<Text strong>Parameters:</Text>
|
||||
<Table
|
||||
dataSource={Object.entries(compat.parameters).map(
|
||||
([key, value]) => ({
|
||||
key,
|
||||
parameter: key,
|
||||
description: value,
|
||||
}),
|
||||
)}
|
||||
columns={[
|
||||
{
|
||||
title: 'Parameter',
|
||||
dataIndex: 'parameter',
|
||||
key: 'p',
|
||||
},
|
||||
{
|
||||
title: 'Description',
|
||||
dataIndex: 'description',
|
||||
key: 'd',
|
||||
},
|
||||
]}
|
||||
pagination={false}
|
||||
size="small"
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
{compat.notes && (
|
||||
<Alert
|
||||
message={compat.notes}
|
||||
type="info"
|
||||
showIcon
|
||||
style={{ marginTop: 16 }}
|
||||
/>
|
||||
)}
|
||||
</>
|
||||
),
|
||||
}))}
|
||||
/>
|
||||
</Card>
|
||||
);
|
||||
};
|
||||
|
||||
// Render feature matrix
|
||||
const renderFeatures = () => {
|
||||
const features: Array<{ key: keyof DatabaseInfo; label: string }> = [
|
||||
{ key: 'joins', label: 'JOINs' },
|
||||
{ key: 'subqueries', label: 'Subqueries' },
|
||||
{ key: 'supports_dynamic_schema', label: 'Dynamic Schema' },
|
||||
{ key: 'supports_catalog', label: 'Catalog Support' },
|
||||
{ key: 'supports_dynamic_catalog', label: 'Dynamic Catalog' },
|
||||
{ key: 'ssh_tunneling', label: 'SSH Tunneling' },
|
||||
{ key: 'query_cancelation', label: 'Query Cancellation' },
|
||||
{ key: 'supports_file_upload', label: 'File Upload' },
|
||||
{ key: 'user_impersonation', label: 'User Impersonation' },
|
||||
{ key: 'query_cost_estimation', label: 'Cost Estimation' },
|
||||
{ key: 'sql_validation', label: 'SQL Validation' },
|
||||
];
|
||||
|
||||
return (
|
||||
<Card title="Supported Features" style={{ marginBottom: 16 }}>
|
||||
<div style={{ display: 'flex', flexWrap: 'wrap', gap: 8 }}>
|
||||
{features.map(({ key, label }) => (
|
||||
<FeatureBadge
|
||||
key={key}
|
||||
supported={Boolean(database[key])}
|
||||
label={label}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
{database.score > 0 && (
|
||||
<div style={{ marginTop: 16 }}>
|
||||
<Text>
|
||||
Feature Score:{' '}
|
||||
<Text strong>
|
||||
{database.score}/{database.max_score}
|
||||
</Text>
|
||||
</Text>
|
||||
</div>
|
||||
)}
|
||||
</Card>
|
||||
);
|
||||
};
|
||||
|
||||
// Render time grains
|
||||
const renderTimeGrains = () => {
|
||||
if (!database.time_grains) return null;
|
||||
|
||||
const commonGrains = [
|
||||
'SECOND',
|
||||
'MINUTE',
|
||||
'HOUR',
|
||||
'DAY',
|
||||
'WEEK',
|
||||
'MONTH',
|
||||
'QUARTER',
|
||||
'YEAR',
|
||||
];
|
||||
const extendedGrains = Object.keys(database.time_grains).filter(
|
||||
g => !commonGrains.includes(g),
|
||||
);
|
||||
|
||||
return (
|
||||
<Card title="Time Grains" style={{ marginBottom: 16 }}>
|
||||
<div style={{ marginBottom: 16 }}>
|
||||
<Text strong>Common Time Grains:</Text>
|
||||
<div style={{ marginTop: 8 }}>
|
||||
{commonGrains.map(grain => (
|
||||
<TimeGrainBadge
|
||||
key={grain}
|
||||
grain={grain}
|
||||
supported={Boolean(
|
||||
database.time_grains[
|
||||
grain as keyof typeof database.time_grains
|
||||
],
|
||||
)}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
{extendedGrains.length > 0 && (
|
||||
<div>
|
||||
<Text strong>Extended Time Grains:</Text>
|
||||
<div style={{ marginTop: 8 }}>
|
||||
{extendedGrains.map(grain => (
|
||||
<TimeGrainBadge
|
||||
key={grain}
|
||||
grain={grain}
|
||||
supported={Boolean(
|
||||
database.time_grains[
|
||||
grain as keyof typeof database.time_grains
|
||||
],
|
||||
)}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</Card>
|
||||
);
|
||||
};
|
||||
|
||||
// Render troubleshooting / custom errors section
|
||||
const renderTroubleshooting = () => {
|
||||
if (!docs?.custom_errors?.length) return null;
|
||||
|
||||
// Group errors by category
|
||||
const errorsByCategory: Record<string, typeof docs.custom_errors> = {};
|
||||
for (const error of docs.custom_errors) {
|
||||
const category = error.category || 'General';
|
||||
if (!errorsByCategory[category]) {
|
||||
errorsByCategory[category] = [];
|
||||
}
|
||||
errorsByCategory[category].push(error);
|
||||
}
|
||||
|
||||
// Define category order for consistent display
|
||||
const categoryOrder = [
|
||||
'Authentication',
|
||||
'Connection',
|
||||
'Permissions',
|
||||
'Query',
|
||||
'Configuration',
|
||||
'General',
|
||||
];
|
||||
|
||||
const sortedCategories = Object.keys(errorsByCategory).sort((a, b) => {
|
||||
const aIdx = categoryOrder.indexOf(a);
|
||||
const bIdx = categoryOrder.indexOf(b);
|
||||
if (aIdx === -1 && bIdx === -1) return a.localeCompare(b);
|
||||
if (aIdx === -1) return 1;
|
||||
if (bIdx === -1) return -1;
|
||||
return aIdx - bIdx;
|
||||
});
|
||||
|
||||
// Category colors
|
||||
const categoryColors: Record<string, string> = {
|
||||
Authentication: 'orange',
|
||||
Connection: 'red',
|
||||
Permissions: 'purple',
|
||||
Query: 'blue',
|
||||
Configuration: 'cyan',
|
||||
General: 'default',
|
||||
};
|
||||
|
||||
return (
|
||||
<Card
|
||||
title={
|
||||
<>
|
||||
<BugOutlined /> Troubleshooting
|
||||
</>
|
||||
}
|
||||
style={{ marginBottom: 16 }}
|
||||
>
|
||||
<Paragraph type="secondary">
|
||||
Common error messages you may encounter when connecting to or querying{' '}
|
||||
{name}, along with their causes and solutions.
|
||||
</Paragraph>
|
||||
<Collapse
|
||||
accordion
|
||||
items={sortedCategories.map(category => ({
|
||||
key: category,
|
||||
label: (
|
||||
<span>
|
||||
<Tag color={categoryColors[category] || 'default'}>
|
||||
{category}
|
||||
</Tag>
|
||||
{errorsByCategory[category].length} error
|
||||
{errorsByCategory[category].length !== 1 ? 's' : ''}
|
||||
</span>
|
||||
),
|
||||
children: (
|
||||
<>
|
||||
{errorsByCategory[category].map((error, idx) => (
|
||||
<div
|
||||
key={idx}
|
||||
style={{
|
||||
marginBottom:
|
||||
idx < errorsByCategory[category].length - 1 ? 16 : 0,
|
||||
paddingBottom:
|
||||
idx < errorsByCategory[category].length - 1 ? 16 : 0,
|
||||
borderBottom:
|
||||
idx < errorsByCategory[category].length - 1
|
||||
? '1px solid var(--ifm-color-emphasis-200)'
|
||||
: 'none',
|
||||
}}
|
||||
>
|
||||
<div style={{ marginBottom: 8 }}>
|
||||
<Text strong>
|
||||
{error.description || error.error_type}
|
||||
</Text>
|
||||
</div>
|
||||
<Alert
|
||||
message={error.message_template}
|
||||
type="error"
|
||||
style={{ marginBottom: 8 }}
|
||||
/>
|
||||
{error.invalid_fields &&
|
||||
error.invalid_fields.length > 0 && (
|
||||
<div style={{ marginBottom: 8 }}>
|
||||
<Text type="secondary">Check these fields: </Text>
|
||||
{error.invalid_fields.map(field => (
|
||||
<Tag key={field} color="warning">
|
||||
{field}
|
||||
</Tag>
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
{error.issue_codes && error.issue_codes.length > 0 && (
|
||||
<div>
|
||||
<Text type="secondary">Related issue codes: </Text>
|
||||
{error.issue_codes.map(code => (
|
||||
<Tag key={code}>
|
||||
<a
|
||||
href={`/docs/using-superset/issue-codes#issue-${code}`}
|
||||
style={{ color: 'inherit' }}
|
||||
>
|
||||
Issue {code}
|
||||
</a>
|
||||
</Tag>
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
))}
|
||||
</>
|
||||
),
|
||||
}))}
|
||||
/>
|
||||
</Card>
|
||||
);
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="database-page" id={name.toLowerCase().replace(/\s+/g, '-')}>
|
||||
<div style={{ marginBottom: 16 }}>
|
||||
{docs?.logo && (
|
||||
<img
|
||||
src={`/img/databases/${docs.logo}`}
|
||||
alt={name}
|
||||
style={{
|
||||
height: 120,
|
||||
objectFit: 'contain',
|
||||
marginBottom: 12,
|
||||
}}
|
||||
/>
|
||||
)}
|
||||
<Title level={1} style={{ margin: 0 }}>
|
||||
{name}
|
||||
</Title>
|
||||
{docs?.homepage_url && (
|
||||
<a
|
||||
href={docs.homepage_url}
|
||||
target="_blank"
|
||||
rel="noreferrer"
|
||||
style={{ fontSize: 14 }}
|
||||
>
|
||||
<LinkOutlined /> {docs.homepage_url}
|
||||
</a>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{docs?.description && <Paragraph>{docs.description}</Paragraph>}
|
||||
|
||||
{/* Warnings */}
|
||||
{docs?.warnings?.map((warning, idx) => (
|
||||
<Alert
|
||||
key={idx}
|
||||
message={warning}
|
||||
type="warning"
|
||||
icon={<WarningOutlined />}
|
||||
showIcon
|
||||
style={{ marginBottom: 16 }}
|
||||
/>
|
||||
))}
|
||||
|
||||
{/* Known Limitations */}
|
||||
{docs?.limitations?.length > 0 && (
|
||||
<Card
|
||||
title="Known Limitations"
|
||||
style={{ marginBottom: 16 }}
|
||||
type="inner"
|
||||
>
|
||||
<ul style={{ margin: 0, paddingLeft: 20 }}>
|
||||
{docs.limitations.map((limitation, idx) => (
|
||||
<li key={idx}>{limitation}</li>
|
||||
))}
|
||||
</ul>
|
||||
</Card>
|
||||
)}
|
||||
|
||||
{/* Installation */}
|
||||
{(docs?.pypi_packages?.length || docs?.install_instructions) && (
|
||||
<Card title="Installation" style={{ marginBottom: 16 }}>
|
||||
{docs.pypi_packages?.length > 0 && (
|
||||
<div style={{ marginBottom: 16 }}>
|
||||
<Text strong>Required packages: </Text>
|
||||
{docs.pypi_packages.map(pkg => (
|
||||
<Tag key={pkg} color="blue">
|
||||
{pkg}
|
||||
</Tag>
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
{docs.version_requirements && (
|
||||
<Alert
|
||||
message={`Version requirement: ${docs.version_requirements}`}
|
||||
type="info"
|
||||
showIcon
|
||||
style={{ marginBottom: 16 }}
|
||||
/>
|
||||
)}
|
||||
{docs.install_instructions && (
|
||||
<CodeBlock>{docs.install_instructions}</CodeBlock>
|
||||
)}
|
||||
</Card>
|
||||
)}
|
||||
|
||||
{/* Basic Connection */}
|
||||
{docs?.connection_string && !docs?.drivers?.length && (
|
||||
<Card title="Connection String" style={{ marginBottom: 16 }}>
|
||||
{renderConnectionString(docs.connection_string)}
|
||||
{docs.parameters && (
|
||||
<Table
|
||||
dataSource={Object.entries(docs.parameters).map(
|
||||
([key, value]) => ({
|
||||
key,
|
||||
parameter: key,
|
||||
description: value,
|
||||
}),
|
||||
)}
|
||||
columns={[
|
||||
{ title: 'Parameter', dataIndex: 'parameter', key: 'p' },
|
||||
{ title: 'Description', dataIndex: 'description', key: 'd' },
|
||||
]}
|
||||
pagination={false}
|
||||
size="small"
|
||||
/>
|
||||
)}
|
||||
{docs.default_port && (
|
||||
<Text type="secondary">Default port: {docs.default_port}</Text>
|
||||
)}
|
||||
</Card>
|
||||
)}
|
||||
|
||||
{/* Drivers */}
|
||||
{renderDrivers()}
|
||||
|
||||
{/* Connection Examples */}
|
||||
{docs?.connection_examples?.length > 0 && (
|
||||
<Card title="Connection Examples" style={{ marginBottom: 16 }}>
|
||||
{docs.connection_examples.map((example, idx) => (
|
||||
<div key={idx}>
|
||||
{renderConnectionString(
|
||||
example.connection_string,
|
||||
example.description,
|
||||
)}
|
||||
</div>
|
||||
))}
|
||||
</Card>
|
||||
)}
|
||||
|
||||
{/* Authentication Methods */}
|
||||
{renderAuthMethods()}
|
||||
|
||||
{/* Engine Parameters */}
|
||||
{renderEngineParams()}
|
||||
|
||||
{/* Features */}
|
||||
{renderFeatures()}
|
||||
|
||||
{/* Time Grains */}
|
||||
{renderTimeGrains()}
|
||||
|
||||
{/* Troubleshooting / Custom Errors */}
|
||||
{renderTroubleshooting()}
|
||||
|
||||
{/* Compatible Databases */}
|
||||
{renderCompatibleDatabases()}
|
||||
|
||||
{/* Notes */}
|
||||
{docs?.notes && (
|
||||
<Alert
|
||||
message="Notes"
|
||||
description={docs.notes}
|
||||
type="info"
|
||||
showIcon
|
||||
style={{ marginBottom: 16 }}
|
||||
/>
|
||||
)}
|
||||
|
||||
{/* External Links */}
|
||||
{(docs?.docs_url || docs?.tutorials?.length) && (
|
||||
<Card
|
||||
title={
|
||||
<>
|
||||
<BookOutlined /> Resources
|
||||
</>
|
||||
}
|
||||
style={{ marginBottom: 16 }}
|
||||
>
|
||||
<Space direction="vertical">
|
||||
{docs.docs_url && (
|
||||
<a href={docs.docs_url} target="_blank" rel="noreferrer">
|
||||
<LinkOutlined /> Official Documentation
|
||||
</a>
|
||||
)}
|
||||
{docs.sqlalchemy_docs_url && (
|
||||
<a
|
||||
href={docs.sqlalchemy_docs_url}
|
||||
target="_blank"
|
||||
rel="noreferrer"
|
||||
>
|
||||
<LinkOutlined /> SQLAlchemy Dialect Documentation
|
||||
</a>
|
||||
)}
|
||||
{docs.tutorials?.map((tutorial, idx) => (
|
||||
<a key={idx} href={tutorial} target="_blank" rel="noreferrer">
|
||||
<LinkOutlined /> Tutorial {idx + 1}
|
||||
</a>
|
||||
))}
|
||||
</Space>
|
||||
</Card>
|
||||
)}
|
||||
|
||||
{/* Edit link */}
|
||||
{database.module && (
|
||||
<Card
|
||||
style={{
|
||||
marginBottom: 16,
|
||||
background: 'var(--ifm-background-surface-color)',
|
||||
borderStyle: 'dashed',
|
||||
}}
|
||||
size="small"
|
||||
>
|
||||
<Space>
|
||||
<GithubOutlined />
|
||||
<Text type="secondary">
|
||||
Help improve this documentation by editing the engine spec:
|
||||
</Text>
|
||||
<a
|
||||
href={`https://github.com/apache/superset/edit/master/superset/db_engine_specs/${database.module}.py`}
|
||||
target="_blank"
|
||||
rel="noreferrer"
|
||||
>
|
||||
<EditOutlined /> Edit {database.module}.py
|
||||
</a>
|
||||
</Space>
|
||||
</Card>
|
||||
)}
|
||||
|
||||
<Divider />
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
export default DatabasePage;
|
||||
@@ -16,12 +16,7 @@
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
@primary-color: #20a7c9;
|
||||
@info-color: #66bcfe;
|
||||
@success-color: #59c189;
|
||||
@processing-color: #66bcfe;
|
||||
@error-color: #e04355;
|
||||
@highlight-color: #e04355;
|
||||
@normal-color: #d9d9d9;
|
||||
@white: #FFF;
|
||||
@black: #000;
|
||||
|
||||
export { default as DatabaseIndex } from './DatabaseIndex';
|
||||
export { default as DatabasePage } from './DatabasePage';
|
||||
export * from './types';
|
||||
255
docs/src/components/databases/types.ts
Normal file
@@ -0,0 +1,255 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
/**
|
||||
* TypeScript types for database documentation data
|
||||
* Generated from superset/db_engine_specs/lib.py
|
||||
*/
|
||||
|
||||
export interface Driver {
|
||||
name: string;
|
||||
pypi_package?: string;
|
||||
connection_string?: string;
|
||||
is_recommended?: boolean;
|
||||
notes?: string;
|
||||
docs_url?: string;
|
||||
default_port?: number;
|
||||
odbc_driver_paths?: Record<string, string>;
|
||||
environment_variables?: Record<string, string>;
|
||||
}
|
||||
|
||||
export interface ConnectionExample {
|
||||
description: string;
|
||||
connection_string: string;
|
||||
}
|
||||
|
||||
export interface HostExample {
|
||||
platform: string;
|
||||
host: string;
|
||||
}
|
||||
|
||||
export interface AuthenticationMethod {
|
||||
name: string;
|
||||
description?: string;
|
||||
requirements?: string;
|
||||
connection_string?: string;
|
||||
secure_extra?: Record<string, unknown>;
|
||||
secure_extra_body?: Record<string, unknown>;
|
||||
secure_extra_path?: Record<string, unknown>;
|
||||
engine_parameters?: Record<string, unknown>;
|
||||
config_example?: Record<string, unknown>;
|
||||
notes?: string;
|
||||
}
|
||||
|
||||
export interface EngineParameter {
|
||||
name: string;
|
||||
description?: string;
|
||||
json?: Record<string, unknown>;
|
||||
secure_extra?: Record<string, unknown>;
|
||||
docs_url?: string;
|
||||
}
|
||||
|
||||
export interface SSLConfiguration {
|
||||
custom_certificate?: string;
|
||||
disable_ssl_verification?: {
|
||||
engine_params?: Record<string, unknown>;
|
||||
};
|
||||
}
|
||||
|
||||
export interface CompatibleDatabase {
|
||||
name: string;
|
||||
description?: string;
|
||||
logo?: string;
|
||||
homepage_url?: string;
|
||||
categories?: string[]; // Category classifications (e.g., ["TRADITIONAL_RDBMS", "OPEN_SOURCE"])
|
||||
pypi_packages?: string[];
|
||||
connection_string?: string;
|
||||
parameters?: Record<string, string>;
|
||||
connection_examples?: ConnectionExample[];
|
||||
notes?: string;
|
||||
docs_url?: string;
|
||||
}
|
||||
|
||||
export interface CustomError {
|
||||
error_type: string; // e.g., "CONNECTION_INVALID_USERNAME_ERROR"
|
||||
message_template: string; // e.g., 'The username "%(username)s" does not exist.'
|
||||
regex_pattern?: string; // The regex pattern that matches this error (optional, for reference)
|
||||
regex_name?: string; // The name of the regex constant (e.g., "CONNECTION_INVALID_USERNAME_REGEX")
|
||||
invalid_fields?: string[]; // Fields that are invalid, e.g., ["username", "password"]
|
||||
issue_codes?: number[]; // Related issue codes from ISSUE_CODES mapping
|
||||
category?: string; // Error category: "Authentication", "Connection", "Query", etc.
|
||||
description?: string; // Human-readable short description of the error type
|
||||
}
|
||||
|
||||
export interface DatabaseDocumentation {
|
||||
description?: string;
|
||||
logo?: string;
|
||||
homepage_url?: string;
|
||||
categories?: string[]; // Category classifications (e.g., ["TRADITIONAL_RDBMS", "OPEN_SOURCE"])
|
||||
pypi_packages?: string[];
|
||||
connection_string?: string;
|
||||
default_port?: number;
|
||||
parameters?: Record<string, string>;
|
||||
notes?: string;
|
||||
limitations?: string[]; // Known limitations or caveats
|
||||
connection_examples?: ConnectionExample[];
|
||||
host_examples?: HostExample[];
|
||||
drivers?: Driver[];
|
||||
authentication_methods?: AuthenticationMethod[];
|
||||
engine_parameters?: EngineParameter[];
|
||||
ssl_configuration?: SSLConfiguration;
|
||||
version_requirements?: string;
|
||||
install_instructions?: string;
|
||||
warnings?: string[];
|
||||
tutorials?: string[];
|
||||
docs_url?: string;
|
||||
sqlalchemy_docs_url?: string;
|
||||
advanced_features?: Record<string, string>;
|
||||
compatible_databases?: CompatibleDatabase[];
|
||||
custom_errors?: CustomError[]; // Database-specific error messages and troubleshooting info
|
||||
}
|
||||
|
||||
export interface TimeGrains {
|
||||
SECOND?: boolean;
|
||||
MINUTE?: boolean;
|
||||
HOUR?: boolean;
|
||||
DAY?: boolean;
|
||||
WEEK?: boolean;
|
||||
MONTH?: boolean;
|
||||
QUARTER?: boolean;
|
||||
YEAR?: boolean;
|
||||
FIVE_SECONDS?: boolean;
|
||||
THIRTY_SECONDS?: boolean;
|
||||
FIVE_MINUTES?: boolean;
|
||||
TEN_MINUTES?: boolean;
|
||||
FIFTEEN_MINUTES?: boolean;
|
||||
THIRTY_MINUTES?: boolean;
|
||||
HALF_HOUR?: boolean;
|
||||
SIX_HOURS?: boolean;
|
||||
WEEK_STARTING_SUNDAY?: boolean;
|
||||
WEEK_STARTING_MONDAY?: boolean;
|
||||
WEEK_ENDING_SATURDAY?: boolean;
|
||||
WEEK_ENDING_SUNDAY?: boolean;
|
||||
QUARTER_YEAR?: boolean;
|
||||
}
|
||||
|
||||
export interface DatabaseInfo {
|
||||
engine: string;
|
||||
engine_name: string;
|
||||
engine_aliases?: string[];
|
||||
default_driver?: string;
|
||||
module?: string;
|
||||
documentation: DatabaseDocumentation;
|
||||
|
||||
// Diagnostics from lib.py diagnose() function
|
||||
time_grains: TimeGrains;
|
||||
score: number;
|
||||
max_score: number;
|
||||
|
||||
// SQL capabilities
|
||||
joins: boolean;
|
||||
subqueries: boolean;
|
||||
alias_in_select?: boolean;
|
||||
alias_in_orderby?: boolean;
|
||||
cte_in_subquery?: boolean;
|
||||
sql_comments?: boolean;
|
||||
escaped_colons?: boolean;
|
||||
time_groupby_inline?: boolean;
|
||||
alias_to_source_column?: boolean;
|
||||
order_by_not_in_select?: boolean;
|
||||
expressions_in_orderby?: boolean;
|
||||
|
||||
// Platform features
|
||||
limit_method?: string;
|
||||
limit_clause?: boolean;
|
||||
max_column_name?: number;
|
||||
supports_file_upload?: boolean;
|
||||
supports_dynamic_schema?: boolean;
|
||||
supports_catalog?: boolean;
|
||||
supports_dynamic_catalog?: boolean;
|
||||
|
||||
// Advanced features
|
||||
user_impersonation?: boolean;
|
||||
ssh_tunneling?: boolean;
|
||||
query_cancelation?: boolean;
|
||||
expand_data?: boolean;
|
||||
query_cost_estimation?: boolean;
|
||||
sql_validation?: boolean;
|
||||
get_metrics?: boolean;
|
||||
where_latest_partition?: boolean;
|
||||
get_extra_table_metadata?: boolean;
|
||||
dbapi_exception_mapping?: boolean;
|
||||
custom_errors?: boolean;
|
||||
masked_encrypted_extra?: boolean;
|
||||
column_type_mapping?: boolean;
|
||||
function_names?: boolean;
|
||||
}
|
||||
|
||||
export interface Statistics {
|
||||
totalDatabases: number;
|
||||
withDocumentation: number;
|
||||
withConnectionString: number;
|
||||
withDrivers: number;
|
||||
withAuthMethods: number;
|
||||
supportsJoins: number;
|
||||
supportsSubqueries: number;
|
||||
supportsDynamicSchema: number;
|
||||
supportsCatalog: number;
|
||||
averageScore: number;
|
||||
maxScore: number;
|
||||
byCategory: Record<string, string[]>;
|
||||
}
|
||||
|
||||
export interface DatabaseData {
|
||||
generated: string;
|
||||
statistics: Statistics;
|
||||
databases: Record<string, DatabaseInfo>;
|
||||
}
|
||||
|
||||
// Helper type for sorting databases
|
||||
export type SortField = 'name' | 'score' | 'category';
|
||||
export type SortDirection = 'asc' | 'desc';
|
||||
|
||||
// Helper to get common time grains
|
||||
export const COMMON_TIME_GRAINS = [
|
||||
'SECOND',
|
||||
'MINUTE',
|
||||
'HOUR',
|
||||
'DAY',
|
||||
'WEEK',
|
||||
'MONTH',
|
||||
'QUARTER',
|
||||
'YEAR',
|
||||
] as const;
|
||||
|
||||
export const EXTENDED_TIME_GRAINS = [
|
||||
'FIVE_SECONDS',
|
||||
'THIRTY_SECONDS',
|
||||
'FIVE_MINUTES',
|
||||
'TEN_MINUTES',
|
||||
'FIFTEEN_MINUTES',
|
||||
'THIRTY_MINUTES',
|
||||
'HALF_HOUR',
|
||||
'SIX_HOURS',
|
||||
'WEEK_STARTING_SUNDAY',
|
||||
'WEEK_STARTING_MONDAY',
|
||||
'WEEK_ENDING_SATURDAY',
|
||||
'WEEK_ENDING_SUNDAY',
|
||||
'QUARTER_YEAR',
|
||||
] as const;
|
||||
5923
docs/src/data/databases.json
Normal file
@@ -18,7 +18,6 @@
|
||||
*/
|
||||
import { useState } from 'react';
|
||||
import styled from '@emotion/styled';
|
||||
import { List } from 'antd';
|
||||
import Layout from '@theme/Layout';
|
||||
import { mq } from '../utils';
|
||||
import SectionHeader from '../components/SectionHeader';
|
||||
@@ -92,8 +91,12 @@ const StyledJoinCommunity = styled('section')`
|
||||
max-width: 540px;
|
||||
margin: 0 auto;
|
||||
padding: 40px 20px 20px 35px;
|
||||
list-style: none;
|
||||
}
|
||||
.item {
|
||||
display: flex;
|
||||
align-items: flex-start;
|
||||
gap: 12px;
|
||||
padding: 0;
|
||||
border: 0;
|
||||
}
|
||||
@@ -189,39 +192,33 @@ const Community = () => {
|
||||
/>
|
||||
</BlurredSection>
|
||||
<StyledJoinCommunity>
|
||||
<List
|
||||
className="list"
|
||||
itemLayout="horizontal"
|
||||
dataSource={communityLinks}
|
||||
renderItem={({ url, title, description, image, ariaLabel }) => (
|
||||
<List.Item className="item">
|
||||
<List.Item.Meta
|
||||
avatar={
|
||||
<a
|
||||
className="title"
|
||||
href={url}
|
||||
target="_blank"
|
||||
rel="noreferrer"
|
||||
aria-label={ariaLabel}
|
||||
>
|
||||
<img className="icon" src={`/img/community/${image}`} />
|
||||
</a>
|
||||
}
|
||||
title={
|
||||
<ul className="list">
|
||||
{communityLinks.map(
|
||||
({ url, title, description, image, ariaLabel }) => (
|
||||
<li className="item" key={title}>
|
||||
<a
|
||||
className="avatar"
|
||||
href={url}
|
||||
target="_blank"
|
||||
rel="noreferrer"
|
||||
aria-label={ariaLabel}
|
||||
>
|
||||
<img className="icon" src={`/img/community/${image}`} />
|
||||
</a>
|
||||
<div>
|
||||
<a href={url} target="_blank" rel="noreferrer">
|
||||
<p className="title" style={{ marginBottom: 0 }}>
|
||||
{title}
|
||||
</p>
|
||||
</a>
|
||||
}
|
||||
description={<p className="description">{description}</p>}
|
||||
aria-label="Community link"
|
||||
/>
|
||||
</List.Item>
|
||||
<p className="description">{description}</p>
|
||||
</div>
|
||||
</li>
|
||||
),
|
||||
)}
|
||||
/>
|
||||
</ul>
|
||||
</StyledJoinCommunity>
|
||||
<BlurredSection>
|
||||
<BlurredSection id="superset-community-calendar">
|
||||
<SectionHeader
|
||||
level="h2"
|
||||
title="Superset Community Calendar"
|
||||
|
||||
@@ -23,11 +23,31 @@ import { Card, Carousel, Flex } from 'antd';
|
||||
import styled from '@emotion/styled';
|
||||
import GitHubButton from 'react-github-btn';
|
||||
import { mq } from '../utils';
|
||||
import { Databases } from '../resources/data';
|
||||
import SectionHeader from '../components/SectionHeader';
|
||||
import databaseData from '../data/databases.json';
|
||||
import BlurredSection from '../components/BlurredSection';
|
||||
import DataSet from '../../../RESOURCES/INTHEWILD.yaml';
|
||||
import '../styles/main.less';
|
||||
import type { DatabaseData } from '../components/databases/types';
|
||||
import '../styles/main.css';
|
||||
|
||||
// Build database list from databases.json (databases with logos)
|
||||
// Deduplicate by logo filename to avoid showing the same logo twice
|
||||
const typedDatabaseData = databaseData as DatabaseData;
|
||||
const seenLogos = new Set<string>();
|
||||
const Databases = Object.entries(typedDatabaseData.databases)
|
||||
.filter(([, db]) => db.documentation?.logo && db.documentation?.homepage_url)
|
||||
.map(([name, db]) => ({
|
||||
title: name,
|
||||
href: db.documentation?.homepage_url,
|
||||
imgName: db.documentation?.logo,
|
||||
docPath: `/docs/databases/supported/${name.toLowerCase().replace(/[^a-z0-9]+/g, '-').replace(/^-|-$/g, '')}`,
|
||||
}))
|
||||
.sort((a, b) => a.title.localeCompare(b.title))
|
||||
.filter((db) => {
|
||||
if (seenLogos.has(db.imgName!)) return false;
|
||||
seenLogos.add(db.imgName!);
|
||||
return true;
|
||||
});
|
||||
|
||||
interface Organization {
|
||||
name: string;
|
||||
@@ -89,13 +109,10 @@ const StyledMain = styled('main')`
|
||||
|
||||
const StyledTitleContainer = styled('div')`
|
||||
position: relative;
|
||||
padding: 130px 20px 0;
|
||||
margin-bottom: 160px;
|
||||
padding: 130px 20px 20px;
|
||||
margin-bottom: 0;
|
||||
background-image: url('/img/grid-background.jpg');
|
||||
background-size: cover;
|
||||
${mq[1]} {
|
||||
margin-bottom: 100px;
|
||||
}
|
||||
.info-container {
|
||||
position: relative;
|
||||
z-index: 4;
|
||||
@@ -440,22 +457,22 @@ const StyledIntegrations = styled('div')`
|
||||
padding: 0 20px;
|
||||
.database-grid {
|
||||
display: grid;
|
||||
grid-template-columns: repeat(5, minmax(0, 1fr));
|
||||
gap: 14px;
|
||||
max-width: 1160px;
|
||||
grid-template-columns: repeat(8, minmax(0, 1fr));
|
||||
gap: 10px;
|
||||
max-width: 1200px;
|
||||
margin: 25px auto 0;
|
||||
${mq[1]} {
|
||||
grid-template-columns: repeat(4, minmax(0, 1fr));
|
||||
grid-template-columns: repeat(5, minmax(0, 1fr));
|
||||
}
|
||||
${mq[0]} {
|
||||
grid-template-columns: repeat(1, minmax(0, 1fr));
|
||||
grid-template-columns: repeat(2, minmax(0, 1fr));
|
||||
}
|
||||
& > .item {
|
||||
border: 1px solid var(--ifm-border-color);
|
||||
border-radius: 10px;
|
||||
border-radius: 8px;
|
||||
overflow: hidden;
|
||||
height: 120px;
|
||||
padding: 25px;
|
||||
height: 80px;
|
||||
padding: 14px;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
@@ -759,30 +776,26 @@ export default function Home(): JSX.Element {
|
||||
</BlurredSection>
|
||||
<BlurredSection>
|
||||
<StyledIntegrations>
|
||||
<SectionHeader level="h2" title="Supported Databases" />
|
||||
<SectionHeader level="h2" title="Supported Databases" link="/docs/databases" />
|
||||
<div className="database-grid">
|
||||
{Databases.map(({ title, href, imgName }) => (
|
||||
{Databases.map(({ title, imgName, docPath }) => (
|
||||
<div className="item" key={title}>
|
||||
{href ? (
|
||||
<a href={href} aria-label={`Go to ${title} page`}>
|
||||
<img src={`/img/databases/${imgName}`} title={title} />
|
||||
</a>
|
||||
) : (
|
||||
<a href={docPath} aria-label={`${title} documentation`}>
|
||||
<img src={`/img/databases/${imgName}`} title={title} />
|
||||
)}
|
||||
</a>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
<span className="database-sub">
|
||||
...and many other{' '}
|
||||
<a href="/docs/configuration/databases#installing-database-drivers">
|
||||
<a href="/docs/databases#installing-database-drivers">
|
||||
compatible databases
|
||||
</a>
|
||||
</span>
|
||||
</StyledIntegrations>
|
||||
</BlurredSection>
|
||||
{/* Only show carousel when we have enough logos (>10) for a good display */}
|
||||
{companiesWithLogos.length > 10 && (
|
||||
{companiesWithLogos.length > 7 && (
|
||||
<BlurredSection>
|
||||
<div style={{ padding: '0 20px' }}>
|
||||
<SectionHeader
|
||||
|
||||
118
docs/src/shims/null-module.js
Normal file
@@ -0,0 +1,118 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
// Null module shim for packages not available in the docs build.
|
||||
// These are transitive dependencies of superset-frontend components that exist
|
||||
// in the barrel file but are never rendered on the docs site.
|
||||
// webpack needs these to resolve at build time even though the code paths
|
||||
// that use them are never executed at runtime.
|
||||
//
|
||||
// This shim uses a recursive Proxy to handle nested property access chains:
|
||||
// import ace from 'ace-builds'; ace.config.set(...) → works (returns proxy)
|
||||
// import { useResizeDetector } from 'react-resize-detector' → returns noop hook
|
||||
// import ReactAce from 'react-ace' → returns NullComponent
|
||||
|
||||
const NullComponent = () => null;
|
||||
|
||||
// For hooks that return objects/arrays
|
||||
const useNoop = () => ({});
|
||||
|
||||
// Mock for useResizeDetector - returns { ref, width, height } where ref.current exists
|
||||
const useResizeDetectorMock = () => ({
|
||||
ref: { current: null },
|
||||
width: 0,
|
||||
height: 0,
|
||||
});
|
||||
|
||||
/**
|
||||
* Creates a recursive proxy that handles any depth of property access.
|
||||
* This allows patterns like ace.config.set() or ace.config.setModuleUrl() to work.
|
||||
*
|
||||
* The proxy is both callable (returns undefined) and accessible (returns another proxy).
|
||||
*/
|
||||
function createDeepProxy() {
|
||||
const handler = {
|
||||
// Handle property access - return another proxy for chaining
|
||||
get(target, prop) {
|
||||
// Standard module properties
|
||||
if (prop === 'default') return createDeepProxy();
|
||||
if (prop === '__esModule') return true;
|
||||
|
||||
// Symbol properties (used by JS internals)
|
||||
if (typeof prop === 'symbol') {
|
||||
if (prop === Symbol.toPrimitive) return () => '';
|
||||
if (prop === Symbol.toStringTag) return 'NullModule';
|
||||
if (prop === Symbol.iterator) return undefined;
|
||||
return undefined;
|
||||
}
|
||||
|
||||
// React-specific properties
|
||||
if (prop === '$$typeof') return undefined;
|
||||
if (prop === 'propTypes') return undefined;
|
||||
if (prop === 'displayName') return 'NullComponent';
|
||||
|
||||
// Specific hook mocks for known hooks that need proper return values
|
||||
if (prop === 'useResizeDetector') {
|
||||
return useResizeDetectorMock;
|
||||
}
|
||||
|
||||
// Common hook names return useNoop for better compatibility
|
||||
if (typeof prop === 'string' && prop.startsWith('use')) {
|
||||
return useNoop;
|
||||
}
|
||||
|
||||
// Return another proxy to allow further chaining (ace.config.set)
|
||||
return createDeepProxy();
|
||||
},
|
||||
|
||||
// Handle function calls - return undefined (safe default)
|
||||
apply() {
|
||||
return undefined;
|
||||
},
|
||||
|
||||
// Handle new ClassName() - return an empty object
|
||||
construct() {
|
||||
return {};
|
||||
},
|
||||
};
|
||||
|
||||
// Create a proxy over a function so it's both callable and has properties
|
||||
return new Proxy(function NullModule() {}, handler);
|
||||
}
|
||||
|
||||
// Create the main module export as a deep proxy
|
||||
const nullModule = createDeepProxy();
|
||||
|
||||
// Support both CommonJS and ES module patterns
|
||||
module.exports = nullModule;
|
||||
module.exports.default = createDeepProxy();
|
||||
module.exports.__esModule = true;
|
||||
|
||||
// Named exports for common patterns (webpack may inline these)
|
||||
module.exports.useResizeDetector = useResizeDetectorMock;
|
||||
module.exports.withResizeDetector = createDeepProxy();
|
||||
module.exports.Resizable = NullComponent;
|
||||
module.exports.ResizableBox = NullComponent;
|
||||
module.exports.FixedSizeList = NullComponent;
|
||||
module.exports.VariableSizeList = NullComponent;
|
||||
|
||||
// ace-builds specific exports that CodeEditor uses
|
||||
module.exports.config = createDeepProxy();
|
||||
module.exports.require = createDeepProxy();
|
||||
module.exports.edit = createDeepProxy();
|
||||
54
docs/src/shims/react-table.js
vendored
Normal file
@@ -0,0 +1,54 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
// Shim for react-table to handle CommonJS to ES module interop
|
||||
// react-table v7 is CommonJS, but Superset components import it with ES module syntax
|
||||
// Use relative path to avoid circular dependency since webpack aliases 'react-table' to this file
|
||||
// eslint-disable-next-line @typescript-eslint/no-require-imports -- CJS interop shim for react-table v7
|
||||
const reactTable = require('../../node_modules/react-table');
|
||||
|
||||
// Re-export all named exports
|
||||
export const {
|
||||
useTable,
|
||||
useFilters,
|
||||
useSortBy,
|
||||
usePagination,
|
||||
useGlobalFilter,
|
||||
useRowSelect,
|
||||
useRowState,
|
||||
useColumnOrder,
|
||||
useExpanded,
|
||||
useGroupBy,
|
||||
useResizeColumns,
|
||||
useBlockLayout,
|
||||
useAbsoluteLayout,
|
||||
useFlexLayout,
|
||||
actions,
|
||||
defaultColumn,
|
||||
makePropGetter,
|
||||
reduceHooks,
|
||||
loopHooks,
|
||||
ensurePluginOrder,
|
||||
functionalUpdate,
|
||||
useGetLatest,
|
||||
safeUseLayoutEffect,
|
||||
} = reactTable;
|
||||
|
||||
// Default export
|
||||
export default reactTable;
|
||||
@@ -123,6 +123,11 @@ ul.dropdown__menu svg {
|
||||
--ifm-code-padding-horizontal: 5px;
|
||||
}
|
||||
|
||||
/* Database logo images in intro/README */
|
||||
.database-logo {
|
||||
object-fit: contain;
|
||||
}
|
||||
|
||||
[data-theme='dark'] {
|
||||
--ifm-color-primary: #25c2a0;
|
||||
--ifm-color-primary-dark: #21af90;
|
||||
@@ -182,3 +187,270 @@ ul.dropdown__menu svg {
|
||||
[data-theme='dark'] .ant-collapse-header {
|
||||
color: var(--ifm-font-base-color);
|
||||
}
|
||||
|
||||
/* Hide the non-functional "Send API Request" button and Response block in API docs */
|
||||
/* The interactive API testing doesn't work due to CORS restrictions */
|
||||
.openapi-explorer__request-btn {
|
||||
display: none !important;
|
||||
}
|
||||
|
||||
.openapi-explorer__response-container {
|
||||
display: none !important;
|
||||
}
|
||||
|
||||
/* API Method Badge Colors - Swagger-style color coding */
|
||||
/* These override Infima badge classes used by docusaurus-openapi-docs */
|
||||
|
||||
/* GET - Blue (badge--primary) */
|
||||
.openapi__method-endpoint .badge--primary {
|
||||
background-color: #61affe !important;
|
||||
border-color: #61affe !important;
|
||||
}
|
||||
|
||||
/* POST - Green (badge--success) */
|
||||
.openapi__method-endpoint .badge--success {
|
||||
background-color: #49cc90 !important;
|
||||
border-color: #49cc90 !important;
|
||||
}
|
||||
|
||||
/* PUT - Info/Cyan -> Orange (badge--info) */
|
||||
.openapi__method-endpoint .badge--info {
|
||||
background-color: #fca130 !important;
|
||||
border-color: #fca130 !important;
|
||||
}
|
||||
|
||||
/* PATCH - Warning/Yellow -> Teal (badge--warning) */
|
||||
.openapi__method-endpoint .badge--warning {
|
||||
background-color: #50e3c2 !important;
|
||||
border-color: #50e3c2 !important;
|
||||
color: #1b1b1d !important;
|
||||
}
|
||||
|
||||
/* DELETE - Red (badge--danger) */
|
||||
.openapi__method-endpoint .badge--danger {
|
||||
background-color: #f93e3e !important;
|
||||
border-color: #f93e3e !important;
|
||||
}
|
||||
|
||||
/* Sidebar method badges - colored dots before endpoint names */
|
||||
/* The method classes (get, post, etc.) are on the <li> (menu__list-item),
|
||||
so we target the <a> (menu__link) inside using descendant selector */
|
||||
.menu__list-item.api-method > .menu__link::before {
|
||||
content: '';
|
||||
display: inline-block;
|
||||
width: 8px;
|
||||
height: 8px;
|
||||
border-radius: 50%;
|
||||
margin-right: 8px;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
.menu__list-item.get.api-method > .menu__link::before {
|
||||
background-color: #61affe;
|
||||
}
|
||||
|
||||
.menu__list-item.post.api-method > .menu__link::before {
|
||||
background-color: #49cc90;
|
||||
}
|
||||
|
||||
.menu__list-item.put.api-method > .menu__link::before {
|
||||
background-color: #fca130;
|
||||
}
|
||||
|
||||
.menu__list-item.patch.api-method > .menu__link::before {
|
||||
background-color: #50e3c2;
|
||||
}
|
||||
|
||||
.menu__list-item.delete.api-method > .menu__link::before {
|
||||
background-color: #f93e3e;
|
||||
}
|
||||
|
||||
/* ============================================
|
||||
Component Example Isolation
|
||||
Prevents Docusaurus/Infima styles from bleeding into Superset components
|
||||
============================================ */
|
||||
|
||||
/* Reset link styles inside component examples */
|
||||
.storybook-example a {
|
||||
color: inherit;
|
||||
text-decoration: none;
|
||||
font-weight: inherit;
|
||||
line-height: inherit;
|
||||
vertical-align: inherit;
|
||||
}
|
||||
|
||||
.storybook-example a:hover {
|
||||
color: inherit;
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
/* Reset list styles */
|
||||
.storybook-example ul,
|
||||
.storybook-example ol {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
list-style: none;
|
||||
}
|
||||
|
||||
/* Override Infima's .markdown li + li margin */
|
||||
.storybook-example li + li,
|
||||
.markdown .storybook-example li + li {
|
||||
margin-top: 0;
|
||||
}
|
||||
|
||||
/* Reset heading styles */
|
||||
.storybook-example h1,
|
||||
.storybook-example h2,
|
||||
.storybook-example h3,
|
||||
.storybook-example h4,
|
||||
.storybook-example h5,
|
||||
.storybook-example h6 {
|
||||
margin: 0;
|
||||
font-size: inherit;
|
||||
font-weight: inherit;
|
||||
}
|
||||
|
||||
/* Reset paragraph margins */
|
||||
.storybook-example p {
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
/* Reset table margins - Infima applies margin-bottom via --ifm-spacing-vertical */
|
||||
.storybook-example table {
|
||||
margin: 0;
|
||||
display: table;
|
||||
}
|
||||
|
||||
/* Ensure Ant Design components render correctly */
|
||||
.storybook-example .ant-breadcrumb {
|
||||
line-height: 1.5715;
|
||||
}
|
||||
|
||||
.storybook-example .ant-breadcrumb a {
|
||||
color: rgba(0, 0, 0, 0.45);
|
||||
}
|
||||
|
||||
.storybook-example .ant-breadcrumb a:hover {
|
||||
color: rgba(0, 0, 0, 0.85);
|
||||
}
|
||||
|
||||
/* ============================================
|
||||
Ant Design Popup/Portal Isolation
|
||||
These components render outside .storybook-example via portals
|
||||
============================================ */
|
||||
|
||||
/* DatePicker, TimePicker dropdown panels - reset Infima table styles
|
||||
Using doubled selectors for higher specificity than Infima's defaults */
|
||||
.ant-picker-dropdown.ant-picker-dropdown table,
|
||||
.ant-picker-dropdown.ant-picker-dropdown thead,
|
||||
.ant-picker-dropdown.ant-picker-dropdown tbody,
|
||||
.ant-picker-dropdown.ant-picker-dropdown tr,
|
||||
.ant-picker-dropdown.ant-picker-dropdown th,
|
||||
.ant-picker-dropdown.ant-picker-dropdown td {
|
||||
border: none;
|
||||
background: none;
|
||||
background-color: transparent;
|
||||
}
|
||||
|
||||
.ant-picker-dropdown.ant-picker-dropdown table {
|
||||
border-collapse: separate;
|
||||
border-spacing: 0;
|
||||
width: 100%;
|
||||
display: table;
|
||||
}
|
||||
|
||||
/* Override Infima's zebra striping with higher specificity */
|
||||
.ant-picker-dropdown.ant-picker-dropdown tr:nth-child(2n),
|
||||
.ant-picker-dropdown.ant-picker-dropdown tbody tr:nth-child(2n) {
|
||||
background: none;
|
||||
background-color: transparent;
|
||||
}
|
||||
|
||||
.ant-picker-dropdown.ant-picker-dropdown th,
|
||||
.ant-picker-dropdown.ant-picker-dropdown td {
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
/* Select, Dropdown, Popover portals */
|
||||
.ant-select-dropdown.ant-select-dropdown table,
|
||||
.ant-select-dropdown.ant-select-dropdown thead,
|
||||
.ant-select-dropdown.ant-select-dropdown tbody,
|
||||
.ant-select-dropdown.ant-select-dropdown tr,
|
||||
.ant-select-dropdown.ant-select-dropdown th,
|
||||
.ant-select-dropdown.ant-select-dropdown td,
|
||||
.ant-dropdown.ant-dropdown table,
|
||||
.ant-dropdown.ant-dropdown thead,
|
||||
.ant-dropdown.ant-dropdown tbody,
|
||||
.ant-dropdown.ant-dropdown tr,
|
||||
.ant-dropdown.ant-dropdown th,
|
||||
.ant-dropdown.ant-dropdown td,
|
||||
.ant-popover.ant-popover table,
|
||||
.ant-popover.ant-popover thead,
|
||||
.ant-popover.ant-popover tbody,
|
||||
.ant-popover.ant-popover tr,
|
||||
.ant-popover.ant-popover th,
|
||||
.ant-popover.ant-popover td {
|
||||
border: none;
|
||||
background: none;
|
||||
background-color: transparent;
|
||||
}
|
||||
|
||||
.ant-select-dropdown.ant-select-dropdown tr:nth-child(2n),
|
||||
.ant-dropdown.ant-dropdown tr:nth-child(2n),
|
||||
.ant-popover.ant-popover tr:nth-child(2n) {
|
||||
background: none;
|
||||
background-color: transparent;
|
||||
}
|
||||
|
||||
/* Modal portals */
|
||||
.ant-modal.ant-modal table,
|
||||
.ant-modal.ant-modal thead,
|
||||
.ant-modal.ant-modal tbody,
|
||||
.ant-modal.ant-modal tr,
|
||||
.ant-modal.ant-modal th,
|
||||
.ant-modal.ant-modal td {
|
||||
border: none;
|
||||
background: none;
|
||||
background-color: transparent;
|
||||
}
|
||||
|
||||
.ant-modal.ant-modal tr:nth-child(2n) {
|
||||
background: none;
|
||||
background-color: transparent;
|
||||
}
|
||||
|
||||
/* ============================================
|
||||
Live Code Editor Height Limits
|
||||
Prevents tall code blocks from dominating the page
|
||||
============================================ */
|
||||
|
||||
/* Limit the code editor height and make it scrollable */
|
||||
/* Target multiple possible class names used by Docusaurus/react-live */
|
||||
.playgroundEditor,
|
||||
[class*="playgroundEditor"],
|
||||
.live-editor,
|
||||
[class*="liveEditor"] {
|
||||
max-height: 350px !important;
|
||||
overflow: auto !important;
|
||||
}
|
||||
|
||||
/* The actual textarea/code area inside the editor */
|
||||
.playgroundEditor textarea,
|
||||
.playgroundEditor pre,
|
||||
[class*="playgroundEditor"] textarea,
|
||||
[class*="playgroundEditor"] pre {
|
||||
max-height: 350px !important;
|
||||
overflow: auto !important;
|
||||
}
|
||||
|
||||
/* Also limit the preview area for consistency */
|
||||
.playgroundPreview,
|
||||
[class*="playgroundPreview"] {
|
||||
max-height: 400px;
|
||||
overflow: auto;
|
||||
}
|
||||
|
||||
/* Hide sidebar items with sidebar_class_name: hidden in frontmatter */
|
||||
.menu__list-item.hidden {
|
||||
display: none;
|
||||
}
|
||||
|
||||
@@ -16,7 +16,6 @@
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
@import 'antd-theme.less';
|
||||
|
||||
body {
|
||||
font-family: var(--ifm-font-family-base);
|
||||
@@ -57,13 +56,7 @@ html[data-theme='dark'] .docusaurus-highlight-code-line {
|
||||
font-weight: 400;
|
||||
}
|
||||
|
||||
/* Hacks to disable Swagger UI's "try it out" interactive mode */
|
||||
.try-out,
|
||||
.auth-wrapper,
|
||||
.information-container {
|
||||
display: none !important;
|
||||
}
|
||||
|
||||
/* Legacy Swagger UI styles (for versioned docs that still use swagger-ui-react) */
|
||||
.swagger-ui table td,
|
||||
.swagger-ui table th,
|
||||
.swagger-ui table tr {
|
||||
@@ -87,26 +80,29 @@ a > span > svg {
|
||||
text-align: center;
|
||||
position: relative;
|
||||
z-index: 2;
|
||||
&::before {
|
||||
border-radius: inherit;
|
||||
background: linear-gradient(180deg, #11b0d8 0%, #116f86 100%);
|
||||
content: '';
|
||||
display: block;
|
||||
height: 100%;
|
||||
position: absolute;
|
||||
top: 0;
|
||||
left: 0;
|
||||
opacity: 0;
|
||||
width: 100%;
|
||||
z-index: -1;
|
||||
transition: all 0.3s;
|
||||
}
|
||||
&:hover {
|
||||
color: #ffffff;
|
||||
&::before {
|
||||
opacity: 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.default-button-theme::before {
|
||||
border-radius: inherit;
|
||||
background: linear-gradient(180deg, #11b0d8 0%, #116f86 100%);
|
||||
content: '';
|
||||
display: block;
|
||||
height: 100%;
|
||||
position: absolute;
|
||||
top: 0;
|
||||
left: 0;
|
||||
opacity: 0;
|
||||
width: 100%;
|
||||
z-index: -1;
|
||||
transition: all 0.3s;
|
||||
}
|
||||
|
||||
.default-button-theme:hover {
|
||||
color: #ffffff;
|
||||
}
|
||||
|
||||
.default-button-theme:hover::before {
|
||||
opacity: 1;
|
||||
}
|
||||
|
||||
/* Navbar */
|
||||
@@ -115,32 +111,32 @@ a > span > svg {
|
||||
font-size: 14px;
|
||||
font-weight: 400;
|
||||
transition: all 0.5s;
|
||||
}
|
||||
|
||||
.get-started-button {
|
||||
border-radius: 10px;
|
||||
font-size: 18px;
|
||||
font-weight: bold;
|
||||
width: 142px;
|
||||
padding: 7px 0;
|
||||
margin-right: 20px;
|
||||
}
|
||||
.navbar .get-started-button {
|
||||
border-radius: 10px;
|
||||
font-size: 18px;
|
||||
font-weight: bold;
|
||||
width: 142px;
|
||||
padding: 7px 0;
|
||||
margin-right: 20px;
|
||||
}
|
||||
|
||||
.github-button {
|
||||
background-image: url('/img/github.png');
|
||||
background-size: contain;
|
||||
width: 30px;
|
||||
height: 30px;
|
||||
margin-right: 10px;
|
||||
}
|
||||
.navbar .github-button {
|
||||
background-image: url('/img/github.png');
|
||||
background-size: contain;
|
||||
width: 30px;
|
||||
height: 30px;
|
||||
margin-right: 10px;
|
||||
}
|
||||
|
||||
.navbar--dark {
|
||||
background-color: transparent;
|
||||
border-bottom: 1px solid rgba(24, 115, 132, 0.4);
|
||||
}
|
||||
|
||||
.github-button {
|
||||
background-image: url('/img/github-dark.png');
|
||||
}
|
||||
.navbar--dark .github-button {
|
||||
background-image: url('/img/github-dark.png');
|
||||
}
|
||||
|
||||
.navbar__logo {
|
||||
@@ -159,11 +155,11 @@ a > span > svg {
|
||||
.navbar {
|
||||
padding-right: 8px;
|
||||
padding-left: 8px;
|
||||
}
|
||||
|
||||
.get-started-button,
|
||||
.github-button {
|
||||
display: none;
|
||||
}
|
||||
.navbar .get-started-button,
|
||||
.navbar .github-button {
|
||||
display: none;
|
||||
}
|
||||
|
||||
.navbar__items {
|
||||
@@ -192,20 +188,20 @@ a > span > svg {
|
||||
--docsearch-searchbox-background: var(--ifm-navbar-background-color);
|
||||
border: 1px solid #187384;
|
||||
border-radius: 10px;
|
||||
}
|
||||
|
||||
&.DocSearch-Button {
|
||||
width: 225px;
|
||||
}
|
||||
.navbar .DocSearch.DocSearch-Button {
|
||||
width: 225px;
|
||||
}
|
||||
|
||||
.DocSearch-Search-Icon {
|
||||
width: 16px;
|
||||
height: 16px;
|
||||
}
|
||||
.navbar .DocSearch .DocSearch-Search-Icon {
|
||||
width: 16px;
|
||||
height: 16px;
|
||||
}
|
||||
|
||||
.DocSearch-Button-Key,
|
||||
.DocSearch-Button-Placeholder {
|
||||
display: none;
|
||||
}
|
||||
.navbar .DocSearch .DocSearch-Button-Key,
|
||||
.navbar .DocSearch .DocSearch-Button-Placeholder {
|
||||
display: none;
|
||||
}
|
||||
|
||||
.navbar--dark .DocSearch {
|
||||
@@ -226,18 +222,37 @@ a > span > svg {
|
||||
font-size: 15px;
|
||||
}
|
||||
|
||||
.footer__applitools {
|
||||
.footer__ci-services {
|
||||
background-color: #0d3e49;
|
||||
color: #e1e1e1;
|
||||
position: absolute;
|
||||
top: 0;
|
||||
left: 0;
|
||||
width: 100%;
|
||||
padding: 16px 0;
|
||||
padding: 12px 0;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
gap: 16px;
|
||||
}
|
||||
|
||||
img {
|
||||
height: 34px;
|
||||
}
|
||||
.footer__ci-services span {
|
||||
font-size: 13px;
|
||||
opacity: 0.85;
|
||||
}
|
||||
|
||||
.footer__ci-services a {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
transition: opacity 0.2s;
|
||||
}
|
||||
|
||||
.footer__ci-services a:hover {
|
||||
opacity: 0.8;
|
||||
}
|
||||
|
||||
.footer__ci-services img {
|
||||
height: 28px;
|
||||
}
|
||||
|
||||
.footer__divider {
|
||||
@@ -252,7 +267,16 @@ a > span > svg {
|
||||
}
|
||||
|
||||
@media only screen and (max-width: 996px) {
|
||||
.footer__applitools img {
|
||||
height: 28px;
|
||||
.footer__ci-services {
|
||||
gap: 12px;
|
||||
padding: 10px 16px;
|
||||
}
|
||||
|
||||
.footer__ci-services span {
|
||||
font-size: 12px;
|
||||
}
|
||||
|
||||
.footer__ci-services img {
|
||||
height: 22px;
|
||||
}
|
||||
}
|
||||
10
docs/src/theme.d.ts
vendored
@@ -30,3 +30,13 @@ declare module '@theme/Layout' {
|
||||
|
||||
export default function Layout(props: Props): ReactNode;
|
||||
}
|
||||
|
||||
declare module '@theme/Playground/Header' {
|
||||
import type { ReactNode } from 'react';
|
||||
|
||||
export interface Props {
|
||||
readonly children?: ReactNode;
|
||||
}
|
||||
|
||||
export default function PlaygroundHeader(props: Props): ReactNode;
|
||||
}
|
||||
|
||||
107
docs/src/theme/Playground/Preview/index.tsx
Normal file
@@ -0,0 +1,107 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import React, { type ReactNode } from 'react';
|
||||
import { LiveError, LivePreview } from 'react-live';
|
||||
import BrowserOnly from '@docusaurus/BrowserOnly';
|
||||
import { ErrorBoundaryErrorMessageFallback } from '@docusaurus/theme-common';
|
||||
import ErrorBoundary from '@docusaurus/ErrorBoundary';
|
||||
import Translate from '@docusaurus/Translate';
|
||||
import PlaygroundHeader from '@theme/Playground/Header';
|
||||
|
||||
import styles from './styles.module.css';
|
||||
|
||||
// Get the theme wrapper for Superset components
|
||||
function getThemeWrapper() {
|
||||
if (typeof window === 'undefined') {
|
||||
return ({ children }: { children: React.ReactNode }) => <>{children}</>;
|
||||
}
|
||||
|
||||
try {
|
||||
// eslint-disable-next-line @typescript-eslint/no-require-imports
|
||||
const { themeObject } = require('@apache-superset/core/ui');
|
||||
// eslint-disable-next-line @typescript-eslint/no-require-imports
|
||||
const { App } = require('antd');
|
||||
|
||||
if (!themeObject?.SupersetThemeProvider) {
|
||||
return ({ children }: { children: React.ReactNode }) => <>{children}</>;
|
||||
}
|
||||
|
||||
return ({ children }: { children: React.ReactNode }) => (
|
||||
<themeObject.SupersetThemeProvider>
|
||||
<App>{children}</App>
|
||||
</themeObject.SupersetThemeProvider>
|
||||
);
|
||||
} catch (e) {
|
||||
console.error('[PlaygroundPreview] Failed to load theme provider:', e);
|
||||
return ({ children }: { children: React.ReactNode }) => <>{children}</>;
|
||||
}
|
||||
}
|
||||
|
||||
function Loader() {
|
||||
return <div>Loading...</div>;
|
||||
}
|
||||
|
||||
function ThemedLivePreview(): ReactNode {
|
||||
const ThemeWrapper = getThemeWrapper();
|
||||
return (
|
||||
<ThemeWrapper>
|
||||
<LivePreview />
|
||||
</ThemeWrapper>
|
||||
);
|
||||
}
|
||||
|
||||
function PlaygroundLivePreview(): ReactNode {
|
||||
// No SSR for the live preview
|
||||
// See https://github.com/facebook/docusaurus/issues/5747
|
||||
return (
|
||||
<BrowserOnly fallback={<Loader />}>
|
||||
{() => (
|
||||
<>
|
||||
<ErrorBoundary
|
||||
fallback={(params) => (
|
||||
<ErrorBoundaryErrorMessageFallback {...params} />
|
||||
)}
|
||||
>
|
||||
<ThemedLivePreview />
|
||||
</ErrorBoundary>
|
||||
<LiveError />
|
||||
</>
|
||||
)}
|
||||
</BrowserOnly>
|
||||
);
|
||||
}
|
||||
|
||||
export default function PlaygroundPreview(): ReactNode {
|
||||
return (
|
||||
<>
|
||||
<PlaygroundHeader>
|
||||
<Translate
|
||||
id="theme.Playground.result"
|
||||
description="The result label of the live codeblocks"
|
||||
>
|
||||
Result
|
||||
</Translate>
|
||||
</PlaygroundHeader>
|
||||
<div className={styles.playgroundPreview}>
|
||||
<PlaygroundLivePreview />
|
||||
</div>
|
||||
</>
|
||||
);
|
||||
}
|
||||
23
docs/src/theme/Playground/Preview/styles.module.css
Normal file
@@ -0,0 +1,23 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
.playgroundPreview {
|
||||
padding: 1rem;
|
||||
background-color: var(--ifm-pre-background);
|
||||
}
|
||||
@@ -18,36 +18,49 @@
|
||||
*/
|
||||
|
||||
import React from 'react';
|
||||
import { Button, Card, Input, Space, Tag, Tooltip } from 'antd';
|
||||
|
||||
// Import extension components from @apache-superset/core/ui
|
||||
// This matches the established pattern used throughout the Superset codebase
|
||||
// Resolved via webpack alias to superset-frontend/packages/superset-core/src/ui/components
|
||||
import { Alert } from '@apache-superset/core/ui';
|
||||
// Browser-only check for SSR safety
|
||||
const isBrowser = typeof window !== 'undefined';
|
||||
|
||||
/**
|
||||
* ReactLiveScope provides the scope for live code blocks.
|
||||
* Any component added here will be available in ```tsx live blocks.
|
||||
*
|
||||
* To add more components:
|
||||
* 1. Import the component from @apache-superset/core above
|
||||
* 2. Add it to the scope object below
|
||||
* Components are conditionally loaded only in the browser to avoid
|
||||
* SSG issues with Emotion CSS-in-JS jsx runtime.
|
||||
*
|
||||
* Components are available by name, e.g.:
|
||||
* <Button>Click me</Button>
|
||||
* <Avatar size="large" />
|
||||
* <Badge count={5} />
|
||||
*/
|
||||
const ReactLiveScope = {
|
||||
|
||||
// Base scope with React (always available)
|
||||
const ReactLiveScope: Record<string, unknown> = {
|
||||
// React core
|
||||
React,
|
||||
...React,
|
||||
|
||||
// Extension components from @apache-superset/core
|
||||
Alert,
|
||||
|
||||
// Common Ant Design components (for demos)
|
||||
Button,
|
||||
Card,
|
||||
Input,
|
||||
Space,
|
||||
Tag,
|
||||
Tooltip,
|
||||
};
|
||||
|
||||
// Only load Superset components in browser context
|
||||
// This prevents SSG errors from Emotion CSS-in-JS
|
||||
if (isBrowser) {
|
||||
try {
|
||||
// Dynamic require for browser-only execution
|
||||
// eslint-disable-next-line @typescript-eslint/no-require-imports
|
||||
const SupersetComponents = require('@superset/components');
|
||||
// eslint-disable-next-line @typescript-eslint/no-require-imports
|
||||
const { Alert } = require('@apache-superset/core/ui');
|
||||
|
||||
console.log('[ReactLiveScope] SupersetComponents keys:', Object.keys(SupersetComponents || {}).slice(0, 10));
|
||||
console.log('[ReactLiveScope] Has Button?', 'Button' in (SupersetComponents || {}));
|
||||
|
||||
Object.assign(ReactLiveScope, SupersetComponents, { Alert });
|
||||
|
||||
console.log('[ReactLiveScope] Final scope keys:', Object.keys(ReactLiveScope).slice(0, 20));
|
||||
} catch (e) {
|
||||
console.error('[ReactLiveScope] Failed to load Superset components:', e);
|
||||
}
|
||||
}
|
||||
|
||||
export default ReactLiveScope;
|
||||
|
||||
@@ -74,6 +74,14 @@ export default function Root({ children }) {
|
||||
window._paq.push(['trackSiteSearch', keyword, category, resultsCount]);
|
||||
};
|
||||
|
||||
// Helper to track page views
|
||||
const trackPageView = (url, title) => {
|
||||
if (devMode) {
|
||||
console.log('Matomo trackPageView:', { url, title });
|
||||
}
|
||||
window._paq.push(['trackPageView']);
|
||||
};
|
||||
|
||||
|
||||
// Track external link clicks using domain as category (vendor-agnostic)
|
||||
const handleLinkClick = (event) => {
|
||||
@@ -221,7 +229,6 @@ export default function Root({ children }) {
|
||||
trackDocsVersion();
|
||||
|
||||
if (devMode) {
|
||||
console.log('Tracking page view:', currentPath, currentTitle);
|
||||
window._paq.push(['setDomains', ['superset.apache.org']]);
|
||||
window._paq.push([
|
||||
'setCustomUrl',
|
||||
@@ -233,7 +240,7 @@ export default function Root({ children }) {
|
||||
|
||||
window._paq.push(['setReferrerUrl', window.location.href]);
|
||||
window._paq.push(['setDocumentTitle', currentTitle]);
|
||||
window._paq.push(['trackPageView']);
|
||||
trackPageView(currentPath, currentTitle);
|
||||
|
||||
// Check for 404 after page renders
|
||||
setTimeout(track404, 500);
|
||||
|
||||
@@ -18,6 +18,7 @@
|
||||
*/
|
||||
|
||||
import path from 'path';
|
||||
import webpack from 'webpack';
|
||||
import type { Plugin } from '@docusaurus/types';
|
||||
|
||||
export default function webpackExtendPlugin(): Plugin<void> {
|
||||
@@ -26,14 +27,86 @@ export default function webpackExtendPlugin(): Plugin<void> {
|
||||
configureWebpack(config) {
|
||||
const isDev = process.env.NODE_ENV === 'development';
|
||||
|
||||
// Use NormalModuleReplacementPlugin to forcefully replace react-table
|
||||
// This is necessary because regular aliases don't work for modules in nested node_modules
|
||||
const reactTableShim = path.resolve(__dirname, './shims/react-table.js');
|
||||
config.plugins?.push(
|
||||
new webpack.NormalModuleReplacementPlugin(
|
||||
/^react-table$/,
|
||||
reactTableShim,
|
||||
),
|
||||
);
|
||||
|
||||
// Stub out heavy third-party packages that are transitive dependencies of
|
||||
// superset-frontend components. The barrel file (components/index.ts)
|
||||
// re-exports all components, so webpack must resolve their imports even
|
||||
// though these components are never rendered on the docs site.
|
||||
const nullModuleShim = path.resolve(__dirname, './shims/null-module.js');
|
||||
const heavyDepsPatterns = [
|
||||
/^brace(\/|$)/, // ACE editor modes/themes
|
||||
/^react-ace(\/|$)/,
|
||||
/^ace-builds(\/|$)/,
|
||||
/^react-js-cron(\/|$)/, // Cron picker + CSS
|
||||
// react-resize-detector: NOT shimmed — DropdownContainer needs it at runtime
|
||||
// for overflow detection. Resolves from superset-frontend/node_modules.
|
||||
/^react-window(\/|$)/,
|
||||
/^re-resizable(\/|$)/,
|
||||
/^react-draggable(\/|$)/,
|
||||
/^ag-grid-react(\/|$)/,
|
||||
/^ag-grid-community(\/|$)/,
|
||||
];
|
||||
heavyDepsPatterns.forEach(pattern => {
|
||||
config.plugins?.push(
|
||||
new webpack.NormalModuleReplacementPlugin(pattern, nullModuleShim),
|
||||
);
|
||||
});
|
||||
|
||||
// Add YAML loader rule directly to existing rules
|
||||
config.module?.rules?.push({
|
||||
test: /\.ya?ml$/,
|
||||
use: 'js-yaml-loader',
|
||||
});
|
||||
|
||||
// Add swc-loader rule for superset-frontend files
|
||||
// SWC is a Rust-based transpiler that's significantly faster than babel
|
||||
const supersetFrontendPath = path.resolve(
|
||||
__dirname,
|
||||
'../../superset-frontend',
|
||||
);
|
||||
config.module?.rules?.push({
|
||||
test: /\.(tsx?|jsx?)$/,
|
||||
include: supersetFrontendPath,
|
||||
exclude: /node_modules/,
|
||||
use: {
|
||||
loader: 'swc-loader',
|
||||
options: {
|
||||
// Ignore superset-frontend/.swcrc which references plugins not
|
||||
// installed in the docs workspace (e.g. @swc/plugin-emotion)
|
||||
swcrc: false,
|
||||
jsc: {
|
||||
parser: {
|
||||
syntax: 'typescript',
|
||||
tsx: true,
|
||||
},
|
||||
transform: {
|
||||
react: {
|
||||
runtime: 'automatic',
|
||||
importSource: '@emotion/react',
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
return {
|
||||
devtool: isDev ? 'eval-source-map' : config.devtool,
|
||||
devtool: isDev ? false : config.devtool,
|
||||
cache: {
|
||||
type: 'filesystem',
|
||||
buildDependencies: {
|
||||
config: [__filename],
|
||||
},
|
||||
},
|
||||
...(isDev && {
|
||||
optimization: {
|
||||
...config.optimization,
|
||||
@@ -44,8 +117,16 @@ export default function webpackExtendPlugin(): Plugin<void> {
|
||||
},
|
||||
}),
|
||||
resolve: {
|
||||
// Add superset-frontend node_modules to module resolution
|
||||
modules: [
|
||||
...(config.resolve?.modules || []),
|
||||
path.resolve(__dirname, '../../superset-frontend/node_modules'),
|
||||
],
|
||||
alias: {
|
||||
...config.resolve.alias,
|
||||
// Ensure single React instance across all modules (critical for hooks to work)
|
||||
react: path.resolve(__dirname, '../node_modules/react'),
|
||||
'react-dom': path.resolve(__dirname, '../node_modules/react-dom'),
|
||||
// Allow importing from superset-frontend
|
||||
src: path.resolve(__dirname, '../../superset-frontend/src'),
|
||||
// '@superset-ui/core': path.resolve(
|
||||
@@ -58,14 +139,29 @@ export default function webpackExtendPlugin(): Plugin<void> {
|
||||
__dirname,
|
||||
'../../superset-frontend/packages/superset-ui-core/src/components',
|
||||
),
|
||||
// Extension API package - allows docs to import from @apache-superset/core/ui
|
||||
// This matches the established pattern used throughout the Superset codebase
|
||||
// Point directly to components to avoid importing theme (which has font dependencies)
|
||||
// Note: TypeScript types come from docs/src/types/apache-superset-core (see tsconfig.json)
|
||||
// This split is intentional: webpack resolves actual source, tsconfig provides simplified types
|
||||
// Also alias the full package path for internal imports within components
|
||||
'@superset-ui/core/components': path.resolve(
|
||||
__dirname,
|
||||
'../../superset-frontend/packages/superset-ui-core/src/components',
|
||||
),
|
||||
// Use a shim for react-table to handle CommonJS to ES module interop
|
||||
// react-table v7 is CommonJS, but Superset components import it with ES module syntax
|
||||
'react-table': path.resolve(__dirname, './shims/react-table.js'),
|
||||
// Extension API package - resolve @apache-superset/core and its sub-paths
|
||||
// to source so the docs build doesn't depend on pre-built lib/ artifacts.
|
||||
// More specific sub-path aliases must come first; webpack matches the
|
||||
// longest prefix.
|
||||
'@apache-superset/core/ui': path.resolve(
|
||||
__dirname,
|
||||
'../../superset-frontend/packages/superset-core/src/ui/components',
|
||||
'../../superset-frontend/packages/superset-core/src/ui',
|
||||
),
|
||||
'@apache-superset/core/api/core': path.resolve(
|
||||
__dirname,
|
||||
'../../superset-frontend/packages/superset-core/src/api/core',
|
||||
),
|
||||
'@apache-superset/core': path.resolve(
|
||||
__dirname,
|
||||
'../../superset-frontend/packages/superset-core/src',
|
||||
),
|
||||
// Add proper Storybook aliases
|
||||
'@storybook/blocks': path.resolve(
|
||||
@@ -123,8 +219,6 @@ export default function webpackExtendPlugin(): Plugin<void> {
|
||||
),
|
||||
},
|
||||
},
|
||||
// We're removing the ts-loader rule that was processing superset-frontend files
|
||||
// This will prevent TypeScript errors from files outside the docs directory
|
||||
};
|
||||
},
|
||||
};
|
||||
|
||||
6
docs/static/.htaccess
vendored
@@ -35,7 +35,7 @@ RewriteRule ^usertutorial\.html$ /docs/using-superset/creating-your-first-dashbo
|
||||
RewriteRule ^security\.html$ /docs/security/ [R=301,L]
|
||||
RewriteRule ^sqllab\.html$ /docs/configuration/sql-templating [R=301,L]
|
||||
RewriteRule ^gallery\.html$ /docs/intro [R=301,L]
|
||||
RewriteRule ^druid\.html$ /docs/configuration/databases [R=301,L]
|
||||
RewriteRule ^druid\.html$ /docs/databases [R=301,L]
|
||||
RewriteRule ^misc\.html$ /docs/configuration/country-map-tools [R=301,L]
|
||||
RewriteRule ^visualization\.html$ /docs/configuration/country-map-tools [R=301,L]
|
||||
RewriteRule ^videos\.html$ /docs/faq [R=301,L]
|
||||
@@ -47,7 +47,7 @@ RewriteRule ^docs/installation/email-reports$ /docs/configuration/alerts-reports
|
||||
RewriteRule ^docs/roadmap$ /docs/intro [R=301,L]
|
||||
RewriteRule ^docs/contributing/contribution-guidelines$ /docs/contributing/ [R=301,L]
|
||||
RewriteRule ^docs/contributing/contribution-page$ /docs/contributing/ [R=301,L]
|
||||
RewriteRule ^docs/databases/yugabyte/$ /docs/configuration/databases [R=301,L]
|
||||
RewriteRule ^docs/databases/yugabyte/$ /docs/databases [R=301,L]
|
||||
RewriteRule ^docs/frequently-asked-questions$ /docs/faq [R=301,L]
|
||||
RewriteRule ^docs/installation/running-on-kubernetes/$ /docs/installation/kubernetes [R=301,L]
|
||||
RewriteRule ^docs/contributing/testing-locally/$ /docs/contributing/howtos [R=301,L]
|
||||
@@ -62,7 +62,5 @@ RewriteRule ^docs/installation/cache/$ /docs/configuration/cache [R=301,L]
|
||||
RewriteRule ^docs/installation/async-queries-celery/$ /docs/configuration/async-queries-celery [R=301,L]
|
||||
RewriteRule ^docs/installation/event-logging/$ /docs/configuration/event-logging [R=301,L]
|
||||
|
||||
RewriteRule ^docs/databases.*$ /docs/configuration/databases [R=301,L]
|
||||
|
||||
# pre-commit hooks documentation
|
||||
RewriteRule ^docs/contributing/hooks-and-linting/$ /docs/contributing/development/#git-hooks-1
|
||||
|
||||
392
docs/static/feature-flags.json
vendored
Normal file
@@ -0,0 +1,392 @@
|
||||
{
|
||||
"generated": true,
|
||||
"source": "superset/config.py",
|
||||
"flags": {
|
||||
"development": [
|
||||
{
|
||||
"name": "AG_GRID_TABLE_ENABLED",
|
||||
"default": false,
|
||||
"lifecycle": "development",
|
||||
"description": "Enables Table V2 (AG Grid) viz plugin"
|
||||
},
|
||||
{
|
||||
"name": "ALERT_REPORT_TABS",
|
||||
"default": false,
|
||||
"lifecycle": "development",
|
||||
"description": "Enables experimental tabs UI for Alerts and Reports"
|
||||
},
|
||||
{
|
||||
"name": "CHART_PLUGINS_EXPERIMENTAL",
|
||||
"default": false,
|
||||
"lifecycle": "development",
|
||||
"description": "Enables experimental chart plugins"
|
||||
},
|
||||
{
|
||||
"name": "CSV_UPLOAD_PYARROW_ENGINE",
|
||||
"default": false,
|
||||
"lifecycle": "development",
|
||||
"description": "Experimental PyArrow engine for CSV parsing (may have issues with dates/nulls)"
|
||||
},
|
||||
{
|
||||
"name": "DATASET_FOLDERS",
|
||||
"default": false,
|
||||
"lifecycle": "development",
|
||||
"description": "Allow metrics and columns to be grouped into folders in the chart builder"
|
||||
},
|
||||
{
|
||||
"name": "DATE_RANGE_TIMESHIFTS_ENABLED",
|
||||
"default": false,
|
||||
"lifecycle": "development",
|
||||
"description": "Enable support for date range timeshifts (e.g., \"2015-01-03 : 2015-01-04\") in addition to relative timeshifts (e.g., \"1 day ago\")"
|
||||
},
|
||||
{
|
||||
"name": "ENABLE_ADVANCED_DATA_TYPES",
|
||||
"default": false,
|
||||
"lifecycle": "development",
|
||||
"description": "Enables advanced data type support"
|
||||
},
|
||||
{
|
||||
"name": "ENABLE_EXTENSIONS",
|
||||
"default": false,
|
||||
"lifecycle": "development",
|
||||
"description": "Enable Superset extensions for custom functionality without modifying core"
|
||||
},
|
||||
{
|
||||
"name": "MATRIXIFY",
|
||||
"default": false,
|
||||
"lifecycle": "development",
|
||||
"description": "Enable Matrixify feature for matrix-style chart layouts"
|
||||
},
|
||||
{
|
||||
"name": "OPTIMIZE_SQL",
|
||||
"default": false,
|
||||
"lifecycle": "development",
|
||||
"description": "Try to optimize SQL queries \u2014 for now only predicate pushdown is supported"
|
||||
},
|
||||
{
|
||||
"name": "PRESTO_EXPAND_DATA",
|
||||
"default": false,
|
||||
"lifecycle": "development",
|
||||
"description": "Expand nested types in Presto into extra columns/arrays. Experimental, doesn't work with all nested types."
|
||||
},
|
||||
{
|
||||
"name": "TABLE_V2_TIME_COMPARISON_ENABLED",
|
||||
"default": false,
|
||||
"lifecycle": "development",
|
||||
"description": "Enable Table V2 time comparison feature"
|
||||
},
|
||||
{
|
||||
"name": "TAGGING_SYSTEM",
|
||||
"default": false,
|
||||
"lifecycle": "development",
|
||||
"description": "Enables the tagging system for organizing assets"
|
||||
}
|
||||
],
|
||||
"testing": [
|
||||
{
|
||||
"name": "ALERT_REPORTS",
|
||||
"default": false,
|
||||
"lifecycle": "testing",
|
||||
"description": "Enables Alerts and Reports functionality",
|
||||
"docs": "https://superset.apache.org/docs/configuration/alerts-reports"
|
||||
},
|
||||
{
|
||||
"name": "ALERT_REPORTS_FILTER",
|
||||
"default": false,
|
||||
"lifecycle": "testing",
|
||||
"description": "Enables filter functionality in Alerts and Reports"
|
||||
},
|
||||
{
|
||||
"name": "ALERT_REPORT_SLACK_V2",
|
||||
"default": false,
|
||||
"lifecycle": "testing",
|
||||
"description": "Enables Slack V2 integration for Alerts and Reports"
|
||||
},
|
||||
{
|
||||
"name": "ALERT_REPORT_WEBHOOK",
|
||||
"default": false,
|
||||
"lifecycle": "testing",
|
||||
"description": "Enables webhook integration for Alerts and Reports"
|
||||
},
|
||||
{
|
||||
"name": "ALLOW_FULL_CSV_EXPORT",
|
||||
"default": false,
|
||||
"lifecycle": "testing",
|
||||
"description": "Allow users to export full CSV of table viz type. Warning: Could cause server memory/compute issues with large datasets."
|
||||
},
|
||||
{
|
||||
"name": "AWS_DATABASE_IAM_AUTH",
|
||||
"default": false,
|
||||
"lifecycle": "testing",
|
||||
"description": "Enable AWS IAM authentication for database connections (Aurora, Redshift). Allows cross-account role assumption via STS AssumeRole. Security note: When enabled, ensure Superset's IAM role has restricted sts:AssumeRole permissions to prevent unauthorized access."
|
||||
},
|
||||
{
|
||||
"name": "CACHE_IMPERSONATION",
|
||||
"default": false,
|
||||
"lifecycle": "testing",
|
||||
"description": "Enable caching per impersonation key in datasources with user impersonation"
|
||||
},
|
||||
{
|
||||
"name": "DATE_FORMAT_IN_EMAIL_SUBJECT",
|
||||
"default": false,
|
||||
"lifecycle": "testing",
|
||||
"description": "Allow users to optionally specify date formats in email subjects",
|
||||
"docs": "https://superset.apache.org/docs/configuration/alerts-reports"
|
||||
},
|
||||
{
|
||||
"name": "DYNAMIC_PLUGINS",
|
||||
"default": false,
|
||||
"lifecycle": "testing",
|
||||
"description": "Enable dynamic plugin loading"
|
||||
},
|
||||
{
|
||||
"name": "ENABLE_DASHBOARD_DOWNLOAD_WEBDRIVER_SCREENSHOT",
|
||||
"default": false,
|
||||
"lifecycle": "testing",
|
||||
"description": "Generate screenshots (PDF/JPG) of dashboards using web driver. Depends on ENABLE_DASHBOARD_SCREENSHOT_ENDPOINTS."
|
||||
},
|
||||
{
|
||||
"name": "ENABLE_DASHBOARD_SCREENSHOT_ENDPOINTS",
|
||||
"default": false,
|
||||
"lifecycle": "testing",
|
||||
"description": "Enables endpoints to cache and retrieve dashboard screenshots via webdriver. Requires Celery and THUMBNAIL_CACHE_CONFIG."
|
||||
},
|
||||
{
|
||||
"name": "ENABLE_SUPERSET_META_DB",
|
||||
"default": false,
|
||||
"lifecycle": "testing",
|
||||
"description": "Allows users to add a superset:// DB that can query across databases. Experimental with potential security/performance risks. See SUPERSET_META_DB_LIMIT.",
|
||||
"docs": "https://superset.apache.org/docs/configuration/databases/#querying-across-databases"
|
||||
},
|
||||
{
|
||||
"name": "ESTIMATE_QUERY_COST",
|
||||
"default": false,
|
||||
"lifecycle": "testing",
|
||||
"description": "Enable query cost estimation. Supported in Presto, Postgres, and BigQuery. Requires `cost_estimate_enabled: true` in database `extra` attribute."
|
||||
},
|
||||
{
|
||||
"name": "GLOBAL_ASYNC_QUERIES",
|
||||
"default": false,
|
||||
"lifecycle": "testing",
|
||||
"description": "Enable async queries for dashboards and Explore via WebSocket. Requires Redis 5.0+ and Celery workers.",
|
||||
"docs": "https://superset.apache.org/docs/contributing/misc#async-chart-queries"
|
||||
},
|
||||
{
|
||||
"name": "IMPERSONATE_WITH_EMAIL_PREFIX",
|
||||
"default": false,
|
||||
"lifecycle": "testing",
|
||||
"description": "When impersonating a user, use the email prefix instead of username"
|
||||
},
|
||||
{
|
||||
"name": "PLAYWRIGHT_REPORTS_AND_THUMBNAILS",
|
||||
"default": false,
|
||||
"lifecycle": "testing",
|
||||
"description": "Replace Selenium with Playwright for reports and thumbnails. Supports deck.gl visualizations. Requires playwright pip package."
|
||||
},
|
||||
{
|
||||
"name": "RLS_IN_SQLLAB",
|
||||
"default": false,
|
||||
"lifecycle": "testing",
|
||||
"description": "Apply RLS rules to SQL Lab queries. Requires query parsing/manipulation. May break queries or allow RLS bypass. Use with care!"
|
||||
},
|
||||
{
|
||||
"name": "SSH_TUNNELING",
|
||||
"default": false,
|
||||
"lifecycle": "testing",
|
||||
"description": "Allow users to enable SSH tunneling when creating a DB connection. DB engine must support SSH Tunnels.",
|
||||
"docs": "https://superset.apache.org/docs/configuration/setup-ssh-tunneling"
|
||||
},
|
||||
{
|
||||
"name": "USE_ANALOGOUS_COLORS",
|
||||
"default": false,
|
||||
"lifecycle": "testing",
|
||||
"description": "Use analogous colors in charts"
|
||||
}
|
||||
],
|
||||
"stable": [
|
||||
{
|
||||
"name": "ALERTS_ATTACH_REPORTS",
|
||||
"default": true,
|
||||
"lifecycle": "stable",
|
||||
"description": "When enabled, alerts send email/slack with screenshot AND link. When disabled, alerts send only link; reports still send screenshot.",
|
||||
"category": "runtime_config"
|
||||
},
|
||||
{
|
||||
"name": "ALLOW_ADHOC_SUBQUERY",
|
||||
"default": false,
|
||||
"lifecycle": "stable",
|
||||
"description": "Allow ad-hoc subqueries in SQL Lab",
|
||||
"category": "runtime_config"
|
||||
},
|
||||
{
|
||||
"name": "CACHE_QUERY_BY_USER",
|
||||
"default": false,
|
||||
"lifecycle": "stable",
|
||||
"description": "Enable caching per user key for Superset cache",
|
||||
"category": "runtime_config"
|
||||
},
|
||||
{
|
||||
"name": "CSS_TEMPLATES",
|
||||
"default": true,
|
||||
"lifecycle": "stable",
|
||||
"description": "Enables CSS Templates in Settings menu and dashboard forms",
|
||||
"category": "runtime_config"
|
||||
},
|
||||
{
|
||||
"name": "DASHBOARD_RBAC",
|
||||
"default": false,
|
||||
"lifecycle": "stable",
|
||||
"description": "Role-based access control for dashboards",
|
||||
"docs": "https://superset.apache.org/docs/using-superset/creating-your-first-dashboard",
|
||||
"category": "runtime_config"
|
||||
},
|
||||
{
|
||||
"name": "DASHBOARD_VIRTUALIZATION",
|
||||
"default": true,
|
||||
"lifecycle": "stable",
|
||||
"description": "Enables dashboard virtualization for improved performance",
|
||||
"category": "path_to_deprecation"
|
||||
},
|
||||
{
|
||||
"name": "DASHBOARD_VIRTUALIZATION_DEFER_DATA",
|
||||
"default": false,
|
||||
"lifecycle": "stable",
|
||||
"description": "Supports simultaneous data and dashboard virtualization for backend performance",
|
||||
"category": "runtime_config"
|
||||
},
|
||||
{
|
||||
"name": "DATAPANEL_CLOSED_BY_DEFAULT",
|
||||
"default": false,
|
||||
"lifecycle": "stable",
|
||||
"description": "Data panel closed by default in chart builder",
|
||||
"category": "runtime_config"
|
||||
},
|
||||
{
|
||||
"name": "DRILL_BY",
|
||||
"default": true,
|
||||
"lifecycle": "stable",
|
||||
"description": "Enable drill-by functionality in charts",
|
||||
"category": "runtime_config"
|
||||
},
|
||||
{
|
||||
"name": "DRUID_JOINS",
|
||||
"default": false,
|
||||
"lifecycle": "stable",
|
||||
"description": "Enable Druid JOINs (requires Druid version with JOIN support)",
|
||||
"category": "runtime_config"
|
||||
},
|
||||
{
|
||||
"name": "EMBEDDABLE_CHARTS",
|
||||
"default": true,
|
||||
"lifecycle": "stable",
|
||||
"description": "Enable sharing charts with embedding",
|
||||
"category": "runtime_config"
|
||||
},
|
||||
{
|
||||
"name": "EMBEDDED_SUPERSET",
|
||||
"default": false,
|
||||
"lifecycle": "stable",
|
||||
"description": "Enable embedded Superset functionality",
|
||||
"category": "runtime_config"
|
||||
},
|
||||
{
|
||||
"name": "ENABLE_FACTORY_RESET_COMMAND",
|
||||
"default": false,
|
||||
"lifecycle": "stable",
|
||||
"description": "Enable factory reset CLI command",
|
||||
"category": "internal"
|
||||
},
|
||||
{
|
||||
"name": "ENABLE_TEMPLATE_PROCESSING",
|
||||
"default": false,
|
||||
"lifecycle": "stable",
|
||||
"description": "Enable Jinja templating in SQL queries",
|
||||
"category": "runtime_config"
|
||||
},
|
||||
{
|
||||
"name": "ESCAPE_MARKDOWN_HTML",
|
||||
"default": false,
|
||||
"lifecycle": "stable",
|
||||
"description": "Escape HTML in Markdown components (rather than rendering it)",
|
||||
"category": "runtime_config"
|
||||
},
|
||||
{
|
||||
"name": "FILTERBAR_CLOSED_BY_DEFAULT",
|
||||
"default": false,
|
||||
"lifecycle": "stable",
|
||||
"description": "Filter bar closed by default when opening dashboard",
|
||||
"category": "runtime_config"
|
||||
},
|
||||
{
|
||||
"name": "FORCE_GARBAGE_COLLECTION_AFTER_EVERY_REQUEST",
|
||||
"default": false,
|
||||
"lifecycle": "stable",
|
||||
"description": "Force garbage collection after every request",
|
||||
"category": "runtime_config"
|
||||
},
|
||||
{
|
||||
"name": "LISTVIEWS_DEFAULT_CARD_VIEW",
|
||||
"default": false,
|
||||
"lifecycle": "stable",
|
||||
"description": "Use card view as default in list views",
|
||||
"category": "runtime_config"
|
||||
},
|
||||
{
|
||||
"name": "MENU_HIDE_USER_INFO",
|
||||
"default": false,
|
||||
"lifecycle": "stable",
|
||||
"description": "Hide user info in the navigation menu",
|
||||
"category": "runtime_config"
|
||||
},
|
||||
{
|
||||
"name": "SLACK_ENABLE_AVATARS",
|
||||
"default": false,
|
||||
"lifecycle": "stable",
|
||||
"description": "Use Slack avatars for users. Requires adding slack-edge.com to TALISMAN_CONFIG.",
|
||||
"category": "runtime_config"
|
||||
},
|
||||
{
|
||||
"name": "SQLLAB_BACKEND_PERSISTENCE",
|
||||
"default": true,
|
||||
"lifecycle": "stable",
|
||||
"description": "Enable SQL Lab backend persistence for query state",
|
||||
"category": "runtime_config"
|
||||
},
|
||||
{
|
||||
"name": "SQLLAB_FORCE_RUN_ASYNC",
|
||||
"default": false,
|
||||
"lifecycle": "stable",
|
||||
"description": "Force SQL Lab to run async via Celery regardless of database settings",
|
||||
"category": "runtime_config"
|
||||
},
|
||||
{
|
||||
"name": "THUMBNAILS",
|
||||
"default": false,
|
||||
"lifecycle": "stable",
|
||||
"description": "Exposes API endpoint to compute thumbnails",
|
||||
"docs": "https://superset.apache.org/docs/configuration/cache",
|
||||
"category": "runtime_config"
|
||||
}
|
||||
],
|
||||
"deprecated": [
|
||||
{
|
||||
"name": "AVOID_COLORS_COLLISION",
|
||||
"default": true,
|
||||
"lifecycle": "deprecated",
|
||||
"description": "Avoid color collisions in charts by using distinct colors"
|
||||
},
|
||||
{
|
||||
"name": "DRILL_TO_DETAIL",
|
||||
"default": true,
|
||||
"lifecycle": "deprecated",
|
||||
"description": "Enable drill-to-detail functionality in charts"
|
||||
},
|
||||
{
|
||||
"name": "ENABLE_JAVASCRIPT_CONTROLS",
|
||||
"default": false,
|
||||
"lifecycle": "deprecated",
|
||||
"description": "Allow JavaScript in chart controls. WARNING: XSS security vulnerability!"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
BIN
docs/static/img/atomic-design.png
vendored
Normal file
|
After Width: | Height: | Size: 159 KiB |
21
docs/static/img/community/bluesky-symbol.svg
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 568 501" width="40" height="40" fill="#1185FE">
|
||||
<path d="M123.121 33.664C188.241 82.553 258.281 181.68 284 234.873c25.719-53.192 95.759-152.32 160.879-201.21C491.866-1.611 568-28.906 568 57.947c0 17.346-9.945 145.713-15.778 166.555-20.275 72.453-94.155 90.933-159.875 79.748C507.222 323.8 536.444 388.56 473.333 453.32c-119.86 122.992-172.272-30.859-185.702-70.281-2.462-7.227-3.614-10.608-3.631-7.733-.017-2.875-1.169.506-3.631 7.733-13.43 39.422-65.842 193.273-185.702 70.281-63.111-64.76-33.889-129.52 80.986-149.071-65.72 11.185-139.6-7.295-159.875-79.748C9.945 203.659 0 75.291 0 57.946 0-28.906 76.135-1.612 123.121 33.664z"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 1.5 KiB |
23
docs/static/img/community/globe-symbol.svg
vendored
Normal file
@@ -0,0 +1,23 @@
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" width="40" height="40" fill="none" stroke="#484848" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
|
||||
<circle cx="12" cy="12" r="10"/>
|
||||
<line x1="2" y1="12" x2="22" y2="12"/>
|
||||
<path d="M12 2a15.3 15.3 0 0 1 4 10 15.3 15.3 0 0 1-4 10 15.3 15.3 0 0 1-4-10 15.3 15.3 0 0 1 4-10z"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 1.1 KiB |
21
docs/static/img/community/linkedin-symbol.svg
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" width="40" height="40" fill="#0A66C2">
|
||||
<path d="M20.447 20.452h-3.554v-5.569c0-1.328-.027-3.037-1.852-3.037-1.853 0-2.136 1.445-2.136 2.939v5.667H9.351V9h3.414v1.561h.046c.477-.9 1.637-1.85 3.37-1.85 3.601 0 4.267 2.37 4.267 5.455v6.286zM5.337 7.433a2.062 2.062 0 01-2.063-2.065 2.064 2.064 0 112.063 2.065zm1.782 13.019H3.555V9h3.564v11.452zM22.225 0H1.771C.792 0 0 .774 0 1.729v20.542C0 23.227.792 24 1.771 24h20.451C23.2 24 24 23.227 24 22.271V1.729C24 .774 23.2 0 22.222 0h.003z"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 1.3 KiB |
21
docs/static/img/community/x-symbol.svg
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" width="40" height="40" fill="#484848">
|
||||
<path d="M18.244 2.25h3.308l-7.227 8.26 8.502 11.24H16.17l-5.214-6.817L4.99 21.75H1.68l7.73-8.835L1.254 2.25H8.08l4.713 6.231zm-1.161 17.52h1.833L7.084 4.126H5.117z"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 1.0 KiB |
BIN
docs/static/img/databases/alloydb.png
vendored
Normal file
|
After Width: | Height: | Size: 86 KiB |
BIN
docs/static/img/databases/altinity.png
vendored
Normal file
|
After Width: | Height: | Size: 19 KiB |
BIN
docs/static/img/databases/amazon-redshift.jpg
vendored
|
Before Width: | Height: | Size: 17 KiB |
BIN
docs/static/img/databases/apache-druid.jpeg
vendored
|
Before Width: | Height: | Size: 210 KiB |