mirror of
https://github.com/apache/superset.git
synced 2026-04-29 04:54:21 +00:00
Compare commits
406 Commits
semantic-l
...
glyph-init
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
9042601f5c | ||
|
|
1bebbcf067 | ||
|
|
58ac91c059 | ||
|
|
81fdc2bd0e | ||
|
|
23b91d22ef | ||
|
|
6eb4db6930 | ||
|
|
2324b4c9e5 | ||
|
|
6ca0f7a925 | ||
|
|
64424f1625 | ||
|
|
debdfbc835 | ||
|
|
4c01b5c324 | ||
|
|
137ebdee39 | ||
|
|
a272253243 | ||
|
|
35ac4c74fd | ||
|
|
4f3403b134 | ||
|
|
86bc493423 | ||
|
|
91dba9dcbf | ||
|
|
bdcc98743d | ||
|
|
e053418c97 | ||
|
|
0404c99e39 | ||
|
|
fd3eea0557 | ||
|
|
0c490dc1ab | ||
|
|
1166df3579 | ||
|
|
0b4fcce03b | ||
|
|
393259bb9e | ||
|
|
560da50df8 | ||
|
|
5d5012aa9f | ||
|
|
6c75365427 | ||
|
|
3a3cbc2900 | ||
|
|
23a47e2f5a | ||
|
|
fc67569cd4 | ||
|
|
e17bfae6bd | ||
|
|
936e37bd02 | ||
|
|
ad3812edd7 | ||
|
|
5f58241795 | ||
|
|
481bfa0f68 | ||
|
|
462fffc23c | ||
|
|
7503ee4e09 | ||
|
|
fa3d4a75ca | ||
|
|
adb575be2f | ||
|
|
d56bc5826f | ||
|
|
72c69e2ca6 | ||
|
|
22cfc4536b | ||
|
|
fac5d2bcb6 | ||
|
|
4fe2085596 | ||
|
|
005b2af985 | ||
|
|
2a38ce001e | ||
|
|
f4772a9383 | ||
|
|
dcdcf88969 | ||
|
|
d8f7ae83ee | ||
|
|
911d72c957 | ||
|
|
169d27c9e9 | ||
|
|
62c7b48b5c | ||
|
|
4f444ae1d2 | ||
|
|
459b4cb23d | ||
|
|
d914b35cc0 | ||
|
|
4f5789abfe | ||
|
|
e9b6791ffb | ||
|
|
0294c30c9e | ||
|
|
1cface15e6 | ||
|
|
ecefba5bf7 | ||
|
|
53dddf4db2 | ||
|
|
1e8d648f47 | ||
|
|
14c0cad0ba | ||
|
|
f895250cf9 | ||
|
|
ea90d1f141 | ||
|
|
f48322c17d | ||
|
|
413dfc98ff | ||
|
|
57f8f50292 | ||
|
|
07aa7622f7 | ||
|
|
ffbf81e952 | ||
|
|
734d64081f | ||
|
|
85cf46dc1c | ||
|
|
4fc8157d6f | ||
|
|
af185ad8c9 | ||
|
|
dd7f0c4224 | ||
|
|
77ebc5ac10 | ||
|
|
e9b91f6ec9 | ||
|
|
3ac76e116d | ||
|
|
4249b8ee6a | ||
|
|
047360641a | ||
|
|
337eb3baf5 | ||
|
|
70b2e520b2 | ||
|
|
16e046b5d9 | ||
|
|
a6d85dccf8 | ||
|
|
84279acd2f | ||
|
|
03caa7b337 | ||
|
|
6f67b05375 | ||
|
|
64ee48f147 | ||
|
|
f9be2b816a | ||
|
|
dfdf8e75d8 | ||
|
|
0c1edd4568 | ||
|
|
7a5441bc7a | ||
|
|
5edaed2e5b | ||
|
|
861e5cd013 | ||
|
|
d7d94ba640 | ||
|
|
9968393e4c | ||
|
|
9aff89c1b4 | ||
|
|
aaa174f820 | ||
|
|
1949d1bb96 | ||
|
|
f9fde87e85 | ||
|
|
cedc35e39f | ||
|
|
12a266fd2f | ||
|
|
5909e90081 | ||
|
|
dcc556a9a7 | ||
|
|
61986100bd | ||
|
|
c76ddcbbec | ||
|
|
740ddc03e2 | ||
|
|
2080633e57 | ||
|
|
ac27c0aa3c | ||
|
|
53fa65fe67 | ||
|
|
fdef8fa50a | ||
|
|
1334040fd6 | ||
|
|
52af489d8f | ||
|
|
d07a452e9b | ||
|
|
aed95453b3 | ||
|
|
4451e8db05 | ||
|
|
dd2eb6293d | ||
|
|
1b1be96274 | ||
|
|
e5489bd30f | ||
|
|
12aa425049 | ||
|
|
c31224c891 | ||
|
|
85e830de46 | ||
|
|
d4ba44fce2 | ||
|
|
7cd76e4647 | ||
|
|
e112d863bf | ||
|
|
fe5d5fdae6 | ||
|
|
02411ffde0 | ||
|
|
1697cf733b | ||
|
|
28c802fb6c | ||
|
|
362b5e3b89 | ||
|
|
bf5070471d | ||
|
|
100789200a | ||
|
|
f95f125c4c | ||
|
|
fd67d3190a | ||
|
|
bd8d4ddbee | ||
|
|
ecb4e483df | ||
|
|
f8cb935105 | ||
|
|
ba8d6eb9ac | ||
|
|
c399fd2801 | ||
|
|
9e04c3471d | ||
|
|
8f8fe19e3e | ||
|
|
ff3dab9b3b | ||
|
|
ff24e2f27d | ||
|
|
54eb6317ef | ||
|
|
32c98d02d3 | ||
|
|
6b25d0663e | ||
|
|
c0bcf28947 | ||
|
|
e0ea807031 | ||
|
|
8d070f5cb6 | ||
|
|
5cd8e1e736 | ||
|
|
0ced20457b | ||
|
|
e3e6b0e18b | ||
|
|
c026ae2ce7 | ||
|
|
ae491aee00 | ||
|
|
3258082819 | ||
|
|
d36ddbbb33 | ||
|
|
5920cb57ea | ||
|
|
fb6f3fbb4d | ||
|
|
91539f77aa | ||
|
|
b8f31124d0 | ||
|
|
da8e077a44 | ||
|
|
32435bc3e9 | ||
|
|
2cf0d7936e | ||
|
|
0830a57fa6 | ||
|
|
0f56e3b9ae | ||
|
|
ee45b26ad7 | ||
|
|
f3407d7a56 | ||
|
|
f51f7f3307 | ||
|
|
2f4f64dfe8 | ||
|
|
ae584c8886 | ||
|
|
b1e004e122 | ||
|
|
737a5162e4 | ||
|
|
b800412eda | ||
|
|
24a4f8510d | ||
|
|
33a425bbbc | ||
|
|
5ce4c52cfa | ||
|
|
c9ec173647 | ||
|
|
71f9dcff5a | ||
|
|
479b7a3fba | ||
|
|
594ea972ca | ||
|
|
d77f7b6d20 | ||
|
|
f4ded02e0d | ||
|
|
f97fa08477 | ||
|
|
789be78166 | ||
|
|
ea3d247017 | ||
|
|
6456f4c516 | ||
|
|
e9bbf06938 | ||
|
|
ebee35ea5a | ||
|
|
d0fb77cbc8 | ||
|
|
46659c2bd1 | ||
|
|
8407e9cf3b | ||
|
|
5eeba2e734 | ||
|
|
4ca8c000d1 | ||
|
|
7108658de0 | ||
|
|
42311f602e | ||
|
|
6b948ee894 | ||
|
|
5e0ee40762 | ||
|
|
6aaf2266a9 | ||
|
|
d14f502126 | ||
|
|
d0361cb881 | ||
|
|
821b259805 | ||
|
|
2329d49f9e | ||
|
|
28e3ba749e | ||
|
|
cd2c889c9a | ||
|
|
52c711b0bc | ||
|
|
6f8052b828 | ||
|
|
5f431ee1ec | ||
|
|
de7a72a37b | ||
|
|
a1a57d50a4 | ||
|
|
5844c05281 | ||
|
|
c7a4d4f2cc | ||
|
|
d6d8e71b71 | ||
|
|
57ec3b5a6d | ||
|
|
11d3750044 | ||
|
|
40db928091 | ||
|
|
fdde5fe2d3 | ||
|
|
6bd37d11ae | ||
|
|
94900e0fb3 | ||
|
|
c3a9e28573 | ||
|
|
e28ab05068 | ||
|
|
b27ec49204 | ||
|
|
a1706229db | ||
|
|
8bcb499a06 | ||
|
|
a3ea950567 | ||
|
|
c722c92adb | ||
|
|
824dafa342 | ||
|
|
104eb90013 | ||
|
|
76f1b5ed5a | ||
|
|
29a52652b9 | ||
|
|
8b1c41a012 | ||
|
|
71a38305d9 | ||
|
|
4ae62dcae8 | ||
|
|
da31e82b6a | ||
|
|
18d3da81ca | ||
|
|
2b1c72a92c | ||
|
|
75c6da97b2 | ||
|
|
6439440260 | ||
|
|
989bb3432f | ||
|
|
b441844ca6 | ||
|
|
09a1788a8b | ||
|
|
cde9abfce2 | ||
|
|
0bcefe34ac | ||
|
|
649112aa1f | ||
|
|
120ca5cf8f | ||
|
|
0035da83af | ||
|
|
019f9442ae | ||
|
|
084f9832c7 | ||
|
|
8a339febeb | ||
|
|
e5579ed939 | ||
|
|
d5dbd06824 | ||
|
|
a1b5b92265 | ||
|
|
92c63a54e4 | ||
|
|
e5b7e38a30 | ||
|
|
edcb38517f | ||
|
|
51a6b30179 | ||
|
|
a588668899 | ||
|
|
9cf86c1533 | ||
|
|
740b328199 | ||
|
|
375bcd00ba | ||
|
|
076d4950d0 | ||
|
|
0e9cffe12e | ||
|
|
46e21c3003 | ||
|
|
1f5df7407f | ||
|
|
e1c022344e | ||
|
|
78081755aa | ||
|
|
955953b467 | ||
|
|
ead19f9ba3 | ||
|
|
c16ca9527c | ||
|
|
d674d54e2e | ||
|
|
0e8b69089d | ||
|
|
0e8c420002 | ||
|
|
e433cd5f69 | ||
|
|
c1b52cb8ed | ||
|
|
dba3fdfadf | ||
|
|
b7a541a9da | ||
|
|
1bde06b366 | ||
|
|
e8927ca3b3 | ||
|
|
9d58599329 | ||
|
|
858a72d8c1 | ||
|
|
c79c85cdfe | ||
|
|
47ea316792 | ||
|
|
5b38a1a0d4 | ||
|
|
f2e677c150 | ||
|
|
d992a5836f | ||
|
|
ee2ab7e078 | ||
|
|
683a65488f | ||
|
|
751804d044 | ||
|
|
0d3c4d5d22 | ||
|
|
b8b7b958d9 | ||
|
|
0092cdca81 | ||
|
|
2120569267 | ||
|
|
4b7ae3a8f7 | ||
|
|
a64e5e15fc | ||
|
|
4f14eddf73 | ||
|
|
70a3e8fb42 | ||
|
|
954da8a3cc | ||
|
|
4c9a463db3 | ||
|
|
47dbdd7a59 | ||
|
|
abc0678454 | ||
|
|
dc403145ed | ||
|
|
955b3b2b19 | ||
|
|
ab80ec8066 | ||
|
|
bb22eb1ca8 | ||
|
|
8d7c83419c | ||
|
|
d300d69f8f | ||
|
|
9e7e813255 | ||
|
|
f4b919bf7d | ||
|
|
3940354120 | ||
|
|
b35b1d7633 | ||
|
|
0131e542e9 | ||
|
|
e7c060466d | ||
|
|
340ab9f238 | ||
|
|
1dcc887a62 | ||
|
|
67cf287c03 | ||
|
|
440cbc4c1f | ||
|
|
8d04c33adf | ||
|
|
8a00badf45 | ||
|
|
a76ec75933 | ||
|
|
240091516a | ||
|
|
1127374edd | ||
|
|
a18b62cf6b | ||
|
|
98553f83e3 | ||
|
|
1d9c93a793 | ||
|
|
fb2826f92e | ||
|
|
236e000398 | ||
|
|
8c603a6f8b | ||
|
|
45a42396ab | ||
|
|
4479614754 | ||
|
|
e1a8886d32 | ||
|
|
23b61b080e | ||
|
|
2f14c6cd69 | ||
|
|
964c16f1a4 | ||
|
|
b6f1b4db2f | ||
|
|
482c674a0f | ||
|
|
16e6452b8c | ||
|
|
c36ac53445 | ||
|
|
eabb5bdf7d | ||
|
|
e5da6d3183 | ||
|
|
3345eb32c5 | ||
|
|
1d8d30e5bb | ||
|
|
4a249a0745 | ||
|
|
d121cfdbda | ||
|
|
3eec441abe | ||
|
|
22c061c06c | ||
|
|
7f85d92b85 | ||
|
|
dc98a3b397 | ||
|
|
c458f99dd4 | ||
|
|
70aec7fa76 | ||
|
|
5c91ab91fb | ||
|
|
62d86aba14 | ||
|
|
b40467c7e2 | ||
|
|
f955f0d133 | ||
|
|
a8111de3ef | ||
|
|
9800fb7702 | ||
|
|
17027ff5ca | ||
|
|
9c963b50e6 | ||
|
|
bf5de3cb50 | ||
|
|
2c6bed27aa | ||
|
|
d05ab91d11 | ||
|
|
7748b60ff5 | ||
|
|
e4cb84bc02 | ||
|
|
005e4e3ea8 | ||
|
|
5e3ff0787b | ||
|
|
51798edb23 | ||
|
|
6e0960c3f5 | ||
|
|
db995ad5bf | ||
|
|
b12f5f8394 | ||
|
|
92986c2ecc | ||
|
|
d4206de8e0 | ||
|
|
4935938bb1 | ||
|
|
2b6b4e363b | ||
|
|
de69377b04 | ||
|
|
fd7ce4976a | ||
|
|
775d1ba061 | ||
|
|
9fc7a83320 | ||
|
|
a754258fad | ||
|
|
a745fd49fa | ||
|
|
01f032017f | ||
|
|
d5c5dbb3bf | ||
|
|
c9a7a85159 | ||
|
|
de7f41a888 | ||
|
|
a0e63faf62 | ||
|
|
341ae994c5 | ||
|
|
81e561bdc9 | ||
|
|
170d1b92eb | ||
|
|
2db0107d12 | ||
|
|
df0211fe29 | ||
|
|
1bf1890084 | ||
|
|
2af5a5adb0 | ||
|
|
fe21485065 | ||
|
|
18ab5382b1 | ||
|
|
f98939103b | ||
|
|
ab36bd3965 | ||
|
|
fb2a8ac9a2 | ||
|
|
06a8f4df02 | ||
|
|
cf88551a56 | ||
|
|
aca18fff99 | ||
|
|
a4860075d2 | ||
|
|
bae716fa83 | ||
|
|
0c87034b17 | ||
|
|
8d5d71199a | ||
|
|
cd36845d56 | ||
|
|
c966dd4f9e | ||
|
|
062e4a2922 | ||
|
|
186693b840 |
41
.envrc.example
Normal file
41
.envrc.example
Normal file
@@ -0,0 +1,41 @@
|
||||
#
|
||||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
# Auto-configure Docker Compose for multi-instance support
|
||||
# Requires direnv: https://direnv.net/
|
||||
#
|
||||
# Install: brew install direnv (or apt install direnv)
|
||||
# Setup: Add 'eval "$(direnv hook bash)"' to ~/.bashrc (or ~/.zshrc)
|
||||
# Allow: Run 'direnv allow' in this directory once
|
||||
|
||||
# Generate unique project name from directory
|
||||
export COMPOSE_PROJECT_NAME=$(basename "$PWD" | tr '[:upper:]' '[:lower:]' | sed 's/[^a-z0-9]/-/g')
|
||||
|
||||
# Find available ports sequentially to avoid collisions
|
||||
_is_free() { ! lsof -i ":$1" &>/dev/null 2>&1; }
|
||||
|
||||
_p=80; while ! _is_free $_p; do ((_p++)); done; export NGINX_PORT=$_p
|
||||
_p=8088; while ! _is_free $_p; do ((_p++)); done; export SUPERSET_PORT=$_p
|
||||
_p=9000; while ! _is_free $_p; do ((_p++)); done; export NODE_PORT=$_p
|
||||
_p=8080; while ! _is_free $_p || [ $_p -eq $NGINX_PORT ]; do ((_p++)); done; export WEBSOCKET_PORT=$_p
|
||||
_p=8081; while ! _is_free $_p || [ $_p -eq $WEBSOCKET_PORT ]; do ((_p++)); done; export CYPRESS_PORT=$_p
|
||||
_p=5432; while ! _is_free $_p; do ((_p++)); done; export DATABASE_PORT=$_p
|
||||
_p=6379; while ! _is_free $_p; do ((_p++)); done; export REDIS_PORT=$_p
|
||||
|
||||
unset _p _is_free
|
||||
|
||||
echo "🐳 Superset configured: http://localhost:$SUPERSET_PORT (dev: localhost:$NODE_PORT)"
|
||||
1
.github/CODEOWNERS
vendored
1
.github/CODEOWNERS
vendored
@@ -33,6 +33,7 @@
|
||||
|
||||
# Notify PMC members of changes to extension-related files
|
||||
|
||||
/docs/developer_portal/extensions/ @michael-s-molina @villebro @rusackas
|
||||
/superset-core/ @michael-s-molina @villebro @geido @eschutho @rusackas @kgabryje
|
||||
/superset-extensions-cli/ @michael-s-molina @villebro @geido @eschutho @rusackas @kgabryje
|
||||
/superset/core/ @michael-s-molina @villebro @geido @eschutho @rusackas @kgabryje
|
||||
|
||||
2
.github/ISSUE_TEMPLATE/bug-report.yml
vendored
2
.github/ISSUE_TEMPLATE/bug-report.yml
vendored
@@ -41,8 +41,8 @@ body:
|
||||
label: Superset version
|
||||
options:
|
||||
- master / latest-dev
|
||||
- "6.0.0"
|
||||
- "5.0.0"
|
||||
- "4.1.3"
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
|
||||
13
.github/workflows/bashlib.sh
vendored
13
.github/workflows/bashlib.sh
vendored
@@ -117,6 +117,19 @@ testdata() {
|
||||
say "::endgroup::"
|
||||
}
|
||||
|
||||
playwright_testdata() {
|
||||
cd "$GITHUB_WORKSPACE"
|
||||
say "::group::Load all examples for Playwright tests"
|
||||
# must specify PYTHONPATH to make `tests.superset_test_config` importable
|
||||
export PYTHONPATH="$GITHUB_WORKSPACE"
|
||||
pip install -e .
|
||||
superset db upgrade
|
||||
superset load_test_users
|
||||
superset load_examples
|
||||
superset init
|
||||
say "::endgroup::"
|
||||
}
|
||||
|
||||
celery-worker() {
|
||||
cd "$GITHUB_WORKSPACE"
|
||||
say "::group::Start Celery worker"
|
||||
|
||||
2
.github/workflows/bump-python-package.yml
vendored
2
.github/workflows/bump-python-package.yml
vendored
@@ -32,7 +32,7 @@ jobs:
|
||||
steps:
|
||||
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
persist-credentials: true
|
||||
ref: master
|
||||
|
||||
2
.github/workflows/cancel_duplicates.yml
vendored
2
.github/workflows/cancel_duplicates.yml
vendored
@@ -31,7 +31,7 @@ jobs:
|
||||
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
if: steps.check_queued.outputs.count >= 20
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Cancel duplicate workflow runs
|
||||
if: steps.check_queued.outputs.count >= 20
|
||||
|
||||
2
.github/workflows/check-python-deps.yml
vendored
2
.github/workflows/check-python-deps.yml
vendored
@@ -18,7 +18,7 @@ jobs:
|
||||
runs-on: ubuntu-22.04
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
|
||||
@@ -25,7 +25,7 @@ jobs:
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
- name: Check and notify
|
||||
uses: actions/github-script@v8
|
||||
with:
|
||||
|
||||
2
.github/workflows/claude.yml
vendored
2
.github/workflows/claude.yml
vendored
@@ -71,7 +71,7 @@ jobs:
|
||||
id-token: write
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 1
|
||||
|
||||
|
||||
2
.github/workflows/codeql-analysis.yml
vendored
2
.github/workflows/codeql-analysis.yml
vendored
@@ -31,7 +31,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Check for file changes
|
||||
id: check
|
||||
|
||||
4
.github/workflows/dependency-review.yml
vendored
4
.github/workflows/dependency-review.yml
vendored
@@ -27,7 +27,7 @@ jobs:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: "Checkout Repository"
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
- name: "Dependency Review"
|
||||
uses: actions/dependency-review-action@v4
|
||||
continue-on-error: true
|
||||
@@ -53,7 +53,7 @@ jobs:
|
||||
runs-on: ubuntu-22.04
|
||||
steps:
|
||||
- name: "Checkout Repository"
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Setup Python
|
||||
uses: ./.github/actions/setup-backend/
|
||||
|
||||
21
.github/workflows/docker.yml
vendored
21
.github/workflows/docker.yml
vendored
@@ -42,7 +42,7 @@ jobs:
|
||||
steps:
|
||||
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
@@ -101,6 +101,23 @@ jobs:
|
||||
docker images $IMAGE_TAG
|
||||
docker history $IMAGE_TAG
|
||||
|
||||
# Scan for vulnerabilities in built container image after pushes to mainline branch.
|
||||
- name: Run Trivy container image vulnerabity scan
|
||||
if: github.event_name == 'push' && github.ref == 'refs/heads/master' && (steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker) && matrix.build_preset == 'lean'
|
||||
uses: aquasecurity/trivy-action@b6643a29fecd7f34b3597bc6acb0a98b03d33ff8 # v0.33.1
|
||||
with:
|
||||
image-ref: ${{ env.IMAGE_TAG }}
|
||||
format: 'sarif'
|
||||
output: 'trivy-results.sarif'
|
||||
vuln-type: 'os'
|
||||
severity: 'CRITICAL,HIGH'
|
||||
ignore-unfixed: true
|
||||
- name: Upload Trivy scan results to GitHub Security tab
|
||||
if: github.event_name == 'push' && github.ref == 'refs/heads/master' && (steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker) && matrix.build_preset == 'lean'
|
||||
uses: github/codeql-action/upload-sarif@1b168cd39490f61582a9beae412bb7057a6b2c4e # v4.31.8
|
||||
with:
|
||||
sarif_file: 'trivy-results.sarif'
|
||||
|
||||
- name: docker-compose sanity check
|
||||
if: (steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker) && matrix.build_preset == 'dev'
|
||||
shell: bash
|
||||
@@ -117,7 +134,7 @@ jobs:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Check for file changes
|
||||
|
||||
4
.github/workflows/embedded-sdk-release.yml
vendored
4
.github/workflows/embedded-sdk-release.yml
vendored
@@ -28,8 +28,8 @@ jobs:
|
||||
run:
|
||||
working-directory: superset-embedded-sdk
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/setup-node@v5
|
||||
- uses: actions/checkout@v6
|
||||
- uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version-file: './superset-embedded-sdk/.nvmrc'
|
||||
registry-url: 'https://registry.npmjs.org'
|
||||
|
||||
4
.github/workflows/embedded-sdk-test.yml
vendored
4
.github/workflows/embedded-sdk-test.yml
vendored
@@ -18,8 +18,8 @@ jobs:
|
||||
run:
|
||||
working-directory: superset-embedded-sdk
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/setup-node@v5
|
||||
- uses: actions/checkout@v6
|
||||
- uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version-file: './superset-embedded-sdk/.nvmrc'
|
||||
registry-url: 'https://registry.npmjs.org'
|
||||
|
||||
4
.github/workflows/ephemeral-env.yml
vendored
4
.github/workflows/ephemeral-env.yml
vendored
@@ -160,7 +160,7 @@ jobs:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ needs.ephemeral-env-label.outputs.sha }} : ${{steps.get-sha.outputs.sha}} )"
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
ref: ${{ needs.ephemeral-env-label.outputs.sha }}
|
||||
persist-credentials: false
|
||||
@@ -220,7 +220,7 @@ jobs:
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/checkout@v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
|
||||
2
.github/workflows/generate-FOSSA-report.yml
vendored
2
.github/workflows/generate-FOSSA-report.yml
vendored
@@ -27,7 +27,7 @@ jobs:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
|
||||
@@ -14,10 +14,10 @@ jobs:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: Checkout Repository
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@v5
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version: '20'
|
||||
|
||||
|
||||
2
.github/workflows/issue_creation.yml
vendored
2
.github/workflows/issue_creation.yml
vendored
@@ -17,7 +17,7 @@ jobs:
|
||||
steps:
|
||||
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
|
||||
2
.github/workflows/latest-release-tag.yml
vendored
2
.github/workflows/latest-release-tag.yml
vendored
@@ -12,7 +12,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
|
||||
2
.github/workflows/license-check.yml
vendored
2
.github/workflows/license-check.yml
vendored
@@ -15,7 +15,7 @@ jobs:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
|
||||
2
.github/workflows/pr-lint.yml
vendored
2
.github/workflows/pr-lint.yml
vendored
@@ -16,7 +16,7 @@ jobs:
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
|
||||
10
.github/workflows/pre-commit.yml
vendored
10
.github/workflows/pre-commit.yml
vendored
@@ -21,7 +21,7 @@ jobs:
|
||||
python-version: ["current", "previous", "next"]
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
@@ -39,7 +39,7 @@ jobs:
|
||||
echo "HOMEBREW_REPOSITORY=$HOMEBREW_REPOSITORY" >>"${GITHUB_ENV}"
|
||||
brew install norwoodj/tap/helm-docs
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v5
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version: '20'
|
||||
|
||||
@@ -54,7 +54,7 @@ jobs:
|
||||
yarn install --immutable
|
||||
|
||||
- name: Cache pre-commit environments
|
||||
uses: actions/cache@v4
|
||||
uses: actions/cache@v5
|
||||
with:
|
||||
path: ~/.cache/pre-commit
|
||||
key: pre-commit-v2-${{ runner.os }}-py${{ matrix.python-version }}-${{ hashFiles('.pre-commit-config.yaml') }}
|
||||
@@ -71,7 +71,9 @@ jobs:
|
||||
GIT_DIFF_EXIT_CODE=$?
|
||||
if [ "${PRE_COMMIT_EXIT_CODE}" -ne 0 ] || [ "${GIT_DIFF_EXIT_CODE}" -ne 0 ]; then
|
||||
if [ "${PRE_COMMIT_EXIT_CODE}" -ne 0 ]; then
|
||||
echo "❌ Pre-commit check failed (exit code: ${EXIT_CODE})."
|
||||
echo "❌ Pre-commit check failed (exit code: ${PRE_COMMIT_EXIT_CODE})."
|
||||
echo "🔍 Modified files:"
|
||||
git diff --name-only
|
||||
else
|
||||
echo "❌ Git working directory is dirty."
|
||||
echo "📌 This likely means that pre-commit made changes that were not committed."
|
||||
|
||||
2
.github/workflows/prefer-typescript.yml
vendored
2
.github/workflows/prefer-typescript.yml
vendored
@@ -27,7 +27,7 @@ jobs:
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
|
||||
8
.github/workflows/release.yml
vendored
8
.github/workflows/release.yml
vendored
@@ -26,7 +26,7 @@ jobs:
|
||||
name: Bump version and publish package(s)
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/checkout@v6
|
||||
with:
|
||||
# pulls all commits (needed for lerna / semantic release to correctly version)
|
||||
fetch-depth: 0
|
||||
@@ -42,13 +42,13 @@ jobs:
|
||||
|
||||
- name: Install Node.js
|
||||
if: env.HAS_TAGS
|
||||
uses: actions/setup-node@v5
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version-file: './superset-frontend/.nvmrc'
|
||||
|
||||
- name: Cache npm
|
||||
if: env.HAS_TAGS
|
||||
uses: actions/cache@v4
|
||||
uses: actions/cache@v5
|
||||
with:
|
||||
path: ~/.npm # npm cache files are stored in `~/.npm` on Linux/macOS
|
||||
key: ${{ runner.OS }}-node-${{ hashFiles('**/package-lock.json') }}
|
||||
@@ -62,7 +62,7 @@ jobs:
|
||||
run: echo "dir=$(npm config get cache)" >> $GITHUB_OUTPUT
|
||||
- name: Cache npm
|
||||
if: env.HAS_TAGS
|
||||
uses: actions/cache@v4
|
||||
uses: actions/cache@v5
|
||||
id: npm-cache # use this to check for `cache-hit` (`steps.npm-cache.outputs.cache-hit != 'true'`)
|
||||
with:
|
||||
path: ${{ steps.npm-cache-dir-path.outputs.dir }}
|
||||
|
||||
18
.github/workflows/showtime-cleanup.yml
vendored
18
.github/workflows/showtime-cleanup.yml
vendored
@@ -7,12 +7,6 @@ on:
|
||||
|
||||
# Manual trigger for testing
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
max_age_hours:
|
||||
description: 'Maximum age in hours before cleanup'
|
||||
required: false
|
||||
default: '48'
|
||||
type: string
|
||||
|
||||
# Common environment variables
|
||||
env:
|
||||
@@ -38,13 +32,5 @@ jobs:
|
||||
|
||||
- name: Cleanup expired environments
|
||||
run: |
|
||||
MAX_AGE="${{ github.event.inputs.max_age_hours || '48' }}"
|
||||
|
||||
# Validate max_age is numeric
|
||||
if [[ ! "$MAX_AGE" =~ ^[0-9]+$ ]]; then
|
||||
echo "❌ Invalid max_age_hours format: $MAX_AGE (must be numeric)"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "Cleaning up environments older than ${MAX_AGE}h"
|
||||
python -m showtime cleanup --older-than "${MAX_AGE}h"
|
||||
echo "Cleaning up environments respecting TTL labels"
|
||||
python -m showtime cleanup --respect-ttl
|
||||
|
||||
2
.github/workflows/showtime-trigger.yml
vendored
2
.github/workflows/showtime-trigger.yml
vendored
@@ -147,7 +147,7 @@ jobs:
|
||||
|
||||
- name: Checkout PR code (only if build needed)
|
||||
if: steps.auth.outputs.authorized == 'true' && steps.check.outputs.build_needed == 'true'
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
ref: ${{ steps.check.outputs.target_sha }}
|
||||
persist-credentials: false
|
||||
|
||||
2
.github/workflows/superset-app-cli.yml
vendored
2
.github/workflows/superset-app-cli.yml
vendored
@@ -37,7 +37,7 @@ jobs:
|
||||
- 16379:6379
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
|
||||
@@ -51,7 +51,7 @@ jobs:
|
||||
- 16379:6379
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
@@ -63,7 +63,7 @@ jobs:
|
||||
with:
|
||||
run: testdata
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v5
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version-file: './superset-frontend/.nvmrc'
|
||||
- name: Install npm dependencies
|
||||
|
||||
@@ -30,13 +30,13 @@ jobs:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
ref: master
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@v5
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version-file: './superset-frontend/.nvmrc'
|
||||
- name: Install eyes-storybook dependencies
|
||||
|
||||
4
.github/workflows/superset-docs-deploy.yml
vendored
4
.github/workflows/superset-docs-deploy.yml
vendored
@@ -31,12 +31,12 @@ jobs:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@v5
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version-file: './docs/.nvmrc'
|
||||
- name: Setup Python
|
||||
|
||||
8
.github/workflows/superset-docs-verify.yml
vendored
8
.github/workflows/superset-docs-verify.yml
vendored
@@ -18,10 +18,10 @@ jobs:
|
||||
name: Link Checking
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/checkout@v6
|
||||
# Do not bump this linkinator-action version without opening
|
||||
# an ASF Infra ticket to allow the new version first!
|
||||
- uses: JustinBeckwith/linkinator-action@3d5ba091319fa7b0ac14703761eebb7d100e6f6d # v1.11.0
|
||||
- uses: JustinBeckwith/linkinator-action@af984b9f30f63e796ae2ea5be5e07cb587f1bbd9 # v2.3
|
||||
continue-on-error: true # This will make the job advisory (non-blocking, no red X)
|
||||
with:
|
||||
paths: "**/*.md, **/*.mdx"
|
||||
@@ -58,12 +58,12 @@ jobs:
|
||||
working-directory: docs
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@v5
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version-file: './docs/.nvmrc'
|
||||
- name: yarn install
|
||||
|
||||
22
.github/workflows/superset-e2e.yml
vendored
22
.github/workflows/superset-e2e.yml
vendored
@@ -69,21 +69,21 @@ jobs:
|
||||
# Conditional checkout based on context
|
||||
- name: Checkout for push or pull_request event
|
||||
if: github.event_name == 'push' || github.event_name == 'pull_request'
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
- name: Checkout using ref (workflow_dispatch)
|
||||
if: github.event_name == 'workflow_dispatch' && github.event.inputs.ref != ''
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
ref: ${{ github.event.inputs.ref }}
|
||||
submodules: recursive
|
||||
- name: Checkout using PR ID (workflow_dispatch)
|
||||
if: github.event_name == 'workflow_dispatch' && github.event.inputs.pr_id != ''
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
ref: refs/pull/${{ github.event.inputs.pr_id }}/merge
|
||||
@@ -109,7 +109,7 @@ jobs:
|
||||
run: testdata
|
||||
- name: Setup Node.js
|
||||
if: steps.check.outputs.python || steps.check.outputs.frontend
|
||||
uses: actions/setup-node@v5
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version-file: './superset-frontend/.nvmrc'
|
||||
- name: Install npm dependencies
|
||||
@@ -146,7 +146,7 @@ jobs:
|
||||
SAFE_APP_ROOT=${APP_ROOT//\//_}
|
||||
echo "safe_app_root=$SAFE_APP_ROOT" >> $GITHUB_OUTPUT
|
||||
- name: Upload Artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v6
|
||||
if: failure()
|
||||
with:
|
||||
path: ${{ github.workspace }}/superset-frontend/cypress-base/cypress/screenshots
|
||||
@@ -186,21 +186,21 @@ jobs:
|
||||
# Conditional checkout based on context (same as Cypress workflow)
|
||||
- name: Checkout for push or pull_request event
|
||||
if: github.event_name == 'push' || github.event_name == 'pull_request'
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
- name: Checkout using ref (workflow_dispatch)
|
||||
if: github.event_name == 'workflow_dispatch' && github.event.inputs.ref != ''
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
ref: ${{ github.event.inputs.ref }}
|
||||
submodules: recursive
|
||||
- name: Checkout using PR ID (workflow_dispatch)
|
||||
if: github.event_name == 'workflow_dispatch' && github.event.inputs.pr_id != ''
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
ref: refs/pull/${{ github.event.inputs.pr_id }}/merge
|
||||
@@ -223,10 +223,10 @@ jobs:
|
||||
if: steps.check.outputs.python || steps.check.outputs.frontend
|
||||
uses: ./.github/actions/cached-dependencies
|
||||
with:
|
||||
run: testdata
|
||||
run: playwright_testdata
|
||||
- name: Setup Node.js
|
||||
if: steps.check.outputs.python || steps.check.outputs.frontend
|
||||
uses: actions/setup-node@v5
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version-file: './superset-frontend/.nvmrc'
|
||||
- name: Install npm dependencies
|
||||
@@ -259,7 +259,7 @@ jobs:
|
||||
SAFE_APP_ROOT=${APP_ROOT//\//_}
|
||||
echo "safe_app_root=$SAFE_APP_ROOT" >> $GITHUB_OUTPUT
|
||||
- name: Upload Playwright Artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v6
|
||||
if: failure()
|
||||
with:
|
||||
path: |
|
||||
|
||||
@@ -24,7 +24,7 @@ jobs:
|
||||
working-directory: superset-extensions-cli
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
@@ -58,7 +58,7 @@ jobs:
|
||||
|
||||
- name: Upload HTML coverage report
|
||||
if: steps.check.outputs.superset-extensions-cli
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v6
|
||||
with:
|
||||
name: superset-extensions-cli-coverage-html
|
||||
path: htmlcov/
|
||||
|
||||
32
.github/workflows/superset-frontend.yml
vendored
32
.github/workflows/superset-frontend.yml
vendored
@@ -23,7 +23,7 @@ jobs:
|
||||
should-run: ${{ steps.check.outputs.frontend }}
|
||||
steps:
|
||||
- name: Checkout Code
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
fetch-depth: 0
|
||||
@@ -58,7 +58,7 @@ jobs:
|
||||
|
||||
- name: Upload Docker Image Artifact
|
||||
if: steps.check.outputs.frontend
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v6
|
||||
with:
|
||||
name: docker-image
|
||||
path: docker-image.tar.gz
|
||||
@@ -73,7 +73,7 @@ jobs:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: Download Docker Image Artifact
|
||||
uses: actions/download-artifact@v5
|
||||
uses: actions/download-artifact@v7
|
||||
with:
|
||||
name: docker-image
|
||||
|
||||
@@ -90,7 +90,7 @@ jobs:
|
||||
"npm run test -- --coverage --shard=${{ matrix.shard }}/8 --coverageReporters=json-summary"
|
||||
|
||||
- name: Upload Coverage Artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v6
|
||||
with:
|
||||
name: coverage-artifacts-${{ matrix.shard }}
|
||||
path: superset-frontend/coverage
|
||||
@@ -101,7 +101,7 @@ jobs:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: Download Coverage Artifacts
|
||||
uses: actions/download-artifact@v5
|
||||
uses: actions/download-artifact@v7
|
||||
with:
|
||||
pattern: coverage-artifacts-*
|
||||
path: coverage/
|
||||
@@ -127,7 +127,7 @@ jobs:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: Download Docker Image Artifact
|
||||
uses: actions/download-artifact@v5
|
||||
uses: actions/download-artifact@v7
|
||||
with:
|
||||
name: docker-image
|
||||
|
||||
@@ -151,7 +151,7 @@ jobs:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: Download Docker Image Artifact
|
||||
uses: actions/download-artifact@v5
|
||||
uses: actions/download-artifact@v7
|
||||
with:
|
||||
name: docker-image
|
||||
|
||||
@@ -167,3 +167,21 @@ jobs:
|
||||
run: |
|
||||
docker run --rm $TAG bash -c \
|
||||
"npm run plugins:build-storybook"
|
||||
|
||||
test-storybook:
|
||||
needs: frontend-build
|
||||
if: needs.frontend-build.outputs.should-run == 'true'
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: Download Docker Image Artifact
|
||||
uses: actions/download-artifact@v7
|
||||
with:
|
||||
name: docker-image
|
||||
|
||||
- name: Load Docker Image
|
||||
run: docker load < docker-image.tar.gz
|
||||
|
||||
- name: Build Storybook and Run Tests
|
||||
run: |
|
||||
docker run --rm $TAG bash -c \
|
||||
"npm run build-storybook && npx playwright install-deps && npx playwright install chromium && npm run test-storybook:ci"
|
||||
|
||||
2
.github/workflows/superset-helm-lint.yml
vendored
2
.github/workflows/superset-helm-lint.yml
vendored
@@ -16,7 +16,7 @@ jobs:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
|
||||
2
.github/workflows/superset-helm-release.yml
vendored
2
.github/workflows/superset-helm-release.yml
vendored
@@ -29,7 +29,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
ref: ${{ inputs.ref || github.ref_name }}
|
||||
persist-credentials: true
|
||||
|
||||
12
.github/workflows/superset-playwright.yml
vendored
12
.github/workflows/superset-playwright.yml
vendored
@@ -60,21 +60,21 @@ jobs:
|
||||
# Conditional checkout based on context (same as Cypress workflow)
|
||||
- name: Checkout for push or pull_request event
|
||||
if: github.event_name == 'push' || github.event_name == 'pull_request'
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
- name: Checkout using ref (workflow_dispatch)
|
||||
if: github.event_name == 'workflow_dispatch' && github.event.inputs.ref != ''
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
ref: ${{ github.event.inputs.ref }}
|
||||
submodules: recursive
|
||||
- name: Checkout using PR ID (workflow_dispatch)
|
||||
if: github.event_name == 'workflow_dispatch' && github.event.inputs.pr_id != ''
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
ref: refs/pull/${{ github.event.inputs.pr_id }}/merge
|
||||
@@ -97,10 +97,10 @@ jobs:
|
||||
if: steps.check.outputs.python || steps.check.outputs.frontend
|
||||
uses: ./.github/actions/cached-dependencies
|
||||
with:
|
||||
run: testdata
|
||||
run: playwright_testdata
|
||||
- name: Setup Node.js
|
||||
if: steps.check.outputs.python || steps.check.outputs.frontend
|
||||
uses: actions/setup-node@v5
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version-file: './superset-frontend/.nvmrc'
|
||||
- name: Install npm dependencies
|
||||
@@ -133,7 +133,7 @@ jobs:
|
||||
SAFE_APP_ROOT=${APP_ROOT//\//_}
|
||||
echo "safe_app_root=$SAFE_APP_ROOT" >> $GITHUB_OUTPUT
|
||||
- name: Upload Playwright Artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v6
|
||||
if: failure()
|
||||
with:
|
||||
path: |
|
||||
|
||||
@@ -41,7 +41,7 @@ jobs:
|
||||
- 16379:6379
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
@@ -99,7 +99,7 @@ jobs:
|
||||
- 16379:6379
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
@@ -152,7 +152,7 @@ jobs:
|
||||
- 16379:6379
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
|
||||
@@ -48,7 +48,7 @@ jobs:
|
||||
- 16379:6379
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
@@ -108,7 +108,7 @@ jobs:
|
||||
- 16379:6379
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
|
||||
@@ -24,7 +24,7 @@ jobs:
|
||||
PYTHONPATH: ${{ github.workspace }}
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
|
||||
6
.github/workflows/superset-translations.yml
vendored
6
.github/workflows/superset-translations.yml
vendored
@@ -18,7 +18,7 @@ jobs:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
@@ -31,7 +31,7 @@ jobs:
|
||||
|
||||
- name: Setup Node.js
|
||||
if: steps.check.outputs.frontend
|
||||
uses: actions/setup-node@v5
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version-file: './superset-frontend/.nvmrc'
|
||||
- name: Install dependencies
|
||||
@@ -49,7 +49,7 @@ jobs:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
|
||||
2
.github/workflows/superset-websocket.yml
vendored
2
.github/workflows/superset-websocket.yml
vendored
@@ -21,7 +21,7 @@ jobs:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Install dependencies
|
||||
|
||||
2
.github/workflows/supersetbot.yml
vendored
2
.github/workflows/supersetbot.yml
vendored
@@ -38,7 +38,7 @@ jobs:
|
||||
});
|
||||
|
||||
- name: "Checkout ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
|
||||
8
.github/workflows/tag-release.yml
vendored
8
.github/workflows/tag-release.yml
vendored
@@ -47,7 +47,7 @@ jobs:
|
||||
steps:
|
||||
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
@@ -60,7 +60,7 @@ jobs:
|
||||
build: "true"
|
||||
|
||||
- name: Use Node.js 20
|
||||
uses: actions/setup-node@v5
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version: 20
|
||||
|
||||
@@ -107,12 +107,12 @@ jobs:
|
||||
steps:
|
||||
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Use Node.js 20
|
||||
uses: actions/setup-node@v5
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version: 20
|
||||
|
||||
|
||||
4
.github/workflows/tech-debt.yml
vendored
4
.github/workflows/tech-debt.yml
vendored
@@ -27,10 +27,10 @@ jobs:
|
||||
name: Generate Reports
|
||||
steps:
|
||||
- name: Checkout Repository
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@v5
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version-file: './superset-frontend/.nvmrc'
|
||||
|
||||
|
||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -138,3 +138,4 @@ PROJECT.md
|
||||
.claude_rc*
|
||||
.env.local
|
||||
oxc-custom-build/
|
||||
*.code-workspace
|
||||
|
||||
@@ -54,7 +54,7 @@ repos:
|
||||
exclude: ^helm/superset/templates/
|
||||
- id: debug-statements
|
||||
- id: end-of-file-fixer
|
||||
exclude: .*/lerna\.json$
|
||||
exclude: .*/lerna\.json$|^docs/static/img/logos/
|
||||
- id: trailing-whitespace
|
||||
exclude: ^.*\.(snap)
|
||||
args: ["--markdown-linebreak-ext=md"]
|
||||
@@ -106,12 +106,19 @@ repos:
|
||||
files: helm
|
||||
verbose: false
|
||||
args: ["--log-level", "error"]
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
rev: v0.9.7
|
||||
# Using local hooks ensures ruff version matches requirements/development.txt
|
||||
- repo: local
|
||||
hooks:
|
||||
- id: ruff-format
|
||||
name: ruff-format
|
||||
entry: ruff format
|
||||
language: system
|
||||
types: [python]
|
||||
- id: ruff
|
||||
args: [--fix]
|
||||
name: ruff
|
||||
entry: ruff check --fix --show-fixes
|
||||
language: system
|
||||
types: [python]
|
||||
- repo: local
|
||||
hooks:
|
||||
- id: pylint
|
||||
|
||||
@@ -53,7 +53,7 @@ extension-pkg-whitelist=pyarrow
|
||||
|
||||
[MESSAGES CONTROL]
|
||||
disable=all
|
||||
enable=disallowed-json-import,disallowed-sql-import,consider-using-transaction
|
||||
enable=json-import,disallowed-sql-import,consider-using-transaction
|
||||
|
||||
|
||||
[REPORTS]
|
||||
|
||||
@@ -76,6 +76,9 @@ snowflake.svg
|
||||
ydb.svg
|
||||
loading.svg
|
||||
|
||||
# docs third-party logos, i.e. docs/static/img/logos/*
|
||||
logos/*
|
||||
|
||||
# docs-related
|
||||
erd.puml
|
||||
erd.svg
|
||||
@@ -83,6 +86,7 @@ intro_header.txt
|
||||
|
||||
# for LLMs
|
||||
llm-context.md
|
||||
llms.txt
|
||||
AGENTS.md
|
||||
LLMS.md
|
||||
CLAUDE.md
|
||||
|
||||
@@ -49,3 +49,4 @@ under the License.
|
||||
- [4.1.3](./CHANGELOG/4.1.3.md)
|
||||
- [4.1.4](./CHANGELOG/4.1.4.md)
|
||||
- [5.0.0](./CHANGELOG/5.0.0.md)
|
||||
- [6.0.0](./CHANGELOG/6.0.0.md)
|
||||
|
||||
1062
CHANGELOG/6.0.0.md
Normal file
1062
CHANGELOG/6.0.0.md
Normal file
File diff suppressed because it is too large
Load Diff
@@ -160,7 +160,7 @@ ENV SUPERSET_HOME="/app/superset_home" \
|
||||
COPY --chmod=755 docker/entrypoints /app/docker/entrypoints
|
||||
|
||||
WORKDIR /app
|
||||
# Set up necessary directories and user
|
||||
# Set up necessary directories
|
||||
RUN mkdir -p \
|
||||
${PYTHONPATH} \
|
||||
superset/static \
|
||||
|
||||
27
Makefile
27
Makefile
@@ -18,7 +18,7 @@
|
||||
# Python version installed; we need 3.10-3.11
|
||||
PYTHON=`command -v python3.11 || command -v python3.10`
|
||||
|
||||
.PHONY: install superset venv pre-commit
|
||||
.PHONY: install superset venv pre-commit up down logs ps nuke ports open
|
||||
|
||||
install: superset pre-commit
|
||||
|
||||
@@ -112,3 +112,28 @@ report-celery-beat:
|
||||
|
||||
admin-user:
|
||||
superset fab create-admin
|
||||
|
||||
# Docker Compose with auto-assigned ports (for running multiple instances)
|
||||
up:
|
||||
./scripts/docker-compose-up.sh
|
||||
|
||||
up-detached:
|
||||
./scripts/docker-compose-up.sh -d
|
||||
|
||||
down:
|
||||
./scripts/docker-compose-up.sh down
|
||||
|
||||
logs:
|
||||
./scripts/docker-compose-up.sh logs -f
|
||||
|
||||
ps:
|
||||
./scripts/docker-compose-up.sh ps
|
||||
|
||||
nuke:
|
||||
./scripts/docker-compose-up.sh nuke
|
||||
|
||||
ports:
|
||||
./scripts/docker-compose-up.sh ports
|
||||
|
||||
open:
|
||||
./scripts/docker-compose-up.sh open
|
||||
|
||||
10
README.md
10
README.md
@@ -23,8 +23,12 @@ under the License.
|
||||
[](https://github.com/apache/superset/releases/latest)
|
||||
[](https://github.com/apache/superset/actions)
|
||||
[](https://badge.fury.io/py/apache_superset)
|
||||
[](https://codecov.io/github/apache/superset)
|
||||
[](https://pypi.python.org/pypi/apache_superset)
|
||||
[](https://github.com/apache/superset/stargazers)
|
||||
[](https://github.com/apache/superset/graphs/contributors)
|
||||
[](https://github.com/apache/superset/commits/master)
|
||||
[](https://github.com/apache/superset/issues)
|
||||
[](https://github.com/apache/superset/pulls)
|
||||
[](http://bit.ly/join-superset-slack)
|
||||
[](https://superset.apache.org)
|
||||
|
||||
@@ -51,7 +55,7 @@ A modern, enterprise-ready business intelligence web application.
|
||||
[**Get Involved**](#get-involved) |
|
||||
[**Contributor Guide**](#contributor-guide) |
|
||||
[**Resources**](#resources) |
|
||||
[**Organizations Using Superset**](https://github.com/apache/superset/blob/master/RESOURCES/INTHEWILD.md)
|
||||
[**Organizations Using Superset**](https://superset.apache.org/inTheWild)
|
||||
|
||||
## Why Superset?
|
||||
|
||||
@@ -167,7 +171,7 @@ how to set up a development environment.
|
||||
|
||||
## Resources
|
||||
|
||||
- [Superset "In the Wild"](https://github.com/apache/superset/blob/master/RESOURCES/INTHEWILD.md) - open a PR to add your org to the list!
|
||||
- [Superset "In the Wild"](https://superset.apache.org/inTheWild) - see who's using Superset, and [add your organization](https://github.com/apache/superset/edit/master/RESOURCES/INTHEWILD.yaml) to the list!
|
||||
- [Feature Flags](https://github.com/apache/superset/blob/master/RESOURCES/FEATURE_FLAGS.md) - the status of Superset's Feature Flags.
|
||||
- [Standard Roles](https://github.com/apache/superset/blob/master/RESOURCES/STANDARD_ROLES.md) - How RBAC permissions map to roles.
|
||||
- [Superset Wiki](https://github.com/apache/superset/wiki) - Tons of additional community resources: best practices, community content and other information.
|
||||
|
||||
@@ -68,7 +68,7 @@ These features flags are **safe for production**. They have been tested and will
|
||||
|
||||
### Flags retained for runtime configuration
|
||||
|
||||
Currently some of our feature flags act as dynamic configurations that can changed
|
||||
Currently some of our feature flags act as dynamic configurations that can change
|
||||
on the fly. This acts in contradiction with the typical ephemeral feature flag use case,
|
||||
where the flag is used to mature a feature, and eventually deprecated once the feature is
|
||||
solid. Eventually we'll likely refactor these under a more formal "dynamic configurations" managed
|
||||
|
||||
@@ -1,226 +0,0 @@
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
## Superset Users in the Wild
|
||||
|
||||
Here's a list of organizations, broken down into broad industry categories, that have taken the time to send a PR to let
|
||||
the world know they are using Apache Superset. If you are a user and want to be recognized,
|
||||
all you have to do is file a simple PR [like this one](https://github.com/apache/superset/pull/10122) — [just click here](https://github.com/apache/superset/edit/master/RESOURCES/INTHEWILD.md) to do so. If you think
|
||||
the categorization is inaccurate, please file a PR with your correction as well.
|
||||
Join our growing community!
|
||||
|
||||
### Sharing Economy
|
||||
|
||||
- [Airbnb](https://github.com/airbnb)
|
||||
- [Faasos](https://faasos.com/) [@shashanksingh]
|
||||
- [Free2Move](https://www.free2move.com/) [@PaoloTerzi]
|
||||
- [Hostnfly](https://www.hostnfly.com/) [@alexisrosuel]
|
||||
- [Lime](https://www.li.me/) [@cxmcc]
|
||||
- [Lyft](https://www.lyft.com/)
|
||||
- [Ontruck](https://www.ontruck.com/)
|
||||
|
||||
### Financial Services
|
||||
|
||||
- [Aktia Bank plc](https://www.aktia.com)
|
||||
- [American Express](https://www.americanexpress.com) [@TheLastSultan]
|
||||
- [bumper](https://www.bumper.co/) [@vasu-ram, @JamiePercival]
|
||||
- [Cape Crypto](https://capecrypto.com)
|
||||
- [Capital Service S.A.](https://capitalservice.pl) [@pkonarzewski]
|
||||
- [Clark.de](https://clark.de/)
|
||||
- [Europace](https://europace.de)
|
||||
- [KarrotPay](https://www.daangnpay.com/)
|
||||
- [Remita](https://remita.net) [@mujibishola]
|
||||
- [Taveo](https://www.taveo.com) [@codek]
|
||||
- [Unit](https://www.unit.co/about-us) [@amitmiran137]
|
||||
- [Wise](https://wise.com) [@koszti]
|
||||
- [Xendit](https://xendit.co/) [@LieAlbertTriAdrian]
|
||||
- [Cover Genius](https://covergenius.com/)
|
||||
|
||||
### Gaming
|
||||
|
||||
- [Popoko VM Games Studio](https://popoko.live)
|
||||
|
||||
### E-Commerce
|
||||
|
||||
- [AiHello](https://www.aihello.com) [@ganeshkrishnan1]
|
||||
- [Bazaar Technologies](https://www.bazaartech.com) [@umair-abro]
|
||||
- [Dragonpass](https://www.dragonpass.com.cn/) [@zhxjdwh]
|
||||
- [Dropit Shopping](https://www.dropit.shop/) [@dropit-dev]
|
||||
- [Fanatics](https://www.fanatics.com/) [@coderfender]
|
||||
- [Fordeal](https://www.fordeal.com) [@Renkai]
|
||||
- [Fynd](https://www.fynd.com/) [@darpanjain07]
|
||||
- [GFG - Global Fashion Group](https://global-fashion-group.com) [@ksaagariconic]
|
||||
- [GoTo/Gojek](https://www.gojek.io/) [@gwthm-in]
|
||||
- [HuiShouBao](https://www.huishoubao.com/) [@Yukinoshita-Yukino]
|
||||
- [Now](https://www.now.vn/) [@davidkohcw]
|
||||
- [Qunar](https://www.qunar.com/) [@flametest]
|
||||
- [Rakuten Viki](https://www.viki.com)
|
||||
- [Shopee](https://shopee.sg) [@xiaohanyu]
|
||||
- [Shopkick](https://www.shopkick.com) [@LAlbertalli]
|
||||
- [ShopUp](https://www.shopup.org/) [@gwthm-in]
|
||||
- [Tails.com](https://tails.com/gb/) [@alanmcruickshank]
|
||||
- [THE ICONIC](https://theiconic.com.au/) [@ksaagariconic]
|
||||
- [Utair](https://www.utair.ru) [@utair-digital]
|
||||
- [VkusVill](https://vkusvill.ru/) [@ETselikov]
|
||||
- [Zalando](https://www.zalando.com) [@dmigo]
|
||||
- [Zalora](https://www.zalora.com) [@ksaagariconic]
|
||||
- [Zepto](https://www.zeptonow.com/) [@gwthm-in]
|
||||
|
||||
### Enterprise Technology
|
||||
|
||||
- [A3Data](https://a3data.com.br) [@neylsoncrepalde]
|
||||
- [Analytics Aura](https://analyticsaura.com/) [@Analytics-Aura]
|
||||
- [Apollo GraphQL](https://www.apollographql.com/) [@evans]
|
||||
- [Astronomer](https://www.astronomer.io) [@ryw]
|
||||
- [Avesta Technologies](https://avestatechnologies.com/) [@TheRum]
|
||||
- [Caizin](https://caizin.com/) [@tejaskatariya]
|
||||
- [Canonical](https://canonical.com)
|
||||
- [Careem](https://www.careem.com/) [@samraHanif0340]
|
||||
- [Cloudsmith](https://cloudsmith.io) [@alancarson]
|
||||
- [Cyberhaven](https://www.cyberhaven.com/) [@toliver-ch]
|
||||
- [Deepomatic](https://deepomatic.com/) [@Zanoellia]
|
||||
- [Dial Once](https://www.dial-once.com/)
|
||||
- [Dremio](https://dremio.com) [@narendrans]
|
||||
- [EFinance](https://www.efinance.com.eg) [@habeeb556]
|
||||
- [Elestio](https://elest.io/) [@kaiwalyakoparkar]
|
||||
- [ELMO Cloud HR & Payroll](https://elmosoftware.com.au/)
|
||||
- [Endress+Hauser](https://www.endress.com/) [@rumbin]
|
||||
- [FBK - ICT center](https://ict.fbk.eu)
|
||||
- [Formbricks](https://formbricks.com)
|
||||
- [Gavagai](https://gavagai.io) [@gavagai-corp]
|
||||
- [GfK Data Lab](https://www.gfk.com/home) [@mherr]
|
||||
- [HPE](https://www.hpe.com/in/en/home.html) [@anmol-hpe]
|
||||
- [Hydrolix](https://www.hydrolix.io/)
|
||||
- [Intercom](https://www.intercom.com/) [@kate-gallo]
|
||||
- [jampp](https://jampp.com/)
|
||||
- [Konfío](https://konfio.mx) [@uis-rodriguez]
|
||||
- [Mainstrat](https://mainstrat.com/)
|
||||
- [mishmash io](https://mishmash.io/) [@mishmash-io]
|
||||
- [Myra Labs](https://www.myralabs.com/) [@viksit]
|
||||
- [Nielsen](https://www.nielsen.com/) [@amitNielsen]
|
||||
- [Ona](https://ona.io) [@pld]
|
||||
- [Orange](https://www.orange.com) [@icsu]
|
||||
- [Oslandia](https://oslandia.com)
|
||||
- [Oxylabs](https://oxylabs.io/) [@rytis-ulys]
|
||||
- [Peak AI](https://www.peak.ai/) [@azhar22k]
|
||||
- [PeopleDoc](https://www.people-doc.com) [@rodo]
|
||||
- [PlaidCloud](https://www.plaidcloud.com)
|
||||
- [Preset, Inc.](https://preset.io)
|
||||
- [PubNub](https://pubnub.com) [@jzucker2]
|
||||
- [ReadyTech](https://www.readytech.io)
|
||||
- [Reward Gateway](https://www.rewardgateway.com)
|
||||
- [RIADVICE](https://riadvice.tn) [@riadvice]
|
||||
- [ScopeAI](https://www.getscopeai.com) [@iloveluce]
|
||||
- [shipmnts](https://shipmnts.com)
|
||||
- [Showmax](https://showmax.com) [@bobek]
|
||||
- [SingleStore](https://www.singlestore.com/)
|
||||
- [TechAudit](https://www.techaudit.info) [@ETselikov]
|
||||
- [Tenable](https://www.tenable.com) [@dflionis]
|
||||
- [Tentacle](https://www.linkedin.com/company/tentacle-cmi/) [@jdclarke5]
|
||||
- [timbr.ai](https://timbr.ai/) [@semantiDan]
|
||||
- [Tobii](https://www.tobii.com/) [@dwa]
|
||||
- [Tooploox](https://www.tooploox.com/) [@jakubczaplicki]
|
||||
- [Unvired](https://unvired.com) [@srinisubramanian]
|
||||
- [Virtuoso QA](https://www.virtuosoqa.com)
|
||||
- [Whale](https://whale.im)
|
||||
- [Windsor.ai](https://www.windsor.ai/) [@octaviancorlade]
|
||||
- [WinWin Network马上赢](https://brandct.cn/) [@wenbinye]
|
||||
- [Zeta](https://www.zeta.tech/) [@shaikidris]
|
||||
|
||||
### Media & Entertainment
|
||||
|
||||
- [6play](https://www.6play.fr) [@CoryChaplin]
|
||||
- [bilibili](https://www.bilibili.com) [@Moinheart]
|
||||
- [BurdaForward](https://www.burda-forward.de/en/)
|
||||
- [Douban](https://www.douban.com/) [@luchuan]
|
||||
- [Kuaishou](https://www.kuaishou.com/) [@zhaoyu89730105]
|
||||
- [Netflix](https://www.netflix.com/)
|
||||
- [Prensa Iberica](https://www.prensaiberica.es/) [@zamar-roura]
|
||||
- [TME QQMUSIC/WESING](https://www.tencentmusic.com/) [@shenyuanli,@marklaw]
|
||||
- [Xite](https://xite.com/) [@shashankkoppar]
|
||||
- [Zaihang](https://www.zaih.com/)
|
||||
|
||||
### Education
|
||||
|
||||
- [Aveti Learning](https://avetilearning.com/) [@TheShubhendra]
|
||||
- [Brilliant.org](https://brilliant.org/)
|
||||
- [Open edX](https://openedx.org/)
|
||||
- [Platzi.com](https://platzi.com/)
|
||||
- [Sunbird](https://www.sunbird.org/) [@eksteporg]
|
||||
- [The GRAPH Network](https://thegraphnetwork.org/) [@fccoelho]
|
||||
- [Udemy](https://www.udemy.com/) [@sungjuly]
|
||||
- [VIPKID](https://www.vipkid.com.cn/) [@illpanda]
|
||||
- [WikiMedia Foundation](https://wikimediafoundation.org) [@vg]
|
||||
|
||||
### Energy
|
||||
|
||||
- [Airboxlab](https://foobot.io) [@antoine-galataud]
|
||||
- [DouroECI](https://www.douroeci.com/) [@nunohelibeires]
|
||||
- [Safaricom](https://www.safaricom.co.ke/) [@mmutiso]
|
||||
- [Scoot](https://scoot.co/) [@haaspt]
|
||||
- [Wattbewerb](https://wattbewerb.de/) [@wattbewerb]
|
||||
|
||||
### Healthcare
|
||||
|
||||
- [Amino](https://amino.com) [@shkr]
|
||||
- [Bluesquare](https://www.bluesquarehub.com/) [@madewulf]
|
||||
- [Care](https://www.getcare.io/) [@alandao2021]
|
||||
- [Living Goods](https://www.livinggoods.org) [@chelule]
|
||||
- [Maieutical Labs](https://maieuticallabs.it) [@xrmx]
|
||||
- [Medic](https://medic.org) [@1yuv]
|
||||
- [REDCap Cloud](https://www.redcapcloud.com/)
|
||||
- [TrustMedis](https://trustmedis.com/) [@famasya]
|
||||
- [WeSure](https://www.wesure.cn/)
|
||||
- [2070Health](https://2070health.com/)
|
||||
|
||||
### HR / Staffing
|
||||
|
||||
- [Swile](https://www.swile.co/) [@PaoloTerzi]
|
||||
- [Symmetrics](https://www.symmetrics.fyi)
|
||||
- [bluquist](https://bluquist.com/)
|
||||
|
||||
### Government
|
||||
|
||||
- [City of Ann Arbor, MI](https://www.a2gov.org/) [@sfirke]
|
||||
- [RIS3 Strategy of CZ, MIT CR](https://www.ris3.cz/) [@RIS3CZ]
|
||||
- [NRLM - Sarathi, India](https://pib.gov.in/PressReleasePage.aspx?PRID=1999586)
|
||||
|
||||
### Travel
|
||||
|
||||
- [Agoda](https://www.agoda.com/) [@lostseaway, @maiake, @obombayo]
|
||||
- [HomeToGo](https://hometogo.com/) [@pedromartinsteenstrup]
|
||||
- [Skyscanner](https://www.skyscanner.net/) [@cleslie, @stanhoucke]
|
||||
|
||||
### Others
|
||||
|
||||
- [10Web](https://10web.io/)
|
||||
- [AI inside](https://inside.ai/en/)
|
||||
- [Automattic](https://automattic.com/) [@Khrol, @Usiel]
|
||||
- [Dropbox](https://www.dropbox.com/) [@bkyryliuk]
|
||||
- [Flowbird](https://flowbird.com) [@EmmanuelCbd]
|
||||
- [GEOTAB](https://www.geotab.com) [@JZ6]
|
||||
- [Grassroot](https://www.grassrootinstitute.org/)
|
||||
- [Increff](https://www.increff.com/) [@ishansinghania]
|
||||
- [komoot](https://www.komoot.com/) [@christophlingg]
|
||||
- [Let's Roam](https://www.letsroam.com/)
|
||||
- [Machrent SA](https://www.machrent.com/)
|
||||
- [Onebeat](https://1beat.com/) [@GuyAttia]
|
||||
- [X](https://x.com/)
|
||||
- [VLMedia](https://www.vlmedia.com.tr/) [@ibotheperfect]
|
||||
- [Yahoo!](https://yahoo.com/)
|
||||
678
RESOURCES/INTHEWILD.yaml
Normal file
678
RESOURCES/INTHEWILD.yaml
Normal file
@@ -0,0 +1,678 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
# Apache Superset Users in the Wild
|
||||
#
|
||||
# To add your organization:
|
||||
# 1. Find the appropriate category (or add a new one)
|
||||
# 2. Add an entry with your organization details
|
||||
# 3. Optionally add a logo file to docs/static/img/logos/
|
||||
#
|
||||
# Required fields:
|
||||
# - name: Your organization name
|
||||
# - url: Link to your organization's website
|
||||
#
|
||||
# Optional fields:
|
||||
# - logo: Filename of logo in docs/static/img/logos/ (e.g., "mycompany.svg")
|
||||
# - contributors: List of GitHub usernames who contributed (e.g., ["@username"])
|
||||
|
||||
categories:
|
||||
Sharing Economy:
|
||||
- name: Airbnb
|
||||
url: https://github.com/airbnb
|
||||
|
||||
- name: Faasos
|
||||
url: https://faasos.com/
|
||||
contributors: ["@shashanksingh"]
|
||||
|
||||
- name: Free2Move
|
||||
url: https://www.free2move.com/
|
||||
contributors: ["@PaoloTerzi"]
|
||||
|
||||
- name: Hostnfly
|
||||
url: https://www.hostnfly.com/
|
||||
contributors: ["@alexisrosuel"]
|
||||
|
||||
- name: Lime
|
||||
url: https://www.li.me/
|
||||
contributors: ["@cxmcc"]
|
||||
|
||||
- name: Lyft
|
||||
url: https://www.lyft.com/
|
||||
|
||||
- name: Ontruck
|
||||
url: https://www.ontruck.com/
|
||||
|
||||
Financial Services:
|
||||
- name: Aktia Bank plc
|
||||
url: https://www.aktia.com
|
||||
|
||||
- name: American Express
|
||||
url: https://www.americanexpress.com
|
||||
contributors: ["@TheLastSultan"]
|
||||
|
||||
- name: bumper
|
||||
url: https://www.bumper.co/
|
||||
contributors: ["@vasu-ram", "@JamiePercival"]
|
||||
|
||||
- name: Cape Crypto
|
||||
url: https://capecrypto.com
|
||||
|
||||
- name: Capital Service S.A.
|
||||
url: https://capitalservice.pl
|
||||
contributors: ["@pkonarzewski"]
|
||||
|
||||
- name: Clark.de
|
||||
url: https://clark.de/
|
||||
|
||||
- name: EnquiryLabs
|
||||
url: https://www.enquirylabs.co.uk
|
||||
|
||||
- name: Europace
|
||||
url: https://europace.de
|
||||
|
||||
- name: KarrotPay
|
||||
url: https://www.daangnpay.com/
|
||||
|
||||
- name: Remita
|
||||
url: https://remita.net
|
||||
contributors: ["@mujibishola"]
|
||||
|
||||
- name: Taveo
|
||||
url: https://www.taveo.com
|
||||
contributors: ["@codek"]
|
||||
|
||||
- name: Unit
|
||||
url: https://www.unit.co/about-us
|
||||
contributors: ["@amitmiran137"]
|
||||
|
||||
- name: Wise
|
||||
url: https://wise.com
|
||||
contributors: ["@koszti"]
|
||||
|
||||
- name: Xendit
|
||||
url: https://xendit.co/
|
||||
contributors: ["@LieAlbertTriAdrian"]
|
||||
|
||||
- name: Cover Genius
|
||||
url: https://covergenius.com/
|
||||
|
||||
Gaming:
|
||||
- name: Popoko VM Games Studio
|
||||
url: https://popoko.live
|
||||
|
||||
E-Commerce:
|
||||
- name: AiHello
|
||||
url: https://www.aihello.com
|
||||
contributors: ["@ganeshkrishnan1"]
|
||||
|
||||
- name: Bazaar Technologies
|
||||
url: https://www.bazaartech.com
|
||||
contributors: ["@umair-abro"]
|
||||
|
||||
- name: Blinkit
|
||||
url: https://www.blinkit.com/
|
||||
contributors: ["@amsharm2"]
|
||||
|
||||
- name: Dragonpass
|
||||
url: https://www.dragonpass.com.cn/
|
||||
contributors: ["@zhxjdwh"]
|
||||
|
||||
- name: Dropit Shopping
|
||||
url: https://www.dropit.shop/
|
||||
contributors: ["@dropit-dev"]
|
||||
|
||||
- name: Fanatics
|
||||
url: https://www.fanatics.com/
|
||||
contributors: ["@coderfender"]
|
||||
|
||||
- name: Fordeal
|
||||
url: https://www.fordeal.com
|
||||
contributors: ["@Renkai"]
|
||||
|
||||
- name: Fynd
|
||||
url: https://www.fynd.com/
|
||||
contributors: ["@darpanjain07"]
|
||||
|
||||
- name: GFG - Global Fashion Group
|
||||
url: https://global-fashion-group.com
|
||||
contributors: ["@ksaagariconic"]
|
||||
|
||||
- name: GoTo/Gojek
|
||||
url: https://www.gojek.io/
|
||||
contributors: ["@gwthm-in"]
|
||||
|
||||
- name: HuiShouBao
|
||||
url: https://www.huishoubao.com/
|
||||
contributors: ["@Yukinoshita-Yukino"]
|
||||
|
||||
- name: Now
|
||||
url: https://www.now.vn/
|
||||
contributors: ["@davidkohcw"]
|
||||
|
||||
- name: Qunar
|
||||
url: https://www.qunar.com/
|
||||
contributors: ["@flametest"]
|
||||
|
||||
- name: Rakuten Viki
|
||||
url: https://www.viki.com
|
||||
|
||||
- name: Shopee
|
||||
url: https://shopee.sg
|
||||
contributors: ["@xiaohanyu"]
|
||||
|
||||
- name: Shopkick
|
||||
url: https://www.shopkick.com
|
||||
contributors: ["@LAlbertalli"]
|
||||
|
||||
- name: ShopUp
|
||||
url: https://www.shopup.org/
|
||||
contributors: ["@gwthm-in"]
|
||||
|
||||
- name: Tails.com
|
||||
url: https://tails.com/gb/
|
||||
contributors: ["@alanmcruickshank"]
|
||||
|
||||
- name: THE ICONIC
|
||||
url: https://theiconic.com.au/
|
||||
contributors: ["@ksaagariconic"]
|
||||
|
||||
- name: Utair
|
||||
url: https://www.utair.ru
|
||||
contributors: ["@utair-digital"]
|
||||
|
||||
- name: VkusVill
|
||||
url: https://vkusvill.ru/
|
||||
contributors: ["@ETselikov"]
|
||||
|
||||
- name: Zalando
|
||||
url: https://www.zalando.com
|
||||
contributors: ["@dmigo"]
|
||||
|
||||
- name: Zalora
|
||||
url: https://www.zalora.com
|
||||
contributors: ["@ksaagariconic"]
|
||||
|
||||
- name: Zepto
|
||||
url: https://www.zeptonow.com/
|
||||
contributors: ["@gwthm-in"]
|
||||
|
||||
Enterprise Technology:
|
||||
- name: A3Data
|
||||
url: https://a3data.com.br
|
||||
contributors: ["@neylsoncrepalde"]
|
||||
|
||||
- name: Analytics Aura
|
||||
url: https://analyticsaura.com/
|
||||
contributors: ["@Analytics-Aura"]
|
||||
|
||||
- name: Apollo GraphQL
|
||||
url: https://www.apollographql.com/
|
||||
contributors: ["@evans"]
|
||||
|
||||
- name: Astronomer
|
||||
url: https://www.astronomer.io
|
||||
contributors: ["@ryw"]
|
||||
|
||||
- name: Avesta Technologies
|
||||
url: https://avestatechnologies.com/
|
||||
contributors: ["@TheRum"]
|
||||
|
||||
- name: Caizin
|
||||
url: https://caizin.com/
|
||||
contributors: ["@tejaskatariya"]
|
||||
|
||||
- name: Canonical
|
||||
url: https://canonical.com
|
||||
|
||||
- name: Careem
|
||||
url: https://www.careem.com/
|
||||
contributors: ["@samraHanif0340"]
|
||||
|
||||
- name: Cloudsmith
|
||||
url: https://cloudsmith.io
|
||||
contributors: ["@alancarson"]
|
||||
|
||||
- name: Cyberhaven
|
||||
url: https://www.cyberhaven.com/
|
||||
contributors: ["@toliver-ch"]
|
||||
|
||||
- name: Deepomatic
|
||||
url: https://deepomatic.com/
|
||||
contributors: ["@Zanoellia"]
|
||||
|
||||
- name: Dial Once
|
||||
url: https://www.dial-once.com/
|
||||
|
||||
- name: Dremio
|
||||
url: https://dremio.com
|
||||
contributors: ["@narendrans"]
|
||||
|
||||
- name: EFinance
|
||||
url: https://www.efinance.com.eg
|
||||
contributors: ["@habeeb556"]
|
||||
|
||||
- name: Elestio
|
||||
url: https://elest.io/
|
||||
contributors: ["@kaiwalyakoparkar"]
|
||||
|
||||
- name: ELMO Cloud HR & Payroll
|
||||
url: https://elmosoftware.com.au/
|
||||
|
||||
- name: Endress+Hauser
|
||||
url: https://www.endress.com/
|
||||
contributors: ["@rumbin"]
|
||||
|
||||
- name: FBK - ICT center
|
||||
url: https://ict.fbk.eu
|
||||
|
||||
- name: Formbricks
|
||||
url: https://formbricks.com
|
||||
|
||||
- name: Gavagai
|
||||
url: https://gavagai.io
|
||||
contributors: ["@gavagai-corp"]
|
||||
|
||||
- name: GfK Data Lab
|
||||
url: https://www.gfk.com/home
|
||||
contributors: ["@mherr"]
|
||||
|
||||
- name: HPE
|
||||
url: https://www.hpe.com/in/en/home.html
|
||||
contributors: ["@anmol-hpe"]
|
||||
|
||||
- name: Hydrolix
|
||||
url: https://www.hydrolix.io/
|
||||
|
||||
- name: Intercom
|
||||
url: https://www.intercom.com/
|
||||
contributors: ["@kate-gallo"]
|
||||
|
||||
- name: jampp
|
||||
url: https://jampp.com/
|
||||
|
||||
- name: Konfío
|
||||
url: https://konfio.mx
|
||||
contributors: ["@uis-rodriguez"]
|
||||
|
||||
- name: Mainstrat
|
||||
url: https://mainstrat.com/
|
||||
|
||||
- name: mishmash io
|
||||
url: https://mishmash.io/
|
||||
contributors: ["@mishmash-io"]
|
||||
|
||||
- name: Myra Labs
|
||||
url: https://www.myralabs.com/
|
||||
contributors: ["@viksit"]
|
||||
|
||||
- name: Nielsen
|
||||
url: https://www.nielsen.com/
|
||||
contributors: ["@amitNielsen"]
|
||||
|
||||
- name: Ona
|
||||
url: https://ona.io
|
||||
contributors: ["@pld"]
|
||||
|
||||
- name: Orange
|
||||
url: https://www.orange.com
|
||||
contributors: ["@icsu"]
|
||||
|
||||
- name: Oslandia
|
||||
url: https://oslandia.com
|
||||
|
||||
- name: Oxylabs
|
||||
url: https://oxylabs.io/
|
||||
contributors: ["@rytis-ulys"]
|
||||
|
||||
- name: Peak AI
|
||||
url: https://www.peak.ai/
|
||||
contributors: ["@azhar22k"]
|
||||
|
||||
- name: PeopleDoc
|
||||
url: https://www.people-doc.com
|
||||
contributors: ["@rodo"]
|
||||
|
||||
- name: PlaidCloud
|
||||
url: https://plaidcloud.com
|
||||
logo: plaidcloud.svg
|
||||
contributors: ["@rad-pat"]
|
||||
|
||||
- name: Preset, Inc.
|
||||
url: https://preset.io
|
||||
logo: preset.svg
|
||||
contributors: ["@mistercrunch", "@betodealmeida", "@dpgaspar", "@rusackas", "@sadpandajoe", "@Vitor-Avila", "@kgabryje", "@geido", "@eschutho", "@Antonio-RiveroMartnez", "@yousoph"]
|
||||
|
||||
- name: PubNub
|
||||
url: https://pubnub.com
|
||||
contributors: ["@jzucker2"]
|
||||
|
||||
- name: ReadyTech
|
||||
url: https://www.readytech.io
|
||||
|
||||
- name: Reward Gateway
|
||||
url: https://www.rewardgateway.com
|
||||
|
||||
- name: RIADVICE
|
||||
url: https://riadvice.tn
|
||||
contributors: ["@riadvice"]
|
||||
|
||||
- name: ScopeAI
|
||||
url: https://www.getscopeai.com
|
||||
contributors: ["@iloveluce"]
|
||||
|
||||
- name: shipmnts
|
||||
url: https://shipmnts.com
|
||||
|
||||
- name: Showmax
|
||||
url: https://showmax.com
|
||||
contributors: ["@bobek"]
|
||||
|
||||
- name: SingleStore
|
||||
url: https://www.singlestore.com/
|
||||
|
||||
- name: TechAudit
|
||||
url: https://www.techaudit.info
|
||||
contributors: ["@ETselikov"]
|
||||
|
||||
- name: Tenable
|
||||
url: https://www.tenable.com
|
||||
contributors: ["@dflionis"]
|
||||
|
||||
- name: Tentacle
|
||||
url: https://www.linkedin.com/company/tentacle-cmi/
|
||||
contributors: ["@jdclarke5"]
|
||||
|
||||
- name: timbr.ai
|
||||
url: https://timbr.ai/
|
||||
contributors: ["@semantiDan"]
|
||||
|
||||
- name: Tobii
|
||||
url: https://www.tobii.com/
|
||||
contributors: ["@dwa"]
|
||||
|
||||
- name: Tooploox
|
||||
url: https://www.tooploox.com/
|
||||
contributors: ["@jakubczaplicki"]
|
||||
|
||||
- name: Unvired
|
||||
url: https://unvired.com
|
||||
contributors: ["@srinisubramanian"]
|
||||
|
||||
- name: UserGuiding
|
||||
url: https://userguiding.com/
|
||||
logo: userguiding.svg
|
||||
contributors: ["@tzercin"]
|
||||
|
||||
- name: Virtuoso QA
|
||||
url: https://www.virtuosoqa.com
|
||||
|
||||
- name: Whale
|
||||
url: https://whale.im
|
||||
|
||||
- name: Windsor.ai
|
||||
url: https://www.windsor.ai/
|
||||
contributors: ["@octaviancorlade"]
|
||||
|
||||
- name: WinWin Network马上赢
|
||||
url: https://brandct.cn/
|
||||
contributors: ["@wenbinye"]
|
||||
|
||||
- name: Zeta
|
||||
url: https://www.zeta.tech/
|
||||
contributors: ["@shaikidris"]
|
||||
|
||||
Media & Entertainment:
|
||||
- name: 6play
|
||||
url: https://www.6play.fr
|
||||
contributors: ["@CoryChaplin"]
|
||||
|
||||
- name: bilibili
|
||||
url: https://www.bilibili.com
|
||||
contributors: ["@Moinheart"]
|
||||
|
||||
- name: BurdaForward
|
||||
url: https://www.burda-forward.de/en/
|
||||
|
||||
- name: Douban
|
||||
url: https://www.douban.com/
|
||||
contributors: ["@luchuan"]
|
||||
|
||||
- name: Kuaishou
|
||||
url: https://www.kuaishou.com/
|
||||
contributors: ["@zhaoyu89730105"]
|
||||
|
||||
- name: Netflix
|
||||
url: https://www.netflix.com/
|
||||
|
||||
- name: Prensa Iberica
|
||||
url: https://www.prensaiberica.es/
|
||||
contributors: ["@zamar-roura"]
|
||||
|
||||
- name: TME QQMUSIC/WESING
|
||||
url: https://www.tencentmusic.com/
|
||||
contributors: ["@shenyuanli", "@marklaw"]
|
||||
|
||||
- name: Xite
|
||||
url: https://xite.com/
|
||||
contributors: ["@shashankkoppar"]
|
||||
|
||||
- name: Zaihang
|
||||
url: https://www.zaih.com/
|
||||
|
||||
Education:
|
||||
- name: Aveti Learning
|
||||
url: https://avetilearning.com/
|
||||
contributors: ["@TheShubhendra"]
|
||||
|
||||
- name: Brilliant.org
|
||||
url: https://brilliant.org/
|
||||
|
||||
- name: Cirrus Assessment
|
||||
url: https://cirrusassessment.com/
|
||||
logo: cirrus.svg
|
||||
contributors: ["@jeroenhabets", "@ddmm-white", "@paulrocost"]
|
||||
|
||||
- name: Open edX
|
||||
url: https://openedx.org/
|
||||
|
||||
- name: Platzi.com
|
||||
url: https://platzi.com/
|
||||
|
||||
- name: Sunbird
|
||||
url: https://www.sunbird.org/
|
||||
contributors: ["@eksteporg"]
|
||||
|
||||
- name: The GRAPH Network
|
||||
url: https://thegraphnetwork.org/
|
||||
contributors: ["@fccoelho"]
|
||||
|
||||
- name: Udemy
|
||||
url: https://www.udemy.com/
|
||||
contributors: ["@sungjuly"]
|
||||
|
||||
- name: VIPKID
|
||||
url: https://www.vipkid.com.cn/
|
||||
contributors: ["@illpanda"]
|
||||
|
||||
- name: WikiMedia Foundation
|
||||
url: https://wikimediafoundation.org
|
||||
contributors: ["@vg"]
|
||||
|
||||
Energy:
|
||||
- name: Airboxlab
|
||||
url: https://foobot.io
|
||||
contributors: ["@antoine-galataud"]
|
||||
|
||||
- name: DouroECI
|
||||
url: https://www.douroeci.com/
|
||||
contributors: ["@nunohelibeires"]
|
||||
|
||||
- name: Safaricom
|
||||
url: https://www.safaricom.co.ke/
|
||||
contributors: ["@mmutiso"]
|
||||
|
||||
- name: Scoot
|
||||
url: https://scoot.co/
|
||||
contributors: ["@haaspt"]
|
||||
|
||||
- name: Wattbewerb
|
||||
url: https://wattbewerb.de/
|
||||
contributors: ["@wattbewerb"]
|
||||
|
||||
- name: Rogow
|
||||
url: https://rogow.com.br/
|
||||
contributors: ["@nilmonto"]
|
||||
|
||||
Healthcare:
|
||||
- name: Amino
|
||||
url: https://amino.com
|
||||
contributors: ["@shkr"]
|
||||
|
||||
- name: Bluesquare
|
||||
url: https://www.bluesquarehub.com/
|
||||
contributors: ["@madewulf"]
|
||||
|
||||
- name: Care
|
||||
url: https://www.getcare.io/
|
||||
contributors: ["@alandao2021"]
|
||||
|
||||
- name: Living Goods
|
||||
url: https://www.livinggoods.org
|
||||
contributors: ["@chelule"]
|
||||
|
||||
- name: Maieutical Labs
|
||||
url: https://maieuticallabs.it
|
||||
contributors: ["@xrmx"]
|
||||
|
||||
- name: Medic
|
||||
url: https://medic.org
|
||||
contributors: ["@1yuv"]
|
||||
|
||||
- name: REDCap Cloud
|
||||
url: https://www.redcapcloud.com/
|
||||
|
||||
- name: TrustMedis
|
||||
url: https://trustmedis.com/
|
||||
contributors: ["@famasya"]
|
||||
|
||||
- name: WeSure
|
||||
url: https://www.wesure.cn/
|
||||
|
||||
- name: 2070Health
|
||||
url: https://2070health.com/
|
||||
|
||||
HR / Staffing:
|
||||
- name: Swile
|
||||
url: https://www.swile.co/
|
||||
contributors: ["@PaoloTerzi"]
|
||||
|
||||
- name: Symmetrics
|
||||
url: https://www.symmetrics.fyi
|
||||
|
||||
- name: bluquist
|
||||
url: https://bluquist.com/
|
||||
|
||||
Government:
|
||||
- name: City of Ann Arbor, MI
|
||||
url: https://www.a2gov.org/
|
||||
contributors: ["@sfirke"]
|
||||
|
||||
- name: RIS3 Strategy of CZ, MIT CR
|
||||
url: https://www.ris3.cz/
|
||||
contributors: ["@RIS3CZ"]
|
||||
|
||||
- name: NRLM - Sarathi, India
|
||||
url: https://pib.gov.in/PressReleasePage.aspx?PRID=1999586
|
||||
|
||||
Mobile Software:
|
||||
- name: VLMedia
|
||||
url: https://www.vlmedia.com.tr
|
||||
logo: vlmedia.svg
|
||||
contributors: ["@iercan"]
|
||||
|
||||
Travel:
|
||||
- name: Agoda
|
||||
url: https://www.agoda.com/
|
||||
contributors: ["@lostseaway", "@maiake", "@obombayo"]
|
||||
|
||||
- name: HomeToGo
|
||||
url: https://hometogo.com/
|
||||
contributors: ["@pedromartinsteenstrup"]
|
||||
|
||||
- name: Skyscanner
|
||||
url: https://www.skyscanner.net/
|
||||
contributors: ["@cleslie", "@stanhoucke"]
|
||||
|
||||
Logistics:
|
||||
- name: Stockarea
|
||||
url: https://stockarea.io
|
||||
|
||||
Others:
|
||||
- name: 10Web
|
||||
url: https://10web.io/
|
||||
|
||||
- name: AI inside
|
||||
url: https://inside.ai/en/
|
||||
|
||||
- name: Automattic
|
||||
url: https://automattic.com/
|
||||
contributors: ["@Khrol", "@Usiel"]
|
||||
|
||||
- name: Dropbox
|
||||
url: https://www.dropbox.com/
|
||||
contributors: ["@bkyryliuk"]
|
||||
|
||||
- name: Flowbird
|
||||
url: https://flowbird.com
|
||||
contributors: ["@EmmanuelCbd"]
|
||||
|
||||
- name: GEOTAB
|
||||
url: https://www.geotab.com
|
||||
contributors: ["@JZ6"]
|
||||
|
||||
- name: Grassroot
|
||||
url: https://www.grassrootinstitute.org/
|
||||
|
||||
- name: HOLLYLAND猛玛
|
||||
url: https://www.hollyland.com
|
||||
logo: hollyland猛玛.svg
|
||||
contributors: ["@hlyda0601"]
|
||||
|
||||
- name: Increff
|
||||
url: https://www.increff.com/
|
||||
contributors: ["@ishansinghania"]
|
||||
|
||||
- name: komoot
|
||||
url: https://www.komoot.com/
|
||||
contributors: ["@christophlingg"]
|
||||
|
||||
- name: Let's Roam
|
||||
url: https://www.letsroam.com/
|
||||
|
||||
- name: Machrent SA
|
||||
url: https://www.machrent.com/
|
||||
|
||||
- name: Onebeat
|
||||
url: https://1beat.com/
|
||||
contributors: ["@GuyAttia"]
|
||||
|
||||
- name: X
|
||||
url: https://x.com/
|
||||
|
||||
- name: Yahoo!
|
||||
url: https://yahoo.com/
|
||||
@@ -17,192 +17,193 @@ specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
| |Admin|Alpha|Gamma|SQL_LAB|
|
||||
|--------------------------------------------------|---|---|---|---|
|
||||
| Permission/role description |Admins have all possible rights, including granting or revoking rights from other users and altering other people’s slices and dashboards.|Alpha users have access to all data sources, but they cannot grant or revoke access from other users. They are also limited to altering the objects that they own. Alpha users can add and alter data sources.|Gamma users have limited access. They can only consume data coming from data sources they have been given access to through another complementary role. They only have access to view the slices and dashboards made from data sources that they have access to. Currently Gamma users are not able to alter or add data sources. We assume that they are mostly content consumers, though they can create slices and dashboards.|The sql_lab role grants access to SQL Lab. Note that while Admin users have access to all databases by default, both Alpha and Gamma users need to be given access on a per database basis.||
|
||||
| can read on SavedQuery |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
|
||||
| can write on SavedQuery |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
|
||||
| can read on CssTemplate |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can write on CssTemplate |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can read on ReportSchedule |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can write on ReportSchedule |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can read on Chart |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can write on Chart |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can read on Annotation |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can write on Annotation |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can read on Dataset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can write on Dataset |:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can read on Log |:heavy_check_mark:|O|O|O|
|
||||
| can write on Log |:heavy_check_mark:|O|O|O|
|
||||
| can read on Dashboard |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can write on Dashboard |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can read on Database |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
|
||||
| can write on Database |:heavy_check_mark:|O|O|O|
|
||||
| can read on Query |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
|
||||
| can this form get on ResetPasswordView |:heavy_check_mark:|O|O|O|
|
||||
| can this form post on ResetPasswordView |:heavy_check_mark:|O|O|O|
|
||||
| can this form get on ResetMyPasswordView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can this form post on ResetMyPasswordView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can this form get on UserInfoEditView |:heavy_check_mark:|O|O|O|
|
||||
| can this form post on UserInfoEditView |:heavy_check_mark:|O|O|O|
|
||||
| can show on UserDBModelView |:heavy_check_mark:|O|O|O|
|
||||
| can edit on UserDBModelView |:heavy_check_mark:|O|O|O|
|
||||
| can delete on UserDBModelView |:heavy_check_mark:|O|O|O|
|
||||
| can add on UserDBModelView |:heavy_check_mark:|O|O|O|
|
||||
| can list on UserDBModelView |:heavy_check_mark:|O|O|O|
|
||||
| can userinfo on UserDBModelView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| resetmypassword on UserDBModelView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| resetpasswords on UserDBModelView |:heavy_check_mark:|O|O|O|
|
||||
| userinfoedit on UserDBModelView |:heavy_check_mark:|O|O|O|
|
||||
| can show on RoleModelView |:heavy_check_mark:|O|O|O|
|
||||
| can edit on RoleModelView |:heavy_check_mark:|O|O|O|
|
||||
| can delete on RoleModelView |:heavy_check_mark:|O|O|O|
|
||||
| can add on RoleModelView |:heavy_check_mark:|O|O|O|
|
||||
| can list on RoleModelView |:heavy_check_mark:|O|O|O|
|
||||
| copyrole on RoleModelView |:heavy_check_mark:|O|O|O|
|
||||
| can get on OpenApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can show on SwaggerView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can get on MenuApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can list on AsyncEventsRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can invalidate on CacheRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can csv upload on Database |:heavy_check_mark:|O|O|O|
|
||||
| can excel upload on Database |:heavy_check_mark:|O|O|O|
|
||||
| can query form data on Api |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can query on Api |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can time range on Api |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can external metadata on Datasource |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can save on Datasource |:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can get on Datasource |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can my queries on SqlLab |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
|
||||
| can log on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can import dashboards on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can schemas on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can sqllab history on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
|
||||
| can publish on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can csv on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
|
||||
| can slice on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can sync druid source on Superset |:heavy_check_mark:|O|O|O|
|
||||
| can explore on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can approve on Superset |:heavy_check_mark:|O|O|O|
|
||||
| can explore json on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can fetch datasource metadata on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can csrf token on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can sqllab on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
|
||||
| can select star on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can warm up cache on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can sqllab table viz on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
|
||||
| can available domains on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can request access on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can dashboard on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can post on TableSchemaView |:heavy_check_mark:|O|O|:heavy_check_mark:|
|
||||
| can expanded on TableSchemaView |:heavy_check_mark:|O|O|:heavy_check_mark:|
|
||||
| can delete on TableSchemaView |:heavy_check_mark:|O|O|:heavy_check_mark:|
|
||||
| can get on TabStateView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
|
||||
| can post on TabStateView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
|
||||
| can delete query on TabStateView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
|
||||
| can migrate query on TabStateView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
|
||||
| can activate on TabStateView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
|
||||
| can delete on TabStateView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
|
||||
| can put on TabStateView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
|
||||
| can read on SecurityRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
|
||||
| menu access on Security |:heavy_check_mark:|O|O|O|
|
||||
| menu access on List Users |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| menu access on List Roles |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| menu access on Action Log |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| menu access on Manage |:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| menu access on Annotation Layers |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| menu access on CSS Templates |:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| menu access on Import Dashboards |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| menu access on Data |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| menu access on Databases |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| menu access on Datasets |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| menu access on Charts |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| menu access on Dashboards |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| menu access on SQL Lab |:heavy_check_mark:|O|O|:heavy_check_mark:|
|
||||
| menu access on SQL Editor |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
|
||||
| menu access on Saved Queries |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
|
||||
| menu access on Query Search |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
|
||||
| all datasource access on all_datasource_access |:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| all database access on all_database_access |:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| all query access on all_query_access |:heavy_check_mark:|O|O|O|
|
||||
| can write on DynamicPlugin |:heavy_check_mark:|O|O|O|
|
||||
| can edit on DynamicPlugin |:heavy_check_mark:|O|O|O|
|
||||
| can list on DynamicPlugin |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can show on DynamicPlugin |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can download on DynamicPlugin |:heavy_check_mark:|O|O|O|
|
||||
| can add on DynamicPlugin |:heavy_check_mark:|O|O|O|
|
||||
| can delete on DynamicPlugin |:heavy_check_mark:|O|O|O|
|
||||
| can external metadata by name on Datasource |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can get value on KV |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can store on KV |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can tagged objects on TagView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can suggestions on TagView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can get on TagView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can post on TagView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can delete on TagView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can edit on DashboardEmailScheduleView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can list on DashboardEmailScheduleView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can show on DashboardEmailScheduleView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can add on DashboardEmailScheduleView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can delete on DashboardEmailScheduleView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| muldelete on DashboardEmailScheduleView |:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can edit on SliceEmailScheduleView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can list on SliceEmailScheduleView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can show on SliceEmailScheduleView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can add on SliceEmailScheduleView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can delete on SliceEmailScheduleView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| muldelete on SliceEmailScheduleView |:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can edit on AlertModelView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can list on AlertModelView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can show on AlertModelView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can add on AlertModelView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can delete on AlertModelView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can list on AlertLogModelView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can show on AlertLogModelView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can list on AlertObservationModelView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can show on AlertObservationModelView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| menu access on Row Level Security |:heavy_check_mark:|O|O|O|
|
||||
| menu access on Access requests |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| menu access on Home |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| menu access on Plugins |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| menu access on Dashboard Email Schedules |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| menu access on Chart Emails |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| menu access on Alerts |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| menu access on Alerts & Report |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| menu access on Scan New Datasources |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can share dashboard on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can share chart on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can this form get on ColumnarToDatabaseView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can this form post on ColumnarToDatabaseView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can export on Chart |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can write on DashboardFilterStateRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can read on DashboardFilterStateRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can write on DashboardPermalinkRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can read on DashboardPermalinkRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can delete embedded on Dashboard |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can set embedded on Dashboard |:heavy_check_mark:|O|O|O|
|
||||
| can export on Dashboard |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can get embedded on Dashboard |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can export on Database |:heavy_check_mark:|O|O|O|
|
||||
| can export on Dataset |:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can write on ExploreFormDataRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can read on ExploreFormDataRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can write on ExplorePermalinkRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can read on ExplorePermalinkRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can export on ImportExportRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can import on ImportExportRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can export on SavedQuery |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
|
||||
| can dashboard permalink on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can grant guest token on SecurityRestApi |:heavy_check_mark:|O|O|O|
|
||||
| can read on AdvancedDataType |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can read on EmbeddedDashboard |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can duplicate on Dataset |:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can read on Explore |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can samples on Datasource |:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can read on AvailableDomains |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can get or create dataset on Dataset |:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can get column values on Datasource |:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can export csv on SQLLab |:heavy_check_mark:|O|O|:heavy_check_mark:|
|
||||
| can get results on SQLLab |:heavy_check_mark:|O|O|:heavy_check_mark:|
|
||||
| can execute sql query on SQLLab |:heavy_check_mark:|O|O|:heavy_check_mark:|
|
||||
| can recent activity on Log |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| |Admin|Alpha|Gamma|Public|SQL_LAB|
|
||||
|--------------------------------------------------|---|---|---|---|---|
|
||||
| Permission/role description |Admins have all possible rights, including granting or revoking rights from other users and altering other people's slices and dashboards.|Alpha users have access to all data sources, but they cannot grant or revoke access from other users. They are also limited to altering the objects that they own. Alpha users can add and alter data sources.|Gamma users have limited access. They can only consume data coming from data sources they have been given access to through another complementary role. They only have access to view the slices and dashboards made from data sources that they have access to. Currently Gamma users are not able to alter or add data sources. We assume that they are mostly content consumers, though they can create slices and dashboards.|Public is the most restrictive built-in role, designed for anonymous/unauthenticated users viewing public dashboards. It provides minimal read-only access for dashboard viewing with interactive filters. Use `PUBLIC_ROLE_LIKE = "Public"` to apply these permissions to anonymous users.|The sql_lab role grants access to SQL Lab. Note that while Admin users have access to all databases by default, both Alpha and Gamma users need to be given access on a per database basis.||
|
||||
| can read on SavedQuery |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|:heavy_check_mark:|
|
||||
| can write on SavedQuery |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|:heavy_check_mark:|
|
||||
| can read on CssTemplate |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can write on CssTemplate |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can read on ReportSchedule |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can write on ReportSchedule |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can read on Chart |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can write on Chart |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can read on Annotation |:heavy_check_mark:|:heavy_check_mark:|O|:heavy_check_mark:|O|
|
||||
| can write on Annotation |:heavy_check_mark:|:heavy_check_mark:|O|O|O|
|
||||
| can read on AnnotationLayerRestApi |:heavy_check_mark:|:heavy_check_mark:|O|:heavy_check_mark:|O|
|
||||
| can read on Dataset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can write on Dataset |:heavy_check_mark:|:heavy_check_mark:|O|O|O|
|
||||
| can read on Log |:heavy_check_mark:|O|O|O|O|
|
||||
| can write on Log |:heavy_check_mark:|O|O|O|O|
|
||||
| can read on Dashboard |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can write on Dashboard |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can read on Database |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|:heavy_check_mark:|
|
||||
| can write on Database |:heavy_check_mark:|O|O|O|O|
|
||||
| can read on Query |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|:heavy_check_mark:|
|
||||
| can this form get on ResetPasswordView |:heavy_check_mark:|O|O|O|O|
|
||||
| can this form post on ResetPasswordView |:heavy_check_mark:|O|O|O|O|
|
||||
| can this form get on ResetMyPasswordView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can this form post on ResetMyPasswordView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can this form get on UserInfoEditView |:heavy_check_mark:|O|O|O|O|
|
||||
| can this form post on UserInfoEditView |:heavy_check_mark:|O|O|O|O|
|
||||
| can show on UserDBModelView |:heavy_check_mark:|O|O|O|O|
|
||||
| can edit on UserDBModelView |:heavy_check_mark:|O|O|O|O|
|
||||
| can delete on UserDBModelView |:heavy_check_mark:|O|O|O|O|
|
||||
| can add on UserDBModelView |:heavy_check_mark:|O|O|O|O|
|
||||
| can list on UserDBModelView |:heavy_check_mark:|O|O|O|O|
|
||||
| can userinfo on UserDBModelView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| resetmypassword on UserDBModelView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| resetpasswords on UserDBModelView |:heavy_check_mark:|O|O|O|O|
|
||||
| userinfoedit on UserDBModelView |:heavy_check_mark:|O|O|O|O|
|
||||
| can show on RoleModelView |:heavy_check_mark:|O|O|O|O|
|
||||
| can edit on RoleModelView |:heavy_check_mark:|O|O|O|O|
|
||||
| can delete on RoleModelView |:heavy_check_mark:|O|O|O|O|
|
||||
| can add on RoleModelView |:heavy_check_mark:|O|O|O|O|
|
||||
| can list on RoleModelView |:heavy_check_mark:|O|O|O|O|
|
||||
| copyrole on RoleModelView |:heavy_check_mark:|O|O|O|O|
|
||||
| can get on OpenApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can show on SwaggerView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can get on MenuApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can list on AsyncEventsRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can invalidate on CacheRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can csv upload on Database |:heavy_check_mark:|O|O|O|O|
|
||||
| can excel upload on Database |:heavy_check_mark:|O|O|O|O|
|
||||
| can query form data on Api |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can query on Api |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can time range on Api |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can external metadata on Datasource |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can save on Datasource |:heavy_check_mark:|:heavy_check_mark:|O|O|O|
|
||||
| can get on Datasource |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can my queries on SqlLab |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|:heavy_check_mark:|
|
||||
| can log on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can import dashboards on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can schemas on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can sqllab history on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|:heavy_check_mark:|
|
||||
| can publish on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can csv on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|:heavy_check_mark:|
|
||||
| can slice on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can sync druid source on Superset |:heavy_check_mark:|O|O|O|O|
|
||||
| can explore on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can approve on Superset |:heavy_check_mark:|O|O|O|O|
|
||||
| can explore json on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can fetch datasource metadata on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can csrf token on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can sqllab on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|:heavy_check_mark:|
|
||||
| can select star on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can warm up cache on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can sqllab table viz on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|:heavy_check_mark:|
|
||||
| can available domains on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can request access on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can dashboard on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can post on TableSchemaView |:heavy_check_mark:|O|O|O|:heavy_check_mark:|
|
||||
| can expanded on TableSchemaView |:heavy_check_mark:|O|O|O|:heavy_check_mark:|
|
||||
| can delete on TableSchemaView |:heavy_check_mark:|O|O|O|:heavy_check_mark:|
|
||||
| can get on TabStateView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|:heavy_check_mark:|
|
||||
| can post on TabStateView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|:heavy_check_mark:|
|
||||
| can delete query on TabStateView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|:heavy_check_mark:|
|
||||
| can migrate query on TabStateView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|:heavy_check_mark:|
|
||||
| can activate on TabStateView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|:heavy_check_mark:|
|
||||
| can delete on TabStateView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|:heavy_check_mark:|
|
||||
| can put on TabStateView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|:heavy_check_mark:|
|
||||
| can read on SecurityRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|:heavy_check_mark:|
|
||||
| menu access on Security |:heavy_check_mark:|O|O|O|O|
|
||||
| menu access on List Users |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| menu access on List Roles |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| menu access on Action Log |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| menu access on Manage |:heavy_check_mark:|:heavy_check_mark:|O|O|O|
|
||||
| menu access on Annotation Layers |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| menu access on CSS Templates |:heavy_check_mark:|:heavy_check_mark:|O|O|O|
|
||||
| menu access on Import Dashboards |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| menu access on Data |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| menu access on Databases |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| menu access on Datasets |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| menu access on Charts |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| menu access on Dashboards |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| menu access on SQL Lab |:heavy_check_mark:|O|O|O|:heavy_check_mark:|
|
||||
| menu access on SQL Editor |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|:heavy_check_mark:|
|
||||
| menu access on Saved Queries |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|:heavy_check_mark:|
|
||||
| menu access on Query Search |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|:heavy_check_mark:|
|
||||
| all datasource access on all_datasource_access |:heavy_check_mark:|:heavy_check_mark:|O|O|O|
|
||||
| all database access on all_database_access |:heavy_check_mark:|:heavy_check_mark:|O|O|O|
|
||||
| all query access on all_query_access |:heavy_check_mark:|O|O|O|O|
|
||||
| can write on DynamicPlugin |:heavy_check_mark:|O|O|O|O|
|
||||
| can edit on DynamicPlugin |:heavy_check_mark:|O|O|O|O|
|
||||
| can list on DynamicPlugin |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can show on DynamicPlugin |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can download on DynamicPlugin |:heavy_check_mark:|O|O|O|O|
|
||||
| can add on DynamicPlugin |:heavy_check_mark:|O|O|O|O|
|
||||
| can delete on DynamicPlugin |:heavy_check_mark:|O|O|O|O|
|
||||
| can external metadata by name on Datasource |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can get value on KV |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can store on KV |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can tagged objects on TagView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can suggestions on TagView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can get on TagView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can post on TagView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can delete on TagView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can edit on DashboardEmailScheduleView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can list on DashboardEmailScheduleView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can show on DashboardEmailScheduleView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can add on DashboardEmailScheduleView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can delete on DashboardEmailScheduleView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| muldelete on DashboardEmailScheduleView |:heavy_check_mark:|:heavy_check_mark:|O|O|O|
|
||||
| can edit on SliceEmailScheduleView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can list on SliceEmailScheduleView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can show on SliceEmailScheduleView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can add on SliceEmailScheduleView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can delete on SliceEmailScheduleView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| muldelete on SliceEmailScheduleView |:heavy_check_mark:|:heavy_check_mark:|O|O|O|
|
||||
| can edit on AlertModelView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can list on AlertModelView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can show on AlertModelView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can add on AlertModelView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can delete on AlertModelView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can list on AlertLogModelView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can show on AlertLogModelView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can list on AlertObservationModelView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can show on AlertObservationModelView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| menu access on Row Level Security |:heavy_check_mark:|O|O|O|O|
|
||||
| menu access on Access requests |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| menu access on Home |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| menu access on Plugins |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| menu access on Dashboard Email Schedules |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| menu access on Chart Emails |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| menu access on Alerts |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| menu access on Alerts & Report |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| menu access on Scan New Datasources |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can share dashboard on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can share chart on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can this form get on ColumnarToDatabaseView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can this form post on ColumnarToDatabaseView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can export on Chart |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can write on DashboardFilterStateRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can read on DashboardFilterStateRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can write on DashboardPermalinkRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can read on DashboardPermalinkRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can delete embedded on Dashboard |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can set embedded on Dashboard |:heavy_check_mark:|O|O|O|O|
|
||||
| can export on Dashboard |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can get embedded on Dashboard |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can export on Database |:heavy_check_mark:|O|O|O|O|
|
||||
| can export on Dataset |:heavy_check_mark:|:heavy_check_mark:|O|O|O|
|
||||
| can write on ExploreFormDataRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can read on ExploreFormDataRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can write on ExplorePermalinkRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can read on ExplorePermalinkRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can export on ImportExportRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can import on ImportExportRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can export on SavedQuery |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|:heavy_check_mark:|
|
||||
| can dashboard permalink on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can grant guest token on SecurityRestApi |:heavy_check_mark:|O|O|O|O|
|
||||
| can read on AdvancedDataType |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can read on EmbeddedDashboard |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can duplicate on Dataset |:heavy_check_mark:|:heavy_check_mark:|O|O|O|
|
||||
| can read on Explore |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can samples on Datasource |:heavy_check_mark:|:heavy_check_mark:|O|O|O|
|
||||
| can read on AvailableDomains |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can get or create dataset on Dataset |:heavy_check_mark:|:heavy_check_mark:|O|O|O|
|
||||
| can get column values on Datasource |:heavy_check_mark:|:heavy_check_mark:|O|O|O|
|
||||
| can export csv on SQLLab |:heavy_check_mark:|O|O|O|:heavy_check_mark:|
|
||||
| can get results on SQLLab |:heavy_check_mark:|O|O|O|:heavy_check_mark:|
|
||||
| can execute sql query on SQLLab |:heavy_check_mark:|O|O|O|:heavy_check_mark:|
|
||||
| can recent activity on Log |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
|
||||
27
UPDATING.md
27
UPDATING.md
@@ -124,8 +124,31 @@ See `superset/mcp_service/PRODUCTION.md` for deployment guides.
|
||||
|
||||
---
|
||||
|
||||
- [33055](https://github.com/apache/superset/pull/33055): Upgrades Flask-AppBuilder to 5.0.0. The AUTH_OID authentication type has been deprecated and is no longer available as an option in Flask-AppBuilder. OpenID (OID) is considered a deprecated authentication protocol - if you are using AUTH_OID, you will need to migrate to an alternative authentication method such as OAuth, LDAP, or database authentication before upgrading.
|
||||
- [35621](https://github.com/apache/superset/pull/35621): The default hash algorithm has changed from MD5 to SHA-256 for improved security and FedRAMP compliance. This affects cache keys for thumbnails, dashboard digests, chart digests, and filter option names. Existing cached data will be invalidated upon upgrade. To opt out of this change and maintain backward compatibility, set `HASH_ALGORITHM = "md5"` in your `superset_config.py`.
|
||||
- [35062](https://github.com/apache/superset/pull/35062): Changed the function signature of `setupExtensions` to `setupCodeOverrides` with options as arguments.
|
||||
|
||||
### Breaking Changes
|
||||
- [36317](https://github.com/apache/superset/pull/36317): The `CUSTOM_FONT_URLS` configuration option has been removed. Use the new per-theme `fontUrls` token in `THEME_DEFAULT` or database-managed themes instead.
|
||||
- **Before:**
|
||||
```python
|
||||
CUSTOM_FONT_URLS = [
|
||||
"https://fonts.example.com/myfont.css",
|
||||
]
|
||||
```
|
||||
- **After:**
|
||||
```python
|
||||
THEME_DEFAULT = {
|
||||
"token": {
|
||||
"fontUrls": [
|
||||
"https://fonts.example.com/myfont.css",
|
||||
],
|
||||
# ... other tokens
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## 6.0.0
|
||||
- [33055](https://github.com/apache/superset/pull/33055): Upgrades Flask-AppBuilder to 5.0.0. The AUTH_OID authentication type has been deprecated and is no longer available as an option in Flask-AppBuilder. OpenID (OID) is considered a deprecated authentication protocol - if you are using AUTH_OID, you will need to migrate to an alternative authentication method such as OAuth, LDAP, or database authentication before upgrading.
|
||||
- [34871](https://github.com/apache/superset/pull/34871): Fixed Jest test hanging issue from Ant Design v5 upgrade. MessageChannel is now mocked in test environment to prevent rc-overflow from causing Jest to hang. Test environment only - no production impact.
|
||||
- [34782](https://github.com/apache/superset/pull/34782): Dataset exports now include the dataset ID in their file name (similar to charts and dashboards). If managing assets as code, make sure to rename existing dataset YAMLs to include the ID (and avoid duplicated files).
|
||||
- [34536](https://github.com/apache/superset/pull/34536): The `ENVIRONMENT_TAG_CONFIG` color values have changed to support only Ant Design semantic colors. Update your `superset_config.py`:
|
||||
@@ -142,7 +165,7 @@ Note: Pillow is now a required dependency (previously optional) to support image
|
||||
- [33116](https://github.com/apache/superset/pull/33116) In Echarts Series charts (e.g. Line, Area, Bar, etc.), the `x_axis_sort_series` and `x_axis_sort_series_ascending` form data items have been renamed to `x_axis_sort` and `x_axis_sort_asc`.
|
||||
There's a migration added that can potentially affect a significant number of existing charts.
|
||||
- [32317](https://github.com/apache/superset/pull/32317) The horizontal filter bar feature is now out of testing/beta development and its feature flag `HORIZONTAL_FILTER_BAR` has been removed.
|
||||
- [31590](https://github.com/apache/superset/pull/31590) Marks the beginning of intricate work around supporting dynamic Theming, and breaks support for [THEME_OVERRIDES](https://github.com/apache/superset/blob/732de4ac7fae88e29b7f123b6cbb2d7cd411b0e4/superset/config.py#L671) in favor of a new theming system based on AntD V5. Likely this will be in disrepair until settling over the 5.x lifecycle.
|
||||
- [31590](https://github.com/apache/superset/pull/31590) Marks the beginning of intricate work around supporting dynamic Theming, and breaks support for [THEME_OVERRIDES](https://github.com/apache/superset/blob/732de4ac7fae88e29b7f123b6cbb2d7cd411b0e4/superset/config.py#L671) in favor of a new theming system based on AntD V5. Likely this will be in disrepair until settling over the 5.x lifecycle.
|
||||
- [32432](https://github.com/apache/superset/pull/31260) Moves the List Roles FAB view to the frontend and requires `FAB_ADD_SECURITY_API` to be enabled in the configuration and `superset init` to be executed.
|
||||
- [34319](https://github.com/apache/superset/pull/34319) Drill to Detail and Drill By are now supported in Embedded mode, and also with the `DASHBOARD_RBAC` FF. If you don't want to expose these features in Embedded / `DASHBOARD_RBAC`, make sure the roles used for Embedded / `DASHBOARD_RBAC` don't have the required permissions to perform D2D actions.
|
||||
|
||||
|
||||
@@ -112,6 +112,8 @@ services:
|
||||
superset-init-light:
|
||||
condition: service_completed_successfully
|
||||
volumes: *superset-volumes
|
||||
ports:
|
||||
- "${SUPERSET_PORT:-8088}:8088"
|
||||
environment:
|
||||
DATABASE_HOST: db-light
|
||||
DATABASE_DB: superset_light
|
||||
@@ -157,7 +159,7 @@ services:
|
||||
environment:
|
||||
# set this to false if you have perf issues running the npm i; npm run dev in-docker
|
||||
# if you do so, you have to run this manually on the host, which should perform better!
|
||||
BUILD_SUPERSET_FRONTEND_IN_DOCKER: true
|
||||
BUILD_SUPERSET_FRONTEND_IN_DOCKER: false
|
||||
NPM_RUN_PRUNE: false
|
||||
SCARF_ANALYTICS: "${SCARF_ANALYTICS:-}"
|
||||
# configuring the dev-server to use the host.docker.internal to connect to the backend
|
||||
|
||||
@@ -54,10 +54,9 @@ services:
|
||||
- path: docker/.env-local # optional override
|
||||
required: false
|
||||
image: nginx:latest
|
||||
container_name: superset_nginx
|
||||
restart: unless-stopped
|
||||
ports:
|
||||
- "80:80"
|
||||
- "${NGINX_PORT:-80}:80"
|
||||
extra_hosts:
|
||||
- "host.docker.internal:host-gateway"
|
||||
volumes:
|
||||
@@ -66,10 +65,9 @@ services:
|
||||
|
||||
redis:
|
||||
image: redis:7
|
||||
container_name: superset_cache
|
||||
restart: unless-stopped
|
||||
ports:
|
||||
- "127.0.0.1:6379:6379"
|
||||
- "127.0.0.1:${REDIS_PORT:-6379}:6379"
|
||||
volumes:
|
||||
- redis:/data
|
||||
|
||||
@@ -80,10 +78,9 @@ services:
|
||||
- path: docker/.env-local # optional override
|
||||
required: false
|
||||
image: postgres:16
|
||||
container_name: superset_db
|
||||
restart: unless-stopped
|
||||
ports:
|
||||
- "127.0.0.1:5432:5432"
|
||||
- "127.0.0.1:${DATABASE_PORT:-5432}:5432"
|
||||
volumes:
|
||||
- db_home:/var/lib/postgresql/data
|
||||
- ./docker/docker-entrypoint-initdb.d:/docker-entrypoint-initdb.d
|
||||
@@ -96,13 +93,12 @@ services:
|
||||
required: false
|
||||
build:
|
||||
<<: *common-build
|
||||
container_name: superset_app
|
||||
command: ["/app/docker/docker-bootstrap.sh", "app"]
|
||||
restart: unless-stopped
|
||||
ports:
|
||||
- 8088:8088
|
||||
- ${SUPERSET_PORT:-8088}:8088
|
||||
# When in cypress-mode ->
|
||||
- 8081:8081
|
||||
- ${CYPRESS_PORT:-8081}:8081
|
||||
extra_hosts:
|
||||
- "host.docker.internal:host-gateway"
|
||||
user: *superset-user
|
||||
@@ -114,10 +110,9 @@ services:
|
||||
SUPERSET__SQLALCHEMY_EXAMPLES_URI: "duckdb:////app/data/examples.duckdb"
|
||||
|
||||
superset-websocket:
|
||||
container_name: superset_websocket
|
||||
build: ./superset-websocket
|
||||
ports:
|
||||
- 8080:8080
|
||||
- ${WEBSOCKET_PORT:-8080}:8080
|
||||
extra_hosts:
|
||||
- "host.docker.internal:host-gateway"
|
||||
depends_on:
|
||||
@@ -149,7 +144,6 @@ services:
|
||||
superset-init:
|
||||
build:
|
||||
<<: *common-build
|
||||
container_name: superset_init
|
||||
command: ["/app/docker/docker-init.sh"]
|
||||
env_file:
|
||||
- path: docker/.env # default
|
||||
@@ -186,9 +180,10 @@ services:
|
||||
SCARF_ANALYTICS: "${SCARF_ANALYTICS:-}"
|
||||
# configuring the dev-server to use the host.docker.internal to connect to the backend
|
||||
superset: "http://superset:8088"
|
||||
# Bind to all interfaces so Docker port mapping works
|
||||
WEBPACK_DEVSERVER_HOST: "0.0.0.0"
|
||||
ports:
|
||||
- "127.0.0.1:9000:9000" # exposing the dynamic webpack dev server
|
||||
container_name: superset_node
|
||||
- "127.0.0.1:${NODE_PORT:-9000}:9000" # exposing the dynamic webpack dev server
|
||||
command: ["/app/docker/docker-frontend.sh"]
|
||||
env_file:
|
||||
- path: docker/.env # default
|
||||
@@ -200,7 +195,6 @@ services:
|
||||
superset-worker:
|
||||
build:
|
||||
<<: *common-build
|
||||
container_name: superset_worker
|
||||
command: ["/app/docker/docker-bootstrap.sh", "worker"]
|
||||
env_file:
|
||||
- path: docker/.env # default
|
||||
@@ -226,7 +220,6 @@ services:
|
||||
superset-worker-beat:
|
||||
build:
|
||||
<<: *common-build
|
||||
container_name: superset_worker_beat
|
||||
command: ["/app/docker/docker-bootstrap.sh", "beat"]
|
||||
env_file:
|
||||
- path: docker/.env # default
|
||||
@@ -244,7 +237,6 @@ services:
|
||||
superset-tests-worker:
|
||||
build:
|
||||
<<: *common-build
|
||||
container_name: superset_tests_worker
|
||||
command: ["/app/docker/docker-bootstrap.sh", "worker"]
|
||||
env_file:
|
||||
- path: docker/.env # default
|
||||
|
||||
@@ -21,6 +21,15 @@ PYTHONUNBUFFERED=1
|
||||
COMPOSE_PROJECT_NAME=superset
|
||||
DEV_MODE=true
|
||||
|
||||
# Port configuration (override in .env-local for multiple instances)
|
||||
# NGINX_PORT=80
|
||||
# SUPERSET_PORT=8088
|
||||
# NODE_PORT=9000
|
||||
# WEBSOCKET_PORT=8080
|
||||
# CYPRESS_PORT=8081
|
||||
# DATABASE_PORT=5432
|
||||
# REDIS_PORT=6379
|
||||
|
||||
# database configurations (do not modify)
|
||||
DATABASE_DB=superset
|
||||
DATABASE_HOST=db
|
||||
|
||||
39
docker/.env-local.example
Normal file
39
docker/.env-local.example
Normal file
@@ -0,0 +1,39 @@
|
||||
#
|
||||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
# -----------------------------------------------------------------------
|
||||
# Example .env-local file for running multiple Superset instances
|
||||
# Copy this file to .env-local and customize for your setup
|
||||
# -----------------------------------------------------------------------
|
||||
|
||||
# Unique project name prevents container/volume conflicts between clones
|
||||
# Each clone should have a different name (e.g., superset-pr123, superset-feature-x)
|
||||
COMPOSE_PROJECT_NAME=superset-dev2
|
||||
|
||||
# Port offsets for running multiple instances simultaneously
|
||||
# Instance 1 (default): 80, 8088, 9000, 8080, 8081, 5432, 6379
|
||||
# Instance 2 example: 81, 8089, 9001, 8082, 8083, 5433, 6380
|
||||
NGINX_PORT=81
|
||||
SUPERSET_PORT=8089
|
||||
NODE_PORT=9001
|
||||
WEBSOCKET_PORT=8082
|
||||
CYPRESS_PORT=8083
|
||||
DATABASE_PORT=5433
|
||||
REDIS_PORT=6380
|
||||
|
||||
# For verbose logging during development:
|
||||
# SUPERSET_LOG_LEVEL=debug
|
||||
@@ -77,6 +77,34 @@ To run the container, simply run: `docker compose up`
|
||||
After waiting several minutes for Superset initialization to finish, you can open a browser and view [`http://localhost:8088`](http://localhost:8088)
|
||||
to start your journey.
|
||||
|
||||
### Running Multiple Instances
|
||||
|
||||
If you need to run multiple Superset instances simultaneously (e.g., different branches or clones), use the make targets which automatically find available ports:
|
||||
|
||||
```bash
|
||||
make up
|
||||
```
|
||||
|
||||
This automatically:
|
||||
- Generates a unique project name from your directory
|
||||
- Finds available ports (incrementing from defaults if in use)
|
||||
- Displays the assigned URLs before starting
|
||||
|
||||
Available commands (run from repo root):
|
||||
|
||||
| Command | Description |
|
||||
|---------|-------------|
|
||||
| `make up` | Start services (foreground) |
|
||||
| `make up-detached` | Start services (background) |
|
||||
| `make down` | Stop all services |
|
||||
| `make ps` | Show running containers |
|
||||
| `make logs` | Follow container logs |
|
||||
| `make nuke` | Stop, remove volumes & local images |
|
||||
|
||||
From a subdirectory, use: `make -C $(git rev-parse --show-toplevel) up`
|
||||
|
||||
**Important**: Always use these commands instead of plain `docker compose down`, which won't know the correct project name.
|
||||
|
||||
## Developing
|
||||
|
||||
While running, the container server will reload on modification of the Superset Python and JavaScript source code.
|
||||
|
||||
@@ -19,6 +19,7 @@
|
||||
|
||||
# Import all settings from the main config first
|
||||
from flask_caching.backends.filesystemcache import FileSystemCache
|
||||
|
||||
from superset_config import * # noqa: F403
|
||||
|
||||
# Override caching to use simple in-memory cache instead of Redis
|
||||
|
||||
3
docs/.gitignore
vendored
3
docs/.gitignore
vendored
@@ -23,3 +23,6 @@ docs/.zshrc
|
||||
|
||||
# Gets copied from the root of the project at build time (yarn start / yarn build)
|
||||
docs/intro.md
|
||||
|
||||
# Generated badge images (downloaded at build time by remark-localize-badges plugin)
|
||||
static/badges/
|
||||
|
||||
@@ -139,6 +139,41 @@ docker volume rm superset_db_home
|
||||
docker-compose up
|
||||
```
|
||||
|
||||
### Running multiple instances
|
||||
|
||||
If you need to run multiple Superset clones simultaneously (e.g., testing different branches),
|
||||
use `make up` instead of `docker compose up`:
|
||||
|
||||
```bash
|
||||
make up
|
||||
```
|
||||
|
||||
This automatically:
|
||||
- Generates a unique project name from your directory name
|
||||
- Finds available ports (incrementing from 8088, 9000, etc. if already in use)
|
||||
- Displays the assigned URLs before starting
|
||||
|
||||
Each clone gets isolated containers and volumes, so you can run them side-by-side without conflicts.
|
||||
|
||||
Available commands (run from repo root):
|
||||
|
||||
| Command | Description |
|
||||
|---------|-------------|
|
||||
| `make up` | Start services (foreground) |
|
||||
| `make up-detached` | Start services (background) |
|
||||
| `make down` | Stop all services |
|
||||
| `make ps` | Show running containers |
|
||||
| `make logs` | Follow container logs |
|
||||
| `make ports` | Show assigned URLs and ports |
|
||||
| `make open` | Open browser to dev server |
|
||||
| `make nuke` | Stop, remove volumes & local images |
|
||||
|
||||
From a subdirectory, use: `make -C $(git rev-parse --show-toplevel) up`
|
||||
|
||||
:::warning
|
||||
Always use these commands instead of plain `docker compose down`, which won't know the correct project name for your instance.
|
||||
:::
|
||||
|
||||
## GitHub Codespaces (Cloud Development)
|
||||
|
||||
GitHub Codespaces provides a complete, pre-configured development environment in the cloud. This is ideal for:
|
||||
|
||||
@@ -138,19 +138,6 @@ The diagram shows:
|
||||
3. **The host application** implements the APIs and manages extensions
|
||||
4. **Extensions** integrate seamlessly with the host through well-defined interfaces
|
||||
|
||||
### Extension Dependencies
|
||||
|
||||
Extensions can depend on any combination of packages based on their needs. For example:
|
||||
|
||||
**Frontend-only extension** (e.g., a custom chart type):
|
||||
- Depends on `@apache-superset/core` for UI components and React APIs
|
||||
|
||||
**Full-stack extension** (e.g., a custom SQL editor with new API endpoints):
|
||||
- Depends on `@apache-superset/core` for frontend components
|
||||
- Depends on `apache-superset-core` for backend APIs and models
|
||||
|
||||
This modular approach allows extension authors to choose exactly what they need while promoting consistency and reusability.
|
||||
|
||||
## Dynamic Module Loading
|
||||
|
||||
One of the most sophisticated aspects of the extension architecture is how frontend code is dynamically loaded at runtime using Webpack's Module Federation.
|
||||
@@ -215,7 +202,6 @@ import {
|
||||
authentication,
|
||||
core,
|
||||
commands,
|
||||
environment,
|
||||
extensions,
|
||||
sqlLab,
|
||||
} from 'src/extensions';
|
||||
@@ -226,7 +212,6 @@ export default function setupExtensionsAPI() {
|
||||
authentication,
|
||||
core,
|
||||
commands,
|
||||
environment,
|
||||
extensions,
|
||||
sqlLab,
|
||||
};
|
||||
@@ -248,6 +233,7 @@ This architecture provides several key benefits:
|
||||
|
||||
Now that you understand the architecture, explore:
|
||||
|
||||
- **[Extension Project Structure](./extension-project-structure)** - How to organize your extension code
|
||||
- **[Frontend Contribution Types](./frontend-contribution-types)** - What kinds of extensions you can build
|
||||
- **[Dependencies](./dependencies)** - Managing dependencies and understanding API stability
|
||||
- **[Quick Start](./quick-start)** - Build your first extension
|
||||
- **[Contribution Types](./contribution-types)** - What kinds of extensions you can build
|
||||
- **[Development](./development)** - Project structure, APIs, and development workflow
|
||||
|
||||
131
docs/developer_portal/extensions/components/alert.mdx
Normal file
131
docs/developer_portal/extensions/components/alert.mdx
Normal file
@@ -0,0 +1,131 @@
|
||||
---
|
||||
title: Alert
|
||||
sidebar_label: Alert
|
||||
---
|
||||
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
import { StoryWithControls } from '../../../src/components/StorybookWrapper';
|
||||
import { Alert } from '@apache-superset/core/ui';
|
||||
|
||||
# Alert
|
||||
|
||||
Alert component for displaying important messages to users. Wraps Ant Design Alert with sensible defaults and improved accessibility.
|
||||
|
||||
## Live Example
|
||||
|
||||
<StoryWithControls
|
||||
component={Alert}
|
||||
props={{
|
||||
closable: true,
|
||||
type: 'info',
|
||||
message: 'This is a sample alert message.',
|
||||
description: 'Sample description for additional context.',
|
||||
showIcon: true
|
||||
}}
|
||||
controls={[
|
||||
{
|
||||
name: 'type',
|
||||
label: 'Type',
|
||||
type: 'select',
|
||||
options: [
|
||||
'info',
|
||||
'error',
|
||||
'warning',
|
||||
'success'
|
||||
]
|
||||
},
|
||||
{
|
||||
name: 'closable',
|
||||
label: 'Closable',
|
||||
type: 'boolean'
|
||||
},
|
||||
{
|
||||
name: 'showIcon',
|
||||
label: 'Show Icon',
|
||||
type: 'boolean'
|
||||
},
|
||||
{
|
||||
name: 'message',
|
||||
label: 'Message',
|
||||
type: 'text'
|
||||
},
|
||||
{
|
||||
name: 'description',
|
||||
label: 'Description',
|
||||
type: 'text'
|
||||
}
|
||||
]}
|
||||
/>
|
||||
|
||||
## Try It
|
||||
|
||||
Edit the code below to experiment with the component:
|
||||
|
||||
```tsx live
|
||||
function Demo() {
|
||||
return (
|
||||
<Alert
|
||||
closable
|
||||
type="info"
|
||||
message="This is a sample alert message."
|
||||
description="Sample description for additional context."
|
||||
showIcon
|
||||
/>
|
||||
);
|
||||
}
|
||||
```
|
||||
|
||||
## Props
|
||||
|
||||
| Prop | Type | Default | Description |
|
||||
|------|------|---------|-------------|
|
||||
| `closable` | `boolean` | `true` | Whether the Alert can be closed with a close button. |
|
||||
| `type` | `string` | `"info"` | Type of the alert (e.g., info, error, warning, success). |
|
||||
| `message` | `string` | `"This is a sample alert message."` | Message |
|
||||
| `description` | `string` | `"Sample description for additional context."` | Description |
|
||||
| `showIcon` | `boolean` | `true` | Whether to display an icon in the Alert. |
|
||||
|
||||
## Usage in Extensions
|
||||
|
||||
This component is available in the `@apache-superset/core/ui` package, which is automatically available to Superset extensions.
|
||||
|
||||
```tsx
|
||||
import { Alert } from '@apache-superset/core/ui';
|
||||
|
||||
function MyExtension() {
|
||||
return (
|
||||
<Alert
|
||||
closable
|
||||
type="info"
|
||||
message="This is a sample alert message."
|
||||
/>
|
||||
);
|
||||
}
|
||||
```
|
||||
|
||||
## Source Links
|
||||
|
||||
- [Story file](https://github.com/apache/superset/blob/master/superset-frontend/packages/superset-core/src/ui/components/Alert/Alert.stories.tsx)
|
||||
- [Component source](https://github.com/apache/superset/blob/master/superset-frontend/packages/superset-core/src/ui/components/Alert/index.tsx)
|
||||
|
||||
---
|
||||
|
||||
*This page was auto-generated from the component's Storybook story.*
|
||||
93
docs/developer_portal/extensions/components/index.mdx
Normal file
93
docs/developer_portal/extensions/components/index.mdx
Normal file
@@ -0,0 +1,93 @@
|
||||
---
|
||||
title: Extension Components
|
||||
sidebar_label: Overview
|
||||
sidebar_position: 1
|
||||
---
|
||||
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
# Extension Components
|
||||
|
||||
These UI components are available to Superset extension developers through the `@apache-superset/core/ui` package. They provide a consistent look and feel with the rest of Superset and are designed to be used in extension panels, views, and other UI elements.
|
||||
|
||||
## Available Components
|
||||
|
||||
- [Alert](./alert)
|
||||
|
||||
## Usage
|
||||
|
||||
All components are exported from the `@apache-superset/core/ui` package:
|
||||
|
||||
```tsx
|
||||
import { Alert } from '@apache-superset/core/ui';
|
||||
|
||||
export function MyExtensionPanel() {
|
||||
return (
|
||||
<Alert type="info">
|
||||
Welcome to my extension!
|
||||
</Alert>
|
||||
);
|
||||
}
|
||||
```
|
||||
|
||||
## Adding New Components
|
||||
|
||||
Components in `@apache-superset/core/ui` are automatically documented here. To add a new extension component:
|
||||
|
||||
1. Add the component to `superset-frontend/packages/superset-core/src/ui/components/`
|
||||
2. Export it from `superset-frontend/packages/superset-core/src/ui/components/index.ts`
|
||||
3. Create a Storybook story with an `Interactive` export:
|
||||
|
||||
```tsx
|
||||
export default {
|
||||
title: 'Extension Components/MyComponent',
|
||||
component: MyComponent,
|
||||
parameters: {
|
||||
docs: {
|
||||
description: {
|
||||
component: 'Description of the component...',
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
export const InteractiveMyComponent = (args) => <MyComponent {...args} />;
|
||||
|
||||
InteractiveMyComponent.args = {
|
||||
variant: 'primary',
|
||||
disabled: false,
|
||||
};
|
||||
|
||||
InteractiveMyComponent.argTypes = {
|
||||
variant: {
|
||||
control: { type: 'select' },
|
||||
options: ['primary', 'secondary'],
|
||||
},
|
||||
disabled: {
|
||||
control: { type: 'boolean' },
|
||||
},
|
||||
};
|
||||
```
|
||||
|
||||
4. Run `yarn start` in `docs/` - the page generates automatically!
|
||||
|
||||
## Interactive Documentation
|
||||
|
||||
For interactive examples with controls, visit the [Storybook](/storybook/?path=/docs/extension-components--docs).
|
||||
130
docs/developer_portal/extensions/contribution-types.md
Normal file
130
docs/developer_portal/extensions/contribution-types.md
Normal file
@@ -0,0 +1,130 @@
|
||||
---
|
||||
title: Contribution Types
|
||||
sidebar_position: 5
|
||||
---
|
||||
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
# Contribution Types
|
||||
|
||||
To facilitate the development of extensions, we define a set of well-defined contribution types that extensions can implement. These contribution types serve as the building blocks for extensions, allowing them to interact with the host application and provide new functionality.
|
||||
|
||||
## Frontend
|
||||
|
||||
Frontend contribution types allow extensions to extend Superset's user interface with new views, commands, and menu items.
|
||||
|
||||
### Views
|
||||
|
||||
Extensions can add new views or panels to the host application, such as custom SQL Lab panels, dashboards, or other UI components. Each view is registered with a unique ID and can be activated or deactivated as needed. Contribution areas are uniquely identified (e.g., `sqllab.panels` for SQL Lab panels), enabling seamless integration into specific parts of the application.
|
||||
|
||||
``` json
|
||||
"frontend": {
|
||||
"contributions": {
|
||||
"views": {
|
||||
"sqllab.panels": [
|
||||
{
|
||||
"id": "my_extension.main",
|
||||
"name": "My Panel Name"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Commands
|
||||
|
||||
Extensions can define custom commands that can be executed within the host application, such as context-aware actions or menu options. Each command can specify properties like a unique command identifier, an icon, a title, and a description. These commands can be invoked by users through menus, keyboard shortcuts, or other UI elements, enabling extensions to add rich, interactive functionality to Superset.
|
||||
|
||||
``` json
|
||||
"frontend": {
|
||||
"contributions": {
|
||||
"commands": [
|
||||
{
|
||||
"command": "my_extension.copy_query",
|
||||
"icon": "CopyOutlined",
|
||||
"title": "Copy Query",
|
||||
"description": "Copy the current query to clipboard"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Menus
|
||||
|
||||
Extensions can contribute new menu items or context menus to the host application, providing users with additional actions and options. Each menu item can specify properties such as the target view, the command to execute, its placement (primary, secondary, or context), and conditions for when it should be displayed. Menu contribution areas are uniquely identified (e.g., `sqllab.editor` for the SQL Lab editor), allowing extensions to seamlessly integrate their functionality into specific menus and workflows within Superset.
|
||||
|
||||
``` json
|
||||
"frontend": {
|
||||
"contributions": {
|
||||
"menus": {
|
||||
"sqllab.editor": {
|
||||
"primary": [
|
||||
{
|
||||
"view": "builtin.editor",
|
||||
"command": "my_extension.copy_query"
|
||||
}
|
||||
],
|
||||
"secondary": [
|
||||
{
|
||||
"view": "builtin.editor",
|
||||
"command": "my_extension.prettify"
|
||||
}
|
||||
],
|
||||
"context": [
|
||||
{
|
||||
"view": "builtin.editor",
|
||||
"command": "my_extension.clear"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Backend
|
||||
|
||||
Backend contribution types allow extensions to extend Superset's server-side capabilities with new API endpoints, MCP tools, and MCP prompts.
|
||||
|
||||
### REST API Endpoints
|
||||
|
||||
Extensions can register custom REST API endpoints under the `/api/v1/extensions/` namespace. This dedicated namespace prevents conflicts with built-in endpoints and provides a clear separation between core and extension functionality.
|
||||
|
||||
``` json
|
||||
"backend": {
|
||||
"entryPoints": ["my_extension.entrypoint"],
|
||||
"files": ["backend/src/my_extension/**/*.py"]
|
||||
}
|
||||
```
|
||||
|
||||
The entry point module registers the API with Superset:
|
||||
|
||||
``` python
|
||||
from superset_core.api.rest_api import add_extension_api
|
||||
from .api import MyExtensionAPI
|
||||
|
||||
add_extension_api(MyExtensionAPI)
|
||||
```
|
||||
|
||||
### MCP Tools and Prompts
|
||||
|
||||
Extensions can contribute Model Context Protocol (MCP) tools and prompts that AI agents can discover and use. See [MCP Integration](./mcp) for detailed documentation.
|
||||
166
docs/developer_portal/extensions/dependencies.md
Normal file
166
docs/developer_portal/extensions/dependencies.md
Normal file
@@ -0,0 +1,166 @@
|
||||
---
|
||||
title: Dependencies
|
||||
sidebar_position: 4
|
||||
---
|
||||
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
# Dependencies
|
||||
|
||||
This guide explains how to manage dependencies in your Superset extensions, including the difference between public APIs and internal code, and best practices for maintaining stable extensions.
|
||||
|
||||
## Core Packages vs Internal Code
|
||||
|
||||
Extensions run in the same context as Superset during runtime. This means extension developers can technically import any module from the Superset codebase, not just the public APIs. Understanding the distinction between public and internal code is critical for building maintainable extensions.
|
||||
|
||||
### Public APIs (Stable)
|
||||
|
||||
The core packages follow [semantic versioning](https://semver.org/) and provide stable, documented APIs:
|
||||
|
||||
| Package | Language | Description |
|
||||
|---------|----------|-------------|
|
||||
| `@apache-superset/core` | JavaScript/TypeScript | Frontend APIs, UI components, hooks, and utilities |
|
||||
| `apache-superset-core` | Python | Backend APIs, models, DAOs, and utilities |
|
||||
|
||||
**Benefits of using core packages:**
|
||||
|
||||
- **Semantic versioning**: Breaking changes are communicated through version numbers
|
||||
- **Documentation**: APIs are documented with clear usage examples
|
||||
- **Stability commitment**: We strive to maintain backward compatibility
|
||||
- **Type safety**: Full TypeScript and Python type definitions
|
||||
|
||||
### Internal Code (Unstable)
|
||||
|
||||
Any code that is not exported through the core packages is considered internal. This includes:
|
||||
|
||||
- Direct imports from `superset-frontend/src/` modules
|
||||
- Direct imports from `superset/` Python modules (outside of `superset_core`)
|
||||
- Undocumented functions, classes, or utilities
|
||||
|
||||
:::warning Use at Your Own Risk
|
||||
Internal code can change at any time without notice. If you depend on internal modules, your extension may break when Superset is upgraded. There is no guarantee of backward compatibility for internal code.
|
||||
:::
|
||||
|
||||
**Example of internal vs public imports:**
|
||||
|
||||
```typescript
|
||||
// ✅ Public API - stable
|
||||
import { Button, sqlLab } from '@apache-superset/core';
|
||||
|
||||
// ❌ Internal code - may break without notice
|
||||
import { someInternalFunction } from 'src/explore/components/SomeComponent';
|
||||
```
|
||||
|
||||
```python
|
||||
# ✅ Public API - stable
|
||||
from superset_core.api.models import Database
|
||||
from superset_core.api.daos import DatabaseDAO
|
||||
|
||||
# ❌ Internal code - may break without notice
|
||||
from superset.views.core import SomeInternalClass
|
||||
```
|
||||
|
||||
## API Evolution
|
||||
|
||||
The core packages are still evolving. While we follow semantic versioning, the APIs may change as we add new extension points and refine existing ones based on community feedback.
|
||||
|
||||
**What this means for extension developers:**
|
||||
|
||||
- Check the release notes when upgrading Superset
|
||||
- Test your extensions against new Superset versions before deploying
|
||||
- Participate in discussions about API changes to influence the direction
|
||||
- In some cases, using internal dependencies may be acceptable while the public API is being developed for your use case
|
||||
|
||||
### When Internal Dependencies May Be Acceptable
|
||||
|
||||
While public APIs are always preferred, there are situations where using internal code may be reasonable:
|
||||
|
||||
1. **Missing functionality**: The public API doesn't yet expose what you need
|
||||
2. **Prototype/experimental extensions**: You're exploring capabilities before committing to a stable implementation
|
||||
3. **Bridge period**: You need functionality that's planned for the public API but not yet released
|
||||
|
||||
In these cases, document your internal dependencies clearly and plan to migrate to public APIs when they become available.
|
||||
|
||||
## Core Library Dependencies
|
||||
|
||||
An important architectural principle of the Superset extension system is that **we do not provide abstractions on top of core dependencies** like React (frontend) or SQLAlchemy (backend).
|
||||
|
||||
### Why We Don't Abstract Core Libraries
|
||||
|
||||
Abstracting libraries like React or SQLAlchemy would:
|
||||
|
||||
- Create maintenance overhead keeping abstractions in sync with upstream
|
||||
- Limit access to the full power of these libraries
|
||||
- Add unnecessary abstraction layers
|
||||
- Fragment the ecosystem with Superset-specific variants
|
||||
|
||||
### Depending on Core Libraries Directly
|
||||
|
||||
Extension developers should depend on and use core libraries directly:
|
||||
|
||||
**Frontend (examples):**
|
||||
- [React](https://react.dev/) - UI framework
|
||||
- [Ant Design](https://ant.design/) - UI component library (prefer Superset components from `@apache-superset/core/ui` when available to preserve visual consistency)
|
||||
- [Emotion](https://emotion.sh/) - CSS-in-JS styling
|
||||
- ...
|
||||
|
||||
**Backend (examples):**
|
||||
- [SQLAlchemy](https://www.sqlalchemy.org/) - Database toolkit
|
||||
- [Flask](https://flask.palletsprojects.com/) - Web framework
|
||||
- [Flask-AppBuilder](https://flask-appbuilder.readthedocs.io/) - Application framework
|
||||
- ...
|
||||
|
||||
:::info Version Compatibility
|
||||
When Superset upgrades its core dependencies (e.g., a new major version of Ant Design or SQLAlchemy), extension developers should upgrade their extensions accordingly. This ensures compatibility and access to the latest features and security fixes.
|
||||
:::
|
||||
|
||||
## API Versioning and Changelog
|
||||
|
||||
Once the extensions API reaches **v1**, we will maintain a dedicated `CHANGELOG.md` file to track all changes to the public APIs. This will include:
|
||||
|
||||
- New APIs and features
|
||||
- Deprecation notices
|
||||
- Breaking changes with migration guides
|
||||
- Bug fixes affecting API behavior
|
||||
|
||||
Until then, monitor the Superset release notes and test your extensions with each new release.
|
||||
|
||||
## Best Practices
|
||||
|
||||
### Do
|
||||
|
||||
- **Prefer public APIs**: Always check if functionality exists in `@apache-superset/core` or `apache-superset-core` before using internal code
|
||||
- **Pin versions**: Specify compatible Superset versions in your extension metadata
|
||||
- **Test upgrades**: Verify your extension works with new Superset releases before deploying
|
||||
- **Report missing APIs**: If you need functionality not in the public API, open a GitHub issue to request it
|
||||
- **Use core libraries directly**: Leverage Ant Design, SQLAlchemy, and other core libraries directly
|
||||
|
||||
### Don't
|
||||
|
||||
- **Assume stability of internal code**: Internal modules can change or be removed in any release
|
||||
- **Depend on implementation details**: Even if something works, it may not be supported
|
||||
- **Skip upgrade testing**: Always test your extension against new Superset versions
|
||||
- **Expect abstractions**: Use core dependencies directly rather than expecting Superset-specific abstractions
|
||||
|
||||
## Next Steps
|
||||
|
||||
- **[Architecture](./architecture)** - Understand the extension system design
|
||||
- **[Development](./development)** - Learn about APIs and development workflow
|
||||
- **[Quick Start](./quick-start)** - Build your first extension
|
||||
@@ -1,6 +1,6 @@
|
||||
---
|
||||
title: Deploying an Extension
|
||||
sidebar_position: 8
|
||||
title: Deployment
|
||||
sidebar_position: 7
|
||||
---
|
||||
|
||||
<!--
|
||||
@@ -22,7 +22,7 @@ specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
# Deploying an Extension
|
||||
# Deployment
|
||||
|
||||
Once an extension has been developed, the deployment process involves packaging and uploading it to the host application.
|
||||
|
||||
@@ -33,13 +33,17 @@ Packaging is handled by the `superset-extensions bundle` command, which:
|
||||
3. Generates a `manifest.json` with build-time metadata, including the contents of `extension.json` and references to built assets.
|
||||
4. Packages everything into a `.supx` file (a zip archive with a specific structure required by Superset).
|
||||
|
||||
Uploading is accomplished through Superset's REST API at `/api/v1/extensions/import/`. The endpoint accepts the `.supx` file as form data and processes it by:
|
||||
To deploy an extension, place the `.supx` file in the extensions directory configured via `EXTENSIONS_PATH` in your `superset_config.py`:
|
||||
|
||||
1. Extracting and validating the extension metadata and manifest.
|
||||
2. Storing extension assets in the metadata database for dynamic loading.
|
||||
3. Registering the extension in the metadata database, including its name, version, author, and capabilities.
|
||||
4. Automatically activating the extension, making it immediately available for use and management via the Superset UI or API.
|
||||
``` python
|
||||
EXTENSIONS_PATH = "/path/to/extensions"
|
||||
```
|
||||
|
||||
This API-driven approach enables automated deployment workflows and simplifies extension management for administrators. Extensions can be uploaded through the Swagger UI, programmatically via scripts, or through the management interface:
|
||||
During application startup, Superset automatically discovers and loads all `.supx` files from this directory:
|
||||
|
||||
https://github.com/user-attachments/assets/98b16cdd-8ec5-4812-9d5e-9915badd8f0d
|
||||
1. Scans the configured directory for `.supx` files.
|
||||
2. Validates each file is a properly formatted zip archive.
|
||||
3. Extracts and validates the extension manifest and metadata.
|
||||
4. Loads the extension, making it available for use.
|
||||
|
||||
This file-based approach simplifies deployment in containerized environments and enables version control of extensions alongside infrastructure configuration.
|
||||
@@ -1,48 +0,0 @@
|
||||
---
|
||||
title: Development Mode
|
||||
sidebar_position: 10
|
||||
---
|
||||
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
# Development Mode
|
||||
|
||||
Development mode accelerates extension development by letting developers see changes in Superset quickly, without the need for repeated packaging and uploading. To enable development mode, set the `LOCAL_EXTENSIONS` configuration in your `superset_config.py`:
|
||||
|
||||
``` python
|
||||
LOCAL_EXTENSIONS = [
|
||||
"/path/to/your/extension1",
|
||||
"/path/to/your/extension2",
|
||||
]
|
||||
```
|
||||
|
||||
This instructs Superset to load and serve extensions directly from disk, so you can iterate quickly. Running `superset-extensions dev` watches for file changes and rebuilds assets automatically, while the Webpack development server (started separately with `npm run dev-server`) serves updated files as soon as they're modified. This enables immediate feedback for React components, styles, and other frontend code. Changes to backend files are also detected automatically and immediately synced, ensuring that both frontend and backend updates are reflected in your development environment.
|
||||
|
||||
Example output when running in development mode:
|
||||
|
||||
```
|
||||
superset-extensions dev
|
||||
|
||||
⚙️ Building frontend assets…
|
||||
✅ Frontend rebuilt
|
||||
✅ Backend files synced
|
||||
✅ Manifest updated
|
||||
👀 Watching for changes in: /dataset_references/frontend, /dataset_references/backend
|
||||
```
|
||||
305
docs/developer_portal/extensions/development.md
Normal file
305
docs/developer_portal/extensions/development.md
Normal file
@@ -0,0 +1,305 @@
|
||||
---
|
||||
title: Development
|
||||
sidebar_position: 6
|
||||
---
|
||||
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
# Development
|
||||
|
||||
This guide covers everything you need to know about developing extensions for Superset, from project structure to development workflow.
|
||||
|
||||
## Project Structure
|
||||
|
||||
The [apache-superset-extensions-cli](https://github.com/apache/superset/tree/master/superset-extensions-cli) package provides a command-line interface (CLI) that streamlines the extension development workflow. It offers the following commands:
|
||||
|
||||
```
|
||||
superset-extensions init: Generates the initial folder structure and scaffolds a new extension project.
|
||||
|
||||
superset-extensions build: Builds extension assets.
|
||||
|
||||
superset-extensions bundle: Packages the extension into a .supx file.
|
||||
|
||||
superset-extensions dev: Automatically rebuilds the extension as files change.
|
||||
```
|
||||
|
||||
When creating a new extension with `superset-extensions init <extension-name>`, the CLI generates a standardized folder structure:
|
||||
|
||||
```
|
||||
dataset_references/
|
||||
├── extension.json
|
||||
├── frontend/
|
||||
│ ├── src/
|
||||
│ ├── webpack.config.js
|
||||
│ ├── tsconfig.json
|
||||
│ └── package.json
|
||||
├── backend/
|
||||
│ ├── src/
|
||||
│ └── dataset_references/
|
||||
│ ├── tests/
|
||||
│ ├── pyproject.toml
|
||||
│ └── requirements.txt
|
||||
├── dist/
|
||||
│ ├── manifest.json
|
||||
│ ├── frontend
|
||||
│ └── dist/
|
||||
│ ├── remoteEntry.d7a9225d042e4ccb6354.js
|
||||
│ └── 900.038b20cdff6d49cfa8d9.js
|
||||
│ └── backend
|
||||
│ └── dataset_references/
|
||||
│ ├── __init__.py
|
||||
│ ├── api.py
|
||||
│ └── entrypoint.py
|
||||
├── dataset_references-1.0.0.supx
|
||||
└── README.md
|
||||
```
|
||||
|
||||
The `extension.json` file serves as the declared metadata for the extension, containing the extension's name, version, author, description, and a list of capabilities. This file is essential for the host application to understand how to load and manage the extension.
|
||||
|
||||
The `frontend` directory contains the source code for the frontend components of the extension, including React components, styles, and assets. The `webpack.config.js` file is used to configure Webpack for building the frontend code, while the `tsconfig.json` file defines the TypeScript configuration for the project. The `package.json` file specifies the dependencies and scripts for building and testing the frontend code.
|
||||
|
||||
The `backend` directory contains the source code for the backend components of the extension, including Python modules, tests, and configuration files. The `pyproject.toml` file is used to define the Python package and its dependencies, while the `requirements.txt` file lists the required Python packages for the extension. The `src` folder contains the functional backend source files, while the `tests` directory contains unit tests for the backend code, ensuring that the extension behaves as expected and meets the defined requirements.
|
||||
|
||||
The `dist` directory is built when running the `build` or `dev` command, and contains the files that will be included in the bundle. The `manifest.json` file contains critical metadata about the extension, including the majority of the contents of the `extension.json` file, but also other build-time information, like the name of the built Webpack Module Federation remote entry file. The files in the `dist` directory will be zipped into the final `.supx` file. Although this file is technically a zip archive, the `.supx` extension makes it clear that it is a Superset extension package and follows a specific file layout. This packaged file can be distributed and installed in Superset instances.
|
||||
|
||||
The `README.md` file provides documentation and instructions for using the extension, including how to install, configure, and use its functionality.
|
||||
|
||||
## Extension Metadata
|
||||
|
||||
The `extension.json` file contains all metadata necessary for the host application to understand and manage the extension:
|
||||
|
||||
```json
|
||||
{
|
||||
"name": "dataset_references",
|
||||
"version": "1.0.0",
|
||||
"frontend": {
|
||||
"contributions": {
|
||||
"views": {
|
||||
"sqllab.panels": [
|
||||
{
|
||||
"id": "dataset_references.main",
|
||||
"name": "Dataset references"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"moduleFederation": {
|
||||
"exposes": ["./index"]
|
||||
}
|
||||
},
|
||||
"backend": {
|
||||
"entryPoints": ["dataset_references.entrypoint"],
|
||||
"files": ["backend/src/dataset_references/**/*.py"]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
The `contributions` section declares how the extension extends Superset's functionality through views, commands, menus, and other contribution types. The `backend` section specifies entry points and files to include in the bundle.
|
||||
|
||||
## Interacting with the Host
|
||||
|
||||
Extensions interact with Superset through well-defined, versioned APIs provided by the `@apache-superset/core` (frontend) and `apache-superset-core` (backend) packages. These APIs are designed to be stable, discoverable, and consistent for both built-in and external extensions.
|
||||
|
||||
**Note**: The `superset_core.api` module provides abstract classes that are replaced with concrete implementations via dependency injection when Superset initializes. This allows extensions to use the same interfaces as the host application.
|
||||
|
||||
### Frontend APIs
|
||||
|
||||
The frontend extension APIs (via `@apache-superset/core`) are organized into logical namespaces such as `authentication`, `commands`, `extensions`, `sqlLab`, and others. Each namespace groups related functionality, making it easy for extension authors to discover and use the APIs relevant to their needs. For example, the `sqlLab` namespace provides events and methods specific to SQL Lab, allowing extensions to react to user actions and interact with the SQL Lab environment:
|
||||
|
||||
```typescript
|
||||
export const getCurrentTab: () => Tab | undefined;
|
||||
|
||||
export const getDatabases: () => Database[];
|
||||
|
||||
export const getTabs: () => Tab[];
|
||||
|
||||
export const onDidChangeActivePanel: Event<Panel>;
|
||||
|
||||
export const onDidChangeTabTitle: Event<string>;
|
||||
|
||||
export const onDidQueryRun: Event<Editor>;
|
||||
|
||||
export const onDidQueryStop: Event<Editor>;
|
||||
```
|
||||
|
||||
The following code demonstrates more examples of the existing frontend APIs:
|
||||
|
||||
```typescript
|
||||
import { core, commands, sqlLab, authentication, Button } from '@apache-superset/core';
|
||||
import MyPanel from './MyPanel';
|
||||
|
||||
export function activate(context) {
|
||||
// Register a new panel (view) in SQL Lab and use shared UI components in your extension's React code
|
||||
const panelDisposable = core.registerView('my_extension.panel', <MyPanel><Button/></MyPanel>);
|
||||
|
||||
// Register a custom command
|
||||
const commandDisposable = commands.registerCommand('my_extension.copy_query', {
|
||||
title: 'Copy Query',
|
||||
execute: () => {
|
||||
// Command logic here
|
||||
},
|
||||
});
|
||||
|
||||
// Listen for query run events in SQL Lab
|
||||
const eventDisposable = sqlLab.onDidQueryRun(editor => {
|
||||
// Handle query execution event
|
||||
});
|
||||
|
||||
// Access a CSRF token for secure API requests
|
||||
authentication.getCSRFToken().then(token => {
|
||||
// Use token as needed
|
||||
});
|
||||
|
||||
// Add all disposables for automatic cleanup on deactivation
|
||||
context.subscriptions.push(panelDisposable, commandDisposable, eventDisposable);
|
||||
}
|
||||
```
|
||||
|
||||
### Backend APIs
|
||||
|
||||
Backend APIs (via `apache-superset-core`) follow a similar pattern, providing access to Superset's models, sessions, and query capabilities. Extensions can register REST API endpoints, access the metadata database, and interact with Superset's core functionality.
|
||||
|
||||
Extension endpoints are registered under a dedicated `/extensions` namespace to avoid conflicting with built-in endpoints and also because they don't share the same version constraints. By grouping all extension endpoints under `/extensions`, Superset establishes a clear boundary between core and extension functionality, making it easier to manage, document, and secure both types of APIs.
|
||||
|
||||
```python
|
||||
from superset_core.api.models import Database, get_session
|
||||
from superset_core.api.daos import DatabaseDAO
|
||||
from superset_core.api.rest_api import add_extension_api
|
||||
from .api import DatasetReferencesAPI
|
||||
|
||||
# Register a new extension REST API
|
||||
add_extension_api(DatasetReferencesAPI)
|
||||
|
||||
# Fetch Superset entities via the DAO to apply base filters that filter out entities
|
||||
# that the user doesn't have access to
|
||||
databases = DatabaseDAO.find_all()
|
||||
|
||||
# ..or apply simple filters on top of base filters
|
||||
databases = DatabaseDAO.filter_by(uuid=database.uuid)
|
||||
if not databases:
|
||||
raise Exception("Database not found")
|
||||
|
||||
return databases[0]
|
||||
|
||||
# Perform complex queries using SQLAlchemy Query, also filtering out
|
||||
# inaccessible entities
|
||||
session = get_session()
|
||||
databases_query = session.query(Database).filter(
|
||||
Database.database_name.ilike("%abc%")
|
||||
)
|
||||
return DatabaseDAO.query(databases_query)
|
||||
```
|
||||
|
||||
In the future, we plan to expand the backend APIs to support configuring security models, database engines, SQLAlchemy dialects, etc.
|
||||
|
||||
## Development Mode
|
||||
|
||||
Development mode accelerates extension development by letting developers see changes in Superset quickly, without the need for repeated packaging and uploading. To enable development mode, set the `LOCAL_EXTENSIONS` configuration in your `superset_config.py`:
|
||||
|
||||
```python
|
||||
LOCAL_EXTENSIONS = [
|
||||
"/path/to/your/extension1",
|
||||
"/path/to/your/extension2",
|
||||
]
|
||||
```
|
||||
|
||||
This instructs Superset to load and serve extensions directly from disk, so you can iterate quickly. Running `superset-extensions dev` watches for file changes and rebuilds assets automatically, while the Webpack development server (started separately with `npm run dev-server`) serves updated files as soon as they're modified. This enables immediate feedback for React components, styles, and other frontend code. Changes to backend files are also detected automatically and immediately synced, ensuring that both frontend and backend updates are reflected in your development environment.
|
||||
|
||||
Example output when running in development mode:
|
||||
|
||||
```
|
||||
superset-extensions dev
|
||||
|
||||
⚙️ Building frontend assets…
|
||||
✅ Frontend rebuilt
|
||||
✅ Backend files synced
|
||||
✅ Manifest updated
|
||||
👀 Watching for changes in: /dataset_references/frontend, /dataset_references/backend
|
||||
```
|
||||
|
||||
## Contributing Extension-Compatible Components
|
||||
|
||||
Components in `@apache-superset/core` are automatically documented in the Developer Portal. Simply add a component to the package and it will appear in the extension documentation.
|
||||
|
||||
### Requirements
|
||||
|
||||
1. **Location**: The component must be in `superset-frontend/packages/superset-core/src/ui/components/`
|
||||
2. **Exported**: The component must be exported from the package's `index.ts`
|
||||
3. **Story**: The component must have a Storybook story
|
||||
|
||||
### Creating a Story for Your Component
|
||||
|
||||
Create a story file with an `Interactive` export that defines args and argTypes:
|
||||
|
||||
```typescript
|
||||
// MyComponent.stories.tsx
|
||||
import { MyComponent } from '.';
|
||||
|
||||
export default {
|
||||
title: 'Extension Components/MyComponent',
|
||||
component: MyComponent,
|
||||
parameters: {
|
||||
docs: {
|
||||
description: {
|
||||
component: 'A brief description of what this component does.',
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
// Define an interactive story with args
|
||||
export const InteractiveMyComponent = (args) => <MyComponent {...args} />;
|
||||
|
||||
InteractiveMyComponent.args = {
|
||||
variant: 'primary',
|
||||
disabled: false,
|
||||
};
|
||||
|
||||
InteractiveMyComponent.argTypes = {
|
||||
variant: {
|
||||
control: { type: 'select' },
|
||||
options: ['primary', 'secondary', 'danger'],
|
||||
},
|
||||
disabled: {
|
||||
control: { type: 'boolean' },
|
||||
},
|
||||
};
|
||||
```
|
||||
|
||||
### How Documentation is Generated
|
||||
|
||||
When the docs site is built (`yarn start` or `yarn build` in the `docs/` directory):
|
||||
|
||||
1. The `generate-extension-components` script scans all stories in `superset-core`
|
||||
2. For each story, it generates an MDX page with:
|
||||
- Component description
|
||||
- **Live interactive example** with controls extracted from `argTypes`
|
||||
- **Editable code playground** for experimentation
|
||||
- Props table from story `args`
|
||||
- Usage code snippet
|
||||
- Links to source files
|
||||
3. Pages appear automatically in **Developer Portal → Extensions → Components**
|
||||
|
||||
### Best Practices
|
||||
|
||||
- **Use descriptive titles**: The title path determines the component's location in docs (e.g., `Extension Components/Alert`)
|
||||
- **Define argTypes**: These become interactive controls in the documentation
|
||||
- **Provide default args**: These populate the initial state of the live example
|
||||
- **Write clear descriptions**: Help extension developers understand when to use each component
|
||||
@@ -1,55 +0,0 @@
|
||||
---
|
||||
title: Extension Metadata
|
||||
sidebar_position: 4
|
||||
---
|
||||
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
# Extension Metadata
|
||||
|
||||
The `extension.json` file contains all metadata necessary for the host application to understand and manage the extension:
|
||||
|
||||
``` json
|
||||
{
|
||||
"name": "dataset_references",
|
||||
"version": "1.0.0",
|
||||
"frontend": {
|
||||
"contributions": {
|
||||
"views": {
|
||||
"sqllab.panels": [
|
||||
{
|
||||
"id": "dataset_references.main",
|
||||
"name": "Dataset references"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"moduleFederation": {
|
||||
"exposes": ["./index"]
|
||||
}
|
||||
},
|
||||
"backend": {
|
||||
"entryPoints": ["dataset_references.entrypoint"],
|
||||
"files": ["backend/src/dataset_references/**/*.py"]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
The `contributions` section declares how the extension extends Superset's functionality through views, commands, menus, and other contribution types. The `backend` section specifies entry points and files to include in the bundle.
|
||||
209
docs/developer_portal/extensions/extension-points/sqllab.md
Normal file
209
docs/developer_portal/extensions/extension-points/sqllab.md
Normal file
@@ -0,0 +1,209 @@
|
||||
---
|
||||
title: SQL Lab
|
||||
sidebar_position: 1
|
||||
---
|
||||
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
# SQL Lab Extension Points
|
||||
|
||||
SQL Lab provides 5 extension points where extensions can contribute custom UI components. Each area serves a specific purpose and can be customized to add new functionality.
|
||||
|
||||
## Layout Overview
|
||||
|
||||
```
|
||||
┌──────────┬─────────────────────────────────────────┬─────────────┐
|
||||
│ │ │ │
|
||||
│ │ │ │
|
||||
│ │ Editor │ │
|
||||
│ │ │ │
|
||||
│ Left │ │ Right │
|
||||
│ Sidebar ├─────────────────────────────────────────┤ Sidebar │
|
||||
│ │ │ │
|
||||
│ │ Panels │ │
|
||||
│ │ │ │
|
||||
│ │ │ │
|
||||
│ │ │ │
|
||||
├──────────┴─────────────────────────────────────────┴─────────────┤
|
||||
│ Status Bar │
|
||||
└──────────────────────────────────────────────────────────────────┘
|
||||
```
|
||||
|
||||
| Extension Point | ID | Description |
|
||||
| ----------------- | --------------------- | ---------------------------------------------------------- |
|
||||
| **Left Sidebar** | `sqllab.leftSidebar` | Navigation and browsing (database explorer, saved queries) |
|
||||
| **Editor** | `sqllab.editor` | SQL query editor workspace |
|
||||
| **Right Sidebar** | `sqllab.rightSidebar` | Contextual tools (AI assistants, query analysis) |
|
||||
| **Panels** | `sqllab.panels` | Results and related views (visualizations, data profiling) |
|
||||
| **Status Bar** | `sqllab.statusBar` | Connection status and query metrics |
|
||||
|
||||
## Area Customizations
|
||||
|
||||
Each extension point area supports three types of action customizations:
|
||||
|
||||
```
|
||||
┌───────────────────────────────────────────────────────────────┐
|
||||
│ Area Title [Button] [Button] [•••] │
|
||||
├───────────────────────────────────────────────────────────────┤
|
||||
│ │
|
||||
│ │
|
||||
│ Area Content │
|
||||
│ │
|
||||
│ (right-click for context menu) │
|
||||
│ │
|
||||
│ │
|
||||
└───────────────────────────────────────────────────────────────┘
|
||||
```
|
||||
|
||||
| Action Type | Location | Use Case |
|
||||
| --------------------- | ----------------- | ----------------------------------------------------- |
|
||||
| **Primary Actions** | Top-right buttons | Frequently used actions (e.g., run, refresh, add new) |
|
||||
| **Secondary Actions** | 3-dot menu (•••) | Less common actions (e.g., export, settings) |
|
||||
| **Context Actions** | Right-click menu | Context-sensitive actions on content |
|
||||
|
||||
## Examples
|
||||
|
||||
### Adding a Panel
|
||||
|
||||
This example adds a "Data Profiler" panel to SQL Lab:
|
||||
|
||||
```json
|
||||
{
|
||||
"name": "data_profiler",
|
||||
"version": "1.0.0",
|
||||
"frontend": {
|
||||
"contributions": {
|
||||
"views": {
|
||||
"sqllab.panels": [
|
||||
{
|
||||
"id": "data_profiler.main",
|
||||
"name": "Data Profiler"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
```typescript
|
||||
import { core } from '@apache-superset/core';
|
||||
import DataProfilerPanel from './DataProfilerPanel';
|
||||
|
||||
export function activate(context) {
|
||||
// Register the panel view with the ID declared in extension.json
|
||||
const disposable = core.registerView('data_profiler.main', <DataProfilerPanel />);
|
||||
context.subscriptions.push(disposable);
|
||||
}
|
||||
```
|
||||
|
||||
### Adding Actions to the Editor
|
||||
|
||||
This example adds primary, secondary, and context actions to the editor:
|
||||
|
||||
```json
|
||||
{
|
||||
"name": "query_tools",
|
||||
"version": "1.0.0",
|
||||
"frontend": {
|
||||
"contributions": {
|
||||
"commands": [
|
||||
{
|
||||
"command": "query_tools.format",
|
||||
"title": "Format Query",
|
||||
"icon": "FormatPainterOutlined"
|
||||
},
|
||||
{
|
||||
"command": "query_tools.explain",
|
||||
"title": "Explain Query"
|
||||
},
|
||||
{
|
||||
"command": "query_tools.copy_as_cte",
|
||||
"title": "Copy as CTE"
|
||||
}
|
||||
],
|
||||
"menus": {
|
||||
"sqllab.editor": {
|
||||
"primary": [
|
||||
{
|
||||
"view": "builtin.editor",
|
||||
"command": "query_tools.format"
|
||||
}
|
||||
],
|
||||
"secondary": [
|
||||
{
|
||||
"view": "builtin.editor",
|
||||
"command": "query_tools.explain"
|
||||
}
|
||||
],
|
||||
"context": [
|
||||
{
|
||||
"view": "builtin.editor",
|
||||
"command": "query_tools.copy_as_cte"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
```typescript
|
||||
import { commands, sqlLab } from '@apache-superset/core';
|
||||
|
||||
export function activate(context) {
|
||||
// Register the commands declared in extension.json
|
||||
const formatCommand = commands.registerCommand('query_tools.format', {
|
||||
execute: () => {
|
||||
const tab = sqlLab.getCurrentTab();
|
||||
if (tab?.editor) {
|
||||
// Format the SQL query
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
const explainCommand = commands.registerCommand('query_tools.explain', {
|
||||
execute: () => {
|
||||
const tab = sqlLab.getCurrentTab();
|
||||
if (tab?.editor) {
|
||||
// Show query explanation
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
const copyAsCteCommand = commands.registerCommand('query_tools.copy_as_cte', {
|
||||
execute: () => {
|
||||
const tab = sqlLab.getCurrentTab();
|
||||
if (tab?.editor) {
|
||||
// Copy selected text as CTE
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
context.subscriptions.push(formatCommand, explainCommand, copyAsCteCommand);
|
||||
}
|
||||
```
|
||||
|
||||
## Next Steps
|
||||
|
||||
- **[Contribution Types](../contribution-types)** - Learn about other contribution types (commands, menus)
|
||||
- **[Development](../development)** - Set up your development environment
|
||||
- **[Quick Start](../quick-start)** - Build a complete extension
|
||||
@@ -1,78 +0,0 @@
|
||||
---
|
||||
title: Extension Project Structure
|
||||
sidebar_position: 3
|
||||
---
|
||||
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
# Extension Project Structure
|
||||
|
||||
The `apache-superset-extensions-cli` package provides a command-line interface (CLI) that streamlines the extension development workflow. It offers the following commands:
|
||||
|
||||
```
|
||||
superset-extensions init: Generates the initial folder structure and scaffolds a new extension project.
|
||||
|
||||
superset-extensions build: Builds extension assets.
|
||||
|
||||
superset-extensions bundle: Packages the extension into a .supx file.
|
||||
|
||||
superset-extensions dev: Automatically rebuilds the extension as files change.
|
||||
```
|
||||
|
||||
When creating a new extension with `superset-extensions init <extension-name>`, the CLI generates a standardized folder structure:
|
||||
|
||||
```
|
||||
dataset_references/
|
||||
├── extension.json
|
||||
├── frontend/
|
||||
│ ├── src/
|
||||
│ ├── webpack.config.js
|
||||
│ ├── tsconfig.json
|
||||
│ └── package.json
|
||||
├── backend/
|
||||
│ ├── src/
|
||||
│ └── dataset_references/
|
||||
│ ├── tests/
|
||||
│ ├── pyproject.toml
|
||||
│ └── requirements.txt
|
||||
├── dist/
|
||||
│ ├── manifest.json
|
||||
│ ├── frontend
|
||||
│ └── dist/
|
||||
│ ├── remoteEntry.d7a9225d042e4ccb6354.js
|
||||
│ └── 900.038b20cdff6d49cfa8d9.js
|
||||
│ └── backend
|
||||
│ └── dataset_references/
|
||||
│ ├── __init__.py
|
||||
│ ├── api.py
|
||||
│ └── entrypoint.py
|
||||
├── dataset_references-1.0.0.supx
|
||||
└── README.md
|
||||
```
|
||||
|
||||
The `extension.json` file serves as the declared metadata for the extension, containing the extension's name, version, author, description, and a list of capabilities. This file is essential for the host application to understand how to load and manage the extension.
|
||||
|
||||
The `frontend` directory contains the source code for the frontend components of the extension, including React components, styles, and assets. The `webpack.config.js` file is used to configure Webpack for building the frontend code, while the `tsconfig.json` file defines the TypeScript configuration for the project. The `package.json` file specifies the dependencies and scripts for building and testing the frontend code.
|
||||
|
||||
The `backend` directory contains the source code for the backend components of the extension, including Python modules, tests, and configuration files. The `pyproject.toml` file is used to define the Python package and its dependencies, while the `requirements.txt` file lists the required Python packages for the extension. The `src` folder contains the functional backend source files, while the `tests` directory contains unit tests for the backend code, ensuring that the extension behaves as expected and meets the defined requirements.
|
||||
|
||||
The `dist` directory is built when running the `build` or `dev` command, and contains the files that will be included in the bundle. The `manifest.json` file contains critical metadata about the extension, including the majority of the contents of the `extension.json` file, but also other build-time information, like the name of the built Webpack Module Federation remote entry file. The files in the `dist` directory will be zipped into the final `.supx` file. Although this file is technically a zip archive, the `.supx` extension makes it clear that it is a Superset extension package and follows a specific file layout. This packaged file can be distributed and installed in Superset instances.
|
||||
|
||||
The `README.md` file provides documentation and instructions for using the extension, including how to install, configure, and use its functionality.
|
||||
@@ -1,90 +0,0 @@
|
||||
---
|
||||
title: Frontend Contribution Types
|
||||
sidebar_position: 5
|
||||
---
|
||||
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
# Frontend Contribution Types
|
||||
|
||||
To facilitate the development of extensions, we will define a set of well-defined contribution types that extensions can implement. These contribution types will serve as the building blocks for extensions, allowing them to interact with the host application and provide new functionality. The initial set of contribution types will include:
|
||||
|
||||
## Views
|
||||
|
||||
Extensions can add new views or panels to the host application, such as custom SQL Lab panels, dashboards, or other UI components. Each view is registered with a unique ID and can be activated or deactivated as needed. Contribution areas are uniquely identified (e.g., `sqllab.panels` for SQL Lab panels), enabling seamless integration into specific parts of the application.
|
||||
|
||||
``` json
|
||||
"views": {
|
||||
"sqllab.panels": [
|
||||
{
|
||||
"id": "dataset_references.main",
|
||||
"name": "Table references"
|
||||
}
|
||||
]
|
||||
},
|
||||
```
|
||||
|
||||
## Commands
|
||||
|
||||
Extensions can define custom commands that can be executed within the host application, such as context-aware actions or menu options. Each command can specify properties like a unique command identifier, an icon, a title, and a description. These commands can be invoked by users through menus, keyboard shortcuts, or other UI elements, enabling extensions to add rich, interactive functionality to Superset.
|
||||
|
||||
``` json
|
||||
"commands": [
|
||||
{
|
||||
"command": "extension1.copy_query",
|
||||
"icon": "CopyOutlined",
|
||||
"title": "Copy Query",
|
||||
"description": "Copy the current query to clipboard"
|
||||
},
|
||||
]
|
||||
```
|
||||
|
||||
## Menus
|
||||
|
||||
Extensions can contribute new menu items or context menus to the host application, providing users with additional actions and options. Each menu item can specify properties such as the target view, the command to execute, its placement (primary, secondary, or context), and conditions for when it should be displayed. Menu contribution areas are uniquely identified (e.g., `sqllab.editor` for the SQL Lab editor), allowing extensions to seamlessly integrate their functionality into specific menus and workflows within Superset.
|
||||
|
||||
``` json
|
||||
"menus": {
|
||||
"sqllab.editor": {
|
||||
"primary": [
|
||||
{
|
||||
"view": "builtin.editor",
|
||||
"command": "extension1.copy_query"
|
||||
}
|
||||
],
|
||||
"secondary": [
|
||||
{
|
||||
"view": "builtin.editor",
|
||||
"command": "extension1.prettify"
|
||||
}
|
||||
],
|
||||
"context": [
|
||||
{
|
||||
"view": "builtin.editor",
|
||||
"command": "extension1.clear"
|
||||
},
|
||||
{
|
||||
"view": "builtin.editor",
|
||||
"command": "extension1.refresh"
|
||||
}
|
||||
]
|
||||
},
|
||||
}
|
||||
```
|
||||
@@ -1,129 +0,0 @@
|
||||
---
|
||||
title: Interacting with the Host
|
||||
sidebar_position: 6
|
||||
---
|
||||
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
# Interacting with the Host
|
||||
|
||||
Extensions interact with Superset through well-defined, versioned APIs provided by the `@apache-superset/core` (frontend) and `apache-superset-core` (backend) packages. These APIs are designed to be stable, discoverable, and consistent for both built-in and external extensions.
|
||||
|
||||
**Note**: The `superset_core.api` module provides abstract classes that are replaced with concrete implementations via dependency injection when Superset initializes. This allows extensions to use the same interfaces as the host application.
|
||||
|
||||
**Frontend APIs** (via `@apache-superset/core`):
|
||||
|
||||
The frontend extension APIs in Superset are organized into logical namespaces such as `authentication`, `commands`, `extensions`, `sqlLab`, and others. Each namespace groups related functionality, making it easy for extension authors to discover and use the APIs relevant to their needs. For example, the `sqlLab` namespace provides events and methods specific to SQL Lab, allowing extensions to react to user actions and interact with the SQL Lab environment:
|
||||
|
||||
``` typescript
|
||||
export const getCurrentTab: () => Tab | undefined;
|
||||
|
||||
export const getDatabases: () => Database[];
|
||||
|
||||
export const getTabs: () => Tab[];
|
||||
|
||||
export const onDidChangeEditorContent: Event<string>;
|
||||
|
||||
export const onDidClosePanel: Event<Panel>;
|
||||
|
||||
export const onDidChangeActivePanel: Event<Panel>;
|
||||
|
||||
export const onDidChangeTabTitle: Event<string>;
|
||||
|
||||
export const onDidQueryRun: Event<Editor>;
|
||||
|
||||
export const onDidQueryStop: Event<Editor>;
|
||||
```
|
||||
|
||||
The following code demonstrates more examples of the existing frontend APIs:
|
||||
|
||||
``` typescript
|
||||
import { core, commands, sqlLab, authentication, Button } from '@apache-superset/core';
|
||||
import MyPanel from './MyPanel';
|
||||
|
||||
export function activate(context) {
|
||||
// Register a new panel (view) in SQL Lab and use shared UI components in your extension's React code
|
||||
const panelDisposable = core.registerView('my_extension.panel', <MyPanel><Button/></MyPanel>);
|
||||
|
||||
// Register a custom command
|
||||
const commandDisposable = commands.registerCommand('my_extension.copy_query', {
|
||||
title: 'Copy Query',
|
||||
execute: () => {
|
||||
// Command logic here
|
||||
},
|
||||
});
|
||||
|
||||
// Listen for query run events in SQL Lab
|
||||
const eventDisposable = sqlLab.onDidQueryRun(editor => {
|
||||
// Handle query execution event
|
||||
});
|
||||
|
||||
// Access a CSRF token for secure API requests
|
||||
authentication.getCSRFToken().then(token => {
|
||||
// Use token as needed
|
||||
});
|
||||
|
||||
// Add all disposables for automatic cleanup on deactivation
|
||||
context.subscriptions.push(panelDisposable, commandDisposable, eventDisposable);
|
||||
}
|
||||
```
|
||||
|
||||
**Backend APIs** (via `apache-superset-core`):
|
||||
|
||||
Backend APIs follow a similar pattern, providing access to Superset's models, sessions, and query capabilities. Extensions can register REST API endpoints, access the metadata database, and interact with Superset's core functionality.
|
||||
|
||||
Extension endpoints are registered under a dedicated `/extensions` namespace to avoid conflicting with built-in endpoints and also because they don't share the same version constraints. By grouping all extension endpoints under `/extensions`, Superset establishes a clear boundary between core and extension functionality, making it easier to manage, document, and secure both types of APIs.
|
||||
|
||||
``` python
|
||||
from superset_core.api.models import Database, get_session
|
||||
from superset_core.api.daos import DatabaseDAO
|
||||
from superset_core.api.rest_api import add_extension_api
|
||||
from .api import DatasetReferencesAPI
|
||||
|
||||
# Register a new extension REST API
|
||||
add_extension_api(DatasetReferencesAPI)
|
||||
|
||||
# Fetch Superset entities via the DAO to apply base filters that filter out entities
|
||||
# that the user doesn't have access to
|
||||
databases = DatabaseDAO.find_all()
|
||||
|
||||
# ..or apply simple filters on top of base filters
|
||||
databases = DatabaseDAO.filter_by(uuid=database.uuid)
|
||||
if not databases:
|
||||
raise Exception("Database not found")
|
||||
|
||||
return databases[0]
|
||||
|
||||
# Perform complex queries using SQLAlchemy Query, also filtering out
|
||||
# inaccessible entities
|
||||
session = get_session()
|
||||
databases_query = session.query(Database).filter(
|
||||
Database.database_name.ilike("%abc%")
|
||||
)
|
||||
return DatabaseDAO.query(databases_query)
|
||||
|
||||
# Bypass security model for highly custom use cases
|
||||
session = get_session()
|
||||
all_databases_containing_abc = session.query(Database).filter(
|
||||
Database.database_name.ilike("%abc%")
|
||||
).all()
|
||||
```
|
||||
|
||||
In the future, we plan to expand the backend APIs to support configuring security models, database engines, SQLAlchemy dialects, etc.
|
||||
459
docs/developer_portal/extensions/mcp.md
Normal file
459
docs/developer_portal/extensions/mcp.md
Normal file
@@ -0,0 +1,459 @@
|
||||
---
|
||||
title: MCP Integration
|
||||
hide_title: true
|
||||
sidebar_position: 8
|
||||
version: 1
|
||||
---
|
||||
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
# MCP Integration
|
||||
|
||||
Model Context Protocol (MCP) integration allows extensions to register custom AI agent capabilities that integrate seamlessly with Superset's MCP service. Extensions can provide both **tools** (executable functions) and **prompts** (interactive guidance) that AI agents can discover and use.
|
||||
|
||||
## What is MCP?
|
||||
|
||||
MCP enables extensions to extend Superset's AI capabilities in two ways:
|
||||
|
||||
### MCP Tools
|
||||
Tools are Python functions that AI agents can call to perform specific tasks. They provide executable functionality that extends Superset's capabilities.
|
||||
|
||||
**Examples of MCP tools:**
|
||||
- Data processing and transformation functions
|
||||
- Custom analytics calculations
|
||||
- Integration with external APIs
|
||||
- Specialized report generation
|
||||
- Business-specific operations
|
||||
|
||||
### MCP Prompts
|
||||
Prompts provide interactive guidance and context to AI agents. They help agents understand how to better assist users with specific workflows or domain knowledge.
|
||||
|
||||
**Examples of MCP prompts:**
|
||||
- Step-by-step workflow guidance
|
||||
- Domain-specific context and knowledge
|
||||
- Interactive troubleshooting assistance
|
||||
- Template generation helpers
|
||||
- Best practices recommendations
|
||||
|
||||
## Getting Started
|
||||
|
||||
## MCP Tools
|
||||
|
||||
### Basic Tool Registration
|
||||
|
||||
The simplest way to create an MCP tool is using the `@tool` decorator:
|
||||
|
||||
```python
|
||||
from superset_core.mcp import tool
|
||||
|
||||
@tool
|
||||
def hello_world() -> dict:
|
||||
"""A simple greeting tool."""
|
||||
return {"message": "Hello from my extension!"}
|
||||
```
|
||||
|
||||
This creates a tool that AI agents can call by name. The tool name defaults to the function name.
|
||||
|
||||
### Decorator Parameters
|
||||
|
||||
The `@tool` decorator accepts several optional parameters:
|
||||
|
||||
**Parameter details:**
|
||||
- **`name`**: Tool identifier (AI agents use this to call your tool)
|
||||
- **`description`**: Explains what the tool does (helps AI agents decide when to use it)
|
||||
- **`tags`**: Categories for organization and discovery
|
||||
- **`protect`**: Whether the tool requires user authentication (defaults to `True`)
|
||||
|
||||
### Naming Your Tools
|
||||
|
||||
For extensions, include your extension ID in tool names to avoid conflicts:
|
||||
|
||||
## Complete Example
|
||||
|
||||
Here's a more comprehensive example showing best practices:
|
||||
|
||||
```python
|
||||
# backend/mcp_tools.py
|
||||
import random
|
||||
from datetime import datetime, timezone
|
||||
from pydantic import BaseModel, Field
|
||||
from superset_core.mcp import tool
|
||||
|
||||
class RandomNumberRequest(BaseModel):
|
||||
"""Request schema for random number generation."""
|
||||
|
||||
min_value: int = Field(
|
||||
description="Minimum value (inclusive) for random number generation",
|
||||
ge=-2147483648,
|
||||
le=2147483647
|
||||
)
|
||||
max_value: int = Field(
|
||||
description="Maximum value (inclusive) for random number generation",
|
||||
ge=-2147483648,
|
||||
le=2147483647
|
||||
)
|
||||
|
||||
@tool(
|
||||
name="example_extension.random_number",
|
||||
tags=["extension", "utility", "random", "generator"]
|
||||
)
|
||||
def random_number_generator(request: RandomNumberRequest) -> dict:
|
||||
"""
|
||||
Generate a random integer between specified bounds.
|
||||
|
||||
This tool validates input ranges and provides detailed error messages
|
||||
for invalid requests.
|
||||
"""
|
||||
|
||||
# Validate business logic (Pydantic handles type/range validation)
|
||||
if request.min_value > request.max_value:
|
||||
return {
|
||||
"status": "error",
|
||||
"error": f"min_value ({request.min_value}) cannot be greater than max_value ({request.max_value})",
|
||||
"timestamp": datetime.now(timezone.utc).isoformat()
|
||||
}
|
||||
|
||||
# Generate random number
|
||||
result = random.randint(request.min_value, request.max_value)
|
||||
|
||||
return {
|
||||
"status": "success",
|
||||
"random_number": result,
|
||||
"min_value": request.min_value,
|
||||
"max_value": request.max_value,
|
||||
"range_size": request.max_value - request.min_value + 1,
|
||||
"timestamp": datetime.now(timezone.utc).isoformat()
|
||||
}
|
||||
```
|
||||
|
||||
## Best Practices
|
||||
|
||||
### Response Format
|
||||
|
||||
Use consistent response structures:
|
||||
|
||||
```python
|
||||
# Success response
|
||||
{
|
||||
"status": "success",
|
||||
"result": "your_data_here",
|
||||
"timestamp": "2024-01-01T00:00:00Z"
|
||||
}
|
||||
|
||||
# Error response
|
||||
{
|
||||
"status": "error",
|
||||
"error": "Clear error message",
|
||||
"timestamp": "2024-01-01T00:00:00Z"
|
||||
}
|
||||
```
|
||||
|
||||
### Documentation
|
||||
|
||||
Write clear descriptions and docstrings:
|
||||
|
||||
```python
|
||||
@tool(
|
||||
name="my_extension.process_data",
|
||||
description="Process customer data and generate insights. Requires valid customer ID and date range.",
|
||||
tags=["analytics", "customer", "reporting"]
|
||||
)
|
||||
def process_data(customer_id: int, start_date: str, end_date: str) -> dict:
|
||||
"""
|
||||
Process customer data for the specified date range.
|
||||
|
||||
This tool analyzes customer behavior patterns and generates
|
||||
actionable insights for business decision-making.
|
||||
|
||||
Args:
|
||||
customer_id: Unique customer identifier
|
||||
start_date: Analysis start date (YYYY-MM-DD format)
|
||||
end_date: Analysis end date (YYYY-MM-DD format)
|
||||
|
||||
Returns:
|
||||
Dictionary containing analysis results and recommendations
|
||||
"""
|
||||
# Implementation here
|
||||
pass
|
||||
```
|
||||
|
||||
### Tool Naming
|
||||
|
||||
- **Extension tools**: Use prefixed names like `my_extension.tool_name`
|
||||
- **Descriptive names**: `calculate_tax_amount` vs `calculate`
|
||||
- **Consistent naming**: Follow patterns within your extension
|
||||
|
||||
## How AI Agents Use Your Tools
|
||||
|
||||
Once registered, AI agents can discover and use your tools automatically:
|
||||
|
||||
```
|
||||
User: "Generate a random number between 1 and 100"
|
||||
Agent: I'll use the random number generator tool.
|
||||
→ Calls: example_extension.random_number(min_value=1, max_value=100)
|
||||
← Returns: {"status": "success", "random_number": 42, ...}
|
||||
Agent: I generated the number 42 for you.
|
||||
```
|
||||
|
||||
The AI agent sees your tool's:
|
||||
- **Name**: How to call it
|
||||
- **Description**: What it does and when to use it
|
||||
- **Parameters**: What inputs it expects (from Pydantic schema)
|
||||
- **Tags**: Categories for discovery
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Tool Not Available to AI Agents
|
||||
|
||||
1. **Check extension registration**: Verify your tool module is listed in extension entrypoints
|
||||
2. **Verify decorator**: Ensure `@tool` is correctly applied
|
||||
3. **Extension loading**: Confirm your extension is installed and enabled
|
||||
|
||||
### Input Validation Errors
|
||||
|
||||
1. **Pydantic models**: Ensure field types match expected inputs
|
||||
2. **Field constraints**: Check min/max values and string lengths are reasonable
|
||||
3. **Required fields**: Verify which parameters are required vs optional
|
||||
|
||||
### Runtime Issues
|
||||
|
||||
1. **Error handling**: Add try/catch blocks with clear error messages
|
||||
2. **Response format**: Use consistent status/error/timestamp structure
|
||||
3. **Testing**: Test your tools with various input scenarios
|
||||
|
||||
### Development Tips
|
||||
|
||||
1. **Start simple**: Begin with basic tools, add complexity gradually
|
||||
2. **Test locally**: Use MCP clients (like Claude Desktop) to test your tools
|
||||
3. **Clear descriptions**: Write tool descriptions as if explaining to a new user
|
||||
4. **Meaningful tags**: Use tags that help categorize and discover tools
|
||||
5. **Error messages**: Provide specific, actionable error messages
|
||||
|
||||
## MCP Prompts
|
||||
|
||||
### Basic Prompt Registration
|
||||
|
||||
Create interactive prompts using the `@prompt` decorator:
|
||||
|
||||
```python
|
||||
from superset_core.mcp import prompt
|
||||
from fastmcp import Context
|
||||
|
||||
@prompt("my_extension.workflow_guide")
|
||||
async def workflow_guide(ctx: Context) -> str:
|
||||
"""Interactive guide for data analysis workflows."""
|
||||
return """
|
||||
# Data Analysis Workflow Guide
|
||||
|
||||
Here's a step-by-step approach to effective data analysis in Superset:
|
||||
|
||||
## 1. Data Discovery
|
||||
- Start by exploring your datasets using the dataset browser
|
||||
- Check data quality and identify key metrics
|
||||
- Look for patterns and relationships in your data
|
||||
|
||||
## 2. Chart Creation
|
||||
- Choose appropriate visualizations for your data types
|
||||
- Apply filters to focus on relevant subsets
|
||||
- Configure proper aggregations and groupings
|
||||
|
||||
## 3. Dashboard Assembly
|
||||
- Combine related charts into coherent dashboards
|
||||
- Use filters and parameters for interactivity
|
||||
- Add markdown components for context and explanations
|
||||
|
||||
Would you like guidance on any specific step?
|
||||
"""
|
||||
```
|
||||
|
||||
### Advanced Prompt Examples
|
||||
|
||||
#### Domain-Specific Context
|
||||
|
||||
```python
|
||||
@prompt(
|
||||
"sales_extension.sales_analysis_guide",
|
||||
title="Sales Analysis Guide",
|
||||
description="Specialized guidance for sales data analysis workflows"
|
||||
)
|
||||
async def sales_analysis_guide(ctx: Context) -> str:
|
||||
"""Provides sales-specific analysis guidance and best practices."""
|
||||
return """
|
||||
# Sales Data Analysis Best Practices
|
||||
|
||||
## Key Metrics to Track
|
||||
- **Revenue Growth**: Month-over-month and year-over-year trends
|
||||
- **Conversion Rates**: Lead-to-opportunity-to-close ratios
|
||||
- **Customer Lifetime Value**: Total value per customer segment
|
||||
- **Sales Cycle Length**: Time from lead to close by product/region
|
||||
|
||||
## Recommended Chart Types
|
||||
- **Time Series**: Revenue trends, seasonal patterns
|
||||
- **Funnel Charts**: Conversion analysis across sales stages
|
||||
- **Geographic Maps**: Regional performance comparison
|
||||
- **Cohort Analysis**: Customer retention and growth patterns
|
||||
|
||||
## Common Pitfalls to Avoid
|
||||
- Don't mix different time granularities without proper context
|
||||
- Always normalize for business days when comparing periods
|
||||
- Consider external factors (holidays, market events) in analysis
|
||||
- Segment by relevant dimensions (product, region, channel)
|
||||
|
||||
## Next Steps
|
||||
1. Identify your primary sales KPIs
|
||||
2. Create baseline trend charts for each metric
|
||||
3. Build comparative views across segments
|
||||
4. Set up automated alerts for significant changes
|
||||
"""
|
||||
```
|
||||
|
||||
#### Interactive Troubleshooting
|
||||
|
||||
```python
|
||||
@prompt("support_extension.troubleshoot_charts")
|
||||
async def troubleshoot_charts(ctx: Context) -> str:
|
||||
"""Interactive troubleshooting assistant for chart issues."""
|
||||
return """
|
||||
# Chart Troubleshooting Assistant
|
||||
|
||||
Let's diagnose your chart issue step by step:
|
||||
|
||||
## Common Issues and Solutions
|
||||
|
||||
### 🚫 No Data Showing
|
||||
**Possible causes:**
|
||||
- Filters are too restrictive
|
||||
- Date range doesn't match your data
|
||||
- Database connection issues
|
||||
- Missing permissions
|
||||
|
||||
**Check:** Try removing all filters and expanding the date range first.
|
||||
|
||||
### 📊 Unexpected Aggregation Results
|
||||
**Possible causes:**
|
||||
- Incorrect grouping dimensions
|
||||
- Wrong aggregation function (SUM vs COUNT vs AVG)
|
||||
- Data quality issues (duplicates, nulls)
|
||||
- Time zone mismatches
|
||||
|
||||
**Check:** Verify your GROUP BY columns and aggregation logic.
|
||||
|
||||
### 🐌 Slow Performance
|
||||
**Possible causes:**
|
||||
- Large dataset without proper indexing
|
||||
- Complex joins or calculations
|
||||
- Missing query optimizations
|
||||
- Resource constraints
|
||||
|
||||
**Check:** Simplify the query and add appropriate filters first.
|
||||
|
||||
## Debug Steps
|
||||
1. **Start Simple**: Create a basic count query first
|
||||
2. **Add Gradually**: Introduce complexity step by step
|
||||
3. **Check SQL**: Review the generated SQL for issues
|
||||
4. **Test Data**: Verify with a small sample first
|
||||
|
||||
What specific issue are you experiencing?
|
||||
"""
|
||||
```
|
||||
|
||||
### Prompt Best Practices
|
||||
|
||||
#### Content Structure
|
||||
- **Use clear headings** and sections for easy navigation
|
||||
- **Provide actionable steps** rather than just theory
|
||||
- **Include examples** relevant to the user's domain
|
||||
- **Offer next steps** to continue the workflow
|
||||
|
||||
#### Interactive Design
|
||||
- **Ask questions** to engage the user
|
||||
- **Provide options** for different scenarios
|
||||
- **Reference specific Superset features** by name
|
||||
- **Link to related tools** when appropriate
|
||||
|
||||
#### Context Awareness
|
||||
```python
|
||||
@prompt("analytics_extension.context_aware_guide")
|
||||
async def context_aware_guide(ctx: Context) -> str:
|
||||
"""Provides guidance based on current user context."""
|
||||
# Access user information if available
|
||||
user_info = getattr(ctx, 'user', None)
|
||||
|
||||
guidance = """# Personalized Analytics Guide\n\n"""
|
||||
|
||||
if user_info:
|
||||
guidance += f"Welcome back! Here's guidance tailored for your role:\n\n"
|
||||
|
||||
guidance += """
|
||||
## Getting Started
|
||||
Based on your previous activity, here are recommended next steps:
|
||||
|
||||
1. **Review Recent Dashboards**: Check your most-used dashboards for updates
|
||||
2. **Explore New Data**: Look for recently added datasets in your domain
|
||||
3. **Share Insights**: Consider sharing successful analyses with your team
|
||||
|
||||
## Advanced Techniques
|
||||
- Set up automated alerts for key metrics
|
||||
- Create parameterized dashboards for different audiences
|
||||
- Use SQL Lab for complex custom analyses
|
||||
"""
|
||||
|
||||
return guidance
|
||||
```
|
||||
|
||||
## Combining Tools and Prompts
|
||||
|
||||
Extensions can provide both tools and prompts that work together:
|
||||
|
||||
```python
|
||||
# Tool for data processing
|
||||
@tool("analytics_extension.calculate_metrics")
|
||||
def calculate_metrics(data: dict) -> dict:
|
||||
"""Calculate advanced analytics metrics."""
|
||||
# Implementation here
|
||||
pass
|
||||
|
||||
# Prompt that guides users to the tool
|
||||
@prompt("analytics_extension.metrics_guide")
|
||||
async def metrics_guide(ctx: Context) -> str:
|
||||
"""Guide users through advanced metrics calculation."""
|
||||
return """
|
||||
# Advanced Metrics Calculation
|
||||
|
||||
Use the `calculate_metrics` tool to compute specialized analytics:
|
||||
|
||||
## Available Metrics
|
||||
- Customer Lifetime Value (CLV)
|
||||
- Cohort Retention Rates
|
||||
- Statistical Significance Tests
|
||||
- Predictive Trend Analysis
|
||||
|
||||
## Usage
|
||||
Call the tool with your dataset to get detailed calculations
|
||||
and recommendations for visualization approaches.
|
||||
|
||||
Would you like to calculate metrics for your current dataset?
|
||||
"""
|
||||
```
|
||||
|
||||
## Next Steps
|
||||
|
||||
- **[Development](./development)** - Project structure, APIs, and dev workflow
|
||||
- **[Security](./security)** - Security best practices for extensions
|
||||
@@ -24,53 +24,31 @@ under the License.
|
||||
|
||||
# Overview
|
||||
|
||||
Apache Superset's extension system allows developers to enhance and customize Superset's functionality through a modular, plugin-based architecture. Extensions can add new visualization types, custom UI components, data processing capabilities, and integration points.
|
||||
Apache Superset's extension system enables organizations to build custom features without modifying the core codebase. Inspired by the [VS Code extension model](https://code.visualstudio.com/api), this architecture addresses a long-standing challenge: teams previously had to fork Superset or make invasive modifications to add capabilities like query optimizers, custom panels, or specialized integrations—resulting in maintenance overhead and codebase fragmentation.
|
||||
|
||||
The extension system introduces a modular, plugin-based architecture where both built-in features and external extensions use the same well-defined APIs. This "lean core" approach ensures that any capability available to Superset's internal features is equally accessible to community-developed extensions, fostering a vibrant ecosystem while reducing the maintenance burden on core contributors.
|
||||
|
||||
## What are Superset Extensions?
|
||||
|
||||
Superset extensions are self-contained packages that extend the core platform's capabilities. They follow a standardized architecture that ensures compatibility, security, and maintainability while providing powerful customization options.
|
||||
|
||||
## Extension Architecture
|
||||
|
||||
- **[Architecture](./architecture)** - Architectural principles and high-level system overview
|
||||
- **[Extension Project Structure](./extension-project-structure)** - Standard project layout and organization
|
||||
- **[Extension Metadata](./extension-metadata)** - Configuration and manifest structure
|
||||
|
||||
## Development Guide
|
||||
|
||||
- **[Frontend Contribution Types](./frontend-contribution-types)** - Types of UI contributions available
|
||||
- **[Interacting with Host](./interacting-with-host)** - Communication patterns with Superset core
|
||||
- **[Development Mode](./development-mode)** - Tools and workflows for extension development
|
||||
|
||||
For information about runtime loading and dependency management, see the [Dynamic Module Loading](./architecture#dynamic-module-loading) section in the Architecture page.
|
||||
|
||||
## Deployment & Management
|
||||
|
||||
- **[Deploying Extension](./deploying-extension)** - Packaging and distribution strategies
|
||||
- **[Security Implications](./security-implications)** - Security considerations and best practices
|
||||
|
||||
## Hands-on Examples
|
||||
|
||||
- **[Quick Start](./quick-start)** - Complete Hello World extension walkthrough
|
||||
Superset extensions are self-contained `.supx` packages that extend the platform's capabilities through standardized contribution points. Each extension can include both frontend (React/TypeScript) and backend (Python) components, bundled together and loaded dynamically at runtime using Webpack Module Federation.
|
||||
|
||||
## Extension Capabilities
|
||||
|
||||
Extensions can provide:
|
||||
|
||||
- **Custom Visualizations**: New chart types and data visualization components
|
||||
- **UI Enhancements**: Custom dashboards, panels, and interactive elements
|
||||
- **Data Connectors**: Integration with external data sources and APIs
|
||||
- **Workflow Automation**: Custom actions and batch processing capabilities
|
||||
- **Authentication Providers**: SSO and custom authentication mechanisms
|
||||
- **Theme Customization**: Custom styling and branding options
|
||||
- **Custom UI Components**: New panels, views, and interactive elements
|
||||
- **Commands and Menus**: Custom actions accessible via menus and keyboard shortcuts
|
||||
- **REST API Endpoints**: Backend services under the `/api/v1/extensions/` namespace
|
||||
- **MCP Tools and Prompts**: AI agent capabilities for enhanced user assistance
|
||||
|
||||
## Getting Started
|
||||
## Next Steps
|
||||
|
||||
1. **Learn the Architecture**: Start with [Architecture](./architecture) to understand the design philosophy
|
||||
2. **Set up Development**: Follow the [Development Mode](./development-mode) guide to configure your environment
|
||||
3. **Build Your First Extension**: Complete the [Quick Start](./quick-start) tutorial
|
||||
4. **Deploy and Share**: Use the [Deploying Extension](./deploying-extension) guide to package your extension
|
||||
|
||||
## Extension Ecosystem
|
||||
|
||||
The extension system is designed to foster a vibrant ecosystem of community-contributed functionality. By following the established patterns and guidelines, developers can create extensions that seamlessly integrate with Superset while maintaining the platform's reliability and performance standards.
|
||||
- **[Quick Start](./quick-start)** - Build your first extension with a complete walkthrough
|
||||
- **[Architecture](./architecture)** - Design principles and system overview
|
||||
- **[Dependencies](./dependencies)** - Managing dependencies and understanding API stability
|
||||
- **[Contribution Types](./contribution-types)** - Available extension points
|
||||
- **[Development](./development)** - Project structure, APIs, and development workflow
|
||||
- **[Deployment](./deployment)** - Packaging and deploying extensions
|
||||
- **[MCP Integration](./mcp)** - Adding AI agent capabilities using extensions
|
||||
- **[Security](./security)** - Security considerations and best practices
|
||||
- **[Community Extensions](./registry)** - Browse extensions shared by the community
|
||||
|
||||
@@ -191,7 +191,125 @@ This registers your API with Superset when the extension loads.
|
||||
|
||||
## Step 5: Create Frontend Component
|
||||
|
||||
The CLI generated boilerplate files. The webpack config and package.json are already properly configured with Module Federation.
|
||||
The CLI generates the frontend configuration files. Below are the key configurations that enable Module Federation integration with Superset.
|
||||
|
||||
**`frontend/package.json`**
|
||||
|
||||
The `@apache-superset/core` package must be listed in both `peerDependencies` (to declare runtime compatibility) and `devDependencies` (to provide TypeScript types during build):
|
||||
|
||||
```json
|
||||
{
|
||||
"name": "hello_world",
|
||||
"version": "0.1.0",
|
||||
"private": true,
|
||||
"license": "Apache-2.0",
|
||||
"scripts": {
|
||||
"start": "webpack serve --mode development",
|
||||
"build": "webpack --stats-error-details --mode production"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@apache-superset/core": "^x.x.x",
|
||||
"react": "^x.x.x",
|
||||
"react-dom": "^x.x.x"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@apache-superset/core": "^x.x.x",
|
||||
"@types/react": "^x.x.x",
|
||||
"ts-loader": "^x.x.x",
|
||||
"typescript": "^x.x.x",
|
||||
"webpack": "^5.x.x",
|
||||
"webpack-cli": "^x.x.x",
|
||||
"webpack-dev-server": "^x.x.x"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**`frontend/webpack.config.js`**
|
||||
|
||||
The webpack configuration requires specific settings for Module Federation. Key settings include `externalsType: "window"` and `externals` to map `@apache-superset/core` to `window.superset` at runtime, `import: false` for shared modules to use the host's React instead of bundling a separate copy, and `remoteEntry.[contenthash].js` for cache busting:
|
||||
|
||||
```javascript
|
||||
const path = require("path");
|
||||
const { ModuleFederationPlugin } = require("webpack").container;
|
||||
const packageConfig = require("./package.json");
|
||||
|
||||
module.exports = (env, argv) => {
|
||||
const isProd = argv.mode === "production";
|
||||
|
||||
return {
|
||||
entry: isProd ? {} : "./src/index.tsx",
|
||||
mode: isProd ? "production" : "development",
|
||||
devServer: {
|
||||
port: 3001,
|
||||
headers: {
|
||||
"Access-Control-Allow-Origin": "*",
|
||||
},
|
||||
},
|
||||
output: {
|
||||
filename: isProd ? undefined : "[name].[contenthash].js",
|
||||
chunkFilename: "[name].[contenthash].js",
|
||||
clean: true,
|
||||
path: path.resolve(__dirname, "dist"),
|
||||
publicPath: `/api/v1/extensions/${packageConfig.name}/`,
|
||||
},
|
||||
resolve: {
|
||||
extensions: [".ts", ".tsx", ".js", ".jsx"],
|
||||
},
|
||||
// Map @apache-superset/core imports to window.superset at runtime
|
||||
externalsType: "window",
|
||||
externals: {
|
||||
"@apache-superset/core": "superset",
|
||||
},
|
||||
module: {
|
||||
rules: [
|
||||
{
|
||||
test: /\.tsx?$/,
|
||||
use: "ts-loader",
|
||||
exclude: /node_modules/,
|
||||
},
|
||||
],
|
||||
},
|
||||
plugins: [
|
||||
new ModuleFederationPlugin({
|
||||
name: packageConfig.name,
|
||||
filename: "remoteEntry.[contenthash].js",
|
||||
exposes: {
|
||||
"./index": "./src/index.tsx",
|
||||
},
|
||||
shared: {
|
||||
react: {
|
||||
singleton: true,
|
||||
requiredVersion: packageConfig.peerDependencies.react,
|
||||
import: false, // Use host's React, don't bundle
|
||||
},
|
||||
"react-dom": {
|
||||
singleton: true,
|
||||
requiredVersion: packageConfig.peerDependencies["react-dom"],
|
||||
import: false,
|
||||
},
|
||||
},
|
||||
}),
|
||||
],
|
||||
};
|
||||
};
|
||||
```
|
||||
|
||||
**`frontend/tsconfig.json`**
|
||||
|
||||
```json
|
||||
{
|
||||
"compilerOptions": {
|
||||
"baseUrl": ".",
|
||||
"moduleResolution": "node",
|
||||
"jsx": "react",
|
||||
"strict": true,
|
||||
"esModuleInterop": true,
|
||||
"skipLibCheck": true,
|
||||
"forceConsistentCasingInFileNames": true
|
||||
},
|
||||
"include": ["src"]
|
||||
}
|
||||
```
|
||||
|
||||
**Create `frontend/src/HelloWorldPanel.tsx`**
|
||||
|
||||
@@ -337,7 +455,7 @@ Add the following to your `superset_config.py`:
|
||||
```python
|
||||
# Enable extensions feature
|
||||
FEATURE_FLAGS = {
|
||||
"EXTENSIONS": True,
|
||||
"ENABLE_EXTENSIONS": True,
|
||||
}
|
||||
|
||||
# Set the directory where extensions are stored
|
||||
@@ -388,10 +506,9 @@ Here's what happens when your extension loads:
|
||||
|
||||
Now that you have a working extension, explore:
|
||||
|
||||
- **[Development Mode](./development-mode)** - Faster iteration with local development and watch mode
|
||||
- **[Extension Project Structure](./extension-project-structure)** - Best practices for organizing larger extensions
|
||||
- **[Frontend Contribution Types](./frontend-contribution-types)** - Other UI contribution points beyond panels
|
||||
- **[Interacting with Host](./interacting-with-host)** - Advanced APIs for interacting with Superset
|
||||
- **[Security Implications](./security-implications)** - Security best practices for extensions
|
||||
- **[Development](./development)** - Project structure, APIs, and development workflow
|
||||
- **[Contribution Types](./contribution-types)** - Other contribution points beyond panels
|
||||
- **[Deployment](./deployment)** - Packaging and deploying your extension
|
||||
- **[Security](./security)** - Security best practices for extensions
|
||||
|
||||
For a complete real-world example, examine the query insights extension in the Superset codebase.
|
||||
|
||||
54
docs/developer_portal/extensions/registry.md
Normal file
54
docs/developer_portal/extensions/registry.md
Normal file
@@ -0,0 +1,54 @@
|
||||
---
|
||||
title: Community Extensions
|
||||
sidebar_position: 10
|
||||
---
|
||||
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
# Community Extensions
|
||||
|
||||
This page serves as a registry of community-created Superset extensions. These extensions are developed and maintained by community members and are not officially supported or vetted by the Apache Superset project. **Before installing any community extension, administrators are responsible for evaluating the extension's source code for security vulnerabilities, performance impact, UI/UX quality, and compatibility with their Superset deployment.**
|
||||
|
||||
## Extensions
|
||||
|
||||
| Name | Description | Author | Preview |
|
||||
| ------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| [Extensions API Explorer](https://github.com/michael-s-molina/superset-extensions/tree/main/api_explorer) | A SQL Lab panel that demonstrates the Extensions API by providing an interactive explorer for testing commands like getTabs, getCurrentTab, and getDatabases. Useful for extension developers to understand and experiment with the available APIs. | Michael S. Molina | <a href="/img/extensions/api-explorer.png" target="_blank"><img src="/img/extensions/api-explorer.png" alt="Extensions API Explorer" width="120" /></a> |
|
||||
| [SQL Query Flow Visualizer](https://github.com/msyavuz/superset-sql-visualizer) | A SQL Lab panel that transforms SQL queries into interactive flow diagrams, helping developers and analysts understand query execution paths and data relationships. | Mehmet Salih Yavuz | <a href="/img/extensions/sql-flow-visualizer.png" target="_blank"><img src="/img/extensions/sql-flow-visualizer.png" alt="SQL Flow Visualizer" width="120" /></a> |
|
||||
| [SQL Lab Export to Google Sheets](https://github.com/michael-s-molina/superset-extensions/tree/main/sqllab_gsheets) | A Superset extension that allows users to export SQL Lab query results directly to Google Sheets. | Michael S. Molina | <a href="/img/extensions/gsheets-export.png" target="_blank"><img src="/img/extensions/gsheets-export.png" alt="SQL Lab Export to Google Sheets" width="120" /></a> |
|
||||
| [SQL Lab Export to Parquet](https://github.com/rusackas/superset-extensions/tree/main/sqllab_parquet) | Export SQL Lab query results directly to Apache Parquet format with Snappy compression. | Evan Rusackas | <a href="/img/extensions/parquet-export.png" target="_blank"><img src="/img/extensions/parquet-export.png" alt="SQL Lab Export to Parquet" width="120" /></a> |
|
||||
| [SQL Lab Query Comparison](https://github.com/michael-s-molina/superset-extensions/tree/main/query_comparison) | A SQL Lab extension that enables side-by-side comparison of query results across different tabs, with GitHub-style diff visualization showing added/removed rows and columns. | Michael S. Molina | <a href="/img/extensions/query-comparison.png" target="_blank"><img src="/img/extensions/query-comparison.png" alt="Query Comparison" width="120" /></a> |
|
||||
| [SQL Lab Result Stats](https://github.com/michael-s-molina/superset-extensions/tree/main/result_stats) | A SQL Lab extension that automatically computes statistics for query results, providing type-aware analysis including numeric metrics (min, max, mean, median, std dev), string analysis (length, empty counts), and date range information. | Michael S. Molina | <a href="/img/extensions/result-stats.png" target="_blank"><img src="/img/extensions/result-stats.png" alt="Result Stats" width="120" /></a> |
|
||||
| [SQL Snippets](https://github.com/michael-s-molina/superset-extensions/tree/main/sql_snippets) | A SQL Lab extension that provides reusable SQL code snippets, enabling quick insertion of commonly used code blocks such as license headers, author information, and frequently used SQL patterns. | Michael S. Molina | <a href="/img/extensions/sql-snippets.png" target="_blank"><img src="/img/extensions/sql-snippets.png" alt="SQL Snippets" width="120" /></a> |
|
||||
| [SQL Lab Query Estimator](https://github.com/michael-s-molina/superset-extensions/tree/main/query_estimator) | A SQL Lab panel that analyzes query execution plans to estimate resource impact, detect performance issues like Cartesian products and high-cost operations, and visualize the query plan tree. | Michael S. Molina | <a href="/img/extensions/query-estimator.png" target="_blank"><img src="/img/extensions/query-estimator.png" alt="Query Estimator" width="120" /></a> |
|
||||
|
||||
## How to Add Your Extension
|
||||
|
||||
To add your extension to this registry, submit a pull request to the [Apache Superset repository](https://github.com/apache/superset) with the following changes:
|
||||
|
||||
1. Add a row to the **Extensions** table above using this format:
|
||||
|
||||
```markdown
|
||||
| [Your Extension](https://github.com/your-username/your-repo) | A brief description of your extension. | Your Name | <a href="/img/extensions/your-screenshot.png" target="_blank"><img src="/img/extensions/your-screenshot.png" alt="Your Extension" width="120" /></a> |
|
||||
```
|
||||
|
||||
2. Add a screenshot to `docs/static/img/extensions/` (recommended size: 800x450px, PNG or JPG format)
|
||||
|
||||
3. Submit your PR with a title like "docs: Add [Extension Name] to community extensions registry"
|
||||
@@ -1,6 +1,6 @@
|
||||
---
|
||||
title: Security Implications and Responsibilities
|
||||
sidebar_position: 12
|
||||
title: Security
|
||||
sidebar_position: 9
|
||||
---
|
||||
|
||||
<!--
|
||||
@@ -22,12 +22,14 @@ specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
# Security Implications and Responsibilities
|
||||
# Security
|
||||
|
||||
By default, extensions are disabled and must be explicitly enabled by setting the `ENABLE_EXTENSIONS` feature flag. Built-in extensions are included as part of the Superset codebase and are held to the same security standards and review processes as the rest of the application.
|
||||
|
||||
For external extensions, administrators are responsible for evaluating and verifying the security of any extensions they choose to install, just as they would when installing third-party NPM or PyPI packages. At this stage, all extensions run in the same context as the host application, without additional sandboxing. This means that external extensions can impact the security and performance of a Superset environment in the same way as any other installed dependency.
|
||||
|
||||
We plan to introduce an optional sandboxed execution model for extensions in the future (as part of an additional SIP). Until then, administrators should exercise caution and follow best practices when selecting and deploying third-party extensions. A directory of known Superset extensions may be maintained in a means similar to [this page](https://github.com/apache/superset/wiki/Superset-Third%E2%80%90Party-Plugins-Directory) on the wiki. We also discussed the possibility of introducing a shared registry for vetted extensions but decided to leave it out of the initial scope of the project. We might introduce a registry at a later stage depending on the evolution of extensions created by the community.
|
||||
We plan to introduce an optional sandboxed execution model for extensions in the future (as part of an additional SIP). Until then, administrators should exercise caution and follow best practices when selecting and deploying third-party extensions. A directory of community extensions is available in the [Community Extensions](./registry) page. Note that these extensions are not vetted by the Apache Superset project—administrators must evaluate each extension before installation.
|
||||
|
||||
Any performance or security vulnerabilities introduced by external extensions should be reported directly to the extension author, not as Superset vulnerabilities. Any security concerns regarding built-in extensions (included in Superset's monorepo) should be reported to the Superset Security mailing list for triage and resolution by maintainers.
|
||||
**Any performance or security vulnerabilities introduced by external extensions should be reported directly to the extension author, not as Superset vulnerabilities.**
|
||||
|
||||
Any security concerns regarding built-in extensions (included in Superset's monorepo) should be reported to the Superset Security mailing list for triage and resolution by maintainers.
|
||||
@@ -1,6 +1,6 @@
|
||||
---
|
||||
title: Backend Style Guidelines
|
||||
sidebar_position: 3
|
||||
title: Overview
|
||||
sidebar_position: 1
|
||||
---
|
||||
|
||||
<!--
|
||||
@@ -26,22 +26,22 @@ under the License.
|
||||
|
||||
This is a list of statements that describe how we do backend development in Superset. While they might not be 100% true for all files in the repo, they represent the gold standard we strive towards for backend quality and style.
|
||||
|
||||
* We use a monolithic Python/Flask/Flask-AppBuilder backend, with small single-responsibility satellite services where necessary.
|
||||
* Files are generally organized by feature or object type. Within each domain, we can have api controllers, models, schemas, commands, and data access objects (dao).
|
||||
* See: [Proposal for Improving Superset's Python Code Organization](https://github.com/apache/superset/issues/9077)
|
||||
* API controllers use Marshmallow Schemas to serialize/deserialize data.
|
||||
* Authentication and authorization are controlled by the [security manager](https://github.com/apache/superset/blob/master/superset/security/manager).
|
||||
* We use Pytest for unit and integration tests. These live in the `tests` directory.
|
||||
* We add tests for every new piece of functionality added to the backend.
|
||||
* We use pytest fixtures to share setup between tests.
|
||||
* We use sqlalchemy to access both Superset's application database, and users' analytics databases.
|
||||
* We make changes backwards compatible whenever possible.
|
||||
* If a change cannot be made backwards compatible, it goes into a major release.
|
||||
* See: [Proposal For Semantic Versioning](https://github.com/apache/superset/issues/12566)
|
||||
* We use Swagger for API documentation, with docs written inline on the API endpoint code.
|
||||
* We prefer thin ORM models, putting shared functionality in other utilities.
|
||||
* Several linters/checkers are used to maintain consistent code style and type safety: pylint, pypy, black, isort.
|
||||
* `__init__.py` files are kept empty to avoid implicit dependencies.
|
||||
- We use a monolithic Python/Flask/Flask-AppBuilder backend, with small single-responsibility satellite services where necessary.
|
||||
- Files are generally organized by feature or object type. Within each domain, we can have api controllers, models, schemas, commands, and data access objects (DAO).
|
||||
- See: [Proposal for Improving Superset's Python Code Organization](https://github.com/apache/superset/issues/9077)
|
||||
- API controllers use Marshmallow Schemas to serialize/deserialize data.
|
||||
- Authentication and authorization are controlled by the [security manager](https://github.com/apache/superset/blob/master/superset/security/manager).
|
||||
- We use Pytest for unit and integration tests. These live in the `tests` directory.
|
||||
- We add tests for every new piece of functionality added to the backend.
|
||||
- We use pytest fixtures to share setup between tests.
|
||||
- We use SQLAlchemy to access both Superset's application database, and users' analytics databases.
|
||||
- We make changes backwards compatible whenever possible.
|
||||
- If a change cannot be made backwards compatible, it goes into a major release.
|
||||
- See: [Proposal For Semantic Versioning](https://github.com/apache/superset/issues/12566)
|
||||
- We use Swagger for API documentation, with docs written inline on the API endpoint code.
|
||||
- We prefer thin ORM models, putting shared functionality in other utilities.
|
||||
- Several linters/checkers are used to maintain consistent code style and type safety: pylint, mypy, black, isort.
|
||||
- `__init__.py` files are kept empty to avoid implicit dependencies.
|
||||
|
||||
## Code Organization
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
---
|
||||
title: DAO Style Guidelines and Best Practices
|
||||
sidebar_position: 1
|
||||
sidebar_position: 2
|
||||
---
|
||||
|
||||
<!--
|
||||
@@ -26,19 +26,29 @@ under the License.
|
||||
|
||||
A Data Access Object (DAO) is a pattern that provides an abstract interface to the SQLAlchemy Object Relational Mapper (ORM). The DAOs are critical as they form the building block of the application which are wrapped by the associated commands and RESTful API endpoints.
|
||||
|
||||
Currently there are numerous inconsistencies and violation of the DRY principal within the codebase as it relates to DAOs and ORMs—unnecessary commits, non-ACID transactions, etc.—which makes the code unnecessarily complex and convoluted. Addressing the underlying issues with the DAOs _should_ help simplify the downstream operations and improve the developer experience.
|
||||
There are numerous inconsistencies and violations of the DRY principle within the codebase as it relates to DAOs and ORMs—unnecessary commits, non-ACID transactions, etc.—which makes the code unnecessarily complex and convoluted. Addressing the underlying issues with the DAOs _should_ help simplify the downstream operations and improve the developer experience.
|
||||
|
||||
To ensure consistency the following rules should be adhered to:
|
||||
|
||||
1. All database operations (including testing) should be defined within a DAO, i.e., there should not be any explicit `db.session.add`, `db.session.merge`, etc. calls outside of a DAO.
|
||||
## Core Rules
|
||||
|
||||
2. A DAO should use `create`, `update`, `delete`, `upsert` terms—typical database operations which ensure consistency with commands—rather than action based terms like `save`, `saveas`, `override`, etc.
|
||||
1. **All database operations (including testing) should be defined within a DAO**, i.e., there should not be any explicit `db.session.add`, `db.session.merge`, etc. calls outside of a DAO.
|
||||
|
||||
3. Sessions should be managed via a [context manager](https://docs.sqlalchemy.org/en/20/orm/session_transaction.html#begin-once) which auto-commits on success and rolls back on failure, i.e., there should be no explicit `db.session.commit` or `db.session.rollback` calls within the DAO.
|
||||
2. **A DAO should use `create`, `update`, `delete`, `upsert` terms**—typical database operations which ensure consistency with commands—rather than action based terms like `save`, `saveas`, `override`, etc.
|
||||
|
||||
4. There should be a single atomic transaction representing the entirety of the operation, i.e., when creating a dataset with associated columns and metrics either all the changes succeed when the transaction is committed, or all the changes are undone when the transaction is rolled back. SQLAlchemy supports [nested transactions](https://docs.sqlalchemy.org/en/20/orm/session_transaction.html#nested-transaction) via the `begin_nested` method which can be nested—inline with how DAOs are invoked.
|
||||
3. **Sessions should be managed via a [context manager](https://docs.sqlalchemy.org/en/20/orm/session_transaction.html#begin-once)** which auto-commits on success and rolls back on failure, i.e., there should be no explicit `db.session.commit` or `db.session.rollback` calls within the DAO.
|
||||
|
||||
5. The database layer should adopt a "shift left" mentality i.e., uniqueness/foreign key constraints, relationships, cascades, etc. should all be defined in the database layer rather than being enforced in the application layer.
|
||||
4. **There should be a single atomic transaction representing the entirety of the operation**, i.e., when creating a dataset with associated columns and metrics either all the changes succeed when the transaction is committed, or all the changes are undone when the transaction is rolled back. SQLAlchemy supports [nested transactions](https://docs.sqlalchemy.org/en/20/orm/session_transaction.html#nested-transaction) via the `begin_nested` method which can be nested—inline with how DAOs are invoked.
|
||||
|
||||
5. **The database layer should adopt a "shift left" mentality** i.e., uniqueness/foreign key constraints, relationships, cascades, etc. should all be defined in the database layer rather than being enforced in the application layer.
|
||||
|
||||
6. **Exception-based validation**: Ask for forgiveness rather than permission. Try to perform the operation and rely on database constraints to verify that the model is acceptable, rather than pre-validating conditions.
|
||||
|
||||
7. **Bulk operations**: Provide bulk `create`, `update`, and `delete` methods where applicable for performance optimization.
|
||||
|
||||
8. **Sparse updates**: Updates should only modify explicitly defined attributes.
|
||||
|
||||
9. **Test transactions**: Tests should leverage nested transactions which should be rolled back on teardown, rather than deleting objects.
|
||||
|
||||
## DAO Implementation Examples
|
||||
|
||||
|
||||
@@ -30,21 +30,23 @@ This is an area to host resources and documentation supporting the evolution and
|
||||
|
||||
### Sentence case
|
||||
|
||||
Use sentence-case capitalization for everything in the UI (except these **).
|
||||
Use sentence-case capitalization for everything in the UI (except these exceptions below).
|
||||
|
||||
Sentence case is predominantly lowercase. Capitalize only the initial character of the first word, and other words that require capitalization, like:
|
||||
|
||||
* **Proper nouns.** Objects in the product _are not_ considered proper nouns e.g. dashboards, charts, saved queries etc. Proprietary feature names eg. SQL Lab, Preset Manager _are_ considered proper nouns
|
||||
* **Acronyms** (e.g. CSS, HTML)
|
||||
* When referring to **UI labels that are themselves capitalized** from sentence case (e.g. page titles - Dashboards page, Charts page, Saved queries page, etc.)
|
||||
* User input that is reflected in the UI. E.g. a user-named a dashboard tab
|
||||
- **Proper nouns.** Objects in the product _are not_ considered proper nouns e.g. dashboards, charts, saved queries etc. Proprietary feature names e.g. SQL Lab, Preset Manager _are_ considered proper nouns
|
||||
- **Acronyms** (e.g. CSS, HTML)
|
||||
- When referring to **UI labels that are themselves capitalized** from sentence case (e.g. page titles - Dashboards page, Charts page, Saved queries page, etc.)
|
||||
- User input that is reflected in the UI. E.g. a user-named dashboard tab
|
||||
|
||||
**Sentence case vs. Title case:** Title case: "A Dog Takes a Walk in Paris" Sentence case: "A dog takes a walk in Paris"
|
||||
**Sentence case vs. Title case:**
|
||||
- Title case: "A Dog Takes a Walk in Paris"
|
||||
- Sentence case: "A dog takes a walk in Paris"
|
||||
|
||||
**Why sentence case?**
|
||||
|
||||
* It's generally accepted as the quickest to read
|
||||
* It's the easiest form to distinguish between common and proper nouns
|
||||
- It's generally accepted as the quickest to read
|
||||
- It's the easiest form to distinguish between common and proper nouns
|
||||
|
||||
### How to refer to UI elements
|
||||
|
||||
@@ -52,21 +54,38 @@ When writing about a UI element, use the same capitalization as used in the UI.
|
||||
|
||||
For example, if an input field is labeled "Name" then you refer to this as the "Name input field". Similarly, if a button has the label "Save" in it, then it is correct to refer to the "Save button".
|
||||
|
||||
Where a product page is titled "Settings", you refer to this in writing as follows: "Edit your personal information on the Settings page".
|
||||
Where a product page is titled "Settings", you refer to this in writing as follows:
|
||||
"Edit your personal information on the Settings page".
|
||||
|
||||
Often a product page will have the same title as the objects it contains. In this case, refer to the page as it appears in the UI, and the objects as common nouns:
|
||||
|
||||
* Upload a dashboard on the Dashboards page
|
||||
* Go to Dashboards
|
||||
* View dashboard
|
||||
* View all dashboards
|
||||
* Upload CSS templates on the CSS templates page
|
||||
* Queries that you save will appear on the Saved queries page
|
||||
* Create custom queries in SQL Lab then create dashboards
|
||||
- Upload a dashboard on the Dashboards page
|
||||
- Go to Dashboards
|
||||
- View dashboard
|
||||
- View all dashboards
|
||||
- Upload CSS templates on the CSS templates page
|
||||
- Queries that you save will appear on the Saved queries page
|
||||
- Create custom queries in SQL Lab then create dashboards
|
||||
|
||||
### **Exceptions to sentence case:**
|
||||
### Exceptions to sentence case
|
||||
|
||||
1. Acronyms and abbreviations. Examples: URL, CSV, XML
|
||||
1. **Acronyms and abbreviations.**
|
||||
Examples: URL, CSV, XML, CSS, SQL, SSH, URI, NaN, CRON, CC, BCC
|
||||
|
||||
2. **Proper nouns and brand names.**
|
||||
Examples: Apache, Superset, AntD, JavaScript, GeoJSON, Slack, Google Sheets, SQLAlchemy
|
||||
|
||||
3. **Technical terms derived from proper nouns.**
|
||||
Examples: Jinja, Gaussian, European (as in European time zone)
|
||||
|
||||
4. **Key names.** Capitalize button labels and UI elements as they appear in the product UI.
|
||||
Examples: Shift (as in the keyboard button), Enter key
|
||||
|
||||
5. **Named queries or specific labeled items.**
|
||||
Examples: Query A, Query B
|
||||
|
||||
6. **Database names.** Always capitalize names of database engines and connectors.
|
||||
Examples: Presto, Trino, Drill, Hive, Google Sheets
|
||||
|
||||
## Button Design Guidelines
|
||||
|
||||
@@ -98,6 +117,32 @@ Primary buttons have a fourth style: dropdown.
|
||||
| Tertiary | For less prominent actions; can be used in isolation or paired with a primary button |
|
||||
| Destructive | For actions that could have destructive effects on the user's data |
|
||||
|
||||
### Format
|
||||
|
||||
#### Anatomy
|
||||
|
||||
Button text is centered using the Label style. Icons appear left of text when combined. If no text label exists, an icon must indicate the button's function.
|
||||
|
||||
#### Button size
|
||||
|
||||
- Default dimensions: 160px width × 32px height
|
||||
- Text: 11px, Inter Medium, all caps
|
||||
- Corners: 4px border radius
|
||||
- Minimum padding: 8px around text
|
||||
- Width can decrease if space is limited, but maintain minimum padding
|
||||
|
||||
#### Button groups
|
||||
|
||||
- Group related buttons to establish visual hierarchy
|
||||
- Avoid overwhelming users with too many actions
|
||||
- Limit calls to action; use tertiary/ghost buttons for layouts with 3+ actions
|
||||
- Maintain consistent styles within groups when possible
|
||||
- Space buttons 8px apart vertically or horizontally
|
||||
|
||||
#### Content guidelines
|
||||
|
||||
Button labels should be clear and predictable. Use the "\{verb\} + \{noun\}" format, except for common actions like "Done," "Close," "Cancel," "Add," or "Delete." This formula provides necessary context and aids translation, though compact UIs or localization needs may warrant exceptions.
|
||||
|
||||
## Error Message Design Guidelines
|
||||
|
||||
### Definition
|
||||
@@ -128,10 +173,10 @@ In all cases, encountering errors increases user friction and frustration while
|
||||
|
||||
Select one pattern per error (e.g. do not implement an inline and banner pattern for the same error).
|
||||
|
||||
When the error... | Use...
|
||||
---------------- | ------
|
||||
Is directly related to a UI control | Inline error
|
||||
Is not directly related to a UI control | Banner error
|
||||
| When the error... | Use... |
|
||||
|------------------|--------|
|
||||
| Is directly related to a UI control | Inline error |
|
||||
| Is not directly related to a UI control | Banner error |
|
||||
|
||||
#### Inline
|
||||
|
||||
@@ -146,3 +191,45 @@ Use the `LabeledErrorBoundInput` component for this error pattern.
|
||||
##### Implementation details
|
||||
|
||||
- Where and when relevant, scroll the screen to the UI control with the error
|
||||
- When multiple inline errors are present, scroll to the topmost error
|
||||
|
||||
#### Banner
|
||||
|
||||
Banner errors are used when the source of the error is not directly related to a UI control (text input, selector, etc.) such as a technical failure or a loading problem.
|
||||
|
||||
##### Anatomy
|
||||
|
||||
Use the `ErrorAlert` component for this error pattern.
|
||||
|
||||
1. **Headline** (optional): 1-2 word summary of the error
|
||||
2. **Message**: What went wrong and what users should do next
|
||||
3. **Expand option** (optional): "See more"/"See less"
|
||||
4. **Details** (optional): Additional helpful context
|
||||
5. **Modal** (optional): For spatial constraints using `ToastType.DANGER`
|
||||
|
||||
##### Implementation details
|
||||
|
||||
- Place the banner near the content area most relevant to the error
|
||||
- For chart errors in Explore, use the chart area
|
||||
- For modal errors, use the modal footer
|
||||
- For app-wide errors, use the top of the screen
|
||||
|
||||
### Content guidelines
|
||||
|
||||
Effective error messages communicate:
|
||||
|
||||
1. What went wrong
|
||||
2. What users should do next
|
||||
|
||||
Error messages should be:
|
||||
|
||||
- Clear and accurate, leaving no room for misinterpretation
|
||||
- Short and concise
|
||||
- Understandable to non-technical users
|
||||
- Non-blaming and avoiding negative language
|
||||
|
||||
**Example:**
|
||||
|
||||
❌ "Cannot delete a datasource that has slices attached to it."
|
||||
|
||||
✅ "Please delete all charts using this dataset before deleting the dataset."
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
---
|
||||
title: Frontend Style Guidelines
|
||||
sidebar_position: 2
|
||||
title: Overview
|
||||
sidebar_position: 1
|
||||
---
|
||||
|
||||
<!--
|
||||
@@ -26,19 +26,25 @@ under the License.
|
||||
|
||||
This is a list of statements that describe how we do frontend development in Superset. While they might not be 100% true for all files in the repo, they represent the gold standard we strive towards for frontend quality and style.
|
||||
|
||||
* We develop using TypeScript.
|
||||
* We use React for building components, and Redux to manage app/global state.
|
||||
* See: [Component Style Guidelines and Best Practices](./frontend/component-style-guidelines)
|
||||
* We prefer functional components to class components and use hooks for local component state.
|
||||
* We use [Ant Design](https://ant.design/) components from our component library whenever possible, only building our own custom components when it's required.
|
||||
* We use [@emotion](https://emotion.sh/docs/introduction) to provide styling for our components, co-locating styling within component files.
|
||||
* See: [Emotion Styling Guidelines and Best Practices](./frontend/emotion-styling-guidelines)
|
||||
* We use Jest for unit tests, React Testing Library for component tests, and Cypress for end-to-end tests.
|
||||
* See: [Testing Guidelines and Best Practices](./frontend/testing-guidelines)
|
||||
* We add tests for every new component or file added to the frontend.
|
||||
* We organize our repo so similar files live near each other, and tests are co-located with the files they test.
|
||||
* We prefer small, easily testable files and components.
|
||||
* We use ESLint and Prettier to automatically fix lint errors and format the code.
|
||||
* We do not debate code formatting style in PRs, instead relying on automated tooling to enforce it.
|
||||
* If there's not a linting rule, we don't have a rule!
|
||||
* We use [React Storybook](https://storybook.js.org/) and [Applitools](https://applitools.com/) to help preview/test and stabilize our components
|
||||
- We develop using TypeScript.
|
||||
- See: [SIP-36](https://github.com/apache/superset/issues/9101)
|
||||
- We use React for building components, and Redux to manage app/global state.
|
||||
- See: [Component Style Guidelines and Best Practices](./frontend/component-style-guidelines)
|
||||
- We prefer functional components to class components and use hooks for local component state.
|
||||
- We use [Ant Design](https://ant.design/) components from our component library whenever possible, only building our own custom components when it's required.
|
||||
- See: [SIP-48](https://github.com/apache/superset/issues/11283)
|
||||
- We use [@emotion](https://emotion.sh/docs/introduction) to provide styling for our components, co-locating styling within component files.
|
||||
- See: [SIP-37](https://github.com/apache/superset/issues/9145)
|
||||
- See: [Emotion Styling Guidelines and Best Practices](./frontend/emotion-styling-guidelines)
|
||||
- We use Jest for unit tests, React Testing Library for component tests, and Cypress for end-to-end tests.
|
||||
- See: [SIP-56](https://github.com/apache/superset/issues/11830)
|
||||
- See: [Testing Guidelines and Best Practices](../testing/testing-guidelines)
|
||||
- We add tests for every new component or file added to the frontend.
|
||||
- We organize our repo so similar files live near each other, and tests are co-located with the files they test.
|
||||
- See: [SIP-61](https://github.com/apache/superset/issues/12098)
|
||||
- We prefer small, easily testable files and components.
|
||||
- We use ESLint and Prettier to automatically fix lint errors and format the code.
|
||||
- We do not debate code formatting style in PRs, instead relying on automated tooling to enforce it.
|
||||
- If there's not a linting rule, we don't have a rule!
|
||||
- We use [React Storybook](https://storybook.js.org/) and [Applitools](https://applitools.com/) to help preview/test and stabilize our components
|
||||
- A public Storybook with components from the `master` branch is available [here](https://apache-superset.github.io/superset-ui/?path=/story/*)
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
---
|
||||
title: Component Style Guidelines and Best Practices
|
||||
sidebar_position: 1
|
||||
sidebar_position: 2
|
||||
---
|
||||
|
||||
<!--
|
||||
@@ -35,7 +35,7 @@ This guide is intended primarily for reusable components. Whenever possible, all
|
||||
- All components should be made to be reusable whenever possible
|
||||
- All components should follow the structure and best practices as detailed below
|
||||
|
||||
## Directory and component structure
|
||||
### Directory and component structure
|
||||
|
||||
```
|
||||
superset-frontend/src/components
|
||||
@@ -51,208 +51,142 @@ superset-frontend/src/components
|
||||
|
||||
**Component directory name:** Use `PascalCase` for the component directory name
|
||||
|
||||
**Storybook:** Components should come with a storybook file whenever applicable, with the following naming convention `{ComponentName}.stories.tsx`. More details about Storybook below
|
||||
**Storybook:** Components should come with a storybook file whenever applicable, with the following naming convention `\{ComponentName\}.stories.tsx`. More details about Storybook below
|
||||
|
||||
**Unit and end-to-end tests:** All components should come with unit tests using Jest and React Testing Library. The file name should follow this naming convention `{ComponentName}.test.tsx.` Read the [Testing Guidelines and Best Practices](./testing-guidelines) for more details about tests
|
||||
**Unit and end-to-end tests:** All components should come with unit tests using Jest and React Testing Library. The file name should follow this naming convention `\{ComponentName\}.test.tsx`. Read the [Testing Guidelines and Best Practices](../../testing/testing-guidelines) for more details
|
||||
|
||||
## Component Development Best Practices
|
||||
**Reference naming:** Use `PascalCase` for React components and `camelCase` for component instances
|
||||
|
||||
### Use TypeScript
|
||||
|
||||
All new components should be written in TypeScript. This helps catch errors early and provides better development experience with IDE support.
|
||||
|
||||
```tsx
|
||||
interface ComponentProps {
|
||||
title: string;
|
||||
isVisible?: boolean;
|
||||
onClose?: () => void;
|
||||
}
|
||||
|
||||
export const MyComponent: React.FC<ComponentProps> = ({
|
||||
title,
|
||||
isVisible = true,
|
||||
onClose
|
||||
}) => {
|
||||
// Component implementation
|
||||
};
|
||||
**BAD:**
|
||||
```jsx
|
||||
import mainNav from './MainNav';
|
||||
```
|
||||
|
||||
### Prefer Functional Components
|
||||
**GOOD:**
|
||||
```jsx
|
||||
import MainNav from './MainNav';
|
||||
```
|
||||
|
||||
Use functional components with hooks instead of class components:
|
||||
**BAD:**
|
||||
```jsx
|
||||
const NavItem = <MainNav />;
|
||||
```
|
||||
|
||||
**GOOD:**
|
||||
```jsx
|
||||
const navItem = <MainNav />;
|
||||
```
|
||||
|
||||
**Component naming:** Use the file name as the component name
|
||||
|
||||
**BAD:**
|
||||
```jsx
|
||||
import MainNav from './MainNav/index';
|
||||
```
|
||||
|
||||
**GOOD:**
|
||||
```jsx
|
||||
import MainNav from './MainNav';
|
||||
```
|
||||
|
||||
**Props naming:** Do not use DOM related props for different purposes
|
||||
|
||||
**BAD:**
|
||||
```jsx
|
||||
<MainNav style="big" />
|
||||
```
|
||||
|
||||
**GOOD:**
|
||||
```jsx
|
||||
<MainNav variant="big" />
|
||||
```
|
||||
|
||||
**Importing dependencies:** Only import what you need
|
||||
|
||||
**BAD:**
|
||||
```jsx
|
||||
import * as React from "react";
|
||||
```
|
||||
|
||||
**GOOD:**
|
||||
```jsx
|
||||
import React, { useState } from "react";
|
||||
```
|
||||
|
||||
**Default VS named exports:** As recommended by [TypeScript](https://www.typescriptlang.org/docs/handbook/modules.html), "If a module's primary purpose is to house one specific export, then you should consider exporting it as a default export. This makes both importing and actually using the import a little easier". If you're exporting multiple objects, use named exports instead.
|
||||
|
||||
_As a default export_
|
||||
```jsx
|
||||
import MainNav from './MainNav';
|
||||
```
|
||||
|
||||
_As a named export_
|
||||
```jsx
|
||||
import { MainNav, SecondaryNav } from './Navbars';
|
||||
```
|
||||
|
||||
**ARIA roles:** Always make sure you are writing accessible components by using the official [ARIA roles](https://developer.mozilla.org/en-US/docs/Web/Accessibility/ARIA/ARIA_Techniques)
|
||||
|
||||
## Use TypeScript
|
||||
|
||||
All components should be written in TypeScript and their extensions should be `.ts` or `.tsx`
|
||||
|
||||
### type vs interface
|
||||
|
||||
Validate all props with the correct types. This replaces the need for a run-time validation as provided by the prop-types library.
|
||||
|
||||
```tsx
|
||||
// ✅ Good - Functional component with hooks
|
||||
export const MyComponent: React.FC<Props> = ({ data }) => {
|
||||
const [loading, setLoading] = useState(false);
|
||||
type HeadingProps = {
|
||||
param: string;
|
||||
}
|
||||
|
||||
useEffect(() => {
|
||||
// Effect logic
|
||||
}, []);
|
||||
|
||||
return <div>{/* Component JSX */}</div>;
|
||||
};
|
||||
|
||||
// ❌ Avoid - Class component
|
||||
class MyComponent extends React.Component {
|
||||
// Class implementation
|
||||
export default function Heading({ children }: HeadingProps) {
|
||||
return <h2>{children}</h2>
|
||||
}
|
||||
```
|
||||
|
||||
### Follow Ant Design Patterns
|
||||
Use `type` for your component props and state. Use `interface` when you want to enable _declaration merging_.
|
||||
|
||||
Extend Ant Design components rather than building from scratch:
|
||||
### Define default values for non-required props
|
||||
|
||||
In order to improve the readability of your code and reduce assumptions, always add default values for non required props, when applicable, for example:
|
||||
|
||||
```tsx
|
||||
import { Button } from 'antd';
|
||||
import styled from '@emotion/styled';
|
||||
|
||||
const StyledButton = styled(Button)`
|
||||
// Custom styling using emotion
|
||||
`;
|
||||
const applyDiscount = (price: number, discount = 0.05) => price * (1 - discount);
|
||||
```
|
||||
|
||||
### Reusability and Props Design
|
||||
## Functional components and Hooks
|
||||
|
||||
Design components with reusability in mind:
|
||||
We prefer functional components and the usage of hooks over class components.
|
||||
|
||||
### useState
|
||||
|
||||
Always explicitly declare the type unless the type can easily be assumed by the declaration.
|
||||
|
||||
```tsx
|
||||
interface ButtonProps {
|
||||
variant?: 'primary' | 'secondary' | 'tertiary';
|
||||
size?: 'small' | 'medium' | 'large';
|
||||
loading?: boolean;
|
||||
disabled?: boolean;
|
||||
children: React.ReactNode;
|
||||
onClick?: () => void;
|
||||
}
|
||||
|
||||
export const CustomButton: React.FC<ButtonProps> = ({
|
||||
variant = 'primary',
|
||||
size = 'medium',
|
||||
...props
|
||||
}) => {
|
||||
// Implementation
|
||||
};
|
||||
const [customer, setCustomer] = useState<ICustomer | null>(null);
|
||||
```
|
||||
|
||||
## Testing Components
|
||||
### useReducer
|
||||
|
||||
Every component should include comprehensive tests:
|
||||
Always prefer `useReducer` over `useState` when your state has complex logics.
|
||||
|
||||
```tsx
|
||||
// MyComponent.test.tsx
|
||||
import { render, screen, fireEvent } from '@testing-library/react';
|
||||
import { MyComponent } from './MyComponent';
|
||||
### useMemo and useCallback
|
||||
|
||||
test('renders component with title', () => {
|
||||
render(<MyComponent title="Test Title" />);
|
||||
expect(screen.getByText('Test Title')).toBeInTheDocument();
|
||||
});
|
||||
Always memoize when your components take functions or complex objects as props to avoid unnecessary rerenders.
|
||||
|
||||
test('calls onClose when close button is clicked', () => {
|
||||
const mockOnClose = jest.fn();
|
||||
render(<MyComponent title="Test" onClose={mockOnClose} />);
|
||||
### Custom hooks
|
||||
|
||||
fireEvent.click(screen.getByRole('button', { name: /close/i }));
|
||||
expect(mockOnClose).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
```
|
||||
All custom hooks should be located in the directory `/src/hooks`. Before creating a new custom hook, make sure that is not available in the existing custom hooks.
|
||||
|
||||
## Storybook Stories
|
||||
## Storybook
|
||||
|
||||
Create stories for visual testing and documentation:
|
||||
Each component should come with its dedicated storybook file.
|
||||
|
||||
```tsx
|
||||
// MyComponent.stories.tsx
|
||||
import type { Meta, StoryObj } from '@storybook/react';
|
||||
import { MyComponent } from './MyComponent';
|
||||
**One component per story:** Each storybook file should only contain one component unless substantially different variants are required
|
||||
|
||||
const meta: Meta<typeof MyComponent> = {
|
||||
title: 'Components/MyComponent',
|
||||
component: MyComponent,
|
||||
parameters: {
|
||||
layout: 'centered',
|
||||
},
|
||||
tags: ['autodocs'],
|
||||
};
|
||||
**Component variants:** If the component behavior is substantially different when certain props are used, it is best to separate the story into different types. See the `superset-frontend/src/components/Select/Select.stories.tsx` as an example.
|
||||
|
||||
export default meta;
|
||||
type Story = StoryObj<typeof meta>;
|
||||
**Isolated state:** The storybook should show how the component works in an isolated state and with as few dependencies as possible
|
||||
|
||||
export const Default: Story = {
|
||||
args: {
|
||||
title: 'Default Component',
|
||||
isVisible: true,
|
||||
},
|
||||
};
|
||||
|
||||
export const Hidden: Story = {
|
||||
args: {
|
||||
title: 'Hidden Component',
|
||||
isVisible: false,
|
||||
},
|
||||
};
|
||||
```
|
||||
|
||||
## Performance Considerations
|
||||
|
||||
### Use React.memo for Expensive Components
|
||||
|
||||
```tsx
|
||||
import React, { memo } from 'react';
|
||||
|
||||
export const ExpensiveComponent = memo<Props>(({ data }) => {
|
||||
// Expensive rendering logic
|
||||
return <div>{/* Component content */}</div>;
|
||||
});
|
||||
```
|
||||
|
||||
### Optimize Re-renders
|
||||
|
||||
Use `useCallback` and `useMemo` appropriately:
|
||||
|
||||
```tsx
|
||||
export const OptimizedComponent: React.FC<Props> = ({ items, onSelect }) => {
|
||||
const expensiveValue = useMemo(() => {
|
||||
return items.reduce((acc, item) => acc + item.value, 0);
|
||||
}, [items]);
|
||||
|
||||
const handleSelect = useCallback((id: string) => {
|
||||
onSelect(id);
|
||||
}, [onSelect]);
|
||||
|
||||
return <div>{/* Component content */}</div>;
|
||||
};
|
||||
```
|
||||
|
||||
## Accessibility
|
||||
|
||||
Ensure components are accessible:
|
||||
|
||||
```tsx
|
||||
export const AccessibleButton: React.FC<Props> = ({ children, onClick }) => {
|
||||
return (
|
||||
<button
|
||||
type="button"
|
||||
aria-label="Descriptive label"
|
||||
onClick={onClick}
|
||||
>
|
||||
{children}
|
||||
</button>
|
||||
);
|
||||
};
|
||||
```
|
||||
|
||||
## Error Boundaries
|
||||
|
||||
For components that might fail, consider error boundaries:
|
||||
|
||||
```tsx
|
||||
export const SafeComponent: React.FC<Props> = ({ children }) => {
|
||||
return (
|
||||
<ErrorBoundary fallback={<div>Something went wrong</div>}>
|
||||
{children}
|
||||
</ErrorBoundary>
|
||||
);
|
||||
};
|
||||
```
|
||||
**Use args:** It should be possible to test the component with every variant of the available props. We recommend using [args](https://storybook.js.org/docs/react/writing-stories/args)
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
---
|
||||
title: Emotion Styling Guidelines and Best Practices
|
||||
sidebar_position: 2
|
||||
sidebar_position: 3
|
||||
---
|
||||
|
||||
<!--
|
||||
@@ -33,314 +33,245 @@ under the License.
|
||||
- **DO** use `css` when you want to amend/merge sets of styles compositionally
|
||||
- **DO** use `css` when you're making a small, or single-use set of styles for a component
|
||||
- **DO** move your style definitions from direct usage in the `css` prop to an external variable when they get long
|
||||
- **DO** prefer tagged template literals (`css={css\`...\`}`) over style objects wherever possible for maximum style portability/consistency
|
||||
- **DO** use `useTheme` to get theme variables
|
||||
- **DO** prefer tagged template literals (`css={css`...`}`) over style objects wherever possible for maximum style portability/consistency (note: typescript support may be diminished, but IDE plugins like [this](https://marketplace.visualstudio.com/items?itemName=jpoissonnier.vscode-styled-components) make life easy)
|
||||
- **DO** use `useTheme` to get theme variables. `withTheme` should be only used for wrapping legacy Class-based components.
|
||||
|
||||
### DON'T do these things:
|
||||
|
||||
- **DON'T** use `styled` for small, single-use style tweaks that would be easier to read/review if they were inline
|
||||
- **DON'T** export incomplete Ant Design components
|
||||
- **DON'T** export incomplete AntD components (make sure all their compound components are exported)
|
||||
|
||||
## Emotion Tips and Strategies
|
||||
|
||||
The first thing to consider when adding styles to an element is how much you think a style might be reusable in other areas of Superset. Always err on the side of reusability here. Nobody wants to chase styling inconsistencies, or try to debug little endless overrides scattered around the codebase. The more we can consolidate, the less will have to be figured out by those who follow. Reduce, reuse, recycle.
|
||||
|
||||
## When to use `css` or `styled`
|
||||
|
||||
### Use `css` for:
|
||||
In short, either works for just about any use case! And you'll see them used somewhat interchangeably in the existing codebase. But we need a way to weigh it when we encounter the choice, so here's one way to think about it:
|
||||
|
||||
1. **Small, single-use styles**
|
||||
```tsx
|
||||
import { css } from '@emotion/react';
|
||||
A good use of `styled` syntax if you want to re-use a styled component. In other words, if you wanted to export flavors of a component for use, like so:
|
||||
|
||||
const MyComponent = () => (
|
||||
<div
|
||||
css={css`
|
||||
margin: 8px;
|
||||
padding: 16px;
|
||||
`}
|
||||
>
|
||||
Content
|
||||
</div>
|
||||
);
|
||||
```
|
||||
|
||||
2. **Composing styles**
|
||||
```tsx
|
||||
const baseStyles = css`
|
||||
padding: 16px;
|
||||
border-radius: 4px;
|
||||
```jsx
|
||||
const StatusThing = styled.div`
|
||||
padding: 10px;
|
||||
border-radius: 10px;
|
||||
`;
|
||||
|
||||
const primaryStyles = css`
|
||||
${baseStyles}
|
||||
background-color: blue;
|
||||
color: white;
|
||||
export const InfoThing = styled(StatusThing)`
|
||||
background: blue;
|
||||
&::before {
|
||||
content: "ℹ️";
|
||||
}
|
||||
`;
|
||||
|
||||
const secondaryStyles = css`
|
||||
${baseStyles}
|
||||
background-color: gray;
|
||||
color: black;
|
||||
export const WarningThing = styled(StatusThing)`
|
||||
background: orange;
|
||||
&::before {
|
||||
content: "⚠️";
|
||||
}
|
||||
`;
|
||||
```
|
||||
|
||||
3. **Conditional styling**
|
||||
```tsx
|
||||
const MyComponent = ({ isActive }: { isActive: boolean }) => (
|
||||
<div
|
||||
css={[
|
||||
baseStyles,
|
||||
isActive && activeStyles,
|
||||
]}
|
||||
>
|
||||
Content
|
||||
</div>
|
||||
);
|
||||
```
|
||||
|
||||
### Use `styled` for:
|
||||
|
||||
1. **Reusable components**
|
||||
```tsx
|
||||
import styled from '@emotion/styled';
|
||||
|
||||
const StyledButton = styled.button`
|
||||
padding: 12px 24px;
|
||||
border: none;
|
||||
border-radius: 4px;
|
||||
background-color: ${({ theme }) => theme.colors.primary};
|
||||
color: white;
|
||||
|
||||
&:hover {
|
||||
background-color: ${({ theme }) => theme.colors.primaryDark};
|
||||
export const TerribleThing = styled(StatusThing)`
|
||||
background: red;
|
||||
&::before {
|
||||
content: "🔥";
|
||||
}
|
||||
`;
|
||||
```
|
||||
|
||||
2. **Components with complex nested selectors**
|
||||
```tsx
|
||||
const StyledCard = styled.div`
|
||||
padding: 16px;
|
||||
border: 1px solid ${({ theme }) => theme.colors.border};
|
||||
You can also use `styled` when you're building a bigger component, and just want to have some custom bits for internal use in your JSX. For example:
|
||||
|
||||
.card-header {
|
||||
font-weight: bold;
|
||||
margin-bottom: 8px;
|
||||
}
|
||||
|
||||
.card-content {
|
||||
color: ${({ theme }) => theme.colors.text};
|
||||
|
||||
p {
|
||||
margin-bottom: 12px;
|
||||
}
|
||||
}
|
||||
```jsx
|
||||
const SeparatorOnlyUsedInThisComponent = styled.hr`
|
||||
height: 12px;
|
||||
border: 0;
|
||||
box-shadow: inset 0 12px 12px -12px rgba(0, 0, 0, 0.5);
|
||||
`;
|
||||
|
||||
function SuperComplicatedComponent(props) {
|
||||
return (
|
||||
<>
|
||||
Daily standup for {user.name}!
|
||||
<SeparatorOnlyUsedInThisComponent />
|
||||
<h2>Yesterday:</h2>
|
||||
// spit out a list of accomplishments
|
||||
<SeparatorOnlyUsedInThisComponent />
|
||||
<h2>Today:</h2>
|
||||
// spit out a list of plans
|
||||
<SeparatorOnlyUsedInThisComponent />
|
||||
<h2>Tomorrow:</h2>
|
||||
// spit out a list of goals
|
||||
</>
|
||||
);
|
||||
}
|
||||
```
|
||||
|
||||
3. **Extending Ant Design components**
|
||||
```tsx
|
||||
import { Button } from 'antd';
|
||||
import styled from '@emotion/styled';
|
||||
The `css` prop, in reality, shares all the same styling capabilities as `styled` but it does have some particular use cases that jump out as sensible. For example, if you just want to style one element in your component, you could add the styles inline like so:
|
||||
|
||||
const CustomButton = styled(Button)`
|
||||
border-radius: 8px;
|
||||
font-weight: 600;
|
||||
|
||||
&.ant-btn-primary {
|
||||
background: linear-gradient(45deg, #1890ff, #722ed1);
|
||||
}
|
||||
`;
|
||||
```jsx
|
||||
function SomeFanciness(props) {
|
||||
return (
|
||||
<>
|
||||
Here's an awesome report card for {user.name}!
|
||||
<div
|
||||
css={css`
|
||||
box-shadow: 5px 5px 10px #ccc;
|
||||
border-radius: 10px;
|
||||
`}
|
||||
>
|
||||
<h2>Yesterday:</h2>
|
||||
// ...some stuff
|
||||
<h2>Today:</h2>
|
||||
// ...some stuff
|
||||
<h2>Tomorrow:</h2>
|
||||
// ...some stuff
|
||||
</div>
|
||||
</>
|
||||
);
|
||||
}
|
||||
```
|
||||
|
||||
## Using Theme Variables
|
||||
You can also define the styles as a variable, external to your JSX. This is handy if the styles get long and you just want it out of the way. This is also handy if you want to apply the same styles to disparate element types, kind of like you might use a CSS class on varied elements. Here's a trumped up example:
|
||||
|
||||
Always use theme variables for consistent styling:
|
||||
|
||||
```tsx
|
||||
import { useTheme } from '@emotion/react';
|
||||
|
||||
const MyComponent = () => {
|
||||
const theme = useTheme();
|
||||
```jsx
|
||||
function FakeGlobalNav(props) {
|
||||
const menuItemStyles = css`
|
||||
display: block;
|
||||
border-bottom: 1px solid cadetblue;
|
||||
font-family: "Comic Sans", cursive;
|
||||
`;
|
||||
|
||||
return (
|
||||
<div
|
||||
css={css`
|
||||
background-color: ${theme.colors.grayscale.light5};
|
||||
color: ${theme.colors.grayscale.dark2};
|
||||
padding: ${theme.gridUnit * 4}px;
|
||||
border-radius: ${theme.borderRadius}px;
|
||||
`}
|
||||
>
|
||||
Content
|
||||
<Nav>
|
||||
<a css={menuItemStyles} href="#">One link</a>
|
||||
<Link css={menuItemStyles} to={url}>Another link</Link>
|
||||
<div css={menuItemStyles} onClick={() => alert('clicked')}>Another link</div>
|
||||
</Nav>
|
||||
);
|
||||
}
|
||||
```
|
||||
|
||||
## CSS tips and tricks
|
||||
|
||||
### `css` lets you write actual CSS
|
||||
|
||||
By default the `css` prop uses the object syntax with JS style definitions, like so:
|
||||
|
||||
```jsx
|
||||
<div css={{
|
||||
borderRadius: 10,
|
||||
marginTop: 10,
|
||||
backgroundColor: '#00FF00'
|
||||
}}>Howdy</div>
|
||||
```
|
||||
|
||||
But you can use the `css` interpolator as well to get away from icky JS styling syntax. Doesn't this look cleaner?
|
||||
|
||||
```jsx
|
||||
<div css={css`
|
||||
border-radius: 10px;
|
||||
margin-top: 10px;
|
||||
background-color: #00FF00;
|
||||
`}>Howdy</div>
|
||||
```
|
||||
|
||||
You might say "whatever… I can read and write JS syntax just fine." Well, that's great. But… let's say you're migrating in some of our legacy LESS styles… now it's copy/paste! Or if you want to migrate to or from `styled` syntax… also copy/paste!
|
||||
|
||||
### You can combine `css` definitions with array syntax
|
||||
|
||||
You can use multiple groupings of styles with the `css` interpolator, and combine/override them in array syntax, like so:
|
||||
|
||||
```jsx
|
||||
function AnotherSillyExample(props) {
|
||||
const shadowedCard = css`
|
||||
box-shadow: 2px 2px 4px #999;
|
||||
padding: 4px;
|
||||
`;
|
||||
const infoCard = css`
|
||||
background-color: #33f;
|
||||
border-radius: 4px;
|
||||
`;
|
||||
const overrideInfoCard = css`
|
||||
background-color: #f33;
|
||||
`;
|
||||
return (
|
||||
<div className="App">
|
||||
Combining two classes:
|
||||
<div css={[shadowedCard, infoCard]}>Hello</div>
|
||||
Combining again, but now with overrides:
|
||||
<div css={[shadowedCard, infoCard, overrideInfoCard]}>Hello</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
```
|
||||
|
||||
## Common Patterns
|
||||
|
||||
### Responsive Design
|
||||
```tsx
|
||||
const ResponsiveContainer = styled.div`
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
|
||||
${({ theme }) => theme.breakpoints.up('md')} {
|
||||
flex-direction: row;
|
||||
}
|
||||
`;
|
||||
```
|
||||
|
||||
### Animation
|
||||
```tsx
|
||||
const FadeInComponent = styled.div`
|
||||
opacity: 0;
|
||||
transition: opacity 0.3s ease-in-out;
|
||||
|
||||
&.visible {
|
||||
opacity: 1;
|
||||
}
|
||||
`;
|
||||
```
|
||||
|
||||
### Conditional Props
|
||||
```tsx
|
||||
interface StyledDivProps {
|
||||
isHighlighted?: boolean;
|
||||
size?: 'small' | 'medium' | 'large';
|
||||
}
|
||||
```
|
||||
|
||||
const StyledDiv = styled.div<StyledDivProps>`
|
||||
padding: 16px;
|
||||
background-color: ${({ isHighlighted, theme }) =>
|
||||
isHighlighted ? theme.colors.primary : theme.colors.grayscale.light5};
|
||||
### Style variations with props
|
||||
|
||||
${({ size }) => {
|
||||
switch (size) {
|
||||
case 'small':
|
||||
return css`font-size: 12px;`;
|
||||
case 'large':
|
||||
return css`font-size: 18px;`;
|
||||
default:
|
||||
return css`font-size: 14px;`;
|
||||
}
|
||||
}}
|
||||
You can give any component a custom prop, and reference that prop in your component styles, effectively using the prop to turn on a "flavor" of that component.
|
||||
|
||||
For example, let's make a styled component that acts as a card. Of course, this could be done with any AntD component, or any component at all. But we'll do this with a humble `div` to illustrate the point:
|
||||
|
||||
```jsx
|
||||
const SuperCard = styled.div`
|
||||
${({ theme, cutout }) => `
|
||||
padding: ${theme.gridUnit * 2}px;
|
||||
border-radius: ${theme.borderRadius}px;
|
||||
box-shadow: 10px 5px 10px #ccc ${cutout && 'inset'};
|
||||
border: 1px solid ${cutout ? 'transparent' : theme.colors.secondary.light3};
|
||||
`}
|
||||
`;
|
||||
```
|
||||
|
||||
## Best Practices
|
||||
Then just use the component as `<SuperCard>Some content</SuperCard>` or with the (potentially dynamic) prop: `<SuperCard cutout>Some content</SuperCard>`
|
||||
|
||||
### 1. Use Semantic CSS Properties
|
||||
```tsx
|
||||
// ✅ Good - semantic property names
|
||||
const Header = styled.h1`
|
||||
font-size: ${({ theme }) => theme.typography.sizes.xl};
|
||||
margin-bottom: ${({ theme }) => theme.gridUnit * 4}px;
|
||||
`;
|
||||
## Styled component tips
|
||||
|
||||
// ❌ Avoid - magic numbers
|
||||
const Header = styled.h1`
|
||||
font-size: 24px;
|
||||
margin-bottom: 16px;
|
||||
`;
|
||||
```
|
||||
### No need to use `theme` the hard way
|
||||
|
||||
### 2. Group Related Styles
|
||||
```tsx
|
||||
// ✅ Good - grouped styles
|
||||
const Card = styled.div`
|
||||
/* Layout */
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
padding: ${({ theme }) => theme.gridUnit * 4}px;
|
||||
It's very tempting (and commonly done) to use the `theme` prop inline in the template literal like so:
|
||||
|
||||
/* Appearance */
|
||||
background-color: ${({ theme }) => theme.colors.grayscale.light5};
|
||||
border: 1px solid ${({ theme }) => theme.colors.grayscale.light2};
|
||||
```jsx
|
||||
const SomeStyledThing = styled.div`
|
||||
padding: ${({ theme }) => theme.gridUnit * 2}px;
|
||||
border-radius: ${({ theme }) => theme.borderRadius}px;
|
||||
|
||||
/* Typography */
|
||||
font-family: ${({ theme }) => theme.typography.families.sansSerif};
|
||||
color: ${({ theme }) => theme.colors.grayscale.dark2};
|
||||
border: 1px solid ${({ theme }) => theme.colors.secondary.light3};
|
||||
`;
|
||||
```
|
||||
|
||||
### 3. Extract Complex Styles
|
||||
```tsx
|
||||
// ✅ Good - extract complex styles to variables
|
||||
const complexGradient = css`
|
||||
background: linear-gradient(
|
||||
135deg,
|
||||
${({ theme }) => theme.colors.primary} 0%,
|
||||
${({ theme }) => theme.colors.secondary} 100%
|
||||
);
|
||||
`;
|
||||
Instead, you can make things a little easier to read/type by writing it like so:
|
||||
|
||||
const GradientButton = styled.button`
|
||||
${complexGradient}
|
||||
padding: 12px 24px;
|
||||
border: none;
|
||||
color: white;
|
||||
```jsx
|
||||
const SomeStyledThing = styled.div`
|
||||
${({ theme }) => `
|
||||
padding: ${theme.gridUnit * 2}px;
|
||||
border-radius: ${theme.borderRadius}px;
|
||||
border: 1px solid ${theme.colors.secondary.light3};
|
||||
`}
|
||||
`;
|
||||
```
|
||||
|
||||
### 4. Avoid Deep Nesting
|
||||
```tsx
|
||||
// ✅ Good - shallow nesting
|
||||
const Navigation = styled.nav`
|
||||
.nav-item {
|
||||
padding: 8px 16px;
|
||||
}
|
||||
## Extend an AntD component with custom styling
|
||||
|
||||
.nav-link {
|
||||
color: ${({ theme }) => theme.colors.text};
|
||||
text-decoration: none;
|
||||
}
|
||||
As mentioned, you want to keep your styling as close to the root of your component system as possible, to minimize repetitive styling/overrides, and err on the side of reusability. In some cases, that means you'll want to globally tweak one of our core components to match our design system. In Superset, that's Ant Design (AntD).
|
||||
|
||||
AntD uses a cool trick called compound components. For example, the `Menu` component also lets you use `Menu.Item`, `Menu.SubMenu`, `Menu.ItemGroup`, and `Menu.Divider`.
|
||||
|
||||
### The `Object.assign` trick
|
||||
|
||||
Let's say you want to override an AntD component called `Foo`, and have `Foo.Bar` display some custom CSS for the `Bar` compound component. You can do it effectively like so:
|
||||
|
||||
```jsx
|
||||
import {
|
||||
Foo as AntdFoo,
|
||||
} from 'antd';
|
||||
|
||||
export const StyledBar = styled(AntdFoo.Bar)`
|
||||
border-radius: ${({ theme }) => theme.borderRadius}px;
|
||||
`;
|
||||
|
||||
// ❌ Avoid - deep nesting
|
||||
const Navigation = styled.nav`
|
||||
ul {
|
||||
li {
|
||||
a {
|
||||
span {
|
||||
color: blue; /* Too nested */
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`;
|
||||
```
|
||||
|
||||
## Performance Tips
|
||||
|
||||
### 1. Avoid Inline Functions in CSS
|
||||
```tsx
|
||||
// ✅ Good - external function
|
||||
const getBackgroundColor = (isActive: boolean, theme: any) =>
|
||||
isActive ? theme.colors.primary : theme.colors.secondary;
|
||||
|
||||
const Button = styled.button<{ isActive: boolean }>`
|
||||
background-color: ${({ isActive, theme }) => getBackgroundColor(isActive, theme)};
|
||||
`;
|
||||
|
||||
// ❌ Avoid - inline function (creates new function on each render)
|
||||
const Button = styled.button<{ isActive: boolean }>`
|
||||
background-color: ${({ isActive, theme }) =>
|
||||
isActive ? theme.colors.primary : theme.colors.secondary};
|
||||
`;
|
||||
```
|
||||
|
||||
### 2. Use CSS Objects for Dynamic Styles
|
||||
```tsx
|
||||
// For highly dynamic styles, consider CSS objects
|
||||
const dynamicStyles = (props: Props) => ({
|
||||
backgroundColor: props.color,
|
||||
fontSize: `${props.size}px`,
|
||||
// ... other dynamic properties
|
||||
export const Foo = Object.assign(AntdFoo, {
|
||||
Bar: StyledBar,
|
||||
});
|
||||
|
||||
const DynamicComponent = (props: Props) => (
|
||||
<div css={dynamicStyles(props)}>
|
||||
Content
|
||||
</div>
|
||||
);
|
||||
```
|
||||
|
||||
You can then import this customized `Foo` and use `Foo.Bar` as expected. You should probably save your creation in `src/components` for maximum reusability, and add a Storybook entry so future engineers can view your creation, and designers can better understand how it fits the Superset Design System.
|
||||
|
||||
@@ -1,297 +0,0 @@
|
||||
---
|
||||
title: Testing Guidelines and Best Practices
|
||||
sidebar_position: 3
|
||||
---
|
||||
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
# Testing Guidelines and Best Practices
|
||||
|
||||
We feel that tests are an important part of a feature and not an additional or optional effort. That's why we colocate test files with functionality and sometimes write tests upfront to help validate requirements and shape the API of our components. Every new component or file added should have an associated test file with the `.test` extension.
|
||||
|
||||
We use Jest, React Testing Library (RTL), and Cypress to write our unit, integration, and end-to-end tests. For each type, we have a set of best practices/tips described below:
|
||||
|
||||
## Jest
|
||||
|
||||
### Write simple, standalone tests
|
||||
|
||||
The importance of simplicity is often overlooked in test cases. Clear, dumb code should always be preferred over complex ones. The test cases should be pretty much standalone and should not involve any external logic if not absolutely necessary. That's because you want the corpus of the tests to be easy to read and understandable at first sight.
|
||||
|
||||
### Avoid nesting when you're testing
|
||||
|
||||
Avoid the use of `describe` blocks in favor of inlined tests. If your tests start to grow and you feel the need to group tests, prefer to break them into multiple test files. Check this awesome [article](https://kentcdodds.com/blog/avoid-nesting-when-youre-testing) written by [Kent C. Dodds](https://kentcdodds.com/) about this topic.
|
||||
|
||||
### No warnings!
|
||||
|
||||
Your tests shouldn't trigger warnings. This is really common when testing async functionality. It's really difficult to read test results when we have a bunch of warnings.
|
||||
|
||||
## React Testing Library (RTL)
|
||||
|
||||
### Keep accessibility in mind when writing your tests
|
||||
|
||||
One of the most important points of RTL is accessibility and this is also a very important point for us. We should try our best to follow the RTL [Priority](https://testing-library.com/docs/queries/about/#priority) when querying for elements in our tests. `getByTestId` is not viewable by the user and should only be used when the element isn't accessible in any other way.
|
||||
|
||||
### Don't use `act` unnecessarily
|
||||
|
||||
`render` and `fireEvent` are already wrapped in `act`, so wrapping them in `act` again is a common mistake. Some solutions to the warnings related to async testing can be found in the RTL docs.
|
||||
|
||||
## Example Test Structure
|
||||
|
||||
```tsx
|
||||
// MyComponent.test.tsx
|
||||
import { render, screen, fireEvent, waitFor } from '@testing-library/react';
|
||||
import userEvent from '@testing-library/user-event';
|
||||
import { MyComponent } from './MyComponent';
|
||||
|
||||
// ✅ Good - Simple, standalone test
|
||||
test('renders loading state initially', () => {
|
||||
render(<MyComponent />);
|
||||
expect(screen.getByText('Loading...')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// ✅ Good - Tests user interaction
|
||||
test('calls onSubmit when form is submitted', async () => {
|
||||
const user = userEvent.setup();
|
||||
const mockOnSubmit = jest.fn();
|
||||
|
||||
render(<MyComponent onSubmit={mockOnSubmit} />);
|
||||
|
||||
await user.type(screen.getByLabelText('Username'), 'testuser');
|
||||
await user.click(screen.getByRole('button', { name: 'Submit' }));
|
||||
|
||||
expect(mockOnSubmit).toHaveBeenCalledWith({ username: 'testuser' });
|
||||
});
|
||||
|
||||
// ✅ Good - Tests async behavior
|
||||
test('displays error message when API call fails', async () => {
|
||||
const mockFetch = jest.fn().mockRejectedValue(new Error('API Error'));
|
||||
global.fetch = mockFetch;
|
||||
|
||||
render(<MyComponent />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Error: API Error')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
## Testing Best Practices
|
||||
|
||||
### Use appropriate queries in priority order
|
||||
|
||||
1. **Accessible to everyone**: `getByRole`, `getByLabelText`, `getByPlaceholderText`, `getByText`
|
||||
2. **Semantic queries**: `getByAltText`, `getByTitle`
|
||||
3. **Test IDs**: `getByTestId` (last resort)
|
||||
|
||||
```tsx
|
||||
// ✅ Good - using accessible queries
|
||||
test('user can submit form', () => {
|
||||
render(<LoginForm />);
|
||||
|
||||
const usernameInput = screen.getByLabelText('Username');
|
||||
const passwordInput = screen.getByLabelText('Password');
|
||||
const submitButton = screen.getByRole('button', { name: 'Log in' });
|
||||
|
||||
// Test implementation
|
||||
});
|
||||
|
||||
// ❌ Avoid - using test IDs when better options exist
|
||||
test('user can submit form', () => {
|
||||
render(<LoginForm />);
|
||||
|
||||
const usernameInput = screen.getByTestId('username-input');
|
||||
const passwordInput = screen.getByTestId('password-input');
|
||||
const submitButton = screen.getByTestId('submit-button');
|
||||
|
||||
// Test implementation
|
||||
});
|
||||
```
|
||||
|
||||
### Test behavior, not implementation details
|
||||
|
||||
```tsx
|
||||
// ✅ Good - tests what the user experiences
|
||||
test('shows success message after successful form submission', async () => {
|
||||
render(<ContactForm />);
|
||||
|
||||
await userEvent.type(screen.getByLabelText('Email'), 'test@example.com');
|
||||
await userEvent.click(screen.getByRole('button', { name: 'Submit' }));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Message sent successfully!')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
// ❌ Avoid - testing implementation details
|
||||
test('calls setState when form is submitted', () => {
|
||||
const component = shallow(<ContactForm />);
|
||||
const instance = component.instance();
|
||||
const spy = jest.spyOn(instance, 'setState');
|
||||
|
||||
instance.handleSubmit();
|
||||
expect(spy).toHaveBeenCalled();
|
||||
});
|
||||
```
|
||||
|
||||
### Mock external dependencies appropriately
|
||||
|
||||
```tsx
|
||||
// Mock API calls
|
||||
jest.mock('../api/userService', () => ({
|
||||
getUser: jest.fn(),
|
||||
createUser: jest.fn(),
|
||||
}));
|
||||
|
||||
// Mock components that aren't relevant to the test
|
||||
jest.mock('../Chart/Chart', () => {
|
||||
return function MockChart() {
|
||||
return <div data-testid="mock-chart">Chart Component</div>;
|
||||
};
|
||||
});
|
||||
```
|
||||
|
||||
## Async Testing Patterns
|
||||
|
||||
### Testing async operations
|
||||
|
||||
```tsx
|
||||
test('loads and displays user data', async () => {
|
||||
const mockUser = { id: 1, name: 'John Doe' };
|
||||
const mockGetUser = jest.fn().mockResolvedValue(mockUser);
|
||||
|
||||
render(<UserProfile getUserData={mockGetUser} />);
|
||||
|
||||
// Wait for the async operation to complete
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('John Doe')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
expect(mockGetUser).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
```
|
||||
|
||||
### Testing loading states
|
||||
|
||||
```tsx
|
||||
test('shows loading spinner while fetching data', async () => {
|
||||
const mockGetUser = jest.fn().mockImplementation(
|
||||
() => new Promise(resolve => setTimeout(resolve, 100))
|
||||
);
|
||||
|
||||
render(<UserProfile getUserData={mockGetUser} />);
|
||||
|
||||
expect(screen.getByText('Loading...')).toBeInTheDocument();
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.queryByText('Loading...')).not.toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
## Component-Specific Testing
|
||||
|
||||
### Testing form components
|
||||
|
||||
```tsx
|
||||
test('validates required fields', async () => {
|
||||
render(<RegistrationForm />);
|
||||
|
||||
const submitButton = screen.getByRole('button', { name: 'Register' });
|
||||
await userEvent.click(submitButton);
|
||||
|
||||
expect(screen.getByText('Username is required')).toBeInTheDocument();
|
||||
expect(screen.getByText('Email is required')).toBeInTheDocument();
|
||||
});
|
||||
```
|
||||
|
||||
### Testing modals and overlays
|
||||
|
||||
```tsx
|
||||
test('opens and closes modal', async () => {
|
||||
render(<ModalContainer />);
|
||||
|
||||
const openButton = screen.getByRole('button', { name: 'Open Modal' });
|
||||
await userEvent.click(openButton);
|
||||
|
||||
expect(screen.getByRole('dialog')).toBeInTheDocument();
|
||||
|
||||
const closeButton = screen.getByRole('button', { name: 'Close' });
|
||||
await userEvent.click(closeButton);
|
||||
|
||||
expect(screen.queryByRole('dialog')).not.toBeInTheDocument();
|
||||
});
|
||||
```
|
||||
|
||||
### Testing with context providers
|
||||
|
||||
```tsx
|
||||
const renderWithTheme = (component: React.ReactElement) => {
|
||||
return render(
|
||||
<ThemeProvider theme={mockTheme}>
|
||||
{component}
|
||||
</ThemeProvider>
|
||||
);
|
||||
};
|
||||
|
||||
test('applies theme colors correctly', () => {
|
||||
renderWithTheme(<ThemedButton />);
|
||||
|
||||
const button = screen.getByRole('button');
|
||||
expect(button).toHaveStyle({
|
||||
backgroundColor: mockTheme.colors.primary,
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
## Performance Testing
|
||||
|
||||
### Testing with large datasets
|
||||
|
||||
```tsx
|
||||
test('handles large lists efficiently', () => {
|
||||
const largeDataset = Array.from({ length: 10000 }, (_, i) => ({
|
||||
id: i,
|
||||
name: `Item ${i}`,
|
||||
}));
|
||||
|
||||
const { container } = render(<VirtualizedList items={largeDataset} />);
|
||||
|
||||
// Should only render visible items
|
||||
const renderedItems = container.querySelectorAll('[data-testid="list-item"]');
|
||||
expect(renderedItems.length).toBeLessThan(100);
|
||||
});
|
||||
```
|
||||
|
||||
## Testing Accessibility
|
||||
|
||||
```tsx
|
||||
test('is accessible to screen readers', () => {
|
||||
render(<AccessibleForm />);
|
||||
|
||||
const form = screen.getByRole('form');
|
||||
const inputs = screen.getAllByRole('textbox');
|
||||
|
||||
inputs.forEach(input => {
|
||||
expect(input).toHaveAttribute('aria-label');
|
||||
});
|
||||
|
||||
expect(form).toHaveAttribute('aria-describedby');
|
||||
});
|
||||
```
|
||||
@@ -36,13 +36,21 @@ module.exports = {
|
||||
'extensions/overview',
|
||||
'extensions/quick-start',
|
||||
'extensions/architecture',
|
||||
'extensions/extension-project-structure',
|
||||
'extensions/extension-metadata',
|
||||
'extensions/frontend-contribution-types',
|
||||
'extensions/interacting-with-host',
|
||||
'extensions/deploying-extension',
|
||||
'extensions/development-mode',
|
||||
'extensions/security-implications',
|
||||
'extensions/dependencies',
|
||||
'extensions/contribution-types',
|
||||
{
|
||||
type: 'category',
|
||||
label: 'Extension Points',
|
||||
collapsed: true,
|
||||
items: [
|
||||
'extensions/extension-points/sqllab',
|
||||
],
|
||||
},
|
||||
'extensions/development',
|
||||
'extensions/deployment',
|
||||
'extensions/mcp',
|
||||
'extensions/security',
|
||||
'extensions/registry',
|
||||
],
|
||||
},
|
||||
{
|
||||
|
||||
129
docs/developer_portal/testing/testing-guidelines.md
Normal file
129
docs/developer_portal/testing/testing-guidelines.md
Normal file
@@ -0,0 +1,129 @@
|
||||
---
|
||||
title: Testing Guidelines and Best Practices
|
||||
sidebar_position: 1
|
||||
---
|
||||
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
# Testing Guidelines and Best Practices
|
||||
|
||||
We feel that tests are an important part of a feature and not an additional or optional effort. That's why we colocate test files with functionality and sometimes write tests upfront to help validate requirements and shape the API of our components. Every new component or file added should have an associated test file with the `.test` extension.
|
||||
|
||||
We use Jest, React Testing Library (RTL), and Cypress to write our unit, integration, and end-to-end tests. For each type, we have a set of best practices/tips described below:
|
||||
|
||||
## Jest
|
||||
|
||||
### Write simple, standalone tests
|
||||
|
||||
The importance of simplicity is often overlooked in test cases. Clear, dumb code should always be preferred over complex ones. The test cases should be pretty much standalone and should not involve any external logic if not absolutely necessary. That's because you want the corpus of the tests to be easy to read and understandable at first sight.
|
||||
|
||||
### Avoid nesting when you're testing
|
||||
|
||||
Avoid the use of `describe` blocks in favor of inlined tests. If your tests start to grow and you feel the need to group tests, prefer to break them into multiple test files. Check this awesome [article](https://kentcdodds.com/blog/avoid-nesting-when-youre-testing) written by [Kent C. Dodds](https://kentcdodds.com/) about this topic.
|
||||
|
||||
### No warnings!
|
||||
|
||||
Your tests shouldn't trigger warnings. This is really common when testing async functionality. It's really difficult to read test results when we have a bunch of warnings.
|
||||
|
||||
### Example
|
||||
|
||||
You can find an example of a test [here](https://github.com/apache/superset/blob/e6c5bf4/superset-frontend/src/common/hooks/useChangeEffect/useChangeEffect.test.ts).
|
||||
|
||||
## React Testing Library (RTL)
|
||||
|
||||
### Keep accessibility in mind when writing your tests
|
||||
|
||||
One of the most important points of RTL is accessibility and this is also a very important point for us. We should try our best to follow the RTL [Priority](https://testing-library.com/docs/queries/about/#priority) when querying for elements in our tests. `getByTestId` is not viewable by the user and should only be used when the element isn't accessible in any other way.
|
||||
|
||||
### Don't use `act` unnecessarily
|
||||
|
||||
`render` and `fireEvent` are already wrapped in `act`, so wrapping them in `act` again is a common mistake. Some solutions to the warnings related to `act` might be found [here](https://kentcdodds.com/blog/fix-the-not-wrapped-in-act-warning).
|
||||
|
||||
### Be specific when using *ByRole
|
||||
|
||||
By using the `name` option we can point to the items by their accessible name. For example:
|
||||
|
||||
```jsx
|
||||
screen.getByRole('button', { name: /hello world/i })
|
||||
```
|
||||
|
||||
Using the `name` property also avoids breaking the tests in the future if other components with the same role are added.
|
||||
|
||||
### userEvent vs fireEvent
|
||||
|
||||
Prefer the [user-event](https://github.com/testing-library/user-event) library, which provides a more advanced simulation of browser interactions than the built-in [fireEvent](https://testing-library.com/docs/dom-testing-library/api-events/#fireevent) method.
|
||||
|
||||
### Usage of waitFor
|
||||
|
||||
- [Prefer to use `find`](https://kentcdodds.com/blog/common-mistakes-with-react-testing-library#using-waitfor-to-wait-for-elements-that-can-be-queried-with-find) over `waitFor` when you're querying for elements. Even though both achieve the same objective, the `find` version is simpler and you'll get better error messages.
|
||||
- Prefer to use only [one assertion at a time](https://kentcdodds.com/blog/common-mistakes-with-react-testing-library#having-multiple-assertions-in-a-single-waitfor-callback). If you put multiple assertions inside a `waitFor` we could end up waiting for the whole block timeout before seeing a test failure even if the failure occurred at the first assertion. By putting a single assertion in there, we can improve on test execution time.
|
||||
- Do not perform [side-effects](https://kentcdodds.com/blog/common-mistakes-with-react-testing-library#performing-side-effects-in-waitfor) inside `waitFor`. The callback can be called a non-deterministic number of times and frequency (it's called both on an interval as well as when there are DOM mutations). So this means that your side-effect could run multiple times.
|
||||
|
||||
### Example
|
||||
|
||||
You can find an example of a test [here](https://github.com/apache/superset/blob/master/superset-frontend/src/dashboard/components/PublishedStatus/PublishedStatus.test.tsx).
|
||||
|
||||
## Cypress
|
||||
|
||||
### Prefer to use Cypress for e2e testing and RTL for integration testing
|
||||
|
||||
Here it's important to make the distinction between e2e and integration testing. This [article](https://www.onpathtesting.com/blog/end-to-end-vs-integration-testing) gives an excellent definition:
|
||||
|
||||
> End-to-end testing verifies that your software works correctly from the beginning to the end of a particular user flow. It replicates expected user behavior and various usage scenarios to ensure that your software works as a whole. End-to-end testing uses a production-equivalent environment and data to simulate real-world situations and may also involve the integrations your software has with external applications.
|
||||
|
||||
> A typical software project consists of multiple software units, usually coded by different developers. Integration testing combines those software units logically and tests them as a group.
|
||||
> Essentially, integration testing verifies whether or not the individual modules or services that make up your application work well together. The purpose of this level of testing is to expose defects in the interaction between these software modules when they are integrated.
|
||||
|
||||
Do not use Cypress when RTL can do it better and faster. Many of the Cypress tests that we have right now, fall into the integration testing category and can be ported to RTL which is much faster and gives more immediate feedback. Cypress should be used mainly for end-to-end testing, replicating the user experience, with positive and negative flows.
|
||||
|
||||
### Isolated and standalone tests
|
||||
|
||||
Tests should never rely on other tests to pass. This might be hard when a single user is used for testing as data will be stored in the database. At every new test, we should reset the database.
|
||||
|
||||
### Cleaning state
|
||||
|
||||
Cleaning the state of the application, such as resetting the DB, or in general, any state that might affect consequent tests should always be done in the `beforeEach` hook and never in the `afterEach` one as the `beforeEach` is guaranteed to run, while the test might never reach the point to run the `afterEach` hook. One example would be if you refresh Cypress in the middle of the test. At this point, you will have built up a partial state in the database, and your clean-up function will never get called. You can read more about it [here](https://docs.cypress.io/guides/references/best-practices#Using-after-or-afterEach-hooks).
|
||||
|
||||
### Unnecessary use of `cy.wait`
|
||||
|
||||
- Unnecessary when using `cy.request()` as it will resolve when a response is received from the server
|
||||
- Unnecessary when using `cy.visit()` as it resolves only when the page fires the load event
|
||||
- Unnecessary when using `cy.get()`. When the selector should wait for a request to happen, aliases would come in handy:
|
||||
|
||||
```js
|
||||
cy.intercept('GET', '/users', [{ name: 'Maggy' }, { name: 'Joan' }]).as('getUsers')
|
||||
cy.get('#fetch').click()
|
||||
cy.wait('@getUsers') // <--- wait explicitly for this route to finish
|
||||
cy.get('table tr').should('have.length', 2)
|
||||
```
|
||||
|
||||
### Accessibility and Resilience
|
||||
|
||||
The same accessibility principles in the RTL section apply here. Use accessible selectors when querying for elements. Those principles also help to isolate selectors from eventual CSS and JS changes and improve the resilience of your tests.
|
||||
|
||||
## References
|
||||
|
||||
- [Avoid Nesting when you're Testing](https://kentcdodds.com/blog/avoid-nesting-when-youre-testing)
|
||||
- [RTL - Queries](https://testing-library.com/docs/queries/about/#priority)
|
||||
- [Fix the "not wrapped in act(...)" warning](https://kentcdodds.com/blog/fix-the-not-wrapped-in-act-warning)
|
||||
- [Common mistakes with React Testing Library](https://kentcdodds.com/blog/common-mistakes-with-react-testing-library)
|
||||
- [ARIA Roles](https://developer.mozilla.org/en-US/docs/Web/Accessibility/ARIA/Roles)
|
||||
- [Cypress - Best Practices](https://docs.cypress.io/guides/references/best-practices)
|
||||
- [End-to-end vs integration tests: what's the difference?](https://www.onpathtesting.com/blog/end-to-end-vs-integration-testing)
|
||||
@@ -16,6 +16,11 @@ Superset's public **REST API** follows the
|
||||
documented here. The docs below are generated using
|
||||
[Swagger React UI](https://www.npmjs.com/package/swagger-ui-react).
|
||||
|
||||
:::resources
|
||||
- [Blog: The Superset REST API](https://preset.io/blog/2020-10-01-superset-api/)
|
||||
- [Blog: Accessing APIs with Superset](https://preset.io/blog/accessing-apis-with-superset/)
|
||||
:::
|
||||
|
||||
<Alert
|
||||
type="info"
|
||||
message={
|
||||
|
||||
@@ -294,6 +294,14 @@ Check this by attempting to `curl` the URL of a report that you see in the error
|
||||
|
||||
In a deployment with authentication measures enabled like HTTPS and Single Sign-On, it may make sense to have the worker navigate directly to the Superset application running in the same location, avoiding the need to sign in. For instance, you could use `WEBDRIVER_BASEURL="http://superset_app:8088"` for a docker compose deployment, and set `"force_https": False,` in your `TALISMAN_CONFIG`.
|
||||
|
||||
### Duplicate report deliveries
|
||||
|
||||
In some deployment configurations a scheduled report can be delivered more than once around its planned time. This typically happens when more than one process is responsible for running the alerts & reports schedule (for example, multiple schedulers or Celery beat instances). To avoid duplicate emails or notifications:
|
||||
|
||||
- Ensure that only a **single scheduler/beat process** is configured to trigger alerts and reports for a given environment.
|
||||
- If you run **multiple Celery workers**, verify that there is still only one component responsible for scheduling the report tasks (workers should execute tasks, not schedule them independently).
|
||||
- Review your deployment/orchestration setup (for example systemd, Docker, or Kubernetes) to make sure the alerts & reports scheduler is **not started from multiple places by accident**.
|
||||
|
||||
## Scheduling Queries as Reports
|
||||
|
||||
You can optionally allow your users to schedule queries directly in SQL Lab. This is done by adding
|
||||
@@ -390,3 +398,8 @@ the user can add the metadata required for scheduling the query.
|
||||
This information can then be retrieved from the endpoint `/api/v1/saved_query/` and used to
|
||||
schedule the queries that have `schedule_info` in their JSON metadata. For schedulers other than
|
||||
Airflow, additional fields can be easily added to the configuration file above.
|
||||
|
||||
:::resources
|
||||
- [Tutorial: Automated Alerts and Reporting via Slack/Email in Superset](https://dev.to/ngtduc693/apache-superset-topic-5-automated-alerts-and-reporting-via-slackemail-in-superset-2gbe)
|
||||
- [Blog: Integrating Slack alerts and Apache Superset for better data observability](https://medium.com/affinityanswers-tech/integrating-slack-alerts-and-apache-superset-for-better-data-observability-fd2f9a12c350)
|
||||
:::
|
||||
|
||||
@@ -52,11 +52,11 @@ To start a job which schedules periodic background jobs, run the following comma
|
||||
celery --app=superset.tasks.celery_app:app beat
|
||||
```
|
||||
|
||||
To set up a result backend, you need to pass an instance of a derivative of
|
||||
from flask_caching.backends.base import BaseCache to the RESULTS_BACKEND configuration key in your superset_config.py. You can
|
||||
use Memcached, Redis, S3 (https://pypi.python.org/pypi/s3werkzeugcache), memory or the file system
|
||||
(in a single server-type setup or for testing), or to write your own caching interface. Your
|
||||
`superset_config.py` may look something like:
|
||||
To set up a result backend, you need to pass an instance of a derivative of `BaseCache` (`from
|
||||
flask_caching.backends.base import BaseCache`) to the RESULTS_BACKEND configuration key in your
|
||||
superset_config.py. You can use Memcached, Redis, S3 (https://pypi.python.org/pypi/s3werkzeugcache),
|
||||
memory, or the file system (in a single server-type setup or for testing), or write your own
|
||||
caching interface. Your `superset_config.py` may look something like:
|
||||
|
||||
```python
|
||||
# On S3
|
||||
@@ -102,3 +102,7 @@ You can run flower using:
|
||||
```bash
|
||||
celery --app=superset.tasks.celery_app:app flower
|
||||
```
|
||||
|
||||
:::resources
|
||||
- [Blog: How to Set Up Global Async Queries (GAQ) in Apache Superset](https://medium.com/@ngigilevis/how-to-set-up-global-async-queries-gaq-in-apache-superset-a-complete-guide-9d2f4a047559)
|
||||
:::
|
||||
|
||||
@@ -152,3 +152,8 @@ Then on configuration:
|
||||
```
|
||||
WEBDRIVER_AUTH_FUNC = auth_driver
|
||||
```
|
||||
|
||||
:::resources
|
||||
- [Blog: The Data Engineer's Guide to Lightning-Fast Superset Dashboards](https://preset.io/blog/the-data-engineers-guide-to-lightning-fast-apache-superset-dashboards/)
|
||||
- [Blog: Accelerating Dashboards with Materialized Views](https://preset.io/blog/accelerating-apache-superset-dashboards-with-materialized-views/)
|
||||
:::
|
||||
|
||||
@@ -442,3 +442,7 @@ FEATURE_FLAGS = {
|
||||
```
|
||||
|
||||
A current list of feature flags can be found in [RESOURCES/FEATURE_FLAGS.md](https://github.com/apache/superset/blob/master/RESOURCES/FEATURE_FLAGS.md).
|
||||
|
||||
:::resources
|
||||
- [Blog: Feature Flags in Apache Superset](https://preset.io/blog/feature-flags-in-apache-superset-and-preset/)
|
||||
:::
|
||||
|
||||
@@ -54,6 +54,7 @@ are compatible with Superset.
|
||||
| [Ascend.io](/docs/configuration/databases#ascendio) | `pip install impyla` | `ascend://{username}:{password}@{hostname}:{port}/{database}?auth_mechanism=PLAIN;use_ssl=true` |
|
||||
| [Azure MS SQL](/docs/configuration/databases#sql-server) | `pip install pymssql` | `mssql+pymssql://UserName@presetSQL:TestPassword@presetSQL.database.windows.net:1433/TestSchema` |
|
||||
| [ClickHouse](/docs/configuration/databases#clickhouse) | `pip install clickhouse-connect` | `clickhousedb://{username}:{password}@{hostname}:{port}/{database}` |
|
||||
| [Cloudflare D1](/docs/configuration/databases#cloudflare-d1) | `pip install superset-engine-d1` or `pip install apache_superset[d1]` | `d1://{cloudflare_account_id}:{cloudflare_api_token}@{cloudflare_d1_database_id}` |
|
||||
| [CockroachDB](/docs/configuration/databases#cockroachdb) | `pip install cockroachdb` | `cockroachdb://root@{hostname}:{port}/{database}?sslmode=disable` |
|
||||
| [Couchbase](/docs/configuration/databases#couchbase) | `pip install couchbase-sqlalchemy` | `couchbase://{username}:{password}@{hostname}:{port}?truststorepath={ssl certificate path}` |
|
||||
| [CrateDB](/docs/configuration/databases#cratedb) | `pip install sqlalchemy-cratedb` | `crate://{username}:{password}@{hostname}:{port}`, often useful: `?ssl=true/false` or `?schema=testdrive`. |
|
||||
@@ -98,8 +99,9 @@ exists, please file an issue on the
|
||||
[Superset GitHub repo](https://github.com/apache/superset/issues), so we can work on documenting and
|
||||
supporting it.
|
||||
|
||||
If you'd like to build a database connector for Superset integration,
|
||||
read the [following tutorial](https://preset.io/blog/building-database-connector/).
|
||||
:::resources
|
||||
- [Tutorial: Building a Database Connector for Superset](https://preset.io/blog/building-database-connector/)
|
||||
:::
|
||||
|
||||
### Installing Drivers in Docker Images
|
||||
|
||||
@@ -218,6 +220,10 @@ Here's what the connection string looks like:
|
||||
doris://<User>:<Password>@<Host>:<Port>/<Catalog>.<Database>
|
||||
```
|
||||
|
||||
:::resources
|
||||
- [Apache Doris Docs: Superset Integration](https://doris.apache.org/docs/ecosystem/bi/apache-superset/)
|
||||
:::
|
||||
|
||||
#### AWS Athena
|
||||
|
||||
##### PyAthenaJDBC
|
||||
@@ -359,6 +365,27 @@ uses the default user without a password (and doesn't encrypt the connection):
|
||||
clickhousedb://localhost/default
|
||||
```
|
||||
|
||||
:::resources
|
||||
- [ClickHouse Docs: Superset Integration](https://clickhouse.com/docs/integrations/superset)
|
||||
- [Altinity: Connect ClickHouse to Superset](https://docs.altinity.com/integrations/clickhouse-and-superset/connect-clickhouse-to-superset/)
|
||||
- [Instaclustr: Connecting to ClickHouse from Superset](https://www.instaclustr.com/support/documentation/clickhouse/using-a-clickhouse-cluster/connecting-to-clickhouse-from-apache-superset/)
|
||||
- [Blog: ClickHouse and Apache Superset](https://preset.io/blog/2021-5-26-clickhouse-superset/)
|
||||
:::
|
||||
|
||||
#### Cloudflare D1
|
||||
|
||||
To use Cloudflare D1 with Superset, install the [superset-engine-d1](https://github.com/sqlalchemy-cf-d1/superset-engine-d1) library.
|
||||
|
||||
```
|
||||
pip install superset-engine-d1
|
||||
```
|
||||
|
||||
The expected connection string is formatted as follows:
|
||||
|
||||
```
|
||||
d1://{cloudflare_account_id}:{cloudflare_api_token}@{cloudflare_d1_database_id}
|
||||
```
|
||||
|
||||
#### CockroachDB
|
||||
|
||||
The recommended connector library for CockroachDB is
|
||||
@@ -417,6 +444,11 @@ Docker Compose.
|
||||
echo "sqlalchemy-cratedb" >> ./docker/requirements-local.txt
|
||||
```
|
||||
|
||||
:::resources
|
||||
- [CrateDB Docs: Apache Superset Integration](https://cratedb.com/docs/guide/integrate/apache-superset/index.html)
|
||||
- [Blog: Visualizing Time-Series Data with CrateDB and Superset](https://preset.io/blog/timeseries-cratedb-superset/)
|
||||
:::
|
||||
|
||||
[CrateDB Cloud]: https://cratedb.com/product/cloud
|
||||
[CrateDB Self-Managed]: https://cratedb.com/product/self-managed
|
||||
[sqlalchemy-cratedb]: https://pypi.org/project/sqlalchemy-cratedb/
|
||||
@@ -549,8 +581,10 @@ The expected connection string for Arrow Flight (Dremio 4.9.1+. Default port is
|
||||
dremio+flight://{username}:{password}@{host}:{port}/dremio
|
||||
```
|
||||
|
||||
This [blog post by Dremio](https://www.dremio.com/tutorials/dremio-apache-superset/) has some
|
||||
additional helpful instructions on connecting Superset to Dremio.
|
||||
:::resources
|
||||
- [Dremio Docs: Superset Integration](https://docs.dremio.com/current/client-applications/superset/)
|
||||
- [Blog: Connecting Dremio to Apache Superset](https://www.dremio.com/tutorials/dremio-apache-superset/)
|
||||
:::
|
||||
|
||||
#### Apache Drill
|
||||
|
||||
@@ -593,6 +627,10 @@ We recommend reading the
|
||||
the [GitHub README](https://github.com/JohnOmernik/sqlalchemy-drill#usage-with-odbc) to learn how to
|
||||
work with Drill through ODBC.
|
||||
|
||||
:::resources
|
||||
- [Tutorial: Query MongoDB in Superset with Apache Drill](https://medium.com/@thoren.lederer/query-data-from-mongodb-in-apache-superset-with-the-help-of-apache-drill-full-tutorial-b34c33eac6c1)
|
||||
:::
|
||||
|
||||
import useBaseUrl from "@docusaurus/useBaseUrl";
|
||||
|
||||
#### Apache Druid
|
||||
@@ -659,6 +697,10 @@ much like you would create an aggregation manually, but specify `postagg` as a `
|
||||
then have to provide a valid json post-aggregation definition (as specified in the Druid docs) in
|
||||
the JSON field.
|
||||
|
||||
:::resources
|
||||
- [Blog: Real-Time Business Insights with Apache Druid and Superset](https://www.deep.bi/blog/real-time-business-insights-with-apache-druid-and-apache-superset)
|
||||
:::
|
||||
|
||||
#### Elasticsearch
|
||||
|
||||
The recommended connector library for Elasticsearch is
|
||||
@@ -729,6 +771,10 @@ To disable SSL verification, add the following to the **SQLALCHEMY URI** field:
|
||||
elasticsearch+https://{user}:{password}@{host}:9200/?verify_certs=False
|
||||
```
|
||||
|
||||
:::resources
|
||||
- [Blog: Superset Announces Elasticsearch Support](https://preset.io/blog/2019-12-16-elasticsearch-in-superset/)
|
||||
:::
|
||||
|
||||
#### Exasol
|
||||
|
||||
The recommended connector library for Exasol is
|
||||
@@ -777,6 +823,10 @@ or
|
||||
firebolt://{client_id}:{client_secret}@{database}/{engine_name}?account_name={name}
|
||||
```
|
||||
|
||||
:::resources
|
||||
- [Firebolt Docs: Connecting to Apache Superset](https://docs.firebolt.io/guides/integrations/connecting-to-apache-superset)
|
||||
:::
|
||||
|
||||
#### Google BigQuery
|
||||
|
||||
The recommended connector library for BigQuery is
|
||||
@@ -863,15 +913,21 @@ To be able to upload CSV or Excel files to BigQuery in Superset, you'll need to
|
||||
Currently, the Google BigQuery Python SDK is not compatible with `gevent`, due to some dynamic monkeypatching on python core library by `gevent`.
|
||||
So, when you deploy Superset with the `gunicorn` server, you have to use a worker type other than `gevent`.
|
||||
|
||||
:::resources
|
||||
- [Tutorial: Build A StackOverflow Dashboard — Connecting Superset to BigQuery](https://preset.io/blog/2020-08-04-google-bigquery/)
|
||||
:::
|
||||
|
||||
#### Google Sheets
|
||||
|
||||
Google Sheets has a very limited
|
||||
[SQL API](https://developers.google.com/chart/interactive/docs/querylanguage). The recommended
|
||||
connector library for Google Sheets is [shillelagh](https://github.com/betodealmeida/shillelagh).
|
||||
|
||||
There are a few steps involved in connecting Superset to Google Sheets. This
|
||||
[tutorial](https://preset.io/blog/2020-06-01-connect-superset-google-sheets/) has the most up-to-date
|
||||
instructions on setting up this connection.
|
||||
There are a few steps involved in connecting Superset to Google Sheets.
|
||||
|
||||
:::resources
|
||||
- [Tutorial: Connect Superset to Google Sheets](https://preset.io/blog/2020-06-01-connect-superset-google-sheets/)
|
||||
:::
|
||||
|
||||
#### Hana
|
||||
|
||||
@@ -893,6 +949,10 @@ The expected connection string is formatted as follows:
|
||||
hive://hive@{hostname}:{port}/{database}
|
||||
```
|
||||
|
||||
:::resources
|
||||
- [Cloudera: Connect Apache Hive to Superset](https://docs-archive.cloudera.com/HDPDocuments/HDP3/HDP-3.0.0/integrating-hive/content/hive_connect_hive_to_apache_superset.html)
|
||||
:::
|
||||
|
||||
#### Hologres
|
||||
|
||||
Hologres is a real-time interactive analytics service developed by Alibaba Cloud. It is fully compatible with PostgreSQL 11 and integrates seamlessly with the big data ecosystem.
|
||||
@@ -1044,6 +1104,10 @@ The connection string is formatted as follows:
|
||||
oracle://<username>:<password>@<hostname>:<port>
|
||||
```
|
||||
|
||||
:::resources
|
||||
- [Oracle Developers: Steps to use Apache Superset and Oracle Database](https://medium.com/oracledevs/steps-to-use-apache-superset-and-oracle-database-ae0858b4f134)
|
||||
:::
|
||||
|
||||
#### Parseable
|
||||
|
||||
[Parseable](https://www.parseable.io) is a distributed log analytics database that provides SQL-like query interface for log data. The recommended connector library is [sqlalchemy-parseable](https://github.com/parseablehq/sqlalchemy-parseable).
|
||||
@@ -1062,7 +1126,10 @@ parseable://admin:admin@demo.parseable.com:443/ingress-nginx
|
||||
|
||||
Note: The stream_name in the URI represents the Parseable logstream you want to query. You can use both HTTP (port 80) and HTTPS (port 443) connections.
|
||||
|
||||
|
||||
:::resources
|
||||
- [Blog: Parseable with Apache Superset](https://www.parseable.com/blog/parseable-with-apache-superset)
|
||||
:::
|
||||
|
||||
#### Apache Pinot
|
||||
|
||||
The recommended connector library for Apache Pinot is [pinotdb](https://pypi.org/project/pinotdb/).
|
||||
@@ -1086,6 +1153,11 @@ Add below argument while creating database connection in Advanced -> Other -> EN
|
||||
{"connect_args":{"use_multistage_engine":"true"}}
|
||||
```
|
||||
|
||||
:::resources
|
||||
- [Apache Pinot Docs: Superset Integration](https://docs.pinot.apache.org/integrations/superset)
|
||||
- [StarTree: Data Visualization with Apache Superset and Pinot](https://startree.ai/resources/data-visualization-with-apache-superset-and-pinot)
|
||||
:::
|
||||
|
||||
#### Postgres
|
||||
|
||||
Note that, if you're using docker compose, the Postgres connector library [psycopg2](https://www.psycopg.org/docs/)
|
||||
@@ -1122,6 +1194,28 @@ More information about PostgreSQL connection options can be found in the
|
||||
and the
|
||||
[PostgreSQL docs](https://www.postgresql.org/docs/9.1/libpq-connect.html#LIBPQ-PQCONNECTDBPARAMS).
|
||||
|
||||
:::resources
|
||||
- [Blog: Data Visualization in PostgreSQL With Apache Superset](https://www.tigerdata.com/blog/data-visualization-in-postgresql-with-apache-superset)
|
||||
:::
|
||||
|
||||
#### QuestDB
|
||||
|
||||
[QuestDB](https://questdb.io/) is a high-performance, open-source time-series database with SQL support.
|
||||
The recommended connector library is the PostgreSQL driver [psycopg2](https://www.psycopg.org/docs/),
|
||||
as QuestDB supports the PostgreSQL wire protocol.
|
||||
|
||||
The connection string is formatted as follows:
|
||||
|
||||
```
|
||||
postgresql+psycopg2://{username}:{password}@{hostname}:{port}/{database}
|
||||
```
|
||||
|
||||
The default port for QuestDB's PostgreSQL interface is `8812`.
|
||||
|
||||
:::resources
|
||||
- [QuestDB Docs: Apache Superset Integration](https://questdb.com/docs/third-party-tools/superset/)
|
||||
:::
|
||||
|
||||
#### Presto
|
||||
|
||||
The [pyhive](https://pypi.org/project/PyHive/) library is the recommended way to connect to Presto through SQLAlchemy.
|
||||
@@ -1164,6 +1258,10 @@ SSL Secure extra add json config to extra connection information.
|
||||
}
|
||||
```
|
||||
|
||||
:::resources
|
||||
- [Tutorial: Presto SQL + S3 Data + Superset Data Lake](https://hackernoon.com/presto-sql-s3-data-superset-data-lake)
|
||||
:::
|
||||
|
||||
#### RisingWave
|
||||
|
||||
The recommended connector library for RisingWave is
|
||||
@@ -1237,6 +1335,10 @@ If your private key is stored on server, you can replace "privatekey_body" with
|
||||
}
|
||||
```
|
||||
|
||||
:::resources
|
||||
- [Snowflake Builders Blog: Building Real-Time Operational Dashboards with Apache Superset and Snowflake](https://medium.com/snowflake/building-real-time-operational-dashboards-with-apache-superset-and-snowflake-23f625e07d7c)
|
||||
:::
|
||||
|
||||
#### Apache Solr
|
||||
|
||||
The [sqlalchemy-solr](https://pypi.org/project/sqlalchemy-solr/) library provides a
|
||||
@@ -1258,6 +1360,10 @@ The expected connection string is formatted as follows:
|
||||
hive://hive@{hostname}:{port}/{database}
|
||||
```
|
||||
|
||||
:::resources
|
||||
- [Tutorial: How to Connect Apache Superset with Apache SparkSQL](https://medium.com/free-or-open-source-software/how-to-connect-apache-superset-with-apache-sparksql-50efe48ac0e4)
|
||||
:::
|
||||
|
||||
#### Arc
|
||||
|
||||
There are two ways to connect Superset to Arc:
|
||||
@@ -1341,8 +1447,8 @@ Here's what the connection string looks like:
|
||||
starrocks://<User>:<Password>@<Host>:<Port>/<Catalog>.<Database>
|
||||
```
|
||||
|
||||
:::note
|
||||
StarRocks maintains their Superset documentation [here](https://docs.starrocks.io/docs/integrations/BI_integrations/Superset/).
|
||||
:::resources
|
||||
- [StarRocks Docs: Superset Integration](https://docs.starrocks.io/docs/integrations/BI_integrations/Superset/)
|
||||
:::
|
||||
|
||||
#### TDengine
|
||||
@@ -1426,6 +1532,11 @@ postgresql://{username}:{password}@{host}:{port}/{database name}?sslmode=require
|
||||
|
||||
[Learn more about TimescaleDB!](https://docs.timescale.com/)
|
||||
|
||||
:::resources
|
||||
- [Timescale DevRel: Visualize time series data with TimescaleDB and Apache Superset](https://attilatoth.dev/speaking/timescaledb-superset/)
|
||||
- [Tutorial: PostgreSQL with TimescaleDB — Visualizing Real-Time Data with Superset](https://www.slingacademy.com/article/postgresql-with-timescaledb-visualizing-real-time-data-with-superset/)
|
||||
:::
|
||||
|
||||
#### Trino
|
||||
|
||||
Trino versions 352 and higher are supported.
|
||||
@@ -1551,9 +1662,11 @@ or factory function (which returns an `Authentication` instance) to `auth_method
|
||||
|
||||
All fields in `auth_params` are passed directly to your class/function.
|
||||
|
||||
**Reference**:
|
||||
|
||||
- [Trino-Superset-Podcast](https://trino.io/episodes/12.html)
|
||||
:::resources
|
||||
- [Starburst Docs: Superset Integration](https://docs.starburst.io/clients/superset.html)
|
||||
- [Podcast: Trino and Superset](https://trino.io/episodes/12.html)
|
||||
- [Blog: Trino and Apache Superset](https://preset.io/blog/2021-6-22-trino-superset/)
|
||||
:::
|
||||
|
||||
#### Vertica
|
||||
|
||||
@@ -1665,6 +1778,10 @@ The connection string looks like:
|
||||
postgresql://{username}:{password}@{host}:{port}/{database}
|
||||
```
|
||||
|
||||
:::resources
|
||||
- [Blog: Introduction to YugabyteDB and Apache Superset](https://preset.io/blog/introduction-yugabytedb-apache-superset/)
|
||||
:::
|
||||
|
||||
## Connecting through the UI
|
||||
|
||||
Here is the documentation on how to leverage the new DB Connection UI. This will provide admins the ability to enhance the UX for users who want to connect to new databases.
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user