mirror of
https://github.com/apache/superset.git
synced 2026-05-02 06:24:37 +00:00
Compare commits
507 Commits
fdf19db5e6
...
fix-sqllab
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
f0bb7147a7 | ||
|
|
c85958a704 | ||
|
|
151d7d76da | ||
|
|
4f19bc4c5f | ||
|
|
11607dde04 | ||
|
|
e1bdb94efc | ||
|
|
6535fdd556 | ||
|
|
5fb89b865d | ||
|
|
6948e73ec7 | ||
|
|
c4cf03f899 | ||
|
|
d3de16c5f5 | ||
|
|
78fb09695b | ||
|
|
06818008c2 | ||
|
|
52ba4fd0cb | ||
|
|
4f2fa41f4e | ||
|
|
bf7ec853fa | ||
|
|
9fe3f634ec | ||
|
|
899e9294b2 | ||
|
|
dc9b459b27 | ||
|
|
7d3881f1da | ||
|
|
e5f9a6bf4b | ||
|
|
5cff657812 | ||
|
|
16387b0815 | ||
|
|
0857611a4e | ||
|
|
51ea2c297d | ||
|
|
fbd062165e | ||
|
|
55625c911f | ||
|
|
fca64de8e9 | ||
|
|
03725d1aaa | ||
|
|
a9487cbc84 | ||
|
|
ce5b2aa424 | ||
|
|
e535dce030 | ||
|
|
a7310b1fce | ||
|
|
cd6ce881a5 | ||
|
|
f0ef9f5e9c | ||
|
|
4ae16cb140 | ||
|
|
3d85e8e23b | ||
|
|
690a411cf3 | ||
|
|
be680408c9 | ||
|
|
4bdc8d4c68 | ||
|
|
db7a2bd682 | ||
|
|
2e0d482ccf | ||
|
|
e5b3a9c25d | ||
|
|
c289731212 | ||
|
|
f850c6b1b1 | ||
|
|
8ce234371b | ||
|
|
e5820b6b2b | ||
|
|
69f062b804 | ||
|
|
735dd5dbae | ||
|
|
cd7dddb5a1 | ||
|
|
7c76fd3d81 | ||
|
|
0b419a07f5 | ||
|
|
0b51e9cd5e | ||
|
|
e7b9fb277e | ||
|
|
838ee870d0 | ||
|
|
84af6c9f29 | ||
|
|
05227e8a80 | ||
|
|
76a209663d | ||
|
|
61c45e3dd8 | ||
|
|
e11a50bedf | ||
|
|
f4a6ea0fde | ||
|
|
e542e9f840 | ||
|
|
e0dcb2908d | ||
|
|
eaccb2e471 | ||
|
|
c1a1f2e7e7 | ||
|
|
45d5501aa7 | ||
|
|
388596e4fe | ||
|
|
b11d4f3ef0 | ||
|
|
998b9e387b | ||
|
|
b3e88db87e | ||
|
|
8471e82342 | ||
|
|
c3a0f2749b | ||
|
|
c2d96e0dce | ||
|
|
44e77fdf2b | ||
|
|
18d6feb499 | ||
|
|
0d91f5e982 | ||
|
|
5661fc9128 | ||
|
|
a6156676c8 | ||
|
|
b2bd2329bc | ||
|
|
724f1484b9 | ||
|
|
84f7b4a973 | ||
|
|
ddcb9be9a7 | ||
|
|
8d9b5bd479 | ||
|
|
4b88fc57b4 | ||
|
|
b76080e291 | ||
|
|
411f769896 | ||
|
|
ffcc6e8b63 | ||
|
|
86575e129b | ||
|
|
3e25f02da9 | ||
|
|
002d8ad1e4 | ||
|
|
6287a07912 | ||
|
|
fa97d0357f | ||
|
|
f836c3eccd | ||
|
|
499e27ea54 | ||
|
|
c2a35e2eea | ||
|
|
5138aa2c11 | ||
|
|
66a9e2e16e | ||
|
|
0f417f0040 | ||
|
|
1462ac9282 | ||
|
|
da371217ef | ||
|
|
c971ea3ec6 | ||
|
|
de98fdc37b | ||
|
|
fa1f12a0b5 | ||
|
|
de40b58e10 | ||
|
|
eea3557f61 | ||
|
|
7a243d329e | ||
|
|
98146251c4 | ||
|
|
0aa8cace1b | ||
|
|
450701ecec | ||
|
|
e9911fbac4 | ||
|
|
69c8eef78e | ||
|
|
2ff50667e7 | ||
|
|
f1cf274751 | ||
|
|
b65396ccd4 | ||
|
|
1ad76e847e | ||
|
|
4583ef93a4 | ||
|
|
f632d2474b | ||
|
|
b1d69f5b39 | ||
|
|
aba7e6dae4 | ||
|
|
8bcc90c766 | ||
|
|
e39dd1afce | ||
|
|
680cef0ee0 | ||
|
|
e17cf3c808 | ||
|
|
f49310b8ff | ||
|
|
c7955a38ef | ||
|
|
68067d7f44 | ||
|
|
5815665cc6 | ||
|
|
6649f35a0d | ||
|
|
5263abdc60 | ||
|
|
c49641538d | ||
|
|
d915e4f3ff | ||
|
|
bad5a35fce | ||
|
|
1bde6f3bfd | ||
|
|
4e0890ee1f | ||
|
|
d63308ca37 | ||
|
|
63cceb6a79 | ||
|
|
b8b2bdedf9 | ||
|
|
d5017e60c3 | ||
|
|
2e80f2a473 | ||
|
|
4c2dd63464 | ||
|
|
62302ad8c3 | ||
|
|
ed659958f3 | ||
|
|
36de05fe36 | ||
|
|
a64609f4f3 | ||
|
|
140f0001f2 | ||
|
|
587fe4af63 | ||
|
|
3a3a6536b7 | ||
|
|
4f695e1b4d | ||
|
|
6ba9096870 | ||
|
|
5106afb07f | ||
|
|
2bd4131636 | ||
|
|
7e452df1cc | ||
|
|
a626d06415 | ||
|
|
d159edc9a6 | ||
|
|
96fa2cbd2b | ||
|
|
9750881193 | ||
|
|
3db92021c7 | ||
|
|
5ccfc530b2 | ||
|
|
5f9fc31ae2 | ||
|
|
8e811de564 | ||
|
|
027de6339b | ||
|
|
bf9aff19b5 | ||
|
|
b05764d070 | ||
|
|
7be2acb2f3 | ||
|
|
83ad1eca26 | ||
|
|
92747246fc | ||
|
|
7380a59ab8 | ||
|
|
e56f8cc4fb | ||
|
|
7c79b9ab61 | ||
|
|
a62be684a0 | ||
|
|
a3dfbd7bff | ||
|
|
12eb40db01 | ||
|
|
d796543f5a | ||
|
|
e5ae626433 | ||
|
|
8195574345 | ||
|
|
6b029997d9 | ||
|
|
7a64483e6b | ||
|
|
e424b55036 | ||
|
|
613e6d6cde | ||
|
|
b3a402d936 | ||
|
|
c7d175b842 | ||
|
|
851bbeea48 | ||
|
|
c5bce756f0 | ||
|
|
3239f058c8 | ||
|
|
7e0c634c3a | ||
|
|
a9ced5c881 | ||
|
|
ace5f9d8c2 | ||
|
|
0452d1515a | ||
|
|
0330fdeb00 | ||
|
|
f2ff24d811 | ||
|
|
c51132f824 | ||
|
|
b4cb815ebf | ||
|
|
08d1ddd9fb | ||
|
|
23ac4cb3a4 | ||
|
|
5662ecab15 | ||
|
|
9e27d682f6 | ||
|
|
f0fcdcc76a | ||
|
|
135e0f8099 | ||
|
|
25eea295f6 | ||
|
|
c372f5980c | ||
|
|
3802acb1e0 | ||
|
|
bdb0030cf8 | ||
|
|
87f0540acd | ||
|
|
985d7b6a79 | ||
|
|
59f92f979a | ||
|
|
5cc286e383 | ||
|
|
26f4a5acad | ||
|
|
fdd08d3b70 | ||
|
|
1aac6c9474 | ||
|
|
7acb0c6d05 | ||
|
|
00eb86d03f | ||
|
|
1d0e836a29 | ||
|
|
ec6640b188 | ||
|
|
ff3b8d8398 | ||
|
|
022342839a | ||
|
|
38f0dc74f7 | ||
|
|
0bae05d4a9 | ||
|
|
1bb41a6e60 | ||
|
|
4423134739 | ||
|
|
190f1a59c5 | ||
|
|
5f99d613a0 | ||
|
|
6adc816805 | ||
|
|
aa97679327 | ||
|
|
94d8735d4b | ||
|
|
64c8d652e1 | ||
|
|
d30c5b4eee | ||
|
|
8ed75787cb | ||
|
|
4ee391e0d7 | ||
|
|
a67ca052d6 | ||
|
|
6b6e3803d1 | ||
|
|
51ec61c675 | ||
|
|
424f99efdf | ||
|
|
070be3de8b | ||
|
|
bd98269628 | ||
|
|
7ce371080c | ||
|
|
872632aca0 | ||
|
|
64bd03bd70 | ||
|
|
1e2d0faa55 | ||
|
|
8559786cc2 | ||
|
|
d4d22909cb | ||
|
|
4fae5758d5 | ||
|
|
f85efe6139 | ||
|
|
6ea9f2ade9 | ||
|
|
4036b784ed | ||
|
|
08a4ad662a | ||
|
|
e4021fb6e7 | ||
|
|
53b1d1097c | ||
|
|
55aa36fef8 | ||
|
|
3abcfb797a | ||
|
|
a741ddc03c | ||
|
|
7d26e33346 | ||
|
|
f6cd8066ab | ||
|
|
daefedebcd | ||
|
|
c37a3ec292 | ||
|
|
4245720851 | ||
|
|
f0b20dc445 | ||
|
|
e6f1209318 | ||
|
|
944944c49e | ||
|
|
ecbf396d4a | ||
|
|
b1474aaa60 | ||
|
|
b49e899974 | ||
|
|
11f2140c37 | ||
|
|
f1cd1ae710 | ||
|
|
e0a0a22542 | ||
|
|
2c9cf0bd55 | ||
|
|
38fdfb4ca2 | ||
|
|
15bab227bb | ||
|
|
d331a043a3 | ||
|
|
41d401a879 | ||
|
|
89f7e5e7ba | ||
|
|
aa1a69555b | ||
|
|
d1903afc69 | ||
|
|
dbc25dc555 | ||
|
|
a5d2324e21 | ||
|
|
38e82e4084 | ||
|
|
6bcc8bf2b2 | ||
|
|
f832f9b0d5 | ||
|
|
fc705d94e3 | ||
|
|
65eae027fa | ||
|
|
ac96f46c76 | ||
|
|
5c782397bb | ||
|
|
40387d5daa | ||
|
|
7f3351011d | ||
|
|
d6a6b6db14 | ||
|
|
388a1fd0be | ||
|
|
c2c929bf94 | ||
|
|
41473a520e | ||
|
|
50a5bb0671 | ||
|
|
20d0cfd156 | ||
|
|
5ad91fbb09 | ||
|
|
6229c99050 | ||
|
|
7e69d5d839 | ||
|
|
8700ec4e6d | ||
|
|
8cbf5fb8df | ||
|
|
9c288d66b5 | ||
|
|
8983edea66 | ||
|
|
95820fb9e6 | ||
|
|
6dc3d7ad9f | ||
|
|
cfa1aba1e0 | ||
|
|
43816d7528 | ||
|
|
6dd82afb0b | ||
|
|
e045f49787 | ||
|
|
38d3a39c06 | ||
|
|
23a5e95884 | ||
|
|
16f5a2a41a | ||
|
|
04e07acf98 | ||
|
|
3506773f51 | ||
|
|
d32e975eb9 | ||
|
|
21fb5a27e9 | ||
|
|
403f4ad78c | ||
|
|
ba5820b088 | ||
|
|
a93e319716 | ||
|
|
12aca72074 | ||
|
|
3fb903fdc6 | ||
|
|
4b26f8c712 | ||
|
|
37c4a36fdb | ||
|
|
811dcb3715 | ||
|
|
ccaac306e5 | ||
|
|
c596df9294 | ||
|
|
6852349d24 | ||
|
|
7c9d75b69e | ||
|
|
42201a98a1 | ||
|
|
09594b32f9 | ||
|
|
e2bb20121e | ||
|
|
56ebfb7848 | ||
|
|
5d9f53ff0c | ||
|
|
89d1b80ce7 | ||
|
|
962abf6904 | ||
|
|
ed3c5280a9 | ||
|
|
7222327992 | ||
|
|
e0b524fff2 | ||
|
|
e67bc5bee5 | ||
|
|
86a260e39b | ||
|
|
fdcb942f3c | ||
|
|
7a5c07b99c | ||
|
|
6d93eeb533 | ||
|
|
44179199ba | ||
|
|
100ad7d9ee | ||
|
|
c96c817ef5 | ||
|
|
519a64da82 | ||
|
|
24be9cd515 | ||
|
|
1987e816a5 | ||
|
|
0f4aa1ceea | ||
|
|
601fb45142 | ||
|
|
c9ebb13fa1 | ||
|
|
618113079f | ||
|
|
cc34d19d24 | ||
|
|
02ffb52f4a | ||
|
|
361afff798 | ||
|
|
2a6b0215f0 | ||
|
|
c1c296233f | ||
|
|
e05fdd8acd | ||
|
|
83823911b5 | ||
|
|
7004369c68 | ||
|
|
f5d7ce0f86 | ||
|
|
32eb8c8263 | ||
|
|
44c2c765ae | ||
|
|
0d5721910e | ||
|
|
28d67d59cd | ||
|
|
1d72480c17 | ||
|
|
1af5da6aad | ||
|
|
ea1c6ee30f | ||
|
|
97ea479cdc | ||
|
|
e088979fbe | ||
|
|
5e5c05362c | ||
|
|
c2a21915ff | ||
|
|
cbb2b2f3c2 | ||
|
|
82a74c88aa | ||
|
|
6b9dd23e3a | ||
|
|
b754f2d173 | ||
|
|
ee233d16d6 | ||
|
|
65f13f773e | ||
|
|
d4646d43a7 | ||
|
|
6465450b64 | ||
|
|
01aa4d3281 | ||
|
|
211f29b723 | ||
|
|
d6bfc98a61 | ||
|
|
5457c2da67 | ||
|
|
14b1b456e1 | ||
|
|
972e15e601 | ||
|
|
03de7e1ec6 | ||
|
|
3edf75123a | ||
|
|
fd1c423826 | ||
|
|
a314e5b35e | ||
|
|
e02ca8871d | ||
|
|
834d2abe70 | ||
|
|
91986fff02 | ||
|
|
05b9970aa6 | ||
|
|
6f301707f9 | ||
|
|
5865176f36 | ||
|
|
461037f645 | ||
|
|
c980f39aab | ||
|
|
a854fa60a2 | ||
|
|
1c8224f4c6 | ||
|
|
ca403dc45d | ||
|
|
96705c156a | ||
|
|
7909095ff3 | ||
|
|
aa5adb0fce | ||
|
|
dcb414aa06 | ||
|
|
afe093f1ca | ||
|
|
cc066b3576 | ||
|
|
39cd1cdd43 | ||
|
|
176bf00c16 | ||
|
|
68e38c8893 | ||
|
|
48220fb33f | ||
|
|
ed622e254a | ||
|
|
6e7d6a85b4 | ||
|
|
e8061a9c2b | ||
|
|
97a66f7a64 | ||
|
|
b6c3b3ef46 | ||
|
|
f4a57a13bc | ||
|
|
242636b36b | ||
|
|
ba7d7dcec0 | ||
|
|
ba7271b4d8 | ||
|
|
ca2d26a1e2 | ||
|
|
f6106cd26f | ||
|
|
1867336907 | ||
|
|
f5383263bc | ||
|
|
d5cf77cd60 | ||
|
|
f458e2d484 | ||
|
|
af5e05db2e | ||
|
|
32a64d02c7 | ||
|
|
9516d1a306 | ||
|
|
d91b96814e | ||
|
|
56d6bb1913 | ||
|
|
fc156d0014 | ||
|
|
0b8df8d3f2 | ||
|
|
83955e87ac | ||
|
|
4a9db243a1 | ||
|
|
d4f1f8db00 | ||
|
|
95f61bd223 | ||
|
|
7f476a79b3 | ||
|
|
65e21cf13c | ||
|
|
7943af359c | ||
|
|
09e9c6a522 | ||
|
|
a9def2fc15 | ||
|
|
27197faba9 | ||
|
|
ffe60bd960 | ||
|
|
d752be5f74 | ||
|
|
3056c41507 | ||
|
|
d42e9c4d1b | ||
|
|
5912941942 | ||
|
|
9b8106b382 | ||
|
|
9215eb5e45 | ||
|
|
fe7f220c21 | ||
|
|
3bb9704cd5 | ||
|
|
eb77452857 | ||
|
|
6d7cfac8b2 | ||
|
|
31754a39c9 | ||
|
|
bde48e563e | ||
|
|
0cfd760a36 | ||
|
|
13fe88000a | ||
|
|
cc8ad23d6f | ||
|
|
5c2cbb58bc | ||
|
|
6342c4f338 | ||
|
|
5fa70bdbd8 | ||
|
|
2a876e8b86 | ||
|
|
0533ca9941 | ||
|
|
5f20d2e15a | ||
|
|
6d1d5d64d1 | ||
|
|
06d6b513cd | ||
|
|
afa51125de | ||
|
|
26c07b1ffb | ||
|
|
9ecca47e69 | ||
|
|
6c1df93215 | ||
|
|
06fd0658ae | ||
|
|
a17f38a4e2 | ||
|
|
6ef4794778 | ||
|
|
4cd3ce164d | ||
|
|
8e3e57c1c8 | ||
|
|
61fbfda501 | ||
|
|
9017b9a74f | ||
|
|
bc99b710bd | ||
|
|
bf55f1e438 | ||
|
|
dca41f9a7b | ||
|
|
62cebc8a0e | ||
|
|
e70c7944b7 | ||
|
|
577654cd02 | ||
|
|
c7a1f57487 | ||
|
|
9983e255f8 | ||
|
|
d9a91f99db | ||
|
|
60577bcd97 | ||
|
|
3cb00bf116 | ||
|
|
a6c0d6321f | ||
|
|
5fb9e17721 | ||
|
|
03ad1789f0 | ||
|
|
296bd7e56b | ||
|
|
5c4bf0f6ea | ||
|
|
db7665c0bc | ||
|
|
84a53eab31 | ||
|
|
3609cd9544 | ||
|
|
7d2efd8c1a | ||
|
|
0d5ade6dd3 | ||
|
|
17df85b5ed | ||
|
|
664c465d80 | ||
|
|
884db9347d | ||
|
|
6c359733e1 | ||
|
|
357e35dc62 | ||
|
|
5f0efd2be9 | ||
|
|
0dbd4c5b90 | ||
|
|
f0416eff78 | ||
|
|
a513406239 | ||
|
|
f6f734f0d1 | ||
|
|
a2c23a2a58 | ||
|
|
20cc3345d8 | ||
|
|
880cab58c3 | ||
|
|
4dfb0e66cb |
@@ -24,7 +24,9 @@ notifications:
|
||||
discussions: notifications@superset.apache.org
|
||||
|
||||
github:
|
||||
del_branch_on_merge: true
|
||||
pull_requests:
|
||||
del_branch_on_merge: true
|
||||
allow_update_branch: true
|
||||
description: "Apache Superset is a Data Visualization and Data Exploration Platform"
|
||||
homepage: https://superset.apache.org/
|
||||
labels:
|
||||
|
||||
5
.github/CODEOWNERS
vendored
5
.github/CODEOWNERS
vendored
@@ -22,6 +22,11 @@
|
||||
|
||||
/.github/ @villebro @geido @eschutho @rusackas @betodealmeida @nytai @mistercrunch @craig-rueda @kgabryje @dpgaspar @sadpandajoe @hainenber
|
||||
|
||||
# Notify PMC members of changes to CI-executed scripts (supply-chain risk:
|
||||
# scripts/ files run directly in CI workflows and can execute arbitrary code)
|
||||
|
||||
/scripts/ @villebro @geido @eschutho @rusackas @betodealmeida @nytai @mistercrunch @craig-rueda @kgabryje @dpgaspar @sadpandajoe @hainenber
|
||||
|
||||
# Notify PMC members of changes to required GitHub Actions
|
||||
|
||||
/.asf.yaml @villebro @geido @eschutho @rusackas @betodealmeida @nytai @mistercrunch @craig-rueda @kgabryje @dpgaspar @Antonio-RiveroMartnez
|
||||
|
||||
37
.github/SECURITY.md
vendored
37
.github/SECURITY.md
vendored
@@ -18,10 +18,32 @@ e-mail address [security@superset.apache.org](mailto:security@superset.apache.or
|
||||
More details can be found on the ASF website at
|
||||
[ASF vulnerability reporting process](https://apache.org/security/#reporting-a-vulnerability)
|
||||
|
||||
We kindly ask you to include the following information in your report:
|
||||
- Apache Superset version that you are using
|
||||
- A sanitized copy of your `superset_config.py` file or any config overrides
|
||||
- Detailed steps to reproduce the vulnerability
|
||||
**Submission Standards & AI Policy**
|
||||
|
||||
To ensure engineering focus remains on verified risks and to manage high reporting volumes, all reports must meet the following criteria:
|
||||
- Plain Text Format: In accordance with Apache guidelines, please provide all details in plain text within the email body. Avoid sending PDFs, Word documents, or password-protected archives.
|
||||
- Mandatory AI Disclosure: If you utilized Large Language Models (LLMs) or AI tools to identify a flaw or assist in writing a report, you must disclose this in your submission so our triage team can contextualize the findings.
|
||||
- Human-Verified PoC: All submissions must include a manual, step-by-step Proof of Concept (PoC) performed on a supported release. Raw AI outputs, hypothetical chat transcripts, or unverified scanner logs will be closed as Invalid.
|
||||
|
||||
We kindly ask you to include the following information in your report to assist our developers in triaging and remediating issues efficiently:
|
||||
- Version/Commit: The specific version of Apache Superset or the Git commit hash you are using.
|
||||
- Configuration: A sanitized copy of your `superset_config.py` file or any config overrides.
|
||||
- Environment: Your deployment method (e.g., Docker Compose, Helm, or source) and relevant OS/Browser details.
|
||||
- Impacted Component: Identification of the affected area (e.g., Python backend, React frontend, or a specific database connector).
|
||||
- Expected vs. Actual Behavior: A clear description of the intended system behavior versus the observed vulnerability.
|
||||
- Detailed Reproduction Steps: Clear, manual steps to reproduce the vulnerability.
|
||||
|
||||
**Out of Scope Vulnerabilities**
|
||||
|
||||
To prioritize engineering efforts on genuine architectural risks, the following scenarios are explicitly out of scope and will not be issued a CVE:
|
||||
- Attacks requiring Admin privileges: (e.g., CSS injection, template manipulation, dashboard ownership overrides, or modifying global system settings). Per the CVE vulnerability definition in CNA Operational Rules 4.1, a qualifying vulnerability must allow violation of a security policy. The Admin role is a fully trusted operational boundary defined by Apache Superset's security policy; actions within this boundary do not violate that policy and are therefore considered intended capabilities 'by design,' not vulnerabilities.
|
||||
- Brute Force and Rate Limiting: Reports targeting a lack of resource exhaustion protections, generic rate-limiting, or volumetric Denial of Service (DoS) attempts.
|
||||
- Theoretical attack vectors: Issues without a demonstrable, reproducible exploit path.
|
||||
- Non-Exploitable Findings: Missing security headers, generic banner disclosures, or descriptive error messages that do not lead to a direct, documented exploit.
|
||||
|
||||
**Outcome of Reports**
|
||||
|
||||
Reports that are deemed out-of-scope for a CVE but represent valid security best practices or hardening opportunities may be converted into public GitHub issues. This allows the community to contribute to the general hardening of the platform even when a specific vulnerability threshold is not met.
|
||||
|
||||
Note that Apache Superset is not responsible for any third-party dependencies that may
|
||||
have security issues. Any vulnerabilities found in third-party dependencies should be
|
||||
@@ -29,6 +51,13 @@ reported to the maintainers of those projects. Results from security scans of Ap
|
||||
Superset dependencies found on its official Docker image can be remediated at release time
|
||||
by extending the image itself.
|
||||
|
||||
**Vulnerability Aggregation & CVE Attribution**
|
||||
|
||||
In accordance with MITRE CNA Operational Rules (4.1.10, 4.1.11, and 4.2.13), Apache Superset issues CVEs based on the underlying architectural root cause rather than the number of affected endpoints or exploit payloads.
|
||||
- Aggregation: If multiple exploit vectors stem from the same programmatic failure or shared vulnerable code, they must be aggregated into a single, comprehensive report.
|
||||
- Independent Fixes: Separate CVEs will only be assigned if the vulnerabilities reside in decoupled architectural modules and can be fixed independently of one another.
|
||||
Reports that fail to aggregate related findings will be merged during triage to ensure an accurate and defensible CVE record.
|
||||
|
||||
**Your responsible disclosure and collaboration are invaluable.**
|
||||
|
||||
## Extra Information
|
||||
|
||||
6
.github/actions/setup-docker/action.yml
vendored
6
.github/actions/setup-docker/action.yml
vendored
@@ -26,16 +26,16 @@ runs:
|
||||
|
||||
- name: Set up QEMU
|
||||
if: ${{ inputs.build == 'true' }}
|
||||
uses: docker/setup-qemu-action@v3
|
||||
uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
if: ${{ inputs.build == 'true' }}
|
||||
uses: docker/setup-buildx-action@v3
|
||||
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
|
||||
|
||||
- name: Try to login to DockerHub
|
||||
if: ${{ inputs.login-to-dockerhub == 'true' }}
|
||||
continue-on-error: true
|
||||
uses: docker/login-action@v3
|
||||
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
|
||||
with:
|
||||
username: ${{ inputs.dockerhub-user }}
|
||||
password: ${{ inputs.dockerhub-token }}
|
||||
|
||||
4
.github/dependabot.yml
vendored
4
.github/dependabot.yml
vendored
@@ -4,6 +4,10 @@ updates:
|
||||
|
||||
- package-ecosystem: "github-actions"
|
||||
directory: "/"
|
||||
ignore:
|
||||
# Ignore temporarily as release schedule is too mentally taxing for dep-handling maintainers
|
||||
# Additionally, very few PRs are reviewed by this action.
|
||||
- dependency-name: anthropics/claude-code-action
|
||||
schedule:
|
||||
interval: "daily"
|
||||
|
||||
|
||||
5
.github/labeler.yml
vendored
5
.github/labeler.yml
vendored
@@ -17,6 +17,11 @@
|
||||
- any-glob-to-any-file:
|
||||
- 'superset/migrations/**'
|
||||
|
||||
"risk:ci-script":
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- 'scripts/**'
|
||||
|
||||
############################################
|
||||
# Dependencies
|
||||
############################################
|
||||
|
||||
20
.github/workflows/bump-python-package.yml
vendored
20
.github/workflows/bump-python-package.yml
vendored
@@ -32,7 +32,7 @@ jobs:
|
||||
steps:
|
||||
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
persist-credentials: true
|
||||
ref: master
|
||||
@@ -41,7 +41,7 @@ jobs:
|
||||
uses: ./.github/actions/setup-supersetbot/
|
||||
|
||||
- name: Set up Python ${{ inputs.python-version }}
|
||||
uses: actions/setup-python@v6
|
||||
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6
|
||||
with:
|
||||
python-version: "3.10"
|
||||
|
||||
@@ -51,27 +51,31 @@ jobs:
|
||||
- name: supersetbot bump-python -p "${{ github.event.inputs.package }}"
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
INPUT_PACKAGE: ${{ github.event.inputs.package }}
|
||||
INPUT_GROUP: ${{ github.event.inputs.group }}
|
||||
INPUT_EXTRA_FLAGS: ${{ github.event.inputs.extra-flags }}
|
||||
INPUT_LIMIT: ${{ github.event.inputs.limit }}
|
||||
run: |
|
||||
git config --global user.email "action@github.com"
|
||||
git config --global user.name "GitHub Action"
|
||||
|
||||
PACKAGE_OPT=""
|
||||
if [ -n "${{ github.event.inputs.package }}" ]; then
|
||||
PACKAGE_OPT="-p ${{ github.event.inputs.package }}"
|
||||
if [ -n "${INPUT_PACKAGE}" ]; then
|
||||
PACKAGE_OPT="-p ${INPUT_PACKAGE}"
|
||||
fi
|
||||
|
||||
GROUP_OPT=""
|
||||
if [ -n "${{ github.event.inputs.group }}" ]; then
|
||||
GROUP_OPT="-g ${{ github.event.inputs.group }}"
|
||||
if [ -n "${INPUT_GROUP}" ]; then
|
||||
GROUP_OPT="-g ${INPUT_GROUP}"
|
||||
fi
|
||||
|
||||
EXTRA_FLAGS="${{ github.event.inputs.extra-flags }}"
|
||||
EXTRA_FLAGS="${INPUT_EXTRA_FLAGS}"
|
||||
|
||||
supersetbot bump-python \
|
||||
--verbose \
|
||||
--use-current-repo \
|
||||
--include-subpackages \
|
||||
--limit ${{ github.event.inputs.limit }} \
|
||||
--limit ${INPUT_LIMIT} \
|
||||
$PACKAGE_OPT \
|
||||
$GROUP_OPT \
|
||||
$EXTRA_FLAGS
|
||||
|
||||
2
.github/workflows/cancel_duplicates.yml
vendored
2
.github/workflows/cancel_duplicates.yml
vendored
@@ -31,7 +31,7 @@ jobs:
|
||||
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
if: steps.check_queued.outputs.count >= 20
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
|
||||
- name: Cancel duplicate workflow runs
|
||||
if: steps.check_queued.outputs.count >= 20
|
||||
|
||||
2
.github/workflows/check-python-deps.yml
vendored
2
.github/workflows/check-python-deps.yml
vendored
@@ -18,7 +18,7 @@ jobs:
|
||||
runs-on: ubuntu-22.04
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
|
||||
@@ -25,9 +25,9 @@ jobs:
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
- name: Check and notify
|
||||
uses: actions/github-script@v8
|
||||
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
|
||||
with:
|
||||
github-token: ${{ github.token }}
|
||||
script: |
|
||||
|
||||
6
.github/workflows/claude.yml
vendored
6
.github/workflows/claude.yml
vendored
@@ -44,7 +44,7 @@ jobs:
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: Comment access denied
|
||||
uses: actions/github-script@v8
|
||||
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
|
||||
with:
|
||||
script: |
|
||||
const message = `👋 Hi @${{ github.event.comment.user.login || github.event.review.user.login || github.event.issue.user.login }}!
|
||||
@@ -71,12 +71,12 @@ jobs:
|
||||
id-token: write
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
fetch-depth: 1
|
||||
|
||||
- name: Run Claude PR Action
|
||||
uses: anthropics/claude-code-action@beta
|
||||
uses: anthropics/claude-code-action@5fb899572b81d2bb648d4d187173a2f423a9677c # beta
|
||||
with:
|
||||
anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}
|
||||
timeout_minutes: "60"
|
||||
|
||||
2
.github/workflows/codeql-analysis.yml
vendored
2
.github/workflows/codeql-analysis.yml
vendored
@@ -31,7 +31,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
|
||||
- name: Check for file changes
|
||||
id: check
|
||||
|
||||
6
.github/workflows/dependency-review.yml
vendored
6
.github/workflows/dependency-review.yml
vendored
@@ -27,9 +27,9 @@ jobs:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: "Checkout Repository"
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
- name: "Dependency Review"
|
||||
uses: actions/dependency-review-action@v4
|
||||
uses: actions/dependency-review-action@2031cfc080254a8a887f58cffee85186f0e49e48 # v4.9.0
|
||||
continue-on-error: true
|
||||
with:
|
||||
fail-on-severity: critical
|
||||
@@ -49,7 +49,7 @@ jobs:
|
||||
runs-on: ubuntu-22.04
|
||||
steps:
|
||||
- name: "Checkout Repository"
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
|
||||
- name: Setup Python
|
||||
uses: ./.github/actions/setup-backend/
|
||||
|
||||
21
.github/workflows/docker.yml
vendored
21
.github/workflows/docker.yml
vendored
@@ -42,7 +42,7 @@ jobs:
|
||||
steps:
|
||||
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
@@ -101,23 +101,6 @@ jobs:
|
||||
docker images $IMAGE_TAG
|
||||
docker history $IMAGE_TAG
|
||||
|
||||
# Scan for vulnerabilities in built container image after pushes to mainline branch.
|
||||
- name: Run Trivy container image vulnerabity scan
|
||||
if: github.event_name == 'push' && github.ref == 'refs/heads/master' && (steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker) && matrix.build_preset == 'lean'
|
||||
uses: aquasecurity/trivy-action@97e0b3872f55f89b95b2f65b3dbab56962816478 # v0.34.2
|
||||
with:
|
||||
image-ref: ${{ env.IMAGE_TAG }}
|
||||
format: 'sarif'
|
||||
output: 'trivy-results.sarif'
|
||||
vuln-type: 'os'
|
||||
severity: 'CRITICAL,HIGH'
|
||||
ignore-unfixed: true
|
||||
- name: Upload Trivy scan results to GitHub Security tab
|
||||
if: github.event_name == 'push' && github.ref == 'refs/heads/master' && (steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker) && matrix.build_preset == 'lean'
|
||||
uses: github/codeql-action/upload-sarif@1b168cd39490f61582a9beae412bb7057a6b2c4e # v4.31.8
|
||||
with:
|
||||
sarif_file: 'trivy-results.sarif'
|
||||
|
||||
- name: docker-compose sanity check
|
||||
if: (steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker) && matrix.build_preset == 'dev'
|
||||
shell: bash
|
||||
@@ -134,7 +117,7 @@ jobs:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Check for file changes
|
||||
|
||||
8
.github/workflows/embedded-sdk-release.yml
vendored
8
.github/workflows/embedded-sdk-release.yml
vendored
@@ -16,10 +16,12 @@ jobs:
|
||||
id: check
|
||||
shell: bash
|
||||
run: |
|
||||
if [ -n "${{ (secrets.NPM_TOKEN != '') || '' }}" ]; then
|
||||
if [ -n "${NPM_TOKEN}" ]; then
|
||||
echo "has-secrets=1" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
|
||||
env:
|
||||
NPM_TOKEN: ${{ (secrets.NPM_TOKEN != '') || '' }}
|
||||
build:
|
||||
needs: config
|
||||
if: needs.config.outputs.has-secrets
|
||||
@@ -28,8 +30,8 @@ jobs:
|
||||
run:
|
||||
working-directory: superset-embedded-sdk
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- uses: actions/setup-node@v6
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
- uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
|
||||
with:
|
||||
node-version-file: './superset-embedded-sdk/.nvmrc'
|
||||
registry-url: 'https://registry.npmjs.org'
|
||||
|
||||
4
.github/workflows/embedded-sdk-test.yml
vendored
4
.github/workflows/embedded-sdk-test.yml
vendored
@@ -18,8 +18,8 @@ jobs:
|
||||
run:
|
||||
working-directory: superset-embedded-sdk
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- uses: actions/setup-node@v6
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
- uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
|
||||
with:
|
||||
node-version-file: './superset-embedded-sdk/.nvmrc'
|
||||
registry-url: 'https://registry.npmjs.org'
|
||||
|
||||
10
.github/workflows/ephemeral-env-pr-close.yml
vendored
10
.github/workflows/ephemeral-env-pr-close.yml
vendored
@@ -20,10 +20,12 @@ jobs:
|
||||
id: check
|
||||
shell: bash
|
||||
run: |
|
||||
if [ -n "${{ (secrets.AWS_ACCESS_KEY_ID != '' && secrets.AWS_SECRET_ACCESS_KEY != '') || '' }}" ]; then
|
||||
if [ -n "${AWS_ACCESS_KEY_ID}" ]; then
|
||||
echo "has-secrets=1" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
|
||||
env:
|
||||
AWS_ACCESS_KEY_ID: ${{ (secrets.AWS_ACCESS_KEY_ID != '' && secrets.AWS_SECRET_ACCESS_KEY != '') || '' }}
|
||||
ephemeral-env-cleanup:
|
||||
needs: config
|
||||
if: needs.config.outputs.has-secrets
|
||||
@@ -33,7 +35,7 @@ jobs:
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: Configure AWS credentials
|
||||
uses: aws-actions/configure-aws-credentials@v6
|
||||
uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7 # v6
|
||||
with:
|
||||
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
@@ -56,7 +58,7 @@ jobs:
|
||||
- name: Login to Amazon ECR
|
||||
if: steps.describe-services.outputs.active == 'true'
|
||||
id: login-ecr
|
||||
uses: aws-actions/amazon-ecr-login@v2
|
||||
uses: aws-actions/amazon-ecr-login@376925c9d111252e87ae59691e5a442dd100ef6a # v2
|
||||
|
||||
- name: Delete ECR image tag
|
||||
if: steps.describe-services.outputs.active == 'true'
|
||||
@@ -69,7 +71,7 @@ jobs:
|
||||
|
||||
- name: Comment (success)
|
||||
if: steps.describe-services.outputs.active == 'true'
|
||||
uses: actions/github-script@v8
|
||||
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
|
||||
with:
|
||||
github-token: ${{github.token}}
|
||||
script: |
|
||||
|
||||
40
.github/workflows/ephemeral-env.yml
vendored
40
.github/workflows/ephemeral-env.yml
vendored
@@ -47,7 +47,7 @@ jobs:
|
||||
id: eval-label
|
||||
run: |
|
||||
if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
|
||||
LABEL_NAME="${{ github.event.inputs.label_name }}"
|
||||
LABEL_NAME="${INPUT_LABEL_NAME}"
|
||||
else
|
||||
LABEL_NAME="${{ github.event.label.name }}"
|
||||
fi
|
||||
@@ -60,10 +60,12 @@ jobs:
|
||||
echo "result=noop" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
env:
|
||||
INPUT_LABEL_NAME: ${{ github.event.inputs.label_name }}
|
||||
- name: Get event SHA
|
||||
id: get-sha
|
||||
if: steps.eval-label.outputs.result == 'up'
|
||||
uses: actions/github-script@v8
|
||||
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
script: |
|
||||
@@ -94,7 +96,7 @@ jobs:
|
||||
core.setOutput("sha", prSha);
|
||||
|
||||
- name: Looking for feature flags in PR description
|
||||
uses: actions/github-script@v8
|
||||
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
|
||||
id: eval-feature-flags
|
||||
if: steps.eval-label.outputs.result == 'up'
|
||||
with:
|
||||
@@ -116,7 +118,7 @@ jobs:
|
||||
return results;
|
||||
|
||||
- name: Reply with confirmation comment
|
||||
uses: actions/github-script@v8
|
||||
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
|
||||
if: steps.eval-label.outputs.result == 'up'
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
@@ -160,7 +162,7 @@ jobs:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ needs.ephemeral-env-label.outputs.sha }} : ${{steps.get-sha.outputs.sha}} )"
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
ref: ${{ needs.ephemeral-env-label.outputs.sha }}
|
||||
persist-credentials: false
|
||||
@@ -189,7 +191,7 @@ jobs:
|
||||
--extra-flags "--build-arg INCLUDE_CHROMIUM=false"
|
||||
|
||||
- name: Configure AWS credentials
|
||||
uses: aws-actions/configure-aws-credentials@v6
|
||||
uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7 # v6
|
||||
with:
|
||||
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
@@ -197,7 +199,7 @@ jobs:
|
||||
|
||||
- name: Login to Amazon ECR
|
||||
id: login-ecr
|
||||
uses: aws-actions/amazon-ecr-login@v2
|
||||
uses: aws-actions/amazon-ecr-login@376925c9d111252e87ae59691e5a442dd100ef6a # v2
|
||||
|
||||
- name: Load, tag and push image to ECR
|
||||
id: push-image
|
||||
@@ -220,12 +222,12 @@ jobs:
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Configure AWS credentials
|
||||
uses: aws-actions/configure-aws-credentials@v6
|
||||
uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7 # v6
|
||||
with:
|
||||
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
@@ -233,7 +235,7 @@ jobs:
|
||||
|
||||
- name: Login to Amazon ECR
|
||||
id: login-ecr
|
||||
uses: aws-actions/amazon-ecr-login@v2
|
||||
uses: aws-actions/amazon-ecr-login@376925c9d111252e87ae59691e5a442dd100ef6a # v2
|
||||
|
||||
- name: Check target image exists in ECR
|
||||
id: check-image
|
||||
@@ -248,7 +250,7 @@ jobs:
|
||||
|
||||
- name: Fail on missing container image
|
||||
if: steps.check-image.outcome == 'failure'
|
||||
uses: actions/github-script@v8
|
||||
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
|
||||
with:
|
||||
github-token: ${{ github.token }}
|
||||
script: |
|
||||
@@ -263,7 +265,7 @@ jobs:
|
||||
|
||||
- name: Fill in the new image ID in the Amazon ECS task definition
|
||||
id: task-def
|
||||
uses: aws-actions/amazon-ecs-render-task-definition@v1
|
||||
uses: aws-actions/amazon-ecs-render-task-definition@77954e213ba1f9f9cb016b86a1d4f6fcdea0d57e # v1
|
||||
with:
|
||||
task-definition: .github/workflows/ecs-task-definition.json
|
||||
container-name: superset-ci
|
||||
@@ -276,7 +278,9 @@ jobs:
|
||||
- name: Describe ECS service
|
||||
id: describe-services
|
||||
run: |
|
||||
echo "active=$(aws ecs describe-services --cluster superset-ci --services pr-${{ github.event.inputs.issue_number || github.event.pull_request.number }}-service | jq '.services[] | select(.status == "ACTIVE") | any')" >> $GITHUB_OUTPUT
|
||||
echo "active=$(aws ecs describe-services --cluster superset-ci --services pr-${INPUT_ISSUE_NUMBER}-service | jq '.services[] | select(.status == "ACTIVE") | any')" >> $GITHUB_OUTPUT
|
||||
env:
|
||||
INPUT_ISSUE_NUMBER: ${{ github.event.inputs.issue_number || github.event.pull_request.number }}
|
||||
- name: Create ECS service
|
||||
id: create-service
|
||||
if: steps.describe-services.outputs.active != 'true'
|
||||
@@ -296,7 +300,7 @@ jobs:
|
||||
--tags key=pr,value=$PR_NUMBER key=github_user,value=${{ github.actor }}
|
||||
- name: Deploy Amazon ECS task definition
|
||||
id: deploy-task
|
||||
uses: aws-actions/amazon-ecs-deploy-task-definition@v2
|
||||
uses: aws-actions/amazon-ecs-deploy-task-definition@fc8fc60f3a60ffd500fcb13b209c59d221ac8c8c # v2
|
||||
with:
|
||||
task-definition: ${{ steps.task-def.outputs.task-definition }}
|
||||
service: pr-${{ github.event.inputs.issue_number || github.event.pull_request.number }}-service
|
||||
@@ -307,7 +311,9 @@ jobs:
|
||||
- name: List tasks
|
||||
id: list-tasks
|
||||
run: |
|
||||
echo "task=$(aws ecs list-tasks --cluster superset-ci --service-name pr-${{ github.event.inputs.issue_number || github.event.pull_request.number }}-service | jq '.taskArns | first')" >> $GITHUB_OUTPUT
|
||||
echo "task=$(aws ecs list-tasks --cluster superset-ci --service-name pr-${INPUT_ISSUE_NUMBER}-service | jq '.taskArns | first')" >> $GITHUB_OUTPUT
|
||||
env:
|
||||
INPUT_ISSUE_NUMBER: ${{ github.event.inputs.issue_number || github.event.pull_request.number }}
|
||||
- name: Get network interface
|
||||
id: get-eni
|
||||
run: |
|
||||
@@ -318,7 +324,7 @@ jobs:
|
||||
echo "ip=$(aws ec2 describe-network-interfaces --network-interface-ids ${{ steps.get-eni.outputs.eni }} | jq -r '.NetworkInterfaces | first | .Association.PublicIp')" >> $GITHUB_OUTPUT
|
||||
- name: Comment (success)
|
||||
if: ${{ success() }}
|
||||
uses: actions/github-script@v8
|
||||
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
|
||||
with:
|
||||
github-token: ${{github.token}}
|
||||
script: |
|
||||
@@ -331,7 +337,7 @@ jobs:
|
||||
});
|
||||
- name: Comment (failure)
|
||||
if: ${{ failure() }}
|
||||
uses: actions/github-script@v8
|
||||
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
|
||||
with:
|
||||
github-token: ${{github.token}}
|
||||
script: |
|
||||
|
||||
8
.github/workflows/generate-FOSSA-report.yml
vendored
8
.github/workflows/generate-FOSSA-report.yml
vendored
@@ -16,10 +16,12 @@ jobs:
|
||||
id: check
|
||||
shell: bash
|
||||
run: |
|
||||
if [ -n "${{ (secrets.FOSSA_API_KEY != '' ) || '' }}" ]; then
|
||||
if [ -n "${FOSSA_API_KEY}" ]; then
|
||||
echo "has-secrets=1" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
|
||||
env:
|
||||
FOSSA_API_KEY: ${{ (secrets.FOSSA_API_KEY != '' ) || '' }}
|
||||
license_check:
|
||||
needs: config
|
||||
if: needs.config.outputs.has-secrets
|
||||
@@ -27,12 +29,12 @@ jobs:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
- name: Setup Java
|
||||
uses: actions/setup-java@v5
|
||||
uses: actions/setup-java@be666c2fcd27ec809703dec50e508c2fdc7f6654 # v5
|
||||
with:
|
||||
distribution: "temurin"
|
||||
java-version: "11"
|
||||
|
||||
@@ -14,10 +14,10 @@ jobs:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: Checkout Repository
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@v6
|
||||
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
|
||||
with:
|
||||
node-version: '20'
|
||||
|
||||
|
||||
2
.github/workflows/issue_creation.yml
vendored
2
.github/workflows/issue_creation.yml
vendored
@@ -17,7 +17,7 @@ jobs:
|
||||
steps:
|
||||
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
|
||||
2
.github/workflows/latest-release-tag.yml
vendored
2
.github/workflows/latest-release-tag.yml
vendored
@@ -12,7 +12,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
|
||||
4
.github/workflows/license-check.yml
vendored
4
.github/workflows/license-check.yml
vendored
@@ -15,12 +15,12 @@ jobs:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
- name: Setup Java
|
||||
uses: actions/setup-java@v5
|
||||
uses: actions/setup-java@be666c2fcd27ec809703dec50e508c2fdc7f6654 # v5
|
||||
with:
|
||||
distribution: 'temurin'
|
||||
java-version: '11'
|
||||
|
||||
2
.github/workflows/no-hold-label.yml
vendored
2
.github/workflows/no-hold-label.yml
vendored
@@ -17,7 +17,7 @@ jobs:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: Check for 'hold' label
|
||||
uses: actions/github-script@v8
|
||||
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
|
||||
with:
|
||||
github-token: ${{secrets.GITHUB_TOKEN}}
|
||||
script: |
|
||||
|
||||
2
.github/workflows/pr-lint.yml
vendored
2
.github/workflows/pr-lint.yml
vendored
@@ -16,7 +16,7 @@ jobs:
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
|
||||
16
.github/workflows/pre-commit.yml
vendored
16
.github/workflows/pre-commit.yml
vendored
@@ -24,7 +24,7 @@ jobs:
|
||||
python-version: ["current", "previous", "next"]
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
@@ -42,7 +42,7 @@ jobs:
|
||||
echo "HOMEBREW_REPOSITORY=$HOMEBREW_REPOSITORY" >>"${GITHUB_ENV}"
|
||||
brew install norwoodj/tap/helm-docs
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v6
|
||||
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
|
||||
with:
|
||||
node-version: '20'
|
||||
|
||||
@@ -57,18 +57,24 @@ jobs:
|
||||
yarn install --immutable
|
||||
|
||||
- name: Cache pre-commit environments
|
||||
uses: actions/cache@v5
|
||||
uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5
|
||||
with:
|
||||
path: ~/.cache/pre-commit
|
||||
key: pre-commit-v2-${{ runner.os }}-py${{ matrix.python-version }}-${{ hashFiles('.pre-commit-config.yaml') }}
|
||||
restore-keys: |
|
||||
pre-commit-v2-${{ runner.os }}-py${{ matrix.python-version }}-
|
||||
|
||||
- name: Get changed files
|
||||
id: changed_files
|
||||
uses: ./.github/actions/file-changes-action
|
||||
with:
|
||||
output: ' '
|
||||
|
||||
- name: pre-commit
|
||||
run: |
|
||||
set +e # Don't exit immediately on failure
|
||||
export SKIP=eslint-frontend,type-checking-frontend
|
||||
pre-commit run --all-files
|
||||
export SKIP=type-checking-frontend
|
||||
pre-commit run --files ${{ steps.changed_files.outputs.files }}
|
||||
PRE_COMMIT_EXIT_CODE=$?
|
||||
git diff --quiet --exit-code
|
||||
GIT_DIFF_EXIT_CODE=$?
|
||||
|
||||
12
.github/workflows/release.yml
vendored
12
.github/workflows/release.yml
vendored
@@ -16,17 +16,19 @@ jobs:
|
||||
id: check
|
||||
shell: bash
|
||||
run: |
|
||||
if [ -n "${{ (secrets.NPM_TOKEN != '' && secrets.GH_PERSONAL_ACCESS_TOKEN != '') || '' }}" ]; then
|
||||
if [ -n "${NPM_TOKEN}" ]; then
|
||||
echo "has-secrets=1" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
|
||||
env:
|
||||
NPM_TOKEN: ${{ (secrets.NPM_TOKEN != '' && secrets.GH_PERSONAL_ACCESS_TOKEN != '') || '' }}
|
||||
build:
|
||||
needs: config
|
||||
if: needs.config.outputs.has-secrets
|
||||
name: Bump version and publish package(s)
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
# pulls all commits (needed for lerna / semantic release to correctly version)
|
||||
fetch-depth: 0
|
||||
@@ -42,13 +44,13 @@ jobs:
|
||||
|
||||
- name: Install Node.js
|
||||
if: env.HAS_TAGS
|
||||
uses: actions/setup-node@v6
|
||||
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
|
||||
with:
|
||||
node-version-file: './superset-frontend/.nvmrc'
|
||||
|
||||
- name: Cache npm
|
||||
if: env.HAS_TAGS
|
||||
uses: actions/cache@v5
|
||||
uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5
|
||||
with:
|
||||
path: ~/.npm # npm cache files are stored in `~/.npm` on Linux/macOS
|
||||
key: ${{ runner.OS }}-node-${{ hashFiles('**/package-lock.json') }}
|
||||
@@ -62,7 +64,7 @@ jobs:
|
||||
run: echo "dir=$(npm config get cache)" >> $GITHUB_OUTPUT
|
||||
- name: Cache npm
|
||||
if: env.HAS_TAGS
|
||||
uses: actions/cache@v5
|
||||
uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5
|
||||
id: npm-cache # use this to check for `cache-hit` (`steps.npm-cache.outputs.cache-hit != 'true'`)
|
||||
with:
|
||||
path: ${{ steps.npm-cache-dir-path.outputs.dir }}
|
||||
|
||||
18
.github/workflows/showtime-trigger.yml
vendored
18
.github/workflows/showtime-trigger.yml
vendored
@@ -37,7 +37,7 @@ jobs:
|
||||
steps:
|
||||
- name: Security Check - Authorize Maintainers Only
|
||||
id: auth
|
||||
uses: actions/github-script@v8
|
||||
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
@@ -102,10 +102,12 @@ jobs:
|
||||
- name: Install Superset Showtime
|
||||
if: steps.auth.outputs.authorized == 'true'
|
||||
run: |
|
||||
echo "::notice::Maintainer ${{ github.actor }} triggered deploy for PR ${{ github.event.pull_request.number || github.event.inputs.pr_number }}"
|
||||
echo "::notice::Maintainer ${{ github.actor }} triggered deploy for PR ${PULL_REQUEST_NUMBER}"
|
||||
pip install --upgrade superset-showtime
|
||||
showtime version
|
||||
|
||||
env:
|
||||
PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number || github.event.inputs.pr_number }}
|
||||
- name: Check what actions are needed
|
||||
if: steps.auth.outputs.authorized == 'true'
|
||||
id: check
|
||||
@@ -113,12 +115,14 @@ jobs:
|
||||
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
INPUT_PR_NUMBER: ${{ github.event.inputs.pr_number }}
|
||||
INPUT_SHA: ${{ github.event.inputs.sha }}
|
||||
run: |
|
||||
# Bulletproof PR number extraction
|
||||
if [[ -n "${{ github.event.pull_request.number }}" ]]; then
|
||||
PR_NUM="${{ github.event.pull_request.number }}"
|
||||
elif [[ -n "${{ github.event.inputs.pr_number }}" ]]; then
|
||||
PR_NUM="${{ github.event.inputs.pr_number }}"
|
||||
elif [[ -n "${INPUT_PR_NUMBER}" ]]; then
|
||||
PR_NUM="${INPUT_PR_NUMBER}"
|
||||
else
|
||||
echo "❌ No PR number found in event or inputs"
|
||||
exit 1
|
||||
@@ -127,8 +131,8 @@ jobs:
|
||||
echo "Using PR number: $PR_NUM"
|
||||
|
||||
# Run sync check-only with optional SHA override
|
||||
if [[ -n "${{ github.event.inputs.sha }}" ]]; then
|
||||
OUTPUT=$(python -m showtime sync $PR_NUM --check-only --sha "${{ github.event.inputs.sha }}")
|
||||
if [[ -n "${INPUT_SHA}" ]]; then
|
||||
OUTPUT=$(python -m showtime sync $PR_NUM --check-only --sha "${INPUT_SHA}")
|
||||
else
|
||||
OUTPUT=$(python -m showtime sync $PR_NUM --check-only)
|
||||
fi
|
||||
@@ -147,7 +151,7 @@ jobs:
|
||||
|
||||
- name: Checkout PR code (only if build needed)
|
||||
if: steps.auth.outputs.authorized == 'true' && steps.check.outputs.build_needed == 'true'
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
ref: ${{ steps.check.outputs.target_sha }}
|
||||
persist-credentials: false
|
||||
|
||||
2
.github/workflows/superset-app-cli.yml
vendored
2
.github/workflows/superset-app-cli.yml
vendored
@@ -37,7 +37,7 @@ jobs:
|
||||
- 16379:6379
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
|
||||
14
.github/workflows/superset-docs-deploy.yml
vendored
14
.github/workflows/superset-docs-deploy.yml
vendored
@@ -27,10 +27,12 @@ jobs:
|
||||
id: check
|
||||
shell: bash
|
||||
run: |
|
||||
if [ -n "${{ (secrets.SUPERSET_SITE_BUILD != '' && secrets.SUPERSET_SITE_BUILD != '') || '' }}" ]; then
|
||||
if [ -n "${SUPERSET_SITE_BUILD}" ]; then
|
||||
echo "has-secrets=1" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
|
||||
env:
|
||||
SUPERSET_SITE_BUILD: ${{ (secrets.SUPERSET_SITE_BUILD != '' && secrets.SUPERSET_SITE_BUILD != '') || '' }}
|
||||
build-deploy:
|
||||
needs: config
|
||||
if: needs.config.outputs.has-secrets
|
||||
@@ -38,18 +40,18 @@ jobs:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: "Checkout ${{ github.event.workflow_run.head_sha || github.sha }}"
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
ref: ${{ github.event.workflow_run.head_sha || github.sha }}
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@v6
|
||||
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
|
||||
with:
|
||||
node-version-file: './docs/.nvmrc'
|
||||
- name: Setup Python
|
||||
uses: ./.github/actions/setup-backend/
|
||||
- uses: actions/setup-java@v5
|
||||
- uses: actions/setup-java@be666c2fcd27ec809703dec50e508c2fdc7f6654 # v5
|
||||
with:
|
||||
distribution: 'zulu'
|
||||
java-version: '21'
|
||||
@@ -68,7 +70,7 @@ jobs:
|
||||
yarn install --check-cache
|
||||
- name: Download database diagnostics (if triggered by integration tests)
|
||||
if: github.event_name == 'workflow_run' && github.event.workflow_run.conclusion == 'success'
|
||||
uses: dawidd6/action-download-artifact@v16
|
||||
uses: dawidd6/action-download-artifact@8305c0f1062bb0d184d09ef4493ecb9288447732 # v20
|
||||
continue-on-error: true
|
||||
with:
|
||||
workflow: superset-python-integrationtest.yml
|
||||
@@ -77,7 +79,7 @@ jobs:
|
||||
path: docs/src/data/
|
||||
- name: Try to download latest diagnostics (for push/dispatch triggers)
|
||||
if: github.event_name != 'workflow_run'
|
||||
uses: dawidd6/action-download-artifact@v16
|
||||
uses: dawidd6/action-download-artifact@8305c0f1062bb0d184d09ef4493ecb9288447732 # v20
|
||||
continue-on-error: true
|
||||
with:
|
||||
workflow: superset-python-integrationtest.yml
|
||||
|
||||
13
.github/workflows/superset-docs-verify.yml
vendored
13
.github/workflows/superset-docs-verify.yml
vendored
@@ -24,7 +24,7 @@ jobs:
|
||||
name: Link Checking
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
# Do not bump this linkinator-action version without opening
|
||||
# an ASF Infra ticket to allow the new version first!
|
||||
- uses: JustinBeckwith/linkinator-action@af984b9f30f63e796ae2ea5be5e07cb587f1bbd9 # v2.3
|
||||
@@ -67,12 +67,12 @@ jobs:
|
||||
working-directory: docs
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@v6
|
||||
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
|
||||
with:
|
||||
node-version-file: './docs/.nvmrc'
|
||||
- name: yarn install
|
||||
@@ -98,25 +98,26 @@ jobs:
|
||||
working-directory: docs
|
||||
steps:
|
||||
- name: "Checkout PR head: ${{ github.event.workflow_run.head_sha }}"
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
ref: ${{ github.event.workflow_run.head_sha }}
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@v6
|
||||
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
|
||||
with:
|
||||
node-version-file: './docs/.nvmrc'
|
||||
- name: yarn install
|
||||
run: |
|
||||
yarn install --check-cache
|
||||
- name: Download database diagnostics from integration tests
|
||||
uses: dawidd6/action-download-artifact@v16
|
||||
uses: dawidd6/action-download-artifact@8305c0f1062bb0d184d09ef4493ecb9288447732 # v20
|
||||
with:
|
||||
workflow: superset-python-integrationtest.yml
|
||||
run_id: ${{ github.event.workflow_run.id }}
|
||||
name: database-diagnostics
|
||||
path: docs/src/data/
|
||||
if_no_artifact_found: 'warning'
|
||||
- name: Use fresh diagnostics
|
||||
run: |
|
||||
if [ -f "src/data/databases-diagnostics.json" ]; then
|
||||
|
||||
20
.github/workflows/superset-e2e.yml
vendored
20
.github/workflows/superset-e2e.yml
vendored
@@ -69,21 +69,21 @@ jobs:
|
||||
# Conditional checkout based on context
|
||||
- name: Checkout for push or pull_request event
|
||||
if: github.event_name == 'push' || github.event_name == 'pull_request'
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
- name: Checkout using ref (workflow_dispatch)
|
||||
if: github.event_name == 'workflow_dispatch' && github.event.inputs.ref != ''
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
ref: ${{ github.event.inputs.ref }}
|
||||
submodules: recursive
|
||||
- name: Checkout using PR ID (workflow_dispatch)
|
||||
if: github.event_name == 'workflow_dispatch' && github.event.inputs.pr_id != ''
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
ref: refs/pull/${{ github.event.inputs.pr_id }}/merge
|
||||
@@ -109,7 +109,7 @@ jobs:
|
||||
run: testdata
|
||||
- name: Setup Node.js
|
||||
if: steps.check.outputs.python || steps.check.outputs.frontend
|
||||
uses: actions/setup-node@v6
|
||||
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
|
||||
with:
|
||||
node-version-file: './superset-frontend/.nvmrc'
|
||||
- name: Install npm dependencies
|
||||
@@ -146,7 +146,7 @@ jobs:
|
||||
SAFE_APP_ROOT=${APP_ROOT//\//_}
|
||||
echo "safe_app_root=$SAFE_APP_ROOT" >> $GITHUB_OUTPUT
|
||||
- name: Upload Artifacts
|
||||
uses: actions/upload-artifact@v7
|
||||
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7
|
||||
if: failure()
|
||||
with:
|
||||
path: ${{ github.workspace }}/superset-frontend/cypress-base/cypress/screenshots
|
||||
@@ -186,21 +186,21 @@ jobs:
|
||||
# Conditional checkout based on context (same as Cypress workflow)
|
||||
- name: Checkout for push or pull_request event
|
||||
if: github.event_name == 'push' || github.event_name == 'pull_request'
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
- name: Checkout using ref (workflow_dispatch)
|
||||
if: github.event_name == 'workflow_dispatch' && github.event.inputs.ref != ''
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
ref: ${{ github.event.inputs.ref }}
|
||||
submodules: recursive
|
||||
- name: Checkout using PR ID (workflow_dispatch)
|
||||
if: github.event_name == 'workflow_dispatch' && github.event.inputs.pr_id != ''
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
ref: refs/pull/${{ github.event.inputs.pr_id }}/merge
|
||||
@@ -226,7 +226,7 @@ jobs:
|
||||
run: playwright_testdata
|
||||
- name: Setup Node.js
|
||||
if: steps.check.outputs.python || steps.check.outputs.frontend
|
||||
uses: actions/setup-node@v6
|
||||
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
|
||||
with:
|
||||
node-version-file: './superset-frontend/.nvmrc'
|
||||
- name: Install npm dependencies
|
||||
@@ -259,7 +259,7 @@ jobs:
|
||||
SAFE_APP_ROOT=${APP_ROOT//\//_}
|
||||
echo "safe_app_root=$SAFE_APP_ROOT" >> $GITHUB_OUTPUT
|
||||
- name: Upload Playwright Artifacts
|
||||
uses: actions/upload-artifact@v7
|
||||
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7
|
||||
if: failure()
|
||||
with:
|
||||
path: |
|
||||
|
||||
@@ -24,7 +24,7 @@ jobs:
|
||||
working-directory: superset-extensions-cli
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
@@ -49,7 +49,7 @@ jobs:
|
||||
|
||||
- name: Upload coverage reports to Codecov
|
||||
if: steps.check.outputs.superset-extensions-cli
|
||||
uses: codecov/codecov-action@v5
|
||||
uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v5
|
||||
with:
|
||||
file: ./coverage.xml
|
||||
flags: superset-extensions-cli
|
||||
@@ -58,7 +58,7 @@ jobs:
|
||||
|
||||
- name: Upload HTML coverage report
|
||||
if: steps.check.outputs.superset-extensions-cli
|
||||
uses: actions/upload-artifact@v7
|
||||
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7
|
||||
with:
|
||||
name: superset-extensions-cli-coverage-html
|
||||
path: htmlcov/
|
||||
|
||||
35
.github/workflows/superset-frontend.yml
vendored
35
.github/workflows/superset-frontend.yml
vendored
@@ -23,7 +23,7 @@ jobs:
|
||||
should-run: ${{ steps.check.outputs.frontend }}
|
||||
steps:
|
||||
- name: Checkout Code
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
fetch-depth: 0
|
||||
@@ -54,14 +54,14 @@ jobs:
|
||||
- name: Save Docker Image as Artifact
|
||||
if: steps.check.outputs.frontend
|
||||
run: |
|
||||
docker save $TAG | gzip > docker-image.tar.gz
|
||||
docker save $TAG | zstd -3 --threads=0 > docker-image.tar.zst
|
||||
|
||||
- name: Upload Docker Image Artifact
|
||||
if: steps.check.outputs.frontend
|
||||
uses: actions/upload-artifact@v7
|
||||
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7
|
||||
with:
|
||||
name: docker-image
|
||||
path: docker-image.tar.gz
|
||||
path: docker-image.tar.zst
|
||||
|
||||
sharded-jest-tests:
|
||||
needs: frontend-build
|
||||
@@ -73,12 +73,13 @@ jobs:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: Download Docker Image Artifact
|
||||
uses: actions/download-artifact@v8
|
||||
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8
|
||||
with:
|
||||
name: docker-image
|
||||
|
||||
- name: Load Docker Image
|
||||
run: docker load < docker-image.tar.gz
|
||||
run: |
|
||||
zstd -d < docker-image.tar.zst | docker load
|
||||
|
||||
- name: npm run test with coverage
|
||||
run: |
|
||||
@@ -90,7 +91,7 @@ jobs:
|
||||
"npm run test -- --coverage --shard=${{ matrix.shard }}/8 --coverageReporters=json"
|
||||
|
||||
- name: Upload Coverage Artifact
|
||||
uses: actions/upload-artifact@v7
|
||||
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7
|
||||
with:
|
||||
name: coverage-artifacts-${{ matrix.shard }}
|
||||
path: superset-frontend/coverage
|
||||
@@ -103,14 +104,14 @@ jobs:
|
||||
id-token: write
|
||||
steps:
|
||||
- name: Checkout Code
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
fetch-depth: 0
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
|
||||
- name: Download Coverage Artifacts
|
||||
uses: actions/download-artifact@v8
|
||||
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8
|
||||
with:
|
||||
pattern: coverage-artifacts-*
|
||||
path: coverage/
|
||||
@@ -127,7 +128,7 @@ jobs:
|
||||
run: npx nyc merge coverage/ merged-output/coverage-summary.json
|
||||
|
||||
- name: Upload Code Coverage
|
||||
uses: codecov/codecov-action@v5
|
||||
uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v5
|
||||
with:
|
||||
flags: javascript
|
||||
use_oidc: true
|
||||
@@ -142,13 +143,13 @@ jobs:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: Download Docker Image Artifact
|
||||
uses: actions/download-artifact@v8
|
||||
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8
|
||||
with:
|
||||
name: docker-image
|
||||
|
||||
- name: Load Docker Image
|
||||
run: |
|
||||
docker load < docker-image.tar.gz
|
||||
zstd -d < docker-image.tar.zst | docker load
|
||||
|
||||
- name: lint
|
||||
run: |
|
||||
@@ -166,12 +167,13 @@ jobs:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: Download Docker Image Artifact
|
||||
uses: actions/download-artifact@v8
|
||||
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8
|
||||
with:
|
||||
name: docker-image
|
||||
|
||||
- name: Load Docker Image
|
||||
run: docker load < docker-image.tar.gz
|
||||
run: |
|
||||
zstd -d < docker-image.tar.zst | docker load
|
||||
|
||||
- name: Build Plugins Packages
|
||||
run: |
|
||||
@@ -184,12 +186,13 @@ jobs:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: Download Docker Image Artifact
|
||||
uses: actions/download-artifact@v8
|
||||
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8
|
||||
with:
|
||||
name: docker-image
|
||||
|
||||
- name: Load Docker Image
|
||||
run: docker load < docker-image.tar.gz
|
||||
run: |
|
||||
zstd -d < docker-image.tar.zst | docker load
|
||||
|
||||
- name: Build Storybook and Run Tests
|
||||
run: |
|
||||
|
||||
4
.github/workflows/superset-helm-lint.yml
vendored
4
.github/workflows/superset-helm-lint.yml
vendored
@@ -16,14 +16,14 @@ jobs:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Set up Helm
|
||||
uses: azure/setup-helm@v4
|
||||
uses: azure/setup-helm@dda3372f752e03dde6b3237bc9431cdc2f7a02a2 # v5.0.0
|
||||
with:
|
||||
version: v3.16.4
|
||||
|
||||
|
||||
6
.github/workflows/superset-helm-release.yml
vendored
6
.github/workflows/superset-helm-release.yml
vendored
@@ -29,7 +29,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
ref: ${{ inputs.ref || github.ref_name }}
|
||||
persist-credentials: true
|
||||
@@ -42,7 +42,7 @@ jobs:
|
||||
git config user.email "$GITHUB_ACTOR@users.noreply.github.com"
|
||||
|
||||
- name: Install Helm
|
||||
uses: azure/setup-helm@v4
|
||||
uses: azure/setup-helm@dda3372f752e03dde6b3237bc9431cdc2f7a02a2 # v5.0.0
|
||||
with:
|
||||
version: v3.5.4
|
||||
|
||||
@@ -101,7 +101,7 @@ jobs:
|
||||
CR_RELEASE_NAME_TEMPLATE: "superset-helm-chart-{{ .Version }}"
|
||||
|
||||
- name: Open Pull Request
|
||||
uses: actions/github-script@v8
|
||||
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
|
||||
with:
|
||||
script: |
|
||||
const branchName = '${{ env.branch_name }}';
|
||||
|
||||
10
.github/workflows/superset-playwright.yml
vendored
10
.github/workflows/superset-playwright.yml
vendored
@@ -60,21 +60,21 @@ jobs:
|
||||
# Conditional checkout based on context (same as Cypress workflow)
|
||||
- name: Checkout for push or pull_request event
|
||||
if: github.event_name == 'push' || github.event_name == 'pull_request'
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
- name: Checkout using ref (workflow_dispatch)
|
||||
if: github.event_name == 'workflow_dispatch' && github.event.inputs.ref != ''
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
ref: ${{ github.event.inputs.ref }}
|
||||
submodules: recursive
|
||||
- name: Checkout using PR ID (workflow_dispatch)
|
||||
if: github.event_name == 'workflow_dispatch' && github.event.inputs.pr_id != ''
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
ref: refs/pull/${{ github.event.inputs.pr_id }}/merge
|
||||
@@ -100,7 +100,7 @@ jobs:
|
||||
run: playwright_testdata
|
||||
- name: Setup Node.js
|
||||
if: steps.check.outputs.python || steps.check.outputs.frontend
|
||||
uses: actions/setup-node@v6
|
||||
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
|
||||
with:
|
||||
node-version-file: './superset-frontend/.nvmrc'
|
||||
- name: Install npm dependencies
|
||||
@@ -133,7 +133,7 @@ jobs:
|
||||
SAFE_APP_ROOT=${APP_ROOT//\//_}
|
||||
echo "safe_app_root=$SAFE_APP_ROOT" >> $GITHUB_OUTPUT
|
||||
- name: Upload Playwright Artifacts
|
||||
uses: actions/upload-artifact@v7
|
||||
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7
|
||||
if: failure()
|
||||
with:
|
||||
path: |
|
||||
|
||||
@@ -16,6 +16,8 @@ concurrency:
|
||||
jobs:
|
||||
test-mysql:
|
||||
runs-on: ubuntu-24.04
|
||||
permissions:
|
||||
id-token: write
|
||||
env:
|
||||
PYTHONPATH: ${{ github.workspace }}
|
||||
SUPERSET_CONFIG: tests.integration_tests.superset_test_config
|
||||
@@ -41,7 +43,7 @@ jobs:
|
||||
- 16379:6379
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
@@ -68,11 +70,12 @@ jobs:
|
||||
run: |
|
||||
./scripts/python_tests.sh
|
||||
- name: Upload code coverage
|
||||
uses: codecov/codecov-action@v5
|
||||
uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v5
|
||||
with:
|
||||
flags: python,mysql
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
verbose: true
|
||||
use_oidc: true
|
||||
slug: apache/superset
|
||||
- name: Generate database diagnostics for docs
|
||||
if: steps.check.outputs.python
|
||||
env:
|
||||
@@ -98,13 +101,15 @@ jobs:
|
||||
"
|
||||
- name: Upload database diagnostics artifact
|
||||
if: steps.check.outputs.python
|
||||
uses: actions/upload-artifact@v7
|
||||
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7
|
||||
with:
|
||||
name: database-diagnostics
|
||||
path: databases-diagnostics.json
|
||||
retention-days: 7
|
||||
test-postgres:
|
||||
runs-on: ubuntu-24.04
|
||||
permissions:
|
||||
id-token: write
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: ["current", "previous", "next"]
|
||||
@@ -129,7 +134,7 @@ jobs:
|
||||
- 16379:6379
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
@@ -159,14 +164,17 @@ jobs:
|
||||
run: |
|
||||
./scripts/python_tests.sh
|
||||
- name: Upload code coverage
|
||||
uses: codecov/codecov-action@v5
|
||||
uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v5
|
||||
with:
|
||||
flags: python,postgres
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
verbose: true
|
||||
use_oidc: true
|
||||
slug: apache/superset
|
||||
|
||||
test-sqlite:
|
||||
runs-on: ubuntu-24.04
|
||||
permissions:
|
||||
id-token: write
|
||||
env:
|
||||
PYTHONPATH: ${{ github.workspace }}
|
||||
SUPERSET_CONFIG: tests.integration_tests.superset_test_config
|
||||
@@ -182,7 +190,7 @@ jobs:
|
||||
- 16379:6379
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
@@ -211,8 +219,9 @@ jobs:
|
||||
run: |
|
||||
./scripts/python_tests.sh
|
||||
- name: Upload code coverage
|
||||
uses: codecov/codecov-action@v5
|
||||
uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v5
|
||||
with:
|
||||
flags: python,sqlite
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
verbose: true
|
||||
use_oidc: true
|
||||
slug: apache/superset
|
||||
|
||||
@@ -17,6 +17,8 @@ concurrency:
|
||||
jobs:
|
||||
test-postgres-presto:
|
||||
runs-on: ubuntu-24.04
|
||||
permissions:
|
||||
id-token: write
|
||||
env:
|
||||
PYTHONPATH: ${{ github.workspace }}
|
||||
SUPERSET_CONFIG: tests.integration_tests.superset_test_config
|
||||
@@ -48,7 +50,7 @@ jobs:
|
||||
- 16379:6379
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
@@ -77,14 +79,17 @@ jobs:
|
||||
run: |
|
||||
./scripts/python_tests.sh -m 'chart_data_flow or sql_json_flow'
|
||||
- name: Upload code coverage
|
||||
uses: codecov/codecov-action@v5
|
||||
uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v5
|
||||
with:
|
||||
flags: python,presto
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
verbose: true
|
||||
use_oidc: true
|
||||
slug: apache/superset
|
||||
|
||||
test-postgres-hive:
|
||||
runs-on: ubuntu-24.04
|
||||
permissions:
|
||||
id-token: write
|
||||
env:
|
||||
PYTHONPATH: ${{ github.workspace }}
|
||||
SUPERSET_CONFIG: tests.integration_tests.superset_test_config
|
||||
@@ -108,7 +113,7 @@ jobs:
|
||||
- 16379:6379
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
@@ -145,8 +150,9 @@ jobs:
|
||||
pip install -e .[hive]
|
||||
./scripts/python_tests.sh -m 'chart_data_flow or sql_json_flow'
|
||||
- name: Upload code coverage
|
||||
uses: codecov/codecov-action@v5
|
||||
uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v5
|
||||
with:
|
||||
flags: python,hive
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
verbose: true
|
||||
use_oidc: true
|
||||
slug: apache/superset
|
||||
|
||||
@@ -17,6 +17,8 @@ concurrency:
|
||||
jobs:
|
||||
unit-tests:
|
||||
runs-on: ubuntu-24.04
|
||||
permissions:
|
||||
id-token: write
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: ["previous", "current", "next"]
|
||||
@@ -24,7 +26,7 @@ jobs:
|
||||
PYTHONPATH: ${{ github.workspace }}
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
@@ -53,8 +55,9 @@ jobs:
|
||||
run: |
|
||||
pytest --durations-min=0.5 --cov=superset/sql/ ./tests/unit_tests/sql/ --cache-clear --cov-fail-under=100
|
||||
- name: Upload code coverage
|
||||
uses: codecov/codecov-action@v5
|
||||
uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v5
|
||||
with:
|
||||
flags: python,unit
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
verbose: true
|
||||
use_oidc: true
|
||||
slug: apache/superset
|
||||
|
||||
10
.github/workflows/superset-translations.yml
vendored
10
.github/workflows/superset-translations.yml
vendored
@@ -18,7 +18,7 @@ jobs:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
@@ -31,7 +31,7 @@ jobs:
|
||||
|
||||
- name: Setup Node.js
|
||||
if: steps.check.outputs.frontend
|
||||
uses: actions/setup-node@v6
|
||||
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
|
||||
with:
|
||||
node-version-file: './superset-frontend/.nvmrc'
|
||||
- name: Install dependencies
|
||||
@@ -49,7 +49,7 @@ jobs:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
@@ -62,6 +62,10 @@ jobs:
|
||||
- name: Setup Python
|
||||
if: steps.check.outputs.python
|
||||
uses: ./.github/actions/setup-backend/
|
||||
|
||||
- name: Install msgcat
|
||||
run: sudo apt update && sudo apt install gettext
|
||||
|
||||
- name: Test babel extraction
|
||||
if: steps.check.outputs.python
|
||||
run: ./scripts/translations/babel_update.sh
|
||||
|
||||
2
.github/workflows/superset-websocket.yml
vendored
2
.github/workflows/superset-websocket.yml
vendored
@@ -21,7 +21,7 @@ jobs:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Install dependencies
|
||||
|
||||
4
.github/workflows/supersetbot.yml
vendored
4
.github/workflows/supersetbot.yml
vendored
@@ -26,7 +26,7 @@ jobs:
|
||||
steps:
|
||||
- name: Quickly add thumbs up!
|
||||
if: github.event_name == 'issue_comment' && contains(github.event.comment.body, '@supersetbot')
|
||||
uses: actions/github-script@v8
|
||||
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
|
||||
with:
|
||||
script: |
|
||||
const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/')
|
||||
@@ -38,7 +38,7 @@ jobs:
|
||||
});
|
||||
|
||||
- name: "Checkout ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
|
||||
24
.github/workflows/tag-release.yml
vendored
24
.github/workflows/tag-release.yml
vendored
@@ -31,10 +31,12 @@ jobs:
|
||||
id: check
|
||||
shell: bash
|
||||
run: |
|
||||
if [ -n "${{ (secrets.DOCKERHUB_USER != '' && secrets.DOCKERHUB_TOKEN != '') || '' }}" ]; then
|
||||
if [ -n "${DOCKERHUB_USER}" ]; then
|
||||
echo "has-secrets=1" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
|
||||
env:
|
||||
DOCKERHUB_USER: ${{ (secrets.DOCKERHUB_USER != '' && secrets.DOCKERHUB_TOKEN != '') || '' }}
|
||||
docker-release:
|
||||
needs: config
|
||||
if: needs.config.outputs.has-secrets
|
||||
@@ -47,7 +49,7 @@ jobs:
|
||||
steps:
|
||||
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
@@ -60,7 +62,7 @@ jobs:
|
||||
build: "true"
|
||||
|
||||
- name: Use Node.js 20
|
||||
uses: actions/setup-node@v6
|
||||
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
|
||||
with:
|
||||
node-version: 20
|
||||
|
||||
@@ -72,17 +74,20 @@ jobs:
|
||||
DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}
|
||||
DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
INPUT_RELEASE: ${{ github.event.inputs.release }}
|
||||
INPUT_FORCE_LATEST: ${{ github.event.inputs.force-latest }}
|
||||
INPUT_GIT_REF: ${{ github.event.inputs.git-ref }}
|
||||
run: |
|
||||
RELEASE="${{ github.event.release.tag_name }}"
|
||||
FORCE_LATEST=""
|
||||
EVENT="${{github.event_name}}"
|
||||
if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then
|
||||
# in the case of a manually-triggered run, read release from input
|
||||
RELEASE="${{ github.event.inputs.release }}"
|
||||
if [ "${{ github.event.inputs.force-latest }}" = "true" ]; then
|
||||
RELEASE="${INPUT_RELEASE}"
|
||||
if [ "${INPUT_FORCE_LATEST}" = "true" ]; then
|
||||
FORCE_LATEST="--force-latest"
|
||||
fi
|
||||
git checkout "${{ github.event.inputs.git-ref }}"
|
||||
git checkout "${INPUT_GIT_REF}"
|
||||
EVENT="release"
|
||||
fi
|
||||
|
||||
@@ -107,12 +112,12 @@ jobs:
|
||||
steps:
|
||||
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Use Node.js 20
|
||||
uses: actions/setup-node@v6
|
||||
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
|
||||
with:
|
||||
node-version: 20
|
||||
|
||||
@@ -122,6 +127,7 @@ jobs:
|
||||
- name: Label the PRs with the right release-related labels
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
INPUT_RELEASE: ${{ github.event.inputs.release }}
|
||||
run: |
|
||||
export GITHUB_ACTOR=""
|
||||
git fetch --all --tags
|
||||
@@ -129,6 +135,6 @@ jobs:
|
||||
RELEASE="${{ github.event.release.tag_name }}"
|
||||
if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then
|
||||
# in the case of a manually-triggered run, read release from input
|
||||
RELEASE="${{ github.event.inputs.release }}"
|
||||
RELEASE="${INPUT_RELEASE}"
|
||||
fi
|
||||
supersetbot release-label $RELEASE
|
||||
|
||||
8
.github/workflows/tech-debt.yml
vendored
8
.github/workflows/tech-debt.yml
vendored
@@ -19,10 +19,12 @@ jobs:
|
||||
id: check
|
||||
shell: bash
|
||||
run: |
|
||||
if [ -n "${{ (secrets.GSHEET_KEY != '' ) || '' }}" ]; then
|
||||
if [ -n "${GSHEET_KEY}" ]; then
|
||||
echo "has-secrets=1" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
|
||||
env:
|
||||
GSHEET_KEY: ${{ (secrets.GSHEET_KEY != '' ) || '' }}
|
||||
process-and-upload:
|
||||
needs: config
|
||||
if: needs.config.outputs.has-secrets
|
||||
@@ -30,10 +32,10 @@ jobs:
|
||||
name: Generate Reports
|
||||
steps:
|
||||
- name: Checkout Repository
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@v6
|
||||
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
|
||||
with:
|
||||
node-version-file: './superset-frontend/.nvmrc'
|
||||
|
||||
|
||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -133,6 +133,7 @@ CLAUDE.local.md
|
||||
PROJECT.md
|
||||
.aider*
|
||||
.claude_rc*
|
||||
.claude/settings.local.json
|
||||
.env.local
|
||||
oxc-custom-build/
|
||||
*.code-workspace
|
||||
|
||||
@@ -458,7 +458,7 @@ cd ../
|
||||
sed -i '' "s/version_string = .*/version_string = \"$SUPERSET_VERSION\"/" setup.py
|
||||
|
||||
# build the python distribution
|
||||
python setup.py sdist
|
||||
python -m build
|
||||
```
|
||||
|
||||
Publish to PyPI
|
||||
|
||||
@@ -287,6 +287,11 @@ categories:
|
||||
url: https://www.gfk.com/home
|
||||
contributors: ["@mherr"]
|
||||
|
||||
- name: Hifadih Business & Technology
|
||||
url: https://hifadih.net/en
|
||||
logo: hifadih.png
|
||||
contributors: ["@saintLaurent00"]
|
||||
|
||||
# Logo approved by @anmol-hpe on behalf of HPE
|
||||
- name: HPE
|
||||
url: https://www.hpe.com/in/en/home.html
|
||||
@@ -625,6 +630,9 @@ categories:
|
||||
- name: Stockarea
|
||||
url: https://stockarea.io
|
||||
|
||||
- name: VTG
|
||||
url: https://www.vtg.de
|
||||
|
||||
Sports:
|
||||
- name: Club 25 de Agosto (Femenino / Women's Team)
|
||||
url: https://www.instagram.com/25deagosto.basketfemenino/
|
||||
|
||||
26
UPDATING.md
26
UPDATING.md
@@ -24,6 +24,28 @@ assists people when migrating to a new version.
|
||||
|
||||
## Next
|
||||
|
||||
### Granular Export Controls
|
||||
|
||||
A new feature flag `GRANULAR_EXPORT_CONTROLS` introduces three fine-grained permissions that replace the legacy `can_csv` permission:
|
||||
|
||||
| Permission | Controls |
|
||||
|---|---|
|
||||
| `can_export_data` | CSV, Excel, JSON exports |
|
||||
| `can_export_image` | Screenshot/PDF exports |
|
||||
| `can_copy_clipboard` | Copy-to-clipboard operations |
|
||||
|
||||
When the feature flag is enabled, these permissions are enforced on both the frontend (disabled buttons with tooltips) and backend (403 responses from API endpoints). When disabled, legacy `can_csv` behavior is preserved.
|
||||
|
||||
**Migration behavior:** All three new permissions are granted to every role that currently has `can_csv`, preserving existing access. Admins can then selectively revoke individual export permissions from specific roles as needed.
|
||||
|
||||
### Deck.gl MapBox viewport and opacity controls are functional
|
||||
|
||||
The Deck.gl MapBox chart's **Opacity**, **Default longitude**, **Default latitude**, and **Zoom** controls were previously non-functional — changing them had no effect on the rendered map. These controls are now wired up correctly.
|
||||
|
||||
**Behavior change for existing charts:** Previously, the viewport controls had hard-coded default values (`-122.405293`, `37.772123`, zoom `11` — San Francisco) that were stored in each chart's `form_data` but never applied. The map always used `fitBounds` to center on the data. With this fix, those stored values are now respected, which means existing MapBox charts may open centered on the old default coordinates instead of fitting to data bounds.
|
||||
|
||||
**To restore fit-to-data behavior:** Open the chart in Explore, clear the **Default longitude**, **Default latitude**, and **Zoom** fields in the Viewport section, and re-save the chart.
|
||||
|
||||
### ClickHouse minimum driver version bump
|
||||
|
||||
The minimum required version of `clickhouse-connect` has been raised to `>=0.13.0`. If you are using the ClickHouse connector, please upgrade your `clickhouse-connect` package. The `_mutate_label` workaround that appended hash suffixes to column aliases has also been removed, as it is no longer needed with modern versions of the driver.
|
||||
@@ -300,13 +322,13 @@ Note: Pillow is now a required dependency (previously optional) to support image
|
||||
There's a migration added that can potentially affect a significant number of existing charts.
|
||||
- [32317](https://github.com/apache/superset/pull/32317) The horizontal filter bar feature is now out of testing/beta development and its feature flag `HORIZONTAL_FILTER_BAR` has been removed.
|
||||
- [31590](https://github.com/apache/superset/pull/31590) Marks the begining of intricate work around supporting dynamic Theming, and breaks support for [THEME_OVERRIDES](https://github.com/apache/superset/blob/732de4ac7fae88e29b7f123b6cbb2d7cd411b0e4/superset/config.py#L671) in favor of a new theming system based on AntD V5. Likely this will be in disrepair until settling over the 5.x lifecycle.
|
||||
- [32432](https://github.com/apache/superset/pull/31260) Moves the List Roles FAB view to the frontend and requires `FAB_ADD_SECURITY_API` to be enabled in the configuration and `superset init` to be executed.
|
||||
- [32432](https://github.com/apache/superset/pull/32432) Moves the List Roles FAB view to the frontend and requires `FAB_ADD_SECURITY_API` to be enabled in the configuration and `superset init` to be executed.
|
||||
- [34319](https://github.com/apache/superset/pull/34319) Drill to Detail and Drill By is now supported in Embedded mode, and also with the `DASHBOARD_RBAC` FF. If you don't want to expose these features in Embedded / `DASHBOARD_RBAC`, make sure the roles used for Embedded / `DASHBOARD_RBAC`don't have the required permissions to perform D2D actions.
|
||||
|
||||
## 5.0.0
|
||||
|
||||
- [31976](https://github.com/apache/superset/pull/31976) Removed the `DISABLE_LEGACY_DATASOURCE_EDITOR` feature flag. The previous value of the feature flag was `True` and now the feature is permanently removed.
|
||||
- [31959](https://github.com/apache/superset/pull/32000) Removes CSV_UPLOAD_MAX_SIZE config, use your web server to control file upload size.
|
||||
- [32000](https://github.com/apache/superset/pull/32000) Removes CSV_UPLOAD_MAX_SIZE config, use your web server to control file upload size.
|
||||
- [31959](https://github.com/apache/superset/pull/31959) Removes the following endpoints from data uploads: `/api/v1/database/<id>/<file type>_upload` and `/api/v1/database/<file type>_metadata`, in favour of new one (Details on the PR). And simplifies permissions.
|
||||
- [31844](https://github.com/apache/superset/pull/31844) The `ALERT_REPORTS_EXECUTE_AS` and `THUMBNAILS_EXECUTE_AS` config parameters have been renamed to `ALERT_REPORTS_EXECUTORS` and `THUMBNAILS_EXECUTORS` respectively. A new config flag `CACHE_WARMUP_EXECUTORS` has also been introduced to be able to control which user is used to execute cache warmup tasks. Finally, the config flag `THUMBNAILS_SELENIUM_USER` has been removed. To use a fixed executor for async tasks, use the new `FixedExecutor` class. See the config and docs for more info on setting up different executor profiles.
|
||||
- [31894](https://github.com/apache/superset/pull/31894) Domain sharding is deprecated in favor of HTTP2. The `SUPERSET_WEBSERVER_DOMAINS` configuration will be removed in the next major version (6.0)
|
||||
|
||||
@@ -115,6 +115,10 @@ services:
|
||||
DATABASE_HOST: db-light
|
||||
DATABASE_DB: superset_light
|
||||
POSTGRES_DB: superset_light
|
||||
EXAMPLES_HOST: db-light
|
||||
EXAMPLES_DB: superset_light
|
||||
EXAMPLES_USER: superset
|
||||
EXAMPLES_PASSWORD: superset
|
||||
SUPERSET_CONFIG_PATH: /app/docker/pythonpath_dev/superset_config_docker_light.py
|
||||
GITHUB_HEAD_REF: ${GITHUB_HEAD_REF:-}
|
||||
GITHUB_SHA: ${GITHUB_SHA:-}
|
||||
@@ -137,6 +141,10 @@ services:
|
||||
DATABASE_HOST: db-light
|
||||
DATABASE_DB: superset_light
|
||||
POSTGRES_DB: superset_light
|
||||
EXAMPLES_HOST: db-light
|
||||
EXAMPLES_DB: superset_light
|
||||
EXAMPLES_USER: superset
|
||||
EXAMPLES_PASSWORD: superset
|
||||
SUPERSET_CONFIG_PATH: /app/docker/pythonpath_dev/superset_config_docker_light.py
|
||||
healthcheck:
|
||||
disable: true
|
||||
@@ -157,6 +165,7 @@ services:
|
||||
BUILD_SUPERSET_FRONTEND_IN_DOCKER: true
|
||||
NPM_RUN_PRUNE: false
|
||||
SCARF_ANALYTICS: "${SCARF_ANALYTICS:-}"
|
||||
DISABLE_TS_CHECKER: "${DISABLE_TS_CHECKER:-true}"
|
||||
# configuring the dev-server to use the host.docker.internal to connect to the backend
|
||||
superset: "http://superset-light:8088"
|
||||
# Webpack dev server must bind to 0.0.0.0 to be accessible from outside the container
|
||||
|
||||
@@ -80,7 +80,7 @@ case "${1}" in
|
||||
;;
|
||||
app)
|
||||
echo "Starting web app (using development server)..."
|
||||
flask run -p $PORT --reload --debugger --without-threads --host=0.0.0.0 --exclude-patterns "*/node_modules/*:*/.venv/*:*/build/*:*/__pycache__/*"
|
||||
flask run -p $PORT --reload --debugger --host=0.0.0.0 --exclude-patterns "*/node_modules/*:*/.venv/*:*/build/*:*/__pycache__/*:*/superset-frontend/*"
|
||||
;;
|
||||
app-gunicorn)
|
||||
echo "Starting web app..."
|
||||
|
||||
@@ -1 +1 @@
|
||||
v20.20.0
|
||||
v22.22.0
|
||||
|
||||
@@ -233,6 +233,20 @@ def alert_dynamic_minimal_interval(**kwargs) -> int:
|
||||
ALERT_MINIMUM_INTERVAL = alert_dynamic_minimal_interval
|
||||
```
|
||||
|
||||
## External Link Redirection
|
||||
|
||||
For security, Superset rewrites external links in alert/report email HTML so
|
||||
they go through a warning page before the user is navigated to the external
|
||||
site. Internal links (matching your configured base URL) are not affected.
|
||||
|
||||
```python
|
||||
# Disable external link redirection entirely (default: True)
|
||||
ALERT_REPORTS_ENABLE_LINK_REDIRECT = False
|
||||
```
|
||||
|
||||
The feature uses `WEBDRIVER_BASEURL_USER_FRIENDLY` (or `WEBDRIVER_BASEURL`)
|
||||
to determine which hosts are internal.
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
There are many reasons that reports might not be working. Try these steps to check for specific issues.
|
||||
|
||||
@@ -109,6 +109,14 @@ SECRET_KEY = 'YOUR_OWN_RANDOM_GENERATED_SECRET_KEY'
|
||||
|
||||
You can generate a strong secure key with `openssl rand -base64 42`.
|
||||
|
||||
Alternatively, you can set the secret key using `SUPERSET_SECRET_KEY` environment variable:
|
||||
|
||||
On a Unix-based system, such as Linux or macOS, you can do so by running the following command in your terminal:
|
||||
|
||||
```bash
|
||||
export SUPERSET_SECRET_KEY=$(openssl rand -base64 42)
|
||||
```
|
||||
|
||||
:::caution Use a strong secret key
|
||||
This key will be used for securely signing session cookies and encrypting sensitive information stored in Superset's application metadata database.
|
||||
Your deployment must use a complex, unique key.
|
||||
|
||||
684
docs/admin_docs/configuration/mcp-server.mdx
Normal file
684
docs/admin_docs/configuration/mcp-server.mdx
Normal file
@@ -0,0 +1,684 @@
|
||||
---
|
||||
title: MCP Server Deployment & Authentication
|
||||
hide_title: true
|
||||
sidebar_position: 14
|
||||
version: 1
|
||||
---
|
||||
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
# MCP Server Deployment & Authentication
|
||||
|
||||
Superset includes a built-in [Model Context Protocol (MCP)](https://modelcontextprotocol.io/) server that lets AI assistants -- Claude, ChatGPT, and other MCP-compatible clients -- interact with your Superset instance. Through MCP, clients can list dashboards, query datasets, execute SQL, create charts, and more.
|
||||
|
||||
This guide covers how to run, secure, and deploy the MCP server.
|
||||
|
||||
:::tip Looking for user docs?
|
||||
See **[Using AI with Superset](/user-docs/using-superset/using-ai-with-superset)** for a guide on what AI can do with Superset and how to connect your AI client.
|
||||
:::
|
||||
|
||||
```mermaid
|
||||
flowchart LR
|
||||
A["AI Client<br/>(Claude, ChatGPT, etc.)"] -- "MCP protocol<br/>(HTTP + JSON-RPC)" --> B["MCP Server<br/>(:5008/mcp)"]
|
||||
B -- "Superset context<br/>(app, db, RBAC)" --> C["Superset<br/>(:8088)"]
|
||||
C --> D[("Database<br/>(Postgres)")]
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Quick Start
|
||||
|
||||
Get the MCP server running locally and connect an AI client in three steps.
|
||||
|
||||
### 1. Start the MCP server
|
||||
|
||||
The MCP server runs as a separate process alongside Superset:
|
||||
|
||||
```bash
|
||||
superset mcp run --host 127.0.0.1 --port 5008
|
||||
```
|
||||
|
||||
| Flag | Default | Description |
|
||||
|------|---------|-------------|
|
||||
| `--host` | `127.0.0.1` | Host to bind to |
|
||||
| `--port` | `5008` | Port to bind to |
|
||||
| `--debug` | off | Enable debug logging |
|
||||
|
||||
The endpoint is available at `http://<host>:<port>/mcp`.
|
||||
|
||||
### 2. Set a development user
|
||||
|
||||
For local development, tell the MCP server which Superset user to impersonate (the user must already exist in your database):
|
||||
|
||||
```python
|
||||
# superset_config.py
|
||||
MCP_DEV_USERNAME = "admin"
|
||||
```
|
||||
|
||||
### 3. Connect an AI client
|
||||
|
||||
Point your MCP client at the server. For **Claude Desktop**, edit the config file:
|
||||
|
||||
- **macOS**: `~/Library/Application Support/Claude/claude_desktop_config.json`
|
||||
- **Windows**: `%APPDATA%\Claude\claude_desktop_config.json`
|
||||
- **Linux**: `~/.config/Claude/claude_desktop_config.json`
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"superset": {
|
||||
"url": "http://localhost:5008/mcp"
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Restart Claude Desktop. The hammer icon in the chat bar confirms the connection.
|
||||
|
||||
See [Connecting AI Clients](#connecting-ai-clients) for Claude Code, Claude Web, ChatGPT, and raw HTTP examples.
|
||||
|
||||
---
|
||||
|
||||
## Prerequisites
|
||||
|
||||
- Apache Superset 5.0+ running and accessible
|
||||
- Python 3.10+
|
||||
- The `fastmcp` package (`pip install fastmcp`)
|
||||
|
||||
---
|
||||
|
||||
## Authentication
|
||||
|
||||
The MCP server supports multiple authentication methods depending on your deployment scenario.
|
||||
|
||||
```mermaid
|
||||
flowchart TD
|
||||
R["Incoming MCP Request"] --> F{"MCP_AUTH_FACTORY<br/>set?"}
|
||||
F -- Yes --> CF["Custom Auth Provider"]
|
||||
F -- No --> AE{"MCP_AUTH_ENABLED?"}
|
||||
AE -- "True" --> JWT["JWT Validation"]
|
||||
AE -- "False" --> DU["Dev Mode<br/>(MCP_DEV_USERNAME)"]
|
||||
|
||||
JWT --> ALG{"MCP_JWT_ALGORITHM"}
|
||||
ALG -- "RS256 + JWKS" --> JWKS["Fetch keys from<br/>MCP_JWKS_URI"]
|
||||
ALG -- "RS256 + static" --> PK["Use<br/>MCP_JWT_PUBLIC_KEY"]
|
||||
ALG -- "HS256" --> SEC["Use<br/>MCP_JWT_SECRET"]
|
||||
|
||||
JWKS --> V["Validate token<br/>(exp, iss, aud, scopes)"]
|
||||
PK --> V
|
||||
SEC --> V
|
||||
V --> UR["Resolve Superset user<br/>from token claims"]
|
||||
UR --> OK["Authenticated request"]
|
||||
CF --> OK
|
||||
DU --> OK
|
||||
```
|
||||
|
||||
### Development Mode (No Auth)
|
||||
|
||||
Disable authentication and use a fixed user:
|
||||
|
||||
```python
|
||||
# superset_config.py
|
||||
MCP_AUTH_ENABLED = False
|
||||
MCP_DEV_USERNAME = "admin"
|
||||
```
|
||||
|
||||
All operations run as the configured user.
|
||||
|
||||
:::warning
|
||||
Never use development mode in production. Always enable authentication for any internet-facing deployment.
|
||||
:::
|
||||
|
||||
### JWT Authentication
|
||||
|
||||
For production, enable JWT-based authentication. The MCP server validates a Bearer token on every request.
|
||||
|
||||
#### Option A: RS256 with JWKS endpoint
|
||||
|
||||
The most common setup for OAuth 2.0 / OIDC providers that publish a JWKS (JSON Web Key Set) endpoint:
|
||||
|
||||
```python
|
||||
# superset_config.py
|
||||
MCP_AUTH_ENABLED = True
|
||||
MCP_JWT_ALGORITHM = "RS256"
|
||||
MCP_JWKS_URI = "https://your-identity-provider.com/.well-known/jwks.json"
|
||||
MCP_JWT_ISSUER = "https://your-identity-provider.com/"
|
||||
MCP_JWT_AUDIENCE = "your-superset-instance"
|
||||
```
|
||||
|
||||
#### Option B: RS256 with static public key
|
||||
|
||||
Use this when you have a fixed RSA key pair (e.g., self-signed tokens):
|
||||
|
||||
```python
|
||||
# superset_config.py
|
||||
MCP_AUTH_ENABLED = True
|
||||
MCP_JWT_ALGORITHM = "RS256"
|
||||
MCP_JWT_PUBLIC_KEY = """-----BEGIN PUBLIC KEY-----
|
||||
MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA...
|
||||
-----END PUBLIC KEY-----"""
|
||||
MCP_JWT_ISSUER = "your-issuer"
|
||||
MCP_JWT_AUDIENCE = "your-audience"
|
||||
```
|
||||
|
||||
#### Option C: HS256 with shared secret
|
||||
|
||||
Use this when both the token issuer and the MCP server share a symmetric secret:
|
||||
|
||||
```python
|
||||
# superset_config.py
|
||||
MCP_AUTH_ENABLED = True
|
||||
MCP_JWT_ALGORITHM = "HS256"
|
||||
MCP_JWT_SECRET = "your-shared-secret-key"
|
||||
MCP_JWT_ISSUER = "your-issuer"
|
||||
MCP_JWT_AUDIENCE = "your-audience"
|
||||
```
|
||||
|
||||
:::warning
|
||||
Store `MCP_JWT_SECRET` securely. Never commit it to version control. Use environment variables:
|
||||
```python
|
||||
import os
|
||||
MCP_JWT_SECRET = os.environ.get("MCP_JWT_SECRET")
|
||||
```
|
||||
:::
|
||||
|
||||
#### JWT claims
|
||||
|
||||
The MCP server validates these standard claims:
|
||||
|
||||
| Claim | Config Key | Description |
|
||||
|-------|-----------|-------------|
|
||||
| `exp` | -- | Expiration time (always validated) |
|
||||
| `iss` | `MCP_JWT_ISSUER` | Token issuer (optional but recommended) |
|
||||
| `aud` | `MCP_JWT_AUDIENCE` | Token audience (optional but recommended) |
|
||||
| `sub` | -- | Subject -- primary claim used to resolve the Superset user |
|
||||
|
||||
#### User resolution
|
||||
|
||||
After validating the token, the MCP server resolves a Superset username from the claims. It checks these in order, using the first non-empty value:
|
||||
|
||||
1. `subject` -- the standard `sub` claim (via the access token object)
|
||||
2. `client_id` -- for machine-to-machine tokens
|
||||
3. `payload["sub"]` -- fallback to raw payload
|
||||
4. `payload["email"]` -- email-based lookup
|
||||
5. `payload["username"]` -- explicit username claim
|
||||
|
||||
The resolved value must match a `username` in the Superset `ab_user` table.
|
||||
|
||||
#### Scoped access
|
||||
|
||||
Require specific scopes in the JWT to limit what MCP operations a token can perform:
|
||||
|
||||
```python
|
||||
# superset_config.py
|
||||
MCP_REQUIRED_SCOPES = ["mcp:read", "mcp:write"]
|
||||
```
|
||||
|
||||
Only tokens that include **all** required scopes are accepted.
|
||||
|
||||
### Custom Auth Provider
|
||||
|
||||
For advanced scenarios (e.g., a proprietary auth system), provide a factory function. This takes precedence over all built-in JWT configuration:
|
||||
|
||||
```python
|
||||
# superset_config.py
|
||||
def my_custom_auth_factory(app):
|
||||
"""Return a FastMCP auth provider instance."""
|
||||
from fastmcp.server.auth.providers.jwt import JWTVerifier
|
||||
return JWTVerifier(
|
||||
jwks_uri="https://my-auth.example.com/.well-known/jwks.json",
|
||||
issuer="https://my-auth.example.com/",
|
||||
audience="superset-mcp",
|
||||
)
|
||||
|
||||
MCP_AUTH_FACTORY = my_custom_auth_factory
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Connecting AI Clients
|
||||
|
||||
### Claude Desktop
|
||||
|
||||
**Local development (no auth):**
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"superset": {
|
||||
"url": "http://localhost:5008/mcp"
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**With JWT authentication:**
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"superset": {
|
||||
"command": "npx",
|
||||
"args": [
|
||||
"-y",
|
||||
"mcp-remote@latest",
|
||||
"http://your-superset-host:5008/mcp",
|
||||
"--header",
|
||||
"Authorization: Bearer YOUR_TOKEN"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Claude Code (CLI)
|
||||
|
||||
Add to your project's `.mcp.json`:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"superset": {
|
||||
"type": "url",
|
||||
"url": "http://localhost:5008/mcp"
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
With authentication:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"superset": {
|
||||
"type": "url",
|
||||
"url": "http://localhost:5008/mcp",
|
||||
"headers": {
|
||||
"Authorization": "Bearer YOUR_TOKEN"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Claude Web (claude.ai)
|
||||
|
||||
1. Open [claude.ai](https://claude.ai)
|
||||
2. Click the **+** button (or your profile icon)
|
||||
3. Select **Connectors**
|
||||
4. Click **Manage Connectors** > **Add custom connector**
|
||||
5. Enter a name and your MCP URL (e.g., `https://your-superset-host/mcp`)
|
||||
6. Click **Add**
|
||||
|
||||
:::info
|
||||
Custom connectors on Claude Web require a Pro, Max, Team, or Enterprise plan.
|
||||
:::
|
||||
|
||||
### ChatGPT
|
||||
|
||||
1. Click your profile icon > **Settings** > **Apps and Connectors**
|
||||
2. Enable **Developer Mode** in Advanced Settings
|
||||
3. In the chat composer, press **+** > **Add sources** > **App** > **Connect more** > **Create app**
|
||||
4. Enter a name and your MCP server URL
|
||||
5. Click **I understand and continue**
|
||||
|
||||
:::info
|
||||
ChatGPT MCP connectors require a Pro, Team, Enterprise, or Edu plan.
|
||||
:::
|
||||
|
||||
### Direct HTTP requests
|
||||
|
||||
Call the MCP server directly with any HTTP client:
|
||||
|
||||
```bash
|
||||
curl -X POST http://localhost:5008/mcp \
|
||||
-H 'Content-Type: application/json' \
|
||||
-H 'Authorization: Bearer YOUR_JWT_TOKEN' \
|
||||
-d '{"jsonrpc": "2.0", "method": "tools/list", "id": 1}'
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Deployment
|
||||
|
||||
### Single Process
|
||||
|
||||
The simplest setup: run the MCP server alongside Superset on the same host.
|
||||
|
||||
```mermaid
|
||||
flowchart TD
|
||||
subgraph host["Host / VM"]
|
||||
direction TB
|
||||
S["Superset<br/>:8088"] --> DB[("Postgres")]
|
||||
M["MCP Server<br/>:5008"] --> DB
|
||||
end
|
||||
C["AI Client"] -- "HTTPS" --> P["Reverse Proxy<br/>(Nginx / Caddy)"]
|
||||
U["Browser"] -- "HTTPS" --> P
|
||||
P -- ":8088" --> S
|
||||
P -- ":5008/mcp" --> M
|
||||
```
|
||||
|
||||
**superset_config.py:**
|
||||
|
||||
```python
|
||||
MCP_SERVICE_HOST = "0.0.0.0"
|
||||
MCP_SERVICE_PORT = 5008
|
||||
MCP_DEV_USERNAME = "admin" # or enable JWT auth
|
||||
|
||||
# If behind a reverse proxy, set the public-facing URL so
|
||||
# MCP-generated links (chart previews, SQL Lab URLs) resolve correctly:
|
||||
MCP_SERVICE_URL = "https://superset.example.com"
|
||||
```
|
||||
|
||||
**Start both processes:**
|
||||
|
||||
```bash
|
||||
# Terminal 1 -- Superset web server
|
||||
superset run -h 0.0.0.0 -p 8088
|
||||
|
||||
# Terminal 2 -- MCP server
|
||||
superset mcp run --host 0.0.0.0 --port 5008
|
||||
```
|
||||
|
||||
**Nginx reverse proxy with TLS:**
|
||||
|
||||
```nginx
|
||||
server {
|
||||
listen 443 ssl;
|
||||
server_name superset.example.com;
|
||||
|
||||
ssl_certificate /path/to/cert.pem;
|
||||
ssl_certificate_key /path/to/key.pem;
|
||||
|
||||
# Superset web UI
|
||||
location / {
|
||||
proxy_pass http://127.0.0.1:8088;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
}
|
||||
|
||||
# MCP endpoint
|
||||
location /mcp {
|
||||
proxy_pass http://127.0.0.1:5008/mcp;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header Authorization $http_authorization;
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Docker Compose
|
||||
|
||||
Run Superset and the MCP server as separate containers sharing the same config:
|
||||
|
||||
```yaml
|
||||
# docker-compose.yml
|
||||
services:
|
||||
superset:
|
||||
image: apache/superset:latest
|
||||
ports:
|
||||
- "8088:8088"
|
||||
volumes:
|
||||
- ./superset_config.py:/app/superset_config.py
|
||||
environment:
|
||||
- SUPERSET_CONFIG_PATH=/app/superset_config.py
|
||||
|
||||
mcp:
|
||||
image: apache/superset:latest
|
||||
command: ["superset", "mcp", "run", "--host", "0.0.0.0", "--port", "5008"]
|
||||
ports:
|
||||
- "5008:5008"
|
||||
volumes:
|
||||
- ./superset_config.py:/app/superset_config.py
|
||||
environment:
|
||||
- SUPERSET_CONFIG_PATH=/app/superset_config.py
|
||||
depends_on:
|
||||
- superset
|
||||
```
|
||||
|
||||
Both containers share the same `superset_config.py`, so authentication settings, database connections, and feature flags stay in sync.
|
||||
|
||||
### Multi-Pod (Kubernetes)
|
||||
|
||||
For high-availability deployments, configure Redis so that replicas share session state:
|
||||
|
||||
```mermaid
|
||||
flowchart TD
|
||||
LB["Load Balancer"] --> M1["MCP Pod 1"]
|
||||
LB --> M2["MCP Pod 2"]
|
||||
LB --> M3["MCP Pod 3"]
|
||||
M1 --> R[("Redis<br/>(session store)")]
|
||||
M2 --> R
|
||||
M3 --> R
|
||||
M1 --> DB[("Postgres")]
|
||||
M2 --> DB
|
||||
M3 --> DB
|
||||
```
|
||||
|
||||
**superset_config.py:**
|
||||
|
||||
```python
|
||||
MCP_STORE_CONFIG = {
|
||||
"enabled": True,
|
||||
"CACHE_REDIS_URL": "redis://redis-host:6379/0",
|
||||
"event_store_max_events": 100,
|
||||
"event_store_ttl": 3600,
|
||||
}
|
||||
```
|
||||
|
||||
When `CACHE_REDIS_URL` is set, the MCP server uses a Redis-backed EventStore for session management, allowing replicas to share state. Without Redis, each pod manages its own in-memory sessions and stateful MCP interactions may fail when requests hit different replicas.
|
||||
|
||||
---
|
||||
|
||||
## Configuration Reference
|
||||
|
||||
All MCP settings go in `superset_config.py`. Defaults are defined in `superset/mcp_service/mcp_config.py`.
|
||||
|
||||
### Core
|
||||
|
||||
| Setting | Default | Description |
|
||||
|---------|---------|-------------|
|
||||
| `MCP_SERVICE_HOST` | `"localhost"` | Host the MCP server binds to |
|
||||
| `MCP_SERVICE_PORT` | `5008` | Port the MCP server binds to |
|
||||
| `MCP_SERVICE_URL` | `None` | Public base URL for MCP-generated links (set this when behind a reverse proxy) |
|
||||
| `MCP_DEBUG` | `False` | Enable debug logging |
|
||||
| `MCP_DEV_USERNAME` | -- | Superset username for development mode (no auth) |
|
||||
|
||||
### Authentication
|
||||
|
||||
| Setting | Default | Description |
|
||||
|---------|---------|-------------|
|
||||
| `MCP_AUTH_ENABLED` | `False` | Enable JWT authentication |
|
||||
| `MCP_JWT_ALGORITHM` | `"RS256"` | JWT signing algorithm (`RS256` or `HS256`) |
|
||||
| `MCP_JWKS_URI` | `None` | JWKS endpoint URL (RS256) |
|
||||
| `MCP_JWT_PUBLIC_KEY` | `None` | Static RSA public key string (RS256) |
|
||||
| `MCP_JWT_SECRET` | `None` | Shared secret string (HS256) |
|
||||
| `MCP_JWT_ISSUER` | `None` | Expected `iss` claim |
|
||||
| `MCP_JWT_AUDIENCE` | `None` | Expected `aud` claim |
|
||||
| `MCP_REQUIRED_SCOPES` | `[]` | Required JWT scopes |
|
||||
| `MCP_JWT_DEBUG_ERRORS` | `False` | Log detailed JWT errors server-side (never exposed in HTTP responses per RFC 6750) |
|
||||
| `MCP_AUTH_FACTORY` | `None` | Custom auth provider factory `(flask_app) -> auth_provider`. Takes precedence over built-in JWT |
|
||||
|
||||
### Response Size Guard
|
||||
|
||||
Limits response sizes to prevent exceeding LLM context windows:
|
||||
|
||||
```python
|
||||
MCP_RESPONSE_SIZE_CONFIG = {
|
||||
"enabled": True,
|
||||
"token_limit": 25000,
|
||||
"warn_threshold_pct": 80,
|
||||
"excluded_tools": [
|
||||
"health_check",
|
||||
"get_chart_preview",
|
||||
"generate_explore_link",
|
||||
"open_sql_lab_with_context",
|
||||
],
|
||||
}
|
||||
```
|
||||
|
||||
| Key | Default | Description |
|
||||
|-----|---------|-------------|
|
||||
| `enabled` | `True` | Enable response size checking |
|
||||
| `token_limit` | `25000` | Maximum estimated token count per response |
|
||||
| `warn_threshold_pct` | `80` | Warn when response exceeds this percentage of the limit |
|
||||
| `excluded_tools` | See above | Tools exempt from size checking (e.g., tools that return URLs, not data) |
|
||||
|
||||
### Caching
|
||||
|
||||
Optional response caching for read-heavy workloads. Requires Redis when used with multiple replicas.
|
||||
|
||||
```python
|
||||
MCP_CACHE_CONFIG = {
|
||||
"enabled": False,
|
||||
"CACHE_KEY_PREFIX": None,
|
||||
"list_tools_ttl": 300, # 5 min
|
||||
"list_resources_ttl": 300,
|
||||
"list_prompts_ttl": 300,
|
||||
"read_resource_ttl": 3600, # 1 hour
|
||||
"get_prompt_ttl": 3600,
|
||||
"call_tool_ttl": 3600,
|
||||
"max_item_size": 1048576, # 1 MB
|
||||
"excluded_tools": [
|
||||
"execute_sql",
|
||||
"generate_dashboard",
|
||||
"generate_chart",
|
||||
"update_chart",
|
||||
],
|
||||
}
|
||||
```
|
||||
|
||||
| Key | Default | Description |
|
||||
|-----|---------|-------------|
|
||||
| `enabled` | `False` | Enable response caching |
|
||||
| `CACHE_KEY_PREFIX` | `None` | Optional prefix for cache keys (useful for shared Redis) |
|
||||
| `list_tools_ttl` | `300` | Cache TTL in seconds for `tools/list` |
|
||||
| `list_resources_ttl` | `300` | Cache TTL for `resources/list` |
|
||||
| `list_prompts_ttl` | `300` | Cache TTL for `prompts/list` |
|
||||
| `read_resource_ttl` | `3600` | Cache TTL for `resources/read` |
|
||||
| `get_prompt_ttl` | `3600` | Cache TTL for `prompts/get` |
|
||||
| `call_tool_ttl` | `3600` | Cache TTL for `tools/call` |
|
||||
| `max_item_size` | `1048576` | Maximum cached item size in bytes (1 MB) |
|
||||
| `excluded_tools` | See above | Tools that are never cached (mutating or non-deterministic) |
|
||||
|
||||
### Redis Store (Multi-Pod)
|
||||
|
||||
Enables Redis-backed session and event storage for multi-replica deployments:
|
||||
|
||||
```python
|
||||
MCP_STORE_CONFIG = {
|
||||
"enabled": False,
|
||||
"CACHE_REDIS_URL": None,
|
||||
"event_store_max_events": 100,
|
||||
"event_store_ttl": 3600,
|
||||
}
|
||||
```
|
||||
|
||||
| Key | Default | Description |
|
||||
|-----|---------|-------------|
|
||||
| `enabled` | `False` | Enable Redis-backed store |
|
||||
| `CACHE_REDIS_URL` | `None` | Redis connection URL (e.g., `redis://redis-host:6379/0`) |
|
||||
| `event_store_max_events` | `100` | Maximum events retained per session |
|
||||
| `event_store_ttl` | `3600` | Event TTL in seconds |
|
||||
|
||||
### Session & CSRF
|
||||
|
||||
These values are flat-merged into the Flask app config used by the MCP server process:
|
||||
|
||||
```python
|
||||
MCP_SESSION_CONFIG = {
|
||||
"SESSION_COOKIE_HTTPONLY": True,
|
||||
"SESSION_COOKIE_SECURE": False,
|
||||
"SESSION_COOKIE_SAMESITE": "Lax",
|
||||
"SESSION_COOKIE_NAME": "superset_session",
|
||||
"PERMANENT_SESSION_LIFETIME": 86400,
|
||||
}
|
||||
|
||||
MCP_CSRF_CONFIG = {
|
||||
"WTF_CSRF_ENABLED": True,
|
||||
"WTF_CSRF_TIME_LIMIT": None,
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Server won't start
|
||||
|
||||
- Verify `fastmcp` is installed: `pip install fastmcp`
|
||||
- Check that `MCP_DEV_USERNAME` is set if auth is disabled -- the server requires a user identity
|
||||
- Confirm the port is not already in use: `lsof -i :5008`
|
||||
|
||||
### 401 Unauthorized
|
||||
|
||||
- Verify your JWT token has not expired (`exp` claim)
|
||||
- Check that `MCP_JWT_ISSUER` and `MCP_JWT_AUDIENCE` match the token's `iss` and `aud` claims exactly
|
||||
- For RS256 with JWKS: confirm the JWKS URI is reachable from the MCP server
|
||||
- For RS256 with static key: confirm the public key string includes the `BEGIN`/`END` markers
|
||||
- For HS256: confirm the secret matches between the token issuer and `MCP_JWT_SECRET`
|
||||
- Enable `MCP_JWT_DEBUG_ERRORS = True` for detailed server-side logging (errors are never leaked to the client)
|
||||
|
||||
### Tool not found
|
||||
|
||||
- Ensure the MCP server and Superset share the same `superset_config.py`
|
||||
- Check server logs at startup -- tool registration errors are logged with the tool name and reason
|
||||
|
||||
### Client can't connect
|
||||
|
||||
- Verify the MCP server URL is reachable from the client machine
|
||||
- For Claude Desktop: fully quit the app (not just close the window) and restart after config changes
|
||||
- For remote access: ensure your firewall and reverse proxy allow traffic to the MCP port
|
||||
- Confirm the URL path ends with `/mcp` (e.g., `http://localhost:5008/mcp`)
|
||||
|
||||
### Permission errors on tool calls
|
||||
|
||||
- The MCP server enforces Superset's RBAC permissions -- the authenticated user must have the required roles
|
||||
- In development mode, ensure `MCP_DEV_USERNAME` maps to a user with appropriate roles (e.g., Admin)
|
||||
- Check `superset/security/manager.py` for the specific permission tuples required by each tool domain (e.g., `("can_execute_sql_query", "SQLLab")`)
|
||||
|
||||
### Response too large
|
||||
|
||||
- If a tool call returns an error about exceeding token limits, the response size guard is blocking an oversized result
|
||||
- Reduce `page_size` or `limit` parameters, use `select_columns` to exclude large fields, or add filters to narrow results
|
||||
- To adjust the threshold, change `token_limit` in `MCP_RESPONSE_SIZE_CONFIG`
|
||||
- To disable the guard entirely, set `MCP_RESPONSE_SIZE_CONFIG = {"enabled": False}`
|
||||
|
||||
---
|
||||
|
||||
## Security Best Practices
|
||||
|
||||
- **Use TLS** for all production MCP endpoints -- place the server behind a reverse proxy with HTTPS
|
||||
- **Enable JWT authentication** for any internet-facing deployment
|
||||
- **RBAC enforcement** -- The MCP server respects Superset's role-based access control. Users can only access data their roles permit
|
||||
- **Secrets management** -- Store `MCP_JWT_SECRET`, database credentials, and API keys in environment variables or a secrets manager, never in config files committed to version control
|
||||
- **Scoped tokens** -- Use `MCP_REQUIRED_SCOPES` to limit what operations a token can perform
|
||||
- **Network isolation** -- In Kubernetes, restrict MCP pod network policies to only allow traffic from your AI client endpoints
|
||||
- Review the **[Security documentation](/developer-docs/extensions/security)** for additional extension security guidance
|
||||
|
||||
---
|
||||
|
||||
## Next Steps
|
||||
|
||||
- **[Using AI with Superset](/user-docs/using-superset/using-ai-with-superset)** -- What AI can do with Superset and how to get started
|
||||
- **[MCP Integration](/developer-docs/extensions/mcp)** -- Build custom MCP tools and prompts via Superset extensions
|
||||
- **[Security](/developer-docs/extensions/security)** -- Security best practices for extensions
|
||||
- **[Deployment](/developer-docs/extensions/deployment)** -- Package and deploy Superset extensions
|
||||
@@ -22,6 +22,15 @@ While powerful, this feature executes template code on the server. Within the Su
|
||||
|
||||
If you grant these permissions to untrusted users, this feature can be exploited as a **Server-Side Template Injection (SSTI)** vulnerability. Do not enable `ENABLE_TEMPLATE_PROCESSING` unless you fully understand and accept the associated security risks.
|
||||
|
||||
Additionally:
|
||||
|
||||
- The `url_param()` macro allows URL parameters to influence the rendered SQL. Always validate or restrict `url_param()` values in your templates rather than interpolating them directly.
|
||||
- `filter.get('val')` returns raw filter values without escaping. Use the safe helpers described below (`|where_in`, `| replace("'", "''")`) rather than concatenating values directly into SQL strings.
|
||||
|
||||
:::
|
||||
|
||||
:::tip
|
||||
`ENABLE_TEMPLATE_PROCESSING` defaults to `False`. Only enable it if your deployment requires Jinja templates and all users with dataset/chart edit access are administrators or fully trusted internal users.
|
||||
:::
|
||||
|
||||
When templating is enabled, Python code can be embedded in virtual datasets and
|
||||
@@ -324,6 +333,16 @@ cache hit in the future and Superset can retrieve cached data.
|
||||
The `{{ url_param('custom_variable') }}` macro lets you define arbitrary URL
|
||||
parameters and reference them in your SQL code.
|
||||
|
||||
:::warning
|
||||
Always treat `url_param()` values as untrusted input. Escaping behaviour varies by context and configuration, so do not rely on it. Restrict values to an explicit allowlist before using them in SQL:
|
||||
|
||||
```sql
|
||||
{% set cc = url_param('countrycode') %}
|
||||
{% if cc not in ('US', 'ES', 'FR') %}{% set cc = 'US' %}{% endif %}
|
||||
WHERE country_code = '{{ cc }}'
|
||||
```
|
||||
:::
|
||||
|
||||
Here's a concrete example:
|
||||
|
||||
- You write the following query in SQL Lab:
|
||||
@@ -398,6 +417,16 @@ This is useful if:
|
||||
- You want to handle generating custom SQL conditions for a filter
|
||||
- You want to have the ability to filter inside the main query for speed purposes
|
||||
|
||||
:::warning
|
||||
`filter.get('val')` returns the raw filter value without escaping. For multi-value filters, use the `|where_in` Jinja filter, which handles quoting safely. For single-value operators like `LIKE`, escape single quotes before interpolating:
|
||||
|
||||
```sql
|
||||
{%- if filter.get('op') == 'LIKE' -%}
|
||||
AND full_name LIKE '{{ filter.get('val') | replace("'", "''") }}'
|
||||
{%- endif -%}
|
||||
```
|
||||
:::
|
||||
|
||||
Here's a concrete example:
|
||||
|
||||
```sql
|
||||
@@ -424,7 +453,7 @@ Here's a concrete example:
|
||||
|
||||
{%- if filter.get('op') == 'LIKE' -%}
|
||||
AND
|
||||
full_name LIKE {{ "'" + filter.get('val') + "'" }}
|
||||
full_name LIKE '{{ filter.get('val') | replace("'", "''") }}'
|
||||
{%- endif -%}
|
||||
|
||||
{%- endfor -%}
|
||||
|
||||
@@ -24,6 +24,14 @@ A table with the permissions for these roles can be found at [/RESOURCES/STANDAR
|
||||
Admins have all possible rights, including granting or revoking rights from other
|
||||
users and altering other people’s slices and dashboards.
|
||||
|
||||
>#### Threat Model and Privilege Boundaries: The Admin Role
|
||||
>
|
||||
>Apache Superset is built with a granular permission model where users assigned the Admin role are considered fully trusted. Admins possess complete control over the application's configuration, UI rendering, and access controls.
|
||||
>
|
||||
>Consequently, actions performed by an Admin that alter the application's behavior or presentation—such as injecting custom CSS, modifying Jinja templates, or altering security flags—are intended administrative capabilities by design.
|
||||
>
|
||||
>In accordance with MITRE CNA Rule 4.1, a vulnerability must represent a violation of an explicit security policy. Because the Admin role is defined as a trusted operational boundary, actions executed with Admin privileges do not cross a security perimeter. Therefore, exploit vectors that strictly require Admin access are not classified as security vulnerabilities and are ineligible for CVE assignment.
|
||||
|
||||
### Alpha
|
||||
|
||||
Alpha users have access to all data sources, but they cannot grant or revoke access
|
||||
|
||||
@@ -47,10 +47,10 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
|
||||
|
||||
| Method | Description | Endpoint |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Get the CSRF token](./api/get-the-csrf-token) | `/api/v1/security/csrf_token/` |
|
||||
| `POST` | [Get a guest token](./api/get-a-guest-token) | `/api/v1/security/guest_token/` |
|
||||
| `POST` | [Create security login](./api/create-security-login) | `/api/v1/security/login` |
|
||||
| `POST` | [Create security refresh](./api/create-security-refresh) | `/api/v1/security/refresh` |
|
||||
| `GET` | [Get the CSRF token](/developer-docs/api/get-the-csrf-token) | `/api/v1/security/csrf_token/` |
|
||||
| `POST` | [Get a guest token](/developer-docs/api/get-a-guest-token) | `/api/v1/security/guest_token/` |
|
||||
| `POST` | [Create security login](/developer-docs/api/create-security-login) | `/api/v1/security/login` |
|
||||
| `POST` | [Create security refresh](/developer-docs/api/create-security-refresh) | `/api/v1/security/refresh` |
|
||||
|
||||
---
|
||||
|
||||
@@ -63,32 +63,32 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
|
||||
|
||||
| Method | Description | Endpoint |
|
||||
|--------|----------|-------------|
|
||||
| `DELETE` | [Bulk delete dashboards](./api/bulk-delete-dashboards) | `/api/v1/dashboard/` |
|
||||
| `GET` | [Get a list of dashboards](./api/get-a-list-of-dashboards) | `/api/v1/dashboard/` |
|
||||
| `POST` | [Create a new dashboard](./api/create-a-new-dashboard) | `/api/v1/dashboard/` |
|
||||
| `GET` | [Get metadata information about this API resource (dashboard--info)](./api/get-metadata-information-about-this-api-resource-dashboard-info) | `/api/v1/dashboard/_info` |
|
||||
| `GET` | [Get a dashboard detail information](./api/get-a-dashboard-detail-information) | `/api/v1/dashboard/{id_or_slug}` |
|
||||
| `GET` | [Get a dashboard's chart definitions.](./api/get-a-dashboard-s-chart-definitions) | `/api/v1/dashboard/{id_or_slug}/charts` |
|
||||
| `POST` | [Create a copy of an existing dashboard](./api/create-a-copy-of-an-existing-dashboard) | `/api/v1/dashboard/{id_or_slug}/copy/` |
|
||||
| `GET` | [Get dashboard's datasets](./api/get-dashboard-s-datasets) | `/api/v1/dashboard/{id_or_slug}/datasets` |
|
||||
| `DELETE` | [Delete a dashboard's embedded configuration](./api/delete-a-dashboard-s-embedded-configuration) | `/api/v1/dashboard/{id_or_slug}/embedded` |
|
||||
| `GET` | [Get the dashboard's embedded configuration](./api/get-the-dashboard-s-embedded-configuration) | `/api/v1/dashboard/{id_or_slug}/embedded` |
|
||||
| `POST` | [Set a dashboard's embedded configuration](./api/set-a-dashboard-s-embedded-configuration) | `/api/v1/dashboard/{id_or_slug}/embedded` |
|
||||
| `PUT` | [Update dashboard by id_or_slug embedded](./api/update-dashboard-by-id-or-slug-embedded) | `/api/v1/dashboard/{id_or_slug}/embedded` |
|
||||
| `GET` | [Get dashboard's tabs](./api/get-dashboard-s-tabs) | `/api/v1/dashboard/{id_or_slug}/tabs` |
|
||||
| `DELETE` | [Delete a dashboard](./api/delete-a-dashboard) | `/api/v1/dashboard/{pk}` |
|
||||
| `PUT` | [Update a dashboard](./api/update-a-dashboard) | `/api/v1/dashboard/{pk}` |
|
||||
| `POST` | [Compute and cache a screenshot (dashboard-pk-cache-dashboard-screenshot)](./api/compute-and-cache-a-screenshot-dashboard-pk-cache-dashboard-screenshot) | `/api/v1/dashboard/{pk}/cache_dashboard_screenshot/` |
|
||||
| `PUT` | [Update colors configuration for a dashboard.](./api/update-colors-configuration-for-a-dashboard) | `/api/v1/dashboard/{pk}/colors` |
|
||||
| `DELETE` | [Remove the dashboard from the user favorite list](./api/remove-the-dashboard-from-the-user-favorite-list) | `/api/v1/dashboard/{pk}/favorites/` |
|
||||
| `POST` | [Mark the dashboard as favorite for the current user](./api/mark-the-dashboard-as-favorite-for-the-current-user) | `/api/v1/dashboard/{pk}/favorites/` |
|
||||
| `PUT` | [Update native filters configuration for a dashboard.](./api/update-native-filters-configuration-for-a-dashboard) | `/api/v1/dashboard/{pk}/filters` |
|
||||
| `GET` | [Get a computed screenshot from cache (dashboard-pk-screenshot-digest)](./api/get-a-computed-screenshot-from-cache-dashboard-pk-screenshot-digest) | `/api/v1/dashboard/{pk}/screenshot/{digest}/` |
|
||||
| `GET` | [Get dashboard's thumbnail](./api/get-dashboard-s-thumbnail) | `/api/v1/dashboard/{pk}/thumbnail/{digest}/` |
|
||||
| `GET` | [Download multiple dashboards as YAML files](./api/download-multiple-dashboards-as-yaml-files) | `/api/v1/dashboard/export/` |
|
||||
| `GET` | [Check favorited dashboards for current user](./api/check-favorited-dashboards-for-current-user) | `/api/v1/dashboard/favorite_status/` |
|
||||
| `POST` | [Import dashboard(s) with associated charts/datasets/databases](./api/import-dashboard-s-with-associated-charts-datasets-databases) | `/api/v1/dashboard/import/` |
|
||||
| `GET` | [Get related fields data (dashboard-related-column-name)](./api/get-related-fields-data-dashboard-related-column-name) | `/api/v1/dashboard/related/{column_name}` |
|
||||
| `DELETE` | [Bulk delete dashboards](/developer-docs/api/bulk-delete-dashboards) | `/api/v1/dashboard/` |
|
||||
| `GET` | [Get a list of dashboards](/developer-docs/api/get-a-list-of-dashboards) | `/api/v1/dashboard/` |
|
||||
| `POST` | [Create a new dashboard](/developer-docs/api/create-a-new-dashboard) | `/api/v1/dashboard/` |
|
||||
| `GET` | [Get metadata information about this API resource (dashboard--info)](/developer-docs/api/get-metadata-information-about-this-api-resource-dashboard-info) | `/api/v1/dashboard/_info` |
|
||||
| `GET` | [Get a dashboard detail information](/developer-docs/api/get-a-dashboard-detail-information) | `/api/v1/dashboard/{id_or_slug}` |
|
||||
| `GET` | [Get a dashboard's chart definitions.](/developer-docs/api/get-a-dashboard-s-chart-definitions) | `/api/v1/dashboard/{id_or_slug}/charts` |
|
||||
| `POST` | [Create a copy of an existing dashboard](/developer-docs/api/create-a-copy-of-an-existing-dashboard) | `/api/v1/dashboard/{id_or_slug}/copy/` |
|
||||
| `GET` | [Get dashboard's datasets](/developer-docs/api/get-dashboard-s-datasets) | `/api/v1/dashboard/{id_or_slug}/datasets` |
|
||||
| `DELETE` | [Delete a dashboard's embedded configuration](/developer-docs/api/delete-a-dashboard-s-embedded-configuration) | `/api/v1/dashboard/{id_or_slug}/embedded` |
|
||||
| `GET` | [Get the dashboard's embedded configuration](/developer-docs/api/get-the-dashboard-s-embedded-configuration) | `/api/v1/dashboard/{id_or_slug}/embedded` |
|
||||
| `POST` | [Set a dashboard's embedded configuration](/developer-docs/api/set-a-dashboard-s-embedded-configuration) | `/api/v1/dashboard/{id_or_slug}/embedded` |
|
||||
| `PUT` | [Update dashboard by id_or_slug embedded](/developer-docs/api/update-dashboard-by-id-or-slug-embedded) | `/api/v1/dashboard/{id_or_slug}/embedded` |
|
||||
| `GET` | [Get dashboard's tabs](/developer-docs/api/get-dashboard-s-tabs) | `/api/v1/dashboard/{id_or_slug}/tabs` |
|
||||
| `DELETE` | [Delete a dashboard](/developer-docs/api/delete-a-dashboard) | `/api/v1/dashboard/{pk}` |
|
||||
| `PUT` | [Update a dashboard](/developer-docs/api/update-a-dashboard) | `/api/v1/dashboard/{pk}` |
|
||||
| `POST` | [Compute and cache a screenshot (dashboard-pk-cache-dashboard-screenshot)](/developer-docs/api/compute-and-cache-a-screenshot-dashboard-pk-cache-dashboard-screenshot) | `/api/v1/dashboard/{pk}/cache_dashboard_screenshot/` |
|
||||
| `PUT` | [Update colors configuration for a dashboard.](/developer-docs/api/update-colors-configuration-for-a-dashboard) | `/api/v1/dashboard/{pk}/colors` |
|
||||
| `DELETE` | [Remove the dashboard from the user favorite list](/developer-docs/api/remove-the-dashboard-from-the-user-favorite-list) | `/api/v1/dashboard/{pk}/favorites/` |
|
||||
| `POST` | [Mark the dashboard as favorite for the current user](/developer-docs/api/mark-the-dashboard-as-favorite-for-the-current-user) | `/api/v1/dashboard/{pk}/favorites/` |
|
||||
| `PUT` | [Update native filters configuration for a dashboard.](/developer-docs/api/update-native-filters-configuration-for-a-dashboard) | `/api/v1/dashboard/{pk}/filters` |
|
||||
| `GET` | [Get a computed screenshot from cache (dashboard-pk-screenshot-digest)](/developer-docs/api/get-a-computed-screenshot-from-cache-dashboard-pk-screenshot-digest) | `/api/v1/dashboard/{pk}/screenshot/{digest}/` |
|
||||
| `GET` | [Get dashboard's thumbnail](/developer-docs/api/get-dashboard-s-thumbnail) | `/api/v1/dashboard/{pk}/thumbnail/{digest}/` |
|
||||
| `GET` | [Download multiple dashboards as YAML files](/developer-docs/api/download-multiple-dashboards-as-yaml-files) | `/api/v1/dashboard/export/` |
|
||||
| `GET` | [Check favorited dashboards for current user](/developer-docs/api/check-favorited-dashboards-for-current-user) | `/api/v1/dashboard/favorite_status/` |
|
||||
| `POST` | [Import dashboard(s) with associated charts/datasets/databases](/developer-docs/api/import-dashboard-s-with-associated-charts-datasets-databases) | `/api/v1/dashboard/import/` |
|
||||
| `GET` | [Get related fields data (dashboard-related-column-name)](/developer-docs/api/get-related-fields-data-dashboard-related-column-name) | `/api/v1/dashboard/related/{column_name}` |
|
||||
|
||||
</details>
|
||||
|
||||
@@ -97,26 +97,26 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
|
||||
|
||||
| Method | Description | Endpoint |
|
||||
|--------|----------|-------------|
|
||||
| `DELETE` | [Bulk delete charts](./api/bulk-delete-charts) | `/api/v1/chart/` |
|
||||
| `GET` | [Get a list of charts](./api/get-a-list-of-charts) | `/api/v1/chart/` |
|
||||
| `POST` | [Create a new chart](./api/create-a-new-chart) | `/api/v1/chart/` |
|
||||
| `GET` | [Get metadata information about this API resource (chart--info)](./api/get-metadata-information-about-this-api-resource-chart-info) | `/api/v1/chart/_info` |
|
||||
| `DELETE` | [Delete a chart](./api/delete-a-chart) | `/api/v1/chart/{pk}` |
|
||||
| `GET` | [Get a chart detail information](./api/get-a-chart-detail-information) | `/api/v1/chart/{pk}` |
|
||||
| `PUT` | [Update a chart](./api/update-a-chart) | `/api/v1/chart/{pk}` |
|
||||
| `GET` | [Compute and cache a screenshot (chart-pk-cache-screenshot)](./api/compute-and-cache-a-screenshot-chart-pk-cache-screenshot) | `/api/v1/chart/{pk}/cache_screenshot/` |
|
||||
| `GET` | [Return payload data response for a chart](./api/return-payload-data-response-for-a-chart) | `/api/v1/chart/{pk}/data/` |
|
||||
| `DELETE` | [Remove the chart from the user favorite list](./api/remove-the-chart-from-the-user-favorite-list) | `/api/v1/chart/{pk}/favorites/` |
|
||||
| `POST` | [Mark the chart as favorite for the current user](./api/mark-the-chart-as-favorite-for-the-current-user) | `/api/v1/chart/{pk}/favorites/` |
|
||||
| `GET` | [Get a computed screenshot from cache (chart-pk-screenshot-digest)](./api/get-a-computed-screenshot-from-cache-chart-pk-screenshot-digest) | `/api/v1/chart/{pk}/screenshot/{digest}/` |
|
||||
| `GET` | [Get chart thumbnail](./api/get-chart-thumbnail) | `/api/v1/chart/{pk}/thumbnail/{digest}/` |
|
||||
| `POST` | [Return payload data response for the given query (chart-data)](./api/return-payload-data-response-for-the-given-query-chart-data) | `/api/v1/chart/data` |
|
||||
| `GET` | [Return payload data response for the given query (chart-data-cache-key)](./api/return-payload-data-response-for-the-given-query-chart-data-cache-key) | `/api/v1/chart/data/{cache_key}` |
|
||||
| `GET` | [Download multiple charts as YAML files](./api/download-multiple-charts-as-yaml-files) | `/api/v1/chart/export/` |
|
||||
| `GET` | [Check favorited charts for current user](./api/check-favorited-charts-for-current-user) | `/api/v1/chart/favorite_status/` |
|
||||
| `POST` | [Import chart(s) with associated datasets and databases](./api/import-chart-s-with-associated-datasets-and-databases) | `/api/v1/chart/import/` |
|
||||
| `GET` | [Get related fields data (chart-related-column-name)](./api/get-related-fields-data-chart-related-column-name) | `/api/v1/chart/related/{column_name}` |
|
||||
| `PUT` | [Warm up the cache for the chart](./api/warm-up-the-cache-for-the-chart) | `/api/v1/chart/warm_up_cache` |
|
||||
| `DELETE` | [Bulk delete charts](/developer-docs/api/bulk-delete-charts) | `/api/v1/chart/` |
|
||||
| `GET` | [Get a list of charts](/developer-docs/api/get-a-list-of-charts) | `/api/v1/chart/` |
|
||||
| `POST` | [Create a new chart](/developer-docs/api/create-a-new-chart) | `/api/v1/chart/` |
|
||||
| `GET` | [Get metadata information about this API resource (chart--info)](/developer-docs/api/get-metadata-information-about-this-api-resource-chart-info) | `/api/v1/chart/_info` |
|
||||
| `DELETE` | [Delete a chart](/developer-docs/api/delete-a-chart) | `/api/v1/chart/{pk}` |
|
||||
| `GET` | [Get a chart detail information](/developer-docs/api/get-a-chart-detail-information) | `/api/v1/chart/{pk}` |
|
||||
| `PUT` | [Update a chart](/developer-docs/api/update-a-chart) | `/api/v1/chart/{pk}` |
|
||||
| `GET` | [Compute and cache a screenshot (chart-pk-cache-screenshot)](/developer-docs/api/compute-and-cache-a-screenshot-chart-pk-cache-screenshot) | `/api/v1/chart/{pk}/cache_screenshot/` |
|
||||
| `GET` | [Return payload data response for a chart](/developer-docs/api/return-payload-data-response-for-a-chart) | `/api/v1/chart/{pk}/data/` |
|
||||
| `DELETE` | [Remove the chart from the user favorite list](/developer-docs/api/remove-the-chart-from-the-user-favorite-list) | `/api/v1/chart/{pk}/favorites/` |
|
||||
| `POST` | [Mark the chart as favorite for the current user](/developer-docs/api/mark-the-chart-as-favorite-for-the-current-user) | `/api/v1/chart/{pk}/favorites/` |
|
||||
| `GET` | [Get a computed screenshot from cache (chart-pk-screenshot-digest)](/developer-docs/api/get-a-computed-screenshot-from-cache-chart-pk-screenshot-digest) | `/api/v1/chart/{pk}/screenshot/{digest}/` |
|
||||
| `GET` | [Get chart thumbnail](/developer-docs/api/get-chart-thumbnail) | `/api/v1/chart/{pk}/thumbnail/{digest}/` |
|
||||
| `POST` | [Return payload data response for the given query (chart-data)](/developer-docs/api/return-payload-data-response-for-the-given-query-chart-data) | `/api/v1/chart/data` |
|
||||
| `GET` | [Return payload data response for the given query (chart-data-cache-key)](/developer-docs/api/return-payload-data-response-for-the-given-query-chart-data-cache-key) | `/api/v1/chart/data/{cache_key}` |
|
||||
| `GET` | [Download multiple charts as YAML files](/developer-docs/api/download-multiple-charts-as-yaml-files) | `/api/v1/chart/export/` |
|
||||
| `GET` | [Check favorited charts for current user](/developer-docs/api/check-favorited-charts-for-current-user) | `/api/v1/chart/favorite_status/` |
|
||||
| `POST` | [Import chart(s) with associated datasets and databases](/developer-docs/api/import-chart-s-with-associated-datasets-and-databases) | `/api/v1/chart/import/` |
|
||||
| `GET` | [Get related fields data (chart-related-column-name)](/developer-docs/api/get-related-fields-data-chart-related-column-name) | `/api/v1/chart/related/{column_name}` |
|
||||
| `PUT` | [Warm up the cache for the chart](/developer-docs/api/warm-up-the-cache-for-the-chart) | `/api/v1/chart/warm_up_cache` |
|
||||
|
||||
</details>
|
||||
|
||||
@@ -125,24 +125,24 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
|
||||
|
||||
| Method | Description | Endpoint |
|
||||
|--------|----------|-------------|
|
||||
| `DELETE` | [Bulk delete datasets](./api/bulk-delete-datasets) | `/api/v1/dataset/` |
|
||||
| `GET` | [Get a list of datasets](./api/get-a-list-of-datasets) | `/api/v1/dataset/` |
|
||||
| `POST` | [Create a new dataset](./api/create-a-new-dataset) | `/api/v1/dataset/` |
|
||||
| `GET` | [Get metadata information about this API resource (dataset--info)](./api/get-metadata-information-about-this-api-resource-dataset-info) | `/api/v1/dataset/_info` |
|
||||
| `DELETE` | [Delete a dataset](./api/delete-a-dataset) | `/api/v1/dataset/{pk}` |
|
||||
| `GET` | [Get a dataset](./api/get-a-dataset) | `/api/v1/dataset/{pk}` |
|
||||
| `PUT` | [Update a dataset](./api/update-a-dataset) | `/api/v1/dataset/{pk}` |
|
||||
| `DELETE` | [Delete a dataset column](./api/delete-a-dataset-column) | `/api/v1/dataset/{pk}/column/{column_id}` |
|
||||
| `DELETE` | [Delete a dataset metric](./api/delete-a-dataset-metric) | `/api/v1/dataset/{pk}/metric/{metric_id}` |
|
||||
| `PUT` | [Refresh and update columns of a dataset](./api/refresh-and-update-columns-of-a-dataset) | `/api/v1/dataset/{pk}/refresh` |
|
||||
| `GET` | [Get charts and dashboards count associated to a dataset](./api/get-charts-and-dashboards-count-associated-to-a-dataset) | `/api/v1/dataset/{pk}/related_objects` |
|
||||
| `GET` | [Get distinct values from field data (dataset-distinct-column-name)](./api/get-distinct-values-from-field-data-dataset-distinct-column-name) | `/api/v1/dataset/distinct/{column_name}` |
|
||||
| `POST` | [Duplicate a dataset](./api/duplicate-a-dataset) | `/api/v1/dataset/duplicate` |
|
||||
| `GET` | [Download multiple datasets as YAML files](./api/download-multiple-datasets-as-yaml-files) | `/api/v1/dataset/export/` |
|
||||
| `POST` | [Retrieve a table by name, or create it if it does not exist](./api/retrieve-a-table-by-name-or-create-it-if-it-does-not-exist) | `/api/v1/dataset/get_or_create/` |
|
||||
| `POST` | [Import dataset(s) with associated databases](./api/import-dataset-s-with-associated-databases) | `/api/v1/dataset/import/` |
|
||||
| `GET` | [Get related fields data (dataset-related-column-name)](./api/get-related-fields-data-dataset-related-column-name) | `/api/v1/dataset/related/{column_name}` |
|
||||
| `PUT` | [Warm up the cache for each chart powered by the given table](./api/warm-up-the-cache-for-each-chart-powered-by-the-given-table) | `/api/v1/dataset/warm_up_cache` |
|
||||
| `DELETE` | [Bulk delete datasets](/developer-docs/api/bulk-delete-datasets) | `/api/v1/dataset/` |
|
||||
| `GET` | [Get a list of datasets](/developer-docs/api/get-a-list-of-datasets) | `/api/v1/dataset/` |
|
||||
| `POST` | [Create a new dataset](/developer-docs/api/create-a-new-dataset) | `/api/v1/dataset/` |
|
||||
| `GET` | [Get metadata information about this API resource (dataset--info)](/developer-docs/api/get-metadata-information-about-this-api-resource-dataset-info) | `/api/v1/dataset/_info` |
|
||||
| `DELETE` | [Delete a dataset](/developer-docs/api/delete-a-dataset) | `/api/v1/dataset/{pk}` |
|
||||
| `GET` | [Get a dataset](/developer-docs/api/get-a-dataset) | `/api/v1/dataset/{pk}` |
|
||||
| `PUT` | [Update a dataset](/developer-docs/api/update-a-dataset) | `/api/v1/dataset/{pk}` |
|
||||
| `DELETE` | [Delete a dataset column](/developer-docs/api/delete-a-dataset-column) | `/api/v1/dataset/{pk}/column/{column_id}` |
|
||||
| `DELETE` | [Delete a dataset metric](/developer-docs/api/delete-a-dataset-metric) | `/api/v1/dataset/{pk}/metric/{metric_id}` |
|
||||
| `PUT` | [Refresh and update columns of a dataset](/developer-docs/api/refresh-and-update-columns-of-a-dataset) | `/api/v1/dataset/{pk}/refresh` |
|
||||
| `GET` | [Get charts and dashboards count associated to a dataset](/developer-docs/api/get-charts-and-dashboards-count-associated-to-a-dataset) | `/api/v1/dataset/{pk}/related_objects` |
|
||||
| `GET` | [Get distinct values from field data (dataset-distinct-column-name)](/developer-docs/api/get-distinct-values-from-field-data-dataset-distinct-column-name) | `/api/v1/dataset/distinct/{column_name}` |
|
||||
| `POST` | [Duplicate a dataset](/developer-docs/api/duplicate-a-dataset) | `/api/v1/dataset/duplicate` |
|
||||
| `GET` | [Download multiple datasets as YAML files](/developer-docs/api/download-multiple-datasets-as-yaml-files) | `/api/v1/dataset/export/` |
|
||||
| `POST` | [Retrieve a table by name, or create it if it does not exist](/developer-docs/api/retrieve-a-table-by-name-or-create-it-if-it-does-not-exist) | `/api/v1/dataset/get_or_create/` |
|
||||
| `POST` | [Import dataset(s) with associated databases](/developer-docs/api/import-dataset-s-with-associated-databases) | `/api/v1/dataset/import/` |
|
||||
| `GET` | [Get related fields data (dataset-related-column-name)](/developer-docs/api/get-related-fields-data-dataset-related-column-name) | `/api/v1/dataset/related/{column_name}` |
|
||||
| `PUT` | [Warm up the cache for each chart powered by the given table](/developer-docs/api/warm-up-the-cache-for-each-chart-powered-by-the-given-table) | `/api/v1/dataset/warm_up_cache` |
|
||||
|
||||
</details>
|
||||
|
||||
@@ -151,37 +151,37 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
|
||||
|
||||
| Method | Description | Endpoint |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Get a list of databases](./api/get-a-list-of-databases) | `/api/v1/database/` |
|
||||
| `POST` | [Create a new database](./api/create-a-new-database) | `/api/v1/database/` |
|
||||
| `GET` | [Get metadata information about this API resource (database--info)](./api/get-metadata-information-about-this-api-resource-database-info) | `/api/v1/database/_info` |
|
||||
| `DELETE` | [Delete a database](./api/delete-a-database) | `/api/v1/database/{pk}` |
|
||||
| `GET` | [Get a database](./api/get-a-database) | `/api/v1/database/{pk}` |
|
||||
| `PUT` | [Change a database](./api/change-a-database) | `/api/v1/database/{pk}` |
|
||||
| `GET` | [Get all catalogs from a database](./api/get-all-catalogs-from-a-database) | `/api/v1/database/{pk}/catalogs/` |
|
||||
| `GET` | [Get a database connection info](./api/get-a-database-connection-info) | `/api/v1/database/{pk}/connection` |
|
||||
| `GET` | [Get function names supported by a database](./api/get-function-names-supported-by-a-database) | `/api/v1/database/{pk}/function_names/` |
|
||||
| `GET` | [Get charts and dashboards count associated to a database](./api/get-charts-and-dashboards-count-associated-to-a-database) | `/api/v1/database/{pk}/related_objects/` |
|
||||
| `GET` | [The list of the database schemas where to upload information](./api/the-list-of-the-database-schemas-where-to-upload-information) | `/api/v1/database/{pk}/schemas_access_for_file_upload/` |
|
||||
| `GET` | [Get all schemas from a database](./api/get-all-schemas-from-a-database) | `/api/v1/database/{pk}/schemas/` |
|
||||
| `GET` | [Get database select star for table (database-pk-select-star-table-name)](./api/get-database-select-star-for-table-database-pk-select-star-table-name) | `/api/v1/database/{pk}/select_star/{table_name}/` |
|
||||
| `GET` | [Get database select star for table (database-pk-select-star-table-name-schema-name)](./api/get-database-select-star-for-table-database-pk-select-star-table-name-schema-name) | `/api/v1/database/{pk}/select_star/{table_name}/{schema_name}/` |
|
||||
| `DELETE` | [Delete an SSH tunnel](./api/delete-a-ssh-tunnel) | `/api/v1/database/{pk}/ssh_tunnel/` |
|
||||
| `POST` | [Re-sync all permissions for a database connection](./api/re-sync-all-permissions-for-a-database-connection) | `/api/v1/database/{pk}/sync_permissions/` |
|
||||
| `GET` | [Get table extra metadata (database-pk-table-extra-table-name-schema-name)](./api/get-table-extra-metadata-database-pk-table-extra-table-name-schema-name) | `/api/v1/database/{pk}/table_extra/{table_name}/{schema_name}/` |
|
||||
| `GET` | [Get table metadata](./api/get-table-metadata) | `/api/v1/database/{pk}/table_metadata/` |
|
||||
| `GET` | [Get table extra metadata (database-pk-table-metadata-extra)](./api/get-table-extra-metadata-database-pk-table-metadata-extra) | `/api/v1/database/{pk}/table_metadata/extra/` |
|
||||
| `GET` | [Get database table metadata](./api/get-database-table-metadata) | `/api/v1/database/{pk}/table/{table_name}/{schema_name}/` |
|
||||
| `GET` | [Get a list of tables for given database](./api/get-a-list-of-tables-for-given-database) | `/api/v1/database/{pk}/tables/` |
|
||||
| `POST` | [Upload a file to a database table](./api/upload-a-file-to-a-database-table) | `/api/v1/database/{pk}/upload/` |
|
||||
| `POST` | [Validate arbitrary SQL](./api/validate-arbitrary-sql) | `/api/v1/database/{pk}/validate_sql/` |
|
||||
| `GET` | [Get names of databases currently available](./api/get-names-of-databases-currently-available) | `/api/v1/database/available/` |
|
||||
| `GET` | [Download database(s) and associated dataset(s) as a zip file](./api/download-database-s-and-associated-dataset-s-as-a-zip-file) | `/api/v1/database/export/` |
|
||||
| `POST` | [Import database(s) with associated datasets](./api/import-database-s-with-associated-datasets) | `/api/v1/database/import/` |
|
||||
| `GET` | [Receive personal access tokens from OAuth2](./api/receive-personal-access-tokens-from-oauth2) | `/api/v1/database/oauth2/` |
|
||||
| `GET` | [Get related fields data (database-related-column-name)](./api/get-related-fields-data-database-related-column-name) | `/api/v1/database/related/{column_name}` |
|
||||
| `POST` | [Test a database connection](./api/test-a-database-connection) | `/api/v1/database/test_connection/` |
|
||||
| `POST` | [Upload a file and return file metadata](./api/upload-a-file-and-returns-file-metadata) | `/api/v1/database/upload_metadata/` |
|
||||
| `POST` | [Validate database connection parameters](./api/validate-database-connection-parameters) | `/api/v1/database/validate_parameters/` |
|
||||
| `GET` | [Get a list of databases](/developer-docs/api/get-a-list-of-databases) | `/api/v1/database/` |
|
||||
| `POST` | [Create a new database](/developer-docs/api/create-a-new-database) | `/api/v1/database/` |
|
||||
| `GET` | [Get metadata information about this API resource (database--info)](/developer-docs/api/get-metadata-information-about-this-api-resource-database-info) | `/api/v1/database/_info` |
|
||||
| `DELETE` | [Delete a database](/developer-docs/api/delete-a-database) | `/api/v1/database/{pk}` |
|
||||
| `GET` | [Get a database](/developer-docs/api/get-a-database) | `/api/v1/database/{pk}` |
|
||||
| `PUT` | [Change a database](/developer-docs/api/change-a-database) | `/api/v1/database/{pk}` |
|
||||
| `GET` | [Get all catalogs from a database](/developer-docs/api/get-all-catalogs-from-a-database) | `/api/v1/database/{pk}/catalogs/` |
|
||||
| `GET` | [Get a database connection info](/developer-docs/api/get-a-database-connection-info) | `/api/v1/database/{pk}/connection` |
|
||||
| `GET` | [Get function names supported by a database](/developer-docs/api/get-function-names-supported-by-a-database) | `/api/v1/database/{pk}/function_names/` |
|
||||
| `GET` | [Get charts and dashboards count associated to a database](/developer-docs/api/get-charts-and-dashboards-count-associated-to-a-database) | `/api/v1/database/{pk}/related_objects/` |
|
||||
| `GET` | [The list of the database schemas where to upload information](/developer-docs/api/the-list-of-the-database-schemas-where-to-upload-information) | `/api/v1/database/{pk}/schemas_access_for_file_upload/` |
|
||||
| `GET` | [Get all schemas from a database](/developer-docs/api/get-all-schemas-from-a-database) | `/api/v1/database/{pk}/schemas/` |
|
||||
| `GET` | [Get database select star for table (database-pk-select-star-table-name)](/developer-docs/api/get-database-select-star-for-table-database-pk-select-star-table-name) | `/api/v1/database/{pk}/select_star/{table_name}/` |
|
||||
| `GET` | [Get database select star for table (database-pk-select-star-table-name-schema-name)](/developer-docs/api/get-database-select-star-for-table-database-pk-select-star-table-name-schema-name) | `/api/v1/database/{pk}/select_star/{table_name}/{schema_name}/` |
|
||||
| `DELETE` | [Delete an SSH tunnel](/developer-docs/api/delete-a-ssh-tunnel) | `/api/v1/database/{pk}/ssh_tunnel/` |
|
||||
| `POST` | [Re-sync all permissions for a database connection](/developer-docs/api/re-sync-all-permissions-for-a-database-connection) | `/api/v1/database/{pk}/sync_permissions/` |
|
||||
| `GET` | [Get table extra metadata (database-pk-table-extra-table-name-schema-name)](/developer-docs/api/get-table-extra-metadata-database-pk-table-extra-table-name-schema-name) | `/api/v1/database/{pk}/table_extra/{table_name}/{schema_name}/` |
|
||||
| `GET` | [Get table metadata](/developer-docs/api/get-table-metadata) | `/api/v1/database/{pk}/table_metadata/` |
|
||||
| `GET` | [Get table extra metadata (database-pk-table-metadata-extra)](/developer-docs/api/get-table-extra-metadata-database-pk-table-metadata-extra) | `/api/v1/database/{pk}/table_metadata/extra/` |
|
||||
| `GET` | [Get database table metadata](/developer-docs/api/get-database-table-metadata) | `/api/v1/database/{pk}/table/{table_name}/{schema_name}/` |
|
||||
| `GET` | [Get a list of tables for given database](/developer-docs/api/get-a-list-of-tables-for-given-database) | `/api/v1/database/{pk}/tables/` |
|
||||
| `POST` | [Upload a file to a database table](/developer-docs/api/upload-a-file-to-a-database-table) | `/api/v1/database/{pk}/upload/` |
|
||||
| `POST` | [Validate arbitrary SQL](/developer-docs/api/validate-arbitrary-sql) | `/api/v1/database/{pk}/validate_sql/` |
|
||||
| `GET` | [Get names of databases currently available](/developer-docs/api/get-names-of-databases-currently-available) | `/api/v1/database/available/` |
|
||||
| `GET` | [Download database(s) and associated dataset(s) as a zip file](/developer-docs/api/download-database-s-and-associated-dataset-s-as-a-zip-file) | `/api/v1/database/export/` |
|
||||
| `POST` | [Import database(s) with associated datasets](/developer-docs/api/import-database-s-with-associated-datasets) | `/api/v1/database/import/` |
|
||||
| `GET` | [Receive personal access tokens from OAuth2](/developer-docs/api/receive-personal-access-tokens-from-oauth2) | `/api/v1/database/oauth2/` |
|
||||
| `GET` | [Get related fields data (database-related-column-name)](/developer-docs/api/get-related-fields-data-database-related-column-name) | `/api/v1/database/related/{column_name}` |
|
||||
| `POST` | [Test a database connection](/developer-docs/api/test-a-database-connection) | `/api/v1/database/test_connection/` |
|
||||
| `POST` | [Upload a file and return file metadata](/developer-docs/api/upload-a-file-and-returns-file-metadata) | `/api/v1/database/upload_metadata/` |
|
||||
| `POST` | [Validate database connection parameters](/developer-docs/api/validate-database-connection-parameters) | `/api/v1/database/validate_parameters/` |
|
||||
|
||||
</details>
|
||||
|
||||
@@ -192,7 +192,7 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
|
||||
|
||||
| Method | Description | Endpoint |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Assemble Explore related information in a single endpoint](./api/assemble-explore-related-information-in-a-single-endpoint) | `/api/v1/explore/` |
|
||||
| `GET` | [Assemble Explore related information in a single endpoint](/developer-docs/api/assemble-explore-related-information-in-a-single-endpoint) | `/api/v1/explore/` |
|
||||
|
||||
</details>
|
||||
|
||||
@@ -201,12 +201,12 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
|
||||
|
||||
| Method | Description | Endpoint |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Get the bootstrap data for SqlLab page](./api/get-the-bootstrap-data-for-sqllab-page) | `/api/v1/sqllab/` |
|
||||
| `POST` | [Estimate the SQL query execution cost](./api/estimate-the-sql-query-execution-cost) | `/api/v1/sqllab/estimate/` |
|
||||
| `POST` | [Execute a SQL query](./api/execute-a-sql-query) | `/api/v1/sqllab/execute/` |
|
||||
| `GET` | [Export the SQL query results to a CSV](./api/export-the-sql-query-results-to-a-csv) | `/api/v1/sqllab/export/{client_id}/` |
|
||||
| `POST` | [Format SQL code](./api/format-sql-code) | `/api/v1/sqllab/format_sql/` |
|
||||
| `GET` | [Get the result of a SQL query execution](./api/get-the-result-of-a-sql-query-execution) | `/api/v1/sqllab/results/` |
|
||||
| `GET` | [Get the bootstrap data for SqlLab page](/developer-docs/api/get-the-bootstrap-data-for-sqllab-page) | `/api/v1/sqllab/` |
|
||||
| `POST` | [Estimate the SQL query execution cost](/developer-docs/api/estimate-the-sql-query-execution-cost) | `/api/v1/sqllab/estimate/` |
|
||||
| `POST` | [Execute a SQL query](/developer-docs/api/execute-a-sql-query) | `/api/v1/sqllab/execute/` |
|
||||
| `GET` | [Export the SQL query results to a CSV](/developer-docs/api/export-the-sql-query-results-to-a-csv) | `/api/v1/sqllab/export/{client_id}/` |
|
||||
| `POST` | [Format SQL code](/developer-docs/api/format-sql-code) | `/api/v1/sqllab/format_sql/` |
|
||||
| `GET` | [Get the result of a SQL query execution](/developer-docs/api/get-the-result-of-a-sql-query-execution) | `/api/v1/sqllab/results/` |
|
||||
|
||||
</details>
|
||||
|
||||
@@ -215,23 +215,23 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
|
||||
|
||||
| Method | Description | Endpoint |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Get a list of queries](./api/get-a-list-of-queries) | `/api/v1/query/` |
|
||||
| `GET` | [Get query detail information](./api/get-query-detail-information) | `/api/v1/query/{pk}` |
|
||||
| `GET` | [Get distinct values from field data (query-distinct-column-name)](./api/get-distinct-values-from-field-data-query-distinct-column-name) | `/api/v1/query/distinct/{column_name}` |
|
||||
| `GET` | [Get related fields data (query-related-column-name)](./api/get-related-fields-data-query-related-column-name) | `/api/v1/query/related/{column_name}` |
|
||||
| `POST` | [Manually stop a query with client_id](./api/manually-stop-a-query-with-client-id) | `/api/v1/query/stop` |
|
||||
| `GET` | [Get a list of queries that changed after last_updated_ms](./api/get-a-list-of-queries-that-changed-after-last-updated-ms) | `/api/v1/query/updated_since` |
|
||||
| `DELETE` | [Bulk delete saved queries](./api/bulk-delete-saved-queries) | `/api/v1/saved_query/` |
|
||||
| `GET` | [Get a list of saved queries](./api/get-a-list-of-saved-queries) | `/api/v1/saved_query/` |
|
||||
| `POST` | [Create a saved query](./api/create-a-saved-query) | `/api/v1/saved_query/` |
|
||||
| `GET` | [Get metadata information about this API resource (saved-query--info)](./api/get-metadata-information-about-this-api-resource-saved-query-info) | `/api/v1/saved_query/_info` |
|
||||
| `DELETE` | [Delete a saved query](./api/delete-a-saved-query) | `/api/v1/saved_query/{pk}` |
|
||||
| `GET` | [Get a saved query](./api/get-a-saved-query) | `/api/v1/saved_query/{pk}` |
|
||||
| `PUT` | [Update a saved query](./api/update-a-saved-query) | `/api/v1/saved_query/{pk}` |
|
||||
| `GET` | [Get distinct values from field data (saved-query-distinct-column-name)](./api/get-distinct-values-from-field-data-saved-query-distinct-column-name) | `/api/v1/saved_query/distinct/{column_name}` |
|
||||
| `GET` | [Download multiple saved queries as YAML files](./api/download-multiple-saved-queries-as-yaml-files) | `/api/v1/saved_query/export/` |
|
||||
| `POST` | [Import saved queries with associated databases](./api/import-saved-queries-with-associated-databases) | `/api/v1/saved_query/import/` |
|
||||
| `GET` | [Get related fields data (saved-query-related-column-name)](./api/get-related-fields-data-saved-query-related-column-name) | `/api/v1/saved_query/related/{column_name}` |
|
||||
| `GET` | [Get a list of queries](/developer-docs/api/get-a-list-of-queries) | `/api/v1/query/` |
|
||||
| `GET` | [Get query detail information](/developer-docs/api/get-query-detail-information) | `/api/v1/query/{pk}` |
|
||||
| `GET` | [Get distinct values from field data (query-distinct-column-name)](/developer-docs/api/get-distinct-values-from-field-data-query-distinct-column-name) | `/api/v1/query/distinct/{column_name}` |
|
||||
| `GET` | [Get related fields data (query-related-column-name)](/developer-docs/api/get-related-fields-data-query-related-column-name) | `/api/v1/query/related/{column_name}` |
|
||||
| `POST` | [Manually stop a query with client_id](/developer-docs/api/manually-stop-a-query-with-client-id) | `/api/v1/query/stop` |
|
||||
| `GET` | [Get a list of queries that changed after last_updated_ms](/developer-docs/api/get-a-list-of-queries-that-changed-after-last-updated-ms) | `/api/v1/query/updated_since` |
|
||||
| `DELETE` | [Bulk delete saved queries](/developer-docs/api/bulk-delete-saved-queries) | `/api/v1/saved_query/` |
|
||||
| `GET` | [Get a list of saved queries](/developer-docs/api/get-a-list-of-saved-queries) | `/api/v1/saved_query/` |
|
||||
| `POST` | [Create a saved query](/developer-docs/api/create-a-saved-query) | `/api/v1/saved_query/` |
|
||||
| `GET` | [Get metadata information about this API resource (saved-query--info)](/developer-docs/api/get-metadata-information-about-this-api-resource-saved-query-info) | `/api/v1/saved_query/_info` |
|
||||
| `DELETE` | [Delete a saved query](/developer-docs/api/delete-a-saved-query) | `/api/v1/saved_query/{pk}` |
|
||||
| `GET` | [Get a saved query](/developer-docs/api/get-a-saved-query) | `/api/v1/saved_query/{pk}` |
|
||||
| `PUT` | [Update a saved query](/developer-docs/api/update-a-saved-query) | `/api/v1/saved_query/{pk}` |
|
||||
| `GET` | [Get distinct values from field data (saved-query-distinct-column-name)](/developer-docs/api/get-distinct-values-from-field-data-saved-query-distinct-column-name) | `/api/v1/saved_query/distinct/{column_name}` |
|
||||
| `GET` | [Download multiple saved queries as YAML files](/developer-docs/api/download-multiple-saved-queries-as-yaml-files) | `/api/v1/saved_query/export/` |
|
||||
| `POST` | [Import saved queries with associated databases](/developer-docs/api/import-saved-queries-with-associated-databases) | `/api/v1/saved_query/import/` |
|
||||
| `GET` | [Get related fields data (saved-query-related-column-name)](/developer-docs/api/get-related-fields-data-saved-query-related-column-name) | `/api/v1/saved_query/related/{column_name}` |
|
||||
|
||||
</details>
|
||||
|
||||
@@ -240,7 +240,7 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
|
||||
|
||||
| Method | Description | Endpoint |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Get possible values for a datasource column](./api/get-possible-values-for-a-datasource-column) | `/api/v1/datasource/{datasource_type}/{datasource_id}/column/{column_name}/values/` |
|
||||
| `GET` | [Get possible values for a datasource column](/developer-docs/api/get-possible-values-for-a-datasource-column) | `/api/v1/datasource/{datasource_type}/{datasource_id}/column/{column_name}/values/` |
|
||||
|
||||
</details>
|
||||
|
||||
@@ -249,8 +249,8 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
|
||||
|
||||
| Method | Description | Endpoint |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Return an AdvancedDataTypeResponse](./api/return-an-advanceddatatyperesponse) | `/api/v1/advanced_data_type/convert` |
|
||||
| `GET` | [Return a list of available advanced data types](./api/return-a-list-of-available-advanced-data-types) | `/api/v1/advanced_data_type/types` |
|
||||
| `GET` | [Return an AdvancedDataTypeResponse](/developer-docs/api/return-an-advanceddatatyperesponse) | `/api/v1/advanced_data_type/convert` |
|
||||
| `GET` | [Return a list of available advanced data types](/developer-docs/api/return-a-list-of-available-advanced-data-types) | `/api/v1/advanced_data_type/types` |
|
||||
|
||||
</details>
|
||||
|
||||
@@ -261,21 +261,21 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
|
||||
|
||||
| Method | Description | Endpoint |
|
||||
|--------|----------|-------------|
|
||||
| `DELETE` | [Bulk delete tags](./api/bulk-delete-tags) | `/api/v1/tag/` |
|
||||
| `GET` | [Get a list of tags](./api/get-a-list-of-tags) | `/api/v1/tag/` |
|
||||
| `POST` | [Create a tag](./api/create-a-tag) | `/api/v1/tag/` |
|
||||
| `GET` | [Get metadata information about tag API endpoints](./api/get-metadata-information-about-tag-api-endpoints) | `/api/v1/tag/_info` |
|
||||
| `POST` | [Add tags to an object](./api/add-tags-to-an-object) | `/api/v1/tag/{object_type}/{object_id}/` |
|
||||
| `DELETE` | [Delete a tagged object](./api/delete-a-tagged-object) | `/api/v1/tag/{object_type}/{object_id}/{tag}/` |
|
||||
| `DELETE` | [Delete a tag](./api/delete-a-tag) | `/api/v1/tag/{pk}` |
|
||||
| `GET` | [Get a tag detail information](./api/get-a-tag-detail-information) | `/api/v1/tag/{pk}` |
|
||||
| `PUT` | [Update a tag](./api/update-a-tag) | `/api/v1/tag/{pk}` |
|
||||
| `DELETE` | [Delete tag by pk favorites](./api/delete-tag-by-pk-favorites) | `/api/v1/tag/{pk}/favorites/` |
|
||||
| `POST` | [Create tag by pk favorites](./api/create-tag-by-pk-favorites) | `/api/v1/tag/{pk}/favorites/` |
|
||||
| `POST` | [Bulk create tags and tagged objects](./api/bulk-create-tags-and-tagged-objects) | `/api/v1/tag/bulk_create` |
|
||||
| `GET` | [Get tag favorite status](./api/get-tag-favorite-status) | `/api/v1/tag/favorite_status/` |
|
||||
| `GET` | [Get all objects associated with a tag](./api/get-all-objects-associated-with-a-tag) | `/api/v1/tag/get_objects/` |
|
||||
| `GET` | [Get related fields data (tag-related-column-name)](./api/get-related-fields-data-tag-related-column-name) | `/api/v1/tag/related/{column_name}` |
|
||||
| `DELETE` | [Bulk delete tags](/developer-docs/api/bulk-delete-tags) | `/api/v1/tag/` |
|
||||
| `GET` | [Get a list of tags](/developer-docs/api/get-a-list-of-tags) | `/api/v1/tag/` |
|
||||
| `POST` | [Create a tag](/developer-docs/api/create-a-tag) | `/api/v1/tag/` |
|
||||
| `GET` | [Get metadata information about tag API endpoints](/developer-docs/api/get-metadata-information-about-tag-api-endpoints) | `/api/v1/tag/_info` |
|
||||
| `POST` | [Add tags to an object](/developer-docs/api/add-tags-to-an-object) | `/api/v1/tag/{object_type}/{object_id}/` |
|
||||
| `DELETE` | [Delete a tagged object](/developer-docs/api/delete-a-tagged-object) | `/api/v1/tag/{object_type}/{object_id}/{tag}/` |
|
||||
| `DELETE` | [Delete a tag](/developer-docs/api/delete-a-tag) | `/api/v1/tag/{pk}` |
|
||||
| `GET` | [Get a tag detail information](/developer-docs/api/get-a-tag-detail-information) | `/api/v1/tag/{pk}` |
|
||||
| `PUT` | [Update a tag](/developer-docs/api/update-a-tag) | `/api/v1/tag/{pk}` |
|
||||
| `DELETE` | [Delete tag by pk favorites](/developer-docs/api/delete-tag-by-pk-favorites) | `/api/v1/tag/{pk}/favorites/` |
|
||||
| `POST` | [Create tag by pk favorites](/developer-docs/api/create-tag-by-pk-favorites) | `/api/v1/tag/{pk}/favorites/` |
|
||||
| `POST` | [Bulk create tags and tagged objects](/developer-docs/api/bulk-create-tags-and-tagged-objects) | `/api/v1/tag/bulk_create` |
|
||||
| `GET` | [Get tag favorite status](/developer-docs/api/get-tag-favorite-status) | `/api/v1/tag/favorite_status/` |
|
||||
| `GET` | [Get all objects associated with a tag](/developer-docs/api/get-all-objects-associated-with-a-tag) | `/api/v1/tag/get_objects/` |
|
||||
| `GET` | [Get related fields data (tag-related-column-name)](/developer-docs/api/get-related-fields-data-tag-related-column-name) | `/api/v1/tag/related/{column_name}` |
|
||||
|
||||
</details>
|
||||
|
||||
@@ -284,20 +284,20 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `DELETE` | [Delete multiple annotation layers in a bulk operation](./api/delete-multiple-annotation-layers-in-a-bulk-operation) | `/api/v1/annotation_layer/` |
|
||||
| `GET` | [Get a list of annotation layers (annotation-layer)](./api/get-a-list-of-annotation-layers-annotation-layer) | `/api/v1/annotation_layer/` |
|
||||
| `POST` | [Create an annotation layer (annotation-layer)](./api/create-an-annotation-layer-annotation-layer) | `/api/v1/annotation_layer/` |
|
||||
| `GET` | [Get metadata information about this API resource (annotation-layer--info)](./api/get-metadata-information-about-this-api-resource-annotation-layer-info) | `/api/v1/annotation_layer/_info` |
|
||||
| `DELETE` | [Delete annotation layer (annotation-layer-pk)](./api/delete-annotation-layer-annotation-layer-pk) | `/api/v1/annotation_layer/{pk}` |
|
||||
| `GET` | [Get an annotation layer (annotation-layer-pk)](./api/get-an-annotation-layer-annotation-layer-pk) | `/api/v1/annotation_layer/{pk}` |
|
||||
| `PUT` | [Update an annotation layer (annotation-layer-pk)](./api/update-an-annotation-layer-annotation-layer-pk) | `/api/v1/annotation_layer/{pk}` |
|
||||
| `DELETE` | [Bulk delete annotation layers](./api/bulk-delete-annotation-layers) | `/api/v1/annotation_layer/{pk}/annotation/` |
|
||||
| `GET` | [Get a list of annotation layers (annotation-layer-pk-annotation)](./api/get-a-list-of-annotation-layers-annotation-layer-pk-annotation) | `/api/v1/annotation_layer/{pk}/annotation/` |
|
||||
| `POST` | [Create an annotation layer (annotation-layer-pk-annotation)](./api/create-an-annotation-layer-annotation-layer-pk-annotation) | `/api/v1/annotation_layer/{pk}/annotation/` |
|
||||
| `DELETE` | [Delete annotation layer (annotation-layer-pk-annotation-annotation-id)](./api/delete-annotation-layer-annotation-layer-pk-annotation-annotation-id) | `/api/v1/annotation_layer/{pk}/annotation/{annotation_id}` |
|
||||
| `GET` | [Get an annotation layer (annotation-layer-pk-annotation-annotation-id)](./api/get-an-annotation-layer-annotation-layer-pk-annotation-annotation-id) | `/api/v1/annotation_layer/{pk}/annotation/{annotation_id}` |
|
||||
| `PUT` | [Update an annotation layer (annotation-layer-pk-annotation-annotation-id)](./api/update-an-annotation-layer-annotation-layer-pk-annotation-annotation-id) | `/api/v1/annotation_layer/{pk}/annotation/{annotation_id}` |
|
||||
| `GET` | [Get related fields data (annotation-layer-related-column-name)](./api/get-related-fields-data-annotation-layer-related-column-name) | `/api/v1/annotation_layer/related/{column_name}` |
|
||||
| `DELETE` | [Delete multiple annotation layers in a bulk operation](/developer-docs/api/delete-multiple-annotation-layers-in-a-bulk-operation) | `/api/v1/annotation_layer/` |
|
||||
| `GET` | [Get a list of annotation layers (annotation-layer)](/developer-docs/api/get-a-list-of-annotation-layers-annotation-layer) | `/api/v1/annotation_layer/` |
|
||||
| `POST` | [Create an annotation layer (annotation-layer)](/developer-docs/api/create-an-annotation-layer-annotation-layer) | `/api/v1/annotation_layer/` |
|
||||
| `GET` | [Get metadata information about this API resource (annotation-layer--info)](/developer-docs/api/get-metadata-information-about-this-api-resource-annotation-layer-info) | `/api/v1/annotation_layer/_info` |
|
||||
| `DELETE` | [Delete annotation layer (annotation-layer-pk)](/developer-docs/api/delete-annotation-layer-annotation-layer-pk) | `/api/v1/annotation_layer/{pk}` |
|
||||
| `GET` | [Get an annotation layer (annotation-layer-pk)](/developer-docs/api/get-an-annotation-layer-annotation-layer-pk) | `/api/v1/annotation_layer/{pk}` |
|
||||
| `PUT` | [Update an annotation layer (annotation-layer-pk)](/developer-docs/api/update-an-annotation-layer-annotation-layer-pk) | `/api/v1/annotation_layer/{pk}` |
|
||||
| `DELETE` | [Bulk delete annotation layers](/developer-docs/api/bulk-delete-annotation-layers) | `/api/v1/annotation_layer/{pk}/annotation/` |
|
||||
| `GET` | [Get a list of annotation layers (annotation-layer-pk-annotation)](/developer-docs/api/get-a-list-of-annotation-layers-annotation-layer-pk-annotation) | `/api/v1/annotation_layer/{pk}/annotation/` |
|
||||
| `POST` | [Create an annotation layer (annotation-layer-pk-annotation)](/developer-docs/api/create-an-annotation-layer-annotation-layer-pk-annotation) | `/api/v1/annotation_layer/{pk}/annotation/` |
|
||||
| `DELETE` | [Delete annotation layer (annotation-layer-pk-annotation-annotation-id)](/developer-docs/api/delete-annotation-layer-annotation-layer-pk-annotation-annotation-id) | `/api/v1/annotation_layer/{pk}/annotation/{annotation_id}` |
|
||||
| `GET` | [Get an annotation layer (annotation-layer-pk-annotation-annotation-id)](/developer-docs/api/get-an-annotation-layer-annotation-layer-pk-annotation-annotation-id) | `/api/v1/annotation_layer/{pk}/annotation/{annotation_id}` |
|
||||
| `PUT` | [Update an annotation layer (annotation-layer-pk-annotation-annotation-id)](/developer-docs/api/update-an-annotation-layer-annotation-layer-pk-annotation-annotation-id) | `/api/v1/annotation_layer/{pk}/annotation/{annotation_id}` |
|
||||
| `GET` | [Get related fields data (annotation-layer-related-column-name)](/developer-docs/api/get-related-fields-data-annotation-layer-related-column-name) | `/api/v1/annotation_layer/related/{column_name}` |
|
||||
|
||||
</details>
|
||||
|
||||
@@ -306,14 +306,14 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `DELETE` | [Bulk delete CSS templates](./api/bulk-delete-css-templates) | `/api/v1/css_template/` |
|
||||
| `GET` | [Get a list of CSS templates](./api/get-a-list-of-css-templates) | `/api/v1/css_template/` |
|
||||
| `POST` | [Create a CSS template](./api/create-a-css-template) | `/api/v1/css_template/` |
|
||||
| `GET` | [Get metadata information about this API resource (css-template--info)](./api/get-metadata-information-about-this-api-resource-css-template-info) | `/api/v1/css_template/_info` |
|
||||
| `DELETE` | [Delete a CSS template](./api/delete-a-css-template) | `/api/v1/css_template/{pk}` |
|
||||
| `GET` | [Get a CSS template](./api/get-a-css-template) | `/api/v1/css_template/{pk}` |
|
||||
| `PUT` | [Update a CSS template](./api/update-a-css-template) | `/api/v1/css_template/{pk}` |
|
||||
| `GET` | [Get related fields data (css-template-related-column-name)](./api/get-related-fields-data-css-template-related-column-name) | `/api/v1/css_template/related/{column_name}` |
|
||||
| `DELETE` | [Bulk delete CSS templates](/developer-docs/api/bulk-delete-css-templates) | `/api/v1/css_template/` |
|
||||
| `GET` | [Get a list of CSS templates](/developer-docs/api/get-a-list-of-css-templates) | `/api/v1/css_template/` |
|
||||
| `POST` | [Create a CSS template](/developer-docs/api/create-a-css-template) | `/api/v1/css_template/` |
|
||||
| `GET` | [Get metadata information about this API resource (css-template--info)](/developer-docs/api/get-metadata-information-about-this-api-resource-css-template-info) | `/api/v1/css_template/_info` |
|
||||
| `DELETE` | [Delete a CSS template](/developer-docs/api/delete-a-css-template) | `/api/v1/css_template/{pk}` |
|
||||
| `GET` | [Get a CSS template](/developer-docs/api/get-a-css-template) | `/api/v1/css_template/{pk}` |
|
||||
| `PUT` | [Update a CSS template](/developer-docs/api/update-a-css-template) | `/api/v1/css_template/{pk}` |
|
||||
| `GET` | [Get related fields data (css-template-related-column-name)](/developer-docs/api/get-related-fields-data-css-template-related-column-name) | `/api/v1/css_template/related/{column_name}` |
|
||||
|
||||
</details>
|
||||
|
||||
@@ -324,8 +324,8 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `POST` | [Create a new dashboard's permanent link](./api/create-a-new-dashboard-s-permanent-link) | `/api/v1/dashboard/{pk}/permalink` |
|
||||
| `GET` | [Get dashboard's permanent link state](./api/get-dashboard-s-permanent-link-state) | `/api/v1/dashboard/permalink/{key}` |
|
||||
| `POST` | [Create a new dashboard's permanent link](/developer-docs/api/create-a-new-dashboard-s-permanent-link) | `/api/v1/dashboard/{pk}/permalink` |
|
||||
| `GET` | [Get dashboard's permanent link state](/developer-docs/api/get-dashboard-s-permanent-link-state) | `/api/v1/dashboard/permalink/{key}` |
|
||||
|
||||
</details>
|
||||
|
||||
@@ -334,8 +334,8 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `POST` | [Create a new permanent link (explore-permalink)](./api/create-a-new-permanent-link-explore-permalink) | `/api/v1/explore/permalink` |
|
||||
| `GET` | [Get chart's permanent link state](./api/get-chart-s-permanent-link-state) | `/api/v1/explore/permalink/{key}` |
|
||||
| `POST` | [Create a new permanent link (explore-permalink)](/developer-docs/api/create-a-new-permanent-link-explore-permalink) | `/api/v1/explore/permalink` |
|
||||
| `GET` | [Get chart's permanent link state](/developer-docs/api/get-chart-s-permanent-link-state) | `/api/v1/explore/permalink/{key}` |
|
||||
|
||||
</details>
|
||||
|
||||
@@ -344,8 +344,8 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `POST` | [Create a new permanent link (sqllab-permalink)](./api/create-a-new-permanent-link-sqllab-permalink) | `/api/v1/sqllab/permalink` |
|
||||
| `GET` | [Get permanent link state for SQLLab editor.](./api/get-permanent-link-state-for-sqllab-editor) | `/api/v1/sqllab/permalink/{key}` |
|
||||
| `POST` | [Create a new permanent link (sqllab-permalink)](/developer-docs/api/create-a-new-permanent-link-sqllab-permalink) | `/api/v1/sqllab/permalink` |
|
||||
| `GET` | [Get permanent link state for SQLLab editor.](/developer-docs/api/get-permanent-link-state-for-sqllab-editor) | `/api/v1/sqllab/permalink/{key}` |
|
||||
|
||||
</details>
|
||||
|
||||
@@ -354,7 +354,7 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Get a report schedule log (embedded-dashboard-uuid)](./api/get-a-report-schedule-log-embedded-dashboard-uuid) | `/api/v1/embedded_dashboard/{uuid}` |
|
||||
| `GET` | [Get a report schedule log (embedded-dashboard-uuid)](/developer-docs/api/get-a-report-schedule-log-embedded-dashboard-uuid) | `/api/v1/embedded_dashboard/{uuid}` |
|
||||
|
||||
</details>
|
||||
|
||||
@@ -363,10 +363,10 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `POST` | [Create a dashboard's filter state](./api/create-a-dashboard-s-filter-state) | `/api/v1/dashboard/{pk}/filter_state` |
|
||||
| `DELETE` | [Delete a dashboard's filter state value](./api/delete-a-dashboard-s-filter-state-value) | `/api/v1/dashboard/{pk}/filter_state/{key}` |
|
||||
| `GET` | [Get a dashboard's filter state value](./api/get-a-dashboard-s-filter-state-value) | `/api/v1/dashboard/{pk}/filter_state/{key}` |
|
||||
| `PUT` | [Update a dashboard's filter state value](./api/update-a-dashboard-s-filter-state-value) | `/api/v1/dashboard/{pk}/filter_state/{key}` |
|
||||
| `POST` | [Create a dashboard's filter state](/developer-docs/api/create-a-dashboard-s-filter-state) | `/api/v1/dashboard/{pk}/filter_state` |
|
||||
| `DELETE` | [Delete a dashboard's filter state value](/developer-docs/api/delete-a-dashboard-s-filter-state-value) | `/api/v1/dashboard/{pk}/filter_state/{key}` |
|
||||
| `GET` | [Get a dashboard's filter state value](/developer-docs/api/get-a-dashboard-s-filter-state-value) | `/api/v1/dashboard/{pk}/filter_state/{key}` |
|
||||
| `PUT` | [Update a dashboard's filter state value](/developer-docs/api/update-a-dashboard-s-filter-state-value) | `/api/v1/dashboard/{pk}/filter_state/{key}` |
|
||||
|
||||
</details>
|
||||
|
||||
@@ -375,10 +375,10 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `POST` | [Create a new form_data](./api/create-a-new-form-data) | `/api/v1/explore/form_data` |
|
||||
| `DELETE` | [Delete a form_data](./api/delete-a-form-data) | `/api/v1/explore/form_data/{key}` |
|
||||
| `GET` | [Get a form_data](./api/get-a-form-data) | `/api/v1/explore/form_data/{key}` |
|
||||
| `PUT` | [Update an existing form_data](./api/update-an-existing-form-data) | `/api/v1/explore/form_data/{key}` |
|
||||
| `POST` | [Create a new form_data](/developer-docs/api/create-a-new-form-data) | `/api/v1/explore/form_data` |
|
||||
| `DELETE` | [Delete a form_data](/developer-docs/api/delete-a-form-data) | `/api/v1/explore/form_data/{key}` |
|
||||
| `GET` | [Get a form_data](/developer-docs/api/get-a-form-data) | `/api/v1/explore/form_data/{key}` |
|
||||
| `PUT` | [Update an existing form_data](/developer-docs/api/update-an-existing-form-data) | `/api/v1/explore/form_data/{key}` |
|
||||
|
||||
</details>
|
||||
|
||||
@@ -389,17 +389,17 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `DELETE` | [Bulk delete report schedules](./api/bulk-delete-report-schedules) | `/api/v1/report/` |
|
||||
| `GET` | [Get a list of report schedules](./api/get-a-list-of-report-schedules) | `/api/v1/report/` |
|
||||
| `POST` | [Create a report schedule](./api/create-a-report-schedule) | `/api/v1/report/` |
|
||||
| `GET` | [Get metadata information about this API resource (report--info)](./api/get-metadata-information-about-this-api-resource-report-info) | `/api/v1/report/_info` |
|
||||
| `DELETE` | [Delete a report schedule](./api/delete-a-report-schedule) | `/api/v1/report/{pk}` |
|
||||
| `GET` | [Get a report schedule](./api/get-a-report-schedule) | `/api/v1/report/{pk}` |
|
||||
| `PUT` | [Update a report schedule](./api/update-a-report-schedule) | `/api/v1/report/{pk}` |
|
||||
| `GET` | [Get a list of report schedule logs](./api/get-a-list-of-report-schedule-logs) | `/api/v1/report/{pk}/log/` |
|
||||
| `GET` | [Get a report schedule log (report-pk-log-log-id)](./api/get-a-report-schedule-log-report-pk-log-log-id) | `/api/v1/report/{pk}/log/{log_id}` |
|
||||
| `GET` | [Get related fields data (report-related-column-name)](./api/get-related-fields-data-report-related-column-name) | `/api/v1/report/related/{column_name}` |
|
||||
| `GET` | [Get slack channels](./api/get-slack-channels) | `/api/v1/report/slack_channels/` |
|
||||
| `DELETE` | [Bulk delete report schedules](/developer-docs/api/bulk-delete-report-schedules) | `/api/v1/report/` |
|
||||
| `GET` | [Get a list of report schedules](/developer-docs/api/get-a-list-of-report-schedules) | `/api/v1/report/` |
|
||||
| `POST` | [Create a report schedule](/developer-docs/api/create-a-report-schedule) | `/api/v1/report/` |
|
||||
| `GET` | [Get metadata information about this API resource (report--info)](/developer-docs/api/get-metadata-information-about-this-api-resource-report-info) | `/api/v1/report/_info` |
|
||||
| `DELETE` | [Delete a report schedule](/developer-docs/api/delete-a-report-schedule) | `/api/v1/report/{pk}` |
|
||||
| `GET` | [Get a report schedule](/developer-docs/api/get-a-report-schedule) | `/api/v1/report/{pk}` |
|
||||
| `PUT` | [Update a report schedule](/developer-docs/api/update-a-report-schedule) | `/api/v1/report/{pk}` |
|
||||
| `GET` | [Get a list of report schedule logs](/developer-docs/api/get-a-list-of-report-schedule-logs) | `/api/v1/report/{pk}/log/` |
|
||||
| `GET` | [Get a report schedule log (report-pk-log-log-id)](/developer-docs/api/get-a-report-schedule-log-report-pk-log-log-id) | `/api/v1/report/{pk}/log/{log_id}` |
|
||||
| `GET` | [Get related fields data (report-related-column-name)](/developer-docs/api/get-related-fields-data-report-related-column-name) | `/api/v1/report/related/{column_name}` |
|
||||
| `GET` | [Get slack channels](/developer-docs/api/get-slack-channels) | `/api/v1/report/slack_channels/` |
|
||||
|
||||
</details>
|
||||
|
||||
@@ -410,16 +410,16 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Get security roles](./api/get-security-roles) | `/api/v1/security/roles/` |
|
||||
| `POST` | [Create security roles](./api/create-security-roles) | `/api/v1/security/roles/` |
|
||||
| `GET` | [Get security roles info](./api/get-security-roles-info) | `/api/v1/security/roles/_info` |
|
||||
| `DELETE` | [Delete security roles by pk](./api/delete-security-roles-by-pk) | `/api/v1/security/roles/{pk}` |
|
||||
| `GET` | [Get security roles by pk](./api/get-security-roles-by-pk) | `/api/v1/security/roles/{pk}` |
|
||||
| `PUT` | [Update security roles by pk](./api/update-security-roles-by-pk) | `/api/v1/security/roles/{pk}` |
|
||||
| `POST` | [Create security roles by role_id permissions](./api/create-security-roles-by-role-id-permissions) | `/api/v1/security/roles/{role_id}/permissions` |
|
||||
| `GET` | [Get security roles by role_id permissions](./api/get-security-roles-by-role-id-permissions) | `/api/v1/security/roles/{role_id}/permissions/` |
|
||||
| `PUT` | [Update security roles by role_id users](./api/update-security-roles-by-role-id-users) | `/api/v1/security/roles/{role_id}/users` |
|
||||
| `GET` | [List roles](./api/list-roles) | `/api/v1/security/roles/search/` |
|
||||
| `GET` | [Get security roles](/developer-docs/api/get-security-roles) | `/api/v1/security/roles/` |
|
||||
| `POST` | [Create security roles](/developer-docs/api/create-security-roles) | `/api/v1/security/roles/` |
|
||||
| `GET` | [Get security roles info](/developer-docs/api/get-security-roles-info) | `/api/v1/security/roles/_info` |
|
||||
| `DELETE` | [Delete security roles by pk](/developer-docs/api/delete-security-roles-by-pk) | `/api/v1/security/roles/{pk}` |
|
||||
| `GET` | [Get security roles by pk](/developer-docs/api/get-security-roles-by-pk) | `/api/v1/security/roles/{pk}` |
|
||||
| `PUT` | [Update security roles by pk](/developer-docs/api/update-security-roles-by-pk) | `/api/v1/security/roles/{pk}` |
|
||||
| `POST` | [Create security roles by role_id permissions](/developer-docs/api/create-security-roles-by-role-id-permissions) | `/api/v1/security/roles/{role_id}/permissions` |
|
||||
| `GET` | [Get security roles by role_id permissions](/developer-docs/api/get-security-roles-by-role-id-permissions) | `/api/v1/security/roles/{role_id}/permissions/` |
|
||||
| `PUT` | [Update security roles by role_id users](/developer-docs/api/update-security-roles-by-role-id-users) | `/api/v1/security/roles/{role_id}/users` |
|
||||
| `GET` | [List roles](/developer-docs/api/list-roles) | `/api/v1/security/roles/search/` |
|
||||
|
||||
</details>
|
||||
|
||||
@@ -428,12 +428,12 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Get security users](./api/get-security-users) | `/api/v1/security/users/` |
|
||||
| `POST` | [Create security users](./api/create-security-users) | `/api/v1/security/users/` |
|
||||
| `GET` | [Get security users info](./api/get-security-users-info) | `/api/v1/security/users/_info` |
|
||||
| `DELETE` | [Delete security users by pk](./api/delete-security-users-by-pk) | `/api/v1/security/users/{pk}` |
|
||||
| `GET` | [Get security users by pk](./api/get-security-users-by-pk) | `/api/v1/security/users/{pk}` |
|
||||
| `PUT` | [Update security users by pk](./api/update-security-users-by-pk) | `/api/v1/security/users/{pk}` |
|
||||
| `GET` | [Get security users](/developer-docs/api/get-security-users) | `/api/v1/security/users/` |
|
||||
| `POST` | [Create security users](/developer-docs/api/create-security-users) | `/api/v1/security/users/` |
|
||||
| `GET` | [Get security users info](/developer-docs/api/get-security-users-info) | `/api/v1/security/users/_info` |
|
||||
| `DELETE` | [Delete security users by pk](/developer-docs/api/delete-security-users-by-pk) | `/api/v1/security/users/{pk}` |
|
||||
| `GET` | [Get security users by pk](/developer-docs/api/get-security-users-by-pk) | `/api/v1/security/users/{pk}` |
|
||||
| `PUT` | [Update security users by pk](/developer-docs/api/update-security-users-by-pk) | `/api/v1/security/users/{pk}` |
|
||||
|
||||
</details>
|
||||
|
||||
@@ -442,9 +442,9 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Get security permissions](./api/get-security-permissions) | `/api/v1/security/permissions/` |
|
||||
| `GET` | [Get security permissions info](./api/get-security-permissions-info) | `/api/v1/security/permissions/_info` |
|
||||
| `GET` | [Get security permissions by pk](./api/get-security-permissions-by-pk) | `/api/v1/security/permissions/{pk}` |
|
||||
| `GET` | [Get security permissions](/developer-docs/api/get-security-permissions) | `/api/v1/security/permissions/` |
|
||||
| `GET` | [Get security permissions info](/developer-docs/api/get-security-permissions-info) | `/api/v1/security/permissions/_info` |
|
||||
| `GET` | [Get security permissions by pk](/developer-docs/api/get-security-permissions-by-pk) | `/api/v1/security/permissions/{pk}` |
|
||||
|
||||
</details>
|
||||
|
||||
@@ -453,12 +453,12 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Get security resources](./api/get-security-resources) | `/api/v1/security/resources/` |
|
||||
| `POST` | [Create security resources](./api/create-security-resources) | `/api/v1/security/resources/` |
|
||||
| `GET` | [Get security resources info](./api/get-security-resources-info) | `/api/v1/security/resources/_info` |
|
||||
| `DELETE` | [Delete security resources by pk](./api/delete-security-resources-by-pk) | `/api/v1/security/resources/{pk}` |
|
||||
| `GET` | [Get security resources by pk](./api/get-security-resources-by-pk) | `/api/v1/security/resources/{pk}` |
|
||||
| `PUT` | [Update security resources by pk](./api/update-security-resources-by-pk) | `/api/v1/security/resources/{pk}` |
|
||||
| `GET` | [Get security resources](/developer-docs/api/get-security-resources) | `/api/v1/security/resources/` |
|
||||
| `POST` | [Create security resources](/developer-docs/api/create-security-resources) | `/api/v1/security/resources/` |
|
||||
| `GET` | [Get security resources info](/developer-docs/api/get-security-resources-info) | `/api/v1/security/resources/_info` |
|
||||
| `DELETE` | [Delete security resources by pk](/developer-docs/api/delete-security-resources-by-pk) | `/api/v1/security/resources/{pk}` |
|
||||
| `GET` | [Get security resources by pk](/developer-docs/api/get-security-resources-by-pk) | `/api/v1/security/resources/{pk}` |
|
||||
| `PUT` | [Update security resources by pk](/developer-docs/api/update-security-resources-by-pk) | `/api/v1/security/resources/{pk}` |
|
||||
|
||||
</details>
|
||||
|
||||
@@ -467,12 +467,12 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Get security permissions resources](./api/get-security-permissions-resources) | `/api/v1/security/permissions-resources/` |
|
||||
| `POST` | [Create security permissions resources](./api/create-security-permissions-resources) | `/api/v1/security/permissions-resources/` |
|
||||
| `GET` | [Get security permissions resources info](./api/get-security-permissions-resources-info) | `/api/v1/security/permissions-resources/_info` |
|
||||
| `DELETE` | [Delete security permissions resources by pk](./api/delete-security-permissions-resources-by-pk) | `/api/v1/security/permissions-resources/{pk}` |
|
||||
| `GET` | [Get security permissions resources by pk](./api/get-security-permissions-resources-by-pk) | `/api/v1/security/permissions-resources/{pk}` |
|
||||
| `PUT` | [Update security permissions resources by pk](./api/update-security-permissions-resources-by-pk) | `/api/v1/security/permissions-resources/{pk}` |
|
||||
| `GET` | [Get security permissions resources](/developer-docs/api/get-security-permissions-resources) | `/api/v1/security/permissions-resources/` |
|
||||
| `POST` | [Create security permissions resources](/developer-docs/api/create-security-permissions-resources) | `/api/v1/security/permissions-resources/` |
|
||||
| `GET` | [Get security permissions resources info](/developer-docs/api/get-security-permissions-resources-info) | `/api/v1/security/permissions-resources/_info` |
|
||||
| `DELETE` | [Delete security permissions resources by pk](/developer-docs/api/delete-security-permissions-resources-by-pk) | `/api/v1/security/permissions-resources/{pk}` |
|
||||
| `GET` | [Get security permissions resources by pk](/developer-docs/api/get-security-permissions-resources-by-pk) | `/api/v1/security/permissions-resources/{pk}` |
|
||||
| `PUT` | [Update security permissions resources by pk](/developer-docs/api/update-security-permissions-resources-by-pk) | `/api/v1/security/permissions-resources/{pk}` |
|
||||
|
||||
</details>
|
||||
|
||||
@@ -481,14 +481,14 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `DELETE` | [Bulk delete RLS rules](./api/bulk-delete-rls-rules) | `/api/v1/rowlevelsecurity/` |
|
||||
| `GET` | [Get a list of RLS](./api/get-a-list-of-rls) | `/api/v1/rowlevelsecurity/` |
|
||||
| `POST` | [Create a new RLS rule](./api/create-a-new-rls-rule) | `/api/v1/rowlevelsecurity/` |
|
||||
| `GET` | [Get metadata information about this API resource (rowlevelsecurity--info)](./api/get-metadata-information-about-this-api-resource-rowlevelsecurity-info) | `/api/v1/rowlevelsecurity/_info` |
|
||||
| `DELETE` | [Delete an RLS](./api/delete-an-rls) | `/api/v1/rowlevelsecurity/{pk}` |
|
||||
| `GET` | [Get an RLS](./api/get-an-rls) | `/api/v1/rowlevelsecurity/{pk}` |
|
||||
| `PUT` | [Update an RLS rule](./api/update-an-rls-rule) | `/api/v1/rowlevelsecurity/{pk}` |
|
||||
| `GET` | [Get related fields data (rowlevelsecurity-related-column-name)](./api/get-related-fields-data-rowlevelsecurity-related-column-name) | `/api/v1/rowlevelsecurity/related/{column_name}` |
|
||||
| `DELETE` | [Bulk delete RLS rules](/developer-docs/api/bulk-delete-rls-rules) | `/api/v1/rowlevelsecurity/` |
|
||||
| `GET` | [Get a list of RLS](/developer-docs/api/get-a-list-of-rls) | `/api/v1/rowlevelsecurity/` |
|
||||
| `POST` | [Create a new RLS rule](/developer-docs/api/create-a-new-rls-rule) | `/api/v1/rowlevelsecurity/` |
|
||||
| `GET` | [Get metadata information about this API resource (rowlevelsecurity--info)](/developer-docs/api/get-metadata-information-about-this-api-resource-rowlevelsecurity-info) | `/api/v1/rowlevelsecurity/_info` |
|
||||
| `DELETE` | [Delete an RLS](/developer-docs/api/delete-an-rls) | `/api/v1/rowlevelsecurity/{pk}` |
|
||||
| `GET` | [Get an RLS](/developer-docs/api/get-an-rls) | `/api/v1/rowlevelsecurity/{pk}` |
|
||||
| `PUT` | [Update an RLS rule](/developer-docs/api/update-an-rls-rule) | `/api/v1/rowlevelsecurity/{pk}` |
|
||||
| `GET` | [Get related fields data (rowlevelsecurity-related-column-name)](/developer-docs/api/get-related-fields-data-rowlevelsecurity-related-column-name) | `/api/v1/rowlevelsecurity/related/{column_name}` |
|
||||
|
||||
</details>
|
||||
|
||||
@@ -499,8 +499,8 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Export all assets](./api/export-all-assets) | `/api/v1/assets/export/` |
|
||||
| `POST` | [Import multiple assets](./api/import-multiple-assets) | `/api/v1/assets/import/` |
|
||||
| `GET` | [Export all assets](/developer-docs/api/export-all-assets) | `/api/v1/assets/export/` |
|
||||
| `POST` | [Import multiple assets](/developer-docs/api/import-multiple-assets) | `/api/v1/assets/import/` |
|
||||
|
||||
</details>
|
||||
|
||||
@@ -509,7 +509,7 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `POST` | [Invalidate cache records and remove the database records](./api/invalidate-cache-records-and-remove-the-database-records) | `/api/v1/cachekey/invalidate` |
|
||||
| `POST` | [Invalidate cache records and remove the database records](/developer-docs/api/invalidate-cache-records-and-remove-the-database-records) | `/api/v1/cachekey/invalidate` |
|
||||
|
||||
</details>
|
||||
|
||||
@@ -518,10 +518,10 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Get a list of logs](./api/get-a-list-of-logs) | `/api/v1/log/` |
|
||||
| `POST` | [Create log](./api/create-log) | `/api/v1/log/` |
|
||||
| `GET` | [Get a log detail information](./api/get-a-log-detail-information) | `/api/v1/log/{pk}` |
|
||||
| `GET` | [Get recent activity data for a user](./api/get-recent-activity-data-for-a-user) | `/api/v1/log/recent_activity/` |
|
||||
| `GET` | [Get a list of logs](/developer-docs/api/get-a-list-of-logs) | `/api/v1/log/` |
|
||||
| `POST` | [Create log](/developer-docs/api/create-log) | `/api/v1/log/` |
|
||||
| `GET` | [Get a log detail information](/developer-docs/api/get-a-log-detail-information) | `/api/v1/log/{pk}` |
|
||||
| `GET` | [Get recent activity data for a user](/developer-docs/api/get-recent-activity-data-for-a-user) | `/api/v1/log/recent_activity/` |
|
||||
|
||||
</details>
|
||||
|
||||
@@ -532,8 +532,8 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Get the user object](./api/get-the-user-object) | `/api/v1/me/` |
|
||||
| `GET` | [Get the user roles](./api/get-the-user-roles) | `/api/v1/me/roles/` |
|
||||
| `GET` | [Get the user object](/developer-docs/api/get-the-user-object) | `/api/v1/me/` |
|
||||
| `GET` | [Get the user roles](/developer-docs/api/get-the-user-roles) | `/api/v1/me/roles/` |
|
||||
|
||||
</details>
|
||||
|
||||
@@ -542,7 +542,7 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Get the user avatar](./api/get-the-user-avatar) | `/api/v1/user/{user_id}/avatar.png` |
|
||||
| `GET` | [Get the user avatar](/developer-docs/api/get-the-user-avatar) | `/api/v1/user/{user_id}/avatar.png` |
|
||||
|
||||
</details>
|
||||
|
||||
@@ -551,7 +551,7 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Get menu](./api/get-menu) | `/api/v1/menu/` |
|
||||
| `GET` | [Get menu](/developer-docs/api/get-menu) | `/api/v1/menu/` |
|
||||
|
||||
</details>
|
||||
|
||||
@@ -560,7 +560,7 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Get all available domains](./api/get-all-available-domains) | `/api/v1/available_domains/` |
|
||||
| `GET` | [Get all available domains](/developer-docs/api/get-all-available-domains) | `/api/v1/available_domains/` |
|
||||
|
||||
</details>
|
||||
|
||||
@@ -569,7 +569,7 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Read off of the Redis events stream](./api/read-off-of-the-redis-events-stream) | `/api/v1/async_event/` |
|
||||
| `GET` | [Read off of the Redis events stream](/developer-docs/api/read-off-of-the-redis-events-stream) | `/api/v1/async_event/` |
|
||||
|
||||
</details>
|
||||
|
||||
@@ -578,7 +578,29 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Get api by version openapi](./api/get-api-by-version-openapi) | `/api/{version}/_openapi` |
|
||||
| `GET` | [Get api by version openapi](/developer-docs/api/get-api-by-version-openapi) | `/api/{version}/_openapi` |
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>Themes</strong> (14 endpoints) — Manage UI themes for customizing Superset's appearance.</summary>
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `DELETE` | [Bulk delete themes](/developer-docs/api/bulk-delete-themes) | `/api/v1/theme/` |
|
||||
| `GET` | [Get a list of themes](/developer-docs/api/get-a-list-of-themes) | `/api/v1/theme/` |
|
||||
| `POST` | [Create a theme](/developer-docs/api/create-a-theme) | `/api/v1/theme/` |
|
||||
| `GET` | [Get metadata information about this API resource (theme-info)](/developer-docs/api/get-metadata-information-about-this-api-resource-theme-info) | `/api/v1/theme/_info` |
|
||||
| `DELETE` | [Delete a theme](/developer-docs/api/delete-a-theme) | `/api/v1/theme/{pk}` |
|
||||
| `GET` | [Get a theme](/developer-docs/api/get-a-theme) | `/api/v1/theme/{pk}` |
|
||||
| `PUT` | [Update a theme](/developer-docs/api/update-a-theme) | `/api/v1/theme/{pk}` |
|
||||
| `PUT` | [Set a theme as the system dark theme](/developer-docs/api/set-a-theme-as-the-system-dark-theme) | `/api/v1/theme/{pk}/set_system_dark` |
|
||||
| `PUT` | [Set a theme as the system default theme](/developer-docs/api/set-a-theme-as-the-system-default-theme) | `/api/v1/theme/{pk}/set_system_default` |
|
||||
| `GET` | [Download multiple themes as YAML files](/developer-docs/api/download-multiple-themes-as-yaml-files) | `/api/v1/theme/export/` |
|
||||
| `POST` | [Import themes from a ZIP file](/developer-docs/api/import-themes-from-a-zip-file) | `/api/v1/theme/import/` |
|
||||
| `GET` | [Get related fields data (theme-related-column-name)](/developer-docs/api/get-related-fields-data-theme-related-column-name) | `/api/v1/theme/related/{column_name}` |
|
||||
| `DELETE` | [Clear the system dark theme](/developer-docs/api/clear-the-system-dark-theme) | `/api/v1/theme/unset_system_dark` |
|
||||
| `DELETE` | [Clear the system default theme](/developer-docs/api/clear-the-system-default-theme) | `/api/v1/theme/unset_system_default` |
|
||||
|
||||
</details>
|
||||
|
||||
|
||||
@@ -33,13 +33,15 @@ The extension architecture is built on six core principles that guide all techni
|
||||
### 1. Lean Core
|
||||
|
||||
Superset's core should remain minimal, with many features delegated to extensions. Built-in features use the same APIs and extension mechanisms available to external developers. This approach:
|
||||
|
||||
- Reduces maintenance burden and complexity
|
||||
- Encourages modularity
|
||||
- Allows the community to innovate independently of the main codebase
|
||||
|
||||
### 2. Explicit Contribution Points
|
||||
|
||||
All extension points are clearly defined and documented. Extension authors know exactly where and how they can interact with the host system. Backend contributions are declared in `extension.json`. Frontend contributions are registered directly in code at module load time, giving the host clear visibility into what each extension provides:
|
||||
All extension points are clearly defined and documented. Extension authors know exactly where and how they can interact with the host system. Both backend and frontend contributions are registered directly in code — backend contributions via classes decorated with `@api` (and other decorators) imported from the auto-discovered entrypoint, frontend contributions via calls like `views.registerView` and `commands.registerCommand` executed at module load time in `index.tsx`. This gives the host clear visibility into what each extension provides:
|
||||
|
||||
- Manage the extension lifecycle
|
||||
- Provide a consistent user experience
|
||||
- Validate extension compatibility
|
||||
@@ -47,6 +49,7 @@ All extension points are clearly defined and documented. Extension authors know
|
||||
### 3. Versioned and Stable APIs
|
||||
|
||||
Public interfaces for extensions follow semantic versioning, allowing for:
|
||||
|
||||
- Safe evolution of the platform
|
||||
- Backward compatibility
|
||||
- Clear upgrade paths for extension authors
|
||||
@@ -54,6 +57,7 @@ Public interfaces for extensions follow semantic versioning, allowing for:
|
||||
### 4. Lazy Loading and Activation
|
||||
|
||||
Extensions are loaded and activated only when needed, which:
|
||||
|
||||
- Minimizes performance overhead
|
||||
- Reduces resource consumption
|
||||
- Improves startup time
|
||||
@@ -61,6 +65,7 @@ Extensions are loaded and activated only when needed, which:
|
||||
### 5. Composability and Reuse
|
||||
|
||||
The architecture encourages reusing extension points and patterns across different modules, promoting:
|
||||
|
||||
- Consistency across extensions
|
||||
- Reduced duplication
|
||||
- Shared best practices
|
||||
@@ -80,6 +85,7 @@ Two core packages provide the foundation for extension development:
|
||||
**Frontend: `@apache-superset/core`**
|
||||
|
||||
This package provides essential building blocks for frontend extensions and the host application:
|
||||
|
||||
- Shared UI components
|
||||
- Utility functions
|
||||
- APIs and hooks
|
||||
@@ -90,6 +96,7 @@ By centralizing these resources, both extensions and built-in features use the s
|
||||
**Backend: `apache-superset-core`**
|
||||
|
||||
This package exposes key classes and APIs for backend extensions:
|
||||
|
||||
- Database connectors
|
||||
- API extensions
|
||||
- Security manager customization
|
||||
@@ -102,6 +109,7 @@ It includes dependencies on critical libraries like Flask-AppBuilder and SQLAlch
|
||||
**`apache-superset-extensions-cli`**
|
||||
|
||||
The CLI provides comprehensive commands for extension development:
|
||||
|
||||
- Project scaffolding
|
||||
- Code generation
|
||||
- Building and bundling
|
||||
@@ -114,6 +122,7 @@ By standardizing these processes, the CLI ensures extensions are built consisten
|
||||
The Superset host application serves as the runtime environment for extensions:
|
||||
|
||||
**Extension Management**
|
||||
|
||||
- Exposes `/api/v1/extensions` endpoint for registration and management
|
||||
- Provides a dedicated UI for managing extensions
|
||||
- Stores extension metadata in the `extensions` database table
|
||||
@@ -121,6 +130,7 @@ The Superset host application serves as the runtime environment for extensions:
|
||||
**Extension Storage**
|
||||
|
||||
The extensions table contains:
|
||||
|
||||
- Extension name, version, and author
|
||||
- Metadata and configuration
|
||||
- Built frontend and/or backend code
|
||||
@@ -132,6 +142,7 @@ The following diagram illustrates how these components work together:
|
||||
<img width="955" height="586" alt="Extension System Architecture" src="https://github.com/user-attachments/assets/cc2a41df-55a4-48c8-b056-35f7a1e567c6" />
|
||||
|
||||
The diagram shows:
|
||||
|
||||
1. **Extension projects** depend on core packages for development
|
||||
2. **Core packages** provide APIs and type definitions
|
||||
3. **The host application** implements the APIs and manages extensions
|
||||
@@ -151,23 +162,25 @@ The architecture leverages Webpack's Module Federation to enable dynamic loading
|
||||
|
||||
Extensions configure Webpack to expose their entry points:
|
||||
|
||||
``` typescript
|
||||
new ModuleFederationPlugin({
|
||||
name: 'my_extension',
|
||||
filename: 'remoteEntry.[contenthash].js',
|
||||
exposes: {
|
||||
'./index': './src/index.tsx',
|
||||
},
|
||||
externalsType: 'window',
|
||||
externals: {
|
||||
'@apache-superset/core': 'superset',
|
||||
},
|
||||
shared: {
|
||||
react: { singleton: true },
|
||||
'react-dom': { singleton: true },
|
||||
'antd-v5': { singleton: true }
|
||||
}
|
||||
})
|
||||
```javascript
|
||||
externalsType: 'window',
|
||||
externals: {
|
||||
'@apache-superset/core': 'superset',
|
||||
},
|
||||
plugins: [
|
||||
new ModuleFederationPlugin({
|
||||
name: 'my_extension',
|
||||
filename: 'remoteEntry.[contenthash].js',
|
||||
exposes: {
|
||||
'./index': './src/index.tsx',
|
||||
},
|
||||
shared: {
|
||||
react: { singleton: true, import: false },
|
||||
'react-dom': { singleton: true, import: false },
|
||||
antd: { singleton: true, import: false },
|
||||
},
|
||||
}),
|
||||
]
|
||||
```
|
||||
|
||||
This configuration does several important things:
|
||||
@@ -195,24 +208,12 @@ Here's what happens at runtime:
|
||||
|
||||
On the Superset side, the APIs are mapped to `window.superset` during application bootstrap:
|
||||
|
||||
``` typescript
|
||||
```typescript
|
||||
import * as supersetCore from '@apache-superset/core';
|
||||
import {
|
||||
authentication,
|
||||
core,
|
||||
commands,
|
||||
extensions,
|
||||
sqlLab,
|
||||
} from 'src/extensions';
|
||||
|
||||
export default function setupExtensionsAPI() {
|
||||
window.superset = {
|
||||
...supersetCore,
|
||||
authentication,
|
||||
core,
|
||||
commands,
|
||||
extensions,
|
||||
sqlLab,
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
@@ -23,7 +23,7 @@ sidebar_label: Alert
|
||||
-->
|
||||
|
||||
import { StoryWithControls } from '../../../src/components/StorybookWrapper';
|
||||
import { Alert } from '@apache-superset/core/ui';
|
||||
import { Alert } from '@apache-superset/core/components';
|
||||
|
||||
# Alert
|
||||
|
||||
@@ -105,10 +105,10 @@ function Demo() {
|
||||
|
||||
## Usage in Extensions
|
||||
|
||||
This component is available in the `@apache-superset/core/ui` package, which is automatically available to Superset extensions.
|
||||
This component is available in the `@apache-superset/core/components` package, which is automatically available to Superset extensions.
|
||||
|
||||
```tsx
|
||||
import { Alert } from '@apache-superset/core/ui';
|
||||
import { Alert } from '@apache-superset/core/components';
|
||||
|
||||
function MyExtension() {
|
||||
return (
|
||||
|
||||
@@ -25,7 +25,7 @@ sidebar_position: 1
|
||||
|
||||
# Extension Components
|
||||
|
||||
These UI components are available to Superset extension developers through the `@apache-superset/core/ui` package. They provide a consistent look and feel with the rest of Superset and are designed to be used in extension panels, views, and other UI elements.
|
||||
These UI components are available to Superset extension developers through the `@apache-superset/core/components` package. They provide a consistent look and feel with the rest of Superset and are designed to be used in extension panels, views, and other UI elements.
|
||||
|
||||
## Available Components
|
||||
|
||||
@@ -33,10 +33,10 @@ These UI components are available to Superset extension developers through the `
|
||||
|
||||
## Usage
|
||||
|
||||
All components are exported from the `@apache-superset/core/ui` package:
|
||||
All components are exported from the `@apache-superset/core/components` package:
|
||||
|
||||
```tsx
|
||||
import { Alert } from '@apache-superset/core/ui';
|
||||
import { Alert } from '@apache-superset/core/components';
|
||||
|
||||
export function MyExtensionPanel() {
|
||||
return (
|
||||
@@ -49,7 +49,7 @@ export function MyExtensionPanel() {
|
||||
|
||||
## Adding New Components
|
||||
|
||||
Components in `@apache-superset/core/ui` are automatically documented here. To add a new extension component:
|
||||
Components in `@apache-superset/core/components` are automatically documented here. To add a new extension component:
|
||||
|
||||
1. Add the component to `superset-frontend/packages/superset-core/src/ui/components/`
|
||||
2. Export it from `superset-frontend/packages/superset-core/src/ui/components/index.ts`
|
||||
|
||||
@@ -28,7 +28,7 @@ To facilitate the development of extensions, we define a set of well-defined con
|
||||
|
||||
## Frontend
|
||||
|
||||
Frontend contribution types allow extensions to extend Superset's user interface with new views, commands, and menu items. Frontend contributions are registered directly in code from your extension's `index.tsx` entry point — they do not need to be declared in `extension.json`.
|
||||
Frontend contribution types allow extensions to extend Superset's user interface with new views, commands, and menu items. Frontend contributions are registered directly in code from your extension's `index.tsx` entry point.
|
||||
|
||||
### Views
|
||||
|
||||
@@ -68,25 +68,28 @@ commands.registerCommand(
|
||||
|
||||
### Menus
|
||||
|
||||
Extensions can contribute new menu items or context menus to the host application, providing users with additional actions and options. Each menu item specifies the target area, the command to execute, and its placement (primary, secondary, or context). Menu contribution areas are uniquely identified (e.g., `sqllab.editor` for the SQL Lab editor).
|
||||
Extensions can contribute new menu items or context menus to the host application, providing users with additional actions and options. Each menu item specifies the view and command to execute, the target area, and the location (`primary`, `secondary`, or `context`). Menu contribution areas are uniquely identified (e.g., `sqllab.editor` for the SQL Lab editor).
|
||||
|
||||
```typescript
|
||||
import { menus } from '@apache-superset/core';
|
||||
|
||||
menus.addMenuItem('sqllab.editor', {
|
||||
placement: 'primary',
|
||||
command: 'my-extension.copy-query',
|
||||
});
|
||||
menus.registerMenuItem(
|
||||
{ view: 'sqllab.editor', command: 'my-extension.copy-query' },
|
||||
'sqllab.editor',
|
||||
'primary',
|
||||
);
|
||||
|
||||
menus.addMenuItem('sqllab.editor', {
|
||||
placement: 'secondary',
|
||||
command: 'my-extension.prettify',
|
||||
});
|
||||
menus.registerMenuItem(
|
||||
{ view: 'sqllab.editor', command: 'my-extension.prettify' },
|
||||
'sqllab.editor',
|
||||
'secondary',
|
||||
);
|
||||
|
||||
menus.addMenuItem('sqllab.editor', {
|
||||
placement: 'context',
|
||||
command: 'my-extension.clear',
|
||||
});
|
||||
menus.registerMenuItem(
|
||||
{ view: 'sqllab.editor', command: 'my-extension.clear' },
|
||||
'sqllab.editor',
|
||||
'context',
|
||||
);
|
||||
```
|
||||
|
||||
### Editors
|
||||
@@ -111,24 +114,31 @@ See [Editors Extension Point](./extension-points/editors) for implementation det
|
||||
|
||||
## Backend
|
||||
|
||||
Backend contribution types allow extensions to extend Superset's server-side capabilities with new API endpoints, MCP tools, and MCP prompts.
|
||||
Backend contribution types allow extensions to extend Superset's server-side capabilities. Backend contributions are registered at startup via classes and functions imported from the auto-discovered `entrypoint.py` file.
|
||||
|
||||
### REST API Endpoints
|
||||
|
||||
Extensions can register custom REST API endpoints under the `/extensions/` namespace. This dedicated namespace prevents conflicts with built-in endpoints and provides a clear separation between core and extension functionality.
|
||||
|
||||
```python
|
||||
from superset_core.api.rest_api import RestApi, api
|
||||
from flask_appbuilder.api import expose, protect
|
||||
from flask import Response
|
||||
from flask_appbuilder.api import expose, permission_name, protect, safe
|
||||
from superset_core.rest_api.api import RestApi
|
||||
from superset_core.rest_api.decorators import api
|
||||
|
||||
@api(
|
||||
id="my_extension_api",
|
||||
name="My Extension API",
|
||||
description="Custom API endpoints for my extension"
|
||||
description="Custom API endpoints for my extension",
|
||||
)
|
||||
class MyExtensionAPI(RestApi):
|
||||
openapi_spec_tag = "My Extension"
|
||||
class_permission_name = "my_extension_api"
|
||||
|
||||
@expose("/hello", methods=("GET",))
|
||||
@protect()
|
||||
@safe
|
||||
@permission_name("read")
|
||||
def hello(self) -> Response:
|
||||
return self.response(200, result={"message": "Hello from extension!"})
|
||||
|
||||
@@ -136,7 +146,7 @@ class MyExtensionAPI(RestApi):
|
||||
from .api import MyExtensionAPI
|
||||
```
|
||||
|
||||
**Note**: The [`@api`](superset-core/src/superset_core/api/rest_api.py:59) decorator automatically detects context and generates appropriate paths:
|
||||
**Note**: The [`@api`](superset-core/src/superset_core/rest_api/decorators.py) decorator automatically detects context and generates appropriate paths:
|
||||
|
||||
- **Extension context**: `/extensions/{publisher}/{name}/` with ID prefixed as `extensions.{publisher}.{name}.{id}`
|
||||
- **Host context**: `/api/v1/` with original ID
|
||||
@@ -152,16 +162,65 @@ You can also specify a `resource_name` parameter to add an additional path segme
|
||||
@api(
|
||||
id="analytics_api",
|
||||
name="Analytics API",
|
||||
resource_name="analytics" # Adds /analytics to the path
|
||||
resource_name="analytics", # Adds /analytics to the path
|
||||
)
|
||||
class AnalyticsAPI(RestApi):
|
||||
|
||||
@expose("/insights", methods=("GET",))
|
||||
def insights(self):
|
||||
@protect()
|
||||
@safe
|
||||
@permission_name("read")
|
||||
def insights(self) -> Response:
|
||||
# This endpoint will be available at:
|
||||
# /extensions/my-org/dataset-tools/analytics/insights
|
||||
return self.response(200, result={"insights": []})
|
||||
```
|
||||
|
||||
### MCP Tools and Prompts
|
||||
### MCP Tools
|
||||
|
||||
Extensions can contribute Model Context Protocol (MCP) tools and prompts that AI agents can discover and use. See [MCP Integration](./mcp) for detailed documentation.
|
||||
Extensions can register Python functions as MCP tools that AI agents can discover and call. Tools provide executable functionality such as data processing, custom analytics, or integration with external services. Each tool specifies a unique name and an optional description that helps AI agents decide when to use it.
|
||||
|
||||
```python
|
||||
from superset_core.mcp.decorators import tool
|
||||
|
||||
@tool(
|
||||
name="my-extension.get_summary",
|
||||
description="Get a summary of recent query activity",
|
||||
tags=["analytics", "queries"],
|
||||
)
|
||||
def get_summary() -> dict:
|
||||
"""Returns a summary of recent query activity."""
|
||||
return {"status": "success", "result": {"queries_today": 42}}
|
||||
```
|
||||
|
||||
See [MCP Integration](./mcp) for implementation details.
|
||||
|
||||
### MCP Prompts
|
||||
|
||||
Extensions can register MCP prompts that provide interactive guidance and context to AI agents. Prompts help agents understand domain-specific workflows, best practices, or troubleshooting steps for your extension's use cases.
|
||||
|
||||
```python
|
||||
from superset_core.mcp.decorators import prompt
|
||||
from fastmcp import Context
|
||||
|
||||
@prompt(
|
||||
"my-extension.analysis_guide",
|
||||
title="Analysis Guide",
|
||||
description="Step-by-step guidance for data analysis workflows",
|
||||
)
|
||||
async def analysis_guide(ctx: Context) -> str:
|
||||
"""Provides guidance for data analysis workflows."""
|
||||
return """
|
||||
# Data Analysis Guide
|
||||
|
||||
Follow these steps for effective analysis:
|
||||
|
||||
1. **Explore your data** - Review available datasets and schema
|
||||
2. **Build your query** - Use SQL Lab to craft and test queries
|
||||
3. **Visualize results** - Choose the right chart type for your data
|
||||
|
||||
What would you like to analyze today?
|
||||
"""
|
||||
```
|
||||
|
||||
See [MCP Integration](./mcp) for implementation details.
|
||||
|
||||
@@ -70,8 +70,8 @@ import { someInternalFunction } from 'src/explore/components/SomeComponent';
|
||||
|
||||
```python
|
||||
# ✅ Public API - stable
|
||||
from superset_core.api.models import Database
|
||||
from superset_core.api.daos import DatabaseDAO
|
||||
from superset_core.common.models import Database
|
||||
from superset_core.common.daos import DatabaseDAO
|
||||
|
||||
# ❌ Internal code - may break without notice
|
||||
from superset.views.core import SomeInternalClass
|
||||
@@ -117,7 +117,7 @@ Extension developers should depend on and use core libraries directly:
|
||||
|
||||
**Frontend (examples):**
|
||||
- [React](https://react.dev/) - UI framework
|
||||
- [Ant Design](https://ant.design/) - UI component library (prefer Superset components from `@apache-superset/core/ui` when available to preserve visual consistency)
|
||||
- [Ant Design](https://ant.design/) - UI component library (prefer Superset components from `@apache-superset/core/components` when available to preserve visual consistency)
|
||||
- [Emotion](https://emotion.sh/) - CSS-in-JS styling
|
||||
- ...
|
||||
|
||||
|
||||
@@ -38,6 +38,12 @@ superset-extensions build: Builds extension assets.
|
||||
superset-extensions bundle: Packages the extension into a .supx file.
|
||||
|
||||
superset-extensions dev: Automatically rebuilds the extension as files change.
|
||||
|
||||
superset-extensions validate: Validates the extension structure and metadata consistency.
|
||||
|
||||
superset-extensions update: Updates derived and generated files in the extension project.
|
||||
Use --version [<version>] to update the version (prompts if no value given).
|
||||
Use --license [<license>] to update the license (prompts if no value given).
|
||||
```
|
||||
|
||||
When creating a new extension with `superset-extensions init`, the CLI generates a standardized folder structure:
|
||||
@@ -52,9 +58,10 @@ dataset-references/
|
||||
│ └── package.json
|
||||
├── backend/
|
||||
│ ├── src/
|
||||
│ │ └── superset_extensions/
|
||||
│ │ └── my_org/
|
||||
│ │ └── dataset_references/
|
||||
│ ├── tests/
|
||||
│ │ ├── api.py
|
||||
│ │ └── entrypoint.py
|
||||
│ ├── pyproject.toml
|
||||
│ └── requirements.txt
|
||||
├── dist/
|
||||
@@ -64,20 +71,20 @@ dataset-references/
|
||||
│ │ ├── remoteEntry.d7a9225d042e4ccb6354.js
|
||||
│ │ └── 900.038b20cdff6d49cfa8d9.js
|
||||
│ └── backend
|
||||
│ └── superset_extensions/
|
||||
│ └── my_org/
|
||||
│ └── dataset_references/
|
||||
│ ├── __init__.py
|
||||
│ ├── api.py
|
||||
│ └── entrypoint.py
|
||||
├── dataset-references-1.0.0.supx
|
||||
├── my-org.dataset-references-1.0.0.supx
|
||||
└── README.md
|
||||
```
|
||||
|
||||
**Note**: The extension ID (`dataset-references`) serves as the basis for all technical names:
|
||||
**Note**: With publisher `my-org` and name `dataset-references`, the technical names are:
|
||||
- Directory name: `dataset-references` (kebab-case)
|
||||
- Backend Python package: `dataset_references` (snake_case)
|
||||
- Frontend package name: `dataset-references` (kebab-case)
|
||||
- Module Federation name: `datasetReferences` (camelCase)
|
||||
- Backend Python namespace: `my_org.dataset_references`
|
||||
- Backend distribution package: `my_org-dataset_references`
|
||||
- Frontend package name: `@my-org/dataset-references` (scoped)
|
||||
- Module Federation name: `myOrg_datasetReferences` (camelCase)
|
||||
|
||||
The `extension.json` file serves as the declared metadata for the extension, containing the extension's name, version, author, description, and a list of capabilities. This file is essential for the host application to understand how to load and manage the extension.
|
||||
|
||||
@@ -108,7 +115,7 @@ The `extension.json` file contains the metadata necessary for the host applicati
|
||||
|
||||
Extensions use standardized entry point locations:
|
||||
|
||||
- **Backend**: `backend/src/superset_extensions/{publisher}/{name}/entrypoint.py`
|
||||
- **Backend**: `backend/src/{publisher}/{name}/entrypoint.py`
|
||||
- **Frontend**: `frontend/src/index.tsx`
|
||||
|
||||
### Build Configuration
|
||||
@@ -124,7 +131,7 @@ license = "Apache-2.0"
|
||||
[tool.apache_superset_extensions.build]
|
||||
# Files to include in the extension build/bundle
|
||||
include = [
|
||||
"src/superset_extensions/my_org/dataset_references/**/*.py",
|
||||
"src/my_org/dataset_references/**/*.py",
|
||||
]
|
||||
exclude = []
|
||||
```
|
||||
@@ -201,9 +208,10 @@ Backend APIs (via `apache-superset-core`) follow a similar pattern, providing ac
|
||||
Extension endpoints are registered under a dedicated `/extensions` namespace to avoid conflicting with built-in endpoints and also because they don't share the same version constraints. By grouping all extension endpoints under `/extensions`, Superset establishes a clear boundary between core and extension functionality, making it easier to manage, document, and secure both types of APIs.
|
||||
|
||||
```python
|
||||
from superset_core.api.models import Database, get_session
|
||||
from superset_core.api.daos import DatabaseDAO
|
||||
from superset_core.api.rest_api import RestApi, api
|
||||
from superset_core.common.models import Database, get_session
|
||||
from superset_core.common.daos import DatabaseDAO
|
||||
from superset_core.rest_api.api import RestApi
|
||||
from superset_core.rest_api.decorators import api
|
||||
from flask_appbuilder.api import expose, protect
|
||||
|
||||
@api(
|
||||
@@ -244,7 +252,7 @@ class DatasetReferencesAPI(RestApi):
|
||||
|
||||
### Automatic Context Detection
|
||||
|
||||
The [`@api`](superset-core/src/superset_core/api/rest_api.py:59) decorator automatically detects whether it's being used in host or extension code:
|
||||
The [`@api`](superset-core/src/superset_core/rest_api/decorators.py) decorator automatically detects whether it's being used in host or extension code:
|
||||
|
||||
- **Extension APIs**: Registered under `/extensions/{publisher}/{name}/` with IDs prefixed as `extensions.{publisher}.{name}.{id}`
|
||||
- **Host APIs**: Registered under `/api/v1/` with original IDs
|
||||
@@ -262,7 +270,7 @@ LOCAL_EXTENSIONS = [
|
||||
]
|
||||
```
|
||||
|
||||
This instructs Superset to load and serve extensions directly from disk, so you can iterate quickly. Running `superset-extensions dev` watches for file changes and rebuilds assets automatically, while the Webpack development server (started separately with `npm run dev-server`) serves updated files as soon as they're modified. This enables immediate feedback for React components, styles, and other frontend code. Changes to backend files are also detected automatically and immediately synced, ensuring that both frontend and backend updates are reflected in your development environment.
|
||||
This instructs Superset to load and serve extensions directly from disk, so you can iterate quickly. Running `superset-extensions dev` watches for file changes and rebuilds assets automatically, while the Webpack development server (started separately with `npm run start`) serves updated files as soon as they're modified. This enables immediate feedback for React components, styles, and other frontend code. Changes to backend files are also detected automatically and immediately synced, ensuring that both frontend and backend updates are reflected in your development environment.
|
||||
|
||||
Example output when running in development mode:
|
||||
|
||||
|
||||
@@ -37,31 +37,11 @@ Superset uses text editors in various places throughout the application:
|
||||
| `css` | Dashboard Properties, CSS Template Modal |
|
||||
| `markdown` | Dashboard Markdown component |
|
||||
| `yaml` | Template Params Editor |
|
||||
| `javascript` | Custom JavaScript editor contexts |
|
||||
| `python` | Custom Python editor contexts |
|
||||
| `text` | Plain text editor contexts |
|
||||
|
||||
By registering an editor provider for a language, your extension replaces the default Ace editor in **all** locations that use that language.
|
||||
|
||||
## Manifest Configuration
|
||||
|
||||
Declare editor contributions in your `extension.json` manifest:
|
||||
|
||||
```json
|
||||
{
|
||||
"name": "monaco-editor",
|
||||
"version": "1.0.0",
|
||||
"frontend": {
|
||||
"contributions": {
|
||||
"editors": [
|
||||
{
|
||||
"id": "monaco-editor.sql",
|
||||
"name": "Monaco SQL Editor",
|
||||
"languages": ["sql"],
|
||||
"description": "Monaco-based SQL editor with IntelliSense"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
By registering an editor for a language, your extension replaces the default Ace editor in **all** locations that use that language.
|
||||
|
||||
## Implementing an Editor
|
||||
|
||||
@@ -165,21 +145,22 @@ const MonacoSQLEditor = forwardRef<editors.EditorHandle, editors.EditorProps>(
|
||||
export default MonacoSQLEditor;
|
||||
```
|
||||
|
||||
### activate.ts
|
||||
### index.tsx
|
||||
|
||||
Register the editor at module load time from your extension's entry point:
|
||||
|
||||
```typescript
|
||||
import { editors } from '@apache-superset/core';
|
||||
import MonacoSQLEditor from './MonacoSQLEditor';
|
||||
|
||||
export function activate(context) {
|
||||
// Register the Monaco editor for SQL using the contribution ID from extension.json
|
||||
const disposable = editors.registerEditorProvider(
|
||||
'monaco-sql-editor.sql',
|
||||
MonacoSQLEditor,
|
||||
);
|
||||
|
||||
context.subscriptions.push(disposable);
|
||||
}
|
||||
editors.registerEditor(
|
||||
{
|
||||
id: 'my-extension.monaco-sql',
|
||||
name: 'Monaco SQL Editor',
|
||||
languages: ['sql'],
|
||||
},
|
||||
MonacoSQLEditor,
|
||||
);
|
||||
```
|
||||
|
||||
## Handling Hotkeys
|
||||
|
||||
@@ -86,132 +86,73 @@ Extensions can replace the default SQL editor with custom implementations (Monac
|
||||
|
||||
This example adds a "Data Profiler" panel to SQL Lab:
|
||||
|
||||
```json
|
||||
{
|
||||
"name": "data_profiler",
|
||||
"version": "1.0.0",
|
||||
"frontend": {
|
||||
"contributions": {
|
||||
"views": {
|
||||
"sqllab": {
|
||||
"panels": [
|
||||
{
|
||||
"id": "data_profiler.main",
|
||||
"name": "Data Profiler"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
```typescript
|
||||
import { core } from '@apache-superset/core';
|
||||
import React from 'react';
|
||||
import { views } from '@apache-superset/core';
|
||||
import DataProfilerPanel from './DataProfilerPanel';
|
||||
|
||||
export function activate(context) {
|
||||
// Register the panel view with the ID declared in extension.json
|
||||
const disposable = core.registerView('data_profiler.main', <DataProfilerPanel />);
|
||||
context.subscriptions.push(disposable);
|
||||
}
|
||||
views.registerView(
|
||||
{ id: 'my-extension.data-profiler', name: 'Data Profiler' },
|
||||
'sqllab.panels',
|
||||
() => <DataProfilerPanel />,
|
||||
);
|
||||
```
|
||||
|
||||
### Adding Actions to the Editor
|
||||
|
||||
This example adds primary, secondary, and context actions to the editor:
|
||||
|
||||
```json
|
||||
{
|
||||
"name": "query_tools",
|
||||
"version": "1.0.0",
|
||||
"frontend": {
|
||||
"contributions": {
|
||||
"commands": [
|
||||
{
|
||||
"command": "query_tools.format",
|
||||
"title": "Format Query",
|
||||
"icon": "FormatPainterOutlined"
|
||||
},
|
||||
{
|
||||
"command": "query_tools.explain",
|
||||
"title": "Explain Query"
|
||||
},
|
||||
{
|
||||
"command": "query_tools.copy_as_cte",
|
||||
"title": "Copy as CTE"
|
||||
}
|
||||
],
|
||||
"menus": {
|
||||
"sqllab": {
|
||||
"editor": {
|
||||
"primary": [
|
||||
{
|
||||
"view": "builtin.editor",
|
||||
"command": "query_tools.format"
|
||||
}
|
||||
],
|
||||
"secondary": [
|
||||
{
|
||||
"view": "builtin.editor",
|
||||
"command": "query_tools.explain"
|
||||
}
|
||||
],
|
||||
"context": [
|
||||
{
|
||||
"view": "builtin.editor",
|
||||
"command": "query_tools.copy_as_cte"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
```typescript
|
||||
import { commands, sqlLab } from '@apache-superset/core';
|
||||
import { commands, menus, sqlLab } from '@apache-superset/core';
|
||||
|
||||
export function activate(context) {
|
||||
// Register the commands declared in extension.json
|
||||
const formatCommand = commands.registerCommand(
|
||||
'query_tools.format',
|
||||
async () => {
|
||||
const tab = sqlLab.getCurrentTab();
|
||||
if (tab) {
|
||||
const editor = await tab.getEditor();
|
||||
// Format the SQL query
|
||||
}
|
||||
},
|
||||
);
|
||||
commands.registerCommand(
|
||||
{ id: 'my-extension.format', title: 'Format Query', icon: 'FormatPainterOutlined' },
|
||||
async () => {
|
||||
const tab = sqlLab.getCurrentTab();
|
||||
if (tab) {
|
||||
const editor = await tab.getEditor();
|
||||
// Format the SQL query
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
const explainCommand = commands.registerCommand(
|
||||
'query_tools.explain',
|
||||
async () => {
|
||||
const tab = sqlLab.getCurrentTab();
|
||||
if (tab) {
|
||||
const editor = await tab.getEditor();
|
||||
// Show query explanation
|
||||
}
|
||||
},
|
||||
);
|
||||
commands.registerCommand(
|
||||
{ id: 'my-extension.explain', title: 'Explain Query' },
|
||||
async () => {
|
||||
const tab = sqlLab.getCurrentTab();
|
||||
if (tab) {
|
||||
const editor = await tab.getEditor();
|
||||
// Show query explanation
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
const copyAsCteCommand = commands.registerCommand(
|
||||
'query_tools.copy_as_cte',
|
||||
async () => {
|
||||
const tab = sqlLab.getCurrentTab();
|
||||
if (tab) {
|
||||
const editor = await tab.getEditor();
|
||||
// Copy selected text as CTE
|
||||
}
|
||||
},
|
||||
);
|
||||
commands.registerCommand(
|
||||
{ id: 'my-extension.copy-as-cte', title: 'Copy as CTE' },
|
||||
async () => {
|
||||
const tab = sqlLab.getCurrentTab();
|
||||
if (tab) {
|
||||
const editor = await tab.getEditor();
|
||||
// Copy selected text as CTE
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
context.subscriptions.push(formatCommand, explainCommand, copyAsCteCommand);
|
||||
}
|
||||
menus.registerMenuItem(
|
||||
{ view: 'builtin.editor', command: 'my-extension.format' },
|
||||
'sqllab.editor',
|
||||
'primary',
|
||||
);
|
||||
menus.registerMenuItem(
|
||||
{ view: 'builtin.editor', command: 'my-extension.explain' },
|
||||
'sqllab.editor',
|
||||
'secondary',
|
||||
);
|
||||
menus.registerMenuItem(
|
||||
{ view: 'builtin.editor', command: 'my-extension.copy-as-cte' },
|
||||
'sqllab.editor',
|
||||
'context',
|
||||
);
|
||||
```
|
||||
|
||||
## Next Steps
|
||||
|
||||
@@ -61,7 +61,7 @@ Prompts provide interactive guidance and context to AI agents. They help agents
|
||||
The simplest way to create an MCP tool is using the `@tool` decorator:
|
||||
|
||||
```python
|
||||
from superset_core.api.mcp import tool
|
||||
from superset_core.mcp.decorators import tool
|
||||
|
||||
@tool
|
||||
def hello_world() -> dict:
|
||||
@@ -94,7 +94,7 @@ Here's a more comprehensive example showing best practices:
|
||||
import random
|
||||
from datetime import datetime, timezone
|
||||
from pydantic import BaseModel, Field
|
||||
from superset_core.api.mcp import tool
|
||||
from superset_core.mcp.decorators import tool
|
||||
|
||||
class RandomNumberRequest(BaseModel):
|
||||
"""Request schema for random number generation."""
|
||||
@@ -253,7 +253,7 @@ The AI agent sees your tool's:
|
||||
Create interactive prompts using the `@prompt` decorator:
|
||||
|
||||
```python
|
||||
from superset_core.api.mcp import prompt
|
||||
from superset_core.mcp.decorators import prompt
|
||||
from fastmcp import Context
|
||||
|
||||
@prompt("my_extension.workflow_guide")
|
||||
|
||||
@@ -43,7 +43,7 @@ Extensions can provide:
|
||||
|
||||
## UI Components for Extensions
|
||||
|
||||
Extension developers have access to pre-built UI components via `@apache-superset/core/ui`. Browse all available components on the [UI Components](/docs/components/) page and filter by **Extension Compatible** to see components available to extensions.
|
||||
Extension developers have access to pre-built UI components via `@apache-superset/core/components`. Browse all available components on the [UI Components](/docs/components/) page and filter by **Extension Compatible** to see components available to extensions.
|
||||
|
||||
## Next Steps
|
||||
|
||||
|
||||
@@ -64,26 +64,24 @@ Include backend? [Y/n]: Y
|
||||
```
|
||||
|
||||
**Publisher Namespaces**: Extensions use organizational namespaces similar to VS Code extensions, providing collision-safe naming across organizations:
|
||||
|
||||
- **NPM package**: `@my-org/hello-world` (scoped package for frontend distribution)
|
||||
- **Module Federation name**: `myOrg_helloWorld` (collision-safe JavaScript identifier)
|
||||
- **Backend package**: `my_org-hello_world` (collision-safe Python distribution name)
|
||||
- **Python namespace**: `superset_extensions.my_org.hello_world`
|
||||
- **Python namespace**: `my_org.hello_world`
|
||||
|
||||
This approach ensures that extensions from different organizations cannot conflict, even if they use the same technical name (e.g., both `acme.dashboard-widgets` and `corp.dashboard-widgets` can coexist).
|
||||
|
||||
This creates a complete project structure:
|
||||
|
||||
```
|
||||
my-org.hello-world/
|
||||
hello-world/
|
||||
├── extension.json # Extension metadata and configuration
|
||||
├── backend/ # Backend Python code
|
||||
│ ├── src/
|
||||
│ │ └── superset_extensions/
|
||||
│ │ └── my_org/
|
||||
│ │ ├── __init__.py
|
||||
│ │ └── hello_world/
|
||||
│ │ ├── __init__.py
|
||||
│ │ └── entrypoint.py # Backend registration
|
||||
│ │ └── my_org/
|
||||
│ │ └── hello_world/
|
||||
│ │ └── entrypoint.py # Backend registration
|
||||
│ └── pyproject.toml
|
||||
└── frontend/ # Frontend TypeScript/React code
|
||||
├── src/
|
||||
@@ -95,7 +93,7 @@ my-org.hello-world/
|
||||
|
||||
## Step 3: Configure Extension Metadata
|
||||
|
||||
The generated `extension.json` contains the extension's metadata. It is used to identify the extension and declare its backend entry points. Frontend contributions are registered directly in code (see Step 5).
|
||||
The generated `extension.json` contains the extension's metadata.
|
||||
|
||||
```json
|
||||
{
|
||||
@@ -104,10 +102,6 @@ The generated `extension.json` contains the extension's metadata. It is used to
|
||||
"displayName": "Hello World",
|
||||
"version": "0.1.0",
|
||||
"license": "Apache-2.0",
|
||||
"backend": {
|
||||
"entryPoints": ["superset_extensions.my_org.hello_world.entrypoint"],
|
||||
"files": ["backend/src/superset_extensions/my_org/hello_world/**/*.py"]
|
||||
},
|
||||
"permissions": ["can_read"]
|
||||
}
|
||||
```
|
||||
@@ -117,19 +111,19 @@ The generated `extension.json` contains the extension's metadata. It is used to
|
||||
- `publisher`: Organizational namespace for the extension
|
||||
- `name`: Technical identifier (kebab-case)
|
||||
- `displayName`: Human-readable name shown to users
|
||||
- `backend.entryPoints`: Python modules to load eagerly when the extension starts
|
||||
- `backend.files`: Glob patterns for Python source files to include in the bundle
|
||||
- `permissions`: List of permissions the extension requires
|
||||
|
||||
## Step 4: Create Backend API
|
||||
|
||||
The CLI generated a basic `backend/src/superset_extensions/my_org/hello_world/entrypoint.py`. We'll create an API endpoint.
|
||||
The CLI generated a basic `backend/src/my_org/hello_world/entrypoint.py`. We'll create an API endpoint.
|
||||
|
||||
**Create `backend/src/superset_extensions/my_org/hello_world/api.py`**
|
||||
**Create `backend/src/my_org/hello_world/api.py`**
|
||||
|
||||
```python
|
||||
from flask import Response
|
||||
from flask_appbuilder.api import expose, protect, safe
|
||||
from superset_core.api.rest_api import RestApi, api
|
||||
from superset_core.rest_api.api import RestApi
|
||||
from superset_core.rest_api.decorators import api
|
||||
|
||||
|
||||
@api(
|
||||
@@ -174,25 +168,23 @@ class HelloWorldAPI(RestApi):
|
||||
|
||||
**Key points:**
|
||||
|
||||
- Uses [`@api`](superset-core/src/superset_core/api/rest_api.py:59) decorator with automatic context detection
|
||||
- Extends `RestApi` from `superset_core.api.rest_api`
|
||||
- Uses [`@api`](superset-core/src/superset_core/rest_api/decorators.py) decorator with automatic context detection
|
||||
- Extends `RestApi` from `superset_core.rest_api.api`
|
||||
- Uses Flask-AppBuilder decorators (`@expose`, `@protect`, `@safe`)
|
||||
- Returns responses using `self.response(status_code, result=data)`
|
||||
- The endpoint will be accessible at `/extensions/my-org/hello-world/message` (automatic extension context)
|
||||
- OpenAPI docstrings are crucial - Flask-AppBuilder uses them to automatically generate interactive API documentation at `/swagger/v1`, allowing developers to explore endpoints, understand schemas, and test the API directly from the browser
|
||||
|
||||
**Update `backend/src/superset_extensions/my_org/hello_world/entrypoint.py`**
|
||||
**Update `backend/src/my_org/hello_world/entrypoint.py`**
|
||||
|
||||
Replace the generated print statement with API import to trigger registration:
|
||||
|
||||
```python
|
||||
# Importing the API class triggers the @api decorator registration
|
||||
from .api import HelloWorldAPI
|
||||
|
||||
print("Hello World extension loaded successfully!")
|
||||
from .api import HelloWorldAPI # noqa: F401
|
||||
```
|
||||
|
||||
The [`@api`](superset-core/src/superset_core/api/rest_api.py:59) decorator automatically detects extension context and registers your API with proper namespacing.
|
||||
The [`@api`](superset-core/src/superset_core/rest_api/decorators.py) decorator automatically detects extension context and registers your API with proper namespacing.
|
||||
|
||||
## Step 5: Create Frontend Component
|
||||
|
||||
@@ -236,52 +228,53 @@ The webpack configuration requires specific settings for Module Federation. Key
|
||||
**Convention**: Superset always loads extensions by requesting the `./index` module from the Module Federation container. The `exposes` entry must be exactly `'./index': './src/index.tsx'` — do not rename or add additional entries. All API registrations must be reachable from that file. See [Architecture](./architecture#module-federation) for a full explanation.
|
||||
|
||||
```javascript
|
||||
const path = require("path");
|
||||
const { ModuleFederationPlugin } = require("webpack").container;
|
||||
const packageConfig = require("./package.json");
|
||||
const path = require('path');
|
||||
const { ModuleFederationPlugin } = require('webpack').container;
|
||||
const packageConfig = require('./package');
|
||||
const extensionConfig = require('../extension.json');
|
||||
|
||||
module.exports = (env, argv) => {
|
||||
const isProd = argv.mode === "production";
|
||||
const isProd = argv.mode === 'production';
|
||||
|
||||
return {
|
||||
entry: isProd ? {} : "./src/index.tsx",
|
||||
mode: isProd ? "production" : "development",
|
||||
entry: isProd ? {} : './src/index.tsx',
|
||||
mode: isProd ? 'production' : 'development',
|
||||
devServer: {
|
||||
port: 3001,
|
||||
port: 3000,
|
||||
headers: {
|
||||
"Access-Control-Allow-Origin": "*",
|
||||
'Access-Control-Allow-Origin': '*',
|
||||
},
|
||||
},
|
||||
output: {
|
||||
filename: isProd ? undefined : "[name].[contenthash].js",
|
||||
chunkFilename: "[name].[contenthash].js",
|
||||
clean: true,
|
||||
path: path.resolve(__dirname, "dist"),
|
||||
publicPath: `/api/v1/extensions/my-org/hello-world/`,
|
||||
filename: isProd ? undefined : '[name].[contenthash].js',
|
||||
chunkFilename: '[name].[contenthash].js',
|
||||
path: path.resolve(__dirname, 'dist'),
|
||||
publicPath: `/api/v1/extensions/${extensionConfig.publisher}/${extensionConfig.name}/`,
|
||||
},
|
||||
resolve: {
|
||||
extensions: [".ts", ".tsx", ".js", ".jsx"],
|
||||
extensions: ['.ts', '.tsx', '.js', '.jsx'],
|
||||
},
|
||||
// Map @apache-superset/core imports to window.superset at runtime
|
||||
externalsType: "window",
|
||||
externalsType: 'window',
|
||||
externals: {
|
||||
"@apache-superset/core": "superset",
|
||||
'@apache-superset/core': 'superset',
|
||||
},
|
||||
module: {
|
||||
rules: [
|
||||
{
|
||||
test: /\.tsx?$/,
|
||||
use: "ts-loader",
|
||||
use: 'ts-loader',
|
||||
exclude: /node_modules/,
|
||||
},
|
||||
],
|
||||
},
|
||||
plugins: [
|
||||
new ModuleFederationPlugin({
|
||||
name: "myOrg_helloWorld",
|
||||
filename: "remoteEntry.[contenthash].js",
|
||||
name: 'myOrg_helloWorld',
|
||||
filename: 'remoteEntry.[contenthash].js',
|
||||
exposes: {
|
||||
"./index": "./src/index.tsx",
|
||||
'./index': './src/index.tsx',
|
||||
},
|
||||
shared: {
|
||||
react: {
|
||||
@@ -289,9 +282,14 @@ module.exports = (env, argv) => {
|
||||
requiredVersion: packageConfig.peerDependencies.react,
|
||||
import: false, // Use host's React, don't bundle
|
||||
},
|
||||
"react-dom": {
|
||||
'react-dom': {
|
||||
singleton: true,
|
||||
requiredVersion: packageConfig.peerDependencies["react-dom"],
|
||||
requiredVersion: packageConfig.peerDependencies['react-dom'],
|
||||
import: false,
|
||||
},
|
||||
antd: {
|
||||
singleton: true,
|
||||
requiredVersion: packageConfig.peerDependencies['antd'],
|
||||
import: false,
|
||||
},
|
||||
},
|
||||
@@ -306,8 +304,9 @@ module.exports = (env, argv) => {
|
||||
```json
|
||||
{
|
||||
"compilerOptions": {
|
||||
"baseUrl": ".",
|
||||
"moduleResolution": "node",
|
||||
"target": "es5",
|
||||
"module": "esnext",
|
||||
"moduleResolution": "node10",
|
||||
"jsx": "react",
|
||||
"strict": true,
|
||||
"esModuleInterop": true,
|
||||
@@ -332,16 +331,16 @@ const HelloWorldPanel: React.FC = () => {
|
||||
const [error, setError] = useState<string>('');
|
||||
|
||||
useEffect(() => {
|
||||
const fetchMessage = async () => {
|
||||
try {
|
||||
const csrfToken = await authentication.getCSRFToken();
|
||||
const response = await fetch('/extensions/my-org/hello-world/message', {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'X-CSRFToken': csrfToken!,
|
||||
},
|
||||
});
|
||||
const fetchMessage = async () => {
|
||||
try {
|
||||
const csrfToken = await authentication.getCSRFToken();
|
||||
const response = await fetch('/extensions/my-org/hello-world/message', {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'X-CSRFToken': csrfToken!,
|
||||
},
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Server returned ${response.status}`);
|
||||
@@ -496,8 +495,8 @@ Superset will extract and validate the extension metadata, load the assets, regi
|
||||
|
||||
Here's what happens when your extension loads:
|
||||
|
||||
1. **Superset starts**: Reads `extension.json` and loads the backend entrypoint
|
||||
2. **Backend registration**: `entrypoint.py` imports your API class, triggering the [`@api`](superset-core/src/superset_core/api/rest_api.py:59) decorator to register it automatically
|
||||
1. **Superset starts**: Reads `manifest.json` from the `.supx` bundle and loads the backend entrypoint
|
||||
2. **Backend registration**: `entrypoint.py` imports your API class, triggering the [`@api`](superset-core/src/superset_core/rest_api/decorators.py) decorator to register it automatically
|
||||
3. **Frontend loads**: When SQL Lab opens, Superset fetches the remote entry file
|
||||
4. **Module Federation**: Webpack loads your extension module and resolves `@apache-superset/core` to `window.superset`
|
||||
5. **Registration**: The module executes at load time, calling `views.registerView` to register your panel
|
||||
|
||||
@@ -30,15 +30,15 @@ This page serves as a registry of community-created Superset extensions. These e
|
||||
|
||||
| Name | Description | Author | Preview |
|
||||
| ------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| [Extensions API Explorer](https://github.com/michael-s-molina/superset-extensions/tree/main/api_explorer) | A SQL Lab panel that demonstrates the Extensions API by providing an interactive explorer for testing commands like getTabs, getCurrentTab, and getDatabases. Useful for extension developers to understand and experiment with the available APIs. | Michael S. Molina | <a href="/img/extensions/api-explorer.png" target="_blank"><img src="/img/extensions/api-explorer.png" alt="Extensions API Explorer" width="120" /></a> |
|
||||
| [Extensions API Explorer](https://github.com/michael-s-molina/superset-extensions/tree/main/api-explorer) | A SQL Lab panel that demonstrates the Extensions API by providing an interactive explorer for testing commands like getTabs, getCurrentTab, and getDatabases. Useful for extension developers to understand and experiment with the available APIs. | Michael S. Molina | <a href="/img/extensions/api-explorer.png" target="_blank"><img src="/img/extensions/api-explorer.png" alt="Extensions API Explorer" width="120" /></a> |
|
||||
| [SQL Query Flow Visualizer](https://github.com/msyavuz/superset-sql-visualizer) | A SQL Lab panel that transforms SQL queries into interactive flow diagrams, helping developers and analysts understand query execution paths and data relationships. | Mehmet Salih Yavuz | <a href="/img/extensions/sql-flow-visualizer.png" target="_blank"><img src="/img/extensions/sql-flow-visualizer.png" alt="SQL Flow Visualizer" width="120" /></a> |
|
||||
| [SQL Lab Export to Google Sheets](https://github.com/michael-s-molina/superset-extensions/tree/main/sqllab_gsheets) | A Superset extension that allows users to export SQL Lab query results directly to Google Sheets. | Michael S. Molina | <a href="/img/extensions/gsheets-export.png" target="_blank"><img src="/img/extensions/gsheets-export.png" alt="SQL Lab Export to Google Sheets" width="120" /></a> |
|
||||
| [SQL Lab Export to Google Sheets](https://github.com/michael-s-molina/superset-extensions/tree/main/sqllab-gsheets) | A Superset extension that allows users to export SQL Lab query results directly to Google Sheets. | Michael S. Molina | <a href="/img/extensions/gsheets-export.png" target="_blank"><img src="/img/extensions/gsheets-export.png" alt="SQL Lab Export to Google Sheets" width="120" /></a> |
|
||||
| [SQL Lab Export to Parquet](https://github.com/rusackas/superset-extensions/tree/main/sqllab_parquet) | Export SQL Lab query results directly to Apache Parquet format with Snappy compression. | Evan Rusackas | <a href="/img/extensions/parquet-export.png" target="_blank"><img src="/img/extensions/parquet-export.png" alt="SQL Lab Export to Parquet" width="120" /></a> |
|
||||
| [SQL Lab Query Comparison](https://github.com/michael-s-molina/superset-extensions/tree/main/query_comparison) | A SQL Lab extension that enables side-by-side comparison of query results across different tabs, with GitHub-style diff visualization showing added/removed rows and columns. | Michael S. Molina | <a href="/img/extensions/query-comparison.png" target="_blank"><img src="/img/extensions/query-comparison.png" alt="Query Comparison" width="120" /></a> |
|
||||
| [SQL Lab Result Stats](https://github.com/michael-s-molina/superset-extensions/tree/main/result_stats) | A SQL Lab extension that automatically computes statistics for query results, providing type-aware analysis including numeric metrics (min, max, mean, median, std dev), string analysis (length, empty counts), and date range information. | Michael S. Molina | <a href="/img/extensions/result-stats.png" target="_blank"><img src="/img/extensions/result-stats.png" alt="Result Stats" width="120" /></a> |
|
||||
| [SQL Snippets](https://github.com/michael-s-molina/superset-extensions/tree/main/sql_snippets) | A SQL Lab extension that provides reusable SQL code snippets, enabling quick insertion of commonly used code blocks such as license headers, author information, and frequently used SQL patterns. | Michael S. Molina | <a href="/img/extensions/sql-snippets.png" target="_blank"><img src="/img/extensions/sql-snippets.png" alt="SQL Snippets" width="120" /></a> |
|
||||
| [SQL Lab Query Estimator](https://github.com/michael-s-molina/superset-extensions/tree/main/query_estimator) | A SQL Lab panel that analyzes query execution plans to estimate resource impact, detect performance issues like Cartesian products and high-cost operations, and visualize the query plan tree. | Michael S. Molina | <a href="/img/extensions/query-estimator.png" target="_blank"><img src="/img/extensions/query-estimator.png" alt="Query Estimator" width="120" /></a> |
|
||||
| [Editors Bundle](https://github.com/michael-s-molina/superset-extensions/tree/main/editors_bundle) | A Superset extension that demonstrates how to provide custom code editors for different languages. This extension showcases the editor contribution system by registering alternative editors that can replace Superset's default Ace editor. | Michael S. Molina | <a href="/img/extensions/editors-bundle.png" target="_blank"><img src="/img/extensions/editors-bundle.png" alt="Editors Bundle" width="120" /></a> |
|
||||
| [SQL Lab Query Comparison](https://github.com/michael-s-molina/superset-extensions/tree/main/query-comparison) | A SQL Lab extension that enables side-by-side comparison of query results across different tabs, with GitHub-style diff visualization showing added/removed rows and columns. | Michael S. Molina | <a href="/img/extensions/query-comparison.png" target="_blank"><img src="/img/extensions/query-comparison.png" alt="Query Comparison" width="120" /></a> |
|
||||
| [SQL Lab Result Stats](https://github.com/michael-s-molina/superset-extensions/tree/main/result-stats) | A SQL Lab extension that automatically computes statistics for query results, providing type-aware analysis including numeric metrics (min, max, mean, median, std dev), string analysis (length, empty counts), and date range information. | Michael S. Molina | <a href="/img/extensions/result-stats.png" target="_blank"><img src="/img/extensions/result-stats.png" alt="Result Stats" width="120" /></a> |
|
||||
| [Editor Snippets](https://github.com/michael-s-molina/superset-extensions/tree/main/editor-snippets) | A SQL Lab extension for managing and inserting reusable code snippets into the editor, with server-side persistence per user. | Michael S. Molina | <a href="/img/extensions/editor-snippets.png" target="_blank"><img src="/img/extensions/editor-snippets.png" alt="Editor Snippets" width="120" /></a> |
|
||||
| [SQL Lab Query Estimator](https://github.com/michael-s-molina/superset-extensions/tree/main/query-estimator) | A SQL Lab panel that analyzes query execution plans to estimate resource impact, detect performance issues like Cartesian products and high-cost operations, and visualize the query plan tree. | Michael S. Molina | <a href="/img/extensions/query-estimator.png" target="_blank"><img src="/img/extensions/query-estimator.png" alt="Query Estimator" width="120" /></a> |
|
||||
| [Editors Bundle](https://github.com/michael-s-molina/superset-extensions/tree/main/editors-bundle) | A Superset extension that demonstrates how to provide custom code editors for different languages. This extension showcases the editor contribution system by registering alternative editors that can replace Superset's default Ace editor. | Michael S. Molina | <a href="/img/extensions/editors-bundle.png" target="_blank"><img src="/img/extensions/editors-bundle.png" alt="Editors Bundle" width="120" /></a> |
|
||||
|
||||
## How to Add Your Extension
|
||||
|
||||
|
||||
@@ -50,7 +50,7 @@ When GTF is considered stable, it will replace legacy Celery tasks for built-in
|
||||
### Define a Task
|
||||
|
||||
```python
|
||||
from superset_core.api.tasks import task, get_context
|
||||
from superset_core.tasks.decorators import task, get_context
|
||||
|
||||
@task
|
||||
def process_data(dataset_id: int) -> None:
|
||||
@@ -245,7 +245,8 @@ Always implement an abort handler for long-running tasks. This allows users to c
|
||||
Set a timeout to automatically abort tasks that run too long:
|
||||
|
||||
```python
|
||||
from superset_core.api.tasks import task, get_context, TaskOptions
|
||||
from superset_core.tasks.decorators import task, get_context
|
||||
from superset_core.tasks.types import TaskOptions
|
||||
|
||||
# Set default timeout in decorator
|
||||
@task(timeout=300) # 5 minutes
|
||||
@@ -299,7 +300,7 @@ Timeouts require an abort handler to be effective. Without one, the timeout trig
|
||||
Use `task_key` to prevent duplicate task execution:
|
||||
|
||||
```python
|
||||
from superset_core.api.tasks import TaskOptions
|
||||
from superset_core.tasks.types import TaskOptions
|
||||
|
||||
# Without key - creates new task each time (random UUID)
|
||||
task1 = my_task.schedule(x=1)
|
||||
@@ -331,7 +332,8 @@ print(task2.status) # "success" (terminal status)
|
||||
## Task Scopes
|
||||
|
||||
```python
|
||||
from superset_core.api.tasks import task, TaskScope
|
||||
from superset_core.tasks.decorators import task
|
||||
from superset_core.tasks.types import TaskScope
|
||||
|
||||
@task # Private by default
|
||||
def private_task(): ...
|
||||
|
||||
@@ -63,6 +63,109 @@ pytest tests/unit_tests/
|
||||
pytest tests/integration_tests/
|
||||
```
|
||||
|
||||
## Testing Alerts & Reports with Celery and MailHog
|
||||
|
||||
The Alerts & Reports feature relies on Celery for task scheduling and execution. To test it locally, you need Redis (message broker), Celery Beat (scheduler), a Celery Worker (executor), and an SMTP server to receive email notifications.
|
||||
|
||||
### Prerequisites
|
||||
|
||||
- Redis running on `localhost:6379`
|
||||
- [MailHog](https://github.com/mailhog/MailHog) installed (a local SMTP server with a web UI for viewing caught emails)
|
||||
|
||||
### superset_config.py
|
||||
|
||||
Your `CeleryConfig` **must** include `beat_schedule`. When you define a custom `CeleryConfig` class in `superset_config.py`, it replaces the default entirely. If you omit `beat_schedule`, Celery Beat will start but never schedule any report tasks.
|
||||
|
||||
```python
|
||||
from celery.schedules import crontab
|
||||
from superset.tasks.types import ExecutorType
|
||||
|
||||
REDIS_HOST = "localhost"
|
||||
REDIS_PORT = "6379"
|
||||
|
||||
class CeleryConfig:
|
||||
broker_url = f"redis://{REDIS_HOST}:{REDIS_PORT}/0"
|
||||
result_backend = f"redis://{REDIS_HOST}:{REDIS_PORT}/0"
|
||||
broker_connection_retry_on_startup = True
|
||||
imports = (
|
||||
"superset.sql_lab",
|
||||
"superset.tasks.scheduler",
|
||||
"superset.tasks.thumbnails",
|
||||
"superset.tasks.cache",
|
||||
)
|
||||
worker_prefetch_multiplier = 10
|
||||
task_acks_late = True
|
||||
beat_schedule = {
|
||||
"reports.scheduler": {
|
||||
"task": "reports.scheduler",
|
||||
"schedule": crontab(minute="*", hour="*"),
|
||||
},
|
||||
"reports.prune_log": {
|
||||
"task": "reports.prune_log",
|
||||
"schedule": crontab(minute=0, hour=0),
|
||||
},
|
||||
}
|
||||
|
||||
CELERY_CONFIG = CeleryConfig
|
||||
|
||||
# SMTP settings pointing to MailHog
|
||||
SMTP_HOST = "localhost"
|
||||
SMTP_PORT = 1025
|
||||
SMTP_STARTTLS = False
|
||||
SMTP_SSL = False
|
||||
SMTP_USER = ""
|
||||
SMTP_PASSWORD = ""
|
||||
SMTP_MAIL_FROM = "superset@localhost"
|
||||
|
||||
# Must match where your frontend is running
|
||||
WEBDRIVER_BASEURL = "http://localhost:9000/"
|
||||
|
||||
ALERT_REPORTS_EXECUTE_AS = [ExecutorType.OWNER]
|
||||
|
||||
FEATURE_FLAGS = {
|
||||
"ALERT_REPORTS": True,
|
||||
# Recommended for better screenshot support (WebGL/DeckGL charts)
|
||||
"PLAYWRIGHT_REPORTS_AND_THUMBNAILS": True,
|
||||
}
|
||||
```
|
||||
|
||||
:::note
|
||||
Do not include `"superset.tasks.async_queries"` in `CeleryConfig.imports` unless you need Global Async Queries. That module accesses `current_app.config` at import time and will crash the worker with a "Working outside of application context" error.
|
||||
:::
|
||||
|
||||
### Starting the Services
|
||||
|
||||
Start MailHog, then Celery Beat and Worker in separate terminals:
|
||||
|
||||
```bash
|
||||
# Terminal 1 - MailHog (SMTP on :1025, Web UI on :8025)
|
||||
MailHog
|
||||
|
||||
# Terminal 2 - Celery Beat (scheduler)
|
||||
celery --app=superset.tasks.celery_app:app beat --loglevel=info
|
||||
|
||||
# Terminal 3 - Celery Worker (executor)
|
||||
celery --app=superset.tasks.celery_app:app worker --concurrency=1 --loglevel=info
|
||||
```
|
||||
|
||||
Use `--concurrency=1` to limit resource usage on your dev machine.
|
||||
|
||||
### Verifying the Setup
|
||||
|
||||
1. **Beat** should log `Scheduler: Sending due task reports.scheduler (reports.scheduler)` once per minute
|
||||
2. **Worker** should log `Scheduling alert <name> eta: <timestamp>` for each active report
|
||||
3. Create a test report in **Settings > Alerts & Reports** with a `* * * * *` cron schedule
|
||||
4. Check **http://localhost:8025** (MailHog web UI) for the email within 1-2 minutes
|
||||
|
||||
### Troubleshooting
|
||||
|
||||
| Problem | Solution |
|
||||
|---|---|
|
||||
| Beat shows no output | Ensure `beat_schedule` is defined in your `CeleryConfig` and `--loglevel=info` is set |
|
||||
| "Report Schedule is still working, refusing to re-compute" | Previous executions are stuck. Reset with: `UPDATE report_schedule SET last_state = 'Not triggered' WHERE id = <id>;` |
|
||||
| Task backlog overwhelming the worker | Flush Redis: `redis-cli FLUSHDB`, then restart Beat and Worker |
|
||||
| Screenshot timeout | Ensure your frontend dev server is running and `WEBDRIVER_BASEURL` matches its URL |
|
||||
|
||||
---
|
||||
|
||||
*This documentation is under active development. Check back soon for updates!*
|
||||
|
||||
@@ -91,7 +91,7 @@ or a view.
|
||||
When working with tables, the solution would be to create a table that contains all the fields
|
||||
needed for your analysis, most likely through some scheduled batch process.
|
||||
|
||||
A view is a simple logical layer that abstracts an arbitrary SQL queries as a virtual table. This can
|
||||
A view is a simple logical layer that abstracts an arbitrary SQL query as a virtual table. This can
|
||||
allow you to join and union multiple tables and to apply some transformation using arbitrary SQL
|
||||
expressions. The limitation there is your database performance, as Superset effectively will run a
|
||||
query on top of your query (view). A good practice may be to limit yourself to joining your main
|
||||
|
||||
78
docs/docs/security/granular-export-controls.mdx
Normal file
78
docs/docs/security/granular-export-controls.mdx
Normal file
@@ -0,0 +1,78 @@
|
||||
---
|
||||
title: Granular Export Controls
|
||||
sidebar_position: 4
|
||||
---
|
||||
|
||||
# Granular Export Controls
|
||||
|
||||
Superset provides granular, permission-based controls for data export, image export, and clipboard operations. These replace the legacy `can_csv` permission with three fine-grained permissions that can be assigned independently to roles.
|
||||
|
||||
## Feature Flag
|
||||
|
||||
Granular export controls are gated behind the `GRANULAR_EXPORT_CONTROLS` feature flag. When the flag is disabled, the legacy `can_csv` permission behavior is preserved.
|
||||
|
||||
```python
|
||||
FEATURE_FLAGS = {
|
||||
"GRANULAR_EXPORT_CONTROLS": True,
|
||||
}
|
||||
```
|
||||
|
||||
## Permissions
|
||||
|
||||
| Permission | Resource | Controls |
|
||||
| -------------------- | ---------- | ---------------------------------------------------------------------- |
|
||||
| `can_export_data` | `Superset` | CSV, Excel, and JSON data exports from charts, dashboards, and SQL Lab |
|
||||
| `can_export_image` | `Superset` | Screenshot (JPEG/PNG) and PDF exports from charts and dashboards |
|
||||
| `can_copy_clipboard` | `Superset` | Copy-to-clipboard operations in SQL Lab and the Explore data pane |
|
||||
|
||||
## Default Role Assignments
|
||||
|
||||
The migration grants all three new permissions (`can_export_data`, `can_export_image`, `can_copy_clipboard`) to every role that currently has `can_csv`. This preserves existing behavior — no role loses access during the upgrade.
|
||||
|
||||
After the migration, admins can selectively revoke individual export permissions from any role to restrict access. For example, to prevent Gamma users from exporting data or images while still allowing clipboard operations, revoke `can_export_data` and `can_export_image` from the Gamma role.
|
||||
|
||||
## Configuration Steps
|
||||
|
||||
1. **Enable the feature flag** in `superset_config.py`:
|
||||
|
||||
```python
|
||||
FEATURE_FLAGS = {
|
||||
"GRANULAR_EXPORT_CONTROLS": True,
|
||||
}
|
||||
```
|
||||
|
||||
2. **Run the database migration** to register the new permissions:
|
||||
|
||||
```bash
|
||||
superset db upgrade
|
||||
```
|
||||
|
||||
3. **Initialize permissions** so roles are populated:
|
||||
|
||||
```bash
|
||||
superset init
|
||||
```
|
||||
|
||||
4. **Verify role assignments** in **Settings > List Roles**. Confirm that each role has the expected permissions from the table above.
|
||||
|
||||
5. **Customize as needed**: Grant or revoke individual export permissions on any role through the role editor.
|
||||
|
||||
## User Experience
|
||||
|
||||
When a user lacks a required export permission:
|
||||
|
||||
- **Menu items** (CSV, Excel, JSON, screenshot) appear **disabled** with an info tooltip icon explaining the restriction
|
||||
- **Buttons** (SQL Lab download, clipboard copy) appear **disabled** with a tooltip on hover
|
||||
- **API endpoints** return **403 Forbidden** when the corresponding permission is missing
|
||||
|
||||
## API Enforcement
|
||||
|
||||
The following API endpoints enforce granular export permissions when the feature flag is enabled:
|
||||
|
||||
| Endpoint | Required Permission |
|
||||
| --------------------------------------------------------- | ------------------- |
|
||||
| `GET /api/v1/chart/{id}/data/` (CSV/Excel format) | `can_export_data` |
|
||||
| `GET /api/v1/chart/{id}/cache_screenshot/` | `can_export_image` |
|
||||
| `POST /api/v1/dashboard/{id}/cache_dashboard_screenshot/` | `can_export_image` |
|
||||
| `GET /api/v1/sqllab/export/{client_id}/` | `can_export_data` |
|
||||
| `POST /api/v1/sqllab/export_streaming/` | `can_export_data` |
|
||||
245
docs/docs/using-superset/using-ai-with-superset.mdx
Normal file
245
docs/docs/using-superset/using-ai-with-superset.mdx
Normal file
@@ -0,0 +1,245 @@
|
||||
---
|
||||
title: Using AI with Superset
|
||||
hide_title: true
|
||||
sidebar_position: 5
|
||||
version: 1
|
||||
---
|
||||
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
# Using AI with Superset
|
||||
|
||||
Superset supports AI assistants through the [Model Context Protocol (MCP)](https://modelcontextprotocol.io/). Connect Claude, ChatGPT, or other MCP-compatible clients to explore your data, build charts, create dashboards, and run SQL -- all through natural language.
|
||||
|
||||
:::info
|
||||
Requires Superset 5.0+. Your admin must enable and deploy the MCP server before you can connect.
|
||||
See the **[MCP Server admin guide](/admin-docs/configuration/mcp-server)** for setup instructions.
|
||||
:::
|
||||
|
||||
---
|
||||
|
||||
## What Can AI Do with Superset?
|
||||
|
||||
### Explore Your Data
|
||||
|
||||
Ask your AI assistant to browse what's available in your Superset instance:
|
||||
|
||||
- **List datasets** -- see all datasets you have access to, with filtering and search
|
||||
- **Get dataset details** -- column names, types, available metrics, and filters
|
||||
- **List charts and dashboards** -- find existing visualizations by name or keyword
|
||||
- **Get chart and dashboard details** -- understand what a chart shows, its query, and configuration
|
||||
|
||||
**Example prompts:**
|
||||
> "What datasets are available?"
|
||||
> "Show me the columns in the sales_orders dataset"
|
||||
> "Find dashboards related to revenue"
|
||||
|
||||
### Build Charts
|
||||
|
||||
Describe the visualization you want and AI creates it for you:
|
||||
|
||||
- **Create charts from natural language** -- describe what you want to see and AI picks the right chart type, metrics, and dimensions
|
||||
- **Preview before saving** -- AI generates a preview so you can review before committing
|
||||
- **Modify existing charts** -- update filters, change chart types, add metrics
|
||||
- **Get Explore links** -- open any chart in Superset's Explore view for further refinement
|
||||
|
||||
**Example prompts:**
|
||||
> "Create a bar chart showing monthly revenue by region from the sales dataset"
|
||||
> "Update chart 42 to use a line chart instead"
|
||||
> "Give me a link to explore this chart further"
|
||||
|
||||
### Create Dashboards
|
||||
|
||||
Build dashboards from a collection of charts:
|
||||
|
||||
- **Generate dashboards** -- create a new dashboard with a set of charts, automatically laid out
|
||||
- **Add charts to existing dashboards** -- place a chart on an existing dashboard with automatic positioning
|
||||
|
||||
**Example prompts:**
|
||||
> "Create a dashboard called 'Q4 Sales Overview' with charts 10, 15, and 22"
|
||||
> "Add the revenue trend chart to the executive dashboard"
|
||||
|
||||
### Run SQL Queries
|
||||
|
||||
Execute SQL directly through your AI assistant:
|
||||
|
||||
- **Run queries** -- execute SQL with full Superset RBAC enforcement (you can only query data your roles allow)
|
||||
- **Open SQL Lab** -- get a link to SQL Lab pre-populated with a query, ready to run and explore
|
||||
|
||||
**Example prompts:**
|
||||
> "Run this query: SELECT region, SUM(revenue) FROM sales GROUP BY region"
|
||||
> "Open SQL Lab with a query to show the top 10 customers by order count"
|
||||
|
||||
### Analyze Chart Data
|
||||
|
||||
Pull the raw data behind any chart:
|
||||
|
||||
- **Get chart data** -- retrieve the data a chart displays, with support for JSON, CSV, and Excel export formats
|
||||
- **Inspect results** -- useful for verifying what a visualization shows or feeding data into other tools
|
||||
|
||||
**Example prompts:**
|
||||
> "Get the data behind chart 42"
|
||||
> "Export chart 15 data as CSV"
|
||||
|
||||
### Check Instance Status
|
||||
|
||||
- **Health check** -- verify your Superset instance is up and the MCP connection is working
|
||||
- **Instance info** -- get high-level statistics about your Superset instance (number of datasets, charts, dashboards)
|
||||
|
||||
**Example prompts:**
|
||||
> "Is Superset healthy?"
|
||||
> "How many dashboards are in this instance?"
|
||||
|
||||
---
|
||||
|
||||
## Connecting Your AI Client
|
||||
|
||||
Once your admin has deployed the MCP server, connect your AI client using the instructions below.
|
||||
|
||||
### Claude Desktop
|
||||
|
||||
Edit your Claude Desktop config file:
|
||||
|
||||
- **macOS**: `~/Library/Application Support/Claude/claude_desktop_config.json`
|
||||
- **Windows**: `%APPDATA%\Claude\claude_desktop_config.json`
|
||||
- **Linux**: `~/.config/Claude/claude_desktop_config.json`
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"superset": {
|
||||
"url": "http://localhost:5008/mcp"
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Restart Claude Desktop. The hammer icon in the chat bar confirms the connection.
|
||||
|
||||
If your admin has enabled JWT authentication, you may need to include a token:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"superset": {
|
||||
"command": "npx",
|
||||
"args": [
|
||||
"-y",
|
||||
"mcp-remote@latest",
|
||||
"http://your-superset-host:5008/mcp",
|
||||
"--header",
|
||||
"Authorization: Bearer YOUR_TOKEN"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Claude Code (CLI)
|
||||
|
||||
Add to your project's `.mcp.json`:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"superset": {
|
||||
"type": "url",
|
||||
"url": "http://localhost:5008/mcp"
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### ChatGPT
|
||||
|
||||
1. Click your profile icon > **Settings** > **Apps and Connectors**
|
||||
2. Enable **Developer Mode** in Advanced Settings
|
||||
3. In the chat composer, press **+** > **Add sources** > **App** > **Connect more** > **Create app**
|
||||
4. Enter a name and your MCP server URL
|
||||
5. Click **I understand and continue**
|
||||
|
||||
:::info
|
||||
ChatGPT MCP connectors require a Pro, Team, Enterprise, or Edu plan.
|
||||
:::
|
||||
|
||||
Ask your admin for the MCP server URL and any authentication tokens you need.
|
||||
|
||||
---
|
||||
|
||||
## Tips for Best Results
|
||||
|
||||
- **Be specific** -- "Create a bar chart of monthly revenue by region from the sales dataset" works better than "Make me a chart"
|
||||
- **Start with exploration** -- ask what datasets and charts exist before creating new ones
|
||||
- **Review AI-generated content** -- always check chart configurations and SQL before saving or sharing
|
||||
- **Use Explore for refinement** -- ask AI for an Explore link, then fine-tune interactively in the Superset UI
|
||||
- **Check permissions if you get errors** -- AI respects Superset's RBAC, so you can only access data your roles allow
|
||||
|
||||
---
|
||||
|
||||
## Available Tools Reference
|
||||
|
||||
| Tool | Description |
|
||||
|------|-------------|
|
||||
| `health_check` | Verify the MCP server is running and connected |
|
||||
| `get_instance_info` | Get instance statistics (dataset, chart, dashboard counts) |
|
||||
| `get_schema` | Discover available charts, datasets, and dashboards with schema info |
|
||||
| `list_datasets` | List datasets with filtering and search |
|
||||
| `get_dataset_info` | Get dataset metadata (columns, metrics, filters) |
|
||||
| `list_charts` | List charts with filtering and search |
|
||||
| `get_chart_info` | Get chart metadata and configuration |
|
||||
| `get_chart_data` | Retrieve chart data (JSON, CSV, or Excel) |
|
||||
| `get_chart_preview` | Generate a chart preview (URL, ASCII, table, or Vega-Lite) |
|
||||
| `generate_chart` | Create a new chart from a specification |
|
||||
| `update_chart` | Modify an existing chart's configuration |
|
||||
| `update_chart_preview` | Update a cached chart preview without saving |
|
||||
| `list_dashboards` | List dashboards with filtering and search |
|
||||
| `get_dashboard_info` | Get dashboard metadata and layout |
|
||||
| `generate_dashboard` | Create a new dashboard with specified charts |
|
||||
| `add_chart_to_existing_dashboard` | Add a chart to an existing dashboard |
|
||||
| `execute_sql` | Run a SQL query with RBAC enforcement |
|
||||
| `open_sql_lab_with_context` | Open SQL Lab with a pre-populated query |
|
||||
| `generate_explore_link` | Generate an Explore URL for interactive visualization |
|
||||
|
||||
---
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### "Connection refused" or "Cannot connect"
|
||||
|
||||
- Confirm the MCP server URL with your admin
|
||||
- For Claude Desktop: fully quit the app (not just close the window) and restart after config changes
|
||||
- Check that the URL path ends with `/mcp` (e.g., `http://localhost:5008/mcp`)
|
||||
|
||||
### "Permission denied" or missing data
|
||||
|
||||
- Superset's RBAC controls what you can access through AI, just like in the Superset UI
|
||||
- Ask your admin to verify your roles and permissions
|
||||
- Try accessing the same data through the Superset web UI to confirm your access
|
||||
|
||||
### "Response too large"
|
||||
|
||||
- Ask for smaller result sets: use filters, reduce `page_size`, or request specific columns
|
||||
- Example: "Show me the top 10 rows from the sales dataset" instead of "Show me all sales data"
|
||||
|
||||
### AI doesn't see Superset tools
|
||||
|
||||
- Verify the connection in your AI client (e.g., the hammer icon in Claude Desktop)
|
||||
- Ask the AI "What Superset tools are available?" to confirm the connection
|
||||
- Restart your AI client if you recently changed the configuration
|
||||
@@ -40,13 +40,13 @@
|
||||
"version:remove:components": "node scripts/manage-versions.mjs remove components"
|
||||
},
|
||||
"dependencies": {
|
||||
"@ant-design/icons": "^6.1.0",
|
||||
"@docusaurus/core": "3.9.2",
|
||||
"@docusaurus/faster": "^3.9.2",
|
||||
"@docusaurus/plugin-client-redirects": "3.9.2",
|
||||
"@docusaurus/preset-classic": "3.9.2",
|
||||
"@docusaurus/theme-live-codeblock": "^3.9.2",
|
||||
"@docusaurus/theme-mermaid": "^3.9.2",
|
||||
"@ant-design/icons": "^6.1.1",
|
||||
"@docusaurus/core": "^3.10.0",
|
||||
"@docusaurus/faster": "^3.10.0",
|
||||
"@docusaurus/plugin-client-redirects": "^3.10.0",
|
||||
"@docusaurus/preset-classic": "3.10.0",
|
||||
"@docusaurus/theme-live-codeblock": "^3.10.0",
|
||||
"@docusaurus/theme-mermaid": "^3.10.0",
|
||||
"@emotion/core": "^11.0.0",
|
||||
"@emotion/react": "^11.13.3",
|
||||
"@emotion/styled": "^11.14.1",
|
||||
@@ -55,24 +55,24 @@
|
||||
"@fontsource/inter": "^5.2.8",
|
||||
"@mdx-js/react": "^3.1.1",
|
||||
"@saucelabs/theme-github-codeblock": "^0.3.0",
|
||||
"@storybook/addon-docs": "^8.6.17",
|
||||
"@storybook/addon-docs": "^8.6.18",
|
||||
"@storybook/blocks": "^8.6.15",
|
||||
"@storybook/channels": "^8.6.17",
|
||||
"@storybook/client-logger": "^8.6.17",
|
||||
"@storybook/components": "^8.6.17",
|
||||
"@storybook/core": "^8.6.17",
|
||||
"@storybook/core-events": "^8.6.17",
|
||||
"@storybook/channels": "^8.6.18",
|
||||
"@storybook/client-logger": "^8.6.18",
|
||||
"@storybook/components": "^8.6.18",
|
||||
"@storybook/core": "^8.6.18",
|
||||
"@storybook/core-events": "^8.6.18",
|
||||
"@storybook/csf": "^0.1.13",
|
||||
"@storybook/docs-tools": "^8.6.17",
|
||||
"@storybook/preview-api": "^8.6.17",
|
||||
"@storybook/docs-tools": "^8.6.18",
|
||||
"@storybook/preview-api": "^8.6.18",
|
||||
"@storybook/theming": "^8.6.15",
|
||||
"@superset-ui/core": "^0.20.4",
|
||||
"@swc/core": "^1.15.17",
|
||||
"antd": "^6.3.1",
|
||||
"baseline-browser-mapping": "^2.10.0",
|
||||
"caniuse-lite": "^1.0.30001775",
|
||||
"docusaurus-plugin-openapi-docs": "^4.6.0",
|
||||
"docusaurus-theme-openapi-docs": "^4.6.0",
|
||||
"@swc/core": "^1.15.26",
|
||||
"antd": "^6.3.6",
|
||||
"baseline-browser-mapping": "^2.10.19",
|
||||
"caniuse-lite": "^1.0.30001788",
|
||||
"docusaurus-plugin-openapi-docs": "^5.0.1",
|
||||
"docusaurus-theme-openapi-docs": "^5.0.1",
|
||||
"js-yaml": "^4.1.1",
|
||||
"js-yaml-loader": "^1.2.2",
|
||||
"json-bigint": "^1.0.0",
|
||||
@@ -85,15 +85,15 @@
|
||||
"react-table": "^7.8.0",
|
||||
"remark-import-partial": "^0.0.2",
|
||||
"reselect": "^5.1.1",
|
||||
"storybook": "^8.6.17",
|
||||
"swagger-ui-react": "^5.32.0",
|
||||
"storybook": "^8.6.18",
|
||||
"swagger-ui-react": "^5.32.4",
|
||||
"swc-loader": "^0.2.7",
|
||||
"tinycolor2": "^1.4.2",
|
||||
"unist-util-visit": "^5.1.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@docusaurus/module-type-aliases": "^3.9.1",
|
||||
"@docusaurus/tsconfig": "^3.9.2",
|
||||
"@docusaurus/module-type-aliases": "^3.10.0",
|
||||
"@docusaurus/tsconfig": "^3.10.0",
|
||||
"@eslint/js": "^9.39.2",
|
||||
"@types/js-yaml": "^4.0.9",
|
||||
"@types/react": "^19.1.8",
|
||||
@@ -103,11 +103,11 @@
|
||||
"eslint-config-prettier": "^10.1.8",
|
||||
"eslint-plugin-prettier": "^5.5.5",
|
||||
"eslint-plugin-react": "^7.37.5",
|
||||
"globals": "^17.4.0",
|
||||
"prettier": "^3.8.1",
|
||||
"globals": "^17.5.0",
|
||||
"prettier": "^3.8.3",
|
||||
"typescript": "~5.9.3",
|
||||
"typescript-eslint": "^8.56.1",
|
||||
"webpack": "^5.105.3"
|
||||
"typescript-eslint": "^8.58.2",
|
||||
"webpack": "^5.106.2"
|
||||
},
|
||||
"browserslist": {
|
||||
"production": [
|
||||
@@ -124,7 +124,8 @@
|
||||
"resolutions": {
|
||||
"react-redux": "^9.2.0",
|
||||
"@reduxjs/toolkit": "^2.5.0",
|
||||
"baseline-browser-mapping": "^2.9.19"
|
||||
"baseline-browser-mapping": "^2.9.19",
|
||||
"webpackbar": "^7.0.0"
|
||||
},
|
||||
"packageManager": "yarn@1.22.22+sha1.ac34549e6aa8e7ead463a7407e1c7390f61a6610"
|
||||
}
|
||||
|
||||
@@ -129,6 +129,30 @@ def add_missing_schemas(spec: dict[str, Any]) -> tuple[dict[str, Any], list[str]
|
||||
}
|
||||
fixed.append("DashboardColorsConfigUpdateSchema")
|
||||
|
||||
# DashboardChartCustomizationsConfigUpdateSchema (dashboards/schemas.py)
|
||||
if "DashboardChartCustomizationsConfigUpdateSchema" not in schemas:
|
||||
schemas["DashboardChartCustomizationsConfigUpdateSchema"] = {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"deleted": {
|
||||
"type": "array",
|
||||
"items": {"type": "string"},
|
||||
"description": "List of deleted chart customization IDs.",
|
||||
},
|
||||
"modified": {
|
||||
"type": "array",
|
||||
"items": {"type": "object"},
|
||||
"description": "List of modified chart customizations.",
|
||||
},
|
||||
"reordered": {
|
||||
"type": "array",
|
||||
"items": {"type": "string"},
|
||||
"description": "List of chart customization IDs in new order.",
|
||||
},
|
||||
},
|
||||
}
|
||||
fixed.append("DashboardChartCustomizationsConfigUpdateSchema")
|
||||
|
||||
# FormatQueryPayloadSchema - based on superset/sqllab/schemas.py
|
||||
if "FormatQueryPayloadSchema" not in schemas:
|
||||
schemas["FormatQueryPayloadSchema"] = {
|
||||
@@ -295,6 +319,7 @@ TAG_DESCRIPTIONS = {
|
||||
"Security Roles": "Manage security roles and their permissions.",
|
||||
"Security Users": "Manage user accounts.",
|
||||
"Tags": "Organize assets with tags.",
|
||||
"Themes": "Manage UI themes for customizing Superset's appearance.",
|
||||
"User": "User profile and preferences.",
|
||||
}
|
||||
|
||||
|
||||
@@ -202,7 +202,7 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \\
|
||||
mdx += `| Method | Endpoint | Description |\n`;
|
||||
mdx += `|--------|----------|-------------|\n`;
|
||||
for (const ep of tagEndpoints['Security']) {
|
||||
mdx += `| \`${ep.method}\` | [${ep.summary}](./api/${ep.slug}) | \`${ep.path}\` |\n`;
|
||||
mdx += `| \`${ep.method}\` | [${ep.summary}](/developer-docs/api/${ep.slug}) | \`${ep.path}\` |\n`;
|
||||
}
|
||||
mdx += '\n';
|
||||
renderedTags.add('Security');
|
||||
@@ -229,7 +229,7 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \\
|
||||
mdx += `|--------|----------|-------------|\n`;
|
||||
|
||||
for (const ep of endpoints) {
|
||||
mdx += `| \`${ep.method}\` | [${ep.summary}](./api/${ep.slug}) | \`${ep.path}\` |\n`;
|
||||
mdx += `| \`${ep.method}\` | [${ep.summary}](/developer-docs/api/${ep.slug}) | \`${ep.path}\` |\n`;
|
||||
}
|
||||
|
||||
mdx += `\n</details>\n\n`;
|
||||
@@ -252,7 +252,7 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \\
|
||||
mdx += `|--------|----------|-------------|\n`;
|
||||
|
||||
for (const ep of endpoints) {
|
||||
mdx += `| \`${ep.method}\` | [${ep.summary}](./api/${ep.slug}) | \`${ep.path}\` |\n`;
|
||||
mdx += `| \`${ep.method}\` | [${ep.summary}](/developer-docs/api/${ep.slug}) | \`${ep.path}\` |\n`;
|
||||
}
|
||||
|
||||
mdx += `\n</details>\n\n`;
|
||||
|
||||
@@ -152,8 +152,8 @@ const SOURCES = [
|
||||
{
|
||||
name: 'Extension Components',
|
||||
path: 'packages/superset-core/src',
|
||||
importPrefix: '@apache-superset/core/ui',
|
||||
docImportPrefix: '@apache-superset/core/ui',
|
||||
importPrefix: '@apache-superset/core/components',
|
||||
docImportPrefix: '@apache-superset/core/components',
|
||||
category: 'extension',
|
||||
enabled: true,
|
||||
extensionCompatible: true,
|
||||
@@ -1155,7 +1155,7 @@ Help improve it by [editing the story file](https://github.com/apache/superset/e
|
||||
const CATEGORY_LABELS = {
|
||||
ui: { title: 'Core Components', sidebarLabel: 'Core Components', description: 'Buttons, inputs, modals, selects, and other fundamental UI elements.' },
|
||||
'design-system': { title: 'Layout Components', sidebarLabel: 'Layout Components', description: 'Grid, Layout, Table, Flex, Space, and container components for page structure.' },
|
||||
extension: { title: 'Extension Components', sidebarLabel: 'Extension Components', description: 'Components available to extension developers via @apache-superset/core/ui.' },
|
||||
extension: { title: 'Extension Components', sidebarLabel: 'Extension Components', description: 'Components available to extension developers via @apache-superset/core/components.' },
|
||||
};
|
||||
|
||||
/**
|
||||
@@ -1463,7 +1463,7 @@ function generateExtensionTypeDeclarations(extensionComponents) {
|
||||
*/
|
||||
|
||||
/**
|
||||
* Type declarations for @apache-superset/core/ui
|
||||
* Type declarations for @apache-superset/core/components
|
||||
*
|
||||
* AUTO-GENERATED by scripts/generate-superset-components.mjs
|
||||
* Do not edit manually - regenerate by running: yarn generate:superset-components
|
||||
|
||||
@@ -39,7 +39,7 @@ function getComponentRegistry() {
|
||||
// eslint-disable-next-line @typescript-eslint/no-require-imports
|
||||
const SupersetComponents = require('@superset/components');
|
||||
// eslint-disable-next-line @typescript-eslint/no-require-imports
|
||||
const CoreUI = require('@apache-superset/core/ui');
|
||||
const CoreUI = require('@apache-superset/core/components');
|
||||
|
||||
// Build component registry with antd as base fallback layer.
|
||||
// Some Superset components (e.g., Typography) use styled-components that may
|
||||
@@ -65,7 +65,7 @@ function getProviders() {
|
||||
|
||||
try {
|
||||
// eslint-disable-next-line @typescript-eslint/no-require-imports
|
||||
const { themeObject } = require('@apache-superset/core/ui');
|
||||
const { themeObject } = require('@apache-superset/core/theme');
|
||||
// eslint-disable-next-line @typescript-eslint/no-require-imports
|
||||
const { App, ConfigProvider } = require('antd');
|
||||
|
||||
|
||||
120
docs/src/theme/ApiExplorer/MethodEndpoint/index.tsx
Normal file
120
docs/src/theme/ApiExplorer/MethodEndpoint/index.tsx
Normal file
@@ -0,0 +1,120 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*
|
||||
* Swizzled from docusaurus-theme-openapi-docs to fix SSG crash.
|
||||
*
|
||||
* The original component calls useTypedSelector (Redux) at the top level,
|
||||
* which fails during static site generation because no Redux store is
|
||||
* available. This version moves the hook into a browser-only child component
|
||||
* so SSG can render the page without a store context.
|
||||
*/
|
||||
|
||||
import React from "react";
|
||||
|
||||
import BrowserOnly from "@docusaurus/BrowserOnly";
|
||||
import { useSelector } from "react-redux";
|
||||
|
||||
interface ServerVariable {
|
||||
default?: string;
|
||||
}
|
||||
|
||||
interface ServerValue {
|
||||
url: string;
|
||||
variables?: Record<string, ServerVariable>;
|
||||
}
|
||||
|
||||
interface StoreState {
|
||||
server: { value: ServerValue | null };
|
||||
}
|
||||
|
||||
function colorForMethod(method: string) {
|
||||
switch (method.toLowerCase()) {
|
||||
case "get":
|
||||
return "primary";
|
||||
case "post":
|
||||
return "success";
|
||||
case "delete":
|
||||
return "danger";
|
||||
case "put":
|
||||
return "info";
|
||||
case "patch":
|
||||
return "warning";
|
||||
case "head":
|
||||
return "secondary";
|
||||
case "event":
|
||||
return "secondary";
|
||||
default:
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
|
||||
export interface Props {
|
||||
method: string;
|
||||
path: string;
|
||||
context?: "endpoint" | "callback";
|
||||
}
|
||||
|
||||
// Inner component rendered only in the browser, where the Redux store exists.
|
||||
function ServerUrl() {
|
||||
const serverValue = useSelector((state: StoreState) => state.server.value);
|
||||
|
||||
if (serverValue && serverValue.variables) {
|
||||
let serverUrlWithVariables = serverValue.url.replace(/\/$/, "");
|
||||
Object.keys(serverValue.variables).forEach((variable) => {
|
||||
serverUrlWithVariables = serverUrlWithVariables.replace(
|
||||
`{${variable}}`,
|
||||
serverValue.variables?.[variable].default ?? ""
|
||||
);
|
||||
});
|
||||
return <>{serverUrlWithVariables}</>;
|
||||
}
|
||||
|
||||
if (serverValue && serverValue.url) {
|
||||
return <>{serverValue.url}</>;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
function MethodEndpoint({ method, path, context }: Props) {
|
||||
const renderServerUrl = () => {
|
||||
if (context === "callback") {
|
||||
return "";
|
||||
}
|
||||
return <BrowserOnly>{() => <ServerUrl />}</BrowserOnly>;
|
||||
};
|
||||
|
||||
return (
|
||||
<>
|
||||
<pre className="openapi__method-endpoint">
|
||||
<span className={"badge badge--" + colorForMethod(method)}>
|
||||
{method === "event" ? "Webhook" : method.toUpperCase()}
|
||||
</span>{" "}
|
||||
{method !== "event" && (
|
||||
<h2 className="openapi__method-endpoint-path">
|
||||
{renderServerUrl()}
|
||||
{`${path.replace(/{([a-z0-9-_]+)}/gi, ":$1")}`}
|
||||
</h2>
|
||||
)}
|
||||
</pre>
|
||||
<div className="openapi__divider" />
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
||||
export default MethodEndpoint;
|
||||
@@ -35,7 +35,7 @@ function getThemeWrapper() {
|
||||
|
||||
try {
|
||||
// eslint-disable-next-line @typescript-eslint/no-require-imports
|
||||
const { themeObject } = require('@apache-superset/core/ui');
|
||||
const { themeObject } = require('@apache-superset/core/theme');
|
||||
// eslint-disable-next-line @typescript-eslint/no-require-imports
|
||||
const { App } = require('antd');
|
||||
|
||||
|
||||
@@ -50,7 +50,7 @@ if (isBrowser) {
|
||||
// eslint-disable-next-line @typescript-eslint/no-require-imports
|
||||
const SupersetComponents = require('@superset/components');
|
||||
// eslint-disable-next-line @typescript-eslint/no-require-imports
|
||||
const { Alert } = require('@apache-superset/core/ui');
|
||||
const { Alert } = require('@apache-superset/core/components');
|
||||
|
||||
console.log('[ReactLiveScope] SupersetComponents keys:', Object.keys(SupersetComponents || {}).slice(0, 10));
|
||||
console.log('[ReactLiveScope] Has Button?', 'Button' in (SupersetComponents || {}));
|
||||
|
||||
@@ -18,7 +18,7 @@
|
||||
*/
|
||||
|
||||
/**
|
||||
* Type declarations for @apache-superset/core/ui
|
||||
* Type declarations for @apache-superset/core/components
|
||||
*
|
||||
* AUTO-GENERATED by scripts/generate-superset-components.mjs
|
||||
* Do not edit manually - regenerate by running: yarn generate:superset-components
|
||||
|
||||
@@ -156,9 +156,9 @@ export default function webpackExtendPlugin(): Plugin<void> {
|
||||
// to source so the docs build doesn't depend on pre-built lib/ artifacts.
|
||||
// More specific sub-path aliases must come first; webpack matches the
|
||||
// longest prefix.
|
||||
'@apache-superset/core/ui': path.resolve(
|
||||
'@apache-superset/core/components': path.resolve(
|
||||
__dirname,
|
||||
'../../superset-frontend/packages/superset-core/src/ui',
|
||||
'../../superset-frontend/packages/superset-core/src/components',
|
||||
),
|
||||
'@apache-superset/core/api/core': path.resolve(
|
||||
__dirname,
|
||||
|
||||
12
docs/static/feature-flags.json
vendored
12
docs/static/feature-flags.json
vendored
@@ -51,6 +51,18 @@
|
||||
"lifecycle": "development",
|
||||
"description": "Enable Superset extensions for custom functionality without modifying core"
|
||||
},
|
||||
{
|
||||
"name": "FAB_API_KEY_ENABLED",
|
||||
"default": false,
|
||||
"lifecycle": "development",
|
||||
"description": "Enable API key authentication via FAB SecurityManager When enabled, users can create/manage API keys in the User Info page"
|
||||
},
|
||||
{
|
||||
"name": "GRANULAR_EXPORT_CONTROLS",
|
||||
"default": false,
|
||||
"lifecycle": "development",
|
||||
"description": "Enable granular export controls (can_export_data, can_export_image, can_copy_clipboard) instead of the single can_csv permission"
|
||||
},
|
||||
{
|
||||
"name": "MATRIXIFY",
|
||||
"default": false,
|
||||
|
||||
BIN
docs/static/img/databases/datastore.png
vendored
Normal file
BIN
docs/static/img/databases/datastore.png
vendored
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 5.1 KiB |
|
Before Width: | Height: | Size: 358 KiB After Width: | Height: | Size: 358 KiB |
BIN
docs/static/img/logos/hifadih.png
vendored
Normal file
BIN
docs/static/img/logos/hifadih.png
vendored
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 18 KiB |
5791
docs/static/resources/openapi.json
vendored
5791
docs/static/resources/openapi.json
vendored
File diff suppressed because it is too large
Load Diff
@@ -14,10 +14,10 @@
|
||||
"paths": {
|
||||
"@superset-ui/core": ["../superset-frontend/packages/superset-ui-core/src"],
|
||||
"@superset-ui/core/*": ["../superset-frontend/packages/superset-ui-core/src/*"],
|
||||
// Types for @apache-superset/core/ui are auto-generated by scripts/generate-superset-components.mjs
|
||||
// Types for @apache-superset/core/components are auto-generated by scripts/generate-superset-components.mjs
|
||||
// Runtime resolution uses webpack alias pointing to actual source (see src/webpack.extend.ts)
|
||||
// Using /ui path matches the established pattern used throughout the Superset codebase
|
||||
"@apache-superset/core/ui": ["./src/types/apache-superset-core"],
|
||||
"@apache-superset/core/components": ["./src/types/apache-superset-core"],
|
||||
"*": ["src/*", "node_modules/*"]
|
||||
}
|
||||
},
|
||||
|
||||
@@ -109,6 +109,14 @@ SECRET_KEY = 'YOUR_OWN_RANDOM_GENERATED_SECRET_KEY'
|
||||
|
||||
You can generate a strong secure key with `openssl rand -base64 42`.
|
||||
|
||||
Alternatively, you can set the secret key using `SUPERSET_SECRET_KEY` environment variable:
|
||||
|
||||
On a Unix-based system, such as Linux or macOS, you can do so by running the following command in your terminal:
|
||||
|
||||
```bash
|
||||
export SUPERSET_SECRET_KEY=$(openssl rand -base64 42)
|
||||
```
|
||||
|
||||
:::caution Use a strong secret key
|
||||
This key will be used for securely signing session cookies and encrypting sensitive information stored in Superset's application metadata database.
|
||||
Your deployment must use a complex, unique key.
|
||||
|
||||
@@ -35,7 +35,7 @@ or a view.
|
||||
When working with tables, the solution would be to create a table that contains all the fields
|
||||
needed for your analysis, most likely through some scheduled batch process.
|
||||
|
||||
A view is a simple logical layer that abstracts an arbitrary SQL queries as a virtual table. This can
|
||||
A view is a simple logical layer that abstracts an arbitrary SQL query as a virtual table. This can
|
||||
allow you to join and union multiple tables and to apply some transformation using arbitrary SQL
|
||||
expressions. The limitation there is your database performance, as Superset effectively will run a
|
||||
query on top of your query (view). A good practice may be to limit yourself to joining your main
|
||||
|
||||
2592
docs/yarn.lock
2592
docs/yarn.lock
File diff suppressed because it is too large
Load Diff
@@ -48,7 +48,7 @@ dependencies = [
|
||||
"cryptography>=42.0.4, <47.0.0",
|
||||
"deprecation>=2.1.0, <2.2.0",
|
||||
"flask>=2.2.5, <4.0.0",
|
||||
"flask-appbuilder>=5.0.2,<6",
|
||||
"flask-appbuilder>=5.2.1, <6.0.0",
|
||||
"flask-caching>=2.1.0, <3",
|
||||
"flask-compress>=1.13, <2.0",
|
||||
"flask-talisman>=1.0.0, <2.0",
|
||||
@@ -144,7 +144,7 @@ solr = ["sqlalchemy-solr >= 0.2.0"]
|
||||
elasticsearch = ["elasticsearch-dbapi>=0.2.12, <0.3.0"]
|
||||
exasol = ["sqlalchemy-exasol >= 2.4.0, <3.0"]
|
||||
excel = ["xlrd>=1.2.0, <1.3"]
|
||||
fastmcp = ["fastmcp==2.14.3"]
|
||||
fastmcp = ["fastmcp>=3.1.0,<4.0"]
|
||||
firebird = ["sqlalchemy-firebird>=0.7.0, <0.8"]
|
||||
firebolt = ["firebolt-sqlalchemy>=1.0.0, <2"]
|
||||
gevent = ["gevent>=23.9.1"]
|
||||
@@ -183,6 +183,7 @@ risingwave = ["sqlalchemy-risingwave"]
|
||||
shillelagh = ["shillelagh[all]>=1.4.3, <2"]
|
||||
singlestore = ["sqlalchemy-singlestoredb>=1.1.1, <2"]
|
||||
snowflake = ["snowflake-sqlalchemy>=1.2.4, <2"]
|
||||
sqlite = ["syntaqlite>=0.1.0"]
|
||||
spark = [
|
||||
"pyhive[hive]>=0.6.5;python_version<'3.11'",
|
||||
"pyhive[hive_pure_sasl]>=0.7",
|
||||
@@ -226,6 +227,7 @@ development = [
|
||||
"ruff",
|
||||
"sqloxide",
|
||||
"statsd",
|
||||
"syntaqlite>=0.1.0",
|
||||
]
|
||||
|
||||
[project.urls]
|
||||
@@ -238,7 +240,7 @@ combine_as_imports = true
|
||||
include_trailing_comma = true
|
||||
line_length = 88
|
||||
known_first_party = "superset, apache-superset-core, apache-superset-extensions-cli"
|
||||
known_third_party = "alembic, apispec, backoff, celery, click, colorama, cron_descriptor, croniter, cryptography, dateutil, deprecation, flask, flask_appbuilder, flask_babel, flask_caching, flask_compress, flask_jwt_extended, flask_login, flask_migrate, flask_sqlalchemy, flask_talisman, flask_testing, flask_wtf, freezegun, geohash, geopy, holidays, humanize, isodate, jinja2, jwt, markdown, markupsafe, marshmallow, marshmallow-union, msgpack, nh3, numpy, pandas, parameterized, parsedatetime, pgsanity, polyline, prison, progress, pyarrow, sqlalchemy_bigquery, pyhive, pyparsing, pytest, pytest_mock, pytz, redis, requests, selenium, setuptools, shillelagh, simplejson, slack, sqlalchemy, sqlalchemy_utils, typing_extensions, urllib3, werkzeug, wtforms, wtforms_json, yaml"
|
||||
known_third_party = "alembic, apispec, backoff, celery, click, colorama, cron_descriptor, croniter, cryptography, dateutil, deprecation, flask, flask_appbuilder, flask_babel, flask_caching, flask_compress, flask_jwt_extended, flask_login, flask_migrate, flask_sqlalchemy, flask_talisman, flask_testing, flask_wtf, freezegun, geohash, geopy, holidays, humanize, isodate, jinja2, jwt, markdown, markupsafe, marshmallow, marshmallow-union, msgpack, nh3, numpy, pandas, parameterized, parsedatetime, pgsanity, polyline, prison, progress, pyarrow, sqlalchemy_bigquery, pyhive, pyparsing, pytest, pytest_mock, pytz, redis, requests, selenium, setuptools, shillelagh, simplejson, slack, sqlalchemy, sqlalchemy_utils, syntaqlite, typing_extensions, urllib3, werkzeug, wtforms, wtforms_json, yaml"
|
||||
multi_line_output = 3
|
||||
order_by_type = false
|
||||
|
||||
@@ -372,6 +374,7 @@ unfixable = []
|
||||
dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
|
||||
|
||||
[tool.ruff.lint.per-file-ignores]
|
||||
"superset/mcp_service/app.py" = ["S608", "E501"] # LLM instruction text: SQL examples (S608) and long lines in multiline string (E501)
|
||||
"scripts/*" = ["TID251"]
|
||||
"setup.py" = ["TID251"]
|
||||
"superset/config.py" = ["TID251"]
|
||||
|
||||
@@ -18,8 +18,8 @@
|
||||
#
|
||||
# Security: CVE-2026-21441 - decompression bomb bypass on redirects
|
||||
urllib3>=2.6.3,<3.0.0
|
||||
# Security: GHSA-87hc-h4r5-73f7 - Windows path traversal fix
|
||||
werkzeug>=3.1.5,<4.0.0
|
||||
# Security: CVE-2026-27199 - Windows device name handling in safe_join
|
||||
werkzeug>=3.1.6,<4.0.0
|
||||
# Security: CVE-2025-68146 - TOCTOU symlink vulnerability
|
||||
filelock>=3.20.3,<4.0.0
|
||||
# Security: decompression bomb fix (required by aiohttp 3.13.3)
|
||||
|
||||
@@ -54,7 +54,7 @@ certifi==2025.6.15
|
||||
# via
|
||||
# requests
|
||||
# selenium
|
||||
cffi==1.17.1
|
||||
cffi==2.0.0
|
||||
# via
|
||||
# cryptography
|
||||
# pynacl
|
||||
@@ -86,7 +86,7 @@ cron-descriptor==1.4.5
|
||||
# via apache-superset (pyproject.toml)
|
||||
croniter==6.0.0
|
||||
# via apache-superset (pyproject.toml)
|
||||
cryptography==44.0.3
|
||||
cryptography==46.0.6
|
||||
# via
|
||||
# apache-superset (pyproject.toml)
|
||||
# paramiko
|
||||
@@ -120,7 +120,7 @@ flask==2.3.3
|
||||
# flask-session
|
||||
# flask-sqlalchemy
|
||||
# flask-wtf
|
||||
flask-appbuilder==5.0.2
|
||||
flask-appbuilder==5.2.1
|
||||
# via
|
||||
# apache-superset (pyproject.toml)
|
||||
# apache-superset-core
|
||||
@@ -209,7 +209,7 @@ mako==1.3.10
|
||||
# via
|
||||
# apache-superset (pyproject.toml)
|
||||
# alembic
|
||||
markdown==3.8
|
||||
markdown==3.8.1
|
||||
# via apache-superset (pyproject.toml)
|
||||
markdown-it-py==3.0.0
|
||||
# via rich
|
||||
@@ -219,7 +219,7 @@ markupsafe==3.0.2
|
||||
# mako
|
||||
# werkzeug
|
||||
# wtforms
|
||||
marshmallow==3.26.1
|
||||
marshmallow==3.26.2
|
||||
# via
|
||||
# apache-superset (pyproject.toml)
|
||||
# flask-appbuilder
|
||||
@@ -279,7 +279,7 @@ parsedatetime==2.6
|
||||
# via apache-superset (pyproject.toml)
|
||||
pgsanity==0.2.9
|
||||
# via apache-superset (pyproject.toml)
|
||||
pillow==11.3.0
|
||||
pillow==12.1.1
|
||||
# via apache-superset (pyproject.toml)
|
||||
platformdirs==4.3.8
|
||||
# via requests-cache
|
||||
@@ -293,7 +293,7 @@ prompt-toolkit==3.0.51
|
||||
# via click-repl
|
||||
pyarrow==16.1.0
|
||||
# via apache-superset (pyproject.toml)
|
||||
pyasn1==0.6.2
|
||||
pyasn1==0.6.3
|
||||
# via
|
||||
# pyasn1-modules
|
||||
# rsa
|
||||
@@ -309,17 +309,17 @@ pydantic-core==2.33.2
|
||||
# via pydantic
|
||||
pygeohash==3.2.2
|
||||
# via apache-superset (pyproject.toml)
|
||||
pygments==2.19.1
|
||||
pygments==2.20.0
|
||||
# via rich
|
||||
pyjwt==2.10.1
|
||||
pyjwt==2.12.0
|
||||
# via
|
||||
# apache-superset (pyproject.toml)
|
||||
# flask-appbuilder
|
||||
# flask-jwt-extended
|
||||
# redis
|
||||
pynacl==1.5.0
|
||||
pynacl==1.6.2
|
||||
# via paramiko
|
||||
pyopenssl==25.1.0
|
||||
pyopenssl==25.3.0
|
||||
# via shillelagh
|
||||
pyparsing==3.2.3
|
||||
# via apache-superset (pyproject.toml)
|
||||
@@ -457,7 +457,7 @@ wcwidth==0.2.13
|
||||
# via prompt-toolkit
|
||||
websocket-client==1.8.0
|
||||
# via selenium
|
||||
werkzeug==3.1.5
|
||||
werkzeug==3.1.6
|
||||
# via
|
||||
# -r requirements/base.in
|
||||
# flask
|
||||
|
||||
@@ -10,6 +10,8 @@
|
||||
# via
|
||||
# -r requirements/development.in
|
||||
# apache-superset
|
||||
aiofile==3.9.0
|
||||
# via py-key-value-aio
|
||||
alembic==1.15.2
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
@@ -26,8 +28,10 @@ anyio==4.11.0
|
||||
# via
|
||||
# httpx
|
||||
# mcp
|
||||
# py-key-value-aio
|
||||
# sse-starlette
|
||||
# starlette
|
||||
# watchfiles
|
||||
apispec==6.6.1
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
@@ -48,7 +52,7 @@ attrs==25.3.0
|
||||
# referencing
|
||||
# requests-cache
|
||||
# trio
|
||||
authlib==1.6.5
|
||||
authlib==1.6.7
|
||||
# via fastmcp
|
||||
babel==2.17.0
|
||||
# via
|
||||
@@ -65,9 +69,7 @@ bcrypt==4.3.0
|
||||
# -c requirements/base-constraint.txt
|
||||
# paramiko
|
||||
beartype==0.22.5
|
||||
# via
|
||||
# py-key-value-aio
|
||||
# py-key-value-shared
|
||||
# via py-key-value-aio
|
||||
billiard==4.2.1
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
@@ -100,6 +102,8 @@ cachetools==6.2.1
|
||||
# -c requirements/base-constraint.txt
|
||||
# google-auth
|
||||
# py-key-value-aio
|
||||
caio==0.9.25
|
||||
# via aiofile
|
||||
cattrs==25.1.1
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
@@ -115,7 +119,7 @@ certifi==2025.6.15
|
||||
# httpx
|
||||
# requests
|
||||
# selenium
|
||||
cffi==1.17.1
|
||||
cffi==2.0.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# cryptography
|
||||
@@ -138,7 +142,6 @@ click==8.2.1
|
||||
# click-repl
|
||||
# flask
|
||||
# flask-appbuilder
|
||||
# typer
|
||||
# uvicorn
|
||||
click-didyoumean==0.3.1
|
||||
# via
|
||||
@@ -156,8 +159,6 @@ click-repl==0.3.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# celery
|
||||
cloudpickle==3.1.2
|
||||
# via pydocket
|
||||
cmdstanpy==1.1.0
|
||||
# via prophet
|
||||
colorama==0.4.6
|
||||
@@ -177,7 +178,7 @@ croniter==6.0.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# apache-superset
|
||||
cryptography==44.0.3
|
||||
cryptography==46.0.6
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# apache-superset
|
||||
@@ -206,8 +207,6 @@ deprecation==2.1.0
|
||||
# apache-superset
|
||||
dill==0.4.0
|
||||
# via pylint
|
||||
diskcache==5.6.3
|
||||
# via py-key-value-aio
|
||||
distlib==0.3.8
|
||||
# via virtualenv
|
||||
dnspython==2.7.0
|
||||
@@ -237,9 +236,7 @@ et-xmlfile==2.0.0
|
||||
# openpyxl
|
||||
exceptiongroup==1.3.0
|
||||
# via fastmcp
|
||||
fakeredis==2.32.1
|
||||
# via pydocket
|
||||
fastmcp==2.14.3
|
||||
fastmcp==3.1.0
|
||||
# via apache-superset
|
||||
filelock==3.20.3
|
||||
# via
|
||||
@@ -262,7 +259,7 @@ flask==2.3.3
|
||||
# flask-sqlalchemy
|
||||
# flask-testing
|
||||
# flask-wtf
|
||||
flask-appbuilder==5.0.2
|
||||
flask-appbuilder==5.2.1
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# apache-superset
|
||||
@@ -474,6 +471,8 @@ jsonpath-ng==1.7.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# apache-superset
|
||||
jsonref==1.1.0
|
||||
# via fastmcp
|
||||
jsonschema==4.23.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
@@ -504,14 +503,12 @@ limits==5.1.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# flask-limiter
|
||||
lupa==2.6
|
||||
# via fakeredis
|
||||
mako==1.3.10
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# alembic
|
||||
# apache-superset
|
||||
markdown==3.8
|
||||
markdown==3.8.1
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# apache-superset
|
||||
@@ -526,7 +523,7 @@ markupsafe==3.0.2
|
||||
# mako
|
||||
# werkzeug
|
||||
# wtforms
|
||||
marshmallow==3.26.1
|
||||
marshmallow==3.26.2
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# apache-superset
|
||||
@@ -603,7 +600,7 @@ openpyxl==3.1.5
|
||||
# -c requirements/base-constraint.txt
|
||||
# pandas
|
||||
opentelemetry-api==1.39.1
|
||||
# via pydocket
|
||||
# via fastmcp
|
||||
ordered-set==4.1.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
@@ -622,6 +619,7 @@ packaging==25.0
|
||||
# deprecation
|
||||
# docker
|
||||
# duckdb-engine
|
||||
# fastmcp
|
||||
# google-cloud-bigquery
|
||||
# gunicorn
|
||||
# limits
|
||||
@@ -653,13 +651,11 @@ parsedatetime==2.6
|
||||
# apache-superset
|
||||
pathable==0.4.3
|
||||
# via jsonschema-path
|
||||
pathvalidate==3.3.1
|
||||
# via py-key-value-aio
|
||||
pgsanity==0.2.9
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# apache-superset
|
||||
pillow==11.3.0
|
||||
pillow==12.1.1
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# apache-superset
|
||||
@@ -691,8 +687,6 @@ prison==0.2.1
|
||||
# flask-appbuilder
|
||||
progress==1.6
|
||||
# via apache-superset
|
||||
prometheus-client==0.23.1
|
||||
# via pydocket
|
||||
prompt-toolkit==3.0.51
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
@@ -703,7 +697,7 @@ proto-plus==1.25.0
|
||||
# via
|
||||
# google-api-core
|
||||
# google-cloud-bigquery-storage
|
||||
protobuf==4.25.5
|
||||
protobuf==4.25.8
|
||||
# via
|
||||
# google-api-core
|
||||
# google-cloud-bigquery-storage
|
||||
@@ -714,19 +708,15 @@ psutil==6.1.0
|
||||
# via apache-superset
|
||||
psycopg2-binary==2.9.9
|
||||
# via apache-superset
|
||||
py-key-value-aio==0.3.0
|
||||
# via
|
||||
# fastmcp
|
||||
# pydocket
|
||||
py-key-value-shared==0.3.0
|
||||
# via py-key-value-aio
|
||||
py-key-value-aio==0.4.4
|
||||
# via fastmcp
|
||||
pyarrow==16.1.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# apache-superset
|
||||
# db-dtypes
|
||||
# pandas-gbq
|
||||
pyasn1==0.6.2
|
||||
pyasn1==0.6.3
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# pyasn1-modules
|
||||
@@ -758,8 +748,6 @@ pydantic-settings==2.10.1
|
||||
# via mcp
|
||||
pydata-google-auth==1.9.0
|
||||
# via pandas-gbq
|
||||
pydocket==0.17.1
|
||||
# via fastmcp
|
||||
pydruid==0.6.9
|
||||
# via apache-superset
|
||||
pyfakefs==5.3.5
|
||||
@@ -768,7 +756,7 @@ pygeohash==3.2.2
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# apache-superset
|
||||
pygments==2.19.1
|
||||
pygments==2.20.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# rich
|
||||
@@ -776,7 +764,7 @@ pyhive==0.7.0
|
||||
# via apache-superset
|
||||
pyinstrument==4.4.0
|
||||
# via apache-superset
|
||||
pyjwt==2.10.1
|
||||
pyjwt==2.12.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# apache-superset
|
||||
@@ -786,11 +774,11 @@ pyjwt==2.10.1
|
||||
# redis
|
||||
pylint==3.3.7
|
||||
# via apache-superset
|
||||
pynacl==1.5.0
|
||||
pynacl==1.6.2
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# paramiko
|
||||
pyopenssl==25.1.0
|
||||
pyopenssl==25.3.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# shillelagh
|
||||
@@ -844,8 +832,6 @@ python-dotenv==1.1.0
|
||||
# apache-superset
|
||||
# fastmcp
|
||||
# pydantic-settings
|
||||
python-json-logger==4.0.0
|
||||
# via pydocket
|
||||
python-ldap==3.4.4
|
||||
# via apache-superset
|
||||
python-multipart==0.0.20
|
||||
@@ -866,15 +852,13 @@ pyyaml==6.0.2
|
||||
# -c requirements/base-constraint.txt
|
||||
# apache-superset
|
||||
# apispec
|
||||
# fastmcp
|
||||
# jsonschema-path
|
||||
# pre-commit
|
||||
redis==5.3.1
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# apache-superset
|
||||
# fakeredis
|
||||
# py-key-value-aio
|
||||
# pydocket
|
||||
referencing==0.36.2
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
@@ -910,9 +894,7 @@ rich==13.9.4
|
||||
# cyclopts
|
||||
# fastmcp
|
||||
# flask-limiter
|
||||
# pydocket
|
||||
# rich-rst
|
||||
# typer
|
||||
rich-rst==1.3.1
|
||||
# via cyclopts
|
||||
rpds-py==0.25.0
|
||||
@@ -944,8 +926,6 @@ setuptools==80.9.0
|
||||
# pydata-google-auth
|
||||
# zope-event
|
||||
# zope-interface
|
||||
shellingham==1.5.4
|
||||
# via typer
|
||||
shillelagh==1.4.3
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
@@ -973,7 +953,6 @@ sniffio==1.3.1
|
||||
sortedcontainers==2.4.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# fakeredis
|
||||
# trio
|
||||
sqlalchemy==1.4.54
|
||||
# via
|
||||
@@ -1009,14 +988,18 @@ sshtunnel==0.4.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# apache-superset
|
||||
starlette==0.48.0
|
||||
starlette==0.49.1
|
||||
# via mcp
|
||||
statsd==4.0.1
|
||||
# via apache-superset
|
||||
syntaqlite==0.1.0
|
||||
# via apache-superset
|
||||
tabulate==0.9.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# apache-superset
|
||||
tomli-w==1.2.0
|
||||
# via apache-superset-extensions-cli
|
||||
tomlkit==0.13.3
|
||||
# via pylint
|
||||
tqdm==4.67.1
|
||||
@@ -1034,8 +1017,6 @@ trio-websocket==0.12.2
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# selenium
|
||||
typer==0.20.0
|
||||
# via pydocket
|
||||
typing-extensions==4.15.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
@@ -1048,16 +1029,14 @@ typing-extensions==4.15.0
|
||||
# limits
|
||||
# mcp
|
||||
# opentelemetry-api
|
||||
# py-key-value-shared
|
||||
# py-key-value-aio
|
||||
# pydantic
|
||||
# pydantic-core
|
||||
# pydocket
|
||||
# pyopenssl
|
||||
# referencing
|
||||
# selenium
|
||||
# shillelagh
|
||||
# starlette
|
||||
# typer
|
||||
# typing-inspection
|
||||
typing-inspection==0.4.1
|
||||
# via
|
||||
@@ -1072,6 +1051,8 @@ tzdata==2025.2
|
||||
# pandas
|
||||
tzlocal==5.2
|
||||
# via trino
|
||||
uncalled-for==0.2.0
|
||||
# via fastmcp
|
||||
url-normalize==2.2.1
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
@@ -1101,6 +1082,8 @@ watchdog==6.0.0
|
||||
# -c requirements/base-constraint.txt
|
||||
# apache-superset
|
||||
# apache-superset-extensions-cli
|
||||
watchfiles==1.1.1
|
||||
# via fastmcp
|
||||
wcwidth==0.2.13
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
@@ -1111,7 +1094,7 @@ websocket-client==1.8.0
|
||||
# selenium
|
||||
websockets==15.0.1
|
||||
# via fastmcp
|
||||
werkzeug==3.1.5
|
||||
werkzeug==3.1.6
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# flask
|
||||
|
||||
@@ -31,70 +31,70 @@ The official core package for building Apache Superset backend extensions and in
|
||||
pip install apache-superset-core
|
||||
```
|
||||
|
||||
## 🏗️ Architecture
|
||||
## 🏗️ Package Structure
|
||||
|
||||
The package is organized into logical modules, each providing specific functionality:
|
||||
|
||||
- **`api`** - REST API base classes, models access, query utilities, and registration
|
||||
- **`api.models`** - Access to Superset's database models (datasets, databases, etc.)
|
||||
- **`api.query`** - Database query utilities and SQL dialect handling
|
||||
- **`api.rest_api`** - Extension API registration and management
|
||||
- **`api.types.rest_api`** - REST API base classes and type definitions
|
||||
```
|
||||
src/superset_core/
|
||||
├── common/
|
||||
├── extensions/
|
||||
├── mcp/
|
||||
├── queries/
|
||||
├── rest_api/
|
||||
├── tasks/
|
||||
└── __init__.py
|
||||
```
|
||||
|
||||
## 🚀 Quick Start
|
||||
|
||||
### Basic Extension Structure
|
||||
### Basic Extension API
|
||||
|
||||
```python
|
||||
from flask import request, Response
|
||||
from flask_appbuilder.api import expose, permission_name, protect, safe
|
||||
from superset_core.api import models, query, rest_api
|
||||
from superset_core.api.rest_api import RestApi
|
||||
from superset_core.rest_api.api import RestApi
|
||||
from superset_core.rest_api.decorators import api
|
||||
|
||||
|
||||
@api(id="dataset_references", name="Dataset References API")
|
||||
class DatasetReferencesAPI(RestApi):
|
||||
"""Example extension API demonstrating core functionality."""
|
||||
|
||||
resource_name = "dataset_references"
|
||||
openapi_spec_tag = "Dataset references"
|
||||
class_permission_name = "dataset_references"
|
||||
|
||||
@expose("/metadata", methods=("POST",))
|
||||
@protect()
|
||||
@safe
|
||||
@permission_name("read")
|
||||
def metadata(self) -> Response:
|
||||
"""Get dataset metadata for tables referenced in SQL."""
|
||||
sql: str = request.json.get("sql")
|
||||
database_id: int = request.json.get("databaseId")
|
||||
|
||||
# Access Superset's models using core APIs
|
||||
databases = models.get_databases(id=database_id)
|
||||
if not databases:
|
||||
return self.response_404()
|
||||
|
||||
database = databases[0]
|
||||
dialect = query.get_sqlglot_dialect(database)
|
||||
|
||||
# Access datasets to get owner information
|
||||
datasets = models.get_datasets()
|
||||
owners_map = {
|
||||
dataset.table_name: [
|
||||
f"{owner.first_name} {owner.last_name}"
|
||||
for owner in dataset.owners
|
||||
]
|
||||
for dataset in datasets
|
||||
}
|
||||
|
||||
# Process SQL and return dataset metadata
|
||||
return self.response(200, result=owners_map)
|
||||
|
||||
# Register the extension API
|
||||
rest_api.add_extension_api(DatasetReferencesAPI)
|
||||
# ... endpoint implementation
|
||||
```
|
||||
|
||||
## 🤝 Contributing
|
||||
### Background Tasks
|
||||
|
||||
We welcome contributions! Please see the [Developer Portal](https://superset.apache.org/developer_portal/) for details.
|
||||
```python
|
||||
from superset_core.tasks.decorators import task
|
||||
from superset_core.tasks.types import TaskScope
|
||||
|
||||
@task(name="generate_report", scope=TaskScope.SHARED)
|
||||
def generate_report(chart_id: int) -> None:
|
||||
# ... task implementation
|
||||
```
|
||||
|
||||
### MCP Tools
|
||||
|
||||
```python
|
||||
from superset_core.mcp.decorators import tool
|
||||
|
||||
@tool(name="my_tool", description="Custom business logic", tags=["extension"])
|
||||
def my_extension_tool(param: str) -> dict:
|
||||
# ... tool implementation
|
||||
```
|
||||
|
||||
### MCP Prompts
|
||||
|
||||
```python
|
||||
from superset_core.mcp.decorators import prompt
|
||||
|
||||
@prompt(name="my_prompt", title="My Prompt", description="Interactive prompt", tags={"extension"})
|
||||
async def my_prompt_handler(ctx: Context) -> str:
|
||||
# ... prompt implementation
|
||||
```
|
||||
|
||||
## 📄 License
|
||||
|
||||
@@ -102,12 +102,6 @@ Licensed under the Apache License, Version 2.0. See [LICENSE](https://github.com
|
||||
|
||||
## 🔗 Links
|
||||
|
||||
- [Apache Superset](https://superset.apache.org/)
|
||||
- [Documentation](https://superset.apache.org/docs/)
|
||||
- [Community](https://superset.apache.org/community/)
|
||||
- [GitHub Repository](https://github.com/apache/superset)
|
||||
- [Extension Development Guide](https://superset.apache.org/docs/extensions/)
|
||||
|
||||
---
|
||||
|
||||
**Note**: This package is currently in release candidate status. APIs may change before the 1.0.0 release. Please check the [changelog](CHANGELOG.md) for breaking changes between versions.
|
||||
- [Extensions Documentation](https://superset.apache.org/developer-docs/extensions/overview)
|
||||
|
||||
@@ -18,20 +18,20 @@
|
||||
|
||||
[project]
|
||||
name = "apache-superset-core"
|
||||
version = "0.0.1rc4"
|
||||
version = "0.1.0rc2"
|
||||
description = "Core Python package for building Apache Superset backend extensions and integrations"
|
||||
readme = "README.md"
|
||||
authors = [
|
||||
{ name = "Apache Software Foundation", email = "dev@superset.apache.org" },
|
||||
]
|
||||
license = { file="LICENSE.txt" }
|
||||
license = "Apache-2.0"
|
||||
license-files = ["LICENSE.txt"]
|
||||
requires-python = ">=3.10"
|
||||
keywords = ["superset", "apache", "analytics", "business-intelligence", "extensions", "visualization"]
|
||||
classifiers = [
|
||||
"Development Status :: 3 - Alpha",
|
||||
"Environment :: Web Environment",
|
||||
"Intended Audience :: Developers",
|
||||
"License :: OSI Approved :: Apache Software License",
|
||||
"Operating System :: OS Independent",
|
||||
"Programming Language :: Python :: 3",
|
||||
"Programming Language :: Python :: 3.10",
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user